author     Gavin Mak <gavinmak@google.com>  2023-03-11 06:46:20 +0000
committer  LUCI <gerrit-scoped@luci-project-accounts.iam.gserviceaccount.com>  2023-03-22 17:46:28 +0000
commit     ea2e330e43c182dc16b0111ebc69ee5a71ee4ce1 (patch)
tree       dc33ba0e56825b3e007d0589891756724725a465 /subcmds
parent     1604cf255f8c1786a23388db6d5277ac7949a24a (diff)
download   git-repo-ea2e330e43c182dc16b0111ebc69ee5a71ee4ce1.tar.gz
Format codebase with black and check formatting in CQ
Apply rules set by https://gerrit-review.googlesource.com/c/git-repo/+/362954/ across the codebase and fix any lingering errors caught
by flake8. Also check black formatting in run_tests (and CQ).
Bug: b/267675342
Change-Id: I972d77649dac351150dcfeb1cd1ad0ea2efc1956
Reviewed-on: https://gerrit-review.googlesource.com/c/git-repo/+/363474
Reviewed-by: Mike Frysinger <vapier@google.com>
Tested-by: Gavin Mak <gavinmak@google.com>
Commit-Queue: Gavin Mak <gavinmak@google.com>
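The gate described above can be approximated locally before sending a change to CQ. The exact commands wired into run_tests are not shown in this commit, so the following is only a minimal sketch assuming black and flake8 are simply run over the whole tree:

# Minimal local equivalent of the formatting gate described above.
# Assumption: run_tests/CQ invoke black and flake8 roughly like this;
# the exact flags are not part of this page.
import subprocess
import sys

def check_formatting():
    """Return 0 if black and flake8 both pass, non-zero otherwise."""
    # `black --check` reports files that would be reformatted without
    # touching them; flake8 catches the remaining lint errors.
    ret = subprocess.run(
        [sys.executable, "-m", "black", "--check", "."]
    ).returncode
    ret |= subprocess.run([sys.executable, "-m", "flake8", "."]).returncode
    return ret

if __name__ == "__main__":
    sys.exit(check_formatting())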
Diffstat (limited to 'subcmds')
-rw-r--r--  subcmds/__init__.py         50
-rw-r--r--  subcmds/abandon.py         167
-rw-r--r--  subcmds/branches.py        287
-rw-r--r--  subcmds/checkout.py         83
-rw-r--r--  subcmds/cherry_pick.py     177
-rw-r--r--  subcmds/diff.py             85
-rw-r--r--  subcmds/diffmanifests.py   374
-rw-r--r--  subcmds/download.py        304
-rw-r--r--  subcmds/forall.py          516
-rw-r--r--  subcmds/gitc_delete.py      42
-rw-r--r--  subcmds/gitc_init.py        69
-rw-r--r--  subcmds/grep.py            529
-rw-r--r--  subcmds/help.py            330
-rw-r--r--  subcmds/info.py            413
-rw-r--r--  subcmds/init.py            544
-rw-r--r--  subcmds/list.py            158
-rw-r--r--  subcmds/manifest.py        230
-rw-r--r--  subcmds/overview.py        124
-rw-r--r--  subcmds/prune.py           107
-rw-r--r--  subcmds/rebase.py          313
-rw-r--r--  subcmds/selfupdate.py       63
-rw-r--r--  subcmds/smartsync.py        18
-rw-r--r--  subcmds/stage.py           181
-rw-r--r--  subcmds/start.py           250
-rw-r--r--  subcmds/status.py          239
-rw-r--r--  subcmds/sync.py           3130
-rw-r--r--  subcmds/upload.py         1152
-rw-r--r--  subcmds/version.py          75
28 files changed, 5584 insertions, 4426 deletions
diff --git a/subcmds/__init__.py b/subcmds/__init__.py
index 051dda06..4e41afc0 100644
--- a/subcmds/__init__.py
+++ b/subcmds/__init__.py
@@ -19,31 +19,29 @@ all_commands = {}
19 | 19 | ||
20 | my_dir = os.path.dirname(__file__) | 20 | my_dir = os.path.dirname(__file__) |
21 | for py in os.listdir(my_dir): | 21 | for py in os.listdir(my_dir): |
22 | if py == '__init__.py': | 22 | if py == "__init__.py": |
23 | continue | 23 | continue |
24 | 24 | ||
25 | if py.endswith('.py'): | 25 | if py.endswith(".py"): |
26 | name = py[:-3] | 26 | name = py[:-3] |
27 | 27 | ||
28 | clsn = name.capitalize() | 28 | clsn = name.capitalize() |
29 | while clsn.find('_') > 0: | 29 | while clsn.find("_") > 0: |
30 | h = clsn.index('_') | 30 | h = clsn.index("_") |
31 | clsn = clsn[0:h] + clsn[h + 1:].capitalize() | 31 | clsn = clsn[0:h] + clsn[h + 1 :].capitalize() |
32 | 32 | ||
33 | mod = __import__(__name__, | 33 | mod = __import__(__name__, globals(), locals(), ["%s" % name]) |
34 | globals(), | 34 | mod = getattr(mod, name) |
35 | locals(), | 35 | try: |
36 | ['%s' % name]) | 36 | cmd = getattr(mod, clsn) |
37 | mod = getattr(mod, name) | 37 | except AttributeError: |
38 | try: | 38 | raise SyntaxError( |
39 | cmd = getattr(mod, clsn) | 39 | "%s/%s does not define class %s" % (__name__, py, clsn) |
40 | except AttributeError: | 40 | ) |
41 | raise SyntaxError('%s/%s does not define class %s' % ( | 41 | |
42 | __name__, py, clsn)) | 42 | name = name.replace("_", "-") |
43 | 43 | cmd.NAME = name | |
44 | name = name.replace('_', '-') | 44 | all_commands[name] = cmd |
45 | cmd.NAME = name | ||
46 | all_commands[name] = cmd | ||
47 | 45 | ||
48 | # Add 'branch' as an alias for 'branches'. | 46 | # Add 'branch' as an alias for 'branches'. |
49 | all_commands['branch'] = all_commands['branches'] | 47 | all_commands["branch"] = all_commands["branches"] |
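The loop above turns each module file under subcmds/ into a registered command: the file name yields both the expected class name and the dash-separated command name. A standalone sketch of that naming rule (the real code additionally imports the module and raises SyntaxError if the class is missing):

# Sketch of the filename -> class name -> command name rule used above.
def command_names(py):
    name = py[:-3]                       # "cherry_pick.py" -> "cherry_pick"
    clsn = name.capitalize()             # -> "Cherry_pick"
    while clsn.find("_") > 0:
        h = clsn.index("_")
        clsn = clsn[0:h] + clsn[h + 1 :].capitalize()   # -> "CherryPick"
    return clsn, name.replace("_", "-")  # ("CherryPick", "cherry-pick")

assert command_names("cherry_pick.py") == ("CherryPick", "cherry-pick")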
diff --git a/subcmds/abandon.py b/subcmds/abandon.py
index c3d2d5b7..1f687f53 100644
--- a/subcmds/abandon.py
+++ b/subcmds/abandon.py
@@ -23,9 +23,9 @@ from progress import Progress
23 | 23 | ||
24 | 24 | ||
25 | class Abandon(Command): | 25 | class Abandon(Command): |
26 | COMMON = True | 26 | COMMON = True |
27 | helpSummary = "Permanently abandon a development branch" | 27 | helpSummary = "Permanently abandon a development branch" |
28 | helpUsage = """ | 28 | helpUsage = """ |
29 | %prog [--all | <branchname>] [<project>...] | 29 | %prog [--all | <branchname>] [<project>...] |
30 | 30 | ||
31 | This subcommand permanently abandons a development branch by | 31 | This subcommand permanently abandons a development branch by |
@@ -33,83 +33,104 @@ deleting it (and all its history) from your local repository.
33 | 33 | ||
34 | It is equivalent to "git branch -D <branchname>". | 34 | It is equivalent to "git branch -D <branchname>". |
35 | """ | 35 | """ |
36 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 36 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
37 | 37 | ||
38 | def _Options(self, p): | 38 | def _Options(self, p): |
39 | p.add_option('--all', | 39 | p.add_option( |
40 | dest='all', action='store_true', | 40 | "--all", |
41 | help='delete all branches in all projects') | 41 | dest="all", |
42 | action="store_true", | ||
43 | help="delete all branches in all projects", | ||
44 | ) | ||
42 | 45 | ||
43 | def ValidateOptions(self, opt, args): | 46 | def ValidateOptions(self, opt, args): |
44 | if not opt.all and not args: | 47 | if not opt.all and not args: |
45 | self.Usage() | 48 | self.Usage() |
46 | 49 | ||
47 | if not opt.all: | 50 | if not opt.all: |
48 | nb = args[0] | 51 | nb = args[0] |
49 | if not git.check_ref_format('heads/%s' % nb): | 52 | if not git.check_ref_format("heads/%s" % nb): |
50 | self.OptionParser.error("'%s' is not a valid branch name" % nb) | 53 | self.OptionParser.error("'%s' is not a valid branch name" % nb) |
51 | else: | 54 | else: |
52 | args.insert(0, "'All local branches'") | 55 | args.insert(0, "'All local branches'") |
53 | 56 | ||
54 | def _ExecuteOne(self, all_branches, nb, project): | 57 | def _ExecuteOne(self, all_branches, nb, project): |
55 | """Abandon one project.""" | 58 | """Abandon one project.""" |
56 | if all_branches: | 59 | if all_branches: |
57 | branches = project.GetBranches() | 60 | branches = project.GetBranches() |
58 | else: | 61 | else: |
59 | branches = [nb] | 62 | branches = [nb] |
60 | 63 | ||
61 | ret = {} | 64 | ret = {} |
62 | for name in branches: | 65 | for name in branches: |
63 | status = project.AbandonBranch(name) | 66 | status = project.AbandonBranch(name) |
64 | if status is not None: | 67 | if status is not None: |
65 | ret[name] = status | 68 | ret[name] = status |
66 | return (ret, project) | 69 | return (ret, project) |
67 | 70 | ||
68 | def Execute(self, opt, args): | 71 | def Execute(self, opt, args): |
69 | nb = args[0] | 72 | nb = args[0] |
70 | err = defaultdict(list) | 73 | err = defaultdict(list) |
71 | success = defaultdict(list) | 74 | success = defaultdict(list) |
72 | all_projects = self.GetProjects(args[1:], all_manifests=not opt.this_manifest_only) | 75 | all_projects = self.GetProjects( |
73 | _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) | 76 | args[1:], all_manifests=not opt.this_manifest_only |
77 | ) | ||
78 | _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) | ||
74 | 79 | ||
75 | def _ProcessResults(_pool, pm, states): | 80 | def _ProcessResults(_pool, pm, states): |
76 | for (results, project) in states: | 81 | for results, project in states: |
77 | for branch, status in results.items(): | 82 | for branch, status in results.items(): |
78 | if status: | 83 | if status: |
79 | success[branch].append(project) | 84 | success[branch].append(project) |
80 | else: | 85 | else: |
81 | err[branch].append(project) | 86 | err[branch].append(project) |
82 | pm.update() | 87 | pm.update() |
83 | 88 | ||
84 | self.ExecuteInParallel( | 89 | self.ExecuteInParallel( |
85 | opt.jobs, | 90 | opt.jobs, |
86 | functools.partial(self._ExecuteOne, opt.all, nb), | 91 | functools.partial(self._ExecuteOne, opt.all, nb), |
87 | all_projects, | 92 | all_projects, |
88 | callback=_ProcessResults, | 93 | callback=_ProcessResults, |
89 | output=Progress('Abandon %s' % (nb,), len(all_projects), quiet=opt.quiet)) | 94 | output=Progress( |
95 | "Abandon %s" % (nb,), len(all_projects), quiet=opt.quiet | ||
96 | ), | ||
97 | ) | ||
90 | 98 | ||
91 | width = max(itertools.chain( | 99 | width = max( |
92 | [25], (len(x) for x in itertools.chain(success, err)))) | 100 | itertools.chain( |
93 | if err: | 101 | [25], (len(x) for x in itertools.chain(success, err)) |
94 | for br in err.keys(): | 102 | ) |
95 | err_msg = "error: cannot abandon %s" % br | 103 | ) |
96 | print(err_msg, file=sys.stderr) | 104 | if err: |
97 | for proj in err[br]: | 105 | for br in err.keys(): |
98 | print(' ' * len(err_msg) + " | %s" % _RelPath(proj), file=sys.stderr) | 106 | err_msg = "error: cannot abandon %s" % br |
99 | sys.exit(1) | 107 | print(err_msg, file=sys.stderr) |
100 | elif not success: | 108 | for proj in err[br]: |
101 | print('error: no project has local branch(es) : %s' % nb, | 109 | print( |
102 | file=sys.stderr) | 110 | " " * len(err_msg) + " | %s" % _RelPath(proj), |
103 | sys.exit(1) | 111 | file=sys.stderr, |
104 | else: | 112 | ) |
105 | # Everything below here is displaying status. | 113 | sys.exit(1) |
106 | if opt.quiet: | 114 | elif not success: |
107 | return | 115 | print( |
108 | print('Abandoned branches:') | 116 | "error: no project has local branch(es) : %s" % nb, |
109 | for br in success.keys(): | 117 | file=sys.stderr, |
110 | if len(all_projects) > 1 and len(all_projects) == len(success[br]): | 118 | ) |
111 | result = "all project" | 119 | sys.exit(1) |
112 | else: | 120 | else: |
113 | result = "%s" % ( | 121 | # Everything below here is displaying status. |
114 | ('\n' + ' ' * width + '| ').join(_RelPath(p) for p in success[br])) | 122 | if opt.quiet: |
115 | print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result)) | 123 | return |
124 | print("Abandoned branches:") | ||
125 | for br in success.keys(): | ||
126 | if len(all_projects) > 1 and len(all_projects) == len( | ||
127 | success[br] | ||
128 | ): | ||
129 | result = "all project" | ||
130 | else: | ||
131 | result = "%s" % ( | ||
132 | ("\n" + " " * width + "| ").join( | ||
133 | _RelPath(p) for p in success[br] | ||
134 | ) | ||
135 | ) | ||
136 | print("%s%s| %s\n" % (br, " " * (width - len(br)), result)) | ||
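The Execute method above also shows the worker pattern shared by the reformatted subcommands: functools.partial pre-binds the option values so the per-project worker takes a single argument, and a callback collects the (results, project) pairs. A minimal serial sketch of that shape, with hypothetical stand-in names (repo's ExecuteInParallel additionally fans the calls out over a worker pool and drives the progress bar):

import functools
from collections import defaultdict

def _execute_one(all_branches, branch_name, project):
    # Hypothetical stand-in for Abandon._ExecuteOne: one (results, project)
    # pair per project.
    return ({branch_name: True}, project)

def run(projects, branch_name, all_branches=False):
    success, err = defaultdict(list), defaultdict(list)
    worker = functools.partial(_execute_one, all_branches, branch_name)
    for results, project in map(worker, projects):   # serial stand-in
        for branch, status in results.items():
            (success if status else err)[branch].append(project)
    return success, err

print(run(["proj/a", "proj/b"], "my-feature"))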
diff --git a/subcmds/branches.py b/subcmds/branches.py
index fcf67ef5..4d5bb196 100644
--- a/subcmds/branches.py
+++ b/subcmds/branches.py
@@ -20,51 +20,51 @@ from command import Command, DEFAULT_LOCAL_JOBS
20 | 20 | ||
21 | 21 | ||
22 | class BranchColoring(Coloring): | 22 | class BranchColoring(Coloring): |
23 | def __init__(self, config): | 23 | def __init__(self, config): |
24 | Coloring.__init__(self, config, 'branch') | 24 | Coloring.__init__(self, config, "branch") |
25 | self.current = self.printer('current', fg='green') | 25 | self.current = self.printer("current", fg="green") |
26 | self.local = self.printer('local') | 26 | self.local = self.printer("local") |
27 | self.notinproject = self.printer('notinproject', fg='red') | 27 | self.notinproject = self.printer("notinproject", fg="red") |
28 | 28 | ||
29 | 29 | ||
30 | class BranchInfo(object): | 30 | class BranchInfo(object): |
31 | def __init__(self, name): | 31 | def __init__(self, name): |
32 | self.name = name | 32 | self.name = name |
33 | self.current = 0 | 33 | self.current = 0 |
34 | self.published = 0 | 34 | self.published = 0 |
35 | self.published_equal = 0 | 35 | self.published_equal = 0 |
36 | self.projects = [] | 36 | self.projects = [] |
37 | 37 | ||
38 | def add(self, b): | 38 | def add(self, b): |
39 | if b.current: | 39 | if b.current: |
40 | self.current += 1 | 40 | self.current += 1 |
41 | if b.published: | 41 | if b.published: |
42 | self.published += 1 | 42 | self.published += 1 |
43 | if b.revision == b.published: | 43 | if b.revision == b.published: |
44 | self.published_equal += 1 | 44 | self.published_equal += 1 |
45 | self.projects.append(b) | 45 | self.projects.append(b) |
46 | 46 | ||
47 | @property | 47 | @property |
48 | def IsCurrent(self): | 48 | def IsCurrent(self): |
49 | return self.current > 0 | 49 | return self.current > 0 |
50 | 50 | ||
51 | @property | 51 | @property |
52 | def IsSplitCurrent(self): | 52 | def IsSplitCurrent(self): |
53 | return self.current != 0 and self.current != len(self.projects) | 53 | return self.current != 0 and self.current != len(self.projects) |
54 | 54 | ||
55 | @property | 55 | @property |
56 | def IsPublished(self): | 56 | def IsPublished(self): |
57 | return self.published > 0 | 57 | return self.published > 0 |
58 | 58 | ||
59 | @property | 59 | @property |
60 | def IsPublishedEqual(self): | 60 | def IsPublishedEqual(self): |
61 | return self.published_equal == len(self.projects) | 61 | return self.published_equal == len(self.projects) |
62 | 62 | ||
63 | 63 | ||
64 | class Branches(Command): | 64 | class Branches(Command): |
65 | COMMON = True | 65 | COMMON = True |
66 | helpSummary = "View current topic branches" | 66 | helpSummary = "View current topic branches" |
67 | helpUsage = """ | 67 | helpUsage = """ |
68 | %prog [<project>...] | 68 | %prog [<project>...] |
69 | 69 | ||
70 | Summarizes the currently available topic branches. | 70 | Summarizes the currently available topic branches. |
@@ -95,111 +95,114 @@ the branch appears in, or does not appear in. If no project list
95 | is shown, then the branch appears in all projects. | 95 | is shown, then the branch appears in all projects. |
96 | 96 | ||
97 | """ | 97 | """ |
98 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 98 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
99 | 99 | ||
100 | def Execute(self, opt, args): | 100 | def Execute(self, opt, args): |
101 | projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) | 101 | projects = self.GetProjects( |
102 | out = BranchColoring(self.manifest.manifestProject.config) | 102 | args, all_manifests=not opt.this_manifest_only |
103 | all_branches = {} | 103 | ) |
104 | project_cnt = len(projects) | 104 | out = BranchColoring(self.manifest.manifestProject.config) |
105 | 105 | all_branches = {} | |
106 | def _ProcessResults(_pool, _output, results): | 106 | project_cnt = len(projects) |
107 | for name, b in itertools.chain.from_iterable(results): | 107 | |
108 | if name not in all_branches: | 108 | def _ProcessResults(_pool, _output, results): |
109 | all_branches[name] = BranchInfo(name) | 109 | for name, b in itertools.chain.from_iterable(results): |
110 | all_branches[name].add(b) | 110 | if name not in all_branches: |
111 | 111 | all_branches[name] = BranchInfo(name) | |
112 | self.ExecuteInParallel( | 112 | all_branches[name].add(b) |
113 | opt.jobs, | 113 | |
114 | expand_project_to_branches, | 114 | self.ExecuteInParallel( |
115 | projects, | 115 | opt.jobs, |
116 | callback=_ProcessResults) | 116 | expand_project_to_branches, |
117 | 117 | projects, | |
118 | names = sorted(all_branches) | 118 | callback=_ProcessResults, |
119 | 119 | ) | |
120 | if not names: | 120 | |
121 | print(' (no branches)', file=sys.stderr) | 121 | names = sorted(all_branches) |
122 | return | 122 | |
123 | 123 | if not names: | |
124 | width = 25 | 124 | print(" (no branches)", file=sys.stderr) |
125 | for name in names: | 125 | return |
126 | if width < len(name): | 126 | |
127 | width = len(name) | 127 | width = 25 |
128 | 128 | for name in names: | |
129 | for name in names: | 129 | if width < len(name): |
130 | i = all_branches[name] | 130 | width = len(name) |
131 | in_cnt = len(i.projects) | 131 | |
132 | 132 | for name in names: | |
133 | if i.IsCurrent: | 133 | i = all_branches[name] |
134 | current = '*' | 134 | in_cnt = len(i.projects) |
135 | hdr = out.current | 135 | |
136 | else: | 136 | if i.IsCurrent: |
137 | current = ' ' | 137 | current = "*" |
138 | hdr = out.local | 138 | hdr = out.current |
139 | |||
140 | if i.IsPublishedEqual: | ||
141 | published = 'P' | ||
142 | elif i.IsPublished: | ||
143 | published = 'p' | ||
144 | else: | ||
145 | published = ' ' | ||
146 | |||
147 | hdr('%c%c %-*s' % (current, published, width, name)) | ||
148 | out.write(' |') | ||
149 | |||
150 | _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) | ||
151 | if in_cnt < project_cnt: | ||
152 | fmt = out.write | ||
153 | paths = [] | ||
154 | non_cur_paths = [] | ||
155 | if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt): | ||
156 | in_type = 'in' | ||
157 | for b in i.projects: | ||
158 | relpath = _RelPath(b.project) | ||
159 | if not i.IsSplitCurrent or b.current: | ||
160 | paths.append(relpath) | ||
161 | else: | 139 | else: |
162 | non_cur_paths.append(relpath) | 140 | current = " " |
163 | else: | 141 | hdr = out.local |
164 | fmt = out.notinproject | 142 | |
165 | in_type = 'not in' | 143 | if i.IsPublishedEqual: |
166 | have = set() | 144 | published = "P" |
167 | for b in i.projects: | 145 | elif i.IsPublished: |
168 | have.add(_RelPath(b.project)) | 146 | published = "p" |
169 | for p in projects: | 147 | else: |
170 | if _RelPath(p) not in have: | 148 | published = " " |
171 | paths.append(_RelPath(p)) | 149 | |
172 | 150 | hdr("%c%c %-*s" % (current, published, width, name)) | |
173 | s = ' %s %s' % (in_type, ', '.join(paths)) | 151 | out.write(" |") |
174 | if not i.IsSplitCurrent and (width + 7 + len(s) < 80): | 152 | |
175 | fmt = out.current if i.IsCurrent else fmt | 153 | _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) |
176 | fmt(s) | 154 | if in_cnt < project_cnt: |
177 | else: | 155 | fmt = out.write |
178 | fmt(' %s:' % in_type) | 156 | paths = [] |
179 | fmt = out.current if i.IsCurrent else out.write | 157 | non_cur_paths = [] |
180 | for p in paths: | 158 | if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt): |
181 | out.nl() | 159 | in_type = "in" |
182 | fmt(width * ' ' + ' %s' % p) | 160 | for b in i.projects: |
183 | fmt = out.write | 161 | relpath = _RelPath(b.project) |
184 | for p in non_cur_paths: | 162 | if not i.IsSplitCurrent or b.current: |
163 | paths.append(relpath) | ||
164 | else: | ||
165 | non_cur_paths.append(relpath) | ||
166 | else: | ||
167 | fmt = out.notinproject | ||
168 | in_type = "not in" | ||
169 | have = set() | ||
170 | for b in i.projects: | ||
171 | have.add(_RelPath(b.project)) | ||
172 | for p in projects: | ||
173 | if _RelPath(p) not in have: | ||
174 | paths.append(_RelPath(p)) | ||
175 | |||
176 | s = " %s %s" % (in_type, ", ".join(paths)) | ||
177 | if not i.IsSplitCurrent and (width + 7 + len(s) < 80): | ||
178 | fmt = out.current if i.IsCurrent else fmt | ||
179 | fmt(s) | ||
180 | else: | ||
181 | fmt(" %s:" % in_type) | ||
182 | fmt = out.current if i.IsCurrent else out.write | ||
183 | for p in paths: | ||
184 | out.nl() | ||
185 | fmt(width * " " + " %s" % p) | ||
186 | fmt = out.write | ||
187 | for p in non_cur_paths: | ||
188 | out.nl() | ||
189 | fmt(width * " " + " %s" % p) | ||
190 | else: | ||
191 | out.write(" in all projects") | ||
185 | out.nl() | 192 | out.nl() |
186 | fmt(width * ' ' + ' %s' % p) | ||
187 | else: | ||
188 | out.write(' in all projects') | ||
189 | out.nl() | ||
190 | 193 | ||
191 | 194 | ||
192 | def expand_project_to_branches(project): | 195 | def expand_project_to_branches(project): |
193 | """Expands a project into a list of branch names & associated information. | 196 | """Expands a project into a list of branch names & associated information. |
194 | 197 | ||
195 | Args: | 198 | Args: |
196 | project: project.Project | 199 | project: project.Project |
197 | 200 | ||
198 | Returns: | 201 | Returns: |
199 | List[Tuple[str, git_config.Branch]] | 202 | List[Tuple[str, git_config.Branch]] |
200 | """ | 203 | """ |
201 | branches = [] | 204 | branches = [] |
202 | for name, b in project.GetBranches().items(): | 205 | for name, b in project.GetBranches().items(): |
203 | b.project = project | 206 | b.project = project |
204 | branches.append((name, b)) | 207 | branches.append((name, b)) |
205 | return branches | 208 | return branches |
diff --git a/subcmds/checkout.py b/subcmds/checkout.py
index 768b6027..08012a82 100644
--- a/subcmds/checkout.py
+++ b/subcmds/checkout.py
@@ -20,12 +20,12 @@ from progress import Progress
20 | 20 | ||
21 | 21 | ||
22 | class Checkout(Command): | 22 | class Checkout(Command): |
23 | COMMON = True | 23 | COMMON = True |
24 | helpSummary = "Checkout a branch for development" | 24 | helpSummary = "Checkout a branch for development" |
25 | helpUsage = """ | 25 | helpUsage = """ |
26 | %prog <branchname> [<project>...] | 26 | %prog <branchname> [<project>...] |
27 | """ | 27 | """ |
28 | helpDescription = """ | 28 | helpDescription = """ |
29 | The '%prog' command checks out an existing branch that was previously | 29 | The '%prog' command checks out an existing branch that was previously |
30 | created by 'repo start'. | 30 | created by 'repo start'. |
31 | 31 | ||
@@ -33,43 +33,50 @@ The command is equivalent to:
33 | 33 | ||
34 | repo forall [<project>...] -c git checkout <branchname> | 34 | repo forall [<project>...] -c git checkout <branchname> |
35 | """ | 35 | """ |
36 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 36 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
37 | 37 | ||
38 | def ValidateOptions(self, opt, args): | 38 | def ValidateOptions(self, opt, args): |
39 | if not args: | 39 | if not args: |
40 | self.Usage() | 40 | self.Usage() |
41 | 41 | ||
42 | def _ExecuteOne(self, nb, project): | 42 | def _ExecuteOne(self, nb, project): |
43 | """Checkout one project.""" | 43 | """Checkout one project.""" |
44 | return (project.CheckoutBranch(nb), project) | 44 | return (project.CheckoutBranch(nb), project) |
45 | 45 | ||
46 | def Execute(self, opt, args): | 46 | def Execute(self, opt, args): |
47 | nb = args[0] | 47 | nb = args[0] |
48 | err = [] | 48 | err = [] |
49 | success = [] | 49 | success = [] |
50 | all_projects = self.GetProjects(args[1:], all_manifests=not opt.this_manifest_only) | 50 | all_projects = self.GetProjects( |
51 | args[1:], all_manifests=not opt.this_manifest_only | ||
52 | ) | ||
51 | 53 | ||
52 | def _ProcessResults(_pool, pm, results): | 54 | def _ProcessResults(_pool, pm, results): |
53 | for status, project in results: | 55 | for status, project in results: |
54 | if status is not None: | 56 | if status is not None: |
55 | if status: | 57 | if status: |
56 | success.append(project) | 58 | success.append(project) |
57 | else: | 59 | else: |
58 | err.append(project) | 60 | err.append(project) |
59 | pm.update() | 61 | pm.update() |
60 | 62 | ||
61 | self.ExecuteInParallel( | 63 | self.ExecuteInParallel( |
62 | opt.jobs, | 64 | opt.jobs, |
63 | functools.partial(self._ExecuteOne, nb), | 65 | functools.partial(self._ExecuteOne, nb), |
64 | all_projects, | 66 | all_projects, |
65 | callback=_ProcessResults, | 67 | callback=_ProcessResults, |
66 | output=Progress('Checkout %s' % (nb,), len(all_projects), quiet=opt.quiet)) | 68 | output=Progress( |
69 | "Checkout %s" % (nb,), len(all_projects), quiet=opt.quiet | ||
70 | ), | ||
71 | ) | ||
67 | 72 | ||
68 | if err: | 73 | if err: |
69 | for p in err: | 74 | for p in err: |
70 | print("error: %s/: cannot checkout %s" % (p.relpath, nb), | 75 | print( |
71 | file=sys.stderr) | 76 | "error: %s/: cannot checkout %s" % (p.relpath, nb), |
72 | sys.exit(1) | 77 | file=sys.stderr, |
73 | elif not success: | 78 | ) |
74 | print('error: no project has branch %s' % nb, file=sys.stderr) | 79 | sys.exit(1) |
75 | sys.exit(1) | 80 | elif not success: |
81 | print("error: no project has branch %s" % nb, file=sys.stderr) | ||
82 | sys.exit(1) | ||
diff --git a/subcmds/cherry_pick.py b/subcmds/cherry_pick.py
index eecf4e17..4cfb8c88 100644
--- a/subcmds/cherry_pick.py
+++ b/subcmds/cherry_pick.py
@@ -17,96 +17,107 @@ import sys
17 | from command import Command | 17 | from command import Command |
18 | from git_command import GitCommand | 18 | from git_command import GitCommand |
19 | 19 | ||
20 | CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$') | 20 | CHANGE_ID_RE = re.compile(r"^\s*Change-Id: I([0-9a-f]{40})\s*$") |
21 | 21 | ||
22 | 22 | ||
23 | class CherryPick(Command): | 23 | class CherryPick(Command): |
24 | COMMON = True | 24 | COMMON = True |
25 | helpSummary = "Cherry-pick a change." | 25 | helpSummary = "Cherry-pick a change." |
26 | helpUsage = """ | 26 | helpUsage = """ |
27 | %prog <sha1> | 27 | %prog <sha1> |
28 | """ | 28 | """ |
29 | helpDescription = """ | 29 | helpDescription = """ |
30 | '%prog' cherry-picks a change from one branch to another. | 30 | '%prog' cherry-picks a change from one branch to another. |
31 | The change id will be updated, and a reference to the old | 31 | The change id will be updated, and a reference to the old |
32 | change id will be added. | 32 | change id will be added. |
33 | """ | 33 | """ |
34 | 34 | ||
35 | def ValidateOptions(self, opt, args): | 35 | def ValidateOptions(self, opt, args): |
36 | if len(args) != 1: | 36 | if len(args) != 1: |
37 | self.Usage() | 37 | self.Usage() |
38 | 38 | ||
39 | def Execute(self, opt, args): | 39 | def Execute(self, opt, args): |
40 | reference = args[0] | 40 | reference = args[0] |
41 | 41 | ||
42 | p = GitCommand(None, | 42 | p = GitCommand( |
43 | ['rev-parse', '--verify', reference], | 43 | None, |
44 | capture_stdout=True, | 44 | ["rev-parse", "--verify", reference], |
45 | capture_stderr=True) | 45 | capture_stdout=True, |
46 | if p.Wait() != 0: | 46 | capture_stderr=True, |
47 | print(p.stderr, file=sys.stderr) | 47 | ) |
48 | sys.exit(1) | 48 | if p.Wait() != 0: |
49 | sha1 = p.stdout.strip() | 49 | print(p.stderr, file=sys.stderr) |
50 | 50 | sys.exit(1) | |
51 | p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True) | 51 | sha1 = p.stdout.strip() |
52 | if p.Wait() != 0: | 52 | |
53 | print("error: Failed to retrieve old commit message", file=sys.stderr) | 53 | p = GitCommand(None, ["cat-file", "commit", sha1], capture_stdout=True) |
54 | sys.exit(1) | 54 | if p.Wait() != 0: |
55 | old_msg = self._StripHeader(p.stdout) | 55 | print( |
56 | 56 | "error: Failed to retrieve old commit message", file=sys.stderr | |
57 | p = GitCommand(None, | 57 | ) |
58 | ['cherry-pick', sha1], | 58 | sys.exit(1) |
59 | capture_stdout=True, | 59 | old_msg = self._StripHeader(p.stdout) |
60 | capture_stderr=True) | 60 | |
61 | status = p.Wait() | 61 | p = GitCommand( |
62 | 62 | None, | |
63 | if p.stdout: | 63 | ["cherry-pick", sha1], |
64 | print(p.stdout.strip(), file=sys.stdout) | 64 | capture_stdout=True, |
65 | if p.stderr: | 65 | capture_stderr=True, |
66 | print(p.stderr.strip(), file=sys.stderr) | 66 | ) |
67 | 67 | status = p.Wait() | |
68 | if status == 0: | 68 | |
69 | # The cherry-pick was applied correctly. We just need to edit the | 69 | if p.stdout: |
70 | # commit message. | 70 | print(p.stdout.strip(), file=sys.stdout) |
71 | new_msg = self._Reformat(old_msg, sha1) | 71 | if p.stderr: |
72 | 72 | print(p.stderr.strip(), file=sys.stderr) | |
73 | p = GitCommand(None, ['commit', '--amend', '-F', '-'], | 73 | |
74 | input=new_msg, | 74 | if status == 0: |
75 | capture_stdout=True, | 75 | # The cherry-pick was applied correctly. We just need to edit the |
76 | capture_stderr=True) | 76 | # commit message. |
77 | if p.Wait() != 0: | 77 | new_msg = self._Reformat(old_msg, sha1) |
78 | print("error: Failed to update commit message", file=sys.stderr) | 78 | |
79 | sys.exit(1) | 79 | p = GitCommand( |
80 | 80 | None, | |
81 | else: | 81 | ["commit", "--amend", "-F", "-"], |
82 | print('NOTE: When committing (please see above) and editing the commit ' | 82 | input=new_msg, |
83 | 'message, please remove the old Change-Id-line and add:') | 83 | capture_stdout=True, |
84 | print(self._GetReference(sha1), file=sys.stderr) | 84 | capture_stderr=True, |
85 | print(file=sys.stderr) | 85 | ) |
86 | 86 | if p.Wait() != 0: | |
87 | def _IsChangeId(self, line): | 87 | print("error: Failed to update commit message", file=sys.stderr) |
88 | return CHANGE_ID_RE.match(line) | 88 | sys.exit(1) |
89 | 89 | ||
90 | def _GetReference(self, sha1): | 90 | else: |
91 | return "(cherry picked from commit %s)" % sha1 | 91 | print( |
92 | 92 | "NOTE: When committing (please see above) and editing the " | |
93 | def _StripHeader(self, commit_msg): | 93 | "commit message, please remove the old Change-Id-line and add:" |
94 | lines = commit_msg.splitlines() | 94 | ) |
95 | return "\n".join(lines[lines.index("") + 1:]) | 95 | print(self._GetReference(sha1), file=sys.stderr) |
96 | 96 | print(file=sys.stderr) | |
97 | def _Reformat(self, old_msg, sha1): | 97 | |
98 | new_msg = [] | 98 | def _IsChangeId(self, line): |
99 | 99 | return CHANGE_ID_RE.match(line) | |
100 | for line in old_msg.splitlines(): | 100 | |
101 | if not self._IsChangeId(line): | 101 | def _GetReference(self, sha1): |
102 | new_msg.append(line) | 102 | return "(cherry picked from commit %s)" % sha1 |
103 | 103 | ||
104 | # Add a blank line between the message and the change id/reference | 104 | def _StripHeader(self, commit_msg): |
105 | try: | 105 | lines = commit_msg.splitlines() |
106 | if new_msg[-1].strip() != "": | 106 | return "\n".join(lines[lines.index("") + 1 :]) |
107 | new_msg.append("") | 107 | |
108 | except IndexError: | 108 | def _Reformat(self, old_msg, sha1): |
109 | pass | 109 | new_msg = [] |
110 | 110 | ||
111 | new_msg.append(self._GetReference(sha1)) | 111 | for line in old_msg.splitlines(): |
112 | return "\n".join(new_msg) | 112 | if not self._IsChangeId(line): |
113 | new_msg.append(line) | ||
114 | |||
115 | # Add a blank line between the message and the change id/reference. | ||
116 | try: | ||
117 | if new_msg[-1].strip() != "": | ||
118 | new_msg.append("") | ||
119 | except IndexError: | ||
120 | pass | ||
121 | |||
122 | new_msg.append(self._GetReference(sha1)) | ||
123 | return "\n".join(new_msg) | ||
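Concretely, the rewrite above removes any existing Change-Id trailer (matched by CHANGE_ID_RE) from the cherry-picked commit's message and appends a back-reference after a blank line. A small worked example of just that message transformation (the real command first strips the raw commit-object header via _StripHeader):

import re

CHANGE_ID_RE = re.compile(r"^\s*Change-Id: I([0-9a-f]{40})\s*$")

def reformat(old_msg, sha1):
    # Drop Change-Id lines, keep everything else.
    new_msg = [l for l in old_msg.splitlines() if not CHANGE_ID_RE.match(l)]
    # Separate the reference from the message with a blank line.
    if new_msg and new_msg[-1].strip() != "":
        new_msg.append("")
    new_msg.append("(cherry picked from commit %s)" % sha1)
    return "\n".join(new_msg)

old = "Fix crash\n\nChange-Id: I" + "0" * 40
print(reformat(old, "deadbeef"))
# Fix crash
#
# (cherry picked from commit deadbeef)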
diff --git a/subcmds/diff.py b/subcmds/diff.py
index a606ee9a..5c627c0c 100644
--- a/subcmds/diff.py
+++ b/subcmds/diff.py
@@ -19,54 +19,63 @@ from command import DEFAULT_LOCAL_JOBS, PagedCommand
19 | 19 | ||
20 | 20 | ||
21 | class Diff(PagedCommand): | 21 | class Diff(PagedCommand): |
22 | COMMON = True | 22 | COMMON = True |
23 | helpSummary = "Show changes between commit and working tree" | 23 | helpSummary = "Show changes between commit and working tree" |
24 | helpUsage = """ | 24 | helpUsage = """ |
25 | %prog [<project>...] | 25 | %prog [<project>...] |
26 | 26 | ||
27 | The -u option causes '%prog' to generate diff output with file paths | 27 | The -u option causes '%prog' to generate diff output with file paths |
28 | relative to the repository root, so the output can be applied | 28 | relative to the repository root, so the output can be applied |
29 | to the Unix 'patch' command. | 29 | to the Unix 'patch' command. |
30 | """ | 30 | """ |
31 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 31 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
32 | 32 | ||
33 | def _Options(self, p): | 33 | def _Options(self, p): |
34 | p.add_option('-u', '--absolute', | 34 | p.add_option( |
35 | dest='absolute', action='store_true', | 35 | "-u", |
36 | help='paths are relative to the repository root') | 36 | "--absolute", |
37 | dest="absolute", | ||
38 | action="store_true", | ||
39 | help="paths are relative to the repository root", | ||
40 | ) | ||
37 | 41 | ||
38 | def _ExecuteOne(self, absolute, local, project): | 42 | def _ExecuteOne(self, absolute, local, project): |
39 | """Obtains the diff for a specific project. | 43 | """Obtains the diff for a specific project. |
40 | 44 | ||
41 | Args: | 45 | Args: |
42 | absolute: Paths are relative to the root. | 46 | absolute: Paths are relative to the root. |
43 | local: a boolean, if True, the path is relative to the local | 47 | local: a boolean, if True, the path is relative to the local |
44 | (sub)manifest. If false, the path is relative to the | 48 | (sub)manifest. If false, the path is relative to the outermost |
45 | outermost manifest. | 49 | manifest. |
46 | project: Project to get status of. | 50 | project: Project to get status of. |
47 | 51 | ||
48 | Returns: | 52 | Returns: |
49 | The status of the project. | 53 | The status of the project. |
50 | """ | 54 | """ |
51 | buf = io.StringIO() | 55 | buf = io.StringIO() |
52 | ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local) | 56 | ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local) |
53 | return (ret, buf.getvalue()) | 57 | return (ret, buf.getvalue()) |
54 | 58 | ||
55 | def Execute(self, opt, args): | 59 | def Execute(self, opt, args): |
56 | all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) | 60 | all_projects = self.GetProjects( |
61 | args, all_manifests=not opt.this_manifest_only | ||
62 | ) | ||
57 | 63 | ||
58 | def _ProcessResults(_pool, _output, results): | 64 | def _ProcessResults(_pool, _output, results): |
59 | ret = 0 | 65 | ret = 0 |
60 | for (state, output) in results: | 66 | for state, output in results: |
61 | if output: | 67 | if output: |
62 | print(output, end='') | 68 | print(output, end="") |
63 | if not state: | 69 | if not state: |
64 | ret = 1 | 70 | ret = 1 |
65 | return ret | 71 | return ret |
66 | 72 | ||
67 | return self.ExecuteInParallel( | 73 | return self.ExecuteInParallel( |
68 | opt.jobs, | 74 | opt.jobs, |
69 | functools.partial(self._ExecuteOne, opt.absolute, opt.this_manifest_only), | 75 | functools.partial( |
70 | all_projects, | 76 | self._ExecuteOne, opt.absolute, opt.this_manifest_only |
71 | callback=_ProcessResults, | 77 | ), |
72 | ordered=True) | 78 | all_projects, |
79 | callback=_ProcessResults, | ||
80 | ordered=True, | ||
81 | ) | ||
diff --git a/subcmds/diffmanifests.py b/subcmds/diffmanifests.py
index 4f9f5b0f..b446dbd8 100644
--- a/subcmds/diffmanifests.py
+++ b/subcmds/diffmanifests.py
@@ -18,24 +18,24 @@ from manifest_xml import RepoClient
18 | 18 | ||
19 | 19 | ||
20 | class _Coloring(Coloring): | 20 | class _Coloring(Coloring): |
21 | def __init__(self, config): | 21 | def __init__(self, config): |
22 | Coloring.__init__(self, config, "status") | 22 | Coloring.__init__(self, config, "status") |
23 | 23 | ||
24 | 24 | ||
25 | class Diffmanifests(PagedCommand): | 25 | class Diffmanifests(PagedCommand): |
26 | """ A command to see logs in projects represented by manifests | 26 | """A command to see logs in projects represented by manifests |
27 | 27 | ||
28 | This is used to see deeper differences between manifests. Where a simple | 28 | This is used to see deeper differences between manifests. Where a simple |
29 | diff would only show a diff of sha1s for example, this command will display | 29 | diff would only show a diff of sha1s for example, this command will display |
30 | the logs of the project between both sha1s, allowing user to see diff at a | 30 | the logs of the project between both sha1s, allowing user to see diff at a |
31 | deeper level. | 31 | deeper level. |
32 | """ | 32 | """ |
33 | 33 | ||
34 | COMMON = True | 34 | COMMON = True |
35 | helpSummary = "Manifest diff utility" | 35 | helpSummary = "Manifest diff utility" |
36 | helpUsage = """%prog manifest1.xml [manifest2.xml] [options]""" | 36 | helpUsage = """%prog manifest1.xml [manifest2.xml] [options]""" |
37 | 37 | ||
38 | helpDescription = """ | 38 | helpDescription = """ |
39 | The %prog command shows differences between project revisions of manifest1 and | 39 | The %prog command shows differences between project revisions of manifest1 and |
40 | manifest2. if manifest2 is not specified, current manifest.xml will be used | 40 | manifest2. if manifest2 is not specified, current manifest.xml will be used |
41 | instead. Both absolute and relative paths may be used for manifests. Relative | 41 | instead. Both absolute and relative paths may be used for manifests. Relative |
@@ -65,159 +65,209 @@ synced and their revisions won't be found.
65 | 65 | ||
66 | """ | 66 | """ |
67 | 67 | ||
68 | def _Options(self, p): | 68 | def _Options(self, p): |
69 | p.add_option('--raw', | 69 | p.add_option( |
70 | dest='raw', action='store_true', | 70 | "--raw", dest="raw", action="store_true", help="display raw diff" |
71 | help='display raw diff') | 71 | ) |
72 | p.add_option('--no-color', | 72 | p.add_option( |
73 | dest='color', action='store_false', default=True, | 73 | "--no-color", |
74 | help='does not display the diff in color') | 74 | dest="color", |
75 | p.add_option('--pretty-format', | 75 | action="store_false", |
76 | dest='pretty_format', action='store', | 76 | default=True, |
77 | metavar='<FORMAT>', | 77 | help="does not display the diff in color", |
78 | help='print the log using a custom git pretty format string') | 78 | ) |
79 | 79 | p.add_option( | |
80 | def _printRawDiff(self, diff, pretty_format=None, local=False): | 80 | "--pretty-format", |
81 | _RelPath = lambda p: p.RelPath(local=local) | 81 | dest="pretty_format", |
82 | for project in diff['added']: | 82 | action="store", |
83 | self.printText("A %s %s" % (_RelPath(project), project.revisionExpr)) | 83 | metavar="<FORMAT>", |
84 | self.out.nl() | 84 | help="print the log using a custom git pretty format string", |
85 | 85 | ) | |
86 | for project in diff['removed']: | 86 | |
87 | self.printText("R %s %s" % (_RelPath(project), project.revisionExpr)) | 87 | def _printRawDiff(self, diff, pretty_format=None, local=False): |
88 | self.out.nl() | 88 | _RelPath = lambda p: p.RelPath(local=local) |
89 | 89 | for project in diff["added"]: | |
90 | for project, otherProject in diff['changed']: | 90 | self.printText( |
91 | self.printText("C %s %s %s" % (_RelPath(project), project.revisionExpr, | 91 | "A %s %s" % (_RelPath(project), project.revisionExpr) |
92 | otherProject.revisionExpr)) | 92 | ) |
93 | self.out.nl() | ||
94 | self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format) | ||
95 | |||
96 | for project, otherProject in diff['unreachable']: | ||
97 | self.printText("U %s %s %s" % (_RelPath(project), project.revisionExpr, | ||
98 | otherProject.revisionExpr)) | ||
99 | self.out.nl() | ||
100 | |||
101 | def _printDiff(self, diff, color=True, pretty_format=None, local=False): | ||
102 | _RelPath = lambda p: p.RelPath(local=local) | ||
103 | if diff['added']: | ||
104 | self.out.nl() | ||
105 | self.printText('added projects : \n') | ||
106 | self.out.nl() | ||
107 | for project in diff['added']: | ||
108 | self.printProject('\t%s' % (_RelPath(project))) | ||
109 | self.printText(' at revision ') | ||
110 | self.printRevision(project.revisionExpr) | ||
111 | self.out.nl() | ||
112 | |||
113 | if diff['removed']: | ||
114 | self.out.nl() | ||
115 | self.printText('removed projects : \n') | ||
116 | self.out.nl() | ||
117 | for project in diff['removed']: | ||
118 | self.printProject('\t%s' % (_RelPath(project))) | ||
119 | self.printText(' at revision ') | ||
120 | self.printRevision(project.revisionExpr) | ||
121 | self.out.nl() | ||
122 | |||
123 | if diff['missing']: | ||
124 | self.out.nl() | ||
125 | self.printText('missing projects : \n') | ||
126 | self.out.nl() | ||
127 | for project in diff['missing']: | ||
128 | self.printProject('\t%s' % (_RelPath(project))) | ||
129 | self.printText(' at revision ') | ||
130 | self.printRevision(project.revisionExpr) | ||
131 | self.out.nl() | ||
132 | |||
133 | if diff['changed']: | ||
134 | self.out.nl() | ||
135 | self.printText('changed projects : \n') | ||
136 | self.out.nl() | ||
137 | for project, otherProject in diff['changed']: | ||
138 | self.printProject('\t%s' % (_RelPath(project))) | ||
139 | self.printText(' changed from ') | ||
140 | self.printRevision(project.revisionExpr) | ||
141 | self.printText(' to ') | ||
142 | self.printRevision(otherProject.revisionExpr) | ||
143 | self.out.nl() | ||
144 | self._printLogs(project, otherProject, raw=False, color=color, | ||
145 | pretty_format=pretty_format) | ||
146 | self.out.nl() | ||
147 | |||
148 | if diff['unreachable']: | ||
149 | self.out.nl() | ||
150 | self.printText('projects with unreachable revisions : \n') | ||
151 | self.out.nl() | ||
152 | for project, otherProject in diff['unreachable']: | ||
153 | self.printProject('\t%s ' % (_RelPath(project))) | ||
154 | self.printRevision(project.revisionExpr) | ||
155 | self.printText(' or ') | ||
156 | self.printRevision(otherProject.revisionExpr) | ||
157 | self.printText(' not found') | ||
158 | self.out.nl() | ||
159 | |||
160 | def _printLogs(self, project, otherProject, raw=False, color=True, | ||
161 | pretty_format=None): | ||
162 | |||
163 | logs = project.getAddedAndRemovedLogs(otherProject, | ||
164 | oneline=(pretty_format is None), | ||
165 | color=color, | ||
166 | pretty_format=pretty_format) | ||
167 | if logs['removed']: | ||
168 | removedLogs = logs['removed'].split('\n') | ||
169 | for log in removedLogs: | ||
170 | if log.strip(): | ||
171 | if raw: | ||
172 | self.printText(' R ' + log) | ||
173 | self.out.nl() | 93 | self.out.nl() |
174 | else: | 94 | |
175 | self.printRemoved('\t\t[-] ') | 95 | for project in diff["removed"]: |
176 | self.printText(log) | 96 | self.printText( |
97 | "R %s %s" % (_RelPath(project), project.revisionExpr) | ||
98 | ) | ||
99 | self.out.nl() | ||
100 | |||
101 | for project, otherProject in diff["changed"]: | ||
102 | self.printText( | ||
103 | "C %s %s %s" | ||
104 | % ( | ||
105 | _RelPath(project), | ||
106 | project.revisionExpr, | ||
107 | otherProject.revisionExpr, | ||
108 | ) | ||
109 | ) | ||
110 | self.out.nl() | ||
111 | self._printLogs( | ||
112 | project, | ||
113 | otherProject, | ||
114 | raw=True, | ||
115 | color=False, | ||
116 | pretty_format=pretty_format, | ||
117 | ) | ||
118 | |||
119 | for project, otherProject in diff["unreachable"]: | ||
120 | self.printText( | ||
121 | "U %s %s %s" | ||
122 | % ( | ||
123 | _RelPath(project), | ||
124 | project.revisionExpr, | ||
125 | otherProject.revisionExpr, | ||
126 | ) | ||
127 | ) | ||
128 | self.out.nl() | ||
129 | |||
130 | def _printDiff(self, diff, color=True, pretty_format=None, local=False): | ||
131 | _RelPath = lambda p: p.RelPath(local=local) | ||
132 | if diff["added"]: | ||
133 | self.out.nl() | ||
134 | self.printText("added projects : \n") | ||
177 | self.out.nl() | 135 | self.out.nl() |
136 | for project in diff["added"]: | ||
137 | self.printProject("\t%s" % (_RelPath(project))) | ||
138 | self.printText(" at revision ") | ||
139 | self.printRevision(project.revisionExpr) | ||
140 | self.out.nl() | ||
178 | 141 | ||
179 | if logs['added']: | 142 | if diff["removed"]: |
180 | addedLogs = logs['added'].split('\n') | ||
181 | for log in addedLogs: | ||
182 | if log.strip(): | ||
183 | if raw: | ||
184 | self.printText(' A ' + log) | ||
185 | self.out.nl() | 143 | self.out.nl() |
186 | else: | 144 | self.printText("removed projects : \n") |
187 | self.printAdded('\t\t[+] ') | ||
188 | self.printText(log) | ||
189 | self.out.nl() | 145 | self.out.nl() |
146 | for project in diff["removed"]: | ||
147 | self.printProject("\t%s" % (_RelPath(project))) | ||
148 | self.printText(" at revision ") | ||
149 | self.printRevision(project.revisionExpr) | ||
150 | self.out.nl() | ||
190 | 151 | ||
191 | def ValidateOptions(self, opt, args): | 152 | if diff["missing"]: |
192 | if not args or len(args) > 2: | 153 | self.out.nl() |
193 | self.OptionParser.error('missing manifests to diff') | 154 | self.printText("missing projects : \n") |
194 | if opt.this_manifest_only is False: | 155 | self.out.nl() |
195 | raise self.OptionParser.error( | 156 | for project in diff["missing"]: |
196 | '`diffmanifest` only supports the current tree') | 157 | self.printProject("\t%s" % (_RelPath(project))) |
197 | 158 | self.printText(" at revision ") | |
198 | def Execute(self, opt, args): | 159 | self.printRevision(project.revisionExpr) |
199 | self.out = _Coloring(self.client.globalConfig) | 160 | self.out.nl() |
200 | self.printText = self.out.nofmt_printer('text') | 161 | |
201 | if opt.color: | 162 | if diff["changed"]: |
202 | self.printProject = self.out.nofmt_printer('project', attr='bold') | 163 | self.out.nl() |
203 | self.printAdded = self.out.nofmt_printer('green', fg='green', attr='bold') | 164 | self.printText("changed projects : \n") |
204 | self.printRemoved = self.out.nofmt_printer('red', fg='red', attr='bold') | 165 | self.out.nl() |
205 | self.printRevision = self.out.nofmt_printer('revision', fg='yellow') | 166 | for project, otherProject in diff["changed"]: |
206 | else: | 167 | self.printProject("\t%s" % (_RelPath(project))) |
207 | self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText | 168 | self.printText(" changed from ") |
208 | 169 | self.printRevision(project.revisionExpr) | |
209 | manifest1 = RepoClient(self.repodir) | 170 | self.printText(" to ") |
210 | manifest1.Override(args[0], load_local_manifests=False) | 171 | self.printRevision(otherProject.revisionExpr) |
211 | if len(args) == 1: | 172 | self.out.nl() |
212 | manifest2 = self.manifest | 173 | self._printLogs( |
213 | else: | 174 | project, |
214 | manifest2 = RepoClient(self.repodir) | 175 | otherProject, |
215 | manifest2.Override(args[1], load_local_manifests=False) | 176 | raw=False, |
216 | 177 | color=color, | |
217 | diff = manifest1.projectsDiff(manifest2) | 178 | pretty_format=pretty_format, |
218 | if opt.raw: | 179 | ) |
219 | self._printRawDiff(diff, pretty_format=opt.pretty_format, | 180 | self.out.nl() |
220 | local=opt.this_manifest_only) | 181 | |
221 | else: | 182 | if diff["unreachable"]: |
222 | self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format, | 183 | self.out.nl() |
223 | local=opt.this_manifest_only) | 184 | self.printText("projects with unreachable revisions : \n") |
185 | self.out.nl() | ||
186 | for project, otherProject in diff["unreachable"]: | ||
187 | self.printProject("\t%s " % (_RelPath(project))) | ||
188 | self.printRevision(project.revisionExpr) | ||
189 | self.printText(" or ") | ||
190 | self.printRevision(otherProject.revisionExpr) | ||
191 | self.printText(" not found") | ||
192 | self.out.nl() | ||
193 | |||
194 | def _printLogs( | ||
195 | self, project, otherProject, raw=False, color=True, pretty_format=None | ||
196 | ): | ||
197 | logs = project.getAddedAndRemovedLogs( | ||
198 | otherProject, | ||
199 | oneline=(pretty_format is None), | ||
200 | color=color, | ||
201 | pretty_format=pretty_format, | ||
202 | ) | ||
203 | if logs["removed"]: | ||
204 | removedLogs = logs["removed"].split("\n") | ||
205 | for log in removedLogs: | ||
206 | if log.strip(): | ||
207 | if raw: | ||
208 | self.printText(" R " + log) | ||
209 | self.out.nl() | ||
210 | else: | ||
211 | self.printRemoved("\t\t[-] ") | ||
212 | self.printText(log) | ||
213 | self.out.nl() | ||
214 | |||
215 | if logs["added"]: | ||
216 | addedLogs = logs["added"].split("\n") | ||
217 | for log in addedLogs: | ||
218 | if log.strip(): | ||
219 | if raw: | ||
220 | self.printText(" A " + log) | ||
221 | self.out.nl() | ||
222 | else: | ||
223 | self.printAdded("\t\t[+] ") | ||
224 | self.printText(log) | ||
225 | self.out.nl() | ||
226 | |||
227 | def ValidateOptions(self, opt, args): | ||
228 | if not args or len(args) > 2: | ||
229 | self.OptionParser.error("missing manifests to diff") | ||
230 | if opt.this_manifest_only is False: | ||
231 | raise self.OptionParser.error( | ||
232 | "`diffmanifest` only supports the current tree" | ||
233 | ) | ||
234 | |||
235 | def Execute(self, opt, args): | ||
236 | self.out = _Coloring(self.client.globalConfig) | ||
237 | self.printText = self.out.nofmt_printer("text") | ||
238 | if opt.color: | ||
239 | self.printProject = self.out.nofmt_printer("project", attr="bold") | ||
240 | self.printAdded = self.out.nofmt_printer( | ||
241 | "green", fg="green", attr="bold" | ||
242 | ) | ||
243 | self.printRemoved = self.out.nofmt_printer( | ||
244 | "red", fg="red", attr="bold" | ||
245 | ) | ||
246 | self.printRevision = self.out.nofmt_printer("revision", fg="yellow") | ||
247 | else: | ||
248 | self.printProject = ( | ||
249 | self.printAdded | ||
250 | ) = self.printRemoved = self.printRevision = self.printText | ||
251 | |||
252 | manifest1 = RepoClient(self.repodir) | ||
253 | manifest1.Override(args[0], load_local_manifests=False) | ||
254 | if len(args) == 1: | ||
255 | manifest2 = self.manifest | ||
256 | else: | ||
257 | manifest2 = RepoClient(self.repodir) | ||
258 | manifest2.Override(args[1], load_local_manifests=False) | ||
259 | |||
260 | diff = manifest1.projectsDiff(manifest2) | ||
261 | if opt.raw: | ||
262 | self._printRawDiff( | ||
263 | diff, | ||
264 | pretty_format=opt.pretty_format, | ||
265 | local=opt.this_manifest_only, | ||
266 | ) | ||
267 | else: | ||
268 | self._printDiff( | ||
269 | diff, | ||
270 | color=opt.color, | ||
271 | pretty_format=opt.pretty_format, | ||
272 | local=opt.this_manifest_only, | ||
273 | ) | ||
diff --git a/subcmds/download.py b/subcmds/download.py
index 15824843..d81d1f8c 100644
--- a/subcmds/download.py
+++ b/subcmds/download.py
@@ -18,143 +18,187 @@ import sys
18 | from command import Command | 18 | from command import Command |
19 | from error import GitError, NoSuchProjectError | 19 | from error import GitError, NoSuchProjectError |
20 | 20 | ||
21 | CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$') | 21 | CHANGE_RE = re.compile(r"^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$") |
22 | 22 | ||
23 | 23 | ||
24 | class Download(Command): | 24 | class Download(Command): |
25 | COMMON = True | 25 | COMMON = True |
26 | helpSummary = "Download and checkout a change" | 26 | helpSummary = "Download and checkout a change" |
27 | helpUsage = """ | 27 | helpUsage = """ |
28 | %prog {[project] change[/patchset]}... | 28 | %prog {[project] change[/patchset]}... |
29 | """ | 29 | """ |
30 | helpDescription = """ | 30 | helpDescription = """ |
31 | The '%prog' command downloads a change from the review system and | 31 | The '%prog' command downloads a change from the review system and |
32 | makes it available in your project's local working directory. | 32 | makes it available in your project's local working directory. |
33 | If no project is specified try to use current directory as a project. | 33 | If no project is specified try to use current directory as a project. |
34 | """ | 34 | """ |
35 | 35 | ||
36 | def _Options(self, p): | 36 | def _Options(self, p): |
37 | p.add_option('-b', '--branch', | 37 | p.add_option("-b", "--branch", help="create a new branch first") |
38 | help='create a new branch first') | 38 | p.add_option( |
39 | p.add_option('-c', '--cherry-pick', | 39 | "-c", |
40 | dest='cherrypick', action='store_true', | 40 | "--cherry-pick", |
41 | help="cherry-pick instead of checkout") | 41 | dest="cherrypick", |
42 | p.add_option('-x', '--record-origin', action='store_true', | 42 | action="store_true", |
43 | help='pass -x when cherry-picking') | 43 | help="cherry-pick instead of checkout", |
44 | p.add_option('-r', '--revert', | 44 | ) |
45 | dest='revert', action='store_true', | 45 | p.add_option( |
46 | help="revert instead of checkout") | 46 | "-x", |
47 | p.add_option('-f', '--ff-only', | 47 | "--record-origin", |
48 | dest='ffonly', action='store_true', | 48 | action="store_true", |
49 | help="force fast-forward merge") | 49 | help="pass -x when cherry-picking", |
50 | 50 | ) | |
51 | def _ParseChangeIds(self, opt, args): | 51 | p.add_option( |
52 | if not args: | 52 | "-r", |
53 | self.Usage() | 53 | "--revert", |
54 | 54 | dest="revert", | |
55 | to_get = [] | 55 | action="store_true", |
56 | project = None | 56 | help="revert instead of checkout", |
57 | 57 | ) | |
58 | for a in args: | 58 | p.add_option( |
59 | m = CHANGE_RE.match(a) | 59 | "-f", |
60 | if m: | 60 | "--ff-only", |
61 | if not project: | 61 | dest="ffonly", |
62 | project = self.GetProjects(".")[0] | 62 | action="store_true", |
63 | print('Defaulting to cwd project', project.name) | 63 | help="force fast-forward merge", |
64 | chg_id = int(m.group(1)) | 64 | ) |
65 | if m.group(2): | 65 | |
66 | ps_id = int(m.group(2)) | 66 | def _ParseChangeIds(self, opt, args): |
67 | else: | 67 | if not args: |
68 | ps_id = 1 | 68 | self.Usage() |
69 | refs = 'refs/changes/%2.2d/%d/' % (chg_id % 100, chg_id) | 69 | |
70 | output = project._LsRemote(refs + '*') | 70 | to_get = [] |
71 | if output: | 71 | project = None |
72 | regex = refs + r'(\d+)' | 72 | |
73 | rcomp = re.compile(regex, re.I) | 73 | for a in args: |
74 | for line in output.splitlines(): | 74 | m = CHANGE_RE.match(a) |
75 | match = rcomp.search(line) | 75 | if m: |
76 | if match: | 76 | if not project: |
77 | ps_id = max(int(match.group(1)), ps_id) | 77 | project = self.GetProjects(".")[0] |
78 | to_get.append((project, chg_id, ps_id)) | 78 | print("Defaulting to cwd project", project.name) |
79 | else: | 79 | chg_id = int(m.group(1)) |
80 | projects = self.GetProjects([a], all_manifests=not opt.this_manifest_only) | 80 | if m.group(2): |
81 | if len(projects) > 1: | 81 | ps_id = int(m.group(2)) |
82 | # If the cwd is one of the projects, assume they want that. | 82 | else: |
83 | try: | 83 | ps_id = 1 |
84 | project = self.GetProjects('.')[0] | 84 | refs = "refs/changes/%2.2d/%d/" % (chg_id % 100, chg_id) |
85 | except NoSuchProjectError: | 85 | output = project._LsRemote(refs + "*") |
86 | project = None | 86 | if output: |
87 | if project not in projects: | 87 | regex = refs + r"(\d+)" |
88 | print('error: %s matches too many projects; please re-run inside ' | 88 | rcomp = re.compile(regex, re.I) |
89 | 'the project checkout.' % (a,), file=sys.stderr) | 89 | for line in output.splitlines(): |
90 | for project in projects: | 90 | match = rcomp.search(line) |
91 | print(' %s/ @ %s' % (project.RelPath(local=opt.this_manifest_only), | 91 | if match: |
92 | project.revisionExpr), file=sys.stderr) | 92 | ps_id = max(int(match.group(1)), ps_id) |
93 | sys.exit(1) | 93 | to_get.append((project, chg_id, ps_id)) |
94 | else: | 94 | else: |
95 | project = projects[0] | 95 | projects = self.GetProjects( |
96 | print('Defaulting to cwd project', project.name) | 96 | [a], all_manifests=not opt.this_manifest_only |
97 | return to_get | 97 | ) |
98 | 98 | if len(projects) > 1: | |
99 | def ValidateOptions(self, opt, args): | 99 | # If the cwd is one of the projects, assume they want that. |
100 | if opt.record_origin: | 100 | try: |
101 | if not opt.cherrypick: | 101 | project = self.GetProjects(".")[0] |
102 | self.OptionParser.error('-x only makes sense with --cherry-pick') | 102 | except NoSuchProjectError: |
103 | 103 | project = None | |
104 | if opt.ffonly: | 104 | if project not in projects: |
105 | self.OptionParser.error('-x and --ff are mutually exclusive options') | 105 | print( |
106 | 106 | "error: %s matches too many projects; please " | |
107 | def Execute(self, opt, args): | 107 | "re-run inside the project checkout." % (a,), |
108 | for project, change_id, ps_id in self._ParseChangeIds(opt, args): | 108 | file=sys.stderr, |
109 | dl = project.DownloadPatchSet(change_id, ps_id) | 109 | ) |
110 | if not dl: | 110 | for project in projects: |
111 | print('[%s] change %d/%d not found' | 111 | print( |
112 | % (project.name, change_id, ps_id), | 112 | " %s/ @ %s" |
113 | file=sys.stderr) | 113 | % ( |
114 | sys.exit(1) | 114 | project.RelPath( |
115 | 115 | local=opt.this_manifest_only | |
116 | if not opt.revert and not dl.commits: | 116 | ), |
117 | print('[%s] change %d/%d has already been merged' | 117 | project.revisionExpr, |
118 | % (project.name, change_id, ps_id), | 118 | ), |
119 | file=sys.stderr) | 119 | file=sys.stderr, |
120 | continue | 120 | ) |
121 | 121 | sys.exit(1) | |
122 | if len(dl.commits) > 1: | 122 | else: |
123 | print('[%s] %d/%d depends on %d unmerged changes:' | 123 | project = projects[0] |
124 | % (project.name, change_id, ps_id, len(dl.commits)), | 124 | print("Defaulting to cwd project", project.name) |
125 | file=sys.stderr) | 125 | return to_get |
126 | for c in dl.commits: | 126 | |
127 | print(' %s' % (c), file=sys.stderr) | 127 | def ValidateOptions(self, opt, args): |
128 | 128 | if opt.record_origin: | |
129 | if opt.cherrypick: | 129 | if not opt.cherrypick: |
130 | mode = 'cherry-pick' | 130 | self.OptionParser.error( |
131 | elif opt.revert: | 131 | "-x only makes sense with --cherry-pick" |
132 | mode = 'revert' | 132 | ) |
133 | elif opt.ffonly: | 133 | |
134 | mode = 'fast-forward merge' | 134 | if opt.ffonly: |
135 | else: | 135 | self.OptionParser.error( |
136 | mode = 'checkout' | 136 | "-x and --ff are mutually exclusive options" |
137 | 137 | ) | |
138 | # We'll combine the branch+checkout operation, but all the rest need a | 138 | |
139 | # dedicated branch start. | 139 | def Execute(self, opt, args): |
140 | if opt.branch and mode != 'checkout': | 140 | for project, change_id, ps_id in self._ParseChangeIds(opt, args): |
141 | project.StartBranch(opt.branch) | 141 | dl = project.DownloadPatchSet(change_id, ps_id) |
142 | 142 | if not dl: | |
143 | try: | 143 | print( |
144 | if opt.cherrypick: | 144 | "[%s] change %d/%d not found" |
145 | project._CherryPick(dl.commit, ffonly=opt.ffonly, | 145 | % (project.name, change_id, ps_id), |
146 | record_origin=opt.record_origin) | 146 | file=sys.stderr, |
147 | elif opt.revert: | 147 | ) |
148 | project._Revert(dl.commit) | 148 | sys.exit(1) |
149 | elif opt.ffonly: | 149 | |
150 | project._FastForward(dl.commit, ffonly=True) | 150 | if not opt.revert and not dl.commits: |
151 | else: | 151 | print( |
152 | if opt.branch: | 152 | "[%s] change %d/%d has already been merged" |
153 | project.StartBranch(opt.branch, revision=dl.commit) | 153 | % (project.name, change_id, ps_id), |
154 | else: | 154 | file=sys.stderr, |
155 | project._Checkout(dl.commit) | 155 | ) |
156 | 156 | continue | |
157 | except GitError: | 157 | |
158 | print('[%s] Could not complete the %s of %s' | 158 | if len(dl.commits) > 1: |
159 | % (project.name, mode, dl.commit), file=sys.stderr) | 159 | print( |
160 | sys.exit(1) | 160 | "[%s] %d/%d depends on %d unmerged changes:" |
161 | % (project.name, change_id, ps_id, len(dl.commits)), | ||
162 | file=sys.stderr, | ||
163 | ) | ||
164 | for c in dl.commits: | ||
165 | print(" %s" % (c), file=sys.stderr) | ||
166 | |||
167 | if opt.cherrypick: | ||
168 | mode = "cherry-pick" | ||
169 | elif opt.revert: | ||
170 | mode = "revert" | ||
171 | elif opt.ffonly: | ||
172 | mode = "fast-forward merge" | ||
173 | else: | ||
174 | mode = "checkout" | ||
175 | |||
176 | # We'll combine the branch+checkout operation, but all the rest need | ||
177 | # a dedicated branch start. | ||
178 | if opt.branch and mode != "checkout": | ||
179 | project.StartBranch(opt.branch) | ||
180 | |||
181 | try: | ||
182 | if opt.cherrypick: | ||
183 | project._CherryPick( | ||
184 | dl.commit, | ||
185 | ffonly=opt.ffonly, | ||
186 | record_origin=opt.record_origin, | ||
187 | ) | ||
188 | elif opt.revert: | ||
189 | project._Revert(dl.commit) | ||
190 | elif opt.ffonly: | ||
191 | project._FastForward(dl.commit, ffonly=True) | ||
192 | else: | ||
193 | if opt.branch: | ||
194 | project.StartBranch(opt.branch, revision=dl.commit) | ||
195 | else: | ||
196 | project._Checkout(dl.commit) | ||
197 | |||
198 | except GitError: | ||
199 | print( | ||
200 | "[%s] Could not complete the %s of %s" | ||
201 | % (project.name, mode, dl.commit), | ||
202 | file=sys.stderr, | ||
203 | ) | ||
204 | sys.exit(1) | ||
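The _ParseChangeIds hunk above resolves a bare Gerrit change number to its newest patchset by listing refs/changes/<last two digits>/<change>/* on the remote and keeping the highest numeric suffix. A minimal standalone sketch of that selection (the function name and the sample ls-remote lines are illustrative, not part of repo's API):

    import re

    def latest_patchset(change_id, ls_remote_lines, default_ps=1):
        """Pick the newest patchset of a Gerrit change from `git ls-remote` lines.

        Mirrors the loop above: the ref prefix uses the change number's last two
        digits as a shard directory; any advertised patchset larger than the
        default wins.  Names here are illustrative only.
        """
        prefix = "refs/changes/%2.2d/%d/" % (change_id % 100, change_id)
        rcomp = re.compile(re.escape(prefix) + r"(\d+)", re.I)
        ps_id = default_ps
        for line in ls_remote_lines:
            match = rcomp.search(line)
            if match:
                ps_id = max(int(match.group(1)), ps_id)
        return prefix, ps_id

    # Example: a remote advertising three patchsets for change 12345.
    lines = [
        "deadbeef\trefs/changes/45/12345/1",
        "cafef00d\trefs/changes/45/12345/2",
        "0badc0de\trefs/changes/45/12345/3",
    ]
    print(latest_patchset(12345, lines))  # ('refs/changes/45/12345/', 3)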
diff --git a/subcmds/forall.py b/subcmds/forall.py index f9f34e33..0a897357 100644 --- a/subcmds/forall.py +++ b/subcmds/forall.py | |||
@@ -23,31 +23,36 @@ import sys | |||
23 | import subprocess | 23 | import subprocess |
24 | 24 | ||
25 | from color import Coloring | 25 | from color import Coloring |
26 | from command import DEFAULT_LOCAL_JOBS, Command, MirrorSafeCommand, WORKER_BATCH_SIZE | 26 | from command import ( |
27 | DEFAULT_LOCAL_JOBS, | ||
28 | Command, | ||
29 | MirrorSafeCommand, | ||
30 | WORKER_BATCH_SIZE, | ||
31 | ) | ||
27 | from error import ManifestInvalidRevisionError | 32 | from error import ManifestInvalidRevisionError |
28 | 33 | ||
29 | _CAN_COLOR = [ | 34 | _CAN_COLOR = [ |
30 | 'branch', | 35 | "branch", |
31 | 'diff', | 36 | "diff", |
32 | 'grep', | 37 | "grep", |
33 | 'log', | 38 | "log", |
34 | ] | 39 | ] |
35 | 40 | ||
36 | 41 | ||
37 | class ForallColoring(Coloring): | 42 | class ForallColoring(Coloring): |
38 | def __init__(self, config): | 43 | def __init__(self, config): |
39 | Coloring.__init__(self, config, 'forall') | 44 | Coloring.__init__(self, config, "forall") |
40 | self.project = self.printer('project', attr='bold') | 45 | self.project = self.printer("project", attr="bold") |
41 | 46 | ||
42 | 47 | ||
43 | class Forall(Command, MirrorSafeCommand): | 48 | class Forall(Command, MirrorSafeCommand): |
44 | COMMON = False | 49 | COMMON = False |
45 | helpSummary = "Run a shell command in each project" | 50 | helpSummary = "Run a shell command in each project" |
46 | helpUsage = """ | 51 | helpUsage = """ |
47 | %prog [<project>...] -c <command> [<arg>...] | 52 | %prog [<project>...] -c <command> [<arg>...] |
48 | %prog -r str1 [str2] ... -c <command> [<arg>...] | 53 | %prog -r str1 [str2] ... -c <command> [<arg>...] |
49 | """ | 54 | """ |
50 | helpDescription = """ | 55 | helpDescription = """ |
51 | Executes the same shell command in each project. | 56 | Executes the same shell command in each project. |
52 | 57 | ||
53 | The -r option allows running the command only on projects matching | 58 | The -r option allows running the command only on projects matching |
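Only the git subcommands listed in _CAN_COLOR get coloring help: in the Execute() hunk that follows, forall scans the argv for the first non-option word and, when it names one of those subcommands and the user has coloring enabled, inserts --color right after it, because the output is about to be piped. A small sketch of that scan, with the config lookup reduced to a boolean:

    _CAN_COLOR = ["branch", "diff", "grep", "log"]

    def maybe_add_color(cmd, color_enabled):
        """Insert --color after the git subcommand when forall pipes its output.

        Sketch of the scan in the hunk below; `color_enabled` stands in for the
        ColorCmd(...).is_on check, which needs a live git config object here.
        """
        for cn in cmd[1:]:
            if not cn.startswith("-"):
                break
        else:
            cn = None
        if cn and cn in _CAN_COLOR and color_enabled:
            cmd.insert(cmd.index(cn) + 1, "--color")
        return cmd

    print(maybe_add_color(["git", "log", "--oneline"], True))
    # ['git', 'log', '--color', '--oneline']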
@@ -125,236 +130,285 @@ terminal and are not redirected. | |||
125 | If -e is used, when a command exits unsuccessfully, '%prog' will abort | 130 | If -e is used, when a command exits unsuccessfully, '%prog' will abort |
126 | without iterating through the remaining projects. | 131 | without iterating through the remaining projects. |
127 | """ | 132 | """ |
128 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 133 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
129 | 134 | ||
130 | @staticmethod | 135 | @staticmethod |
131 | def _cmd_option(option, _opt_str, _value, parser): | 136 | def _cmd_option(option, _opt_str, _value, parser): |
132 | setattr(parser.values, option.dest, list(parser.rargs)) | 137 | setattr(parser.values, option.dest, list(parser.rargs)) |
133 | while parser.rargs: | 138 | while parser.rargs: |
134 | del parser.rargs[0] | 139 | del parser.rargs[0] |
135 | 140 | ||
136 | def _Options(self, p): | 141 | def _Options(self, p): |
137 | p.add_option('-r', '--regex', | 142 | p.add_option( |
138 | dest='regex', action='store_true', | 143 | "-r", |
139 | help='execute the command only on projects matching regex or wildcard expression') | 144 | "--regex", |
140 | p.add_option('-i', '--inverse-regex', | 145 | dest="regex", |
141 | dest='inverse_regex', action='store_true', | 146 | action="store_true", |
142 | help='execute the command only on projects not matching regex or ' | 147 | help="execute the command only on projects matching regex or " |
143 | 'wildcard expression') | 148 | "wildcard expression", |
144 | p.add_option('-g', '--groups', | 149 | ) |
145 | dest='groups', | 150 | p.add_option( |
146 | help='execute the command only on projects matching the specified groups') | 151 | "-i", |
147 | p.add_option('-c', '--command', | 152 | "--inverse-regex", |
148 | help='command (and arguments) to execute', | 153 | dest="inverse_regex", |
149 | dest='command', | 154 | action="store_true", |
150 | action='callback', | 155 | help="execute the command only on projects not matching regex or " |
151 | callback=self._cmd_option) | 156 | "wildcard expression", |
152 | p.add_option('-e', '--abort-on-errors', | 157 | ) |
153 | dest='abort_on_errors', action='store_true', | 158 | p.add_option( |
154 | help='abort if a command exits unsuccessfully') | 159 | "-g", |
155 | p.add_option('--ignore-missing', action='store_true', | 160 | "--groups", |
156 | help='silently skip & do not exit non-zero due to missing ' | 161 | dest="groups", |
157 | 'checkouts') | 162 | help="execute the command only on projects matching the specified " |
158 | 163 | "groups", | |
159 | g = p.get_option_group('--quiet') | 164 | ) |
160 | g.add_option('-p', | 165 | p.add_option( |
161 | dest='project_header', action='store_true', | 166 | "-c", |
162 | help='show project headers before output') | 167 | "--command", |
163 | p.add_option('--interactive', | 168 | help="command (and arguments) to execute", |
164 | action='store_true', | 169 | dest="command", |
165 | help='force interactive usage') | 170 | action="callback", |
166 | 171 | callback=self._cmd_option, | |
167 | def WantPager(self, opt): | 172 | ) |
168 | return opt.project_header and opt.jobs == 1 | 173 | p.add_option( |
169 | 174 | "-e", | |
170 | def ValidateOptions(self, opt, args): | 175 | "--abort-on-errors", |
171 | if not opt.command: | 176 | dest="abort_on_errors", |
172 | self.Usage() | 177 | action="store_true", |
173 | 178 | help="abort if a command exits unsuccessfully", | |
174 | def Execute(self, opt, args): | 179 | ) |
175 | cmd = [opt.command[0]] | 180 | p.add_option( |
176 | all_trees = not opt.this_manifest_only | 181 | "--ignore-missing", |
177 | 182 | action="store_true", | |
178 | shell = True | 183 | help="silently skip & do not exit non-zero due to missing " |
179 | if re.compile(r'^[a-z0-9A-Z_/\.-]+$').match(cmd[0]): | 184 | "checkouts", |
180 | shell = False | 185 | ) |
181 | 186 | ||
182 | if shell: | 187 | g = p.get_option_group("--quiet") |
183 | cmd.append(cmd[0]) | 188 | g.add_option( |
184 | cmd.extend(opt.command[1:]) | 189 | "-p", |
185 | 190 | dest="project_header", | |
186 | # Historically, forall operated interactively, and in serial. If the user | 191 | action="store_true", |
187 | # has selected 1 job, then default to interactive mode. | 192 | help="show project headers before output", |
188 | if opt.jobs == 1: | 193 | ) |
189 | opt.interactive = True | 194 | p.add_option( |
190 | 195 | "--interactive", action="store_true", help="force interactive usage" | |
191 | if opt.project_header \ | 196 | ) |
192 | and not shell \ | 197 | |
193 | and cmd[0] == 'git': | 198 | def WantPager(self, opt): |
194 | # If this is a direct git command that can enable colorized | 199 | return opt.project_header and opt.jobs == 1 |
195 | # output and the user prefers coloring, add --color into the | 200 | |
196 | # command line because we are going to wrap the command into | 201 | def ValidateOptions(self, opt, args): |
197 | # a pipe and git won't know coloring should activate. | 202 | if not opt.command: |
198 | # | 203 | self.Usage() |
199 | for cn in cmd[1:]: | 204 | |
200 | if not cn.startswith('-'): | 205 | def Execute(self, opt, args): |
201 | break | 206 | cmd = [opt.command[0]] |
202 | else: | 207 | all_trees = not opt.this_manifest_only |
203 | cn = None | 208 | |
204 | if cn and cn in _CAN_COLOR: | 209 | shell = True |
205 | class ColorCmd(Coloring): | 210 | if re.compile(r"^[a-z0-9A-Z_/\.-]+$").match(cmd[0]): |
206 | def __init__(self, config, cmd): | 211 | shell = False |
207 | Coloring.__init__(self, config, cmd) | 212 | |
208 | if ColorCmd(self.manifest.manifestProject.config, cn).is_on: | 213 | if shell: |
209 | cmd.insert(cmd.index(cn) + 1, '--color') | 214 | cmd.append(cmd[0]) |
210 | 215 | cmd.extend(opt.command[1:]) | |
211 | mirror = self.manifest.IsMirror | 216 | |
212 | rc = 0 | 217 | # Historically, forall operated interactively, and in serial. If the |
213 | 218 | # user has selected 1 job, then default to interactive mode. | |
214 | smart_sync_manifest_name = "smart_sync_override.xml" | 219 | if opt.jobs == 1: |
215 | smart_sync_manifest_path = os.path.join( | 220 | opt.interactive = True |
216 | self.manifest.manifestProject.worktree, smart_sync_manifest_name) | 221 | |
217 | 222 | if opt.project_header and not shell and cmd[0] == "git": | |
218 | if os.path.isfile(smart_sync_manifest_path): | 223 | # If this is a direct git command that can enable colorized |
219 | self.manifest.Override(smart_sync_manifest_path) | 224 | # output and the user prefers coloring, add --color into the |
220 | 225 | # command line because we are going to wrap the command into | |
221 | if opt.regex: | 226 | # a pipe and git won't know coloring should activate. |
222 | projects = self.FindProjects(args, all_manifests=all_trees) | 227 | # |
223 | elif opt.inverse_regex: | 228 | for cn in cmd[1:]: |
224 | projects = self.FindProjects(args, inverse=True, all_manifests=all_trees) | 229 | if not cn.startswith("-"): |
225 | else: | 230 | break |
226 | projects = self.GetProjects(args, groups=opt.groups, all_manifests=all_trees) | 231 | else: |
227 | 232 | cn = None | |
228 | os.environ['REPO_COUNT'] = str(len(projects)) | 233 | if cn and cn in _CAN_COLOR: |
229 | 234 | ||
230 | try: | 235 | class ColorCmd(Coloring): |
231 | config = self.manifest.manifestProject.config | 236 | def __init__(self, config, cmd): |
232 | with multiprocessing.Pool(opt.jobs, InitWorker) as pool: | 237 | Coloring.__init__(self, config, cmd) |
233 | results_it = pool.imap( | 238 | |
234 | functools.partial(DoWorkWrapper, mirror, opt, cmd, shell, config), | 239 | if ColorCmd(self.manifest.manifestProject.config, cn).is_on: |
235 | enumerate(projects), | 240 | cmd.insert(cmd.index(cn) + 1, "--color") |
236 | chunksize=WORKER_BATCH_SIZE) | 241 | |
237 | first = True | 242 | mirror = self.manifest.IsMirror |
238 | for (r, output) in results_it: | 243 | rc = 0 |
239 | if output: | 244 | |
240 | if first: | 245 | smart_sync_manifest_name = "smart_sync_override.xml" |
241 | first = False | 246 | smart_sync_manifest_path = os.path.join( |
242 | elif opt.project_header: | 247 | self.manifest.manifestProject.worktree, smart_sync_manifest_name |
243 | print() | 248 | ) |
244 | # To simplify the DoWorkWrapper, take care of automatic newlines. | 249 | |
245 | end = '\n' | 250 | if os.path.isfile(smart_sync_manifest_path): |
246 | if output[-1] == '\n': | 251 | self.manifest.Override(smart_sync_manifest_path) |
247 | end = '' | 252 | |
248 | print(output, end=end) | 253 | if opt.regex: |
249 | rc = rc or r | 254 | projects = self.FindProjects(args, all_manifests=all_trees) |
250 | if r != 0 and opt.abort_on_errors: | 255 | elif opt.inverse_regex: |
251 | raise Exception('Aborting due to previous error') | 256 | projects = self.FindProjects( |
252 | except (KeyboardInterrupt, WorkerKeyboardInterrupt): | 257 | args, inverse=True, all_manifests=all_trees |
253 | # Catch KeyboardInterrupt raised inside and outside of workers | 258 | ) |
254 | rc = rc or errno.EINTR | 259 | else: |
255 | except Exception as e: | 260 | projects = self.GetProjects( |
256 | # Catch any other exceptions raised | 261 | args, groups=opt.groups, all_manifests=all_trees |
257 | print('forall: unhandled error, terminating the pool: %s: %s' % | 262 | ) |
258 | (type(e).__name__, e), | 263 | |
259 | file=sys.stderr) | 264 | os.environ["REPO_COUNT"] = str(len(projects)) |
260 | rc = rc or getattr(e, 'errno', 1) | 265 | |
261 | if rc != 0: | 266 | try: |
262 | sys.exit(rc) | 267 | config = self.manifest.manifestProject.config |
268 | with multiprocessing.Pool(opt.jobs, InitWorker) as pool: | ||
269 | results_it = pool.imap( | ||
270 | functools.partial( | ||
271 | DoWorkWrapper, mirror, opt, cmd, shell, config | ||
272 | ), | ||
273 | enumerate(projects), | ||
274 | chunksize=WORKER_BATCH_SIZE, | ||
275 | ) | ||
276 | first = True | ||
277 | for r, output in results_it: | ||
278 | if output: | ||
279 | if first: | ||
280 | first = False | ||
281 | elif opt.project_header: | ||
282 | print() | ||
283 | # To simplify the DoWorkWrapper, take care of automatic | ||
284 | # newlines. | ||
285 | end = "\n" | ||
286 | if output[-1] == "\n": | ||
287 | end = "" | ||
288 | print(output, end=end) | ||
289 | rc = rc or r | ||
290 | if r != 0 and opt.abort_on_errors: | ||
291 | raise Exception("Aborting due to previous error") | ||
292 | except (KeyboardInterrupt, WorkerKeyboardInterrupt): | ||
293 | # Catch KeyboardInterrupt raised inside and outside of workers | ||
294 | rc = rc or errno.EINTR | ||
295 | except Exception as e: | ||
296 | # Catch any other exceptions raised | ||
297 | print( | ||
298 | "forall: unhandled error, terminating the pool: %s: %s" | ||
299 | % (type(e).__name__, e), | ||
300 | file=sys.stderr, | ||
301 | ) | ||
302 | rc = rc or getattr(e, "errno", 1) | ||
303 | if rc != 0: | ||
304 | sys.exit(rc) | ||
263 | 305 | ||
264 | 306 | ||
265 | class WorkerKeyboardInterrupt(Exception): | 307 | class WorkerKeyboardInterrupt(Exception): |
266 | """ Keyboard interrupt exception for worker processes. """ | 308 | """Keyboard interrupt exception for worker processes.""" |
267 | 309 | ||
268 | 310 | ||
269 | def InitWorker(): | 311 | def InitWorker(): |
270 | signal.signal(signal.SIGINT, signal.SIG_IGN) | 312 | signal.signal(signal.SIGINT, signal.SIG_IGN) |
271 | 313 | ||
272 | 314 | ||
273 | def DoWorkWrapper(mirror, opt, cmd, shell, config, args): | 315 | def DoWorkWrapper(mirror, opt, cmd, shell, config, args): |
274 | """ A wrapper around the DoWork() method. | 316 | """A wrapper around the DoWork() method. |
275 | 317 | ||
276 | Catch the KeyboardInterrupt exceptions here and re-raise them as a different, | 318 | Catch the KeyboardInterrupt exceptions here and re-raise them as a |
277 | ``Exception``-based exception to stop it flooding the console with stacktraces | 319 | different, ``Exception``-based exception to stop it flooding the console |
278 | and making the parent hang indefinitely. | 320 | with stacktraces and making the parent hang indefinitely. |
279 | 321 | ||
280 | """ | 322 | """ |
281 | cnt, project = args | 323 | cnt, project = args |
282 | try: | 324 | try: |
283 | return DoWork(project, mirror, opt, cmd, shell, cnt, config) | 325 | return DoWork(project, mirror, opt, cmd, shell, cnt, config) |
284 | except KeyboardInterrupt: | 326 | except KeyboardInterrupt: |
285 | print('%s: Worker interrupted' % project.name) | 327 | print("%s: Worker interrupted" % project.name) |
286 | raise WorkerKeyboardInterrupt() | 328 | raise WorkerKeyboardInterrupt() |
287 | 329 | ||
288 | 330 | ||
289 | def DoWork(project, mirror, opt, cmd, shell, cnt, config): | 331 | def DoWork(project, mirror, opt, cmd, shell, cnt, config): |
290 | env = os.environ.copy() | 332 | env = os.environ.copy() |
291 | 333 | ||
292 | def setenv(name, val): | 334 | def setenv(name, val): |
293 | if val is None: | 335 | if val is None: |
294 | val = '' | 336 | val = "" |
295 | env[name] = val | 337 | env[name] = val |
296 | 338 | ||
297 | setenv('REPO_PROJECT', project.name) | 339 | setenv("REPO_PROJECT", project.name) |
298 | setenv('REPO_OUTERPATH', project.manifest.path_prefix) | 340 | setenv("REPO_OUTERPATH", project.manifest.path_prefix) |
299 | setenv('REPO_INNERPATH', project.relpath) | 341 | setenv("REPO_INNERPATH", project.relpath) |
300 | setenv('REPO_PATH', project.RelPath(local=opt.this_manifest_only)) | 342 | setenv("REPO_PATH", project.RelPath(local=opt.this_manifest_only)) |
301 | setenv('REPO_REMOTE', project.remote.name) | 343 | setenv("REPO_REMOTE", project.remote.name) |
302 | try: | 344 | try: |
303 | # If we aren't in a fully synced state and we don't have the ref the manifest | 345 | # If we aren't in a fully synced state and we don't have the ref the |
304 | # wants, then this will fail. Ignore it for the purposes of this code. | 346 | # manifest wants, then this will fail. Ignore it for the purposes of |
305 | lrev = '' if mirror else project.GetRevisionId() | 347 | # this code. |
306 | except ManifestInvalidRevisionError: | 348 | lrev = "" if mirror else project.GetRevisionId() |
307 | lrev = '' | 349 | except ManifestInvalidRevisionError: |
308 | setenv('REPO_LREV', lrev) | 350 | lrev = "" |
309 | setenv('REPO_RREV', project.revisionExpr) | 351 | setenv("REPO_LREV", lrev) |
310 | setenv('REPO_UPSTREAM', project.upstream) | 352 | setenv("REPO_RREV", project.revisionExpr) |
311 | setenv('REPO_DEST_BRANCH', project.dest_branch) | 353 | setenv("REPO_UPSTREAM", project.upstream) |
312 | setenv('REPO_I', str(cnt + 1)) | 354 | setenv("REPO_DEST_BRANCH", project.dest_branch) |
313 | for annotation in project.annotations: | 355 | setenv("REPO_I", str(cnt + 1)) |
314 | setenv("REPO__%s" % (annotation.name), annotation.value) | 356 | for annotation in project.annotations: |
315 | 357 | setenv("REPO__%s" % (annotation.name), annotation.value) | |
316 | if mirror: | 358 | |
317 | setenv('GIT_DIR', project.gitdir) | 359 | if mirror: |
318 | cwd = project.gitdir | 360 | setenv("GIT_DIR", project.gitdir) |
319 | else: | 361 | cwd = project.gitdir |
320 | cwd = project.worktree | 362 | else: |
321 | 363 | cwd = project.worktree | |
322 | if not os.path.exists(cwd): | 364 | |
323 | # Allow the user to silently ignore missing checkouts so they can run on | 365 | if not os.path.exists(cwd): |
324 | # partial checkouts (good for infra recovery tools). | 366 | # Allow the user to silently ignore missing checkouts so they can run on |
325 | if opt.ignore_missing: | 367 | # partial checkouts (good for infra recovery tools). |
326 | return (0, '') | 368 | if opt.ignore_missing: |
327 | 369 | return (0, "") | |
328 | output = '' | 370 | |
329 | if ((opt.project_header and opt.verbose) | 371 | output = "" |
330 | or not opt.project_header): | 372 | if (opt.project_header and opt.verbose) or not opt.project_header: |
331 | output = 'skipping %s/' % project.RelPath(local=opt.this_manifest_only) | 373 | output = "skipping %s/" % project.RelPath( |
332 | return (1, output) | 374 | local=opt.this_manifest_only |
333 | 375 | ) | |
334 | if opt.verbose: | 376 | return (1, output) |
335 | stderr = subprocess.STDOUT | 377 | |
336 | else: | 378 | if opt.verbose: |
337 | stderr = subprocess.DEVNULL | 379 | stderr = subprocess.STDOUT |
338 | 380 | else: | |
339 | stdin = None if opt.interactive else subprocess.DEVNULL | 381 | stderr = subprocess.DEVNULL |
340 | 382 | ||
341 | result = subprocess.run( | 383 | stdin = None if opt.interactive else subprocess.DEVNULL |
342 | cmd, cwd=cwd, shell=shell, env=env, check=False, | 384 | |
343 | encoding='utf-8', errors='replace', | 385 | result = subprocess.run( |
344 | stdin=stdin, stdout=subprocess.PIPE, stderr=stderr) | 386 | cmd, |
345 | 387 | cwd=cwd, | |
346 | output = result.stdout | 388 | shell=shell, |
347 | if opt.project_header: | 389 | env=env, |
348 | if output: | 390 | check=False, |
349 | buf = io.StringIO() | 391 | encoding="utf-8", |
350 | out = ForallColoring(config) | 392 | errors="replace", |
351 | out.redirect(buf) | 393 | stdin=stdin, |
352 | if mirror: | 394 | stdout=subprocess.PIPE, |
353 | project_header_path = project.name | 395 | stderr=stderr, |
354 | else: | 396 | ) |
355 | project_header_path = project.RelPath(local=opt.this_manifest_only) | 397 | |
356 | out.project('project %s/' % project_header_path) | 398 | output = result.stdout |
357 | out.nl() | 399 | if opt.project_header: |
358 | buf.write(output) | 400 | if output: |
359 | output = buf.getvalue() | 401 | buf = io.StringIO() |
360 | return (result.returncode, output) | 402 | out = ForallColoring(config) |
403 | out.redirect(buf) | ||
404 | if mirror: | ||
405 | project_header_path = project.name | ||
406 | else: | ||
407 | project_header_path = project.RelPath( | ||
408 | local=opt.this_manifest_only | ||
409 | ) | ||
410 | out.project("project %s/" % project_header_path) | ||
411 | out.nl() | ||
412 | buf.write(output) | ||
413 | output = buf.getvalue() | ||
414 | return (result.returncode, output) | ||
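The rewritten Execute()/DoWork() pair above decides between exec-style and shell-style invocation from the shape of the command (a plain program name runs directly; anything else goes through the shell, with the string duplicated so it also becomes $0) and exports per-project REPO_* variables to the child. A condensed sketch of that launch path, with illustrative argument names and only two of the exported variables:

    import os
    import re
    import subprocess

    def run_in_project(command_argv, project_name, project_relpath, worktree):
        """Condensed launch path of `repo forall -c ...` for one project.

        Mirrors the hunks above; the argument names are illustrative and only
        REPO_PROJECT/REPO_PATH are exported here (the real command sets many
        more, including REPO_REMOTE, REPO_LREV and REPO_RREV).
        """
        cmd = [command_argv[0]]
        shell = not re.match(r"^[a-z0-9A-Z_/\.-]+$", command_argv[0])
        if shell:
            cmd.append(command_argv[0])  # duplicated so the snippet also gets $0
        cmd.extend(command_argv[1:])

        env = os.environ.copy()
        env["REPO_PROJECT"] = project_name
        env["REPO_PATH"] = project_relpath
        return subprocess.run(
            cmd,
            cwd=worktree,
            shell=shell,
            env=env,
            check=False,
            encoding="utf-8",
            errors="replace",
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )

    # e.g. run_in_project(["echo $REPO_PROJECT"], "platform/build", "build", "/src/build")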
diff --git a/subcmds/gitc_delete.py b/subcmds/gitc_delete.py index df749469..ae9d4d1f 100644 --- a/subcmds/gitc_delete.py +++ b/subcmds/gitc_delete.py | |||
@@ -19,28 +19,34 @@ import platform_utils | |||
19 | 19 | ||
20 | 20 | ||
21 | class GitcDelete(Command, GitcClientCommand): | 21 | class GitcDelete(Command, GitcClientCommand): |
22 | COMMON = True | 22 | COMMON = True |
23 | visible_everywhere = False | 23 | visible_everywhere = False |
24 | helpSummary = "Delete a GITC Client." | 24 | helpSummary = "Delete a GITC Client." |
25 | helpUsage = """ | 25 | helpUsage = """ |
26 | %prog | 26 | %prog |
27 | """ | 27 | """ |
28 | helpDescription = """ | 28 | helpDescription = """ |
29 | This subcommand deletes the current GITC client, deleting the GITC manifest | 29 | This subcommand deletes the current GITC client, deleting the GITC manifest |
30 | and all locally downloaded sources. | 30 | and all locally downloaded sources. |
31 | """ | 31 | """ |
32 | 32 | ||
33 | def _Options(self, p): | 33 | def _Options(self, p): |
34 | p.add_option('-f', '--force', | 34 | p.add_option( |
35 | dest='force', action='store_true', | 35 | "-f", |
36 | help='force the deletion (no prompt)') | 36 | "--force", |
37 | dest="force", | ||
38 | action="store_true", | ||
39 | help="force the deletion (no prompt)", | ||
40 | ) | ||
37 | 41 | ||
38 | def Execute(self, opt, args): | 42 | def Execute(self, opt, args): |
39 | if not opt.force: | 43 | if not opt.force: |
40 | prompt = ('This will delete GITC client: %s\nAre you sure? (yes/no) ' % | 44 | prompt = ( |
41 | self.gitc_manifest.gitc_client_name) | 45 | "This will delete GITC client: %s\nAre you sure? (yes/no) " |
42 | response = input(prompt).lower() | 46 | % self.gitc_manifest.gitc_client_name |
43 | if not response == 'yes': | 47 | ) |
44 | print('Response was not "yes"\n Exiting...') | 48 | response = input(prompt).lower() |
45 | sys.exit(1) | 49 | if not response == "yes": |
46 | platform_utils.rmtree(self.gitc_manifest.gitc_client_dir) | 50 | print('Response was not "yes"\n Exiting...') |
51 | sys.exit(1) | ||
52 | platform_utils.rmtree(self.gitc_manifest.gitc_client_dir) | ||
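The only moving part above is the confirmation guard: unless -f/--force is given, anything other than a literal "yes" aborts before platform_utils.rmtree() runs. Pulled out as a sketch (the injectable reader exists only so the snippet can be exercised without a terminal):

    import sys

    def confirm_or_abort(force, client_name, reader=input):
        """Prompt guard from gitc-delete's Execute(), extracted for illustration."""
        if force:
            return
        prompt = (
            "This will delete GITC client: %s\nAre you sure? (yes/no) " % client_name
        )
        if reader(prompt).lower() != "yes":
            print('Response was not "yes"\n Exiting...')
            sys.exit(1)

    # confirm_or_abort(force=True, client_name="my-client")  # skips the prompt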
diff --git a/subcmds/gitc_init.py b/subcmds/gitc_init.py index e3a5813d..54791d58 100644 --- a/subcmds/gitc_init.py +++ b/subcmds/gitc_init.py | |||
@@ -23,13 +23,13 @@ import wrapper | |||
23 | 23 | ||
24 | 24 | ||
25 | class GitcInit(init.Init, GitcAvailableCommand): | 25 | class GitcInit(init.Init, GitcAvailableCommand): |
26 | COMMON = True | 26 | COMMON = True |
27 | MULTI_MANIFEST_SUPPORT = False | 27 | MULTI_MANIFEST_SUPPORT = False |
28 | helpSummary = "Initialize a GITC Client." | 28 | helpSummary = "Initialize a GITC Client." |
29 | helpUsage = """ | 29 | helpUsage = """ |
30 | %prog [options] [client name] | 30 | %prog [options] [client name] |
31 | """ | 31 | """ |
32 | helpDescription = """ | 32 | helpDescription = """ |
33 | The '%prog' command is run to initialize a new GITC client for use | 33 | The '%prog' command is run to initialize a new GITC client for use |
34 | with the GITC file system. | 34 | with the GITC file system. |
35 | 35 | ||
@@ -47,30 +47,41 @@ The optional -f argument can be used to specify the manifest file to | |||
47 | use for this GITC client. | 47 | use for this GITC client. |
48 | """ | 48 | """ |
49 | 49 | ||
50 | def _Options(self, p): | 50 | def _Options(self, p): |
51 | super()._Options(p, gitc_init=True) | 51 | super()._Options(p, gitc_init=True) |
52 | 52 | ||
53 | def Execute(self, opt, args): | 53 | def Execute(self, opt, args): |
54 | gitc_client = gitc_utils.parse_clientdir(os.getcwd()) | 54 | gitc_client = gitc_utils.parse_clientdir(os.getcwd()) |
55 | if not gitc_client or (opt.gitc_client and gitc_client != opt.gitc_client): | 55 | if not gitc_client or ( |
56 | print('fatal: Please update your repo command. See go/gitc for instructions.', | 56 | opt.gitc_client and gitc_client != opt.gitc_client |
57 | file=sys.stderr) | 57 | ): |
58 | sys.exit(1) | 58 | print( |
59 | self.client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(), | 59 | "fatal: Please update your repo command. See go/gitc for " |
60 | gitc_client) | 60 | "instructions.", |
61 | super().Execute(opt, args) | 61 | file=sys.stderr, |
62 | ) | ||
63 | sys.exit(1) | ||
64 | self.client_dir = os.path.join( | ||
65 | gitc_utils.get_gitc_manifest_dir(), gitc_client | ||
66 | ) | ||
67 | super().Execute(opt, args) | ||
62 | 68 | ||
63 | manifest_file = self.manifest.manifestFile | 69 | manifest_file = self.manifest.manifestFile |
64 | if opt.manifest_file: | 70 | if opt.manifest_file: |
65 | if not os.path.exists(opt.manifest_file): | 71 | if not os.path.exists(opt.manifest_file): |
66 | print('fatal: Specified manifest file %s does not exist.' % | 72 | print( |
67 | opt.manifest_file) | 73 | "fatal: Specified manifest file %s does not exist." |
68 | sys.exit(1) | 74 | % opt.manifest_file |
69 | manifest_file = opt.manifest_file | 75 | ) |
76 | sys.exit(1) | ||
77 | manifest_file = opt.manifest_file | ||
70 | 78 | ||
71 | manifest = GitcManifest(self.repodir, os.path.join(self.client_dir, | 79 | manifest = GitcManifest( |
72 | '.manifest')) | 80 | self.repodir, os.path.join(self.client_dir, ".manifest") |
73 | manifest.Override(manifest_file) | 81 | ) |
74 | gitc_utils.generate_gitc_manifest(None, manifest) | 82 | manifest.Override(manifest_file) |
75 | print('Please run `cd %s` to view your GITC client.' % | 83 | gitc_utils.generate_gitc_manifest(None, manifest) |
76 | os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client)) | 84 | print( |
85 | "Please run `cd %s` to view your GITC client." | ||
86 | % os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client) | ||
87 | ) | ||
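Two directory trees matter in the Execute() hunk above: the client's generated manifest lives under gitc_utils.get_gitc_manifest_dir(), while the printed `cd` target is the same client name under the GITC FUSE mount (wrapper.Wrapper().GITC_FS_ROOT_DIR). A sketch of that layout using assumed example paths, since the real values come from gitc_utils and the wrapper, not from constants like these:

    import os

    # Assumed example locations; the real values come from gitc_utils and wrapper.
    GITC_MANIFEST_DIR = "/usr/local/google/gitc"
    GITC_FS_ROOT_DIR = "/gitc/manifest-rw"

    def gitc_paths(client_name):
        """Where gitc-init writes the client manifest vs. where the user browses it."""
        client_dir = os.path.join(GITC_MANIFEST_DIR, client_name)
        manifest_file = os.path.join(client_dir, ".manifest")
        view_dir = os.path.join(GITC_FS_ROOT_DIR, client_name)
        return client_dir, manifest_file, view_dir

    print(gitc_paths("my-client")[2])  # /gitc/manifest-rw/my-client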
diff --git a/subcmds/grep.py b/subcmds/grep.py index 93c9ae51..5cd33763 100644 --- a/subcmds/grep.py +++ b/subcmds/grep.py | |||
@@ -22,19 +22,19 @@ from git_command import GitCommand | |||
22 | 22 | ||
23 | 23 | ||
24 | class GrepColoring(Coloring): | 24 | class GrepColoring(Coloring): |
25 | def __init__(self, config): | 25 | def __init__(self, config): |
26 | Coloring.__init__(self, config, 'grep') | 26 | Coloring.__init__(self, config, "grep") |
27 | self.project = self.printer('project', attr='bold') | 27 | self.project = self.printer("project", attr="bold") |
28 | self.fail = self.printer('fail', fg='red') | 28 | self.fail = self.printer("fail", fg="red") |
29 | 29 | ||
30 | 30 | ||
31 | class Grep(PagedCommand): | 31 | class Grep(PagedCommand): |
32 | COMMON = True | 32 | COMMON = True |
33 | helpSummary = "Print lines matching a pattern" | 33 | helpSummary = "Print lines matching a pattern" |
34 | helpUsage = """ | 34 | helpUsage = """ |
35 | %prog {pattern | -e pattern} [<project>...] | 35 | %prog {pattern | -e pattern} [<project>...] |
36 | """ | 36 | """ |
37 | helpDescription = """ | 37 | helpDescription = """ |
38 | Search for the specified patterns in all project files. | 38 | Search for the specified patterns in all project files. |
39 | 39 | ||
40 | # Boolean Options | 40 | # Boolean Options |
@@ -62,215 +62,304 @@ contain a line that matches both expressions: | |||
62 | repo grep --all-match -e NODE -e Unexpected | 62 | repo grep --all-match -e NODE -e Unexpected |
63 | 63 | ||
64 | """ | 64 | """ |
65 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 65 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
66 | 66 | ||
67 | @staticmethod | 67 | @staticmethod |
68 | def _carry_option(_option, opt_str, value, parser): | 68 | def _carry_option(_option, opt_str, value, parser): |
69 | pt = getattr(parser.values, 'cmd_argv', None) | 69 | pt = getattr(parser.values, "cmd_argv", None) |
70 | if pt is None: | 70 | if pt is None: |
71 | pt = [] | 71 | pt = [] |
72 | setattr(parser.values, 'cmd_argv', pt) | 72 | setattr(parser.values, "cmd_argv", pt) |
73 | 73 | ||
74 | if opt_str == '-(': | 74 | if opt_str == "-(": |
75 | pt.append('(') | 75 | pt.append("(") |
76 | elif opt_str == '-)': | 76 | elif opt_str == "-)": |
77 | pt.append(')') | 77 | pt.append(")") |
78 | else: | 78 | else: |
79 | pt.append(opt_str) | 79 | pt.append(opt_str) |
80 | 80 | ||
81 | if value is not None: | 81 | if value is not None: |
82 | pt.append(value) | 82 | pt.append(value) |
83 | 83 | ||
84 | def _CommonOptions(self, p): | 84 | def _CommonOptions(self, p): |
85 | """Override common options slightly.""" | 85 | """Override common options slightly.""" |
86 | super()._CommonOptions(p, opt_v=False) | 86 | super()._CommonOptions(p, opt_v=False) |
87 | 87 | ||
88 | def _Options(self, p): | 88 | def _Options(self, p): |
89 | g = p.add_option_group('Sources') | 89 | g = p.add_option_group("Sources") |
90 | g.add_option('--cached', | 90 | g.add_option( |
91 | action='callback', callback=self._carry_option, | 91 | "--cached", |
92 | help='Search the index, instead of the work tree') | 92 | action="callback", |
93 | g.add_option('-r', '--revision', | 93 | callback=self._carry_option, |
94 | dest='revision', action='append', metavar='TREEish', | 94 | help="Search the index, instead of the work tree", |
95 | help='Search TREEish, instead of the work tree') | 95 | ) |
96 | 96 | g.add_option( | |
97 | g = p.add_option_group('Pattern') | 97 | "-r", |
98 | g.add_option('-e', | 98 | "--revision", |
99 | action='callback', callback=self._carry_option, | 99 | dest="revision", |
100 | metavar='PATTERN', type='str', | 100 | action="append", |
101 | help='Pattern to search for') | 101 | metavar="TREEish", |
102 | g.add_option('-i', '--ignore-case', | 102 | help="Search TREEish, instead of the work tree", |
103 | action='callback', callback=self._carry_option, | 103 | ) |
104 | help='Ignore case differences') | 104 | |
105 | g.add_option('-a', '--text', | 105 | g = p.add_option_group("Pattern") |
106 | action='callback', callback=self._carry_option, | 106 | g.add_option( |
107 | help="Process binary files as if they were text") | 107 | "-e", |
108 | g.add_option('-I', | 108 | action="callback", |
109 | action='callback', callback=self._carry_option, | 109 | callback=self._carry_option, |
110 | help="Don't match the pattern in binary files") | 110 | metavar="PATTERN", |
111 | g.add_option('-w', '--word-regexp', | 111 | type="str", |
112 | action='callback', callback=self._carry_option, | 112 | help="Pattern to search for", |
113 | help='Match the pattern only at word boundaries') | 113 | ) |
114 | g.add_option('-v', '--invert-match', | 114 | g.add_option( |
115 | action='callback', callback=self._carry_option, | 115 | "-i", |
116 | help='Select non-matching lines') | 116 | "--ignore-case", |
117 | g.add_option('-G', '--basic-regexp', | 117 | action="callback", |
118 | action='callback', callback=self._carry_option, | 118 | callback=self._carry_option, |
119 | help='Use POSIX basic regexp for patterns (default)') | 119 | help="Ignore case differences", |
120 | g.add_option('-E', '--extended-regexp', | 120 | ) |
121 | action='callback', callback=self._carry_option, | 121 | g.add_option( |
122 | help='Use POSIX extended regexp for patterns') | 122 | "-a", |
123 | g.add_option('-F', '--fixed-strings', | 123 | "--text", |
124 | action='callback', callback=self._carry_option, | 124 | action="callback", |
125 | help='Use fixed strings (not regexp) for pattern') | 125 | callback=self._carry_option, |
126 | 126 | help="Process binary files as if they were text", | |
127 | g = p.add_option_group('Pattern Grouping') | 127 | ) |
128 | g.add_option('--all-match', | 128 | g.add_option( |
129 | action='callback', callback=self._carry_option, | 129 | "-I", |
130 | help='Limit match to lines that have all patterns') | 130 | action="callback", |
131 | g.add_option('--and', '--or', '--not', | 131 | callback=self._carry_option, |
132 | action='callback', callback=self._carry_option, | 132 | help="Don't match the pattern in binary files", |
133 | help='Boolean operators to combine patterns') | 133 | ) |
134 | g.add_option('-(', '-)', | 134 | g.add_option( |
135 | action='callback', callback=self._carry_option, | 135 | "-w", |
136 | help='Boolean operator grouping') | 136 | "--word-regexp", |
137 | 137 | action="callback", | |
138 | g = p.add_option_group('Output') | 138 | callback=self._carry_option, |
139 | g.add_option('-n', | 139 | help="Match the pattern only at word boundaries", |
140 | action='callback', callback=self._carry_option, | 140 | ) |
141 | help='Prefix the line number to matching lines') | 141 | g.add_option( |
142 | g.add_option('-C', | 142 | "-v", |
143 | action='callback', callback=self._carry_option, | 143 | "--invert-match", |
144 | metavar='CONTEXT', type='str', | 144 | action="callback", |
145 | help='Show CONTEXT lines around match') | 145 | callback=self._carry_option, |
146 | g.add_option('-B', | 146 | help="Select non-matching lines", |
147 | action='callback', callback=self._carry_option, | 147 | ) |
148 | metavar='CONTEXT', type='str', | 148 | g.add_option( |
149 | help='Show CONTEXT lines before match') | 149 | "-G", |
150 | g.add_option('-A', | 150 | "--basic-regexp", |
151 | action='callback', callback=self._carry_option, | 151 | action="callback", |
152 | metavar='CONTEXT', type='str', | 152 | callback=self._carry_option, |
153 | help='Show CONTEXT lines after match') | 153 | help="Use POSIX basic regexp for patterns (default)", |
154 | g.add_option('-l', '--name-only', '--files-with-matches', | 154 | ) |
155 | action='callback', callback=self._carry_option, | 155 | g.add_option( |
156 | help='Show only file names containing matching lines') | 156 | "-E", |
157 | g.add_option('-L', '--files-without-match', | 157 | "--extended-regexp", |
158 | action='callback', callback=self._carry_option, | 158 | action="callback", |
159 | help='Show only file names not containing matching lines') | 159 | callback=self._carry_option, |
160 | 160 | help="Use POSIX extended regexp for patterns", | |
161 | def _ExecuteOne(self, cmd_argv, project): | 161 | ) |
162 | """Process one project.""" | 162 | g.add_option( |
163 | try: | 163 | "-F", |
164 | p = GitCommand(project, | 164 | "--fixed-strings", |
165 | cmd_argv, | 165 | action="callback", |
166 | bare=False, | 166 | callback=self._carry_option, |
167 | capture_stdout=True, | 167 | help="Use fixed strings (not regexp) for pattern", |
168 | capture_stderr=True) | 168 | ) |
169 | except GitError as e: | 169 | |
170 | return (project, -1, None, str(e)) | 170 | g = p.add_option_group("Pattern Grouping") |
171 | 171 | g.add_option( | |
172 | return (project, p.Wait(), p.stdout, p.stderr) | 172 | "--all-match", |
173 | 173 | action="callback", | |
174 | @staticmethod | 174 | callback=self._carry_option, |
175 | def _ProcessResults(full_name, have_rev, opt, _pool, out, results): | 175 | help="Limit match to lines that have all patterns", |
176 | git_failed = False | 176 | ) |
177 | bad_rev = False | 177 | g.add_option( |
178 | have_match = False | 178 | "--and", |
179 | _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) | 179 | "--or", |
180 | 180 | "--not", | |
181 | for project, rc, stdout, stderr in results: | 181 | action="callback", |
182 | if rc < 0: | 182 | callback=self._carry_option, |
183 | git_failed = True | 183 | help="Boolean operators to combine patterns", |
184 | out.project('--- project %s ---' % _RelPath(project)) | 184 | ) |
185 | out.nl() | 185 | g.add_option( |
186 | out.fail('%s', stderr) | 186 | "-(", |
187 | out.nl() | 187 | "-)", |
188 | continue | 188 | action="callback", |
189 | 189 | callback=self._carry_option, | |
190 | if rc: | 190 | help="Boolean operator grouping", |
191 | # no results | 191 | ) |
192 | if stderr: | 192 | |
193 | if have_rev and 'fatal: ambiguous argument' in stderr: | 193 | g = p.add_option_group("Output") |
194 | bad_rev = True | 194 | g.add_option( |
195 | else: | 195 | "-n", |
196 | out.project('--- project %s ---' % _RelPath(project)) | 196 | action="callback", |
197 | out.nl() | 197 | callback=self._carry_option, |
198 | out.fail('%s', stderr.strip()) | 198 | help="Prefix the line number to matching lines", |
199 | out.nl() | 199 | ) |
200 | continue | 200 | g.add_option( |
201 | have_match = True | 201 | "-C", |
202 | 202 | action="callback", | |
203 | # We cut the last element, to avoid a blank line. | 203 | callback=self._carry_option, |
204 | r = stdout.split('\n') | 204 | metavar="CONTEXT", |
205 | r = r[0:-1] | 205 | type="str", |
206 | 206 | help="Show CONTEXT lines around match", | |
207 | if have_rev and full_name: | 207 | ) |
208 | for line in r: | 208 | g.add_option( |
209 | rev, line = line.split(':', 1) | 209 | "-B", |
210 | out.write("%s", rev) | 210 | action="callback", |
211 | out.write(':') | 211 | callback=self._carry_option, |
212 | out.project(_RelPath(project)) | 212 | metavar="CONTEXT", |
213 | out.write('/') | 213 | type="str", |
214 | out.write("%s", line) | 214 | help="Show CONTEXT lines before match", |
215 | out.nl() | 215 | ) |
216 | elif full_name: | 216 | g.add_option( |
217 | for line in r: | 217 | "-A", |
218 | out.project(_RelPath(project)) | 218 | action="callback", |
219 | out.write('/') | 219 | callback=self._carry_option, |
220 | out.write("%s", line) | 220 | metavar="CONTEXT", |
221 | out.nl() | 221 | type="str", |
222 | else: | 222 | help="Show CONTEXT lines after match", |
223 | for line in r: | 223 | ) |
224 | print(line) | 224 | g.add_option( |
225 | 225 | "-l", | |
226 | return (git_failed, bad_rev, have_match) | 226 | "--name-only", |
227 | 227 | "--files-with-matches", | |
228 | def Execute(self, opt, args): | 228 | action="callback", |
229 | out = GrepColoring(self.manifest.manifestProject.config) | 229 | callback=self._carry_option, |
230 | 230 | help="Show only file names containing matching lines", | |
231 | cmd_argv = ['grep'] | 231 | ) |
232 | if out.is_on: | 232 | g.add_option( |
233 | cmd_argv.append('--color') | 233 | "-L", |
234 | cmd_argv.extend(getattr(opt, 'cmd_argv', [])) | 234 | "--files-without-match", |
235 | 235 | action="callback", | |
236 | if '-e' not in cmd_argv: | 236 | callback=self._carry_option, |
237 | if not args: | 237 | help="Show only file names not containing matching lines", |
238 | self.Usage() | 238 | ) |
239 | cmd_argv.append('-e') | 239 | |
240 | cmd_argv.append(args[0]) | 240 | def _ExecuteOne(self, cmd_argv, project): |
241 | args = args[1:] | 241 | """Process one project.""" |
242 | 242 | try: | |
243 | projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) | 243 | p = GitCommand( |
244 | 244 | project, | |
245 | full_name = False | 245 | cmd_argv, |
246 | if len(projects) > 1: | 246 | bare=False, |
247 | cmd_argv.append('--full-name') | 247 | capture_stdout=True, |
248 | full_name = True | 248 | capture_stderr=True, |
249 | 249 | ) | |
250 | have_rev = False | 250 | except GitError as e: |
251 | if opt.revision: | 251 | return (project, -1, None, str(e)) |
252 | if '--cached' in cmd_argv: | 252 | |
253 | print('fatal: cannot combine --cached and --revision', file=sys.stderr) | 253 | return (project, p.Wait(), p.stdout, p.stderr) |
254 | sys.exit(1) | 254 | |
255 | have_rev = True | 255 | @staticmethod |
256 | cmd_argv.extend(opt.revision) | 256 | def _ProcessResults(full_name, have_rev, opt, _pool, out, results): |
257 | cmd_argv.append('--') | 257 | git_failed = False |
258 | 258 | bad_rev = False | |
259 | git_failed, bad_rev, have_match = self.ExecuteInParallel( | 259 | have_match = False |
260 | opt.jobs, | 260 | _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) |
261 | functools.partial(self._ExecuteOne, cmd_argv), | 261 | |
262 | projects, | 262 | for project, rc, stdout, stderr in results: |
263 | callback=functools.partial(self._ProcessResults, full_name, have_rev, opt), | 263 | if rc < 0: |
264 | output=out, | 264 | git_failed = True |
265 | ordered=True) | 265 | out.project("--- project %s ---" % _RelPath(project)) |
266 | 266 | out.nl() | |
267 | if git_failed: | 267 | out.fail("%s", stderr) |
268 | sys.exit(1) | 268 | out.nl() |
269 | elif have_match: | 269 | continue |
270 | sys.exit(0) | 270 | |
271 | elif have_rev and bad_rev: | 271 | if rc: |
272 | for r in opt.revision: | 272 | # no results |
273 | print("error: can't search revision %s" % r, file=sys.stderr) | 273 | if stderr: |
274 | sys.exit(1) | 274 | if have_rev and "fatal: ambiguous argument" in stderr: |
275 | else: | 275 | bad_rev = True |
276 | sys.exit(1) | 276 | else: |
277 | out.project("--- project %s ---" % _RelPath(project)) | ||
278 | out.nl() | ||
279 | out.fail("%s", stderr.strip()) | ||
280 | out.nl() | ||
281 | continue | ||
282 | have_match = True | ||
283 | |||
284 | # We cut the last element, to avoid a blank line. | ||
285 | r = stdout.split("\n") | ||
286 | r = r[0:-1] | ||
287 | |||
288 | if have_rev and full_name: | ||
289 | for line in r: | ||
290 | rev, line = line.split(":", 1) | ||
291 | out.write("%s", rev) | ||
292 | out.write(":") | ||
293 | out.project(_RelPath(project)) | ||
294 | out.write("/") | ||
295 | out.write("%s", line) | ||
296 | out.nl() | ||
297 | elif full_name: | ||
298 | for line in r: | ||
299 | out.project(_RelPath(project)) | ||
300 | out.write("/") | ||
301 | out.write("%s", line) | ||
302 | out.nl() | ||
303 | else: | ||
304 | for line in r: | ||
305 | print(line) | ||
306 | |||
307 | return (git_failed, bad_rev, have_match) | ||
308 | |||
309 | def Execute(self, opt, args): | ||
310 | out = GrepColoring(self.manifest.manifestProject.config) | ||
311 | |||
312 | cmd_argv = ["grep"] | ||
313 | if out.is_on: | ||
314 | cmd_argv.append("--color") | ||
315 | cmd_argv.extend(getattr(opt, "cmd_argv", [])) | ||
316 | |||
317 | if "-e" not in cmd_argv: | ||
318 | if not args: | ||
319 | self.Usage() | ||
320 | cmd_argv.append("-e") | ||
321 | cmd_argv.append(args[0]) | ||
322 | args = args[1:] | ||
323 | |||
324 | projects = self.GetProjects( | ||
325 | args, all_manifests=not opt.this_manifest_only | ||
326 | ) | ||
327 | |||
328 | full_name = False | ||
329 | if len(projects) > 1: | ||
330 | cmd_argv.append("--full-name") | ||
331 | full_name = True | ||
332 | |||
333 | have_rev = False | ||
334 | if opt.revision: | ||
335 | if "--cached" in cmd_argv: | ||
336 | print( | ||
337 | "fatal: cannot combine --cached and --revision", | ||
338 | file=sys.stderr, | ||
339 | ) | ||
340 | sys.exit(1) | ||
341 | have_rev = True | ||
342 | cmd_argv.extend(opt.revision) | ||
343 | cmd_argv.append("--") | ||
344 | |||
345 | git_failed, bad_rev, have_match = self.ExecuteInParallel( | ||
346 | opt.jobs, | ||
347 | functools.partial(self._ExecuteOne, cmd_argv), | ||
348 | projects, | ||
349 | callback=functools.partial( | ||
350 | self._ProcessResults, full_name, have_rev, opt | ||
351 | ), | ||
352 | output=out, | ||
353 | ordered=True, | ||
354 | ) | ||
355 | |||
356 | if git_failed: | ||
357 | sys.exit(1) | ||
358 | elif have_match: | ||
359 | sys.exit(0) | ||
360 | elif have_rev and bad_rev: | ||
361 | for r in opt.revision: | ||
362 | print("error: can't search revision %s" % r, file=sys.stderr) | ||
363 | sys.exit(1) | ||
364 | else: | ||
365 | sys.exit(1) | ||
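Most grep options are never interpreted by repo at all: the _carry_option callback above simply copies them into opt.cmd_argv, and Execute() replays them into a single `git grep` argv per project, adding --color when output is colored, -e for a bare pattern argument, --full-name when more than one project is searched, and any revisions followed by "--". A compressed sketch of that assembly, with patterns passed explicitly instead of recovered from the positional arguments:

    def build_git_grep_argv(carried, patterns, revisions=(), color=True,
                            multi_project=True):
        """Assemble the per-project `git grep` argv roughly as Execute() does.

        `carried` stands in for opt.cmd_argv (flags forwarded verbatim by the
        _carry_option callback); the keyword arguments replace the option and
        project bookkeeping the real command performs.
        """
        argv = ["grep"]
        if color:
            argv.append("--color")
        argv.extend(carried)
        for pat in patterns:
            argv.extend(["-e", pat])
        if multi_project:
            argv.append("--full-name")
        if revisions:
            argv.extend(revisions)
            argv.append("--")
        return argv

    print(build_git_grep_argv(["-i", "-n"], ["TODO"], revisions=["HEAD~5"]))
    # ['grep', '--color', '-i', '-n', '-e', 'TODO', '--full-name', 'HEAD~5', '--']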
diff --git a/subcmds/help.py b/subcmds/help.py index 1ad391db..50a48047 100644 --- a/subcmds/help.py +++ b/subcmds/help.py | |||
@@ -18,163 +18,193 @@ import textwrap | |||
18 | 18 | ||
19 | from subcmds import all_commands | 19 | from subcmds import all_commands |
20 | from color import Coloring | 20 | from color import Coloring |
21 | from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand | 21 | from command import ( |
22 | PagedCommand, | ||
23 | MirrorSafeCommand, | ||
24 | GitcAvailableCommand, | ||
25 | GitcClientCommand, | ||
26 | ) | ||
22 | import gitc_utils | 27 | import gitc_utils |
23 | from wrapper import Wrapper | 28 | from wrapper import Wrapper |
24 | 29 | ||
25 | 30 | ||
26 | class Help(PagedCommand, MirrorSafeCommand): | 31 | class Help(PagedCommand, MirrorSafeCommand): |
27 | COMMON = False | 32 | COMMON = False |
28 | helpSummary = "Display detailed help on a command" | 33 | helpSummary = "Display detailed help on a command" |
29 | helpUsage = """ | 34 | helpUsage = """ |
30 | %prog [--all|command] | 35 | %prog [--all|command] |
31 | """ | 36 | """ |
32 | helpDescription = """ | 37 | helpDescription = """ |
33 | Displays detailed usage information about a command. | 38 | Displays detailed usage information about a command. |
34 | """ | 39 | """ |
35 | 40 | ||
36 | def _PrintCommands(self, commandNames): | 41 | def _PrintCommands(self, commandNames): |
37 | """Helper to display |commandNames| summaries.""" | 42 | """Helper to display |commandNames| summaries.""" |
38 | maxlen = 0 | 43 | maxlen = 0 |
39 | for name in commandNames: | 44 | for name in commandNames: |
40 | maxlen = max(maxlen, len(name)) | 45 | maxlen = max(maxlen, len(name)) |
41 | fmt = ' %%-%ds %%s' % maxlen | 46 | fmt = " %%-%ds %%s" % maxlen |
42 | 47 | ||
43 | for name in commandNames: | 48 | for name in commandNames: |
44 | command = all_commands[name]() | 49 | command = all_commands[name]() |
45 | try: | 50 | try: |
46 | summary = command.helpSummary.strip() | 51 | summary = command.helpSummary.strip() |
47 | except AttributeError: | 52 | except AttributeError: |
48 | summary = '' | 53 | summary = "" |
49 | print(fmt % (name, summary)) | 54 | print(fmt % (name, summary)) |
50 | 55 | ||
51 | def _PrintAllCommands(self): | 56 | def _PrintAllCommands(self): |
52 | print('usage: repo COMMAND [ARGS]') | 57 | print("usage: repo COMMAND [ARGS]") |
53 | self.PrintAllCommandsBody() | 58 | self.PrintAllCommandsBody() |
54 | 59 | ||
55 | def PrintAllCommandsBody(self): | 60 | def PrintAllCommandsBody(self): |
56 | print('The complete list of recognized repo commands is:') | 61 | print("The complete list of recognized repo commands is:") |
57 | commandNames = list(sorted(all_commands)) | 62 | commandNames = list(sorted(all_commands)) |
58 | self._PrintCommands(commandNames) | 63 | self._PrintCommands(commandNames) |
59 | print("See 'repo help <command>' for more information on a " | 64 | print( |
60 | 'specific command.') | 65 | "See 'repo help <command>' for more information on a " |
61 | print('Bug reports:', Wrapper().BUG_URL) | 66 | "specific command." |
62 | 67 | ) | |
63 | def _PrintCommonCommands(self): | 68 | print("Bug reports:", Wrapper().BUG_URL) |
64 | print('usage: repo COMMAND [ARGS]') | 69 | |
65 | self.PrintCommonCommandsBody() | 70 | def _PrintCommonCommands(self): |
66 | 71 | print("usage: repo COMMAND [ARGS]") | |
67 | def PrintCommonCommandsBody(self): | 72 | self.PrintCommonCommandsBody() |
68 | print('The most commonly used repo commands are:') | 73 | |
69 | 74 | def PrintCommonCommandsBody(self): | |
70 | def gitc_supported(cmd): | 75 | print("The most commonly used repo commands are:") |
71 | if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand): | 76 | |
72 | return True | 77 | def gitc_supported(cmd): |
73 | if self.client.isGitcClient: | 78 | if not isinstance(cmd, GitcAvailableCommand) and not isinstance( |
74 | return True | 79 | cmd, GitcClientCommand |
75 | if isinstance(cmd, GitcClientCommand): | 80 | ): |
76 | return False | 81 | return True |
77 | if gitc_utils.get_gitc_manifest_dir(): | 82 | if self.client.isGitcClient: |
78 | return True | 83 | return True |
79 | return False | 84 | if isinstance(cmd, GitcClientCommand): |
80 | 85 | return False | |
81 | commandNames = list(sorted([name | 86 | if gitc_utils.get_gitc_manifest_dir(): |
82 | for name, command in all_commands.items() | 87 | return True |
83 | if command.COMMON and gitc_supported(command)])) | 88 | return False |
84 | self._PrintCommands(commandNames) | 89 | |
85 | 90 | commandNames = list( | |
86 | print( | 91 | sorted( |
87 | "See 'repo help <command>' for more information on a specific command.\n" | 92 | [ |
88 | "See 'repo help --all' for a complete list of recognized commands.") | 93 | name |
89 | print('Bug reports:', Wrapper().BUG_URL) | 94 | for name, command in all_commands.items() |
90 | 95 | if command.COMMON and gitc_supported(command) | |
91 | def _PrintCommandHelp(self, cmd, header_prefix=''): | 96 | ] |
92 | class _Out(Coloring): | 97 | ) |
93 | def __init__(self, gc): | 98 | ) |
94 | Coloring.__init__(self, gc, 'help') | 99 | self._PrintCommands(commandNames) |
95 | self.heading = self.printer('heading', attr='bold') | 100 | |
96 | self._first = True | 101 | print( |
97 | 102 | "See 'repo help <command>' for more information on a specific " | |
98 | def _PrintSection(self, heading, bodyAttr): | 103 | "command.\nSee 'repo help --all' for a complete list of recognized " |
99 | try: | 104 | "commands." |
100 | body = getattr(cmd, bodyAttr) | 105 | ) |
101 | except AttributeError: | 106 | print("Bug reports:", Wrapper().BUG_URL) |
102 | return | 107 | |
103 | if body == '' or body is None: | 108 | def _PrintCommandHelp(self, cmd, header_prefix=""): |
104 | return | 109 | class _Out(Coloring): |
105 | 110 | def __init__(self, gc): | |
106 | if not self._first: | 111 | Coloring.__init__(self, gc, "help") |
107 | self.nl() | 112 | self.heading = self.printer("heading", attr="bold") |
108 | self._first = False | 113 | self._first = True |
109 | 114 | ||
110 | self.heading('%s%s', header_prefix, heading) | 115 | def _PrintSection(self, heading, bodyAttr): |
111 | self.nl() | 116 | try: |
112 | self.nl() | 117 | body = getattr(cmd, bodyAttr) |
113 | 118 | except AttributeError: | |
114 | me = 'repo %s' % cmd.NAME | 119 | return |
115 | body = body.strip() | 120 | if body == "" or body is None: |
116 | body = body.replace('%prog', me) | 121 | return |
117 | 122 | ||
118 | # Extract the title, but skip any trailing {#anchors}. | 123 | if not self._first: |
119 | asciidoc_hdr = re.compile(r'^\n?#+ ([^{]+)(\{#.+\})?$') | 124 | self.nl() |
120 | for para in body.split("\n\n"): | 125 | self._first = False |
121 | if para.startswith(' '): | 126 | |
122 | self.write('%s', para) | 127 | self.heading("%s%s", header_prefix, heading) |
123 | self.nl() | 128 | self.nl() |
124 | self.nl() | 129 | self.nl() |
125 | continue | 130 | |
126 | 131 | me = "repo %s" % cmd.NAME | |
127 | m = asciidoc_hdr.match(para) | 132 | body = body.strip() |
128 | if m: | 133 | body = body.replace("%prog", me) |
129 | self.heading('%s%s', header_prefix, m.group(1)) | 134 | |
130 | self.nl() | 135 | # Extract the title, but skip any trailing {#anchors}. |
131 | self.nl() | 136 | asciidoc_hdr = re.compile(r"^\n?#+ ([^{]+)(\{#.+\})?$") |
132 | continue | 137 | for para in body.split("\n\n"): |
133 | 138 | if para.startswith(" "): | |
134 | lines = textwrap.wrap(para.replace(' ', ' '), width=80, | 139 | self.write("%s", para) |
135 | break_long_words=False, break_on_hyphens=False) | 140 | self.nl() |
136 | for line in lines: | 141 | self.nl() |
137 | self.write('%s', line) | 142 | continue |
138 | self.nl() | 143 | |
139 | self.nl() | 144 | m = asciidoc_hdr.match(para) |
140 | 145 | if m: | |
141 | out = _Out(self.client.globalConfig) | 146 | self.heading("%s%s", header_prefix, m.group(1)) |
142 | out._PrintSection('Summary', 'helpSummary') | 147 | self.nl() |
143 | cmd.OptionParser.print_help() | 148 | self.nl() |
144 | out._PrintSection('Description', 'helpDescription') | 149 | continue |
145 | 150 | ||
146 | def _PrintAllCommandHelp(self): | 151 | lines = textwrap.wrap( |
147 | for name in sorted(all_commands): | 152 | para.replace(" ", " "), |
148 | cmd = all_commands[name](manifest=self.manifest) | 153 | width=80, |
149 | self._PrintCommandHelp(cmd, header_prefix='[%s] ' % (name,)) | 154 | break_long_words=False, |
150 | 155 | break_on_hyphens=False, | |
151 | def _Options(self, p): | 156 | ) |
152 | p.add_option('-a', '--all', | 157 | for line in lines: |
153 | dest='show_all', action='store_true', | 158 | self.write("%s", line) |
154 | help='show the complete list of commands') | 159 | self.nl() |
155 | p.add_option('--help-all', | 160 | self.nl() |
156 | dest='show_all_help', action='store_true', | 161 | |
157 | help='show the --help of all commands') | 162 | out = _Out(self.client.globalConfig) |
158 | 163 | out._PrintSection("Summary", "helpSummary") | |
159 | def Execute(self, opt, args): | 164 | cmd.OptionParser.print_help() |
160 | if len(args) == 0: | 165 | out._PrintSection("Description", "helpDescription") |
161 | if opt.show_all_help: | 166 | |
162 | self._PrintAllCommandHelp() | 167 | def _PrintAllCommandHelp(self): |
163 | elif opt.show_all: | 168 | for name in sorted(all_commands): |
164 | self._PrintAllCommands() | 169 | cmd = all_commands[name](manifest=self.manifest) |
165 | else: | 170 | self._PrintCommandHelp(cmd, header_prefix="[%s] " % (name,)) |
166 | self._PrintCommonCommands() | 171 | |
167 | 172 | def _Options(self, p): | |
168 | elif len(args) == 1: | 173 | p.add_option( |
169 | name = args[0] | 174 | "-a", |
170 | 175 | "--all", | |
171 | try: | 176 | dest="show_all", |
172 | cmd = all_commands[name](manifest=self.manifest) | 177 | action="store_true", |
173 | except KeyError: | 178 | help="show the complete list of commands", |
174 | print("repo: '%s' is not a repo command." % name, file=sys.stderr) | 179 | ) |
175 | sys.exit(1) | 180 | p.add_option( |
176 | 181 | "--help-all", | |
177 | self._PrintCommandHelp(cmd) | 182 | dest="show_all_help", |
178 | 183 | action="store_true", | |
179 | else: | 184 | help="show the --help of all commands", |
180 | self._PrintCommandHelp(self) | 185 | ) |
186 | |||
187 | def Execute(self, opt, args): | ||
188 | if len(args) == 0: | ||
189 | if opt.show_all_help: | ||
190 | self._PrintAllCommandHelp() | ||
191 | elif opt.show_all: | ||
192 | self._PrintAllCommands() | ||
193 | else: | ||
194 | self._PrintCommonCommands() | ||
195 | |||
196 | elif len(args) == 1: | ||
197 | name = args[0] | ||
198 | |||
199 | try: | ||
200 | cmd = all_commands[name](manifest=self.manifest) | ||
201 | except KeyError: | ||
202 | print( | ||
203 | "repo: '%s' is not a repo command." % name, file=sys.stderr | ||
204 | ) | ||
205 | sys.exit(1) | ||
206 | |||
207 | self._PrintCommandHelp(cmd) | ||
208 | |||
209 | else: | ||
210 | self._PrintCommandHelp(self) | ||
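The help.py hunk above reformats the paragraph-wrapping logic into black's one-argument-per-line style. Below is a rough, self-contained sketch of that wrapping technique: indented blocks pass through verbatim, asciidoc-style headers are matched and stripped of trailing {#anchor} tags, and everything else is rewrapped to 80 columns. The helper name format_help_body and the sample input are illustrative, not taken from repo.

    # Standalone sketch of the help-text wrapping shown above (names and input
    # are illustrative). Preformatted (indented) paragraphs are kept as-is,
    # headers lose their {#anchor} suffix, prose is wrapped to 80 columns.
    import re
    import textwrap

    ASCIIDOC_HDR = re.compile(r"^\n?#+ ([^{]+)(\{#.+\})?$")

    def format_help_body(body, header_prefix=""):
        out = []
        for para in body.split("\n\n"):
            if para.startswith(" "):  # preformatted block: emit unchanged
                out.append(para)
                continue
            m = ASCIIDOC_HDR.match(para)
            if m:  # heading: keep the title, drop any trailing {#anchor}
                out.append(header_prefix + m.group(1).strip())
                continue
            out.extend(
                textwrap.wrap(
                    para.replace("  ", " "),
                    width=80,
                    break_long_words=False,
                    break_on_hyphens=False,
                )
            )
            out.append("")  # blank line between wrapped paragraphs
        return "\n".join(out)

    print(format_help_body("# Summary {#sum}\n\nA long description paragraph."))
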
diff --git a/subcmds/info.py b/subcmds/info.py index baa4c5b1..6e7f3ed2 100644 --- a/subcmds/info.py +++ b/subcmds/info.py | |||
@@ -20,203 +20,234 @@ from git_refs import R_M, R_HEADS | |||
20 | 20 | ||
21 | 21 | ||
22 | class _Coloring(Coloring): | 22 | class _Coloring(Coloring): |
23 | def __init__(self, config): | 23 | def __init__(self, config): |
24 | Coloring.__init__(self, config, "status") | 24 | Coloring.__init__(self, config, "status") |
25 | 25 | ||
26 | 26 | ||
27 | class Info(PagedCommand): | 27 | class Info(PagedCommand): |
28 | COMMON = True | 28 | COMMON = True |
29 | helpSummary = "Get info on the manifest branch, current branch or unmerged branches" | 29 | helpSummary = ( |
30 | helpUsage = "%prog [-dl] [-o [-c]] [<project>...]" | 30 | "Get info on the manifest branch, current branch or unmerged branches" |
31 | 31 | ) | |
32 | def _Options(self, p): | 32 | helpUsage = "%prog [-dl] [-o [-c]] [<project>...]" |
33 | p.add_option('-d', '--diff', | 33 | |
34 | dest='all', action='store_true', | 34 | def _Options(self, p): |
35 | help="show full info and commit diff including remote branches") | 35 | p.add_option( |
36 | p.add_option('-o', '--overview', | 36 | "-d", |
37 | dest='overview', action='store_true', | 37 | "--diff", |
38 | help='show overview of all local commits') | 38 | dest="all", |
39 | p.add_option('-c', '--current-branch', | 39 | action="store_true", |
40 | dest="current_branch", action="store_true", | 40 | help="show full info and commit diff including remote branches", |
41 | help="consider only checked out branches") | 41 | ) |
42 | p.add_option('--no-current-branch', | 42 | p.add_option( |
43 | dest='current_branch', action='store_false', | 43 | "-o", |
44 | help='consider all local branches') | 44 | "--overview", |
45 | # Turn this into a warning & remove this someday. | 45 | dest="overview", |
46 | p.add_option('-b', | 46 | action="store_true", |
47 | dest='current_branch', action='store_true', | 47 | help="show overview of all local commits", |
48 | help=optparse.SUPPRESS_HELP) | 48 | ) |
49 | p.add_option('-l', '--local-only', | 49 | p.add_option( |
50 | dest="local", action="store_true", | 50 | "-c", |
51 | help="disable all remote operations") | 51 | "--current-branch", |
52 | 52 | dest="current_branch", | |
53 | def Execute(self, opt, args): | 53 | action="store_true", |
54 | self.out = _Coloring(self.client.globalConfig) | 54 | help="consider only checked out branches", |
55 | self.heading = self.out.printer('heading', attr='bold') | 55 | ) |
56 | self.headtext = self.out.nofmt_printer('headtext', fg='yellow') | 56 | p.add_option( |
57 | self.redtext = self.out.printer('redtext', fg='red') | 57 | "--no-current-branch", |
58 | self.sha = self.out.printer("sha", fg='yellow') | 58 | dest="current_branch", |
59 | self.text = self.out.nofmt_printer('text') | 59 | action="store_false", |
60 | self.dimtext = self.out.printer('dimtext', attr='dim') | 60 | help="consider all local branches", |
61 | 61 | ) | |
62 | self.opt = opt | 62 | # Turn this into a warning & remove this someday. |
63 | 63 | p.add_option( | |
64 | if not opt.this_manifest_only: | 64 | "-b", |
65 | self.manifest = self.manifest.outer_client | 65 | dest="current_branch", |
66 | manifestConfig = self.manifest.manifestProject.config | 66 | action="store_true", |
67 | mergeBranch = manifestConfig.GetBranch("default").merge | 67 | help=optparse.SUPPRESS_HELP, |
68 | manifestGroups = self.manifest.GetGroupsStr() | 68 | ) |
69 | 69 | p.add_option( | |
70 | self.heading("Manifest branch: ") | 70 | "-l", |
71 | if self.manifest.default.revisionExpr: | 71 | "--local-only", |
72 | self.headtext(self.manifest.default.revisionExpr) | 72 | dest="local", |
73 | self.out.nl() | 73 | action="store_true", |
74 | self.heading("Manifest merge branch: ") | 74 | help="disable all remote operations", |
75 | self.headtext(mergeBranch) | 75 | ) |
76 | self.out.nl() | 76 | |
77 | self.heading("Manifest groups: ") | 77 | def Execute(self, opt, args): |
78 | self.headtext(manifestGroups) | 78 | self.out = _Coloring(self.client.globalConfig) |
79 | self.out.nl() | 79 | self.heading = self.out.printer("heading", attr="bold") |
80 | 80 | self.headtext = self.out.nofmt_printer("headtext", fg="yellow") | |
81 | self.printSeparator() | 81 | self.redtext = self.out.printer("redtext", fg="red") |
82 | 82 | self.sha = self.out.printer("sha", fg="yellow") | |
83 | if not opt.overview: | 83 | self.text = self.out.nofmt_printer("text") |
84 | self._printDiffInfo(opt, args) | 84 | self.dimtext = self.out.printer("dimtext", attr="dim") |
85 | else: | 85 | |
86 | self._printCommitOverview(opt, args) | 86 | self.opt = opt |
87 | 87 | ||
88 | def printSeparator(self): | 88 | if not opt.this_manifest_only: |
89 | self.text("----------------------------") | 89 | self.manifest = self.manifest.outer_client |
90 | self.out.nl() | 90 | manifestConfig = self.manifest.manifestProject.config |
91 | 91 | mergeBranch = manifestConfig.GetBranch("default").merge | |
92 | def _printDiffInfo(self, opt, args): | 92 | manifestGroups = self.manifest.GetGroupsStr() |
93 | # We let exceptions bubble up to main as they'll be well structured. | 93 | |
94 | projs = self.GetProjects(args, all_manifests=not opt.this_manifest_only) | 94 | self.heading("Manifest branch: ") |
95 | 95 | if self.manifest.default.revisionExpr: | |
96 | for p in projs: | 96 | self.headtext(self.manifest.default.revisionExpr) |
97 | self.heading("Project: ") | 97 | self.out.nl() |
98 | self.headtext(p.name) | 98 | self.heading("Manifest merge branch: ") |
99 | self.out.nl() | 99 | self.headtext(mergeBranch) |
100 | 100 | self.out.nl() | |
101 | self.heading("Mount path: ") | 101 | self.heading("Manifest groups: ") |
102 | self.headtext(p.worktree) | 102 | self.headtext(manifestGroups) |
103 | self.out.nl() | 103 | self.out.nl() |
104 | 104 | ||
105 | self.heading("Current revision: ") | 105 | self.printSeparator() |
106 | self.headtext(p.GetRevisionId()) | 106 | |
107 | self.out.nl() | 107 | if not opt.overview: |
108 | 108 | self._printDiffInfo(opt, args) | |
109 | currentBranch = p.CurrentBranch | 109 | else: |
110 | if currentBranch: | 110 | self._printCommitOverview(opt, args) |
111 | self.heading('Current branch: ') | 111 | |
112 | self.headtext(currentBranch) | 112 | def printSeparator(self): |
113 | self.text("----------------------------") | ||
113 | self.out.nl() | 114 | self.out.nl() |
114 | 115 | ||
115 | self.heading("Manifest revision: ") | 116 | def _printDiffInfo(self, opt, args): |
116 | self.headtext(p.revisionExpr) | 117 | # We let exceptions bubble up to main as they'll be well structured. |
117 | self.out.nl() | 118 | projs = self.GetProjects(args, all_manifests=not opt.this_manifest_only) |
118 | 119 | ||
119 | localBranches = list(p.GetBranches().keys()) | 120 | for p in projs: |
120 | self.heading("Local Branches: ") | 121 | self.heading("Project: ") |
121 | self.redtext(str(len(localBranches))) | 122 | self.headtext(p.name) |
122 | if localBranches: | 123 | self.out.nl() |
123 | self.text(" [") | 124 | |
124 | self.text(", ".join(localBranches)) | 125 | self.heading("Mount path: ") |
125 | self.text("]") | 126 | self.headtext(p.worktree) |
126 | self.out.nl() | 127 | self.out.nl() |
127 | 128 | ||
128 | if self.opt.all: | 129 | self.heading("Current revision: ") |
129 | self.findRemoteLocalDiff(p) | 130 | self.headtext(p.GetRevisionId()) |
130 | 131 | self.out.nl() | |
131 | self.printSeparator() | 132 | |
132 | 133 | currentBranch = p.CurrentBranch | |
133 | def findRemoteLocalDiff(self, project): | 134 | if currentBranch: |
134 | # Fetch all the latest commits. | 135 | self.heading("Current branch: ") |
135 | if not self.opt.local: | 136 | self.headtext(currentBranch) |
136 | project.Sync_NetworkHalf(quiet=True, current_branch_only=True) | 137 | self.out.nl() |
137 | 138 | ||
138 | branch = self.manifest.manifestProject.config.GetBranch('default').merge | 139 | self.heading("Manifest revision: ") |
139 | if branch.startswith(R_HEADS): | 140 | self.headtext(p.revisionExpr) |
140 | branch = branch[len(R_HEADS):] | 141 | self.out.nl() |
141 | logTarget = R_M + branch | 142 | |
142 | 143 | localBranches = list(p.GetBranches().keys()) | |
143 | bareTmp = project.bare_git._bare | 144 | self.heading("Local Branches: ") |
144 | project.bare_git._bare = False | 145 | self.redtext(str(len(localBranches))) |
145 | localCommits = project.bare_git.rev_list( | 146 | if localBranches: |
146 | '--abbrev=8', | 147 | self.text(" [") |
147 | '--abbrev-commit', | 148 | self.text(", ".join(localBranches)) |
148 | '--pretty=oneline', | 149 | self.text("]") |
149 | logTarget + "..", | 150 | self.out.nl() |
150 | '--') | 151 | |
151 | 152 | if self.opt.all: | |
152 | originCommits = project.bare_git.rev_list( | 153 | self.findRemoteLocalDiff(p) |
153 | '--abbrev=8', | 154 | |
154 | '--abbrev-commit', | 155 | self.printSeparator() |
155 | '--pretty=oneline', | 156 | |
156 | ".." + logTarget, | 157 | def findRemoteLocalDiff(self, project): |
157 | '--') | 158 | # Fetch all the latest commits. |
158 | project.bare_git._bare = bareTmp | 159 | if not self.opt.local: |
159 | 160 | project.Sync_NetworkHalf(quiet=True, current_branch_only=True) | |
160 | self.heading("Local Commits: ") | 161 | |
161 | self.redtext(str(len(localCommits))) | 162 | branch = self.manifest.manifestProject.config.GetBranch("default").merge |
162 | self.dimtext(" (on current branch)") | 163 | if branch.startswith(R_HEADS): |
163 | self.out.nl() | 164 | branch = branch[len(R_HEADS) :] |
164 | 165 | logTarget = R_M + branch | |
165 | for c in localCommits: | 166 | |
166 | split = c.split() | 167 | bareTmp = project.bare_git._bare |
167 | self.sha(split[0] + " ") | 168 | project.bare_git._bare = False |
168 | self.text(" ".join(split[1:])) | 169 | localCommits = project.bare_git.rev_list( |
169 | self.out.nl() | 170 | "--abbrev=8", |
170 | 171 | "--abbrev-commit", | |
171 | self.printSeparator() | 172 | "--pretty=oneline", |
172 | 173 | logTarget + "..", | |
173 | self.heading("Remote Commits: ") | 174 | "--", |
174 | self.redtext(str(len(originCommits))) | 175 | ) |
175 | self.out.nl() | 176 | |
176 | 177 | originCommits = project.bare_git.rev_list( | |
177 | for c in originCommits: | 178 | "--abbrev=8", |
178 | split = c.split() | 179 | "--abbrev-commit", |
179 | self.sha(split[0] + " ") | 180 | "--pretty=oneline", |
180 | self.text(" ".join(split[1:])) | 181 | ".." + logTarget, |
181 | self.out.nl() | 182 | "--", |
182 | 183 | ) | |
183 | def _printCommitOverview(self, opt, args): | 184 | project.bare_git._bare = bareTmp |
184 | all_branches = [] | 185 | |
185 | for project in self.GetProjects(args, all_manifests=not opt.this_manifest_only): | 186 | self.heading("Local Commits: ") |
186 | br = [project.GetUploadableBranch(x) | 187 | self.redtext(str(len(localCommits))) |
187 | for x in project.GetBranches()] | 188 | self.dimtext(" (on current branch)") |
188 | br = [x for x in br if x] | ||
189 | if self.opt.current_branch: | ||
190 | br = [x for x in br if x.name == project.CurrentBranch] | ||
191 | all_branches.extend(br) | ||
192 | |||
193 | if not all_branches: | ||
194 | return | ||
195 | |||
196 | self.out.nl() | ||
197 | self.heading('Projects Overview') | ||
198 | project = None | ||
199 | |||
200 | for branch in all_branches: | ||
201 | if project != branch.project: | ||
202 | project = branch.project | ||
203 | self.out.nl() | 189 | self.out.nl() |
204 | self.headtext(project.RelPath(local=opt.this_manifest_only)) | 190 | |
191 | for c in localCommits: | ||
192 | split = c.split() | ||
193 | self.sha(split[0] + " ") | ||
194 | self.text(" ".join(split[1:])) | ||
195 | self.out.nl() | ||
196 | |||
197 | self.printSeparator() | ||
198 | |||
199 | self.heading("Remote Commits: ") | ||
200 | self.redtext(str(len(originCommits))) | ||
205 | self.out.nl() | 201 | self.out.nl() |
206 | 202 | ||
207 | commits = branch.commits | 203 | for c in originCommits: |
208 | date = branch.date | 204 | split = c.split() |
209 | self.text('%s %-33s (%2d commit%s, %s)' % ( | 205 | self.sha(split[0] + " ") |
210 | branch.name == project.CurrentBranch and '*' or ' ', | 206 | self.text(" ".join(split[1:])) |
211 | branch.name, | 207 | self.out.nl() |
212 | len(commits), | 208 | |
213 | len(commits) != 1 and 's' or '', | 209 | def _printCommitOverview(self, opt, args): |
214 | date)) | 210 | all_branches = [] |
215 | self.out.nl() | 211 | for project in self.GetProjects( |
216 | 212 | args, all_manifests=not opt.this_manifest_only | |
217 | for commit in commits: | 213 | ): |
218 | split = commit.split() | 214 | br = [project.GetUploadableBranch(x) for x in project.GetBranches()] |
219 | self.text('{0:38}{1} '.format('', '-')) | 215 | br = [x for x in br if x] |
220 | self.sha(split[0] + " ") | 216 | if self.opt.current_branch: |
221 | self.text(" ".join(split[1:])) | 217 | br = [x for x in br if x.name == project.CurrentBranch] |
218 | all_branches.extend(br) | ||
219 | |||
220 | if not all_branches: | ||
221 | return | ||
222 | |||
222 | self.out.nl() | 223 | self.out.nl() |
224 | self.heading("Projects Overview") | ||
225 | project = None | ||
226 | |||
227 | for branch in all_branches: | ||
228 | if project != branch.project: | ||
229 | project = branch.project | ||
230 | self.out.nl() | ||
231 | self.headtext(project.RelPath(local=opt.this_manifest_only)) | ||
232 | self.out.nl() | ||
233 | |||
234 | commits = branch.commits | ||
235 | date = branch.date | ||
236 | self.text( | ||
237 | "%s %-33s (%2d commit%s, %s)" | ||
238 | % ( | ||
239 | branch.name == project.CurrentBranch and "*" or " ", | ||
240 | branch.name, | ||
241 | len(commits), | ||
242 | len(commits) != 1 and "s" or "", | ||
243 | date, | ||
244 | ) | ||
245 | ) | ||
246 | self.out.nl() | ||
247 | |||
248 | for commit in commits: | ||
249 | split = commit.split() | ||
250 | self.text("{0:38}{1} ".format("", "-")) | ||
251 | self.sha(split[0] + " ") | ||
252 | self.text(" ".join(split[1:])) | ||
253 | self.out.nl() | ||
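findRemoteLocalDiff above counts local-only and remote-only commits with two one-sided rev-list ranges against the manifest merge ref (R_M + branch). A hedged sketch of the same idea using plain git via subprocess follows; it assumes a checkout where the tracking ref (here m/main) exists, and ahead_behind is an invented helper, not repo code.

    # Illustrative only: count commits the way `repo info -d` does, using the
    # asymmetric ranges "<ref>.." (commits only in HEAD) and "..<ref>"
    # (commits only in the tracking ref).
    import subprocess

    def ahead_behind(ref, cwd="."):
        def rev_list(range_spec):
            out = subprocess.run(
                ["git", "rev-list", "--abbrev=8", "--abbrev-commit",
                 "--pretty=oneline", range_spec, "--"],
                cwd=cwd, capture_output=True, text=True, check=True,
            ).stdout
            return [line for line in out.splitlines() if line]

        local_commits = rev_list(ref + "..")   # local work not yet merged
        remote_commits = rev_list(".." + ref)  # remote work not yet local
        return local_commits, remote_commits

    local, remote = ahead_behind("m/main")
    print("Local Commits: %d" % len(local))
    print("Remote Commits: %d" % len(remote))
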
diff --git a/subcmds/init.py b/subcmds/init.py index 813fa590..b5c2e3b5 100644 --- a/subcmds/init.py +++ b/subcmds/init.py | |||
@@ -22,13 +22,13 @@ from wrapper import Wrapper | |||
22 | 22 | ||
23 | 23 | ||
24 | class Init(InteractiveCommand, MirrorSafeCommand): | 24 | class Init(InteractiveCommand, MirrorSafeCommand): |
25 | COMMON = True | 25 | COMMON = True |
26 | MULTI_MANIFEST_SUPPORT = True | 26 | MULTI_MANIFEST_SUPPORT = True |
27 | helpSummary = "Initialize a repo client checkout in the current directory" | 27 | helpSummary = "Initialize a repo client checkout in the current directory" |
28 | helpUsage = """ | 28 | helpUsage = """ |
29 | %prog [options] [manifest url] | 29 | %prog [options] [manifest url] |
30 | """ | 30 | """ |
31 | helpDescription = """ | 31 | helpDescription = """ |
32 | The '%prog' command is run once to install and initialize repo. | 32 | The '%prog' command is run once to install and initialize repo. |
33 | The latest repo source code and manifest collection is downloaded | 33 | The latest repo source code and manifest collection is downloaded |
34 | from the server and is installed in the .repo/ directory in the | 34 | from the server and is installed in the .repo/ directory in the |
@@ -77,243 +77,303 @@ manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary | |||
77 | to update the working directory files. | 77 | to update the working directory files. |
78 | """ | 78 | """ |
79 | 79 | ||
80 | def _CommonOptions(self, p): | 80 | def _CommonOptions(self, p): |
81 | """Disable due to re-use of Wrapper().""" | 81 | """Disable due to re-use of Wrapper().""" |
82 | 82 | ||
83 | def _Options(self, p, gitc_init=False): | 83 | def _Options(self, p, gitc_init=False): |
84 | Wrapper().InitParser(p, gitc_init=gitc_init) | 84 | Wrapper().InitParser(p, gitc_init=gitc_init) |
85 | m = p.add_option_group('Multi-manifest') | 85 | m = p.add_option_group("Multi-manifest") |
86 | m.add_option('--outer-manifest', action='store_true', default=True, | 86 | m.add_option( |
87 | help='operate starting at the outermost manifest') | 87 | "--outer-manifest", |
88 | m.add_option('--no-outer-manifest', dest='outer_manifest', | 88 | action="store_true", |
89 | action='store_false', help='do not operate on outer manifests') | 89 | default=True, |
90 | m.add_option('--this-manifest-only', action='store_true', default=None, | 90 | help="operate starting at the outermost manifest", |
91 | help='only operate on this (sub)manifest') | 91 | ) |
92 | m.add_option('--no-this-manifest-only', '--all-manifests', | 92 | m.add_option( |
93 | dest='this_manifest_only', action='store_false', | 93 | "--no-outer-manifest", |
94 | help='operate on this manifest and its submanifests') | 94 | dest="outer_manifest", |
95 | 95 | action="store_false", | |
96 | def _RegisteredEnvironmentOptions(self): | 96 | help="do not operate on outer manifests", |
97 | return {'REPO_MANIFEST_URL': 'manifest_url', | 97 | ) |
98 | 'REPO_MIRROR_LOCATION': 'reference'} | 98 | m.add_option( |
99 | 99 | "--this-manifest-only", | |
100 | def _SyncManifest(self, opt): | 100 | action="store_true", |
101 | """Call manifestProject.Sync with arguments from opt. | 101 | default=None, |
102 | 102 | help="only operate on this (sub)manifest", | |
103 | Args: | 103 | ) |
104 | opt: options from optparse. | 104 | m.add_option( |
105 | """ | 105 | "--no-this-manifest-only", |
106 | # Normally this value is set when instantiating the project, but the | 106 | "--all-manifests", |
107 | # manifest project is special and is created when instantiating the | 107 | dest="this_manifest_only", |
108 | # manifest which happens before we parse options. | 108 | action="store_false", |
109 | self.manifest.manifestProject.clone_depth = opt.manifest_depth | 109 | help="operate on this manifest and its submanifests", |
110 | if not self.manifest.manifestProject.Sync( | 110 | ) |
111 | manifest_url=opt.manifest_url, | 111 | |
112 | manifest_branch=opt.manifest_branch, | 112 | def _RegisteredEnvironmentOptions(self): |
113 | standalone_manifest=opt.standalone_manifest, | 113 | return { |
114 | groups=opt.groups, | 114 | "REPO_MANIFEST_URL": "manifest_url", |
115 | platform=opt.platform, | 115 | "REPO_MIRROR_LOCATION": "reference", |
116 | mirror=opt.mirror, | 116 | } |
117 | dissociate=opt.dissociate, | 117 | |
118 | reference=opt.reference, | 118 | def _SyncManifest(self, opt): |
119 | worktree=opt.worktree, | 119 | """Call manifestProject.Sync with arguments from opt. |
120 | submodules=opt.submodules, | 120 | |
121 | archive=opt.archive, | 121 | Args: |
122 | partial_clone=opt.partial_clone, | 122 | opt: options from optparse. |
123 | clone_filter=opt.clone_filter, | 123 | """ |
124 | partial_clone_exclude=opt.partial_clone_exclude, | 124 | # Normally this value is set when instantiating the project, but the |
125 | clone_bundle=opt.clone_bundle, | 125 | # manifest project is special and is created when instantiating the |
126 | git_lfs=opt.git_lfs, | 126 | # manifest which happens before we parse options. |
127 | use_superproject=opt.use_superproject, | 127 | self.manifest.manifestProject.clone_depth = opt.manifest_depth |
128 | verbose=opt.verbose, | 128 | if not self.manifest.manifestProject.Sync( |
129 | current_branch_only=opt.current_branch_only, | 129 | manifest_url=opt.manifest_url, |
130 | tags=opt.tags, | 130 | manifest_branch=opt.manifest_branch, |
131 | depth=opt.depth, | 131 | standalone_manifest=opt.standalone_manifest, |
132 | git_event_log=self.git_event_log, | 132 | groups=opt.groups, |
133 | manifest_name=opt.manifest_name): | 133 | platform=opt.platform, |
134 | sys.exit(1) | 134 | mirror=opt.mirror, |
135 | 135 | dissociate=opt.dissociate, | |
136 | def _Prompt(self, prompt, value): | 136 | reference=opt.reference, |
137 | print('%-10s [%s]: ' % (prompt, value), end='', flush=True) | 137 | worktree=opt.worktree, |
138 | a = sys.stdin.readline().strip() | 138 | submodules=opt.submodules, |
139 | if a == '': | 139 | archive=opt.archive, |
140 | return value | 140 | partial_clone=opt.partial_clone, |
141 | return a | 141 | clone_filter=opt.clone_filter, |
142 | 142 | partial_clone_exclude=opt.partial_clone_exclude, | |
143 | def _ShouldConfigureUser(self, opt, existing_checkout): | 143 | clone_bundle=opt.clone_bundle, |
144 | gc = self.client.globalConfig | 144 | git_lfs=opt.git_lfs, |
145 | mp = self.manifest.manifestProject | 145 | use_superproject=opt.use_superproject, |
146 | 146 | verbose=opt.verbose, | |
147 | # If we don't have local settings, get from global. | 147 | current_branch_only=opt.current_branch_only, |
148 | if not mp.config.Has('user.name') or not mp.config.Has('user.email'): | 148 | tags=opt.tags, |
149 | if not gc.Has('user.name') or not gc.Has('user.email'): | 149 | depth=opt.depth, |
150 | return True | 150 | git_event_log=self.git_event_log, |
151 | 151 | manifest_name=opt.manifest_name, | |
152 | mp.config.SetString('user.name', gc.GetString('user.name')) | 152 | ): |
153 | mp.config.SetString('user.email', gc.GetString('user.email')) | 153 | sys.exit(1) |
154 | 154 | ||
155 | if not opt.quiet and not existing_checkout or opt.verbose: | 155 | def _Prompt(self, prompt, value): |
156 | print() | 156 | print("%-10s [%s]: " % (prompt, value), end="", flush=True) |
157 | print('Your identity is: %s <%s>' % (mp.config.GetString('user.name'), | 157 | a = sys.stdin.readline().strip() |
158 | mp.config.GetString('user.email'))) | 158 | if a == "": |
159 | print("If you want to change this, please re-run 'repo init' with --config-name") | 159 | return value |
160 | return False | 160 | return a |
161 | 161 | ||
162 | def _ConfigureUser(self, opt): | 162 | def _ShouldConfigureUser(self, opt, existing_checkout): |
163 | mp = self.manifest.manifestProject | 163 | gc = self.client.globalConfig |
164 | 164 | mp = self.manifest.manifestProject | |
165 | while True: | 165 | |
166 | if not opt.quiet: | 166 | # If we don't have local settings, get from global. |
167 | if not mp.config.Has("user.name") or not mp.config.Has("user.email"): | ||
168 | if not gc.Has("user.name") or not gc.Has("user.email"): | ||
169 | return True | ||
170 | |||
171 | mp.config.SetString("user.name", gc.GetString("user.name")) | ||
172 | mp.config.SetString("user.email", gc.GetString("user.email")) | ||
173 | |||
174 | if not opt.quiet and not existing_checkout or opt.verbose: | ||
175 | print() | ||
176 | print( | ||
177 | "Your identity is: %s <%s>" | ||
178 | % ( | ||
179 | mp.config.GetString("user.name"), | ||
180 | mp.config.GetString("user.email"), | ||
181 | ) | ||
182 | ) | ||
183 | print( | ||
184 | "If you want to change this, please re-run 'repo init' with " | ||
185 | "--config-name" | ||
186 | ) | ||
187 | return False | ||
188 | |||
189 | def _ConfigureUser(self, opt): | ||
190 | mp = self.manifest.manifestProject | ||
191 | |||
192 | while True: | ||
193 | if not opt.quiet: | ||
194 | print() | ||
195 | name = self._Prompt("Your Name", mp.UserName) | ||
196 | email = self._Prompt("Your Email", mp.UserEmail) | ||
197 | |||
198 | if not opt.quiet: | ||
199 | print() | ||
200 | print("Your identity is: %s <%s>" % (name, email)) | ||
201 | print("is this correct [y/N]? ", end="", flush=True) | ||
202 | a = sys.stdin.readline().strip().lower() | ||
203 | if a in ("yes", "y", "t", "true"): | ||
204 | break | ||
205 | |||
206 | if name != mp.UserName: | ||
207 | mp.config.SetString("user.name", name) | ||
208 | if email != mp.UserEmail: | ||
209 | mp.config.SetString("user.email", email) | ||
210 | |||
211 | def _HasColorSet(self, gc): | ||
212 | for n in ["ui", "diff", "status"]: | ||
213 | if gc.Has("color.%s" % n): | ||
214 | return True | ||
215 | return False | ||
216 | |||
217 | def _ConfigureColor(self): | ||
218 | gc = self.client.globalConfig | ||
219 | if self._HasColorSet(gc): | ||
220 | return | ||
221 | |||
222 | class _Test(Coloring): | ||
223 | def __init__(self): | ||
224 | Coloring.__init__(self, gc, "test color display") | ||
225 | self._on = True | ||
226 | |||
227 | out = _Test() | ||
228 | |||
167 | print() | 229 | print() |
168 | name = self._Prompt('Your Name', mp.UserName) | 230 | print("Testing colorized output (for 'repo diff', 'repo status'):") |
169 | email = self._Prompt('Your Email', mp.UserEmail) | 231 | |
232 | for c in ["black", "red", "green", "yellow", "blue", "magenta", "cyan"]: | ||
233 | out.write(" ") | ||
234 | out.printer(fg=c)(" %-6s ", c) | ||
235 | out.write(" ") | ||
236 | out.printer(fg="white", bg="black")(" %s " % "white") | ||
237 | out.nl() | ||
238 | |||
239 | for c in ["bold", "dim", "ul", "reverse"]: | ||
240 | out.write(" ") | ||
241 | out.printer(fg="black", attr=c)(" %-6s ", c) | ||
242 | out.nl() | ||
243 | |||
244 | print( | ||
245 | "Enable color display in this user account (y/N)? ", | ||
246 | end="", | ||
247 | flush=True, | ||
248 | ) | ||
249 | a = sys.stdin.readline().strip().lower() | ||
250 | if a in ("y", "yes", "t", "true", "on"): | ||
251 | gc.SetString("color.ui", "auto") | ||
252 | |||
253 | def _DisplayResult(self): | ||
254 | if self.manifest.IsMirror: | ||
255 | init_type = "mirror " | ||
256 | else: | ||
257 | init_type = "" | ||
170 | 258 | ||
171 | if not opt.quiet: | ||
172 | print() | 259 | print() |
173 | print('Your identity is: %s <%s>' % (name, email)) | 260 | print( |
174 | print('is this correct [y/N]? ', end='', flush=True) | 261 | "repo %shas been initialized in %s" |
175 | a = sys.stdin.readline().strip().lower() | 262 | % (init_type, self.manifest.topdir) |
176 | if a in ('yes', 'y', 't', 'true'): | 263 | ) |
177 | break | 264 | |
178 | 265 | current_dir = os.getcwd() | |
179 | if name != mp.UserName: | 266 | if current_dir != self.manifest.topdir: |
180 | mp.config.SetString('user.name', name) | 267 | print( |
181 | if email != mp.UserEmail: | 268 | "If this is not the directory in which you want to initialize " |
182 | mp.config.SetString('user.email', email) | 269 | "repo, please run:" |
183 | 270 | ) | |
184 | def _HasColorSet(self, gc): | 271 | print(" rm -r %s" % os.path.join(self.manifest.topdir, ".repo")) |
185 | for n in ['ui', 'diff', 'status']: | 272 | print("and try again.") |
186 | if gc.Has('color.%s' % n): | 273 | |
187 | return True | 274 | def ValidateOptions(self, opt, args): |
188 | return False | 275 | if opt.reference: |
189 | 276 | opt.reference = os.path.expanduser(opt.reference) | |
190 | def _ConfigureColor(self): | 277 | |
191 | gc = self.client.globalConfig | 278 | # Check this here, else manifest will be tagged "not new" and init won't |
192 | if self._HasColorSet(gc): | 279 | # be possible anymore without removing the .repo/manifests directory. |
193 | return | 280 | if opt.mirror: |
194 | 281 | if opt.archive: | |
195 | class _Test(Coloring): | 282 | self.OptionParser.error( |
196 | def __init__(self): | 283 | "--mirror and --archive cannot be used " "together." |
197 | Coloring.__init__(self, gc, 'test color display') | 284 | ) |
198 | self._on = True | 285 | if opt.use_superproject is not None: |
199 | out = _Test() | 286 | self.OptionParser.error( |
200 | 287 | "--mirror and --use-superproject cannot be " | |
201 | print() | 288 | "used together." |
202 | print("Testing colorized output (for 'repo diff', 'repo status'):") | 289 | ) |
203 | 290 | if opt.archive and opt.use_superproject is not None: | |
204 | for c in ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan']: | 291 | self.OptionParser.error( |
205 | out.write(' ') | 292 | "--archive and --use-superproject cannot be used " "together." |
206 | out.printer(fg=c)(' %-6s ', c) | 293 | ) |
207 | out.write(' ') | 294 | |
208 | out.printer(fg='white', bg='black')(' %s ' % 'white') | 295 | if opt.standalone_manifest and ( |
209 | out.nl() | 296 | opt.manifest_branch or opt.manifest_name != "default.xml" |
210 | 297 | ): | |
211 | for c in ['bold', 'dim', 'ul', 'reverse']: | 298 | self.OptionParser.error( |
212 | out.write(' ') | 299 | "--manifest-branch and --manifest-name cannot" |
213 | out.printer(fg='black', attr=c)(' %-6s ', c) | 300 | " be used with --standalone-manifest." |
214 | out.nl() | 301 | ) |
215 | 302 | ||
216 | print('Enable color display in this user account (y/N)? ', end='', flush=True) | 303 | if args: |
217 | a = sys.stdin.readline().strip().lower() | 304 | if opt.manifest_url: |
218 | if a in ('y', 'yes', 't', 'true', 'on'): | 305 | self.OptionParser.error( |
219 | gc.SetString('color.ui', 'auto') | 306 | "--manifest-url option and URL argument both specified: " |
220 | 307 | "only use one to select the manifest URL." | |
221 | def _DisplayResult(self): | 308 | ) |
222 | if self.manifest.IsMirror: | 309 | |
223 | init_type = 'mirror ' | 310 | opt.manifest_url = args.pop(0) |
224 | else: | 311 | |
225 | init_type = '' | 312 | if args: |
226 | 313 | self.OptionParser.error("too many arguments to init") | |
227 | print() | 314 | |
228 | print('repo %shas been initialized in %s' % (init_type, self.manifest.topdir)) | 315 | def Execute(self, opt, args): |
229 | 316 | git_require(MIN_GIT_VERSION_HARD, fail=True) | |
230 | current_dir = os.getcwd() | 317 | if not git_require(MIN_GIT_VERSION_SOFT): |
231 | if current_dir != self.manifest.topdir: | 318 | print( |
232 | print('If this is not the directory in which you want to initialize ' | 319 | "repo: warning: git-%s+ will soon be required; please upgrade " |
233 | 'repo, please run:') | 320 | "your version of git to maintain support." |
234 | print(' rm -r %s' % os.path.join(self.manifest.topdir, '.repo')) | 321 | % (".".join(str(x) for x in MIN_GIT_VERSION_SOFT),), |
235 | print('and try again.') | 322 | file=sys.stderr, |
236 | 323 | ) | |
237 | def ValidateOptions(self, opt, args): | 324 | |
238 | if opt.reference: | 325 | rp = self.manifest.repoProject |
239 | opt.reference = os.path.expanduser(opt.reference) | 326 | |
240 | 327 | # Handle new --repo-url requests. | |
241 | # Check this here, else manifest will be tagged "not new" and init won't be | 328 | if opt.repo_url: |
242 | # possible anymore without removing the .repo/manifests directory. | 329 | remote = rp.GetRemote("origin") |
243 | if opt.mirror: | 330 | remote.url = opt.repo_url |
244 | if opt.archive: | 331 | remote.Save() |
245 | self.OptionParser.error('--mirror and --archive cannot be used ' | 332 | |
246 | 'together.') | 333 | # Handle new --repo-rev requests. |
247 | if opt.use_superproject is not None: | 334 | if opt.repo_rev: |
248 | self.OptionParser.error('--mirror and --use-superproject cannot be ' | 335 | wrapper = Wrapper() |
249 | 'used together.') | 336 | try: |
250 | if opt.archive and opt.use_superproject is not None: | 337 | remote_ref, rev = wrapper.check_repo_rev( |
251 | self.OptionParser.error('--archive and --use-superproject cannot be used ' | 338 | rp.gitdir, |
252 | 'together.') | 339 | opt.repo_rev, |
253 | 340 | repo_verify=opt.repo_verify, | |
254 | if opt.standalone_manifest and (opt.manifest_branch or | 341 | quiet=opt.quiet, |
255 | opt.manifest_name != 'default.xml'): | 342 | ) |
256 | self.OptionParser.error('--manifest-branch and --manifest-name cannot' | 343 | except wrapper.CloneFailure: |
257 | ' be used with --standalone-manifest.') | 344 | print( |
258 | 345 | "fatal: double check your --repo-rev setting.", | |
259 | if args: | 346 | file=sys.stderr, |
260 | if opt.manifest_url: | 347 | ) |
261 | self.OptionParser.error( | 348 | sys.exit(1) |
262 | '--manifest-url option and URL argument both specified: only use ' | 349 | branch = rp.GetBranch("default") |
263 | 'one to select the manifest URL.') | 350 | branch.merge = remote_ref |
264 | 351 | rp.work_git.reset("--hard", rev) | |
265 | opt.manifest_url = args.pop(0) | 352 | branch.Save() |
266 | 353 | ||
267 | if args: | 354 | if opt.worktree: |
268 | self.OptionParser.error('too many arguments to init') | 355 | # Older versions of git supported worktree, but had dangerous gc |
269 | 356 | # bugs. | |
270 | def Execute(self, opt, args): | 357 | git_require((2, 15, 0), fail=True, msg="git gc worktree corruption") |
271 | git_require(MIN_GIT_VERSION_HARD, fail=True) | 358 | |
272 | if not git_require(MIN_GIT_VERSION_SOFT): | 359 | # Provide a short notice that we're reinitializing an existing checkout. |
273 | print('repo: warning: git-%s+ will soon be required; please upgrade your ' | 360 | # Sometimes developers might not realize that they're in one, or that |
274 | 'version of git to maintain support.' | 361 | # repo doesn't do nested checkouts. |
275 | % ('.'.join(str(x) for x in MIN_GIT_VERSION_SOFT),), | 362 | existing_checkout = self.manifest.manifestProject.Exists |
276 | file=sys.stderr) | 363 | if not opt.quiet and existing_checkout: |
277 | 364 | print( | |
278 | rp = self.manifest.repoProject | 365 | "repo: reusing existing repo client checkout in", |
279 | 366 | self.manifest.topdir, | |
280 | # Handle new --repo-url requests. | 367 | ) |
281 | if opt.repo_url: | 368 | |
282 | remote = rp.GetRemote('origin') | 369 | self._SyncManifest(opt) |
283 | remote.url = opt.repo_url | 370 | |
284 | remote.Save() | 371 | if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror: |
285 | 372 | if opt.config_name or self._ShouldConfigureUser( | |
286 | # Handle new --repo-rev requests. | 373 | opt, existing_checkout |
287 | if opt.repo_rev: | 374 | ): |
288 | wrapper = Wrapper() | 375 | self._ConfigureUser(opt) |
289 | try: | 376 | self._ConfigureColor() |
290 | remote_ref, rev = wrapper.check_repo_rev( | 377 | |
291 | rp.gitdir, opt.repo_rev, repo_verify=opt.repo_verify, quiet=opt.quiet) | 378 | if not opt.quiet: |
292 | except wrapper.CloneFailure: | 379 | self._DisplayResult() |
293 | print('fatal: double check your --repo-rev setting.', file=sys.stderr) | ||
294 | sys.exit(1) | ||
295 | branch = rp.GetBranch('default') | ||
296 | branch.merge = remote_ref | ||
297 | rp.work_git.reset('--hard', rev) | ||
298 | branch.Save() | ||
299 | |||
300 | if opt.worktree: | ||
301 | # Older versions of git supported worktree, but had dangerous gc bugs. | ||
302 | git_require((2, 15, 0), fail=True, msg='git gc worktree corruption') | ||
303 | |||
304 | # Provide a short notice that we're reinitializing an existing checkout. | ||
305 | # Sometimes developers might not realize that they're in one, or that | ||
306 | # repo doesn't do nested checkouts. | ||
307 | existing_checkout = self.manifest.manifestProject.Exists | ||
308 | if not opt.quiet and existing_checkout: | ||
309 | print('repo: reusing existing repo client checkout in', self.manifest.topdir) | ||
310 | |||
311 | self._SyncManifest(opt) | ||
312 | |||
313 | if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror: | ||
314 | if opt.config_name or self._ShouldConfigureUser(opt, existing_checkout): | ||
315 | self._ConfigureUser(opt) | ||
316 | self._ConfigureColor() | ||
317 | |||
318 | if not opt.quiet: | ||
319 | self._DisplayResult() | ||
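The init.py hunks above also show the two string conventions the reformat settles on: long help and error messages are split into implicitly concatenated literals so every line fits in 80 columns, and conflicting flags are rejected in ValidateOptions via OptionParser.error(). A small self-contained optparse sketch of that pattern follows; the option names are borrowed only for illustration and the parser here is not repo's.

    # Sketch of the post-reformat optparse style: one keyword per line in
    # add_option(), long help strings split across adjacent literals, and
    # mutually exclusive flags rejected with parser.error() (which prints the
    # message and exits with status 2).
    import optparse

    parser = optparse.OptionParser(usage="%prog [options]")
    group = parser.add_option_group("Example group")
    group.add_option(
        "--mirror",
        action="store_true",
        help="create a replica of the remote repositories rather than a "
        "client working directory",
    )
    group.add_option(
        "--archive",
        action="store_true",
        help="checkout an archive instead of a git repository for each "
        "project",
    )

    opts, args = parser.parse_args(["--mirror", "--archive"])
    if opts.mirror and opts.archive:
        parser.error("--mirror and --archive cannot be used together.")
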
diff --git a/subcmds/list.py b/subcmds/list.py index ad8036ee..24e3e1fc 100644 --- a/subcmds/list.py +++ b/subcmds/list.py | |||
@@ -18,13 +18,13 @@ from command import Command, MirrorSafeCommand | |||
18 | 18 | ||
19 | 19 | ||
20 | class List(Command, MirrorSafeCommand): | 20 | class List(Command, MirrorSafeCommand): |
21 | COMMON = True | 21 | COMMON = True |
22 | helpSummary = "List projects and their associated directories" | 22 | helpSummary = "List projects and their associated directories" |
23 | helpUsage = """ | 23 | helpUsage = """ |
24 | %prog [-f] [<project>...] | 24 | %prog [-f] [<project>...] |
25 | %prog [-f] -r str1 [str2]... | 25 | %prog [-f] -r str1 [str2]... |
26 | """ | 26 | """ |
27 | helpDescription = """ | 27 | helpDescription = """ |
28 | List all projects; pass '.' to list the project for the cwd. | 28 | List all projects; pass '.' to list the project for the cwd. |
29 | 29 | ||
30 | By default, only projects that currently exist in the checkout are shown. If | 30 | By default, only projects that currently exist in the checkout are shown. If |
@@ -35,69 +35,103 @@ groups, then also pass --groups all. | |||
35 | This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'. | 35 | This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'. |
36 | """ | 36 | """ |
37 | 37 | ||
38 | def _Options(self, p): | 38 | def _Options(self, p): |
39 | p.add_option('-r', '--regex', | 39 | p.add_option( |
40 | dest='regex', action='store_true', | 40 | "-r", |
41 | help='filter the project list based on regex or wildcard matching of strings') | 41 | "--regex", |
42 | p.add_option('-g', '--groups', | 42 | dest="regex", |
43 | dest='groups', | 43 | action="store_true", |
44 | help='filter the project list based on the groups the project is in') | 44 | help="filter the project list based on regex or wildcard matching " |
45 | p.add_option('-a', '--all', | 45 | "of strings", |
46 | action='store_true', | 46 | ) |
47 | help='show projects regardless of checkout state') | 47 | p.add_option( |
48 | p.add_option('-n', '--name-only', | 48 | "-g", |
49 | dest='name_only', action='store_true', | 49 | "--groups", |
50 | help='display only the name of the repository') | 50 | dest="groups", |
51 | p.add_option('-p', '--path-only', | 51 | help="filter the project list based on the groups the project is " |
52 | dest='path_only', action='store_true', | 52 | "in", |
53 | help='display only the path of the repository') | 53 | ) |
54 | p.add_option('-f', '--fullpath', | 54 | p.add_option( |
55 | dest='fullpath', action='store_true', | 55 | "-a", |
56 | help='display the full work tree path instead of the relative path') | 56 | "--all", |
57 | p.add_option('--relative-to', metavar='PATH', | 57 | action="store_true", |
58 | help='display paths relative to this one (default: top of repo client checkout)') | 58 | help="show projects regardless of checkout state", |
59 | ) | ||
60 | p.add_option( | ||
61 | "-n", | ||
62 | "--name-only", | ||
63 | dest="name_only", | ||
64 | action="store_true", | ||
65 | help="display only the name of the repository", | ||
66 | ) | ||
67 | p.add_option( | ||
68 | "-p", | ||
69 | "--path-only", | ||
70 | dest="path_only", | ||
71 | action="store_true", | ||
72 | help="display only the path of the repository", | ||
73 | ) | ||
74 | p.add_option( | ||
75 | "-f", | ||
76 | "--fullpath", | ||
77 | dest="fullpath", | ||
78 | action="store_true", | ||
79 | help="display the full work tree path instead of the relative path", | ||
80 | ) | ||
81 | p.add_option( | ||
82 | "--relative-to", | ||
83 | metavar="PATH", | ||
84 | help="display paths relative to this one (default: top of repo " | ||
85 | "client checkout)", | ||
86 | ) | ||
59 | 87 | ||
60 | def ValidateOptions(self, opt, args): | 88 | def ValidateOptions(self, opt, args): |
61 | if opt.fullpath and opt.name_only: | 89 | if opt.fullpath and opt.name_only: |
62 | self.OptionParser.error('cannot combine -f and -n') | 90 | self.OptionParser.error("cannot combine -f and -n") |
63 | 91 | ||
64 | # Resolve any symlinks so the output is stable. | 92 | # Resolve any symlinks so the output is stable. |
65 | if opt.relative_to: | 93 | if opt.relative_to: |
66 | opt.relative_to = os.path.realpath(opt.relative_to) | 94 | opt.relative_to = os.path.realpath(opt.relative_to) |
67 | 95 | ||
68 | def Execute(self, opt, args): | 96 | def Execute(self, opt, args): |
69 | """List all projects and the associated directories. | 97 | """List all projects and the associated directories. |
70 | 98 | ||
71 | This may be possible to do with 'repo forall', but repo newbies have | 99 | This may be possible to do with 'repo forall', but repo newbies have |
72 | trouble figuring that out. The idea here is that it should be more | 100 | trouble figuring that out. The idea here is that it should be more |
73 | discoverable. | 101 | discoverable. |
74 | 102 | ||
75 | Args: | 103 | Args: |
76 | opt: The options. | 104 | opt: The options. |
77 | args: Positional args. Can be a list of projects to list, or empty. | 105 | args: Positional args. Can be a list of projects to list, or empty. |
78 | """ | 106 | """ |
79 | if not opt.regex: | 107 | if not opt.regex: |
80 | projects = self.GetProjects(args, groups=opt.groups, missing_ok=opt.all, | 108 | projects = self.GetProjects( |
81 | all_manifests=not opt.this_manifest_only) | 109 | args, |
82 | else: | 110 | groups=opt.groups, |
83 | projects = self.FindProjects(args, all_manifests=not opt.this_manifest_only) | 111 | missing_ok=opt.all, |
112 | all_manifests=not opt.this_manifest_only, | ||
113 | ) | ||
114 | else: | ||
115 | projects = self.FindProjects( | ||
116 | args, all_manifests=not opt.this_manifest_only | ||
117 | ) | ||
84 | 118 | ||
85 | def _getpath(x): | 119 | def _getpath(x): |
86 | if opt.fullpath: | 120 | if opt.fullpath: |
87 | return x.worktree | 121 | return x.worktree |
88 | if opt.relative_to: | 122 | if opt.relative_to: |
89 | return os.path.relpath(x.worktree, opt.relative_to) | 123 | return os.path.relpath(x.worktree, opt.relative_to) |
90 | return x.RelPath(local=opt.this_manifest_only) | 124 | return x.RelPath(local=opt.this_manifest_only) |
91 | 125 | ||
92 | lines = [] | 126 | lines = [] |
93 | for project in projects: | 127 | for project in projects: |
94 | if opt.name_only and not opt.path_only: | 128 | if opt.name_only and not opt.path_only: |
95 | lines.append("%s" % (project.name)) | 129 | lines.append("%s" % (project.name)) |
96 | elif opt.path_only and not opt.name_only: | 130 | elif opt.path_only and not opt.name_only: |
97 | lines.append("%s" % (_getpath(project))) | 131 | lines.append("%s" % (_getpath(project))) |
98 | else: | 132 | else: |
99 | lines.append("%s : %s" % (_getpath(project), project.name)) | 133 | lines.append("%s : %s" % (_getpath(project), project.name)) |
100 | 134 | ||
101 | if lines: | 135 | if lines: |
102 | lines.sort() | 136 | lines.sort() |
103 | print('\n'.join(lines)) | 137 | print("\n".join(lines)) |
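The _getpath() helper above picks between three path representations. A minimal sketch of the same decision follows, with a hypothetical display_path() and a made-up checkout top directory standing in for project.RelPath():

    # Mirrors the choice made in _getpath(): --relative-to is resolved with
    # realpath once so symlinks do not destabilize the output, then each
    # worktree is shown absolute, relative to that directory, or relative to
    # the (assumed) top of the checkout.
    import os

    TOPDIR = "/src/checkout"  # stand-in for the repo client top directory

    def display_path(worktree, fullpath=False, relative_to=None):
        if fullpath:
            return worktree
        if relative_to:
            return os.path.relpath(worktree, os.path.realpath(relative_to))
        return os.path.relpath(worktree, TOPDIR)

    print(display_path("/src/checkout/build/make"))                      # build/make
    print(display_path("/src/checkout/build/make", fullpath=True))       # absolute path
    print(display_path("/src/checkout/build/make", relative_to="/src"))  # checkout/build/make
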
diff --git a/subcmds/manifest.py b/subcmds/manifest.py index f4602a59..f72df348 100644 --- a/subcmds/manifest.py +++ b/subcmds/manifest.py | |||
@@ -20,12 +20,12 @@ from command import PagedCommand | |||
20 | 20 | ||
21 | 21 | ||
22 | class Manifest(PagedCommand): | 22 | class Manifest(PagedCommand): |
23 | COMMON = False | 23 | COMMON = False |
24 | helpSummary = "Manifest inspection utility" | 24 | helpSummary = "Manifest inspection utility" |
25 | helpUsage = """ | 25 | helpUsage = """ |
26 | %prog [-o {-|NAME.xml}] [-m MANIFEST.xml] [-r] | 26 | %prog [-o {-|NAME.xml}] [-m MANIFEST.xml] [-r] |
27 | """ | 27 | """ |
28 | _helpDescription = """ | 28 | _helpDescription = """ |
29 | 29 | ||
30 | With the -o option, exports the current manifest for inspection. | 30 | With the -o option, exports the current manifest for inspection. |
31 | The manifest and (if present) local_manifests/ are combined | 31 | The manifest and (if present) local_manifests/ are combined |
@@ -40,92 +40,136 @@ when the manifest was generated. The 'dest-branch' attribute is set | |||
40 | to indicate the remote ref to push changes to via 'repo upload'. | 40 | to indicate the remote ref to push changes to via 'repo upload'. |
41 | """ | 41 | """ |
42 | 42 | ||
43 | @property | 43 | @property |
44 | def helpDescription(self): | 44 | def helpDescription(self): |
45 | helptext = self._helpDescription + '\n' | 45 | helptext = self._helpDescription + "\n" |
46 | r = os.path.dirname(__file__) | 46 | r = os.path.dirname(__file__) |
47 | r = os.path.dirname(r) | 47 | r = os.path.dirname(r) |
48 | with open(os.path.join(r, 'docs', 'manifest-format.md')) as fd: | 48 | with open(os.path.join(r, "docs", "manifest-format.md")) as fd: |
49 | for line in fd: | 49 | for line in fd: |
50 | helptext += line | 50 | helptext += line |
51 | return helptext | 51 | return helptext |
52 | 52 | ||
53 | def _Options(self, p): | 53 | def _Options(self, p): |
54 | p.add_option('-r', '--revision-as-HEAD', | 54 | p.add_option( |
55 | dest='peg_rev', action='store_true', | 55 | "-r", |
56 | help='save revisions as current HEAD') | 56 | "--revision-as-HEAD", |
57 | p.add_option('-m', '--manifest-name', | 57 | dest="peg_rev", |
58 | help='temporary manifest to use for this sync', metavar='NAME.xml') | 58 | action="store_true", |
59 | p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream', | 59 | help="save revisions as current HEAD", |
60 | default=True, action='store_false', | 60 | ) |
61 | help='if in -r mode, do not write the upstream field ' | 61 | p.add_option( |
62 | '(only of use if the branch names for a sha1 manifest are ' | 62 | "-m", |
63 | 'sensitive)') | 63 | "--manifest-name", |
64 | p.add_option('--suppress-dest-branch', dest='peg_rev_dest_branch', | 64 | help="temporary manifest to use for this sync", |
65 | default=True, action='store_false', | 65 | metavar="NAME.xml", |
66 | help='if in -r mode, do not write the dest-branch field ' | 66 | ) |
67 | '(only of use if the branch names for a sha1 manifest are ' | 67 | p.add_option( |
68 | 'sensitive)') | 68 | "--suppress-upstream-revision", |
69 | p.add_option('--json', default=False, action='store_true', | 69 | dest="peg_rev_upstream", |
70 | help='output manifest in JSON format (experimental)') | 70 | default=True, |
71 | p.add_option('--pretty', default=False, action='store_true', | 71 | action="store_false", |
72 | help='format output for humans to read') | 72 | help="if in -r mode, do not write the upstream field " |
73 | p.add_option('--no-local-manifests', default=False, action='store_true', | 73 | "(only of use if the branch names for a sha1 manifest are " |
74 | dest='ignore_local_manifests', help='ignore local manifests') | 74 | "sensitive)", |
75 | p.add_option('-o', '--output-file', | 75 | ) |
76 | dest='output_file', | 76 | p.add_option( |
77 | default='-', | 77 | "--suppress-dest-branch", |
78 | help='file to save the manifest to. (Filename prefix for multi-tree.)', | 78 | dest="peg_rev_dest_branch", |
79 | metavar='-|NAME.xml') | 79 | default=True, |
80 | 80 | action="store_false", | |
81 | def _Output(self, opt): | 81 | help="if in -r mode, do not write the dest-branch field " |
82 | # If alternate manifest is specified, override the manifest file that we're using. | 82 | "(only of use if the branch names for a sha1 manifest are " |
83 | if opt.manifest_name: | 83 | "sensitive)", |
84 | self.manifest.Override(opt.manifest_name, False) | 84 | ) |
85 | 85 | p.add_option( | |
86 | for manifest in self.ManifestList(opt): | 86 | "--json", |
87 | output_file = opt.output_file | 87 | default=False, |
88 | if output_file == '-': | 88 | action="store_true", |
89 | fd = sys.stdout | 89 | help="output manifest in JSON format (experimental)", |
90 | else: | 90 | ) |
91 | if manifest.path_prefix: | 91 | p.add_option( |
92 | output_file = f'{opt.output_file}:{manifest.path_prefix.replace("/", "%2f")}' | 92 | "--pretty", |
93 | fd = open(output_file, 'w') | 93 | default=False, |
94 | 94 | action="store_true", | |
95 | manifest.SetUseLocalManifests(not opt.ignore_local_manifests) | 95 | help="format output for humans to read", |
96 | 96 | ) | |
97 | if opt.json: | 97 | p.add_option( |
98 | print('warning: --json is experimental!', file=sys.stderr) | 98 | "--no-local-manifests", |
99 | doc = manifest.ToDict(peg_rev=opt.peg_rev, | 99 | default=False, |
100 | peg_rev_upstream=opt.peg_rev_upstream, | 100 | action="store_true", |
101 | peg_rev_dest_branch=opt.peg_rev_dest_branch) | 101 | dest="ignore_local_manifests", |
102 | 102 | help="ignore local manifests", | |
103 | json_settings = { | 103 | ) |
104 | # JSON style guide says Uunicode characters are fully allowed. | 104 | p.add_option( |
105 | 'ensure_ascii': False, | 105 | "-o", |
106 | # We use 2 space indent to match JSON style guide. | 106 | "--output-file", |
107 | 'indent': 2 if opt.pretty else None, | 107 | dest="output_file", |
108 | 'separators': (',', ': ') if opt.pretty else (',', ':'), | 108 | default="-", |
109 | 'sort_keys': True, | 109 | help="file to save the manifest to. (Filename prefix for " |
110 | } | 110 | "multi-tree.)", |
111 | fd.write(json.dumps(doc, **json_settings)) | 111 | metavar="-|NAME.xml", |
112 | else: | 112 | ) |
113 | manifest.Save(fd, | 113 | |
114 | peg_rev=opt.peg_rev, | 114 | def _Output(self, opt): |
115 | peg_rev_upstream=opt.peg_rev_upstream, | 115 | # If alternate manifest is specified, override the manifest file that |
116 | peg_rev_dest_branch=opt.peg_rev_dest_branch) | 116 | # we're using. |
117 | if output_file != '-': | 117 | if opt.manifest_name: |
118 | fd.close() | 118 | self.manifest.Override(opt.manifest_name, False) |
119 | if manifest.path_prefix: | 119 | |
120 | print(f'Saved {manifest.path_prefix} submanifest to {output_file}', | 120 | for manifest in self.ManifestList(opt): |
121 | file=sys.stderr) | 121 | output_file = opt.output_file |
122 | else: | 122 | if output_file == "-": |
123 | print(f'Saved manifest to {output_file}', file=sys.stderr) | 123 | fd = sys.stdout |
124 | 124 | else: | |
125 | 125 | if manifest.path_prefix: | |
126 | def ValidateOptions(self, opt, args): | 126 | output_file = ( |
127 | if args: | 127 | f"{opt.output_file}:" |
128 | self.Usage() | 128 | f'{manifest.path_prefix.replace("/", "%2f")}' |
129 | 129 | ) | |
130 | def Execute(self, opt, args): | 130 | fd = open(output_file, "w") |
131 | self._Output(opt) | 131 | |
132 | manifest.SetUseLocalManifests(not opt.ignore_local_manifests) | ||
133 | |||
134 | if opt.json: | ||
135 | print("warning: --json is experimental!", file=sys.stderr) | ||
136 | doc = manifest.ToDict( | ||
137 | peg_rev=opt.peg_rev, | ||
138 | peg_rev_upstream=opt.peg_rev_upstream, | ||
139 | peg_rev_dest_branch=opt.peg_rev_dest_branch, | ||
140 | ) | ||
141 | |||
142 | json_settings = { | ||
143 | # JSON style guide says Unicode characters are fully | ||
144 | # allowed. | ||
145 | "ensure_ascii": False, | ||
146 | # We use 2 space indent to match JSON style guide. | ||
147 | "indent": 2 if opt.pretty else None, | ||
148 | "separators": (",", ": ") if opt.pretty else (",", ":"), | ||
149 | "sort_keys": True, | ||
150 | } | ||
151 | fd.write(json.dumps(doc, **json_settings)) | ||
152 | else: | ||
153 | manifest.Save( | ||
154 | fd, | ||
155 | peg_rev=opt.peg_rev, | ||
156 | peg_rev_upstream=opt.peg_rev_upstream, | ||
157 | peg_rev_dest_branch=opt.peg_rev_dest_branch, | ||
158 | ) | ||
159 | if output_file != "-": | ||
160 | fd.close() | ||
161 | if manifest.path_prefix: | ||
162 | print( | ||
163 | f"Saved {manifest.path_prefix} submanifest to " | ||
164 | f"{output_file}", | ||
165 | file=sys.stderr, | ||
166 | ) | ||
167 | else: | ||
168 | print(f"Saved manifest to {output_file}", file=sys.stderr) | ||
169 | |||
170 | def ValidateOptions(self, opt, args): | ||
171 | if args: | ||
172 | self.Usage() | ||
173 | |||
174 | def Execute(self, opt, args): | ||
175 | self._Output(opt) | ||
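The json_settings dict above is what toggles between compact and pretty JSON for `repo manifest --json`. A toy demonstration with a made-up document in place of the real manifest dict:

    # Same json.dumps settings as above: non-ASCII left unescaped, keys sorted
    # for stable output, compact separators by default, and a 2-space indent
    # with a ": " key separator when --pretty is requested.
    import json

    doc = {"project": "platform/build", "path": "build/make", "notes": "構成"}

    for pretty in (False, True):
        settings = {
            "ensure_ascii": False,
            "indent": 2 if pretty else None,
            "separators": (",", ": ") if pretty else (",", ":"),
            "sort_keys": True,
        }
        print(json.dumps(doc, **settings))
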
diff --git a/subcmds/overview.py b/subcmds/overview.py index 11dba95f..8ccad611 100644 --- a/subcmds/overview.py +++ b/subcmds/overview.py | |||
@@ -19,12 +19,12 @@ from command import PagedCommand | |||
19 | 19 | ||
20 | 20 | ||
21 | class Overview(PagedCommand): | 21 | class Overview(PagedCommand): |
22 | COMMON = True | 22 | COMMON = True |
23 | helpSummary = "Display overview of unmerged project branches" | 23 | helpSummary = "Display overview of unmerged project branches" |
24 | helpUsage = """ | 24 | helpUsage = """ |
25 | %prog [--current-branch] [<project>...] | 25 | %prog [--current-branch] [<project>...] |
26 | """ | 26 | """ |
27 | helpDescription = """ | 27 | helpDescription = """ |
28 | The '%prog' command is used to display an overview of the projects branches, | 28 | The '%prog' command is used to display an overview of the projects branches, |
29 | and list any local commits that have not yet been merged into the project. | 29 | and list any local commits that have not yet been merged into the project. |
30 | 30 | ||
@@ -33,59 +33,77 @@ branches currently checked out in each project. By default, all branches | |||
33 | are displayed. | 33 | are displayed. |
34 | """ | 34 | """ |
35 | 35 | ||
36 | def _Options(self, p): | 36 | def _Options(self, p): |
37 | p.add_option('-c', '--current-branch', | 37 | p.add_option( |
38 | dest="current_branch", action="store_true", | 38 | "-c", |
39 | help="consider only checked out branches") | 39 | "--current-branch", |
40 | p.add_option('--no-current-branch', | 40 | dest="current_branch", |
41 | dest='current_branch', action='store_false', | 41 | action="store_true", |
42 | help='consider all local branches') | 42 | help="consider only checked out branches", |
43 | # Turn this into a warning & remove this someday. | 43 | ) |
44 | p.add_option('-b', | 44 | p.add_option( |
45 | dest='current_branch', action='store_true', | 45 | "--no-current-branch", |
46 | help=optparse.SUPPRESS_HELP) | 46 | dest="current_branch", |
47 | action="store_false", | ||
48 | help="consider all local branches", | ||
49 | ) | ||
50 | # Turn this into a warning & remove this someday. | ||
51 | p.add_option( | ||
52 | "-b", | ||
53 | dest="current_branch", | ||
54 | action="store_true", | ||
55 | help=optparse.SUPPRESS_HELP, | ||
56 | ) | ||
47 | 57 | ||
48 | def Execute(self, opt, args): | 58 | def Execute(self, opt, args): |
49 | all_branches = [] | 59 | all_branches = [] |
50 | for project in self.GetProjects(args, all_manifests=not opt.this_manifest_only): | 60 | for project in self.GetProjects( |
51 | br = [project.GetUploadableBranch(x) | 61 | args, all_manifests=not opt.this_manifest_only |
52 | for x in project.GetBranches()] | 62 | ): |
53 | br = [x for x in br if x] | 63 | br = [project.GetUploadableBranch(x) for x in project.GetBranches()] |
54 | if opt.current_branch: | 64 | br = [x for x in br if x] |
55 | br = [x for x in br if x.name == project.CurrentBranch] | 65 | if opt.current_branch: |
56 | all_branches.extend(br) | 66 | br = [x for x in br if x.name == project.CurrentBranch] |
67 | all_branches.extend(br) | ||
57 | 68 | ||
58 | if not all_branches: | 69 | if not all_branches: |
59 | return | 70 | return |
60 | 71 | ||
61 | class Report(Coloring): | 72 | class Report(Coloring): |
62 | def __init__(self, config): | 73 | def __init__(self, config): |
63 | Coloring.__init__(self, config, 'status') | 74 | Coloring.__init__(self, config, "status") |
64 | self.project = self.printer('header', attr='bold') | 75 | self.project = self.printer("header", attr="bold") |
65 | self.text = self.printer('text') | 76 | self.text = self.printer("text") |
66 | 77 | ||
67 | out = Report(all_branches[0].project.config) | 78 | out = Report(all_branches[0].project.config) |
68 | out.text("Deprecated. See repo info -o.") | 79 | out.text("Deprecated. See repo info -o.") |
69 | out.nl() | ||
70 | out.project('Projects Overview') | ||
71 | out.nl() | ||
72 | |||
73 | project = None | ||
74 | |||
75 | for branch in all_branches: | ||
76 | if project != branch.project: | ||
77 | project = branch.project | ||
78 | out.nl() | 80 | out.nl() |
79 | out.project('project %s/' % project.RelPath(local=opt.this_manifest_only)) | 81 | out.project("Projects Overview") |
80 | out.nl() | 82 | out.nl() |
81 | 83 | ||
82 | commits = branch.commits | 84 | project = None |
83 | date = branch.date | 85 | |
84 | print('%s %-33s (%2d commit%s, %s)' % ( | 86 | for branch in all_branches: |
85 | branch.name == project.CurrentBranch and '*' or ' ', | 87 | if project != branch.project: |
86 | branch.name, | 88 | project = branch.project |
87 | len(commits), | 89 | out.nl() |
88 | len(commits) != 1 and 's' or ' ', | 90 | out.project( |
89 | date)) | 91 | "project %s/" |
90 | for commit in commits: | 92 | % project.RelPath(local=opt.this_manifest_only) |
91 | print('%-35s - %s' % ('', commit)) | 93 | ) |
94 | out.nl() | ||
95 | |||
96 | commits = branch.commits | ||
97 | date = branch.date | ||
98 | print( | ||
99 | "%s %-33s (%2d commit%s, %s)" | ||
100 | % ( | ||
101 | branch.name == project.CurrentBranch and "*" or " ", | ||
102 | branch.name, | ||
103 | len(commits), | ||
104 | len(commits) != 1 and "s" or " ", | ||
105 | date, | ||
106 | ) | ||
107 | ) | ||
108 | for commit in commits: | ||
109 | print("%-35s - %s" % ("", commit)) | ||
diff --git a/subcmds/prune.py b/subcmds/prune.py index 251accaa..5a68c14a 100644 --- a/subcmds/prune.py +++ b/subcmds/prune.py | |||
@@ -19,63 +19,76 @@ from command import DEFAULT_LOCAL_JOBS, PagedCommand | |||
19 | 19 | ||
20 | 20 | ||
21 | class Prune(PagedCommand): | 21 | class Prune(PagedCommand): |
22 | COMMON = True | 22 | COMMON = True |
23 | helpSummary = "Prune (delete) already merged topics" | 23 | helpSummary = "Prune (delete) already merged topics" |
24 | helpUsage = """ | 24 | helpUsage = """ |
25 | %prog [<project>...] | 25 | %prog [<project>...] |
26 | """ | 26 | """ |
27 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 27 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
28 | 28 | ||
29 | def _ExecuteOne(self, project): | 29 | def _ExecuteOne(self, project): |
30 | """Process one project.""" | 30 | """Process one project.""" |
31 | return project.PruneHeads() | 31 | return project.PruneHeads() |
32 | 32 | ||
33 | def Execute(self, opt, args): | 33 | def Execute(self, opt, args): |
34 | projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) | 34 | projects = self.GetProjects( |
35 | args, all_manifests=not opt.this_manifest_only | ||
36 | ) | ||
35 | 37 | ||
36 | # NB: Should be able to refactor this module to display summary as results | 38 | # NB: Should be able to refactor this module to display summary as |
37 | # come back from children. | 39 | # results come back from children. |
38 | def _ProcessResults(_pool, _output, results): | 40 | def _ProcessResults(_pool, _output, results): |
39 | return list(itertools.chain.from_iterable(results)) | 41 | return list(itertools.chain.from_iterable(results)) |
40 | 42 | ||
41 | all_branches = self.ExecuteInParallel( | 43 | all_branches = self.ExecuteInParallel( |
42 | opt.jobs, | 44 | opt.jobs, |
43 | self._ExecuteOne, | 45 | self._ExecuteOne, |
44 | projects, | 46 | projects, |
45 | callback=_ProcessResults, | 47 | callback=_ProcessResults, |
46 | ordered=True) | 48 | ordered=True, |
49 | ) | ||
47 | 50 | ||
48 | if not all_branches: | 51 | if not all_branches: |
49 | return | 52 | return |
50 | 53 | ||
51 | class Report(Coloring): | 54 | class Report(Coloring): |
52 | def __init__(self, config): | 55 | def __init__(self, config): |
53 | Coloring.__init__(self, config, 'status') | 56 | Coloring.__init__(self, config, "status") |
54 | self.project = self.printer('header', attr='bold') | 57 | self.project = self.printer("header", attr="bold") |
55 | 58 | ||
56 | out = Report(all_branches[0].project.config) | 59 | out = Report(all_branches[0].project.config) |
57 | out.project('Pending Branches') | 60 | out.project("Pending Branches") |
58 | out.nl() | 61 | out.nl() |
59 | 62 | ||
60 | project = None | 63 | project = None |
61 | 64 | ||
62 | for branch in all_branches: | 65 | for branch in all_branches: |
63 | if project != branch.project: | 66 | if project != branch.project: |
64 | project = branch.project | 67 | project = branch.project |
65 | out.nl() | 68 | out.nl() |
66 | out.project('project %s/' % project.RelPath(local=opt.this_manifest_only)) | 69 | out.project( |
67 | out.nl() | 70 | "project %s/" |
71 | % project.RelPath(local=opt.this_manifest_only) | ||
72 | ) | ||
73 | out.nl() | ||
68 | 74 | ||
69 | print('%s %-33s ' % ( | 75 | print( |
70 | branch.name == project.CurrentBranch and '*' or ' ', | 76 | "%s %-33s " |
71 | branch.name), end='') | 77 | % ( |
78 | branch.name == project.CurrentBranch and "*" or " ", | ||
79 | branch.name, | ||
80 | ), | ||
81 | end="", | ||
82 | ) | ||
72 | 83 | ||
73 | if not branch.base_exists: | 84 | if not branch.base_exists: |
74 | print('(ignoring: tracking branch is gone: %s)' % (branch.base,)) | 85 | print( |
75 | else: | 86 | "(ignoring: tracking branch is gone: %s)" % (branch.base,) |
76 | commits = branch.commits | 87 | ) |
77 | date = branch.date | 88 | else: |
78 | print('(%2d commit%s, %s)' % ( | 89 | commits = branch.commits |
79 | len(commits), | 90 | date = branch.date |
80 | len(commits) != 1 and 's' or ' ', | 91 | print( |
81 | date)) | 92 | "(%2d commit%s, %s)" |
93 | % (len(commits), len(commits) != 1 and "s" or " ", date) | ||
94 | ) | ||
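The `_ProcessResults` callback above only flattens the per-project lists of pruned branches into one list. That itertools pattern in isolation, with dummy data:

    import itertools

    # Each parallel worker returns the list of branches it pruned in one project.
    per_project_results = [["topic-a", "topic-b"], [], ["topic-c"]]

    # Same flattening as _ProcessResults above.
    all_branches = list(itertools.chain.from_iterable(per_project_results))
    print(all_branches)  # ['topic-a', 'topic-b', 'topic-c']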
diff --git a/subcmds/rebase.py b/subcmds/rebase.py index 3d1a63e4..dc4f5805 100644 --- a/subcmds/rebase.py +++ b/subcmds/rebase.py | |||
@@ -20,146 +20,193 @@ from git_command import GitCommand | |||
20 | 20 | ||
21 | 21 | ||
22 | class RebaseColoring(Coloring): | 22 | class RebaseColoring(Coloring): |
23 | def __init__(self, config): | 23 | def __init__(self, config): |
24 | Coloring.__init__(self, config, 'rebase') | 24 | Coloring.__init__(self, config, "rebase") |
25 | self.project = self.printer('project', attr='bold') | 25 | self.project = self.printer("project", attr="bold") |
26 | self.fail = self.printer('fail', fg='red') | 26 | self.fail = self.printer("fail", fg="red") |
27 | 27 | ||
28 | 28 | ||
29 | class Rebase(Command): | 29 | class Rebase(Command): |
30 | COMMON = True | 30 | COMMON = True |
31 | helpSummary = "Rebase local branches on upstream branch" | 31 | helpSummary = "Rebase local branches on upstream branch" |
32 | helpUsage = """ | 32 | helpUsage = """ |
33 | %prog {[<project>...] | -i <project>...} | 33 | %prog {[<project>...] | -i <project>...} |
34 | """ | 34 | """ |
35 | helpDescription = """ | 35 | helpDescription = """ |
36 | '%prog' uses git rebase to move local changes in the current topic branch to | 36 | '%prog' uses git rebase to move local changes in the current topic branch to |
37 | the HEAD of the upstream history, useful when you have made commits in a topic | 37 | the HEAD of the upstream history, useful when you have made commits in a topic |
38 | branch but need to incorporate new upstream changes "underneath" them. | 38 | branch but need to incorporate new upstream changes "underneath" them. |
39 | """ | 39 | """ |
40 | 40 | ||
41 | def _Options(self, p): | 41 | def _Options(self, p): |
42 | g = p.get_option_group('--quiet') | 42 | g = p.get_option_group("--quiet") |
43 | g.add_option('-i', '--interactive', | 43 | g.add_option( |
44 | dest="interactive", action="store_true", | 44 | "-i", |
45 | help="interactive rebase (single project only)") | 45 | "--interactive", |
46 | 46 | dest="interactive", | |
47 | p.add_option('--fail-fast', | 47 | action="store_true", |
48 | dest='fail_fast', action='store_true', | 48 | help="interactive rebase (single project only)", |
49 | help='stop rebasing after first error is hit') | 49 | ) |
50 | p.add_option('-f', '--force-rebase', | 50 | |
51 | dest='force_rebase', action='store_true', | 51 | p.add_option( |
52 | help='pass --force-rebase to git rebase') | 52 | "--fail-fast", |
53 | p.add_option('--no-ff', | 53 | dest="fail_fast", |
54 | dest='ff', default=True, action='store_false', | 54 | action="store_true", |
55 | help='pass --no-ff to git rebase') | 55 | help="stop rebasing after first error is hit", |
56 | p.add_option('--autosquash', | 56 | ) |
57 | dest='autosquash', action='store_true', | 57 | p.add_option( |
58 | help='pass --autosquash to git rebase') | 58 | "-f", |
59 | p.add_option('--whitespace', | 59 | "--force-rebase", |
60 | dest='whitespace', action='store', metavar='WS', | 60 | dest="force_rebase", |
61 | help='pass --whitespace to git rebase') | 61 | action="store_true", |
62 | p.add_option('--auto-stash', | 62 | help="pass --force-rebase to git rebase", |
63 | dest='auto_stash', action='store_true', | 63 | ) |
64 | help='stash local modifications before starting') | 64 | p.add_option( |
65 | p.add_option('-m', '--onto-manifest', | 65 | "--no-ff", |
66 | dest='onto_manifest', action='store_true', | 66 | dest="ff", |
67 | help='rebase onto the manifest version instead of upstream ' | 67 | default=True, |
68 | 'HEAD (this helps to make sure the local tree stays ' | 68 | action="store_false", |
69 | 'consistent if you previously synced to a manifest)') | 69 | help="pass --no-ff to git rebase", |
70 | 70 | ) | |
71 | def Execute(self, opt, args): | 71 | p.add_option( |
72 | all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) | 72 | "--autosquash", |
73 | one_project = len(all_projects) == 1 | 73 | dest="autosquash", |
74 | 74 | action="store_true", | |
75 | if opt.interactive and not one_project: | 75 | help="pass --autosquash to git rebase", |
76 | print('error: interactive rebase not supported with multiple projects', | 76 | ) |
77 | file=sys.stderr) | 77 | p.add_option( |
78 | if len(args) == 1: | 78 | "--whitespace", |
79 | print('note: project %s is mapped to more than one path' % (args[0],), | 79 | dest="whitespace", |
80 | file=sys.stderr) | 80 | action="store", |
81 | return 1 | 81 | metavar="WS", |
82 | 82 | help="pass --whitespace to git rebase", | |
83 | # Setup the common git rebase args that we use for all projects. | 83 | ) |
84 | common_args = ['rebase'] | 84 | p.add_option( |
85 | if opt.whitespace: | 85 | "--auto-stash", |
86 | common_args.append('--whitespace=%s' % opt.whitespace) | 86 | dest="auto_stash", |
87 | if opt.quiet: | 87 | action="store_true", |
88 | common_args.append('--quiet') | 88 | help="stash local modifications before starting", |
89 | if opt.force_rebase: | 89 | ) |
90 | common_args.append('--force-rebase') | 90 | p.add_option( |
91 | if not opt.ff: | 91 | "-m", |
92 | common_args.append('--no-ff') | 92 | "--onto-manifest", |
93 | if opt.autosquash: | 93 | dest="onto_manifest", |
94 | common_args.append('--autosquash') | 94 | action="store_true", |
95 | if opt.interactive: | 95 | help="rebase onto the manifest version instead of upstream " |
96 | common_args.append('-i') | 96 | "HEAD (this helps to make sure the local tree stays " |
97 | 97 | "consistent if you previously synced to a manifest)", | |
98 | config = self.manifest.manifestProject.config | 98 | ) |
99 | out = RebaseColoring(config) | 99 | |
100 | out.redirect(sys.stdout) | 100 | def Execute(self, opt, args): |
101 | _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) | 101 | all_projects = self.GetProjects( |
102 | 102 | args, all_manifests=not opt.this_manifest_only | |
103 | ret = 0 | 103 | ) |
104 | for project in all_projects: | 104 | one_project = len(all_projects) == 1 |
105 | if ret and opt.fail_fast: | 105 | |
106 | break | 106 | if opt.interactive and not one_project: |
107 | 107 | print( | |
108 | cb = project.CurrentBranch | 108 | "error: interactive rebase not supported with multiple " |
109 | if not cb: | 109 | "projects", |
110 | if one_project: | 110 | file=sys.stderr, |
111 | print("error: project %s has a detached HEAD" % _RelPath(project), | 111 | ) |
112 | file=sys.stderr) | 112 | if len(args) == 1: |
113 | return 1 | 113 | print( |
114 | # ignore branches with detatched HEADs | 114 | "note: project %s is mapped to more than one path" |
115 | continue | 115 | % (args[0],), |
116 | 116 | file=sys.stderr, | |
117 | upbranch = project.GetBranch(cb) | 117 | ) |
118 | if not upbranch.LocalMerge: | 118 | return 1 |
119 | if one_project: | 119 | |
120 | print("error: project %s does not track any remote branches" | 120 | # Setup the common git rebase args that we use for all projects. |
121 | % _RelPath(project), file=sys.stderr) | 121 | common_args = ["rebase"] |
122 | return 1 | 122 | if opt.whitespace: |
123 | # ignore branches without remotes | 123 | common_args.append("--whitespace=%s" % opt.whitespace) |
124 | continue | 124 | if opt.quiet: |
125 | 125 | common_args.append("--quiet") | |
126 | args = common_args[:] | 126 | if opt.force_rebase: |
127 | if opt.onto_manifest: | 127 | common_args.append("--force-rebase") |
128 | args.append('--onto') | 128 | if not opt.ff: |
129 | args.append(project.revisionExpr) | 129 | common_args.append("--no-ff") |
130 | 130 | if opt.autosquash: | |
131 | args.append(upbranch.LocalMerge) | 131 | common_args.append("--autosquash") |
132 | 132 | if opt.interactive: | |
133 | out.project('project %s: rebasing %s -> %s', | 133 | common_args.append("-i") |
134 | _RelPath(project), cb, upbranch.LocalMerge) | 134 | |
135 | out.nl() | 135 | config = self.manifest.manifestProject.config |
136 | out.flush() | 136 | out = RebaseColoring(config) |
137 | 137 | out.redirect(sys.stdout) | |
138 | needs_stash = False | 138 | _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) |
139 | if opt.auto_stash: | 139 | |
140 | stash_args = ["update-index", "--refresh", "-q"] | 140 | ret = 0 |
141 | 141 | for project in all_projects: | |
142 | if GitCommand(project, stash_args).Wait() != 0: | 142 | if ret and opt.fail_fast: |
143 | needs_stash = True | 143 | break |
144 | # Dirty index, requires stash... | 144 | |
145 | stash_args = ["stash"] | 145 | cb = project.CurrentBranch |
146 | 146 | if not cb: | |
147 | if GitCommand(project, stash_args).Wait() != 0: | 147 | if one_project: |
148 | ret += 1 | 148 | print( |
149 | continue | 149 | "error: project %s has a detached HEAD" |
150 | 150 | % _RelPath(project), | |
151 | if GitCommand(project, args).Wait() != 0: | 151 | file=sys.stderr, |
152 | ret += 1 | 152 | ) |
153 | continue | 153 | return 1 |
154 | 154 | # Ignore branches with detached HEADs. | |
155 | if needs_stash: | 155 | continue |
156 | stash_args.append('pop') | 156 | |
157 | stash_args.append('--quiet') | 157 | upbranch = project.GetBranch(cb) |
158 | if GitCommand(project, stash_args).Wait() != 0: | 158 | if not upbranch.LocalMerge: |
159 | ret += 1 | 159 | if one_project: |
160 | 160 | print( | |
161 | if ret: | 161 | "error: project %s does not track any remote branches" |
162 | out.fail('%i projects had errors', ret) | 162 | % _RelPath(project), |
163 | out.nl() | 163 | file=sys.stderr, |
164 | 164 | ) | |
165 | return ret | 165 | return 1 |
166 | # Ignore branches without remotes. | ||
167 | continue | ||
168 | |||
169 | args = common_args[:] | ||
170 | if opt.onto_manifest: | ||
171 | args.append("--onto") | ||
172 | args.append(project.revisionExpr) | ||
173 | |||
174 | args.append(upbranch.LocalMerge) | ||
175 | |||
176 | out.project( | ||
177 | "project %s: rebasing %s -> %s", | ||
178 | _RelPath(project), | ||
179 | cb, | ||
180 | upbranch.LocalMerge, | ||
181 | ) | ||
182 | out.nl() | ||
183 | out.flush() | ||
184 | |||
185 | needs_stash = False | ||
186 | if opt.auto_stash: | ||
187 | stash_args = ["update-index", "--refresh", "-q"] | ||
188 | |||
189 | if GitCommand(project, stash_args).Wait() != 0: | ||
190 | needs_stash = True | ||
191 | # Dirty index, requires stash... | ||
192 | stash_args = ["stash"] | ||
193 | |||
194 | if GitCommand(project, stash_args).Wait() != 0: | ||
195 | ret += 1 | ||
196 | continue | ||
197 | |||
198 | if GitCommand(project, args).Wait() != 0: | ||
199 | ret += 1 | ||
200 | continue | ||
201 | |||
202 | if needs_stash: | ||
203 | stash_args.append("pop") | ||
204 | stash_args.append("--quiet") | ||
205 | if GitCommand(project, stash_args).Wait() != 0: | ||
206 | ret += 1 | ||
207 | |||
208 | if ret: | ||
209 | out.fail("%i projects had errors", ret) | ||
210 | out.nl() | ||
211 | |||
212 | return ret | ||
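The `--auto-stash` path above probes the index with `git update-index --refresh -q`, stashes if that reports a dirty tree, runs the rebase, and pops the stash afterwards. A rough standalone approximation, using subprocess in place of repo's GitCommand wrapper and a placeholder upstream (not the actual implementation):

    import subprocess

    def rebase_with_auto_stash(cwd, rebase_args=("rebase", "@{upstream}")):
        def git(*args):
            return subprocess.run(["git", *args], cwd=cwd).returncode

        needs_stash = git("update-index", "--refresh", "-q") != 0
        if needs_stash and git("stash") != 0:
            return 1  # could not stash local modifications
        if git(*rebase_args) != 0:
            return 1  # rebase failed; leave the stash in place
        if needs_stash and git("stash", "pop", "--quiet") != 0:
            return 1
        return 0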
diff --git a/subcmds/selfupdate.py b/subcmds/selfupdate.py index 898bc3f2..d5d0a838 100644 --- a/subcmds/selfupdate.py +++ b/subcmds/selfupdate.py | |||
@@ -21,12 +21,12 @@ from subcmds.sync import _PostRepoFetch | |||
21 | 21 | ||
22 | 22 | ||
23 | class Selfupdate(Command, MirrorSafeCommand): | 23 | class Selfupdate(Command, MirrorSafeCommand): |
24 | COMMON = False | 24 | COMMON = False |
25 | helpSummary = "Update repo to the latest version" | 25 | helpSummary = "Update repo to the latest version" |
26 | helpUsage = """ | 26 | helpUsage = """ |
27 | %prog | 27 | %prog |
28 | """ | 28 | """ |
29 | helpDescription = """ | 29 | helpDescription = """ |
30 | The '%prog' command upgrades repo to the latest version, if a | 30 | The '%prog' command upgrades repo to the latest version, if a |
31 | newer version is available. | 31 | newer version is available. |
32 | 32 | ||
@@ -34,28 +34,33 @@ Normally this is done automatically by 'repo sync' and does not | |||
34 | need to be performed by an end-user. | 34 | need to be performed by an end-user. |
35 | """ | 35 | """ |
36 | 36 | ||
37 | def _Options(self, p): | 37 | def _Options(self, p): |
38 | g = p.add_option_group('repo Version options') | 38 | g = p.add_option_group("repo Version options") |
39 | g.add_option('--no-repo-verify', | 39 | g.add_option( |
40 | dest='repo_verify', default=True, action='store_false', | 40 | "--no-repo-verify", |
41 | help='do not verify repo source code') | 41 | dest="repo_verify", |
42 | g.add_option('--repo-upgraded', | 42 | default=True, |
43 | dest='repo_upgraded', action='store_true', | 43 | action="store_false", |
44 | help=SUPPRESS_HELP) | 44 | help="do not verify repo source code", |
45 | 45 | ) | |
46 | def Execute(self, opt, args): | 46 | g.add_option( |
47 | rp = self.manifest.repoProject | 47 | "--repo-upgraded", |
48 | rp.PreSync() | 48 | dest="repo_upgraded", |
49 | 49 | action="store_true", | |
50 | if opt.repo_upgraded: | 50 | help=SUPPRESS_HELP, |
51 | _PostRepoUpgrade(self.manifest) | 51 | ) |
52 | 52 | ||
53 | else: | 53 | def Execute(self, opt, args): |
54 | if not rp.Sync_NetworkHalf().success: | 54 | rp = self.manifest.repoProject |
55 | print("error: can't update repo", file=sys.stderr) | 55 | rp.PreSync() |
56 | sys.exit(1) | 56 | |
57 | 57 | if opt.repo_upgraded: | |
58 | rp.bare_git.gc('--auto') | 58 | _PostRepoUpgrade(self.manifest) |
59 | _PostRepoFetch(rp, | 59 | |
60 | repo_verify=opt.repo_verify, | 60 | else: |
61 | verbose=True) | 61 | if not rp.Sync_NetworkHalf().success: |
62 | print("error: can't update repo", file=sys.stderr) | ||
63 | sys.exit(1) | ||
64 | |||
65 | rp.bare_git.gc("--auto") | ||
66 | _PostRepoFetch(rp, repo_verify=opt.repo_verify, verbose=True) | ||
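The `--no-repo-verify` flag above is the usual optparse pattern of a negative switch writing False into a positively named destination. A minimal sketch of that pattern outside of repo:

    import optparse

    parser = optparse.OptionParser()
    g = parser.add_option_group("repo Version options")
    g.add_option(
        "--no-repo-verify",
        dest="repo_verify",
        default=True,
        action="store_false",
        help="do not verify repo source code",
    )

    opts, _ = parser.parse_args(["--no-repo-verify"])
    print(opts.repo_verify)  # False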
diff --git a/subcmds/smartsync.py b/subcmds/smartsync.py index d91d59c6..49d09972 100644 --- a/subcmds/smartsync.py +++ b/subcmds/smartsync.py | |||
@@ -16,18 +16,18 @@ from subcmds.sync import Sync | |||
16 | 16 | ||
17 | 17 | ||
18 | class Smartsync(Sync): | 18 | class Smartsync(Sync): |
19 | COMMON = True | 19 | COMMON = True |
20 | helpSummary = "Update working tree to the latest known good revision" | 20 | helpSummary = "Update working tree to the latest known good revision" |
21 | helpUsage = """ | 21 | helpUsage = """ |
22 | %prog [<project>...] | 22 | %prog [<project>...] |
23 | """ | 23 | """ |
24 | helpDescription = """ | 24 | helpDescription = """ |
25 | The '%prog' command is a shortcut for sync -s. | 25 | The '%prog' command is a shortcut for sync -s. |
26 | """ | 26 | """ |
27 | 27 | ||
28 | def _Options(self, p): | 28 | def _Options(self, p): |
29 | Sync._Options(self, p, show_smart=False) | 29 | Sync._Options(self, p, show_smart=False) |
30 | 30 | ||
31 | def Execute(self, opt, args): | 31 | def Execute(self, opt, args): |
32 | opt.smart_sync = True | 32 | opt.smart_sync = True |
33 | Sync.Execute(self, opt, args) | 33 | Sync.Execute(self, opt, args) |
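Smartsync is purely an alias: it reuses Sync's option setup and forces `opt.smart_sync` before delegating. The same delegation pattern, reduced to a generic sketch with stand-in classes:

    import types

    class Base:
        def Execute(self, opt, args):
            print("smart_sync =", opt.smart_sync)

    class Alias(Base):
        def Execute(self, opt, args):
            opt.smart_sync = True  # force the flag, then hand off to the parent
            Base.Execute(self, opt, args)

    Alias().Execute(types.SimpleNamespace(smart_sync=False), [])  # smart_sync = True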
diff --git a/subcmds/stage.py b/subcmds/stage.py index bdb72012..4d54eb19 100644 --- a/subcmds/stage.py +++ b/subcmds/stage.py | |||
@@ -20,98 +20,111 @@ from git_command import GitCommand | |||
20 | 20 | ||
21 | 21 | ||
22 | class _ProjectList(Coloring): | 22 | class _ProjectList(Coloring): |
23 | def __init__(self, gc): | 23 | def __init__(self, gc): |
24 | Coloring.__init__(self, gc, 'interactive') | 24 | Coloring.__init__(self, gc, "interactive") |
25 | self.prompt = self.printer('prompt', fg='blue', attr='bold') | 25 | self.prompt = self.printer("prompt", fg="blue", attr="bold") |
26 | self.header = self.printer('header', attr='bold') | 26 | self.header = self.printer("header", attr="bold") |
27 | self.help = self.printer('help', fg='red', attr='bold') | 27 | self.help = self.printer("help", fg="red", attr="bold") |
28 | 28 | ||
29 | 29 | ||
30 | class Stage(InteractiveCommand): | 30 | class Stage(InteractiveCommand): |
31 | COMMON = True | 31 | COMMON = True |
32 | helpSummary = "Stage file(s) for commit" | 32 | helpSummary = "Stage file(s) for commit" |
33 | helpUsage = """ | 33 | helpUsage = """ |
34 | %prog -i [<project>...] | 34 | %prog -i [<project>...] |
35 | """ | 35 | """ |
36 | helpDescription = """ | 36 | helpDescription = """ |
37 | The '%prog' command stages files to prepare the next commit. | 37 | The '%prog' command stages files to prepare the next commit. |
38 | """ | 38 | """ |
39 | 39 | ||
40 | def _Options(self, p): | 40 | def _Options(self, p): |
41 | g = p.get_option_group('--quiet') | 41 | g = p.get_option_group("--quiet") |
42 | g.add_option('-i', '--interactive', | 42 | g.add_option( |
43 | dest='interactive', action='store_true', | 43 | "-i", |
44 | help='use interactive staging') | 44 | "--interactive", |
45 | 45 | dest="interactive", | |
46 | def Execute(self, opt, args): | 46 | action="store_true", |
47 | if opt.interactive: | 47 | help="use interactive staging", |
48 | self._Interactive(opt, args) | 48 | ) |
49 | else: | 49 | |
50 | self.Usage() | 50 | def Execute(self, opt, args): |
51 | 51 | if opt.interactive: | |
52 | def _Interactive(self, opt, args): | 52 | self._Interactive(opt, args) |
53 | all_projects = [ | 53 | else: |
54 | p for p in self.GetProjects(args, all_manifests=not opt.this_manifest_only) | 54 | self.Usage() |
55 | if p.IsDirty()] | 55 | |
56 | if not all_projects: | 56 | def _Interactive(self, opt, args): |
57 | print('no projects have uncommitted modifications', file=sys.stderr) | 57 | all_projects = [ |
58 | return | 58 | p |
59 | 59 | for p in self.GetProjects( | |
60 | out = _ProjectList(self.manifest.manifestProject.config) | 60 | args, all_manifests=not opt.this_manifest_only |
61 | while True: | 61 | ) |
62 | out.header(' %s', 'project') | 62 | if p.IsDirty() |
63 | out.nl() | 63 | ] |
64 | 64 | if not all_projects: | |
65 | for i in range(len(all_projects)): | 65 | print("no projects have uncommitted modifications", file=sys.stderr) |
66 | project = all_projects[i] | 66 | return |
67 | out.write('%3d: %s', i + 1, | 67 | |
68 | project.RelPath(local=opt.this_manifest_only) + '/') | 68 | out = _ProjectList(self.manifest.manifestProject.config) |
69 | out.nl() | 69 | while True: |
70 | out.nl() | 70 | out.header(" %s", "project") |
71 | 71 | out.nl() | |
72 | out.write('%3d: (', 0) | 72 | |
73 | out.prompt('q') | 73 | for i in range(len(all_projects)): |
74 | out.write('uit)') | 74 | project = all_projects[i] |
75 | out.nl() | 75 | out.write( |
76 | 76 | "%3d: %s", | |
77 | out.prompt('project> ') | 77 | i + 1, |
78 | out.flush() | 78 | project.RelPath(local=opt.this_manifest_only) + "/", |
79 | try: | 79 | ) |
80 | a = sys.stdin.readline() | 80 | out.nl() |
81 | except KeyboardInterrupt: | 81 | out.nl() |
82 | out.nl() | 82 | |
83 | break | 83 | out.write("%3d: (", 0) |
84 | if a == '': | 84 | out.prompt("q") |
85 | out.nl() | 85 | out.write("uit)") |
86 | break | 86 | out.nl() |
87 | 87 | ||
88 | a = a.strip() | 88 | out.prompt("project> ") |
89 | if a.lower() in ('q', 'quit', 'exit'): | 89 | out.flush() |
90 | break | 90 | try: |
91 | if not a: | 91 | a = sys.stdin.readline() |
92 | continue | 92 | except KeyboardInterrupt: |
93 | 93 | out.nl() | |
94 | try: | 94 | break |
95 | a_index = int(a) | 95 | if a == "": |
96 | except ValueError: | 96 | out.nl() |
97 | a_index = None | 97 | break |
98 | 98 | ||
99 | if a_index is not None: | 99 | a = a.strip() |
100 | if a_index == 0: | 100 | if a.lower() in ("q", "quit", "exit"): |
101 | break | 101 | break |
102 | if 0 < a_index and a_index <= len(all_projects): | 102 | if not a: |
103 | _AddI(all_projects[a_index - 1]) | 103 | continue |
104 | continue | 104 | |
105 | 105 | try: | |
106 | projects = [ | 106 | a_index = int(a) |
107 | p for p in all_projects | 107 | except ValueError: |
108 | if a in [p.name, p.RelPath(local=opt.this_manifest_only)]] | 108 | a_index = None |
109 | if len(projects) == 1: | 109 | |
110 | _AddI(projects[0]) | 110 | if a_index is not None: |
111 | continue | 111 | if a_index == 0: |
112 | print('Bye.') | 112 | break |
113 | if 0 < a_index and a_index <= len(all_projects): | ||
114 | _AddI(all_projects[a_index - 1]) | ||
115 | continue | ||
116 | |||
117 | projects = [ | ||
118 | p | ||
119 | for p in all_projects | ||
120 | if a in [p.name, p.RelPath(local=opt.this_manifest_only)] | ||
121 | ] | ||
122 | if len(projects) == 1: | ||
123 | _AddI(projects[0]) | ||
124 | continue | ||
125 | print("Bye.") | ||
113 | 126 | ||
114 | 127 | ||
115 | def _AddI(project): | 128 | def _AddI(project): |
116 | p = GitCommand(project, ['add', '--interactive'], bare=False) | 129 | p = GitCommand(project, ["add", "--interactive"], bare=False) |
117 | p.Wait() | 130 | p.Wait() |
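The interactive loop above keeps re-printing a numbered project menu, then interprets one line of input as a quit keyword, a 1-based index, or a project name. The selection logic on its own, as a hedged sketch with hypothetical item names (empty input is treated as quit here for brevity):

    def pick(items, line):
        # Interpret one prompt line: quit keyword, 1-based index, or exact name.
        line = line.strip()
        if not line or line.lower() in ("q", "quit", "exit") or line == "0":
            return None
        try:
            index = int(line)
        except ValueError:
            index = None
        if index is not None and 0 < index <= len(items):
            return items[index - 1]
        matches = [item for item in items if item == line]
        return matches[0] if len(matches) == 1 else None

    print(pick(["manifests", "repohooks"], "2"))          # repohooks
    print(pick(["manifests", "repohooks"], "manifests"))  # manifests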
diff --git a/subcmds/start.py b/subcmds/start.py index 809df963..d7772b33 100644 --- a/subcmds/start.py +++ b/subcmds/start.py | |||
@@ -25,119 +25,147 @@ from project import SyncBuffer | |||
25 | 25 | ||
26 | 26 | ||
27 | class Start(Command): | 27 | class Start(Command): |
28 | COMMON = True | 28 | COMMON = True |
29 | helpSummary = "Start a new branch for development" | 29 | helpSummary = "Start a new branch for development" |
30 | helpUsage = """ | 30 | helpUsage = """ |
31 | %prog <newbranchname> [--all | <project>...] | 31 | %prog <newbranchname> [--all | <project>...] |
32 | """ | 32 | """ |
33 | helpDescription = """ | 33 | helpDescription = """ |
34 | '%prog' begins a new branch of development, starting from the | 34 | '%prog' begins a new branch of development, starting from the |
35 | revision specified in the manifest. | 35 | revision specified in the manifest. |
36 | """ | 36 | """ |
37 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 37 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
38 | 38 | ||
39 | def _Options(self, p): | 39 | def _Options(self, p): |
40 | p.add_option('--all', | 40 | p.add_option( |
41 | dest='all', action='store_true', | 41 | "--all", |
42 | help='begin branch in all projects') | 42 | dest="all", |
43 | p.add_option('-r', '--rev', '--revision', dest='revision', | 43 | action="store_true", |
44 | help='point branch at this revision instead of upstream') | 44 | help="begin branch in all projects", |
45 | p.add_option('--head', '--HEAD', | 45 | ) |
46 | dest='revision', action='store_const', const='HEAD', | 46 | p.add_option( |
47 | help='abbreviation for --rev HEAD') | 47 | "-r", |
48 | 48 | "--rev", | |
49 | def ValidateOptions(self, opt, args): | 49 | "--revision", |
50 | if not args: | 50 | dest="revision", |
51 | self.Usage() | 51 | help="point branch at this revision instead of upstream", |
52 | 52 | ) | |
53 | nb = args[0] | 53 | p.add_option( |
54 | if not git.check_ref_format('heads/%s' % nb): | 54 | "--head", |
55 | self.OptionParser.error("'%s' is not a valid name" % nb) | 55 | "--HEAD", |
56 | 56 | dest="revision", | |
57 | def _ExecuteOne(self, revision, nb, project): | 57 | action="store_const", |
58 | """Start one project.""" | 58 | const="HEAD", |
59 | # If the current revision is immutable, such as a SHA1, a tag or | 59 | help="abbreviation for --rev HEAD", |
60 | # a change, then we can't push back to it. Substitute with | 60 | ) |
61 | # dest_branch, if defined; or with manifest default revision instead. | 61 | |
62 | branch_merge = '' | 62 | def ValidateOptions(self, opt, args): |
63 | if IsImmutable(project.revisionExpr): | 63 | if not args: |
64 | if project.dest_branch: | 64 | self.Usage() |
65 | branch_merge = project.dest_branch | 65 | |
66 | else: | 66 | nb = args[0] |
67 | branch_merge = self.manifest.default.revisionExpr | 67 | if not git.check_ref_format("heads/%s" % nb): |
68 | 68 | self.OptionParser.error("'%s' is not a valid name" % nb) | |
69 | try: | 69 | |
70 | ret = project.StartBranch( | 70 | def _ExecuteOne(self, revision, nb, project): |
71 | nb, branch_merge=branch_merge, revision=revision) | 71 | """Start one project.""" |
72 | except Exception as e: | 72 | # If the current revision is immutable, such as a SHA1, a tag or |
73 | print('error: unable to checkout %s: %s' % (project.name, e), file=sys.stderr) | 73 | # a change, then we can't push back to it. Substitute with |
74 | ret = False | 74 | # dest_branch, if defined; or with manifest default revision instead. |
75 | return (ret, project) | 75 | branch_merge = "" |
76 | 76 | if IsImmutable(project.revisionExpr): | |
77 | def Execute(self, opt, args): | 77 | if project.dest_branch: |
78 | nb = args[0] | 78 | branch_merge = project.dest_branch |
79 | err = [] | 79 | else: |
80 | projects = [] | 80 | branch_merge = self.manifest.default.revisionExpr |
81 | if not opt.all: | 81 | |
82 | projects = args[1:] | 82 | try: |
83 | if len(projects) < 1: | 83 | ret = project.StartBranch( |
84 | projects = ['.'] # start it in the local project by default | 84 | nb, branch_merge=branch_merge, revision=revision |
85 | 85 | ) | |
86 | all_projects = self.GetProjects(projects, | 86 | except Exception as e: |
87 | missing_ok=bool(self.gitc_manifest), | 87 | print( |
88 | all_manifests=not opt.this_manifest_only) | 88 | "error: unable to checkout %s: %s" % (project.name, e), |
89 | 89 | file=sys.stderr, | |
90 | # This must happen after we find all_projects, since GetProjects may need | 90 | ) |
91 | # the local directory, which will disappear once we save the GITC manifest. | 91 | ret = False |
92 | if self.gitc_manifest: | 92 | return (ret, project) |
93 | gitc_projects = self.GetProjects(projects, manifest=self.gitc_manifest, | 93 | |
94 | missing_ok=True) | 94 | def Execute(self, opt, args): |
95 | for project in gitc_projects: | 95 | nb = args[0] |
96 | if project.old_revision: | 96 | err = [] |
97 | project.already_synced = True | 97 | projects = [] |
98 | else: | 98 | if not opt.all: |
99 | project.already_synced = False | 99 | projects = args[1:] |
100 | project.old_revision = project.revisionExpr | 100 | if len(projects) < 1: |
101 | project.revisionExpr = None | 101 | projects = ["."] # start it in the local project by default |
102 | # Save the GITC manifest. | 102 | |
103 | gitc_utils.save_manifest(self.gitc_manifest) | 103 | all_projects = self.GetProjects( |
104 | 104 | projects, | |
105 | # Make sure we have a valid CWD | 105 | missing_ok=bool(self.gitc_manifest), |
106 | if not os.path.exists(os.getcwd()): | 106 | all_manifests=not opt.this_manifest_only, |
107 | os.chdir(self.manifest.topdir) | 107 | ) |
108 | 108 | ||
109 | pm = Progress('Syncing %s' % nb, len(all_projects), quiet=opt.quiet) | 109 | # This must happen after we find all_projects, since GetProjects may |
110 | for project in all_projects: | 110 | # need the local directory, which will disappear once we save the GITC |
111 | gitc_project = self.gitc_manifest.paths[project.relpath] | 111 | # manifest. |
112 | # Sync projects that have not been opened. | 112 | if self.gitc_manifest: |
113 | if not gitc_project.already_synced: | 113 | gitc_projects = self.GetProjects( |
114 | proj_localdir = os.path.join(self.gitc_manifest.gitc_client_dir, | 114 | projects, manifest=self.gitc_manifest, missing_ok=True |
115 | project.relpath) | 115 | ) |
116 | project.worktree = proj_localdir | 116 | for project in gitc_projects: |
117 | if not os.path.exists(proj_localdir): | 117 | if project.old_revision: |
118 | os.makedirs(proj_localdir) | 118 | project.already_synced = True |
119 | project.Sync_NetworkHalf() | 119 | else: |
120 | sync_buf = SyncBuffer(self.manifest.manifestProject.config) | 120 | project.already_synced = False |
121 | project.Sync_LocalHalf(sync_buf) | 121 | project.old_revision = project.revisionExpr |
122 | project.revisionId = gitc_project.old_revision | 122 | project.revisionExpr = None |
123 | pm.update() | 123 | # Save the GITC manifest. |
124 | pm.end() | 124 | gitc_utils.save_manifest(self.gitc_manifest) |
125 | 125 | ||
126 | def _ProcessResults(_pool, pm, results): | 126 | # Make sure we have a valid CWD. |
127 | for (result, project) in results: | 127 | if not os.path.exists(os.getcwd()): |
128 | if not result: | 128 | os.chdir(self.manifest.topdir) |
129 | err.append(project) | 129 | |
130 | pm.update() | 130 | pm = Progress("Syncing %s" % nb, len(all_projects), quiet=opt.quiet) |
131 | 131 | for project in all_projects: | |
132 | self.ExecuteInParallel( | 132 | gitc_project = self.gitc_manifest.paths[project.relpath] |
133 | opt.jobs, | 133 | # Sync projects that have not been opened. |
134 | functools.partial(self._ExecuteOne, opt.revision, nb), | 134 | if not gitc_project.already_synced: |
135 | all_projects, | 135 | proj_localdir = os.path.join( |
136 | callback=_ProcessResults, | 136 | self.gitc_manifest.gitc_client_dir, project.relpath |
137 | output=Progress('Starting %s' % (nb,), len(all_projects), quiet=opt.quiet)) | 137 | ) |
138 | 138 | project.worktree = proj_localdir | |
139 | if err: | 139 | if not os.path.exists(proj_localdir): |
140 | for p in err: | 140 | os.makedirs(proj_localdir) |
141 | print("error: %s/: cannot start %s" % (p.RelPath(local=opt.this_manifest_only), nb), | 141 | project.Sync_NetworkHalf() |
142 | file=sys.stderr) | 142 | sync_buf = SyncBuffer(self.manifest.manifestProject.config) |
143 | sys.exit(1) | 143 | project.Sync_LocalHalf(sync_buf) |
144 | project.revisionId = gitc_project.old_revision | ||
145 | pm.update() | ||
146 | pm.end() | ||
147 | |||
148 | def _ProcessResults(_pool, pm, results): | ||
149 | for result, project in results: | ||
150 | if not result: | ||
151 | err.append(project) | ||
152 | pm.update() | ||
153 | |||
154 | self.ExecuteInParallel( | ||
155 | opt.jobs, | ||
156 | functools.partial(self._ExecuteOne, opt.revision, nb), | ||
157 | all_projects, | ||
158 | callback=_ProcessResults, | ||
159 | output=Progress( | ||
160 | "Starting %s" % (nb,), len(all_projects), quiet=opt.quiet | ||
161 | ), | ||
162 | ) | ||
163 | |||
164 | if err: | ||
165 | for p in err: | ||
166 | print( | ||
167 | "error: %s/: cannot start %s" | ||
168 | % (p.RelPath(local=opt.this_manifest_only), nb), | ||
169 | file=sys.stderr, | ||
170 | ) | ||
171 | sys.exit(1) | ||
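`functools.partial` above bakes the fixed arguments (the revision and the new branch name) into the worker so the pool only has to pass each project. The same idea in miniature, with hypothetical project names:

    import functools

    def start_one(revision, branch, project):
        # Fixed arguments come first; the varying item is supplied last.
        return "checkout %s at %s in %s" % (branch, revision or "upstream", project)

    worker = functools.partial(start_one, None, "my-topic")

    for project in ["build/make", "platform/art"]:
        print(worker(project))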
diff --git a/subcmds/status.py b/subcmds/status.py index 572c72f7..6e0026f9 100644 --- a/subcmds/status.py +++ b/subcmds/status.py | |||
@@ -24,12 +24,12 @@ import platform_utils | |||
24 | 24 | ||
25 | 25 | ||
26 | class Status(PagedCommand): | 26 | class Status(PagedCommand): |
27 | COMMON = True | 27 | COMMON = True |
28 | helpSummary = "Show the working tree status" | 28 | helpSummary = "Show the working tree status" |
29 | helpUsage = """ | 29 | helpUsage = """ |
30 | %prog [<project>...] | 30 | %prog [<project>...] |
31 | """ | 31 | """ |
32 | helpDescription = """ | 32 | helpDescription = """ |
33 | '%prog' compares the working tree to the staging area (aka index), | 33 | '%prog' compares the working tree to the staging area (aka index), |
34 | and the most recent commit on this branch (HEAD), in each project | 34 | and the most recent commit on this branch (HEAD), in each project |
35 | specified. A summary is displayed, one line per file where there | 35 | specified. A summary is displayed, one line per file where there |
@@ -76,109 +76,128 @@ the following meanings: | |||
76 | d: deleted ( in index, not in work tree ) | 76 | d: deleted ( in index, not in work tree ) |
77 | 77 | ||
78 | """ | 78 | """ |
79 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 79 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
80 | 80 | ||
81 | def _Options(self, p): | 81 | def _Options(self, p): |
82 | p.add_option('-o', '--orphans', | 82 | p.add_option( |
83 | dest='orphans', action='store_true', | 83 | "-o", |
84 | help="include objects in working directory outside of repo projects") | 84 | "--orphans", |
85 | 85 | dest="orphans", | |
86 | def _StatusHelper(self, quiet, local, project): | 86 | action="store_true", |
87 | """Obtains the status for a specific project. | 87 | help="include objects in working directory outside of repo " |
88 | 88 | "projects", | |
89 | Obtains the status for a project, redirecting the output to | 89 | ) |
90 | the specified object. | 90 | |
91 | 91 | def _StatusHelper(self, quiet, local, project): | |
92 | Args: | 92 | """Obtains the status for a specific project. |
93 | quiet: Where to output the status. | 93 | |
94 | local: a boolean, if True, the path is relative to the local | 94 | Obtains the status for a project, redirecting the output to |
95 | (sub)manifest. If false, the path is relative to the | 95 | the specified object. |
96 | outermost manifest. | 96 | |
97 | project: Project to get status of. | 97 | Args: |
98 | 98 | quiet: Where to output the status. | |
99 | Returns: | 99 | local: a boolean, if True, the path is relative to the local |
100 | The status of the project. | 100 | (sub)manifest. If false, the path is relative to the outermost |
101 | """ | 101 | manifest. |
102 | buf = io.StringIO() | 102 | project: Project to get status of. |
103 | ret = project.PrintWorkTreeStatus(quiet=quiet, output_redir=buf, | 103 | |
104 | local=local) | 104 | Returns: |
105 | return (ret, buf.getvalue()) | 105 | The status of the project. |
106 | 106 | """ | |
107 | def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring): | 107 | buf = io.StringIO() |
108 | """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'""" | 108 | ret = project.PrintWorkTreeStatus( |
109 | status_header = ' --\t' | 109 | quiet=quiet, output_redir=buf, local=local |
110 | for item in dirs: | 110 | ) |
111 | if not platform_utils.isdir(item): | 111 | return (ret, buf.getvalue()) |
112 | outstring.append(''.join([status_header, item])) | 112 | |
113 | continue | 113 | def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring): |
114 | if item in proj_dirs: | 114 | """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'""" # noqa: E501 |
115 | continue | 115 | status_header = " --\t" |
116 | if item in proj_dirs_parents: | 116 | for item in dirs: |
117 | self._FindOrphans(glob.glob('%s/.*' % item) + | 117 | if not platform_utils.isdir(item): |
118 | glob.glob('%s/*' % item), | 118 | outstring.append("".join([status_header, item])) |
119 | proj_dirs, proj_dirs_parents, outstring) | 119 | continue |
120 | continue | 120 | if item in proj_dirs: |
121 | outstring.append(''.join([status_header, item, '/'])) | 121 | continue |
122 | 122 | if item in proj_dirs_parents: | |
123 | def Execute(self, opt, args): | 123 | self._FindOrphans( |
124 | all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) | 124 | glob.glob("%s/.*" % item) + glob.glob("%s/*" % item), |
125 | 125 | proj_dirs, | |
126 | def _ProcessResults(_pool, _output, results): | 126 | proj_dirs_parents, |
127 | ret = 0 | 127 | outstring, |
128 | for (state, output) in results: | 128 | ) |
129 | if output: | 129 | continue |
130 | print(output, end='') | 130 | outstring.append("".join([status_header, item, "/"])) |
131 | if state == 'CLEAN': | 131 | |
132 | ret += 1 | 132 | def Execute(self, opt, args): |
133 | return ret | 133 | all_projects = self.GetProjects( |
134 | 134 | args, all_manifests=not opt.this_manifest_only | |
135 | counter = self.ExecuteInParallel( | 135 | ) |
136 | opt.jobs, | 136 | |
137 | functools.partial(self._StatusHelper, opt.quiet, opt.this_manifest_only), | 137 | def _ProcessResults(_pool, _output, results): |
138 | all_projects, | 138 | ret = 0 |
139 | callback=_ProcessResults, | 139 | for state, output in results: |
140 | ordered=True) | 140 | if output: |
141 | 141 | print(output, end="") | |
142 | if not opt.quiet and len(all_projects) == counter: | 142 | if state == "CLEAN": |
143 | print('nothing to commit (working directory clean)') | 143 | ret += 1 |
144 | 144 | return ret | |
145 | if opt.orphans: | 145 | |
146 | proj_dirs = set() | 146 | counter = self.ExecuteInParallel( |
147 | proj_dirs_parents = set() | 147 | opt.jobs, |
148 | for project in self.GetProjects(None, missing_ok=True, all_manifests=not opt.this_manifest_only): | 148 | functools.partial( |
149 | relpath = project.RelPath(local=opt.this_manifest_only) | 149 | self._StatusHelper, opt.quiet, opt.this_manifest_only |
150 | proj_dirs.add(relpath) | 150 | ), |
151 | (head, _tail) = os.path.split(relpath) | 151 | all_projects, |
152 | while head != "": | 152 | callback=_ProcessResults, |
153 | proj_dirs_parents.add(head) | 153 | ordered=True, |
154 | (head, _tail) = os.path.split(head) | 154 | ) |
155 | proj_dirs.add('.repo') | 155 | |
156 | 156 | if not opt.quiet and len(all_projects) == counter: | |
157 | class StatusColoring(Coloring): | 157 | print("nothing to commit (working directory clean)") |
158 | def __init__(self, config): | 158 | |
159 | Coloring.__init__(self, config, 'status') | 159 | if opt.orphans: |
160 | self.project = self.printer('header', attr='bold') | 160 | proj_dirs = set() |
161 | self.untracked = self.printer('untracked', fg='red') | 161 | proj_dirs_parents = set() |
162 | 162 | for project in self.GetProjects( | |
163 | orig_path = os.getcwd() | 163 | None, missing_ok=True, all_manifests=not opt.this_manifest_only |
164 | try: | 164 | ): |
165 | os.chdir(self.manifest.topdir) | 165 | relpath = project.RelPath(local=opt.this_manifest_only) |
166 | 166 | proj_dirs.add(relpath) | |
167 | outstring = [] | 167 | (head, _tail) = os.path.split(relpath) |
168 | self._FindOrphans(glob.glob('.*') + | 168 | while head != "": |
169 | glob.glob('*'), | 169 | proj_dirs_parents.add(head) |
170 | proj_dirs, proj_dirs_parents, outstring) | 170 | (head, _tail) = os.path.split(head) |
171 | 171 | proj_dirs.add(".repo") | |
172 | if outstring: | 172 | |
173 | output = StatusColoring(self.client.globalConfig) | 173 | class StatusColoring(Coloring): |
174 | output.project('Objects not within a project (orphans)') | 174 | def __init__(self, config): |
175 | output.nl() | 175 | Coloring.__init__(self, config, "status") |
176 | for entry in outstring: | 176 | self.project = self.printer("header", attr="bold") |
177 | output.untracked(entry) | 177 | self.untracked = self.printer("untracked", fg="red") |
178 | output.nl() | 178 | |
179 | else: | 179 | orig_path = os.getcwd() |
180 | print('No orphan files or directories') | 180 | try: |
181 | 181 | os.chdir(self.manifest.topdir) | |
182 | finally: | 182 | |
183 | # Restore CWD. | 183 | outstring = [] |
184 | os.chdir(orig_path) | 184 | self._FindOrphans( |
185 | glob.glob(".*") + glob.glob("*"), | ||
186 | proj_dirs, | ||
187 | proj_dirs_parents, | ||
188 | outstring, | ||
189 | ) | ||
190 | |||
191 | if outstring: | ||
192 | output = StatusColoring(self.client.globalConfig) | ||
193 | output.project("Objects not within a project (orphans)") | ||
194 | output.nl() | ||
195 | for entry in outstring: | ||
196 | output.untracked(entry) | ||
197 | output.nl() | ||
198 | else: | ||
199 | print("No orphan files or directories") | ||
200 | |||
201 | finally: | ||
202 | # Restore CWD. | ||
203 | os.chdir(orig_path) | ||
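The orphan scan above walks the top of the working tree: anything that is not a project directory gets reported, and parents of project directories are descended into with glob. A self-contained sketch of that recursion (the project sets are made up):

    import glob
    import os

    def find_orphans(paths, proj_dirs, proj_dirs_parents, out):
        for item in paths:
            if not os.path.isdir(item):
                out.append(" --\t" + item)
                continue
            if item in proj_dirs:
                continue  # owned by a project, ignore
            if item in proj_dirs_parents:
                # Some project lives further down; look inside this directory.
                find_orphans(
                    glob.glob("%s/.*" % item) + glob.glob("%s/*" % item),
                    proj_dirs,
                    proj_dirs_parents,
                    out,
                )
                continue
            out.append(" --\t" + item + "/")

    orphans = []
    find_orphans(glob.glob(".*") + glob.glob("*"), {"src/project"}, {"src"}, orphans)
    print("\n".join(orphans))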
diff --git a/subcmds/sync.py b/subcmds/sync.py index 9a8ca8f7..eabaa68b 100644 --- a/subcmds/sync.py +++ b/subcmds/sync.py | |||
@@ -33,18 +33,21 @@ import xml.parsers.expat | |||
33 | import xmlrpc.client | 33 | import xmlrpc.client |
34 | 34 | ||
35 | try: | 35 | try: |
36 | import threading as _threading | 36 | import threading as _threading |
37 | except ImportError: | 37 | except ImportError: |
38 | import dummy_threading as _threading | 38 | import dummy_threading as _threading |
39 | 39 | ||
40 | try: | 40 | try: |
41 | import resource | 41 | import resource |
42 | |||
43 | def _rlimit_nofile(): | ||
44 | return resource.getrlimit(resource.RLIMIT_NOFILE) | ||
42 | 45 | ||
43 | def _rlimit_nofile(): | ||
44 | return resource.getrlimit(resource.RLIMIT_NOFILE) | ||
45 | except ImportError: | 46 | except ImportError: |
46 | def _rlimit_nofile(): | 47 | |
47 | return (256, 256) | 48 | def _rlimit_nofile(): |
49 | return (256, 256) | ||
50 | |||
48 | 51 | ||
49 | import event_log | 52 | import event_log |
50 | from git_command import git_require | 53 | from git_command import git_require |
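One noticeable black change in this hunk: nested function definitions inside the try/except now get blank lines around them. The optional-import fallback itself, reproduced in isolation so the shape is visible:

    try:
        import resource

        def _rlimit_nofile():
            # Real soft/hard limits on open file descriptors.
            return resource.getrlimit(resource.RLIMIT_NOFILE)

    except ImportError:
        # Platforms without the resource module (e.g. Windows).
        def _rlimit_nofile():
            return (256, 256)

    print(_rlimit_nofile())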
@@ -54,7 +57,12 @@ import git_superproject | |||
54 | import gitc_utils | 57 | import gitc_utils |
55 | from project import Project | 58 | from project import Project |
56 | from project import RemoteSpec | 59 | from project import RemoteSpec |
57 | from command import Command, DEFAULT_LOCAL_JOBS, MirrorSafeCommand, WORKER_BATCH_SIZE | 60 | from command import ( |
61 | Command, | ||
62 | DEFAULT_LOCAL_JOBS, | ||
63 | MirrorSafeCommand, | ||
64 | WORKER_BATCH_SIZE, | ||
65 | ) | ||
58 | from error import RepoChangedException, GitError | 66 | from error import RepoChangedException, GitError |
59 | import platform_utils | 67 | import platform_utils |
60 | from project import SyncBuffer | 68 | from project import SyncBuffer |
@@ -68,70 +76,74 @@ _ONE_DAY_S = 24 * 60 * 60 | |||
68 | 76 | ||
69 | # Env var to implicitly turn auto-gc back on. This was added to allow a user to | 77 | # Env var to implicitly turn auto-gc back on. This was added to allow a user to |
70 | # revert a change in default behavior in v2.29.9. Remove after 2023-04-01. | 78 | # revert a change in default behavior in v2.29.9. Remove after 2023-04-01. |
71 | _REPO_AUTO_GC = 'REPO_AUTO_GC' | 79 | _REPO_AUTO_GC = "REPO_AUTO_GC" |
72 | _AUTO_GC = os.environ.get(_REPO_AUTO_GC) == '1' | 80 | _AUTO_GC = os.environ.get(_REPO_AUTO_GC) == "1" |
73 | 81 | ||
74 | 82 | ||
75 | class _FetchOneResult(NamedTuple): | 83 | class _FetchOneResult(NamedTuple): |
76 | """_FetchOne return value. | 84 | """_FetchOne return value. |
77 | 85 | ||
78 | Attributes: | 86 | Attributes: |
79 | success (bool): True if successful. | 87 | success (bool): True if successful. |
80 | project (Project): The fetched project. | 88 | project (Project): The fetched project. |
81 | start (float): The starting time.time(). | 89 | start (float): The starting time.time(). |
82 | finish (float): The ending time.time(). | 90 | finish (float): The ending time.time(). |
83 | remote_fetched (bool): True if the remote was actually queried. | 91 | remote_fetched (bool): True if the remote was actually queried. |
84 | """ | 92 | """ |
85 | success: bool | 93 | |
86 | project: Project | 94 | success: bool |
87 | start: float | 95 | project: Project |
88 | finish: float | 96 | start: float |
89 | remote_fetched: bool | 97 | finish: float |
98 | remote_fetched: bool | ||
90 | 99 | ||
91 | 100 | ||
92 | class _FetchResult(NamedTuple): | 101 | class _FetchResult(NamedTuple): |
93 | """_Fetch return value. | 102 | """_Fetch return value. |
103 | |||
104 | Attributes: | ||
105 | success (bool): True if successful. | ||
106 | projects (Set[str]): The names of the git directories of fetched projects. | ||
107 | """ | ||
94 | 108 | ||
95 | Attributes: | 109 | success: bool |
96 | success (bool): True if successful. | 110 | projects: Set[str] |
97 | projects (Set[str]): The names of the git directories of fetched projects. | ||
98 | """ | ||
99 | success: bool | ||
100 | projects: Set[str] | ||
101 | 111 | ||
102 | 112 | ||
103 | class _FetchMainResult(NamedTuple): | 113 | class _FetchMainResult(NamedTuple): |
104 | """_FetchMain return value. | 114 | """_FetchMain return value. |
115 | |||
116 | Attributes: | ||
117 | all_projects (List[Project]): The fetched projects. | ||
118 | """ | ||
105 | 119 | ||
106 | Attributes: | 120 | all_projects: List[Project] |
107 | all_projects (List[Project]): The fetched projects. | ||
108 | """ | ||
109 | all_projects: List[Project] | ||
110 | 121 | ||
111 | 122 | ||
112 | class _CheckoutOneResult(NamedTuple): | 123 | class _CheckoutOneResult(NamedTuple): |
113 | """_CheckoutOne return value. | 124 | """_CheckoutOne return value. |
125 | |||
126 | Attributes: | ||
127 | success (bool): True if successful. | ||
128 | project (Project): The project. | ||
129 | start (float): The starting time.time(). | ||
130 | finish (float): The ending time.time(). | ||
131 | """ | ||
114 | 132 | ||
115 | Attributes: | 133 | success: bool |
116 | success (bool): True if successful. | 134 | project: Project |
117 | project (Project): The project. | 135 | start: float |
118 | start (float): The starting time.time(). | 136 | finish: float |
119 | finish (float): The ending time.time(). | ||
120 | """ | ||
121 | success: bool | ||
122 | project: Project | ||
123 | start: float | ||
124 | finish: float | ||
125 | 137 | ||
126 | 138 | ||
127 | class Sync(Command, MirrorSafeCommand): | 139 | class Sync(Command, MirrorSafeCommand): |
128 | COMMON = True | 140 | COMMON = True |
129 | MULTI_MANIFEST_SUPPORT = True | 141 | MULTI_MANIFEST_SUPPORT = True |
130 | helpSummary = "Update working tree to the latest revision" | 142 | helpSummary = "Update working tree to the latest revision" |
131 | helpUsage = """ | 143 | helpUsage = """ |
132 | %prog [<project>...] | 144 | %prog [<project>...] |
133 | """ | 145 | """ |
134 | helpDescription = """ | 146 | helpDescription = """ |
135 | The '%prog' command synchronizes local project directories | 147 | The '%prog' command synchronizes local project directories |
136 | with the remote repositories specified in the manifest. If a local | 148 | with the remote repositories specified in the manifest. If a local |
137 | project does not yet exist, it will clone a new local directory from | 149 | project does not yet exist, it will clone a new local directory from |
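The result classes reflowed earlier in this hunk are plain typing.NamedTuples: a docstring describing the attributes, then annotated fields. The same shape in a minimal standalone example (field values are invented):

    from typing import NamedTuple

    class FetchOneResult(NamedTuple):
        """Per-project fetch result (illustrative stand-in for the real class)."""

        success: bool
        project: str  # the real code stores a Project object here
        start: float
        finish: float
        remote_fetched: bool

    r = FetchOneResult(True, "platform/build", 0.0, 1.5, True)
    print(r.success, r.finish - r.start)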
@@ -230,1293 +242,1604 @@ If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or | |||
230 | later is required to fix a server side protocol bug. | 242 | later is required to fix a server side protocol bug. |
231 | 243 | ||
232 | """ | 244 | """ |
233 | # A value of 0 means we want parallel jobs, but we'll determine the default | 245 | # A value of 0 means we want parallel jobs, but we'll determine the default |
234 | # value later on. | 246 | # value later on. |
235 | PARALLEL_JOBS = 0 | 247 | PARALLEL_JOBS = 0 |
236 | 248 | ||
237 | def _Options(self, p, show_smart=True): | 249 | def _Options(self, p, show_smart=True): |
238 | p.add_option('--jobs-network', default=None, type=int, metavar='JOBS', | 250 | p.add_option( |
239 | help='number of network jobs to run in parallel (defaults to --jobs or 1)') | 251 | "--jobs-network", |
240 | p.add_option('--jobs-checkout', default=None, type=int, metavar='JOBS', | 252 | default=None, |
241 | help='number of local checkout jobs to run in parallel (defaults to --jobs or ' | 253 | type=int, |
242 | f'{DEFAULT_LOCAL_JOBS})') | 254 | metavar="JOBS", |
243 | 255 | help="number of network jobs to run in parallel (defaults to " | |
244 | p.add_option('-f', '--force-broken', | 256 | "--jobs or 1)", |
245 | dest='force_broken', action='store_true', | 257 | ) |
246 | help='obsolete option (to be deleted in the future)') | 258 | p.add_option( |
247 | p.add_option('--fail-fast', | 259 | "--jobs-checkout", |
248 | dest='fail_fast', action='store_true', | 260 | default=None, |
249 | help='stop syncing after first error is hit') | 261 | type=int, |
250 | p.add_option('--force-sync', | 262 | metavar="JOBS", |
251 | dest='force_sync', action='store_true', | 263 | help="number of local checkout jobs to run in parallel (defaults " |
252 | help="overwrite an existing git directory if it needs to " | 264 | f"to --jobs or {DEFAULT_LOCAL_JOBS})", |
253 | "point to a different object directory. WARNING: this " | 265 | ) |
254 | "may cause loss of data") | 266 | |
255 | p.add_option('--force-remove-dirty', | 267 | p.add_option( |
256 | dest='force_remove_dirty', action='store_true', | 268 | "-f", |
257 | help="force remove projects with uncommitted modifications if " | 269 | "--force-broken", |
258 | "projects no longer exist in the manifest. " | 270 | dest="force_broken", |
259 | "WARNING: this may cause loss of data") | 271 | action="store_true", |
260 | p.add_option('-l', '--local-only', | 272 | help="obsolete option (to be deleted in the future)", |
261 | dest='local_only', action='store_true', | 273 | ) |
262 | help="only update working tree, don't fetch") | 274 | p.add_option( |
263 | p.add_option('--no-manifest-update', '--nmu', | 275 | "--fail-fast", |
264 | dest='mp_update', action='store_false', default='true', | 276 | dest="fail_fast", |
265 | help='use the existing manifest checkout as-is. ' | 277 | action="store_true", |
266 | '(do not update to the latest revision)') | 278 | help="stop syncing after first error is hit", |
267 | p.add_option('-n', '--network-only', | 279 | ) |
268 | dest='network_only', action='store_true', | 280 | p.add_option( |
269 | help="fetch only, don't update working tree") | 281 | "--force-sync", |
270 | p.add_option('-d', '--detach', | 282 | dest="force_sync", |
271 | dest='detach_head', action='store_true', | 283 | action="store_true", |
272 | help='detach projects back to manifest revision') | 284 | help="overwrite an existing git directory if it needs to " |
273 | p.add_option('-c', '--current-branch', | 285 | "point to a different object directory. WARNING: this " |
274 | dest='current_branch_only', action='store_true', | 286 | "may cause loss of data", |
275 | help='fetch only current branch from server') | 287 | ) |
276 | p.add_option('--no-current-branch', | 288 | p.add_option( |
277 | dest='current_branch_only', action='store_false', | 289 | "--force-remove-dirty", |
278 | help='fetch all branches from server') | 290 | dest="force_remove_dirty", |
279 | p.add_option('-m', '--manifest-name', | 291 | action="store_true", |
280 | dest='manifest_name', | 292 | help="force remove projects with uncommitted modifications if " |
281 | help='temporary manifest to use for this sync', metavar='NAME.xml') | 293 | "projects no longer exist in the manifest. " |
282 | p.add_option('--clone-bundle', action='store_true', | 294 | "WARNING: this may cause loss of data", |
283 | help='enable use of /clone.bundle on HTTP/HTTPS') | 295 | ) |
284 | p.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false', | 296 | p.add_option( |
285 | help='disable use of /clone.bundle on HTTP/HTTPS') | 297 | "-l", |
286 | p.add_option('-u', '--manifest-server-username', action='store', | 298 | "--local-only", |
287 | dest='manifest_server_username', | 299 | dest="local_only", |
288 | help='username to authenticate with the manifest server') | 300 | action="store_true", |
289 | p.add_option('-p', '--manifest-server-password', action='store', | 301 | help="only update working tree, don't fetch", |
290 | dest='manifest_server_password', | 302 | ) |
291 | help='password to authenticate with the manifest server') | 303 | p.add_option( |
292 | p.add_option('--fetch-submodules', | 304 | "--no-manifest-update", |
293 | dest='fetch_submodules', action='store_true', | 305 | "--nmu", |
294 | help='fetch submodules from server') | 306 | dest="mp_update", |
295 | p.add_option('--use-superproject', action='store_true', | 307 | action="store_false", |
296 | help='use the manifest superproject to sync projects; implies -c') | 308 | default="true", |
297 | p.add_option('--no-use-superproject', action='store_false', | 309 | help="use the existing manifest checkout as-is. " |
298 | dest='use_superproject', | 310 | "(do not update to the latest revision)", |
299 | help='disable use of manifest superprojects') | 311 | ) |
300 | p.add_option('--tags', action='store_true', | 312 | p.add_option( |
301 | help='fetch tags') | 313 | "-n", |
302 | p.add_option('--no-tags', | 314 | "--network-only", |
303 | dest='tags', action='store_false', | 315 | dest="network_only", |
304 | help="don't fetch tags (default)") | 316 | action="store_true", |
305 | p.add_option('--optimized-fetch', | 317 | help="fetch only, don't update working tree", |
306 | dest='optimized_fetch', action='store_true', | 318 | ) |
307 | help='only fetch projects fixed to sha1 if revision does not exist locally') | 319 | p.add_option( |
308 | p.add_option('--retry-fetches', | 320 | "-d", |
309 | default=0, action='store', type='int', | 321 | "--detach", |
310 | help='number of times to retry fetches on transient errors') | 322 | dest="detach_head", |
311 | p.add_option('--prune', action='store_true', | 323 | action="store_true", |
312 | help='delete refs that no longer exist on the remote (default)') | 324 | help="detach projects back to manifest revision", |
313 | p.add_option('--no-prune', dest='prune', action='store_false', | 325 | ) |
314 | help='do not delete refs that no longer exist on the remote') | 326 | p.add_option( |
315 | p.add_option('--auto-gc', action='store_true', default=None, | 327 | "-c", |
316 | help='run garbage collection on all synced projects') | 328 | "--current-branch", |
317 | p.add_option('--no-auto-gc', dest='auto_gc', action='store_false', | 329 | dest="current_branch_only", |
318 | help='do not run garbage collection on any projects (default)') | 330 | action="store_true", |
319 | if show_smart: | 331 | help="fetch only current branch from server", |
320 | p.add_option('-s', '--smart-sync', | 332 | ) |
321 | dest='smart_sync', action='store_true', | 333 | p.add_option( |
322 | help='smart sync using manifest from the latest known good build') | 334 | "--no-current-branch", |
323 | p.add_option('-t', '--smart-tag', | 335 | dest="current_branch_only", |
324 | dest='smart_tag', action='store', | 336 | action="store_false", |
325 | help='smart sync using manifest from a known tag') | 337 | help="fetch all branches from server", |
326 | 338 | ) | |
327 | g = p.add_option_group('repo Version options') | 339 | p.add_option( |
328 | g.add_option('--no-repo-verify', | 340 | "-m", |
329 | dest='repo_verify', default=True, action='store_false', | 341 | "--manifest-name", |
330 | help='do not verify repo source code') | 342 | dest="manifest_name", |
331 | g.add_option('--repo-upgraded', | 343 | help="temporary manifest to use for this sync", |
332 | dest='repo_upgraded', action='store_true', | 344 | metavar="NAME.xml", |
333 | help=SUPPRESS_HELP) | 345 | ) |
334 | 346 | p.add_option( | |
335 | def _GetBranch(self, manifest_project): | 347 | "--clone-bundle", |
336 | """Returns the branch name for getting the approved smartsync manifest. | 348 | action="store_true", |
337 | 349 | help="enable use of /clone.bundle on HTTP/HTTPS", | |
338 | Args: | 350 | ) |
339 | manifest_project: the manifestProject to query. | 351 | p.add_option( |
340 | """ | 352 | "--no-clone-bundle", |
341 | b = manifest_project.GetBranch(manifest_project.CurrentBranch) | 353 | dest="clone_bundle", |
342 | branch = b.merge | 354 | action="store_false", |
343 | if branch.startswith(R_HEADS): | 355 | help="disable use of /clone.bundle on HTTP/HTTPS", |
344 | branch = branch[len(R_HEADS):] | 356 | ) |
345 | return branch | 357 | p.add_option( |
346 | 358 | "-u", | |
347 | def _GetCurrentBranchOnly(self, opt, manifest): | 359 | "--manifest-server-username", |
348 | """Returns whether current-branch or use-superproject options are enabled. | 360 | action="store", |
349 | 361 | dest="manifest_server_username", | |
350 | Args: | 362 | help="username to authenticate with the manifest server", |
351 | opt: Program options returned from optparse. See _Options(). | 363 | ) |
352 | manifest: The manifest to use. | 364 | p.add_option( |
353 | 365 | "-p", | |
354 | Returns: | 366 | "--manifest-server-password", |
355 | True if a superproject is requested, otherwise the value of the | 367 | action="store", |
356 | current_branch option (True, False or None). | 368 | dest="manifest_server_password", |
357 | """ | 369 | help="password to authenticate with the manifest server", |
358 | return git_superproject.UseSuperproject(opt.use_superproject, manifest) or opt.current_branch_only | 370 | ) |
359 | 371 | p.add_option( | |
360 | def _UpdateProjectsRevisionId(self, opt, args, superproject_logging_data, | 372 | "--fetch-submodules", |
361 | manifest): | 373 | dest="fetch_submodules", |
362 | """Update revisionId of projects with the commit hash from the superproject. | 374 | action="store_true", |
363 | 375 | help="fetch submodules from server", | |
364 | This function updates each project's revisionId with the commit hash from | 376 | ) |
365 | the superproject. It writes the updated manifest into a file and reloads | 377 | p.add_option( |
366 | the manifest from it. When appropriate, sub manifests are also processed. | 378 | "--use-superproject", |
367 | 379 | action="store_true", | |
368 | Args: | 380 | help="use the manifest superproject to sync projects; implies -c", |
369 | opt: Program options returned from optparse. See _Options(). | 381 | ) |
370 | args: Arguments to pass to GetProjects. See the GetProjects | 382 | p.add_option( |
371 | docstring for details. | 383 | "--no-use-superproject", |
372 | superproject_logging_data: A dictionary of superproject data to log. | 384 | action="store_false", |
373 | manifest: The manifest to use. | 385 | dest="use_superproject", |
374 | """ | 386 | help="disable use of manifest superprojects", |
375 | have_superproject = manifest.superproject or any( | 387 | ) |
376 | m.superproject for m in manifest.all_children) | 388 | p.add_option("--tags", action="store_true", help="fetch tags") |
377 | if not have_superproject: | 389 | p.add_option( |
378 | return | 390 | "--no-tags", |
379 | 391 | dest="tags", | |
380 | if opt.local_only and manifest.superproject: | 392 | action="store_false", |
381 | manifest_path = manifest.superproject.manifest_path | 393 | help="don't fetch tags (default)", |
382 | if manifest_path: | 394 | ) |
383 | self._ReloadManifest(manifest_path, manifest) | 395 | p.add_option( |
384 | return | 396 | "--optimized-fetch", |
385 | 397 | dest="optimized_fetch", | |
386 | all_projects = self.GetProjects(args, | 398 | action="store_true", |
387 | missing_ok=True, | 399 | help="only fetch projects fixed to sha1 if revision does not exist " |
388 | submodules_ok=opt.fetch_submodules, | 400 | "locally", |
389 | manifest=manifest, | 401 | ) |
390 | all_manifests=not opt.this_manifest_only) | 402 | p.add_option( |
391 | 403 | "--retry-fetches", | |
392 | per_manifest = collections.defaultdict(list) | 404 | default=0, |
393 | manifest_paths = {} | 405 | action="store", |
394 | if opt.this_manifest_only: | 406 | type="int", |
395 | per_manifest[manifest.path_prefix] = all_projects | 407 | help="number of times to retry fetches on transient errors", |
396 | else: | 408 | ) |
397 | for p in all_projects: | 409 | p.add_option( |
398 | per_manifest[p.manifest.path_prefix].append(p) | 410 | "--prune", |
399 | 411 | action="store_true", | |
400 | superproject_logging_data = {} | 412 | help="delete refs that no longer exist on the remote (default)", |
401 | need_unload = False | 413 | ) |
402 | for m in self.ManifestList(opt): | 414 | p.add_option( |
403 | if not m.path_prefix in per_manifest: | 415 | "--no-prune", |
404 | continue | 416 | dest="prune", |
405 | use_super = git_superproject.UseSuperproject(opt.use_superproject, m) | 417 | action="store_false", |
406 | if superproject_logging_data: | 418 | help="do not delete refs that no longer exist on the remote", |
407 | superproject_logging_data['multimanifest'] = True | 419 | ) |
408 | superproject_logging_data.update( | 420 | p.add_option( |
409 | superproject=use_super, | 421 | "--auto-gc", |
410 | haslocalmanifests=bool(m.HasLocalManifests), | 422 | action="store_true", |
411 | hassuperprojecttag=bool(m.superproject), | 423 | default=None, |
412 | ) | 424 | help="run garbage collection on all synced projects", |
413 | if use_super and (m.IsMirror or m.IsArchive): | 425 | ) |
414 | # Don't use superproject, because we have no working tree. | 426 | p.add_option( |
415 | use_super = False | 427 | "--no-auto-gc", |
416 | superproject_logging_data['superproject'] = False | 428 | dest="auto_gc", |
417 | superproject_logging_data['noworktree'] = True | 429 | action="store_false", |
418 | if opt.use_superproject is not False: | 430 | help="do not run garbage collection on any projects (default)", |
419 | print(f'{m.path_prefix}: not using superproject because there is no ' | 431 | ) |
420 | 'working tree.') | 432 | if show_smart: |
421 | 433 | p.add_option( | |
422 | if not use_super: | 434 | "-s", |
423 | continue | 435 | "--smart-sync", |
424 | m.superproject.SetQuiet(opt.quiet) | 436 | dest="smart_sync", |
425 | print_messages = git_superproject.PrintMessages(opt.use_superproject, m) | 437 | action="store_true", |
426 | m.superproject.SetPrintMessages(print_messages) | 438 | help="smart sync using manifest from the latest known good " |
427 | update_result = m.superproject.UpdateProjectsRevisionId( | 439 | "build", |
428 | per_manifest[m.path_prefix], git_event_log=self.git_event_log) | 440 | ) |
429 | manifest_path = update_result.manifest_path | 441 | p.add_option( |
430 | superproject_logging_data['updatedrevisionid'] = bool(manifest_path) | 442 | "-t", |
431 | if manifest_path: | 443 | "--smart-tag", |
432 | m.SetManifestOverride(manifest_path) | 444 | dest="smart_tag", |
433 | need_unload = True | 445 | action="store", |
434 | else: | 446 | help="smart sync using manifest from a known tag", |
435 | if print_messages: | 447 | ) |
436 | print(f'{m.path_prefix}: warning: Update of revisionId from ' | 448 | |
437 | 'superproject has failed, repo sync will not use superproject ' | 449 | g = p.add_option_group("repo Version options") |
438 | 'to fetch the source. ', | 450 | g.add_option( |
439 | 'Please resync with the --no-use-superproject option to avoid ' | 451 | "--no-repo-verify", |
440 | 'this repo warning.', | 452 | dest="repo_verify", |
441 | file=sys.stderr) | 453 | default=True, |
442 | if update_result.fatal and opt.use_superproject is not None: | 454 | action="store_false", |
443 | sys.exit(1) | 455 | help="do not verify repo source code", |
444 | if need_unload: | 456 | ) |
445 | m.outer_client.manifest.Unload() | 457 | g.add_option( |
446 | 458 | "--repo-upgraded", | |
447 | def _FetchProjectList(self, opt, projects): | 459 | dest="repo_upgraded", |
448 | """Main function of the fetch worker. | 460 | action="store_true", |
449 | 461 | help=SUPPRESS_HELP, | |
450 | The projects we're given share the same underlying git object store, so we | 462 | ) |
451 | have to fetch them in serial. | ||
452 | |||
453 | Delegates most of the work to _FetchHelper. | ||
454 | |||
455 | Args: | ||
456 | opt: Program options returned from optparse. See _Options(). | ||
457 | projects: Projects to fetch. | ||
458 | """ | ||
459 | return [self._FetchOne(opt, x) for x in projects] | ||
460 | 463 | ||
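The option-definition hunk above is representative of what this change does throughout sync.py: a long p.add_option(...) call that no longer fits on one line is exploded to one argument per line with a trailing comma. A minimal, hypothetical optparse sketch of the resulting style, mirroring the --local-only definition shown above:

    # Illustrative only; not part of the diff itself.
    import optparse

    p = optparse.OptionParser()
    p.add_option(
        "--local-only",
        dest="local_only",
        action="store_true",
        help="only update working tree, don't fetch",
    )
    opts, args = p.parse_args(["--local-only"])
    assert opts.local_only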
461 | def _FetchOne(self, opt, project): | 464 | def _GetBranch(self, manifest_project): |
462 | """Fetch git objects for a single project. | 465 | """Returns the branch name for getting the approved smartsync manifest. |
466 | |||
467 | Args: | ||
468 | manifest_project: The manifestProject to query. | ||
469 | """ | ||
470 | b = manifest_project.GetBranch(manifest_project.CurrentBranch) | ||
471 | branch = b.merge | ||
472 | if branch.startswith(R_HEADS): | ||
473 | branch = branch[len(R_HEADS) :] | ||
474 | return branch | ||
475 | |||
476 | def _GetCurrentBranchOnly(self, opt, manifest): | ||
477 | """Returns whether current-branch or use-superproject options are | ||
478 | enabled. | ||
479 | |||
480 | Args: | ||
481 | opt: Program options returned from optparse. See _Options(). | ||
482 | manifest: The manifest to use. | ||
483 | |||
484 | Returns: | ||
485 | True if a superproject is requested, otherwise the value of the | ||
486 | current_branch option (True, False or None). | ||
487 | """ | ||
488 | return ( | ||
489 | git_superproject.UseSuperproject(opt.use_superproject, manifest) | ||
490 | or opt.current_branch_only | ||
491 | ) | ||
463 | 492 | ||
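_GetBranch above returns the short branch name by stripping the heads prefix from the tracked merge ref, and _GetCurrentBranchOnly treats a requested superproject as implying current-branch-only. A small sketch of the prefix stripping, assuming R_HEADS carries the usual "refs/heads/" value:

    # Sketch only; R_HEADS is assumed to be the standard refs/heads/ prefix.
    R_HEADS = "refs/heads/"

    def branch_name(merge_ref):
        """Return the short branch name for a tracking ref like refs/heads/main."""
        if merge_ref.startswith(R_HEADS):
            return merge_ref[len(R_HEADS):]
        return merge_ref

    assert branch_name("refs/heads/main") == "main"
    assert branch_name("main") == "main"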
464 | Args: | 493 | def _UpdateProjectsRevisionId( |
465 | opt: Program options returned from optparse. See _Options(). | 494 | self, opt, args, superproject_logging_data, manifest |
466 | project: Project object for the project to fetch. | 495 | ): |
496 | """Update revisionId of projects with the commit from the superproject. | ||
497 | |||
498 | This function updates each project's revisionId with the commit hash | ||
499 | from the superproject. It writes the updated manifest into a file and | ||
500 | reloads the manifest from it. When appropriate, sub manifests are also | ||
501 | processed. | ||
502 | |||
503 | Args: | ||
504 | opt: Program options returned from optparse. See _Options(). | ||
505 | args: Arguments to pass to GetProjects. See the GetProjects | ||
506 | docstring for details. | ||
507 | superproject_logging_data: A dictionary of superproject data to log. | ||
508 | manifest: The manifest to use. | ||
509 | """ | ||
510 | have_superproject = manifest.superproject or any( | ||
511 | m.superproject for m in manifest.all_children | ||
512 | ) | ||
513 | if not have_superproject: | ||
514 | return | ||
515 | |||
516 | if opt.local_only and manifest.superproject: | ||
517 | manifest_path = manifest.superproject.manifest_path | ||
518 | if manifest_path: | ||
519 | self._ReloadManifest(manifest_path, manifest) | ||
520 | return | ||
521 | |||
522 | all_projects = self.GetProjects( | ||
523 | args, | ||
524 | missing_ok=True, | ||
525 | submodules_ok=opt.fetch_submodules, | ||
526 | manifest=manifest, | ||
527 | all_manifests=not opt.this_manifest_only, | ||
528 | ) | ||
467 | 529 | ||
468 | Returns: | 530 | per_manifest = collections.defaultdict(list) |
469 | Whether the fetch was successful. | 531 | if opt.this_manifest_only: |
470 | """ | 532 | per_manifest[manifest.path_prefix] = all_projects |
471 | start = time.time() | 533 | else: |
472 | success = False | 534 | for p in all_projects: |
473 | remote_fetched = False | 535 | per_manifest[p.manifest.path_prefix].append(p) |
474 | buf = io.StringIO() | 536 | |
475 | try: | 537 | superproject_logging_data = {} |
476 | sync_result = project.Sync_NetworkHalf( | 538 | need_unload = False |
477 | quiet=opt.quiet, | 539 | for m in self.ManifestList(opt): |
478 | verbose=opt.verbose, | 540 | if m.path_prefix not in per_manifest: |
479 | output_redir=buf, | 541 | continue |
480 | current_branch_only=self._GetCurrentBranchOnly(opt, project.manifest), | 542 | use_super = git_superproject.UseSuperproject( |
481 | force_sync=opt.force_sync, | 543 | opt.use_superproject, m |
482 | clone_bundle=opt.clone_bundle, | 544 | ) |
483 | tags=opt.tags, archive=project.manifest.IsArchive, | 545 | if superproject_logging_data: |
484 | optimized_fetch=opt.optimized_fetch, | 546 | superproject_logging_data["multimanifest"] = True |
485 | retry_fetches=opt.retry_fetches, | 547 | superproject_logging_data.update( |
486 | prune=opt.prune, | 548 | superproject=use_super, |
487 | ssh_proxy=self.ssh_proxy, | 549 | haslocalmanifests=bool(m.HasLocalManifests), |
488 | clone_filter=project.manifest.CloneFilter, | 550 | hassuperprojecttag=bool(m.superproject), |
489 | partial_clone_exclude=project.manifest.PartialCloneExclude) | 551 | ) |
490 | success = sync_result.success | 552 | if use_super and (m.IsMirror or m.IsArchive): |
491 | remote_fetched = sync_result.remote_fetched | 553 | # Don't use superproject, because we have no working tree. |
492 | 554 | use_super = False | |
493 | output = buf.getvalue() | 555 | superproject_logging_data["superproject"] = False |
494 | if (opt.verbose or not success) and output: | 556 | superproject_logging_data["noworktree"] = True |
495 | print('\n' + output.rstrip()) | 557 | if opt.use_superproject is not False: |
496 | 558 | print( | |
497 | if not success: | 559 | f"{m.path_prefix}: not using superproject because " |
498 | print('error: Cannot fetch %s from %s' | 560 | "there is no working tree." |
499 | % (project.name, project.remote.url), | 561 | ) |
500 | file=sys.stderr) | 562 | |
501 | except KeyboardInterrupt: | 563 | if not use_super: |
502 | print(f'Keyboard interrupt while processing {project.name}') | 564 | continue |
503 | except GitError as e: | 565 | m.superproject.SetQuiet(opt.quiet) |
504 | print('error.GitError: Cannot fetch %s' % str(e), file=sys.stderr) | 566 | print_messages = git_superproject.PrintMessages( |
505 | except Exception as e: | 567 | opt.use_superproject, m |
506 | print('error: Cannot fetch %s (%s: %s)' | 568 | ) |
507 | % (project.name, type(e).__name__, str(e)), file=sys.stderr) | 569 | m.superproject.SetPrintMessages(print_messages) |
508 | raise | 570 | update_result = m.superproject.UpdateProjectsRevisionId( |
509 | 571 | per_manifest[m.path_prefix], git_event_log=self.git_event_log | |
510 | finish = time.time() | 572 | ) |
511 | return _FetchOneResult(success, project, start, finish, remote_fetched) | 573 | manifest_path = update_result.manifest_path |
512 | 574 | superproject_logging_data["updatedrevisionid"] = bool(manifest_path) | |
513 | @classmethod | 575 | if manifest_path: |
514 | def _FetchInitChild(cls, ssh_proxy): | 576 | m.SetManifestOverride(manifest_path) |
515 | cls.ssh_proxy = ssh_proxy | 577 | need_unload = True |
516 | 578 | else: | |
517 | def _Fetch(self, projects, opt, err_event, ssh_proxy): | 579 | if print_messages: |
518 | ret = True | 580 | print( |
519 | 581 | f"{m.path_prefix}: warning: Update of revisionId from " | |
520 | jobs = opt.jobs_network | 582 | "superproject has failed, repo sync will not use " |
521 | fetched = set() | 583 | "superproject to fetch the source. ", |
522 | remote_fetched = set() | 584 | "Please resync with the --no-use-superproject option " |
523 | pm = Progress('Fetching', len(projects), delay=False, quiet=opt.quiet) | 585 | "to avoid this repo warning.", |
524 | 586 | file=sys.stderr, | |
525 | objdir_project_map = dict() | 587 | ) |
526 | for project in projects: | 588 | if update_result.fatal and opt.use_superproject is not None: |
527 | objdir_project_map.setdefault(project.objdir, []).append(project) | 589 | sys.exit(1) |
528 | projects_list = list(objdir_project_map.values()) | 590 | if need_unload: |
529 | 591 | m.outer_client.manifest.Unload() | |
530 | def _ProcessResults(results_sets): | 592 | |
531 | ret = True | 593 | def _FetchProjectList(self, opt, projects): |
532 | for results in results_sets: | 594 | """Main function of the fetch worker. |
533 | for result in results: | 595 | |
534 | success = result.success | 596 | The projects we're given share the same underlying git object store, so |
535 | project = result.project | 597 | we have to fetch them in serial. |
536 | start = result.start | 598 | |
537 | finish = result.finish | 599 | Delegates most of the work to _FetchHelper. |
538 | self._fetch_times.Set(project, finish - start) | 600 | |
539 | self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK, | 601 | Args: |
540 | start, finish, success) | 602 | opt: Program options returned from optparse. See _Options(). |
541 | if result.remote_fetched: | 603 | projects: Projects to fetch. |
542 | remote_fetched.add(project) | 604 | """ |
543 | # Check for any errors before running any more tasks. | 605 | return [self._FetchOne(opt, x) for x in projects] |
544 | # ...we'll let existing jobs finish, though. | 606 | |
545 | if not success: | 607 | def _FetchOne(self, opt, project): |
546 | ret = False | 608 | """Fetch git objects for a single project. |
547 | else: | 609 | |
548 | fetched.add(project.gitdir) | 610 | Args: |
549 | pm.update(msg=f'Last synced: {project.name}') | 611 | opt: Program options returned from optparse. See _Options(). |
550 | if not ret and opt.fail_fast: | 612 | project: Project object for the project to fetch. |
551 | break | 613 | |
552 | return ret | 614 | Returns: |
553 | 615 | Whether the fetch was successful. | |
554 | # We pass the ssh proxy settings via the class. This allows multiprocessing | 616 | """ |
555 | # to pickle it up when spawning children. We can't pass it as an argument | 617 | start = time.time() |
556 | # to _FetchProjectList below as multiprocessing is unable to pickle those. | 618 | success = False |
557 | Sync.ssh_proxy = None | 619 | remote_fetched = False |
558 | 620 | buf = io.StringIO() | |
559 | # NB: Multiprocessing is heavy, so don't spin it up for one job. | 621 | try: |
560 | if len(projects_list) == 1 or jobs == 1: | 622 | sync_result = project.Sync_NetworkHalf( |
561 | self._FetchInitChild(ssh_proxy) | 623 | quiet=opt.quiet, |
562 | if not _ProcessResults(self._FetchProjectList(opt, x) for x in projects_list): | 624 | verbose=opt.verbose, |
563 | ret = False | 625 | output_redir=buf, |
564 | else: | 626 | current_branch_only=self._GetCurrentBranchOnly( |
565 | # Favor throughput over responsiveness when quiet. It seems that imap() | 627 | opt, project.manifest |
566 | # will yield results in batches relative to chunksize, so even as the | 628 | ), |
567 | # children finish a sync, we won't see the result until one child finishes | 629 | force_sync=opt.force_sync, |
568 | # ~chunksize jobs. When using a large --jobs with large chunksize, this | 630 | clone_bundle=opt.clone_bundle, |
569 | # can be jarring as there will be a large initial delay where repo looks | 631 | tags=opt.tags, |
570 | # like it isn't doing anything and sits at 0%, but then suddenly completes | 632 | archive=project.manifest.IsArchive, |
571 | # a lot of jobs all at once. Since this code is more network bound, we | 633 | optimized_fetch=opt.optimized_fetch, |
572 | # can accept a bit more CPU overhead with a smaller chunksize so that the | 634 | retry_fetches=opt.retry_fetches, |
573 | # user sees more immediate & continuous feedback. | 635 | prune=opt.prune, |
574 | if opt.quiet: | 636 | ssh_proxy=self.ssh_proxy, |
575 | chunksize = WORKER_BATCH_SIZE | 637 | clone_filter=project.manifest.CloneFilter, |
576 | else: | 638 | partial_clone_exclude=project.manifest.PartialCloneExclude, |
577 | pm.update(inc=0, msg='warming up') | 639 | ) |
578 | chunksize = 4 | 640 | success = sync_result.success |
579 | with multiprocessing.Pool(jobs, initializer=self._FetchInitChild, | 641 | remote_fetched = sync_result.remote_fetched |
580 | initargs=(ssh_proxy,)) as pool: | 642 | |
581 | results = pool.imap_unordered( | 643 | output = buf.getvalue() |
582 | functools.partial(self._FetchProjectList, opt), | 644 | if (opt.verbose or not success) and output: |
583 | projects_list, | 645 | print("\n" + output.rstrip()) |
584 | chunksize=chunksize) | 646 | |
585 | if not _ProcessResults(results): | 647 | if not success: |
586 | ret = False | 648 | print( |
587 | pool.close() | 649 | "error: Cannot fetch %s from %s" |
588 | 650 | % (project.name, project.remote.url), | |
589 | # Cleanup the reference now that we're done with it, and we're going to | 651 | file=sys.stderr, |
590 | # release any resources it points to. If we don't, later multiprocessing | 652 | ) |
591 | # usage (e.g. checkouts) will try to pickle and then crash. | 653 | except KeyboardInterrupt: |
592 | del Sync.ssh_proxy | 654 | print(f"Keyboard interrupt while processing {project.name}") |
593 | 655 | except GitError as e: | |
594 | pm.end() | 656 | print("error.GitError: Cannot fetch %s" % str(e), file=sys.stderr) |
595 | self._fetch_times.Save() | 657 | except Exception as e: |
596 | 658 | print( | |
597 | if not self.outer_client.manifest.IsArchive: | 659 | "error: Cannot fetch %s (%s: %s)" |
598 | self._GCProjects(projects, opt, err_event) | 660 | % (project.name, type(e).__name__, str(e)), |
599 | 661 | file=sys.stderr, | |
600 | return _FetchResult(ret, fetched) | 662 | ) |
601 | 663 | raise | |
602 | def _FetchMain(self, opt, args, all_projects, err_event, | 664 | |
603 | ssh_proxy, manifest): | 665 | finish = time.time() |
604 | """The main network fetch loop. | 666 | return _FetchOneResult(success, project, start, finish, remote_fetched) |
605 | 667 | ||
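_FetchOne above redirects the network-half output into an in-memory buffer and only surfaces it when running verbosely or when the fetch failed. A minimal sketch of that pattern; fake_fetch is a hypothetical stand-in for project.Sync_NetworkHalf:

    # Sketch of buffering worker output and printing it only when useful.
    import io
    import sys

    def fetch_with_buffered_output(do_fetch, verbose=False):
        buf = io.StringIO()
        success = do_fetch(output_redir=buf)
        output = buf.getvalue()
        if (verbose or not success) and output:
            print("\n" + output.rstrip())
        if not success:
            print("error: fetch failed", file=sys.stderr)
        return success

    def fake_fetch(output_redir):
        # Pretend the fetch wrote some progress and then failed.
        output_redir.write("remote: counting objects\n")
        return False

    fetch_with_buffered_output(fake_fetch, verbose=False)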
606 | Args: | 668 | @classmethod |
607 | opt: Program options returned from optparse. See _Options(). | 669 | def _FetchInitChild(cls, ssh_proxy): |
608 | args: Command line args used to filter out projects. | 670 | cls.ssh_proxy = ssh_proxy |
609 | all_projects: List of all projects that should be fetched. | 671 | |
610 | err_event: Whether an error was hit while processing. | 672 | def _Fetch(self, projects, opt, err_event, ssh_proxy): |
611 | ssh_proxy: SSH manager for clients & masters. | 673 | ret = True |
612 | manifest: The manifest to use. | 674 | |
613 | 675 | jobs = opt.jobs_network | |
614 | Returns: | 676 | fetched = set() |
615 | List of all projects that should be checked out. | 677 | remote_fetched = set() |
616 | """ | 678 | pm = Progress("Fetching", len(projects), delay=False, quiet=opt.quiet) |
617 | rp = manifest.repoProject | 679 | |
618 | 680 | objdir_project_map = dict() | |
619 | to_fetch = [] | 681 | for project in projects: |
620 | now = time.time() | 682 | objdir_project_map.setdefault(project.objdir, []).append(project) |
621 | if _ONE_DAY_S <= (now - rp.LastFetch): | 683 | projects_list = list(objdir_project_map.values()) |
622 | to_fetch.append(rp) | 684 | |
623 | to_fetch.extend(all_projects) | 685 | def _ProcessResults(results_sets): |
624 | to_fetch.sort(key=self._fetch_times.Get, reverse=True) | 686 | ret = True |
625 | 687 | for results in results_sets: | |
626 | result = self._Fetch(to_fetch, opt, err_event, ssh_proxy) | 688 | for result in results: |
627 | success = result.success | 689 | success = result.success |
628 | fetched = result.projects | 690 | project = result.project |
629 | if not success: | 691 | start = result.start |
630 | err_event.set() | 692 | finish = result.finish |
631 | 693 | self._fetch_times.Set(project, finish - start) | |
632 | _PostRepoFetch(rp, opt.repo_verify) | 694 | self.event_log.AddSync( |
633 | if opt.network_only: | 695 | project, |
634 | # bail out now; the rest touches the working tree | 696 | event_log.TASK_SYNC_NETWORK, |
635 | if err_event.is_set(): | 697 | start, |
636 | print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr) | 698 | finish, |
637 | sys.exit(1) | 699 | success, |
638 | return _FetchMainResult([]) | 700 | ) |
639 | 701 | if result.remote_fetched: | |
640 | # Iteratively fetch missing and/or nested unregistered submodules | 702 | remote_fetched.add(project) |
641 | previously_missing_set = set() | 703 | # Check for any errors before running any more tasks. |
642 | while True: | 704 | # ...we'll let existing jobs finish, though. |
643 | self._ReloadManifest(None, manifest) | 705 | if not success: |
644 | all_projects = self.GetProjects(args, | 706 | ret = False |
645 | missing_ok=True, | 707 | else: |
646 | submodules_ok=opt.fetch_submodules, | 708 | fetched.add(project.gitdir) |
647 | manifest=manifest, | 709 | pm.update(msg=f"Last synced: {project.name}") |
648 | all_manifests=not opt.this_manifest_only) | 710 | if not ret and opt.fail_fast: |
649 | missing = [] | 711 | break |
650 | for project in all_projects: | 712 | return ret |
651 | if project.gitdir not in fetched: | ||
652 | missing.append(project) | ||
653 | if not missing: | ||
654 | break | ||
655 | # Stop endlessly fetching actually-missing repos: if the set of | ||
656 | # missing repos has not changed since the last fetch, we break. | ||
657 | missing_set = set(p.name for p in missing) | ||
658 | if previously_missing_set == missing_set: | ||
659 | break | ||
660 | previously_missing_set = missing_set | ||
661 | result = self._Fetch(missing, opt, err_event, ssh_proxy) | ||
662 | success = result.success | ||
663 | new_fetched = result.projects | ||
664 | if not success: | ||
665 | err_event.set() | ||
666 | fetched.update(new_fetched) | ||
667 | |||
668 | return _FetchMainResult(all_projects) | ||
669 | |||
670 | def _CheckoutOne(self, detach_head, force_sync, project): | ||
671 | """Checkout work tree for one project | ||
672 | |||
673 | Args: | ||
674 | detach_head: Whether to leave a detached HEAD. | ||
675 | force_sync: Force checking out of the repo. | ||
676 | project: Project object for the project to checkout. | ||
677 | |||
678 | Returns: | ||
679 | Whether the fetch was successful. | ||
680 | """ | ||
681 | start = time.time() | ||
682 | syncbuf = SyncBuffer(project.manifest.manifestProject.config, | ||
683 | detach_head=detach_head) | ||
684 | success = False | ||
685 | try: | ||
686 | project.Sync_LocalHalf(syncbuf, force_sync=force_sync) | ||
687 | success = syncbuf.Finish() | ||
688 | except GitError as e: | ||
689 | print('error.GitError: Cannot checkout %s: %s' % | ||
690 | (project.name, str(e)), file=sys.stderr) | ||
691 | except Exception as e: | ||
692 | print('error: Cannot checkout %s: %s: %s' % | ||
693 | (project.name, type(e).__name__, str(e)), | ||
694 | file=sys.stderr) | ||
695 | raise | ||
696 | |||
697 | if not success: | ||
698 | print('error: Cannot checkout %s' % (project.name), file=sys.stderr) | ||
699 | finish = time.time() | ||
700 | return _CheckoutOneResult(success, project, start, finish) | ||
701 | |||
702 | def _Checkout(self, all_projects, opt, err_results): | ||
703 | """Checkout projects listed in all_projects | ||
704 | |||
705 | Args: | ||
706 | all_projects: List of all projects that should be checked out. | ||
707 | opt: Program options returned from optparse. See _Options(). | ||
708 | err_results: A list of strings, paths to git repos where checkout failed. | ||
709 | """ | ||
710 | # Only checkout projects with worktrees. | ||
711 | all_projects = [x for x in all_projects if x.worktree] | ||
712 | 713 | ||
713 | def _ProcessResults(pool, pm, results): | 714 | # We pass the ssh proxy settings via the class. This allows |
714 | ret = True | 715 | # multiprocessing to pickle it up when spawning children. We can't pass |
715 | for result in results: | 716 | # it as an argument to _FetchProjectList below as multiprocessing is |
717 | # unable to pickle those. | ||
718 | Sync.ssh_proxy = None | ||
719 | |||
720 | # NB: Multiprocessing is heavy, so don't spin it up for one job. | ||
721 | if len(projects_list) == 1 or jobs == 1: | ||
722 | self._FetchInitChild(ssh_proxy) | ||
723 | if not _ProcessResults( | ||
724 | self._FetchProjectList(opt, x) for x in projects_list | ||
725 | ): | ||
726 | ret = False | ||
727 | else: | ||
728 | # Favor throughput over responsiveness when quiet. It seems that | ||
729 | # imap() will yield results in batches relative to chunksize, so | ||
730 | # even as the children finish a sync, we won't see the result until | ||
731 | # one child finishes ~chunksize jobs. When using a large --jobs | ||
732 | # with large chunksize, this can be jarring as there will be a large | ||
733 | # initial delay where repo looks like it isn't doing anything and | ||
734 | # sits at 0%, but then suddenly completes a lot of jobs all at once. | ||
735 | # Since this code is more network bound, we can accept a bit more | ||
736 | # CPU overhead with a smaller chunksize so that the user sees more | ||
737 | # immediate & continuous feedback. | ||
738 | if opt.quiet: | ||
739 | chunksize = WORKER_BATCH_SIZE | ||
740 | else: | ||
741 | pm.update(inc=0, msg="warming up") | ||
742 | chunksize = 4 | ||
743 | with multiprocessing.Pool( | ||
744 | jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,) | ||
745 | ) as pool: | ||
746 | results = pool.imap_unordered( | ||
747 | functools.partial(self._FetchProjectList, opt), | ||
748 | projects_list, | ||
749 | chunksize=chunksize, | ||
750 | ) | ||
751 | if not _ProcessResults(results): | ||
752 | ret = False | ||
753 | pool.close() | ||
754 | |||
755 | # Cleanup the reference now that we're done with it, and we're going to | ||
756 | # release any resources it points to. If we don't, later | ||
757 | # multiprocessing usage (e.g. checkouts) will try to pickle and then | ||
758 | # crash. | ||
759 | del Sync.ssh_proxy | ||
760 | |||
761 | pm.end() | ||
762 | self._fetch_times.Save() | ||
763 | |||
764 | if not self.outer_client.manifest.IsArchive: | ||
765 | self._GCProjects(projects, opt, err_event) | ||
766 | |||
767 | return _FetchResult(ret, fetched) | ||
768 | |||
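_Fetch above groups projects by object directory so repos sharing a store are fetched serially within one worker, hands the groups to a multiprocessing pool whose initializer installs the shared SSH proxy (the real code stashes it on the Sync class because it cannot be pickled as a call argument), and picks a smaller chunksize when not quiet so progress feedback stays continuous. A simplified sketch under those assumptions; fetch_one is a hypothetical stand-in:

    # Sketch of the grouped, pooled fetch fan-out.
    import multiprocessing

    _proxy = None

    def _init_worker(proxy):
        # Mirrors the initializer that installs the shared proxy in each child.
        global _proxy
        _proxy = proxy

    def fetch_one(name):
        # Hypothetical stand-in for fetching a single project.
        return (name, _proxy is not None)

    def fetch_group(names):
        # Projects sharing one object directory: fetch serially in one worker.
        return [fetch_one(n) for n in names]

    def fetch_all(projects_by_objdir, proxy, jobs=4, quiet=False):
        groups = list(projects_by_objdir.values())
        # Small chunksize keeps feedback continuous; larger favors throughput.
        chunksize = 16 if quiet else 4
        with multiprocessing.Pool(
            jobs, initializer=_init_worker, initargs=(proxy,)
        ) as pool:
            results = []
            for group_result in pool.imap_unordered(
                fetch_group, groups, chunksize=chunksize
            ):
                results.extend(group_result)
        return results

    if __name__ == "__main__":
        print(fetch_all({"obj1": ["a", "b"], "obj2": ["c"]}, proxy="ssh-proxy", jobs=2))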
769 | def _FetchMain( | ||
770 | self, opt, args, all_projects, err_event, ssh_proxy, manifest | ||
771 | ): | ||
772 | """The main network fetch loop. | ||
773 | |||
774 | Args: | ||
775 | opt: Program options returned from optparse. See _Options(). | ||
776 | args: Command line args used to filter out projects. | ||
777 | all_projects: List of all projects that should be fetched. | ||
778 | err_event: Whether an error was hit while processing. | ||
779 | ssh_proxy: SSH manager for clients & masters. | ||
780 | manifest: The manifest to use. | ||
781 | |||
782 | Returns: | ||
783 | List of all projects that should be checked out. | ||
784 | """ | ||
785 | rp = manifest.repoProject | ||
786 | |||
787 | to_fetch = [] | ||
788 | now = time.time() | ||
789 | if _ONE_DAY_S <= (now - rp.LastFetch): | ||
790 | to_fetch.append(rp) | ||
791 | to_fetch.extend(all_projects) | ||
792 | to_fetch.sort(key=self._fetch_times.Get, reverse=True) | ||
793 | |||
794 | result = self._Fetch(to_fetch, opt, err_event, ssh_proxy) | ||
716 | success = result.success | 795 | success = result.success |
717 | project = result.project | 796 | fetched = result.projects |
718 | start = result.start | ||
719 | finish = result.finish | ||
720 | self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL, | ||
721 | start, finish, success) | ||
722 | # Check for any errors before running any more tasks. | ||
723 | # ...we'll let existing jobs finish, though. | ||
724 | if not success: | 797 | if not success: |
725 | ret = False | 798 | err_event.set() |
726 | err_results.append(project.RelPath(local=opt.this_manifest_only)) | 799 | |
727 | if opt.fail_fast: | 800 | _PostRepoFetch(rp, opt.repo_verify) |
728 | if pool: | 801 | if opt.network_only: |
729 | pool.close() | 802 | # Bail out now; the rest touches the working tree. |
803 | if err_event.is_set(): | ||
804 | print( | ||
805 | "\nerror: Exited sync due to fetch errors.\n", | ||
806 | file=sys.stderr, | ||
807 | ) | ||
808 | sys.exit(1) | ||
809 | return _FetchMainResult([]) | ||
810 | |||
811 | # Iteratively fetch missing and/or nested unregistered submodules. | ||
812 | previously_missing_set = set() | ||
813 | while True: | ||
814 | self._ReloadManifest(None, manifest) | ||
815 | all_projects = self.GetProjects( | ||
816 | args, | ||
817 | missing_ok=True, | ||
818 | submodules_ok=opt.fetch_submodules, | ||
819 | manifest=manifest, | ||
820 | all_manifests=not opt.this_manifest_only, | ||
821 | ) | ||
822 | missing = [] | ||
823 | for project in all_projects: | ||
824 | if project.gitdir not in fetched: | ||
825 | missing.append(project) | ||
826 | if not missing: | ||
827 | break | ||
828 | # Stop endlessly fetching actually-missing repos: if the set | ||
829 | # of missing repos has not changed since the last fetch, we break. | ||
830 | missing_set = set(p.name for p in missing) | ||
831 | if previously_missing_set == missing_set: | ||
832 | break | ||
833 | previously_missing_set = missing_set | ||
834 | result = self._Fetch(missing, opt, err_event, ssh_proxy) | ||
835 | success = result.success | ||
836 | new_fetched = result.projects | ||
837 | if not success: | ||
838 | err_event.set() | ||
839 | fetched.update(new_fetched) | ||
840 | |||
841 | return _FetchMainResult(all_projects) | ||
842 | |||
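The loop at the end of _FetchMain keeps fetching newly discovered projects (missing or nested submodules) until a pass no longer changes the set of missing names, so a repo that can never be fetched does not cause an endless loop. A small sketch of that fixed-point loop; discover and fetch are hypothetical stand-ins:

    # Sketch of the "fetch until the missing set stops changing" convergence.
    def fetch_until_stable(discover, fetch):
        fetched = set()
        previously_missing = set()
        while True:
            missing = [p for p in discover() if p not in fetched]
            if not missing:
                break
            missing_set = set(missing)
            if missing_set == previously_missing:
                break  # No progress since the last pass; give up on the rest.
            previously_missing = missing_set
            fetched.update(fetch(missing))
        return fetched

    # Example: "sub2" only becomes visible after "sub1" has been fetched.
    world = {"sub1": ["sub2"], "sub2": []}
    visible = {"sub1"}

    def discover():
        return sorted(visible)

    def fetch(names):
        for n in names:
            visible.update(world.get(n, []))
        return names

    print(fetch_until_stable(discover, fetch))  # {'sub1', 'sub2'}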
843 | def _CheckoutOne(self, detach_head, force_sync, project): | ||
844 | """Checkout work tree for one project | ||
845 | |||
846 | Args: | ||
847 | detach_head: Whether to leave a detached HEAD. | ||
848 | force_sync: Force checking out of the repo. | ||
849 | project: Project object for the project to checkout. | ||
850 | |||
851 | Returns: | ||
852 | Whether the fetch was successful. | ||
853 | """ | ||
854 | start = time.time() | ||
855 | syncbuf = SyncBuffer( | ||
856 | project.manifest.manifestProject.config, detach_head=detach_head | ||
857 | ) | ||
858 | success = False | ||
859 | try: | ||
860 | project.Sync_LocalHalf(syncbuf, force_sync=force_sync) | ||
861 | success = syncbuf.Finish() | ||
862 | except GitError as e: | ||
863 | print( | ||
864 | "error.GitError: Cannot checkout %s: %s" | ||
865 | % (project.name, str(e)), | ||
866 | file=sys.stderr, | ||
867 | ) | ||
868 | except Exception as e: | ||
869 | print( | ||
870 | "error: Cannot checkout %s: %s: %s" | ||
871 | % (project.name, type(e).__name__, str(e)), | ||
872 | file=sys.stderr, | ||
873 | ) | ||
874 | raise | ||
875 | |||
876 | if not success: | ||
877 | print("error: Cannot checkout %s" % (project.name), file=sys.stderr) | ||
878 | finish = time.time() | ||
879 | return _CheckoutOneResult(success, project, start, finish) | ||
880 | |||
881 | def _Checkout(self, all_projects, opt, err_results): | ||
882 | """Checkout projects listed in all_projects | ||
883 | |||
884 | Args: | ||
885 | all_projects: List of all projects that should be checked out. | ||
886 | opt: Program options returned from optparse. See _Options(). | ||
887 | err_results: A list of strings, paths to git repos where checkout | ||
888 | failed. | ||
889 | """ | ||
890 | # Only checkout projects with worktrees. | ||
891 | all_projects = [x for x in all_projects if x.worktree] | ||
892 | |||
893 | def _ProcessResults(pool, pm, results): | ||
894 | ret = True | ||
895 | for result in results: | ||
896 | success = result.success | ||
897 | project = result.project | ||
898 | start = result.start | ||
899 | finish = result.finish | ||
900 | self.event_log.AddSync( | ||
901 | project, event_log.TASK_SYNC_LOCAL, start, finish, success | ||
902 | ) | ||
903 | # Check for any errors before running any more tasks. | ||
904 | # ...we'll let existing jobs finish, though. | ||
905 | if not success: | ||
906 | ret = False | ||
907 | err_results.append( | ||
908 | project.RelPath(local=opt.this_manifest_only) | ||
909 | ) | ||
910 | if opt.fail_fast: | ||
911 | if pool: | ||
912 | pool.close() | ||
913 | return ret | ||
914 | pm.update(msg=project.name) | ||
730 | return ret | 915 | return ret |
731 | pm.update(msg=project.name) | ||
732 | return ret | ||
733 | |||
734 | return self.ExecuteInParallel( | ||
735 | opt.jobs_checkout, | ||
736 | functools.partial(self._CheckoutOne, opt.detach_head, opt.force_sync), | ||
737 | all_projects, | ||
738 | callback=_ProcessResults, | ||
739 | output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results | ||
740 | |||
741 | @staticmethod | ||
742 | def _GetPreciousObjectsState(project: Project, opt): | ||
743 | """Get the preciousObjects state for the project. | ||
744 | |||
745 | Args: | ||
746 | project (Project): the project to examine, and possibly correct. | ||
747 | opt (optparse.Values): options given to sync. | ||
748 | |||
749 | Returns: | ||
750 | Expected state of extensions.preciousObjects: | ||
751 | False: Should be disabled. (not present) | ||
752 | True: Should be enabled. | ||
753 | """ | ||
754 | if project.use_git_worktrees: | ||
755 | return False | ||
756 | projects = project.manifest.GetProjectsWithName(project.name, | ||
757 | all_manifests=True) | ||
758 | if len(projects) == 1: | ||
759 | return False | ||
760 | relpath = project.RelPath(local=opt.this_manifest_only) | ||
761 | if len(projects) > 1: | ||
762 | # Objects are potentially shared with another project. | ||
763 | # See the logic in Project.Sync_NetworkHalf regarding UseAlternates. | ||
764 | # - When False, shared projects share (via symlink) | ||
765 | # .repo/project-objects/{PROJECT_NAME}.git as the one-and-only objects | ||
766 | # directory. All objects are precious, since there is no project with a | ||
767 | # complete set of refs. | ||
768 | # - When True, shared projects share (via info/alternates) | ||
769 | # .repo/project-objects/{PROJECT_NAME}.git as an alternate object store, | ||
770 | # which is written only on the first clone of the project, and is not | ||
771 | # written subsequently. (When Sync_NetworkHalf sees that it exists, it | ||
772 | # makes sure that the alternates file points there, and uses a | ||
773 | # project-local .git/objects directory for all syncs going forward. | ||
774 | # We do not support switching between the options. The environment | ||
775 | # variable is present for testing and migration only. | ||
776 | return not project.UseAlternates | ||
777 | |||
778 | return False | ||
779 | |||
780 | def _SetPreciousObjectsState(self, project: Project, opt): | ||
781 | """Correct the preciousObjects state for the project. | ||
782 | |||
783 | Args: | ||
784 | project: the project to examine, and possibly correct. | ||
785 | opt: options given to sync. | ||
786 | """ | ||
787 | expected = self._GetPreciousObjectsState(project, opt) | ||
788 | actual = project.config.GetBoolean('extensions.preciousObjects') or False | ||
789 | relpath = project.RelPath(local=opt.this_manifest_only) | ||
790 | |||
791 | if expected != actual: | ||
792 | # If this is unexpected, log it and repair. | ||
793 | Trace(f'{relpath} expected preciousObjects={expected}, got {actual}') | ||
794 | if expected: | ||
795 | if not opt.quiet: | ||
796 | print('\r%s: Shared project %s found, disabling pruning.' % | ||
797 | (relpath, project.name)) | ||
798 | if git_require((2, 7, 0)): | ||
799 | project.EnableRepositoryExtension('preciousObjects') | ||
800 | else: | ||
801 | # This isn't perfect, but it's the best we can do with old git. | ||
802 | print('\r%s: WARNING: shared projects are unreliable when using ' | ||
803 | 'old versions of git; please upgrade to git-2.7.0+.' | ||
804 | % (relpath,), | ||
805 | file=sys.stderr) | ||
806 | project.config.SetString('gc.pruneExpire', 'never') | ||
807 | else: | ||
808 | if not opt.quiet: | ||
809 | print(f'\r{relpath}: not shared, disabling pruning.') | ||
810 | project.config.SetString('extensions.preciousObjects', None) | ||
811 | project.config.SetString('gc.pruneExpire', None) | ||
812 | 916 | ||
813 | def _GCProjects(self, projects, opt, err_event): | 917 | return ( |
814 | """Perform garbage collection. | 918 | self.ExecuteInParallel( |
919 | opt.jobs_checkout, | ||
920 | functools.partial( | ||
921 | self._CheckoutOne, opt.detach_head, opt.force_sync | ||
922 | ), | ||
923 | all_projects, | ||
924 | callback=_ProcessResults, | ||
925 | output=Progress( | ||
926 | "Checking out", len(all_projects), quiet=opt.quiet | ||
927 | ), | ||
928 | ) | ||
929 | and not err_results | ||
930 | ) | ||
815 | 931 | ||
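_Checkout above fans the per-project checkouts out via ExecuteInParallel and lets the _ProcessResults callback record failing paths and stop early when --fail-fast is set. A rough sketch of that shape, assuming ExecuteInParallel behaves like a pool imap feeding results to a callback; checkout_one is a hypothetical stand-in for _CheckoutOne:

    # Sketch of parallel checkout with failure collection and fail-fast.
    from multiprocessing.dummy import Pool  # threads are enough for a sketch

    def checkout_one(path):
        # Pretend exactly one project fails to check out.
        return (path, path != "bad/project")

    def checkout_all(paths, jobs=4, fail_fast=False):
        err_results = []
        ok = True
        with Pool(jobs) as pool:
            for path, success in pool.imap_unordered(checkout_one, paths):
                if not success:
                    ok = False
                    err_results.append(path)
                    if fail_fast:
                        break  # Stop consuming results; remaining work is dropped.
        return ok and not err_results, err_results

    print(checkout_all(["a", "bad/project", "c"], fail_fast=True))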
816 | If we are skipping garbage collection (opt.auto_gc not set), we still want | 932 | @staticmethod |
817 | to potentially mark objects precious, so that `git gc` does not discard | 933 | def _GetPreciousObjectsState(project: Project, opt): |
818 | shared objects. | 934 | """Get the preciousObjects state for the project. |
819 | """ | 935 | |
820 | if not opt.auto_gc: | 936 | Args: |
821 | # Just repair preciousObjects state, and return. | 937 | project (Project): the project to examine, and possibly correct. |
822 | for project in projects: | 938 | opt (optparse.Values): options given to sync. |
823 | self._SetPreciousObjectsState(project, opt) | 939 | |
824 | return | 940 | Returns: |
825 | 941 | Expected state of extensions.preciousObjects: | |
826 | pm = Progress('Garbage collecting', len(projects), delay=False, | 942 | False: Should be disabled. (not present) |
827 | quiet=opt.quiet) | 943 | True: Should be enabled. |
828 | pm.update(inc=0, msg='prescan') | 944 | """ |
829 | 945 | if project.use_git_worktrees: | |
830 | tidy_dirs = {} | 946 | return False |
831 | for project in projects: | 947 | projects = project.manifest.GetProjectsWithName( |
832 | self._SetPreciousObjectsState(project, opt) | 948 | project.name, all_manifests=True |
833 | |||
834 | project.config.SetString('gc.autoDetach', 'false') | ||
835 | # Only call git gc once per objdir, but call pack-refs for the remainder. | ||
836 | if project.objdir not in tidy_dirs: | ||
837 | tidy_dirs[project.objdir] = ( | ||
838 | True, # Run a full gc. | ||
839 | project.bare_git, | ||
840 | ) | 949 | ) |
841 | elif project.gitdir not in tidy_dirs: | 950 | if len(projects) == 1: |
842 | tidy_dirs[project.gitdir] = ( | 951 | return False |
843 | False, # Do not run a full gc; just run pack-refs. | 952 | if len(projects) > 1: |
844 | project.bare_git, | 953 | # Objects are potentially shared with another project. |
954 | # See the logic in Project.Sync_NetworkHalf regarding UseAlternates. | ||
955 | # - When False, shared projects share (via symlink) | ||
956 | # .repo/project-objects/{PROJECT_NAME}.git as the one-and-only | ||
957 | # objects directory. All objects are precious, since there is no | ||
958 | # project with a complete set of refs. | ||
959 | # - When True, shared projects share (via info/alternates) | ||
960 | # .repo/project-objects/{PROJECT_NAME}.git as an alternate object | ||
961 | # store, which is written only on the first clone of the project, | ||
962 | # and is not written subsequently. (When Sync_NetworkHalf sees | ||
963 | # that it exists, it makes sure that the alternates file points | ||
964 | # there, and uses a project-local .git/objects directory for all | ||
965 | # syncs going forward. | ||
966 | # We do not support switching between the options. The environment | ||
967 | # variable is present for testing and migration only. | ||
968 | return not project.UseAlternates | ||
969 | |||
970 | return False | ||
971 | |||
972 | def _SetPreciousObjectsState(self, project: Project, opt): | ||
973 | """Correct the preciousObjects state for the project. | ||
974 | |||
975 | Args: | ||
976 | project: the project to examine, and possibly correct. | ||
977 | opt: options given to sync. | ||
978 | """ | ||
979 | expected = self._GetPreciousObjectsState(project, opt) | ||
980 | actual = ( | ||
981 | project.config.GetBoolean("extensions.preciousObjects") or False | ||
845 | ) | 982 | ) |
846 | 983 | relpath = project.RelPath(local=opt.this_manifest_only) | |
847 | jobs = opt.jobs | 984 | |
848 | 985 | if expected != actual: | |
849 | if jobs < 2: | 986 | # If this is unexpected, log it and repair. |
850 | for (run_gc, bare_git) in tidy_dirs.values(): | 987 | Trace( |
851 | pm.update(msg=bare_git._project.name) | 988 | f"{relpath} expected preciousObjects={expected}, got {actual}" |
852 | 989 | ) | |
853 | if run_gc: | 990 | if expected: |
854 | bare_git.gc('--auto') | 991 | if not opt.quiet: |
992 | print( | ||
993 | "\r%s: Shared project %s found, disabling pruning." | ||
994 | % (relpath, project.name) | ||
995 | ) | ||
996 | if git_require((2, 7, 0)): | ||
997 | project.EnableRepositoryExtension("preciousObjects") | ||
998 | else: | ||
999 | # This isn't perfect, but it's the best we can do with old | ||
1000 | # git. | ||
1001 | print( | ||
1002 | "\r%s: WARNING: shared projects are unreliable when " | ||
1003 | "using old versions of git; please upgrade to " | ||
1004 | "git-2.7.0+." % (relpath,), | ||
1005 | file=sys.stderr, | ||
1006 | ) | ||
1007 | project.config.SetString("gc.pruneExpire", "never") | ||
1008 | else: | ||
1009 | if not opt.quiet: | ||
1010 | print(f"\r{relpath}: not shared, disabling pruning.") | ||
1011 | project.config.SetString("extensions.preciousObjects", None) | ||
1012 | project.config.SetString("gc.pruneExpire", None) | ||
1013 | |||
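_SetPreciousObjectsState above repairs the pruning configuration for shared object stores: enable the preciousObjects repository extension on git 2.7+, otherwise fall back to gc.pruneExpire=never, and clear both settings when the project is not shared. A simplified sketch phrased as plain git config calls (the real EnableRepositoryExtension does additional bookkeeping); repo_dir is an illustrative parameter:

    # Sketch of the precious-objects repair, expressed with subprocess + git config.
    import subprocess

    def set_precious_objects(repo_dir, shared, git_supports_extensions=True):
        def git_config(key, value):
            subprocess.run(["git", "-C", repo_dir, "config", key, value], check=True)

        if shared:
            if git_supports_extensions:
                git_config("extensions.preciousObjects", "true")
            else:
                # Old git: best effort, just never expire unreachable objects.
                git_config("gc.pruneExpire", "never")
        else:
            # Not shared: drop both settings so normal gc pruning applies again.
            subprocess.run(
                ["git", "-C", repo_dir, "config", "--unset", "extensions.preciousObjects"],
                check=False,
            )
            subprocess.run(
                ["git", "-C", repo_dir, "config", "--unset", "gc.pruneExpire"],
                check=False,
            )

    # Example (hypothetical path): set_precious_objects("/tmp/shared.git", shared=True)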
1014 | def _GCProjects(self, projects, opt, err_event): | ||
1015 | """Perform garbage collection. | ||
1016 | |||
1017 | If we are skipping garbage collection (opt.auto_gc not set), we still | ||
1018 | want to potentially mark objects precious, so that `git gc` does not | ||
1019 | discard shared objects. | ||
1020 | """ | ||
1021 | if not opt.auto_gc: | ||
1022 | # Just repair preciousObjects state, and return. | ||
1023 | for project in projects: | ||
1024 | self._SetPreciousObjectsState(project, opt) | ||
1025 | return | ||
1026 | |||
1027 | pm = Progress( | ||
1028 | "Garbage collecting", len(projects), delay=False, quiet=opt.quiet | ||
1029 | ) | ||
1030 | pm.update(inc=0, msg="prescan") | ||
1031 | |||
1032 | tidy_dirs = {} | ||
1033 | for project in projects: | ||
1034 | self._SetPreciousObjectsState(project, opt) | ||
1035 | |||
1036 | project.config.SetString("gc.autoDetach", "false") | ||
1037 | # Only call git gc once per objdir, but call pack-refs for the | ||
1038 | # remainder. | ||
1039 | if project.objdir not in tidy_dirs: | ||
1040 | tidy_dirs[project.objdir] = ( | ||
1041 | True, # Run a full gc. | ||
1042 | project.bare_git, | ||
1043 | ) | ||
1044 | elif project.gitdir not in tidy_dirs: | ||
1045 | tidy_dirs[project.gitdir] = ( | ||
1046 | False, # Do not run a full gc; just run pack-refs. | ||
1047 | project.bare_git, | ||
1048 | ) | ||
1049 | |||
1050 | jobs = opt.jobs | ||
1051 | |||
1052 | if jobs < 2: | ||
1053 | for run_gc, bare_git in tidy_dirs.values(): | ||
1054 | pm.update(msg=bare_git._project.name) | ||
1055 | |||
1056 | if run_gc: | ||
1057 | bare_git.gc("--auto") | ||
1058 | else: | ||
1059 | bare_git.pack_refs() | ||
1060 | pm.end() | ||
1061 | return | ||
1062 | |||
1063 | cpu_count = os.cpu_count() | ||
1064 | config = {"pack.threads": cpu_count // jobs if cpu_count > jobs else 1} | ||
1065 | |||
1066 | threads = set() | ||
1067 | sem = _threading.Semaphore(jobs) | ||
1068 | |||
1069 | def tidy_up(run_gc, bare_git): | ||
1070 | pm.start(bare_git._project.name) | ||
1071 | try: | ||
1072 | try: | ||
1073 | if run_gc: | ||
1074 | bare_git.gc("--auto", config=config) | ||
1075 | else: | ||
1076 | bare_git.pack_refs(config=config) | ||
1077 | except GitError: | ||
1078 | err_event.set() | ||
1079 | except Exception: | ||
1080 | err_event.set() | ||
1081 | raise | ||
1082 | finally: | ||
1083 | pm.finish(bare_git._project.name) | ||
1084 | sem.release() | ||
1085 | |||
1086 | for run_gc, bare_git in tidy_dirs.values(): | ||
1087 | if err_event.is_set() and opt.fail_fast: | ||
1088 | break | ||
1089 | sem.acquire() | ||
1090 | t = _threading.Thread( | ||
1091 | target=tidy_up, | ||
1092 | args=( | ||
1093 | run_gc, | ||
1094 | bare_git, | ||
1095 | ), | ||
1096 | ) | ||
1097 | t.daemon = True | ||
1098 | threads.add(t) | ||
1099 | t.start() | ||
1100 | |||
1101 | for t in threads: | ||
1102 | t.join() | ||
1103 | pm.end() | ||
1104 | |||
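The parallel branch of _GCProjects above bounds concurrency with a semaphore, runs each gc in a daemon thread that releases its slot when done, and divides the CPU budget across concurrent jobs via pack.threads. A minimal sketch of that pattern; run_gc stands in for bare_git.gc("--auto"):

    # Sketch of semaphore-bounded gc threads with a shared CPU budget.
    import os
    import threading

    def gc_all(repo_names, jobs=2):
        cpu_count = os.cpu_count() or 1
        pack_threads = cpu_count // jobs if cpu_count > jobs else 1

        sem = threading.Semaphore(jobs)
        threads = []

        def run_gc(name):
            try:
                print(f"gc {name} (pack.threads={pack_threads})")
            finally:
                sem.release()  # Free the slot for the next repo.

        for name in repo_names:
            sem.acquire()  # Blocks when `jobs` gc runs are already in flight.
            t = threading.Thread(target=run_gc, args=(name,), daemon=True)
            threads.append(t)
            t.start()

        for t in threads:
            t.join()

    gc_all(["platform/build", "platform/art", "kernel/common"], jobs=2)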
1105 | def _ReloadManifest(self, manifest_name, manifest): | ||
1106 | """Reload the manfiest from the file specified by the |manifest_name|. | ||
1107 | |||
1108 | It unloads the manifest if |manifest_name| is None. | ||
1109 | |||
1110 | Args: | ||
1111 | manifest_name: Manifest file to be reloaded. | ||
1112 | manifest: The manifest to use. | ||
1113 | """ | ||
1114 | if manifest_name: | ||
1115 | # Override calls Unload already. | ||
1116 | manifest.Override(manifest_name) | ||
855 | else: | 1117 | else: |
856 | bare_git.pack_refs() | 1118 | manifest.Unload() |
857 | pm.end() | 1119 | |
858 | return | 1120 | def UpdateProjectList(self, opt, manifest): |
859 | 1121 | """Update the cached projects list for |manifest| | |
860 | cpu_count = os.cpu_count() | 1122 | |
861 | config = {'pack.threads': cpu_count // jobs if cpu_count > jobs else 1} | 1123 | In a multi-manifest checkout, each manifest has its own project.list. |
862 | 1124 | ||
863 | threads = set() | 1125 | Args: |
864 | sem = _threading.Semaphore(jobs) | 1126 | opt: Program options returned from optparse. See _Options(). |
1127 | manifest: The manifest to use. | ||
1128 | |||
1129 | Returns: | ||
1130 | 0: success | ||
1131 | 1: failure | ||
1132 | """ | ||
1133 | new_project_paths = [] | ||
1134 | for project in self.GetProjects( | ||
1135 | None, missing_ok=True, manifest=manifest, all_manifests=False | ||
1136 | ): | ||
1137 | if project.relpath: | ||
1138 | new_project_paths.append(project.relpath) | ||
1139 | file_name = "project.list" | ||
1140 | file_path = os.path.join(manifest.subdir, file_name) | ||
1141 | old_project_paths = [] | ||
1142 | |||
1143 | if os.path.exists(file_path): | ||
1144 | with open(file_path, "r") as fd: | ||
1145 | old_project_paths = fd.read().split("\n") | ||
1146 | # In reversed order, so subfolders are deleted before parent folder. | ||
1147 | for path in sorted(old_project_paths, reverse=True): | ||
1148 | if not path: | ||
1149 | continue | ||
1150 | if path not in new_project_paths: | ||
1151 | # If the path has already been deleted, we don't need to do | ||
1152 | # it. | ||
1153 | gitdir = os.path.join(manifest.topdir, path, ".git") | ||
1154 | if os.path.exists(gitdir): | ||
1155 | project = Project( | ||
1156 | manifest=manifest, | ||
1157 | name=path, | ||
1158 | remote=RemoteSpec("origin"), | ||
1159 | gitdir=gitdir, | ||
1160 | objdir=gitdir, | ||
1161 | use_git_worktrees=os.path.isfile(gitdir), | ||
1162 | worktree=os.path.join(manifest.topdir, path), | ||
1163 | relpath=path, | ||
1164 | revisionExpr="HEAD", | ||
1165 | revisionId=None, | ||
1166 | groups=None, | ||
1167 | ) | ||
1168 | if not project.DeleteWorktree( | ||
1169 | quiet=opt.quiet, force=opt.force_remove_dirty | ||
1170 | ): | ||
1171 | return 1 | ||
1172 | |||
1173 | new_project_paths.sort() | ||
1174 | with open(file_path, "w") as fd: | ||
1175 | fd.write("\n".join(new_project_paths)) | ||
1176 | fd.write("\n") | ||
1177 | return 0 | ||
1178 | |||
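UpdateProjectList above compares the cached project.list against the current manifest and deletes worktrees for paths that have disappeared, walking the old paths in reverse-sorted order so nested projects are removed before their parents. A small sketch of that ordering; delete_worktree is a hypothetical stand-in for Project.DeleteWorktree:

    # Sketch of reconciling the old project.list with the new project paths.
    def reconcile_project_list(old_paths, new_paths, delete_worktree):
        for path in sorted(old_paths, reverse=True):
            if path and path not in new_paths:
                if not delete_worktree(path):
                    return 1  # Failure: report it so project.list is not rewritten.
        return 0

    removed = []
    status = reconcile_project_list(
        ["external/tool", "external/tool/tests", "frameworks/base"],
        ["frameworks/base"],
        lambda p: removed.append(p) or True,
    )
    print(status, removed)  # 0 ['external/tool/tests', 'external/tool']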
1179 | def UpdateCopyLinkfileList(self, manifest): | ||
1180 | """Save all dests of copyfile and linkfile, and update them if needed. | ||
1181 | |||
1182 | Returns: | ||
1183 | Whether update was successful. | ||
1184 | """ | ||
1185 | new_paths = {} | ||
1186 | new_linkfile_paths = [] | ||
1187 | new_copyfile_paths = [] | ||
1188 | for project in self.GetProjects( | ||
1189 | None, missing_ok=True, manifest=manifest, all_manifests=False | ||
1190 | ): | ||
1191 | new_linkfile_paths.extend(x.dest for x in project.linkfiles) | ||
1192 | new_copyfile_paths.extend(x.dest for x in project.copyfiles) | ||
1193 | |||
1194 | new_paths = { | ||
1195 | "linkfile": new_linkfile_paths, | ||
1196 | "copyfile": new_copyfile_paths, | ||
1197 | } | ||
1198 | |||
1199 | copylinkfile_name = "copy-link-files.json" | ||
1200 | copylinkfile_path = os.path.join(manifest.subdir, copylinkfile_name) | ||
1201 | old_copylinkfile_paths = {} | ||
1202 | |||
1203 | if os.path.exists(copylinkfile_path): | ||
1204 | with open(copylinkfile_path, "rb") as fp: | ||
1205 | try: | ||
1206 | old_copylinkfile_paths = json.load(fp) | ||
1207 | except Exception: | ||
1208 | print( | ||
1209 | "error: %s is not a json formatted file." | ||
1210 | % copylinkfile_path, | ||
1211 | file=sys.stderr, | ||
1212 | ) | ||
1213 | platform_utils.remove(copylinkfile_path) | ||
1214 | return False | ||
1215 | |||
1216 | need_remove_files = [] | ||
1217 | need_remove_files.extend( | ||
1218 | set(old_copylinkfile_paths.get("linkfile", [])) | ||
1219 | - set(new_linkfile_paths) | ||
1220 | ) | ||
1221 | need_remove_files.extend( | ||
1222 | set(old_copylinkfile_paths.get("copyfile", [])) | ||
1223 | - set(new_copyfile_paths) | ||
1224 | ) | ||
1225 | |||
1226 | for need_remove_file in need_remove_files: | ||
1227 | # Try to remove each stale copyfile or linkfile dest. | ||
1228 | # If the file does not exist, there is nothing to do. | ||
1229 | platform_utils.remove(need_remove_file, missing_ok=True) | ||
1230 | |||
1231 | # Create copy-link-files.json, saving the dest paths of "copyfile" and | ||
1232 | # "linkfile". | ||
1233 | with open(copylinkfile_path, "w", encoding="utf-8") as fp: | ||
1234 | json.dump(new_paths, fp) | ||
1235 | return True | ||
1236 | |||
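UpdateCopyLinkfileList above keeps a JSON state file of copyfile and linkfile destinations, removes any destination the new manifest no longer produces, and rewrites the state. A simplified sketch; the state-file path and names are illustrative only:

    # Sketch of the stale copyfile/linkfile cleanup via set difference.
    import json
    import os

    def update_copy_link_state(state_path, new_linkfiles, new_copyfiles):
        old = {}
        if os.path.exists(state_path):
            with open(state_path, "rb") as fp:
                old = json.load(fp)

        stale = (set(old.get("linkfile", [])) - set(new_linkfiles)) | (
            set(old.get("copyfile", [])) - set(new_copyfiles)
        )
        for path in stale:
            try:
                os.remove(path)
            except FileNotFoundError:
                pass  # Already gone; nothing to do.

        with open(state_path, "w", encoding="utf-8") as fp:
            json.dump({"linkfile": new_linkfiles, "copyfile": new_copyfiles}, fp)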
1237 | def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest): | ||
1238 | if not manifest.manifest_server: | ||
1239 | print( | ||
1240 | "error: cannot smart sync: no manifest server defined in " | ||
1241 | "manifest", | ||
1242 | file=sys.stderr, | ||
1243 | ) | ||
1244 | sys.exit(1) | ||
1245 | |||
1246 | manifest_server = manifest.manifest_server | ||
1247 | if not opt.quiet: | ||
1248 | print("Using manifest server %s" % manifest_server) | ||
1249 | |||
1250 | if "@" not in manifest_server: | ||
1251 | username = None | ||
1252 | password = None | ||
1253 | if opt.manifest_server_username and opt.manifest_server_password: | ||
1254 | username = opt.manifest_server_username | ||
1255 | password = opt.manifest_server_password | ||
1256 | else: | ||
1257 | try: | ||
1258 | info = netrc.netrc() | ||
1259 | except IOError: | ||
1260 | # .netrc file does not exist or could not be opened. | ||
1261 | pass | ||
1262 | else: | ||
1263 | try: | ||
1264 | parse_result = urllib.parse.urlparse(manifest_server) | ||
1265 | if parse_result.hostname: | ||
1266 | auth = info.authenticators(parse_result.hostname) | ||
1267 | if auth: | ||
1268 | username, _account, password = auth | ||
1269 | else: | ||
1270 | print( | ||
1271 | "No credentials found for %s in .netrc" | ||
1272 | % parse_result.hostname, | ||
1273 | file=sys.stderr, | ||
1274 | ) | ||
1275 | except netrc.NetrcParseError as e: | ||
1276 | print( | ||
1277 | "Error parsing .netrc file: %s" % e, file=sys.stderr | ||
1278 | ) | ||
1279 | |||
1280 | if username and password: | ||
1281 | manifest_server = manifest_server.replace( | ||
1282 | "://", "://%s:%s@" % (username, password), 1 | ||
1283 | ) | ||
1284 | |||
1285 | transport = PersistentTransport(manifest_server) | ||
1286 | if manifest_server.startswith("persistent-"): | ||
1287 | manifest_server = manifest_server[len("persistent-") :] | ||
865 | 1288 | ||
866 | def tidy_up(run_gc, bare_git): | ||
867 | pm.start(bare_git._project.name) | ||
868 | try: | ||
869 | try: | 1289 | try: |
870 | if run_gc: | 1290 | server = xmlrpc.client.Server(manifest_server, transport=transport) |
871 | bare_git.gc('--auto', config=config) | 1291 | if opt.smart_sync: |
872 | else: | 1292 | branch = self._GetBranch(manifest.manifestProject) |
873 | bare_git.pack_refs(config=config) | 1293 | |
874 | except GitError: | 1294 | if "SYNC_TARGET" in os.environ: |
875 | err_event.set() | 1295 | target = os.environ["SYNC_TARGET"] |
876 | except Exception: | 1296 | [success, manifest_str] = server.GetApprovedManifest( |
877 | err_event.set() | 1297 | branch, target |
878 | raise | 1298 | ) |
879 | finally: | 1299 | elif ( |
880 | pm.finish(bare_git._project.name) | 1300 | "TARGET_PRODUCT" in os.environ |
881 | sem.release() | 1301 | and "TARGET_BUILD_VARIANT" in os.environ |
882 | 1302 | ): | |
883 | for (run_gc, bare_git) in tidy_dirs.values(): | 1303 | target = "%s-%s" % ( |
884 | if err_event.is_set() and opt.fail_fast: | 1304 | os.environ["TARGET_PRODUCT"], |
885 | break | 1305 | os.environ["TARGET_BUILD_VARIANT"], |
886 | sem.acquire() | 1306 | ) |
887 | t = _threading.Thread(target=tidy_up, args=(run_gc, bare_git,)) | 1307 | [success, manifest_str] = server.GetApprovedManifest( |
888 | t.daemon = True | 1308 | branch, target |
889 | threads.add(t) | 1309 | ) |
890 | t.start() | 1310 | else: |
891 | 1311 | [success, manifest_str] = server.GetApprovedManifest(branch) | |
892 | for t in threads: | 1312 | else: |
893 | t.join() | 1313 | assert opt.smart_tag |
894 | pm.end() | 1314 | [success, manifest_str] = server.GetManifest(opt.smart_tag) |
895 | 1315 | ||
896 | def _ReloadManifest(self, manifest_name, manifest): | 1316 | if success: |
897 | """Reload the manfiest from the file specified by the |manifest_name|. | 1317 | manifest_name = os.path.basename(smart_sync_manifest_path) |
898 | 1318 | try: | |
899 | It unloads the manifest if |manifest_name| is None. | 1319 | with open(smart_sync_manifest_path, "w") as f: |
900 | 1320 | f.write(manifest_str) | |
901 | Args: | 1321 | except IOError as e: |
902 | manifest_name: Manifest file to be reloaded. | 1322 | print( |
903 | manifest: The manifest to use. | 1323 | "error: cannot write manifest to %s:\n%s" |
904 | """ | 1324 | % (smart_sync_manifest_path, e), |
905 | if manifest_name: | 1325 | file=sys.stderr, |
906 | # Override calls Unload already | 1326 | ) |
907 | manifest.Override(manifest_name) | 1327 | sys.exit(1) |
908 | else: | 1328 | self._ReloadManifest(manifest_name, manifest) |
909 | manifest.Unload() | 1329 | else: |
910 | 1330 | print( | |
911 | def UpdateProjectList(self, opt, manifest): | 1331 | "error: manifest server RPC call failed: %s" % manifest_str, |
912 | """Update the cached projects list for |manifest| | 1332 | file=sys.stderr, |
913 | 1333 | ) | |
914 | In a multi-manifest checkout, each manifest has its own project.list. | 1334 | sys.exit(1) |
1335 | except (socket.error, IOError, xmlrpc.client.Fault) as e: | ||
1336 | print( | ||
1337 | "error: cannot connect to manifest server %s:\n%s" | ||
1338 | % (manifest.manifest_server, e), | ||
1339 | file=sys.stderr, | ||
1340 | ) | ||
1341 | sys.exit(1) | ||
1342 | except xmlrpc.client.ProtocolError as e: | ||
1343 | print( | ||
1344 | "error: cannot connect to manifest server %s:\n%d %s" | ||
1345 | % (manifest.manifest_server, e.errcode, e.errmsg), | ||
1346 | file=sys.stderr, | ||
1347 | ) | ||
1348 | sys.exit(1) | ||
1349 | |||
1350 | return manifest_name | ||
1351 | |||
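The smart-sync setup above takes credentials either from the -u/-p options or from ~/.netrc and splices them into the manifest server URL before the XML-RPC call. A minimal standalone sketch of that lookup (hypothetical helper name, not code from this commit):

    import netrc
    import urllib.parse

    def authed_manifest_url(manifest_server, username=None, password=None):
        # Mirror _SmartSyncSetup: only add credentials if the URL has none.
        if "@" in manifest_server:
            return manifest_server
        if not (username and password):
            try:
                info = netrc.netrc()
                host = urllib.parse.urlparse(manifest_server).hostname
                auth = info.authenticators(host) if host else None
                if auth:
                    username, _account, password = auth
            except (IOError, netrc.NetrcParseError):
                pass  # No usable .netrc; continue unauthenticated.
        if username and password:
            return manifest_server.replace(
                "://", "://%s:%s@" % (username, password), 1
            )
        return manifest_server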
1352 | def _UpdateAllManifestProjects(self, opt, mp, manifest_name): | ||
1353 | """Fetch & update the local manifest project. | ||
1354 | |||
1355 | After syncing the manifest project, if the manifest has any sub | ||
1356 | manifests, those are recursively processed. | ||
1357 | |||
1358 | Args: | ||
1359 | opt: Program options returned from optparse. See _Options(). | ||
1360 | mp: the manifestProject to query. | ||
1361 | manifest_name: Manifest file to be reloaded. | ||
1362 | """ | ||
1363 | if not mp.standalone_manifest_url: | ||
1364 | self._UpdateManifestProject(opt, mp, manifest_name) | ||
1365 | |||
1366 | if mp.manifest.submanifests: | ||
1367 | for submanifest in mp.manifest.submanifests.values(): | ||
1368 | child = submanifest.repo_client.manifest | ||
1369 | child.manifestProject.SyncWithPossibleInit( | ||
1370 | submanifest, | ||
1371 | current_branch_only=self._GetCurrentBranchOnly(opt, child), | ||
1372 | verbose=opt.verbose, | ||
1373 | tags=opt.tags, | ||
1374 | git_event_log=self.git_event_log, | ||
1375 | ) | ||
1376 | self._UpdateAllManifestProjects( | ||
1377 | opt, child.manifestProject, None | ||
1378 | ) | ||
1379 | |||
1380 | def _UpdateManifestProject(self, opt, mp, manifest_name): | ||
1381 | """Fetch & update the local manifest project. | ||
1382 | |||
1383 | Args: | ||
1384 | opt: Program options returned from optparse. See _Options(). | ||
1385 | mp: the manifestProject to query. | ||
1386 | manifest_name: Manifest file to be reloaded. | ||
1387 | """ | ||
1388 | if not opt.local_only: | ||
1389 | start = time.time() | ||
1390 | success = mp.Sync_NetworkHalf( | ||
1391 | quiet=opt.quiet, | ||
1392 | verbose=opt.verbose, | ||
1393 | current_branch_only=self._GetCurrentBranchOnly( | ||
1394 | opt, mp.manifest | ||
1395 | ), | ||
1396 | force_sync=opt.force_sync, | ||
1397 | tags=opt.tags, | ||
1398 | optimized_fetch=opt.optimized_fetch, | ||
1399 | retry_fetches=opt.retry_fetches, | ||
1400 | submodules=mp.manifest.HasSubmodules, | ||
1401 | clone_filter=mp.manifest.CloneFilter, | ||
1402 | partial_clone_exclude=mp.manifest.PartialCloneExclude, | ||
1403 | ) | ||
1404 | finish = time.time() | ||
1405 | self.event_log.AddSync( | ||
1406 | mp, event_log.TASK_SYNC_NETWORK, start, finish, success | ||
1407 | ) | ||
1408 | |||
1409 | if mp.HasChanges: | ||
1410 | syncbuf = SyncBuffer(mp.config) | ||
1411 | start = time.time() | ||
1412 | mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules) | ||
1413 | clean = syncbuf.Finish() | ||
1414 | self.event_log.AddSync( | ||
1415 | mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean | ||
1416 | ) | ||
1417 | if not clean: | ||
1418 | sys.exit(1) | ||
1419 | self._ReloadManifest(manifest_name, mp.manifest) | ||
1420 | |||
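_UpdateManifestProject above syncs the manifest project in two timed phases, a network fetch followed by a local checkout, and records each phase in the event log. A rough sketch of that bookkeeping pattern, with a plain list standing in for the event log (illustrative only):

    import time

    def sync_two_phase(project, events, network_half, local_half):
        start = time.time()
        ok = network_half(project)            # roughly Sync_NetworkHalf()
        events.append(("network", time.time() - start, ok))

        start = time.time()
        clean = local_half(project)           # roughly Sync_LocalHalf() + Finish()
        events.append(("local", time.time() - start, clean))
        return ok and clean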
1421 | def ValidateOptions(self, opt, args): | ||
1422 | if opt.force_broken: | ||
1423 | print( | ||
1424 | "warning: -f/--force-broken is now the default behavior, and " | ||
1425 | "the options are deprecated", | ||
1426 | file=sys.stderr, | ||
1427 | ) | ||
1428 | if opt.network_only and opt.detach_head: | ||
1429 | self.OptionParser.error("cannot combine -n and -d") | ||
1430 | if opt.network_only and opt.local_only: | ||
1431 | self.OptionParser.error("cannot combine -n and -l") | ||
1432 | if opt.manifest_name and opt.smart_sync: | ||
1433 | self.OptionParser.error("cannot combine -m and -s") | ||
1434 | if opt.manifest_name and opt.smart_tag: | ||
1435 | self.OptionParser.error("cannot combine -m and -t") | ||
1436 | if opt.manifest_server_username or opt.manifest_server_password: | ||
1437 | if not (opt.smart_sync or opt.smart_tag): | ||
1438 | self.OptionParser.error( | ||
1439 | "-u and -p may only be combined with -s or -t" | ||
1440 | ) | ||
1441 | if None in [ | ||
1442 | opt.manifest_server_username, | ||
1443 | opt.manifest_server_password, | ||
1444 | ]: | ||
1445 | self.OptionParser.error("both -u and -p must be given") | ||
1446 | |||
1447 | if opt.prune is None: | ||
1448 | opt.prune = True | ||
1449 | |||
1450 | if opt.auto_gc is None and _AUTO_GC: | ||
1451 | print( | ||
1452 | f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.", | ||
1453 | f"{_REPO_AUTO_GC} is deprecated and will be removed in a ", | ||
1454 | "future release. Use `--auto-gc` instead.", | ||
1455 | file=sys.stderr, | ||
1456 | ) | ||
1457 | opt.auto_gc = True | ||
1458 | |||
1459 | def _ValidateOptionsWithManifest(self, opt, mp): | ||
1460 | """Like ValidateOptions, but after we've updated the manifest. | ||
1461 | |||
1462 | Needed to handle sync-xxx option defaults in the manifest. | ||
1463 | |||
1464 | Args: | ||
1465 | opt: The options to process. | ||
1466 | mp: The manifest project to pull defaults from. | ||
1467 | """ | ||
1468 | if not opt.jobs: | ||
1469 | # If the user hasn't made a choice, use the manifest value. | ||
1470 | opt.jobs = mp.manifest.default.sync_j | ||
1471 | if opt.jobs: | ||
1472 | # If --jobs has a non-default value, propagate it as the default for | ||
1473 | # --jobs-xxx flags too. | ||
1474 | if not opt.jobs_network: | ||
1475 | opt.jobs_network = opt.jobs | ||
1476 | if not opt.jobs_checkout: | ||
1477 | opt.jobs_checkout = opt.jobs | ||
1478 | else: | ||
1479 | # Neither user nor manifest have made a choice, so setup defaults. | ||
1480 | if not opt.jobs_network: | ||
1481 | opt.jobs_network = 1 | ||
1482 | if not opt.jobs_checkout: | ||
1483 | opt.jobs_checkout = DEFAULT_LOCAL_JOBS | ||
1484 | opt.jobs = os.cpu_count() | ||
1485 | |||
1486 | # Try to stay under user rlimit settings. | ||
1487 | # | ||
1488 | # Since each worker requires at 3 file descriptors to run `git fetch`, | ||
1489 | # use that to scale down the number of jobs. Unfortunately there isn't | ||
1490 | # an easy way to determine this reliably as systems change, but it was | ||
1491 | # last measured by hand in 2011. | ||
1492 | soft_limit, _ = _rlimit_nofile() | ||
1493 | jobs_soft_limit = max(1, (soft_limit - 5) // 3) | ||
1494 | opt.jobs = min(opt.jobs, jobs_soft_limit) | ||
1495 | opt.jobs_network = min(opt.jobs_network, jobs_soft_limit) | ||
1496 | opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit) | ||
1497 | |||
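The file-descriptor cap computed above assumes roughly three descriptors per `git fetch` worker plus a small reserve. A worked example of the same arithmetic (standalone, POSIX-only):

    import resource

    # With a soft RLIMIT_NOFILE of 1024: (1024 - 5) // 3 = 339 jobs at most,
    # and never fewer than one.
    soft_limit, _hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    jobs_soft_limit = max(1, (soft_limit - 5) // 3)
    print(f"soft fd limit {soft_limit} -> cap of {jobs_soft_limit} parallel jobs")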
1498 | def Execute(self, opt, args): | ||
1499 | manifest = self.outer_manifest | ||
1500 | if not opt.outer_manifest: | ||
1501 | manifest = self.manifest | ||
1502 | |||
1503 | if opt.manifest_name: | ||
1504 | manifest.Override(opt.manifest_name) | ||
1505 | |||
1506 | manifest_name = opt.manifest_name | ||
1507 | smart_sync_manifest_path = os.path.join( | ||
1508 | manifest.manifestProject.worktree, "smart_sync_override.xml" | ||
1509 | ) | ||
915 | 1510 | ||
916 | Args: | 1511 | if opt.clone_bundle is None: |
917 | opt: Program options returned from optparse. See _Options(). | 1512 | opt.clone_bundle = manifest.CloneBundle |
918 | manifest: The manifest to use. | ||
919 | 1513 | ||
920 | Returns: | 1514 | if opt.smart_sync or opt.smart_tag: |
921 | 0: success | 1515 | manifest_name = self._SmartSyncSetup( |
922 | 1: failure | 1516 | opt, smart_sync_manifest_path, manifest |
923 | """ | 1517 | ) |
924 | new_project_paths = [] | ||
925 | for project in self.GetProjects(None, missing_ok=True, manifest=manifest, | ||
926 | all_manifests=False): | ||
927 | if project.relpath: | ||
928 | new_project_paths.append(project.relpath) | ||
929 | file_name = 'project.list' | ||
930 | file_path = os.path.join(manifest.subdir, file_name) | ||
931 | old_project_paths = [] | ||
932 | |||
933 | if os.path.exists(file_path): | ||
934 | with open(file_path, 'r') as fd: | ||
935 | old_project_paths = fd.read().split('\n') | ||
936 | # In reversed order, so subfolders are deleted before parent folder. | ||
937 | for path in sorted(old_project_paths, reverse=True): | ||
938 | if not path: | ||
939 | continue | ||
940 | if path not in new_project_paths: | ||
941 | # If the path has already been deleted, we don't need to do it | ||
942 | gitdir = os.path.join(manifest.topdir, path, '.git') | ||
943 | if os.path.exists(gitdir): | ||
944 | project = Project( | ||
945 | manifest=manifest, | ||
946 | name=path, | ||
947 | remote=RemoteSpec('origin'), | ||
948 | gitdir=gitdir, | ||
949 | objdir=gitdir, | ||
950 | use_git_worktrees=os.path.isfile(gitdir), | ||
951 | worktree=os.path.join(manifest.topdir, path), | ||
952 | relpath=path, | ||
953 | revisionExpr='HEAD', | ||
954 | revisionId=None, | ||
955 | groups=None) | ||
956 | if not project.DeleteWorktree( | ||
957 | quiet=opt.quiet, | ||
958 | force=opt.force_remove_dirty): | ||
959 | return 1 | ||
960 | |||
961 | new_project_paths.sort() | ||
962 | with open(file_path, 'w') as fd: | ||
963 | fd.write('\n'.join(new_project_paths)) | ||
964 | fd.write('\n') | ||
965 | return 0 | ||
966 | |||
967 | def UpdateCopyLinkfileList(self, manifest): | ||
968 | """Save all dests of copyfile and linkfile, and update them if needed. | ||
969 | |||
970 | Returns: | ||
971 | Whether update was successful. | ||
972 | """ | ||
973 | new_paths = {} | ||
974 | new_linkfile_paths = [] | ||
975 | new_copyfile_paths = [] | ||
976 | for project in self.GetProjects(None, missing_ok=True, | ||
977 | manifest=manifest, all_manifests=False): | ||
978 | new_linkfile_paths.extend(x.dest for x in project.linkfiles) | ||
979 | new_copyfile_paths.extend(x.dest for x in project.copyfiles) | ||
980 | |||
981 | new_paths = { | ||
982 | 'linkfile': new_linkfile_paths, | ||
983 | 'copyfile': new_copyfile_paths, | ||
984 | } | ||
985 | |||
986 | copylinkfile_name = 'copy-link-files.json' | ||
987 | copylinkfile_path = os.path.join(manifest.subdir, copylinkfile_name) | ||
988 | old_copylinkfile_paths = {} | ||
989 | |||
990 | if os.path.exists(copylinkfile_path): | ||
991 | with open(copylinkfile_path, 'rb') as fp: | ||
992 | try: | ||
993 | old_copylinkfile_paths = json.load(fp) | ||
994 | except Exception: | ||
995 | print('error: %s is not a json formatted file.' % | ||
996 | copylinkfile_path, file=sys.stderr) | ||
997 | platform_utils.remove(copylinkfile_path) | ||
998 | return False | ||
999 | |||
1000 | need_remove_files = [] | ||
1001 | need_remove_files.extend( | ||
1002 | set(old_copylinkfile_paths.get('linkfile', [])) - | ||
1003 | set(new_linkfile_paths)) | ||
1004 | need_remove_files.extend( | ||
1005 | set(old_copylinkfile_paths.get('copyfile', [])) - | ||
1006 | set(new_copyfile_paths)) | ||
1007 | |||
1008 | for need_remove_file in need_remove_files: | ||
1009 | # Try to remove the updated copyfile or linkfile. | ||
1010 | # So, if the file is not exist, nothing need to do. | ||
1011 | platform_utils.remove(need_remove_file, missing_ok=True) | ||
1012 | |||
1013 | # Create copy-link-files.json, save dest path of "copyfile" and "linkfile". | ||
1014 | with open(copylinkfile_path, 'w', encoding='utf-8') as fp: | ||
1015 | json.dump(new_paths, fp) | ||
1016 | return True | ||
1017 | |||
1018 | def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest): | ||
1019 | if not manifest.manifest_server: | ||
1020 | print('error: cannot smart sync: no manifest server defined in ' | ||
1021 | 'manifest', file=sys.stderr) | ||
1022 | sys.exit(1) | ||
1023 | |||
1024 | manifest_server = manifest.manifest_server | ||
1025 | if not opt.quiet: | ||
1026 | print('Using manifest server %s' % manifest_server) | ||
1027 | |||
1028 | if '@' not in manifest_server: | ||
1029 | username = None | ||
1030 | password = None | ||
1031 | if opt.manifest_server_username and opt.manifest_server_password: | ||
1032 | username = opt.manifest_server_username | ||
1033 | password = opt.manifest_server_password | ||
1034 | else: | ||
1035 | try: | ||
1036 | info = netrc.netrc() | ||
1037 | except IOError: | ||
1038 | # .netrc file does not exist or could not be opened | ||
1039 | pass | ||
1040 | else: | 1518 | else: |
1041 | try: | 1519 | if os.path.isfile(smart_sync_manifest_path): |
1042 | parse_result = urllib.parse.urlparse(manifest_server) | 1520 | try: |
1043 | if parse_result.hostname: | 1521 | platform_utils.remove(smart_sync_manifest_path) |
1044 | auth = info.authenticators(parse_result.hostname) | 1522 | except OSError as e: |
1045 | if auth: | 1523 | print( |
1046 | username, _account, password = auth | 1524 | "error: failed to remove existing smart sync override " |
1047 | else: | 1525 | "manifest: %s" % e, |
1048 | print('No credentials found for %s in .netrc' | 1526 | file=sys.stderr, |
1049 | % parse_result.hostname, file=sys.stderr) | 1527 | ) |
1050 | except netrc.NetrcParseError as e: | 1528 | |
1051 | print('Error parsing .netrc file: %s' % e, file=sys.stderr) | 1529 | err_event = multiprocessing.Event() |
1052 | 1530 | ||
1053 | if (username and password): | 1531 | rp = manifest.repoProject |
1054 | manifest_server = manifest_server.replace('://', '://%s:%s@' % | 1532 | rp.PreSync() |
1055 | (username, password), | 1533 | cb = rp.CurrentBranch |
1056 | 1) | 1534 | if cb: |
1057 | 1535 | base = rp.GetBranch(cb).merge | |
1058 | transport = PersistentTransport(manifest_server) | 1536 | if not base or not base.startswith("refs/heads/"): |
1059 | if manifest_server.startswith('persistent-'): | 1537 | print( |
1060 | manifest_server = manifest_server[len('persistent-'):] | 1538 | "warning: repo is not tracking a remote branch, so it will " |
1061 | 1539 | "not receive updates; run `repo init --repo-rev=stable` to " | |
1062 | try: | 1540 | "fix.", |
1063 | server = xmlrpc.client.Server(manifest_server, transport=transport) | 1541 | file=sys.stderr, |
1064 | if opt.smart_sync: | 1542 | ) |
1065 | branch = self._GetBranch(manifest.manifestProject) | 1543 | |
1066 | 1544 | for m in self.ManifestList(opt): | |
1067 | if 'SYNC_TARGET' in os.environ: | 1545 | if not m.manifestProject.standalone_manifest_url: |
1068 | target = os.environ['SYNC_TARGET'] | 1546 | m.manifestProject.PreSync() |
1069 | [success, manifest_str] = server.GetApprovedManifest(branch, target) | 1547 | |
1070 | elif ('TARGET_PRODUCT' in os.environ and | 1548 | if opt.repo_upgraded: |
1071 | 'TARGET_BUILD_VARIANT' in os.environ): | 1549 | _PostRepoUpgrade(manifest, quiet=opt.quiet) |
1072 | target = '%s-%s' % (os.environ['TARGET_PRODUCT'], | 1550 | |
1073 | os.environ['TARGET_BUILD_VARIANT']) | 1551 | mp = manifest.manifestProject |
1074 | [success, manifest_str] = server.GetApprovedManifest(branch, target) | 1552 | if opt.mp_update: |
1553 | self._UpdateAllManifestProjects(opt, mp, manifest_name) | ||
1075 | else: | 1554 | else: |
1076 | [success, manifest_str] = server.GetApprovedManifest(branch) | 1555 | print("Skipping update of local manifest project.") |
1077 | else: | ||
1078 | assert(opt.smart_tag) | ||
1079 | [success, manifest_str] = server.GetManifest(opt.smart_tag) | ||
1080 | 1556 | ||
1081 | if success: | 1557 | # Now that the manifests are up-to-date, setup options whose defaults |
1082 | manifest_name = os.path.basename(smart_sync_manifest_path) | 1558 | # might be in the manifest. |
1083 | try: | 1559 | self._ValidateOptionsWithManifest(opt, mp) |
1084 | with open(smart_sync_manifest_path, 'w') as f: | 1560 | |
1085 | f.write(manifest_str) | 1561 | superproject_logging_data = {} |
1086 | except IOError as e: | 1562 | self._UpdateProjectsRevisionId( |
1087 | print('error: cannot write manifest to %s:\n%s' | 1563 | opt, args, superproject_logging_data, manifest |
1088 | % (smart_sync_manifest_path, e), | ||
1089 | file=sys.stderr) | ||
1090 | sys.exit(1) | ||
1091 | self._ReloadManifest(manifest_name, manifest) | ||
1092 | else: | ||
1093 | print('error: manifest server RPC call failed: %s' % | ||
1094 | manifest_str, file=sys.stderr) | ||
1095 | sys.exit(1) | ||
1096 | except (socket.error, IOError, xmlrpc.client.Fault) as e: | ||
1097 | print('error: cannot connect to manifest server %s:\n%s' | ||
1098 | % (manifest.manifest_server, e), file=sys.stderr) | ||
1099 | sys.exit(1) | ||
1100 | except xmlrpc.client.ProtocolError as e: | ||
1101 | print('error: cannot connect to manifest server %s:\n%d %s' | ||
1102 | % (manifest.manifest_server, e.errcode, e.errmsg), | ||
1103 | file=sys.stderr) | ||
1104 | sys.exit(1) | ||
1105 | |||
1106 | return manifest_name | ||
1107 | |||
1108 | def _UpdateAllManifestProjects(self, opt, mp, manifest_name): | ||
1109 | """Fetch & update the local manifest project. | ||
1110 | |||
1111 | After syncing the manifest project, if the manifest has any sub manifests, | ||
1112 | those are recursively processed. | ||
1113 | |||
1114 | Args: | ||
1115 | opt: Program options returned from optparse. See _Options(). | ||
1116 | mp: the manifestProject to query. | ||
1117 | manifest_name: Manifest file to be reloaded. | ||
1118 | """ | ||
1119 | if not mp.standalone_manifest_url: | ||
1120 | self._UpdateManifestProject(opt, mp, manifest_name) | ||
1121 | |||
1122 | if mp.manifest.submanifests: | ||
1123 | for submanifest in mp.manifest.submanifests.values(): | ||
1124 | child = submanifest.repo_client.manifest | ||
1125 | child.manifestProject.SyncWithPossibleInit( | ||
1126 | submanifest, | ||
1127 | current_branch_only=self._GetCurrentBranchOnly(opt, child), | ||
1128 | verbose=opt.verbose, | ||
1129 | tags=opt.tags, | ||
1130 | git_event_log=self.git_event_log, | ||
1131 | ) | 1564 | ) |
1132 | self._UpdateAllManifestProjects(opt, child.manifestProject, None) | ||
1133 | 1565 | ||
1134 | def _UpdateManifestProject(self, opt, mp, manifest_name): | 1566 | if self.gitc_manifest: |
1135 | """Fetch & update the local manifest project. | 1567 | gitc_manifest_projects = self.GetProjects(args, missing_ok=True) |
1568 | gitc_projects = [] | ||
1569 | opened_projects = [] | ||
1570 | for project in gitc_manifest_projects: | ||
1571 | if ( | ||
1572 | project.relpath in self.gitc_manifest.paths | ||
1573 | and self.gitc_manifest.paths[project.relpath].old_revision | ||
1574 | ): | ||
1575 | opened_projects.append(project.relpath) | ||
1576 | else: | ||
1577 | gitc_projects.append(project.relpath) | ||
1578 | |||
1579 | if not args: | ||
1580 | gitc_projects = None | ||
1581 | |||
1582 | if gitc_projects != [] and not opt.local_only: | ||
1583 | print( | ||
1584 | "Updating GITC client: %s" | ||
1585 | % self.gitc_manifest.gitc_client_name | ||
1586 | ) | ||
1587 | manifest = GitcManifest( | ||
1588 | self.repodir, self.gitc_manifest.gitc_client_name | ||
1589 | ) | ||
1590 | if manifest_name: | ||
1591 | manifest.Override(manifest_name) | ||
1592 | else: | ||
1593 | manifest.Override(manifest.manifestFile) | ||
1594 | gitc_utils.generate_gitc_manifest( | ||
1595 | self.gitc_manifest, manifest, gitc_projects | ||
1596 | ) | ||
1597 | print("GITC client successfully synced.") | ||
1598 | |||
1599 | # The opened projects need to be synced as normal, therefore we | ||
1600 | # generate a new args list to represent the opened projects. | ||
1601 | # TODO: make this more reliable -- if there's a project name/path | ||
1602 | # overlap, this may choose the wrong project. | ||
1603 | args = [ | ||
1604 | os.path.relpath(manifest.paths[path].worktree, os.getcwd()) | ||
1605 | for path in opened_projects | ||
1606 | ] | ||
1607 | if not args: | ||
1608 | return | ||
1609 | |||
1610 | all_projects = self.GetProjects( | ||
1611 | args, | ||
1612 | missing_ok=True, | ||
1613 | submodules_ok=opt.fetch_submodules, | ||
1614 | manifest=manifest, | ||
1615 | all_manifests=not opt.this_manifest_only, | ||
1616 | ) | ||
1136 | 1617 | ||
1137 | Args: | 1618 | err_network_sync = False |
1138 | opt: Program options returned from optparse. See _Options(). | 1619 | err_update_projects = False |
1139 | mp: the manifestProject to query. | 1620 | err_update_linkfiles = False |
1140 | manifest_name: Manifest file to be reloaded. | 1621 | |
1141 | """ | 1622 | self._fetch_times = _FetchTimes(manifest) |
1142 | if not opt.local_only: | 1623 | if not opt.local_only: |
1143 | start = time.time() | 1624 | with multiprocessing.Manager() as manager: |
1144 | success = mp.Sync_NetworkHalf(quiet=opt.quiet, verbose=opt.verbose, | 1625 | with ssh.ProxyManager(manager) as ssh_proxy: |
1145 | current_branch_only=self._GetCurrentBranchOnly(opt, mp.manifest), | 1626 | # Initialize the socket dir once in the parent. |
1146 | force_sync=opt.force_sync, | 1627 | ssh_proxy.sock() |
1147 | tags=opt.tags, | 1628 | result = self._FetchMain( |
1148 | optimized_fetch=opt.optimized_fetch, | 1629 | opt, args, all_projects, err_event, ssh_proxy, manifest |
1149 | retry_fetches=opt.retry_fetches, | 1630 | ) |
1150 | submodules=mp.manifest.HasSubmodules, | 1631 | all_projects = result.all_projects |
1151 | clone_filter=mp.manifest.CloneFilter, | 1632 | |
1152 | partial_clone_exclude=mp.manifest.PartialCloneExclude) | 1633 | if opt.network_only: |
1153 | finish = time.time() | 1634 | return |
1154 | self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK, | 1635 | |
1155 | start, finish, success) | 1636 | # If we saw an error, exit with code 1 so that other scripts can |
1156 | 1637 | # check. | |
1157 | if mp.HasChanges: | 1638 | if err_event.is_set(): |
1158 | syncbuf = SyncBuffer(mp.config) | 1639 | err_network_sync = True |
1159 | start = time.time() | 1640 | if opt.fail_fast: |
1160 | mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules) | 1641 | print( |
1161 | clean = syncbuf.Finish() | 1642 | "\nerror: Exited sync due to fetch errors.\n" |
1162 | self.event_log.AddSync(mp, event_log.TASK_SYNC_LOCAL, | 1643 | "Local checkouts *not* updated. Resolve network issues " |
1163 | start, time.time(), clean) | 1644 | "& retry.\n" |
1164 | if not clean: | 1645 | "`repo sync -l` will update some local checkouts.", |
1165 | sys.exit(1) | 1646 | file=sys.stderr, |
1166 | self._ReloadManifest(manifest_name, mp.manifest) | 1647 | ) |
1167 | 1648 | sys.exit(1) | |
1168 | def ValidateOptions(self, opt, args): | 1649 | |
1169 | if opt.force_broken: | 1650 | for m in self.ManifestList(opt): |
1170 | print('warning: -f/--force-broken is now the default behavior, and the ' | 1651 | if m.IsMirror or m.IsArchive: |
1171 | 'options are deprecated', file=sys.stderr) | 1652 | # Bail out now, we have no working tree. |
1172 | if opt.network_only and opt.detach_head: | 1653 | continue |
1173 | self.OptionParser.error('cannot combine -n and -d') | 1654 | |
1174 | if opt.network_only and opt.local_only: | 1655 | if self.UpdateProjectList(opt, m): |
1175 | self.OptionParser.error('cannot combine -n and -l') | 1656 | err_event.set() |
1176 | if opt.manifest_name and opt.smart_sync: | 1657 | err_update_projects = True |
1177 | self.OptionParser.error('cannot combine -m and -s') | 1658 | if opt.fail_fast: |
1178 | if opt.manifest_name and opt.smart_tag: | 1659 | print( |
1179 | self.OptionParser.error('cannot combine -m and -t') | 1660 | "\nerror: Local checkouts *not* updated.", |
1180 | if opt.manifest_server_username or opt.manifest_server_password: | 1661 | file=sys.stderr, |
1181 | if not (opt.smart_sync or opt.smart_tag): | 1662 | ) |
1182 | self.OptionParser.error('-u and -p may only be combined with -s or -t') | 1663 | sys.exit(1) |
1183 | if None in [opt.manifest_server_username, opt.manifest_server_password]: | 1664 | |
1184 | self.OptionParser.error('both -u and -p must be given') | 1665 | err_update_linkfiles = not self.UpdateCopyLinkfileList(m) |
1185 | 1666 | if err_update_linkfiles: | |
1186 | if opt.prune is None: | 1667 | err_event.set() |
1187 | opt.prune = True | 1668 | if opt.fail_fast: |
1188 | 1669 | print( | |
1189 | if opt.auto_gc is None and _AUTO_GC: | 1670 | "\nerror: Local update copyfile or linkfile failed.", |
1190 | print(f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.", | 1671 | file=sys.stderr, |
1191 | f'{_REPO_AUTO_GC} is deprecated and will be removed in a future', | 1672 | ) |
1192 | 'release. Use `--auto-gc` instead.', file=sys.stderr) | 1673 | sys.exit(1) |
1193 | opt.auto_gc = True | 1674 | |
1194 | 1675 | err_results = [] | |
1195 | def _ValidateOptionsWithManifest(self, opt, mp): | 1676 | # NB: We don't exit here because this is the last step. |
1196 | """Like ValidateOptions, but after we've updated the manifest. | 1677 | err_checkout = not self._Checkout(all_projects, opt, err_results) |
1197 | 1678 | if err_checkout: | |
1198 | Needed to handle sync-xxx option defaults in the manifest. | 1679 | err_event.set() |
1199 | 1680 | ||
1200 | Args: | 1681 | printed_notices = set() |
1201 | opt: The options to process. | 1682 | # If there's a notice that's supposed to print at the end of the sync, |
1202 | mp: The manifest project to pull defaults from. | 1683 | # print it now... But avoid printing duplicate messages, and preserve |
1203 | """ | 1684 | # order. |
1204 | if not opt.jobs: | 1685 | for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix): |
1205 | # If the user hasn't made a choice, use the manifest value. | 1686 | if m.notice and m.notice not in printed_notices: |
1206 | opt.jobs = mp.manifest.default.sync_j | 1687 | print(m.notice) |
1207 | if opt.jobs: | 1688 | printed_notices.add(m.notice) |
1208 | # If --jobs has a non-default value, propagate it as the default for | 1689 | |
1209 | # --jobs-xxx flags too. | 1690 | # If we saw an error, exit with code 1 so that other scripts can check. |
1210 | if not opt.jobs_network: | 1691 | if err_event.is_set(): |
1211 | opt.jobs_network = opt.jobs | 1692 | print("\nerror: Unable to fully sync the tree.", file=sys.stderr) |
1212 | if not opt.jobs_checkout: | 1693 | if err_network_sync: |
1213 | opt.jobs_checkout = opt.jobs | 1694 | print( |
1214 | else: | 1695 | "error: Downloading network changes failed.", |
1215 | # Neither user nor manifest have made a choice, so setup defaults. | 1696 | file=sys.stderr, |
1216 | if not opt.jobs_network: | 1697 | ) |
1217 | opt.jobs_network = 1 | 1698 | if err_update_projects: |
1218 | if not opt.jobs_checkout: | 1699 | print( |
1219 | opt.jobs_checkout = DEFAULT_LOCAL_JOBS | 1700 | "error: Updating local project lists failed.", |
1220 | opt.jobs = os.cpu_count() | 1701 | file=sys.stderr, |
1221 | 1702 | ) | |
1222 | # Try to stay under user rlimit settings. | 1703 | if err_update_linkfiles: |
1223 | # | 1704 | print( |
1224 | # Since each worker requires at 3 file descriptors to run `git fetch`, use | 1705 | "error: Updating copyfiles or linkfiles failed.", |
1225 | # that to scale down the number of jobs. Unfortunately there isn't an easy | 1706 | file=sys.stderr, |
1226 | # way to determine this reliably as systems change, but it was last measured | 1707 | ) |
1227 | # by hand in 2011. | 1708 | if err_checkout: |
1228 | soft_limit, _ = _rlimit_nofile() | 1709 | print( |
1229 | jobs_soft_limit = max(1, (soft_limit - 5) // 3) | 1710 | "error: Checking out local projects failed.", |
1230 | opt.jobs = min(opt.jobs, jobs_soft_limit) | 1711 | file=sys.stderr, |
1231 | opt.jobs_network = min(opt.jobs_network, jobs_soft_limit) | 1712 | ) |
1232 | opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit) | 1713 | if err_results: |
1233 | 1714 | print( | |
1234 | def Execute(self, opt, args): | 1715 | "Failing repos:\n%s" % "\n".join(err_results), |
1235 | manifest = self.outer_manifest | 1716 | file=sys.stderr, |
1236 | if not opt.outer_manifest: | 1717 | ) |
1237 | manifest = self.manifest | 1718 | print( |
1238 | 1719 | 'Try re-running with "-j1 --fail-fast" to exit at the first ' | |
1239 | if opt.manifest_name: | 1720 | "error.", |
1240 | manifest.Override(opt.manifest_name) | 1721 | file=sys.stderr, |
1241 | 1722 | ) | |
1242 | manifest_name = opt.manifest_name | 1723 | sys.exit(1) |
1243 | smart_sync_manifest_path = os.path.join( | 1724 | |
1244 | manifest.manifestProject.worktree, 'smart_sync_override.xml') | 1725 | # Log the previous sync analysis state from the config. |
1245 | 1726 | self.git_event_log.LogDataConfigEvents( | |
1246 | if opt.clone_bundle is None: | 1727 | mp.config.GetSyncAnalysisStateData(), "previous_sync_state" |
1247 | opt.clone_bundle = manifest.CloneBundle | 1728 | ) |
1248 | |||
1249 | if opt.smart_sync or opt.smart_tag: | ||
1250 | manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path, manifest) | ||
1251 | else: | ||
1252 | if os.path.isfile(smart_sync_manifest_path): | ||
1253 | try: | ||
1254 | platform_utils.remove(smart_sync_manifest_path) | ||
1255 | except OSError as e: | ||
1256 | print('error: failed to remove existing smart sync override manifest: %s' % | ||
1257 | e, file=sys.stderr) | ||
1258 | |||
1259 | err_event = multiprocessing.Event() | ||
1260 | |||
1261 | rp = manifest.repoProject | ||
1262 | rp.PreSync() | ||
1263 | cb = rp.CurrentBranch | ||
1264 | if cb: | ||
1265 | base = rp.GetBranch(cb).merge | ||
1266 | if not base or not base.startswith('refs/heads/'): | ||
1267 | print('warning: repo is not tracking a remote branch, so it will not ' | ||
1268 | 'receive updates; run `repo init --repo-rev=stable` to fix.', | ||
1269 | file=sys.stderr) | ||
1270 | |||
1271 | for m in self.ManifestList(opt): | ||
1272 | if not m.manifestProject.standalone_manifest_url: | ||
1273 | m.manifestProject.PreSync() | ||
1274 | |||
1275 | if opt.repo_upgraded: | ||
1276 | _PostRepoUpgrade(manifest, quiet=opt.quiet) | ||
1277 | |||
1278 | mp = manifest.manifestProject | ||
1279 | if opt.mp_update: | ||
1280 | self._UpdateAllManifestProjects(opt, mp, manifest_name) | ||
1281 | else: | ||
1282 | print('Skipping update of local manifest project.') | ||
1283 | |||
1284 | # Now that the manifests are up-to-date, setup options whose defaults might | ||
1285 | # be in the manifest. | ||
1286 | self._ValidateOptionsWithManifest(opt, mp) | ||
1287 | |||
1288 | superproject_logging_data = {} | ||
1289 | self._UpdateProjectsRevisionId(opt, args, superproject_logging_data, | ||
1290 | manifest) | ||
1291 | |||
1292 | if self.gitc_manifest: | ||
1293 | gitc_manifest_projects = self.GetProjects(args, missing_ok=True) | ||
1294 | gitc_projects = [] | ||
1295 | opened_projects = [] | ||
1296 | for project in gitc_manifest_projects: | ||
1297 | if project.relpath in self.gitc_manifest.paths and \ | ||
1298 | self.gitc_manifest.paths[project.relpath].old_revision: | ||
1299 | opened_projects.append(project.relpath) | ||
1300 | else: | ||
1301 | gitc_projects.append(project.relpath) | ||
1302 | 1729 | ||
1303 | if not args: | 1730 | # Update and log with the new sync analysis state. |
1304 | gitc_projects = None | 1731 | mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data) |
1732 | self.git_event_log.LogDataConfigEvents( | ||
1733 | mp.config.GetSyncAnalysisStateData(), "current_sync_state" | ||
1734 | ) | ||
1305 | 1735 | ||
1306 | if gitc_projects != [] and not opt.local_only: | 1736 | if not opt.quiet: |
1307 | print('Updating GITC client: %s' % self.gitc_manifest.gitc_client_name) | 1737 | print("repo sync has finished successfully.") |
1308 | manifest = GitcManifest(self.repodir, self.gitc_manifest.gitc_client_name) | ||
1309 | if manifest_name: | ||
1310 | manifest.Override(manifest_name) | ||
1311 | else: | ||
1312 | manifest.Override(manifest.manifestFile) | ||
1313 | gitc_utils.generate_gitc_manifest(self.gitc_manifest, | ||
1314 | manifest, | ||
1315 | gitc_projects) | ||
1316 | print('GITC client successfully synced.') | ||
1317 | |||
1318 | # The opened projects need to be synced as normal, therefore we | ||
1319 | # generate a new args list to represent the opened projects. | ||
1320 | # TODO: make this more reliable -- if there's a project name/path overlap, | ||
1321 | # this may choose the wrong project. | ||
1322 | args = [os.path.relpath(manifest.paths[path].worktree, os.getcwd()) | ||
1323 | for path in opened_projects] | ||
1324 | if not args: | ||
1325 | return | ||
1326 | |||
1327 | all_projects = self.GetProjects(args, | ||
1328 | missing_ok=True, | ||
1329 | submodules_ok=opt.fetch_submodules, | ||
1330 | manifest=manifest, | ||
1331 | all_manifests=not opt.this_manifest_only) | ||
1332 | |||
1333 | err_network_sync = False | ||
1334 | err_update_projects = False | ||
1335 | err_update_linkfiles = False | ||
1336 | |||
1337 | self._fetch_times = _FetchTimes(manifest) | ||
1338 | if not opt.local_only: | ||
1339 | with multiprocessing.Manager() as manager: | ||
1340 | with ssh.ProxyManager(manager) as ssh_proxy: | ||
1341 | # Initialize the socket dir once in the parent. | ||
1342 | ssh_proxy.sock() | ||
1343 | result = self._FetchMain(opt, args, all_projects, err_event, | ||
1344 | ssh_proxy, manifest) | ||
1345 | all_projects = result.all_projects | ||
1346 | |||
1347 | if opt.network_only: | ||
1348 | return | ||
1349 | |||
1350 | # If we saw an error, exit with code 1 so that other scripts can check. | ||
1351 | if err_event.is_set(): | ||
1352 | err_network_sync = True | ||
1353 | if opt.fail_fast: | ||
1354 | print('\nerror: Exited sync due to fetch errors.\n' | ||
1355 | 'Local checkouts *not* updated. Resolve network issues & ' | ||
1356 | 'retry.\n' | ||
1357 | '`repo sync -l` will update some local checkouts.', | ||
1358 | file=sys.stderr) | ||
1359 | sys.exit(1) | ||
1360 | |||
1361 | for m in self.ManifestList(opt): | ||
1362 | if m.IsMirror or m.IsArchive: | ||
1363 | # bail out now, we have no working tree | ||
1364 | continue | ||
1365 | |||
1366 | if self.UpdateProjectList(opt, m): | ||
1367 | err_event.set() | ||
1368 | err_update_projects = True | ||
1369 | if opt.fail_fast: | ||
1370 | print('\nerror: Local checkouts *not* updated.', file=sys.stderr) | ||
1371 | sys.exit(1) | ||
1372 | |||
1373 | err_update_linkfiles = not self.UpdateCopyLinkfileList(m) | ||
1374 | if err_update_linkfiles: | ||
1375 | err_event.set() | ||
1376 | if opt.fail_fast: | ||
1377 | print('\nerror: Local update copyfile or linkfile failed.', file=sys.stderr) | ||
1378 | sys.exit(1) | ||
1379 | |||
1380 | err_results = [] | ||
1381 | # NB: We don't exit here because this is the last step. | ||
1382 | err_checkout = not self._Checkout(all_projects, opt, err_results) | ||
1383 | if err_checkout: | ||
1384 | err_event.set() | ||
1385 | |||
1386 | printed_notices = set() | ||
1387 | # If there's a notice that's supposed to print at the end of the sync, | ||
1388 | # print it now... But avoid printing duplicate messages, and preserve | ||
1389 | # order. | ||
1390 | for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix): | ||
1391 | if m.notice and m.notice not in printed_notices: | ||
1392 | print(m.notice) | ||
1393 | printed_notices.add(m.notice) | ||
1394 | |||
1395 | # If we saw an error, exit with code 1 so that other scripts can check. | ||
1396 | if err_event.is_set(): | ||
1397 | print('\nerror: Unable to fully sync the tree.', file=sys.stderr) | ||
1398 | if err_network_sync: | ||
1399 | print('error: Downloading network changes failed.', file=sys.stderr) | ||
1400 | if err_update_projects: | ||
1401 | print('error: Updating local project lists failed.', file=sys.stderr) | ||
1402 | if err_update_linkfiles: | ||
1403 | print('error: Updating copyfiles or linkfiles failed.', file=sys.stderr) | ||
1404 | if err_checkout: | ||
1405 | print('error: Checking out local projects failed.', file=sys.stderr) | ||
1406 | if err_results: | ||
1407 | print('Failing repos:\n%s' % '\n'.join(err_results), file=sys.stderr) | ||
1408 | print('Try re-running with "-j1 --fail-fast" to exit at the first error.', | ||
1409 | file=sys.stderr) | ||
1410 | sys.exit(1) | ||
1411 | |||
1412 | # Log the previous sync analysis state from the config. | ||
1413 | self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(), | ||
1414 | 'previous_sync_state') | ||
1415 | |||
1416 | # Update and log with the new sync analysis state. | ||
1417 | mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data) | ||
1418 | self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(), | ||
1419 | 'current_sync_state') | ||
1420 | |||
1421 | if not opt.quiet: | ||
1422 | print('repo sync has finished successfully.') | ||
1423 | 1738 | ||
1424 | 1739 | ||
1425 | def _PostRepoUpgrade(manifest, quiet=False): | 1740 | def _PostRepoUpgrade(manifest, quiet=False): |
1426 | # Link the docs for the internal .repo/ layout for people | 1741 | # Link the docs for the internal .repo/ layout for people. |
1427 | link = os.path.join(manifest.repodir, 'internal-fs-layout.md') | 1742 | link = os.path.join(manifest.repodir, "internal-fs-layout.md") |
1428 | if not platform_utils.islink(link): | 1743 | if not platform_utils.islink(link): |
1429 | target = os.path.join('repo', 'docs', 'internal-fs-layout.md') | 1744 | target = os.path.join("repo", "docs", "internal-fs-layout.md") |
1430 | try: | 1745 | try: |
1431 | platform_utils.symlink(target, link) | 1746 | platform_utils.symlink(target, link) |
1432 | except Exception: | 1747 | except Exception: |
1433 | pass | 1748 | pass |
1434 | 1749 | ||
1435 | wrapper = Wrapper() | 1750 | wrapper = Wrapper() |
1436 | if wrapper.NeedSetupGnuPG(): | 1751 | if wrapper.NeedSetupGnuPG(): |
1437 | wrapper.SetupGnuPG(quiet) | 1752 | wrapper.SetupGnuPG(quiet) |
1438 | for project in manifest.projects: | 1753 | for project in manifest.projects: |
1439 | if project.Exists: | 1754 | if project.Exists: |
1440 | project.PostRepoUpgrade() | 1755 | project.PostRepoUpgrade() |
1441 | 1756 | ||
1442 | 1757 | ||
1443 | def _PostRepoFetch(rp, repo_verify=True, verbose=False): | 1758 | def _PostRepoFetch(rp, repo_verify=True, verbose=False): |
1444 | if rp.HasChanges: | 1759 | if rp.HasChanges: |
1445 | print('info: A new version of repo is available', file=sys.stderr) | 1760 | print("info: A new version of repo is available", file=sys.stderr) |
1446 | wrapper = Wrapper() | 1761 | wrapper = Wrapper() |
1447 | try: | 1762 | try: |
1448 | rev = rp.bare_git.describe(rp.GetRevisionId()) | 1763 | rev = rp.bare_git.describe(rp.GetRevisionId()) |
1449 | except GitError: | 1764 | except GitError: |
1450 | rev = None | 1765 | rev = None |
1451 | _, new_rev = wrapper.check_repo_rev(rp.gitdir, rev, repo_verify=repo_verify) | 1766 | _, new_rev = wrapper.check_repo_rev( |
1452 | # See if we're held back due to missing signed tag. | 1767 | rp.gitdir, rev, repo_verify=repo_verify |
1453 | current_revid = rp.bare_git.rev_parse('HEAD') | 1768 | ) |
1454 | new_revid = rp.bare_git.rev_parse('--verify', new_rev) | 1769 | # See if we're held back due to missing signed tag. |
1455 | if current_revid != new_revid: | 1770 | current_revid = rp.bare_git.rev_parse("HEAD") |
1456 | # We want to switch to the new rev, but also not trash any uncommitted | 1771 | new_revid = rp.bare_git.rev_parse("--verify", new_rev) |
1457 | # changes. This helps with local testing/hacking. | 1772 | if current_revid != new_revid: |
1458 | # If a local change has been made, we will throw that away. | 1773 | # We want to switch to the new rev, but also not trash any |
1459 | # We also have to make sure this will switch to an older commit if that's | 1774 | # uncommitted changes. This helps with local testing/hacking. |
1460 | # the latest tag in order to support release rollback. | 1775 | # If a local change has been made, we will throw that away. |
1461 | try: | 1776 | # We also have to make sure this will switch to an older commit if |
1462 | rp.work_git.reset('--keep', new_rev) | 1777 | # that's the latest tag in order to support release rollback. |
1463 | except GitError as e: | 1778 | try: |
1464 | sys.exit(str(e)) | 1779 | rp.work_git.reset("--keep", new_rev) |
1465 | print('info: Restarting repo with latest version', file=sys.stderr) | 1780 | except GitError as e: |
1466 | raise RepoChangedException(['--repo-upgraded']) | 1781 | sys.exit(str(e)) |
1782 | print("info: Restarting repo with latest version", file=sys.stderr) | ||
1783 | raise RepoChangedException(["--repo-upgraded"]) | ||
1784 | else: | ||
1785 | print( | ||
1786 | "warning: Skipped upgrade to unverified version", | ||
1787 | file=sys.stderr, | ||
1788 | ) | ||
1467 | else: | 1789 | else: |
1468 | print('warning: Skipped upgrade to unverified version', file=sys.stderr) | 1790 | if verbose: |
1469 | else: | 1791 | print( |
1470 | if verbose: | 1792 | "repo version %s is current" % rp.work_git.describe(HEAD), |
1471 | print('repo version %s is current' % rp.work_git.describe(HEAD), | 1793 | file=sys.stderr, |
1472 | file=sys.stderr) | 1794 | ) |
1473 | 1795 | ||
1474 | 1796 | ||
1475 | class _FetchTimes(object): | 1797 | class _FetchTimes(object): |
1476 | _ALPHA = 0.5 | 1798 | _ALPHA = 0.5 |
1477 | 1799 | ||
1478 | def __init__(self, manifest): | 1800 | def __init__(self, manifest): |
1479 | self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json') | 1801 | self._path = os.path.join(manifest.repodir, ".repo_fetchtimes.json") |
1480 | self._times = None | 1802 | self._times = None |
1481 | self._seen = set() | 1803 | self._seen = set() |
1482 | 1804 | ||
1483 | def Get(self, project): | 1805 | def Get(self, project): |
1484 | self._Load() | 1806 | self._Load() |
1485 | return self._times.get(project.name, _ONE_DAY_S) | 1807 | return self._times.get(project.name, _ONE_DAY_S) |
1486 | 1808 | ||
1487 | def Set(self, project, t): | 1809 | def Set(self, project, t): |
1488 | self._Load() | 1810 | self._Load() |
1489 | name = project.name | 1811 | name = project.name |
1490 | old = self._times.get(name, t) | 1812 | old = self._times.get(name, t) |
1491 | self._seen.add(name) | 1813 | self._seen.add(name) |
1492 | a = self._ALPHA | 1814 | a = self._ALPHA |
1493 | self._times[name] = (a * t) + ((1 - a) * old) | 1815 | self._times[name] = (a * t) + ((1 - a) * old) |
1494 | 1816 | ||
1495 | def _Load(self): | 1817 | def _Load(self): |
1496 | if self._times is None: | 1818 | if self._times is None: |
1497 | try: | 1819 | try: |
1498 | with open(self._path) as f: | 1820 | with open(self._path) as f: |
1499 | self._times = json.load(f) | 1821 | self._times = json.load(f) |
1500 | except (IOError, ValueError): | 1822 | except (IOError, ValueError): |
1501 | platform_utils.remove(self._path, missing_ok=True) | 1823 | platform_utils.remove(self._path, missing_ok=True) |
1502 | self._times = {} | 1824 | self._times = {} |
1503 | 1825 | ||
1504 | def Save(self): | 1826 | def Save(self): |
1505 | if self._times is None: | 1827 | if self._times is None: |
1506 | return | 1828 | return |
1507 | 1829 | ||
1508 | to_delete = [] | 1830 | to_delete = [] |
1509 | for name in self._times: | 1831 | for name in self._times: |
1510 | if name not in self._seen: | 1832 | if name not in self._seen: |
1511 | to_delete.append(name) | 1833 | to_delete.append(name) |
1512 | for name in to_delete: | 1834 | for name in to_delete: |
1513 | del self._times[name] | 1835 | del self._times[name] |
1514 | 1836 | ||
1515 | try: | 1837 | try: |
1516 | with open(self._path, 'w') as f: | 1838 | with open(self._path, "w") as f: |
1517 | json.dump(self._times, f, indent=2) | 1839 | json.dump(self._times, f, indent=2) |
1518 | except (IOError, TypeError): | 1840 | except (IOError, TypeError): |
1519 | platform_utils.remove(self._path, missing_ok=True) | 1841 | platform_utils.remove(self._path, missing_ok=True) |
1842 | |||
1520 | 1843 | ||
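_FetchTimes.Set above keeps an exponentially weighted moving average of per-project fetch durations; with _ALPHA = 0.5 the newest sample and the prior estimate are weighted equally. A worked example:

    ALPHA = 0.5
    old = 120.0  # previously stored estimate, in seconds
    t = 40.0     # duration of the fetch that just finished
    new = (ALPHA * t) + ((1 - ALPHA) * old)
    print(new)   # 80.0 -- halfway between the old estimate and the new sample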
1521 | # This is a replacement for xmlrpc.client.Transport using urllib2 | 1844 | # This is a replacement for xmlrpc.client.Transport using urllib2 |
1522 | # and supporting persistent-http[s]. It cannot change hosts from | 1845 | # and supporting persistent-http[s]. It cannot change hosts from |
@@ -1525,98 +1848,105 @@ class _FetchTimes(object): | |||
1525 | 1848 | ||
1526 | 1849 | ||
1527 | class PersistentTransport(xmlrpc.client.Transport): | 1850 | class PersistentTransport(xmlrpc.client.Transport): |
1528 | def __init__(self, orig_host): | 1851 | def __init__(self, orig_host): |
1529 | self.orig_host = orig_host | 1852 | self.orig_host = orig_host |
1530 | 1853 | ||
1531 | def request(self, host, handler, request_body, verbose=False): | 1854 | def request(self, host, handler, request_body, verbose=False): |
1532 | with GetUrlCookieFile(self.orig_host, not verbose) as (cookiefile, proxy): | 1855 | with GetUrlCookieFile(self.orig_host, not verbose) as ( |
1533 | # Python doesn't understand cookies with the #HttpOnly_ prefix | 1856 | cookiefile, |
1534 | # Since we're only using them for HTTP, copy the file temporarily, | 1857 | proxy, |
1535 | # stripping those prefixes away. | 1858 | ): |
1536 | if cookiefile: | 1859 | # Python doesn't understand cookies with the #HttpOnly_ prefix |
1537 | tmpcookiefile = tempfile.NamedTemporaryFile(mode='w') | 1860 | # Since we're only using them for HTTP, copy the file temporarily, |
1538 | tmpcookiefile.write("# HTTP Cookie File") | 1861 | # stripping those prefixes away. |
1539 | try: | 1862 | if cookiefile: |
1540 | with open(cookiefile) as f: | 1863 | tmpcookiefile = tempfile.NamedTemporaryFile(mode="w") |
1541 | for line in f: | 1864 | tmpcookiefile.write("# HTTP Cookie File") |
1542 | if line.startswith("#HttpOnly_"): | 1865 | try: |
1543 | line = line[len("#HttpOnly_"):] | 1866 | with open(cookiefile) as f: |
1544 | tmpcookiefile.write(line) | 1867 | for line in f: |
1545 | tmpcookiefile.flush() | 1868 | if line.startswith("#HttpOnly_"): |
1546 | 1869 | line = line[len("#HttpOnly_") :] | |
1547 | cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name) | 1870 | tmpcookiefile.write(line) |
1548 | try: | 1871 | tmpcookiefile.flush() |
1549 | cookiejar.load() | 1872 | |
1550 | except cookielib.LoadError: | 1873 | cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name) |
1551 | cookiejar = cookielib.CookieJar() | 1874 | try: |
1552 | finally: | 1875 | cookiejar.load() |
1553 | tmpcookiefile.close() | 1876 | except cookielib.LoadError: |
1554 | else: | 1877 | cookiejar = cookielib.CookieJar() |
1555 | cookiejar = cookielib.CookieJar() | 1878 | finally: |
1556 | 1879 | tmpcookiefile.close() | |
1557 | proxyhandler = urllib.request.ProxyHandler | 1880 | else: |
1558 | if proxy: | 1881 | cookiejar = cookielib.CookieJar() |
1559 | proxyhandler = urllib.request.ProxyHandler({ | 1882 | |
1560 | "http": proxy, | 1883 | proxyhandler = urllib.request.ProxyHandler |
1561 | "https": proxy}) | 1884 | if proxy: |
1562 | 1885 | proxyhandler = urllib.request.ProxyHandler( | |
1563 | opener = urllib.request.build_opener( | 1886 | {"http": proxy, "https": proxy} |
1564 | urllib.request.HTTPCookieProcessor(cookiejar), | 1887 | ) |
1565 | proxyhandler) | 1888 | |
1566 | 1889 | opener = urllib.request.build_opener( | |
1567 | url = urllib.parse.urljoin(self.orig_host, handler) | 1890 | urllib.request.HTTPCookieProcessor(cookiejar), proxyhandler |
1568 | parse_results = urllib.parse.urlparse(url) | 1891 | ) |
1569 | 1892 | ||
1570 | scheme = parse_results.scheme | 1893 | url = urllib.parse.urljoin(self.orig_host, handler) |
1571 | if scheme == 'persistent-http': | 1894 | parse_results = urllib.parse.urlparse(url) |
1572 | scheme = 'http' | 1895 | |
1573 | if scheme == 'persistent-https': | 1896 | scheme = parse_results.scheme |
1574 | # If we're proxying through persistent-https, use http. The | 1897 | if scheme == "persistent-http": |
1575 | # proxy itself will do the https. | 1898 | scheme = "http" |
1576 | if proxy: | 1899 | if scheme == "persistent-https": |
1577 | scheme = 'http' | 1900 | # If we're proxying through persistent-https, use http. The |
1578 | else: | 1901 | # proxy itself will do the https. |
1579 | scheme = 'https' | 1902 | if proxy: |
1580 | 1903 | scheme = "http" | |
1581 | # Parse out any authentication information using the base class | 1904 | else: |
1582 | host, extra_headers, _ = self.get_host_info(parse_results.netloc) | 1905 | scheme = "https" |
1583 | 1906 | ||
1584 | url = urllib.parse.urlunparse(( | 1907 | # Parse out any authentication information using the base class. |
1585 | scheme, | 1908 | host, extra_headers, _ = self.get_host_info(parse_results.netloc) |
1586 | host, | 1909 | |
1587 | parse_results.path, | 1910 | url = urllib.parse.urlunparse( |
1588 | parse_results.params, | 1911 | ( |
1589 | parse_results.query, | 1912 | scheme, |
1590 | parse_results.fragment)) | 1913 | host, |
1591 | 1914 | parse_results.path, | |
1592 | request = urllib.request.Request(url, request_body) | 1915 | parse_results.params, |
1593 | if extra_headers is not None: | 1916 | parse_results.query, |
1594 | for (name, header) in extra_headers: | 1917 | parse_results.fragment, |
1595 | request.add_header(name, header) | 1918 | ) |
1596 | request.add_header('Content-Type', 'text/xml') | 1919 | ) |
1597 | try: | 1920 | |
1598 | response = opener.open(request) | 1921 | request = urllib.request.Request(url, request_body) |
1599 | except urllib.error.HTTPError as e: | 1922 | if extra_headers is not None: |
1600 | if e.code == 501: | 1923 | for name, header in extra_headers: |
1601 | # We may have been redirected through a login process | 1924 | request.add_header(name, header) |
1602 | # but our POST turned into a GET. Retry. | 1925 | request.add_header("Content-Type", "text/xml") |
1603 | response = opener.open(request) | 1926 | try: |
1604 | else: | 1927 | response = opener.open(request) |
1605 | raise | 1928 | except urllib.error.HTTPError as e: |
1606 | 1929 | if e.code == 501: | |
1607 | p, u = xmlrpc.client.getparser() | 1930 | # We may have been redirected through a login process |
1608 | # Response should be fairly small, so read it all at once. | 1931 | # but our POST turned into a GET. Retry. |
1609 | # This way we can show it to the user in case of error (e.g. HTML). | 1932 | response = opener.open(request) |
1610 | data = response.read() | 1933 | else: |
1611 | try: | 1934 | raise |
1612 | p.feed(data) | 1935 | |
1613 | except xml.parsers.expat.ExpatError as e: | 1936 | p, u = xmlrpc.client.getparser() |
1614 | raise IOError( | 1937 | # Response should be fairly small, so read it all at once. |
1615 | f'Parsing the manifest failed: {e}\n' | 1938 | # This way we can show it to the user in case of error (e.g. HTML). |
1616 | f'Please report this to your manifest server admin.\n' | 1939 | data = response.read() |
1617 | f'Here is the full response:\n{data.decode("utf-8")}') | 1940 | try: |
1618 | p.close() | 1941 | p.feed(data) |
1619 | return u.close() | 1942 | except xml.parsers.expat.ExpatError as e: |
1620 | 1943 | raise IOError( | |
1621 | def close(self): | 1944 | f"Parsing the manifest failed: {e}\n" |
1622 | pass | 1945 | f"Please report this to your manifest server admin.\n" |
1946 | f'Here is the full response:\n{data.decode("utf-8")}' | ||
1947 | ) | ||
1948 | p.close() | ||
1949 | return u.close() | ||
1950 | |||
1951 | def close(self): | ||
1952 | pass | ||
diff --git a/subcmds/upload.py b/subcmds/upload.py index 9c279230..63216afb 100644 --- a/subcmds/upload.py +++ b/subcmds/upload.py | |||
@@ -32,69 +32,77 @@ _DEFAULT_UNUSUAL_COMMIT_THRESHOLD = 5 | |||
32 | 32 | ||
33 | 33 | ||
34 | def _VerifyPendingCommits(branches: List[ReviewableBranch]) -> bool: | 34 | def _VerifyPendingCommits(branches: List[ReviewableBranch]) -> bool: |
35 | """Perform basic safety checks on the given set of branches. | 35 | """Perform basic safety checks on the given set of branches. |
36 | 36 | ||
37 | Ensures that each branch does not have a "large" number of commits | 37 | Ensures that each branch does not have a "large" number of commits |
38 | and, if so, prompts the user to confirm they want to proceed with | 38 | and, if so, prompts the user to confirm they want to proceed with |
39 | the upload. | 39 | the upload. |
40 | 40 | ||
41 | Returns true if all branches pass the safety check or the user | 41 | Returns true if all branches pass the safety check or the user |
42 | confirmed. Returns false if the upload should be aborted. | 42 | confirmed. Returns false if the upload should be aborted. |
43 | """ | 43 | """ |
44 | 44 | ||
45 | # Determine if any branch has a suspicious number of commits. | 45 | # Determine if any branch has a suspicious number of commits. |
46 | many_commits = False | 46 | many_commits = False |
47 | for branch in branches: | 47 | for branch in branches: |
48 | # Get the user's unusual threshold for the branch. | 48 | # Get the user's unusual threshold for the branch. |
49 | # | 49 | # |
50 | # Each branch may be configured to have a different threshold. | 50 | # Each branch may be configured to have a different threshold. |
51 | remote = branch.project.GetBranch(branch.name).remote | 51 | remote = branch.project.GetBranch(branch.name).remote |
52 | key = f'review.{remote.review}.uploadwarningthreshold' | 52 | key = f"review.{remote.review}.uploadwarningthreshold" |
53 | threshold = branch.project.config.GetInt(key) | 53 | threshold = branch.project.config.GetInt(key) |
54 | if threshold is None: | 54 | if threshold is None: |
55 | threshold = _DEFAULT_UNUSUAL_COMMIT_THRESHOLD | 55 | threshold = _DEFAULT_UNUSUAL_COMMIT_THRESHOLD |
56 | 56 | ||
57 | # If the branch has more commits than the threshold, show a warning. | 57 | # If the branch has more commits than the threshold, show a warning. |
58 | if len(branch.commits) > threshold: | 58 | if len(branch.commits) > threshold: |
59 | many_commits = True | 59 | many_commits = True |
60 | break | 60 | break |
61 | 61 | ||
62 | # If any branch has many commits, prompt the user. | 62 | # If any branch has many commits, prompt the user. |
63 | if many_commits: | 63 | if many_commits: |
64 | if len(branches) > 1: | 64 | if len(branches) > 1: |
65 | print('ATTENTION: One or more branches has an unusually high number ' | 65 | print( |
66 | 'of commits.') | 66 | "ATTENTION: One or more branches has an unusually high number " |
67 | else: | 67 | "of commits." |
68 | print('ATTENTION: You are uploading an unusually high number of commits.') | 68 | ) |
69 | print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across ' | 69 | else: |
70 | 'branches?)') | 70 | print( |
71 | answer = input( | 71 | "ATTENTION: You are uploading an unusually high number of " |
72 | "If you are sure you intend to do this, type 'yes': ").strip() | 72 | "commits." |
73 | return answer == 'yes' | 73 | ) |
74 | 74 | print( | |
75 | return True | 75 | "YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across " |
76 | "branches?)" | ||
77 | ) | ||
78 | answer = input( | ||
79 | "If you are sure you intend to do this, type 'yes': " | ||
80 | ).strip() | ||
81 | return answer == "yes" | ||
82 | |||
83 | return True | ||
76 | 84 | ||
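The warning threshold used by _VerifyPendingCommits is read per review host from review.<host>.uploadwarningthreshold and falls back to 5 when unset, so a project that routinely uploads long series can raise it in its git config. A small illustration of the resulting check (placeholder values, not repository code):

    _DEFAULT_UNUSUAL_COMMIT_THRESHOLD = 5

    def too_many_commits(num_commits, configured=None):
        # `configured` stands in for GetInt("review.<host>.uploadwarningthreshold").
        threshold = (
            configured if configured is not None
            else _DEFAULT_UNUSUAL_COMMIT_THRESHOLD
        )
        return num_commits > threshold

    print(too_many_commits(8))       # True with the default threshold of 5
    print(too_many_commits(8, 20))   # False once the threshold is raised to 20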
77 | 85 | ||
78 | def _die(fmt, *args): | 86 | def _die(fmt, *args): |
79 | msg = fmt % args | 87 | msg = fmt % args |
80 | print('error: %s' % msg, file=sys.stderr) | 88 | print("error: %s" % msg, file=sys.stderr) |
81 | sys.exit(1) | 89 | sys.exit(1) |
82 | 90 | ||
83 | 91 | ||
84 | def _SplitEmails(values): | 92 | def _SplitEmails(values): |
85 | result = [] | 93 | result = [] |
86 | for value in values: | 94 | for value in values: |
87 | result.extend([s.strip() for s in value.split(',')]) | 95 | result.extend([s.strip() for s in value.split(",")]) |
88 | return result | 96 | return result |
89 | 97 | ||
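_SplitEmails above flattens repeated --re/--cc values, which may themselves be comma-delimited, into one list of addresses. Behaviour inferred from the code, shown with placeholder addresses:

    def split_emails(values):
        result = []
        for value in values:
            result.extend(s.strip() for s in value.split(","))
        return result

    print(split_emails(["a@example.com, b@example.com", "c@example.com"]))
    # ['a@example.com', 'b@example.com', 'c@example.com']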
90 | 98 | ||
91 | class Upload(InteractiveCommand): | 99 | class Upload(InteractiveCommand): |
92 | COMMON = True | 100 | COMMON = True |
93 | helpSummary = "Upload changes for code review" | 101 | helpSummary = "Upload changes for code review" |
94 | helpUsage = """ | 102 | helpUsage = """ |
95 | %prog [--re --cc] [<project>]... | 103 | %prog [--re --cc] [<project>]... |
96 | """ | 104 | """ |
97 | helpDescription = """ | 105 | helpDescription = """ |
98 | The '%prog' command is used to send changes to the Gerrit Code | 106 | The '%prog' command is used to send changes to the Gerrit Code |
99 | Review system. It searches for topic branches in local projects | 107 | Review system. It searches for topic branches in local projects |
100 | that have not yet been published for review. If multiple topic | 108 | that have not yet been published for review. If multiple topic |
@@ -195,443 +203,611 @@ threshold to a different value. | |||
195 | Gerrit Code Review: https://www.gerritcodereview.com/ | 203 | Gerrit Code Review: https://www.gerritcodereview.com/ |
196 | 204 | ||
197 | """ | 205 | """ |
198 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS | 206 | PARALLEL_JOBS = DEFAULT_LOCAL_JOBS |
199 | 207 | ||
200 | def _Options(self, p): | 208 | def _Options(self, p): |
201 | p.add_option('-t', | 209 | p.add_option( |
202 | dest='auto_topic', action='store_true', | 210 | "-t", |
203 | help='send local branch name to Gerrit Code Review') | 211 | dest="auto_topic", |
204 | p.add_option('--hashtag', '--ht', | 212 | action="store_true", |
205 | dest='hashtags', action='append', default=[], | 213 | help="send local branch name to Gerrit Code Review", |
206 | help='add hashtags (comma delimited) to the review') | 214 | ) |
207 | p.add_option('--hashtag-branch', '--htb', | 215 | p.add_option( |
208 | action='store_true', | 216 | "--hashtag", |
209 | help='add local branch name as a hashtag') | 217 | "--ht", |
210 | p.add_option('-l', '--label', | 218 | dest="hashtags", |
211 | dest='labels', action='append', default=[], | 219 | action="append", |
212 | help='add a label when uploading') | 220 | default=[], |
213 | p.add_option('--re', '--reviewers', | 221 | help="add hashtags (comma delimited) to the review", |
214 | type='string', action='append', dest='reviewers', | 222 | ) |
215 | help='request reviews from these people') | 223 | p.add_option( |
216 | p.add_option('--cc', | 224 | "--hashtag-branch", |
217 | type='string', action='append', dest='cc', | 225 | "--htb", |
218 | help='also send email to these email addresses') | 226 | action="store_true", |
219 | p.add_option('--br', '--branch', | 227 | help="add local branch name as a hashtag", |
220 | type='string', action='store', dest='branch', | 228 | ) |
221 | help='(local) branch to upload') | 229 | p.add_option( |
222 | p.add_option('-c', '--current-branch', | 230 | "-l", |
223 | dest='current_branch', action='store_true', | 231 | "--label", |
224 | help='upload current git branch') | 232 | dest="labels", |
225 | p.add_option('--no-current-branch', | 233 | action="append", |
226 | dest='current_branch', action='store_false', | 234 | default=[], |
227 | help='upload all git branches') | 235 | help="add a label when uploading", |
228 | # Turn this into a warning & remove this someday. | 236 | ) |
229 | p.add_option('--cbr', | 237 | p.add_option( |
230 | dest='current_branch', action='store_true', | 238 | "--re", |
231 | help=optparse.SUPPRESS_HELP) | 239 | "--reviewers", |
232 | p.add_option('--ne', '--no-emails', | 240 | type="string", |
233 | action='store_false', dest='notify', default=True, | 241 | action="append", |
234 | help='do not send e-mails on upload') | 242 | dest="reviewers", |
235 | p.add_option('-p', '--private', | 243 | help="request reviews from these people", |
236 | action='store_true', dest='private', default=False, | 244 | ) |
237 | help='upload as a private change (deprecated; use --wip)') | 245 | p.add_option( |
238 | p.add_option('-w', '--wip', | 246 | "--cc", |
239 | action='store_true', dest='wip', default=False, | 247 | type="string", |
240 | help='upload as a work-in-progress change') | 248 | action="append", |
241 | p.add_option('-r', '--ready', | 249 | dest="cc", |
242 | action='store_true', default=False, | 250 | help="also send email to these email addresses", |
243 | help='mark change as ready (clears work-in-progress setting)') | 251 | ) |
244 | p.add_option('-o', '--push-option', | 252 | p.add_option( |
245 | type='string', action='append', dest='push_options', | 253 | "--br", |
246 | default=[], | 254 | "--branch", |
247 | help='additional push options to transmit') | 255 | type="string", |
248 | p.add_option('-D', '--destination', '--dest', | 256 | action="store", |
249 | type='string', action='store', dest='dest_branch', | 257 | dest="branch", |
250 | metavar='BRANCH', | 258 | help="(local) branch to upload", |
251 | help='submit for review on this target branch') | 259 | ) |
252 | p.add_option('-n', '--dry-run', | 260 | p.add_option( |
253 | dest='dryrun', default=False, action='store_true', | 261 | "-c", |
254 | help='do everything except actually upload the CL') | 262 | "--current-branch", |
255 | p.add_option('-y', '--yes', | 263 | dest="current_branch", |
256 | default=False, action='store_true', | 264 | action="store_true", |
257 | help='answer yes to all safe prompts') | 265 | help="upload current git branch", |
258 | p.add_option('--ignore-untracked-files', | 266 | ) |
259 | action='store_true', default=False, | 267 | p.add_option( |
260 | help='ignore untracked files in the working copy') | 268 | "--no-current-branch", |
261 | p.add_option('--no-ignore-untracked-files', | 269 | dest="current_branch", |
262 | dest='ignore_untracked_files', action='store_false', | 270 | action="store_false", |
263 | help='always ask about untracked files in the working copy') | 271 | help="upload all git branches", |
264 | p.add_option('--no-cert-checks', | 272 | ) |
265 | dest='validate_certs', action='store_false', default=True, | 273 | # Turn this into a warning & remove this someday. |
266 | help='disable verifying ssl certs (unsafe)') | 274 | p.add_option( |
267 | RepoHook.AddOptionGroup(p, 'pre-upload') | 275 | "--cbr", |
268 | 276 | dest="current_branch", | |
269 | def _SingleBranch(self, opt, branch, people): | 277 | action="store_true", |
270 | project = branch.project | 278 | help=optparse.SUPPRESS_HELP, |
271 | name = branch.name | 279 | ) |
272 | remote = project.GetBranch(name).remote | 280 | p.add_option( |
273 | 281 | "--ne", | |
274 | key = 'review.%s.autoupload' % remote.review | 282 | "--no-emails", |
275 | answer = project.config.GetBoolean(key) | 283 | action="store_false", |
276 | 284 | dest="notify", | |
277 | if answer is False: | 285 | default=True, |
278 | _die("upload blocked by %s = false" % key) | 286 | help="do not send e-mails on upload", |
279 | 287 | ) | |
280 | if answer is None: | 288 | p.add_option( |
281 | date = branch.date | 289 | "-p", |
282 | commit_list = branch.commits | 290 | "--private", |
283 | 291 | action="store_true", | |
284 | destination = opt.dest_branch or project.dest_branch or project.revisionExpr | 292 | dest="private", |
285 | print('Upload project %s/ to remote branch %s%s:' % | 293 | default=False, |
286 | (project.RelPath(local=opt.this_manifest_only), destination, | 294 | help="upload as a private change (deprecated; use --wip)", |
287 | ' (private)' if opt.private else '')) | 295 | ) |
288 | print(' branch %s (%2d commit%s, %s):' % ( | 296 | p.add_option( |
289 | name, | 297 | "-w", |
290 | len(commit_list), | 298 | "--wip", |
291 | len(commit_list) != 1 and 's' or '', | 299 | action="store_true", |
292 | date)) | 300 | dest="wip", |
293 | for commit in commit_list: | 301 | default=False, |
294 | print(' %s' % commit) | 302 | help="upload as a work-in-progress change", |
295 | 303 | ) | |
296 | print('to %s (y/N)? ' % remote.review, end='', flush=True) | 304 | p.add_option( |
297 | if opt.yes: | 305 | "-r", |
298 | print('<--yes>') | 306 | "--ready", |
299 | answer = True | 307 | action="store_true", |
300 | else: | 308 | default=False, |
301 | answer = sys.stdin.readline().strip().lower() | 309 | help="mark change as ready (clears work-in-progress setting)", |
302 | answer = answer in ('y', 'yes', '1', 'true', 't') | 310 | ) |
303 | if not answer: | 311 | p.add_option( |
304 | _die("upload aborted by user") | 312 | "-o", |
305 | 313 | "--push-option", | |
306 | # Perform some basic safety checks prior to uploading. | 314 | type="string", |
307 | if not opt.yes and not _VerifyPendingCommits([branch]): | 315 | action="append", |
308 | _die("upload aborted by user") | 316 | dest="push_options", |
309 | 317 | default=[], | |
310 | self._UploadAndReport(opt, [branch], people) | 318 | help="additional push options to transmit", |
311 | 319 | ) | |
312 | def _MultipleBranches(self, opt, pending, people): | 320 | p.add_option( |
313 | projects = {} | 321 | "-D", |
314 | branches = {} | 322 | "--destination", |
315 | 323 | "--dest", | |
316 | script = [] | 324 | type="string", |
317 | script.append('# Uncomment the branches to upload:') | 325 | action="store", |
318 | for project, avail in pending: | 326 | dest="dest_branch", |
319 | project_path = project.RelPath(local=opt.this_manifest_only) | 327 | metavar="BRANCH", |
320 | script.append('#') | 328 | help="submit for review on this target branch", |
321 | script.append(f'# project {project_path}/:') | 329 | ) |
322 | 330 | p.add_option( | |
323 | b = {} | 331 | "-n", |
324 | for branch in avail: | 332 | "--dry-run", |
325 | if branch is None: | 333 | dest="dryrun", |
326 | continue | 334 | default=False, |
335 | action="store_true", | ||
336 | help="do everything except actually upload the CL", | ||
337 | ) | ||
338 | p.add_option( | ||
339 | "-y", | ||
340 | "--yes", | ||
341 | default=False, | ||
342 | action="store_true", | ||
343 | help="answer yes to all safe prompts", | ||
344 | ) | ||
345 | p.add_option( | ||
346 | "--ignore-untracked-files", | ||
347 | action="store_true", | ||
348 | default=False, | ||
349 | help="ignore untracked files in the working copy", | ||
350 | ) | ||
351 | p.add_option( | ||
352 | "--no-ignore-untracked-files", | ||
353 | dest="ignore_untracked_files", | ||
354 | action="store_false", | ||
355 | help="always ask about untracked files in the working copy", | ||
356 | ) | ||
357 | p.add_option( | ||
358 | "--no-cert-checks", | ||
359 | dest="validate_certs", | ||
360 | action="store_false", | ||
361 | default=True, | ||
362 | help="disable verifying ssl certs (unsafe)", | ||
363 | ) | ||
364 | RepoHook.AddOptionGroup(p, "pre-upload") | ||
365 | |||
366 | def _SingleBranch(self, opt, branch, people): | ||
367 | project = branch.project | ||
327 | name = branch.name | 368 | name = branch.name |
328 | date = branch.date | 369 | remote = project.GetBranch(name).remote |
329 | commit_list = branch.commits | 370 | |
330 | 371 | key = "review.%s.autoupload" % remote.review | |
331 | if b: | 372 | answer = project.config.GetBoolean(key) |
332 | script.append('#') | 373 | |
333 | destination = opt.dest_branch or project.dest_branch or project.revisionExpr | 374 | if answer is False: |
334 | script.append('# branch %s (%2d commit%s, %s) to remote branch %s:' % ( | 375 | _die("upload blocked by %s = false" % key) |
335 | name, | 376 | |
336 | len(commit_list), | 377 | if answer is None: |
337 | len(commit_list) != 1 and 's' or '', | 378 | date = branch.date |
338 | date, | 379 | commit_list = branch.commits |
339 | destination)) | 380 | |
340 | for commit in commit_list: | 381 | destination = ( |
341 | script.append('# %s' % commit) | 382 | opt.dest_branch or project.dest_branch or project.revisionExpr |
342 | b[name] = branch | 383 | ) |
343 | 384 | print( | |
344 | projects[project_path] = project | 385 | "Upload project %s/ to remote branch %s%s:" |
345 | branches[project_path] = b | 386 | % ( |
346 | script.append('') | 387 | project.RelPath(local=opt.this_manifest_only), |
347 | 388 | destination, | |
348 | script = Editor.EditString("\n".join(script)).split("\n") | 389 | " (private)" if opt.private else "", |
349 | 390 | ) | |
350 | project_re = re.compile(r'^#?\s*project\s*([^\s]+)/:$') | 391 | ) |
351 | branch_re = re.compile(r'^\s*branch\s*([^\s(]+)\s*\(.*') | 392 | print( |
352 | 393 | " branch %s (%2d commit%s, %s):" | |
353 | project = None | 394 | % ( |
354 | todo = [] | 395 | name, |
355 | 396 | len(commit_list), | |
356 | for line in script: | 397 | len(commit_list) != 1 and "s" or "", |
357 | m = project_re.match(line) | 398 | date, |
358 | if m: | 399 | ) |
359 | name = m.group(1) | 400 | ) |
360 | project = projects.get(name) | 401 | for commit in commit_list: |
361 | if not project: | 402 | print(" %s" % commit) |
362 | _die('project %s not available for upload', name) | 403 | |
363 | continue | 404 | print("to %s (y/N)? " % remote.review, end="", flush=True) |
364 | |||
365 | m = branch_re.match(line) | ||
366 | if m: | ||
367 | name = m.group(1) | ||
368 | if not project: | ||
369 | _die('project for branch %s not in script', name) | ||
370 | project_path = project.RelPath(local=opt.this_manifest_only) | ||
371 | branch = branches[project_path].get(name) | ||
372 | if not branch: | ||
373 | _die('branch %s not in %s', name, project_path) | ||
374 | todo.append(branch) | ||
375 | if not todo: | ||
376 | _die("nothing uncommented for upload") | ||
377 | |||
378 | # Perform some basic safety checks prior to uploading. | ||
379 | if not opt.yes and not _VerifyPendingCommits(todo): | ||
380 | _die("upload aborted by user") | ||
381 | |||
382 | self._UploadAndReport(opt, todo, people) | ||
383 | |||
384 | def _AppendAutoList(self, branch, people): | ||
385 | """ | ||
386 | Appends the list of reviewers in the git project's config. | ||
387 | Appends the list of users in the CC list in the git project's config if a | ||
388 | non-empty reviewer list was found. | ||
389 | """ | ||
390 | name = branch.name | ||
391 | project = branch.project | ||
392 | |||
393 | key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review | ||
394 | raw_list = project.config.GetString(key) | ||
395 | if raw_list is not None: | ||
396 | people[0].extend([entry.strip() for entry in raw_list.split(',')]) | ||
397 | |||
398 | key = 'review.%s.autocopy' % project.GetBranch(name).remote.review | ||
399 | raw_list = project.config.GetString(key) | ||
400 | if raw_list is not None and len(people[0]) > 0: | ||
401 | people[1].extend([entry.strip() for entry in raw_list.split(',')]) | ||
402 | |||
403 | def _FindGerritChange(self, branch): | ||
404 | last_pub = branch.project.WasPublished(branch.name) | ||
405 | if last_pub is None: | ||
406 | return "" | ||
407 | |||
408 | refs = branch.GetPublishedRefs() | ||
409 | try: | ||
410 | # refs/changes/XYZ/N --> XYZ | ||
411 | return refs.get(last_pub).split('/')[-2] | ||
412 | except (AttributeError, IndexError): | ||
413 | return "" | ||
414 | |||
415 | def _UploadAndReport(self, opt, todo, original_people): | ||
416 | have_errors = False | ||
417 | for branch in todo: | ||
418 | try: | ||
419 | people = copy.deepcopy(original_people) | ||
420 | self._AppendAutoList(branch, people) | ||
421 | |||
422 | # Check if there are local changes that may have been forgotten | ||
423 | changes = branch.project.UncommitedFiles() | ||
424 | if opt.ignore_untracked_files: | ||
425 | untracked = set(branch.project.UntrackedFiles()) | ||
426 | changes = [x for x in changes if x not in untracked] | ||
427 | |||
428 | if changes: | ||
429 | key = 'review.%s.autoupload' % branch.project.remote.review | ||
430 | answer = branch.project.config.GetBoolean(key) | ||
431 | |||
432 | # if they want to auto upload, let's not ask because it could be automated | ||
433 | if answer is None: | ||
434 | print() | ||
435 | print('Uncommitted changes in %s (did you forget to amend?):' | ||
436 | % branch.project.name) | ||
437 | print('\n'.join(changes)) | ||
438 | print('Continue uploading? (y/N) ', end='', flush=True) | ||
439 | if opt.yes: | 405 | if opt.yes: |
440 | print('<--yes>') | 406 | print("<--yes>") |
441 | a = 'yes' | 407 | answer = True |
408 | else: | ||
409 | answer = sys.stdin.readline().strip().lower() | ||
410 | answer = answer in ("y", "yes", "1", "true", "t") | ||
411 | if not answer: | ||
412 | _die("upload aborted by user") | ||
413 | |||
414 | # Perform some basic safety checks prior to uploading. | ||
415 | if not opt.yes and not _VerifyPendingCommits([branch]): | ||
416 | _die("upload aborted by user") | ||
417 | |||
418 | self._UploadAndReport(opt, [branch], people) | ||
419 | |||
420 | def _MultipleBranches(self, opt, pending, people): | ||
421 | projects = {} | ||
422 | branches = {} | ||
423 | |||
424 | script = [] | ||
425 | script.append("# Uncomment the branches to upload:") | ||
426 | for project, avail in pending: | ||
427 | project_path = project.RelPath(local=opt.this_manifest_only) | ||
428 | script.append("#") | ||
429 | script.append(f"# project {project_path}/:") | ||
430 | |||
431 | b = {} | ||
432 | for branch in avail: | ||
433 | if branch is None: | ||
434 | continue | ||
435 | name = branch.name | ||
436 | date = branch.date | ||
437 | commit_list = branch.commits | ||
438 | |||
439 | if b: | ||
440 | script.append("#") | ||
441 | destination = ( | ||
442 | opt.dest_branch | ||
443 | or project.dest_branch | ||
444 | or project.revisionExpr | ||
445 | ) | ||
446 | script.append( | ||
447 | "# branch %s (%2d commit%s, %s) to remote branch %s:" | ||
448 | % ( | ||
449 | name, | ||
450 | len(commit_list), | ||
451 | len(commit_list) != 1 and "s" or "", | ||
452 | date, | ||
453 | destination, | ||
454 | ) | ||
455 | ) | ||
456 | for commit in commit_list: | ||
457 | script.append("# %s" % commit) | ||
458 | b[name] = branch | ||
459 | |||
460 | projects[project_path] = project | ||
461 | branches[project_path] = b | ||
462 | script.append("") | ||
463 | |||
464 | script = Editor.EditString("\n".join(script)).split("\n") | ||
465 | |||
466 | project_re = re.compile(r"^#?\s*project\s*([^\s]+)/:$") | ||
467 | branch_re = re.compile(r"^\s*branch\s*([^\s(]+)\s*\(.*") | ||
468 | |||
469 | project = None | ||
470 | todo = [] | ||
471 | |||
472 | for line in script: | ||
473 | m = project_re.match(line) | ||
474 | if m: | ||
475 | name = m.group(1) | ||
476 | project = projects.get(name) | ||
477 | if not project: | ||
478 | _die("project %s not available for upload", name) | ||
479 | continue | ||
480 | |||
481 | m = branch_re.match(line) | ||
482 | if m: | ||
483 | name = m.group(1) | ||
484 | if not project: | ||
485 | _die("project for branch %s not in script", name) | ||
486 | project_path = project.RelPath(local=opt.this_manifest_only) | ||
487 | branch = branches[project_path].get(name) | ||
488 | if not branch: | ||
489 | _die("branch %s not in %s", name, project_path) | ||
490 | todo.append(branch) | ||
491 | if not todo: | ||
492 | _die("nothing uncommented for upload") | ||
493 | |||
494 | # Perform some basic safety checks prior to uploading. | ||
495 | if not opt.yes and not _VerifyPendingCommits(todo): | ||
496 | _die("upload aborted by user") | ||
497 | |||
498 | self._UploadAndReport(opt, todo, people) | ||
499 | |||
500 | def _AppendAutoList(self, branch, people): | ||
501 | """ | ||
502 | Appends the list of reviewers in the git project's config. | ||
503 | Appends the list of users in the CC list in the git project's config if | ||
504 | a non-empty reviewer list was found. | ||
505 | """ | ||
506 | name = branch.name | ||
507 | project = branch.project | ||
508 | |||
509 | key = "review.%s.autoreviewer" % project.GetBranch(name).remote.review | ||
510 | raw_list = project.config.GetString(key) | ||
511 | if raw_list is not None: | ||
512 | people[0].extend([entry.strip() for entry in raw_list.split(",")]) | ||
513 | |||
514 | key = "review.%s.autocopy" % project.GetBranch(name).remote.review | ||
515 | raw_list = project.config.GetString(key) | ||
516 | if raw_list is not None and len(people[0]) > 0: | ||
517 | people[1].extend([entry.strip() for entry in raw_list.split(",")]) | ||
518 | |||
519 | def _FindGerritChange(self, branch): | ||
520 | last_pub = branch.project.WasPublished(branch.name) | ||
521 | if last_pub is None: | ||
522 | return "" | ||
523 | |||
524 | refs = branch.GetPublishedRefs() | ||
525 | try: | ||
526 | # refs/changes/XYZ/N --> XYZ | ||
527 | return refs.get(last_pub).split("/")[-2] | ||
528 | except (AttributeError, IndexError): | ||
529 | return "" | ||
530 | |||
531 | def _UploadAndReport(self, opt, todo, original_people): | ||
532 | have_errors = False | ||
533 | for branch in todo: | ||
534 | try: | ||
535 | people = copy.deepcopy(original_people) | ||
536 | self._AppendAutoList(branch, people) | ||
537 | |||
538 | # Check if there are local changes that may have been forgotten. | ||
539 | changes = branch.project.UncommitedFiles() | ||
540 | if opt.ignore_untracked_files: | ||
541 | untracked = set(branch.project.UntrackedFiles()) | ||
542 | changes = [x for x in changes if x not in untracked] | ||
543 | |||
544 | if changes: | ||
545 | key = "review.%s.autoupload" % branch.project.remote.review | ||
546 | answer = branch.project.config.GetBoolean(key) | ||
547 | |||
548 | # If they want to auto upload, let's not ask because it | ||
549 | # could be automated. | ||
550 | if answer is None: | ||
551 | print() | ||
552 | print( | ||
553 | "Uncommitted changes in %s (did you forget to " | ||
554 | "amend?):" % branch.project.name | ||
555 | ) | ||
556 | print("\n".join(changes)) | ||
557 | print("Continue uploading? (y/N) ", end="", flush=True) | ||
558 | if opt.yes: | ||
559 | print("<--yes>") | ||
560 | a = "yes" | ||
561 | else: | ||
562 | a = sys.stdin.readline().strip().lower() | ||
563 | if a not in ("y", "yes", "t", "true", "on"): | ||
564 | print("skipping upload", file=sys.stderr) | ||
565 | branch.uploaded = False | ||
566 | branch.error = "User aborted" | ||
567 | continue | ||
568 | |||
569 | # Check if topic branches should be sent to the server during | ||
570 | # upload. | ||
571 | if opt.auto_topic is not True: | ||
572 | key = "review.%s.uploadtopic" % branch.project.remote.review | ||
573 | opt.auto_topic = branch.project.config.GetBoolean(key) | ||
574 | |||
575 | def _ExpandCommaList(value): | ||
576 | """Split |value| up into comma delimited entries.""" | ||
577 | if not value: | ||
578 | return | ||
579 | for ret in value.split(","): | ||
580 | ret = ret.strip() | ||
581 | if ret: | ||
582 | yield ret | ||
583 | |||
584 | # Check if hashtags should be included. | ||
585 | key = "review.%s.uploadhashtags" % branch.project.remote.review | ||
586 | hashtags = set( | ||
587 | _ExpandCommaList(branch.project.config.GetString(key)) | ||
588 | ) | ||
589 | for tag in opt.hashtags: | ||
590 | hashtags.update(_ExpandCommaList(tag)) | ||
591 | if opt.hashtag_branch: | ||
592 | hashtags.add(branch.name) | ||
593 | |||
594 | # Check if labels should be included. | ||
595 | key = "review.%s.uploadlabels" % branch.project.remote.review | ||
596 | labels = set( | ||
597 | _ExpandCommaList(branch.project.config.GetString(key)) | ||
598 | ) | ||
599 | for label in opt.labels: | ||
600 | labels.update(_ExpandCommaList(label)) | ||
601 | |||
602 | # Handle e-mail notifications. | ||
603 | if opt.notify is False: | ||
604 | notify = "NONE" | ||
605 | else: | ||
606 | key = ( | ||
607 | "review.%s.uploadnotify" % branch.project.remote.review | ||
608 | ) | ||
609 | notify = branch.project.config.GetString(key) | ||
610 | |||
611 | destination = opt.dest_branch or branch.project.dest_branch | ||
612 | |||
613 | if branch.project.dest_branch and not opt.dest_branch: | ||
614 | merge_branch = self._GetMergeBranch( | ||
615 | branch.project, local_branch=branch.name | ||
616 | ) | ||
617 | |||
618 | full_dest = destination | ||
619 | if not full_dest.startswith(R_HEADS): | ||
620 | full_dest = R_HEADS + full_dest | ||
621 | |||
622 | # If the merge branch of the local branch is different from | ||
623 | # the project's revision AND destination, this might not be | ||
624 | # intentional. | ||
625 | if ( | ||
626 | merge_branch | ||
627 | and merge_branch != branch.project.revisionExpr | ||
628 | and merge_branch != full_dest | ||
629 | ): | ||
630 | print( | ||
631 | f"For local branch {branch.name}: merge branch " | ||
632 | f"{merge_branch} does not match destination branch " | ||
633 | f"{destination}" | ||
634 | ) | ||
635 | print("skipping upload.") | ||
636 | print( | ||
637 | f"Please use `--destination {destination}` if this " | ||
638 | "is intentional" | ||
639 | ) | ||
640 | branch.uploaded = False | ||
641 | continue | ||
642 | |||
643 | branch.UploadForReview( | ||
644 | people, | ||
645 | dryrun=opt.dryrun, | ||
646 | auto_topic=opt.auto_topic, | ||
647 | hashtags=hashtags, | ||
648 | labels=labels, | ||
649 | private=opt.private, | ||
650 | notify=notify, | ||
651 | wip=opt.wip, | ||
652 | ready=opt.ready, | ||
653 | dest_branch=destination, | ||
654 | validate_certs=opt.validate_certs, | ||
655 | push_options=opt.push_options, | ||
656 | ) | ||
657 | |||
658 | branch.uploaded = True | ||
659 | except UploadError as e: | ||
660 | branch.error = e | ||
661 | branch.uploaded = False | ||
662 | have_errors = True | ||
663 | |||
664 | print(file=sys.stderr) | ||
665 | print("-" * 70, file=sys.stderr) | ||
666 | |||
667 | if have_errors: | ||
668 | for branch in todo: | ||
669 | if not branch.uploaded: | ||
670 | if len(str(branch.error)) <= 30: | ||
671 | fmt = " (%s)" | ||
672 | else: | ||
673 | fmt = "\n (%s)" | ||
674 | print( | ||
675 | ("[FAILED] %-15s %-15s" + fmt) | ||
676 | % ( | ||
677 | branch.project.RelPath(local=opt.this_manifest_only) | ||
678 | + "/", | ||
679 | branch.name, | ||
680 | str(branch.error), | ||
681 | ), | ||
682 | file=sys.stderr, | ||
683 | ) | ||
684 | print() | ||
685 | |||
686 | for branch in todo: | ||
687 | if branch.uploaded: | ||
688 | print( | ||
689 | "[OK ] %-15s %s" | ||
690 | % ( | ||
691 | branch.project.RelPath(local=opt.this_manifest_only) | ||
692 | + "/", | ||
693 | branch.name, | ||
694 | ), | ||
695 | file=sys.stderr, | ||
696 | ) | ||
697 | |||
698 | if have_errors: | ||
699 | sys.exit(1) | ||
700 | |||
701 | def _GetMergeBranch(self, project, local_branch=None): | ||
702 | if local_branch is None: | ||
703 | p = GitCommand( | ||
704 | project, | ||
705 | ["rev-parse", "--abbrev-ref", "HEAD"], | ||
706 | capture_stdout=True, | ||
707 | capture_stderr=True, | ||
708 | ) | ||
709 | p.Wait() | ||
710 | local_branch = p.stdout.strip() | ||
711 | p = GitCommand( | ||
712 | project, | ||
713 | ["config", "--get", "branch.%s.merge" % local_branch], | ||
714 | capture_stdout=True, | ||
715 | capture_stderr=True, | ||
716 | ) | ||
717 | p.Wait() | ||
718 | merge_branch = p.stdout.strip() | ||
719 | return merge_branch | ||
720 | |||
721 | @staticmethod | ||
722 | def _GatherOne(opt, project): | ||
723 | """Figure out the upload status for |project|.""" | ||
724 | if opt.current_branch: | ||
725 | cbr = project.CurrentBranch | ||
726 | up_branch = project.GetUploadableBranch(cbr) | ||
727 | avail = [up_branch] if up_branch else None | ||
728 | else: | ||
729 | avail = project.GetUploadableBranches(opt.branch) | ||
730 | return (project, avail) | ||
731 | |||
732 | def Execute(self, opt, args): | ||
733 | projects = self.GetProjects( | ||
734 | args, all_manifests=not opt.this_manifest_only | ||
735 | ) | ||
736 | |||
737 | def _ProcessResults(_pool, _out, results): | ||
738 | pending = [] | ||
739 | for result in results: | ||
740 | project, avail = result | ||
741 | if avail is None: | ||
742 | print( | ||
743 | 'repo: error: %s: Unable to upload branch "%s". ' | ||
744 | "You might be able to fix the branch by running:\n" | ||
745 | " git branch --set-upstream-to m/%s" | ||
746 | % ( | ||
747 | project.RelPath(local=opt.this_manifest_only), | ||
748 | project.CurrentBranch, | ||
749 | project.manifest.branch, | ||
750 | ), | ||
751 | file=sys.stderr, | ||
752 | ) | ||
753 | elif avail: | ||
754 | pending.append(result) | ||
755 | return pending | ||
756 | |||
757 | pending = self.ExecuteInParallel( | ||
758 | opt.jobs, | ||
759 | functools.partial(self._GatherOne, opt), | ||
760 | projects, | ||
761 | callback=_ProcessResults, | ||
762 | ) | ||
763 | |||
764 | if not pending: | ||
765 | if opt.branch is None: | ||
766 | print( | ||
767 | "repo: error: no branches ready for upload", file=sys.stderr | ||
768 | ) | ||
442 | else: | 769 | else: |
443 | a = sys.stdin.readline().strip().lower() | 770 | print( |
444 | if a not in ('y', 'yes', 't', 'true', 'on'): | 771 | 'repo: error: no branches named "%s" ready for upload' |
445 | print("skipping upload", file=sys.stderr) | 772 | % (opt.branch,), |
446 | branch.uploaded = False | 773 | file=sys.stderr, |
447 | branch.error = 'User aborted' | 774 | ) |
448 | continue | 775 | return 1 |
449 | 776 | ||
450 | # Check if topic branches should be sent to the server during upload | 777 | manifests = { |
451 | if opt.auto_topic is not True: | 778 | project.manifest.topdir: project.manifest |
452 | key = 'review.%s.uploadtopic' % branch.project.remote.review | 779 | for (project, available) in pending |
453 | opt.auto_topic = branch.project.config.GetBoolean(key) | 780 | } |
454 | 781 | ret = 0 | |
455 | def _ExpandCommaList(value): | 782 | for manifest in manifests.values(): |
456 | """Split |value| up into comma delimited entries.""" | 783 | pending_proj_names = [ |
457 | if not value: | 784 | project.name |
458 | return | 785 | for (project, available) in pending |
459 | for ret in value.split(','): | 786 | if project.manifest.topdir == manifest.topdir |
460 | ret = ret.strip() | 787 | ] |
461 | if ret: | 788 | pending_worktrees = [ |
462 | yield ret | 789 | project.worktree |
463 | 790 | for (project, available) in pending | |
464 | # Check if hashtags should be included. | 791 | if project.manifest.topdir == manifest.topdir |
465 | key = 'review.%s.uploadhashtags' % branch.project.remote.review | 792 | ] |
466 | hashtags = set(_ExpandCommaList(branch.project.config.GetString(key))) | 793 | hook = RepoHook.FromSubcmd( |
467 | for tag in opt.hashtags: | 794 | hook_type="pre-upload", |
468 | hashtags.update(_ExpandCommaList(tag)) | 795 | manifest=manifest, |
469 | if opt.hashtag_branch: | 796 | opt=opt, |
470 | hashtags.add(branch.name) | 797 | abort_if_user_denies=True, |
471 | 798 | ) | |
472 | # Check if labels should be included. | 799 | if not hook.Run( |
473 | key = 'review.%s.uploadlabels' % branch.project.remote.review | 800 | project_list=pending_proj_names, worktree_list=pending_worktrees |
474 | labels = set(_ExpandCommaList(branch.project.config.GetString(key))) | 801 | ): |
475 | for label in opt.labels: | 802 | ret = 1 |
476 | labels.update(_ExpandCommaList(label)) | 803 | if ret: |
477 | 804 | return ret | |
478 | # Handle e-mail notifications. | 805 | |
479 | if opt.notify is False: | 806 | reviewers = _SplitEmails(opt.reviewers) if opt.reviewers else [] |
480 | notify = 'NONE' | 807 | cc = _SplitEmails(opt.cc) if opt.cc else [] |
808 | people = (reviewers, cc) | ||
809 | |||
810 | if len(pending) == 1 and len(pending[0][1]) == 1: | ||
811 | self._SingleBranch(opt, pending[0][1][0], people) | ||
481 | else: | 812 | else: |
482 | key = 'review.%s.uploadnotify' % branch.project.remote.review | 813 | self._MultipleBranches(opt, pending, people) |
483 | notify = branch.project.config.GetString(key) | ||
484 | |||
485 | destination = opt.dest_branch or branch.project.dest_branch | ||
486 | |||
487 | if branch.project.dest_branch and not opt.dest_branch: | ||
488 | |||
489 | merge_branch = self._GetMergeBranch( | ||
490 | branch.project, local_branch=branch.name) | ||
491 | |||
492 | full_dest = destination | ||
493 | if not full_dest.startswith(R_HEADS): | ||
494 | full_dest = R_HEADS + full_dest | ||
495 | |||
496 | # If the merge branch of the local branch is different from the | ||
497 | # project's revision AND destination, this might not be intentional. | ||
498 | if (merge_branch and merge_branch != branch.project.revisionExpr | ||
499 | and merge_branch != full_dest): | ||
500 | print(f'For local branch {branch.name}: merge branch ' | ||
501 | f'{merge_branch} does not match destination branch ' | ||
502 | f'{destination}') | ||
503 | print('skipping upload.') | ||
504 | print(f'Please use `--destination {destination}` if this is intentional') | ||
505 | branch.uploaded = False | ||
506 | continue | ||
507 | |||
508 | branch.UploadForReview(people, | ||
509 | dryrun=opt.dryrun, | ||
510 | auto_topic=opt.auto_topic, | ||
511 | hashtags=hashtags, | ||
512 | labels=labels, | ||
513 | private=opt.private, | ||
514 | notify=notify, | ||
515 | wip=opt.wip, | ||
516 | ready=opt.ready, | ||
517 | dest_branch=destination, | ||
518 | validate_certs=opt.validate_certs, | ||
519 | push_options=opt.push_options) | ||
520 | |||
521 | branch.uploaded = True | ||
522 | except UploadError as e: | ||
523 | branch.error = e | ||
524 | branch.uploaded = False | ||
525 | have_errors = True | ||
526 | |||
527 | print(file=sys.stderr) | ||
528 | print('----------------------------------------------------------------------', file=sys.stderr) | ||
529 | |||
530 | if have_errors: | ||
531 | for branch in todo: | ||
532 | if not branch.uploaded: | ||
533 | if len(str(branch.error)) <= 30: | ||
534 | fmt = ' (%s)' | ||
535 | else: | ||
536 | fmt = '\n (%s)' | ||
537 | print(('[FAILED] %-15s %-15s' + fmt) % ( | ||
538 | branch.project.RelPath(local=opt.this_manifest_only) + '/', | ||
539 | branch.name, | ||
540 | str(branch.error)), | ||
541 | file=sys.stderr) | ||
542 | print() | ||
543 | |||
544 | for branch in todo: | ||
545 | if branch.uploaded: | ||
546 | print('[OK ] %-15s %s' % ( | ||
547 | branch.project.RelPath(local=opt.this_manifest_only) + '/', | ||
548 | branch.name), | ||
549 | file=sys.stderr) | ||
550 | |||
551 | if have_errors: | ||
552 | sys.exit(1) | ||
553 | |||
554 | def _GetMergeBranch(self, project, local_branch=None): | ||
555 | if local_branch is None: | ||
556 | p = GitCommand(project, | ||
557 | ['rev-parse', '--abbrev-ref', 'HEAD'], | ||
558 | capture_stdout=True, | ||
559 | capture_stderr=True) | ||
560 | p.Wait() | ||
561 | local_branch = p.stdout.strip() | ||
562 | p = GitCommand(project, | ||
563 | ['config', '--get', 'branch.%s.merge' % local_branch], | ||
564 | capture_stdout=True, | ||
565 | capture_stderr=True) | ||
566 | p.Wait() | ||
567 | merge_branch = p.stdout.strip() | ||
568 | return merge_branch | ||
569 | |||
570 | @staticmethod | ||
571 | def _GatherOne(opt, project): | ||
572 | """Figure out the upload status for |project|.""" | ||
573 | if opt.current_branch: | ||
574 | cbr = project.CurrentBranch | ||
575 | up_branch = project.GetUploadableBranch(cbr) | ||
576 | avail = [up_branch] if up_branch else None | ||
577 | else: | ||
578 | avail = project.GetUploadableBranches(opt.branch) | ||
579 | return (project, avail) | ||
580 | |||
581 | def Execute(self, opt, args): | ||
582 | projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) | ||
583 | |||
584 | def _ProcessResults(_pool, _out, results): | ||
585 | pending = [] | ||
586 | for result in results: | ||
587 | project, avail = result | ||
588 | if avail is None: | ||
589 | print('repo: error: %s: Unable to upload branch "%s". ' | ||
590 | 'You might be able to fix the branch by running:\n' | ||
591 | ' git branch --set-upstream-to m/%s' % | ||
592 | (project.RelPath(local=opt.this_manifest_only), project.CurrentBranch, | ||
593 | project.manifest.branch), | ||
594 | file=sys.stderr) | ||
595 | elif avail: | ||
596 | pending.append(result) | ||
597 | return pending | ||
598 | |||
599 | pending = self.ExecuteInParallel( | ||
600 | opt.jobs, | ||
601 | functools.partial(self._GatherOne, opt), | ||
602 | projects, | ||
603 | callback=_ProcessResults) | ||
604 | |||
605 | if not pending: | ||
606 | if opt.branch is None: | ||
607 | print('repo: error: no branches ready for upload', file=sys.stderr) | ||
608 | else: | ||
609 | print('repo: error: no branches named "%s" ready for upload' % | ||
610 | (opt.branch,), file=sys.stderr) | ||
611 | return 1 | ||
612 | |||
613 | manifests = {project.manifest.topdir: project.manifest | ||
614 | for (project, available) in pending} | ||
615 | ret = 0 | ||
616 | for manifest in manifests.values(): | ||
617 | pending_proj_names = [project.name for (project, available) in pending | ||
618 | if project.manifest.topdir == manifest.topdir] | ||
619 | pending_worktrees = [project.worktree for (project, available) in pending | ||
620 | if project.manifest.topdir == manifest.topdir] | ||
621 | hook = RepoHook.FromSubcmd( | ||
622 | hook_type='pre-upload', manifest=manifest, | ||
623 | opt=opt, abort_if_user_denies=True) | ||
624 | if not hook.Run(project_list=pending_proj_names, | ||
625 | worktree_list=pending_worktrees): | ||
626 | ret = 1 | ||
627 | if ret: | ||
628 | return ret | ||
629 | |||
630 | reviewers = _SplitEmails(opt.reviewers) if opt.reviewers else [] | ||
631 | cc = _SplitEmails(opt.cc) if opt.cc else [] | ||
632 | people = (reviewers, cc) | ||
633 | |||
634 | if len(pending) == 1 and len(pending[0][1]) == 1: | ||
635 | self._SingleBranch(opt, pending[0][1][0], people) | ||
636 | else: | ||
637 | self._MultipleBranches(opt, pending, people) | ||
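Editor's note on the upload.py hunks above: the reformatted `_VerifyPendingCommits` reads a per-remote `review.<host>.uploadwarningthreshold` git-config entry, falls back to a built-in default when the key is unset, and requires an explicit "yes" when any pending branch carries more commits than that threshold. The following is a minimal standalone sketch of that check, not repo's own code: `SimpleConfig`, the example host name, the single shared remote, and the default value of 5 are hypothetical stand-ins for repo's project/config objects and its `_DEFAULT_UNUSUAL_COMMIT_THRESHOLD` constant.

    # Standalone sketch of the commit-count safety check performed before upload.
    # All names below are illustrative; they are not the repo tool's real classes.

    _DEFAULT_UNUSUAL_COMMIT_THRESHOLD = 5  # assumed default, for illustration only


    class SimpleConfig:
        """Hypothetical stand-in for a project's git-config accessor."""

        def __init__(self, values):
            self._values = values

        def GetInt(self, key):
            value = self._values.get(key)
            return int(value) if value is not None else None


    def unusual_commit_count(branches, review_host, config):
        """Return True if any branch exceeds its configured warning threshold."""
        key = f"review.{review_host}.uploadwarningthreshold"
        threshold = config.GetInt(key)
        if threshold is None:
            threshold = _DEFAULT_UNUSUAL_COMMIT_THRESHOLD
        return any(len(commits) > threshold for commits in branches.values())


    if __name__ == "__main__":
        config = SimpleConfig(
            {"review.gerrit.example.com.uploadwarningthreshold": "3"}
        )
        branches = {"topic-x": ["c1", "c2", "c3", "c4"]}
        if unusual_commit_count(branches, "gerrit.example.com", config):
            answer = input(
                "If you are sure you intend to do this, type 'yes': "
            ).strip()
            print("continuing" if answer == "yes" else "upload aborted by user")

Unlike this simplified sketch, the real command resolves the threshold per branch (each branch's own remote), which is why the loop in the hunk above looks the branch's remote up before building the config key.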
diff --git a/subcmds/version.py b/subcmds/version.py index c68cb0af..c539db63 100644 --- a/subcmds/version.py +++ b/subcmds/version.py | |||
@@ -22,45 +22,52 @@ from wrapper import Wrapper | |||
22 | 22 | ||
23 | 23 | ||
24 | class Version(Command, MirrorSafeCommand): | 24 | class Version(Command, MirrorSafeCommand): |
25 | wrapper_version = None | 25 | wrapper_version = None |
26 | wrapper_path = None | 26 | wrapper_path = None |
27 | 27 | ||
28 | COMMON = False | 28 | COMMON = False |
29 | helpSummary = "Display the version of repo" | 29 | helpSummary = "Display the version of repo" |
30 | helpUsage = """ | 30 | helpUsage = """ |
31 | %prog | 31 | %prog |
32 | """ | 32 | """ |
33 | 33 | ||
34 | def Execute(self, opt, args): | 34 | def Execute(self, opt, args): |
35 | rp = self.manifest.repoProject | 35 | rp = self.manifest.repoProject |
36 | rem = rp.GetRemote() | 36 | rem = rp.GetRemote() |
37 | branch = rp.GetBranch('default') | 37 | branch = rp.GetBranch("default") |
38 | 38 | ||
39 | # These might not be the same. Report them both. | 39 | # These might not be the same. Report them both. |
40 | src_ver = RepoSourceVersion() | 40 | src_ver = RepoSourceVersion() |
41 | rp_ver = rp.bare_git.describe(HEAD) | 41 | rp_ver = rp.bare_git.describe(HEAD) |
42 | print('repo version %s' % rp_ver) | 42 | print("repo version %s" % rp_ver) |
43 | print(' (from %s)' % rem.url) | 43 | print(" (from %s)" % rem.url) |
44 | print(' (tracking %s)' % branch.merge) | 44 | print(" (tracking %s)" % branch.merge) |
45 | print(' (%s)' % rp.bare_git.log('-1', '--format=%cD', HEAD)) | 45 | print(" (%s)" % rp.bare_git.log("-1", "--format=%cD", HEAD)) |
46 | 46 | ||
47 | if self.wrapper_path is not None: | 47 | if self.wrapper_path is not None: |
48 | print('repo launcher version %s' % self.wrapper_version) | 48 | print("repo launcher version %s" % self.wrapper_version) |
49 | print(' (from %s)' % self.wrapper_path) | 49 | print(" (from %s)" % self.wrapper_path) |
50 | 50 | ||
51 | if src_ver != rp_ver: | 51 | if src_ver != rp_ver: |
52 | print(' (currently at %s)' % src_ver) | 52 | print(" (currently at %s)" % src_ver) |
53 | 53 | ||
54 | print('repo User-Agent %s' % user_agent.repo) | 54 | print("repo User-Agent %s" % user_agent.repo) |
55 | print('git %s' % git.version_tuple().full) | 55 | print("git %s" % git.version_tuple().full) |
56 | print('git User-Agent %s' % user_agent.git) | 56 | print("git User-Agent %s" % user_agent.git) |
57 | print('Python %s' % sys.version) | 57 | print("Python %s" % sys.version) |
58 | uname = platform.uname() | 58 | uname = platform.uname() |
59 | if sys.version_info.major < 3: | 59 | if sys.version_info.major < 3: |
60 | # Python 3 returns a named tuple, but Python 2 is simpler. | 60 | # Python 3 returns a named tuple, but Python 2 is simpler. |
61 | print(uname) | 61 | print(uname) |
62 | else: | 62 | else: |
63 | print('OS %s %s (%s)' % (uname.system, uname.release, uname.version)) | 63 | print( |
64 | print('CPU %s (%s)' % | 64 | "OS %s %s (%s)" % (uname.system, uname.release, uname.version) |
65 | (uname.machine, uname.processor if uname.processor else 'unknown')) | 65 | ) |
66 | print('Bug reports:', Wrapper().BUG_URL) | 66 | print( |
67 | "CPU %s (%s)" | ||
68 | % ( | ||
69 | uname.machine, | ||
70 | uname.processor if uname.processor else "unknown", | ||
71 | ) | ||
72 | ) | ||
73 | print("Bug reports:", Wrapper().BUG_URL) | ||