Diffstat (limited to 'subcmds/sync.py')
-rw-r--r--   subcmds/sync.py   178
1 file changed, 166 insertions(+), 12 deletions(-)
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 43d450be..4af411c9 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -23,18 +23,26 @@ import shutil
 import socket
 import subprocess
 import sys
+import tempfile
 import time
 
 from pyversion import is_python3
 if is_python3():
+  import http.cookiejar as cookielib
+  import urllib.error
   import urllib.parse
+  import urllib.request
   import xmlrpc.client
 else:
+  import cookielib
   import imp
+  import urllib2
   import urlparse
   import xmlrpclib
   urllib = imp.new_module('urllib')
+  urllib.error = urllib2
   urllib.parse = urlparse
+  urllib.request = urllib2
   xmlrpc = imp.new_module('xmlrpc')
   xmlrpc.client = xmlrpclib
 
@@ -57,7 +65,9 @@ except ImportError:
   multiprocessing = None
 
 from git_command import GIT, git_require
+from git_config import GetUrlCookieFile
 from git_refs import R_HEADS, HEAD
+import gitc_utils
 from project import Project
 from project import RemoteSpec
 from command import Command, MirrorSafeCommand
@@ -65,6 +75,7 @@ from error import RepoChangedException, GitError, ManifestParseError
 from project import SyncBuffer
 from progress import Progress
 from wrapper import Wrapper
+from manifest_xml import GitcManifest
 
 _ONE_DAY_S = 24 * 60 * 60
 
@@ -140,6 +151,9 @@ The --optimized-fetch option can be used to only fetch projects that
 are fixed to a sha1 revision if the sha1 revision does not already
 exist locally.
 
+The --prune option can be used to remove any refs that no longer
+exist on the remote.
+
 SSH Connections
 ---------------
 
@@ -223,6 +237,8 @@ later is required to fix a server side protocol bug.
     p.add_option('--optimized-fetch',
                  dest='optimized_fetch', action='store_true',
                  help='only fetch projects fixed to sha1 if revision does not exist locally')
+    p.add_option('--prune', dest='prune', action='store_true',
+                 help='delete refs that no longer exist on the remote')
     if show_smart:
       p.add_option('-s', '--smart-sync',
                    dest='smart_sync', action='store_true',
@@ -294,7 +310,8 @@ later is required to fix a server side protocol bug.
           force_sync=opt.force_sync,
           clone_bundle=not opt.no_clone_bundle,
           no_tags=opt.no_tags, archive=self.manifest.IsArchive,
-          optimized_fetch=opt.optimized_fetch)
+          optimized_fetch=opt.optimized_fetch,
+          prune=opt.prune)
         self._fetch_times.Set(project, time.time() - start)
 
         # Lock around all the rest of the code, since printing, updating a set
@@ -303,6 +320,7 @@ later is required to fix a server side protocol bug.
         did_lock = True
 
         if not success:
+          err_event.set()
           print('error: Cannot fetch %s' % project.name, file=sys.stderr)
           if opt.force_broken:
             print('warn: --force-broken, continuing to sync',
@@ -313,7 +331,7 @@ later is required to fix a server side protocol bug.
         fetched.add(project.gitdir)
         pm.update()
       except _FetchError:
-        err_event.set()
+        pass
       except Exception as e:
         print('error: Cannot fetch %s (%s: %s)' \
             % (project.name, type(e).__name__, str(e)), file=sys.stderr)
@@ -554,19 +572,18 @@ later is required to fix a server side protocol bug.
           try:
             info = netrc.netrc()
           except IOError:
-            print('.netrc file does not exist or could not be opened',
-                  file=sys.stderr)
+            # .netrc file does not exist or could not be opened
+            pass
           else:
             try:
               parse_result = urllib.parse.urlparse(manifest_server)
               if parse_result.hostname:
-                username, _account, password = \
-                  info.authenticators(parse_result.hostname)
-            except TypeError:
-              # TypeError is raised when the given hostname is not present
-              # in the .netrc file.
-              print('No credentials found for %s in .netrc'
-                    % parse_result.hostname, file=sys.stderr)
+                auth = info.authenticators(parse_result.hostname)
+                if auth:
+                  username, _account, password = auth
+                else:
+                  print('No credentials found for %s in .netrc'
+                        % parse_result.hostname, file=sys.stderr)
             except netrc.NetrcParseError as e:
               print('Error parsing .netrc file: %s' % e, file=sys.stderr)
 
@@ -575,8 +592,12 @@ later is required to fix a server side protocol bug.
                                                     (username, password),
                                                     1)
 
+      transport = PersistentTransport(manifest_server)
+      if manifest_server.startswith('persistent-'):
+        manifest_server = manifest_server[len('persistent-'):]
+
       try:
-        server = xmlrpc.client.Server(manifest_server)
+        server = xmlrpc.client.Server(manifest_server, transport=transport)
         if opt.smart_sync:
           p = self.manifest.manifestProject
           b = p.GetBranch(p.CurrentBranch)
@@ -656,6 +677,42 @@ later is required to fix a server side protocol bug.
       self._ReloadManifest(manifest_name)
       if opt.jobs is None:
         self.jobs = self.manifest.default.sync_j
+
+    if self.gitc_manifest:
+      gitc_manifest_projects = self.GetProjects(args,
+                                                missing_ok=True)
+      gitc_projects = []
+      opened_projects = []
+      for project in gitc_manifest_projects:
+        if project.relpath in self.gitc_manifest.paths and \
+           self.gitc_manifest.paths[project.relpath].old_revision:
+          opened_projects.append(project.relpath)
+        else:
+          gitc_projects.append(project.relpath)
+
+      if not args:
+        gitc_projects = None
+
+      if gitc_projects != [] and not opt.local_only:
+        print('Updating GITC client: %s' % self.gitc_manifest.gitc_client_name)
+        manifest = GitcManifest(self.repodir, self.gitc_manifest.gitc_client_name)
+        if manifest_name:
+          manifest.Override(manifest_name)
+        else:
+          manifest.Override(self.manifest.manifestFile)
+        gitc_utils.generate_gitc_manifest(self.gitc_manifest,
+                                          manifest,
+                                          gitc_projects)
+        print('GITC client successfully synced.')
+
+      # The opened projects need to be synced as normal, therefore we
+      # generate a new args list to represent the opened projects.
+      # TODO: make this more reliable -- if there's a project name/path overlap,
+      # this may choose the wrong project.
+      args = [os.path.relpath(self.manifest.paths[p].worktree, os.getcwd())
+              for p in opened_projects]
+      if not args:
+        return
     all_projects = self.GetProjects(args,
                                     missing_ok=True,
                                     submodules_ok=opt.fetch_submodules)
@@ -850,3 +907,100 @@ class _FetchTimes(object):
         os.remove(self._path)
       except OSError:
         pass
+
+# This is a replacement for xmlrpc.client.Transport using urllib2
+# and supporting persistent-http[s]. It cannot change hosts from
+# request to request like the normal transport, the real url
+# is passed during initialization.
+class PersistentTransport(xmlrpc.client.Transport):
+  def __init__(self, orig_host):
+    self.orig_host = orig_host
+
+  def request(self, host, handler, request_body, verbose=False):
+    with GetUrlCookieFile(self.orig_host, not verbose) as (cookiefile, proxy):
+      # Python doesn't understand cookies with the #HttpOnly_ prefix
+      # Since we're only using them for HTTP, copy the file temporarily,
+      # stripping those prefixes away.
+      if cookiefile:
+        tmpcookiefile = tempfile.NamedTemporaryFile()
+        tmpcookiefile.write("# HTTP Cookie File")
+        try:
+          with open(cookiefile) as f:
+            for line in f:
+              if line.startswith("#HttpOnly_"):
+                line = line[len("#HttpOnly_"):]
+              tmpcookiefile.write(line)
+          tmpcookiefile.flush()
+
+          cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
+          try:
+            cookiejar.load()
+          except cookielib.LoadError:
+            cookiejar = cookielib.CookieJar()
+        finally:
+          tmpcookiefile.close()
+      else:
+        cookiejar = cookielib.CookieJar()
+
+      proxyhandler = urllib.request.ProxyHandler
+      if proxy:
+        proxyhandler = urllib.request.ProxyHandler({
+            "http": proxy,
+            "https": proxy })
+
+      opener = urllib.request.build_opener(
+          urllib.request.HTTPCookieProcessor(cookiejar),
+          proxyhandler)
+
+      url = urllib.parse.urljoin(self.orig_host, handler)
+      parse_results = urllib.parse.urlparse(url)
+
+      scheme = parse_results.scheme
+      if scheme == 'persistent-http':
+        scheme = 'http'
+      if scheme == 'persistent-https':
+        # If we're proxying through persistent-https, use http. The
+        # proxy itself will do the https.
+        if proxy:
+          scheme = 'http'
+        else:
+          scheme = 'https'
+
+      # Parse out any authentication information using the base class
+      host, extra_headers, _ = self.get_host_info(parse_results.netloc)
+
+      url = urllib.parse.urlunparse((
+          scheme,
+          host,
+          parse_results.path,
+          parse_results.params,
+          parse_results.query,
+          parse_results.fragment))
+
+      request = urllib.request.Request(url, request_body)
+      if extra_headers is not None:
+        for (name, header) in extra_headers:
+          request.add_header(name, header)
+      request.add_header('Content-Type', 'text/xml')
+      try:
+        response = opener.open(request)
+      except urllib.error.HTTPError as e:
+        if e.code == 501:
+          # We may have been redirected through a login process
+          # but our POST turned into a GET. Retry.
+          response = opener.open(request)
+        else:
+          raise
+
+      p, u = xmlrpc.client.getparser()
+      while 1:
+        data = response.read(1024)
+        if not data:
+          break
+        p.feed(data)
+      p.close()
+      return u.close()
+
+  def close(self):
+    pass
+
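
For context, a minimal sketch of how the new PersistentTransport is meant to be driven, mirroring the smart-sync wiring added above. The manifest server URL is a made-up example, the Python 3 import spelling is used for brevity, and the snippet assumes the repo source tree is importable; it is an illustration, not part of the change.

    # Illustrative sketch only; exercises PersistentTransport the way the
    # patched Execute() does. The URL below is hypothetical.
    import xmlrpc.client

    from subcmds.sync import PersistentTransport

    manifest_server = 'persistent-https://manifest.example.com/manifestserver'

    # The transport keeps the original persistent-http[s] URL so it can look
    # up cookies and the helper proxy via GetUrlCookieFile(); the XML-RPC
    # client itself is handed the URL with the 'persistent-' prefix stripped.
    transport = PersistentTransport(manifest_server)
    if manifest_server.startswith('persistent-'):
      manifest_server = manifest_server[len('persistent-'):]

    server = xmlrpc.client.Server(manifest_server, transport=transport)
    # e.g. manifest_text = server.GetApprovedManifest(branch, target)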