author     Shawn O. Pearce <sop@google.com>    2011-10-11 12:00:38 -0700
committer  Shawn O. Pearce <sop@google.com>    2011-10-11 12:18:07 -0700
commit     fab96c68e3acfb5403ffe65577563f3cb39e2530
tree       972a82f40c3e7de6b88f174f5f5b2a3418f92e40
parent     bf1fbb20ab33cc479881a2b755e336872971dd78
download   git-repo-fab96c68e3acfb5403ffe65577563f3cb39e2530.tar.gz
Work around Python 2.7 urllib2 bug
If the remote is using authenticated HTTP, but does not have
$GIT_URL/clone.bundle files in each repository, an initial sync
would fail roughly 8 projects in because the library does not reset
its retry counter after getting a 404. Work around this by updating
the retry counter ourselves.

The urllib2 library is also not thread-safe. Make it somewhat
safer by wrapping the critical section with a lock.
Change-Id: I886e2750ef4793cbe2150c3b5396eb9f10974f7f
Signed-off-by: Shawn O. Pearce <sop@google.com>
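
At its core, the retry-counter workaround is a small urllib2.HTTPBasicAuthHandler
subclass that clears the handler's internal retry counter whenever basic-auth
handling raises. A minimal sketch of the pattern, matching the handler added to
main.py below (the comments are editorial, not part of the change):

import urllib2

class _BasicAuthHandler(urllib2.HTTPBasicAuthHandler):
  def http_error_auth_reqed(self, authreq, host, req, headers):
    try:
      # Let the stock Python 2.7 implementation perform the normal
      # basic-auth retry for this request.
      return urllib2.AbstractBasicAuthHandler.http_error_auth_reqed(
          self, authreq, host, req, headers)
    except:
      # The stock handler keeps counting failures across requests (for
      # example a run of 404s for missing clone.bundle files) until it
      # refuses to authenticate at all; resetting the counter on the way
      # out keeps one failed URL from poisoning later ones.
      self.reset_retry_count()
      raise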
-rwxr-xr-x  main.py    | 11
-rw-r--r--  project.py | 77
2 files changed, 54 insertions(+), 34 deletions(-)
diff --git a/main.py b/main.py
@@ -273,6 +273,15 @@ class _UserAgentHandler(urllib2.BaseHandler):
     req.add_header('User-Agent', _UserAgent())
     return req
 
+class _BasicAuthHandler(urllib2.HTTPBasicAuthHandler):
+  def http_error_auth_reqed(self, authreq, host, req, headers):
+    try:
+      return urllib2.AbstractBasicAuthHandler.http_error_auth_reqed(
+        self, authreq, host, req, headers)
+    except:
+      self.reset_retry_count()
+      raise
+
 def init_http():
   handlers = [_UserAgentHandler()]
 
@@ -287,7 +296,7 @@ def init_http():
     pass
   except IOError:
     pass
-  handlers.append(urllib2.HTTPBasicAuthHandler(mgr))
+  handlers.append(_BasicAuthHandler(mgr))
 
   if 'http_proxy' in os.environ:
     url = os.environ['http_proxy']
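
For context, the handlers list built in init_http() becomes the process-wide
opener through the usual urllib2 idiom; that wiring sits outside the hunks
above, so the sketch below is illustrative only (host and credentials are
made up):

import urllib2

mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
mgr.add_password(None, 'https://git.example.com/', 'someuser', 'secret')  # illustrative
handlers = [_BasicAuthHandler(mgr)]
urllib2.install_opener(urllib2.build_opener(*handlers))
# From here on, urllib2.urlopen() routes through _BasicAuthHandler.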
diff --git a/project.py b/project.py
@@ -24,6 +24,11 @@ import sys
 import time
 import urllib2
 
+try:
+  import threading as _threading
+except ImportError:
+  import dummy_threading as _threading
+
 from color import Coloring
 from git_command import GitCommand
 from git_config import GitConfig, IsId, GetSchemeFromUrl
@@ -34,6 +39,8 @@ from progress import Progress
 
 from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
 
+_urllib_lock = _threading.Lock()
+
 def _lwrite(path, content):
   lock = '%s.lock' % path
 
@@ -1458,40 +1465,44 @@ class Project(object):
       dest.seek(0, os.SEEK_END)
       pos = dest.tell()
 
-      req = urllib2.Request(srcUrl)
-      if pos > 0:
-        req.add_header('Range', 'bytes=%d-' % pos)
-
+      _urllib_lock.acquire()
       try:
-        r = urllib2.urlopen(req)
-      except urllib2.HTTPError, e:
-        def _content_type():
-          try:
-            return e.info()['content-type']
-          except:
-            return None
-
-        if e.code == 404:
-          keep = False
-          return False
-        elif _content_type() == 'text/plain':
-          try:
-            msg = e.read()
-            if len(msg) > 0 and msg[-1] == '\n':
-              msg = msg[0:-1]
-            msg = ' (%s)' % msg
-          except:
-            msg = ''
-        else:
-          try:
-            from BaseHTTPServer import BaseHTTPRequestHandler
-            res = BaseHTTPRequestHandler.responses[e.code]
-            msg = ' (%s: %s)' % (res[0], res[1])
-          except:
-            msg = ''
-        raise DownloadError('HTTP %s%s' % (e.code, msg))
-      except urllib2.URLError, e:
-        raise DownloadError('%s: %s ' % (req.get_host(), str(e)))
+        req = urllib2.Request(srcUrl)
+        if pos > 0:
+          req.add_header('Range', 'bytes=%d-' % pos)
+
+        try:
+          r = urllib2.urlopen(req)
+        except urllib2.HTTPError, e:
+          def _content_type():
+            try:
+              return e.info()['content-type']
+            except:
+              return None
+
+          if e.code == 404:
+            keep = False
+            return False
+          elif _content_type() == 'text/plain':
+            try:
+              msg = e.read()
+              if len(msg) > 0 and msg[-1] == '\n':
+                msg = msg[0:-1]
+              msg = ' (%s)' % msg
+            except:
+              msg = ''
+          else:
+            try:
+              from BaseHTTPServer import BaseHTTPRequestHandler
+              res = BaseHTTPRequestHandler.responses[e.code]
+              msg = ' (%s: %s)' % (res[0], res[1])
+            except:
+              msg = ''
+          raise DownloadError('HTTP %s%s' % (e.code, msg))
+        except urllib2.URLError, e:
+          raise DownloadError('%s: %s ' % (req.get_host(), str(e)))
+      finally:
+        _urllib_lock.release()
 
       p = None
       try:
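
The project.py change serializes every clone.bundle download behind the
module-level _urllib_lock, since repo sync can fetch projects from several
worker threads at once while the globally installed urllib2 opener (and its
auth handler state) is shared between them. The diff spells the critical
section out with acquire() and a try/finally release(); because
threading.Lock objects also work as context managers, an equivalent, shorter
sketch of the same section (names taken from the diff, error handling
omitted) would be:

with _urllib_lock:
  req = urllib2.Request(srcUrl)
  if pos > 0:
    req.add_header('Range', 'bytes=%d-' % pos)
  r = urllib2.urlopen(req)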