diff options
Diffstat (limited to 'scripts/lib')
| -rw-r--r-- | scripts/lib/mic/conf.py | 58 | ||||
| -rw-r--r-- | scripts/lib/mic/utils/misc.py | 782 | ||||
| -rw-r--r-- | scripts/lib/mic/utils/proxy.py | 183 |
3 files changed, 1 insertions, 1022 deletions
diff --git a/scripts/lib/mic/conf.py b/scripts/lib/mic/conf.py index b850d80520..1fe6edd724 100644 --- a/scripts/lib/mic/conf.py +++ b/scripts/lib/mic/conf.py | |||
| @@ -20,7 +20,7 @@ import ConfigParser | |||
| 20 | 20 | ||
| 21 | from mic import msger | 21 | from mic import msger |
| 22 | from mic import kickstart | 22 | from mic import kickstart |
| 23 | from mic.utils import misc, runner, proxy, errors | 23 | from mic.utils import misc, runner, errors |
| 24 | 24 | ||
| 25 | 25 | ||
| 26 | def get_siteconf(): | 26 | def get_siteconf(): |
| @@ -55,8 +55,6 @@ class ConfigMgr(object): | |||
| 55 | "pack_to": None, | 55 | "pack_to": None, |
| 56 | "name_prefix": None, | 56 | "name_prefix": None, |
| 57 | "name_suffix": None, | 57 | "name_suffix": None, |
| 58 | "proxy": None, | ||
| 59 | "no_proxy": None, | ||
| 60 | "copy_kernel": False, | 58 | "copy_kernel": False, |
| 61 | "install_pkgs": None, | 59 | "install_pkgs": None, |
| 62 | "repourl": {}, | 60 | "repourl": {}, |
| @@ -104,16 +102,6 @@ class ConfigMgr(object): | |||
| 104 | for sec, vals in self.DEFAULTS.iteritems(): | 102 | for sec, vals in self.DEFAULTS.iteritems(): |
| 105 | setattr(self, sec, vals) | 103 | setattr(self, sec, vals) |
| 106 | 104 | ||
| 107 | def __set_siteconf(self, siteconf): | ||
| 108 | try: | ||
| 109 | self.__siteconf = siteconf | ||
| 110 | self._parse_siteconf(siteconf) | ||
| 111 | except ConfigParser.Error, error: | ||
| 112 | raise errors.ConfigError("%s" % error) | ||
| 113 | def __get_siteconf(self): | ||
| 114 | return self.__siteconf | ||
| 115 | _siteconf = property(__get_siteconf, __set_siteconf) | ||
| 116 | |||
| 117 | def __set_ksconf(self, ksconf): | 105 | def __set_ksconf(self, ksconf): |
| 118 | if not os.path.isfile(ksconf): | 106 | if not os.path.isfile(ksconf): |
| 119 | msger.error('Cannot find ks file: %s' % ksconf) | 107 | msger.error('Cannot find ks file: %s' % ksconf) |
| @@ -124,50 +112,6 @@ class ConfigMgr(object): | |||
| 124 | return self.__ksconf | 112 | return self.__ksconf |
| 125 | _ksconf = property(__get_ksconf, __set_ksconf) | 113 | _ksconf = property(__get_ksconf, __set_ksconf) |
| 126 | 114 | ||
| 127 | def _parse_siteconf(self, siteconf): | ||
| 128 | if not siteconf: | ||
| 129 | return | ||
| 130 | |||
| 131 | if not os.path.exists(siteconf): | ||
| 132 | msger.warning("cannot read config file: %s" % siteconf) | ||
| 133 | return | ||
| 134 | |||
| 135 | parser = ConfigParser.SafeConfigParser() | ||
| 136 | parser.read(siteconf) | ||
| 137 | |||
| 138 | for section in parser.sections(): | ||
| 139 | if section in self.DEFAULTS: | ||
| 140 | getattr(self, section).update(dict(parser.items(section))) | ||
| 141 | |||
| 142 | # append common section items to other sections | ||
| 143 | for section in self.DEFAULTS.keys(): | ||
| 144 | if section != "common": | ||
| 145 | getattr(self, section).update(self.common) | ||
| 146 | |||
| 147 | # check and normalize the scheme of proxy url | ||
| 148 | if self.create['proxy']: | ||
| 149 | m = re.match('^(\w+)://.*', self.create['proxy']) | ||
| 150 | if m: | ||
| 151 | scheme = m.group(1) | ||
| 152 | if scheme not in ('http', 'https', 'ftp', 'socks'): | ||
| 153 | msger.error("%s: proxy scheme is incorrect" % siteconf) | ||
| 154 | else: | ||
| 155 | msger.warning("%s: proxy url w/o scheme, use http as default" | ||
| 156 | % siteconf) | ||
| 157 | self.create['proxy'] = "http://" + self.create['proxy'] | ||
| 158 | |||
| 159 | proxy.set_proxies(self.create['proxy'], self.create['no_proxy']) | ||
| 160 | |||
| 161 | # bootstrap option handling | ||
| 162 | self.set_runtime(self.create['runtime']) | ||
| 163 | if isinstance(self.bootstrap['packages'], basestring): | ||
| 164 | packages = self.bootstrap['packages'].replace('\n', ' ') | ||
| 165 | if packages.find(',') != -1: | ||
| 166 | packages = packages.split(',') | ||
| 167 | else: | ||
| 168 | packages = packages.split() | ||
| 169 | self.bootstrap['packages'] = packages | ||
| 170 | |||
| 171 | def _parse_kickstart(self, ksconf=None): | 115 | def _parse_kickstart(self, ksconf=None): |
| 172 | if not ksconf: | 116 | if not ksconf: |
| 173 | return | 117 | return |
diff --git a/scripts/lib/mic/utils/misc.py b/scripts/lib/mic/utils/misc.py index 95241d7f15..8c1f0160b1 100644 --- a/scripts/lib/mic/utils/misc.py +++ b/scripts/lib/mic/utils/misc.py | |||
| @@ -42,15 +42,8 @@ xmlparse = cElementTree.parse | |||
| 42 | from mic import msger | 42 | from mic import msger |
| 43 | from mic.utils.errors import CreatorError, SquashfsError | 43 | from mic.utils.errors import CreatorError, SquashfsError |
| 44 | from mic.utils.fs_related import find_binary_path, makedirs | 44 | from mic.utils.fs_related import find_binary_path, makedirs |
| 45 | from mic.utils.proxy import get_proxy_for | ||
| 46 | from mic.utils import runner | 45 | from mic.utils import runner |
| 47 | 46 | ||
| 48 | |||
| 49 | RPM_RE = re.compile("(.*)\.(.*) (.*)-(.*)") | ||
| 50 | RPM_FMT = "%(name)s.%(arch)s %(version)s-%(release)s" | ||
| 51 | SRPM_RE = re.compile("(.*)-(\d+.*)-(\d+\.\d+).src.rpm") | ||
| 52 | |||
| 53 | |||
| 54 | def build_name(kscfg, release=None, prefix = None, suffix = None): | 47 | def build_name(kscfg, release=None, prefix = None, suffix = None): |
| 55 | """Construct and return an image name string. | 48 | """Construct and return an image name string. |
| 56 | 49 | ||
| @@ -123,136 +116,6 @@ def get_distro_str(): | |||
| 123 | 116 | ||
| 124 | _LOOP_RULE_PTH = None | 117 | _LOOP_RULE_PTH = None |
| 125 | 118 | ||
| 126 | def hide_loopdev_presentation(): | ||
| 127 | udev_rules = "80-prevent-loop-present.rules" | ||
| 128 | udev_rules_dir = [ | ||
| 129 | '/usr/lib/udev/rules.d/', | ||
| 130 | '/lib/udev/rules.d/', | ||
| 131 | '/etc/udev/rules.d/' | ||
| 132 | ] | ||
| 133 | |||
| 134 | global _LOOP_RULE_PTH | ||
| 135 | |||
| 136 | for rdir in udev_rules_dir: | ||
| 137 | if os.path.exists(rdir): | ||
| 138 | _LOOP_RULE_PTH = os.path.join(rdir, udev_rules) | ||
| 139 | |||
| 140 | if not _LOOP_RULE_PTH: | ||
| 141 | return | ||
| 142 | |||
| 143 | try: | ||
| 144 | with open(_LOOP_RULE_PTH, 'w') as wf: | ||
| 145 | wf.write('KERNEL=="loop*", ENV{UDISKS_PRESENTATION_HIDE}="1"') | ||
| 146 | |||
| 147 | runner.quiet('udevadm trigger') | ||
| 148 | except: | ||
| 149 | pass | ||
| 150 | |||
| 151 | def unhide_loopdev_presentation(): | ||
| 152 | global _LOOP_RULE_PTH | ||
| 153 | |||
| 154 | if not _LOOP_RULE_PTH: | ||
| 155 | return | ||
| 156 | |||
| 157 | try: | ||
| 158 | os.unlink(_LOOP_RULE_PTH) | ||
| 159 | runner.quiet('udevadm trigger') | ||
| 160 | except: | ||
| 161 | pass | ||
| 162 | |||
| 163 | def extract_rpm(rpmfile, targetdir): | ||
| 164 | rpm2cpio = find_binary_path("rpm2cpio") | ||
| 165 | cpio = find_binary_path("cpio") | ||
| 166 | |||
| 167 | olddir = os.getcwd() | ||
| 168 | os.chdir(targetdir) | ||
| 169 | |||
| 170 | msger.verbose("Extract rpm file with cpio: %s" % rpmfile) | ||
| 171 | p1 = subprocess.Popen([rpm2cpio, rpmfile], stdout=subprocess.PIPE) | ||
| 172 | p2 = subprocess.Popen([cpio, "-idv"], stdin=p1.stdout, | ||
| 173 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
| 174 | (sout, serr) = p2.communicate() | ||
| 175 | msger.verbose(sout or serr) | ||
| 176 | |||
| 177 | os.chdir(olddir) | ||
| 178 | |||
| 179 | def compressing(fpath, method): | ||
| 180 | comp_map = { | ||
| 181 | "gz": "gzip", | ||
| 182 | "bz2": "bzip2" | ||
| 183 | } | ||
| 184 | if method not in comp_map: | ||
| 185 | raise CreatorError("Unsupport compress format: %s, valid values: %s" | ||
| 186 | % (method, ','.join(comp_map.keys()))) | ||
| 187 | cmd = find_binary_path(comp_map[method]) | ||
| 188 | rc = runner.show([cmd, "-f", fpath]) | ||
| 189 | if rc: | ||
| 190 | raise CreatorError("Failed to %s file: %s" % (comp_map[method], fpath)) | ||
| 191 | |||
| 192 | def taring(dstfile, target): | ||
| 193 | import tarfile | ||
| 194 | basen, ext = os.path.splitext(dstfile) | ||
| 195 | comp = {".tar": None, | ||
| 196 | ".gz": "gz", # for .tar.gz | ||
| 197 | ".bz2": "bz2", # for .tar.bz2 | ||
| 198 | ".tgz": "gz", | ||
| 199 | ".tbz": "bz2"}[ext] | ||
| 200 | |||
| 201 | # specify tarball file path | ||
| 202 | if not comp: | ||
| 203 | tarpath = dstfile | ||
| 204 | elif basen.endswith(".tar"): | ||
| 205 | tarpath = basen | ||
| 206 | else: | ||
| 207 | tarpath = basen + ".tar" | ||
| 208 | wf = tarfile.open(tarpath, 'w') | ||
| 209 | |||
| 210 | if os.path.isdir(target): | ||
| 211 | for item in os.listdir(target): | ||
| 212 | wf.add(os.path.join(target, item), item) | ||
| 213 | else: | ||
| 214 | wf.add(target, os.path.basename(target)) | ||
| 215 | wf.close() | ||
| 216 | |||
| 217 | if comp: | ||
| 218 | compressing(tarpath, comp) | ||
| 219 | # when dstfile ext is ".tgz" and ".tbz", should rename | ||
| 220 | if not basen.endswith(".tar"): | ||
| 221 | shutil.move("%s.%s" % (tarpath, comp), dstfile) | ||
| 222 | |||
| 223 | def ziping(dstfile, target): | ||
| 224 | import zipfile | ||
| 225 | wf = zipfile.ZipFile(dstfile, 'w', compression=zipfile.ZIP_DEFLATED) | ||
| 226 | if os.path.isdir(target): | ||
| 227 | for item in os.listdir(target): | ||
| 228 | fpath = os.path.join(target, item) | ||
| 229 | if not os.path.isfile(fpath): | ||
| 230 | continue | ||
| 231 | wf.write(fpath, item, zipfile.ZIP_DEFLATED) | ||
| 232 | else: | ||
| 233 | wf.write(target, os.path.basename(target), zipfile.ZIP_DEFLATED) | ||
| 234 | wf.close() | ||
| 235 | |||
| 236 | pack_formats = { | ||
| 237 | ".tar": taring, | ||
| 238 | ".tar.gz": taring, | ||
| 239 | ".tar.bz2": taring, | ||
| 240 | ".tgz": taring, | ||
| 241 | ".tbz": taring, | ||
| 242 | ".zip": ziping, | ||
| 243 | } | ||
| 244 | |||
| 245 | def packing(dstfile, target): | ||
| 246 | (base, ext) = os.path.splitext(dstfile) | ||
| 247 | if ext in (".gz", ".bz2") and base.endswith(".tar"): | ||
| 248 | ext = ".tar" + ext | ||
| 249 | if ext not in pack_formats: | ||
| 250 | raise CreatorError("Unsupport pack format: %s, valid values: %s" | ||
| 251 | % (ext, ','.join(pack_formats.keys()))) | ||
| 252 | func = pack_formats[ext] | ||
| 253 | # func should be callable | ||
| 254 | func(dstfile, target) | ||
| 255 | |||
| 256 | def human_size(size): | 119 | def human_size(size): |
| 257 | """Return human readable string for Bytes size | 120 | """Return human readable string for Bytes size |
| 258 | """ | 121 | """ |
| @@ -371,22 +234,6 @@ def normalize_ksfile(ksconf, release, arch): | |||
| 371 | return ksconf | 234 | return ksconf |
| 372 | 235 | ||
| 373 | 236 | ||
| 374 | def _check_mic_chroot(rootdir): | ||
| 375 | def _path(path): | ||
| 376 | return rootdir.rstrip('/') + path | ||
| 377 | |||
| 378 | release_files = map(_path, [ "/etc/moblin-release", | ||
| 379 | "/etc/meego-release", | ||
| 380 | "/etc/tizen-release"]) | ||
| 381 | |||
| 382 | if not any(map(os.path.exists, release_files)): | ||
| 383 | msger.warning("Dir %s is not a MeeGo/Tizen chroot env" % rootdir) | ||
| 384 | |||
| 385 | if not glob.glob(rootdir + "/boot/vmlinuz-*"): | ||
| 386 | msger.warning("Failed to find kernel module under %s" % rootdir) | ||
| 387 | |||
| 388 | return | ||
| 389 | |||
| 390 | def selinux_check(arch, fstypes): | 237 | def selinux_check(arch, fstypes): |
| 391 | try: | 238 | try: |
| 392 | getenforce = find_binary_path('getenforce') | 239 | getenforce = find_binary_path('getenforce') |
| @@ -403,64 +250,6 @@ def selinux_check(arch, fstypes): | |||
| 403 | raise CreatorError("Can't create btrfs image if selinux is enabled," | 250 | raise CreatorError("Can't create btrfs image if selinux is enabled," |
| 404 | " please run 'setenforce 0' to disable selinux") | 251 | " please run 'setenforce 0' to disable selinux") |
| 405 | 252 | ||
| 406 | def get_image_type(path): | ||
| 407 | def _get_extension_name(path): | ||
| 408 | match = re.search("(?<=\.)\w+$", path) | ||
| 409 | if match: | ||
| 410 | return match.group(0) | ||
| 411 | else: | ||
| 412 | return None | ||
| 413 | |||
| 414 | if os.path.isdir(path): | ||
| 415 | _check_mic_chroot(path) | ||
| 416 | return "fs" | ||
| 417 | |||
| 418 | maptab = { | ||
| 419 | "tar": "loop", | ||
| 420 | "raw":"raw", | ||
| 421 | "vmdk":"vmdk", | ||
| 422 | "vdi":"vdi", | ||
| 423 | "iso":"livecd", | ||
| 424 | "usbimg":"liveusb", | ||
| 425 | } | ||
| 426 | |||
| 427 | extension = _get_extension_name(path) | ||
| 428 | if extension in maptab: | ||
| 429 | return maptab[extension] | ||
| 430 | |||
| 431 | fd = open(path, "rb") | ||
| 432 | file_header = fd.read(1024) | ||
| 433 | fd.close() | ||
| 434 | vdi_flag = "<<< Sun VirtualBox Disk Image >>>" | ||
| 435 | if file_header[0:len(vdi_flag)] == vdi_flag: | ||
| 436 | return maptab["vdi"] | ||
| 437 | |||
| 438 | output = runner.outs(['file', path]) | ||
| 439 | isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*") | ||
| 440 | usbimgptn = re.compile(r".*x86 boot sector.*active.*") | ||
| 441 | rawptn = re.compile(r".*x86 boot sector.*") | ||
| 442 | vmdkptn = re.compile(r".*VMware. disk image.*") | ||
| 443 | ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*") | ||
| 444 | ext4fsimgptn = re.compile(r".*Linux.*ext4 filesystem data.*") | ||
| 445 | btrfsimgptn = re.compile(r".*BTRFS.*") | ||
| 446 | if isoptn.match(output): | ||
| 447 | return maptab["iso"] | ||
| 448 | elif usbimgptn.match(output): | ||
| 449 | return maptab["usbimg"] | ||
| 450 | elif rawptn.match(output): | ||
| 451 | return maptab["raw"] | ||
| 452 | elif vmdkptn.match(output): | ||
| 453 | return maptab["vmdk"] | ||
| 454 | elif ext3fsimgptn.match(output): | ||
| 455 | return "ext3fsimg" | ||
| 456 | elif ext4fsimgptn.match(output): | ||
| 457 | return "ext4fsimg" | ||
| 458 | elif btrfsimgptn.match(output): | ||
| 459 | return "btrfsimg" | ||
| 460 | else: | ||
| 461 | raise CreatorError("Cannot detect the type of image: %s" % path) | ||
| 462 | |||
| 463 | |||
| 464 | def get_file_size(filename): | 253 | def get_file_size(filename): |
| 465 | """ Return size in MB unit """ | 254 | """ Return size in MB unit """ |
| 466 | cmd = ['du', "-s", "-b", "-B", "1M", filename] | 255 | cmd = ['du', "-s", "-b", "-B", "1M", filename] |
| @@ -482,583 +271,12 @@ def get_filesystem_avail(fs): | |||
| 482 | vfstat = os.statvfs(fs) | 271 | vfstat = os.statvfs(fs) |
| 483 | return vfstat.f_bavail * vfstat.f_bsize | 272 | return vfstat.f_bavail * vfstat.f_bsize |
| 484 | 273 | ||
| 485 | def convert_image(srcimg, srcfmt, dstimg, dstfmt): | ||
| 486 | #convert disk format | ||
| 487 | if dstfmt != "raw": | ||
| 488 | raise CreatorError("Invalid destination image format: %s" % dstfmt) | ||
| 489 | msger.debug("converting %s image to %s" % (srcimg, dstimg)) | ||
| 490 | if srcfmt == "vmdk": | ||
| 491 | path = find_binary_path("qemu-img") | ||
| 492 | argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt, dstimg] | ||
| 493 | elif srcfmt == "vdi": | ||
| 494 | path = find_binary_path("VBoxManage") | ||
| 495 | argv = [path, "internalcommands", "converttoraw", srcimg, dstimg] | ||
| 496 | else: | ||
| 497 | raise CreatorError("Invalid soure image format: %s" % srcfmt) | ||
| 498 | |||
| 499 | rc = runner.show(argv) | ||
| 500 | if rc == 0: | ||
| 501 | msger.debug("convert successful") | ||
| 502 | if rc != 0: | ||
| 503 | raise CreatorError("Unable to convert disk to %s" % dstfmt) | ||
| 504 | |||
| 505 | def uncompress_squashfs(squashfsimg, outdir): | ||
| 506 | """Uncompress file system from squshfs image""" | ||
| 507 | unsquashfs = find_binary_path("unsquashfs") | ||
| 508 | args = [ unsquashfs, "-d", outdir, squashfsimg ] | ||
| 509 | rc = runner.show(args) | ||
| 510 | if (rc != 0): | ||
| 511 | raise SquashfsError("Failed to uncompress %s." % squashfsimg) | ||
| 512 | |||
| 513 | def mkdtemp(dir = "/var/tmp", prefix = "wic-tmp-"): | 274 | def mkdtemp(dir = "/var/tmp", prefix = "wic-tmp-"): |
| 514 | """ FIXME: use the dir in wic.conf instead """ | 275 | """ FIXME: use the dir in wic.conf instead """ |
| 515 | 276 | ||
| 516 | makedirs(dir) | 277 | makedirs(dir) |
| 517 | return tempfile.mkdtemp(dir = dir, prefix = prefix) | 278 | return tempfile.mkdtemp(dir = dir, prefix = prefix) |
| 518 | 279 | ||
| 519 | def get_repostrs_from_ks(ks): | ||
| 520 | def _get_temp_reponame(baseurl): | ||
| 521 | md5obj = hashlib.md5(baseurl) | ||
| 522 | tmpreponame = "%s" % md5obj.hexdigest() | ||
| 523 | return tmpreponame | ||
| 524 | |||
| 525 | kickstart_repos = [] | ||
| 526 | |||
| 527 | for repodata in ks.handler.repo.repoList: | ||
| 528 | repo = {} | ||
| 529 | for attr in ('name', | ||
| 530 | 'baseurl', | ||
| 531 | 'mirrorlist', | ||
| 532 | 'includepkgs', # val is list | ||
| 533 | 'excludepkgs', # val is list | ||
| 534 | 'cost', # int | ||
| 535 | 'priority',# int | ||
| 536 | 'save', | ||
| 537 | 'proxy', | ||
| 538 | 'proxyuser', | ||
| 539 | 'proxypasswd', | ||
| 540 | 'proxypasswd', | ||
| 541 | 'debuginfo', | ||
| 542 | 'source', | ||
| 543 | 'gpgkey', | ||
| 544 | 'ssl_verify'): | ||
| 545 | if hasattr(repodata, attr) and getattr(repodata, attr): | ||
| 546 | repo[attr] = getattr(repodata, attr) | ||
| 547 | |||
| 548 | if 'name' not in repo: | ||
| 549 | repo['name'] = _get_temp_reponame(repodata.baseurl) | ||
| 550 | |||
| 551 | kickstart_repos.append(repo) | ||
| 552 | |||
| 553 | return kickstart_repos | ||
| 554 | |||
| 555 | def _get_uncompressed_data_from_url(url, filename, proxies): | ||
| 556 | filename = myurlgrab(url, filename, proxies) | ||
| 557 | suffix = None | ||
| 558 | if filename.endswith(".gz"): | ||
| 559 | suffix = ".gz" | ||
| 560 | runner.quiet(['gunzip', "-f", filename]) | ||
| 561 | elif filename.endswith(".bz2"): | ||
| 562 | suffix = ".bz2" | ||
| 563 | runner.quiet(['bunzip2', "-f", filename]) | ||
| 564 | if suffix: | ||
| 565 | filename = filename.replace(suffix, "") | ||
| 566 | return filename | ||
| 567 | |||
| 568 | def _get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename, | ||
| 569 | sumtype=None, checksum=None): | ||
| 570 | url = os.path.join(baseurl, filename) | ||
| 571 | filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename))) | ||
| 572 | if os.path.splitext(filename_tmp)[1] in (".gz", ".bz2"): | ||
| 573 | filename = os.path.splitext(filename_tmp)[0] | ||
| 574 | else: | ||
| 575 | filename = filename_tmp | ||
| 576 | if sumtype and checksum and os.path.exists(filename): | ||
| 577 | try: | ||
| 578 | sumcmd = find_binary_path("%ssum" % sumtype) | ||
| 579 | except: | ||
| 580 | file_checksum = None | ||
| 581 | else: | ||
| 582 | file_checksum = runner.outs([sumcmd, filename]).split()[0] | ||
| 583 | |||
| 584 | if file_checksum and file_checksum == checksum: | ||
| 585 | return filename | ||
| 586 | |||
| 587 | return _get_uncompressed_data_from_url(url,filename_tmp,proxies) | ||
| 588 | |||
| 589 | def get_metadata_from_repos(repos, cachedir): | ||
| 590 | my_repo_metadata = [] | ||
| 591 | for repo in repos: | ||
| 592 | reponame = repo['name'] | ||
| 593 | baseurl = repo['baseurl'] | ||
| 594 | |||
| 595 | |||
| 596 | if 'proxy' in repo: | ||
| 597 | proxy = repo['proxy'] | ||
| 598 | else: | ||
| 599 | proxy = get_proxy_for(baseurl) | ||
| 600 | |||
| 601 | proxies = None | ||
| 602 | if proxy: | ||
| 603 | proxies = {str(baseurl.split(":")[0]):str(proxy)} | ||
| 604 | |||
| 605 | makedirs(os.path.join(cachedir, reponame)) | ||
| 606 | url = os.path.join(baseurl, "repodata/repomd.xml") | ||
| 607 | filename = os.path.join(cachedir, reponame, 'repomd.xml') | ||
| 608 | repomd = myurlgrab(url, filename, proxies) | ||
| 609 | try: | ||
| 610 | root = xmlparse(repomd) | ||
| 611 | except SyntaxError: | ||
| 612 | raise CreatorError("repomd.xml syntax error.") | ||
| 613 | |||
| 614 | ns = root.getroot().tag | ||
| 615 | ns = ns[0:ns.rindex("}")+1] | ||
| 616 | |||
| 617 | filepaths = {} | ||
| 618 | checksums = {} | ||
| 619 | sumtypes = {} | ||
| 620 | |||
| 621 | for elm in root.getiterator("%sdata" % ns): | ||
| 622 | if elm.attrib["type"] == "patterns": | ||
| 623 | filepaths['patterns'] = elm.find("%slocation" % ns).attrib['href'] | ||
| 624 | checksums['patterns'] = elm.find("%sopen-checksum" % ns).text | ||
| 625 | sumtypes['patterns'] = elm.find("%sopen-checksum" % ns).attrib['type'] | ||
| 626 | break | ||
| 627 | |||
| 628 | for elm in root.getiterator("%sdata" % ns): | ||
| 629 | if elm.attrib["type"] in ("group_gz", "group"): | ||
| 630 | filepaths['comps'] = elm.find("%slocation" % ns).attrib['href'] | ||
| 631 | checksums['comps'] = elm.find("%sopen-checksum" % ns).text | ||
| 632 | sumtypes['comps'] = elm.find("%sopen-checksum" % ns).attrib['type'] | ||
| 633 | break | ||
| 634 | |||
| 635 | primary_type = None | ||
| 636 | for elm in root.getiterator("%sdata" % ns): | ||
| 637 | if elm.attrib["type"] in ("primary_db", "primary"): | ||
| 638 | primary_type = elm.attrib["type"] | ||
| 639 | filepaths['primary'] = elm.find("%slocation" % ns).attrib['href'] | ||
| 640 | checksums['primary'] = elm.find("%sopen-checksum" % ns).text | ||
| 641 | sumtypes['primary'] = elm.find("%sopen-checksum" % ns).attrib['type'] | ||
| 642 | break | ||
| 643 | |||
| 644 | if not primary_type: | ||
| 645 | continue | ||
| 646 | |||
| 647 | for item in ("primary", "patterns", "comps"): | ||
| 648 | if item not in filepaths: | ||
| 649 | filepaths[item] = None | ||
| 650 | continue | ||
| 651 | if not filepaths[item]: | ||
| 652 | continue | ||
| 653 | filepaths[item] = _get_metadata_from_repo(baseurl, | ||
| 654 | proxies, | ||
| 655 | cachedir, | ||
| 656 | reponame, | ||
| 657 | filepaths[item], | ||
| 658 | sumtypes[item], | ||
| 659 | checksums[item]) | ||
| 660 | |||
| 661 | """ Get repo key """ | ||
| 662 | try: | ||
| 663 | repokey = _get_metadata_from_repo(baseurl, | ||
| 664 | proxies, | ||
| 665 | cachedir, | ||
| 666 | reponame, | ||
| 667 | "repodata/repomd.xml.key") | ||
| 668 | except CreatorError: | ||
| 669 | repokey = None | ||
| 670 | msger.debug("\ncan't get %s/%s" % (baseurl, "repodata/repomd.xml.key")) | ||
| 671 | |||
| 672 | my_repo_metadata.append({"name":reponame, | ||
| 673 | "baseurl":baseurl, | ||
| 674 | "repomd":repomd, | ||
| 675 | "primary":filepaths['primary'], | ||
| 676 | "cachedir":cachedir, | ||
| 677 | "proxies":proxies, | ||
| 678 | "patterns":filepaths['patterns'], | ||
| 679 | "comps":filepaths['comps'], | ||
| 680 | "repokey":repokey}) | ||
| 681 | |||
| 682 | return my_repo_metadata | ||
| 683 | |||
| 684 | def get_rpmver_in_repo(repometadata): | ||
| 685 | for repo in repometadata: | ||
| 686 | if repo["primary"].endswith(".xml"): | ||
| 687 | root = xmlparse(repo["primary"]) | ||
| 688 | ns = root.getroot().tag | ||
| 689 | ns = ns[0:ns.rindex("}")+1] | ||
| 690 | |||
| 691 | versionlist = [] | ||
| 692 | for elm in root.getiterator("%spackage" % ns): | ||
| 693 | if elm.find("%sname" % ns).text == 'rpm': | ||
| 694 | for node in elm.getchildren(): | ||
| 695 | if node.tag == "%sversion" % ns: | ||
| 696 | versionlist.append(node.attrib['ver']) | ||
| 697 | |||
| 698 | if versionlist: | ||
| 699 | return reversed( | ||
| 700 | sorted( | ||
| 701 | versionlist, | ||
| 702 | key = lambda ver: map(int, ver.split('.')))).next() | ||
| 703 | |||
| 704 | elif repo["primary"].endswith(".sqlite"): | ||
| 705 | con = sqlite.connect(repo["primary"]) | ||
| 706 | for row in con.execute("select version from packages where " | ||
| 707 | "name=\"rpm\" ORDER by version DESC"): | ||
| 708 | con.close() | ||
| 709 | return row[0] | ||
| 710 | |||
| 711 | return None | ||
| 712 | |||
| 713 | def get_arch(repometadata): | ||
| 714 | archlist = [] | ||
| 715 | for repo in repometadata: | ||
| 716 | if repo["primary"].endswith(".xml"): | ||
| 717 | root = xmlparse(repo["primary"]) | ||
| 718 | ns = root.getroot().tag | ||
| 719 | ns = ns[0:ns.rindex("}")+1] | ||
| 720 | for elm in root.getiterator("%spackage" % ns): | ||
| 721 | if elm.find("%sarch" % ns).text not in ("noarch", "src"): | ||
| 722 | arch = elm.find("%sarch" % ns).text | ||
| 723 | if arch not in archlist: | ||
| 724 | archlist.append(arch) | ||
| 725 | elif repo["primary"].endswith(".sqlite"): | ||
| 726 | con = sqlite.connect(repo["primary"]) | ||
| 727 | for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"): | ||
| 728 | if row[0] not in archlist: | ||
| 729 | archlist.append(row[0]) | ||
| 730 | |||
| 731 | con.close() | ||
| 732 | |||
| 733 | uniq_arch = [] | ||
| 734 | for i in range(len(archlist)): | ||
| 735 | if archlist[i] not in rpmmisc.archPolicies.keys(): | ||
| 736 | continue | ||
| 737 | need_append = True | ||
| 738 | j = 0 | ||
| 739 | while j < len(uniq_arch): | ||
| 740 | if archlist[i] in rpmmisc.archPolicies[uniq_arch[j]].split(':'): | ||
| 741 | need_append = False | ||
| 742 | break | ||
| 743 | if uniq_arch[j] in rpmmisc.archPolicies[archlist[i]].split(':'): | ||
| 744 | if need_append: | ||
| 745 | uniq_arch[j] = archlist[i] | ||
| 746 | need_append = False | ||
| 747 | else: | ||
| 748 | uniq_arch.remove(uniq_arch[j]) | ||
| 749 | continue | ||
| 750 | j += 1 | ||
| 751 | if need_append: | ||
| 752 | uniq_arch.append(archlist[i]) | ||
| 753 | |||
| 754 | return uniq_arch, archlist | ||
| 755 | |||
| 756 | def get_package(pkg, repometadata, arch = None): | ||
| 757 | ver = "" | ||
| 758 | target_repo = None | ||
| 759 | if not arch: | ||
| 760 | arches = [] | ||
| 761 | elif arch not in rpmmisc.archPolicies: | ||
| 762 | arches = [arch] | ||
| 763 | else: | ||
| 764 | arches = rpmmisc.archPolicies[arch].split(':') | ||
| 765 | arches.append('noarch') | ||
| 766 | |||
| 767 | for repo in repometadata: | ||
| 768 | if repo["primary"].endswith(".xml"): | ||
| 769 | root = xmlparse(repo["primary"]) | ||
| 770 | ns = root.getroot().tag | ||
| 771 | ns = ns[0:ns.rindex("}")+1] | ||
| 772 | for elm in root.getiterator("%spackage" % ns): | ||
| 773 | if elm.find("%sname" % ns).text == pkg: | ||
| 774 | if elm.find("%sarch" % ns).text in arches: | ||
| 775 | version = elm.find("%sversion" % ns) | ||
| 776 | tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel']) | ||
| 777 | if tmpver > ver: | ||
| 778 | ver = tmpver | ||
| 779 | location = elm.find("%slocation" % ns) | ||
| 780 | pkgpath = "%s" % location.attrib['href'] | ||
| 781 | target_repo = repo | ||
| 782 | break | ||
| 783 | if repo["primary"].endswith(".sqlite"): | ||
| 784 | con = sqlite.connect(repo["primary"]) | ||
| 785 | if arch: | ||
| 786 | sql = 'select version, release, location_href from packages ' \ | ||
| 787 | 'where name = "%s" and arch IN ("%s")' % \ | ||
| 788 | (pkg, '","'.join(arches)) | ||
| 789 | for row in con.execute(sql): | ||
| 790 | tmpver = "%s-%s" % (row[0], row[1]) | ||
| 791 | if tmpver > ver: | ||
| 792 | ver = tmpver | ||
| 793 | pkgpath = "%s" % row[2] | ||
| 794 | target_repo = repo | ||
| 795 | break | ||
| 796 | else: | ||
| 797 | sql = 'select version, release, location_href from packages ' \ | ||
| 798 | 'where name = "%s"' % pkg | ||
| 799 | for row in con.execute(sql): | ||
| 800 | tmpver = "%s-%s" % (row[0], row[1]) | ||
| 801 | if tmpver > ver: | ||
| 802 | ver = tmpver | ||
| 803 | pkgpath = "%s" % row[2] | ||
| 804 | target_repo = repo | ||
| 805 | break | ||
| 806 | con.close() | ||
| 807 | if target_repo: | ||
| 808 | makedirs("%s/packages/%s" % (target_repo["cachedir"], target_repo["name"])) | ||
| 809 | url = os.path.join(target_repo["baseurl"], pkgpath) | ||
| 810 | filename = str("%s/packages/%s/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath))) | ||
| 811 | if os.path.exists(filename): | ||
| 812 | ret = rpmmisc.checkRpmIntegrity('rpm', filename) | ||
| 813 | if ret == 0: | ||
| 814 | return filename | ||
| 815 | |||
| 816 | msger.warning("package %s is damaged: %s" % | ||
| 817 | (os.path.basename(filename), filename)) | ||
| 818 | os.unlink(filename) | ||
| 819 | |||
| 820 | pkg = myurlgrab(str(url), filename, target_repo["proxies"]) | ||
| 821 | return pkg | ||
| 822 | else: | ||
| 823 | return None | ||
| 824 | |||
| 825 | def get_source_name(pkg, repometadata): | ||
| 826 | |||
| 827 | def get_bin_name(pkg): | ||
| 828 | m = RPM_RE.match(pkg) | ||
| 829 | if m: | ||
| 830 | return m.group(1) | ||
| 831 | return None | ||
| 832 | |||
| 833 | def get_src_name(srpm): | ||
| 834 | m = SRPM_RE.match(srpm) | ||
| 835 | if m: | ||
| 836 | return m.group(1) | ||
| 837 | return None | ||
| 838 | |||
| 839 | ver = "" | ||
| 840 | target_repo = None | ||
| 841 | |||
| 842 | pkg_name = get_bin_name(pkg) | ||
| 843 | if not pkg_name: | ||
| 844 | return None | ||
| 845 | |||
| 846 | for repo in repometadata: | ||
| 847 | if repo["primary"].endswith(".xml"): | ||
| 848 | root = xmlparse(repo["primary"]) | ||
| 849 | ns = root.getroot().tag | ||
| 850 | ns = ns[0:ns.rindex("}")+1] | ||
| 851 | for elm in root.getiterator("%spackage" % ns): | ||
| 852 | if elm.find("%sname" % ns).text == pkg_name: | ||
| 853 | if elm.find("%sarch" % ns).text != "src": | ||
| 854 | version = elm.find("%sversion" % ns) | ||
| 855 | tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel']) | ||
| 856 | if tmpver > ver: | ||
| 857 | ver = tmpver | ||
| 858 | fmt = elm.find("%sformat" % ns) | ||
| 859 | if fmt: | ||
| 860 | fns = fmt.getchildren()[0].tag | ||
| 861 | fns = fns[0:fns.rindex("}")+1] | ||
| 862 | pkgpath = fmt.find("%ssourcerpm" % fns).text | ||
| 863 | target_repo = repo | ||
| 864 | break | ||
| 865 | |||
| 866 | if repo["primary"].endswith(".sqlite"): | ||
| 867 | con = sqlite.connect(repo["primary"]) | ||
| 868 | for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name): | ||
| 869 | tmpver = "%s-%s" % (row[0], row[1]) | ||
| 870 | if tmpver > ver: | ||
| 871 | pkgpath = "%s" % row[2] | ||
| 872 | target_repo = repo | ||
| 873 | break | ||
| 874 | con.close() | ||
| 875 | if target_repo: | ||
| 876 | return get_src_name(pkgpath) | ||
| 877 | else: | ||
| 878 | return None | ||
| 879 | |||
| 880 | def get_pkglist_in_patterns(group, patterns): | ||
| 881 | found = False | ||
| 882 | pkglist = [] | ||
| 883 | try: | ||
| 884 | root = xmlparse(patterns) | ||
| 885 | except SyntaxError: | ||
| 886 | raise SyntaxError("%s syntax error." % patterns) | ||
| 887 | |||
| 888 | for elm in list(root.getroot()): | ||
| 889 | ns = elm.tag | ||
| 890 | ns = ns[0:ns.rindex("}")+1] | ||
| 891 | name = elm.find("%sname" % ns) | ||
| 892 | summary = elm.find("%ssummary" % ns) | ||
| 893 | if name.text == group or summary.text == group: | ||
| 894 | found = True | ||
| 895 | break | ||
| 896 | |||
| 897 | if not found: | ||
| 898 | return pkglist | ||
| 899 | |||
| 900 | found = False | ||
| 901 | for requires in list(elm): | ||
| 902 | if requires.tag.endswith("requires"): | ||
| 903 | found = True | ||
| 904 | break | ||
| 905 | |||
| 906 | if not found: | ||
| 907 | return pkglist | ||
| 908 | |||
| 909 | for pkg in list(requires): | ||
| 910 | pkgname = pkg.attrib["name"] | ||
| 911 | if pkgname not in pkglist: | ||
| 912 | pkglist.append(pkgname) | ||
| 913 | |||
| 914 | return pkglist | ||
| 915 | |||
def get_pkglist_in_comps(group, comps):
    """Return the package names listed for *group* in a comps XML file,
    or an empty list when no matching group exists.
    """
    try:
        tree = xmlparse(comps)
    except SyntaxError:
        raise SyntaxError("%s syntax error." % comps)

    # Find the <group> whose <id> or <name> matches the requested group.
    target = None
    for grp in tree.getiterator("group"):
        if grp.find("id").text == group or grp.find("name").text == group:
            target = grp
            break
    if target is None:
        return []

    # Gather unique <packagereq> entries under the group, keeping order.
    names = []
    for req in target.getiterator("packagereq"):
        pkgname = req.text
        if pkgname not in names:
            names.append(pkgname)
    return names
| 942 | |||
def is_statically_linked(binary):
    """True when file(1) reports *binary* as a statically linked executable."""
    file_report = runner.outs(['file', binary])
    return ", statically linked, " in file_report
| 945 | |||
def setup_qemu_emulator(rootdir, arch):
    """Install a statically linked qemu-arm into *rootdir* and register it
    with the kernel's binfmt_misc so ARM executables run in the chroot.

    rootdir -- chroot directory to copy the emulator into
    arch    -- target architecture string (e.g. "armv7l")

    Returns the emulator path as seen from inside the chroot
    ("/usr/bin/qemu-arm-static").  Raises CreatorError when no statically
    linked emulator exists or it is too old for an armv7 target.
    """
    # mount binfmt_misc if it doesn't exist
    if not os.path.exists("/proc/sys/fs/binfmt_misc"):
        modprobecmd = find_binary_path("modprobe")
        runner.show([modprobecmd, "binfmt_misc"])
    if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
        mountcmd = find_binary_path("mount")
        runner.show([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])

    # qemu_emulator is a special case, we can't use find_binary_path
    # qemu emulator should be a statically-linked executable file
    qemu_emulator = "/usr/bin/qemu-arm"
    if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
        qemu_emulator = "/usr/bin/qemu-arm-static"
    if not os.path.exists(qemu_emulator):
        raise CreatorError("Please install a statically-linked qemu-arm")

    # armv7 targets need qemu >= 0.13.0.
    # BUGFIX: the comprehension variable must not be named 'arch' -- in
    # Python 2 a list comprehension leaks its loop variable, clobbering
    # the 'arch' parameter before the membership test below.
    armv7_list = [a for a in rpmmisc.archPolicies.keys()
                  if a.startswith('armv7')]
    if arch in armv7_list:
        qemuout = runner.outs([qemu_emulator, "-h"])
        m = re.search(r"version\s*([.\d]+)", qemuout)
        if m:
            # BUGFIX: compare numerically; the old string comparison
            # accepted e.g. "0.9" because "0.9" > "0.13" lexicographically.
            qemu_version = tuple(int(part)
                                 for part in m.group(1).split('.') if part)
            if qemu_version < (0, 13):
                raise CreatorError("Requires %s version >=0.13 for %s" % (qemu_emulator, arch))
        else:
            msger.warning("Can't get version info of %s, please make sure it's higher than 0.13.0" % qemu_emulator)

    if not os.path.exists(rootdir + "/usr/bin"):
        makedirs(rootdir + "/usr/bin")
    shutil.copy(qemu_emulator, rootdir + "/usr/bin/qemu-arm-static")
    qemu_emulator = "/usr/bin/qemu-arm-static"

    # disable selinux, selinux will block qemu emulator to run
    if os.path.exists("/usr/sbin/setenforce"):
        msger.info('Try to disable selinux')
        runner.show(["/usr/sbin/setenforce", "0"])

    # unregister it if it has been registered and is a dynamically-linked executable
    node = "/proc/sys/fs/binfmt_misc/arm"
    if os.path.exists(node):
        qemu_unregister_string = "-1\n"
        fd = open(node, "w")
        fd.write(qemu_unregister_string)
        fd.close()

    # register qemu emulator for interpreting other arch executable file
    if not os.path.exists(node):
        qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
        fd = open("/proc/sys/fs/binfmt_misc/register", "w")
        fd.write(qemu_arm_string)
        fd.close()

    return qemu_emulator
| 1001 | |||
def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
    """Fetch the source rpms for *pkgs* into <instroot>/usr/src/SRPMS.

    pkgs         -- iterable of binary package names to fetch sources for
    repometadata -- repo metadata dicts; repos whose "name" ends in
                    "-source" are treated as source repos
    instroot     -- installation root the SRPMS directory is created under
    cachedir     -- cache directory searched for already-downloaded srpms

    Returns the list of srpm names/paths copied, or None when no source
    repo is configured.
    NOTE(review): relies on module-level SRPM_RE, get_source_name and
    get_package defined elsewhere in this file.
    """
    def get_source_repometadata(repometadata):
        # Keep only "*-source" repos; None when there are none.
        src_repometadata=[]
        for repo in repometadata:
            if repo["name"].endswith("-source"):
                src_repometadata.append(repo)
        if src_repometadata:
            return src_repometadata
        return None

    def get_src_name(srpm):
        # Extract the source package name from a srpm filename.
        m = SRPM_RE.match(srpm)
        if m:
            return m.group(1)
        return None

    src_repometadata = get_source_repometadata(repometadata)

    if not src_repometadata:
        msger.warning("No source repo found")
        return None

    # Index the srpms already present in the cache by source name.
    src_pkgs = []
    lpkgs_dict = {}
    lpkgs_path = []
    for repo in src_repometadata:
        cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
        lpkgs_path += glob.glob(cachepath)

    for lpkg in lpkgs_path:
        lpkg_name = get_src_name(os.path.basename(lpkg))
        lpkgs_dict[lpkg_name] = lpkg
    localpkgs = lpkgs_dict.keys()

    cached_count = 0
    destdir = instroot+'/usr/src/SRPMS'
    if not os.path.exists(destdir):
        os.makedirs(destdir)

    # Map each binary package to its source package name (deduplicated).
    srcpkgset = set()
    for _pkg in pkgs:
        srcpkg_name = get_source_name(_pkg, repometadata)
        if not srcpkg_name:
            continue
        srcpkgset.add(srcpkg_name)

    # Copy from the cache when available, otherwise download from repo.
    for pkg in list(srcpkgset):
        if pkg in localpkgs:
            cached_count += 1
            shutil.copy(lpkgs_dict[pkg], destdir)
            src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
        else:
            src_pkg = get_package(pkg, src_repometadata, 'src')
            if src_pkg:
                shutil.copy(src_pkg, destdir)
                src_pkgs.append(src_pkg)
    msger.info("%d source packages gotten from cache" % cached_count)

    return src_pkgs
| 1061 | |||
| 1062 | def strip_end(text, suffix): | 280 | def strip_end(text, suffix): |
| 1063 | if not text.endswith(suffix): | 281 | if not text.endswith(suffix): |
| 1064 | return text | 282 | return text |
diff --git a/scripts/lib/mic/utils/proxy.py b/scripts/lib/mic/utils/proxy.py deleted file mode 100644 index 91451a2d01..0000000000 --- a/scripts/lib/mic/utils/proxy.py +++ /dev/null | |||
| @@ -1,183 +0,0 @@ | |||
| 1 | #!/usr/bin/python -tt | ||
| 2 | # | ||
| 3 | # Copyright (c) 2010, 2011 Intel, Inc. | ||
| 4 | # | ||
| 5 | # This program is free software; you can redistribute it and/or modify it | ||
| 6 | # under the terms of the GNU General Public License as published by the Free | ||
| 7 | # Software Foundation; version 2 of the License | ||
| 8 | # | ||
| 9 | # This program is distributed in the hope that it will be useful, but | ||
| 10 | # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY | ||
| 11 | # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | ||
| 12 | # for more details. | ||
| 13 | # | ||
| 14 | # You should have received a copy of the GNU General Public License along | ||
| 15 | # with this program; if not, write to the Free Software Foundation, Inc., 59 | ||
| 16 | # Temple Place - Suite 330, Boston, MA 02111-1307, USA. | ||
| 17 | |||
| 18 | import os | ||
| 19 | import urlparse | ||
| 20 | |||
# Module-level proxy state, filled in by set_proxies()/_set_proxies().
_my_proxies = {}       # scheme (e.g. "http", "ftp") -> proxy URL
_my_noproxy = None     # raw no_proxy string, e.g. "localhost,.example.com"
_my_noproxy_list = []  # parsed entries built by _set_noproxy_list()
| 24 | |||
def set_proxy_environ():
    """Export the cached proxy settings into os.environ.

    No-op when no proxies are cached; "no_proxy" is exported only when a
    no_proxy value is cached as well.
    """
    global _my_noproxy, _my_proxies
    if not _my_proxies:
        return
    for scheme, url in _my_proxies.items():
        os.environ["%s_proxy" % scheme] = url
    if _my_noproxy:
        os.environ["no_proxy"] = _my_noproxy
| 34 | |||
def unset_proxy_environ():
    """Remove every *_proxy variable, both lower- and upper-case, from
    os.environ."""
    for var in ('http_proxy', 'https_proxy', 'ftp_proxy', 'all_proxy'):
        # pop() tolerates absent names, covering the old existence checks.
        os.environ.pop(var, None)
        os.environ.pop(var.upper(), None)
| 46 | |||
def _set_proxies(proxy = None, no_proxy = None):
    """Populate the module proxy cache (_my_proxies / _my_noproxy) from
    the given arguments, or from the process environment when neither
    argument is supplied.
    """
    global _my_noproxy, _my_proxies
    _my_proxies = {}
    _my_noproxy = None

    candidates = []
    if proxy:
        candidates.append(("http_proxy", proxy))
    if no_proxy:
        candidates.append(("no_proxy", no_proxy))
    if not proxy and not no_proxy:
        # Neither given: snapshot the environment before it is scrubbed.
        candidates = list(os.environ.items())

    # urllib2 can't handle proxy environment variables correctly, so
    # remove them from the environment up front.
    unset_proxy_environ()

    for name, value in candidates:
        key = name.lower()
        if not value or not key.endswith('_proxy'):
            continue
        if key.startswith('no'):
            _my_noproxy = value
        else:
            _my_proxies[key[:-6]] = value
| 74 | |||
| 75 | def _ip_to_int(ip): | ||
| 76 | ipint=0 | ||
| 77 | shift=24 | ||
| 78 | for dec in ip.split("."): | ||
| 79 | ipint |= int(dec) << shift | ||
| 80 | shift -= 8 | ||
| 81 | return ipint | ||
| 82 | |||
| 83 | def _int_to_ip(val): | ||
| 84 | ipaddr="" | ||
| 85 | shift=0 | ||
| 86 | for i in range(4): | ||
| 87 | dec = val >> shift | ||
| 88 | dec &= 0xff | ||
| 89 | ipaddr = ".%d%s" % (dec, ipaddr) | ||
| 90 | shift += 8 | ||
| 91 | return ipaddr[1:] | ||
| 92 | |||
| 93 | def _isip(host): | ||
| 94 | if host.replace(".", "").isdigit(): | ||
| 95 | return True | ||
| 96 | return False | ||
| 97 | |||
def _set_noproxy_list():
    """Parse the cached _my_noproxy string (comma-separated) into
    _my_noproxy_list.

    Each entry becomes a dict with a "match" mode:
      0 -- plain hostname, compared for equality with the host
      1 -- ".domain" entry, matched against the tail of the host
      2 -- IP/MASK network, host IP compared under "netmask"
    """
    global _my_noproxy, _my_noproxy_list
    _my_noproxy_list = []
    if not _my_noproxy:
        return
    for item in _my_noproxy.split(","):
        item = item.strip()
        if not item:
            continue

        if item[0] != '.' and item.find("/") == -1:
            # Need to match it
            _my_noproxy_list.append({"match":0,"needle":item})

        elif item[0] == '.':
            # Need to match at tail
            _my_noproxy_list.append({"match":1,"needle":item})

        elif item.find("/") > 3:
            # IP/MASK, need to match at head
            # NOTE(review): entries whose "/" sits at index <= 3
            # (e.g. "1.2/8") fall through every branch and are
            # silently dropped -- confirm this is intended.
            needle = item[0:item.find("/")].strip()
            ip = _ip_to_int(needle)
            netmask = 0
            mask = item[item.find("/")+1:].strip()

            if mask.isdigit():
                # Prefix-length form, e.g. "/24".
                netmask = int(mask)
                netmask = ~((1<<(32-netmask)) - 1)
                ip &= netmask
            else:
                # Dotted-quad form, e.g. "/255.255.255.0".
                shift=24
                netmask=0
                for dec in mask.split("."):
                    netmask |= int(dec) << shift
                    shift -= 8
                ip &= netmask

            # Store the pre-masked network address plus its mask.
            _my_noproxy_list.append({"match":2,"needle":ip,"netmask":netmask})
| 136 | |||
def _isnoproxy(url):
    """True when *url*'s host matches an entry of _my_noproxy_list.

    The netloc is stripped of credentials and port, then checked against
    each parsed no_proxy entry: exact hostname (match 0), domain suffix
    (match 1) or IP network (match 2).
    """
    (scheme, host, path, parm, query, frag) = urlparse.urlparse(url)

    # Strip "user:pass@" credentials and ":port" from the netloc.
    if '@' in host:
        user_pass, host = host.split('@', 1)

    if ':' in host:
        host, port = host.split(':', 1)

    hostisip = _isip(host)
    for item in _my_noproxy_list:
        # Hostname-style rules (0 and 1) cannot apply to a numeric IP.
        if hostisip and item["match"] <= 1:
            continue

        if item["match"] == 2 and hostisip:
            # Network rule: compare the masked host address.
            if (_ip_to_int(host) & item["netmask"]) == item["needle"]:
                return True

        if item["match"] == 0:
            if host == item["needle"]:
                return True

        if item["match"] == 1:
            # BUGFIX: the needle (".example.com") must match at the TAIL
            # of the host; the old rfind(...) > 0 test accepted it
            # anywhere inside the host (e.g. "a.example.com.evil").
            if host.endswith(item["needle"]):
                return True

    return False
| 164 | |||
def set_proxies(proxy = None, no_proxy = None):
    """Configure module proxy state and export it to the environment.

    proxy    -- http proxy URL; when both arguments are None the settings
                are taken from the process environment instead
    no_proxy -- comma-separated hosts/domains/networks to bypass
    """
    _set_proxies(proxy, no_proxy)
    _set_noproxy_list()
    set_proxy_environ()
| 169 | |||
def get_proxy_for(url):
    """Return the proxy URL to use for *url*, or None when the URL is a
    local file or matches the no_proxy configuration.

    Prefers a proxy registered for the URL's scheme, falling back to the
    generic "http" proxy; None when neither is configured.
    """
    if url.startswith('file:') or _isnoproxy(url):
        return None

    # Membership test / .get() instead of the Python-2-only
    # dict.has_key(); also avoids shadowing the builtin 'type'.
    scheme = url[0:url.index(":")]
    if scheme in _my_proxies:
        return _my_proxies[scheme]
    return _my_proxies.get("http")
