[Kimchi-devel] [PATCH 03/15] V3 Ginger Base : base folder files part 2
Daniel Henrique Barboza
dhbarboza82 at gmail.com
Wed Oct 7 12:57:45 UTC 2015
Reviewed-by: Daniel Barboza <dhbarboza82 at gmail.com>
On 10/07/2015 07:39 AM, chandra at linux.vnet.ibm.com wrote:
> From: chandrureddy <chandra at linux.vnet.ibm.com>
>
> ---
> src/wok/plugins/gingerbase/disks.py | 196 +++++++++++
> src/wok/plugins/gingerbase/i18n.py | 96 ++++++
> src/wok/plugins/gingerbase/repositories.py | 533 +++++++++++++++++++++++++++++
> src/wok/plugins/gingerbase/swupdate.py | 275 +++++++++++++++
> src/wok/plugins/gingerbase/yumparser.py | 353 +++++++++++++++++++
> src/wok/plugins/kimchi/disks.py | 196 -----------
> src/wok/plugins/kimchi/repositories.py | 533 -----------------------------
> src/wok/plugins/kimchi/swupdate.py | 274 ---------------
> src/wok/plugins/kimchi/yumparser.py | 353 -------------------
> 9 files changed, 1453 insertions(+), 1356 deletions(-)
> create mode 100644 src/wok/plugins/gingerbase/disks.py
> create mode 100644 src/wok/plugins/gingerbase/i18n.py
> create mode 100644 src/wok/plugins/gingerbase/repositories.py
> create mode 100644 src/wok/plugins/gingerbase/swupdate.py
> create mode 100644 src/wok/plugins/gingerbase/yumparser.py
> delete mode 100644 src/wok/plugins/kimchi/disks.py
> delete mode 100644 src/wok/plugins/kimchi/repositories.py
> delete mode 100644 src/wok/plugins/kimchi/swupdate.py
> delete mode 100644 src/wok/plugins/kimchi/yumparser.py
>
> diff --git a/src/wok/plugins/gingerbase/disks.py b/src/wok/plugins/gingerbase/disks.py
> new file mode 100644
> index 0000000..7ce5135
> --- /dev/null
> +++ b/src/wok/plugins/gingerbase/disks.py
> @@ -0,0 +1,196 @@
> +#
> +# Project Ginger Base
> +#
> +# Copyright IBM, Corp. 2013-2015
> +#
> +# This library is free software; you can redistribute it and/or
> +# modify it under the terms of the GNU Lesser General Public
> +# License as published by the Free Software Foundation; either
> +# version 2.1 of the License, or (at your option) any later version.
> +#
> +# This library is distributed in the hope that it will be useful,
> +# but WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> +# Lesser General Public License for more details.
> +#
> +# You should have received a copy of the GNU Lesser General Public
> +# License along with this library; if not, write to the Free Software
> +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> +
> +import os.path
> +import re
> +import subprocess
> +from parted import Device as PDevice
> +from parted import Disk as PDisk
> +
> +from wok.exception import OperationFailed
> +from wok.utils import wok_log
> +
> +
> +def _get_dev_node_path(maj_min):
> + """ Returns device node path given the device number 'major:min' """
> +
> + dm_name = "/sys/dev/block/%s/dm/name" % maj_min
> + if os.path.exists(dm_name):
> + with open(dm_name) as dm_f:
> + content = dm_f.read().rstrip('\n')
> + return "/dev/mapper/" + content
> +
> + uevent = "/sys/dev/block/%s/uevent" % maj_min
> + with open(uevent) as ueventf:
> + content = ueventf.read()
> +
> + data = dict(re.findall(r'(\S+)=(".*?"|\S+)', content.replace("\n", " ")))
> +
> + return "/dev/%s" % data["DEVNAME"]
> +
> +
> +def _get_lsblk_devs(keys, devs=[]):
> + lsblk = subprocess.Popen(
> + ["lsblk", "-Pbo"] + [','.join(keys)] + devs,
> + stdout=subprocess.PIPE, stderr=subprocess.PIPE)
> + out, err = lsblk.communicate()
> + if lsblk.returncode != 0:
> + raise OperationFailed("GGBDISKS0001E", {'err': err})
> +
> + return _parse_lsblk_output(out, keys)
> +
> +
> +def _get_dev_major_min(name):
> + maj_min = None
> +
> + keys = ["NAME", "MAJ:MIN"]
> + dev_list = _get_lsblk_devs(keys)
> +
> + for dev in dev_list:
> + if dev['name'].split()[0] == name:
> + maj_min = dev['maj:min']
> + break
> + else:
> + raise OperationFailed("GGBDISKS0002E", {'device': name})
> +
> + return maj_min
> +
> +
> +def _is_dev_leaf(devNodePath):
> + try:
> + # By default, lsblk prints device information followed by children
> + # device information
> + childrenCount = len(
> + _get_lsblk_devs(["NAME"], [devNodePath])) - 1
> + except OperationFailed as e:
> + # lsblk is known to fail on multipath devices
> + # Assume these devices contain children
> + wok_log.error(
> + "Error getting device info for %s: %s", devNodePath, e)
> + return False
> +
> + return childrenCount == 0
> +
> +
> +def _is_dev_extended_partition(devType, devNodePath):
> + if devType != 'part':
> + return False
> + diskPath = devNodePath.rstrip('0123456789')
> + device = PDevice(diskPath)
> + try:
> + extended_part = PDisk(device).getExtendedPartition()
> + except NotImplementedError as e:
> + wok_log.warning(
> + "Error getting extended partition info for dev %s type %s: %s",
> + devNodePath, devType, e.message)
> + # Treat a disk with an unsupported partition table as if it does not
> + # contain extended partitions.
> + return False
> + if extended_part and extended_part.path == devNodePath:
> + return True
> + return False
> +
> +
> +def _parse_lsblk_output(output, keys):
> + # output is in the format key="value",
> + # where key can be NAME, TYPE, FSTYPE, SIZE, MOUNTPOINT, etc
> + lines = output.rstrip("\n").split("\n")
> + r = []
> + for line in lines:
> + d = {}
> + for key in keys:
> + expression = r"%s=\".*?\"" % key
> + match = re.search(expression, line)
> + field = match.group()
> + k, v = field.split('=', 1)
> + d[k.lower()] = v[1:-1]
> + r.append(d)
> + return r
> +
> +
> +def _get_vgname(devNodePath):
> + """ Return volume group name of a physical volume. If the device node path
> + is not a physical volume, return empty string. """
> + pvs = subprocess.Popen(
> + ["pvs", "--unbuffered", "--nameprefixes", "--noheadings",
> + "-o", "vg_name", devNodePath],
> + stdout=subprocess.PIPE, stderr=subprocess.PIPE)
> + out, err = pvs.communicate()
> + if pvs.returncode != 0:
> + return ""
> +
> + return re.findall(r"LVM2_VG_NAME='([^\']*)'", out)[0]
> +
> +
> +def _is_available(name, devtype, fstype, mountpoint, majmin):
> + devNodePath = _get_dev_node_path(majmin)
> + # Only list unmounted and unformatted and leaf and (partition or disk)
> + # leaf means a partition, a disk has no partition, or a disk not held
> + # by any multipath device. Physical volume belongs to no volume group
> + # is also listed. Extended partitions should not be listed.
> + if (devtype in ['part', 'disk', 'mpath'] and
> + fstype in ['', 'LVM2_member'] and
> + mountpoint == "" and
> + _get_vgname(devNodePath) == "" and
> + _is_dev_leaf(devNodePath) and
> + not _is_dev_extended_partition(devtype, devNodePath)):
> + return True
> + return False
> +
> +
> +def get_partitions_names(check=False):
> + names = set()
> + keys = ["NAME", "TYPE", "FSTYPE", "MOUNTPOINT", "MAJ:MIN"]
> + # output is in the format key="value",
> + # where key can be NAME, TYPE, FSTYPE, MOUNTPOINT
> + for dev in _get_lsblk_devs(keys):
> + # split()[0] to avoid the second part of the name, after the
> + # whitespace
> + name = dev['name'].split()[0]
> + if check and not _is_available(name, dev['type'], dev['fstype'],
> + dev['mountpoint'], dev['maj:min']):
> + continue
> + names.add(name)
> +
> + return list(names)
> +
> +
> +def get_partition_details(name):
> + majmin = _get_dev_major_min(name)
> + dev_path = _get_dev_node_path(majmin)
> +
> + keys = ["TYPE", "FSTYPE", "SIZE", "MOUNTPOINT"]
> + try:
> + dev = _get_lsblk_devs(keys, [dev_path])[0]
> + except OperationFailed as e:
> + wok_log.error(
> + "Error getting partition info for %s: %s", name, e)
> + return {}
> +
> + dev['available'] = _is_available(name, dev['type'], dev['fstype'],
> + dev['mountpoint'], majmin)
> + if dev['mountpoint']:
> + # Sometimes the mountpoint comes with [SWAP] or other
> + # info which is not an actual mount point. Filtering it
> + regexp = re.compile(r"\[.*\]")
> + if regexp.search(dev['mountpoint']) is not None:
> + dev['mountpoint'] = ''
> + dev['path'] = dev_path
> + dev['name'] = name
> + return dev
> diff --git a/src/wok/plugins/gingerbase/i18n.py b/src/wok/plugins/gingerbase/i18n.py
> new file mode 100644
> index 0000000..fa93ee6
> --- /dev/null
> +++ b/src/wok/plugins/gingerbase/i18n.py
> @@ -0,0 +1,96 @@
> +#
> +# Project Ginger Base
> +#
> +# Copyright IBM, Corp. 2015
> +#
> +# This library is free software; you can redistribute it and/or
> +# modify it under the terms of the GNU Lesser General Public
> +# License as published by the Free Software Foundation; either
> +# version 2.1 of the License, or (at your option) any later version.
> +#
> +# This library is distributed in the hope that it will be useful,
> +# but WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> +# Lesser General Public License for more details.
> +#
> +# You should have received a copy of the GNU Lesser General Public
> +# License along with this library; if not, write to the Free Software
> +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> +
> +import gettext
> +
> +_ = gettext.gettext
> +
> +
> +messages = {
> + "GGBAPI0001E": _("Unknown parameter %(value)s"),
> +
> + "GGBASYNC0001E": _("Timeout of %(seconds)s seconds expired while running task '%(task)s."),
> +
> + "GGBDISKS0001E": _("Error while getting block devices. Details: %(err)s"),
> + "GGBDISKS0002E": _("Error while getting block device information for %(device)s."),
> +
> + "GGBDR0001E": _("Debug report %(name)s does not exist"),
> + "GGBDR0002E": _("Debug report tool not found in system"),
> + "GGBDR0003E": _("Unable to create debug report %(name)s. Details: %(err)s."),
> + "GGBDR0004E": _("Can not find any debug report with the given name %(name)s"),
> + "GGBDR0005E": _("Unable to generate debug report %(name)s. Details: %(err)s"),
> + "GGBDR0006E": _("You should give a name for the debug report file."),
> + "GGBDR0007E": _("Debug report name must be a string. Only letters, digits, underscore ('_') and hyphen ('-') are allowed."),
> + "GGBDR0008E": _("The debug report with specified name \"%(name)s\" already exists. Please use another one."),
> +
> + "GGBPART0001E": _("Partition %(name)s does not exist in the host"),
> +
> + "GGBHOST0001E": _("Unable to shutdown host machine as there are running virtual machines"),
> + "GGBHOST0002E": _("Unable to reboot host machine as there are running virtual machines"),
> + "GGBHOST0003E": _("Node device '%(name)s' not found"),
> + "GGBHOST0004E": _("Conflicting flag filters specified."),
> + "GGBHOST0005E": _("When specifying CPU topology, each element must be an integer greater than zero."),
> +
> + "GGBPKGUPD0001E": _("No packages marked for update"),
> + "GGBPKGUPD0002E": _("Package %(name)s is not marked to be updated."),
> + "GGBPKGUPD0003E": _("Error while getting packages marked to be updated. Details: %(err)s"),
> + "GGBPKGUPD0004E": _("There is no compatible package manager for this system."),
> +
> +
> + "GGBREPOS0001E": _("YUM Repository ID must be one word only string."),
> + "GGBREPOS0002E": _("Repository URL must be an http://, ftp:// or file:// URL."),
> + "GGBREPOS0003E": _("Repository configuration is a dictionary with specific values according to repository type."),
> + "GGBREPOS0004E": _("Distribution to DEB repository must be a string"),
> + "GGBREPOS0005E": _("Components to DEB repository must be listed in a array"),
> + "GGBREPOS0006E": _("Components to DEB repository must be a string"),
> + "GGBREPOS0007E": _("Mirror list to repository must be a string"),
> + "GGBREPOS0008E": _("YUM Repository name must be string."),
> + "GGBREPOS0009E": _("GPG check must be a boolean value."),
> + "GGBREPOS0010E": _("GPG key must be a URL pointing to the ASCII-armored file."),
> + "GGBREPOS0011E": _("Could not update repository %(repo_id)s."),
> + "GGBREPOS0012E": _("Repository %(repo_id)s does not exist."),
> + "GGBREPOS0013E": _("Specify repository base URL, mirror list or metalink in order to create or update a YUM repository."),
> + "GGBREPOS0014E": _("Repository management tool was not recognized for your system."),
> + "GGBREPOS0015E": _("Repository %(repo_id)s is already enabled."),
> + "GGBREPOS0016E": _("Repository %(repo_id)s is already disabled."),
> + "GGBREPOS0017E": _("Could not remove repository %(repo_id)s."),
> + "GGBREPOS0018E": _("Could not write repository configuration file %(repo_file)s"),
> + "GGBREPOS0019E": _("Specify repository distribution in order to create a DEB repository."),
> + "GGBREPOS0020E": _("Could not enable repository %(repo_id)s."),
> + "GGBREPOS0021E": _("Could not disable repository %(repo_id)s."),
> + "GGBREPOS0022E": _("YUM Repository ID already exists"),
> + "GGBREPOS0023E": _("YUM Repository name must be a string"),
> + "GGBREPOS0024E": _("Unable to list repositories. Details: '%(err)s'"),
> + "GGBREPOS0025E": _("Unable to retrieve repository information. Details: '%(err)s'"),
> + "GGBREPOS0026E": _("Unable to add repository. Details: '%(err)s'"),
> + "GGBREPOS0027E": _("Unable to remove repository. Details: '%(err)s'"),
> + "GGBREPOS0028E": _("Configuration items: '%(items)s' are not supported by repository manager"),
> + "GGBREPOS0029E": _("Repository metalink must be an http://, ftp:// or file:// URL."),
> + "GGBREPOS0030E": _("Cannot specify mirrorlist and metalink at the same time."),
> +
> +
> + "GGBCPUINF0001E": _("The number of vCPUs is too large for this system."),
> + "GGBCPUINF0002E": _("Invalid vCPU/topology combination."),
> + "GGBCPUINF0003E": _("This host (or current configuration) does not allow CPU topology."),
> + "GGBCPUINF0004E": _("This host (or current configuration) does not allow to fetch lscpu details."),
> + "GGBCPUINF0005E": _("This host (or current configuration) does not provide Socket(s) information."),
> + "GGBCPUINF0006E": _("This host (or current configuration) does not provide Core(s) per socket information."),
> + "GGBCPUINF0007E": _("This host (or current configuration) does not provide Thread(s) per core information."),
> +
> +}
> diff --git a/src/wok/plugins/gingerbase/repositories.py b/src/wok/plugins/gingerbase/repositories.py
> new file mode 100644
> index 0000000..06ea100
> --- /dev/null
> +++ b/src/wok/plugins/gingerbase/repositories.py
> @@ -0,0 +1,533 @@
> +#
> +# Project Ginger Base
> +#
> +# Copyright IBM, Corp. 2014-2015
> +#
> +# This library is free software; you can redistribute it and/or
> +# modify it under the terms of the GNU Lesser General Public
> +# License as published by the Free Software Foundation; either
> +# version 2.1 of the License, or (at your option) any later version.
> +#
> +# This library is distributed in the hope that it will be useful,
> +# but WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> +# Lesser General Public License for more details.
> +#
> +# You should have received a copy of the GNU Lesser General Public
> +# License along with this library; if not, write to the Free Software
> +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> +
> +import copy
> +import os
> +import time
> +import urlparse
> +from ConfigParser import ConfigParser
> +
> +from wok.basemodel import Singleton
> +from wok.exception import InvalidOperation, InvalidParameter
> +from wok.exception import OperationFailed, NotFoundError, MissingParameter
> +from wok.utils import validate_repo_url
> +
> +from config import gingerBaseLock
> +from yumparser import get_yum_repositories, write_repo_to_file
> +from yumparser import get_display_name, get_expanded_url
> +
> +
> +class Repositories(object):
> + __metaclass__ = Singleton
> +
> + """
> + Class to represent and operate with repositories information.
> + """
> + def __init__(self):
> + try:
> + __import__('yum')
> + self._pkg_mnger = YumRepo()
> + except ImportError:
> + try:
> + __import__('apt_pkg')
> + self._pkg_mnger = AptRepo()
> + except ImportError:
> + raise InvalidOperation('GGBREPOS0014E')
> +
> + def addRepository(self, params):
> + """
> + Add and enable a new repository
> + """
> + config = params.get('config', {})
> + extra_keys = list(
> + set(config.keys()).difference(set(self._pkg_mnger.CONFIG_ENTRY)))
> + if len(extra_keys) > 0:
> + raise InvalidParameter("GGBREPOS0028E",
> + {'items': ",".join(extra_keys)})
> +
> + return self._pkg_mnger.addRepo(params)
> +
> + def getRepositories(self):
> + """
> + Return a dictionary with all Ginger Base repositories. Each element uses
> + the format {<repo_id>: {repo}}, where repo is a dictionary in the
> + repositories.Repositories() format.
> + """
> + return self._pkg_mnger.getRepositoriesList()
> +
> + def getRepository(self, repo_id):
> + """
> + Return a dictionary with all info from a given repository ID.
> + """
> + info = self._pkg_mnger.getRepo(repo_id)
> + info['repo_id'] = repo_id
> + return info
> +
> + def enableRepository(self, repo_id):
> + """
> + Enable a repository.
> + """
> + return self._pkg_mnger.toggleRepo(repo_id, True)
> +
> + def disableRepository(self, repo_id):
> + """
> + Disable a given repository.
> + """
> + return self._pkg_mnger.toggleRepo(repo_id, False)
> +
> + def updateRepository(self, repo_id, params):
> + """
> + Update the information of a given repository.
> + The input is the repo_id of the repository to be updated and a dict
> + with the information to be updated.
> + """
> + return self._pkg_mnger.updateRepo(repo_id, params)
> +
> + def removeRepository(self, repo_id):
> + """
> + Remove a given repository
> + """
> + return self._pkg_mnger.removeRepo(repo_id)
> +
> +
> +class YumRepo(object):
> + """
> + Class to represent and operate with YUM repositories.
> + It's loaded only on those systems listed at YUM_DISTROS and loads necessary
> + modules in runtime.
> + """
> + TYPE = 'yum'
> + DEFAULT_CONF_DIR = "/etc/yum.repos.d"
> + CONFIG_ENTRY = ('repo_name', 'mirrorlist', 'metalink')
> +
> + def __init__(self):
> + self._confdir = self.DEFAULT_CONF_DIR
> +
> + def _get_repos(self, errcode):
> + try:
> + gingerBaseLock.acquire()
> + repos = get_yum_repositories()
> + except Exception, e:
> + gingerBaseLock.release()
> + raise OperationFailed(errcode, {'err': str(e)})
> + finally:
> + gingerBaseLock.release()
> +
> + return repos
> +
> + def getRepositoriesList(self):
> + """
> + Return a list of repositories IDs
> + """
> + repos = self._get_repos('GGBREPOS0024E')
> + return repos.keys()
> +
> + def getRepo(self, repo_id):
> + """
> + Return a dictionary in the repositories.Repositories() of the given
> + repository ID format with the information of a YumRepository object.
> + """
> + repos = self._get_repos('GGBREPOS0025E')
> +
> + if repo_id not in repos.keys():
> + raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
> +
> + entry = repos.get(repo_id)
> +
> + display_name = get_display_name(entry.name)
> +
> + info = {}
> + info['enabled'] = entry.enabled
> + info['baseurl'] = entry.baseurl or ''
> + info['config'] = {}
> + info['config']['display_repo_name'] = display_name
> + info['config']['repo_name'] = entry.name or ''
> + info['config']['gpgcheck'] = entry.gpgcheck
> + info['config']['gpgkey'] = entry.gpgkey or ''
> + info['config']['mirrorlist'] = entry.mirrorlist or ''
> + info['config']['metalink'] = entry.metalink or ''
> + return info
> +
> + def addRepo(self, params):
> + """
> + Add a given repository to YumBase
> + """
> + # At least one base url, or one mirror, must be given.
> + baseurl = params.get('baseurl', '')
> +
> + config = params.get('config', {})
> + mirrorlist = config.get('mirrorlist', '')
> + metalink = config.get('metalink', '')
> + if not baseurl and not mirrorlist and not metalink:
> + raise MissingParameter("GGBREPOS0013E")
> +
> + if baseurl:
> + validate_repo_url(get_expanded_url(baseurl))
> +
> + if mirrorlist:
> + validate_repo_url(get_expanded_url(mirrorlist))
> +
> + if metalink:
> + validate_repo_url(get_expanded_url(metalink))
> +
> + if mirrorlist and metalink:
> + raise InvalidOperation('GGBREPOS0030E')
> +
> + repo_id = params.get('repo_id', None)
> + if repo_id is None:
> + repo_id = "gingerbase_repo_%s" % str(int(time.time() * 1000))
> +
> + repos = self._get_repos('GGBREPOS0026E')
> + if repo_id in repos.keys():
> + raise InvalidOperation("GGBREPOS0022E", {'repo_id': repo_id})
> +
> + repo_name = config.get('repo_name', repo_id)
> + repo = {'baseurl': baseurl, 'mirrorlist': mirrorlist,
> + 'name': repo_name, 'gpgcheck': 1,
> + 'gpgkey': [], 'enabled': 1, 'metalink': metalink}
> +
> + # write a repo file in the system with repo{} information.
> + parser = ConfigParser()
> + parser.add_section(repo_id)
> +
> + for key, value in repo.iteritems():
> + if value:
> + parser.set(repo_id, key, value)
> +
> + repofile = os.path.join(self._confdir, repo_id + '.repo')
> + try:
> + with open(repofile, 'w') as fd:
> + parser.write(fd)
> + except:
> + raise OperationFailed("GGBREPOS0018E",
> + {'repo_file': repofile})
> +
> + return repo_id
> +
> + def toggleRepo(self, repo_id, enable):
> + repos = self._get_repos('GGBREPOS0011E')
> + if repo_id not in repos.keys():
> + raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
> +
> + entry = repos.get(repo_id)
> + if enable and entry.enabled:
> + raise InvalidOperation("GGBREPOS0015E", {'repo_id': repo_id})
> +
> + if not enable and not entry.enabled:
> + raise InvalidOperation("GGBREPOS0016E", {'repo_id': repo_id})
> +
> + gingerBaseLock.acquire()
> + try:
> + if enable:
> + entry.enable()
> + else:
> + entry.disable()
> +
> + write_repo_to_file(entry)
> + except:
> + if enable:
> + raise OperationFailed("GGBREPOS0020E", {'repo_id': repo_id})
> +
> + raise OperationFailed("GGBREPOS0021E", {'repo_id': repo_id})
> + finally:
> + gingerBaseLock.release()
> +
> + return repo_id
> +
> + def updateRepo(self, repo_id, params):
> + """
> + Update a given repository in repositories.Repositories() format
> + """
> + repos = self._get_repos('GGBREPOS0011E')
> + if repo_id not in repos.keys():
> + raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
> +
> + entry = repos.get(repo_id)
> +
> + baseurl = params.get('baseurl', None)
> + config = params.get('config', {})
> + mirrorlist = config.get('mirrorlist', None)
> + metalink = config.get('metalink', None)
> +
> + if baseurl is not None and len(baseurl.strip()) == 0:
> + baseurl = None
> +
> + if mirrorlist is not None and len(mirrorlist.strip()) == 0:
> + mirrorlist = None
> +
> + if metalink is not None and len(metalink.strip()) == 0:
> + metalink = None
> +
> + if baseurl is None and mirrorlist is None and metalink is None:
> + raise MissingParameter("GGBREPOS0013E")
> +
> + if baseurl is not None:
> + validate_repo_url(get_expanded_url(baseurl))
> + entry.baseurl = baseurl
> +
> + if mirrorlist is not None:
> + validate_repo_url(get_expanded_url(mirrorlist))
> + entry.mirrorlist = mirrorlist
> +
> + if metalink is not None:
> + validate_repo_url(get_expanded_url(metalink))
> + entry.metalink = metalink
> +
> + if mirrorlist and metalink:
> + raise InvalidOperation('GGBREPOS0030E')
> +
> + entry.id = params.get('repo_id', repo_id)
> + entry.name = config.get('repo_name', entry.name)
> + entry.gpgcheck = config.get('gpgcheck', entry.gpgcheck)
> + entry.gpgkey = config.get('gpgkey', entry.gpgkey)
> + gingerBaseLock.acquire()
> + write_repo_to_file(entry)
> + gingerBaseLock.release()
> + return repo_id
> +
> + def removeRepo(self, repo_id):
> + """
> + Remove a given repository
> + """
> + repos = self._get_repos('GGBREPOS0027E')
> + if repo_id not in repos.keys():
> + raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
> +
> + entry = repos.get(repo_id)
> + parser = ConfigParser()
> + with open(entry.repofile) as fd:
> + parser.readfp(fd)
> +
> + if len(parser.sections()) == 1:
> + os.remove(entry.repofile)
> + return
> +
> + parser.remove_section(repo_id)
> + with open(entry.repofile, "w") as fd:
> + parser.write(fd)
> +
> +
> +class AptRepo(object):
> + """
> + Class to represent and operate with APT repositories.
> + It's loaded only on systems where apt_pkg is available and loads necessary
> + modules in runtime.
> + """
> + TYPE = 'deb'
> + GINGERBASE_LIST = "gingerbase-source.list"
> + CONFIG_ENTRY = ('dist', 'comps')
> +
> + def __init__(self):
> + getattr(__import__('apt_pkg'), 'init_config')()
> + getattr(__import__('apt_pkg'), 'init_system')()
> + config = getattr(__import__('apt_pkg'), 'config')
> + self.pkg_lock = getattr(__import__('apt_pkg'), 'SystemLock')
> + module = __import__('aptsources.sourceslist', globals(), locals(),
> + ['SourcesList'], -1)
> +
> + self._sourceparts_path = '/%s%s' % (
> + config.get('Dir::Etc'), config.get('Dir::Etc::sourceparts'))
> + self._sourceslist = getattr(module, 'SourcesList')
> + self.filename = os.path.join(self._sourceparts_path, self.GINGERBASE_LIST)
> + if not os.path.exists(self.filename):
> + with open(self.filename, 'w') as fd:
> + fd.write("# This file is managed by Ginger Base and it must not "
> + "be modified manually\n")
> +
> + def _get_repos(self):
> + try:
> + with self.pkg_lock():
> + repos = self._sourceslist()
> + repos.refresh()
> + except Exception, e:
> + gingerBaseLock.release()
> + raise OperationFailed('GGBREPOS0025E', {'err': e.message})
> +
> + return repos
> +
> + def _get_repo_id(self, repo):
> + data = urlparse.urlparse(repo.uri)
> + name = data.hostname or data.path
> + return '%s-%s-%s' % (name, repo.dist, "-".join(repo.comps))
> +
> + def _get_source_entry(self, repo_id):
> + gingerBaseLock.acquire()
> + repos = self._get_repos()
> + gingerBaseLock.release()
> +
> + for r in repos:
> + # Ignore deb-src repositories
> + if r.type != 'deb':
> + continue
> +
> + if self._get_repo_id(r) != repo_id:
> + continue
> +
> + return r
> +
> + return None
> +
> + def getRepositoriesList(self):
> + """
> + Return a list of repositories IDs
> +
> + APT repositories don't have the concept of a repository ID, so for
> + internal control, the repository ID will be built as described in
> + _get_repo_id()
> + """
> + gingerBaseLock.acquire()
> + repos = self._get_repos()
> + gingerBaseLock.release()
> +
> + res = []
> + for r in repos:
> + # Ignore deb-src repositories
> + if r.type != 'deb':
> + continue
> +
> + res.append(self._get_repo_id(r))
> +
> + return res
> +
> + def getRepo(self, repo_id):
> + """
> + Return a dictionary in the repositories.Repositories() format of the
> + given repository ID with the information of a SourceEntry object.
> + """
> + r = self._get_source_entry(repo_id)
> + if r is None:
> + raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
> +
> + info = {'enabled': not r.disabled,
> + 'baseurl': r.uri,
> + 'config': {'dist': r.dist,
> + 'comps': r.comps}}
> + return info
> +
> + def addRepo(self, params):
> + """
> + Add a new APT repository based on <params>
> + """
> + # To create an APT repository the dist is a required parameter
> + # (in addition to baseurl, verified on controller through API.json)
> + config = params.get('config', None)
> + if config is None:
> + raise MissingParameter("GGBREPOS0019E")
> +
> + if 'dist' not in config.keys():
> + raise MissingParameter("GGBREPOS0019E")
> +
> + uri = params['baseurl']
> + dist = config['dist']
> + comps = config.get('comps', [])
> +
> + validate_repo_url(get_expanded_url(uri))
> +
> + gingerBaseLock.acquire()
> + try:
> + repos = self._get_repos()
> + source_entry = repos.add('deb', uri, dist, comps,
> + file=self.filename)
> + with self.pkg_lock():
> + repos.save()
> + except Exception as e:
> + gingerBaseLock.release()
> + raise OperationFailed("GGBREPOS0026E", {'err': e.message})
> + gingerBaseLock.release()
> + return self._get_repo_id(source_entry)
> +
> + def toggleRepo(self, repo_id, enable):
> + """
> + Enable a given repository
> + """
> + r = self._get_source_entry(repo_id)
> + if r is None:
> + raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
> +
> + if enable and not r.disabled:
> + raise InvalidOperation("GGBREPOS0015E", {'repo_id': repo_id})
> +
> + if not enable and r.disabled:
> + raise InvalidOperation("GGBREPOS0016E", {'repo_id': repo_id})
> +
> + if enable:
> + line = 'deb'
> + else:
> + line = '#deb'
> +
> + gingerBaseLock.acquire()
> + try:
> + repos = self._get_repos()
> + with self.pkg_lock():
> + repos.remove(r)
> + repos.add(line, r.uri, r.dist, r.comps, file=self.filename)
> + repos.save()
> + except:
> + gingerBaseLock.release()
> + if enable:
> + raise OperationFailed("GGBREPOS0020E", {'repo_id': repo_id})
> +
> + raise OperationFailed("GGBREPOS0021E", {'repo_id': repo_id})
> + finally:
> + gingerBaseLock.release()
> +
> + return repo_id
> +
> + def updateRepo(self, repo_id, params):
> + """
> + Update a given repository in repositories.Repositories() format
> + """
> + old_info = self.getRepo(repo_id)
> + updated_info = copy.deepcopy(old_info)
> + updated_info['baseurl'] = params.get(
> + 'baseurl', updated_info['baseurl'])
> +
> + if 'config' in params.keys():
> + config = params['config']
> + updated_info['config']['dist'] = config.get(
> + 'dist', old_info['config']['dist'])
> + updated_info['config']['comps'] = config.get(
> + 'comps', old_info['config']['comps'])
> +
> + self.removeRepo(repo_id)
> + try:
> + return self.addRepo(updated_info)
> + except:
> + self.addRepo(old_info)
> + raise
> +
> + def removeRepo(self, repo_id):
> + """
> + Remove a given repository
> + """
> + r = self._get_source_entry(repo_id)
> + if r is None:
> + raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
> +
> + gingerBaseLock.acquire()
> + try:
> + repos = self._get_repos()
> + with self.pkg_lock():
> + repos.remove(r)
> + repos.save()
> + except:
> + gingerBaseLock.release()
> + raise OperationFailed("GGBREPOS0017E", {'repo_id': repo_id})
> + finally:
> + gingerBaseLock.release()
> diff --git a/src/wok/plugins/gingerbase/swupdate.py b/src/wok/plugins/gingerbase/swupdate.py
> new file mode 100644
> index 0000000..ee7f8a6
> --- /dev/null
> +++ b/src/wok/plugins/gingerbase/swupdate.py
> @@ -0,0 +1,275 @@
> +#
> +# Project Ginger Base
> +#
> +# Copyright IBM, Corp. 2014-2015
> +#
> +# This library is free software; you can redistribute it and/or
> +# modify it under the terms of the GNU Lesser General Public
> +# License as published by the Free Software Foundation; either
> +# version 2.1 of the License, or (at your option) any later version.
> +#
> +# This library is distributed in the hope that it will be useful,
> +# but WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> +# Lesser General Public License for more details.
> +#
> +# You should have received a copy of the GNU Lesser General Public
> +# License along with this library; if not, write to the Free Software
> +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> +
> +
> +import os
> +import signal
> +import subprocess
> +import time
> +
> +from wok.basemodel import Singleton
> +from wok.exception import NotFoundError, OperationFailed
> +from wok.utils import run_command, wok_log
> +
> +from config import gingerBaseLock
> +from yumparser import get_yum_packages_list_update
> +
> +
> +class SoftwareUpdate(object):
> + __metaclass__ = Singleton
> +
> + """
> + Class to represent and operate with OS software update.
> + """
> + def __init__(self):
> + # This stores all packages to be updated from the Ginger Base perspective. It's a
> + # dictionary of dictionaries, in the format {'package_name': package},
> + # where:
> + # package = {'package_name': <string>, 'version': <string>,
> + # 'arch': <string>, 'repository': <string>
> + # }
> + self._packages = {}
> +
> + # This stores the number of packages to update
> + self._num2update = 0
> +
> + # Get the distro of host machine and creates an object related to
> + # correct package management system
> + try:
> + __import__('yum')
> + wok_log.info("Loading YumUpdate features.")
> + self._pkg_mnger = YumUpdate()
> + except ImportError:
> + try:
> + __import__('apt')
> + wok_log.info("Loading AptUpdate features.")
> + self._pkg_mnger = AptUpdate()
> + except ImportError:
> + zypper_help = ["zypper", "--help"]
> + (stdout, stderr, returncode) = run_command(zypper_help)
> + if returncode == 0:
> + wok_log.info("Loading ZypperUpdate features.")
> + self._pkg_mnger = ZypperUpdate()
> + else:
> + raise Exception("There is no compatible package manager "
> + "for this system.")
> +
> + def _scanUpdates(self):
> + """
> + Update self._packages with packages to be updated.
> + """
> + self._packages = {}
> + self._num2update = 0
> +
> + # Call system pkg_mnger to get the packages as list of dictionaries.
> + for pkg in self._pkg_mnger.getPackagesList():
> +
> + # Check if already exist a package in self._packages
> + pkg_id = pkg.get('package_name')
> + if pkg_id in self._packages.keys():
> + # package already listed to update. do nothing
> + continue
> +
> + # Update the self._packages and self._num2update
> + self._packages[pkg_id] = pkg
> + self._num2update = self._num2update + 1
> +
> + def getUpdates(self):
> + """
> + Return the self._packages.
> + """
> + self._scanUpdates()
> + return self._packages
> +
> + def getUpdate(self, name):
> + """
> + Return a dictionary with all info from a given package name.
> + """
> + if name not in self._packages.keys():
> + raise NotFoundError('GGBPKGUPD0002E', {'name': name})
> +
> + return self._packages[name]
> +
> + def getNumOfUpdates(self):
> + """
> + Return the number of packages to be updated.
> + """
> + self._scanUpdates()
> + return self._num2update
> +
> + def preUpdate(self):
> + """
> + Make adjustments before executing the command in
> + a child process.
> + """
> + os.setsid()
> + signal.signal(signal.SIGTERM, signal.SIG_IGN)
> +
> + def doUpdate(self, cb, params):
> + """
> + Execute the update
> + """
> + # reset messages
> + cb('')
> +
> + cmd = self._pkg_mnger.update_cmd
> + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
> + stderr=subprocess.PIPE,
> + preexec_fn=self.preUpdate)
> + msgs = []
> + while proc.poll() is None:
> + msgs.append(proc.stdout.readline())
> + cb(''.join(msgs))
> + time.sleep(0.5)
> +
> + # read the final output lines
> + msgs.extend(proc.stdout.readlines())
> +
> + retcode = proc.poll()
> + if retcode == 0:
> + return cb(''.join(msgs), True)
> +
> + msgs.extend(proc.stderr.readlines())
> + return cb(''.join(msgs), False)
> +
> +
class YumUpdate(object):
    """
    Backend for the YUM package manager.  Instantiated by
    SoftwareUpdate only when the 'yum' python module is importable.
    """
    def __init__(self):
        # Cached yum package objects from the last refresh.
        self._pkgs = {}
        # Command SoftwareUpdate.doUpdate() runs to apply all updates.
        self.update_cmd = ["yum", "-y", "update"]

    def _refreshUpdateList(self):
        """
        Refresh self._pkgs with the packages to be updated, holding the
        global gingerBaseLock while yum metadata is queried.

        Raises OperationFailed when the yum query fails.
        """
        try:
            gingerBaseLock.acquire()
            self._pkgs = get_yum_packages_list_update()
        # "except ... as" works on Python 2.6+ and Python 3, unlike the
        # legacy comma form used before.
        except Exception as e:
            raise OperationFailed('GGBPKGUPD0003E', {'err': str(e)})
        finally:
            gingerBaseLock.release()

    def getPackagesList(self):
        """
        Return a list of package's dictionaries. Each dictionary contains the
        information about a package, in the format:
        package = {'package_name': <string>, 'version': <string>,
                   'arch': <string>, 'repository': <string>}
        """
        self._refreshUpdateList()
        pkg_list = []
        for pkg in self._pkgs:
            package = {'package_name': pkg.name, 'version': pkg.version,
                       'arch': pkg.arch, 'repository': pkg.ui_from_repo}
            pkg_list.append(package)
        return pkg_list
> +
> +
class AptUpdate(object):
    """
    Backend for the APT package manager.  Instantiated by
    SoftwareUpdate only when the 'apt' python module is importable.
    """
    def __init__(self):
        # Cached apt package objects from the last refresh.
        self._pkgs = {}
        # apt_pkg.SystemLock context manager, resolved lazily so apt_pkg
        # is only required when this backend is actually used.
        self.pkg_lock = getattr(__import__('apt_pkg'), 'SystemLock')
        # Command SoftwareUpdate.doUpdate() runs to apply all updates.
        self.update_cmd = ['apt-get', 'upgrade', '-y']

    def _refreshUpdateList(self):
        """
        Refresh self._pkgs with the packages to be upgraded.  The caller
        must hold gingerBaseLock.

        Raises OperationFailed when the apt cache cannot be read.
        """
        apt_cache = getattr(__import__('apt'), 'Cache')()
        try:
            with self.pkg_lock():
                apt_cache.update()
                apt_cache.upgrade()
                self._pkgs = apt_cache.get_changes()
        except Exception as e:
            # str(e) instead of e.message: .message is deprecated and
            # gone in Python 3.  The lock release now happens in the
            # caller's finally block, avoiding a possible double
            # release / leaked lock.
            raise OperationFailed('GGBPKGUPD0003E', {'err': str(e)})

    def getPackagesList(self):
        """
        Return a list of package's dictionaries. Each dictionary contains the
        information about a package, in the format
        package = {'package_name': <string>, 'version': <string>,
                   'arch': <string>, 'repository': <string>}
        """
        gingerBaseLock.acquire()
        try:
            # try/finally guarantees the global lock is released even
            # when the refresh raises.
            self._refreshUpdateList()
        finally:
            gingerBaseLock.release()
        pkg_list = []
        for pkg in self._pkgs:
            package = {'package_name': pkg.shortname,
                       'version': pkg.candidate.version,
                       'arch': pkg._pkg.architecture,
                       'repository': pkg.candidate.origins[0].label}
            pkg_list.append(package)

        return pkg_list
> +
> +
class ZypperUpdate(object):
    """
    Backend for the Zypper package manager.  Instantiated by
    SoftwareUpdate only when the 'zypper' binary is available.
    """
    def __init__(self):
        # Cached package dicts from the last refresh.
        self._pkgs = {}
        # Command SoftwareUpdate.doUpdate() runs to apply all updates.
        self.update_cmd = ["zypper", "--non-interactive", "update",
                           "--auto-agree-with-licenses"]

    def _refreshUpdateList(self):
        """
        Refresh self._pkgs by parsing "zypper list-updates" output.
        The caller must hold gingerBaseLock.

        Raises OperationFailed when zypper writes anything to stderr.
        """
        self._pkgs = []
        cmd = ["zypper", "list-updates"]
        (stdout, stderr, returncode) = run_command(cmd)

        if len(stderr) > 0:
            raise OperationFailed('GGBPKGUPD0003E', {'err': stderr})

        # Update rows look like:
        # v | <repository> | <name> | <current ver> | <new ver> | <arch>
        for line in stdout.split('\n'):
            if line.find('v |') >= 0:
                info = line.split(' | ')
                package = {'package_name': info[2], 'version': info[4],
                           'arch': info[5], 'repository': info[1]}
                self._pkgs.append(package)

    def getPackagesList(self):
        """
        Return a list of package's dictionaries. Each dictionary contains the
        information about a package, in the format
        package = {'package_name': <string>, 'version': <string>,
                   'arch': <string>, 'repository': <string>}
        """
        gingerBaseLock.acquire()
        try:
            # try/finally fixes a lock leak: the original never released
            # gingerBaseLock when _refreshUpdateList raised.
            self._refreshUpdateList()
        finally:
            gingerBaseLock.release()
        return self._pkgs
> diff --git a/src/wok/plugins/gingerbase/yumparser.py b/src/wok/plugins/gingerbase/yumparser.py
> new file mode 100644
> index 0000000..8590bd2
> --- /dev/null
> +++ b/src/wok/plugins/gingerbase/yumparser.py
> @@ -0,0 +1,353 @@
> +#
> +# Project Ginger Base
> +#
> +# Copyright IBM, Corp. 2015
> +#
> +# This library is free software; you can redistribute it and/or
> +# modify it under the terms of the GNU Lesser General Public
> +# License as published by the Free Software Foundation; either
> +# version 2.1 of the License, or (at your option) any later version.
> +#
> +# This library is distributed in the hope that it will be useful,
> +# but WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> +# Lesser General Public License for more details.
> +#
> +# You should have received a copy of the GNU Lesser General Public
> +# License along with this library; if not, write to the Free Software
> +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> +import subprocess
> +import glob
> +
> +from os import listdir
> +from os.path import isfile, splitext, basename
> +
> +try:
> + import rpm
> +except ImportError:
> + pass
> +
> +
class YumRepoObject(object):
    """In-memory representation of one section of a yum .repo file."""

    def __init__(self, repo_id, repofile):
        self.repo_id = repo_id
        self.name = None
        self.baseurl = None
        self.enabled = True
        self.gpgcheck = True
        self.gpgkey = None
        self.metalink = None
        self.mirrorlist = None
        self.repofile = repofile
        self.string_attrs = ['baseurl', 'gpgkey', 'name',
                             'metalink', 'mirrorlist']
        self.boolean_attrs = ['enabled', 'gpgcheck']

    def set_attribute(self, key, strvalue):
        """Assign an attribute from its .repo-file string form.

        Boolean attributes are stored as real booleans ('1' == True);
        unknown keys are silently ignored.
        """
        if key in self.string_attrs:
            setattr(self, key, strvalue)
        elif key in self.boolean_attrs:
            setattr(self, key, strvalue == '1')

    def get_attribute_str(self, key):
        """Return the "key=value" line for a .repo file, or None when
        the key is unknown or the attribute is unset."""
        if key not in self.get_attributes():
            return None

        value = getattr(self, key)
        if key in self.boolean_attrs:
            value = '1' if value is True else '0'

        if value is None:
            return None

        return '%s=%s' % (key, value)

    def get_attributes(self):
        """All recognized attribute names (string + boolean)."""
        return self.string_attrs + self.boolean_attrs

    def enable(self):
        """Mark the repository as enabled."""
        self.enabled = True

    def disable(self):
        """Mark the repository as disabled."""
        self.enabled = False

    def __str__(self):
        lines = ['[%s]' % self.repo_id]
        for key in self.get_attributes():
            attr_line = self.get_attribute_str(key)
            if attr_line is not None:
                lines.append(attr_line)
        return '\n'.join(lines) + '\n'
> +
> +
def get_repo_files():
    """Return the full paths of all *.repo files in /etc/yum.repos.d."""
    YUM_REPO_DIR = '/etc/yum.repos.d'

    def _is_repository_file(path):
        return isfile(path) and splitext(path)[1] == '.repo'

    candidates = ['%s/%s' % (YUM_REPO_DIR, entry)
                  for entry in listdir(YUM_REPO_DIR)]
    return [path for path in candidates if _is_repository_file(path)]
> +
> +
> +def _ignore_line_repo_file(line):
> + return line.startswith("#") or '=' not in line
> +
> +
def _get_repos_from_file(repo_file):
    """Parse one .repo file into {repo_id: YumRepoObject}."""
    repos_from_file = {}
    current_id = None
    current = None

    with open(repo_file) as f:
        for raw_line in f:
            line = raw_line.strip()
            # A "[section]" header starts a new repository entry.
            if line.startswith("["):
                if current is not None:
                    repos_from_file[current_id] = current
                current_id = line.strip('[]')
                current = YumRepoObject(current_id, repo_file)
            elif not _ignore_line_repo_file(line):
                key, _, value = line.partition('=')
                current.set_attribute(key.strip(), value.strip())

    # Flush the last entry parsed from the file.
    if current is not None:
        repos_from_file[current_id] = current

    return repos_from_file
> +
> +
def get_yum_repositories():
    """Parse every yum .repo file on the system and return the merged
    {repo_id: YumRepoObject} mapping."""
    repos = {}
    for repo_file in get_repo_files():
        repos.update(_get_repos_from_file(repo_file))
    return repos
> +
> +
> +def _retrieve_repo_line_index(data, repo):
> + repo_entry = '[' + repo.repo_id + ']\n'
> + try:
> + repo_index = data.index(repo_entry)
> + except:
> + return None
> + return repo_index
> +
> +
def _update_repo_file_data(data, repo, repo_index):
    """
    Rewrite repo's section inside data (a list of file lines) so it
    reflects repo's current attribute values.

    Lines holding a known key are rewritten in place; attributes with
    no existing line are inserted right after the section header.
    Returns the (mutated) list.
    """
    remaining_repo_attrs = repo.get_attributes()

    for i in range(repo_index + 1, len(data)):
        line = data[i].strip()
        # Stop at the next section header.
        if line.startswith('['):
            break
        if _ignore_line_repo_file(line):
            continue
        key, _ = line.split('=', 1)
        key = key.strip()
        attr_str = repo.get_attribute_str(key)
        if attr_str is None:
            continue
        # Guard against a key duplicated in the file: removing it a
        # second time used to raise ValueError.
        if key in remaining_repo_attrs:
            remaining_repo_attrs.remove(key)
        data[i] = attr_str + '\n'

    # Insert attributes that had no line in the file yet.
    for attr in remaining_repo_attrs:
        attr_str = repo.get_attribute_str(attr)
        if attr_str is None:
            continue
        data.insert(repo_index + 1, attr_str + '\n')

    return data
> +
> +
def write_repo_to_file(repo):
    """Persist repo's attributes back into its .repo file.  Does
    nothing when the section is missing from the file."""
    with open(repo.repofile) as f:
        lines = f.readlines()

    index = _retrieve_repo_line_index(lines, repo)
    if index is None:
        return

    updated = _update_repo_file_data(lines, repo, index)

    with open(repo.repofile, 'w') as f:
        f.writelines(updated)
> +
> +
> +def _get_last_line_repo(data, repo_index):
> + stop_delete_index = None
> + for i in range(repo_index+1, len(data)):
> + line = data[i].strip()
> + if line.startswith('['):
> + stop_delete_index = i - 1
> + break
> + if stop_delete_index is None:
> + stop_delete_index = len(data) - 1
> +
> + return stop_delete_index
> +
> +
def _remove_repo_file_data(data, repo_index):
    """Delete the whole section starting at repo_index from data (a
    list of file lines) and return the mutated list."""
    last_line_repo = _get_last_line_repo(data, repo_index)
    # One slice deletion replaces the original reversed pop() loop.
    del data[repo_index:last_line_repo + 1]
    return data
> +
> +
def delete_repo_from_file(repo):
    """Remove repo's section from its .repo file.  Does nothing when
    the section is missing."""
    with open(repo.repofile) as f:
        lines = f.readlines()

    index = _retrieve_repo_line_index(lines, repo)
    if index is None:
        return

    remaining = _remove_repo_file_data(lines, index)

    with open(repo.repofile, 'w') as f:
        f.writelines(remaining)
> +
> +
def _get_releasever():
    """
    Resolve the $releasever yum variable by asking the rpm database
    which package owns /etc/*-release and taking its version.  Falls
    back to the literal '%releasever' when it cannot be determined.
    """
    ret = '%releasever'

    # Guard: with no release file at all, the old glob(...)[0] raised
    # IndexError.
    release_files = glob.glob('/etc/*-release')
    if not release_files:
        return ret

    transaction = rpm.TransactionSet()
    match_iter = transaction.dbMatch('basenames', release_files[0])

    try:
        # builtin next() works on both Python 2.6+ and Python 3, unlike
        # the removed-in-py3 .next() method.
        ret = next(match_iter)['version']
    except StopIteration:
        pass

    return ret
> +
> +
def _get_basearch():
    """Resolve the $basearch yum variable from `uname -i` output."""
    proc = subprocess.Popen(['uname', '-i'], stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    return out.strip('"\n')
> +
> +
> +def _get_all_yum_vars():
> + variables = {}
> +
> + def _get_var_content(varfile):
> + with open(varfile) as f:
> + variables[basename(varfile)] = f.read().strip('\n')
> +
> + map(lambda vfile:
> + _get_var_content(vfile),
> + glob.glob('/etc/yum/vars/*'))
> +
> + return variables
> +
> +
def _expand_variables(stringvar, split_char=' '):
    """Replace every $var token in stringvar whose name is a known yum
    variable (vars dir entries plus releasever/basearch) with its
    value.  Tokens are found by splitting on split_char."""
    yum_variables = _get_all_yum_vars()
    yum_variables['releasever'] = _get_releasever()
    yum_variables['basearch'] = _get_basearch()

    expanded = stringvar
    for token in stringvar.split(split_char):
        if token.startswith('$') and token.strip('$') in yum_variables:
            expanded = expanded.replace(token,
                                        yum_variables[token.strip('$')])
    return expanded
> +
> +
def get_display_name(name):
    """Expand yum variables in a repository display name.  Names that
    are empty/None or contain no '$' are returned unchanged."""
    if not name:
        return name
    if '$' not in name:
        return name
    return _expand_variables(name)
> +
> +
def get_expanded_url(url):
    """Expand yum variables in a repository URL.  Strings that are not
    a single '<scheme>://<rest>' pair, or that contain no '$', are
    returned unchanged."""
    parts = url.split('://')
    if len(parts) != 2:
        return url
    if '$' not in url:
        return url
    return _expand_variables(url, '/')
> +
> +
class YumUpdatePackageObject(object):
    """Lightweight record for one package reported by 'yum
    check-update'.  Attribute names mirror yum's own package objects
    (name, arch, version, ui_from_repo) so callers can treat both
    uniformly."""

    def __init__(self, name, arch, version, repo):
        self.name = name
        self.arch = arch
        self.version = version
        self.ui_from_repo = repo
> +
> +
> +def _include_line_checkupdate_output(line):
> + tokens = line.split()
> +
> + if len(tokens) != 3:
> + return False
> +
> + if '.' not in tokens[0]:
> + return False
> +
> + return True
> +
> +
> +def _ignore_obsoleting_packages_in(output):
> + out = ''
> + for l in output.split('\n'):
> + if 'Obsoleting ' in l:
> + break
> + out += l + '\n'
> + return out
> +
> +
> +def _filter_lines_checkupdate_output(output):
> + if output is None:
> + return []
> +
> + output = _ignore_obsoleting_packages_in(output)
> +
> + out = [l for l in output.split('\n')
> + if _include_line_checkupdate_output(l)]
> + return out
> +
> +
def _get_yum_checkupdate_output():
    """Run 'yum check-update -d0' and return its stdout, or None when
    yum exits with status 1 (a real failure)."""
    proc = subprocess.Popen(['yum', 'check-update', '-d0'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, error = proc.communicate()
    # yum exits 0 when there is nothing to update and 100 when updates
    # exist; only 1 signals an error.
    if proc.returncode == 1:
        return None

    return out
> +
> +
def get_yum_packages_list_update(checkupdate_output=None):
    """Parse 'yum check-update' output into YumUpdatePackageObject
    instances.  When no output is supplied, runs the command itself."""
    if checkupdate_output is None:
        checkupdate_output = _get_yum_checkupdate_output()

    packages = []
    for line in _filter_lines_checkupdate_output(checkupdate_output):
        # The filter guarantees exactly three whitespace-separated
        # fields: <name.arch> <version> <repository>.
        name_arch, version, repo = line.split()
        name, arch = name_arch.rsplit('.', 1)
        packages.append(YumUpdatePackageObject(name, arch, version, repo))

    return packages
> diff --git a/src/wok/plugins/kimchi/disks.py b/src/wok/plugins/kimchi/disks.py
> deleted file mode 100644
> index eb40e3a..0000000
> --- a/src/wok/plugins/kimchi/disks.py
> +++ /dev/null
> @@ -1,196 +0,0 @@
> -#
> -# Project Kimchi
> -#
> -# Copyright IBM, Corp. 2013-2015
> -#
> -# This library is free software; you can redistribute it and/or
> -# modify it under the terms of the GNU Lesser General Public
> -# License as published by the Free Software Foundation; either
> -# version 2.1 of the License, or (at your option) any later version.
> -#
> -# This library is distributed in the hope that it will be useful,
> -# but WITHOUT ANY WARRANTY; without even the implied warranty of
> -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> -# Lesser General Public License for more details.
> -#
> -# You should have received a copy of the GNU Lesser General Public
> -# License along with this library; if not, write to the Free Software
> -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> -
> -import os.path
> -import re
> -import subprocess
> -from parted import Device as PDevice
> -from parted import Disk as PDisk
> -
> -from wok.exception import OperationFailed
> -from wok.utils import wok_log
> -
> -
> -def _get_dev_node_path(maj_min):
> - """ Returns device node path given the device number 'major:min' """
> -
> - dm_name = "/sys/dev/block/%s/dm/name" % maj_min
> - if os.path.exists(dm_name):
> - with open(dm_name) as dm_f:
> - content = dm_f.read().rstrip('\n')
> - return "/dev/mapper/" + content
> -
> - uevent = "/sys/dev/block/%s/uevent" % maj_min
> - with open(uevent) as ueventf:
> - content = ueventf.read()
> -
> - data = dict(re.findall(r'(\S+)=(".*?"|\S+)', content.replace("\n", " ")))
> -
> - return "/dev/%s" % data["DEVNAME"]
> -
> -
> -def _get_lsblk_devs(keys, devs=[]):
> - lsblk = subprocess.Popen(
> - ["lsblk", "-Pbo"] + [','.join(keys)] + devs,
> - stdout=subprocess.PIPE, stderr=subprocess.PIPE)
> - out, err = lsblk.communicate()
> - if lsblk.returncode != 0:
> - raise OperationFailed("KCHDISKS0001E", {'err': err})
> -
> - return _parse_lsblk_output(out, keys)
> -
> -
> -def _get_dev_major_min(name):
> - maj_min = None
> -
> - keys = ["NAME", "MAJ:MIN"]
> - dev_list = _get_lsblk_devs(keys)
> -
> - for dev in dev_list:
> - if dev['name'].split()[0] == name:
> - maj_min = dev['maj:min']
> - break
> - else:
> - raise OperationFailed("KCHDISKS0002E", {'device': name})
> -
> - return maj_min
> -
> -
> -def _is_dev_leaf(devNodePath):
> - try:
> - # By default, lsblk prints a device information followed by children
> - # device information
> - childrenCount = len(
> - _get_lsblk_devs(["NAME"], [devNodePath])) - 1
> - except OperationFailed as e:
> - # lsblk is known to fail on multipath devices
> - # Assume these devices contain children
> - wok_log.error(
> - "Error getting device info for %s: %s", devNodePath, e)
> - return False
> -
> - return childrenCount == 0
> -
> -
> -def _is_dev_extended_partition(devType, devNodePath):
> - if devType != 'part':
> - return False
> - diskPath = devNodePath.rstrip('0123456789')
> - device = PDevice(diskPath)
> - try:
> - extended_part = PDisk(device).getExtendedPartition()
> - except NotImplementedError as e:
> - wok_log.warning(
> - "Error getting extended partition info for dev %s type %s: %s",
> - devNodePath, devType, e.message)
> - # Treate disk with unsupported partiton table as if it does not
> - # contain extended partitions.
> - return False
> - if extended_part and extended_part.path == devNodePath:
> - return True
> - return False
> -
> -
> -def _parse_lsblk_output(output, keys):
> - # output is on format key="value",
> - # where key can be NAME, TYPE, FSTYPE, SIZE, MOUNTPOINT, etc
> - lines = output.rstrip("\n").split("\n")
> - r = []
> - for line in lines:
> - d = {}
> - for key in keys:
> - expression = r"%s=\".*?\"" % key
> - match = re.search(expression, line)
> - field = match.group()
> - k, v = field.split('=', 1)
> - d[k.lower()] = v[1:-1]
> - r.append(d)
> - return r
> -
> -
> -def _get_vgname(devNodePath):
> - """ Return volume group name of a physical volume. If the device node path
> - is not a physical volume, return empty string. """
> - pvs = subprocess.Popen(
> - ["pvs", "--unbuffered", "--nameprefixes", "--noheadings",
> - "-o", "vg_name", devNodePath],
> - stdout=subprocess.PIPE, stderr=subprocess.PIPE)
> - out, err = pvs.communicate()
> - if pvs.returncode != 0:
> - return ""
> -
> - return re.findall(r"LVM2_VG_NAME='([^\']*)'", out)[0]
> -
> -
> -def _is_available(name, devtype, fstype, mountpoint, majmin):
> - devNodePath = _get_dev_node_path(majmin)
> - # Only list unmounted and unformated and leaf and (partition or disk)
> - # leaf means a partition, a disk has no partition, or a disk not held
> - # by any multipath device. Physical volume belongs to no volume group
> - # is also listed. Extended partitions should not be listed.
> - if (devtype in ['part', 'disk', 'mpath'] and
> - fstype in ['', 'LVM2_member'] and
> - mountpoint == "" and
> - _get_vgname(devNodePath) == "" and
> - _is_dev_leaf(devNodePath) and
> - not _is_dev_extended_partition(devtype, devNodePath)):
> - return True
> - return False
> -
> -
> -def get_partitions_names(check=False):
> - names = set()
> - keys = ["NAME", "TYPE", "FSTYPE", "MOUNTPOINT", "MAJ:MIN"]
> - # output is on format key="value",
> - # where key can be NAME, TYPE, FSTYPE, MOUNTPOINT
> - for dev in _get_lsblk_devs(keys):
> - # split()[0] to avoid the second part of the name, after the
> - # whiteline
> - name = dev['name'].split()[0]
> - if check and not _is_available(name, dev['type'], dev['fstype'],
> - dev['mountpoint'], dev['maj:min']):
> - continue
> - names.add(name)
> -
> - return list(names)
> -
> -
> -def get_partition_details(name):
> - majmin = _get_dev_major_min(name)
> - dev_path = _get_dev_node_path(majmin)
> -
> - keys = ["TYPE", "FSTYPE", "SIZE", "MOUNTPOINT"]
> - try:
> - dev = _get_lsblk_devs(keys, [dev_path])[0]
> - except OperationFailed as e:
> - wok_log.error(
> - "Error getting partition info for %s: %s", name, e)
> - return {}
> -
> - dev['available'] = _is_available(name, dev['type'], dev['fstype'],
> - dev['mountpoint'], majmin)
> - if dev['mountpoint']:
> - # Sometimes the mountpoint comes with [SWAP] or other
> - # info which is not an actual mount point. Filtering it
> - regexp = re.compile(r"\[.*\]")
> - if regexp.search(dev['mountpoint']) is not None:
> - dev['mountpoint'] = ''
> - dev['path'] = dev_path
> - dev['name'] = name
> - return dev
> diff --git a/src/wok/plugins/kimchi/repositories.py b/src/wok/plugins/kimchi/repositories.py
> deleted file mode 100644
> index c6e061f..0000000
> --- a/src/wok/plugins/kimchi/repositories.py
> +++ /dev/null
> @@ -1,533 +0,0 @@
> -#
> -# Project Kimchi
> -#
> -# Copyright IBM, Corp. 2014-2015
> -#
> -# This library is free software; you can redistribute it and/or
> -# modify it under the terms of the GNU Lesser General Public
> -# License as published by the Free Software Foundation; either
> -# version 2.1 of the License, or (at your option) any later version.
> -#
> -# This library is distributed in the hope that it will be useful,
> -# but WITHOUT ANY WARRANTY; without even the implied warranty of
> -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> -# Lesser General Public License for more details.
> -#
> -# You should have received a copy of the GNU Lesser General Public
> -# License along with this library; if not, write to the Free Software
> -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> -
> -import copy
> -import os
> -import time
> -import urlparse
> -from ConfigParser import ConfigParser
> -
> -from wok.basemodel import Singleton
> -from wok.exception import InvalidOperation, InvalidParameter
> -from wok.exception import OperationFailed, NotFoundError, MissingParameter
> -from wok.utils import validate_repo_url
> -
> -from config import kimchiLock
> -from yumparser import get_yum_repositories, write_repo_to_file
> -from yumparser import get_display_name, get_expanded_url
> -
> -
> -class Repositories(object):
> - __metaclass__ = Singleton
> -
> - """
> - Class to represent and operate with repositories information.
> - """
> - def __init__(self):
> - try:
> - __import__('yum')
> - self._pkg_mnger = YumRepo()
> - except ImportError:
> - try:
> - __import__('apt_pkg')
> - self._pkg_mnger = AptRepo()
> - except ImportError:
> - raise InvalidOperation('KCHREPOS0014E')
> -
> - def addRepository(self, params):
> - """
> - Add and enable a new repository
> - """
> - config = params.get('config', {})
> - extra_keys = list(
> - set(config.keys()).difference(set(self._pkg_mnger.CONFIG_ENTRY)))
> - if len(extra_keys) > 0:
> - raise InvalidParameter("KCHREPOS0028E",
> - {'items': ",".join(extra_keys)})
> -
> - return self._pkg_mnger.addRepo(params)
> -
> - def getRepositories(self):
> - """
> - Return a dictionary with all Kimchi's repositories. Each element uses
> - the format {<repo_id>: {repo}}, where repo is a dictionary in the
> - repositories.Repositories() format.
> - """
> - return self._pkg_mnger.getRepositoriesList()
> -
> - def getRepository(self, repo_id):
> - """
> - Return a dictionary with all info from a given repository ID.
> - """
> - info = self._pkg_mnger.getRepo(repo_id)
> - info['repo_id'] = repo_id
> - return info
> -
> - def enableRepository(self, repo_id):
> - """
> - Enable a repository.
> - """
> - return self._pkg_mnger.toggleRepo(repo_id, True)
> -
> - def disableRepository(self, repo_id):
> - """
> - Disable a given repository.
> - """
> - return self._pkg_mnger.toggleRepo(repo_id, False)
> -
> - def updateRepository(self, repo_id, params):
> - """
> - Update the information of a given repository.
> - The input is the repo_id of the repository to be updated and a dict
> - with the information to be updated.
> - """
> - return self._pkg_mnger.updateRepo(repo_id, params)
> -
> - def removeRepository(self, repo_id):
> - """
> - Remove a given repository
> - """
> - return self._pkg_mnger.removeRepo(repo_id)
> -
> -
> -class YumRepo(object):
> - """
> - Class to represent and operate with YUM repositories.
> - It's loaded only on those systems listed at YUM_DISTROS and loads necessary
> - modules in runtime.
> - """
> - TYPE = 'yum'
> - DEFAULT_CONF_DIR = "/etc/yum.repos.d"
> - CONFIG_ENTRY = ('repo_name', 'mirrorlist', 'metalink')
> -
> - def __init__(self):
> - self._confdir = self.DEFAULT_CONF_DIR
> -
> - def _get_repos(self, errcode):
> - try:
> - kimchiLock.acquire()
> - repos = get_yum_repositories()
> - except Exception, e:
> - kimchiLock.release()
> - raise OperationFailed(errcode, {'err': str(e)})
> - finally:
> - kimchiLock.release()
> -
> - return repos
> -
> - def getRepositoriesList(self):
> - """
> - Return a list of repositories IDs
> - """
> - repos = self._get_repos('KCHREPOS0024E')
> - return repos.keys()
> -
> - def getRepo(self, repo_id):
> - """
> - Return a dictionary in the repositories.Repositories() of the given
> - repository ID format with the information of a YumRepository object.
> - """
> - repos = self._get_repos('KCHREPOS0025E')
> -
> - if repo_id not in repos.keys():
> - raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
> -
> - entry = repos.get(repo_id)
> -
> - display_name = get_display_name(entry.name)
> -
> - info = {}
> - info['enabled'] = entry.enabled
> - info['baseurl'] = entry.baseurl or ''
> - info['config'] = {}
> - info['config']['display_repo_name'] = display_name
> - info['config']['repo_name'] = entry.name or ''
> - info['config']['gpgcheck'] = entry.gpgcheck
> - info['config']['gpgkey'] = entry.gpgkey or ''
> - info['config']['mirrorlist'] = entry.mirrorlist or ''
> - info['config']['metalink'] = entry.metalink or ''
> - return info
> -
> - def addRepo(self, params):
> - """
> - Add a given repository to YumBase
> - """
> - # At least one base url, or one mirror, must be given.
> - baseurl = params.get('baseurl', '')
> -
> - config = params.get('config', {})
> - mirrorlist = config.get('mirrorlist', '')
> - metalink = config.get('metalink', '')
> - if not baseurl and not mirrorlist and not metalink:
> - raise MissingParameter("KCHREPOS0013E")
> -
> - if baseurl:
> - validate_repo_url(get_expanded_url(baseurl))
> -
> - if mirrorlist:
> - validate_repo_url(get_expanded_url(mirrorlist))
> -
> - if metalink:
> - validate_repo_url(get_expanded_url(metalink))
> -
> - if mirrorlist and metalink:
> - raise InvalidOperation('KCHREPOS0030E')
> -
> - repo_id = params.get('repo_id', None)
> - if repo_id is None:
> - repo_id = "kimchi_repo_%s" % str(int(time.time() * 1000))
> -
> - repos = self._get_repos('KCHREPOS0026E')
> - if repo_id in repos.keys():
> - raise InvalidOperation("KCHREPOS0022E", {'repo_id': repo_id})
> -
> - repo_name = config.get('repo_name', repo_id)
> - repo = {'baseurl': baseurl, 'mirrorlist': mirrorlist,
> - 'name': repo_name, 'gpgcheck': 1,
> - 'gpgkey': [], 'enabled': 1, 'metalink': metalink}
> -
> - # write a repo file in the system with repo{} information.
> - parser = ConfigParser()
> - parser.add_section(repo_id)
> -
> - for key, value in repo.iteritems():
> - if value:
> - parser.set(repo_id, key, value)
> -
> - repofile = os.path.join(self._confdir, repo_id + '.repo')
> - try:
> - with open(repofile, 'w') as fd:
> - parser.write(fd)
> - except:
> - raise OperationFailed("KCHREPOS0018E",
> - {'repo_file': repofile})
> -
> - return repo_id
> -
> - def toggleRepo(self, repo_id, enable):
> - repos = self._get_repos('KCHREPOS0011E')
> - if repo_id not in repos.keys():
> - raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
> -
> - entry = repos.get(repo_id)
> - if enable and entry.enabled:
> - raise InvalidOperation("KCHREPOS0015E", {'repo_id': repo_id})
> -
> - if not enable and not entry.enabled:
> - raise InvalidOperation("KCHREPOS0016E", {'repo_id': repo_id})
> -
> - kimchiLock.acquire()
> - try:
> - if enable:
> - entry.enable()
> - else:
> - entry.disable()
> -
> - write_repo_to_file(entry)
> - except:
> - if enable:
> - raise OperationFailed("KCHREPOS0020E", {'repo_id': repo_id})
> -
> - raise OperationFailed("KCHREPOS0021E", {'repo_id': repo_id})
> - finally:
> - kimchiLock.release()
> -
> - return repo_id
> -
> - def updateRepo(self, repo_id, params):
> - """
> - Update a given repository in repositories.Repositories() format
> - """
> - repos = self._get_repos('KCHREPOS0011E')
> - if repo_id not in repos.keys():
> - raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
> -
> - entry = repos.get(repo_id)
> -
> - baseurl = params.get('baseurl', None)
> - config = params.get('config', {})
> - mirrorlist = config.get('mirrorlist', None)
> - metalink = config.get('metalink', None)
> -
> - if baseurl is not None and len(baseurl.strip()) == 0:
> - baseurl = None
> -
> - if mirrorlist is not None and len(mirrorlist.strip()) == 0:
> - mirrorlist = None
> -
> - if metalink is not None and len(metalink.strip()) == 0:
> - metalink = None
> -
> - if baseurl is None and mirrorlist is None and metalink is None:
> - raise MissingParameter("KCHREPOS0013E")
> -
> - if baseurl is not None:
> - validate_repo_url(get_expanded_url(baseurl))
> - entry.baseurl = baseurl
> -
> - if mirrorlist is not None:
> - validate_repo_url(get_expanded_url(mirrorlist))
> - entry.mirrorlist = mirrorlist
> -
> - if metalink is not None:
> - validate_repo_url(get_expanded_url(metalink))
> - entry.metalink = metalink
> -
> - if mirrorlist and metalink:
> - raise InvalidOperation('KCHREPOS0030E')
> -
> - entry.id = params.get('repo_id', repo_id)
> - entry.name = config.get('repo_name', entry.name)
> - entry.gpgcheck = config.get('gpgcheck', entry.gpgcheck)
> - entry.gpgkey = config.get('gpgkey', entry.gpgkey)
> - kimchiLock.acquire()
> - write_repo_to_file(entry)
> - kimchiLock.release()
> - return repo_id
> -
> - def removeRepo(self, repo_id):
> - """
> - Remove a given repository
> - """
> - repos = self._get_repos('KCHREPOS0027E')
> - if repo_id not in repos.keys():
> - raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
> -
> - entry = repos.get(repo_id)
> - parser = ConfigParser()
> - with open(entry.repofile) as fd:
> - parser.readfp(fd)
> -
> - if len(parser.sections()) == 1:
> - os.remove(entry.repofile)
> - return
> -
> - parser.remove_section(repo_id)
> - with open(entry.repofile, "w") as fd:
> - parser.write(fd)
> -
> -
> -class AptRepo(object):
> - """
> - Class to represent and operate with APT repositories.
> - It's loaded only on those systems listed at APT_DISTROS and loads necessary
> - modules in runtime.
> - """
> - TYPE = 'deb'
> - KIMCHI_LIST = "kimchi-source.list"
> - CONFIG_ENTRY = ('dist', 'comps')
> -
> - def __init__(self):
> - getattr(__import__('apt_pkg'), 'init_config')()
> - getattr(__import__('apt_pkg'), 'init_system')()
> - config = getattr(__import__('apt_pkg'), 'config')
> - self.pkg_lock = getattr(__import__('apt_pkg'), 'SystemLock')
> - module = __import__('aptsources.sourceslist', globals(), locals(),
> - ['SourcesList'], -1)
> -
> - self._sourceparts_path = '/%s%s' % (
> - config.get('Dir::Etc'), config.get('Dir::Etc::sourceparts'))
> - self._sourceslist = getattr(module, 'SourcesList')
> - self.filename = os.path.join(self._sourceparts_path, self.KIMCHI_LIST)
> - if not os.path.exists(self.filename):
> - with open(self.filename, 'w') as fd:
> - fd.write("# This file is managed by Kimchi and it must not "
> - "be modified manually\n")
> -
> - def _get_repos(self):
> - try:
> - with self.pkg_lock():
> - repos = self._sourceslist()
> - repos.refresh()
> - except Exception, e:
> - kimchiLock.release()
> - raise OperationFailed('KCHREPOS0025E', {'err': e.message})
> -
> - return repos
> -
> - def _get_repo_id(self, repo):
> - data = urlparse.urlparse(repo.uri)
> - name = data.hostname or data.path
> - return '%s-%s-%s' % (name, repo.dist, "-".join(repo.comps))
> -
> - def _get_source_entry(self, repo_id):
> - kimchiLock.acquire()
> - repos = self._get_repos()
> - kimchiLock.release()
> -
> - for r in repos:
> - # Ignore deb-src repositories
> - if r.type != 'deb':
> - continue
> -
> - if self._get_repo_id(r) != repo_id:
> - continue
> -
> - return r
> -
> - return None
> -
> - def getRepositoriesList(self):
> - """
> - Return a list of repositories IDs
> -
> - APT repositories do not have the concept of a repository ID, so for
> - internal control, the repository ID will be built as described in
> - _get_repo_id()
> - """
> - kimchiLock.acquire()
> - repos = self._get_repos()
> - kimchiLock.release()
> -
> - res = []
> - for r in repos:
> - # Ignore deb-src repositories
> - if r.type != 'deb':
> - continue
> -
> - res.append(self._get_repo_id(r))
> -
> - return res
> -
> - def getRepo(self, repo_id):
> - """
> - Return a dictionary in the repositories.Repositories() format of the
> - given repository ID with the information of a SourceEntry object.
> - """
> - r = self._get_source_entry(repo_id)
> - if r is None:
> - raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
> -
> - info = {'enabled': not r.disabled,
> - 'baseurl': r.uri,
> - 'config': {'dist': r.dist,
> - 'comps': r.comps}}
> - return info
> -
> - def addRepo(self, params):
> - """
> - Add a new APT repository based on <params>
> - """
> - # To create an APT repository the dist is a required parameter
> - # (in addition to baseurl, verified on controller through API.json)
> - config = params.get('config', None)
> - if config is None:
> - raise MissingParameter("KCHREPOS0019E")
> -
> - if 'dist' not in config.keys():
> - raise MissingParameter("KCHREPOS0019E")
> -
> - uri = params['baseurl']
> - dist = config['dist']
> - comps = config.get('comps', [])
> -
> - validate_repo_url(get_expanded_url(uri))
> -
> - kimchiLock.acquire()
> - try:
> - repos = self._get_repos()
> - source_entry = repos.add('deb', uri, dist, comps,
> - file=self.filename)
> - with self.pkg_lock():
> - repos.save()
> - except Exception as e:
> - kimchiLock.release()
> - raise OperationFailed("KCHREPOS0026E", {'err': e.message})
> - kimchiLock.release()
> - return self._get_repo_id(source_entry)
> -
> - def toggleRepo(self, repo_id, enable):
> - """
> - Enable or disable a given repository
> - """
> - r = self._get_source_entry(repo_id)
> - if r is None:
> - raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
> -
> - if enable and not r.disabled:
> - raise InvalidOperation("KCHREPOS0015E", {'repo_id': repo_id})
> -
> - if not enable and r.disabled:
> - raise InvalidOperation("KCHREPOS0016E", {'repo_id': repo_id})
> -
> - if enable:
> - line = 'deb'
> - else:
> - line = '#deb'
> -
> - kimchiLock.acquire()
> - try:
> - repos = self._get_repos()
> - with self.pkg_lock():
> - repos.remove(r)
> - repos.add(line, r.uri, r.dist, r.comps, file=self.filename)
> - repos.save()
> - except:
> - kimchiLock.release()
> - if enable:
> - raise OperationFailed("KCHREPOS0020E", {'repo_id': repo_id})
> -
> - raise OperationFailed("KCHREPOS0021E", {'repo_id': repo_id})
> - finally:
> - kimchiLock.release()
> -
> - return repo_id
> -
> - def updateRepo(self, repo_id, params):
> - """
> - Update a given repository in repositories.Repositories() format
> - """
> - old_info = self.getRepo(repo_id)
> - updated_info = copy.deepcopy(old_info)
> - updated_info['baseurl'] = params.get(
> - 'baseurl', updated_info['baseurl'])
> -
> - if 'config' in params.keys():
> - config = params['config']
> - updated_info['config']['dist'] = config.get(
> - 'dist', old_info['config']['dist'])
> - updated_info['config']['comps'] = config.get(
> - 'comps', old_info['config']['comps'])
> -
> - self.removeRepo(repo_id)
> - try:
> - return self.addRepo(updated_info)
> - except:
> - self.addRepo(old_info)
> - raise
> -
> - def removeRepo(self, repo_id):
> - """
> - Remove a given repository
> - """
> - r = self._get_source_entry(repo_id)
> - if r is None:
> - raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
> -
> - kimchiLock.acquire()
> - try:
> - repos = self._get_repos()
> - with self.pkg_lock():
> - repos.remove(r)
> - repos.save()
> - except:
> - kimchiLock.release()
> - raise OperationFailed("KCHREPOS0017E", {'repo_id': repo_id})
> - finally:
> - kimchiLock.release()
> diff --git a/src/wok/plugins/kimchi/swupdate.py b/src/wok/plugins/kimchi/swupdate.py
> deleted file mode 100644
> index b966424..0000000
> --- a/src/wok/plugins/kimchi/swupdate.py
> +++ /dev/null
> @@ -1,274 +0,0 @@
> -#
> -# Project Kimchi
> -#
> -# Copyright IBM, Corp. 2014-2015
> -#
> -# This library is free software; you can redistribute it and/or
> -# modify it under the terms of the GNU Lesser General Public
> -# License as published by the Free Software Foundation; either
> -# version 2.1 of the License, or (at your option) any later version.
> -#
> -# This library is distributed in the hope that it will be useful,
> -# but WITHOUT ANY WARRANTY; without even the implied warranty of
> -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> -# Lesser General Public License for more details.
> -#
> -# You should have received a copy of the GNU Lesser General Public
> -# License along with this library; if not, write to the Free Software
> -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> -
> -import os
> -import signal
> -import subprocess
> -import time
> -
> -from wok.basemodel import Singleton
> -from wok.exception import NotFoundError, OperationFailed
> -from wok.utils import run_command, wok_log
> -
> -from config import kimchiLock
> -from yumparser import get_yum_packages_list_update
> -
> -
> -class SoftwareUpdate(object):
> - __metaclass__ = Singleton
> -
> - """
> - Class to represent and operate with OS software update.
> - """
> - def __init__(self):
> - # This stores all packages to be updated for Kimchi perspective. It's a
> - # dictionary of dictionaries, in the format {'package_name': package},
> - # where:
> - # package = {'package_name': <string>, 'version': <string>,
> - # 'arch': <string>, 'repository': <string>
> - # }
> - self._packages = {}
> -
> - # This stores the number of packages to update
> - self._num2update = 0
> -
> - # Get the distro of host machine and creates an object related to
> - # correct package management system
> - try:
> - __import__('yum')
> - wok_log.info("Loading YumUpdate features.")
> - self._pkg_mnger = YumUpdate()
> - except ImportError:
> - try:
> - __import__('apt')
> - wok_log.info("Loading AptUpdate features.")
> - self._pkg_mnger = AptUpdate()
> - except ImportError:
> - zypper_help = ["zypper", "--help"]
> - (stdout, stderr, returncode) = run_command(zypper_help)
> - if returncode == 0:
> - wok_log.info("Loading ZypperUpdate features.")
> - self._pkg_mnger = ZypperUpdate()
> - else:
> - raise Exception("There is no compatible package manager "
> - "for this system.")
> -
> - def _scanUpdates(self):
> - """
> - Update self._packages with packages to be updated.
> - """
> - self._packages = {}
> - self._num2update = 0
> -
> - # Call system pkg_mnger to get the packages as list of dictionaries.
> - for pkg in self._pkg_mnger.getPackagesList():
> -
> - # Check if already exist a package in self._packages
> - pkg_id = pkg.get('package_name')
> - if pkg_id in self._packages.keys():
> - # package already listed to update. do nothing
> - continue
> -
> - # Update the self._packages and self._num2update
> - self._packages[pkg_id] = pkg
> - self._num2update = self._num2update + 1
> -
> - def getUpdates(self):
> - """
> - Return the self._packages.
> - """
> - self._scanUpdates()
> - return self._packages
> -
> - def getUpdate(self, name):
> - """
> - Return a dictionary with all info from a given package name.
> - """
> - if name not in self._packages.keys():
> - raise NotFoundError('KCHPKGUPD0002E', {'name': name})
> -
> - return self._packages[name]
> -
> - def getNumOfUpdates(self):
> - """
> - Return the number of packages to be updated.
> - """
> - self._scanUpdates()
> - return self._num2update
> -
> - def preUpdate(self):
> - """
> - Make adjustments before executing the command in
> - a child process.
> - """
> - os.setsid()
> - signal.signal(signal.SIGTERM, signal.SIG_IGN)
> -
> - def doUpdate(self, cb, params):
> - """
> - Execute the update
> - """
> - # reset messages
> - cb('')
> -
> - cmd = self._pkg_mnger.update_cmd
> - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
> - stderr=subprocess.PIPE,
> - preexec_fn=self.preUpdate)
> - msgs = []
> - while proc.poll() is None:
> - msgs.append(proc.stdout.readline())
> - cb(''.join(msgs))
> - time.sleep(0.5)
> -
> - # read the final output lines
> - msgs.extend(proc.stdout.readlines())
> -
> - retcode = proc.poll()
> - if retcode == 0:
> - return cb(''.join(msgs), True)
> -
> - msgs.extend(proc.stderr.readlines())
> - return cb(''.join(msgs), False)
> -
> -
> -class YumUpdate(object):
> - """
> - Class to represent and operate with YUM software update system.
> - It's loaded only on those systems listed at YUM_DISTROS and loads necessary
> - modules in runtime.
> - """
> - def __init__(self):
> - self._pkgs = {}
> - self.update_cmd = ["yum", "-y", "update"]
> -
> - def _refreshUpdateList(self):
> - """
> - Update the list of packages to be updated in the system.
> - """
> - try:
> - kimchiLock.acquire()
> - self._pkgs = get_yum_packages_list_update()
> - except Exception, e:
> - raise OperationFailed('KCHPKGUPD0003E', {'err': str(e)})
> - finally:
> - kimchiLock.release()
> -
> - def getPackagesList(self):
> - """
> - Return a list of package's dictionaries. Each dictionary contains the
> - information about a package, in the format:
> - package = {'package_name': <string>, 'version': <string>,
> - 'arch': <string>, 'repository': <string>}
> - """
> - self._refreshUpdateList()
> - pkg_list = []
> - for pkg in self._pkgs:
> - package = {'package_name': pkg.name, 'version': pkg.version,
> - 'arch': pkg.arch, 'repository': pkg.ui_from_repo}
> - pkg_list.append(package)
> - return pkg_list
> -
> -
> -class AptUpdate(object):
> - """
> - Class to represent and operate with APT software update system.
> - It's loaded only on those systems listed at APT_DISTROS and loads necessary
> - modules in runtime.
> - """
> - def __init__(self):
> - self._pkgs = {}
> - self.pkg_lock = getattr(__import__('apt_pkg'), 'SystemLock')
> - self.update_cmd = ['apt-get', 'upgrade', '-y']
> -
> - def _refreshUpdateList(self):
> - """
> - Update the list of packages to be updated in the system.
> - """
> - apt_cache = getattr(__import__('apt'), 'Cache')()
> - try:
> - with self.pkg_lock():
> - apt_cache.update()
> - apt_cache.upgrade()
> - self._pkgs = apt_cache.get_changes()
> - except Exception, e:
> - kimchiLock.release()
> - raise OperationFailed('KCHPKGUPD0003E', {'err': e.message})
> -
> - def getPackagesList(self):
> - """
> - Return a list of package's dictionaries. Each dictionary contains the
> - information about a package, in the format
> - package = {'package_name': <string>, 'version': <string>,
> - 'arch': <string>, 'repository': <string>}
> - """
> - kimchiLock.acquire()
> - self._refreshUpdateList()
> - kimchiLock.release()
> - pkg_list = []
> - for pkg in self._pkgs:
> - package = {'package_name': pkg.shortname,
> - 'version': pkg.candidate.version,
> - 'arch': pkg._pkg.architecture,
> - 'repository': pkg.candidate.origins[0].label}
> - pkg_list.append(package)
> -
> - return pkg_list
> -
> -
> -class ZypperUpdate(object):
> - """
> - Class to represent and operate with Zypper software update system.
> - It's loaded only on those systems listed at ZYPPER_DISTROS and loads
> - necessary modules in runtime.
> - """
> - def __init__(self):
> - self._pkgs = {}
> - self.update_cmd = ["zypper", "--non-interactive", "update",
> - "--auto-agree-with-licenses"]
> -
> - def _refreshUpdateList(self):
> - """
> - Update the list of packages to be updated in the system.
> - """
> - self._pkgs = []
> - cmd = ["zypper", "list-updates"]
> - (stdout, stderr, returncode) = run_command(cmd)
> -
> - if len(stderr) > 0:
> - raise OperationFailed('KCHPKGUPD0003E', {'err': stderr})
> -
> - for line in stdout.split('\n'):
> - if line.find('v |') >= 0:
> - info = line.split(' | ')
> - package = {'package_name': info[2], 'version': info[4],
> - 'arch': info[5], 'repository': info[1]}
> - self._pkgs.append(package)
> -
> - def getPackagesList(self):
> - """
> - Return a list of package's dictionaries. Each dictionary contains the
> - information about a package, in the format
> - package = {'package_name': <string>, 'version': <string>,
> - 'arch': <string>, 'repository': <string>}
> - """
> - kimchiLock.acquire()
> - self._refreshUpdateList()
> - kimchiLock.release()
> - return self._pkgs
> diff --git a/src/wok/plugins/kimchi/yumparser.py b/src/wok/plugins/kimchi/yumparser.py
> deleted file mode 100644
> index a481ac2..0000000
> --- a/src/wok/plugins/kimchi/yumparser.py
> +++ /dev/null
> @@ -1,353 +0,0 @@
> -#
> -# Project Kimchi
> -#
> -# Copyright IBM, Corp. 2015
> -#
> -# This library is free software; you can redistribute it and/or
> -# modify it under the terms of the GNU Lesser General Public
> -# License as published by the Free Software Foundation; either
> -# version 2.1 of the License, or (at your option) any later version.
> -#
> -# This library is distributed in the hope that it will be useful,
> -# but WITHOUT ANY WARRANTY; without even the implied warranty of
> -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> -# Lesser General Public License for more details.
> -#
> -# You should have received a copy of the GNU Lesser General Public
> -# License along with this library; if not, write to the Free Software
> -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> -import subprocess
> -import glob
> -
> -from os import listdir
> -from os.path import isfile, splitext, basename
> -
> -try:
> - import rpm
> -except ImportError:
> - pass
> -
> -
> -class YumRepoObject(object):
> -
> - def __init__(self, repo_id, repofile):
> - self.repo_id = repo_id
> - self.name = None
> - self.baseurl = None
> - self.enabled = True
> - self.gpgcheck = True
> - self.gpgkey = None
> - self.metalink = None
> - self.mirrorlist = None
> - self.repofile = repofile
> - self.string_attrs = ['baseurl', 'gpgkey', 'name',
> - 'metalink', 'mirrorlist']
> - self.boolean_attrs = ['enabled', 'gpgcheck']
> -
> - def set_attribute(self, key, strvalue):
> - if key in self.string_attrs:
> - setattr(self, key, strvalue)
> - elif key in self.boolean_attrs:
> - setattr(self, key, (strvalue == '1'))
> -
> - def get_attribute_str(self, key):
> - if key not in self.get_attributes():
> - return None
> -
> - if key in self.boolean_attrs:
> - str_value = '1' if getattr(self, key) is True else '0'
> - else:
> - str_value = getattr(self, key)
> -
> - if str_value is None:
> - return None
> -
> - return key + '=' + str_value
> -
> - def get_attributes(self):
> - return self.string_attrs + self.boolean_attrs
> -
> - def enable(self):
> - self.enabled = True
> -
> - def disable(self):
> - self.enabled = False
> -
> - def __str__(self):
> - str_obj = '[' + self.repo_id + ']' + '\n'
> - for key in self.get_attributes():
> - if self.get_attribute_str(key) is not None:
> - str_obj += self.get_attribute_str(key) + '\n'
> - return str_obj
> -
> -
> -def get_repo_files():
> - def _is_repository_file(f):
> - _, f_extension = splitext(f)
> - return isfile(f) and (f_extension == '.repo')
> -
> - YUM_REPO_DIR = '/etc/yum.repos.d'
> - return [YUM_REPO_DIR+'/'+f for f in listdir(YUM_REPO_DIR)
> - if _is_repository_file(YUM_REPO_DIR+'/'+f)]
> -
> -
> -def _ignore_line_repo_file(line):
> - return line.startswith("#") or '=' not in line
> -
> -
> -def _get_repos_from_file(repo_file):
> - repos_from_file = {}
> - current_repo = None
> - current_repo_id = None
> - with open(repo_file) as f:
> - for line in f.readlines():
> - line = line.strip()
> - if line.startswith("["):
> - if current_repo is not None:
> - repos_from_file[current_repo_id] = current_repo
> - current_repo_id = line.strip('[]')
> - current_repo = YumRepoObject(current_repo_id, repo_file)
> - continue
> - if _ignore_line_repo_file(line):
> - continue
> - key, value = line.split('=', 1)
> - key = key.strip()
> - value = value.strip()
> - current_repo.set_attribute(key, value)
> -
> - # add the last repo from file.
> - if current_repo is not None:
> - repos_from_file[current_repo_id] = current_repo
> -
> - return repos_from_file
> -
> -
> -def get_yum_repositories():
> - repo_files = get_repo_files()
> - repos = {}
> - for yum_repo in repo_files:
> - repos.update(_get_repos_from_file(yum_repo))
> -
> - return repos
> -
> -
> -def _retrieve_repo_line_index(data, repo):
> - repo_entry = '[' + repo.repo_id + ']\n'
> - try:
> - repo_index = data.index(repo_entry)
> - except:
> - return None
> - return repo_index
> -
> -
> -def _update_repo_file_data(data, repo, repo_index):
> - remaining_repo_attrs = repo.get_attributes()
> -
> - for i in range(repo_index + 1, len(data)):
> - line = data[i].strip()
> - if line.startswith('['):
> - break
> - if _ignore_line_repo_file(line):
> - continue
> - key, _ = line.split('=', 1)
> - key = key.strip()
> - attr_str = repo.get_attribute_str(key)
> - if attr_str is None:
> - continue
> - remaining_repo_attrs.remove(key)
> - data[i] = attr_str + '\n'
> -
> - for attr in remaining_repo_attrs:
> - attr_str = repo.get_attribute_str(attr)
> - if attr_str is None:
> - continue
> - data.insert(repo_index+1, attr_str + '\n')
> -
> - return data
> -
> -
> -def write_repo_to_file(repo):
> - with open(repo.repofile) as f:
> - data = f.readlines()
> -
> - repo_index = _retrieve_repo_line_index(data, repo)
> - if repo_index is None:
> - return
> -
> - data = _update_repo_file_data(data, repo, repo_index)
> -
> - with open(repo.repofile, 'w') as f:
> - f.writelines(data)
> -
> -
> -def _get_last_line_repo(data, repo_index):
> - stop_delete_index = None
> - for i in range(repo_index+1, len(data)):
> - line = data[i].strip()
> - if line.startswith('['):
> - stop_delete_index = i - 1
> - break
> - if stop_delete_index is None:
> - stop_delete_index = len(data) - 1
> -
> - return stop_delete_index
> -
> -
> -def _remove_repo_file_data(data, repo_index):
> - last_line_repo = _get_last_line_repo(data, repo_index)
> - for i in range(last_line_repo, repo_index - 1, -1):
> - data.pop(i)
> - return data
> -
> -
> -def delete_repo_from_file(repo):
> - with open(repo.repofile) as f:
> - data = f.readlines()
> -
> - repo_index = _retrieve_repo_line_index(data, repo)
> - if repo_index is None:
> - return
> -
> - data = _remove_repo_file_data(data, repo_index)
> -
> - with open(repo.repofile, 'w') as f:
> - f.writelines(data)
> -
> -
> -def _get_releasever():
> - release_file = glob.glob('/etc/*-release')[0]
> - transaction = rpm.TransactionSet()
> - match_iter = transaction.dbMatch('basenames', release_file)
> -
> - ret = '%releasever'
> - try:
> - ret = match_iter.next()['version']
> -
> - except StopIteration:
> - pass
> -
> - return ret
> -
> -
> -def _get_basearch():
> - cmd = ['uname', '-i']
> - uname = subprocess.Popen(cmd, stdout=subprocess.PIPE)
> - return uname.communicate()[0].strip('"\n')
> -
> -
> -def _get_all_yum_vars():
> - variables = {}
> -
> - def _get_var_content(varfile):
> - with open(varfile) as f:
> - variables[basename(varfile)] = f.read().strip('\n')
> -
> - map(lambda vfile:
> - _get_var_content(vfile),
> - glob.glob('/etc/yum/vars/*'))
> -
> - return variables
> -
> -
> -def _expand_variables(stringvar, split_char=' '):
> - yum_variables = _get_all_yum_vars()
> - yum_variables['releasever'] = _get_releasever()
> - yum_variables['basearch'] = _get_basearch()
> -
> - name_vars = [var for var in stringvar.split(split_char)
> - if var.startswith('$') and var.strip('$') in yum_variables]
> -
> - return reduce(lambda nm, var:
> - nm.replace(var, yum_variables[var.strip('$')]),
> - name_vars,
> - stringvar)
> -
> -
> -def get_display_name(name):
> - if not name or '$' not in name:
> - return name
> -
> - return _expand_variables(name)
> -
> -
> -def get_expanded_url(url):
> - url_path = url.split('://')
> - if len(url_path) != 2 or '$' not in url:
> - return url
> -
> - return _expand_variables(url, '/')
> -
> -
> -class YumUpdatePackageObject(object):
> -
> - def __init__(self, name, arch, version, repo):
> - self.name = name
> - self.arch = arch
> - self.version = version
> - self.ui_from_repo = repo
> -
> -
> -def _include_line_checkupdate_output(line):
> - tokens = line.split()
> -
> - if len(tokens) != 3:
> - return False
> -
> - if '.' not in tokens[0]:
> - return False
> -
> - return True
> -
> -
> -def _ignore_obsoleting_packages_in(output):
> - out = ''
> - for l in output.split('\n'):
> - if 'Obsoleting ' in l:
> - break
> - out += l + '\n'
> - return out
> -
> -
> -def _filter_lines_checkupdate_output(output):
> - if output is None:
> - return []
> -
> - output = _ignore_obsoleting_packages_in(output)
> -
> - out = [l for l in output.split('\n')
> - if _include_line_checkupdate_output(l)]
> - return out
> -
> -
> -def _get_yum_checkupdate_output():
> - cmd = ['yum', 'check-update', '-d0']
> - yum_update_cmd = subprocess.Popen(cmd,
> - stdout=subprocess.PIPE,
> - stderr=subprocess.PIPE)
> - out, error = yum_update_cmd.communicate()
> - return_code = yum_update_cmd.returncode
> - if return_code == 1:
> - return None
> -
> - return out
> -
> -
> -def get_yum_packages_list_update(checkupdate_output=None):
> - if checkupdate_output is None:
> - checkupdate_output = _get_yum_checkupdate_output()
> -
> - filtered_output = _filter_lines_checkupdate_output(checkupdate_output)
> -
> - packages = []
> - for line in filtered_output:
> - line = line.split()
> - index = 0
> - name_arch = line[index]
> - index += 1
> - version = line[index]
> - index += 1
> - repo = line[index]
> - name, arch = name_arch.rsplit('.', 1)
> - packages.append(YumUpdatePackageObject(name, arch, version, repo))
> -
> - return packages
More information about the Kimchi-devel
mailing list