[Kimchi-devel] [PATCH 02/17] Ginger Base : base plugin files

chandra at linux.vnet.ibm.com
Tue Sep 1 17:55:04 UTC 2015


From: chandrureddy <chandra at linux.vnet.ibm.com>

---
 plugins/gingerbase/Makefile.am     | 151 +++++++++++
 plugins/gingerbase/VERSION         |   1 +
 plugins/gingerbase/__init__.py     |  21 ++
 plugins/gingerbase/autogen.sh      |  21 ++
 plugins/gingerbase/config.py.in    |  47 ++++
 plugins/gingerbase/configure.ac    | 116 ++++++++
 plugins/gingerbase/disks.py        | 196 ++++++++++++++
 plugins/gingerbase/gingerbase.conf |  40 +++
 plugins/gingerbase/gingerbase.py   |  70 +++++
 plugins/gingerbase/i18n.py         |  96 +++++++
 plugins/gingerbase/lscpu.py        | 122 +++++++++
 plugins/gingerbase/mockmodel.py    | 298 +++++++++++++++++++++
 plugins/gingerbase/repositories.py | 529 +++++++++++++++++++++++++++++++++++++
 plugins/gingerbase/swupdate.py     | 263 ++++++++++++++++++
 plugins/gingerbase/yumparser.py    | 283 ++++++++++++++++++++
 plugins/kimchi/disks.py            | 196 --------------
 plugins/kimchi/repositories.py     | 529 -------------------------------------
 plugins/kimchi/swupdate.py         | 263 ------------------
 plugins/kimchi/yumparser.py        | 283 --------------------
 19 files changed, 2254 insertions(+), 1271 deletions(-)
 create mode 100644 plugins/gingerbase/Makefile.am
 create mode 100644 plugins/gingerbase/VERSION
 create mode 100644 plugins/gingerbase/__init__.py
 create mode 100755 plugins/gingerbase/autogen.sh
 create mode 100644 plugins/gingerbase/config.py.in
 create mode 100644 plugins/gingerbase/configure.ac
 create mode 100644 plugins/gingerbase/disks.py
 create mode 100644 plugins/gingerbase/gingerbase.conf
 create mode 100644 plugins/gingerbase/gingerbase.py
 create mode 100644 plugins/gingerbase/i18n.py
 create mode 100644 plugins/gingerbase/lscpu.py
 create mode 100644 plugins/gingerbase/mockmodel.py
 create mode 100644 plugins/gingerbase/repositories.py
 create mode 100644 plugins/gingerbase/swupdate.py
 create mode 100644 plugins/gingerbase/yumparser.py
 delete mode 100644 plugins/kimchi/disks.py
 delete mode 100644 plugins/kimchi/repositories.py
 delete mode 100644 plugins/kimchi/swupdate.py
 delete mode 100644 plugins/kimchi/yumparser.py

diff --git a/plugins/gingerbase/Makefile.am b/plugins/gingerbase/Makefile.am
new file mode 100644
index 0000000..03daf25
--- /dev/null
+++ b/plugins/gingerbase/Makefile.am
@@ -0,0 +1,151 @@
+#
+# Kimchi
+#
+# Copyright IBM Corp, 2013
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+SUBDIRS = contrib control docs model po tests ui
+
+gingerbase_PYTHON = $(filter-out config.py, $(wildcard *.py))
+
+nodist_gingerbase_PYTHON = config.py
+
+wokdir = $(pythondir)/wok
+gingerbasedir = $(pythondir)/wok/plugins/gingerbase
+
+confdir = $(sysconfdir)/wok/plugins.d
+dist_conf_DATA = gingerbase.conf
+
+AUTOMAKE_OPTIONS = foreign
+
+ACLOCAL_AMFLAGS = --install -I m4
+
+EXTRA_DIST = \
+	config.rpath \
+	API.json \
+	autogen.sh \
+	COPYING.ASL2 \
+	COPYING.LGPL \
+	CONTRIBUTE.md \
+	VERSION \
+	build-aux/pkg-version \
+	config.py.in \
+	$(NULL)
+
+
+PEP8_BLACKLIST = *config.py,*i18n.py,*tests/test_config.py
+
+I18N_FILES = ./i18n.py \
+	$(NULL)
+
+check-local:
+	contrib/check_i18n.py $(I18N_FILES)
+	find . -path './.git' -prune -type f -o \
+		-name '*.py' -o -name '*.py.in'  | xargs $(PYFLAKES) | \
+		while read LINE; do echo "$$LINE"; false; done
+
+	$(PEP8) --version
+	$(PEP8) --filename '*.py,*.py.in' --exclude="$(PEP8_BLACKLIST)" .
+
+
+# Link built mo files in the source tree to enable use of translations from
+# within the source tree
+all-local:
+	while read L && test -n "$$L"; do                  \
+		dir=mo/$$L/LC_MESSAGES ;                       \
+		$(MKDIR_P) $$dir ;                             \
+		ln -sf ../../../po/$$L.gmo $$dir/gingerbase.mo ;   \
+	done < po/LINGUAS
+
+do_substitution = \
+	sed -e 's,[@]prefix[@],$(prefix),g'             \
+	-e 's,[@]datadir[@],$(datadir),g'               \
+	-e 's,[@]sysconfdir[@],$(sysconfdir),g'         \
+	-e 's,[@]localstatedir[@],$(localstatedir),g'   \
+	-e 's,[@]pkgdatadir[@],$(pkgdatadir),g'         \
+	-e 's,[@]wokdir[@],$(wokdir),g'           \
+	-e 's,[@]gingerbasedir[@],$(gingerbasedir),g'           \
+	-e 's,[@]kimchiversion[@],$(PACKAGE_VERSION),g' \
+	-e 's,[@]kimchirelease[@],$(PACKAGE_RELEASE),g' \
+	-e 's,[@]withspice[@],$(WITH_SPICE),g'
+
+config.py: config.py.in Makefile
+	$(do_substitution) < $(srcdir)/config.py.in > config.py
+
+
+#
+# Packaging helpers
+#
+
+install-deb: install
+	cp -R $(top_srcdir)/contrib/DEBIAN $(DESTDIR)/
+	mkdir -p $(DESTDIR)/var/lib/kimchi/debugreports
+
+
+deb: contrib/make-deb.sh
+	$(top_srcdir)/contrib/make-deb.sh
+
+gingerbase.spec: contrib/gingerbase.spec.fedora contrib/gingerbase.spec.suse
+	@if test -e /etc/redhat-release; then                   \
+		ln -sf contrib/gingerbase.spec.fedora $@ ;              \
+	elif test -e /etc/SuSE-release; then                    \
+		ln -sf contrib/gingerbase.spec.suse $@ ;                \
+	else                                                    \
+		echo "Unable to select a spec file for RPM build" ; \
+		/bin/false ;                                        \
+	fi
+
+rpm: dist gingerbase.spec
+	$(MKDIR_P) rpm/BUILD rpm/RPMS rpm/SOURCES rpm/SPECS rpm/SRPMS
+	cp $(top_srcdir)/gingerbase.spec rpm/SPECS/gingerbase.spec
+	cp $(DIST_ARCHIVES) rpm/SOURCES
+	rpmbuild -ba --define "_topdir `pwd`/rpm" rpm/SPECS/gingerbase.spec
+
+fedora-rpm: contrib/gingerbase.spec.fedora
+	ln -sf contrib/gingerbase.spec.fedora gingerbase.spec
+	$(MAKE) rpm
+
+suse-rpm: contrib/gingerbase.spec.suse
+	ln -sf contrib/gingerbase.spec.suse gingerbase.spec
+	$(MAKE) rpm
+
+ChangeLog:
+	@if test -d .git; then                                   \
+		$(top_srcdir)/build-aux/genChangelog --release > $@; \
+	fi
+
+install-data-local:
+	$(MKDIR_P) $(DESTDIR)$(gingerbasedir)
+	$(INSTALL_DATA) API.json $(DESTDIR)$(gingerbasedir)/API.json
+	mkdir -p $(DESTDIR)/var/lib/kimchi/debugreports
+
+uninstall-local:
+	$(RM) $(DESTDIR)$(gingerbasedir)/API.json
+	$(RM) -rf $(DESTDIR)/var/lib/kimchi
+
+VERSION:
+	@if test -d .git; then                                \
+		git describe --abbrev=0 > $@;                     \
+	fi
+
+.PHONY: deb install-deb rpm fedora-rpm suse-rpm ChangeLog VERSION
+
+
+clean-local:
+	rm -rf mo rpm
+
+BUILT_SOURCES = config.py
+CLEANFILES = config.py gingerbase.spec `find "$(top_srcdir)" -type f -name "*.pyc" -print`
diff --git a/plugins/gingerbase/VERSION b/plugins/gingerbase/VERSION
new file mode 100644
index 0000000..bc80560
--- /dev/null
+++ b/plugins/gingerbase/VERSION
@@ -0,0 +1 @@
+1.5.0
diff --git a/plugins/gingerbase/__init__.py b/plugins/gingerbase/__init__.py
new file mode 100644
index 0000000..e383126
--- /dev/null
+++ b/plugins/gingerbase/__init__.py
@@ -0,0 +1,21 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2013-2014
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+
+from gingerbase import GingerBase
+__all__ = ['GingerBase']
diff --git a/plugins/gingerbase/autogen.sh b/plugins/gingerbase/autogen.sh
new file mode 100755
index 0000000..0f22dba
--- /dev/null
+++ b/plugins/gingerbase/autogen.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+aclocal
+automake --add-missing
+autoreconf
+
+if [ ! -f "configure" ]; then
+    echo "Failed to generate configure script.  Check to make sure autoconf, "
+    echo "automake, and other build dependencies are properly installed."
+    exit 1
+fi
+
+if [ "x$1" == "x--system" ]; then
+    ./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var
+else
+   if [ $# -gt 0 ]; then
+        ./configure "$@"
+   else
+        ./configure --prefix=/usr/local
+   fi
+fi
diff --git a/plugins/gingerbase/config.py.in b/plugins/gingerbase/config.py.in
new file mode 100644
index 0000000..44e1cc4
--- /dev/null
+++ b/plugins/gingerbase/config.py.in
@@ -0,0 +1,47 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2013-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+#
+
+import os
+import platform
+import threading
+
+from wok.config import PluginPaths
+from wok.xmlutils.utils import xpath_get_text
+
+kimchiLock = threading.Lock()
+
+
+def get_debugreports_path():
+    return os.path.join(PluginPaths('gingerbase').state_dir, 'debugreports')
+
+
+class KimchiPaths(PluginPaths):
+
+    def __init__(self):
+        super(KimchiPaths, self).__init__('gingerbase')
+
+
+kimchiPaths = KimchiPaths()
+
+
+class KimchiConfig(dict):
+    def __init__(self):
+        super(KimchiConfig, self).__init__(self)
+
diff --git a/plugins/gingerbase/configure.ac b/plugins/gingerbase/configure.ac
new file mode 100644
index 0000000..2e44e72
--- /dev/null
+++ b/plugins/gingerbase/configure.ac
@@ -0,0 +1,116 @@
+#
+# Kimchi
+#
+# Copyright IBM Corp, 2013-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+AC_INIT([gingerbase], [m4_esyscmd([./build-aux/pkg-version --version])])
+
+AC_SUBST([PACKAGE_VERSION],
+         [m4_esyscmd([./build-aux/pkg-version --version])])
+
+AC_SUBST([PACKAGE_RELEASE],
+         [m4_esyscmd([./build-aux/pkg-version --release])])
+
+# Testing for version and release
+AS_IF([test "x$PACKAGE_VERSION" = x],
+      AC_MSG_ERROR([package version not defined]))
+AS_IF([test "x$PACKAGE_RELEASE" = x],
+      AC_MSG_ERROR([package release not defined]))
+
+AC_CONFIG_AUX_DIR([build-aux])
+AM_INIT_AUTOMAKE([-Wno-portability])
+AM_PATH_PYTHON([2.6])
+AC_PATH_PROG([PEP8], [pep8], [/usr/bin/pep8])
+AC_PYTHON_MODULE([unittest])
+AC_SUBST([HAVE_PYMOD_UNITTEST])
+AC_SUBST([PYTHON_VERSION])
+AM_GNU_GETTEXT([external])
+AM_GNU_GETTEXT_VERSION([0.10])
+AC_PATH_PROG([CHEETAH], [cheetah], [/usr/bin/cheetah])
+
+# Checking for pyflakes
+AC_PATH_PROG([PYFLAKES], [pyflakes])
+if test "x$PYFLAKES" = "x"; then
+  AC_MSG_WARN([pyflakes not found])
+fi
+
+AC_ARG_ENABLE(
+    [sample],
+    [AS_HELP_STRING(
+        [--enable-sample],
+        [enable sample plugin @<:@default=no@:>@]
+    )],
+    ,
+    [enable_sample="no"]
+)
+
+if test "${enable_sample}" = "yes"; then
+AC_SUBST([ENABLE_SAMPLE], [True])
+else
+AC_SUBST([ENABLE_SAMPLE], [False])
+fi
+
+#AC_ARG_WITH(
+#    [spice-html5],
+#    [AS_HELP_STRING([--with-spice-html5],
+#                    [Build Kimchi with spice-html5 @<:@default=no@:>@])],
+#    ,
+#    [with_spice_html5="no"]
+#)
+#AM_CONDITIONAL([WITH_SPICE], [test "x$with_spice_html5" = xyes])
+
+AC_CONFIG_FILES([
+    po/Makefile.in
+    po/gen-pot
+    Makefile
+    docs/Makefile
+    control/Makefile
+    model/Makefile
+    ui/Makefile
+    ui/config/Makefile
+    ui/css/Makefile
+    ui/images/Makefile
+    ui/images/theme-default/Makefile
+    ui/js/Makefile
+    ui/libs/Makefile
+    ui/libs/themes/Makefile
+    ui/libs/themes/base/Makefile
+    ui/libs/themes/base/images/Makefile
+    ui/pages/Makefile
+    ui/pages/help/Makefile
+    ui/pages/help/en_US/Makefile
+    ui/pages/help/de_DE/Makefile
+    ui/pages/help/es_ES/Makefile
+    ui/pages/help/fr_FR/Makefile
+    ui/pages/help/it_IT/Makefile
+    ui/pages/help/ja_JP/Makefile
+    ui/pages/help/ko_KR/Makefile
+    ui/pages/help/pt_BR/Makefile
+    ui/pages/help/ru_RU/Makefile
+    ui/pages/help/zh_CN/Makefile
+    ui/pages/help/zh_TW/Makefile
+    contrib/Makefile
+    contrib/DEBIAN/Makefile
+    contrib/DEBIAN/control
+    contrib/gingerbase.spec.fedora
+    contrib/gingerbase.spec.suse
+    tests/Makefile
+],[
+    chmod +x po/gen-pot
+])
+
+AC_OUTPUT
diff --git a/plugins/gingerbase/disks.py b/plugins/gingerbase/disks.py
new file mode 100644
index 0000000..fa20a1b
--- /dev/null
+++ b/plugins/gingerbase/disks.py
@@ -0,0 +1,196 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2013-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+
+import os.path
+import re
+import subprocess
+from parted import Device as PDevice
+from parted import Disk as PDisk
+
+from wok.exception import OperationFailed
+from wok.utils import wok_log
+
+
+def _get_dev_node_path(maj_min):
+    """ Returns device node path given the device number 'major:min' """
+
+    dm_name = "/sys/dev/block/%s/dm/name" % maj_min
+    if os.path.exists(dm_name):
+        with open(dm_name) as dm_f:
+            content = dm_f.read().rstrip('\n')
+        return "/dev/mapper/" + content
+
+    uevent = "/sys/dev/block/%s/uevent" % maj_min
+    with open(uevent) as ueventf:
+        content = ueventf.read()
+
+    data = dict(re.findall(r'(\S+)=(".*?"|\S+)', content.replace("\n", " ")))
+
+    return "/dev/%s" % data["DEVNAME"]
+
+
+def _get_lsblk_devs(keys, devs=[]):
+    lsblk = subprocess.Popen(
+        ["lsblk", "-Pbo"] + [','.join(keys)] + devs,
+        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    out, err = lsblk.communicate()
+    if lsblk.returncode != 0:
+        raise OperationFailed("GGBDISKS0001E", {'err': err})
+
+    return _parse_lsblk_output(out, keys)
+
+
+def _get_dev_major_min(name):
+    maj_min = None
+
+    keys = ["NAME", "MAJ:MIN"]
+    dev_list = _get_lsblk_devs(keys)
+
+    for dev in dev_list:
+        if dev['name'].split()[0] == name:
+            maj_min = dev['maj:min']
+            break
+    else:
+        raise OperationFailed("GGBDISKS0002E", {'device': name})
+
+    return maj_min
+
+
+def _is_dev_leaf(devNodePath):
+    try:
+        # By default, lsblk prints a device's information followed by its
+        # children's information
+        childrenCount = len(
+            _get_lsblk_devs(["NAME"], [devNodePath])) - 1
+    except OperationFailed as e:
+        # lsblk is known to fail on multipath devices
+        # Assume these devices contain children
+        wok_log.error(
+            "Error getting device info for %s: %s", devNodePath, e)
+        return False
+
+    return childrenCount == 0
+
+
+def _is_dev_extended_partition(devType, devNodePath):
+    if devType != 'part':
+        return False
+    diskPath = devNodePath.rstrip('0123456789')
+    device = PDevice(diskPath)
+    try:
+        extended_part = PDisk(device).getExtendedPartition()
+    except NotImplementedError as e:
+        wok_log.warning(
+            "Error getting extended partition info for dev %s type %s: %s",
+            devNodePath, devType, e.message)
+        # Treat a disk with an unsupported partition table as if it does not
+        # contain extended partitions.
+        return False
+    if extended_part and extended_part.path == devNodePath:
+        return True
+    return False
+
+
+def _parse_lsblk_output(output, keys):
+    # output is in the format key="value",
+    # where key can be NAME, TYPE, FSTYPE, SIZE, MOUNTPOINT, etc
+    lines = output.rstrip("\n").split("\n")
+    r = []
+    for line in lines:
+        d = {}
+        for key in keys:
+            expression = r"%s=\".*?\"" % key
+            match = re.search(expression, line)
+            field = match.group()
+            k, v = field.split('=', 1)
+            d[k.lower()] = v[1:-1]
+        r.append(d)
+    return r
+
+
+def _get_vgname(devNodePath):
+    """ Return the volume group name of a physical volume. If the device
+    node path is not a physical volume, return an empty string. """
+    pvs = subprocess.Popen(
+        ["pvs", "--unbuffered", "--nameprefixes", "--noheadings",
+         "-o", "vg_name", devNodePath],
+        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    out, err = pvs.communicate()
+    if pvs.returncode != 0:
+        return ""
+
+    return re.findall(r"LVM2_VG_NAME='([^\']*)'", out)[0]
+
+
+def _is_available(name, devtype, fstype, mountpoint, majmin):
+    devNodePath = _get_dev_node_path(majmin)
+    # Only list devices that are unmounted, unformatted leaf devices and are
+    # partitions, disks or multipath devices. A leaf is a partition, a disk
+    # with no partitions, or a disk not held by any multipath device. A
+    # physical volume in no volume group is also listed; extended partitions
+    # should not be listed.
+    if (devtype in ['part', 'disk', 'mpath'] and
+            fstype in ['', 'LVM2_member'] and
+            mountpoint == "" and
+            _get_vgname(devNodePath) == "" and
+            _is_dev_leaf(devNodePath) and
+            not _is_dev_extended_partition(devtype, devNodePath)):
+        return True
+    return False
+
+
+def get_partitions_names(check=False):
+    names = set()
+    keys = ["NAME", "TYPE", "FSTYPE", "MOUNTPOINT", "MAJ:MIN"]
+    # output is in the format key="value",
+    # where key can be NAME, TYPE, FSTYPE, MOUNTPOINT
+    for dev in _get_lsblk_devs(keys):
+        # split()[0] to avoid the second part of the name, after the
+        # whitespace
+        name = dev['name'].split()[0]
+        if check and not _is_available(name, dev['type'], dev['fstype'],
+                                       dev['mountpoint'], dev['maj:min']):
+            continue
+        names.add(name)
+
+    return list(names)
+
+
+def get_partition_details(name):
+    majmin = _get_dev_major_min(name)
+    dev_path = _get_dev_node_path(majmin)
+
+    keys = ["TYPE", "FSTYPE", "SIZE", "MOUNTPOINT"]
+    try:
+        dev = _get_lsblk_devs(keys, [dev_path])[0]
+    except OperationFailed as e:
+        wok_log.error(
+            "Error getting partition info for %s: %s", name, e)
+        return {}
+
+    dev['available'] = _is_available(name, dev['type'], dev['fstype'],
+                                     dev['mountpoint'], majmin)
+    if dev['mountpoint']:
+        # Sometimes the mountpoint comes with [SWAP] or other
+        # info which is not an actual mount point. Filtering it
+        regexp = re.compile(r"\[.*\]")
+        if regexp.search(dev['mountpoint']) is not None:
+            dev['mountpoint'] = ''
+    dev['path'] = dev_path
+    dev['name'] = name
+    return dev
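
For illustration only (not part of the patch), a minimal sketch of how the
key="value" output of lsblk -Pbo is parsed by the helpers above; the sample
line below is made up:

    # Feed one fabricated lsblk -Pbo NAME,TYPE,FSTYPE,MOUNTPOINT,MAJ:MIN
    # output line through the same regex-based parsing as
    # _parse_lsblk_output() uses.
    import re

    sample = 'NAME="vda1" TYPE="part" FSTYPE="ext4" MOUNTPOINT="/" MAJ:MIN="253:1"'
    keys = ["NAME", "TYPE", "FSTYPE", "MOUNTPOINT", "MAJ:MIN"]

    parsed = {}
    for key in keys:
        field = re.search(r'%s=".*?"' % key, sample).group()
        k, v = field.split('=', 1)
        parsed[k.lower()] = v[1:-1]   # strip the surrounding double quotes

    print(parsed)
    # e.g. {'name': 'vda1', 'type': 'part', 'fstype': 'ext4',
    #       'mountpoint': '/', 'maj:min': '253:1'}  (key order may vary)
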
diff --git a/plugins/gingerbase/gingerbase.conf b/plugins/gingerbase/gingerbase.conf
new file mode 100644
index 0000000..544ac7b
--- /dev/null
+++ b/plugins/gingerbase/gingerbase.conf
@@ -0,0 +1,40 @@
+[wok]
+enable = True
+plugin_class = "GingerBase"
+uri = "/plugins/gingerbase"
+extra_auth_api_class = "control.sub_nodes"
+
+[/]
+tools.trailing_slash.on = False
+request.methods_with_bodies = ('POST', 'PUT')
+tools.nocache.on = True
+tools.proxy.on = True
+tools.sessions.on = True
+tools.sessions.name = 'wok'
+tools.sessions.secure = True
+tools.sessions.httponly = True
+tools.sessions.locking = 'explicit'
+tools.sessions.storage_type = 'ram'
+tools.sessions.timeout = 10
+tools.wokauth.on = True
+
+[/data/debugreports]
+tools.staticdir.on = True
+tools.staticdir.dir = wok.config.PluginPaths('gingerbase').state_dir + '/debugreports'
+tools.nocache.on = False
+tools.wokauth.on = True
+tools.staticdir.content_types = {'xz': 'application/x-xz'}
+
+[/favicon.ico]
+tools.staticfile.on = True
+tools.staticfile.filename = wok.config.PluginPaths('gingerbase').ui_dir + '/images/logo.ico'
+
+[/robots.txt]
+tools.staticfile.on = True
+tools.staticfile.filename = wok.config.PluginPaths('gingerbase').ui_dir + '/robots.txt'
+
+[/help]
+tools.staticdir.on = True
+tools.staticdir.dir = wok.config.PluginPaths('gingerbase').ui_dir + '/pages/help'
+tools.nocache.on = True
+
diff --git a/plugins/gingerbase/gingerbase.py b/plugins/gingerbase/gingerbase.py
new file mode 100644
index 0000000..f726042
--- /dev/null
+++ b/plugins/gingerbase/gingerbase.py
@@ -0,0 +1,70 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2013-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+
+import json
+import os
+import cherrypy
+
+from wok import vnc
+from wok.i18n import messages
+from wok.root import WokRoot
+
+import config
+import mockmodel
+from control import sub_nodes
+from model import model as gingerBaseModel
+
+
+class GingerBase(WokRoot):
+    def __init__(self, wok_options):
+        if hasattr(wok_options, "model"):
+            self.model = wok_options.model
+        elif wok_options.test:
+            self.model = mockmodel.MockModel()
+        else:
+            self.model = gingerBaseModel.Model()
+
+        dev_env = wok_options.environment != 'production'
+        super(GingerBase, self).__init__(self.model, dev_env)
+
+        for ident, node in sub_nodes.items():
+            setattr(self, ident, node(self.model))
+
+        if isinstance(self.model, gingerBaseModel.Model):
+            vnc_ws_proxy = vnc.new_ws_proxy()
+            cherrypy.engine.subscribe('exit', vnc_ws_proxy.terminate)
+
+        self.api_schema = json.load(open(os.path.join(os.path.dirname(
+                                    os.path.abspath(__file__)), 'API.json')))
+        self.paths = config.kimchiPaths
+        self.domain = 'gingerbase'
+        self.messages = messages
+
+        make_dirs = [
+            # os.path.abspath(config.get_distros_store()),
+            os.path.abspath(config.get_debugreports_path()),
+            # os.path.abspath(config.get_screenshot_path())
+        ]
+        for directory in make_dirs:
+            if not os.path.isdir(directory):
+                os.makedirs(directory)
+
+    def get_custom_conf(self):
+        return config.KimchiConfig()
+
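
As an aside, the loop over sub_nodes in GingerBase.__init__ is what exposes
each control module as a REST sub-resource of the plugin root. A stripped-down
sketch of the same pattern (the Host class and the single-entry sub_nodes dict
below are invented for the example, not taken from the control package):

    # Hypothetical stand-in for control.sub_nodes: URL segment -> controller.
    class Host(object):
        def __init__(self, model):
            self.model = model

    sub_nodes = {'host': Host}

    class Root(object):
        def __init__(self, model):
            # Same idiom as GingerBase.__init__: every sub node becomes an
            # attribute, so CherryPy can route /host to Root().host.
            for ident, node in sub_nodes.items():
                setattr(self, ident, node(model))

    root = Root(model=object())
    print(isinstance(root.host, Host))   # True
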
diff --git a/plugins/gingerbase/i18n.py b/plugins/gingerbase/i18n.py
new file mode 100644
index 0000000..249d170
--- /dev/null
+++ b/plugins/gingerbase/i18n.py
@@ -0,0 +1,96 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2014-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+
+import gettext
+
+_ = gettext.gettext
+
+
+messages = {
+    "GGBAPI0001E": _("Unknown parameter %(value)s"),
+
+    "GGBASYNC0001E": _("Timeout of %(seconds)s seconds expired while running task '%(task)s'."),
+
+    "GGBDISKS0001E": _("Error while getting block devices. Details: %(err)s"),
+    "GGBDISKS0002E": _("Error while getting block device information for %(device)s."),
+
+    "GGBDR0001E": _("Debug report %(name)s does not exist"),
+    "GGBDR0002E": _("Debug report tool not found in system"),
+    "GGBDR0003E": _("Unable to create debug report %(name)s. Details: %(err)s."),
+    "GGBDR0004E": _("Can not find any debug report with the given name %(name)s"),
+    "GGBDR0005E": _("Unable to generate debug report %(name)s. Details: %(err)s"),
+    "GGBDR0006E": _("You should give a name for the debug report file."),
+    "GGBDR0007E": _("Debug report name must be a string. Only letters, digits, underscore ('_') and hyphen ('-') are allowed."),
+    "GGBDR0008E": _("The debug report with specified name \"%(name)s\" already exists. Please use another one."),
+
+    "GGBPART0001E": _("Partition %(name)s does not exist in the host"),
+
+    "GGBHOST0001E": _("Unable to shut down host machine as there are running virtual machines"),
+    "GGBHOST0002E": _("Unable to reboot host machine as there are running virtual machines"),
+    "GGBHOST0003E": _("Node device '%(name)s' not found"),
+    "GGBHOST0004E": _("Conflicting flag filters specified."),
+    "GGBHOST0005E": _("When specifying CPU topology, each element must be an integer greater than zero."),
+
+    "GGBPKGUPD0001E": _("No packages marked for update"),
+    "GGBPKGUPD0002E": _("Package %(name)s is not marked to be updated."),
+    "GGBPKGUPD0003E": _("Error while getting packages marked to be updated. Details: %(err)s"),
+    "GGBPKGUPD0004E": _("There is no compatible package manager for this system."),
+
+
+    "GGBREPOS0001E": _("YUM Repository ID must be a single-word string."),
+    "GGBREPOS0002E": _("Repository URL must be an http://, ftp:// or file:// URL."),
+    "GGBREPOS0003E": _("Repository configuration is a dictionary with specific values according to repository type."),
+    "GGBREPOS0004E": _("Distribution to DEB repository must be a string"),
+    "GGBREPOS0005E": _("Components to DEB repository must be listed in an array"),
+    "GGBREPOS0006E": _("Components to DEB repository must be a string"),
+    "GGBREPOS0007E": _("Mirror list to repository must be a string"),
+    "GGBREPOS0008E": _("YUM Repository name must be string."),
+    "GGBREPOS0009E": _("GPG check must be a boolean value."),
+    "GGBREPOS0010E": _("GPG key must be a URL pointing to the ASCII-armored file."),
+    "GGBREPOS0011E": _("Could not update repository %(repo_id)s."),
+    "GGBREPOS0012E": _("Repository %(repo_id)s does not exist."),
+    "GGBREPOS0013E": _("Specify repository base URL, mirror list or metalink in order to create or update a YUM repository."),
+    "GGBREPOS0014E": _("Repository management tool was not recognized for your system."),
+    "GGBREPOS0015E": _("Repository %(repo_id)s is already enabled."),
+    "GGBREPOS0016E": _("Repository %(repo_id)s is already disabled."),
+    "GGBREPOS0017E": _("Could not remove repository %(repo_id)s."),
+    "GGBREPOS0018E": _("Could not write repository configuration file %(repo_file)s"),
+    "GGBREPOS0019E": _("Specify repository distribution in order to create a DEB repository."),
+    "GGBREPOS0020E": _("Could not enable repository %(repo_id)s."),
+    "GGBREPOS0021E": _("Could not disable repository %(repo_id)s."),
+    "GGBREPOS0022E": _("YUM Repository ID already exists"),
+    "GGBREPOS0023E": _("YUM Repository name must be a string"),
+    "GGBREPOS0024E": _("Unable to list repositories. Details: '%(err)s'"),
+    "GGBREPOS0025E": _("Unable to retrieve repository information. Details: '%(err)s'"),
+    "GGBREPOS0026E": _("Unable to add repository. Details: '%(err)s'"),
+    "GGBREPOS0027E": _("Unable to remove repository. Details: '%(err)s'"),
+    "GGBREPOS0028E": _("Configuration items: '%(items)s' are not supported by repository manager"),
+    "GGBREPOS0029E": _("Repository metalink must be an http://, ftp:// or file:// URL."),
+    "GGBREPOS0030E": _("Cannot specify mirrorlist and metalink at the same time."),
+
+
+    "GGBCPUINF0001E": _("The number of vCPUs is too large for this system."),
+    "GGBCPUINF0002E": _("Invalid vCPU/topology combination."),
+    "GGBCPUINF0003E": _("This host (or current configuration) does not allow CPU topology."),
+    "GGBCPUINF0004E": _("This host (or current configuration) does not allow fetching lscpu details."),
+    "GGBCPUINF0005E": _("This host (or current configuration) does not provide Socket(s) information."),
+    "GGBCPUINF0006E": _("This host (or current configuration) does not provide Core(s) per socket information."),
+    "GGBCPUINF0007E": _("This host (or current configuration) does not provide Thread(s) per core information."),
+
+}
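
These message templates use named %-placeholders; the parameters attached to
an exception are substituted into the translated string. A small sketch of how
one of the codes above expands (the direct dictionary lookup is for
illustration, not the exact wok code path):

    import gettext

    _ = gettext.gettext

    messages = {
        "GGBDISKS0002E": _("Error while getting block device information "
                           "for %(device)s."),
    }

    # e.g. for OperationFailed("GGBDISKS0002E", {'device': 'sdb'}):
    print(messages["GGBDISKS0002E"] % {'device': 'sdb'})
    # Error while getting block device information for sdb.
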
diff --git a/plugins/gingerbase/lscpu.py b/plugins/gingerbase/lscpu.py
new file mode 100644
index 0000000..7c6a688
--- /dev/null
+++ b/plugins/gingerbase/lscpu.py
@@ -0,0 +1,122 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2014-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+import logging
+
+from wok.utils import run_command
+from wok.exception import NotFoundError
+
+
+class LsCpu(object):
+    """
+    Get information about the CPU architecture and hyper-threading on x86
+    """
+    def log_error(self, e):
+        """
+            param e: error details to be logged
+        """
+        log = logging.getLogger('Util')
+        log.warning('Exception in fetching the CPU architecture '
+                    'details: %s', e)
+
+    def __init__(self):
+        self.lsCpuInfo = {}
+        try:
+            # lscpu - display information about the CPU architecture
+            out, error, rc = run_command(['lscpu'])
+            # Output of lscpu on x86 is expected to be:
+            # Architecture:          x86_64
+            # CPU op-mode(s):        32-bit, 64-bit
+            # Byte Order:            Little Endian
+            # CPU(s):                4
+            # On-line CPU(s) list:   0-3
+            # Thread(s) per core:    2
+            # Core(s) per socket:    2
+            # Socket(s):             1
+            # NUMA node(s):          1
+            # Vendor ID:             GenuineIntel
+            # CPU family:            6
+            # Model:                 42
+            # Model name:            Intel(R) Core(TM) i5-2540M CPU @ 2.60GHz
+            # Stepping:              7
+            # CPU MHz:               976.421
+            # CPU max MHz:           3300.0000
+            # CPU min MHz:           800.0000
+            # BogoMIPS:              5182.99
+            # Virtualization:        VT-x
+            # L1d cache:             32K
+            # L1i cache:             32K
+            # L2 cache:              256K
+            # L3 cache:              3072K
+            # NUMA node0 CPU(s):     0-3
+
+            if not rc and (not out.isspace()):
+                lscpuout = out.split('\n')
+                if lscpuout and len(lscpuout) > 0:
+                    for line in lscpuout:
+                        if ":" in line and (len(line.split(':')) == 2):
+                            key, value = line.split(':')
+                            self.lsCpuInfo[key.strip()] = value.strip()
+                        else:
+                            continue
+        except Exception, e:
+            self.log_error(e)
+            raise NotFoundError("GGBCPUINF0004E")
+
+    def get_sockets(self):
+        """
+            param self: object of the class self
+            return: Socket(s) (information about the CPU architecture)
+        """
+        try:
+            sockets = "Socket(s)"
+            if len(self.lsCpuInfo) > 0 and sockets in self.lsCpuInfo.keys():
+                return int(self.lsCpuInfo[sockets])
+            else:
+                raise NotFoundError("GGBCPUINF0005E")
+        except IndexError, e:
+            self.log_error(e)
+            raise NotFoundError("GGBCPUINF0005E")
+
+    def get_cores_per_socket(self):
+        """
+            param self: object of the class self
+            return: Core(s) per socket (information about the CPU architecture)
+        """
+        try:
+            cores_per_socket = "Core(s) per socket"
+            if (len(self.lsCpuInfo) > 0 and
+                    cores_per_socket in self.lsCpuInfo.keys()):
+                return int(self.lsCpuInfo[cores_per_socket])
+            else:
+                raise NotFoundError("GGBCPUINF0006E")
+        except IndexError, e:
+            self.log_error(e)
+            raise NotFoundError("GGBCPUINF0006E")
+
+    def get_threads_per_core(self):
+        """
+            param self: object of the class self
+            return: Thread(s) per core (information about the CPU architecture)
+        """
+        try:
+            threads_per_core = "Thread(s) per core"
+            if (len(self.lsCpuInfo) > 0 and
+                    threads_per_core in self.lsCpuInfo.keys()):
+                return int(self.lsCpuInfo[threads_per_core])
+            else:
+                raise NotFoundError("GGBCPUINF0007E")
+        except IndexError, e:
+            self.log_error(e)
+            raise NotFoundError("GGBCPUINF0007E")
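
Assuming wok and the plugin module are importable and the host provides the
lscpu utility, the class above can be used to derive the host topology; this
usage sketch is an illustration, not code from the patch:

    from wok.exception import NotFoundError

    from lscpu import LsCpu

    try:
        info = LsCpu()
        sockets = info.get_sockets()
        cores = info.get_cores_per_socket()
        threads = info.get_threads_per_core()
        # Logical CPUs on x86 = sockets * cores per socket * threads per core
        print("logical CPUs: %d" % (sockets * cores * threads))
    except NotFoundError:
        print("lscpu did not expose the expected topology fields")
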
diff --git a/plugins/gingerbase/mockmodel.py b/plugins/gingerbase/mockmodel.py
new file mode 100644
index 0000000..05bcb1c
--- /dev/null
+++ b/plugins/gingerbase/mockmodel.py
@@ -0,0 +1,298 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2013-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+
+import lxml.etree as ET
+import os
+import random
+import time
+
+from wok.objectstore import ObjectStore
+from wok.utils import add_task, wok_log
+
+import config
+from model import cpuinfo
+from model.debugreports import DebugReportsModel
+from model.model import Model
+
+
+fake_user = {'root': 'letmein!'}
+mockmodel_defaults = {'domain': 'test', 'arch': 'i686'}
+
+
+class MockModel(Model):
+
+    def __init__(self, objstore_loc=None):
+        # Override osinfo.defaults to adjust the values according to the
+        # test:///default driver
+
+        self._mock_devices = MockDevices()
+        self._mock_partitions = MockPartitions()
+        self._mock_swupdate = MockSoftwareUpdate()
+        self._mock_repositories = MockRepositories()
+
+        cpuinfo.get_topo_capabilities = \
+            MockModel.get_topo_capabilities
+
+        super(MockModel, self).__init__(objstore_loc)
+        self.objstore_loc = objstore_loc
+        self.objstore = ObjectStore(objstore_loc)
+
+        # The MockModel methods are created at runtime according to Model
+        # and BaseModel, so a normal method override will not work here.
+        # Instead, the override also needs to be done at runtime, by
+        # swapping in the _mock_* methods below.
+        for method in dir(self):
+            if method.startswith('_mock_'):
+                mock_method = getattr(self, method)
+                if not callable(mock_method):
+                    continue
+
+                m = method[6:]
+                model_method = getattr(self, m)
+                setattr(self, '_model_' + m, model_method)
+                setattr(self, m, mock_method)
+
+        DebugReportsModel._gen_debugreport_file = self._gen_debugreport_file
+
+    def reset(self):
+        self._mock_swupdate = MockSoftwareUpdate()
+        self._mock_repositories = MockRepositories()
+
+        if hasattr(self, 'objstore'):
+            self.objstore = ObjectStore(self.objstore_loc)
+
+    @staticmethod
+    def get_topo_capabilities(conn):
+        # The libvirt test driver doesn't return topology.
+        xml = "<topology sockets='1' cores='2' threads='2'/>"
+        return ET.fromstring(xml)
+
+    def _gen_debugreport_file(self, name):
+        return add_task('/plugins/gingerbase/debugreports/%s' % name,
+                        self._create_log, self.objstore, name)
+
+    def _create_log(self, cb, name):
+        path = config.get_debugreports_path()
+        tmpf = os.path.join(path, name + '.tmp')
+        realf = os.path.join(path, name + '.txt')
+        length = random.randint(1000, 10000)
+        with open(tmpf, 'w') as fd:
+            while length:
+                fd.write('I am logged')
+                length = length - 1
+        os.rename(tmpf, realf)
+        cb("OK", True)
+
+    def _mock_host_shutdown(self, *name):
+        wok_log.info("The host system will be shut down")
+
+    def _mock_host_reboot(self, *name):
+        wok_log.info("The host system will be rebooted")
+
+    def _mock_partitions_get_list(self):
+        return self._mock_partitions.partitions.keys()
+
+    def _mock_partition_lookup(self, name):
+        return self._mock_partitions.partitions[name]
+
+    def _mock_packagesupdate_get_list(self):
+        return self._mock_swupdate.pkgs.keys()
+
+    def _mock_packageupdate_lookup(self, pkg_name):
+        return self._mock_swupdate.pkgs[pkg_name]
+
+    def _mock_host_swupdate(self, args=None):
+        task_id = add_task('/plugins/gingerbase/host/swupdate',
+                           self._mock_swupdate.doUpdate, self.objstore)
+        return self.task_lookup(task_id)
+
+    def _mock_repositories_get_list(self):
+        return self._mock_repositories.repos.keys()
+
+    def _mock_repositories_create(self, params):
+        # Create a repo_id if not given by user. The repo_id will follow
+        # the format kimchi_repo_<integer>, where integer is the number of
+        # milliseconds since the Epoch (January 1st, 1970), in UTC.
+        repo_id = params.get('repo_id', None)
+        if repo_id is None:
+            repo_id = "kimchi_repo_%s" % str(int(time.time() * 1000))
+            params.update({'repo_id': repo_id})
+
+        config = params.get('config', {})
+        info = {'repo_id': repo_id,
+                'baseurl': params['baseurl'],
+                'enabled': True,
+                'config': {'repo_name': config.get('repo_name', repo_id),
+                           'gpgkey': config.get('gpgkey', []),
+                           'gpgcheck': True,
+                           'mirrorlist': params.get('mirrorlist', '')}}
+        self._mock_repositories.repos[repo_id] = info
+        return repo_id
+
+    def _mock_repository_lookup(self, repo_id):
+        return self._mock_repositories.repos[repo_id]
+
+    def _mock_repository_delete(self, repo_id):
+        del self._mock_repositories.repos[repo_id]
+
+    def _mock_repository_enable(self, repo_id):
+        self._mock_repositories.repos[repo_id]['enabled'] = True
+
+    def _mock_repository_disable(self, repo_id):
+        self._mock_repositories.repos[repo_id]['enabled'] = False
+
+    def _mock_repository_update(self, repo_id, params):
+        self._mock_repositories.repos[repo_id].update(params)
+        return repo_id
+
+class MockPartitions(object):
+    def __init__(self):
+        self.partitions = {"vdx": {"available": True, "name": "vdx",
+                                   "fstype": "", "path": "/dev/vdx",
+                                   "mountpoint": "", "type": "disk",
+                                   "size": "2147483648"},
+                           "vdz": {"available": True, "name": "vdz",
+                                   "fstype": "", "path": "/dev/vdz",
+                                   "mountpoint": "", "type": "disk",
+                                   "size": "2147483648"}}
+
+
+class MockDevices(object):
+    def __init__(self):
+        self.devices = {
+            'computer': {'device_type': 'system',
+                         'firmware': {'release_date': '01/01/2012',
+                                      'vendor': 'LENOVO',
+                                      'version': 'XXXXX (X.XX )'},
+                         'hardware': {'serial': 'PXXXXX',
+                                      'uuid':
+                                      '9d660370-820f-4241-8731-5a60c97e8aa6',
+                                      'vendor': 'LENOVO',
+                                      'version': 'ThinkPad T420'},
+                         'name': 'computer',
+                         'parent': None,
+                         'product': '4180XXX'},
+            'pci_0000_03_00_0': {'bus': 3,
+                                 'device_type': 'pci',
+                                 'domain': 0,
+                                 'driver': {'name': 'iwlwifi'},
+                                 'function': 0,
+                                 'iommuGroup': 7,
+                                 'name': 'pci_0000_03_00_0',
+                                 'parent': 'computer',
+                                 'path':
+                                 '/sys/devices/pci0000:00/0000:03:00.0',
+                                 'product': {
+                                     'description':
+                                     'Centrino Advanced-N 6205 [Taylor Peak]',
+                                     'id': '0x0085'},
+                                 'slot': 0,
+                                 'vendor': {'description': 'Intel Corporation',
+                                            'id': '0x8086'}},
+            'pci_0000_0d_00_0': {'bus': 13,
+                                 'device_type': 'pci',
+                                 'domain': 0,
+                                 'driver': {'name': 'sdhci-pci'},
+                                 'function': 0,
+                                 'iommuGroup': 7,
+                                 'name': 'pci_0000_0d_00_0',
+                                 'parent': 'computer',
+                                 'path':
+                                 '/sys/devices/pci0000:00/0000:0d:00.0',
+                                 'product': {'description':
+                                             'PCIe SDXC/MMC Host Controller',
+                                             'id': '0xe823'},
+                                 'slot': 0,
+                                 'vendor': {'description': 'Ricoh Co Ltd',
+                                            'id': '0x1180'}},
+            'scsi_host0': {'adapter': {'fabric_wwn': '37df6c1efa1b4388',
+                                       'type': 'fc_host',
+                                       'wwnn': 'efb6563f06434a98',
+                                       'wwpn': '742f32073aab45d7'},
+                           'device_type': 'scsi_host',
+                           'host': 0,
+                           'name': 'scsi_host0',
+                           'parent': 'computer',
+                           'path': '/sys/devices/pci0000:00/0000:40:00.0/0'},
+            'scsi_host1': {'adapter': {'fabric_wwn': '542efa5dced34123',
+                                       'type': 'fc_host',
+                                       'wwnn': 'b7433a40c9b84092',
+                                       'wwpn': '25c1f485ae42497f'},
+                           'device_type': 'scsi_host',
+                           'host': 0,
+                           'name': 'scsi_host1',
+                           'parent': 'computer',
+                           'path': '/sys/devices/pci0000:00/0000:40:00.0/1'},
+            'scsi_host2': {'adapter': {'fabric_wwn': '5c373c334c20478d',
+                                       'type': 'fc_host',
+                                       'wwnn': 'f2030bec4a254e6b',
+                                       'wwpn': '07dbca4164d44096'},
+                           'device_type': 'scsi_host',
+                           'host': 0,
+                           'name': 'scsi_host2',
+                           'parent': 'computer',
+                           'path': '/sys/devices/pci0000:00/0000:40:00.0/2'}}
+
+
+class MockSoftwareUpdate(object):
+    def __init__(self):
+        self.pkgs = {
+            'udevmountd': {'repository': 'openSUSE-13.1-Update',
+                           'version': '0.81.5-14.1',
+                           'arch': 'x86_64',
+                           'package_name': 'udevmountd'},
+            'sysconfig-network': {'repository': 'openSUSE-13.1-Extras',
+                                  'version': '0.81.5-14.1',
+                                  'arch': 'x86_64',
+                                  'package_name': 'sysconfig-network'},
+            'libzypp': {'repository': 'openSUSE-13.1-Update',
+                        'version': '13.9.0-10.1',
+                        'arch': 'noarch',
+                        'package_name': 'libzypp'}}
+        self._num2update = 3
+
+    def doUpdate(self, cb, params):
+        msgs = []
+        for pkg in self.pkgs.keys():
+            msgs.append("Updating package %s" % pkg)
+            cb('\n'.join(msgs))
+            time.sleep(1)
+
+        time.sleep(2)
+        msgs.append("All packages updated")
+        cb('\n'.join(msgs), True)
+
+        # After updating all packages, no package should be listed as
+        # available for update, so reset self.pkgs
+        self.pkgs = {}
+
+
+class MockRepositories(object):
+    def __init__(self):
+        self.repos = {"kimchi_repo_1392167832":
+                      {"repo_id": "kimchi_repo_1392167832",
+                       "enabled": True,
+                       "baseurl": "http://www.fedora.org",
+                       "config": {"repo_name": "kimchi_repo_1392167832",
+                                  "gpgkey": [],
+                                  "gpgcheck": True,
+                                  "mirrorlist": ""}}}
+
+
+
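
The _mock_ prefix convention in MockModel.__init__ swaps real model methods
for mock ones at runtime, keeping the originals reachable under a _model_
prefix. A stripped-down sketch of the same renaming trick (the class and
method names below are invented for the example):

    class RealModel(object):
        def partitions_get_list(self):
            return ['sda1']

    class FakeModel(RealModel):
        def __init__(self):
            # Same idea as MockModel.__init__: every callable _mock_<name>
            # attribute replaces <name>, and the original is kept under
            # _model_<name>.
            for method in dir(self):
                if method.startswith('_mock_'):
                    mock_method = getattr(self, method)
                    if not callable(mock_method):
                        continue
                    m = method[6:]
                    setattr(self, '_model_' + m, getattr(self, m))
                    setattr(self, m, mock_method)

        def _mock_partitions_get_list(self):
            return ['vdx', 'vdz']

    fake = FakeModel()
    print(fake.partitions_get_list())         # ['vdx', 'vdz']
    print(fake._model_partitions_get_list())  # ['sda1']
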
diff --git a/plugins/gingerbase/repositories.py b/plugins/gingerbase/repositories.py
new file mode 100644
index 0000000..b99ea5f
--- /dev/null
+++ b/plugins/gingerbase/repositories.py
@@ -0,0 +1,529 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2014-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+
+import copy
+import os
+import time
+import urlparse
+from ConfigParser import ConfigParser
+
+from wok.basemodel import Singleton
+from wok.exception import InvalidOperation, InvalidParameter
+from wok.exception import OperationFailed, NotFoundError, MissingParameter
+from wok.utils import validate_repo_url
+
+from config import kimchiLock
+from yumparser import get_yum_repositories, write_repo_to_file
+
+
+class Repositories(object):
+    __metaclass__ = Singleton
+
+    """
+    Class to represent and operate on repository information.
+    """
+    def __init__(self):
+        try:
+            __import__('yum')
+            self._pkg_mnger = YumRepo()
+        except ImportError:
+            try:
+                __import__('apt_pkg')
+                self._pkg_mnger = AptRepo()
+            except ImportError:
+                raise InvalidOperation('GGBREPOS0014E')
+
+    def addRepository(self, params):
+        """
+        Add and enable a new repository
+        """
+        config = params.get('config', {})
+        extra_keys = list(
+            set(config.keys()).difference(set(self._pkg_mnger.CONFIG_ENTRY)))
+        if len(extra_keys) > 0:
+            raise InvalidParameter("GGBREPOS0028E",
+                                   {'items': ",".join(extra_keys)})
+
+        return self._pkg_mnger.addRepo(params)
+
+    def getRepositories(self):
+        """
+        Return a dictionary with all Kimchi's repositories. Each element uses
+        the format {<repo_id>: {repo}}, where repo is a dictionary in the
+        repositories.Repositories() format.
+        """
+        return self._pkg_mnger.getRepositoriesList()
+
+    def getRepository(self, repo_id):
+        """
+        Return a dictionary with all info from a given repository ID.
+        """
+        info = self._pkg_mnger.getRepo(repo_id)
+        info['repo_id'] = repo_id
+        return info
+
+    def enableRepository(self, repo_id):
+        """
+        Enable a repository.
+        """
+        return self._pkg_mnger.toggleRepo(repo_id, True)
+
+    def disableRepository(self, repo_id):
+        """
+        Disable a given repository.
+        """
+        return self._pkg_mnger.toggleRepo(repo_id, False)
+
+    def updateRepository(self, repo_id, params):
+        """
+        Update the information of a given repository.
+        The input is the repo_id of the repository to be updated and a dict
+        with the information to be updated.
+        """
+        return self._pkg_mnger.updateRepo(repo_id, params)
+
+    def removeRepository(self, repo_id):
+        """
+        Remove a given repository
+        """
+        return self._pkg_mnger.removeRepo(repo_id)
+
+
+class YumRepo(object):
+    """
+    Class to represent and operate with YUM repositories.
+    It's loaded only on those systems where the yum module is available and
+    loads necessary modules at runtime.
+    """
+    TYPE = 'yum'
+    DEFAULT_CONF_DIR = "/etc/yum.repos.d"
+    CONFIG_ENTRY = ('repo_name', 'mirrorlist', 'metalink')
+
+    def __init__(self):
+        self._confdir = self.DEFAULT_CONF_DIR
+
+    def _get_repos(self, errcode):
+        try:
+            kimchiLock.acquire()
+            repos = get_yum_repositories()
+        except Exception, e:
+            raise OperationFailed(errcode, {'err': str(e)})
+        finally:
+            kimchiLock.release()
+
+        return repos
+
+    def getRepositoriesList(self):
+        """
+        Return a list of repositories IDs
+        """
+        repos = self._get_repos('GGBREPOS0024E')
+        return repos.keys()
+
+    def getRepo(self, repo_id):
+        """
+        Return a dictionary, in the repositories.Repositories() format, with
+        the information of the YumRepository object matching the given ID.
+        """
+        repos = self._get_repos('GGBREPOS0025E')
+
+        if repo_id not in repos.keys():
+            raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
+
+        entry = repos.get(repo_id)
+
+        info = {}
+        info['enabled'] = entry.enabled
+        info['baseurl'] = entry.baseurl or ''
+        info['config'] = {}
+        info['config']['repo_name'] = entry.name or ''
+        info['config']['gpgcheck'] = entry.gpgcheck
+        info['config']['gpgkey'] = entry.gpgkey or ''
+        info['config']['mirrorlist'] = entry.mirrorlist or ''
+        info['config']['metalink'] = entry.metalink or ''
+        return info
+
+    def addRepo(self, params):
+        """
+        Add a given repository by writing its configuration to a .repo file
+        """
+        # At least one base url, or one mirror, must be given.
+        baseurl = params.get('baseurl', '')
+
+        config = params.get('config', {})
+        mirrorlist = config.get('mirrorlist', '')
+        metalink = config.get('metalink', '')
+        if not baseurl and not mirrorlist and not metalink:
+            raise MissingParameter("GGBREPOS0013E")
+
+        if baseurl:
+            validate_repo_url(baseurl)
+
+        if mirrorlist:
+            validate_repo_url(mirrorlist)
+
+        if metalink:
+            validate_repo_url(metalink)
+
+        if mirrorlist and metalink:
+            raise InvalidOperation('GGBREPOS0030E')
+
+        repo_id = params.get('repo_id', None)
+        if repo_id is None:
+            repo_id = "kimchi_repo_%s" % str(int(time.time() * 1000))
+
+        repos = self._get_repos('GGBREPOS0026E')
+        if repo_id in repos.keys():
+            raise InvalidOperation("GGBREPOS0022E", {'repo_id': repo_id})
+
+        repo_name = config.get('repo_name', repo_id)
+        repo = {'baseurl': baseurl, 'mirrorlist': mirrorlist,
+                'name': repo_name, 'gpgcheck': 1,
+                'gpgkey': [], 'enabled': 1, 'metalink': metalink}
+
+        # write a repo file in the system with repo{} information.
+        parser = ConfigParser()
+        parser.add_section(repo_id)
+
+        for key, value in repo.iteritems():
+            if value:
+                parser.set(repo_id, key, value)
+
+        repofile = os.path.join(self._confdir, repo_id + '.repo')
+        try:
+            with open(repofile, 'w') as fd:
+                parser.write(fd)
+        except:
+            raise OperationFailed("GGBREPOS0018E",
+                                  {'repo_file': repofile})
+
+        return repo_id
+
+    def toggleRepo(self, repo_id, enable):
+        repos = self._get_repos('GGBREPOS0011E')
+        if repo_id not in repos.keys():
+            raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
+
+        entry = repos.get(repo_id)
+        if enable and entry.enabled:
+            raise InvalidOperation("GGBREPOS0015E", {'repo_id': repo_id})
+
+        if not enable and not entry.enabled:
+            raise InvalidOperation("GGBREPOS0016E", {'repo_id': repo_id})
+
+        kimchiLock.acquire()
+        try:
+            if enable:
+                entry.enable()
+            else:
+                entry.disable()
+
+            write_repo_to_file(entry)
+        except:
+            if enable:
+                raise OperationFailed("GGBREPOS0020E", {'repo_id': repo_id})
+
+            raise OperationFailed("GGBREPOS0021E", {'repo_id': repo_id})
+        finally:
+            kimchiLock.release()
+
+        return repo_id
+
+    def updateRepo(self, repo_id, params):
+        """
+        Update a given repository in repositories.Repositories() format
+        """
+        repos = self._get_repos('GGBREPOS0011E')
+        if repo_id not in repos.keys():
+            raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
+
+        entry = repos.get(repo_id)
+
+        baseurl = params.get('baseurl', None)
+        config = params.get('config', {})
+        mirrorlist = config.get('mirrorlist', None)
+        metalink = config.get('metalink', None)
+
+        if baseurl is not None and len(baseurl.strip()) == 0:
+            baseurl = None
+
+        if mirrorlist is not None and len(mirrorlist.strip()) == 0:
+            mirrorlist = None
+
+        if metalink is not None and len(metalink.strip()) == 0:
+            metalink = None
+
+        if baseurl is None and mirrorlist is None and metalink is None:
+            raise MissingParameter("GGBREPOS0013E")
+
+        if baseurl is not None:
+            validate_repo_url(baseurl)
+            entry.baseurl = baseurl
+
+        if mirrorlist is not None:
+            validate_repo_url(mirrorlist)
+            entry.mirrorlist = mirrorlist
+
+        if metalink is not None:
+            validate_repo_url(metalink)
+            entry.metalink = metalink
+
+        if mirrorlist and metalink:
+            raise InvalidOperation('GGBREPOS0030E')
+
+        entry.id = params.get('repo_id', repo_id)
+        entry.name = config.get('repo_name', entry.name)
+        entry.gpgcheck = config.get('gpgcheck', entry.gpgcheck)
+        entry.gpgkey = config.get('gpgkey', entry.gpgkey)
+        kimchiLock.acquire()
+        write_repo_to_file(entry)
+        kimchiLock.release()
+        return repo_id
+
+    def removeRepo(self, repo_id):
+        """
+        Remove a given repository
+        """
+        repos = self._get_repos('GGBREPOS0027E')
+        if repo_id not in repos.keys():
+            raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
+
+        entry = repos.get(repo_id)
+        parser = ConfigParser()
+        with open(entry.repofile) as fd:
+            parser.readfp(fd)
+
+        if len(parser.sections()) == 1:
+            os.remove(entry.repofile)
+            return
+
+        parser.remove_section(repo_id)
+        with open(entry.repofile, "w") as fd:
+            parser.write(fd)
+
+
+class AptRepo(object):
+    """
+    Class to represent and operate with APT repositories.
+    It's loaded only on those systems listed at APT_DISTROS and loads the
+    necessary modules at runtime.
+    """
+    TYPE = 'deb'
+    KIMCHI_LIST = "kimchi-source.list"
+    CONFIG_ENTRY = ('dist', 'comps')
+
+    def __init__(self):
+        getattr(__import__('apt_pkg'), 'init_config')()
+        getattr(__import__('apt_pkg'), 'init_system')()
+        config = getattr(__import__('apt_pkg'), 'config')
+        self.pkg_lock = getattr(__import__('apt_pkg'), 'SystemLock')
+        module = __import__('aptsources.sourceslist', globals(), locals(),
+                            ['SourcesList'], -1)
+
+        self._sourceparts_path = '/%s%s' % (
+            config.get('Dir::Etc'), config.get('Dir::Etc::sourceparts'))
+        self._sourceslist = getattr(module, 'SourcesList')
+        self.filename = os.path.join(self._sourceparts_path, self.KIMCHI_LIST)
+        if not os.path.exists(self.filename):
+            with open(self.filename, 'w') as fd:
+                fd.write("# This file is managed by Kimchi and it must not "
+                         "be modified manually\n")
+
+    def _get_repos(self):
+        try:
+            with self.pkg_lock():
+                repos = self._sourceslist()
+                repos.refresh()
+        except Exception as e:
+            kimchiLock.release()
+            raise OperationFailed('GGBREPOS0025E', {'err': e.message})
+
+        return repos
+
+    def _get_repo_id(self, repo):
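+        # The ID is derived from the source entry itself; e.g. a hypothetical
+        # entry "deb http://ports.example.com/ubuntu trusty main universe"
+        # maps to "ports.example.com-trusty-main-universe".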
+        data = urlparse.urlparse(repo.uri)
+        name = data.hostname or data.path
+        return '%s-%s-%s' % (name, repo.dist, "-".join(repo.comps))
+
+    def _get_source_entry(self, repo_id):
+        kimchiLock.acquire()
+        repos = self._get_repos()
+        kimchiLock.release()
+
+        for r in repos:
+            # Ignore deb-src repositories
+            if r.type != 'deb':
+                continue
+
+            if self._get_repo_id(r) != repo_id:
+                continue
+
+            return r
+
+        return None
+
+    def getRepositoriesList(self):
+        """
+        Return a list of repositories IDs
+
+        APT repositories do not have the concept of a repository ID, so for
+        internal control the repository ID is built as described in
+        _get_repo_id()
+        """
+        kimchiLock.acquire()
+        repos = self._get_repos()
+        kimchiLock.release()
+
+        res = []
+        for r in repos:
+            # Ignore deb-src repositories
+            if r.type != 'deb':
+                continue
+
+            res.append(self._get_repo_id(r))
+
+        return res
+
+    def getRepo(self, repo_id):
+        """
+        Return a dictionary in the repositories.Repositories() format for the
+        given repository ID, with the information of a SourceEntry object.
+        """
+        r = self._get_source_entry(repo_id)
+        if r is None:
+            raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
+
+        info = {'enabled': not r.disabled,
+                'baseurl': r.uri,
+                'config': {'dist': r.dist,
+                           'comps': r.comps}}
+        return info
+
+    def addRepo(self, params):
+        """
+        Add a new APT repository based on <params>
+        """
+        # To create an APT repository, the dist is a required parameter
+        # (in addition to baseurl, verified on controller through API.json)
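+        # Minimal sketch of the expected <params> for APT, with hypothetical
+        # values:
+        #   {'baseurl': 'http://ports.example.com/ubuntu',
+        #    'config': {'dist': 'trusty', 'comps': ['main', 'universe']}}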
+        config = params.get('config', None)
+        if config is None:
+            raise MissingParameter("GGBREPOS0019E")
+
+        if 'dist' not in config.keys():
+            raise MissingParameter("GGBREPOS0019E")
+
+        uri = params['baseurl']
+        dist = config['dist']
+        comps = config.get('comps', [])
+
+        validate_repo_url(uri)
+
+        kimchiLock.acquire()
+        try:
+            repos = self._get_repos()
+            source_entry = repos.add('deb', uri, dist, comps,
+                                     file=self.filename)
+            with self.pkg_lock():
+                repos.save()
+        except Exception as e:
+            kimchiLock.release()
+            raise OperationFailed("GGBREPOS0026E", {'err': e.message})
+        kimchiLock.release()
+        return self._get_repo_id(source_entry)
+
+    def toggleRepo(self, repo_id, enable):
+        """
+        Enable or disable a given repository
+        """
+        r = self._get_source_entry(repo_id)
+        if r is None:
+            raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
+
+        if enable and not r.disabled:
+            raise InvalidOperation("GGBREPOS0015E", {'repo_id': repo_id})
+
+        if not enable and r.disabled:
+            raise InvalidOperation("GGBREPOS0016E", {'repo_id': repo_id})
+
+        if enable:
+            line = 'deb'
+        else:
+            line = '#deb'
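+        # The entry is rewritten below with this type, so a disabled
+        # repository is expected to end up commented out in the sources
+        # file, e.g. a hypothetical "#deb http://ports.example.com/ubuntu
+        # trusty main".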
+
+        kimchiLock.acquire()
+        try:
+            repos = self._get_repos()
+            with self.pkg_lock():
+                repos.remove(r)
+                repos.add(line, r.uri, r.dist, r.comps, file=self.filename)
+                repos.save()
+        except:
+            kimchiLock.release()
+            if enable:
+                raise OperationFailed("GGBREPOS0020E", {'repo_id': repo_id})
+
+            raise OperationFailed("GGBREPOS0021E", {'repo_id': repo_id})
+        finally:
+            kimchiLock.release()
+
+        return repo_id
+
+    def updateRepo(self, repo_id, params):
+        """
+        Update a given repository in repositories.Repositories() format
+        """
+        old_info = self.getRepo(repo_id)
+        updated_info = copy.deepcopy(old_info)
+        updated_info['baseurl'] = params.get(
+            'baseurl', updated_info['baseurl'])
+
+        if 'config' in params.keys():
+            config = params['config']
+            updated_info['config']['dist'] = config.get(
+                'dist', old_info['config']['dist'])
+            updated_info['config']['comps'] = config.get(
+                'comps', old_info['config']['comps'])
+
+        self.removeRepo(repo_id)
+        try:
+            return self.addRepo(updated_info)
+        except:
+            self.addRepo(old_info)
+            raise
+
+    def removeRepo(self, repo_id):
+        """
+        Remove a given repository
+        """
+        r = self._get_source_entry(repo_id)
+        if r is None:
+            raise NotFoundError("GGBREPOS0012E", {'repo_id': repo_id})
+
+        kimchiLock.acquire()
+        try:
+            repos = self._get_repos()
+            with self.pkg_lock():
+                repos.remove(r)
+                repos.save()
+        except:
+            kimchiLock.release()
+            raise OperationFailed("GGBREPOS0017E", {'repo_id': repo_id})
+        finally:
+            kimchiLock.release()
diff --git a/plugins/gingerbase/swupdate.py b/plugins/gingerbase/swupdate.py
new file mode 100644
index 0000000..b26d1f5
--- /dev/null
+++ b/plugins/gingerbase/swupdate.py
@@ -0,0 +1,263 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2014-2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+
+import subprocess
+import time
+
+from wok.basemodel import Singleton
+from wok.exception import NotFoundError, OperationFailed
+from wok.utils import run_command, wok_log
+
+from config import kimchiLock
+from yumparser import get_yum_packages_list_update
+
+
+class SoftwareUpdate(object):
+    __metaclass__ = Singleton
+
+    """
+    Class to represent and operate with OS software update.
+    """
+    def __init__(self):
+        # This stores all packages to be updated, from Kimchi's perspective.
+        # It's a dictionary of dictionaries, in the format
+        # {'package_name': package}, where:
+        # package = {'package_name': <string>, 'version': <string>,
+        #            'arch': <string>, 'repository': <string>}
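+        # e.g. a hypothetical entry:
+        # {'kernel': {'package_name': 'kernel', 'version': '3.10.0-229.el7',
+        #             'arch': 'x86_64', 'repository': 'updates'}}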
+        self._packages = {}
+
+        # This stores the number of packages to update
+        self._num2update = 0
+
+        # Get the distro of the host machine and create an object for the
+        # correct package management system
+        try:
+            __import__('yum')
+            wok_log.info("Loading YumUpdate features.")
+            self._pkg_mnger = YumUpdate()
+        except ImportError:
+            try:
+                __import__('apt')
+                wok_log.info("Loading AptUpdate features.")
+                self._pkg_mnger = AptUpdate()
+            except ImportError:
+                zypper_help = ["zypper", "--help"]
+                (stdout, stderr, returncode) = run_command(zypper_help)
+                if returncode == 0:
+                    wok_log.info("Loading ZypperUpdate features.")
+                    self._pkg_mnger = ZypperUpdate()
+                else:
+                    raise Exception("There is no compatible package manager "
+                                    "for this system.")
+
+    def _scanUpdates(self):
+        """
+        Update self._packages with packages to be updated.
+        """
+        self._packages = {}
+        self._num2update = 0
+
+        # Call the system pkg_mnger to get the packages as a list of
+        # dictionaries.
+        for pkg in self._pkg_mnger.getPackagesList():
+
+            # Check whether the package is already in self._packages
+            pkg_id = pkg.get('package_name')
+            if pkg_id in self._packages.keys():
+                # Package already listed for update; do nothing
+                continue
+
+            # Update the self._packages and self._num2update
+            self._packages[pkg_id] = pkg
+            self._num2update = self._num2update + 1
+
+    def getUpdates(self):
+        """
+        Refresh the list of updates and return self._packages.
+        """
+        self._scanUpdates()
+        return self._packages
+
+    def getUpdate(self, name):
+        """
+        Return a dictionary with all info from a given package name.
+        """
+        if name not in self._packages.keys():
+            raise NotFoundError('GGBPKGUPD0002E', {'name': name})
+
+        return self._packages[name]
+
+    def getNumOfUpdates(self):
+        """
+        Return the number of packages to be updated.
+        """
+        self._scanUpdates()
+        return self._num2update
+
+    def doUpdate(self, cb, params):
+        """
+        Execute the update
+        """
+        # reset messages
+        cb('')
+
+        cmd = self._pkg_mnger.update_cmd
+        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE)
+        msgs = []
+        while proc.poll() is None:
+            msgs.append(proc.stdout.readline())
+            cb(''.join(msgs))
+            time.sleep(0.5)
+
+        # read the final output lines
+        msgs.extend(proc.stdout.readlines())
+
+        retcode = proc.poll()
+        if retcode == 0:
+            return cb(''.join(msgs), True)
+
+        msgs.extend(proc.stderr.readlines())
+        return cb(''.join(msgs), False)
+
+
+class YumUpdate(object):
+    """
+    Class to represent and operate with YUM software update system.
+    It's loaded only on those systems listed at YUM_DISTROS and loads the
+    necessary modules at runtime.
+    """
+    def __init__(self):
+        self._pkgs = {}
+        self.update_cmd = ["yum", "-y", "update"]
+
+    def _refreshUpdateList(self):
+        """
+        Update the list of packages to be updated in the system.
+        """
+        try:
+            kimchiLock.acquire()
+            self._pkgs = get_yum_packages_list_update()
+        except Exception as e:
+            raise OperationFailed('GGBPKGUPD0003E', {'err': str(e)})
+        finally:
+            kimchiLock.release()
+
+    def getPackagesList(self):
+        """
+        Return a list of package dictionaries. Each dictionary contains the
+        information about a package, in the format:
+        package = {'package_name': <string>, 'version': <string>,
+                   'arch': <string>, 'repository': <string>}
+        """
+        self._refreshUpdateList()
+        pkg_list = []
+        for pkg in self._pkgs:
+            package = {'package_name': pkg.name, 'version': pkg.version,
+                       'arch': pkg.arch, 'repository': pkg.ui_from_repo}
+            pkg_list.append(package)
+        return pkg_list
+
+
+class AptUpdate(object):
+    """
+    Class to represent and operate with APT software update system.
+    It's loaded only on those systems listed at APT_DISTROS and loads the
+    necessary modules at runtime.
+    """
+    def __init__(self):
+        self._pkgs = {}
+        self.pkg_lock = getattr(__import__('apt_pkg'), 'SystemLock')
+        self.update_cmd = ['apt-get', 'upgrade', '-y']
+
+    def _refreshUpdateList(self):
+        """
+        Update the list of packages to be updated in the system.
+        """
+        apt_cache = getattr(__import__('apt'), 'Cache')()
+        try:
+            with self.pkg_lock():
+                apt_cache.update()
+                apt_cache.upgrade()
+                self._pkgs = apt_cache.get_changes()
+        except Exception as e:
+            kimchiLock.release()
+            raise OperationFailed('GGBPKGUPD0003E', {'err': e.message})
+
+    def getPackagesList(self):
+        """
+        Return a list of package dictionaries. Each dictionary contains the
+        information about a package, in the format
+        package = {'package_name': <string>, 'version': <string>,
+                   'arch': <string>, 'repository': <string>}
+        """
+        kimchiLock.acquire()
+        self._refreshUpdateList()
+        kimchiLock.release()
+        pkg_list = []
+        for pkg in self._pkgs:
+            package = {'package_name': pkg.shortname,
+                       'version': pkg.candidate.version,
+                       'arch': pkg._pkg.architecture,
+                       'repository': pkg.candidate.origins[0].label}
+            pkg_list.append(package)
+
+        return pkg_list
+
+
+class ZypperUpdate(object):
+    """
+    Class to represent and operate with Zypper software update system.
+    It's loaded only on those systems listed at ZYPPER_DISTROS and loads the
+    necessary modules at runtime.
+    """
+    def __init__(self):
+        self._pkgs = {}
+        self.update_cmd = ["zypper", "--non-interactive", "update",
+                           "--auto-agree-with-licenses"]
+
+    def _refreshUpdateList(self):
+        """
+        Update the list of packages to be updated in the system.
+        """
+        self._pkgs = []
+        cmd = ["zypper", "list-updates"]
+        (stdout, stderr, returncode) = run_command(cmd)
+
+        if len(stderr) > 0:
+            raise OperationFailed('GGBPKGUPD0003E', {'err': stderr})
+
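+        # 'zypper list-updates' prints a table whose update rows look roughly
+        # like the following (illustrative only):
+        # v | Main Repository | openssl | 1.0.1k-1.1 | 1.0.1k-2.1 | x86_64
+        # so info[1] is the repository, info[2] the name, info[4] the new
+        # version and info[5] the architecture.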
+        for line in stdout.split('\n'):
+            if line.find('v |') >= 0:
+                info = line.split(' | ')
+                package = {'package_name': info[2], 'version': info[4],
+                           'arch': info[5], 'repository': info[1]}
+                self._pkgs.append(package)
+
+    def getPackagesList(self):
+        """
+        Return a list of package dictionaries. Each dictionary contains the
+        information about a package, in the format
+        package = {'package_name': <string>, 'version': <string>,
+                   'arch': <string>, 'repository': <string>}
+        """
+        kimchiLock.acquire()
+        self._refreshUpdateList()
+        kimchiLock.release()
+        return self._pkgs
diff --git a/plugins/gingerbase/yumparser.py b/plugins/gingerbase/yumparser.py
new file mode 100644
index 0000000..74f9fa0
--- /dev/null
+++ b/plugins/gingerbase/yumparser.py
@@ -0,0 +1,283 @@
+#
+# Project Kimchi
+#
+# Copyright IBM, Corp. 2015
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
+
+import subprocess
+from os import listdir
+from os.path import isfile, splitext
+
+
+class YumRepoObject(object):
+
+    def __init__(self, repo_id, repofile):
+        self.repo_id = repo_id
+        self.name = None
+        self.baseurl = None
+        self.enabled = True
+        self.gpgcheck = True
+        self.gpgkey = None
+        self.metalink = None
+        self.mirrorlist = None
+        self.repofile = repofile
+        self.string_attrs = ['baseurl', 'gpgkey', 'name',
+                             'metalink', 'mirrorlist']
+        self.boolean_attrs = ['enabled', 'gpgcheck']
+
+    def set_attribute(self, key, strvalue):
+        if key in self.string_attrs:
+            setattr(self, key, strvalue)
+        elif key in self.boolean_attrs:
+            setattr(self, key, (strvalue == '1'))
+
+    def get_attribute_str(self, key):
+        if key not in self.get_attributes():
+            return None
+
+        if key in self.boolean_attrs:
+            str_value = '1' if getattr(self, key) is True else '0'
+        else:
+            str_value = getattr(self, key)
+
+        if str_value is None:
+            return None
+
+        return key + '=' + str_value
+
+    def get_attributes(self):
+        return self.string_attrs + self.boolean_attrs
+
+    def enable(self):
+        self.enabled = True
+
+    def disable(self):
+        self.enabled = False
+
+    def __str__(self):
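+        # Renders the object as a .repo file section, e.g. (hypothetical):
+        #   [fedora-updates]
+        #   name=Fedora Updates
+        #   baseurl=http://mirror.example.com/fedora/updates/22/
+        #   enabled=1
+        #   gpgcheck=1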
+        str_obj = '[' + self.repo_id + ']' + '\n'
+        for key in self.get_attributes():
+            if self.get_attribute_str(key) is not None:
+                str_obj += self.get_attribute_str(key) + '\n'
+        return str_obj
+
+
+def get_repo_files():
+    def _is_repository_file(f):
+        _, f_extension = splitext(f)
+        return isfile(f) and (f_extension == '.repo')
+
+    YUM_REPO_DIR = '/etc/yum.repos.d'
+    return [YUM_REPO_DIR+'/'+f for f in listdir(YUM_REPO_DIR)
+            if _is_repository_file(YUM_REPO_DIR+'/'+f)]
+
+
+def _ignore_line_repo_file(line):
+    return line.startswith("#") or '=' not in line
+
+
+def _get_repos_from_file(repo_file):
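+    # Parse one .repo file, which may contain several sections; each
+    # "[section]" becomes a YumRepoObject keyed by its repo id. A hypothetical
+    # file with "[fedora]" and "[fedora-updates]" sections yields
+    # {'fedora': <YumRepoObject>, 'fedora-updates': <YumRepoObject>}.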
+    repos_from_file = {}
+    current_repo = None
+    current_repo_id = None
+    with open(repo_file) as f:
+        for line in f.readlines():
+            line = line.strip()
+            if line.startswith("["):
+                if current_repo is not None:
+                    repos_from_file[current_repo_id] = current_repo
+                current_repo_id = line.strip('[]')
+                current_repo = YumRepoObject(current_repo_id, repo_file)
+                continue
+            if _ignore_line_repo_file(line):
+                continue
+            key, value = line.split('=', 1)
+            key = key.strip()
+            value = value.strip()
+            current_repo.set_attribute(key, value)
+
+        # add the last repo from file.
+        if current_repo is not None:
+            repos_from_file[current_repo_id] = current_repo
+
+    return repos_from_file
+
+
+def get_yum_repositories():
+    repo_files = get_repo_files()
+    repos = {}
+    for yum_repo in repo_files:
+        repos.update(_get_repos_from_file(yum_repo))
+
+    return repos
+
+
+def _retrieve_repo_line_index(data, repo):
+    repo_entry = '[' + repo.repo_id + ']\n'
+    try:
+        repo_index = data.index(repo_entry)
+    except:
+        return None
+    return repo_index
+
+
+def _update_repo_file_data(data, repo, repo_index):
+    remaining_repo_attrs = repo.get_attributes()
+
+    for i in range(repo_index + 1, len(data)):
+        line = data[i].strip()
+        if line.startswith('['):
+            break
+        if _ignore_line_repo_file(line):
+            continue
+        key, _ = line.split('=', 1)
+        key = key.strip()
+        attr_str = repo.get_attribute_str(key)
+        if attr_str is None:
+            continue
+        remaining_repo_attrs.remove(key)
+        data[i] = attr_str + '\n'
+
+    for attr in remaining_repo_attrs:
+        attr_str = repo.get_attribute_str(attr)
+        if attr_str is None:
+            continue
+        data.insert(repo_index+1, attr_str + '\n')
+
+    return data
+
+
+def write_repo_to_file(repo):
+    with open(repo.repofile) as f:
+        data = f.readlines()
+
+    repo_index = _retrieve_repo_line_index(data, repo)
+    if repo_index is None:
+        return
+
+    data = _update_repo_file_data(data, repo, repo_index)
+
+    with open(repo.repofile, 'w') as f:
+        f.writelines(data)
+
+
+def _get_last_line_repo(data, repo_index):
+    stop_delete_index = None
+    for i in range(repo_index+1, len(data)):
+        line = data[i].strip()
+        if line.startswith('['):
+            stop_delete_index = i - 1
+            break
+    if stop_delete_index is None:
+        stop_delete_index = len(data) - 1
+
+    return stop_delete_index
+
+
+def _remove_repo_file_data(data, repo_index):
+    last_line_repo = _get_last_line_repo(data, repo_index)
+    for i in range(last_line_repo, repo_index - 1, -1):
+        data.pop(i)
+    return data
+
+
+def delete_repo_from_file(repo):
+    with open(repo.repofile) as f:
+        data = f.readlines()
+
+    repo_index = _retrieve_repo_line_index(data, repo)
+    if repo_index is None:
+        return
+
+    data = _remove_repo_file_data(data, repo_index)
+
+    with open(repo.repofile, 'w') as f:
+        f.writelines(data)
+
+
+class YumUpdatePackageObject(object):
+
+    def __init__(self, name, arch, version, repo):
+        self.name = name
+        self.arch = arch
+        self.version = version
+        self.ui_from_repo = repo
+
+
+def _include_line_checkupdate_output(line):
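+    # A valid 'yum check-update' line has three whitespace-separated tokens,
+    # with the architecture appended to the package name, e.g. (illustrative):
+    #   kernel.x86_64    3.10.0-229.el7    updates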
+    tokens = line.split()
+
+    if len(tokens) != 3:
+        return False
+
+    if '.' not in tokens[0]:
+        return False
+
+    return True
+
+
+def _ignore_obsoleting_packages_in(output):
+    out = ''
+    for l in output.split('\n'):
+        if 'Obsoleting ' in l:
+            break
+        out += l + '\n'
+    return out
+
+
+def _filter_lines_checkupdate_output(output):
+    if output is None:
+        return []
+
+    output = _ignore_obsoleting_packages_in(output)
+
+    out = [l for l in output.split('\n')
+           if _include_line_checkupdate_output(l)]
+    return out
+
+
+def _get_yum_checkupdate_output():
+    cmd = ['yum', 'check-update', '-d0']
+    yum_update_cmd = subprocess.Popen(cmd,
+                                      stdout=subprocess.PIPE,
+                                      stderr=subprocess.PIPE)
+    out, error = yum_update_cmd.communicate()
+    return_code = yum_update_cmd.returncode
+    if return_code == 1:
+        return None
+
+    return out
+
+
+def get_yum_packages_list_update(checkupdate_output=None):
+    if checkupdate_output is None:
+        checkupdate_output = _get_yum_checkupdate_output()
+
+    filtered_output = _filter_lines_checkupdate_output(checkupdate_output)
+
+    packages = []
+    for line in filtered_output:
+        line = line.split()
+        index = 0
+        name_arch = line[index]
+        index += 1
+        version = line[index]
+        index += 1
+        repo = line[index]
+        name, arch = name_arch.rsplit('.', 1)
+        packages.append(YumUpdatePackageObject(name, arch, version, repo))
+
+    return packages
diff --git a/plugins/kimchi/disks.py b/plugins/kimchi/disks.py
deleted file mode 100644
index eb40e3a..0000000
--- a/plugins/kimchi/disks.py
+++ /dev/null
@@ -1,196 +0,0 @@
-#
-# Project Kimchi
-#
-# Copyright IBM, Corp. 2013-2015
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
-
-import os.path
-import re
-import subprocess
-from parted import Device as PDevice
-from parted import Disk as PDisk
-
-from wok.exception import OperationFailed
-from wok.utils import wok_log
-
-
-def _get_dev_node_path(maj_min):
-    """ Returns device node path given the device number 'major:min' """
-
-    dm_name = "/sys/dev/block/%s/dm/name" % maj_min
-    if os.path.exists(dm_name):
-        with open(dm_name) as dm_f:
-            content = dm_f.read().rstrip('\n')
-        return "/dev/mapper/" + content
-
-    uevent = "/sys/dev/block/%s/uevent" % maj_min
-    with open(uevent) as ueventf:
-        content = ueventf.read()
-
-    data = dict(re.findall(r'(\S+)=(".*?"|\S+)', content.replace("\n", " ")))
-
-    return "/dev/%s" % data["DEVNAME"]
-
-
-def _get_lsblk_devs(keys, devs=[]):
-    lsblk = subprocess.Popen(
-        ["lsblk", "-Pbo"] + [','.join(keys)] + devs,
-        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = lsblk.communicate()
-    if lsblk.returncode != 0:
-        raise OperationFailed("KCHDISKS0001E", {'err': err})
-
-    return _parse_lsblk_output(out, keys)
-
-
-def _get_dev_major_min(name):
-    maj_min = None
-
-    keys = ["NAME", "MAJ:MIN"]
-    dev_list = _get_lsblk_devs(keys)
-
-    for dev in dev_list:
-        if dev['name'].split()[0] == name:
-            maj_min = dev['maj:min']
-            break
-    else:
-        raise OperationFailed("KCHDISKS0002E", {'device': name})
-
-    return maj_min
-
-
-def _is_dev_leaf(devNodePath):
-    try:
-        # By default, lsblk prints a device information followed by children
-        # device information
-        childrenCount = len(
-            _get_lsblk_devs(["NAME"], [devNodePath])) - 1
-    except OperationFailed as e:
-        # lsblk is known to fail on multipath devices
-        # Assume these devices contain children
-        wok_log.error(
-            "Error getting device info for %s: %s", devNodePath, e)
-        return False
-
-    return childrenCount == 0
-
-
-def _is_dev_extended_partition(devType, devNodePath):
-    if devType != 'part':
-        return False
-    diskPath = devNodePath.rstrip('0123456789')
-    device = PDevice(diskPath)
-    try:
-        extended_part = PDisk(device).getExtendedPartition()
-    except NotImplementedError as e:
-        wok_log.warning(
-            "Error getting extended partition info for dev %s type %s: %s",
-            devNodePath, devType, e.message)
-        # Treate disk with unsupported partiton table as if it does not
-        # contain extended partitions.
-        return False
-    if extended_part and extended_part.path == devNodePath:
-        return True
-    return False
-
-
-def _parse_lsblk_output(output, keys):
-    # output is on format key="value",
-    # where key can be NAME, TYPE, FSTYPE, SIZE, MOUNTPOINT, etc
-    lines = output.rstrip("\n").split("\n")
-    r = []
-    for line in lines:
-        d = {}
-        for key in keys:
-            expression = r"%s=\".*?\"" % key
-            match = re.search(expression, line)
-            field = match.group()
-            k, v = field.split('=', 1)
-            d[k.lower()] = v[1:-1]
-        r.append(d)
-    return r
-
-
-def _get_vgname(devNodePath):
-    """ Return volume group name of a physical volume. If the device node path
-    is not a physical volume, return empty string. """
-    pvs = subprocess.Popen(
-        ["pvs", "--unbuffered", "--nameprefixes", "--noheadings",
-         "-o", "vg_name", devNodePath],
-        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = pvs.communicate()
-    if pvs.returncode != 0:
-        return ""
-
-    return re.findall(r"LVM2_VG_NAME='([^\']*)'", out)[0]
-
-
-def _is_available(name, devtype, fstype, mountpoint, majmin):
-    devNodePath = _get_dev_node_path(majmin)
-    # Only list unmounted and unformated and leaf and (partition or disk)
-    # leaf means a partition, a disk has no partition, or a disk not held
-    # by any multipath device. Physical volume belongs to no volume group
-    # is also listed. Extended partitions should not be listed.
-    if (devtype in ['part', 'disk', 'mpath'] and
-            fstype in ['', 'LVM2_member'] and
-            mountpoint == "" and
-            _get_vgname(devNodePath) == "" and
-            _is_dev_leaf(devNodePath) and
-            not _is_dev_extended_partition(devtype, devNodePath)):
-        return True
-    return False
-
-
-def get_partitions_names(check=False):
-    names = set()
-    keys = ["NAME", "TYPE", "FSTYPE", "MOUNTPOINT", "MAJ:MIN"]
-    # output is on format key="value",
-    # where key can be NAME, TYPE, FSTYPE, MOUNTPOINT
-    for dev in _get_lsblk_devs(keys):
-        # split()[0] to avoid the second part of the name, after the
-        # whiteline
-        name = dev['name'].split()[0]
-        if check and not _is_available(name, dev['type'], dev['fstype'],
-                                       dev['mountpoint'], dev['maj:min']):
-            continue
-        names.add(name)
-
-    return list(names)
-
-
-def get_partition_details(name):
-    majmin = _get_dev_major_min(name)
-    dev_path = _get_dev_node_path(majmin)
-
-    keys = ["TYPE", "FSTYPE", "SIZE", "MOUNTPOINT"]
-    try:
-        dev = _get_lsblk_devs(keys, [dev_path])[0]
-    except OperationFailed as e:
-        wok_log.error(
-            "Error getting partition info for %s: %s", name, e)
-        return {}
-
-    dev['available'] = _is_available(name, dev['type'], dev['fstype'],
-                                     dev['mountpoint'], majmin)
-    if dev['mountpoint']:
-        # Sometimes the mountpoint comes with [SWAP] or other
-        # info which is not an actual mount point. Filtering it
-        regexp = re.compile(r"\[.*\]")
-        if regexp.search(dev['mountpoint']) is not None:
-            dev['mountpoint'] = ''
-    dev['path'] = dev_path
-    dev['name'] = name
-    return dev
diff --git a/plugins/kimchi/repositories.py b/plugins/kimchi/repositories.py
deleted file mode 100644
index 9caabc4..0000000
--- a/plugins/kimchi/repositories.py
+++ /dev/null
@@ -1,529 +0,0 @@
-#
-# Project Kimchi
-#
-# Copyright IBM, Corp. 2014-2015
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
-
-import copy
-import os
-import time
-import urlparse
-from ConfigParser import ConfigParser
-
-from wok.basemodel import Singleton
-from wok.exception import InvalidOperation, InvalidParameter
-from wok.exception import OperationFailed, NotFoundError, MissingParameter
-from wok.utils import validate_repo_url
-
-from config import kimchiLock
-from yumparser import get_yum_repositories, write_repo_to_file
-
-
-class Repositories(object):
-    __metaclass__ = Singleton
-
-    """
-    Class to represent and operate with repositories information.
-    """
-    def __init__(self):
-        try:
-            __import__('yum')
-            self._pkg_mnger = YumRepo()
-        except ImportError:
-            try:
-                __import__('apt_pkg')
-                self._pkg_mnger = AptRepo()
-            except ImportError:
-                raise InvalidOperation('KCHREPOS0014E')
-
-    def addRepository(self, params):
-        """
-        Add and enable a new repository
-        """
-        config = params.get('config', {})
-        extra_keys = list(
-            set(config.keys()).difference(set(self._pkg_mnger.CONFIG_ENTRY)))
-        if len(extra_keys) > 0:
-            raise InvalidParameter("KCHREPOS0028E",
-                                   {'items': ",".join(extra_keys)})
-
-        return self._pkg_mnger.addRepo(params)
-
-    def getRepositories(self):
-        """
-        Return a dictionary with all Kimchi's repositories. Each element uses
-        the format {<repo_id>: {repo}}, where repo is a dictionary in the
-        repositories.Repositories() format.
-        """
-        return self._pkg_mnger.getRepositoriesList()
-
-    def getRepository(self, repo_id):
-        """
-        Return a dictionary with all info from a given repository ID.
-        """
-        info = self._pkg_mnger.getRepo(repo_id)
-        info['repo_id'] = repo_id
-        return info
-
-    def enableRepository(self, repo_id):
-        """
-        Enable a repository.
-        """
-        return self._pkg_mnger.toggleRepo(repo_id, True)
-
-    def disableRepository(self, repo_id):
-        """
-        Disable a given repository.
-        """
-        return self._pkg_mnger.toggleRepo(repo_id, False)
-
-    def updateRepository(self, repo_id, params):
-        """
-        Update the information of a given repository.
-        The input is the repo_id of the repository to be updated and a dict
-        with the information to be updated.
-        """
-        return self._pkg_mnger.updateRepo(repo_id, params)
-
-    def removeRepository(self, repo_id):
-        """
-        Remove a given repository
-        """
-        return self._pkg_mnger.removeRepo(repo_id)
-
-
-class YumRepo(object):
-    """
-    Class to represent and operate with YUM repositories.
-    It's loaded only on those systems listed at YUM_DISTROS and loads necessary
-    modules in runtime.
-    """
-    TYPE = 'yum'
-    DEFAULT_CONF_DIR = "/etc/yum.repos.d"
-    CONFIG_ENTRY = ('repo_name', 'mirrorlist', 'metalink')
-
-    def __init__(self):
-        self._confdir = self.DEFAULT_CONF_DIR
-
-    def _get_repos(self, errcode):
-        try:
-            kimchiLock.acquire()
-            repos = get_yum_repositories()
-        except Exception, e:
-            kimchiLock.release()
-            raise OperationFailed(errcode, {'err': str(e)})
-        finally:
-            kimchiLock.release()
-
-        return repos
-
-    def getRepositoriesList(self):
-        """
-        Return a list of repositories IDs
-        """
-        repos = self._get_repos('KCHREPOS0024E')
-        return repos.keys()
-
-    def getRepo(self, repo_id):
-        """
-        Return a dictionary in the repositories.Repositories() of the given
-        repository ID format with the information of a YumRepository object.
-        """
-        repos = self._get_repos('KCHREPOS0025E')
-
-        if repo_id not in repos.keys():
-            raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
-
-        entry = repos.get(repo_id)
-
-        info = {}
-        info['enabled'] = entry.enabled
-        info['baseurl'] = entry.baseurl or ''
-        info['config'] = {}
-        info['config']['repo_name'] = entry.name or ''
-        info['config']['gpgcheck'] = entry.gpgcheck
-        info['config']['gpgkey'] = entry.gpgkey or ''
-        info['config']['mirrorlist'] = entry.mirrorlist or ''
-        info['config']['metalink'] = entry.metalink or ''
-        return info
-
-    def addRepo(self, params):
-        """
-        Add a given repository to YumBase
-        """
-        # At least one base url, or one mirror, must be given.
-        baseurl = params.get('baseurl', '')
-
-        config = params.get('config', {})
-        mirrorlist = config.get('mirrorlist', '')
-        metalink = config.get('metalink', '')
-        if not baseurl and not mirrorlist and not metalink:
-            raise MissingParameter("KCHREPOS0013E")
-
-        if baseurl:
-            validate_repo_url(baseurl)
-
-        if mirrorlist:
-            validate_repo_url(mirrorlist)
-
-        if metalink:
-            validate_repo_url(metalink)
-
-        if mirrorlist and metalink:
-            raise InvalidOperation('KCHREPOS0030E')
-
-        repo_id = params.get('repo_id', None)
-        if repo_id is None:
-            repo_id = "kimchi_repo_%s" % str(int(time.time() * 1000))
-
-        repos = self._get_repos('KCHREPOS0026E')
-        if repo_id in repos.keys():
-            raise InvalidOperation("KCHREPOS0022E", {'repo_id': repo_id})
-
-        repo_name = config.get('repo_name', repo_id)
-        repo = {'baseurl': baseurl, 'mirrorlist': mirrorlist,
-                'name': repo_name, 'gpgcheck': 1,
-                'gpgkey': [], 'enabled': 1, 'metalink': metalink}
-
-        # write a repo file in the system with repo{} information.
-        parser = ConfigParser()
-        parser.add_section(repo_id)
-
-        for key, value in repo.iteritems():
-            if value:
-                parser.set(repo_id, key, value)
-
-        repofile = os.path.join(self._confdir, repo_id + '.repo')
-        try:
-            with open(repofile, 'w') as fd:
-                parser.write(fd)
-        except:
-            raise OperationFailed("KCHREPOS0018E",
-                                  {'repo_file': repofile})
-
-        return repo_id
-
-    def toggleRepo(self, repo_id, enable):
-        repos = self._get_repos('KCHREPOS0011E')
-        if repo_id not in repos.keys():
-            raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
-
-        entry = repos.get(repo_id)
-        if enable and entry.enabled:
-            raise InvalidOperation("KCHREPOS0015E", {'repo_id': repo_id})
-
-        if not enable and not entry.enabled:
-            raise InvalidOperation("KCHREPOS0016E", {'repo_id': repo_id})
-
-        kimchiLock.acquire()
-        try:
-            if enable:
-                entry.enable()
-            else:
-                entry.disable()
-
-            write_repo_to_file(entry)
-        except:
-            if enable:
-                raise OperationFailed("KCHREPOS0020E", {'repo_id': repo_id})
-
-            raise OperationFailed("KCHREPOS0021E", {'repo_id': repo_id})
-        finally:
-            kimchiLock.release()
-
-        return repo_id
-
-    def updateRepo(self, repo_id, params):
-        """
-        Update a given repository in repositories.Repositories() format
-        """
-        repos = self._get_repos('KCHREPOS0011E')
-        if repo_id not in repos.keys():
-            raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
-
-        entry = repos.get(repo_id)
-
-        baseurl = params.get('baseurl', None)
-        config = params.get('config', {})
-        mirrorlist = config.get('mirrorlist', None)
-        metalink = config.get('metalink', None)
-
-        if baseurl is not None and len(baseurl.strip()) == 0:
-            baseurl = None
-
-        if mirrorlist is not None and len(mirrorlist.strip()) == 0:
-            mirrorlist = None
-
-        if metalink is not None and len(metalink.strip()) == 0:
-            metalink = None
-
-        if baseurl is None and mirrorlist is None and metalink is None:
-            raise MissingParameter("KCHREPOS0013E")
-
-        if baseurl is not None:
-            validate_repo_url(baseurl)
-            entry.baseurl = baseurl
-
-        if mirrorlist is not None:
-            validate_repo_url(mirrorlist)
-            entry.mirrorlist = mirrorlist
-
-        if metalink is not None:
-            validate_repo_url(metalink)
-            entry.metalink = metalink
-
-        if mirrorlist and metalink:
-            raise InvalidOperation('KCHREPOS0030E')
-
-        entry.id = params.get('repo_id', repo_id)
-        entry.name = config.get('repo_name', entry.name)
-        entry.gpgcheck = config.get('gpgcheck', entry.gpgcheck)
-        entry.gpgkey = config.get('gpgkey', entry.gpgkey)
-        kimchiLock.acquire()
-        write_repo_to_file(entry)
-        kimchiLock.release()
-        return repo_id
-
-    def removeRepo(self, repo_id):
-        """
-        Remove a given repository
-        """
-        repos = self._get_repos('KCHREPOS0027E')
-        if repo_id not in repos.keys():
-            raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
-
-        entry = repos.get(repo_id)
-        parser = ConfigParser()
-        with open(entry.repofile) as fd:
-            parser.readfp(fd)
-
-        if len(parser.sections()) == 1:
-            os.remove(entry.repofile)
-            return
-
-        parser.remove_section(repo_id)
-        with open(entry.repofile, "w") as fd:
-            parser.write(fd)
-
-
-class AptRepo(object):
-    """
-    Class to represent and operate with YUM repositories.
-    It's loaded only on those systems listed at YUM_DISTROS and loads necessary
-    modules in runtime.
-    """
-    TYPE = 'deb'
-    KIMCHI_LIST = "kimchi-source.list"
-    CONFIG_ENTRY = ('dist', 'comps')
-
-    def __init__(self):
-        getattr(__import__('apt_pkg'), 'init_config')()
-        getattr(__import__('apt_pkg'), 'init_system')()
-        config = getattr(__import__('apt_pkg'), 'config')
-        self.pkg_lock = getattr(__import__('apt_pkg'), 'SystemLock')
-        module = __import__('aptsources.sourceslist', globals(), locals(),
-                            ['SourcesList'], -1)
-
-        self._sourceparts_path = '/%s%s' % (
-            config.get('Dir::Etc'), config.get('Dir::Etc::sourceparts'))
-        self._sourceslist = getattr(module, 'SourcesList')
-        self.filename = os.path.join(self._sourceparts_path, self.KIMCHI_LIST)
-        if not os.path.exists(self.filename):
-            with open(self.filename, 'w') as fd:
-                fd.write("# This file is managed by Kimchi and it must not "
-                         "be modified manually\n")
-
-    def _get_repos(self):
-        try:
-            with self.pkg_lock():
-                repos = self._sourceslist()
-                repos.refresh()
-        except Exception, e:
-            kimchiLock.release()
-            raise OperationFailed('KCHREPOS0025E', {'err': e.message})
-
-        return repos
-
-    def _get_repo_id(self, repo):
-        data = urlparse.urlparse(repo.uri)
-        name = data.hostname or data.path
-        return '%s-%s-%s' % (name, repo.dist, "-".join(repo.comps))
-
-    def _get_source_entry(self, repo_id):
-        kimchiLock.acquire()
-        repos = self._get_repos()
-        kimchiLock.release()
-
-        for r in repos:
-            # Ignore deb-src repositories
-            if r.type != 'deb':
-                continue
-
-            if self._get_repo_id(r) != repo_id:
-                continue
-
-            return r
-
-        return None
-
-    def getRepositoriesList(self):
-        """
-        Return a list of repositories IDs
-
-        APT repositories there aren't the concept about repository ID, so for
-        internal control, the repository ID will be built as described in
-        _get_repo_id()
-        """
-        kimchiLock.acquire()
-        repos = self._get_repos()
-        kimchiLock.release()
-
-        res = []
-        for r in repos:
-            # Ignore deb-src repositories
-            if r.type != 'deb':
-                continue
-
-            res.append(self._get_repo_id(r))
-
-        return res
-
-    def getRepo(self, repo_id):
-        """
-        Return a dictionary in the repositories.Repositories() format of the
-        given repository ID with the information of a SourceEntry object.
-        """
-        r = self._get_source_entry(repo_id)
-        if r is None:
-            raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
-
-        info = {'enabled': not r.disabled,
-                'baseurl': r.uri,
-                'config': {'dist': r.dist,
-                           'comps': r.comps}}
-        return info
-
-    def addRepo(self, params):
-        """
-        Add a new APT repository based on <params>
-        """
-        # To create a APT repository the dist is a required parameter
-        # (in addition to baseurl, verified on controller through API.json)
-        config = params.get('config', None)
-        if config is None:
-            raise MissingParameter("KCHREPOS0019E")
-
-        if 'dist' not in config.keys():
-            raise MissingParameter("KCHREPOS0019E")
-
-        uri = params['baseurl']
-        dist = config['dist']
-        comps = config.get('comps', [])
-
-        validate_repo_url(uri)
-
-        kimchiLock.acquire()
-        try:
-            repos = self._get_repos()
-            source_entry = repos.add('deb', uri, dist, comps,
-                                     file=self.filename)
-            with self.pkg_lock():
-                repos.save()
-        except Exception as e:
-            kimchiLock.release()
-            raise OperationFailed("KCHREPOS0026E", {'err': e.message})
-        kimchiLock.release()
-        return self._get_repo_id(source_entry)
-
-    def toggleRepo(self, repo_id, enable):
-        """
-        Enable a given repository
-        """
-        r = self._get_source_entry(repo_id)
-        if r is None:
-            raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
-
-        if enable and not r.disabled:
-            raise InvalidOperation("KCHREPOS0015E", {'repo_id': repo_id})
-
-        if not enable and r.disabled:
-            raise InvalidOperation("KCHREPOS0016E", {'repo_id': repo_id})
-
-        if enable:
-            line = 'deb'
-        else:
-            line = '#deb'
-
-        kimchiLock.acquire()
-        try:
-            repos = self._get_repos()
-            with self.pkg_lock():
-                repos.remove(r)
-                repos.add(line, r.uri, r.dist, r.comps, file=self.filename)
-                repos.save()
-        except:
-            kimchiLock.release()
-            if enable:
-                raise OperationFailed("KCHREPOS0020E", {'repo_id': repo_id})
-
-            raise OperationFailed("KCHREPOS0021E", {'repo_id': repo_id})
-        finally:
-            kimchiLock.release()
-
-        return repo_id
-
-    def updateRepo(self, repo_id, params):
-        """
-        Update a given repository in repositories.Repositories() format
-        """
-        old_info = self.getRepo(repo_id)
-        updated_info = copy.deepcopy(old_info)
-        updated_info['baseurl'] = params.get(
-            'baseurl', updated_info['baseurl'])
-
-        if 'config' in params.keys():
-            config = params['config']
-            updated_info['config']['dist'] = config.get(
-                'dist', old_info['config']['dist'])
-            updated_info['config']['comps'] = config.get(
-                'comps', old_info['config']['comps'])
-
-        self.removeRepo(repo_id)
-        try:
-            return self.addRepo(updated_info)
-        except:
-            self.addRepo(old_info)
-            raise
-
-    def removeRepo(self, repo_id):
-        """
-        Remove a given repository
-        """
-        r = self._get_source_entry(repo_id)
-        if r is None:
-            raise NotFoundError("KCHREPOS0012E", {'repo_id': repo_id})
-
-        kimchiLock.acquire()
-        try:
-            repos = self._get_repos()
-            with self.pkg_lock():
-                repos.remove(r)
-                repos.save()
-        except:
-            kimchiLock.release()
-            raise OperationFailed("KCHREPOS0017E", {'repo_id': repo_id})
-        finally:
-            kimchiLock.release()
diff --git a/plugins/kimchi/swupdate.py b/plugins/kimchi/swupdate.py
deleted file mode 100644
index 84b927f..0000000
--- a/plugins/kimchi/swupdate.py
+++ /dev/null
@@ -1,263 +0,0 @@
-#
-# Project Kimchi
-#
-# Copyright IBM, Corp. 2014-2015
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
-
-import subprocess
-import time
-
-from wok.basemodel import Singleton
-from wok.exception import NotFoundError, OperationFailed
-from wok.utils import run_command, wok_log
-
-from config import kimchiLock
-from yumparser import get_yum_packages_list_update
-
-
-class SoftwareUpdate(object):
-    __metaclass__ = Singleton
-
-    """
-    Class to represent and operate with OS software update.
-    """
-    def __init__(self):
-        # This stores all packages to be updated for Kimchi perspective. It's a
-        # dictionary of dictionaries, in the format {'package_name': package},
-        # where:
-        # package = {'package_name': <string>, 'version': <string>,
-        #           'arch': <string>, 'repository': <string>
-        #           }
-        self._packages = {}
-
-        # This stores the number of packages to update
-        self._num2update = 0
-
-        # Get the distro of host machine and creates an object related to
-        # correct package management system
-        try:
-            __import__('yum')
-            wok_log.info("Loading YumUpdate features.")
-            self._pkg_mnger = YumUpdate()
-        except ImportError:
-            try:
-                __import__('apt')
-                wok_log.info("Loading AptUpdate features.")
-                self._pkg_mnger = AptUpdate()
-            except ImportError:
-                zypper_help = ["zypper", "--help"]
-                (stdout, stderr, returncode) = run_command(zypper_help)
-                if returncode == 0:
-                    wok_log.info("Loading ZypperUpdate features.")
-                    self._pkg_mnger = ZypperUpdate()
-                else:
-                    raise Exception("There is no compatible package manager "
-                                    "for this system.")
-
-    def _scanUpdates(self):
-        """
-        Update self._packages with packages to be updated.
-        """
-        self._packages = {}
-        self._num2update = 0
-
-        # Call system pkg_mnger to get the packages as list of dictionaries.
-        for pkg in self._pkg_mnger.getPackagesList():
-
-            # Check if already exist a package in self._packages
-            pkg_id = pkg.get('package_name')
-            if pkg_id in self._packages.keys():
-                # package already listed to update. do nothing
-                continue
-
-            # Update the self._packages and self._num2update
-            self._packages[pkg_id] = pkg
-            self._num2update = self._num2update + 1
-
-    def getUpdates(self):
-        """
-        Return the self._packages.
-        """
-        self._scanUpdates()
-        return self._packages
-
-    def getUpdate(self, name):
-        """
-        Return a dictionary with all info from a given package name.
-        """
-        if name not in self._packages.keys():
-            raise NotFoundError('KCHPKGUPD0002E', {'name': name})
-
-        return self._packages[name]
-
-    def getNumOfUpdates(self):
-        """
-        Return the number of packages to be updated.
-        """
-        self._scanUpdates()
-        return self._num2update
-
-    def doUpdate(self, cb, params):
-        """
-        Execute the update
-        """
-        # reset messages
-        cb('')
-
-        cmd = self._pkg_mnger.update_cmd
-        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
-                                stderr=subprocess.PIPE)
-        msgs = []
-        while proc.poll() is None:
-            msgs.append(proc.stdout.readline())
-            cb(''.join(msgs))
-            time.sleep(0.5)
-
-        # read the final output lines
-        msgs.extend(proc.stdout.readlines())
-
-        retcode = proc.poll()
-        if retcode == 0:
-            return cb(''.join(msgs), True)
-
-        msgs.extend(proc.stderr.readlines())
-        return cb(''.join(msgs), False)
-
-
-class YumUpdate(object):
-    """
-    Class to represent and operate with YUM software update system.
-    It's loaded only on those systems listed in YUM_DISTROS and loads the
-    necessary modules at runtime.
-    """
-    def __init__(self):
-        self._pkgs = {}
-        self.update_cmd = ["yum", "-y", "update"]
-
-    def _refreshUpdateList(self):
-        """
-        Update the list of packages to be updated in the system.
-        """
-        try:
-            kimchiLock.acquire()
-            self._pkgs = get_yum_packages_list_update()
-        except Exception, e:
-            raise OperationFailed('KCHPKGUPD0003E', {'err': str(e)})
-        finally:
-            kimchiLock.release()
-
-    def getPackagesList(self):
-        """
-        Return a list of package dictionaries. Each dictionary contains the
-        information about a package, in the format:
-        package = {'package_name': <string>, 'version': <string>,
-                   'arch': <string>, 'repository': <string>}
-        """
-        self._refreshUpdateList()
-        pkg_list = []
-        for pkg in self._pkgs:
-            package = {'package_name': pkg.name, 'version': pkg.version,
-                       'arch': pkg.arch, 'repository': pkg.ui_from_repo}
-            pkg_list.append(package)
-        return pkg_list
-
-
-class AptUpdate(object):
-    """
-    Class to represent and operate with APT software update system.
-    It's loaded only on those systems listed in APT_DISTROS and loads the
-    necessary modules at runtime.
-    """
-    def __init__(self):
-        self._pkgs = {}
-        self.pkg_lock = getattr(__import__('apt_pkg'), 'SystemLock')
-        self.update_cmd = ['apt-get', 'upgrade', '-y']
-
-    def _refreshUpdateList(self):
-        """
-        Update the list of packages to be updated in the system.
-        """
-        apt_cache = getattr(__import__('apt'), 'Cache')()
-        try:
-            with self.pkg_lock():
-                apt_cache.update()
-                apt_cache.upgrade()
-                self._pkgs = apt_cache.get_changes()
-        except Exception, e:
-            kimchiLock.release()
-            raise OperationFailed('KCHPKGUPD0003E', {'err': e.message})
-
-    def getPackagesList(self):
-        """
-        Return a list of package dictionaries. Each dictionary contains the
-        information about a package, in the format:
-        package = {'package_name': <string>, 'version': <string>,
-                   'arch': <string>, 'repository': <string>}
-        """
-        kimchiLock.acquire()
-        self._refreshUpdateList()
-        kimchiLock.release()
-        pkg_list = []
-        for pkg in self._pkgs:
-            package = {'package_name': pkg.shortname,
-                       'version': pkg.candidate.version,
-                       'arch': pkg._pkg.architecture,
-                       'repository': pkg.candidate.origins[0].label}
-            pkg_list.append(package)
-
-        return pkg_list
-
-
-class ZypperUpdate(object):
-    """
-    Class to represent and operate with Zypper software update system.
-    It's loaded only on those systems listed in ZYPPER_DISTROS and loads the
-    necessary modules at runtime.
-    """
-    def __init__(self):
-        self._pkgs = {}
-        self.update_cmd = ["zypper", "--non-interactive", "update",
-                           "--auto-agree-with-licenses"]
-
-    def _refreshUpdateList(self):
-        """
-        Update the list of packages to be updated in the system.
-        """
-        self._pkgs = []
-        cmd = ["zypper", "list-updates"]
-        (stdout, stderr, returncode) = run_command(cmd)
-
-        if len(stderr) > 0:
-            raise OperationFailed('KCHPKGUPD0003E', {'err': stderr})
-
-        for line in stdout.split('\n'):
-            if line.find('v |') >= 0:
-                info = line.split(' | ')
-                package = {'package_name': info[2], 'version': info[4],
-                           'arch': info[5], 'repository': info[1]}
-                self._pkgs.append(package)
-
-    def getPackagesList(self):
-        """
-        Return a list of package dictionaries. Each dictionary contains the
-        information about a package, in the format:
-        package = {'package_name': <string>, 'version': <string>,
-                   'arch': <string>, 'repository': <string>}
-        """
-        kimchiLock.acquire()
-        self._refreshUpdateList()
-        kimchiLock.release()
-        return self._pkgs
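A note on the zypper path above: _refreshUpdateList() scrapes the table printed
by "zypper list-updates", keeping only the rows that contain 'v |'. A rough
sketch of what that split produces, using an invented output line:

    # Illustrative only (not part of the patch): how ZypperUpdate splits one
    # 'zypper list-updates' row.  The package line below is invented.
    line = "v | Main Repository | vim | 7.4.326-2.1 | 7.4.326-7.1 | x86_64"
    if line.find('v |') >= 0:
        info = line.split(' | ')
        package = {'package_name': info[2], 'version': info[4],
                   'arch': info[5], 'repository': info[1]}
        # -> package_name='vim', version='7.4.326-7.1',
        #    arch='x86_64', repository='Main Repository'
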
diff --git a/plugins/kimchi/yumparser.py b/plugins/kimchi/yumparser.py
deleted file mode 100644
index 74f9fa0..0000000
--- a/plugins/kimchi/yumparser.py
+++ /dev/null
@@ -1,283 +0,0 @@
-#
-# Project Kimchi
-#
-# Copyright IBM, Corp. 2015
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
-
-import subprocess
-from os import listdir
-from os.path import isfile, splitext
-
-
-class YumRepoObject(object):
-
-    def __init__(self, repo_id, repofile):
-        self.repo_id = repo_id
-        self.name = None
-        self.baseurl = None
-        self.enabled = True
-        self.gpgcheck = True
-        self.gpgkey = None
-        self.metalink = None
-        self.mirrorlist = None
-        self.repofile = repofile
-        self.string_attrs = ['baseurl', 'gpgkey', 'name',
-                             'metalink', 'mirrorlist']
-        self.boolean_attrs = ['enabled', 'gpgcheck']
-
-    def set_attribute(self, key, strvalue):
-        if key in self.string_attrs:
-            setattr(self, key, strvalue)
-        elif key in self.boolean_attrs:
-            setattr(self, key, (strvalue == '1'))
-
-    def get_attribute_str(self, key):
-        if key not in self.get_attributes():
-            return None
-
-        if key in self.boolean_attrs:
-            str_value = '1' if getattr(self, key) is True else '0'
-        else:
-            str_value = getattr(self, key)
-
-        if str_value is None:
-            return None
-
-        return key + '=' + str_value
-
-    def get_attributes(self):
-        return self.string_attrs + self.boolean_attrs
-
-    def enable(self):
-        self.enabled = True
-
-    def disable(self):
-        self.enabled = False
-
-    def __str__(self):
-        str_obj = '[' + self.repo_id + ']' + '\n'
-        for key in self.get_attributes():
-            if self.get_attribute_str(key) is not None:
-                str_obj += self.get_attribute_str(key) + '\n'
-        return str_obj
-
-
-def get_repo_files():
-    def _is_repository_file(f):
-        _, f_extension = splitext(f)
-        return isfile(f) and (f_extension == '.repo')
-
-    YUM_REPO_DIR = '/etc/yum.repos.d'
-    return [YUM_REPO_DIR+'/'+f for f in listdir(YUM_REPO_DIR)
-            if _is_repository_file(YUM_REPO_DIR+'/'+f)]
-
-
-def _ignore_line_repo_file(line):
-    return line.startswith("#") or '=' not in line
-
-
-def _get_repos_from_file(repo_file):
-    repos_from_file = {}
-    current_repo = None
-    current_repo_id = None
-    with open(repo_file) as f:
-        for line in f.readlines():
-            line = line.strip()
-            if line.startswith("["):
-                if current_repo is not None:
-                    repos_from_file[current_repo_id] = current_repo
-                current_repo_id = line.strip('[]')
-                current_repo = YumRepoObject(current_repo_id, repo_file)
-                continue
-            if _ignore_line_repo_file(line):
-                continue
-            key, value = line.split('=', 1)
-            key = key.strip()
-            value = value.strip()
-            current_repo.set_attribute(key, value)
-
-        # add the last repo from file.
-        if current_repo is not None:
-            repos_from_file[current_repo_id] = current_repo
-
-    return repos_from_file
-
-
-def get_yum_repositories():
-    repo_files = get_repo_files()
-    repos = {}
-    for yum_repo in repo_files:
-        repos.update(_get_repos_from_file(yum_repo))
-
-    return repos
-
-
-def _retrieve_repo_line_index(data, repo):
-    repo_entry = '[' + repo.repo_id + ']\n'
-    try:
-        repo_index = data.index(repo_entry)
-    except:
-        return None
-    return repo_index
-
-
-def _update_repo_file_data(data, repo, repo_index):
-    remaining_repo_attrs = repo.get_attributes()
-
-    for i in range(repo_index + 1, len(data)):
-        line = data[i].strip()
-        if line.startswith('['):
-            break
-        if _ignore_line_repo_file(line):
-            continue
-        key, _ = line.split('=', 1)
-        key = key.strip()
-        attr_str = repo.get_attribute_str(key)
-        if attr_str is None:
-            continue
-        remaining_repo_attrs.remove(key)
-        data[i] = attr_str + '\n'
-
-    for attr in remaining_repo_attrs:
-        attr_str = repo.get_attribute_str(attr)
-        if attr_str is None:
-            continue
-        data.insert(repo_index+1, attr_str + '\n')
-
-    return data
-
-
-def write_repo_to_file(repo):
-    with open(repo.repofile) as f:
-        data = f.readlines()
-
-    repo_index = _retrieve_repo_line_index(data, repo)
-    if repo_index is None:
-        return
-
-    data = _update_repo_file_data(data, repo, repo_index)
-
-    with open(repo.repofile, 'w') as f:
-        f.writelines(data)
-
-
-def _get_last_line_repo(data, repo_index):
-    stop_delete_index = None
-    for i in range(repo_index+1, len(data)):
-        line = data[i].strip()
-        if line.startswith('['):
-            stop_delete_index = i - 1
-            break
-    if stop_delete_index is None:
-        stop_delete_index = len(data) - 1
-
-    return stop_delete_index
-
-
-def _remove_repo_file_data(data, repo_index):
-    last_line_repo = _get_last_line_repo(data, repo_index)
-    for i in range(last_line_repo, repo_index - 1, -1):
-        data.pop(i)
-    return data
-
-
-def delete_repo_from_file(repo):
-    with open(repo.repofile) as f:
-        data = f.readlines()
-
-    repo_index = _retrieve_repo_line_index(data, repo)
-    if repo_index is None:
-        return
-
-    data = _remove_repo_file_data(data, repo_index)
-
-    with open(repo.repofile, 'w') as f:
-        f.writelines(data)
-
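Taken together, the helpers above let a repository definition be built, edited
in place, or removed. A short sketch of that round trip, assuming the
YumRepoObject class above is importable; the repo id, name and URL are made-up
values:

    # Illustrative only (not part of the patch).
    repo = YumRepoObject('fedora-updates', '/etc/yum.repos.d/fedora-updates.repo')
    repo.set_attribute('name', 'Fedora Updates')
    repo.set_attribute('baseurl', 'http://example.com/fedora/updates')
    repo.set_attribute('gpgcheck', '0')   # boolean attributes are parsed from '0'/'1'
    repo.disable()
    print(repo)
    # [fedora-updates]
    # baseurl=http://example.com/fedora/updates
    # name=Fedora Updates
    # enabled=0
    # gpgcheck=0
    #
    # write_repo_to_file(repo) rewrites only this section of the .repo file;
    # delete_repo_from_file(repo) removes the whole section.
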
-
-class YumUpdatePackageObject(object):
-
-    def __init__(self, name, arch, version, repo):
-        self.name = name
-        self.arch = arch
-        self.version = version
-        self.ui_from_repo = repo
-
-
-def _include_line_checkupdate_output(line):
-    tokens = line.split()
-
-    if len(tokens) != 3:
-        return False
-
-    if '.' not in tokens[0]:
-        return False
-
-    return True
-
-
-def _ignore_obsoleting_packages_in(output):
-    out = ''
-    for l in output.split('\n'):
-        if 'Obsoleting ' in l:
-            break
-        out += l + '\n'
-    return out
-
-
-def _filter_lines_checkupdate_output(output):
-    if output is None:
-        return []
-
-    output = _ignore_obsoleting_packages_in(output)
-
-    out = [l for l in output.split('\n')
-           if _include_line_checkupdate_output(l)]
-    return out
-
-
-def _get_yum_checkupdate_output():
-    cmd = ['yum', 'check-update', '-d0']
-    yum_update_cmd = subprocess.Popen(cmd,
-                                      stdout=subprocess.PIPE,
-                                      stderr=subprocess.PIPE)
-    out, error = yum_update_cmd.communicate()
-    return_code = yum_update_cmd.returncode
-    if return_code == 1:
-        return None
-
-    return out
-
-
-def get_yum_packages_list_update(checkupdate_output=None):
-    if checkupdate_output is None:
-        checkupdate_output = _get_yum_checkupdate_output()
-
-    filtered_output = _filter_lines_checkupdate_output(checkupdate_output)
-
-    packages = []
-    for line in filtered_output:
-        line = line.split()
-        index = 0
-        name_arch = line[index]
-        index += 1
-        version = line[index]
-        index += 1
-        repo = line[index]
-        name, arch = name_arch.rsplit('.', 1)
-        packages.append(YumUpdatePackageObject(name, arch, version, repo))
-
-    return packages
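The check-update parser at the end of yumparser.py can also be exercised
without touching yum by passing a canned output string, since
get_yum_packages_list_update() accepts one. A small sketch; the package names,
versions and repositories below are invented:

    # Illustrative only (not part of the patch): parsing a canned
    # 'yum check-update -d0' output with the helper above.
    sample = (
        "kernel.x86_64          3.10.0-229.el7     updates\n"
        "openssl.x86_64         1.0.1e-42.el7      updates\n"
        "Obsoleting Packages\n"
        "old-pkg.noarch         1.0-1              base\n"
    )
    for pkg in get_yum_packages_list_update(sample):
        print('%s %s %s %s' % (pkg.name, pkg.arch, pkg.version, pkg.ui_from_repo))
    # kernel x86_64 3.10.0-229.el7 updates
    # openssl x86_64 1.0.1e-42.el7 updates
    # (everything after the 'Obsoleting Packages' marker is ignored)
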
-- 
2.1.0



