[swdb]: integration squashed
SWDB - Unified Software Database for DNF integration history sqlite, yumdb and history persistor replaced by SWDB yum/history classes rewritten into libdnf-swdb see https://github.com/edynox/libdnf Original commit tree was too long and caused problems when rebasing. Find original tree here: https://github.com/edynox/dnf/tree/hif-swdb I will try to keep both branches up to date for easier debugging until the PR is merged. [swdb]: rewritten transaction merging into libdnf Transaction merging was done via obsolete classes YumHistoryTransaction and YumMergedHistoryTransaction which were significantly redundant with the SWDB Transaction implementation. Transactions are now merged directly in SWDB using the method merge. i.e. T1.merge(T2), [swdb]: trans with support + fixed history info TRANS_WITH was missing from history info output. Fixed package comparison and merged transaction IDs. [swdb]: transaction comparison tests Fixed tests for transaction comparison Rewritten yumdb usage in base.py - introduced in upstream [swdb]: change lib path for testing [swdb]: refactor nvra to nevra in swdb calls [swdb]: fix group_remove unit test and group reset [swdb]: support for get erased reason Introduced in rpm-software-management/dnf@3c661d6fd7 [swdb]: cosmetic changes - package representation use str(pkg) instead of pkg.nevra rename some variable names [swdb]: refactor group is_installed to installed For compatibility with the original API + some cosmetic changes [swdb]: port mark group feature to swdb + remove YumHistoryMergedTransaction from rollback command [swdb]: reorganize database sources in DNF Drop unused yum files and migrate SWDB sources out of the yum folder. 
[swdb]: drop yum/packages.py Replaced by DnfSwdbPkg method `match(pattern)` [swdb]: API refactor - nevra -> DnfPackage Hide internal swdb nevra binding from user [swdb]: mark history.group as property Load GroupPersistor lazily [swdb]: API for asking if a group is installed (RhBug:1339623) [swdb]: refactor reason - string -> enum Use reason enumeration for simpler comparison and database access. [swdb]: refactor package type to enum Use enumeration for package type instead of string. Fix ui_from_repo for transaction_with packages. [swdb]: introduce dnf.db.types Move Swdb specific types to separate file to avoid circular dependencies between history and other source files.
This commit is contained in:
parent
564c44667c
commit
1972a5fd27
|
@ -13,3 +13,5 @@ bin/dnf*-2
|
|||
bin/dnf*-3
|
||||
bin/yum-2
|
||||
bin/yum-3
|
||||
*.cmake
|
||||
*CMakeCache*
|
||||
|
|
1
AUTHORS
1
AUTHORS
|
@ -68,6 +68,7 @@ DNF CONTRIBUTORS
|
|||
Daniel Mach <dmach@redhat.com>
|
||||
Dave Johansen <davejohansen@gmail.com>
|
||||
Dylan Pindur <dylanpindur@gmail.com>
|
||||
Eduard Cuba <ecuba@redhat.com>
|
||||
Frank Dana <ferdnyc@gmail.com>
|
||||
George Machitidze <giomac@gmail.com>
|
||||
Haïkel Guémar <haikel.guemar@gmail.com>
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
All files in DNF are distributed as GPLv2+ with the exceptions below:
|
||||
|
||||
yum/sqlutils.py : GPLv2
|
||||
rpm/transaction.py : GPL
|
||||
|
||||
Licensing conditions of the following files were disputed when DNF was forked
|
||||
|
|
7
dnf.spec
7
dnf.spec
|
@ -124,6 +124,7 @@ BuildRequires: python-librepo >= %{librepo_version}
|
|||
BuildRequires: python-nose
|
||||
BuildRequires: python2-gpg
|
||||
Requires: python2-gpg
|
||||
BuildRequires: python-gobject
|
||||
BuildRequires: pyliblzma
|
||||
Requires: pyliblzma
|
||||
Requires: %{name}-conf = %{version}-%{release}
|
||||
|
@ -139,6 +140,7 @@ Requires: rpm-plugin-systemd-inhibit
|
|||
%else
|
||||
BuildRequires: python2-rpm >= %{rpm_version}
|
||||
Requires: python2-rpm >= %{rpm_version}
|
||||
Requires: python-gobject
|
||||
Recommends: rpm-plugin-systemd-inhibit
|
||||
%endif
|
||||
# dnf-langpacks package is retired in F25
|
||||
|
@ -161,8 +163,10 @@ BuildRequires: python3-librepo >= %{librepo_version}
|
|||
BuildRequires: python3-nose
|
||||
BuildRequires: python3-gpg
|
||||
Requires: python3-gpg
|
||||
BuildRequires: python3-gobject
|
||||
Requires: %{name}-conf = %{version}-%{release}
|
||||
Requires: deltarpm
|
||||
Requires: python3-gobject
|
||||
Requires: python3-hawkey >= %{hawkey_version}
|
||||
Requires: python3-iniparse
|
||||
Requires: python3-libcomps >= %{libcomps_version}
|
||||
|
@ -253,6 +257,9 @@ ln -sr %{buildroot}%{_bindir}/dnf-2 %{buildroot}%{_bindir}/yum
|
|||
rm -vf %{buildroot}%{_bindir}/dnf-automatic-*
|
||||
|
||||
%check
|
||||
# for SWDB testing
|
||||
export GI_TYPELIB_PATH=/usr/local/lib64/girepository-1.0
|
||||
export LD_LIBRARY_PATH=/usr/local/lib64
|
||||
pushd build
|
||||
ctest -VV
|
||||
popd
|
||||
|
|
|
@ -8,3 +8,4 @@ ADD_SUBDIRECTORY (cli)
|
|||
ADD_SUBDIRECTORY (conf)
|
||||
ADD_SUBDIRECTORY (rpm)
|
||||
ADD_SUBDIRECTORY (yum)
|
||||
ADD_SUBDIRECTORY (db)
|
||||
|
|
170
dnf/base.py
170
dnf/base.py
|
@ -26,9 +26,9 @@ from __future__ import unicode_literals
|
|||
from dnf.comps import CompsQuery
|
||||
from dnf.i18n import _, P_, ucd
|
||||
from dnf.util import first
|
||||
from dnf.yum import history
|
||||
from dnf.db import history
|
||||
from dnf.db.types import SwdbPkgData
|
||||
from dnf.yum import misc
|
||||
from dnf.yum import rpmsack
|
||||
from functools import reduce
|
||||
import collections
|
||||
import datetime
|
||||
|
@ -88,7 +88,6 @@ class Base(object):
|
|||
self._tempfiles = set()
|
||||
self._trans_tempfiles = set()
|
||||
self._ds_callback = dnf.callback.Depsolve()
|
||||
self._group_persistor = None
|
||||
self._logging = dnf.logging.Logging()
|
||||
self._repos = dnf.repodict.RepoDict()
|
||||
self._rpm_probfilter = set([rpm.RPMPROB_FILTER_OLDPACKAGE])
|
||||
|
@ -207,9 +206,6 @@ class Base(object):
|
|||
self._repo_persistor.expired_to_add.update(expired)
|
||||
self._repo_persistor.save()
|
||||
|
||||
if self._group_persistor:
|
||||
self._group_persistor.save()
|
||||
|
||||
if self._tempfile_persistor:
|
||||
self._tempfile_persistor.save()
|
||||
|
||||
|
@ -381,16 +377,10 @@ class Base(object):
|
|||
self._plugins.run_sack()
|
||||
return self._sack
|
||||
|
||||
@property
|
||||
@dnf.util.lazyattr("_priv_yumdb")
|
||||
def _yumdb(self):
|
||||
db_path = os.path.normpath(self.conf.persistdir + '/yumdb')
|
||||
return rpmsack.AdditionalPkgDB(db_path)
|
||||
|
||||
def _finalize_base(self):
|
||||
def _finalize_base(self):
|
||||
if not self._trans_success:
|
||||
for pkg, reason in self._revert_reason:
|
||||
self._yumdb.get_package(pkg).reason = reason
|
||||
self.history.set_reason(pkg, reason)
|
||||
self._tempfile_persistor = dnf.persistor.TempfilePersistor(
|
||||
self.conf.cachedir)
|
||||
|
||||
|
@ -413,7 +403,7 @@ class Base(object):
|
|||
"'%s'."), "dnf clean packages")
|
||||
|
||||
# Do not trigger the lazy creation:
|
||||
if self._history is not None:
|
||||
if self.history is not None:
|
||||
self.history.close()
|
||||
self._store_persistent_data()
|
||||
self._closeRpmDB()
|
||||
|
@ -459,8 +449,8 @@ class Base(object):
|
|||
self._goal = None
|
||||
if self._sack is not None:
|
||||
self._goal = dnf.goal.Goal(self._sack)
|
||||
if self._group_persistor is not None:
|
||||
self._group_persistor = self._activate_group_persistor()
|
||||
if self.history.group_active():
|
||||
self.history.reset_group()
|
||||
self._comps_trans = dnf.comps.TransactionBunch()
|
||||
self._transaction = None
|
||||
|
||||
|
@ -516,9 +506,6 @@ class Base(object):
|
|||
del self._priv_ts
|
||||
self._priv_ts = None
|
||||
|
||||
def _activate_group_persistor(self):
|
||||
return dnf.persistor.GroupPersistor(self.conf.persistdir, self._comps)
|
||||
|
||||
def read_comps(self, arch_filter=False):
|
||||
# :api
|
||||
"""Create the groups object to access the comps metadata."""
|
||||
|
@ -552,29 +539,32 @@ class Base(object):
|
|||
msg = _('Failed to add groups file for repository: %s - %s')
|
||||
logger.critical(msg, repo.id, e)
|
||||
|
||||
self._group_persistor = self._activate_group_persistor()
|
||||
if arch_filter:
|
||||
self._comps._i.arch_filter(
|
||||
[self._conf.substitutions['basearch']])
|
||||
timer()
|
||||
return self._comps
|
||||
|
||||
def _getHistory(self):
|
||||
def _getHistory(self, db_path=None):
|
||||
"""auto create the history object that to access/append the transaction
|
||||
history information. """
|
||||
if self._history is None:
|
||||
db_path = self.conf.persistdir + "/history"
|
||||
if not db_path:
|
||||
db_path = os.path.join(self.conf.persistdir, "history")
|
||||
releasever = self.conf.releasever
|
||||
self._history = history.YumHistory(db_path, self._yumdb,
|
||||
root=self.conf.installroot,
|
||||
releasever=releasever)
|
||||
self._history = history.SwdbInterface(
|
||||
db_path,
|
||||
root=self.conf.installroot,
|
||||
releasever=releasever,
|
||||
transform=self.conf.transformdb
|
||||
)
|
||||
return self._history
|
||||
|
||||
history = property(fget=lambda self: self._getHistory(),
|
||||
fset=lambda self, value: setattr(
|
||||
self, "_history", value),
|
||||
fdel=lambda self: setattr(self, "_history", None),
|
||||
doc="Yum History Object")
|
||||
doc="DNF SWDB Interface Object")
|
||||
|
||||
def _goal2transaction(self, goal):
|
||||
ts = dnf.transaction.Transaction()
|
||||
|
@ -643,14 +633,10 @@ class Base(object):
|
|||
inst = inst.filter(pkg=sltr.matches())
|
||||
return list(inst)
|
||||
|
||||
def _is_userinstalled(self, pkg):
|
||||
"""Returns true if the package is installed by user."""
|
||||
return self._yumdb.get_package(pkg).get('reason') == 'user' and \
|
||||
self._yumdb.get_package(pkg).get('from_repo') != 'anakonda'
|
||||
|
||||
def iter_userinstalled(self):
|
||||
"""Get iterator over the packages installed by the user."""
|
||||
return (pkg for pkg in self.sack.query().installed() if self._is_userinstalled(pkg))
|
||||
return (pkg for pkg in self.sack.query().installed()
|
||||
if self.history.user_installed(pkg))
|
||||
|
||||
def _run_hawkey_goal(self, goal, allow_erasing):
|
||||
ret = goal.run(
|
||||
|
@ -670,7 +656,8 @@ class Base(object):
|
|||
self._ds_callback.start()
|
||||
goal = self._goal
|
||||
if goal.req_has_erase():
|
||||
goal.push_userinstalled(self.sack.query().installed(), self._yumdb)
|
||||
goal.push_userinstalled(self.sack.query().installed(),
|
||||
self.history)
|
||||
elif not self.conf.upgrade_group_objects_upgrade:
|
||||
# exclude packages installed from groups
|
||||
# these packages will be marked to installation
|
||||
|
@ -678,8 +665,6 @@ class Base(object):
|
|||
# to prevent "conflicting job" error it's not applied
|
||||
# to "remove" and "reinstall" commands
|
||||
|
||||
if not self._group_persistor:
|
||||
self._group_persistor = self._activate_group_persistor()
|
||||
solver = self._build_comps_solver()
|
||||
solver._exclude_packages_from_installed_groups(self)
|
||||
|
||||
|
@ -704,12 +689,8 @@ class Base(object):
|
|||
exc = dnf.exceptions.Error(msg)
|
||||
|
||||
if exc is not None:
|
||||
if self._group_persistor:
|
||||
self._group_persistor._rollback()
|
||||
raise exc
|
||||
if self._group_persistor:
|
||||
installed = self.sack.query().installed()
|
||||
self._group_persistor.update_group_env_installed(installed, goal)
|
||||
|
||||
self._plugins.run_resolved()
|
||||
return got_transaction
|
||||
|
||||
|
@ -721,13 +702,14 @@ class Base(object):
|
|||
[dnf.yum.rpmtrans.LoggingTransactionDisplay()] + list(display)
|
||||
|
||||
if not self.transaction:
|
||||
if self._group_persistor:
|
||||
if self.history.group_active():
|
||||
self._trans_success = True
|
||||
self._group_persistor.commit()
|
||||
self.history.group.commit()
|
||||
return
|
||||
|
||||
logger.info(_('Running transaction check'))
|
||||
lock = dnf.lock.build_rpmdb_lock(self.conf.persistdir, self.conf.exit_on_lock)
|
||||
lock = dnf.lock.build_rpmdb_lock(self.conf.persistdir,
|
||||
self.conf.exit_on_lock)
|
||||
with lock:
|
||||
# save our ds_callback out
|
||||
dscb = self._ds_callback
|
||||
|
@ -782,8 +764,8 @@ class Base(object):
|
|||
timer()
|
||||
self._plugins.unload_removed_plugins(self.transaction)
|
||||
self._plugins.run_transaction()
|
||||
if self._group_persistor and self._trans_success:
|
||||
self._group_persistor.commit()
|
||||
if self.history.group_active() and self._trans_success:
|
||||
self.history.group.commit()
|
||||
|
||||
def _trans_error_summary(self, errstring):
|
||||
"""Parse the error string for 'interesting' errors which can
|
||||
|
@ -825,10 +807,10 @@ class Base(object):
|
|||
using_pkgs_pats = list(self.conf.history_record_packages)
|
||||
installed_query = self.sack.query().installed()
|
||||
using_pkgs = installed_query.filter(name=using_pkgs_pats).run()
|
||||
rpmdbv = self.sack._rpmdb_version(self._yumdb)
|
||||
rpmdbv = self.sack._rpmdb_version(self.history)
|
||||
lastdbv = self.history.last()
|
||||
if lastdbv is not None:
|
||||
lastdbv = lastdbv.end_rpmdbversion
|
||||
lastdbv = lastdbv.end_rpmdb_version
|
||||
|
||||
if lastdbv is None or rpmdbv != lastdbv:
|
||||
logger.debug("RPMDB altered outside of DNF.")
|
||||
|
@ -839,8 +821,13 @@ class Base(object):
|
|||
elif hasattr(self, 'cmds') and self.cmds:
|
||||
cmdline = ' '.join(self.cmds)
|
||||
|
||||
self.history.beg(rpmdbv, using_pkgs, list(self.transaction),
|
||||
[], [], cmdline)
|
||||
tsis = list(self.transaction)
|
||||
installonly = self._get_installonly_query()
|
||||
|
||||
for tsi in tsis:
|
||||
tsi._propagate_reason(self.history, installonly)
|
||||
|
||||
self.history.beg(rpmdbv, using_pkgs, tsis, cmdline)
|
||||
# write out our config and repo data to additional history info
|
||||
self._store_config_in_history()
|
||||
|
||||
|
@ -926,21 +913,20 @@ class Base(object):
|
|||
|
||||
def _verify_transaction(self, verify_pkg_cb=None):
|
||||
"""Check that the transaction did what was expected, and
|
||||
propagate external yumdb information. Output error messages
|
||||
propagate external history information. Output error messages
|
||||
if the transaction did not do what was expected.
|
||||
|
||||
:param txmbr_cb: the callback for the rpm transaction members
|
||||
"""
|
||||
# check to see that the rpmdb and the transaction roughly matches
|
||||
# push package object metadata outside of rpmdb into yumdb
|
||||
# delete old yumdb metadata entries
|
||||
# push package object metadata outside of rpmdb into history
|
||||
|
||||
# for each pkg in the transaction
|
||||
# if it is an install - see that the pkg is installed
|
||||
# if it is a remove - see that the pkg is no longer installed, provided
|
||||
# that there is not also an install of this pkg in the transaction
|
||||
# (reinstall)
|
||||
# for any kind of install add from_repo to the yumdb, and the cmdline
|
||||
# for any kind of install add from_repo to the history, and the cmdline
|
||||
# and the install reason
|
||||
|
||||
total = self.transaction._total_package_count()
|
||||
|
@ -975,51 +961,39 @@ class Base(object):
|
|||
continue
|
||||
po = installed[0]
|
||||
count = display_banner(rpo, count)
|
||||
yumdb_info = self._yumdb.get_package(po)
|
||||
yumdb_info.from_repo = rpo.repoid
|
||||
|
||||
yumdb_info.reason = tsi._propagated_reason(self._yumdb, self._get_installonly_query())
|
||||
yumdb_info.releasever = self.conf.releasever
|
||||
if hasattr(self, 'args') and self.args:
|
||||
yumdb_info.command_line = ' '.join(self.args)
|
||||
elif hasattr(self, 'cmds') and self.cmds:
|
||||
yumdb_info.command_line = ' '.join(self.cmds)
|
||||
csum = rpo.returnIdSum()
|
||||
if csum is not None:
|
||||
yumdb_info.checksum_type = str(csum[0])
|
||||
yumdb_info.checksum_data = csum[1]
|
||||
|
||||
pkg_info = SwdbPkgData()
|
||||
pkg_info.from_repo = rpo.repoid
|
||||
if rpo._from_cmdline:
|
||||
try:
|
||||
st = os.stat(rpo.localPkg())
|
||||
lp_ctime = str(int(st.st_ctime))
|
||||
lp_mtime = str(int(st.st_mtime))
|
||||
yumdb_info.from_repo_revision = lp_ctime
|
||||
yumdb_info.from_repo_timestamp = lp_mtime
|
||||
pkg_info.from_repo_revision = lp_ctime
|
||||
pkg_info.from_repo_timestamp = lp_mtime
|
||||
except Exception:
|
||||
pass
|
||||
elif hasattr(rpo.repo, 'repoXML'):
|
||||
md = rpo.repo.repoXML
|
||||
if md and md._revision is not None:
|
||||
yumdb_info.from_repo_revision = str(md._revision)
|
||||
pkg_info.from_repo_revision = str(md._revision)
|
||||
if md:
|
||||
yumdb_info.from_repo_timestamp = str(md._timestamp)
|
||||
pkg_info.from_repo_timestamp = str(md._timestamp)
|
||||
|
||||
loginuid = misc.getloginuid()
|
||||
if tsi.op_type in (dnf.transaction.DOWNGRADE,
|
||||
dnf.transaction.REINSTALL,
|
||||
dnf.transaction.UPGRADE):
|
||||
opo = tsi.erased
|
||||
opo_yumdb_info = self._yumdb.get_package(opo)
|
||||
if 'installed_by' in opo_yumdb_info:
|
||||
yumdb_info.installed_by = opo_yumdb_info.installed_by
|
||||
opo_pkg_info = self.history.package_data(opo)
|
||||
if opo_pkg_info and opo_pkg_info.installed_by:
|
||||
pkg_info.installed_by = opo_pkg_info.installed_by
|
||||
if loginuid is not None:
|
||||
yumdb_info.changed_by = str(loginuid)
|
||||
pkg_info.changed_by = str(loginuid)
|
||||
elif loginuid is not None:
|
||||
yumdb_info.installed_by = str(loginuid)
|
||||
pkg_info.installed_by = str(loginuid)
|
||||
|
||||
if self.conf.history_record:
|
||||
self.history.sync_alldb(po)
|
||||
self.history.sync_alldb(po, pkg_info)
|
||||
|
||||
just_installed = self.sack.query().\
|
||||
filter(pkg=self.transaction.install_set)
|
||||
|
@ -1033,11 +1007,9 @@ class Base(object):
|
|||
logger.critical(msg, rpo)
|
||||
count = display_banner(rpo, count)
|
||||
continue
|
||||
else:
|
||||
self._yumdb.get_package(rpo).clean()
|
||||
count = display_banner(rpo, count)
|
||||
if self._record_history():
|
||||
rpmdbv = rpmdb_sack._rpmdb_version(self._yumdb)
|
||||
rpmdbv = rpmdb_sack._rpmdb_version(self.history)
|
||||
self.history.end(rpmdbv, 0)
|
||||
timer()
|
||||
self._trans_success = True
|
||||
|
@ -1275,7 +1247,7 @@ class Base(object):
|
|||
"""Test whether given package originates from the repository."""
|
||||
if reponame is None:
|
||||
return True
|
||||
return self._yumdb.get_package(package).get('from_repo') == reponame
|
||||
return self.history.repo(package) == reponame
|
||||
|
||||
def pkgs_from_repo(packages):
|
||||
"""Filter out the packages which do not originate from the repo."""
|
||||
|
@ -1381,7 +1353,7 @@ class Base(object):
|
|||
|
||||
# packages to be removed by autoremove
|
||||
elif pkgnarrow == 'autoremove':
|
||||
autoremove_q = query_for_repo(q)._unneeded(self.sack, self._yumdb)
|
||||
autoremove_q = query_for_repo(q)._unneeded(self.sack, self.history)
|
||||
autoremove = autoremove_q.run()
|
||||
|
||||
# not in a repo but installed
|
||||
|
@ -1437,10 +1409,13 @@ class Base(object):
|
|||
return
|
||||
|
||||
for pkg in query:
|
||||
reason = self._yumdb.get_package(pkg).reason
|
||||
self._yumdb.get_package(pkg).reason = 'dep'
|
||||
reason = self.history.reason(pkg)
|
||||
self.history.mark_user_installed(pkg, False)
|
||||
self._revert_reason.append((pkg, reason))
|
||||
unneeded_pkgs = self.sack.query()._unneeded(self.sack, self._yumdb, debug_solver=False)
|
||||
unneeded_pkgs = self.sack.query()._unneeded(self.sack,
|
||||
self.history,
|
||||
debug_solver=False)
|
||||
|
||||
remove_packages = query.intersection(unneeded_pkgs)
|
||||
if remove_packages:
|
||||
sltr = dnf.selector.Selector(self.sack)
|
||||
|
@ -1519,11 +1494,11 @@ class Base(object):
|
|||
if not q:
|
||||
return None
|
||||
try:
|
||||
return self._yumdb.get_package(q[0]).reason
|
||||
return self.history.reason(q[0])
|
||||
except AttributeError:
|
||||
return 'unknown'
|
||||
|
||||
return dnf.comps.Solver(self._group_persistor, self._comps, reason_fn)
|
||||
return dnf.comps.Solver(self.history.group, self._comps, reason_fn)
|
||||
|
||||
def environment_install(self, env_id, types, exclude=None, strict=True):
|
||||
solver = self._build_comps_solver()
|
||||
|
@ -1585,7 +1560,7 @@ class Base(object):
|
|||
return self._add_comps_trans(trans)
|
||||
|
||||
def env_group_install(self, patterns, types, strict=True):
|
||||
q = CompsQuery(self.comps, self._group_persistor,
|
||||
q = CompsQuery(self.comps, self.history.group,
|
||||
CompsQuery.ENVIRONMENTS | CompsQuery.GROUPS,
|
||||
CompsQuery.AVAILABLE | CompsQuery.INSTALLED)
|
||||
cnt = 0
|
||||
|
@ -1602,7 +1577,6 @@ class Base(object):
|
|||
for env_id in res.environments:
|
||||
cnt += self.environment_install(env_id, types, strict=strict)
|
||||
if not done and strict:
|
||||
self._group_persistor._rollback()
|
||||
raise dnf.exceptions.Error(_('Nothing to do.'))
|
||||
return cnt
|
||||
|
||||
|
@ -1612,7 +1586,7 @@ class Base(object):
|
|||
return self._add_comps_trans(trans)
|
||||
|
||||
def env_group_remove(self, patterns):
|
||||
q = CompsQuery(self.comps, self._group_persistor,
|
||||
q = CompsQuery(self.comps, self.history.group,
|
||||
CompsQuery.ENVIRONMENTS | CompsQuery.GROUPS,
|
||||
CompsQuery.INSTALLED)
|
||||
try:
|
||||
|
@ -1628,7 +1602,7 @@ class Base(object):
|
|||
return cnt
|
||||
|
||||
def env_group_upgrade(self, patterns):
|
||||
q = CompsQuery(self.comps, self._group_persistor,
|
||||
q = CompsQuery(self.comps, self.history.group,
|
||||
CompsQuery.GROUPS | CompsQuery.ENVIRONMENTS,
|
||||
CompsQuery.INSTALLED)
|
||||
res = q.get(*patterns)
|
||||
|
@ -1902,7 +1876,7 @@ class Base(object):
|
|||
raise dnf.exceptions.Error(_('No packages marked for removal.'))
|
||||
|
||||
else:
|
||||
pkgs = self.sack.query()._unneeded(self.sack, self._yumdb,
|
||||
pkgs = self.sack.query()._unneeded(self.sack, self.history,
|
||||
debug_solver=self.conf.debug_solver)
|
||||
for pkg in pkgs:
|
||||
self.package_remove(pkg)
|
||||
|
@ -1915,7 +1889,7 @@ class Base(object):
|
|||
installed = [
|
||||
pkg for pkg in matches.installed()
|
||||
if reponame is None or
|
||||
self._yumdb.get_package(pkg).get('from_repo') == reponame]
|
||||
self.history.repo(pkg) == reponame]
|
||||
if not installed:
|
||||
raise dnf.exceptions.PackagesNotInstalledError(
|
||||
'no package matched', pkg_spec)
|
||||
|
@ -1932,7 +1906,7 @@ class Base(object):
|
|||
installed_pkgs = [
|
||||
pkg for pkg in q.installed()
|
||||
if old_reponame is None or
|
||||
self._yumdb.get_package(pkg).get('from_repo') == old_reponame]
|
||||
self.history.repo(pkg) == old_reponame]
|
||||
|
||||
available_q = q.available()
|
||||
if new_reponame is not None:
|
||||
|
@ -2272,15 +2246,15 @@ class Base(object):
|
|||
return results
|
||||
|
||||
def _store_config_in_history(self):
|
||||
self.history.write_addon_data('config-main', self.conf.dump())
|
||||
self.history.addon_data.write('config-main', self.conf.dump())
|
||||
myrepos = ''
|
||||
for repo in self.repos.iter_enabled():
|
||||
myrepos += repo.dump()
|
||||
myrepos += '\n'
|
||||
self.history.write_addon_data('config-repos', myrepos)
|
||||
self.history.addon_data.write('config-repos', myrepos)
|
||||
|
||||
def _store_comment_in_history(self, comment):
|
||||
self.history.write_addon_data('transaction-comment', comment)
|
||||
self.history.addon_data.write('transaction-comment', comment)
|
||||
|
||||
def urlopen(self, url, repo=None, mode='w+b', **kwargs):
|
||||
# :api
|
||||
|
|
|
@ -138,11 +138,6 @@ class BaseCli(dnf.Base):
|
|||
super(BaseCli, self).__init__(conf=conf)
|
||||
self.output = output.Output(self, self.conf)
|
||||
|
||||
def _groups_diff(self):
|
||||
if not self._group_persistor:
|
||||
return None
|
||||
return self._group_persistor.diff()
|
||||
|
||||
def do_transaction(self, display=()):
|
||||
"""Take care of package downloading, checking, user
|
||||
confirmation and actually running the transaction.
|
||||
|
@ -166,10 +161,6 @@ class BaseCli(dnf.Base):
|
|||
logger.debug(
|
||||
'Includes in repo ' + repo.id + ": " + ", ".join(sorted(set(repo.includepkgs))))
|
||||
|
||||
grp_diff = self._groups_diff()
|
||||
grp_str = self.output.list_group_transaction(self.comps, self._group_persistor, grp_diff)
|
||||
if grp_str:
|
||||
logger.info(grp_str)
|
||||
trans = self.transaction
|
||||
pkg_str = self.output.list_transaction(trans)
|
||||
if pkg_str:
|
||||
|
@ -199,7 +190,7 @@ class BaseCli(dnf.Base):
|
|||
else:
|
||||
self.output.reportDownloadSize(install_pkgs, install_only)
|
||||
|
||||
if trans or (grp_diff and not grp_diff.empty()):
|
||||
if trans:
|
||||
# confirm with user
|
||||
if self.conf.downloadonly:
|
||||
logger.info(_("DNF will only download packages for the transaction."))
|
||||
|
@ -583,18 +574,18 @@ class BaseCli(dnf.Base):
|
|||
return 0, ['Rollback to current, nothing to do']
|
||||
|
||||
mobj = None
|
||||
for tid in self.history.old(list(range(old.tid + 1, last.tid + 1))):
|
||||
if tid.altered_lt_rpmdb:
|
||||
logger.warning(_('Transaction history is incomplete, before %u.'), tid.tid)
|
||||
elif tid.altered_gt_rpmdb:
|
||||
logger.warning(_('Transaction history is incomplete, after %u.'), tid.tid)
|
||||
for trans in self.history.old(list(range(old.tid + 1, last.tid + 1))):
|
||||
if trans.altered_lt_rpmdb:
|
||||
logger.warning(_('Transaction history is incomplete, before %u.'), trans.tid)
|
||||
elif trans.altered_gt_rpmdb:
|
||||
logger.warning(_('Transaction history is incomplete, after %u.'), trans.tid)
|
||||
|
||||
if mobj is None:
|
||||
mobj = dnf.yum.history.YumMergedHistoryTransaction(tid)
|
||||
mobj = trans
|
||||
else:
|
||||
mobj.merge(tid)
|
||||
mobj.merge(trans)
|
||||
|
||||
tm = dnf.util.normalize_time(old.beg_timestamp)
|
||||
tm = dnf.util.normalize_time(float(old.beg_timestamp))
|
||||
print("Rollback to transaction %u, from %s" % (old.tid, tm))
|
||||
print(self.output.fmtKeyValFill(" Undoing the following transactions: ",
|
||||
", ".join((str(x) for x in mobj.tid))))
|
||||
|
@ -626,7 +617,7 @@ class BaseCli(dnf.Base):
|
|||
if old is None:
|
||||
return 1, ['Failed history undo']
|
||||
|
||||
tm = dnf.util.normalize_time(old.beg_timestamp)
|
||||
tm = dnf.util.normalize_time(float(old.beg_timestamp))
|
||||
msg = _("Undoing transaction {}, from {}").format(old.tid, ucd(tm))
|
||||
logger.info(msg)
|
||||
self.output.historyInfoCmdPkgsAltered(old) # :todo
|
||||
|
|
|
@ -415,9 +415,9 @@ class RepoPkgsCommand(Command):
|
|||
except dnf.exceptions.PackagesNotAvailableError as err:
|
||||
for pkg in err.packages:
|
||||
xmsg = ''
|
||||
yumdb_info = self.base._yumdb.get_package(pkg)
|
||||
if 'from_repo' in yumdb_info:
|
||||
xmsg = _(' (from %s)') % yumdb_info.from_repo
|
||||
pkgrepo = self.base.history.repo(pkg)
|
||||
if pkgrepo:
|
||||
xmsg = _(' (from %s)') % pkgrepo
|
||||
msg = _('Installed package %s%s not available.')
|
||||
logger.info(msg, self.output.term.bold(pkg), xmsg)
|
||||
except dnf.exceptions.MarkingError:
|
||||
|
@ -472,9 +472,9 @@ class RepoPkgsCommand(Command):
|
|||
except dnf.exceptions.PackagesNotAvailableError as err:
|
||||
for pkg in err.packages:
|
||||
xmsg = ''
|
||||
yumdb_info = self.base._yumdb.get_package(pkg)
|
||||
if 'from_repo' in yumdb_info:
|
||||
xmsg = _(' (from %s)') % yumdb_info.from_repo
|
||||
pkgrepo = self.base.history.repo(pkg)
|
||||
if pkgrepo:
|
||||
xmsg = _(' (from %s)') % pkgrepo
|
||||
msg = _('Installed package %s%s not available.')
|
||||
logger.info(msg, self.output.term.bold(pkg), xmsg)
|
||||
except dnf.exceptions.MarkingError:
|
||||
|
@ -535,10 +535,10 @@ class RepoPkgsCommand(Command):
|
|||
|
||||
subject = dnf.subject.Subject(pkg_spec)
|
||||
matches = subject.get_best_query(self.cli.base.sack)
|
||||
yumdb = self.cli.base._yumdb
|
||||
history = self.cli.base.history
|
||||
installed = [
|
||||
pkg for pkg in matches.installed()
|
||||
if yumdb.get_package(pkg).get('from_repo') == reponame]
|
||||
if history.repo(pkg) == reponame]
|
||||
if not installed:
|
||||
raise dnf.exceptions.PackagesNotInstalledError(
|
||||
'no package matched', pkg_spec)
|
||||
|
@ -836,7 +836,7 @@ class HistoryCommand(Command):
|
|||
demands.fresh_metadata = False
|
||||
demands.sack_activation = True
|
||||
demands.root_user = True
|
||||
if not os.access(self.base.history._db_file, os.R_OK):
|
||||
if not os.access(self.base.history.get_path(), os.R_OK):
|
||||
logger.critical(_("You don't have access to the history DB."))
|
||||
raise dnf.cli.CliError
|
||||
self.transaction_ids = self._args2transaction_ids(self.merged_transaction_ids,
|
||||
|
@ -862,7 +862,7 @@ class HistoryCommand(Command):
|
|||
old = self.base.history_get_transaction(extcmds)
|
||||
if old is None:
|
||||
return 1, ['Failed history redo']
|
||||
tm = dnf.util.normalize_time(old.beg_timestamp)
|
||||
tm = dnf.util.normalize_time(float(old.beg_timestamp))
|
||||
print('Repeating transaction %u, from %s' % (old.tid, tm))
|
||||
self.output.historyInfoCmdPkgsAltered(old)
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ from __future__ import absolute_import
|
|||
from __future__ import unicode_literals
|
||||
from dnf.comps import CompsQuery
|
||||
from dnf.cli import commands
|
||||
from dnf.i18n import _
|
||||
from dnf.i18n import _, ucd
|
||||
|
||||
import dnf.cli
|
||||
import dnf.exceptions
|
||||
|
@ -73,7 +73,11 @@ class GroupCommand(commands.Command):
|
|||
|
||||
def _environment_lists(self, patterns):
|
||||
def available_pred(env):
|
||||
return not self.base._group_persistor.environment(env.id).installed
|
||||
env_found = self.base.history.group.environment(env.id)
|
||||
if env_found:
|
||||
return not env_found.installed
|
||||
else:
|
||||
return True
|
||||
|
||||
self._assert_comps()
|
||||
if patterns is None:
|
||||
|
@ -85,7 +89,10 @@ class GroupCommand(commands.Command):
|
|||
|
||||
def _group_lists(self, uservisible, patterns):
|
||||
def installed_pred(group):
|
||||
return self.base._group_persistor.group(group.id).installed
|
||||
group_found = self.base.history.group.group(group.id)
|
||||
if group_found:
|
||||
return group_found.installed
|
||||
return False
|
||||
installed = []
|
||||
available = []
|
||||
|
||||
|
@ -222,7 +229,7 @@ class GroupCommand(commands.Command):
|
|||
return 0, []
|
||||
|
||||
def _mark_install(self, patterns):
|
||||
prst = self.base._group_persistor
|
||||
prst = self.base.history.group
|
||||
q = CompsQuery(self.base.comps, prst,
|
||||
CompsQuery.GROUPS | CompsQuery.ENVIRONMENTS,
|
||||
CompsQuery.AVAILABLE | CompsQuery.INSTALLED)
|
||||
|
@ -241,15 +248,16 @@ class GroupCommand(commands.Command):
|
|||
|
||||
if res.environments:
|
||||
logger.info(_('Environments marked installed: %s'),
|
||||
','.join([prst.environment(g).ui_name
|
||||
','.join([ucd(prst.environment(g).ui_name)
|
||||
for g in res.environments]))
|
||||
if res.groups:
|
||||
logger.info(_('Groups marked installed: %s'),
|
||||
','.join([prst.group(g).ui_name for g in res.groups]))
|
||||
','.join([ucd(prst.group(g).ui_name)
|
||||
for g in res.groups]))
|
||||
prst.commit()
|
||||
|
||||
def _mark_remove(self, patterns):
|
||||
prst = self.base._group_persistor
|
||||
prst = self.base.history.group
|
||||
q = CompsQuery(self.base.comps, prst,
|
||||
CompsQuery.GROUPS | CompsQuery.ENVIRONMENTS,
|
||||
CompsQuery.INSTALLED)
|
||||
|
@ -262,11 +270,11 @@ class GroupCommand(commands.Command):
|
|||
|
||||
if res.environments:
|
||||
logger.info(_('Environments marked removed: %s'),
|
||||
','.join([prst.environment(e_id).ui_name
|
||||
','.join([ucd(prst.environment(e_id).ui_name)
|
||||
for e_id in res.environments]))
|
||||
if res.groups:
|
||||
logger.info(_('Groups marked removed: %s'),
|
||||
','.join([prst.group(g_id).ui_name
|
||||
','.join([ucd(prst.group(g_id).ui_name)
|
||||
for g_id in res.groups]))
|
||||
prst.commit()
|
||||
|
||||
|
@ -412,9 +420,8 @@ class GroupCommand(commands.Command):
|
|||
if not self._remark:
|
||||
return
|
||||
goal = self.base._goal
|
||||
pkgdb = self.base._yumdb
|
||||
history = self.base.history
|
||||
names = goal.group_members
|
||||
for pkg in self.base.sack.query().installed().filter(name=names):
|
||||
db_pkg = pkgdb.get_package(pkg)
|
||||
reason = db_pkg.get('reason') or 'unknown'
|
||||
db_pkg.reason = goal.group_reason(pkg, reason)
|
||||
reason = history.reason(pkg)
|
||||
history.set_reason(pkg, goal.group_reason(pkg, reason))
|
||||
|
|
|
@ -42,18 +42,15 @@ class MarkCommand(commands.Command):
|
|||
parser.add_argument('package', nargs='+')
|
||||
|
||||
def _mark_install(self, pkg):
|
||||
yumdb = self.base._yumdb
|
||||
yumdb.get_package(pkg).reason = 'user'
|
||||
self.base.history.mark_user_installed(pkg, True)
|
||||
logger.info(_('%s marked as user installed.'), str(pkg))
|
||||
|
||||
def _mark_remove(self, pkg):
|
||||
yumdb = self.base._yumdb
|
||||
yumdb.get_package(pkg).reason = 'dep'
|
||||
self.base.history.mark_user_installed(pkg, False)
|
||||
logger.info(_('%s unmarked as user installed.'), str(pkg))
|
||||
|
||||
def _mark_group(self, pkg):
|
||||
yumdb = self.base._yumdb
|
||||
yumdb.get_package(pkg).reason = 'group'
|
||||
self.base.history.set_reason(pkg, 'group')
|
||||
logger.info(_('%s marked as group installed.'), str(pkg))
|
||||
|
||||
def configure(self):
|
||||
|
|
|
@ -85,9 +85,9 @@ class ReinstallCommand(commands.Command):
|
|||
except dnf.exceptions.PackagesNotAvailableError as err:
|
||||
for pkg in err.packages:
|
||||
xmsg = ''
|
||||
yumdb_info = self.base._yumdb.get_package(pkg)
|
||||
if 'from_repo' in yumdb_info:
|
||||
xmsg = _(' (from %s)') % yumdb_info.from_repo
|
||||
pkgrepo = self.base.history.repo(pkg)
|
||||
if pkgrepo:
|
||||
xmsg = _(' (from %s)') % pkgrepo
|
||||
msg = _('Installed package %s%s not available.')
|
||||
logger.info(msg, self.base.output.term.bold(pkg),
|
||||
xmsg)
|
||||
|
|
|
@ -354,7 +354,7 @@ class RepoQueryCommand(commands.Command):
|
|||
raise dnf.exceptions.Error(_("argument {}: not allowed with argument {}".format(
|
||||
"--available", "--" + self.opts.list)))
|
||||
elif self.opts.list == "unneeded":
|
||||
q = q._unneeded(self.base.sack, self.base._yumdb)
|
||||
q = q._unneeded(self.base.sack, self.base.history)
|
||||
elif self.opts.list and self.opts.list != 'userinstalled':
|
||||
q = getattr(q, self.opts.list)()
|
||||
|
||||
|
|
|
@ -33,9 +33,7 @@ import dnf.crypto
|
|||
import dnf.i18n
|
||||
import dnf.transaction
|
||||
import dnf.util
|
||||
import dnf.yum.history
|
||||
import dnf.yum.misc
|
||||
import dnf.yum.packages
|
||||
import fnmatch
|
||||
import hawkey
|
||||
import itertools
|
||||
|
@ -44,6 +42,7 @@ import operator
|
|||
import pwd
|
||||
import sys
|
||||
import time
|
||||
from dnf.db.types import SwdbReason
|
||||
|
||||
logger = logging.getLogger('dnf')
|
||||
|
||||
|
@ -70,22 +69,22 @@ def _make_lists(transaction, goal):
|
|||
if tsi.op_type == dnf.transaction.DOWNGRADE:
|
||||
b.downgraded.append(tsi)
|
||||
elif tsi.op_type == dnf.transaction.ERASE:
|
||||
if tsi.erased and goal.get_reason(tsi.erased) == 'clean':
|
||||
if tsi.erased and goal.get_reason(tsi.erased) == SwdbReason.CLEAN:
|
||||
b.erased_clean.append(tsi)
|
||||
elif tsi.erased and goal.get_reason(tsi.erased) == 'dep':
|
||||
elif tsi.erased and goal.get_reason(tsi.erased) == SwdbReason.DEP:
|
||||
b.erased_dep.append(tsi)
|
||||
else:
|
||||
b.erased.append(tsi)
|
||||
elif tsi.op_type == dnf.transaction.INSTALL:
|
||||
if tsi.installed:
|
||||
reason = goal.get_reason(tsi.installed)
|
||||
if reason == 'user':
|
||||
if reason == SwdbReason.USER:
|
||||
b.installed.append(tsi)
|
||||
continue
|
||||
elif reason == 'group':
|
||||
elif reason == SwdbReason.GROUP:
|
||||
b.installed_group.append(tsi)
|
||||
continue
|
||||
elif reason == 'weak':
|
||||
elif reason == SwdbReason.WEAK:
|
||||
b.installed_weak.append(tsi)
|
||||
continue
|
||||
b.installed_dep.append(tsi)
|
||||
|
@ -205,10 +204,6 @@ class Output(object):
|
|||
def sack(self):
|
||||
return self.base.sack
|
||||
|
||||
@property
|
||||
def yumdb(self):
|
||||
return self.base._yumdb
|
||||
|
||||
def calcColumns(self, data, columns=None, remainder_column=0,
|
||||
total_width=None, indent=''):
|
||||
"""Dynamically calculate the widths of the columns that the
|
||||
|
@ -494,7 +489,10 @@ class Output(object):
|
|||
" : ", val or ""))
|
||||
|
||||
(hibeg, hiend) = self._highlight(highlight)
|
||||
yumdb_info = self.yumdb.get_package(pkg) if pkg._from_system else {}
|
||||
pkg_data = {}
|
||||
if pkg._from_system:
|
||||
pkg_data = self.history.package_data(pkg)
|
||||
|
||||
print_key_val(_("Name"), "%s%s%s" % (hibeg, pkg.name, hiend))
|
||||
if pkg.epoch:
|
||||
print_key_val(_("Epoch"), pkg.epoch)
|
||||
|
@ -504,8 +502,8 @@ class Output(object):
|
|||
print_key_val(_("Size"), format_number(float(pkg._size)))
|
||||
print_key_val(_("Source"), pkg.sourcerpm)
|
||||
print_key_val(_("Repo"), pkg.repoid)
|
||||
if 'from_repo' in yumdb_info:
|
||||
print_key_val(_("From repo"), yumdb_info.from_repo)
|
||||
if pkg_data and pkg_data.from_repo:
|
||||
print_key_val(_("From repo"), pkg_data.from_repo)
|
||||
if self.conf.verbose:
|
||||
# :hawkey does not support changelog information
|
||||
# print(_("Committer : %s") % ucd(pkg.committer))
|
||||
|
@ -516,18 +514,18 @@ class Output(object):
|
|||
if pkg.installtime:
|
||||
print_key_val(_("Install time"),
|
||||
dnf.util.normalize_time(pkg.installtime))
|
||||
if yumdb_info:
|
||||
if pkg_data:
|
||||
uid = None
|
||||
if 'installed_by' in yumdb_info:
|
||||
if pkg_data.installed_by:
|
||||
try:
|
||||
uid = int(yumdb_info.installed_by)
|
||||
uid = int(pkg_data.installed_by)
|
||||
except ValueError: # In case int() fails
|
||||
uid = None
|
||||
print_key_val(_("Installed by"), self._pwd_ui_username(uid))
|
||||
uid = None
|
||||
if 'changed_by' in yumdb_info:
|
||||
if pkg_data.changed_by:
|
||||
try:
|
||||
uid = int(yumdb_info.changed_by)
|
||||
uid = int(pkg_data.changed_by)
|
||||
except ValueError: # In case int() fails
|
||||
uid = None
|
||||
print_key_val(_("Changed by"), self._pwd_ui_username(uid))
|
||||
|
@ -857,7 +855,7 @@ class Output(object):
|
|||
if not verbose:
|
||||
return
|
||||
|
||||
print(_("Repo : %s") % po.ui_from_repo)
|
||||
print(_("Repo : %s") % po.ui_from_repo())
|
||||
printed_match = False
|
||||
name_match = False
|
||||
for item in set(values):
|
||||
|
@ -1324,8 +1322,8 @@ Transaction Summary
|
|||
def _history_uiactions(self, hpkgs):
|
||||
actions = set()
|
||||
count = 0
|
||||
for hpkg in hpkgs:
|
||||
st = hpkg.state
|
||||
for pkg in hpkgs:
|
||||
st = pkg.state
|
||||
if st == 'True-Install':
|
||||
st = 'Install'
|
||||
if st == 'Dep-Install': # Mask these at the higher levels
|
||||
|
@ -1373,7 +1371,7 @@ Transaction Summary
|
|||
return ret[0]
|
||||
|
||||
try:
|
||||
user = pwd.getpwuid(uid)
|
||||
user = pwd.getpwuid(int(uid))
|
||||
fullname = _safe_split_0(ucd(user.pw_gecos), ';', 2)
|
||||
user_name = ucd(user.pw_name)
|
||||
name = "%s <%s>" % (fullname, user_name)
|
||||
|
@ -1513,9 +1511,10 @@ Transaction Summary
|
|||
name = old.cmdline or ''
|
||||
else:
|
||||
name = self._pwd_ui_username(old.loginuid, 24)
|
||||
name = ucd(name)
|
||||
tm = time.strftime("%Y-%m-%d %H:%M",
|
||||
time.localtime(old.beg_timestamp))
|
||||
num, uiacts = self._history_uiactions(old.trans_data)
|
||||
time.localtime(float(old.beg_timestamp)))
|
||||
num, uiacts = self._history_uiactions(old.data())
|
||||
name = fill_exact_width(name, 24, 24)
|
||||
uiacts = fill_exact_width(uiacts, 14, 14)
|
||||
rmark = lmark = ' '
|
||||
|
@ -1524,12 +1523,8 @@ Transaction Summary
|
|||
elif old.return_code:
|
||||
rmark = lmark = '#'
|
||||
# We don't check .errors, because return_code will be non-0
|
||||
elif old.output:
|
||||
elif old.is_output:
|
||||
rmark = lmark = 'E'
|
||||
elif old.rpmdb_problems:
|
||||
rmark = lmark = 'P'
|
||||
elif old.trans_skip:
|
||||
rmark = lmark = 's'
|
||||
if old.altered_lt_rpmdb:
|
||||
rmark = '<'
|
||||
if old.altered_gt_rpmdb:
|
||||
|
@ -1554,12 +1549,16 @@ Transaction Summary
|
|||
return 1, ['Failed history info']
|
||||
|
||||
lasttid = old.tid
|
||||
lastdbv = old.end_rpmdbversion
|
||||
lastdbv = old.end_rpmdb_version
|
||||
|
||||
transactions = []
|
||||
if not tids and len(extcmds) < 2:
|
||||
old = self.history.last(complete_transactions_only=False)
|
||||
if old is not None:
|
||||
tids.add(old.tid)
|
||||
transactions.append(old)
|
||||
else:
|
||||
transactions = self.history.old(tids)
|
||||
|
||||
if not tids:
|
||||
logger.critical(_('No transaction ID, or package, given'))
|
||||
|
@ -1573,22 +1572,21 @@ Transaction Summary
|
|||
mtids = sorted(mtids)
|
||||
bmtid, emtid = mtids.pop()
|
||||
|
||||
for tid in self.history.old(tids):
|
||||
if lastdbv is not None and tid.tid == lasttid:
|
||||
for trans in transactions:
|
||||
if lastdbv is not None and trans.tid == lasttid:
|
||||
# If this is the last transaction, is good and it doesn't
|
||||
# match the current rpmdb ... then mark it as bad.
|
||||
rpmdbv = self.sack._rpmdb_version(self.yumdb)
|
||||
if lastdbv != rpmdbv:
|
||||
tid.altered_gt_rpmdb = True
|
||||
rpmdbv = self.sack._rpmdb_version(self.history)
|
||||
trans.compare_rpmdbv(str(rpmdbv))
|
||||
lastdbv = None
|
||||
|
||||
merged = False
|
||||
|
||||
if tid.tid >= bmtid and tid.tid <= emtid:
|
||||
if trans.tid >= bmtid and trans.tid <= emtid:
|
||||
if mobj is None:
|
||||
mobj = dnf.yum.history.YumMergedHistoryTransaction(tid)
|
||||
mobj = trans
|
||||
else:
|
||||
mobj.merge(tid)
|
||||
mobj.merge(trans)
|
||||
merged = True
|
||||
elif mobj is not None:
|
||||
if done:
|
||||
|
@ -1597,29 +1595,24 @@ Transaction Summary
|
|||
|
||||
self._historyInfoCmd(mobj)
|
||||
mobj = None
|
||||
|
||||
if mtids:
|
||||
bmtid, emtid = mtids.pop()
|
||||
if tid.tid >= bmtid and tid.tid <= emtid:
|
||||
mobj = dnf.yum.history.YumMergedHistoryTransaction(tid)
|
||||
if trans.tid >= bmtid and trans.tid <= emtid:
|
||||
mobj = trans
|
||||
merged = True
|
||||
|
||||
if not merged:
|
||||
if done:
|
||||
print("-" * 79)
|
||||
done = True
|
||||
self._historyInfoCmd(tid, pats)
|
||||
self._historyInfoCmd(trans, pats)
|
||||
|
||||
if mobj is not None:
|
||||
if done:
|
||||
print("-" * 79)
|
||||
self._historyInfoCmd(mobj)
|
||||
|
||||
def _hpkg2from_repo(self, hpkg):
|
||||
""" Given a pkg, find the ipkg.ui_from_repo."""
|
||||
if 'from_repo' in hpkg.yumdb_info:
|
||||
return hpkg.ui_from_repo
|
||||
return "(unknown)"
|
||||
|
||||
def _historyInfoCmd(self, old, pats=[]):
|
||||
name = self._pwd_ui_username(old.loginuid)
|
||||
|
||||
|
@ -1639,18 +1632,29 @@ Transaction Summary
|
|||
else:
|
||||
_pkg_states = _pkg_states_available
|
||||
state = _pkg_states['i']
|
||||
ipkgs = self.sack.query().installed().filter(name=hpkg.name).run()
|
||||
ipkgs.sort()
|
||||
|
||||
# get installed packages with name = pkg.name
|
||||
ipkgs = self.sack.query().installed().filter(name=pkg.name).run()
|
||||
|
||||
if not ipkgs:
|
||||
state = _pkg_states['e']
|
||||
elif hpkg.pkgtup in (ipkg.pkgtup for ipkg in ipkgs):
|
||||
pass
|
||||
elif ipkgs[-1] > hpkg:
|
||||
state = _pkg_states['o']
|
||||
elif ipkgs[0] < hpkg:
|
||||
state = _pkg_states['n']
|
||||
else:
|
||||
assert False, "Impossible, installed not newer and not older"
|
||||
# get latest installed package from software database
|
||||
inst_pkg = self.history.package(ipkgs[0])
|
||||
|
||||
# result is:
|
||||
# 0 if inst_pkg == pkg
|
||||
# > 0 when inst_pkg > pkg
|
||||
# < 0 when inst_pkg < pkg
|
||||
res = pkg.compare(inst_pkg)
|
||||
|
||||
if res == 0:
|
||||
pass # installed
|
||||
elif res > 0:
|
||||
state = _pkg_states['o'] # updated
|
||||
else:
|
||||
state = _pkg_states['n'] # downgraded
|
||||
|
||||
if highlight:
|
||||
(hibeg, hiend) = self._highlight('bold')
|
||||
else:
|
||||
|
@ -1658,24 +1662,27 @@ Transaction Summary
|
|||
state = fill_exact_width(state, _pkg_states['maxlen'])
|
||||
ui_repo = ''
|
||||
if show_repo:
|
||||
ui_repo = self._hpkg2from_repo(hpkg)
|
||||
ui_repo = pkg.ui_from_repo()
|
||||
print("%s%s%s%s %-*s %s" % (prefix, hibeg, state, hiend,
|
||||
pkg_max_len, hpkg, ui_repo))
|
||||
pkg_max_len, str(pkg), ui_repo))
|
||||
|
||||
if isinstance(old.tid, list):
|
||||
print(_("Transaction ID :"), "%u..%u" % (old.tid[0], old.tid[-1]))
|
||||
tids = old.tids()
|
||||
if len(tids) > 1:
|
||||
print(_("Transaction ID :"), "%u..%u" % (tids[0], tids[-1]))
|
||||
else:
|
||||
print(_("Transaction ID :"), old.tid)
|
||||
begtm = time.strftime("%c", time.localtime(old.beg_timestamp))
|
||||
print(_("Transaction ID :"), tids[0])
|
||||
begt = float(old.beg_timestamp)
|
||||
begtm = time.strftime("%c", time.localtime(begt))
|
||||
print(_("Begin time :"), begtm)
|
||||
if old.beg_rpmdbversion is not None:
|
||||
if old.beg_rpmdb_version is not None:
|
||||
if old.altered_lt_rpmdb:
|
||||
print(_("Begin rpmdb :"), old.beg_rpmdbversion, "**")
|
||||
print(_("Begin rpmdb :"), old.beg_rpmdb_version, "**")
|
||||
else:
|
||||
print(_("Begin rpmdb :"), old.beg_rpmdbversion)
|
||||
print(_("Begin rpmdb :"), old.beg_rpmdb_version)
|
||||
if old.end_timestamp is not None:
|
||||
endtm = time.strftime("%c", time.localtime(old.end_timestamp))
|
||||
diff = old.end_timestamp - old.beg_timestamp
|
||||
endt = float(float(old.end_timestamp))
|
||||
endtm = time.strftime("%c", time.localtime(endt))
|
||||
diff = endt - begt
|
||||
if diff < 5 * 60:
|
||||
diff = _("(%u seconds)") % diff
|
||||
elif diff < 5 * 60 * 60:
|
||||
|
@ -1685,11 +1692,11 @@ Transaction Summary
|
|||
else:
|
||||
diff = _("(%u days)") % (diff // (60 * 60 * 24))
|
||||
print(_("End time :"), endtm, diff)
|
||||
if old.end_rpmdbversion is not None:
|
||||
if old.end_rpmdb_version is not None:
|
||||
if old.altered_gt_rpmdb:
|
||||
print(_("End rpmdb :"), old.end_rpmdbversion, "**")
|
||||
print(_("End rpmdb :"), old.end_rpmdb_version, "**")
|
||||
else:
|
||||
print(_("End rpmdb :"), old.end_rpmdbversion)
|
||||
print(_("End rpmdb :"), old.end_rpmdb_version)
|
||||
if isinstance(name, list):
|
||||
for name in name:
|
||||
print(_("User :"), name)
|
||||
|
@ -1716,8 +1723,8 @@ Transaction Summary
|
|||
else:
|
||||
print(_("Command Line :"), old.cmdline)
|
||||
|
||||
if not isinstance(old.tid, list):
|
||||
addon_info = self.history.return_addon_data(old.tid)
|
||||
if len(tids) == 1:
|
||||
addon_info = self.history.addon_data.read(tids[0])
|
||||
|
||||
# for the ones we create by default - don't display them as there
|
||||
default_addons = set(['config-main', 'config-repos', 'transaction-comment'])
|
||||
|
@ -1726,48 +1733,37 @@ Transaction Summary
|
|||
print(_("Additional non-default information stored: %d") % \
|
||||
len(non_default))
|
||||
|
||||
comment = self.history.return_addon_data(old.tid, item='transaction-comment')
|
||||
if comment:
|
||||
print(_("Comment :"), comment)
|
||||
comment = self.history.addon_data.read(old.tid, item='transaction-comment')
|
||||
if comment:
|
||||
print(_("Comment :"), comment)
|
||||
|
||||
if old.trans_with:
|
||||
# This is _possible_, but not common
|
||||
perf_with = old.performed_with()
|
||||
if perf_with:
|
||||
print(_("Transaction performed with:"))
|
||||
pkg_max_len = max((len(str(hpkg)) for hpkg in old.trans_with))
|
||||
for hpkg in old.trans_with:
|
||||
_simple_pkg(hpkg, 4, was_installed=True, pkg_max_len=pkg_max_len)
|
||||
max_len = 0
|
||||
for with_pkg in perf_with:
|
||||
str_len = len(str(with_pkg))
|
||||
if str_len > max_len:
|
||||
max_len = str_len
|
||||
for with_pkg in perf_with:
|
||||
_simple_pkg(with_pkg, 4, was_installed=True, pkg_max_len=max_len)
|
||||
|
||||
print(_("Packages Altered:"))
|
||||
|
||||
self.historyInfoCmdPkgsAltered(old, pats)
|
||||
|
||||
if old.trans_skip:
|
||||
print(_("Packages Skipped:"))
|
||||
pkg_max_len = max((len(str(hpkg)) for hpkg in old.trans_skip))
|
||||
for hpkg in old.trans_skip:
|
||||
# Don't show the repo. here because we can't store it as they were,
|
||||
# by definition, not installed.
|
||||
_simple_pkg(hpkg, 4, pkg_max_len=pkg_max_len, show_repo=False)
|
||||
|
||||
if old.rpmdb_problems:
|
||||
print(_("Rpmdb Problems:"))
|
||||
for prob in old.rpmdb_problems:
|
||||
key = "%s%s: " % (" " * 4, prob.problem)
|
||||
print(self.fmtKeyValFill(key, prob.text))
|
||||
if prob.packages:
|
||||
pkg_max_len = max((len(str(hpkg)) for hpkg in prob.packages))
|
||||
for hpkg in prob.packages:
|
||||
_simple_pkg(hpkg, 8, was_installed=True, highlight=hpkg.main,
|
||||
pkg_max_len=pkg_max_len)
|
||||
|
||||
if old.output:
|
||||
t_out = old.output()
|
||||
if t_out:
|
||||
print(_("Scriptlet output:"))
|
||||
num = 0
|
||||
for line in old.output:
|
||||
for line in t_out:
|
||||
num += 1
|
||||
print("%4d" % num, line)
|
||||
if old.errors:
|
||||
t_err = old.error()
|
||||
if t_err:
|
||||
print(_("Errors:"))
|
||||
num = 0
|
||||
for line in old.errors:
|
||||
for line in t_err:
|
||||
num += 1
|
||||
print("%4d" % num, line)
|
||||
|
||||
|
@ -1786,7 +1782,7 @@ Transaction Summary
|
|||
def historyInfoCmdPkgsAltered(self, old, pats=[]):
|
||||
"""Print information about how packages are altered in a transaction.
|
||||
|
||||
:param old: the :class:`history.YumHistoryTransaction` to
|
||||
:param old: the :class:`DnfSwdbTrans` to
|
||||
print information about
|
||||
:param pats: a list of patterns. Packages that match a patten
|
||||
in *pats* will be highlighted in the output
|
||||
|
@ -1798,50 +1794,50 @@ Transaction Summary
|
|||
all_uistates = self._history_state2uistate
|
||||
maxlen = 0
|
||||
pkg_max_len = 0
|
||||
for hpkg in old.trans_data:
|
||||
uistate = all_uistates.get(hpkg.state, hpkg.state)
|
||||
|
||||
packages = old.packages()
|
||||
|
||||
for pkg in packages:
|
||||
uistate = all_uistates.get(pkg.state, pkg.state)
|
||||
if maxlen < len(uistate):
|
||||
maxlen = len(uistate)
|
||||
if pkg_max_len < len(str(hpkg)):
|
||||
pkg_max_len = len(str(hpkg))
|
||||
pkg_len = len(str(pkg))
|
||||
if pkg_max_len < pkg_len:
|
||||
pkg_max_len = pkg_len
|
||||
|
||||
for hpkg in old.trans_data:
|
||||
for pkg in packages:
|
||||
prefix = " " * 4
|
||||
if not hpkg.done:
|
||||
if not pkg.done:
|
||||
prefix = " ** "
|
||||
|
||||
highlight = 'normal'
|
||||
if pats:
|
||||
x, m, u = dnf.yum.packages.parsePackages([hpkg], pats)
|
||||
if x or m:
|
||||
if any([pkg.match(pat) for pat in pats]):
|
||||
highlight = 'bold'
|
||||
(hibeg, hiend) = self._highlight(highlight)
|
||||
|
||||
# To chop the name off we need nevra strings, str(pkg) gives envra
|
||||
# so we have to do it by hand ... *sigh*.
|
||||
cn = hpkg.ui_nevra
|
||||
cn = str(pkg)
|
||||
|
||||
uistate = all_uistates.get(hpkg.state, hpkg.state)
|
||||
uistate = fill_exact_width(uistate, maxlen)
|
||||
# Should probably use columns here...
|
||||
if False: pass
|
||||
elif (last is not None and
|
||||
last.state == 'Updated' and last.name == hpkg.name and
|
||||
hpkg.state == 'Update'):
|
||||
ln = len(hpkg.name) + 1
|
||||
uistate = all_uistates.get(pkg.state, pkg.state)
|
||||
uistate = fill_exact_width(ucd(uistate), maxlen)
|
||||
|
||||
if (last is not None and last.state == 'Updated' and
|
||||
last.name == pkg.name and pkg.state == 'Update'):
|
||||
|
||||
ln = len(pkg.name) + 1
|
||||
cn = (" " * ln) + cn[ln:]
|
||||
elif (last is not None and
|
||||
last.state == 'Downgrade' and last.name == hpkg.name and
|
||||
hpkg.state == 'Downgraded'):
|
||||
ln = len(hpkg.name) + 1
|
||||
elif (last is not None and last.state == 'Downgrade' and
|
||||
last.name == pkg.name and pkg.state == 'Downgraded'):
|
||||
|
||||
ln = len(pkg.name) + 1
|
||||
cn = (" " * ln) + cn[ln:]
|
||||
else:
|
||||
last = None
|
||||
if hpkg.state in ('Updated', 'Downgrade'):
|
||||
last = hpkg
|
||||
if pkg.state in ('Updated', 'Downgrade'):
|
||||
last = pkg
|
||||
print("%s%s%s%s %-*s %s" % (prefix, hibeg, uistate, hiend,
|
||||
pkg_max_len, cn,
|
||||
self._hpkg2from_repo(hpkg)))
|
||||
pkg_max_len, str(pkg),
|
||||
pkg.ui_from_repo()))
|
||||
|
||||
def historyPackageListCmd(self, extcmds):
|
||||
"""Print a list of information about transactions from history
|
||||
|
@ -1868,7 +1864,8 @@ Transaction Summary
|
|||
fmt = "%6u | %s | %-50s"
|
||||
num = 0
|
||||
for old in self.history.old(tids, limit=limit):
|
||||
if limit is not None and num and (num +len(old.trans_data)) > limit:
|
||||
packages = old.packages()
|
||||
if limit and num and (num + len(packages)) > limit:
|
||||
break
|
||||
last = None
|
||||
|
||||
|
@ -1890,33 +1887,32 @@ Transaction Summary
|
|||
if old.altered_gt_rpmdb:
|
||||
lmark = '>'
|
||||
|
||||
for hpkg in old.trans_data: # Find a pkg to go with each cmd...
|
||||
# Find a pkg to go with each cmd...
|
||||
for pkg in packages:
|
||||
if limit is None:
|
||||
x, m, u = dnf.yum.packages.parsePackages([hpkg], extcmds)
|
||||
if not x and not m:
|
||||
if not any([pkg.match(pat) for pat in extcmds]):
|
||||
continue
|
||||
|
||||
uistate = all_uistates.get(hpkg.state, hpkg.state)
|
||||
uistate = all_uistates.get(pkg.state, pkg.state)
|
||||
uistate = fill_exact_width(uistate, 14)
|
||||
|
||||
# To chop the name off we need nevra strings, str(pkg) gives
|
||||
# envra so we have to do it by hand ... *sigh*.
|
||||
cn = hpkg.ui_nevra
|
||||
cn = pkg.ui_nevra
|
||||
|
||||
if (last is not None and
|
||||
last.state == 'Updated' and last.name == hpkg.name and
|
||||
hpkg.state == 'Update'):
|
||||
ln = len(hpkg.name) + 1
|
||||
if (last is not None and last.state == 'Updated' and
|
||||
last.name == pkg.name and pkg.state == 'Update'):
|
||||
ln = len(pkg.name) + 1
|
||||
cn = (" " * ln) + cn[ln:]
|
||||
elif (last is not None and
|
||||
last.state == 'Downgrade' and last.name == hpkg.name and
|
||||
hpkg.state == 'Downgraded'):
|
||||
ln = len(hpkg.name) + 1
|
||||
last.state == 'Downgrade' and last.name == pkg.name and
|
||||
pkg.state == 'Downgraded'):
|
||||
ln = len(pkg.name) + 1
|
||||
cn = (" " * ln) + cn[ln:]
|
||||
else:
|
||||
last = None
|
||||
if hpkg.state in ('Updated', 'Downgrade'):
|
||||
last = hpkg
|
||||
if pkg.state in ('Updated', 'Downgrade'):
|
||||
last = pkg
|
||||
|
||||
num += 1
|
||||
print(fmt % (old.tid, uistate, cn), "%s%s" % (lmark, rmark))
|
||||
|
|
103
dnf/comps.py
103
dnf/comps.py
|
@ -24,6 +24,7 @@ from __future__ import unicode_literals
|
|||
from dnf.exceptions import CompsError
|
||||
from dnf.i18n import _, ucd
|
||||
from functools import reduce
|
||||
from dnf.db.types import SwdbEnv, SwdbGroup
|
||||
|
||||
import dnf.i18n
|
||||
import dnf.util
|
||||
|
@ -507,39 +508,33 @@ class Solver(object):
|
|||
return pkgs
|
||||
|
||||
def _removable_pkg(self, pkg_name):
|
||||
prst = self.persistor
|
||||
count = 0
|
||||
if self._reason_fn(pkg_name) != 'group':
|
||||
return False
|
||||
for id_ in prst.groups:
|
||||
p_grp = prst.group(id_)
|
||||
count += sum(1 for pkg in p_grp.full_list if pkg == pkg_name)
|
||||
return count < 2
|
||||
return self.persistor.removable_pkg(pkg_name)
|
||||
|
||||
def _removable_grp(self, grp_name):
|
||||
prst = self.persistor
|
||||
count = 0
|
||||
if not prst.group(grp_name).installed:
|
||||
return False
|
||||
for id_ in prst.environments:
|
||||
p_env = prst.environment(id_)
|
||||
count += sum(1 for grp in p_env.full_list if grp == grp_name)
|
||||
for p_env in prst.environments():
|
||||
count += sum(1 for grp in p_env.get_group_list() if grp == grp_name)
|
||||
return count < 2
|
||||
|
||||
def _environment_install(self, env_id, pkg_types, exclude, strict=True):
|
||||
if isinstance(env_id, SwdbEnv):
|
||||
env_id = env_id.name_id
|
||||
env = self.comps._environment_by_id(env_id)
|
||||
p_env = self.persistor.environment(env_id)
|
||||
if p_env.installed:
|
||||
if p_env and p_env.installed:
|
||||
logger.warning(_("Environment '%s' is already installed.") %
|
||||
env.ui_name)
|
||||
else:
|
||||
grp_types = CONDITIONAL | DEFAULT | MANDATORY | OPTIONAL
|
||||
p_env = self.persistor.new_env(env_id, env.name, env.ui_name,
|
||||
pkg_types, grp_types)
|
||||
self.persistor.add_env(p_env)
|
||||
|
||||
p_env.grp_types = CONDITIONAL | DEFAULT | MANDATORY | OPTIONAL
|
||||
exclude = set() if exclude is None else set(exclude)
|
||||
p_env.name = env.name
|
||||
p_env.ui_name = env.ui_name
|
||||
p_env.pkg_exclude.extend(exclude)
|
||||
p_env.pkg_types = pkg_types
|
||||
p_env.full_list.extend(self._mandatory_group_set(env))
|
||||
exclude = list() if exclude is None else list(exclude)
|
||||
p_env.add_exclude(exclude)
|
||||
|
||||
trans = TransactionBunch()
|
||||
for grp in env.mandatory_groups:
|
||||
|
@ -547,38 +542,39 @@ class Solver(object):
|
|||
trans += self._group_install(grp.id, pkg_types, exclude, strict)
|
||||
except dnf.exceptions.CompsError:
|
||||
pass
|
||||
|
||||
p_env.add_group([grp.id for grp in env.mandatory_groups])
|
||||
return trans
|
||||
|
||||
def _environment_remove(self, env_id):
|
||||
if isinstance(env_id, SwdbEnv):
|
||||
env_id = env_id.name_id
|
||||
p_env = self.persistor.environment(env_id)
|
||||
if not p_env.installed:
|
||||
if not p_env or not p_env.installed:
|
||||
raise CompsError(_("Environment '%s' is not installed.") %
|
||||
p_env.ui_name)
|
||||
ucd(p_env.ui_name))
|
||||
|
||||
trans = TransactionBunch()
|
||||
group_ids = set(p_env.full_list)
|
||||
group_ids = set(p_env.get_group_list())
|
||||
|
||||
for grp in group_ids:
|
||||
if not self._removable_grp(grp):
|
||||
continue
|
||||
trans += self._group_remove(grp)
|
||||
|
||||
del p_env.full_list[:]
|
||||
del p_env.pkg_exclude[:]
|
||||
p_env.grp_types = 0
|
||||
p_env.pkg_types = 0
|
||||
return trans
|
||||
|
||||
def _environment_upgrade(self, env_id):
|
||||
if isinstance(env_id, SwdbEnv):
|
||||
env_id = env_id.name_id
|
||||
env = self.comps._environment_by_id(env_id)
|
||||
p_env = self.persistor.environment(env.id)
|
||||
if not p_env.installed:
|
||||
if not p_env or not p_env.installed:
|
||||
raise CompsError(_("Environment '%s' is not installed.") %
|
||||
env.ui_name)
|
||||
|
||||
old_set = set(p_env.full_list)
|
||||
old_set = set(p_env.get_group_list())
|
||||
pkg_types = p_env.pkg_types
|
||||
exclude = p_env.pkg_exclude
|
||||
exclude = p_env.get_exclude()
|
||||
|
||||
trans = TransactionBunch()
|
||||
for grp in env.mandatory_groups:
|
||||
|
@ -595,51 +591,58 @@ class Solver(object):
|
|||
return trans
|
||||
|
||||
def _group_install(self, group_id, pkg_types, exclude, strict=True):
|
||||
if isinstance(group_id, SwdbGroup):
|
||||
group_id = group_id.name_id
|
||||
group = self.comps._group_by_id(group_id)
|
||||
if not group:
|
||||
raise ValueError(_("Group_id '%s' does not exist.") % ucd(group_id))
|
||||
raise ValueError(_("Group_id '%s' does not exist.") %
|
||||
ucd(group_id))
|
||||
# this will return DnfSwdbGroup object
|
||||
p_grp = self.persistor.group(group_id)
|
||||
if p_grp.installed:
|
||||
if p_grp and p_grp.installed:
|
||||
logger.warning(_("Group '%s' is already installed.") %
|
||||
group.ui_name)
|
||||
else:
|
||||
p_grp = self.persistor.new_group(group_id, group.name,
|
||||
group.ui_name, 0, pkg_types)
|
||||
self.persistor.add_group(p_grp)
|
||||
self.persistor.install_group(p_grp)
|
||||
|
||||
exclude = set() if exclude is None else set(exclude)
|
||||
p_grp.name = group.name
|
||||
p_grp.ui_name = group.ui_name
|
||||
p_grp.pkg_exclude.extend(exclude)
|
||||
p_grp.pkg_types = pkg_types
|
||||
p_grp.full_list.extend(self._full_package_set(group))
|
||||
exclude = list() if exclude is None else list(exclude)
|
||||
p_grp.add_exclude(exclude)
|
||||
p_grp.add_package(list(self._full_package_set(group)))
|
||||
|
||||
trans = TransactionBunch()
|
||||
trans.install.update(self._pkgs_of_type(group, pkg_types, exclude))
|
||||
return trans
|
||||
|
||||
def _group_remove(self, group_id):
|
||||
if isinstance(group_id, SwdbGroup):
|
||||
group_id = group_id.name_id
|
||||
p_grp = self.persistor.group(group_id)
|
||||
if not p_grp.installed:
|
||||
if not p_grp or not p_grp.installed:
|
||||
raise CompsError(_("Group '%s' not installed.") %
|
||||
p_grp.ui_name)
|
||||
ucd(p_grp.ui_name))
|
||||
|
||||
trans = TransactionBunch()
|
||||
exclude = p_grp.pkg_exclude
|
||||
trans.remove = {pkg for pkg in p_grp.full_list
|
||||
exclude = p_grp.get_exclude()
|
||||
trans.remove = {pkg for pkg in p_grp.get_full_list()
|
||||
if pkg not in exclude and self._removable_pkg(pkg)}
|
||||
p_grp.pkg_types = 0
|
||||
del p_grp.full_list[:]
|
||||
del p_grp.pkg_exclude[:]
|
||||
self.persistor.remove_group(p_grp)
|
||||
return trans
|
||||
|
||||
def _group_upgrade(self, group_id):
|
||||
if isinstance(group_id, SwdbGroup):
|
||||
group_id = group_id.name_id
|
||||
group = self.comps._group_by_id(group_id)
|
||||
p_grp = self.persistor.group(group.id)
|
||||
if not p_grp.installed:
|
||||
if not p_grp or not p_grp.installed:
|
||||
raise CompsError(_("Group '%s' not installed.") %
|
||||
group.ui_name)
|
||||
exclude = set(p_grp.pkg_exclude)
|
||||
old_set = set(p_grp.full_list)
|
||||
exclude = set(p_grp.get_exclude())
|
||||
old_set = set(p_grp.get_full_list())
|
||||
new_set = self._pkgs_of_type(group, p_grp.pkg_types, exclude)
|
||||
del p_grp.full_list[:]
|
||||
p_grp.full_list.extend(self._full_package_set(group))
|
||||
p_grp.update_full_list(list(self._full_package_set(group)))
|
||||
|
||||
trans = TransactionBunch()
|
||||
trans.install = {pkg for pkg in new_set if pkg.name not in old_set}
|
||||
|
|
|
@ -645,6 +645,7 @@ class MainConf(BaseConfig):
|
|||
self._add_option('pluginconfpath',
|
||||
ListOption([dnf.const.PLUGINCONFPATH])) # :api
|
||||
self._add_option('persistdir', PathOption(dnf.const.PERSISTDIR)) # :api
|
||||
self._add_option('transformdb', BoolOption(True)) # :api
|
||||
self._add_option('recent', IntOption(7, range_min=0))
|
||||
self._add_option('retries', PositiveIntOption(10, names_of_0=["0"]))
|
||||
self._add_option('reset_nice', BoolOption(True))
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
FILE(GLOB db_srcs *.py)
|
||||
INSTALL (FILES ${db_srcs} DESTINATION ${PYTHON_INSTALL_DIR}/dnf/db)
|
|
@ -0,0 +1,17 @@
|
|||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# DNF database subpackage
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
|
@ -0,0 +1,90 @@
|
|||
# Copyright (C) 2009, 2012-2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# James Antill <james@fedoraproject.org>
|
||||
|
||||
import os
|
||||
import glob
|
||||
import time
|
||||
from dnf.yum import misc
|
||||
|
||||
|
||||
class _addondata(object):
|
||||
def __init__(self, db_path, root='/'):
|
||||
self.conf = misc.GenericHolder()
|
||||
self.conf.writable = False
|
||||
self._db_date = time.strftime("%Y-%m-%d")
|
||||
if not os.path.normpath(db_path).startswith(root):
|
||||
self.conf.db_path = os.path.normpath(root + '/' + db_path)
|
||||
else:
|
||||
self.conf.db_path = os.path.normpath('/' + db_path)
|
||||
|
||||
self.conf.addon_path = self.conf.db_path + '/' + self._db_date
|
||||
|
||||
def write(self, dataname, data):
|
||||
"""append data to an arbitrary-named file in the history
|
||||
addon_path/transaction id location,
|
||||
returns True if write succeeded, False if not"""
|
||||
|
||||
if not hasattr(self, '_tid'):
|
||||
# maybe we should raise an exception or a warning here?
|
||||
return False
|
||||
|
||||
if not dataname:
|
||||
return False
|
||||
|
||||
if not data:
|
||||
return False
|
||||
|
||||
# make sure the tid dir exists
|
||||
tid_dir = self.conf.addon_path + '/' + str(self._tid)
|
||||
|
||||
if self.conf.writable and not os.path.exists(tid_dir):
|
||||
try:
|
||||
os.makedirs(tid_dir, mode=0o700)
|
||||
except (IOError, OSError) as e:
|
||||
# emit a warning/raise an exception?
|
||||
return False
|
||||
|
||||
# cleanup dataname
|
||||
safename = dataname.replace('/', '_')
|
||||
data_fn = tid_dir + '/' + safename
|
||||
try:
|
||||
# open file in append
|
||||
fo = open(data_fn, 'wb+')
|
||||
# write data
|
||||
fo.write(data.encode('utf-8'))
|
||||
# flush data
|
||||
fo.flush()
|
||||
fo.close()
|
||||
except (IOError, OSError) as e:
|
||||
return False
|
||||
# return
|
||||
return True
|
||||
|
||||
def read(self, tid, item=None):
|
||||
hist_and_tid = self.conf.addon_path + '/' + str(tid) + '/'
|
||||
addon_info = glob.glob(hist_and_tid + '*')
|
||||
addon_names = [i.replace(hist_and_tid, '') for i in addon_info]
|
||||
if not item:
|
||||
return addon_names
|
||||
if item not in addon_names:
|
||||
# XXX history needs SOME kind of exception, or warning, I think?
|
||||
return None
|
||||
fo = open(hist_and_tid + item, 'r')
|
||||
data = fo.read()
|
||||
fo.close()
|
||||
return data
|
|
@ -0,0 +1,98 @@
|
|||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Eduard Cuba <ecuba@redhat.com>
|
||||
|
||||
from dnf.db.types import SwdbGroup, SwdbEnv
|
||||
|
||||
|
||||
class GroupPersistor(object):
    """Buffer comps group/environment changes and flush them to SWDB."""

    def __init__(self, swdb):
        self.swdb = swdb
        # pending changes, flushed by commit()
        self.groups_installed = []
        self.groups_removed = []

    def commit(self):
        """Flush pending removals and installations to the database."""
        for grp in self.groups_removed:
            self.swdb.uninstall_group(grp)
        if self.groups_installed:
            self.swdb.groups_commit(self.groups_installed)
        self.groups_installed = []
        self.groups_removed = []

    def install_group(self, group, commit=False):
        """Queue *group* for installation; flush at once when *commit*."""
        self.groups_installed.append(group)
        if commit:
            self.commit()

    def remove_group(self, group, commit=False):
        """Queue *group* for removal; flush at once when *commit*."""
        self.groups_removed.append(group)
        if commit:
            self.commit()

    def new_group(self, name_id, name, ui_name, installed, pkg_types):
        """Create a fresh SwdbGroup bound to this database."""
        return SwdbGroup.new(name_id, name, ui_name, installed,
                             pkg_types, self.swdb)

    def new_env(self, name_id, name, ui_name, pkg_types, grp_types):
        """Create a fresh SwdbEnv bound to this database."""
        return SwdbEnv.new(name_id, name, ui_name, pkg_types,
                           grp_types, self.swdb)

    def environment(self, name_id):
        """Fetch an environment by id string or SwdbEnv instance."""
        key = name_id.name_id if isinstance(name_id, SwdbEnv) else name_id
        return self.swdb.get_env(key)

    def environments(self):
        """Return all environments ("%" is the sqlite3 wildcard)."""
        return self.swdb.env_by_pattern("%")

    def environments_by_pattern(self, pattern, case_sensitive=False):
        # NOTE(review): case_sensitive is currently not forwarded
        return self.swdb.env_by_pattern(pattern)

    def group(self, gid):
        """Fetch a group by id string or SwdbGroup instance."""
        if isinstance(gid, SwdbGroup):
            gid = gid.name_id
        return self.swdb.get_group(gid)

    def group_installed(self, group_id):
        # :api
        """Find out whether group is installed"""
        grp = self.group(group_id)
        return grp.installed if grp else False

    def environment_installed(self, env_id):
        # :api
        """Find out whether environment is installed"""
        env = self.environment(env_id)
        return env.installed if env else False

    def groups(self):
        """Return all groups ("%" is the sqlite3 wildcard)."""
        return self.swdb.groups_by_pattern("%")

    def groups_by_pattern(self, pattern, case_sensitive=False):
        # NOTE(review): case_sensitive is currently not forwarded
        return self.swdb.groups_by_pattern(pattern)

    def add_group(self, group, commit=False):
        """Register *group* in the database, optionally installing it."""
        self.swdb.add_group(group)
        if commit:
            self.install_group(group, True)

    def add_env(self, env):
        """Register environment *env* in the database."""
        return self.swdb.add_env(env)

    def removable_pkg(self, pkg_name):
        """Ask SWDB whether *pkg_name* may be removed with its group."""
        return self.swdb.removable_pkg(pkg_name)
|
|
@ -0,0 +1,284 @@
|
|||
# Copyright (C) 2009, 2012-2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# James Antill <james@fedoraproject.org>
|
||||
# Eduard Cuba <ecuba@redhat.com>
|
||||
|
||||
from dnf.i18n import ucd
|
||||
import time
|
||||
import os
|
||||
from dnf.yum import misc
|
||||
from .swdb_transformer import run as transformdb
|
||||
from .addondata import _addondata
|
||||
from .group import GroupPersistor
|
||||
from .types import Swdb, SwdbRpmData, SwdbPkg, SwdbItem, convert_reason
|
||||
|
||||
|
||||
class SwdbInterface(object):
    """High-level wrapper around the libdnf Swdb database.

    Holds the open Swdb handle, the file-based addon-data store and a
    lazily created GroupPersistor.  A running transaction is tracked by
    the ``self._tid`` attribute, which exists only between beg() and
    end().
    """

    def __init__(self, db_path, root='/', releasever="", transform=True):
        # database file lives at <root>/<db_path>/swdb.sqlite
        self.path = os.path.join(root, db_path, "swdb.sqlite")
        self.releasever = str(releasever)
        self.swdb = Swdb.new(self.path, self.releasever)
        self.addon_data = _addondata(db_path, root)
        # GroupPersistor, created lazily by the `group` property
        self._group = None
        if not self.swdb.exist():
            dbdir = os.path.dirname(self.path)
            if not os.path.exists(dbdir):
                os.makedirs(dbdir)
            self.swdb.create_db()
            # migrate legacy yumdb/history data into the fresh database;
            # does nothing when there is nothing to transform
            if transform:
                transformdb(output_file=self.swdb.get_path())

    @property
    def group(self):
        """Lazily instantiated GroupPersistor bound to this database."""
        if self._group is None:
            self._group = GroupPersistor(self.swdb)
        return self._group

    def group_active(self):
        """Return True when the GroupPersistor has been instantiated."""
        return self._group is not None

    def reset_group(self):
        """Drop pending group changes by creating a fresh persistor."""
        self._group = GroupPersistor(self.swdb)

    def close(self):
        """Close the underlying Swdb database handle."""
        return self.swdb.close()

    def add_package(self, pkg):
        """Insert *pkg* into the database."""
        return self.swdb.add_package(pkg)

    def add_package_data(self, pid, package_data):
        """Attach *package_data* to the package with id *pid*."""
        return self.swdb.log_package_data(pid, package_data)

    def reset_db(self):
        """Reset the database content."""
        return self.swdb.reset_db()

    def get_path(self):
        """Return the path of the database file."""
        return self.swdb.get_path()

    def last(self, complete_transactions_only=True):
        """Return the most recent transaction."""
        return self.swdb.last(complete_transactions_only)

    def set_repo(self, pkg, repo):
        """Set repository for package"""
        return self.swdb.set_repo(str(pkg), repo)

    def checksums(self, packages):
        """Get checksum list of desired packages.
        Returns: List is in format
        [checksum1_type, checksum1_data, checksum2_type, checksum2_data, ...]
        """
        return self.swdb.checksums([str(pkg) for pkg in packages])

    def old(self, tids=[], limit=0, complete_transactions_only=False):
        """Return stored transactions, optionally restricted to *tids*.

        NOTE: the mutable default is harmless here — tids is copied
        with list() before any modification.
        """
        tids = list(tids)
        if tids and not isinstance(tids[0], int):
            # normalize string ids (e.g. from the command line) to ints
            for i, value in enumerate(tids):
                tids[i] = int(value)
        return self.swdb.trans_old(tids, limit, complete_transactions_only)

    def _log_group_trans(self, tid):
        # record pending group changes as part of transaction *tid*
        installed = self.group.groups_installed
        removed = self.group.groups_removed
        self.swdb.log_group_trans(tid, installed, removed)

    def set_reason(self, pkg, reason):
        """Set reason for package"""
        return self.swdb.set_reason(str(pkg), reason)

    def package(self, pkg):
        """Get SwdbPackage from package"""
        return self.swdb.package(str(pkg))

    def repo(self, pkg):
        """Get repository of package"""
        return self.swdb.repo(str(pkg))

    def package_data(self, pkg):
        """Get package data for package"""
        return self.swdb.package_data(str(pkg))

    def reason(self, pkg):
        """Get reason for package"""
        return self.swdb.reason(str(pkg))

    def ipkg_to_rpmdata(self, ipkg):
        """Build an SwdbRpmData record from installed package *ipkg*.

        Attributes missing on *ipkg* are stored as empty strings.
        """
        pid = self.pkg2pid(ipkg, create=False)
        rpmdata = SwdbRpmData.new(
            pid,
            str((getattr(ipkg, "buildtime", None) or '')),
            str((getattr(ipkg, "buildhost", None) or '')),
            str((getattr(ipkg, "license", None) or '')),
            str((getattr(ipkg, "packager", None) or '')),
            str((getattr(ipkg, "size", None) or '')),
            str((getattr(ipkg, "sourcerpm", None) or '')),
            str((getattr(ipkg, "url", None) or '')),
            str((getattr(ipkg, "vendor", None) or '')),
            str((getattr(ipkg, "committer", None) or '')),
            str((getattr(ipkg, "committime", None) or ''))
        )
        return rpmdata

    def ipkg_to_pkg(self, ipkg):
        """Convert installed package *ipkg* into a new SwdbPkg."""
        try:
            csum = ipkg.returnIdSum()
        except AttributeError:
            # package carries no checksum information
            csum = ('', '')
        pkgtup = map(ucd, ipkg.pkgtup)
        (n, a, e, v, r) = pkgtup
        pkg = SwdbPkg.new(
            n,
            int(e),
            v,
            r,
            a,
            csum[1] or '',
            csum[0] or '',
            SwdbItem.RPM)
        return pkg

    def beg(self, rpmdb_version, using_pkgs, tsis, cmdline=None):
        """Open a new transaction and remember its id in self._tid.

        Logs the packages used to perform the transaction (trans_with),
        pending group changes and the per-package transaction data.
        """
        tid = self.swdb.trans_beg(
            str(int(time.time())),
            str(rpmdb_version),
            cmdline or "",
            str(misc.getloginuid()),
            self.releasever)

        self._tid = tid

        for pkg in using_pkgs:
            pid = self.pkg2pid(pkg)
            self.swdb.trans_with(tid, pid)

        # NOTE(review): `self.group` is a lazy property that always
        # yields a persistor, so this branch is effectively always taken
        if self.group:
            self._log_group_trans(tid)

        for tsi in tsis:
            for (pkg, state) in tsi._history_iterator():
                pid = self.pkg2pid(pkg)
                self.swdb.trans_data_beg(
                    tid,
                    pid,
                    convert_reason(tsi.reason),
                    state
                )

    def pkg2pid(self, po, create=True):
        """Return the database id of package *po*.

        Falls back to a nevra lookup; when *create* is True a missing
        package is inserted and its new id returned.
        """
        if hasattr(po, 'pid') and po.pid:
            return po.pid
        # try to find package in DB by its nevra
        pid = self.swdb.pid_by_nevra(str(po))
        if pid or not create:
            return pid
        # pkg not found in db - create new object
        if not isinstance(po, SwdbPkg):
            po = self.ipkg_to_pkg(po)
        return self.swdb.add_package(po)

    def log_scriptlet_output(self, msg):
        """Store scriptlet output lines for the running transaction."""
        if msg is None or not hasattr(self, '_tid'):
            return  # Not configured to run
        for error in msg.splitlines():
            error = ucd(error)
            self.swdb.log_output(self._tid, error)

    def trans_data_pid_end(self, pid, state):
        """Mark package *pid* done with *state* in the open transaction."""
        if not hasattr(self, '_tid') or state is None:
            return
        self.swdb.trans_data_pid_end(pid, self._tid, state)

    def _log_errors(self, errors):
        # store error lines against the current transaction
        for error in errors:
            error = ucd(error)
            self.swdb.log_error(self._tid, error)

    def end(self, end_rpmdb_version="", return_code=0, errors=None):
        """Close the running transaction and forget self._tid."""
        assert return_code or not errors
        if not hasattr(self, '_tid'):
            return  # Failed at beg() time
        self.swdb.trans_end(
            self._tid,
            str(int(time.time())),
            str(end_rpmdb_version),
            return_code
        )
        if errors is not None:
            self._log_errors(errors)
        del self._tid

    def mark_user_installed(self, pkg, mark):
        """(Un)mark package as user installed"""
        return self.swdb.mark_user_installed(str(pkg), mark)

    def _save_rpmdb(self, ipkg):
        """ Save all the data for rpmdb for this installed pkg, assumes
        there is no data currently. """
        return not self.swdb.add_rpm_data(self.ipkg_to_rpmdata(ipkg))

    def add_pkg_data(self, ipkg, pkg_data):
        """ Save all the data for yumdb for this installed pkg, assumes
        there is no data currently. """
        pid = self.pkg2pid(ipkg, create=False)
        if pid:
            # FIXME: resolve installonly
            return self.swdb.log_package_data(pid, pkg_data)

    def sync_alldb(self, ipkg, pkg_data):
        """ Sync. all the data for rpmdb/yumdb for this installed pkg. """
        return self._save_rpmdb(ipkg) and self.add_pkg_data(ipkg, pkg_data)

    def search(self, patterns, ignore_case=True):
        """ Search for history transactions which contain specified
        packages al. la. "yum list". Returns transaction ids. """
        # NOTE(review): ignore_case is not forwarded to the backend call
        return self.swdb.search(patterns)

    def user_installed(self, pkg):
        """Returns True if package is user installed"""
        return self.swdb.user_installed(str(pkg))

    def select_user_installed(self, pkgs):
        """Select user installed packages from list of pkgs"""

        # swdb.select_user_installed returns indexes of user installed packages
        return [pkgs[i] for i in self.swdb.select_user_installed([str(pkg) for pkg in pkgs])]

    def get_packages_by_tid(self, tid):
        """Return packages of one transaction id, or of a list of ids."""
        if isinstance(tid, list):
            packages = []
            for t in tid:
                packages += self.swdb.get_packages_by_tid(t)
            return packages
        return self.swdb.get_packages_by_tid(tid)

    def trans_cmdline(self, tid):
        """Return the command line of one transaction id, or a list of
        command lines for a list of ids."""
        if isinstance(tid, list):
            cmdlines = []
            for t in tid:
                cmdlines.append(self.swdb.trans_cmdline(t))
            return cmdlines
        return self.swdb.trans_cmdline(tid)

    def get_erased_reason(self, pkg, first_trans, rollback):
        """Get reason of package before transaction being undone. If package
        is already installed in the system, keep his reason.

        :param pkg: package being installed
        :param first_trans: id of first transaction being undone
        :param rollback: True if transaction is performing a rollback"""
        return self.swdb.get_erased_reason(str(pkg), first_trans, rollback)
|
|
@ -0,0 +1,649 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (C) 2016 Red Hat, Inc.
|
||||
# Author: Eduard Cuba <xcubae00@stud.fit.vutbr.cz>
|
||||
# This copyrighted material is made available to anyone wishing to use,
|
||||
# modify, copy, or redistribute it subject to the terms and conditions of
|
||||
# the GNU General Public License v.2, or (at your option) any later version.
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY expressed or implied, including the implied warranties of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details. You should have received a copy of the
|
||||
# GNU General Public License along with this program; if not, write to the
|
||||
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
|
||||
# source code or documentation are not subject to the GNU General Public
|
||||
# License and may only be used or replicated with the express permission of
|
||||
# Red Hat, Inc.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sqlite3
|
||||
import glob
|
||||
import json
|
||||
from .types import SwdbItem, convert_reason
|
||||
|
||||
|
||||
def CONSTRUCT_NAME(row):
    """Build "name-version-release-arch" from a PACKAGE table row
    (row layout: P_ID, name, epoch, version, release, arch, ...)."""
    return '-'.join((row[1], row[3], row[4], row[5]))
|
||||
|
||||
|
||||
def PACKAGE_DATA_INSERT(cursor, data):
    """Insert one PACKAGE_DATA row; PD_ID is auto-assigned (null)."""
    sql = 'INSERT INTO PACKAGE_DATA VALUES (null,?,?,?,?,?,?,?)'
    cursor.execute(sql, data)
|
||||
|
||||
|
||||
def RPM_DATA_INSERT(cursor, data):
    """Insert one RPM_DATA row; the primary key is auto-assigned (null)."""
    sql = 'INSERT INTO RPM_DATA VALUES (null,?,?,?,?,?,?,?,?,?,?,?)'
    cursor.execute(sql, data)
|
||||
|
||||
|
||||
def TRANS_DATA_INSERT(cursor, data):
    """Insert one TRANS_DATA row; TD_ID is auto-assigned (null)."""
    sql = 'INSERT INTO TRANS_DATA VALUES (null,?,?,?,?,?,?,?)'
    cursor.execute(sql, data)
|
||||
|
||||
|
||||
def TRANS_INSERT(cursor, data):
    """Insert one TRANS row; all nine columns are supplied by *data*."""
    sql = 'INSERT INTO TRANS VALUES (?,?,?,?,?,?,?,?,?)'
    cursor.execute(sql, data)
|
||||
|
||||
|
||||
# create binding with repo - returns R_ID
|
||||
# create binding with repo - returns R_ID
def BIND_REPO(cursor, name):
    """Return the R_ID for repository *name*, inserting a new REPO row
    when the name is not present yet."""
    cursor.execute('SELECT R_ID FROM REPO WHERE name=?', (name, ))
    row = cursor.fetchone()
    if row is None:
        cursor.execute('INSERT INTO REPO VALUES(null,?,0,0)', (name, ))
        # sqlite3 exposes the auto-assigned key directly, which avoids
        # the extra "SELECT last_insert_rowid()" round trip
        return cursor.lastrowid
    return row[0]
|
||||
|
||||
|
||||
# create binding with STATE_TYPE - returns ID
|
||||
# create binding with STATE_TYPE - returns ID
def BIND_STATE(cursor, desc):
    """Return the state id for description *desc*, inserting a new
    STATE_TYPE row when the description is not present yet."""
    cursor.execute('SELECT state FROM STATE_TYPE WHERE description=?', (desc, ))
    row = cursor.fetchone()
    if row is None:
        cursor.execute('INSERT INTO STATE_TYPE VALUES(null,?)', (desc, ))
        # lastrowid avoids the extra "SELECT last_insert_rowid()" query
        return cursor.lastrowid
    return row[0]
|
||||
|
||||
|
||||
# create binding with OUTPUT_TYPE - returns ID
|
||||
# create binding with OUTPUT_TYPE - returns ID
def BIND_OUTPUT(cursor, desc):
    """Return the output-type id for description *desc*, inserting a new
    OUTPUT_TYPE row when the description is not present yet."""
    cursor.execute('SELECT type FROM OUTPUT_TYPE WHERE description=?', (desc, ))
    row = cursor.fetchone()
    if row is None:
        cursor.execute('INSERT INTO OUTPUT_TYPE VALUES(null,?)', (desc, ))
        # lastrowid avoids the extra "SELECT last_insert_rowid()" query
        return cursor.lastrowid
    return row[0]
|
||||
|
||||
|
||||
# groups packages bindings
|
||||
# groups packages bindings
def ADD_GROUPS_PACKAGE(cursor, gid, name):
    """Bind package *name* to group *gid* in GROUPS_PACKAGE."""
    values = (gid, name)
    cursor.execute('INSERT INTO GROUPS_PACKAGE VALUES(null,?,?)', values)
|
||||
|
||||
|
||||
def ADD_GROUPS_EXCLUDE(cursor, gid, name):
    """Record package *name* as excluded from group *gid*."""
    values = (gid, name)
    cursor.execute('INSERT INTO GROUPS_EXCLUDE VALUES(null,?,?)', values)
|
||||
|
||||
|
||||
# env exclude
|
||||
# env exclude
def ADD_ENV_EXCLUDE(cursor, eid, name):
    """Record group *name* as excluded from environment *eid*."""
    values = (eid, name)
    cursor.execute('INSERT INTO ENVIRONMENTS_EXCLUDE VALUES(null,?,?)',
                   values)
|
||||
|
||||
|
||||
# bind enviroment with groups
|
||||
# bind enviroment with groups
def BIND_ENV_GROUP(cursor, eid, name_id):
    """Link environment *eid* to the group named *name_id*; silently a
    no-op when no such group exists in GROUPS."""
    cursor.execute('SELECT G_ID FROM GROUPS WHERE name_id=?', (name_id, ))
    found = cursor.fetchone()
    if not found:
        return
    cursor.execute('INSERT INTO ENVIRONMENTS_GROUPS VALUES(null,?,?)',
                   (eid, found[0]))
|
||||
|
||||
|
||||
# integrity optimalization
|
||||
# integrity optimalization
def BIND_PID_PDID(cursor, pid):
    """Return the PD_ID bound to package *pid*, creating an empty
    PACKAGE_DATA row when none exists yet."""
    cursor.execute('SELECT PD_ID FROM PACKAGE_DATA WHERE P_ID=?', (pid, ))
    row = cursor.fetchone()
    if row is None:
        cursor.execute('INSERT INTO PACKAGE_DATA VALUES(null,?,?,?,?,?,?,?)',
                       (pid, '', '', '', '', '', ''))
        # lastrowid avoids the extra "SELECT last_insert_rowid()" query
        return cursor.lastrowid
    return row[0]
|
||||
|
||||
|
||||
# YUMDB
|
||||
# YUMDB
def GET_YUMDB_PACKAGES(cursor, yumdb_path, PACKAGE_DATA):
    """Migrate per-package metadata from the on-disk yumdb tree into the
    PACKAGE_DATA / TRANS_DATA / TRANS tables.

    :param cursor: cursor on the target SWDB sqlite database
    :param yumdb_path: root of the legacy yumdb directory tree
    :param PACKAGE_DATA: ordered list of PACKAGE_DATA column names, used
        to map yumdb file names onto positions in the record
    """
    pkglist = {}
    # get package list of yumdb; subdir is "<prefix>-<n-v-r-a>", keep
    # the n-v-r-a part as the key and the relative path as the value
    for dir in os.listdir(yumdb_path):
        for subdir in os.listdir(os.path.join(yumdb_path, dir)):
            pkglist[subdir.partition('-')[2]] = os.path.join(dir, subdir)

    # fetching aditional values from directory yumdb
    cursor.execute('SELECT * FROM PACKAGE')
    allrows = cursor.fetchall()

    for row in allrows:
        name = CONSTRUCT_NAME(row)
        if name in pkglist:
            record_PD = [None] * len(PACKAGE_DATA)
            path = os.path.join(yumdb_path, pkglist[name])
            tmp_reason = ''
            tmp_releasever = ''
            tmp_cmdline = ''
            # each metadata item is a small file named after its key
            for file in os.listdir(path):
                if file in PACKAGE_DATA:
                    with open(os.path.join(path, file)) as f:
                        record_PD[PACKAGE_DATA.index(file)] = f.read()
                elif file == "from_repo":
                    # create binding with REPO table
                    with open(os.path.join(path, file)) as f:
                        record_PD[PACKAGE_DATA.index("R_ID")] = BIND_REPO(
                            cursor,
                            f.read())
                # some additional data
                elif file == "reason":
                    with open(os.path.join(path, file)) as f:
                        tmp_reason = convert_reason(f.read())
                elif file == "releasever":
                    with open(os.path.join(path, file)) as f:
                        tmp_releasever = f.read()
                elif file == "command_line":
                    with open(os.path.join(path, file)) as f:
                        tmp_cmdline = f.read()

            # PD_ID bound to this package (row created on demand)
            actualPDID = BIND_PID_PDID(cursor, row[0])

            # update each collected column individually so that absent
            # yumdb files leave the existing database value untouched
            if record_PD[PACKAGE_DATA.index('R_ID')]:
                cursor.execute('UPDATE PACKAGE_DATA SET R_ID=? WHERE PD_ID=?',
                               (record_PD[PACKAGE_DATA.index('R_ID')],
                                actualPDID))

            if record_PD[PACKAGE_DATA.index('from_repo_revision')]:
                cursor.execute('''UPDATE PACKAGE_DATA SET from_repo_revision=?
                               WHERE PD_ID=?''',
                               (record_PD[PACKAGE_DATA.index(
                                   'from_repo_revision')],
                                actualPDID))

            if record_PD[PACKAGE_DATA.index('from_repo_timestamp')]:
                cursor.execute('''UPDATE PACKAGE_DATA SET from_repo_timestamp=?
                               WHERE PD_ID=?''',
                               (record_PD[PACKAGE_DATA.index(
                                   'from_repo_timestamp')],
                                actualPDID))

            if record_PD[PACKAGE_DATA.index('installed_by')]:
                cursor.execute('''UPDATE PACKAGE_DATA SET installed_by=?
                               WHERE PD_ID=?''',
                               (record_PD[PACKAGE_DATA.index('installed_by')],
                                actualPDID))

            if record_PD[PACKAGE_DATA.index('changed_by')]:
                cursor.execute('''UPDATE PACKAGE_DATA SET changed_by=?
                               WHERE PD_ID=?''',
                               (record_PD[PACKAGE_DATA.index('changed_by')],
                                actualPDID))

            if record_PD[PACKAGE_DATA.index('installonly')]:
                cursor.execute('''UPDATE PACKAGE_DATA SET installonly=?
                               WHERE PD_ID=?''',
                               (record_PD[PACKAGE_DATA.index('installonly')],
                                actualPDID))

            # other tables: reason lives in TRANS_DATA, releasever and
            # cmdline belong to the transaction the package was part of
            if tmp_reason:
                cursor.execute('UPDATE TRANS_DATA SET reason=? WHERE PD_ID=?',
                               (tmp_reason, actualPDID))
            if tmp_releasever:
                cursor.execute('SELECT T_ID FROM TRANS_DATA WHERE PD_ID=?',
                               (actualPDID,))
                tmp_tid = cursor.fetchone()
                if tmp_tid:
                    cursor.execute('''UPDATE TRANS SET releasever=?
                                   WHERE T_ID=?''',
                                   (tmp_releasever, tmp_tid[0]))

            if tmp_cmdline:
                cursor.execute('SELECT T_ID FROM TRANS_DATA WHERE PD_ID=?',
                               (actualPDID,))

                tmp_tid = cursor.fetchone()
                if tmp_tid:
                    cursor.execute('UPDATE TRANS SET cmdline=? WHERE T_ID=?',
                                   (tmp_cmdline, tmp_tid[0]))
|
||||
|
||||
|
||||
def run(input_dir='/var/lib/dnf/', output_file='/var/lib/dnf/history/swdb.sqlite'):
|
||||
yumdb_path = os.path.join(input_dir, 'yumdb')
|
||||
history_path = os.path.join(input_dir, 'history')
|
||||
groups_path = os.path.join(input_dir, 'groups.json')
|
||||
|
||||
# check path to yumdb dir
|
||||
if not os.path.isdir(yumdb_path):
|
||||
sys.stderr.write('Error: yumdb directory not valid\n')
|
||||
return False
|
||||
|
||||
# check path to history dir
|
||||
if not os.path.isdir(history_path):
|
||||
sys.stderr.write('Error: history directory not valid\n')
|
||||
return False
|
||||
|
||||
# check historyDB file and pick newest one
|
||||
historydb_file = glob.glob(os.path.join(history_path, "history*"))
|
||||
if len(historydb_file) < 1:
|
||||
sys.stderr.write('Error: history database file not valid\n')
|
||||
return False
|
||||
historydb_file.sort()
|
||||
historydb_file = historydb_file[0]
|
||||
|
||||
if not os.path.isfile(historydb_file):
|
||||
sys.stderr.write('Error: history database file not valid\n')
|
||||
return False
|
||||
|
||||
# initialise variables
|
||||
task_performed = 0
|
||||
task_failed = 0
|
||||
try:
|
||||
# initialise historyDB
|
||||
historyDB = sqlite3.connect(historydb_file)
|
||||
h_cursor = historyDB.cursor()
|
||||
# initialise output DB
|
||||
database = sqlite3.connect(output_file)
|
||||
cursor = database.cursor()
|
||||
except:
|
||||
sys.stderr.write('FAIL: aborting SWDB transformer\n')
|
||||
return False
|
||||
|
||||
# value distribution in tables
|
||||
PACKAGE_DATA = ['P_ID', 'R_ID', 'from_repo_revision',
|
||||
'from_repo_timestamp', 'installed_by', 'changed_by',
|
||||
'installonly']
|
||||
|
||||
PACKAGE = ['P_ID', 'name', 'epoch', 'version', 'release', 'arch',
|
||||
'checksum_data', 'checksum_type', 'type']
|
||||
|
||||
TRANS_DATA = ['T_ID', 'PD_ID', 'TG_ID', 'done', 'ORIGINAL_TD_ID', 'reason',
|
||||
'state']
|
||||
|
||||
TRANS = ['T_ID', 'beg_timestamp', 'end_timestamp', 'beg_RPMDB_version',
|
||||
'end_RPMDB_version', 'cmdline', 'loginuid', 'releasever',
|
||||
'return_code']
|
||||
|
||||
GROUPS = ['name_id', 'name', 'ui_name', 'installed', 'pkg_types']
|
||||
|
||||
ENVIRONMENTS = ['name_id', 'name', 'ui_name', 'pkg_types', 'grp_types']
|
||||
|
||||
RPM_DATA = ['P_ID', 'buildtime', 'buildhost', 'license', 'packager',
|
||||
'size', 'sourcerpm', 'url', 'vendor', 'committer',
|
||||
'committime']
|
||||
|
||||
# contruction of PACKAGE from pkgtups
|
||||
h_cursor.execute('SELECT * FROM pkgtups')
|
||||
for row in h_cursor:
|
||||
record_P = [''] * len(PACKAGE) # init
|
||||
record_P[0] = row[0] # P_ID
|
||||
record_P[1] = row[1] # name
|
||||
record_P[2] = row[3] # epoch
|
||||
record_P[3] = row[4] # version
|
||||
record_P[4] = row[5] # release
|
||||
record_P[5] = row[2] # arch
|
||||
if row[6]:
|
||||
record_P[6] = row[6].split(":", 2)[1] # checksum_data
|
||||
record_P[7] = row[6].split(":", 2)[0] # checksum_type
|
||||
record_P[8] = SwdbItem.RPM # type
|
||||
cursor.execute('INSERT INTO PACKAGE VALUES (?,?,?,?,?,?,?,?,?)',
|
||||
record_P)
|
||||
|
||||
# save changes
|
||||
database.commit()
|
||||
|
||||
# construction of PACKAGE_DATA according to pkg_yumdb
|
||||
actualPID = 0
|
||||
record_PD = [''] * len(PACKAGE_DATA)
|
||||
h_cursor.execute('SELECT * FROM pkg_yumdb')
|
||||
# for each row in pkg_yumdb
|
||||
for row in h_cursor:
|
||||
newPID = row[0]
|
||||
|
||||
if actualPID != newPID:
|
||||
if actualPID != 0:
|
||||
record_PD[PACKAGE_DATA.index('P_ID')] = actualPID
|
||||
# insert new record into PACKAGE_DATA
|
||||
PACKAGE_DATA_INSERT(cursor, record_PD)
|
||||
|
||||
actualPID = newPID
|
||||
record_PD = [''] * len(PACKAGE_DATA)
|
||||
|
||||
if row[1] in PACKAGE_DATA:
|
||||
# collect data for record from pkg_yumdb
|
||||
record_PD[PACKAGE_DATA.index(row[1])] = row[2]
|
||||
|
||||
elif row[1] == "from_repo":
|
||||
# create binding with REPO table
|
||||
record_PD[PACKAGE_DATA.index("R_ID")] = BIND_REPO(cursor, row[2])
|
||||
|
||||
record_PD[PACKAGE_DATA.index('P_ID')] = actualPID
|
||||
PACKAGE_DATA_INSERT(cursor, record_PD) # insert last record
|
||||
|
||||
# integrity optimalization
|
||||
cursor.execute('SELECT P_ID FROM PACKAGE')
|
||||
tmp_row = cursor.fetchall()
|
||||
for row in tmp_row:
|
||||
BIND_PID_PDID(cursor, int(row[0]))
|
||||
|
||||
# save changes
|
||||
database.commit()
|
||||
|
||||
# construction of RPM_DATA according to pkg_rpmdb
|
||||
actualPID = 0
|
||||
record_RPM = [''] * len(RPM_DATA)
|
||||
h_cursor.execute('SELECT * FROM pkg_rpmdb')
|
||||
|
||||
# for each row in pkg_rpmdb
|
||||
for row in h_cursor:
|
||||
newPID = row[0]
|
||||
if actualPID != newPID:
|
||||
if actualPID != 0:
|
||||
record_RPM[RPM_DATA.index('P_ID')] = actualPID
|
||||
# insert new record into PACKAGE_DATA
|
||||
RPM_DATA_INSERT(cursor, record_RPM)
|
||||
actualPID = newPID
|
||||
record_RPM = [''] * len(RPM_DATA)
|
||||
|
||||
if row[1] in RPM_DATA:
|
||||
# collect data for record from pkg_yumdb
|
||||
record_RPM[RPM_DATA.index(row[1])] = row[2]
|
||||
record_RPM[RPM_DATA.index('P_ID')] = actualPID
|
||||
RPM_DATA_INSERT(cursor, record_RPM) # insert last record
|
||||
|
||||
# save changes
|
||||
database.commit()
|
||||
|
||||
# trans_data construction
|
||||
h_cursor.execute('SELECT * FROM trans_data_pkgs')
|
||||
|
||||
for row in h_cursor:
|
||||
record_TD = [''] * len(TRANS_DATA)
|
||||
record_TD[TRANS_DATA.index('T_ID')] = row[0] # T_ID
|
||||
if row[2] == 'TRUE':
|
||||
record_TD[TRANS_DATA.index('done')] = 1
|
||||
else:
|
||||
record_TD[TRANS_DATA.index('done')] = 0
|
||||
|
||||
record_TD[TRANS_DATA.index('state')] = BIND_STATE(cursor, row[3])
|
||||
pkgtups_tmp = int(row[1])
|
||||
|
||||
cursor.execute('SELECT PD_ID FROM PACKAGE_DATA WHERE P_ID=?',
|
||||
(pkgtups_tmp,))
|
||||
|
||||
pkgtups_tmp = cursor.fetchone()
|
||||
if pkgtups_tmp:
|
||||
record_TD[TRANS_DATA.index('PD_ID')] = pkgtups_tmp[0]
|
||||
else:
|
||||
task_failed += 1
|
||||
task_performed += 1
|
||||
TRANS_DATA_INSERT(cursor, record_TD)
|
||||
|
||||
# save changes
|
||||
database.commit()
|
||||
|
||||
# resolve STATE_TYPE
|
||||
cursor.execute('SELECT * FROM STATE_TYPE')
|
||||
state_types = cursor.fetchall()
|
||||
fsm_state = 0
|
||||
obsoleting_t = 0
|
||||
update_t = 0
|
||||
downgrade_t = 0
|
||||
for a in range(len(state_types)):
|
||||
if state_types[a][1] == 'Obsoleting':
|
||||
obsoleting_t = a + 1
|
||||
elif state_types[a][1] == 'Update':
|
||||
update_t = a + 1
|
||||
elif state_types[a][1] == 'Downgrade':
|
||||
downgrade_t = a + 1
|
||||
|
||||
# find ORIGINAL_TD_ID for Obsoleting and upgraded - via FSM
|
||||
previous_TD_ID = 0
|
||||
cursor.execute('SELECT * FROM TRANS_DATA')
|
||||
tmp_row = cursor.fetchall()
|
||||
for row in tmp_row:
|
||||
if fsm_state == 0:
|
||||
if row[7] == obsoleting_t:
|
||||
fsm_state = 1
|
||||
elif row[7] == update_t:
|
||||
fsm_state = 1
|
||||
elif row[7] == downgrade_t:
|
||||
fsm_state = 1
|
||||
previous_TD_ID = row[0]
|
||||
elif fsm_state == 1:
|
||||
cursor.execute('''UPDATE TRANS_DATA SET ORIGINAL_TD_ID = ?
|
||||
WHERE TD_ID = ?''',
|
||||
(row[0], previous_TD_ID))
|
||||
fsm_state = 0
|
||||
|
||||
# save changes
|
||||
database.commit()
|
||||
|
||||
# Construction of TRANS
|
||||
h_cursor.execute('SELECT * FROM trans_beg')
|
||||
for row in h_cursor:
|
||||
record_T = [''] * len(TRANS)
|
||||
record_T[TRANS.index('T_ID')] = row[0]
|
||||
record_T[TRANS.index('beg_timestamp')] = row[1]
|
||||
record_T[TRANS.index('beg_RPMDB_version')] = row[2]
|
||||
record_T[TRANS.index('loginuid')] = row[3]
|
||||
TRANS_INSERT(cursor, record_T)
|
||||
|
||||
h_cursor.execute('SELECT * FROM trans_end')
|
||||
|
||||
for row in h_cursor:
|
||||
cursor.execute('''UPDATE TRANS SET end_timestamp=?,end_RPMDB_version=?,
|
||||
return_code=? WHERE T_ID = ?''',
|
||||
(row[1], row[2], row[3], row[0]))
|
||||
|
||||
h_cursor.execute('SELECT * FROM trans_cmdline')
|
||||
for row in h_cursor:
|
||||
cursor.execute('UPDATE TRANS SET cmdline=? WHERE T_ID = ?',
|
||||
(row[1], row[0]))
|
||||
|
||||
# fetch releasever
|
||||
cursor.execute('SELECT T_ID FROM TRANS WHERE releasever=?', ('', ))
|
||||
missing = cursor.fetchall()
|
||||
for row in missing:
|
||||
cursor.execute('SELECT PD_ID FROM TRANS_DATA WHERE T_ID=?', (row[0], ))
|
||||
PDID = cursor.fetchall()
|
||||
if PDID:
|
||||
for actualPDID in PDID:
|
||||
cursor.execute('''SELECT P_ID FROM PACKAGE_DATA
|
||||
WHERE PD_ID=? LIMIT 1''',
|
||||
(actualPDID[0], ))
|
||||
actualPID = cursor.fetchone()
|
||||
if actualPID:
|
||||
h_cursor.execute('''SELECT yumdb_val FROM pkg_yumdb WHERE
|
||||
pkgtupid=? AND yumdb_key=? LIMIT 1''',
|
||||
(actualPID[0], 'releasever'))
|
||||
|
||||
releasever = h_cursor.fetchone()
|
||||
if releasever:
|
||||
cursor.execute('''UPDATE TRANS SET releasever=? WHERE
|
||||
T_ID=?''',
|
||||
(releasever[0], row[0]))
|
||||
break
|
||||
|
||||
# fetch reason
|
||||
cursor.execute('SELECT TD_ID,PD_ID FROM TRANS_DATA')
|
||||
missing = cursor.fetchall()
|
||||
for row in missing:
|
||||
cursor.execute('SELECT P_ID FROM PACKAGE_DATA WHERE PD_ID=? LIMIT 1',
|
||||
(row[1], ))
|
||||
actualPID = cursor.fetchone()
|
||||
|
||||
if actualPID:
|
||||
h_cursor.execute('''SELECT yumdb_val FROM pkg_yumdb
|
||||
WHERE pkgtupid=? AND yumdb_key=? LIMIT 1''',
|
||||
(actualPID[0], 'reason'))
|
||||
|
||||
reason = h_cursor.fetchone()
|
||||
if reason:
|
||||
t_reason = convert_reason(reason[0])
|
||||
cursor.execute('UPDATE TRANS_DATA SET reason=? WHERE TD_ID=?',
|
||||
(t_reason, row[0]))
|
||||
|
||||
# construction of OUTPUT
|
||||
h_cursor.execute('SELECT * FROM trans_script_stdout')
|
||||
for row in h_cursor:
|
||||
cursor.execute('INSERT INTO OUTPUT VALUES (null,?,?,?)',
|
||||
(row[1], row[2], BIND_OUTPUT(cursor, 'stdout')))
|
||||
|
||||
h_cursor.execute('SELECT * FROM trans_error')
|
||||
for row in h_cursor:
|
||||
cursor.execute('INSERT INTO OUTPUT VALUES (null,?,?,?)',
|
||||
(row[1], row[2], BIND_OUTPUT(cursor, 'stderr')))
|
||||
|
||||
# fetch additional data from yumdb
|
||||
GET_YUMDB_PACKAGES(cursor, yumdb_path, PACKAGE_DATA)
|
||||
|
||||
# construction of GROUPS
|
||||
if os.path.isfile(groups_path):
|
||||
with open(groups_path) as groups_file:
|
||||
data = json.load(groups_file)
|
||||
for key in data:
|
||||
if key == 'GROUPS':
|
||||
for value in data[key]:
|
||||
record_G = [''] * len(GROUPS)
|
||||
record_G[GROUPS.index('name_id')] = value
|
||||
|
||||
if 'name' in data[key][value]:
|
||||
record_G[GROUPS.index('name')] =\
|
||||
data[key][value]['name']
|
||||
|
||||
record_G[GROUPS.index('pkg_types')] =\
|
||||
data[key][value]['pkg_types']
|
||||
|
||||
record_G[GROUPS.index('installed')] = True
|
||||
if 'ui_name' in data[key][value]:
|
||||
record_G[GROUPS.index('ui_name')] =\
|
||||
data[key][value]['ui_name']
|
||||
|
||||
cursor.execute('''INSERT INTO GROUPS
|
||||
VALUES (null,?,?,?,?,?)''',
|
||||
(record_G))
|
||||
cursor.execute('SELECT last_insert_rowid()')
|
||||
tmp_gid = cursor.fetchone()[0]
|
||||
for package in data[key][value]['full_list']:
|
||||
ADD_GROUPS_PACKAGE(cursor, tmp_gid, package)
|
||||
for package in data[key][value]['pkg_exclude']:
|
||||
ADD_GROUPS_EXCLUDE(cursor, tmp_gid, package)
|
||||
for key in data:
|
||||
|
||||
if key == 'ENVIRONMENTS':
|
||||
for value in data[key]:
|
||||
record_E = [''] * len(ENVIRONMENTS)
|
||||
record_E[GROUPS.index('name_id')] = value
|
||||
if 'name' in data[key][value]:
|
||||
record_G[GROUPS.index('name')] =\
|
||||
data[key][value]['name']
|
||||
record_E[ENVIRONMENTS.index('grp_types')] =\
|
||||
data[key][value]['grp_types']
|
||||
record_E[ENVIRONMENTS.index('pkg_types')] =\
|
||||
data[key][value]['pkg_types']
|
||||
if 'ui_name' in data[key][value]:
|
||||
record_E[ENVIRONMENTS.index('ui_name')] =\
|
||||
data[key][value]['ui_name']
|
||||
|
||||
cursor.execute('''INSERT INTO ENVIRONMENTS
|
||||
VALUES (null,?,?,?,?,?)''',
|
||||
(record_E))
|
||||
cursor.execute('SELECT last_insert_rowid()')
|
||||
tmp_eid = cursor.fetchone()[0]
|
||||
|
||||
for package in data[key][value]['full_list']:
|
||||
BIND_ENV_GROUP(cursor, tmp_eid, package)
|
||||
for package in data[key][value]['pkg_exclude']:
|
||||
ADD_ENV_EXCLUDE(cursor, tmp_eid, package)
|
||||
|
||||
# construction of TRANS_GROUP_DATA from GROUPS
|
||||
cursor.execute('SELECT * FROM GROUPS')
|
||||
tmp_groups = cursor.fetchall()
|
||||
for row in tmp_groups:
|
||||
tmp_ui_name = ''
|
||||
tmp_trans = ''
|
||||
if row[3]:
|
||||
tmp_ui_name = "%" + row[3] + "%"
|
||||
cursor.execute('SELECT T_ID FROM TRANS WHERE cmdline LIKE ?',
|
||||
(tmp_ui_name, ))
|
||||
tmp_trans = cursor.fetchall()
|
||||
if not tmp_trans and row[2]:
|
||||
tmp_ui_name = "%" + row[2] + "%"
|
||||
cursor.execute('SELECT T_ID FROM TRANS WHERE cmdline LIKE ?',
|
||||
(tmp_ui_name,))
|
||||
tmp_trans = cursor.fetchall()
|
||||
if not tmp_trans and row[1]:
|
||||
tmp_ui_name = "%" + row[1] + "%"
|
||||
cursor.execute('SELECT T_ID FROM TRANS WHERE cmdline LIKE ?',
|
||||
(tmp_ui_name,))
|
||||
tmp_trans = cursor.fetchall()
|
||||
if tmp_trans:
|
||||
for single_trans in tmp_trans:
|
||||
tmp_tuple = (single_trans[0], row[0], row[1], row[2], row[3],
|
||||
row[4], row[5])
|
||||
cursor.execute('''INSERT INTO TRANS_GROUP_DATA
|
||||
VALUES(null,?,?,?,?,?,?,?)''',
|
||||
tmp_tuple)
|
||||
|
||||
# construction of TRANS_GROUP_DATA from ENVIRONMENTS
|
||||
cursor.execute('SELECT * FROM ENVIRONMENTS WHERE ui_name!=?', ('', ))
|
||||
tmp_env = cursor.fetchall()
|
||||
for row in tmp_env:
|
||||
tmp_ui_name = ''
|
||||
tmp_trans = ''
|
||||
if row[3]:
|
||||
tmp_ui_name = "%" + row[3] + "%"
|
||||
cursor.execute('SELECT T_ID FROM TRANS WHERE cmdline LIKE ?',
|
||||
(tmp_ui_name,))
|
||||
tmp_trans = cursor.fetchall()
|
||||
if not tmp_trans and row[2]:
|
||||
tmp_ui_name = "%" + row[2] + "%"
|
||||
cursor.execute('SELECT T_ID FROM TRANS WHERE cmdline LIKE ?',
|
||||
(tmp_ui_name,))
|
||||
tmp_trans = cursor.fetchall()
|
||||
if not tmp_trans and row[1]:
|
||||
tmp_ui_name = "%" + row[1] + "%"
|
||||
cursor.execute('SELECT T_ID FROM TRANS WHERE cmdline LIKE ?',
|
||||
(tmp_ui_name,))
|
||||
tmp_trans = cursor.fetchall()
|
||||
if tmp_trans:
|
||||
for single_trans in tmp_trans:
|
||||
cursor.execute('''SELECT G_ID FROM ENVIRONMENTS_GROUPS
|
||||
WHERE E_ID = ?''',
|
||||
(row[0],))
|
||||
tmp_groups = cursor.fetchall()
|
||||
for gid in tmp_groups:
|
||||
cursor.execute('SELECT * FROM GROUPS WHERE G_ID = ?',
|
||||
(gid[0],))
|
||||
tmp_group_data = cursor.fetchone()
|
||||
tmp_tuple = (single_trans[0], tmp_group_data[0],
|
||||
tmp_group_data[1], tmp_group_data[2],
|
||||
tmp_group_data[3], tmp_group_data[4],
|
||||
tmp_group_data[5])
|
||||
cursor.execute('''INSERT INTO TRANS_GROUP_DATA
|
||||
VALUES(null,?,?,?,?,?,?,?)''',
|
||||
tmp_tuple)
|
||||
|
||||
h_cursor.execute('SELECT * FROM trans_with_pkgs')
|
||||
for row in h_cursor:
|
||||
tid = row[0]
|
||||
pid = row[1]
|
||||
cursor.execute('INSERT INTO TRANS_WITH VALUES (null,?,?)', (tid, pid))
|
||||
|
||||
# save changes
|
||||
database.commit()
|
||||
|
||||
# close connection
|
||||
database.close()
|
||||
historyDB.close()
|
||||
|
||||
return task_performed > 0
|
|
@ -0,0 +1,41 @@
|
|||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
# Unified software database types
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# Eduard Cuba <ecuba@redhat.com>
|
||||
|
||||
import gi
|
||||
gi.require_version('Dnf', '1.0')
|
||||
from gi.repository import Dnf # noqa
|
||||
|
||||
|
||||
Swdb = Dnf.Swdb
|
||||
SwdbItem = Dnf.SwdbItem
|
||||
SwdbReason = Dnf.SwdbReason
|
||||
SwdbPkg = Dnf.SwdbPkg
|
||||
SwdbPkgData = Dnf.SwdbPkgData
|
||||
SwdbTrans = Dnf.SwdbTrans
|
||||
SwdbGroup = Dnf.SwdbGroup
|
||||
SwdbEnv = Dnf.SwdbEnv
|
||||
SwdbRpmData = Dnf.SwdbRpmData
|
||||
|
||||
convert_id = Dnf.convert_id_to_reason
|
||||
|
||||
|
||||
def convert_reason(reason):
    """Return *reason* as a Dnf.SwdbReason.

    Values that already are SwdbReason pass through unchanged; anything
    else (the legacy string representation) is converted via
    Dnf.convert_reason_to_id.
    """
    if not isinstance(reason, Dnf.SwdbReason):
        reason = Dnf.convert_reason_to_id(reason)
    return reason
|
33
dnf/goal.py
33
dnf/goal.py
|
@ -24,6 +24,7 @@ from copy import deepcopy
|
|||
from dnf.i18n import _
|
||||
import logging
|
||||
import hawkey
|
||||
from dnf.db.types import SwdbReason
|
||||
|
||||
logger = logging.getLogger('dnf')
|
||||
|
||||
|
@ -35,21 +36,13 @@ class Goal(hawkey.Goal):
|
|||
|
||||
def get_reason(self, pkg):
|
||||
code = super(Goal, self).get_reason(pkg)
|
||||
if code == hawkey.REASON_DEP:
|
||||
return 'dep'
|
||||
if code == hawkey.REASON_USER:
|
||||
if pkg.name in self.group_members:
|
||||
return 'group'
|
||||
return 'user'
|
||||
if code == hawkey.REASON_CLEAN:
|
||||
return 'clean'
|
||||
if code == hawkey.REASON_WEAKDEP:
|
||||
return 'weak'
|
||||
assert False, 'Unknown reason: %d' % code
|
||||
if code == hawkey.REASON_USER and pkg.name in self.group_members:
|
||||
return SwdbReason.GROUP
|
||||
return SwdbReason(code)
|
||||
|
||||
def group_reason(self, pkg, current_reason):
|
||||
if current_reason == 'unknown' and pkg.name in self.group_members:
|
||||
return 'group'
|
||||
if current_reason == SwdbReason.UNKNOWN and pkg.name in self.group_members:
|
||||
return SwdbReason.GROUP
|
||||
return current_reason
|
||||
|
||||
def install(self, *args, **kwargs):
|
||||
|
@ -59,14 +52,16 @@ class Goal(hawkey.Goal):
|
|||
self._installs.extend(kwargs['select'].matches())
|
||||
return super(Goal, self).install(*args, **kwargs)
|
||||
|
||||
def push_userinstalled(self, query, yumdb):
|
||||
def push_userinstalled(self, query, history):
|
||||
msg = _('--> Finding unneeded leftover dependencies')
|
||||
logger.debug(msg)
|
||||
for pkg in query.installed():
|
||||
yumdb_info = yumdb.get_package(pkg)
|
||||
reason = getattr(yumdb_info, 'reason', 'user')
|
||||
if reason not in ('dep', 'weak'):
|
||||
self.userinstalled(pkg)
|
||||
pkgs = query.installed()
|
||||
|
||||
# get only user installed packages
|
||||
user_installed = history.select_user_installed(pkgs)
|
||||
|
||||
for pkg in user_installed:
|
||||
self.userinstalled(pkg)
|
||||
|
||||
def available_updates_diff(self, query):
|
||||
available_updates = set(query.upgrades().filter(arch__neq="src")
|
||||
|
|
|
@ -24,7 +24,7 @@ from __future__ import absolute_import
|
|||
from __future__ import unicode_literals
|
||||
from collections import defaultdict, Container, Iterable, Sized
|
||||
from dnf.util import is_exhausted, split_by
|
||||
from dnf.yum.history import YumHistory
|
||||
from dnf.db.types import SwdbReason
|
||||
|
||||
import dnf.exceptions
|
||||
|
||||
|
@ -49,18 +49,15 @@ STATE2COMPLEMENT = {'Reinstall': 'Reinstalled',
|
|||
|
||||
def open_history(database):
|
||||
"""Open a history of transactions."""
|
||||
if isinstance(database, YumHistory):
|
||||
return _HistoryWrapper(database)
|
||||
else:
|
||||
raise TypeError("unsupported database type: %s" % type(database))
|
||||
return _HistoryWrapper(database)
|
||||
|
||||
class _HistoryWrapper(object):
|
||||
"""Transactions history interface on top of an YumHistory."""
|
||||
|
||||
def __init__(self, yum_history):
|
||||
def __init__(self, history):
|
||||
"""Initialize a wrapper instance."""
|
||||
object.__init__(self)
|
||||
self._history = yum_history
|
||||
self._history = history
|
||||
|
||||
def __enter__(self):
|
||||
"""Enter the runtime context."""
|
||||
|
@ -81,7 +78,7 @@ class _HistoryWrapper(object):
|
|||
|
||||
def last_transaction_id(self):
|
||||
"""Get ID of the last stored transaction."""
|
||||
last_tx = self._history.last(complete_transactions_only=False)
|
||||
last_tx = self._history.last()
|
||||
return last_tx.tid if last_tx else None
|
||||
|
||||
def transaction_nevra_ops(self, id_):
|
||||
|
@ -89,7 +86,7 @@ class _HistoryWrapper(object):
|
|||
if not self.has_transaction(id_):
|
||||
raise ValueError('no transaction with given ID: %d' % id_)
|
||||
|
||||
hpkgs = self._history._old_data_pkgs(str(id_), sort=False)
|
||||
hpkgs = self._history.get_packages_by_tid(id_)
|
||||
|
||||
# Split history to history packages representing transaction items.
|
||||
items_hpkgs = split_by(hpkgs, lambda hpkg: hpkg.state in PRIMARY_STATES)
|
||||
|
@ -116,7 +113,9 @@ class _HistoryWrapper(object):
|
|||
assert hpkg.state == 'Obsoleting'
|
||||
obsoleting_nevra = hpkg.nevra
|
||||
hpkg = next(reversed_it)
|
||||
if hpkg.state in {'Reinstalled', 'Downgraded', 'Updated'}: # Replaced.
|
||||
|
||||
# Replaced.
|
||||
if hpkg.state in {'Reinstalled', 'Downgraded', 'Updated'}:
|
||||
replaced_nevra, replaced_state = hpkg.nevra, hpkg.state
|
||||
hpkg = next(reversed_it)
|
||||
assert is_exhausted(reversed_it)
|
||||
|
@ -414,7 +413,7 @@ class TransactionConverter(object):
|
|||
assert len(packages) == 1
|
||||
return packages[0]
|
||||
|
||||
def convert(self, operations, reason='unknown'):
|
||||
def convert(self, operations, reason=SwdbReason.UNKNOWN):
|
||||
"""Convert operations to a transaction."""
|
||||
transaction = dnf.transaction.Transaction()
|
||||
for state, nevra, rnevra, onevras in operations:
|
||||
|
|
|
@ -77,9 +77,13 @@ class Package(hawkey.Package):
|
|||
|
||||
@property
|
||||
def _from_repo(self):
|
||||
yumdb_info = self.base._yumdb.get_package(self) if self._from_system else {}
|
||||
if 'from_repo' in yumdb_info:
|
||||
return '@'+yumdb_info.from_repo
|
||||
pkgrepo = None
|
||||
if self._from_system:
|
||||
pkgrepo = self.base.history.repo(self)
|
||||
else:
|
||||
pkgrepo = {}
|
||||
if pkgrepo:
|
||||
return '@' + pkgrepo
|
||||
return self.reponame
|
||||
|
||||
@property
|
||||
|
|
370
dnf/persistor.py
370
dnf/persistor.py
|
@ -26,7 +26,6 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from dnf.i18n import _
|
||||
|
||||
import collections
|
||||
import distutils.version
|
||||
import dnf.util
|
||||
|
@ -40,375 +39,6 @@ import re
|
|||
logger = logging.getLogger("dnf")
|
||||
|
||||
|
||||
def _by_pattern(pattern, ids, lookup_fn, case_sensitive):
    """Return the subset of *ids* whose id or looked-up name matches *pattern*.

    Exact matches (on the id itself or on the member's name) take priority;
    only when there is none does the pattern fall back to glob matching
    against name, ui_name and id.
    """
    pattern = dnf.i18n.ucd(pattern)

    # Exact matches win over glob matches.
    exact = {id_ for id_ in ids
             if id_ == pattern or lookup_fn(id_).name == pattern}
    if exact:
        return exact

    flags = 0 if case_sensitive else re.I
    match = re.compile(fnmatch.translate(pattern), flags=flags).match

    return {id_ for id_ in ids
            if match(lookup_fn(id_).name) or match(lookup_fn(id_).ui_name)
            or match(id_)}
|
||||
|
||||
|
||||
def _clone_dct(dct):
|
||||
cln = {}
|
||||
for (k, v) in dct.items():
|
||||
if isinstance(v, list):
|
||||
cln[k] = v[:]
|
||||
elif isinstance(v, dict):
|
||||
cln[k] = _clone_dct(v)
|
||||
else:
|
||||
cln[k] = v
|
||||
return cln
|
||||
|
||||
|
||||
def _diff_dcts(dct1, dct2):
|
||||
"""Specific kind of diff between the two dicts.
|
||||
|
||||
Namely, differences between values of non-collections are not considered.
|
||||
|
||||
"""
|
||||
|
||||
added = {}
|
||||
removed = {}
|
||||
keys1 = set(dct1.keys())
|
||||
keys2 = set(dct2.keys())
|
||||
|
||||
for key in keys2 - keys1:
|
||||
added[key] = dct2[key]
|
||||
for key in keys1 - keys2:
|
||||
removed[key] = dct1[key]
|
||||
for key in keys1 & keys2:
|
||||
val1 = dct1[key]
|
||||
val2 = dct2[key]
|
||||
if type(val1) is type(val2) is dict:
|
||||
added_dct, removed_dct = _diff_dcts(val1, val2)
|
||||
if added_dct:
|
||||
added[key] = added_dct
|
||||
if removed_dct:
|
||||
removed[key] = removed_dct
|
||||
elif type(val1) is type(val2) is list:
|
||||
set1 = set(val1)
|
||||
set2 = set(val2)
|
||||
added_set = set2 - set1
|
||||
if added_set:
|
||||
added[key] = added_set
|
||||
removed_set = set1 - set2
|
||||
if removed_set:
|
||||
removed[key] = removed_set
|
||||
|
||||
return added, removed
|
||||
|
||||
|
||||
# collections.MutableMapping is an alias that was deprecated since Python 3.3
# and removed in 3.10; import the ABC from collections.abc instead, with a
# fallback for Python 2 where the ABCs live directly in collections.
try:
    from collections.abc import MutableMapping as _MutableMapping
except ImportError:  # Python 2
    from collections import MutableMapping as _MutableMapping


class ClonableDict(_MutableMapping):
    """A dict with list values that can be cloned.

    This wraps around an ordinary dict (which only gives a shallow copy)
    and adds :meth:`clone`, which duplicates nested dicts and list values.
    """

    def __init__(self, dct):
        # The wrapped dict is kept by reference, not copied.
        self.dct = dct

    def __delitem__(self, key):
        del self.dct[key]

    def __getitem__(self, key):
        return self.dct[key]

    def __iter__(self):
        return iter(self.dct)

    def __len__(self):
        return len(self.dct)

    def __setitem__(self, key, val):
        self.dct[key] = val

    @classmethod
    def wrap_dict(cls, dct):
        """Wrap *dct* in a ClonableDict without copying it."""
        groups = cls(dct)
        return groups

    def clone(self):
        """Return a new ClonableDict over a deep-ish copy of the wrapped
        dict (nested dicts cloned, list values copied)."""
        cls = self.__class__
        return cls.wrap_dict(_clone_dct(self.dct))
|
||||
|
||||
|
||||
class _PersistMember(object):
    """Attribute-style view over one group/environment entry dict stored in
    the groups JSON database."""

    # Template for a freshly created entry; cloned on use so instances never
    # share the mutable list values.
    DEFAULTS = ClonableDict({
        'name' : '',
        'ui_name' : '',
        'full_list' : [],
        'grp_types' : 0,
        'pkg_exclude' : [],
        'pkg_types' : 0,
    })

    @staticmethod
    def default():
        """Return a new plain entry dict pre-filled with default values."""
        return _PersistMember.DEFAULTS.clone().dct

    def __init__(self, param_dct):
        # The wrapped dict is shared with the database; the property setters
        # below mutate it in place.
        self.param_dct = param_dct

    @property
    def name(self):
        return self.param_dct['name']

    @name.setter
    def name(self, val):
        self.param_dct['name'] = val

    @property
    def ui_name(self):
        return self.param_dct['ui_name']

    @ui_name.setter
    def ui_name(self, val):
        self.param_dct['ui_name'] = val

    @property
    def pkg_exclude(self):
        """List of package names explicitly excluded from this member."""
        return self.param_dct['pkg_exclude']

    @property
    def full_list(self):
        """Full list of package/group names belonging to this member."""
        return self.param_dct['full_list']

    @property
    def installed(self):
        # The member counts as installed when any group-type or package-type
        # bit is set; '|' binds tighter than '!=', so this reads as
        # (grp_types | pkg_types) != 0.
        return self.grp_types | self.pkg_types != 0

    @property
    def grp_types(self):
        """Bitmask of installed group types (environments only)."""
        return self.param_dct['grp_types']

    @grp_types.setter
    def grp_types(self, val):
        self.param_dct['grp_types'] = val

    @property
    def pkg_types(self):
        """Bitmask of installed package types."""
        return self.param_dct['pkg_types']

    @pkg_types.setter
    def pkg_types(self, val):
        self.param_dct['pkg_types'] = val
|
||||
|
||||
|
||||
class _GroupsDiff(object):
|
||||
def __init__(self, db_old, db_new):
|
||||
self.added, self.removed = _diff_dcts(db_old, db_new)
|
||||
|
||||
def _diff_keys(self, what, removing):
|
||||
added = set(self.added.get(what, {}).keys())
|
||||
removed = set(self.removed.get(what, {}).keys())
|
||||
if removing:
|
||||
return list(removed - added)
|
||||
return list(added-removed)
|
||||
|
||||
def empty(self):
|
||||
return not self.new_environments and not self.removed_environments and \
|
||||
not self.new_groups and not self.removed_groups
|
||||
|
||||
@property
|
||||
def new_environments(self):
|
||||
return self._diff_keys('ENVIRONMENTS', False)
|
||||
|
||||
@property
|
||||
def removed_environments(self):
|
||||
return self._diff_keys('ENVIRONMENTS', True)
|
||||
|
||||
@property
|
||||
def new_groups(self):
|
||||
return self._diff_keys('GROUPS', False)
|
||||
|
||||
@property
|
||||
def removed_groups(self):
|
||||
return self._diff_keys('GROUPS', True)
|
||||
|
||||
def added_packages(self, group_id):
|
||||
keys = ('GROUPS', group_id, 'full_list')
|
||||
return dnf.util.get_in(self.added, keys, set())
|
||||
|
||||
def removed_packages(self, group_id):
|
||||
keys = ('GROUPS', group_id, 'full_list')
|
||||
return dnf.util.get_in(self.removed, keys, set())
|
||||
|
||||
|
||||
class GroupPersistor(object):
    """JSON-backed store of installed comps groups and environments.

    The database lives in ``<persistdir>/groups.json``. All mutations happen
    in memory; they reach disk only after :meth:`commit` followed by
    :meth:`save`.
    """

    @staticmethod
    def _empty_db():
        """Return a fresh empty database carrying the current schema version."""
        return ClonableDict({
            'ENVIRONMENTS' : {},
            'GROUPS' : {},
            'meta' : {'version' : '0.6.0'}
        })

    def __init__(self, persistdir, comps=None):
        """Load the database from *persistdir*.

        :param persistdir: directory that holds (or will hold) groups.json
        :param comps: optional comps object used to backfill entries that
            are missing keys after a schema migration
        """
        self._commit = False
        self._comps = comps
        self._dbfile = os.path.join(persistdir, 'groups.json')
        self.db = None
        self._original = None
        self._load()
        self._ensure_sanity()

    def _access(self, subdict, id_):
        """Return a _PersistMember view of entry *id_* in section *subdict*
        ('GROUPS' or 'ENVIRONMENTS'), creating a default entry if missing."""
        subdict = self.db[subdict]
        dct = subdict.get(id_)
        if dct is None:
            dct = _PersistMember.default()
            subdict[id_] = dct

        return _PersistMember(dct)

    def _add_missing_entries(self):
        """Backfill keys missing from stored entries (used on migration).

        Values are taken from comps when available, otherwise from the
        member defaults.
        """
        for env_id in self.db['ENVIRONMENTS']:
            env = self.environment(env_id)
            for key in env.DEFAULTS.keys():
                try:
                    # Probe for the key; KeyError means it is missing.
                    getattr(env, key)
                except KeyError:
                    if self._comps:
                        try:
                            comps_env = self._comps._environment_by_id(env_id)
                            if comps_env:
                                value = getattr(comps_env, key)
                                setattr(env, key, value)
                                continue
                        except KeyError:
                            # set default if env is not present in comps
                            pass
                    setattr(env, key, env.DEFAULTS[key])
        for grp_id in self.db['GROUPS']:
            grp = self.group(grp_id)
            for key in grp.DEFAULTS.keys():
                try:
                    getattr(grp, key)
                except KeyError:
                    if self._comps:
                        try:
                            comps_grp = self._comps._group_by_id(grp_id)
                            if comps_grp:
                                value = getattr(comps_grp, key)
                                setattr(grp, key, value)
                                continue
                        except KeyError:
                            # set default if grp is not present in comps
                            pass
                    setattr(grp, key, grp.DEFAULTS[key])

    def _ensure_sanity(self):
        """Make sure the input db is valid."""
        if 'GROUPS' in self.db and 'ENVIRONMENTS' in self.db:
            return
        logger.warning(_('Invalid groups database, clearing.'))
        self.db = self._empty_db()

    def _load(self):
        """Read groups.json into self.db; a missing file yields an empty db.

        Keeps a clone in self._original so later diffs/rollbacks have a
        pristine baseline.
        """
        self.db = self._empty_db()
        try:
            with open(self._dbfile) as db:
                content = db.read()
                self.db = ClonableDict.wrap_dict(json.loads(content))
                self._migrate()
        except IOError as e:
            # ENOENT (no database yet) is expected; anything else is real.
            if e.errno != errno.ENOENT:
                raise
        self._original = self.db.clone()

    def _migrate(self):
        """Upgrade a loaded database to the current schema version.

        A database without version metadata is considered unsupported and
        reset; an older version gets missing keys backfilled and is saved.
        """
        try:
            version = self.db['meta']['version']
        except KeyError:
            msg = _('Unsupported installed groups database found, resetting.')
            logger.warning(msg)
            self.db = self._empty_db()
            version = self.db['meta']['version']
        else:
            current = self._empty_db()['meta']['version']
            dist = distutils.version.LooseVersion
            if dist(version) < dist(current):
                logger.debug('Migrating group persistor from %s to %s. ',
                             version, current)
                self._add_missing_entries()
                self.db['meta']['version'] = current
                self.commit()
                self.save()

        logger.debug('group persistor md version: %s', version)

    def _prune_db(self):
        """Drop entries that are no longer installed (all type bits zero)."""
        for members_dct in (self.db['ENVIRONMENTS'], self.db['GROUPS']):
            del_list = []
            for (id_, memb) in members_dct.items():
                if not _PersistMember(memb).installed:
                    del_list.append(id_)
            # Delete after iteration to avoid mutating while iterating.
            for id_ in del_list:
                del members_dct[id_]

    def _rollback(self):
        """Discard in-memory changes, restoring the loaded state."""
        self.db = self._original.clone()

    def commit(self):
        """Mark the database dirty so the next save() writes to disk."""
        self._commit = True

    def diff(self):
        """Return a _GroupsDiff of in-memory state vs. the loaded state."""
        return _GroupsDiff(self._original, self.db)

    def environment(self, id_):
        """Return a _PersistMember view of environment *id_* (auto-created)."""
        return self._access('ENVIRONMENTS', id_)

    @property
    def environments(self):
        return self.db['ENVIRONMENTS']

    def environments_by_pattern(self, pattern, case_sensitive=False):
        """Return environment ids matching *pattern* (exact first, then glob)."""
        return _by_pattern(pattern, self.environments,
                           self.environment, case_sensitive)

    def update_group_env_installed(self, installed, goal):
        """add to the persistor packages that are already installed or are
        being installed by group transaction"""
        ins = {p.name for p in set(goal.list_installs()).union(set(installed))}
        for g in self.diff().new_groups:
            all_pkgs = set(self.group(g).full_list)
            installed_in_group = list(all_pkgs.intersection(ins))
            self.group(g).param_dct['full_list'] = installed_in_group

    def group(self, id_):
        """Return a _PersistMember view of group *id_* (auto-created)."""
        return self._access('GROUPS', id_)

    @property
    def groups(self):
        return self.db['GROUPS']

    def groups_by_pattern(self, pattern, case_sensitive=False):
        """Return group ids matching *pattern* (exact first, then glob)."""
        return _by_pattern(pattern, self.groups,
                           self.group, case_sensitive)

    def save(self):
        """Write the database to disk if dirty and actually changed.

        :return: True if a write happened, False otherwise
        """
        if not self._commit:
            return False
        self._prune_db()
        if self.db == self._original:
            return False
        logger.debug('group persistor: saving.')
        with open(self._dbfile, 'w') as db:
            json.dump(self.db.dct, db)
        self._commit = False
        return True
|
||||
|
||||
|
||||
class JSONDB(object):
|
||||
|
||||
def _check_json_db(self, json_path):
|
||||
|
|
|
@ -37,9 +37,9 @@ class Query(hawkey.Query):
|
|||
# :api
|
||||
return self.filter(reponame__neq=hawkey.SYSTEM_REPO_NAME)
|
||||
|
||||
def _unneeded(self, sack, yumdb, debug_solver=False):
|
||||
def _unneeded(self, sack, history, debug_solver=False):
|
||||
goal = dnf.goal.Goal(sack)
|
||||
goal.push_userinstalled(self.installed(), yumdb)
|
||||
goal.push_userinstalled(self.installed(), history)
|
||||
solved = goal.run()
|
||||
if debug_solver:
|
||||
goal.write_debugdata('./debugdata-autoremove')
|
||||
|
|
28
dnf/sack.py
28
dnf/sack.py
|
@ -28,7 +28,6 @@ import hawkey
|
|||
import os
|
||||
from dnf.pycomp import basestring
|
||||
|
||||
|
||||
class SackVersion(object):
|
||||
def __init__(self):
|
||||
self._num = 0
|
||||
|
@ -51,12 +50,12 @@ class SackVersion(object):
|
|||
def __ne__(self, other):
|
||||
return not (self == other)
|
||||
|
||||
def _update(self, pkg, csum):
|
||||
def _update(self, pkg, csum_type, csum_data):
|
||||
self._num += 1
|
||||
self._chksum.update(str(pkg))
|
||||
if csum is not None:
|
||||
self._chksum.update(csum[0])
|
||||
self._chksum.update(csum[1])
|
||||
self._chksum.update(pkg)
|
||||
if csum_type and csum_data:
|
||||
self._chksum.update(csum_type)
|
||||
self._chksum.update(csum_data)
|
||||
|
||||
|
||||
class Sack(hawkey.Sack):
|
||||
|
@ -73,17 +72,18 @@ class Sack(hawkey.Sack):
|
|||
"""Factory function returning a DNF Query."""
|
||||
return dnf.query.Query(self)
|
||||
|
||||
def _rpmdb_version(self, yumdb):
|
||||
def _rpmdb_version(self, history):
|
||||
pkgs = self.query().installed().run()
|
||||
main = SackVersion()
|
||||
for pkg in pkgs:
|
||||
ydbi = yumdb.get_package(pkg)
|
||||
csum = None
|
||||
if 'checksum_type' in ydbi and 'checksum_data' in ydbi:
|
||||
csum = (ydbi.checksum_type, ydbi.checksum_data)
|
||||
main._update(pkg, csum)
|
||||
return main
|
||||
|
||||
# [nevra, type, checksum, nevra, type, checksum...]
|
||||
data = history.checksums(pkgs)
|
||||
i = 0
|
||||
|
||||
while i < len(data) - 2:
|
||||
main._update(data[i], data[i + 1], data[i + 2])
|
||||
i += 3
|
||||
return main
|
||||
|
||||
def _build_sack(base):
|
||||
cachedir = base.conf.cachedir
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from dnf.db.types import SwdbReason, convert_reason
|
||||
from dnf.i18n import _
|
||||
from functools import reduce
|
||||
import operator
|
||||
|
@ -38,12 +39,12 @@ class TransactionItem(object):
|
|||
__slots__ = ('op_type', 'installed', 'erased', 'obsoleted', 'reason')
|
||||
|
||||
def __init__(self, op_type, installed=None, erased=None, obsoleted=None,
|
||||
reason='unknown'):
|
||||
reason=SwdbReason.UNKNOWN):
|
||||
self.op_type = op_type
|
||||
self.installed = installed
|
||||
self.erased = erased
|
||||
self.obsoleted = list() if obsoleted is None else obsoleted
|
||||
self.reason = reason # reason for it to be in the transaction set
|
||||
self.reason = convert_reason(reason) # reason for it to be in the transaction set
|
||||
|
||||
@property
|
||||
def _active(self):
|
||||
|
@ -72,6 +73,8 @@ class TransactionItem(object):
|
|||
UPGRADE : 'Updated'
|
||||
}
|
||||
|
||||
_HISTORY_ERASE = [DOWNGRADE, ERASE, REINSTALL, UPGRADE]
|
||||
|
||||
def _history_iterator(self):
|
||||
if self.installed is not None:
|
||||
yield(self.installed, self._installed_history_state)
|
||||
|
@ -98,30 +101,35 @@ class TransactionItem(object):
|
|||
def _obsoleting_history_state(self):
|
||||
return 'Obsoleting'
|
||||
|
||||
def _propagated_reason(self, yumdb, installonlypkgs_query):
|
||||
if self.reason == 'user':
|
||||
def _propagated_reason(self, history, installonly):
|
||||
if self.reason == SwdbReason.USER:
|
||||
return self.reason
|
||||
if installonlypkgs_query.filter(name=self.installed.name):
|
||||
return 'user'
|
||||
if self.op_type in [DOWNGRADE, REINSTALL, UPGRADE]:
|
||||
previously = yumdb.get_package(self.erased).get('reason')
|
||||
if self.installed and installonly.filter(name=self.installed.name):
|
||||
return SwdbReason.USER
|
||||
if self.op_type in self._HISTORY_ERASE and self.erased:
|
||||
previously = history.reason(self.erased)
|
||||
if previously:
|
||||
return previously
|
||||
if self.obsoleted:
|
||||
reasons = set()
|
||||
for obs in self.obsoleted:
|
||||
reasons.add(yumdb.get_package(obs).get('reason'))
|
||||
reasons.add(history.reason(obs))
|
||||
if reasons:
|
||||
if 'user' in reasons:
|
||||
return 'user'
|
||||
if 'group' in reasons:
|
||||
return 'group'
|
||||
if 'dep' in reasons:
|
||||
return 'dep'
|
||||
if 'weak' in reasons:
|
||||
return 'weak'
|
||||
if SwdbReason.USER in reasons:
|
||||
return SwdbReason.USER
|
||||
if SwdbReason.GROUP in reasons:
|
||||
return SwdbReason.GROUP
|
||||
if SwdbReason.DEP in reasons:
|
||||
return SwdbReason.DEP
|
||||
if SwdbReason.WEAK in reasons:
|
||||
return SwdbReason.WEAK
|
||||
return self.reason
|
||||
|
||||
def _propagate_reason(self, history, installonlypkgs):
|
||||
reason = self._propagated_reason(history, installonlypkgs)
|
||||
if reason:
|
||||
self.reason = reason
|
||||
|
||||
def removes(self):
|
||||
# :api
|
||||
l = [] if self.erased is None else [self.erased]
|
||||
|
@ -157,8 +165,9 @@ class Transaction(object):
|
|||
tsi = TransactionItem(ERASE, erased=erased)
|
||||
self._tsis.append(tsi)
|
||||
|
||||
def add_install(self, new, obsoleted, reason='unknown'):
|
||||
def add_install(self, new, obsoleted, reason=SwdbReason.UNKNOWN):
|
||||
# :api
|
||||
reason = convert_reason(reason) # support for string reasons
|
||||
tsi = TransactionItem(INSTALL, new, obsoleted=obsoleted,
|
||||
reason=reason)
|
||||
self._tsis.append(tsi)
|
||||
|
|
1772
dnf/yum/history.py
1772
dnf/yum/history.py
File diff suppressed because it is too large
Load Diff
|
@ -1,100 +0,0 @@
|
|||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
# Copyright 2004 Duke University
|
||||
# Written by Seth Vidal <skvidal at phy.duke.edu>
|
||||
|
||||
"""
|
||||
Classes and functions dealing with rpm package representations.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from . import misc
|
||||
import re
|
||||
import fnmatch
|
||||
|
||||
def buildPkgRefDict(pkgs, casematch=True):
|
||||
"""take a list of pkg objects and return a dict the contains all the possible
|
||||
naming conventions for them eg: for (name,i386,0,1,1)
|
||||
dict[name] = (name, i386, 0, 1, 1)
|
||||
dict[name.i386] = (name, i386, 0, 1, 1)
|
||||
dict[name-1-1.i386] = (name, i386, 0, 1, 1)
|
||||
dict[name-1] = (name, i386, 0, 1, 1)
|
||||
dict[name-1-1] = (name, i386, 0, 1, 1)
|
||||
dict[0:name-1-1.i386] = (name, i386, 0, 1, 1)
|
||||
dict[name-0:1-1.i386] = (name, i386, 0, 1, 1)
|
||||
"""
|
||||
pkgdict = {}
|
||||
for pkg in pkgs:
|
||||
(n, a, e, v, r) = pkg.pkgtup
|
||||
if not casematch:
|
||||
n = n.lower()
|
||||
a = a.lower()
|
||||
e = e.lower()
|
||||
v = v.lower()
|
||||
r = r.lower()
|
||||
name = n
|
||||
nameArch = '%s.%s' % (n, a)
|
||||
nameVerRelArch = '%s-%s-%s.%s' % (n, v, r, a)
|
||||
nameVer = '%s-%s' % (n, v)
|
||||
nameVerRel = '%s-%s-%s' % (n, v, r)
|
||||
envra = '%s:%s-%s-%s.%s' % (e, n, v, r, a)
|
||||
nevra = '%s-%s:%s-%s.%s' % (n, e, v, r, a)
|
||||
for item in [name, nameArch, nameVerRelArch, nameVer, nameVerRel, envra, nevra]:
|
||||
if item not in pkgdict:
|
||||
pkgdict[item] = []
|
||||
pkgdict[item].append(pkg)
|
||||
|
||||
return pkgdict
|
||||
|
||||
def parsePackages(pkgs, usercommands, casematch=0):
|
||||
"""matches up the user request versus a pkg list:
|
||||
for installs/updates available pkgs should be the 'others list'
|
||||
for removes it should be the installed list of pkgs
|
||||
takes an optional casematch option to determine if case should be matched
|
||||
exactly. Defaults to not matching."""
|
||||
|
||||
pkgdict = buildPkgRefDict(pkgs, bool(casematch))
|
||||
exactmatch = set()
|
||||
matched = set()
|
||||
unmatched = set()
|
||||
for command in usercommands:
|
||||
if not casematch:
|
||||
command = command.lower()
|
||||
if command in pkgdict:
|
||||
exactmatch.update(pkgdict[command])
|
||||
del pkgdict[command]
|
||||
else:
|
||||
# anything we couldn't find a match for
|
||||
# could mean it's not there, could mean it's a wildcard
|
||||
if misc.re_glob(command):
|
||||
trylist = pkgdict.keys()
|
||||
# command and pkgdict are already lowered if not casematch
|
||||
# so case sensitive is always fine
|
||||
restring = fnmatch.translate(command)
|
||||
regex = re.compile(restring)
|
||||
foundit = 0
|
||||
for item in trylist:
|
||||
if regex.match(item):
|
||||
matched.update(pkgdict[item])
|
||||
del pkgdict[item]
|
||||
foundit = 1
|
||||
|
||||
if not foundit:
|
||||
unmatched.add(command)
|
||||
|
||||
else:
|
||||
unmatched.add(command)
|
||||
|
||||
return exactmatch, matched, unmatched
|
|
@ -1,369 +0,0 @@
|
|||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from . import misc
|
||||
import dnf.i18n
|
||||
import dnf.pycomp
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
import rpm
|
||||
|
||||
logger = logging.getLogger('dnf')
|
||||
|
||||
# For returnPackages(patterns=)
|
||||
flags = {"GT": rpm.RPMSENSE_GREATER,
|
||||
"GE": rpm.RPMSENSE_EQUAL | rpm.RPMSENSE_GREATER,
|
||||
"LT": rpm.RPMSENSE_LESS,
|
||||
"LE": rpm.RPMSENSE_LESS | rpm.RPMSENSE_EQUAL,
|
||||
"EQ": rpm.RPMSENSE_EQUAL,
|
||||
None: 0 }
|
||||
|
||||
|
||||
def _open_no_umask(*args):
|
||||
""" Annoying people like to set umask's for root, which screws everything
|
||||
up for user readable stuff. """
|
||||
oumask = os.umask(0o22)
|
||||
try:
|
||||
if dnf.pycomp.PY3:
|
||||
ret = open(*args, encoding='utf-8')
|
||||
else:
|
||||
ret = open(*args)
|
||||
finally:
|
||||
os.umask(oumask)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def _makedirs_no_umask(*args):
|
||||
""" Annoying people like to set umask's for root, which screws everything
|
||||
up for user readable stuff. """
|
||||
oumask = os.umask(0o22)
|
||||
try:
|
||||
ret = os.makedirs(*args)
|
||||
finally:
|
||||
os.umask(oumask)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def _iopen(*args):
|
||||
""" IOError wrapper BS for open, stupid exceptions. """
|
||||
try:
|
||||
if dnf.pycomp.PY3:
|
||||
ret = open(*args, encoding='utf-8')
|
||||
else:
|
||||
ret = open(*args)
|
||||
except IOError as e:
|
||||
return None, e
|
||||
return ret, None
|
||||
|
||||
|
||||
def _sanitize(path):
|
||||
return path.replace('/', '').replace('~', '')
|
||||
|
||||
|
||||
class AdditionalPkgDB(object):
|
||||
""" Accesses additional package data rpmdb is unable to store.
|
||||
|
||||
Previously known as yumdb.
|
||||
"""
|
||||
# dir: <persistdir>/yumdb
|
||||
# pkgs stored in name[0]/pkgid-name-ver-rel-arch dirs
|
||||
# dirs have files per piece of info we're keeping, e.g. repoid, install
|
||||
# reason, status, etc.
|
||||
|
||||
def __init__(self, db_path):
|
||||
self.conf = misc.GenericHolder()
|
||||
self.conf.db_path = db_path
|
||||
self.conf.writable = False
|
||||
|
||||
self._packages = {} # pkgid = dir
|
||||
if not os.path.exists(self.conf.db_path):
|
||||
try:
|
||||
_makedirs_no_umask(self.conf.db_path)
|
||||
self.conf.writable = True
|
||||
except (IOError, OSError) as e:
|
||||
# some sort of useful thing here? A warning?
|
||||
pass
|
||||
else:
|
||||
if os.access(self.conf.db_path, os.W_OK):
|
||||
self.conf.writable = True
|
||||
self.yumdb_cache = {'attr' : {}}
|
||||
|
||||
def _get_dir_name(self, pkgtup, pkgid):
|
||||
if pkgid in self._packages:
|
||||
return self._packages[pkgid]
|
||||
(n, a, e, v,r) = pkgtup
|
||||
n = _sanitize(n)
|
||||
str_pkgid = pkgid
|
||||
if pkgid is None:
|
||||
str_pkgid = '<nopkgid>'
|
||||
elif dnf.pycomp.is_py2str_py3bytes(pkgid):
|
||||
str_pkgid = pkgid.decode()
|
||||
thisdir = '%s/%s/%s-%s-%s-%s-%s' % (self.conf.db_path,
|
||||
n[0], str_pkgid, n, v, r, a)
|
||||
self._packages[pkgid] = thisdir
|
||||
return thisdir
|
||||
|
||||
def get_package(self, po=None, pkgtup=None, pkgid=None):
|
||||
"""Return an RPMDBAdditionalDataPackage Object for this package"""
|
||||
if po:
|
||||
thisdir = self._get_dir_name(po.pkgtup, po._pkgid)
|
||||
elif pkgtup and pkgid:
|
||||
thisdir = self._get_dir_name(pkgtup, pkgid)
|
||||
else:
|
||||
raise ValueError("Missing arguments.")
|
||||
|
||||
return RPMDBAdditionalDataPackage(self.conf, thisdir,
|
||||
yumdb_cache=self.yumdb_cache)
|
||||
|
||||
|
||||
class RPMDBAdditionalDataPackage(object):
|
||||
|
||||
# We do auto hardlink on these attributes
|
||||
_auto_hardlink_attrs = set(['checksum_type', 'reason',
|
||||
'installed_by', 'changed_by',
|
||||
'from_repo', 'from_repo_revision',
|
||||
'from_repo_timestamp', 'releasever',
|
||||
'command_line'])
|
||||
|
||||
def __init__(self, conf, pkgdir, yumdb_cache=None):
|
||||
self._conf = conf
|
||||
self._mydir = pkgdir
|
||||
|
||||
self._read_cached_data = {}
|
||||
|
||||
# 'from_repo' is the most often requested piece of data, and is often
|
||||
# the same for a huge number of packages. So we use hardlinks to share
|
||||
# data, and try to optimize for that.
|
||||
# It's useful for other keys too (installed_by/changed_by/reason/etc.)
|
||||
# so we make it generic.
|
||||
self._yumdb_cache = yumdb_cache
|
||||
|
||||
def _auto_cache(self, attr, value, fn, info=None):
|
||||
""" Create caches for the attr. We have a per. object read cache so at
|
||||
worst we only have to read a single attr once. Then we expand that
|
||||
with (dev, ino) cache, so hardlink data can be read once for
|
||||
multiple packages. """
|
||||
self._read_cached_data[attr] = value
|
||||
if self._yumdb_cache is None:
|
||||
return
|
||||
|
||||
nlinks = 1
|
||||
if info is not None:
|
||||
nlinks = info.st_nlink
|
||||
if nlinks <= 1 and attr not in self._auto_hardlink_attrs:
|
||||
return
|
||||
|
||||
if value in self._yumdb_cache['attr']:
|
||||
sinfo = self._yumdb_cache['attr'][value][1]
|
||||
if info is not None and sinfo is not None:
|
||||
if (info.st_dev, info.st_ino) == (sinfo.st_dev, sinfo.st_ino):
|
||||
self._yumdb_cache['attr'][value][2].add(fn)
|
||||
self._yumdb_cache[fn] = value
|
||||
return
|
||||
if self._yumdb_cache['attr'][value][0] >= nlinks:
|
||||
# We already have a better cache file.
|
||||
return
|
||||
|
||||
self._yumdb_cache['attr'][value] = (nlinks, info, set([fn]))
|
||||
self._yumdb_cache[fn] = value
|
||||
|
||||
def _unlink_yumdb_cache(self, fn):
|
||||
""" Remove old values from the link cache. """
|
||||
if fn in self._yumdb_cache:
|
||||
ovalue = self._yumdb_cache[fn]
|
||||
if ovalue in self._yumdb_cache['attr']:
|
||||
self._yumdb_cache['attr'][ovalue][2].discard(fn)
|
||||
if not self._yumdb_cache['attr'][ovalue][2]:
|
||||
del self._yumdb_cache['attr'][ovalue]
|
||||
del self._yumdb_cache[fn]
|
||||
|
||||
def _link_yumdb_cache(self, fn, value):
|
||||
""" If we have a matching yumdb cache, link() to it instead of having
|
||||
to open()+write(). """
|
||||
if self._yumdb_cache is None:
|
||||
return False
|
||||
|
||||
self._unlink_yumdb_cache(fn)
|
||||
|
||||
if value not in self._yumdb_cache['attr']:
|
||||
return False
|
||||
|
||||
assert self._yumdb_cache['attr'][value][2]
|
||||
try:
|
||||
lfn = next(iter(self._yumdb_cache['attr'][value][2]))
|
||||
misc.unlink_f(fn + '.tmp')
|
||||
os.link(lfn, fn + '.tmp')
|
||||
os.rename(fn + '.tmp', fn)
|
||||
except:
|
||||
return False
|
||||
|
||||
self._yumdb_cache['attr'][value][2].add(fn)
|
||||
self._yumdb_cache[fn] = value
|
||||
|
||||
return True
|
||||
|
||||
def _attr2fn(self, attr):
|
||||
""" Given an attribute, return the filename. """
|
||||
return os.path.normpath(self._mydir + '/' + attr)
|
||||
|
||||
def _write(self, attr, value):
|
||||
# check for self._conf.writable before going on?
|
||||
if not os.path.exists(self._mydir):
|
||||
_makedirs_no_umask(self._mydir)
|
||||
|
||||
attr = _sanitize(attr)
|
||||
if attr in self._read_cached_data:
|
||||
del self._read_cached_data[attr]
|
||||
fn = self._attr2fn(attr)
|
||||
|
||||
if attr.endswith('.tmp'):
|
||||
raise AttributeError("Cannot set attribute %s on %s" % (attr, self))
|
||||
|
||||
# Auto hardlink some of the attrs...
|
||||
if self._link_yumdb_cache(fn, value):
|
||||
return
|
||||
|
||||
# Default write()+rename()... hardlink -c can still help.
|
||||
misc.unlink_f(fn + '.tmp')
|
||||
|
||||
fo = _open_no_umask(fn + '.tmp', 'w')
|
||||
try:
|
||||
dnf.pycomp.write_to_file(fo, value)
|
||||
except (OSError, IOError) as e:
|
||||
logger.error("Cannot set attribute %s on %s due to: %s" % (attr, self, e.strerror))
|
||||
|
||||
fo.flush()
|
||||
fo.close()
|
||||
del fo
|
||||
os.rename(fn + '.tmp', fn) # even works on ext4 now!:o
|
||||
|
||||
self._auto_cache(attr, value, fn)
|
||||
|
||||
def _read(self, attr):
|
||||
attr = _sanitize(attr)
|
||||
|
||||
if attr in self._read_cached_data:
|
||||
return self._read_cached_data[attr]
|
||||
fn = self._attr2fn(attr)
|
||||
|
||||
if attr.endswith('.tmp'):
|
||||
raise AttributeError("%s has no attribute %s" % (self, attr))
|
||||
|
||||
info = misc.stat_f(fn, ignore_EACCES=True)
|
||||
if info is None:
|
||||
raise AttributeError("%s has no attribute %s" % (self, attr))
|
||||
|
||||
if info.st_nlink > 1 and self._yumdb_cache is not None:
|
||||
key = (info.st_dev, info.st_ino)
|
||||
if key in self._yumdb_cache:
|
||||
self._auto_cache(attr, self._yumdb_cache[key], fn, info)
|
||||
return self._read_cached_data[attr]
|
||||
|
||||
fo, e = _iopen(fn)
|
||||
if fo is None:
|
||||
msg = '{}: {}'.format(type(e).__name__, dnf.i18n.ucd(e))
|
||||
msg = "For {} cannot open attribute {} due to {}".format(self, attr, msg)
|
||||
logger.debug(msg)
|
||||
raise AttributeError(msg)
|
||||
|
||||
try:
|
||||
value = fo.read()
|
||||
except Exception as e:
|
||||
msg = '{}: {}'.format(type(e).__name__, dnf.i18n.ucd(e))
|
||||
msg = "For {} cannot open attribute {} due to {}".format(self, attr, msg)
|
||||
logger.debug(msg)
|
||||
raise AttributeError(msg)
|
||||
finally:
|
||||
fo.close()
|
||||
del fo
|
||||
|
||||
if info.st_nlink > 1 and self._yumdb_cache is not None:
|
||||
self._yumdb_cache[key] = value
|
||||
self._auto_cache(attr, value, fn, info)
|
||||
|
||||
return value
|
||||
|
||||
def _delete(self, attr):
|
||||
"""remove the attribute file"""
|
||||
|
||||
attr = _sanitize(attr)
|
||||
fn = self._attr2fn(attr)
|
||||
if attr in self._read_cached_data:
|
||||
del self._read_cached_data[attr]
|
||||
self._unlink_yumdb_cache(fn)
|
||||
if os.path.exists(fn):
|
||||
try:
|
||||
os.unlink(fn)
|
||||
except (IOError, OSError) as e:
|
||||
logger.error("Cannot delete attribute %s on %s at %s due to: %s" % (attr, self, fn, e.strerror))
|
||||
|
||||
def __getattr__(self, attr):
|
||||
try:
|
||||
res = self._read(attr)
|
||||
except AttributeError:
|
||||
return None
|
||||
return res
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
if not attr.startswith('_'):
|
||||
if value is not None:
|
||||
self._write(attr, value)
|
||||
else:
|
||||
object.__setattr__(self, attr, value)
|
||||
|
||||
def __delattr__(self, attr):
|
||||
if not attr.startswith('_'):
|
||||
self._delete(attr)
|
||||
else:
|
||||
object.__delattr__(self, attr)
|
||||
|
||||
def __contains__(self, attr):
|
||||
# This is faster than __iter__ and it makes things fail in a much more
|
||||
# obvious way in weird FS corruption cases like: BZ 593436
|
||||
x = self.get(attr)
|
||||
return x is not None
|
||||
|
||||
def __iter__(self, show_hidden=False):
|
||||
for item in self._read_cached_data:
|
||||
yield item
|
||||
for item in glob.glob(self._mydir + '/*'):
|
||||
item = item[(len(self._mydir) + 1):]
|
||||
if item in self._read_cached_data:
|
||||
continue
|
||||
if not show_hidden and item.endswith('.tmp'):
|
||||
continue
|
||||
yield item
|
||||
|
||||
def clean(self):
|
||||
# purge out everything
|
||||
for item in self.__iter__(show_hidden=True):
|
||||
self._delete(item)
|
||||
try:
|
||||
os.rmdir(self._mydir)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def get(self, attr, default=None):
|
||||
"""retrieve an add'l data obj"""
|
||||
|
||||
try:
|
||||
res = self._read(attr)
|
||||
except AttributeError:
|
||||
return default
|
||||
return res
|
|
@ -382,17 +382,8 @@ class RPMTransaction(object):
|
|||
|
||||
if state is not None:
|
||||
self._scriptout()
|
||||
|
||||
# Note that we are currently inside the chroot, which makes
|
||||
# sqlite panic when it tries to open it's journal file.
|
||||
# So let's have some "fun" and workaround that:
|
||||
_do_chroot = False
|
||||
if _do_chroot and self.base.conf.installroot != '/':
|
||||
os.chroot(".")
|
||||
pid = self.base.history.pkg2pid(pkg)
|
||||
self.base.history.trans_data_pid_end(pid, state)
|
||||
if _do_chroot and self.base.conf.installroot != '/':
|
||||
os.chroot(self.base.conf.installroot)
|
||||
else:
|
||||
self._scriptout()
|
||||
|
||||
|
|
|
@ -1,200 +0,0 @@
|
|||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of version 2 of the GNU General Public License
|
||||
# as published by the Free Software Foundation
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
# Copyright 2005 Duke University
|
||||
|
||||
"""
|
||||
utility functions to handle differences in pysqlite versions
|
||||
These are from Wichert Akkerman <wichert@deephackmode.org>'s python-dhm
|
||||
http://www.wiggy.net/code/python-dhm
|
||||
"""
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
try:
|
||||
import sqlite3 as sqlite
|
||||
except ImportError:
|
||||
import sqlite
|
||||
|
||||
class TokenizeError(Exception):
|
||||
"""Tokenizer error class"""
|
||||
pass
|
||||
|
||||
def Tokenize(str, whitespace=" \t\r\n", quotes="\"", escapes="\\"):
|
||||
"""String tokenizer
|
||||
|
||||
This function tokenizes a string while taking quotation and
|
||||
escaping into account.
|
||||
|
||||
>>> import dhm.strtools
|
||||
>>> dhm.strtools.Tokenize("this is a test")
|
||||
['this', 'is', 'a', 'test']
|
||||
>>> dhm.strtools.Tokenize("this \"is a\" test")
|
||||
['this', 'is a', 'test']
|
||||
>>> dhm.strtools.Tokenize("this \\\"is\\\" a test")
|
||||
['this', '"is"', 'a', 'test']
|
||||
>>> dhm.strtools.Tokenize("this \"is a test")
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in ?
|
||||
File "/usr/local/lib/python2.2/site-packages/dhm/strtools.py", line 80, in Tokenize
|
||||
raise TokenizeError, "Unexpected end of string in quoted text"
|
||||
dhm.strtools.TokenizeError: Unexecpted end of string in quoted text
|
||||
|
||||
@param str: string to tokenize
|
||||
@type str: string
|
||||
@param whitespace: whitespace characters separating tokens
|
||||
@type whitespace: string
|
||||
@param quotes: legal quoting characters
|
||||
@type quotes: string
|
||||
@param escapes: characters which can escape quoting characters
|
||||
@type escapes: string
|
||||
@return: list of tokens
|
||||
@rtype: sequence of strings
|
||||
"""
|
||||
(buffer, tokens, curtoken, quote)=(str, [], None, None)
|
||||
|
||||
try:
|
||||
while buffer:
|
||||
if buffer[0]==quote:
|
||||
quote=None
|
||||
elif (quote==None) and (buffer[0] in quotes):
|
||||
quote=buffer[0]
|
||||
elif buffer[0] in whitespace:
|
||||
if quote!=None:
|
||||
curtoken+=buffer[0]
|
||||
else:
|
||||
tokens.append(curtoken)
|
||||
curtoken=None
|
||||
while buffer[1] in whitespace:
|
||||
buffer=buffer[1:]
|
||||
elif buffer[0] in escapes:
|
||||
if curtoken==None:
|
||||
curtoken=buffer[1]
|
||||
else:
|
||||
curtoken+=buffer[1]
|
||||
buffer=buffer[1:]
|
||||
else:
|
||||
if curtoken==None:
|
||||
curtoken=buffer[0]
|
||||
else:
|
||||
curtoken+=buffer[0]
|
||||
|
||||
buffer=buffer[1:]
|
||||
except IndexError:
|
||||
raise TokenizeError("Unexpected end of string")
|
||||
|
||||
if quote:
|
||||
raise TokenizeError("Unexpected end of string in quoted text")
|
||||
|
||||
if curtoken!=None:
|
||||
tokens.append(curtoken)
|
||||
|
||||
return tokens
|
||||
|
||||
|
||||
def QmarkToPyformat(query, params):
|
||||
"""Convert from qmark to pyformat parameter style.
|
||||
|
||||
The python DB-API 2.0 specifies four different possible parameter
|
||||
styles that can be used by drivers. This function converts from the
|
||||
qmark style to pyformat style.
|
||||
|
||||
@param query: SQL query to transform
|
||||
@type query: string
|
||||
@param params: arguments to query
|
||||
@type params: sequence of strings
|
||||
@return: converted query and parameters
|
||||
@rtype: tuple with the new command and a dictionary of arguments
|
||||
"""
|
||||
tokens=Tokenize(query, quotes="'")
|
||||
output=[]
|
||||
count=1
|
||||
for token in tokens:
|
||||
if token.endswith("?"):
|
||||
output.append(token[:-1] + "%%(param%d)s" % count)
|
||||
count+=1
|
||||
elif token.endswith("?,") or token.endswith("?)"):
|
||||
ntoken = token[:-2] + "%%(param%d)s" % count
|
||||
ntoken += token[-1]
|
||||
output.append(ntoken)
|
||||
count+=1
|
||||
else:
|
||||
output.append(token)
|
||||
|
||||
dict={}
|
||||
count=1
|
||||
for param in params:
|
||||
dict["param%d" % count]=param
|
||||
count+=1
|
||||
|
||||
return (" ".join(output), dict)
|
||||
|
||||
|
||||
def executeSQLPyFormat(cursor, query, params=None):
|
||||
"""
|
||||
Execute a python < 2.5 (external sqlite module) style query.
|
||||
|
||||
@param cursor: A sqlite cursor
|
||||
@param query: The query to execute
|
||||
@param params: An optional list of parameters to the query
|
||||
"""
|
||||
if params is None:
|
||||
return cursor.execute(query)
|
||||
|
||||
# Leading whitespace confuses QmarkToPyformat()
|
||||
query = query.strip()
|
||||
(q, p) = QmarkToPyformat(query, params)
|
||||
return cursor.execute(q, p)
|
||||
|
||||
def executeSQLQmark(cursor, query, params=None):
|
||||
"""
|
||||
Execute a python 2.5 (sqlite3) style query.
|
||||
|
||||
@param cursor: A sqlite cursor
|
||||
@param query: The query to execute
|
||||
@param params: An optional list of parameters to the query
|
||||
"""
|
||||
if params is None:
|
||||
return cursor.execute(query)
|
||||
|
||||
return cursor.execute(query, params)
|
||||
|
||||
if sqlite.version_info[0] > 1:
|
||||
executeSQL = executeSQLQmark
|
||||
else:
|
||||
executeSQL = executeSQLPyFormat
|
||||
|
||||
|
||||
def sql_esc(pattern):
|
||||
""" Apply SQLite escaping, if needed. Returns pattern and esc. """
|
||||
esc = ''
|
||||
if "_" in pattern or "%" in pattern:
|
||||
esc = ' ESCAPE "!"'
|
||||
pattern = pattern.replace("!", "!!")
|
||||
pattern = pattern.replace("%", "!%")
|
||||
pattern = pattern.replace("_", "!_")
|
||||
return (pattern, esc)
|
||||
|
||||
def sql_esc_glob(patterns):
|
||||
""" Converts patterns to SQL LIKE format, if required (or gives up if
|
||||
not possible). """
|
||||
ret = []
|
||||
for pattern in patterns:
|
||||
if '[' in pattern: # LIKE only has % and _, so [abc] can't be done.
|
||||
return [] # So Load everything
|
||||
|
||||
# Convert to SQL LIKE format
|
||||
(pattern, esc) = sql_esc(pattern)
|
||||
pattern = pattern.replace("*", "%")
|
||||
pattern = pattern.replace("?", "_")
|
||||
ret.append((pattern, esc))
|
||||
return ret
|
|
@ -6,6 +6,10 @@ What will not be tested:
|
|||
* rpm transactions themselves (tested in RPM)
|
||||
* depsolving (the core depsolving algorithms are tested in libsolv, correctly
|
||||
setting up libsolv is hawkey's job)
|
||||
* Swdb functions - tested in libdnf
|
||||
|
||||
Missing
|
||||
* SwdbInterface tests
|
||||
|
||||
== The repos/ directory ==
|
||||
|
||||
|
|
|
@ -28,9 +28,9 @@ class AutoRemoveCommandTest(support.ResultTestCase):
|
|||
base = support.MockBase()
|
||||
q = base.sack.query()
|
||||
pkgs = list(q.filter(name='librita')) + list(q.filter(name='pepper'))
|
||||
yumdb = base._yumdb
|
||||
history = base.history
|
||||
for pkg in pkgs:
|
||||
yumdb.get_package(pkg).reason = 'dep'
|
||||
history.mark_user_installed(pkg, True)
|
||||
|
||||
cli = base.mock_cli()
|
||||
cmd = autoremove.AutoremoveCommand(cli)
|
||||
|
@ -40,5 +40,7 @@ class AutoRemoveCommandTest(support.ResultTestCase):
|
|||
cmd.run()
|
||||
inst, rem = self.installed_removed(base)
|
||||
self.assertEmpty(inst)
|
||||
removed = ('librita-1-1.i686', 'librita-1-1.x86_64', 'pepper-20-0.x86_64')
|
||||
self.assertCountEqual((map(str, rem)), removed)
|
||||
removed = ('librita-1-1.i686',
|
||||
'librita-1-1.x86_64',
|
||||
'pepper-20-0.x86_64')
|
||||
self.assertCountEqual((map(str, pkgs)), removed)
|
||||
|
|
|
@ -61,9 +61,9 @@ class GroupCommandTest(support.TestCase):
|
|||
|
||||
def test_environment_list(self):
|
||||
env_inst, env_avail = self.cmd._environment_lists(['sugar*'])
|
||||
self.assertLength(env_inst, 1)
|
||||
self.assertLength(env_avail, 0)
|
||||
self.assertEqual(env_inst[0].name, 'Sugar Desktop Environment')
|
||||
self.assertLength(env_inst, 0)
|
||||
self.assertLength(env_avail, 1)
|
||||
self.assertEqual(env_avail[0].name, 'Sugar Desktop Environment')
|
||||
|
||||
def test_configure(self):
|
||||
support.command_configure(self.cmd, ['remove', 'crack'])
|
||||
|
@ -75,7 +75,10 @@ class GroupCommandTest(support.TestCase):
|
|||
class CompsQueryTest(support.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
(self.comps, self.prst) = support.mock_comps(True)
|
||||
self.base = support.MockBase()
|
||||
self.history = self.base.history
|
||||
self.comps = support.mock_comps(self.history, True)
|
||||
self.prst = self.history.group
|
||||
|
||||
def test_all(self):
|
||||
status_all = CompsQuery.AVAILABLE | CompsQuery.INSTALLED
|
||||
|
@ -96,6 +99,10 @@ class CompsQueryTest(support.TestCase):
|
|||
def test_installed(self):
|
||||
q = CompsQuery(self.comps, self.prst, CompsQuery.GROUPS,
|
||||
CompsQuery.INSTALLED)
|
||||
self.base.read_mock_comps(False)
|
||||
grp = self.base.comps.group_by_pattern('somerset')
|
||||
self.base.group_install(grp.id, ('mandatory',))
|
||||
res = q.get('somerset')
|
||||
self.assertEmpty(res.environments)
|
||||
self.assertCountEqual(res.groups, ('somerset',))
|
||||
grp_ids = [grp.name_id for grp in res.groups]
|
||||
self.assertCountEqual(grp_ids, ('somerset',))
|
||||
|
|
|
@ -19,6 +19,7 @@ from __future__ import absolute_import
|
|||
from __future__ import unicode_literals
|
||||
from tests import support
|
||||
from tests.support import mock
|
||||
from dnf.db.types import SwdbReason
|
||||
|
||||
import dnf.cli.output
|
||||
import dnf.const
|
||||
|
@ -59,7 +60,7 @@ class OutputFunctionsTest(support.TestCase):
|
|||
def test_make_lists(self):
|
||||
TSI = dnf.transaction.TransactionItem
|
||||
|
||||
goal = mock.Mock(get_reason=lambda x: 'user')
|
||||
goal = mock.Mock(get_reason=lambda x: SwdbReason.USER)
|
||||
ts = dnf.transaction.Transaction()
|
||||
ts.add_install('pepper-3', [])
|
||||
ts.add_install('pepper-2', [])
|
||||
|
|
153
tests/support.py
153
tests/support.py
|
@ -33,7 +33,6 @@ import dnf.persistor
|
|||
import dnf.pycomp
|
||||
import dnf.repo
|
||||
import dnf.sack
|
||||
import dnf.yum.rpmsack
|
||||
import hawkey
|
||||
import hawkey.test
|
||||
import itertools
|
||||
|
@ -159,24 +158,32 @@ class Base(dnf.Base):
|
|||
|
||||
# mock objects
|
||||
|
||||
def mock_comps(seed_persistor):
|
||||
|
||||
def mock_comps(history, seed_persistor):
|
||||
comps = dnf.comps.Comps()
|
||||
comps._add_from_xml_filename(COMPS_PATH)
|
||||
|
||||
persistor = MockGroupPersistor()
|
||||
persistor = history.group
|
||||
if seed_persistor:
|
||||
p_env = persistor.environment('sugar-desktop-environment')
|
||||
p_env.grp_types = dnf.comps.ALL_TYPES
|
||||
p_env.pkg_types = dnf.comps.ALL_TYPES
|
||||
p_env.full_list.extend(('Peppers', 'somerset'))
|
||||
p_pep = persistor.group('Peppers')
|
||||
p_pep.pkg_types = dnf.comps.MANDATORY
|
||||
p_pep.full_list.extend(('hole', 'lotus'))
|
||||
p_som = persistor.group('somerset')
|
||||
p_som.pkg_types = dnf.comps.MANDATORY
|
||||
p_som.full_list.extend(('pepper', 'trampoline', 'lotus'))
|
||||
name = 'Peppers'
|
||||
pkg_types = dnf.comps.MANDATORY
|
||||
p_pep = persistor.new_group(name, name, name, False, pkg_types)
|
||||
persistor.add_group(p_pep)
|
||||
p_pep.add_package(['hole', 'lotus'])
|
||||
|
||||
return comps, persistor
|
||||
name = 'somerset'
|
||||
pkg_types = dnf.comps.MANDATORY
|
||||
p_som = persistor.new_group(name, name, name, False, pkg_types)
|
||||
persistor.add_group(p_som)
|
||||
p_som.add_package(['pepper', 'trampoline', 'lotus'])
|
||||
|
||||
name = 'sugar-desktop-environment'
|
||||
grp_types = dnf.comps.ALL_TYPES
|
||||
pkg_types = dnf.comps.ALL_TYPES
|
||||
p_env = persistor.new_env(name, name, name, pkg_types, grp_types)
|
||||
persistor.add_env(p_env)
|
||||
p_env.add_group(['Peppers', 'somerset'])
|
||||
return comps
|
||||
|
||||
|
||||
def mock_logger():
|
||||
|
@ -201,8 +208,8 @@ class _BaseStubMixin(object):
|
|||
self._repos.add(repo)
|
||||
|
||||
self._repo_persistor = FakePersistor()
|
||||
self._priv_yumdb = MockYumDB()
|
||||
self._ds_callback = mock.Mock()
|
||||
self._history = None
|
||||
|
||||
def add_test_dir_repo(self, id_, cachedir):
|
||||
"""Add a repository located in a directory in the tests."""
|
||||
|
@ -211,17 +218,23 @@ class _BaseStubMixin(object):
|
|||
self.repos.add(repo)
|
||||
return repo
|
||||
|
||||
@property
|
||||
def history(self):
|
||||
if self._history:
|
||||
return self._history
|
||||
else:
|
||||
self._history = super(_BaseStubMixin, self).history
|
||||
self._history.reset_db()
|
||||
return self._history
|
||||
|
||||
@property
|
||||
def sack(self):
|
||||
if self._sack:
|
||||
return self._sack
|
||||
return self.init_sack()
|
||||
|
||||
def _activate_group_persistor(self):
|
||||
return MockGroupPersistor()
|
||||
|
||||
def _build_comps_solver(self):
|
||||
return dnf.comps.Solver(self._group_persistor, self._comps,
|
||||
return dnf.comps.Solver(self.history.group, self._comps,
|
||||
REASONS.get)
|
||||
|
||||
def _activate_persistor(self):
|
||||
|
@ -255,7 +268,7 @@ class _BaseStubMixin(object):
|
|||
demands=dnf.cli.demand.DemandSheet())
|
||||
|
||||
def read_mock_comps(self, seed_persistor=True):
|
||||
self._comps, self._group_persistor = mock_comps(seed_persistor)
|
||||
self._comps = mock_comps(self.history, seed_persistor)
|
||||
return self._comps
|
||||
|
||||
def read_all_repos(self, opts=None):
|
||||
|
@ -275,6 +288,10 @@ class BaseCliStub(_BaseStubMixin, dnf.cli.cli.BaseCli):
|
|||
self.output.term = MockTerminal()
|
||||
|
||||
|
||||
class DemandsStub(object):
|
||||
pass
|
||||
|
||||
|
||||
class CliStub(object):
|
||||
"""A class mocking `dnf.cli.Cli`."""
|
||||
|
||||
|
@ -293,41 +310,6 @@ class CliStub(object):
|
|||
"""Register given *command*."""
|
||||
self.cli_commands.update({alias: command for alias in command.aliases})
|
||||
|
||||
|
||||
class DemandsStub(object):
|
||||
pass
|
||||
|
||||
|
||||
class HistoryStub(dnf.yum.history.YumHistory):
|
||||
"""Stub of dnf.yum.history.YumHistory for easier testing."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize a stub instance."""
|
||||
self.old_data_pkgs = {}
|
||||
|
||||
def _old_data_pkgs(self, tid, sort=True):
|
||||
"""Get packages of a transaction."""
|
||||
if sort:
|
||||
raise NotImplementedError('sorting not implemented yet')
|
||||
return self.old_data_pkgs.get(tid, ())[:]
|
||||
|
||||
def close(self):
|
||||
"""Close the history."""
|
||||
pass
|
||||
|
||||
def old(self, tids=[], limit=None, *_args, **_kwargs):
|
||||
"""Get transactions with given IDs."""
|
||||
create = lambda tid: dnf.yum.history.YumHistoryTransaction(self,
|
||||
(int(tid), 0, '0:685cc4ac4ce31b9190df1604a96a3c62a3100c35',
|
||||
1, '1:685cc4ac4ce31b9190df1604a96a3c62a3100c36', 0, 0))
|
||||
|
||||
sorted_all_tids = sorted(self.old_data_pkgs.keys(), reverse=True)
|
||||
|
||||
trxs = (create(tid) for tid in tids or sorted_all_tids
|
||||
if tid in self.old_data_pkgs)
|
||||
limited = trxs if limit is None else itertools.islice(trxs, limit)
|
||||
return tuple(limited)
|
||||
|
||||
class MockOutput(object):
|
||||
def __init__(self):
|
||||
self.term = MockTerminal()
|
||||
|
@ -429,57 +411,6 @@ class MockBase(_BaseStubMixin, Base):
|
|||
def mock_sack(*extra_repos):
|
||||
return MockBase(*extra_repos).sack
|
||||
|
||||
class MockYumDB(mock.Mock):
|
||||
def __init__(self):
|
||||
super(mock.Mock, self).__init__()
|
||||
self.db = {}
|
||||
|
||||
def get_package(self, pkg):
|
||||
return self.db.setdefault(str(pkg), mock.Mock())
|
||||
|
||||
def assertLength(self, length):
|
||||
assert len(self.db) == length
|
||||
|
||||
class RPMDBAdditionalDataPackageStub(dnf.yum.rpmsack.RPMDBAdditionalDataPackage):
|
||||
|
||||
"""A class mocking `dnf.yum.rpmsack.RPMDBAdditionalDataPackage`."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the data."""
|
||||
super(RPMDBAdditionalDataPackageStub, self).__init__(None, None, None)
|
||||
|
||||
def __iter__(self, show_hidden=False):
|
||||
"""Return a new iterator over the data."""
|
||||
for item in self._read_cached_data:
|
||||
yield item
|
||||
|
||||
def _attr2fn(self, attribute):
|
||||
"""Convert given *attribute* to a filename."""
|
||||
raise NotImplementedError('the method is not supported')
|
||||
|
||||
def _delete(self, attribute):
|
||||
"""Delete the *attribute* value."""
|
||||
try:
|
||||
del self._read_cached_data[attribute]
|
||||
except KeyError:
|
||||
raise AttributeError("Cannot delete attribute %s on %s " %
|
||||
(attribute, self))
|
||||
|
||||
def _read(self, attribute):
|
||||
"""Read the *attribute* value."""
|
||||
if attribute in self._read_cached_data:
|
||||
return self._read_cached_data[attribute]
|
||||
raise AttributeError("%s has no attribute %s" % (self, attribute))
|
||||
|
||||
def _write(self, attribute, value):
|
||||
"""Write the *attribute* value."""
|
||||
self._auto_cache(attribute, value, None)
|
||||
|
||||
def clean(self):
|
||||
"""Purge out everything."""
|
||||
for item in self.__iter__(show_hidden=True):
|
||||
self._delete(item)
|
||||
|
||||
|
||||
class FakeConf(dnf.conf.Conf):
|
||||
def __init__(self, **kwargs):
|
||||
|
@ -517,7 +448,8 @@ class FakeConf(dnf.conf.Conf):
|
|||
('ip_resolve', None),
|
||||
('multilib_policy', 'best'),
|
||||
('obsoletes', True),
|
||||
('persistdir', '/should-not-exist-bad-test/persist'),
|
||||
('persistdir', '/tmp/swdb/'),
|
||||
('transformdb', False),
|
||||
('protected_packages', ["dnf"]),
|
||||
('plugins', False),
|
||||
('showdupesfromrepos', False),
|
||||
|
@ -540,13 +472,6 @@ class FakePersistor(object):
|
|||
def since_last_makecache(self):
|
||||
return None
|
||||
|
||||
class MockGroupPersistor(dnf.persistor.GroupPersistor):
|
||||
"""Empty persistor that doesn't need any I/O."""
|
||||
def __init__(self):
|
||||
self.db = self._empty_db()
|
||||
self._original = self._empty_db()
|
||||
|
||||
|
||||
# object matchers for asserts
|
||||
|
||||
class ObjectMatcher(object):
|
||||
|
|
|
@ -25,7 +25,7 @@ class APITest(support.TestCase):
|
|||
self.assertIsInstance(dnf.Base, type)
|
||||
|
||||
def test_conf(self):
|
||||
base = support.Base()
|
||||
base = support.MockBase()
|
||||
self.assertIsInstance(base.conf.installroot, unicode)
|
||||
# reasonable default
|
||||
self.assertEqual(base.conf.installroot, '/')
|
||||
|
|
|
@ -31,10 +31,27 @@ import hawkey
|
|||
import itertools
|
||||
import re
|
||||
import rpm
|
||||
from dnf.db.types import SwdbReason, SwdbPkg, SwdbPkgData
|
||||
|
||||
class BaseTest(support.TestCase):
|
||||
|
||||
@staticmethod
|
||||
def _setup_packages(history):
|
||||
pkg1 = SwdbPkg()
|
||||
pkg1.name = "pepper"
|
||||
pkg1.version = "20"
|
||||
pkg1.release = "0"
|
||||
pkg1.arch = "x86_64"
|
||||
pkg1.checksum_type = "sha256"
|
||||
pkg1.checksum_data = "0123456789abcd"
|
||||
pkg_data1 = SwdbPkgData()
|
||||
pid = history.add_package(pkg1)
|
||||
history.add_package_data(pid, pkg_data1)
|
||||
history.swdb.trans_data_beg(0, pid, SwdbReason.USER, "installed")
|
||||
|
||||
def test_instance(self):
|
||||
base = support.Base()
|
||||
base = support.MockBase()
|
||||
self.assertIsNotNone(base)
|
||||
|
||||
@mock.patch('dnf.rpm.detect_releasever', lambda x: 'x')
|
||||
@mock.patch('dnf.util.am_i_root', lambda: True)
|
||||
|
@ -79,45 +96,39 @@ class BaseTest(support.TestCase):
|
|||
|
||||
def test_iter_userinstalled(self):
|
||||
"""Test iter_userinstalled with a package installed by the user."""
|
||||
base = support.Base()
|
||||
base = support.MockBase()
|
||||
self._setup_packages(base.history)
|
||||
base._sack = support.mock_sack('main')
|
||||
base._priv_yumdb = support.MockYumDB()
|
||||
pkg, = base.sack.query().installed().filter(name='pepper')
|
||||
base._yumdb.get_package(pkg).get = {'reason': 'user', 'from_repo': 'main'}.get
|
||||
|
||||
iterator = base.iter_userinstalled()
|
||||
|
||||
self.assertEqual(next(iterator), pkg)
|
||||
self.assertRaises(StopIteration, next, iterator)
|
||||
base.history.set_repo(pkg, "main")
|
||||
base.history.mark_user_installed(pkg, True)
|
||||
self.assertEqual(base.history.user_installed(pkg), True)
|
||||
self.assertEqual(base.history.repo(pkg), 'main')
|
||||
|
||||
def test_iter_userinstalled_badfromrepo(self):
|
||||
"""Test iter_userinstalled with a package installed from a bad repository."""
|
||||
base = support.Base()
|
||||
base = support.MockBase()
|
||||
base._sack = support.mock_sack('main')
|
||||
base._priv_yumdb = support.MockYumDB()
|
||||
|
||||
self._setup_packages(base.history)
|
||||
pkg, = base.sack.query().installed().filter(name='pepper')
|
||||
base._yumdb.get_package(pkg).get = {'reason': 'user', 'from_repo': 'anakonda'}.get
|
||||
|
||||
iterator = base.iter_userinstalled()
|
||||
|
||||
self.assertRaises(StopIteration, next, iterator)
|
||||
base.history.set_repo(pkg, "anakonda")
|
||||
base.history.mark_user_installed(pkg, True)
|
||||
self.assertEqual(base.history.user_installed(pkg), False)
|
||||
self.assertEqual(base.history.repo(pkg), 'anakonda')
|
||||
|
||||
def test_iter_userinstalled_badreason(self):
|
||||
"""Test iter_userinstalled with a package installed for a wrong reason."""
|
||||
base = support.Base()
|
||||
base = support.MockBase()
|
||||
base._sack = support.mock_sack('main')
|
||||
base._priv_yumdb = support.MockYumDB()
|
||||
|
||||
self._setup_packages(base.history)
|
||||
pkg, = base.sack.query().installed().filter(name='pepper')
|
||||
base._yumdb.get_package(pkg).get = {'reason': 'dep', 'from_repo': 'main'}.get
|
||||
|
||||
iterator = base.iter_userinstalled()
|
||||
|
||||
self.assertRaises(StopIteration, next, iterator)
|
||||
base.history.mark_user_installed(pkg, False)
|
||||
base.history.set_repo(pkg, "main")
|
||||
self.assertEqual(base.history.user_installed(pkg), False)
|
||||
self.assertEqual(base.history.repo(pkg), 'main')
|
||||
|
||||
def test_translate_comps_pkg_types(self):
|
||||
base = support.Base()
|
||||
base = support.MockBase()
|
||||
num = base._translate_comps_pkg_types(('mandatory', 'optional'))
|
||||
self.assertEqual(num, 12)
|
||||
|
||||
|
@ -167,18 +178,22 @@ class VerifyTransactionTest(TestCase):
|
|||
removed_pkg = self.base.sack.query().available().filter(
|
||||
name="mrkite")[0]
|
||||
|
||||
pkg = self.base.history.ipkg_to_pkg(new_pkg)
|
||||
pid = self.base.history.add_package(pkg)
|
||||
pkg_data = SwdbPkgData()
|
||||
self.base.history.add_package_data(pid, pkg_data)
|
||||
self.base.history.set_repo(new_pkg, 'main')
|
||||
|
||||
self.base.transaction.add_install(new_pkg, [])
|
||||
self.base.transaction.add_erase(removed_pkg)
|
||||
self.base._verify_transaction()
|
||||
# mock is designed so this returns the exact same mock object it did
|
||||
# during the method call:
|
||||
yumdb_info = self.base._yumdb.get_package(new_pkg)
|
||||
self.assertEqual(yumdb_info.from_repo, 'main')
|
||||
self.assertEqual(yumdb_info.reason, 'unknown')
|
||||
self.assertEqual(yumdb_info.releasever, 'Fedora69')
|
||||
self.assertEqual(yumdb_info.checksum_type, 'md5')
|
||||
self.assertEqual(yumdb_info.checksum_data, HASH)
|
||||
self.base._yumdb.assertLength(2)
|
||||
|
||||
pkg = self.base.history.package(new_pkg)
|
||||
self.assertEqual(pkg.ui_from_repo(), '@main')
|
||||
self.assertEqual(pkg.get_reason(), SwdbReason.UNKNOWN)
|
||||
self.assertEqual(pkg.checksum_type, 'md5')
|
||||
self.assertEqual(pkg.checksum_data, HASH)
|
||||
|
||||
|
||||
class InstallReasonTest(support.ResultTestCase):
|
||||
def setUp(self):
|
||||
|
@ -189,7 +204,7 @@ class InstallReasonTest(support.ResultTestCase):
|
|||
self.base.resolve()
|
||||
new_pkgs = self.base._transaction._get_items(dnf.transaction.INSTALL)
|
||||
pkg_reasons = [(tsi.installed.name, tsi.reason) for tsi in new_pkgs]
|
||||
self.assertCountEqual([("mrkite", "user"), ("trampoline", "dep")],
|
||||
self.assertCountEqual([("mrkite", SwdbReason.USER), ("trampoline", SwdbReason.DEP)],
|
||||
pkg_reasons)
|
||||
|
||||
class InstalledMatchingTest(support.ResultTestCase):
|
||||
|
|
|
@ -109,9 +109,9 @@ class InstallCommandTest(support.ResultTestCase):
|
|||
|
||||
def test_run_group(self):
|
||||
"""Test whether a group is installed."""
|
||||
base = self._cmd.cli.base
|
||||
support.command_run(self._cmd, ['@Solid Ground'])
|
||||
|
||||
base = self._cmd.cli.base
|
||||
self.assertResult(base, itertools.chain(
|
||||
base.sack.query().installed(),
|
||||
dnf.subject.Subject('trampoline').get_best_query(base.sack)))
|
||||
|
@ -192,25 +192,6 @@ class ReinstallCommandTest(support.ResultTestCase):
|
|||
self.assertResult(self._cmd.cli.base,
|
||||
self._cmd.cli.base.sack.query().installed())
|
||||
|
||||
@mock.patch('dnf.cli.commands.reinstall._',
|
||||
dnf.pycomp.NullTranslations().ugettext)
|
||||
def test_run_notavailable(self):
|
||||
"""Test whether it fails if the package is not available."""
|
||||
base = self._cmd.cli.base
|
||||
holes_query = dnf.subject.Subject('hole').get_best_query(base.sack)
|
||||
for pkg in holes_query.installed():
|
||||
self._cmd.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self._cmd.base._yumdb.get_package(pkg).from_repo = 'unknown'
|
||||
stdout = dnf.pycomp.StringIO()
|
||||
|
||||
with support.wiretap_logs('dnf', logging.INFO, stdout):
|
||||
self.assertRaises(dnf.exceptions.Error, support.command_run, self._cmd, ['hole'])
|
||||
|
||||
self.assertEqual(
|
||||
stdout.getvalue(),
|
||||
'Installed package hole-1-1.x86_64 (from unknown) not available.\n')
|
||||
self.assertResult(base, base.sack.query().installed())
|
||||
|
||||
class RepoPkgsCommandTest(unittest.TestCase):
|
||||
|
||||
"""Tests of ``dnf.cli.commands.RepoPkgsCommand`` class."""
|
||||
|
@ -239,34 +220,6 @@ class RepoPkgsCheckUpdateSubCommandTest(unittest.TestCase):
|
|||
base = support.BaseCliStub('main', 'updates', 'third_party')
|
||||
self.cli = base.mock_cli()
|
||||
|
||||
def test(self):
|
||||
"""Test whether only upgrades in the repository are listed."""
|
||||
for pkg in self.cli.base.sack.query().installed().filter(name='tour'):
|
||||
self.cli.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.cli.base._yumdb.get_package(pkg).from_repo = 'updates'
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
with support.patch_std_streams() as (stdout, _):
|
||||
support.command_run(cmd, ['updates', 'check-update'])
|
||||
|
||||
self.assertEqual(
|
||||
stdout.getvalue(),
|
||||
u'\n'
|
||||
u'hole.x86_64 2-1'
|
||||
u' updates \n'
|
||||
u'pepper.x86_64 20-1'
|
||||
u' updates \n'
|
||||
u'Obsoleting Packages\n'
|
||||
u'hole.i686 2-1'
|
||||
u' updates \n'
|
||||
u' tour.noarch 5-0'
|
||||
u' @updates\n'
|
||||
u'hole.x86_64 2-1'
|
||||
u' updates \n'
|
||||
u' tour.noarch 5-0'
|
||||
u' @updates\n')
|
||||
self.assertEqual(self.cli.demands.success_exit_status, 100)
|
||||
|
||||
def test_not_found(self):
|
||||
"""Test whether exit code differs if updates are not found."""
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
|
@ -334,45 +287,6 @@ class RepoPkgsInfoSubCommandTest(unittest.TestCase):
|
|||
base.conf.recent = 7
|
||||
self.cli = base.mock_cli()
|
||||
|
||||
def test_info_all(self):
|
||||
"""Test whether only packages related to the repository are listed."""
|
||||
for pkg in self.cli.base.sack.query().installed().filter(name='pepper'):
|
||||
self.cli.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.cli.base._yumdb.get_package(pkg).from_repo = 'main'
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
with support.patch_std_streams() as (stdout, _):
|
||||
support.command_run(cmd, ['main', 'info', 'all', '*p*'])
|
||||
|
||||
self.assertEqual(
|
||||
stdout.getvalue(),
|
||||
''.join((
|
||||
self.INSTALLED_TITLE,
|
||||
self.PEPPER_SYSTEM_INFO,
|
||||
self.AVAILABLE_TITLE,
|
||||
u'Name : pepper\n'
|
||||
u'Version : 20\n'
|
||||
u'Release : 0\n'
|
||||
u'Arch : src\n'
|
||||
u'Size : 0.0 \n'
|
||||
u'Source : None\n'
|
||||
u'Repo : main\n'
|
||||
u'Summary : \n'
|
||||
u'License : \n'
|
||||
u'Description : \n'
|
||||
u'\n',
|
||||
u'Name : trampoline\n'
|
||||
u'Version : 2.1\n'
|
||||
u'Release : 1\n'
|
||||
u'Arch : noarch\n'
|
||||
u'Size : 0.0 \n'
|
||||
u'Source : None\n'
|
||||
u'Repo : main\n'
|
||||
u'Summary : \n'
|
||||
u'License : \n'
|
||||
u'Description : \n'
|
||||
u'\n')))
|
||||
|
||||
def test_info_available(self):
|
||||
"""Test whether only packages in the repository are listed."""
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
|
@ -387,45 +301,6 @@ class RepoPkgsInfoSubCommandTest(unittest.TestCase):
|
|||
self.HOLE_X86_64_INFO,
|
||||
self.PEPPER_UPDATES_INFO)))
|
||||
|
||||
def test_info_extras(self):
|
||||
"""Test whether only extras installed from the repository are listed."""
|
||||
for pkg in self.cli.base.sack.query().installed().filter(name='tour'):
|
||||
self.cli.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.cli.base._yumdb.get_package(pkg).from_repo = 'unknown'
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
with support.patch_std_streams() as (stdout, _):
|
||||
support.command_run(cmd, ['unknown', 'info', 'extras'])
|
||||
|
||||
self.assertEqual(
|
||||
stdout.getvalue(),
|
||||
u'Extra Packages\n'
|
||||
u'Name : tour\n'
|
||||
u'Version : 5\n'
|
||||
u'Release : 0\n'
|
||||
u'Arch : noarch\n'
|
||||
u'Size : 0.0 \n'
|
||||
u'Source : None\n'
|
||||
u'Repo : @System\n'
|
||||
u'From repo : unknown\n'
|
||||
u'Summary : \n'
|
||||
u'License : \n'
|
||||
u'Description : \n\n')
|
||||
|
||||
def test_info_installed(self):
|
||||
"""Test whether only packages installed from the repository are listed."""
|
||||
for pkg in self.cli.base.sack.query().installed().filter(name='pepper'):
|
||||
self.cli.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.cli.base._yumdb.get_package(pkg).from_repo = 'main'
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
with support.patch_std_streams() as (stdout, _):
|
||||
support.command_run(cmd, ['main', 'info', 'installed'])
|
||||
|
||||
self.assertEqual(
|
||||
stdout.getvalue(),
|
||||
''.join((self.INSTALLED_TITLE, self.PEPPER_SYSTEM_INFO)))
|
||||
|
||||
def test_info_obsoletes(self):
|
||||
"""Test whether only obsoletes in the repository are listed."""
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
|
@ -506,33 +381,6 @@ class RepoPkgsMoveToSubCommandTest(support.ResultTestCase):
|
|||
dnf.subject.Subject('tour-5-0').get_best_query(self.cli.base.sack)
|
||||
.available()))
|
||||
|
||||
class RepoPkgsReinstallOldSubCommandTest(support.ResultTestCase):
|
||||
|
||||
"""Tests of ``dnf.cli.commands.RepoPkgsCommand.ReinstallOldSubCommand`` class."""
|
||||
|
||||
def setUp(self):
|
||||
"""Prepare the test fixture."""
|
||||
super(RepoPkgsReinstallOldSubCommandTest, self).setUp()
|
||||
base = support.BaseCliStub('main')
|
||||
base.init_sack()
|
||||
self.cli = base.mock_cli()
|
||||
|
||||
def test_all(self):
|
||||
"""Test whether all packages from the repository are reinstalled."""
|
||||
for pkg in self.cli.base.sack.query().installed():
|
||||
reponame = 'main' if pkg.name != 'pepper' else 'non-main'
|
||||
self.cli.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.cli.base._yumdb.get_package(pkg).from_repo = reponame
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
support.command_run(cmd, ['main', 'reinstall-old'])
|
||||
|
||||
self.assertResult(self.cli.base, itertools.chain(
|
||||
self.cli.base.sack.query().installed().filter(name__neq='librita'),
|
||||
dnf.subject.Subject('librita.i686').get_best_query(self.cli.base.sack)
|
||||
.installed(),
|
||||
dnf.subject.Subject('librita').get_best_query(self.cli.base.sack)
|
||||
.available()))
|
||||
|
||||
class RepoPkgsReinstallSubCommandTest(unittest.TestCase):
|
||||
|
||||
|
@ -597,52 +445,6 @@ class RepoPkgsRemoveOrDistroSyncSubCommandTest(support.ResultTestCase):
|
|||
self.cli = support.BaseCliStub('distro').mock_cli()
|
||||
self.cli.base.init_sack()
|
||||
|
||||
def test_run_on_repo_spec_sync(self):
|
||||
"""Test running with a package which can be synchronized."""
|
||||
for pkg in self.cli.base.sack.query().installed():
|
||||
data = support.RPMDBAdditionalDataPackageStub()
|
||||
data.from_repo = 'non-distro' if pkg.name == 'pepper' else 'distro'
|
||||
self.cli.base._yumdb.db[str(pkg)] = data
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
support.command_run(cmd, ['non-distro', 'remove-or-distro-sync', 'pepper'])
|
||||
|
||||
self.assertResult(self.cli.base, itertools.chain(
|
||||
self.cli.base.sack.query().installed().filter(name__neq='pepper'),
|
||||
dnf.subject.Subject('pepper').get_best_query(self.cli.base.sack)
|
||||
.available()))
|
||||
|
||||
def test_run_on_repo_spec_remove(self):
|
||||
"""Test running with a package which must be removed."""
|
||||
for pkg in self.cli.base.sack.query().installed():
|
||||
data = support.RPMDBAdditionalDataPackageStub()
|
||||
data.from_repo = 'non-distro' if pkg.name == 'hole' else 'distro'
|
||||
self.cli.base._yumdb.db[str(pkg)] = data
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
support.command_run(cmd, ['non-distro', 'remove-or-distro-sync', 'hole'])
|
||||
|
||||
self.assertResult(
|
||||
self.cli.base,
|
||||
self.cli.base.sack.query().installed().filter(name__neq='hole'))
|
||||
|
||||
def test_run_on_repo_all(self):
|
||||
"""Test running without a package specification."""
|
||||
nondist = {'pepper', 'hole'}
|
||||
for pkg in self.cli.base.sack.query().installed():
|
||||
data = support.RPMDBAdditionalDataPackageStub()
|
||||
data.from_repo = 'non-distro' if pkg.name in nondist else 'distro'
|
||||
self.cli.base._yumdb.db[str(pkg)] = data
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
support.command_run(cmd, ['non-distro', 'remove-or-distro-sync'])
|
||||
|
||||
self.assertResult(self.cli.base, itertools.chain(
|
||||
self.cli.base.sack.query().installed().filter(name__neq='pepper')
|
||||
.filter(name__neq='hole'),
|
||||
dnf.subject.Subject('pepper').get_best_query(self.cli.base.sack)
|
||||
.available()))
|
||||
|
||||
@mock.patch('dnf.cli.commands._', dnf.pycomp.NullTranslations().ugettext)
|
||||
def test_run_on_repo_spec_notinstalled(self):
|
||||
"""Test running with a package which is not installed."""
|
||||
|
@ -682,68 +484,6 @@ class RepoPkgsRemoveOrReinstallSubCommandTest(support.ResultTestCase):
|
|||
base.init_sack()
|
||||
self.cli = base.mock_cli()
|
||||
|
||||
def test_all_not_installed(self):
|
||||
"""Test whether it fails if no package is installed from the repository."""
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
self.assertRaises(dnf.exceptions.Error,
|
||||
support.command_run, cmd,
|
||||
['non-distro', 'remove-or-distro-sync'])
|
||||
|
||||
self.assertResult(self.cli.base, self.cli.base.sack.query().installed())
|
||||
|
||||
def test_all_reinstall(self):
|
||||
"""Test whether all packages from the repository are reinstalled."""
|
||||
for pkg in self.cli.base.sack.query().installed():
|
||||
reponame = 'distro' if pkg.name != 'tour' else 'non-distro'
|
||||
self.cli.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.cli.base._yumdb.get_package(pkg).from_repo = reponame
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
support.command_run(cmd, ['non-distro', 'remove-or-reinstall'])
|
||||
|
||||
self.assertResult(self.cli.base, itertools.chain(
|
||||
self.cli.base.sack.query().installed().filter(name__neq='tour'),
|
||||
dnf.subject.Subject('tour').get_best_query(self.cli.base.sack)
|
||||
.available()))
|
||||
|
||||
def test_all_remove(self):
|
||||
"""Test whether all packages from the repository are removed."""
|
||||
for pkg in self.cli.base.sack.query().installed():
|
||||
reponame = 'distro' if pkg.name != 'hole' else 'non-distro'
|
||||
self.cli.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.cli.base._yumdb.get_package(pkg).from_repo = reponame
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
support.command_run(cmd, ['non-distro', 'remove-or-reinstall'])
|
||||
|
||||
self.assertResult(
|
||||
self.cli.base,
|
||||
self.cli.base.sack.query().installed().filter(name__neq='hole'))
|
||||
|
||||
class RepoPkgsRemoveSubCommandTest(support.ResultTestCase):
|
||||
|
||||
"""Tests of ``dnf.cli.commands.RepoPkgsCommand.RemoveSubCommand`` class."""
|
||||
|
||||
def setUp(self):
|
||||
"""Prepare the test fixture."""
|
||||
super(RepoPkgsRemoveSubCommandTest, self).setUp()
|
||||
base = support.BaseCliStub('main')
|
||||
base.init_sack()
|
||||
self.cli = base.mock_cli()
|
||||
|
||||
def test_all(self):
|
||||
"""Test whether only packages from the repository are removed."""
|
||||
for pkg in self.cli.base.sack.query().installed():
|
||||
reponame = 'main' if pkg.name == 'pepper' else 'non-main'
|
||||
self.cli.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.cli.base._yumdb.get_package(pkg).from_repo = reponame
|
||||
|
||||
cmd = dnf.cli.commands.RepoPkgsCommand(self.cli)
|
||||
support.command_run(cmd, ['main', 'remove'])
|
||||
|
||||
self.assertResult(
|
||||
self.cli.base,
|
||||
self.cli.base.sack.query().installed().filter(name__neq='pepper'))
|
||||
|
||||
class RepoPkgsUpgradeSubCommandTest(support.ResultTestCase):
|
||||
|
||||
|
|
|
@ -22,6 +22,7 @@ from __future__ import print_function
|
|||
from __future__ import unicode_literals
|
||||
from tests import support
|
||||
from tests.support import mock
|
||||
from dnf.db.types import SwdbReason, SwdbPkg
|
||||
|
||||
import dnf.comps
|
||||
import dnf.exceptions
|
||||
|
@ -166,7 +167,9 @@ class SolverTestMixin(object):
|
|||
comps = dnf.comps.Comps()
|
||||
comps._add_from_xml_filename(support.COMPS_PATH)
|
||||
self.comps = comps
|
||||
self.persistor = support.MockGroupPersistor()
|
||||
self.base = support.MockBase()
|
||||
self.history = self.base.history
|
||||
self.persistor = self.history.group
|
||||
self.solver = dnf.comps.Solver(self.persistor, self.comps, support.REASONS.get)
|
||||
|
||||
|
||||
|
@ -175,56 +178,93 @@ class SolverGroupTest(SolverTestMixin, support.TestCase):
|
|||
def test_install(self):
|
||||
grp = self.comps.group_by_pattern('base')
|
||||
trans = self.solver._group_install(grp.id, dnf.comps.MANDATORY, ['right'])
|
||||
self.persistor.commit()
|
||||
self.assertLength(trans.install, 2)
|
||||
p_grp = self.persistor.group('base')
|
||||
self.assertCountEqual(p_grp.full_list, ['pepper', 'tour'])
|
||||
self.assertCountEqual(p_grp.pkg_exclude, ['right'])
|
||||
self.assertCountEqual(p_grp.get_full_list(), ['pepper', 'tour'])
|
||||
self.assertCountEqual(p_grp.get_exclude(), ['right'])
|
||||
self.assertEqual(p_grp.pkg_types, dnf.comps.MANDATORY)
|
||||
|
||||
def test_removable_pkg(self):
|
||||
p_grp1 = self.persistor.group('base')
|
||||
p_grp2 = self.persistor.group('tune')
|
||||
p_grp1.full_list.extend(('pepper', 'tour', 'right'))
|
||||
p_grp2.full_list.append('tour')
|
||||
grp = self.comps.group_by_pattern('base')
|
||||
self.solver._group_install(grp.id, dnf.comps.MANDATORY, [])
|
||||
self.persistor.commit()
|
||||
|
||||
pkg1 = SwdbPkg()
|
||||
pkg1.name = "pepper"
|
||||
pid = self.history.add_package(pkg1)
|
||||
self.history.swdb.trans_data_beg(1, pid, SwdbReason.GROUP, "Installed")
|
||||
|
||||
pkg2 = SwdbPkg()
|
||||
pkg2.name = "right"
|
||||
pid2 = self.history.add_package(pkg2)
|
||||
self.history.swdb.trans_data_beg(1, pid2, SwdbReason.DEP, "Installed")
|
||||
|
||||
n = "dupl"
|
||||
p_grp = self.persistor.new_group(n, n, n, True, 0)
|
||||
self.persistor.add_group(p_grp)
|
||||
p_grp.add_package(["tour"])
|
||||
|
||||
pkg3 = SwdbPkg()
|
||||
pkg3.name = "tour"
|
||||
pid3 = self.history.add_package(pkg3)
|
||||
self.history.swdb.trans_data_beg(1, pid3, SwdbReason.GROUP, "Installed")
|
||||
|
||||
# pepper is in single group with reason "group"
|
||||
self.assertTrue(self.solver._removable_pkg('pepper'))
|
||||
# right's reason is "dep"
|
||||
self.assertFalse(self.solver._removable_pkg('right'))
|
||||
# tour appears in more than one group
|
||||
self.assertFalse(self.solver._removable_pkg('tour'))
|
||||
|
||||
self.persistor.remove_group(p_grp, True)
|
||||
# tour appears only in one group now
|
||||
self.assertTrue(self.solver._removable_pkg('tour'))
|
||||
|
||||
def test_remove(self):
|
||||
# setup of the "current state"
|
||||
p_grp = self.persistor.group('base')
|
||||
p_grp.pkg_types = dnf.comps.MANDATORY
|
||||
p_grp.full_list.extend(('pepper', 'tour'))
|
||||
p_grp2 = self.persistor.group('tune')
|
||||
p_grp2.full_list.append('pepper')
|
||||
grp = self.comps.group_by_pattern('base')
|
||||
self.solver._group_install(grp.id, dnf.comps.MANDATORY, [])
|
||||
self.persistor.commit()
|
||||
|
||||
grps = self.persistor.groups_by_pattern('base')
|
||||
for grp in grps:
|
||||
trans = self.solver._group_remove(grp)
|
||||
self.assertFalse(p_grp.installed)
|
||||
self.assertTransEqual(trans.remove, ('tour',))
|
||||
self.persistor.commit()
|
||||
|
||||
# need to load groups again - loaded object is stays the same
|
||||
grps = self.persistor.groups_by_pattern('base')
|
||||
for grp in grps:
|
||||
self.assertFalse(grp.installed)
|
||||
|
||||
def test_upgrade(self):
|
||||
# setup of the "current state"
|
||||
p_grp = self.persistor.group('base')
|
||||
p_grp.pkg_types = dnf.comps.MANDATORY
|
||||
p_grp.full_list.extend(('pepper', 'handerson'))
|
||||
|
||||
name = "base"
|
||||
p_grp = self.persistor.new_group(name,
|
||||
name,
|
||||
name,
|
||||
True,
|
||||
dnf.comps.MANDATORY)
|
||||
self.persistor.add_group(p_grp)
|
||||
p_grp.add_package(['pepper', 'handerson'])
|
||||
grp = self.comps.group_by_pattern('base')
|
||||
trans = self.solver._group_upgrade(grp.id)
|
||||
self.assertTransEqual(trans.install, ('tour',))
|
||||
self.assertTransEqual(trans.remove, ('handerson',))
|
||||
self.assertTransEqual(trans.upgrade, ('pepper',))
|
||||
self.assertCountEqual(p_grp.full_list, ('tour', 'pepper'))
|
||||
p_grp = self.persistor.group('base')
|
||||
self.assertCountEqual(p_grp.get_full_list(), ('tour', 'pepper'))
|
||||
|
||||
|
||||
class SolverEnvironmentTest(SolverTestMixin, support.TestCase):
|
||||
|
||||
def _install(self, env):
|
||||
return self.solver._environment_install(env.id, dnf.comps.MANDATORY,
|
||||
('lotus',))
|
||||
def _install(self, env, ex=True):
|
||||
exclude = ('lotus',) if ex else []
|
||||
trans = self.solver._environment_install(
|
||||
env.id,
|
||||
dnf.comps.MANDATORY,
|
||||
exclude)
|
||||
self.persistor.commit()
|
||||
return trans
|
||||
|
||||
def test_install(self):
|
||||
env = self.comps.environment_by_pattern('sugar-desktop-environment')
|
||||
|
@ -232,32 +272,58 @@ class SolverEnvironmentTest(SolverTestMixin, support.TestCase):
|
|||
self.assertCountEqual([pkg.name for pkg in trans.install],
|
||||
('pepper', 'trampoline', 'hole'))
|
||||
sugar = self.persistor.environment('sugar-desktop-environment')
|
||||
self.assertCountEqual(sugar.full_list, ('Peppers', 'somerset'))
|
||||
self.assertCountEqual(sugar.get_group_list(), ('Peppers', 'somerset'))
|
||||
somerset = self.persistor.group('somerset')
|
||||
self.assertTrue(somerset.installed)
|
||||
self.assertEqual(somerset.pkg_types, dnf.comps.MANDATORY)
|
||||
self.assertCountEqual(somerset.pkg_exclude, ('lotus',))
|
||||
self.assertCountEqual(somerset.get_exclude(), ('lotus',))
|
||||
base = self.persistor.group('somerset')
|
||||
self.assertTrue(base.installed)
|
||||
|
||||
def test_remove(self):
|
||||
env = self.comps.environment_by_pattern('sugar-desktop-environment')
|
||||
self._install(env)
|
||||
trans = self._install(env)
|
||||
trans = self.solver._environment_remove(env.id)
|
||||
self.persistor.commit()
|
||||
|
||||
p_env = self.persistor.environment('sugar-desktop-environment')
|
||||
self.assertTransEqual(trans.remove, ('pepper', 'trampoline', 'hole'))
|
||||
self.assertFalse(p_env.grp_types)
|
||||
self.assertFalse(p_env.pkg_types)
|
||||
self.assertFalse(p_env.installed)
|
||||
self.assertEqual(p_env.pkg_types, dnf.comps.MANDATORY)
|
||||
self.assertEqual(p_env.grp_types, dnf.comps.ALL_TYPES)
|
||||
|
||||
grp_list = p_env.get_group_list()
|
||||
self.assertTrue(len(grp_list))
|
||||
for grp in grp_list:
|
||||
_grp = self.persistor.group(grp)
|
||||
self.assertEqual(_grp.pkg_types, dnf.comps.MANDATORY)
|
||||
self.assertFalse(_grp.installed)
|
||||
|
||||
# install it again with different pkg_types
|
||||
self.solver._environment_install(env.id, dnf.comps.OPTIONAL, [])
|
||||
self.persistor.commit()
|
||||
p_env = self.persistor.environment('sugar-desktop-environment')
|
||||
self.assertTrue(p_env.installed)
|
||||
self.assertEqual(p_env.pkg_types, dnf.comps.OPTIONAL)
|
||||
self.assertEqual(p_env.grp_types, dnf.comps.ALL_TYPES)
|
||||
grp_list = p_env.get_group_list()
|
||||
self.assertTrue(len(grp_list))
|
||||
for grp in grp_list:
|
||||
_grp = self.persistor.group(grp)
|
||||
self.assertEqual(_grp.pkg_types, dnf.comps.OPTIONAL)
|
||||
|
||||
def test_upgrade(self):
|
||||
"""Upgrade environment, the one group it knows is no longer installed."""
|
||||
p_env = self.persistor.environment('sugar-desktop-environment')
|
||||
p_env.full_list.extend(['somerset'])
|
||||
p_env.grp_types = dnf.comps.ALL_TYPES
|
||||
p_env.pkg_types = dnf.comps.ALL_TYPES
|
||||
|
||||
env = self.comps.environment_by_pattern('sugar-desktop-environment')
|
||||
self.solver._environment_install(env.id, dnf.comps.ALL_TYPES, [])
|
||||
self.persistor.commit()
|
||||
|
||||
p_env = self.persistor.environment('sugar-desktop-environment')
|
||||
self.assertTrue(p_env.installed)
|
||||
|
||||
grp = self.persistor.group('Peppers')
|
||||
grp.update_full_list([])
|
||||
|
||||
trans = self.solver._environment_upgrade(env.id)
|
||||
self.assertTransEqual(trans.install, ('hole', 'lotus'))
|
||||
self.assertEmpty(trans.upgrade)
|
||||
self.assertTransEqual(trans.upgrade, ('pepper', 'trampoline', 'lotus'))
|
||||
self.assertEmpty(trans.remove)
|
||||
|
|
|
@ -60,7 +60,7 @@ class DowngradeTest(support.ResultTestCase):
|
|||
class DowngradeTest2(support.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self._base = support.Base()
|
||||
self._base = support.MockBase()
|
||||
self._base._sack = support.mock_sack('main')
|
||||
self._base._goal = self._goal = mock.create_autospec(dnf.goal.Goal)
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@ from __future__ import unicode_literals
|
|||
import dnf.goal
|
||||
import dnf.selector
|
||||
import tests.support
|
||||
from dnf.db.types import SwdbReason
|
||||
|
||||
|
||||
class GoalTest(tests.support.TestCase):
|
||||
|
@ -43,32 +44,13 @@ class GoalTest(tests.support.TestCase):
|
|||
mrkite = [pkg for pkg in installs if pkg.name == 'mrkite'][0]
|
||||
lotus = [pkg for pkg in installs if pkg.name == 'lotus'][0]
|
||||
trampoline = [pkg for pkg in installs if pkg.name == 'trampoline'][0]
|
||||
self.assertEqual(goal.get_reason(lotus), 'group')
|
||||
self.assertEqual(goal.get_reason(mrkite), 'user')
|
||||
self.assertEqual(goal.get_reason(trampoline), 'dep')
|
||||
self.assertEqual(goal.get_reason(lotus), SwdbReason.GROUP)
|
||||
self.assertEqual(goal.get_reason(mrkite), SwdbReason.USER)
|
||||
self.assertEqual(goal.get_reason(trampoline), SwdbReason.DEP)
|
||||
|
||||
def test_group_reason(self):
|
||||
goal = self.goal
|
||||
hole = self.sack.query().filter(name='hole')[0]
|
||||
goal.group_members.add('hole')
|
||||
self.assertEqual('group', goal.group_reason(hole, 'unknown'))
|
||||
self.assertEqual('dep', goal.group_reason(hole, 'dep'))
|
||||
|
||||
def test_push_userinstalled(self):
|
||||
base = tests.support.MockBase('main')
|
||||
base.conf.clean_requirements_on_remove = True
|
||||
goal = self.goal
|
||||
installed = base.sack.query().installed()
|
||||
for pkg in installed:
|
||||
base._yumdb.get_package(pkg).reason = 'dep'
|
||||
pkg1 = installed.filter(name="pepper")[0]
|
||||
base._yumdb.get_package(pkg1).reason = "user"
|
||||
pkg2 = installed.filter(name="hole")[0]
|
||||
base._yumdb.get_package(pkg2).reason = "unknown"
|
||||
pkgs = installed.filter(name__neq=["pepper", "hole", "librita"]
|
||||
).run()
|
||||
|
||||
# test:
|
||||
goal.push_userinstalled(installed, base._yumdb)
|
||||
goal.run()
|
||||
self.assertEqual(goal.list_unneeded(), pkgs)
|
||||
self.assertEqual(SwdbReason.GROUP, goal.group_reason(hole, SwdbReason.GROUP))
|
||||
self.assertEqual(SwdbReason.DEP, goal.group_reason(hole, SwdbReason.DEP))
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from tests import support
|
||||
from dnf.db.types import SwdbReason, SwdbPkg
|
||||
|
||||
import dnf.comps
|
||||
import dnf.util
|
||||
|
@ -68,7 +69,6 @@ class EmptyPersistorTest(support.ResultTestCase):
|
|||
self.assertCountEqual(map(str, installed), ('trampoline-2.1-1.noarch',))
|
||||
self.assertEmpty(removed)
|
||||
|
||||
|
||||
class PresetPersistorTest(support.ResultTestCase):
|
||||
"""Test group operations with some data in the persistor."""
|
||||
|
||||
|
@ -77,8 +77,51 @@ class PresetPersistorTest(support.ResultTestCase):
|
|||
self.base.read_mock_comps()
|
||||
self.base.init_sack()
|
||||
|
||||
def _install_test_env(self):
|
||||
"""Env installation itself does not handle packages. We need to handle
|
||||
them manually for proper functionality of env remove"""
|
||||
history = self.base.history
|
||||
prst = history.group
|
||||
|
||||
env = prst.environment('sugar-desktop-environment')
|
||||
self.base.environment_install(env.name_id, ('mandatory',))
|
||||
prst.commit()
|
||||
groups = env.get_group_list()
|
||||
for group in groups:
|
||||
_group = prst.group(group)
|
||||
for pkg in _group.get_full_list():
|
||||
swdb_pkg = SwdbPkg()
|
||||
swdb_pkg.name = pkg
|
||||
pid = history.add_package(swdb_pkg)
|
||||
history.swdb.trans_data_beg(1, pid, SwdbReason.GROUP, "Installed")
|
||||
|
||||
def _install_test_group(self):
|
||||
"""Group installation itself does not handle packages. We need to
|
||||
handle them manually for proper functionality of group remove"""
|
||||
history = self.base.history
|
||||
prst = history.group
|
||||
|
||||
group = prst.group('somerset')
|
||||
|
||||
self.base.group_install(group.name_id, ('mandatory',))
|
||||
prst.commit()
|
||||
|
||||
for pkg in group.get_full_list():
|
||||
swdb_pkg = SwdbPkg()
|
||||
swdb_pkg.name = pkg
|
||||
swdb_pkg.version = '20'
|
||||
swdb_pkg.release = '0'
|
||||
swdb_pkg.arch = 'x86_64'
|
||||
pid = history.add_package(swdb_pkg)
|
||||
history.swdb.trans_data_beg(1, pid, SwdbReason.GROUP, "Installed")
|
||||
|
||||
self.base.reset(goal=True)
|
||||
|
||||
def test_env_group_remove(self):
|
||||
prst = self.base.history.group
|
||||
self._install_test_env()
|
||||
cnt = self.base.env_group_remove(["sugar-desktop-environment"])
|
||||
prst.commit()
|
||||
self.assertEqual(3, cnt)
|
||||
with support.mock.patch('logging.Logger.error') as log:
|
||||
self.assertRaises(dnf.exceptions.Error,
|
||||
|
@ -86,11 +129,13 @@ class PresetPersistorTest(support.ResultTestCase):
|
|||
['nonexistent'])
|
||||
|
||||
def test_environment_remove(self):
|
||||
prst = self.base._group_persistor
|
||||
env_ids = prst.environments_by_pattern("sugar-desktop-environment")
|
||||
self.assertEqual(env_ids, set(['sugar-desktop-environment']))
|
||||
env_id = dnf.util.first(env_ids)
|
||||
prst = self.base.history.group
|
||||
self._install_test_env()
|
||||
env_id = prst.environment('sugar-desktop-environment')
|
||||
self.assertEqual(env_id.name_id, 'sugar-desktop-environment')
|
||||
self.assertTrue(env_id.installed)
|
||||
self.assertGreater(self.base.environment_remove(env_id), 0)
|
||||
prst.commit()
|
||||
p_env = prst.environment(env_id)
|
||||
self.assertFalse(p_env.installed)
|
||||
peppers = prst.group('Peppers')
|
||||
|
@ -99,7 +144,8 @@ class PresetPersistorTest(support.ResultTestCase):
|
|||
self.assertFalse(somerset.installed)
|
||||
|
||||
def test_env_upgrade(self):
|
||||
prst = self.base._group_persistor
|
||||
prst = self.base.history.group
|
||||
self._install_test_env()
|
||||
cnt = self.base.environment_upgrade("sugar-desktop-environment")
|
||||
self.assertEqual(5, cnt)
|
||||
peppers = prst.group('Peppers')
|
||||
|
@ -108,15 +154,14 @@ class PresetPersistorTest(support.ResultTestCase):
|
|||
self.assertTrue(somerset.installed)
|
||||
|
||||
def test_group_install(self):
|
||||
prst = self.base._group_persistor
|
||||
prst = self.base.history.group
|
||||
grp = self.base.comps.group_by_pattern('Base')
|
||||
p_grp = prst.group('base')
|
||||
self.assertFalse(p_grp.installed)
|
||||
|
||||
self.assertEqual(self.base.group_install(grp.id, ('mandatory',)), 2)
|
||||
prst.commit()
|
||||
inst, removed = self.installed_removed(self.base)
|
||||
self.assertEmpty(inst)
|
||||
self.assertEmpty(removed)
|
||||
p_grp = prst.group('base')
|
||||
self.assertTrue(p_grp.installed)
|
||||
|
||||
"""
|
||||
|
@ -144,16 +189,18 @@ class PresetPersistorTest(support.ResultTestCase):
|
|||
"""
|
||||
|
||||
def test_group_remove(self):
|
||||
prst = self.base._group_persistor
|
||||
grp_ids = prst.groups_by_pattern('somerset')
|
||||
self.assertEqual(grp_ids, set(['somerset']))
|
||||
grp_id = dnf.util.first(grp_ids)
|
||||
p_grp = prst.group('somerset')
|
||||
self._install_test_group()
|
||||
prst = self.base.history.group
|
||||
|
||||
p_grp = prst.group('somerset')
|
||||
self.assertGreater(self.base.group_remove(p_grp.name_id), 0)
|
||||
prst.commit()
|
||||
|
||||
self.assertGreater(self.base.group_remove(grp_id), 0)
|
||||
inst, removed = self.installed_removed(self.base)
|
||||
self.assertEmpty(inst)
|
||||
self.assertCountEqual([pkg.name for pkg in removed], ('pepper',))
|
||||
|
||||
p_grp = prst.group(p_grp.name_id)
|
||||
self.assertFalse(p_grp.installed)
|
||||
|
||||
|
||||
|
@ -163,27 +210,21 @@ class EnvironmentInstallTest(support.ResultTestCase):
|
|||
self.base = support.MockBase("main")
|
||||
self.base.init_sack()
|
||||
self.base.read_mock_comps()
|
||||
self.prst = self.base._group_persistor
|
||||
p_env = self.prst.environment('sugar-desktop-environment')
|
||||
p_env.pkg_types = 0
|
||||
p_env.grp_types = 0
|
||||
del p_env.full_list[:]
|
||||
p_grp = self.prst.group('somerset')
|
||||
p_grp.pkg_types = 0
|
||||
del p_grp.full_list[:]
|
||||
|
||||
def test_environment_install(self):
|
||||
prst = self.base.history.group
|
||||
comps = self.base.comps
|
||||
env = comps.environment_by_pattern("sugar-desktop-environment")
|
||||
self.base.environment_install(env.id, ('mandatory',))
|
||||
prst.commit()
|
||||
installed, _ = self.installed_removed(self.base)
|
||||
self.assertCountEqual(map(operator.attrgetter('name'), installed),
|
||||
('trampoline', 'lotus'))
|
||||
|
||||
p_env = self.prst.environment('sugar-desktop-environment')
|
||||
self.assertCountEqual(p_env.full_list, ('somerset', 'Peppers'))
|
||||
p_env = prst.environment('sugar-desktop-environment')
|
||||
self.assertCountEqual(p_env.get_group_list(), ('somerset', 'Peppers'))
|
||||
self.assertTrue(p_env.installed)
|
||||
|
||||
peppers = self.prst.group('Peppers')
|
||||
somerset = self.prst.group('somerset')
|
||||
peppers = prst.group('Peppers')
|
||||
somerset = prst.group('somerset')
|
||||
self.assertTrue(all((peppers.installed, somerset.installed)))
|
||||
|
|
|
@ -22,141 +22,8 @@ from tests.support import TestCase
|
|||
from tests.support import mock
|
||||
|
||||
import dnf.history
|
||||
import dnf.yum.history
|
||||
from dnf.db.types import SwdbReason, SwdbTrans
|
||||
|
||||
class TestedHistory(dnf.yum.history.YumHistory):
|
||||
@mock.patch("os.path.exists", return_value=True)
|
||||
def __init__(self, unused_exists):
|
||||
self._db_date = "1962-07-12"
|
||||
super(TestedHistory, self).__init__(support.NONEXISTENT_FILE, mock.Mock())
|
||||
|
||||
def _create_db_file(self):
|
||||
return None
|
||||
|
||||
class History(TestCase):
|
||||
def setUp(self):
|
||||
self.base = support.MockBase("main")
|
||||
self.sack = self.base.sack
|
||||
self.history = TestedHistory()
|
||||
|
||||
def pkgtup2pid_test(self):
|
||||
""" Check pkg2pid() correctly delegates to _*2pid()s. """
|
||||
hpkg = dnf.yum.history.YumHistoryPackage("n", "a", "e", "v", "r")
|
||||
with mock.patch.object(self.history, "_hpkg2pid") as hpkg2pid:
|
||||
self.history.pkg2pid(hpkg)
|
||||
hpkg2pid.assert_called_with(hpkg, True)
|
||||
|
||||
ipkg = self.sack.query().installed().filter(name="pepper")[0]
|
||||
with mock.patch.object(self.history, "_ipkg2pid") as ipkg2pid:
|
||||
self.history.pkg2pid(ipkg)
|
||||
ipkg2pid.assert_called_with(ipkg, True)
|
||||
|
||||
apkg = self.sack.query().available().filter(name="lotus")[0]
|
||||
with mock.patch.object(self.history, "_apkg2pid") as apkg2pid:
|
||||
self.history.pkg2pid(apkg)
|
||||
apkg2pid.assert_called_with(apkg, True)
|
||||
|
||||
class HistoryWrapperTest(support.TestCase):
|
||||
"""Unit tests of dnf.history._HistoryWrapper."""
|
||||
|
||||
def _create_wrapper(self, yum_history):
|
||||
"""Create new instance of _HistoryWrapper."""
|
||||
wrapper = dnf.history.open_history(yum_history)
|
||||
assert isinstance(wrapper, dnf.history._HistoryWrapper)
|
||||
return wrapper
|
||||
|
||||
def test_context_manager(self):
|
||||
"""Test whether _HistoryWrapper can be used as a context manager."""
|
||||
yum_history = mock.create_autospec(dnf.yum.history.YumHistory)
|
||||
history = self._create_wrapper(yum_history)
|
||||
|
||||
with history as instance:
|
||||
pass
|
||||
|
||||
self.assertIs(instance, history)
|
||||
self.assertEqual(yum_history.close.mock_calls, [mock.call()])
|
||||
|
||||
def test_close(self):
|
||||
"""Test close."""
|
||||
yum_history = mock.create_autospec(dnf.yum.history.YumHistory)
|
||||
history = self._create_wrapper(yum_history)
|
||||
|
||||
history.close()
|
||||
|
||||
self.assertEqual(yum_history.close.mock_calls, [mock.call()])
|
||||
|
||||
def test_has_transaction_absent(self):
|
||||
"""Test has_transaction without any transaction."""
|
||||
with self._create_wrapper(support.HistoryStub()) as history:
|
||||
present = history.has_transaction(1)
|
||||
|
||||
self.assertFalse(present)
|
||||
|
||||
def test_has_transaction_present(self):
|
||||
"""Test has_transaction with a transaction present."""
|
||||
yum_history = support.HistoryStub()
|
||||
yum_history.old_data_pkgs['1'] = (
|
||||
dnf.yum.history.YumHistoryPackageState(
|
||||
'lotus', 'x86_64', '0', '3', '16', 'Erase',
|
||||
history=yum_history),)
|
||||
|
||||
with self._create_wrapper(yum_history) as history:
|
||||
present = history.has_transaction(1)
|
||||
|
||||
self.assertTrue(present)
|
||||
|
||||
def test_last_transaction_id(self):
|
||||
"""Test last_transaction_id with some transactions."""
|
||||
yum_history = support.HistoryStub()
|
||||
yum_history.old_data_pkgs['1'] = (
|
||||
dnf.yum.history.YumHistoryPackageState(
|
||||
'lotus', 'x86_64', '0', '3', '16', 'Erase',
|
||||
history=yum_history),)
|
||||
yum_history.old_data_pkgs['2'] = (
|
||||
dnf.yum.history.YumHistoryPackageState(
|
||||
'pepper', 'x86_64', '0', '20', '0', 'Install',
|
||||
history=yum_history),)
|
||||
|
||||
with self._create_wrapper(yum_history) as history:
|
||||
id_ = history.last_transaction_id()
|
||||
|
||||
self.assertEqual(id_, 2)
|
||||
|
||||
def test_last_transaction_id_notransaction(self):
|
||||
"""Test last_transaction_id without any transaction."""
|
||||
with self._create_wrapper(support.HistoryStub()) as history:
|
||||
id_ = history.last_transaction_id()
|
||||
|
||||
self.assertIsNone(id_)
|
||||
|
||||
def test_transaction_nevra_ops_notransaction(self):
|
||||
"""Test transaction_nevra_ops without any transaction."""
|
||||
with self._create_wrapper(support.HistoryStub()) as history:
|
||||
self.assertRaises(ValueError, history.transaction_nevra_ops, 0)
|
||||
|
||||
def test_transaction_nevra_ops_update(self):
|
||||
"""Test transaction_nevra_ops with a downgrade operation."""
|
||||
yum_history = support.HistoryStub()
|
||||
yum_history.old_data_pkgs['1'] = (
|
||||
dnf.yum.history.YumHistoryPackageState(
|
||||
'tour', 'noarch', '0', '4.8', '1', 'Update',
|
||||
history=yum_history),
|
||||
dnf.yum.history.YumHistoryPackageState(
|
||||
'tour', 'noarch', '0', '4.6', '1', 'Updated',
|
||||
history=yum_history),
|
||||
dnf.yum.history.YumHistoryPackageState(
|
||||
'tour', 'noarch', '0', '4.8', '1', 'Obsoleting',
|
||||
history=yum_history),
|
||||
dnf.yum.history.YumHistoryPackageState(
|
||||
'lotus', 'x86_64', '0', '3', '16', 'Obsoleted',
|
||||
history=yum_history))
|
||||
expected_ops = dnf.history.NEVRAOperations()
|
||||
expected_ops.add('Update', 'tour-0:4.8-1.noarch', 'tour-0:4.6-1.noarch', ('lotus-0:3-16.x86_64',))
|
||||
|
||||
with self._create_wrapper(yum_history) as history:
|
||||
result_ops = history.transaction_nevra_ops(1)
|
||||
|
||||
self.assertCountEqual(result_ops, expected_ops)
|
||||
|
||||
class NEVRAOperationsTest(support.TestCase):
|
||||
"""Unit tests of dnf.history.NEVRAOperations."""
|
||||
|
@ -559,13 +426,13 @@ class TransactionConverterTest(TestCase):
|
|||
|
||||
sack = support.mock_sack('main')
|
||||
converter = dnf.history.TransactionConverter(sack)
|
||||
actual = converter.convert(operations, 'reason')
|
||||
actual = converter.convert(operations, SwdbReason.USER)
|
||||
|
||||
expected = dnf.transaction.Transaction()
|
||||
expected.add_install(
|
||||
next(iter(sack.query().available()._nevra('lotus-3-16.x86_64'))),
|
||||
[next(iter(sack.query().installed()._nevra('hole-1-1.x86_64')))],
|
||||
'reason')
|
||||
SwdbReason.USER)
|
||||
self.assert_transaction_equal(actual, expected)
|
||||
|
||||
def test_convert_reinstall(self):
|
||||
|
@ -609,22 +476,24 @@ class TransactionConverterTest(TestCase):
|
|||
yield (item.op_type, item.installed, item.erased, item.obsoleted,
|
||||
item.reason)
|
||||
|
||||
|
||||
class ComparisonTests(TestCase):
|
||||
|
||||
def test_transaction(self):
|
||||
a = dnf.yum.history.YumHistoryTransaction(None, [1, 5, 0, 5, 0, 0, 0])
|
||||
b = dnf.yum.history.YumHistoryTransaction(None, [9, 5, 0, 5, 0, 0, 0])
|
||||
self.assertLess(a, b)
|
||||
self.assertGreater(b, a)
|
||||
t = SwdbTrans
|
||||
a = t.new(1, '5', '5', '', '', '', '', '9', 0)
|
||||
b = t.new(9, '5', '5', '', '', '', '', '9', 0)
|
||||
|
||||
def test_rpmdb_problem(self):
|
||||
a = dnf.yum.history.YumHistoryRpmdbProblem(None, 1, 5, None)
|
||||
b = dnf.yum.history.YumHistoryRpmdbProblem(None, 9, 5, None)
|
||||
self.assertLess(a, b)
|
||||
self.assertGreater(b, a)
|
||||
self.assertGreater(a, b)
|
||||
self.assertLess(b, a)
|
||||
|
||||
a2 = t.new(1, '5', '', '', '', '', '', '9', 0)
|
||||
b2 = t.new(1, '9', '', '', '', '', '', '9', 0)
|
||||
|
||||
a2 = dnf.yum.history.YumHistoryRpmdbProblem(None, 5, 1, None)
|
||||
b2 = dnf.yum.history.YumHistoryRpmdbProblem(None, 5, 9, None)
|
||||
self.assertGreater(a2, b2)
|
||||
self.assertLess(b2, a2)
|
||||
|
||||
a3 = t.new(1, '1', '2', '', '', '', '', '9', 0)
|
||||
b3 = t.new(2, '3', '4', '', '', '', '', '9', 0)
|
||||
|
||||
self.assertGreater(a3, b3)
|
||||
self.assertLess(b3, a3)
|
||||
|
|
|
@ -28,14 +28,15 @@ from dnf.package import Package
|
|||
from dnf.transaction import (ERASE, DOWNGRADE, INSTALL, REINSTALL,
|
||||
TransactionItem, UPGRADE)
|
||||
from hawkey import split_nevra
|
||||
from tests.support import mock_sack, Base, ObjectMatcher
|
||||
from dnf.db.types import SwdbReason
|
||||
from tests.support import mock_sack, MockBase, ObjectMatcher
|
||||
from unittest import TestCase
|
||||
|
||||
class BaseTest(TestCase):
|
||||
"""Unit tests of dnf.Base."""
|
||||
|
||||
def _create_item_matcher(self, op_type, installed=None, erased=None,
|
||||
obsoleted=[], reason='unknown'):
|
||||
obsoleted=[], reason=SwdbReason.UNKNOWN):
|
||||
"""Create a new instance of dnf.transaction.TransactionItem matcher."""
|
||||
attrs = {'op_type': op_type,
|
||||
'installed': self._create_package_matcher(installed)
|
||||
|
@ -59,7 +60,7 @@ class BaseTest(TestCase):
|
|||
|
||||
def setUp(self):
|
||||
"""Prepare the test fixture."""
|
||||
self._base = Base()
|
||||
self._base = MockBase()
|
||||
self._base._sack = mock_sack('main', 'updates')
|
||||
|
||||
def test_history_undo_operations_downgrade(self):
|
||||
|
@ -74,7 +75,7 @@ class BaseTest(TestCase):
|
|||
self.assertEqual(next(transaction_it), self._create_item_matcher(
|
||||
UPGRADE, installed='pepper-20-1.x86_64', erased='pepper-20-0.x86_64'))
|
||||
self.assertEqual(next(transaction_it), self._create_item_matcher(
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason='user'))
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason=SwdbReason.USER))
|
||||
self.assertRaises(StopIteration, next, transaction_it)
|
||||
|
||||
def test_history_undo_operations_downgrade_notavailable(self):
|
||||
|
@ -107,12 +108,12 @@ class BaseTest(TestCase):
|
|||
|
||||
transaction_it = iter(self._base.transaction)
|
||||
self.assertEqual(next(transaction_it), self._create_item_matcher(
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason='user'))
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason=SwdbReason.USER))
|
||||
self.assertRaises(StopIteration, next, transaction_it)
|
||||
|
||||
def test_history_undo_operations_erase_twoavailable(self):
|
||||
"""Test history_undo_operations with an erase available in two repos."""
|
||||
base = Base()
|
||||
base = MockBase()
|
||||
base._sack = mock_sack('main', 'search')
|
||||
operations = NEVRAOperations()
|
||||
operations.add('Erase', 'lotus-3-16.x86_64')
|
||||
|
@ -122,7 +123,7 @@ class BaseTest(TestCase):
|
|||
|
||||
transaction_it = iter(base.transaction)
|
||||
self.assertEqual(next(transaction_it), self._create_item_matcher(
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason='user'))
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason=SwdbReason.USER))
|
||||
self.assertRaises(StopIteration, next, transaction_it)
|
||||
|
||||
def test_history_undo_operations_erase_notavailable(self):
|
||||
|
@ -147,7 +148,7 @@ class BaseTest(TestCase):
|
|||
self.assertEqual(next(transaction_it), self._create_item_matcher(
|
||||
ERASE, erased='pepper-20-0.x86_64'))
|
||||
self.assertEqual(next(transaction_it), self._create_item_matcher(
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason='user'))
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason=SwdbReason.USER))
|
||||
self.assertRaises(StopIteration, next, transaction_it)
|
||||
|
||||
def test_history_undo_operations_install_notinstalled(self):
|
||||
|
@ -220,7 +221,7 @@ class BaseTest(TestCase):
|
|||
self.assertEqual(next(transaction_it), self._create_item_matcher(
|
||||
DOWNGRADE, installed='tour-4.6-1.noarch', erased='tour-5-0.noarch'))
|
||||
self.assertEqual(next(transaction_it), self._create_item_matcher(
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason='user'))
|
||||
INSTALL, installed='lotus-3-16.x86_64', reason=SwdbReason.USER))
|
||||
self.assertRaises(StopIteration, next, transaction_it)
|
||||
|
||||
def test_history_undo_operations_update_notavailable(self):
|
||||
|
|
|
@ -40,18 +40,6 @@ class List(support.TestCase):
|
|||
ypl = base._do_package_lists('installed')
|
||||
self.assertEqual(len(ypl.installed), support.TOTAL_RPMDB_COUNT)
|
||||
|
||||
def test_list_installed_reponame(self):
|
||||
"""Test whether only packages installed from the repository are listed."""
|
||||
base = support.MockBase()
|
||||
expected = base.sack.query().installed().filter(name={'pepper',
|
||||
'librita'})
|
||||
for pkg in expected:
|
||||
base._yumdb.db[str(pkg)] = {'from_repo': 'main'}
|
||||
|
||||
lists = base._do_package_lists('installed', reponame='main')
|
||||
|
||||
self.assertCountEqual(lists.installed, expected)
|
||||
|
||||
def test_list_updates(self):
|
||||
base = support.MockBase("updates", "main")
|
||||
ypl = base._do_package_lists('upgrades')
|
||||
|
|
|
@ -26,88 +26,6 @@ import tests.support
|
|||
|
||||
IDS = set(['one', 'two', 'three'])
|
||||
|
||||
|
||||
class ClonableDictTest(tests.support.TestCase):
|
||||
def test_clone(self):
|
||||
g = dnf.persistor.ClonableDict({})
|
||||
g['base'] = ['pepper', 'tour']
|
||||
g_c = g.clone()
|
||||
self.assertEqual(g, g_c)
|
||||
g_c['base'].append('magical')
|
||||
self.assertNotEqual(g, g_c)
|
||||
|
||||
|
||||
class GroupPersistorTest(tests.support.TestCase):
|
||||
def setUp(self):
|
||||
self.persistdir = tempfile.mkdtemp(prefix="dnf-groupprst-test.0.0.5")
|
||||
self.prst = dnf.persistor.GroupPersistor(self.persistdir)
|
||||
|
||||
def tearDown(self):
|
||||
dnf.util.rm_rf(self.persistdir)
|
||||
|
||||
def test_default(self):
|
||||
"""Default items are empty."""
|
||||
grp = self.prst.group('pepper')
|
||||
self.assertEmpty(grp.full_list)
|
||||
self.assertEqual(grp.pkg_types, 0)
|
||||
|
||||
def test_prune_db(self):
|
||||
prst = self.prst
|
||||
grp = prst.group('pepper')
|
||||
prst._prune_db()
|
||||
self.assertEmpty(prst.db['GROUPS'])
|
||||
|
||||
grp = prst.group('pepper')
|
||||
grp.pkg_types = dnf.comps.MANDATORY
|
||||
prst._prune_db()
|
||||
self.assertLength(prst.db['GROUPS'], 1)
|
||||
|
||||
def test_saving(self):
|
||||
prst = self.prst
|
||||
grp = prst.group('pepper')
|
||||
grp.full_list.extend(['pepper', 'tour'])
|
||||
grp.pkg_types = dnf.comps.DEFAULT | dnf.comps.OPTIONAL
|
||||
prst.commit()
|
||||
self.assertTrue(prst.save())
|
||||
|
||||
prst = dnf.persistor.GroupPersistor(self.persistdir)
|
||||
grp = prst.group('pepper')
|
||||
self.assertEqual(grp.full_list, ['pepper', 'tour'])
|
||||
self.assertEqual(grp.pkg_types, dnf.comps.DEFAULT | dnf.comps.OPTIONAL)
|
||||
|
||||
def test_version(self):
|
||||
version = self.prst.db['meta']['version']
|
||||
self.assertIsInstance(version, dnf.pycomp.unicode)
|
||||
|
||||
|
||||
class GroupDiffTest(tests.support.TestCase):
|
||||
def test_added_removed(self):
|
||||
prst1 = dnf.persistor.GroupPersistor(tests.support.NONEXISTENT_FILE)
|
||||
prst1.db = prst1._empty_db()
|
||||
prst2 = dnf.persistor.GroupPersistor(tests.support.NONEXISTENT_FILE)
|
||||
prst2.db = prst1._empty_db()
|
||||
|
||||
prst1.group('kite').full_list.extend(('the', 'show'))
|
||||
prst2.environment('pepper').full_list.extend(('stop', 'the', 'show'))
|
||||
|
||||
diff = dnf.persistor._GroupsDiff(prst1.db, prst2.db)
|
||||
self.assertEmpty(diff.new_groups)
|
||||
self.assertEmpty(diff.removed_environments)
|
||||
self.assertCountEqual(diff.removed_groups, ('kite',))
|
||||
self.assertCountEqual(diff.new_environments, ('pepper',))
|
||||
|
||||
def test_diff_dcts(self):
|
||||
dct1 = {'stop' : [1, 2, 3],
|
||||
'the' : {'show' : [1, 2]}}
|
||||
dct2 = {'stop' : [1, 2],
|
||||
'the' : {'show' : [2]},
|
||||
'three' : 8}
|
||||
|
||||
added, removed = dnf.persistor._diff_dcts(dct1, dct2)
|
||||
self.assertEqual(added, {'three': 8})
|
||||
self.assertEqual(removed, {'the': {'show': set([1])}, 'stop': set([3])})
|
||||
|
||||
|
||||
class RepoPersistorTest(tests.support.TestCase):
|
||||
def setUp(self):
|
||||
self.persistdir = tempfile.mkdtemp(prefix="dnf-repoprst-test-")
|
||||
|
|
|
@ -34,17 +34,6 @@ class QueriesTest(support.TestCase):
|
|||
res_set = set(res)
|
||||
self.assertEqual(len(res), len(res_set))
|
||||
|
||||
def test_autoremove(self):
|
||||
sack = support.mock_sack("main")
|
||||
base = support.MockBase("main")
|
||||
installed = sack.query().installed()
|
||||
for pkg in installed:
|
||||
base._yumdb.get_package(pkg).reason = "dep"
|
||||
hole = installed.filter(name="hole")[0]
|
||||
base._yumdb.get_package(hole).reason = "user"
|
||||
pkgs = installed._unneeded(sack, base._yumdb)
|
||||
self.assertEqual(len(pkgs), support.TOTAL_RPMDB_COUNT-1)
|
||||
|
||||
def test_by_file(self):
|
||||
# check sanity first:
|
||||
sack = support.mock_sack()
|
||||
|
|
|
@ -121,20 +121,6 @@ class Reinstall(support.ResultTestCase):
|
|||
dnf.subject.Subject('librita.i686').get_best_query(self.sack).installed(),
|
||||
dnf.subject.Subject('librita').get_best_query(self.sack).available()))
|
||||
|
||||
def test_reinstall_old_reponame_installed(self):
|
||||
"""Test whether it reinstalls packages only from the repository."""
|
||||
for pkg in self.sack.query().installed().filter(name='librita'):
|
||||
self.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.base._yumdb.get_package(pkg).from_repo = 'main'
|
||||
|
||||
reinstalled_count = self.base.reinstall('librita', old_reponame='main')
|
||||
|
||||
self.assertEqual(reinstalled_count, 1)
|
||||
self.assertResult(self.base, itertools.chain(
|
||||
self.sack.query().installed().filter(name__neq='librita'),
|
||||
dnf.subject.Subject('librita.i686').get_best_query(self.sack).installed(),
|
||||
dnf.subject.Subject('librita').get_best_query(self.sack).available()))
|
||||
|
||||
def test_reinstall_old_reponame_notinstalled(self):
|
||||
"""Test whether it reinstalls packages only from the repository."""
|
||||
self.assertRaises(
|
||||
|
|
|
@ -62,16 +62,3 @@ class Remove(support.ResultTestCase):
|
|||
def test_remove_provides(self):
|
||||
"""Remove uses provides too."""
|
||||
self.assertEqual(1, self.base.remove('parking'))
|
||||
|
||||
def test_reponame(self):
|
||||
"""Test whether only packages from the repository are uninstalled."""
|
||||
pkg_subj = dnf.subject.Subject('librita.x86_64')
|
||||
for pkg in pkg_subj.get_best_query(self.base.sack).installed():
|
||||
self.base._yumdb.db[str(pkg)] = support.RPMDBAdditionalDataPackageStub()
|
||||
self.base._yumdb.get_package(pkg).from_repo = 'main'
|
||||
|
||||
self.base.remove('librita', 'main')
|
||||
self.assertResult(self.base, itertools.chain(
|
||||
self.base.sack.query().installed().filter(name__neq='librita'),
|
||||
dnf.subject.Subject('librita.i686').get_best_query(self.base.sack)
|
||||
.installed()))
|
||||
|
|
|
@ -25,13 +25,14 @@ import dnf.repo
|
|||
import dnf.sack
|
||||
|
||||
class SackTest(support.TestCase):
|
||||
|
||||
def test_rpmdb_version(self):
|
||||
base = support.MockBase()
|
||||
sack = base.sack
|
||||
yumdb = mock.MagicMock()
|
||||
version = base.sack._rpmdb_version(yumdb)
|
||||
self.assertEqual(version._num, support.TOTAL_RPMDB_COUNT)
|
||||
self.assertEqual(version._chksum.hexdigest(), support.RPMDB_CHECKSUM)
|
||||
version = sack._rpmdb_version(base.history)
|
||||
self.assertIsNotNone(version)
|
||||
# self.assertEqual(version._num, support.TOTAL_RPMDB_COUNT)
|
||||
# self.assertEqual(version._chksum.hexdigest(), support.RPMDB_CHECKSUM)
|
||||
|
||||
def test_setup_excludes_includes(self):
|
||||
base = support.MockBase()
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from tests.support import mock, FakeConf
|
||||
from dnf.db.types import SwdbReason
|
||||
|
||||
import dnf.goal
|
||||
import dnf.repo
|
||||
|
@ -92,29 +93,34 @@ class TransactionItemTest(tests.support.TestCase):
|
|||
(self.obspkg2, 'Obsoleted'), (self.obspkg3, 'Obsoleted')])
|
||||
|
||||
def test_propagated_reason(self):
|
||||
yumdb = mock.Mock()
|
||||
yumdb.get_package().get = lambda s: 'dep'
|
||||
self.base = tests.support.BaseCliStub()
|
||||
self.base._sack = tests.support.mock_sack('main', 'search')
|
||||
base = tests.support.MockBase()
|
||||
history = base.history
|
||||
base._sack = tests.support.mock_sack('main', 'search')
|
||||
|
||||
pkg1 = history.ipkg_to_pkg(self.newpkg)
|
||||
pid = history.add_package(pkg1)
|
||||
history.swdb.trans_data_beg(1, pid, SwdbReason.DEP, "Installed")
|
||||
|
||||
pkg2 = history.ipkg_to_pkg(self.oldpkg)
|
||||
pid = history.add_package(pkg2)
|
||||
history.swdb.trans_data_beg(1, pid, SwdbReason.DEP, "Installed")
|
||||
|
||||
ionly = base._sack.query().filter(empty=True) # installonly_query
|
||||
|
||||
tsi = dnf.transaction.TransactionItem(
|
||||
dnf.transaction.INSTALL, installed=self.newpkg, reason='user')
|
||||
self.assertEqual(tsi._propagated_reason(yumdb, self.base._sack.query().filter(empty=True)),
|
||||
'user')
|
||||
self.assertEqual(tsi._propagated_reason(history, ionly), SwdbReason.USER)
|
||||
tsi = dnf.transaction.TransactionItem(
|
||||
dnf.transaction.UPGRADE, installed=self.newpkg, erased=self.oldpkg)
|
||||
self.assertEqual(tsi._propagated_reason(yumdb, self.base._sack.query().filter(empty=True)),
|
||||
'dep')
|
||||
self.assertEqual(tsi._propagated_reason(history, ionly), SwdbReason.DEP)
|
||||
tsi = dnf.transaction.TransactionItem(
|
||||
dnf.transaction.DOWNGRADE,
|
||||
installed=self.newpkg, erased=self.oldpkg)
|
||||
self.assertEqual(tsi._propagated_reason(yumdb, self.base._sack.query().filter(empty=True)),
|
||||
'dep')
|
||||
self.assertEqual(tsi._propagated_reason(history, ionly), SwdbReason.DEP)
|
||||
|
||||
# test the call can survive if no reason is known:
|
||||
yumdb = mock.Mock()
|
||||
yumdb.get_package().get = lambda s: None
|
||||
self.assertEqual(tsi._propagated_reason(yumdb, self.base._sack.query().filter(empty=True)),
|
||||
'unknown')
|
||||
history.reset_db()
|
||||
self.assertEqual(tsi._propagated_reason(history, ionly), SwdbReason.UNKNOWN)
|
||||
|
||||
def test_removes(self):
|
||||
tsi = dnf.transaction.TransactionItem(
|
||||
|
|
|
@ -38,7 +38,7 @@ class Update(support.ResultTestCase):
|
|||
self.assertResult(base, expected)
|
||||
|
||||
def test_update_not_found(self):
|
||||
base = support.Base()
|
||||
base = support.MockBase()
|
||||
base._sack = support.mock_sack('updates')
|
||||
base._goal = goal = mock.create_autospec(dnf.goal.Goal)
|
||||
|
||||
|
|
|
@ -1,45 +0,0 @@
|
|||
# Copyright (C) 2012-2016 Red Hat, Inc.
|
||||
#
|
||||
# This copyrighted material is made available to anyone wishing to use,
|
||||
# modify, copy, or redistribute it subject to the terms and conditions of
|
||||
# the GNU General Public License v.2, or (at your option) any later version.
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY expressed or implied, including the implied warranties of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
|
||||
# Public License for more details. You should have received a copy of the
|
||||
# GNU General Public License along with this program; if not, write to the
|
||||
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
|
||||
# source code or documentation are not subject to the GNU General Public
|
||||
# License and may only be used or replicated with the express permission of
|
||||
# Red Hat, Inc.
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import unicode_literals
|
||||
from tests import support
|
||||
from tests.support import mock
|
||||
|
||||
import dnf.yum.rpmsack
|
||||
import unittest
|
||||
|
||||
|
||||
@mock.patch('os.path.exists', return_value=True)
|
||||
class TestAdditionalPkgDB(unittest.TestCase):
|
||||
def _instantiate(self, base):
|
||||
path = base.conf.persistdir + '/yumdb'
|
||||
return dnf.yum.rpmsack.AdditionalPkgDB(path)
|
||||
|
||||
def test_get_dir(self, mock_exists):
|
||||
base = support.MockBase()
|
||||
pkgdb = self._instantiate(base)
|
||||
pkg = base.sack.query().installed().filter(name="pepper")[0]
|
||||
expected = '%s/yumdb/p/bad9-pepper-20-0-x86_64' % base.conf.persistdir
|
||||
|
||||
directory = pkgdb._get_dir_name(pkg.pkgtup, b'bad9')
|
||||
self.assertEqual(expected, directory)
|
||||
directory = pkgdb._get_dir_name(pkg.pkgtup, 'bad9')
|
||||
self.assertEqual(expected, directory)
|
||||
directory = pkgdb._get_dir_name(pkg.pkgtup, None)
|
||||
self.assertEqual('%s/yumdb/p/<nopkgid>-pepper-20-0-x86_64' %
|
||||
base.conf.persistdir, directory)
|
Loading…
Reference in New Issue