remove prefixdata from solver interface (#1550)

Wolf Vollprecht 2022-03-21 17:33:38 +01:00 committed by GitHub
parent e74ca4e087
commit d232bcb9af
8 changed files with 63 additions and 50 deletions

View File

@@ -52,9 +52,7 @@ namespace mamba
     class MSolver
     {
     public:
-        MSolver(MPool& pool,
-                const std::vector<std::pair<int, int>>& flags = {},
-                const PrefixData* = nullptr);
+        MSolver(MPool& pool, const std::vector<std::pair<int, int>>& flags = {});
         ~MSolver();
         MSolver(const MSolver&) = delete;
@@ -100,7 +98,6 @@ namespace mamba
         Solver* m_solver;
         Pool* m_pool;
         Queue m_jobs;
-        const PrefixData* m_prefix_data = nullptr;
     };
 } // namespace mamba

View File

@@ -386,8 +386,7 @@ namespace mamba
             { { SOLVER_FLAG_ALLOW_UNINSTALL, ctx.allow_uninstall },
               { SOLVER_FLAG_ALLOW_DOWNGRADE, ctx.allow_downgrade },
               { SOLVER_FLAG_STRICT_REPO_PRIORITY,
-                ctx.channel_priority == ChannelPriority::kStrict } },
-            &prefix_data);
+                ctx.channel_priority == ChannelPriority::kStrict } });
         solver.set_postsolve_flags({ { MAMBA_NO_DEPS, no_deps },
                                      { MAMBA_ONLY_DEPS, only_deps },

View File

@@ -110,7 +110,7 @@ namespace mamba
             build_number = n;
         }
-        Id real_repo_key = pool_str2id(pool, "solvable:real_repo_url", 1);
+        static Id real_repo_key = pool_str2id(pool, "solvable:real_repo_url", 1);
         if (solvable_lookup_str(s, real_repo_key))
         {
             url = solvable_lookup_str(s, real_repo_key);

View File

@@ -41,14 +41,11 @@ namespace mamba
         return pool_dep2str(solver->pool, dep_id);
     }
-    MSolver::MSolver(MPool& pool,
-                     const std::vector<std::pair<int, int>>& flags,
-                     const PrefixData* prefix_data)
+    MSolver::MSolver(MPool& pool, const std::vector<std::pair<int, int>>& flags)
         : m_flags(flags)
         , m_is_solved(false)
        , m_solver(nullptr)
         , m_pool(pool)
-        , m_prefix_data(prefix_data)
     {
         queue_init(&m_jobs);
         pool_createwhatprovides(pool);
@@ -103,15 +100,17 @@ namespace mamba
     void MSolver::add_reinstall_job(MatchSpec& ms, int job_flag)
     {
-        if (!m_prefix_data)
+        if (!m_pool->installed)
         {
-            throw std::runtime_error("Solver needs PrefixData for reinstall jobs.");
+            throw std::runtime_error("Did not find any packages marked as installed.");
         }
         Pool* pool = m_pool;
         // 1. check if spec is already installed
         Id needle = pool_str2id(m_pool, ms.name.c_str(), 0);
+        static Id real_repo_key = pool_str2id(pool, "solvable:real_repo_url", 1);
         if (needle && m_pool->installed)
         {
             Id pkg_id;
@@ -121,18 +120,21 @@
                 if (s->name == needle)
                 {
-                    // the data about the channel is only in the prefix_data unfortunately
-                    const auto& records = m_prefix_data->records();
-                    auto record = records.find(ms.name);
                     std::string selected_channel;
-                    if (record != records.end())
+                    if (solvable_lookup_str(s, real_repo_key))
                     {
-                        selected_channel = record->second.channel;
+                        // this is the _full_ url to the file (incl. abc.tar.bz2)
+                        selected_channel = solvable_lookup_str(s, real_repo_key);
                     }
                     else
                     {
-                        throw std::runtime_error("Could not retrieve the original channel.");
+                        throw std::runtime_error(
+                            "Could not find channel associated with reinstall package");
                     }
                     selected_channel = make_channel(selected_channel).name();
                     MatchSpec modified_spec(ms);
                     if (!ms.channel.empty() || !ms.version.empty() || !ms.build.empty())
                     {

View File

@@ -71,6 +71,17 @@ PYBIND11_MODULE(bindings, m)
         .def("get_tarball_path", &MultiPackageCache::get_tarball_path)
         .def_property_readonly("first_writable_path", &MultiPackageCache::first_writable_path);
+    struct ExtraPkgInfo
+    {
+        std::string noarch;
+        std::string repo_url;
+    };
+    py::class_<ExtraPkgInfo>(m, "ExtraPkgInfo")
+        .def(py::init<>())
+        .def_readwrite("noarch", &ExtraPkgInfo::noarch)
+        .def_readwrite("repo_url", &ExtraPkgInfo::repo_url);
     py::class_<MRepo, std::unique_ptr<MRepo, py::nodelete>>(m, "Repo")
         .def(py::init(
             [](MPool& pool,
@@ -82,24 +93,26 @@ PYBIND11_MODULE(bindings, m)
             }))
         .def(py::init([](MPool& pool, const PrefixData& data)
                       { return std::unique_ptr<MRepo, py::nodelete>(&MRepo::create(pool, data)); }))
-        .def("add_python_noarch_info",
-             [](const MRepo& self, const std::vector<std::string>& names)
+        .def("add_extra_pkg_info",
+             [](const MRepo& self, const std::map<std::string, ExtraPkgInfo>& additional_info)
             {
                 Id pkg_id;
                 Solvable* pkg_s;
                 Pool* p = self.repo()->pool;
                 static Id noarch_repo_key = pool_str2id(p, "solvable:noarch_type", 1);
+                static Id real_repo_url_key = pool_str2id(p, "solvable:real_repo_url", 1);
-                for (auto& name : names)
+                FOR_REPO_SOLVABLES(self.repo(), pkg_id, pkg_s)
                 {
-                    Id nid = pool_str2id(p, name.c_str(), 0);
-                    FOR_REPO_SOLVABLES(self.repo(), pkg_id, pkg_s)
+                    std::string name = pool_id2str(p, pkg_s->name);
+                    auto it = additional_info.find(name);
+                    if (it != additional_info.end())
                     {
-                        if (pkg_s->name == nid)
-                        {
-                            solvable_set_str(pkg_s, noarch_repo_key, "python");
-                        }
+                        if (!it->second.noarch.empty())
+                            solvable_set_str(pkg_s, noarch_repo_key, it->second.noarch.c_str());
+                        if (!it->second.repo_url.empty())
+                            solvable_set_str(
+                                pkg_s, real_repo_url_key, it->second.repo_url.c_str());
                     }
                 }
                 repo_internalize(self.repo());
@@ -123,7 +136,6 @@ PYBIND11_MODULE(bindings, m)
     py::class_<MSolver>(m, "Solver")
        .def(py::init<MPool&, std::vector<std::pair<int, int>>>())
-        .def(py::init<MPool&, std::vector<std::pair<int, int>>, const PrefixData*>())
         .def("add_jobs", &MSolver::add_jobs)
         .def("add_global_job", &MSolver::add_global_job)
         .def("add_constraint", &MSolver::add_constraint)

View File

@@ -43,7 +43,6 @@ from conda.gateways.disk.create import mkdir_p
 from conda.gateways.disk.delete import delete_trash, path_is_clean, rm_rf
 from conda.gateways.disk.test import is_conda_environment
 from conda.misc import explicit, touch_nonadmin
-from conda.models.enums import NoarchType
 from conda.models.match_spec import MatchSpec
 import libmambapy as api
@@ -55,6 +54,7 @@ from mamba.utils import (
     get_installed_jsonfile,
     init_api_context,
     load_channels,
+    load_conda_installed,
     print_activate,
     to_txn,
 )
@@ -203,14 +203,7 @@ def remove(args, parser):
             repo = api.Repo(pool, prefix_data)
             repos.append(repo)
         else:
-            repo = api.Repo(pool, "installed", installed_json_f.name, "")
-            py_noarchs = [
-                rec.name
-                for rec in installed_pkg_recs
-                if rec.noarch == NoarchType.python
-            ]
-            repo.add_python_noarch_info(py_noarchs)
-            repo.set_installed()
+            repo = load_conda_installed(pool, installed_json_f, installed_pkg_recs)
             repos.append(repo)
         solver = api.Solver(pool, solver_options)
@@ -483,12 +476,7 @@ def install(args, parser, command="install"):
             repo = api.Repo(pool, prefix_data)
             repos.append(repo)
         else:
-            repo = api.Repo(pool, "installed", installed_json_f.name, "")
-            py_noarchs = [
-                rec.name for rec in installed_pkg_recs if rec.noarch == NoarchType.python
-            ]
-            repo.add_python_noarch_info(py_noarchs)
-            repo.set_installed()
+            repo = load_conda_installed(pool, installed_json_f, installed_pkg_recs)
             repos.append(repo)
     if newenv and not specs:
@@ -504,10 +492,7 @@ def install(args, parser, command="install"):
     else:
         index = load_channels(pool, channels, repos)
-    if context.force_reinstall:
-        solver = api.Solver(pool, solver_options, prefix_data)
-    else:
-        solver = api.Solver(pool, solver_options)
+    solver = api.Solver(pool, solver_options)
     solver.set_postsolve_flags(
         [

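With the PrefixData overload gone from both the C++ class and the bindings, the force_reinstall branch above collapses: every caller now constructs the solver from just the pool and the flag pairs. A minimal sketch of the resulting call shape, assuming the SOLVER_FLAG_ALLOW_DOWNGRADE and SOLVER_INSTALL constants that libmambapy exposes elsewhere; the spec is a placeholder:

import libmambapy as api

pool = api.Pool()
# (flag, value) pairs, exactly as before; only the third PrefixData argument is gone
solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

solver = api.Solver(pool, solver_options)
solver.add_jobs(["numpy"], api.SOLVER_INSTALL)  # list of specs plus a libsolv job flag
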
View File

@@ -22,6 +22,7 @@ from mamba.utils import (
     get_installed_jsonfile,
     init_api_context,
     load_channels,
+    load_conda_installed,
     to_txn,
     to_txn_precs,
 )
@@ -74,8 +75,8 @@ def mamba_install(prefix, specs, args, env, dry_run=False, *_, **kwargs):
         installed_pkg_recs_prefix = installed_pkg_recs
     with tempfile.TemporaryDirectory() as td:
         installed_json_f, installed_pkg_recs = get_installed_jsonfile(td)
-        repo = api.Repo(pool, "installed", installed_json_f.name, "")
-        repo.set_installed()
+        repo = load_conda_installed(pool, installed_json_f, installed_pkg_recs)
         repos.append(repo)
     solver = api.Solver(pool, solver_options)

View File

@@ -26,6 +26,23 @@ from conda.models.records import PackageRecord
 import libmambapy as api
+def load_conda_installed(pool, installed_json_f, installed_pkg_recs):
+    repo = api.Repo(pool, "installed", installed_json_f.name, "")
+    additional_infos = {}
+    for rec in installed_pkg_recs:
+        info = api.ExtraPkgInfo()
+        if rec.noarch:
+            info.noarch = rec.noarch.value
+        if rec.url:
+            info.repo_url = rec.url
+        additional_infos[rec.name] = info
+    repo.add_extra_pkg_info(additional_infos)
+    repo.set_installed()
+    return repo
 def load_channel(subdir_data, result_container):
     if not context.quiet:
         print("Getting ", subdir_data.channel.name, subdir_data.channel.platform)