first working version

This commit is contained in:
Wolf Vollprecht 2019-03-21 14:11:52 +01:00
parent 2edc57599b
commit 966fdc826c
17 changed files with 900 additions and 883 deletions

5
.gitignore vendored
View File

@ -4,4 +4,7 @@ __cache__/
.rendered*
installed.json
*.so
__pycache__
.vscode
.ipynb_checkpoints
build/

169
bin/mamba Executable file
View File

@ -0,0 +1,169 @@
#!/usr/bin/env python3
import sys, os
cur_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(cur_dir, '../mamba/'))
from conda.cli.main import generate_parser
from conda.base.context import context
from conda.common.compat import ensure_text_type, init_std_stream_encoding
from conda.core.index import calculate_channel_urls, check_whitelist #, get_index
from conda.models.channel import Channel, prioritize_channels
from conda.models.records import PackageRecord
from conda.cli.main_list import list_packages
from conda.core.prefix_data import PrefixData
from conda.common.serialize import json_dump
from conda.cli.common import specs_from_args, specs_from_url
from conda.core.subdir_data import SubdirData
from conda.common.url import join_url
from conda.core.link import UnlinkLinkTransaction, PrefixSetup
from conda.cli.install import handle_txn
from conda.base.constants import ChannelPriority
import json
import tempfile
from FastSubdirData import FastSubdirData
import mamba_api as api
from multiprocessing.pool import Pool as MPool
def get_channel(x):
    """Download and parse the repodata for a single channel URL *x*."""
    print("Getting ", x)
    subdir_data = FastSubdirData(Channel(x))
    return subdir_data.load()
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=None, prefix=None):
    """Load repodata for all requested channels in parallel.

    The signature mirrors conda's ``get_index`` so this works as a drop-in
    replacement; ``use_cache``, ``unknown`` and ``prefix`` are currently
    unused but kept for interface compatibility.

    Returns a list of loaded FastSubdirData objects, one per channel URL.
    """
    channel_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local)
    check_whitelist(channel_urls)
    # Use the pool as a context manager so the worker processes are always
    # torn down, even when a download raises (the original leaked the pool).
    with MPool(8) as pool:
        return pool.map(get_channel, channel_urls)
def to_package_record_from_subjson(subdir, pkg, jsn_string):
    """Build a conda PackageRecord from one raw repodata JSON entry.

    *subdir* is the loaded channel data, *pkg* the package filename and
    *jsn_string* the package's JSON record as text.
    """
    info = json.loads(jsn_string)
    info['fn'] = pkg
    info['channel'] = subdir.channel
    info['url'] = join_url(subdir.url_w_credentials, pkg)
    return PackageRecord(**info)
    # NOTE: conda optionally appends 'pip' to python's depends
    # (add_pip behavior); intentionally not replicated here.
def get_installed_packages(prefix, show_channel_urls=None):
    """Return ``(records, json_dict)`` for everything installed in *prefix*.

    ``records`` is the list of PrefixRecord objects; ``json_dict`` is a
    ``{'packages': {filename: record-dump}}`` mapping fed to the solver.
    """
    # Currently, we need to have pip interop disabled :/
    prefix_data = PrefixData(prefix, pip_interop_enabled=False)
    installed = list(prefix_data.iter_records())

    def _dump(rec):
        entry = rec.dist_fields_dump()
        entry['depends'] = rec.depends
        entry['build'] = rec.build
        return entry

    result = {'packages': {rec.fn: _dump(rec) for rec in installed}}
    return installed, result
def main():
    """Entry point: parse conda-style CLI args, solve, run the transaction."""
    # These conda internals are not imported at module level; bring them in
    # here so the NameError paths in the original (`CondaError`, `explicit`)
    # are fixed without touching the file header.
    from conda import CondaError
    from conda.cli.install import explicit

    args = tuple(ensure_text_type(s) for s in sys.argv)
    if len(args) == 1:
        # No subcommand given: fall back to help, as conda does.
        args = args + ('-h',)
    p = generate_parser()
    args = p.parse_args(args[1:])
    context.__init__(argparse_args=args)
    prefix = context.target_prefix
    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }
    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'], platform=None,
                      use_local=index_args['use_local'], use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'], prefix=prefix)
    channel_json = [(str(x.channel), x.cache_path_json) for x in index]

    installed_pkg_recs, output = get_installed_packages(prefix, show_channel_urls=True)
    # The native solver reads the installed packages from a JSON file on disk.
    installed_json_f = tempfile.NamedTemporaryFile('w')
    installed_json_f.write(json_dump(output))
    installed_json_f.flush()

    args_packages = [s.strip('"\'') for s in args.packages]
    specs = []
    if args.file:
        for fpath in args.file:
            try:
                specs.extend(specs_from_url(fpath, json=context.json))
            except UnicodeError:
                raise CondaError("Error reading file, file should be a text file containing"
                                 " packages \nconda create --help for details")
        if '@EXPLICIT' in specs:
            explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
            return
    specs.extend(specs_from_args(args_packages, json=context.json))

    def separate(s):
        """Insert a space before the first '=' so the C++ parser can split
        name and version; reject complex relations we cannot handle yet."""
        spec = str(s)
        for ix, c in enumerate(spec):
            if c == '=':
                return spec[:ix] + ' ' + spec[ix:]
            if c in ['<', '>']:
                # was `raise Error(...)` -- an undefined name; CondaError is
                # the intended exception type here.
                raise CondaError("Complex versions not yet supported on command line, only `==` and `==x.*` etc are supported.")
        return spec

    specs_separated = [separate(s) for s in specs]
    print("\n\nLooking for: {}\n\n".format(specs))

    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    to_link, to_unlink = api.solve(channel_json, installed_json_f.name,
                                   specs_separated, strict_priority)

    to_link_records, to_unlink_records = [], []

    def find_channel(name):
        """Map a channel name back to its loaded FastSubdirData object.

        Renamed from the original inner `get_channel`, which shadowed the
        module-level download helper of the same name.
        """
        for entry in index:
            if str(entry.channel) == name:
                return entry

    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    for c, pkg, jsn_s in to_link:
        sdir = find_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        to_link_records.append(rec)

    pref_setup = PrefixSetup(
        target_prefix=prefix,
        unlink_precs=to_unlink_records,
        link_precs=to_link_records,
        remove_specs=[],
        update_specs=specs
    )
    conda_transaction = UnlinkLinkTransaction(pref_setup)
    handle_txn(conda_transaction, prefix, args, True)


if __name__ == '__main__':
    sys.exit(main())

View File

@ -22,8 +22,9 @@ api = cppimport.imp('interface')
prefix = '/home/wolfv/miniconda3/'
channels = ['conda-forge', 'r', 'anaconda']
what_to_get = ["opencv ==3.4.2"]
# what_to_get = ["xtensor-r", "r-base ==3.5.1"]
# what_to_get = ["opencv ==3.4.2"]
# what_to_get = ["conda-build"]
what_to_get = ["xtensor-r", "r-base ==3.5.1"]
url_template = 'https://conda.anaconda.org/{}/linux-64/repodata.json.bz2'
@ -105,7 +106,7 @@ for c, pkg in to_unlink:
if p['name'] == pkg:
to_unlink_records.append(p)
for c, pkg in to_link:
for c, pkg, json_ in to_link:
for p in channel_data[c]._package_records:
if p['fn'] == pkg:
to_link_records.append(p)

62
include/CMakeLists.txt Normal file
View File

@ -0,0 +1,62 @@
# Build configuration for the mamba native extension (mamba_api).
# find_library(... REQUIRED) below needs CMake >= 3.18, so require that
# (the original asked for 2.8, which predates both that option and the
# C++17 flags used further down).
cmake_minimum_required(VERSION 3.18)
project(mamba)

# Default to an optimized build when the user does not pick one.
if(NOT CMAKE_BUILD_TYPE)
    set(CMAKE_BUILD_TYPE "Release"
        CACHE STRING "Build configuration 'Release' or 'Debug'."
        FORCE)
endif()

# Inside an activated conda environment, install into it and locate
# libsolv there instead of relying on a system-wide package.
set(CONDA $ENV{CONDA_PREFIX})
if (CONDA)
    set(CMAKE_INSTALL_LIBDIR lib)
    set(CMAKE_INSTALL_PREFIX ${CONDA})
    set(CMAKE_MODULE_PATH ${CONDA}/share/cmake/Modules)
    find_library(LibSolv_LIBRARIES
        NAMES solv
        HINTS ${CONDA}
        PATHS ${CONDA}
        PATH_SUFFIXES lib
        REQUIRED
    )
    find_path(LibSolv_INCLUDE_DIRS NAME solv/repo.h
        HINTS ${CONDA}
        PATHS ${CONDA}
        PATH_SUFFIXES include
    )
    message(STATUS "Found libsolv: ${LibSolv_LIBRARIES}")
    message(STATUS "Found libsolv include: ${LibSolv_INCLUDE_DIRS}")
else ()
    find_package(LibSolv REQUIRED)
endif ()

find_package(pybind11 REQUIRED)

include_directories(thirdparty)
include_directories(${CMAKE_INSTALL_PREFIX}/include)

pybind11_add_module(mamba_api py_interface.cpp solver.cpp parsing.cpp thirdparty/simdjson/simdjson.cpp)
target_link_libraries(mamba_api PRIVATE ${LibSolv_LIBRARIES})

if (CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR (CMAKE_CXX_COMPILER_ID MATCHES "Intel" AND NOT WIN32))
    target_compile_options(mamba_api PUBLIC -std=c++17 -march=core-avx2 -O3)
else ()
    # TODO: verify the MSVC flag set; /arch:AVX2 mirrors -march=core-avx2 above.
    target_compile_options(mamba_api PUBLIC /std:c++17 /arch:AVX2 /Ox)
endif()

# Place the built module next to the Python sources so bin/mamba can
# `import mamba_api` directly.
set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/../mamba/)

47
include/json_helper.hpp Normal file
View File

@ -0,0 +1,47 @@
// json_helper.hpp -- re-serialization helper for simdjson documents.
//
// Fixes over the original: the header had no include guard and defined a
// non-inline function, which is an ODR violation as soon as two translation
// units include it; add `#pragma once` and mark the function `inline`
// (the same commit applies `inline` throughout pystring.hpp).
#pragma once

#include "thirdparty/simdjson/simdjson.h"
#include <ostream>

// Recursively write the JSON value under `pjh` to `os` as compact JSON.
// Mutates the iterator as it walks; recursion depth equals nesting depth.
inline void compute_dump(ParsedJson::iterator& pjh, std::ostream& os)
{
    if (pjh.is_object())
    {
        os << "{";
        if (pjh.down())
        {
            pjh.print(os); // key -- must be a string
            os << ":";
            pjh.next();
            compute_dump(pjh, os); // recurse into the value
            while (pjh.next())
            {
                os << ",";
                pjh.print(os);
                os << ":";
                pjh.next();
                compute_dump(pjh, os);
            }
            pjh.up();
        }
        os << "}";
    }
    else if (pjh.is_array())
    {
        os << "[";
        if (pjh.down())
        {
            compute_dump(pjh, os); // first element
            while (pjh.next())
            {
                os << ",";
                compute_dump(pjh, os);
            }
            pjh.up();
        }
        os << "]";
    }
    else
    {
        pjh.print(os); // scalar: print the lone value
    }
}

View File

@ -1,34 +1,295 @@
/*
<%
cfg['compiler_args'] = ['-O3', '-std=c++17', '-march=native']
cfg['libraries'] = ['solv']
setup_pybind11(cfg)
%>
*/
#ifndef MAMBA_PARSING
#define MAMBA_PARSING
#include "parsing.hpp"
#include <pybind11/pybind11.h>
#include <array>
#include <string>
#include <stdexcept>
#include <string_view>
#include <cassert>
static Pool* gb_pool;
extern "C"
{
#include "solv/pool.h"
#include "solv/repo.h"
#include "solv/queue.h"
#include "solv/solver.h"
}
PYBIND11_MODULE(parsing, m) {
m.def("init", []() {
Pool* pool = pool_create();
gb_pool = pool;
});
#include "thirdparty/pystring/pystring.hpp"
// m.def("normalize_version", (std::string (std::string_view, int)*) &normalize_version);
// m.def("normalize_version", (std::string (std::string_view, int, std::string_view)*) &normalize_version);
m.def("parse_to_relation", [](const std::string_view& dep) { parse_to_relation(dep, gb_pool); });
m.def("test_parse", [](const std::string t) {
auto id = pool_str2id(gb_pool, "python", 1);
auto rel_id = parse_version_relation(id, t, gb_pool);
std::cout << pool_dep2str(gb_pool, rel_id) << std::endl;
return parse_version_relation(id, t, gb_pool);
});
m.def("test_fparse", [](const std::string t) {
auto rel_id = parse_to_relation(t, gb_pool);
std::cout << pool_dep2str(gb_pool, rel_id) << std::endl;
});
}
#include <iostream>
struct parsed_relation {
int relation = -1;
bool fuzzy = false;
std::string_view version;
};
// TODO better concat
// Pad `version` to at least three dot-separated components and append the
// build number as a final component, e.g. ("1.2", 5) -> "1.2.0.5".
std::string normalize_version(std::string_view version, int build_number)
{
    static const std::string padding = "0";
    auto parts = pystring::split(version, ".");
    for (std::size_t n = parts.size(); n < 3; ++n)
    {
        parts.push_back(padding);
    }
    std::string result = pystring::join(".", parts);
    result += ".";
    result += std::to_string(build_number);
    return result;
}
// As above, but additionally append "-<build>" (the build string),
// e.g. ("1.2", 5, "py37") -> "1.2.0.5-py37".
std::string normalize_version(std::string_view version, int build_number, std::string_view build)
{
    static const std::string padding = "0";
    auto parts = pystring::split(version, ".");
    for (std::size_t n = parts.size(); n < 3; ++n)
    {
        parts.push_back(padding);
    }
    std::string result = pystring::join(".", parts);
    result += ".";
    result += std::to_string(build_number);
    result += "-";
    result += build;
    return result;
}
// Parse the leading comparison operator and trailing '*' off a version
// constraint such as ">=1.2", "==1.2.*" or "1.2.*".
//
// Fuzzy (trailing '*') constraints are normalized:
//   - "==x.*" stays fuzzy and is later expanded into a version range
//   - ">=x.*" degrades to the fuzzy "==x.*" behavior
//   - "<x.*" / "<=x.*" simply drop the '*'
//   - any other fuzzy combination is rejected
parsed_relation get_relation(const std::string_view& vs)
{
    parsed_relation res;
    std::size_t start = 0, end;
    // Leading operator: <, >, <=, >= or ==; absent means equality.
    if (vs[0] == '<' || vs[0] == '>')
    {
        res.relation = (vs[0] == '<') ? REL_LT : REL_GT;
        if (vs[1] == '=')
        {
            res.relation |= REL_EQ;
            start = 2;
        }
        else
        {
            start = 1;
        }
    }
    else if (vs[0] == '=' && vs[1] == '=')
    {
        res.relation = REL_EQ;
        start = 2;
    }
    // Trailing '*' marks a fuzzy match; it is not part of the version text.
    if (vs[vs.size() - 1] == '*')
    {
        res.fuzzy = true;
        end = vs.size() - 1;
    }
    else
    {
        end = vs.size();
    }
    if (res.relation == -1)
    {
        res.relation = REL_EQ;
    }
    if (res.fuzzy && res.relation != REL_EQ)
    {
        // BUG FIX: the original tested `res.relation | REL_GT`, which is
        // always non-zero, so every fuzzy relation took this branch.
        // Test for >= explicitly (matches the corrected parsing.hpp).
        if (res.relation == (REL_GT | REL_EQ))
        {
            res.relation = REL_EQ;
        }
        else
        {
            if (res.relation & REL_LT)
            {
                // "<x.*" / "<=x.*": just remove * from end, do nothing
                res.fuzzy = false;
            }
            else
            {
                std::cout << vs << std::endl;
                throw std::runtime_error("Cannot match fuzzy version with other than `==`");
            }
            // TODO fix this intelligently with build string comparison ... ?
        }
    }
    res.version = std::string_view(&vs[start], end - start);
    return res;
}
// Debug printer: optional "fzy " prefix, operator symbol, then the version.
std::ostream& operator<<(std::ostream& os, const parsed_relation& rel)
{
    if (rel.fuzzy)
    {
        os << "fzy ";
    }
    const int r = rel.relation;
    if (r == REL_GT)                 { os << ">";  }
    else if (r == (REL_GT | REL_EQ)) { os << ">="; }
    else if (r == REL_LT)            { os << "<";  }
    else if (r == (REL_LT | REL_EQ)) { os << "<="; }
    else if (r == REL_EQ)            { os << "=="; }
    os << " " << rel.version;
    return os;
}
// Build the libsolv relation `name >= v_lo AND name < v_hi`.
Id get_between(Id name_id, const std::string_view& v_lo, const std::string_view& v_hi, Pool* pool)
{
    const Id lo_id = pool_strn2id(pool, v_lo.data(), v_lo.size(), 1);
    const Id hi_id = pool_strn2id(pool, v_hi.data(), v_hi.size(), 1);
    const Id ge_lo = pool_rel2id(pool, name_id, lo_id, REL_GT | REL_EQ, 1);
    const Id lt_hi = pool_rel2id(pool, name_id, hi_id, REL_LT, 1);
    return pool_rel2id(pool, ge_lo, lt_hi, REL_WITH, 1);
}
// Expand a fuzzy constraint "x.y.*" (with the '*' already stripped, so
// `vnumber` is e.g. "1.2." or "1.2") into the range  name >= x.y  AND
// name < x.(y+1)  via get_between().
//
// NOTE(review): std::strtoul expects a NUL-terminated string, but &vnumber[..]
// points into a string_view that need not be NUL-terminated at idx_back --
// presumably safe because callers pass views into larger buffers, but
// TODO confirm.
Id get_fuzzy_relation(Id name_id, const std::string_view& vnumber, Pool* pool)
{
int idx_back, idx_front;
std::size_t lversion = 0;
// idx_back: one past the last character of the version text.
if (vnumber.back() == '.')
{
// remove last dot!
idx_back = vnumber.size() - 1;
}
else
{
idx_back = vnumber.size();
}
// Scan backwards for the last '.'; the digits after it are the component
// that gets incremented to form the exclusive upper bound.
for (idx_front = idx_back - 1; idx_front >= 0; --idx_front)
{
if (vnumber[idx_front] == '.')
{
auto* end_ptr = const_cast<char*>(&vnumber[idx_back]);
lversion = std::strtoul(&vnumber[idx_front + 1], &end_ptr, 10);
break;
}
}
// No dot at all: the whole string is a single numeric component
// (idx_front == -1, so idx_front + 1 == 0).
if (idx_front == -1)
{
auto* end_ptr = const_cast<char*>(&vnumber[idx_back]);
lversion = std::strtoul(&vnumber[idx_front + 1], &end_ptr, 10);
}
// lower_version = vnumber[0] -> vnumber[idx_back];
// Upper bound: everything up to and including the last '.', then the
// incremented final component, e.g. "1.2" -> "1.3".
std::string higher_version(vnumber.begin(), vnumber.begin() + idx_front + 1);
higher_version += std::to_string(lversion + 1);
return get_between(name_id, std::string_view(&vnumber[0], idx_back),
higher_version, pool);
}
// Turn a single version constraint (e.g. "*", "1.2", ">=1.2", "==1.2.*")
// into a libsolv relation Id against `name_id`.
Id parse_version_relation(Id name_id,
const std::string_view& version_rel,
Pool* pool)
{
// Single character: either a bare "*" (any version) or a one-character
// version like "2", matched as the range [2.0.0.0, 2.0.0.9999].
if (version_rel.size() == 1)
{
if (version_rel[0] == '*')
{
return name_id;
}
else
{
// TODO make sure that char is alpha num
auto v_lo = normalize_version(version_rel, 0);
auto v_hi = normalize_version(version_rel, 9999);
return get_between(name_id, v_lo, v_hi, pool);
}
}
// now we know that the size of `c` == 2 or larger!
auto rel = get_relation(version_rel);
if (!rel.fuzzy)
{
if (rel.relation == REL_EQ)
{
// Four dots means the version already carries a build-number
// component (see normalize_version), so match it exactly.
if (4 == std::count_if(rel.version.begin(), rel.version.end(), [](char c) { return c == '.';}))
{
Id evr_id = pool_strn2id(pool, &rel.version[0], rel.version.size(), 1);
return pool_rel2id(pool, name_id, evr_id, rel.relation, 1);
}
// Otherwise "==x.y" matches any build number of x.y: use a range.
auto v_lo = normalize_version(rel.version, 0);
auto v_hi = normalize_version(rel.version, 9999);
return get_between(name_id, v_lo, v_hi, pool);
}
else
{
// Ordered relation (<, <=, >, >=): hand it to libsolv directly.
assert(rel.relation & REL_GT || rel.relation & REL_LT);
Id evr_id = pool_strn2id(pool, &rel.version[0], rel.version.size(), 1);
return pool_rel2id(pool, name_id, evr_id, rel.relation, 1);
}
}
else
{
// Fuzzy "==x.*": expand into a half-open version range.
return get_fuzzy_relation(name_id, rel.version, pool);
}
}
// Parse a constraint expression that may combine version relations with
// '|' (OR) and ',' (AND), e.g. ">=1.2,<2|==3.0".  Splits on the first
// operator found and recurses on the right-hand side, so evaluation is
// left-to-right with no precedence between '|' and ','.
Id parse_rel_string(Id name_id, const std::string_view& str, Pool* pool)
{
// TODO implement operator precedence
for (std::size_t i = 0; i < str.size(); ++i)
{
switch (str[i])
{
case '|':
{
// lhs OR rest-of-string
auto lhs_rel = parse_version_relation(name_id, std::string_view(&str[0], i), pool);
return pool_rel2id(pool, lhs_rel,
parse_rel_string(name_id, std::string_view(&str[i + 1], str.size() - (i + 1)), pool),
REL_OR, 1);
}
case ',':
{
// lhs AND rest-of-string (libsolv expresses AND as REL_WITH)
auto lhs_rel = parse_version_relation(name_id, std::string_view(&str[0], i), pool);
return pool_rel2id(pool, lhs_rel,
parse_rel_string(name_id, std::string_view(&str[i + 1], str.size() - (i + 1)), pool),
REL_WITH, 1);
}
default:
break;
}
}
// No operator found: the whole string is a single relation.
return parse_version_relation(name_id, str, pool);
}
// Parse a full dependency spec "name [version-relation [build/feature]]"
// (space-separated) into a libsolv Id.  A bare name matches any version.
Id parse_to_relation(const std::string_view& dep, Pool* pool) {
auto pkg = pystring::split(dep, " ");
if (pkg.size() == 1)
{
// Name only: intern it directly.
return pool_strn2id(pool, &dep[0], dep.size(), 1);
}
else
{
const auto& name = pkg[0];
Id name_id;
if (pkg.size() == 2)
{
name_id = pool_strn2id(pool, &name[0], name.size(), 1);
}
else
{
assert(pkg.size() == 3);
// add feature to dependency: intern "name[feature]" as the solvable name
std::stringstream os;
os << name << "[" << pkg[2] << "]";
std::string tmp_string = os.str();
name_id = pool_strn2id(pool, tmp_string.c_str(), tmp_string.size(), 1);
}
// Combine the name with the version-relation part (pkg[1]).
auto id = parse_rel_string(name_id, pkg[1], pool);
return id;
}
}
#endif

View File

@ -1,6 +1,7 @@
#ifndef MAMBA_PARSING
#define MAMBA_PARSING
#include <iostream>
#include <array>
#include <string>
#include <stdexcept>
@ -17,8 +18,6 @@ extern "C"
#include "thirdparty/pystring/pystring.hpp"
#include <iostream>
struct parsed_relation {
int relation = -1;
bool fuzzy = false;
@ -26,275 +25,8 @@ struct parsed_relation {
};
// TODO better concat
std::string normalize_version(std::string_view version, int build_number)
{
static const std::string zero = "0";
auto split = pystring::split(version, ".");
while (split.size() < 3) split.push_back(zero);
std::string out = pystring::join(".", split);
out += ".";
out += std::to_string(build_number);
return out;
}
std::string normalize_version(std::string_view version, int build_number, std::string_view build)
{
static const std::string zero = "0";
auto split = pystring::split(version, ".");
while (split.size() < 3) split.push_back(zero);
std::string out = pystring::join(".", split);
out += ".";
out += std::to_string(build_number);
out += "-";
out += build;
return out;
}
parsed_relation get_relation(const std::string_view& vs)
{
// std::cout << "VS: " << vs << std::endl;
parsed_relation res;
std::size_t start = 0, end;
if (vs[0] == '<' || vs[0] == '>')
{
res.relation = (vs[0] == '<') ? REL_LT : REL_GT;
if (vs[1] == '=')
{
res.relation |= REL_EQ;
start = 2;
}
else
{
start = 1;
}
}
else if (vs[0] == '=' && vs[1] == '=')
{
res.relation = REL_EQ;
start = 2;
}
if (vs[vs.size() - 1] == '*')
{
res.fuzzy = true;
end = vs.size() - 1;
}
else
{
end = vs.size();
}
if (res.relation == -1)
{
res.relation = REL_EQ;
}
if (res.fuzzy && res.relation != REL_EQ)
{
if (res.relation == (REL_GT | REL_EQ))
{
res.relation = REL_EQ;
}
else
{
if (res.relation & REL_LT)
{
// just remove * from end, do nothing
res.fuzzy = false;
}
else
{
std::cout << vs << std::endl;
throw std::runtime_error("Cannot match fuzzy version with other than `==`");
}
// TODO fix this intelligently with build string comparison ... ?
}
}
res.version = std::string_view(&vs[start], end - start);
return res;
}
std::ostream& operator<<(std::ostream& os, const parsed_relation& rel)
{
if (rel.fuzzy) { os << "fzy "; }
switch (rel.relation){
case REL_GT: os << ">"; break;
case REL_GT | REL_EQ: os << ">="; break;
case REL_LT: os << "<"; break;
case REL_LT | REL_EQ: os << "<="; break;
case REL_EQ: os << "=="; break;
}
os << " " << rel.version;
return os;
}
Id get_between(Id name_id, const std::string_view& v_lo, const std::string_view& v_hi, Pool* pool)
{
auto evr1 = pool_strn2id(pool, &v_lo[0], v_lo.size(), 1);
auto evr2 = pool_strn2id(pool, &v_hi[0], v_hi.size(), 1);
auto xr1 = pool_rel2id(pool, name_id, evr1, REL_GT | REL_EQ, 1);
auto xr2 = pool_rel2id(pool, name_id, evr2, REL_LT, 1);
return pool_rel2id(pool, xr1, xr2, REL_WITH, 1);
}
Id get_fuzzy_relation(Id name_id, const std::string_view& vnumber, Pool* pool)
{
int idx_back, idx_front;
std::size_t lversion = 0;
if (vnumber.back() == '.')
{
// remove last dot!
idx_back = vnumber.size() - 1;
}
else
{
idx_back = vnumber.size();
}
for (idx_front = idx_back - 1; idx_front >= 0; --idx_front)
{
if (vnumber[idx_front] == '.')
{
auto* end_ptr = const_cast<char*>(&vnumber[idx_back]);
lversion = std::strtoul(&vnumber[idx_front + 1], &end_ptr, 10);
break;
}
}
if (idx_front == -1)
{
auto* end_ptr = const_cast<char*>(&vnumber[idx_back]);
lversion = std::strtoul(&vnumber[idx_front + 1], &end_ptr, 10);
}
// lower_version = vnumber[0] -> vnumber[idx_back];
std::string higher_version(vnumber.begin(), vnumber.begin() + idx_front + 1);
higher_version += std::to_string(lversion + 1);
return get_between(name_id, std::string_view(&vnumber[0], idx_back),
higher_version, pool);
}
Id parse_version_relation(Id name_id,
const std::string_view& version_rel,
Pool* pool)
{
if (version_rel.size() == 1)
{
if (version_rel[0] == '*')
{
return name_id;
}
else
{
// TODO make sure that char is alpha num
auto v_lo = normalize_version(version_rel, 0);
auto v_hi = normalize_version(version_rel, 9999);
return get_between(name_id, v_lo, v_hi, pool);
}
}
// now we know that the size of `c` == 2 or larger!
auto rel = get_relation(version_rel);
if (!rel.fuzzy)
{
if (rel.relation == REL_EQ)
{
if (4 == std::count_if(rel.version.begin(), rel.version.end(), [](char c) { return c == '.';}))
{
Id evr_id = pool_strn2id(pool, &rel.version[0], rel.version.size(), 1);
return pool_rel2id(pool, name_id, evr_id, rel.relation, 1);
}
auto v_lo = normalize_version(rel.version, 0);
auto v_hi = normalize_version(rel.version, 9999);
return get_between(name_id, v_lo, v_hi, pool);
}
else
{
assert(rel.relation & REL_GT || rel.relation & REL_LT);
Id evr_id = pool_strn2id(pool, &rel.version[0], rel.version.size(), 1);
return pool_rel2id(pool, name_id, evr_id, rel.relation, 1);
}
}
else
{
return get_fuzzy_relation(name_id, rel.version, pool);
}
}
Id parse_rel_string(Id name_id, const std::string_view& str, Pool* pool)
{
// TODO implement operator precedence
for (std::size_t i = 0; i < str.size(); ++i)
{
switch (str[i])
{
case '|':
{
auto lhs_rel = parse_version_relation(name_id, std::string_view(&str[0], i), pool);
return pool_rel2id(pool, lhs_rel,
parse_rel_string(name_id, std::string_view(&str[i + 1], str.size() - (i + 1)), pool),
REL_OR, 1);
}
case ',':
{
auto lhs_rel = parse_version_relation(name_id, std::string_view(&str[0], i), pool);
return pool_rel2id(pool, lhs_rel,
parse_rel_string(name_id, std::string_view(&str[i + 1], str.size() - (i + 1)), pool),
REL_WITH, 1);
}
default:
break;
}
}
return parse_version_relation(name_id, str, pool);
}
Id parse_to_relation(const std::string_view& dep, Pool* pool) {
auto pkg = pystring::split(dep, " ");
if (pkg.size() == 1)
{
return pool_strn2id(pool, &dep[0], dep.size(), 1);
}
else
{
const auto& name = pkg[0];
Id name_id;
if (pkg.size() == 2)
{
name_id = pool_strn2id(pool, &name[0], name.size(), 1);
}
else
{
assert(pkg.size() == 3);
// add feature to dependency
std::stringstream os;
os << name << "[" << pkg[2] << "]";
std::string tmp_string = os.str();
name_id = pool_strn2id(pool, tmp_string.c_str(), tmp_string.size(), 1);
}
// std::cout << "PARTS ";
// for (auto& el : pkg)
// {
// std::cout << el << " :: ";
// }
// std::cout << "\n";
auto id = parse_rel_string(name_id, pkg[1], pool);
return id;
}
}
std::string normalize_version(std::string_view version, int build_number);
std::string normalize_version(std::string_view version, int build_number, std::string_view build);
Id parse_to_relation(const std::string_view& dep, Pool* pool);
#endif

10
include/py_interface.cpp Normal file
View File

@ -0,0 +1,10 @@
// Python bindings for the native solver core.
#include "solver.hpp"
#include <pybind11/pybind11.h>
#include <pybind11/stl.h> // automatic std::vector/std::tuple <-> Python conversion
namespace py = pybind11;
// Module is named `mamba_api`; bin/mamba does `import mamba_api as api`
// and calls api.solve(...).
PYBIND11_MODULE(mamba_api, m) {
m.def("solve", &solve);
}

View File

@ -1,13 +1,4 @@
/*
<%
cfg['compiler_args'] = ['-std=c++17', '-march=native']
cfg['libraries'] = ['solv']
setup_pybind11(cfg)
%>
*/
#include "thirdparty/simdjson/simdjson.h"
#include "thirdparty/simdjson/simdjson.cpp"
extern "C"
{
@ -23,35 +14,12 @@ static Pool* global_pool;
#include <iostream>
#include <map>
#include "solver.hpp"
#include "parsing.hpp"
#include "json_helper.hpp"
struct package {
std::string name;
std::string version;
std::string build_string;
int build_number;
};
std::ostream& operator<<(std::ostream& os, package& pkg)
{
return os << pkg.name << " -> " << pkg.version << ", " << pkg.build_string;
}
struct repo_package {
std::string name;
std::string version;
std::string build_string;
int build_number;
std::vector<std::string> dependencies;
};
std::ostream& operator<<(std::ostream& os, repo_package& pkg)
{
return os << pkg.name << " -> " << pkg.version << ", " << pkg.build_string;
}
void parse_repo(ParsedJson::iterator &i, Repo* repo, std::map<Id, std::string>& rmap) {
package pkg;
if (!i.move_to_key("packages"))
{
@ -73,7 +41,6 @@ void parse_repo(ParsedJson::iterator &i, Repo* repo, std::map<Id, std::string>&
if (strcmp(i.get_string(), "name") == 0)
{
i.next();
pkg.name = i.get_string();
name = i.get_string();
Id name_id = pool_str2id(global_pool, i.get_string(), 1);
s.name = name_id;
@ -150,80 +117,36 @@ void parse_repo(ParsedJson::iterator &i, Repo* repo, std::map<Id, std::string>&
} while (i.next());
}
void installed_packages(Repo* repo, ParsedJson::iterator &i, std::map<Id, std::string>& rmap) {
package pkg;
switch (i.get_type()) {
case '{':
{
package pkg;
i.down();
do {
if (strcmp(i.get_string(), "name") == 0)
{
i.next();
pkg.name = i.get_string();
}
else if (strcmp(i.get_string(), "build_number") == 0)
{
i.next();
pkg.build_number = i.get_integer();
}
else if (strcmp(i.get_string(), "build_string") == 0)
{
i.next();
pkg.build_string = i.get_string();
}
else if (strcmp(i.get_string(), "version") == 0)
{
i.next();
pkg.version = i.get_string();
}
else {
i.next(); // skip value?
}
} while (i.next());
i.up();
Id s_id = repo_add_solvable(repo);
rmap[s_id] = std::string(pkg.name);
auto& s = global_pool->solvables[s_id];
s.name = pool_str2id(global_pool, pkg.name.c_str(), 1);
s.evr = pool_str2id(global_pool, normalize_version(pkg.version, pkg.build_number, pkg.build_string).c_str(), 1);
solvable_add_deparray(&s, SOLVABLE_PROVIDES,
pool_rel2id(global_pool, s.name, s.evr, REL_EQ, 1), -1);
break;
}
case '[':
if (i.down()) {
do {
if (i.is_object_or_array()) {
installed_packages(repo, i, rmap);
}
} while (i.next());
i.up();
auto get_package_info(ParsedJson::iterator &i, const std::string& key)
{
if (!i.move_to_key("packages"))
{
throw std::runtime_error("Could not find packages key!");
}
break;
case 'l':
case 'd':
case 'n':
case 't':
case 'f':
default:
break;
}
if (!i.move_to_key(key.c_str()))
{
throw std::runtime_error("Could not find package " + key);
}
std::stringstream json;
compute_dump(i, json);
return json.str();
}
auto solve(std::vector<std::pair<std::string, std::string>> repos,
std::tuple<std::vector<std::tuple<std::string, std::string, std::string>>,
std::vector<std::tuple<std::string, std::string>>>
solve(std::vector<std::pair<std::string, std::string>> repos,
std::string installed,
std::vector<std::string> jobs)
std::vector<std::string> jobs,
bool strict_priority)
{
Pool* pool = pool_create();
global_pool = pool;
std::map<std::string, std::map<Id, std::string>> repo_to_file_map;
std::map<std::string, ParsedJson> chan_to_json;
if (installed.size())
{
Repo* repo = repo_create(pool, "installed");
@ -232,9 +155,10 @@ auto solve(std::vector<std::pair<std::string, std::string>> repos,
std::string_view p = get_corpus(installed);
ParsedJson pj = build_parsed_json(p);
ParsedJson::iterator pjh(pj);
installed_packages(repo, pjh, repo_to_file_map["installed"]);
parse_repo(pjh, repo, repo_to_file_map["installed"]);
}
int priority = repos.size();
for (auto& fn : repos)
{
std::string_view p = get_corpus(fn.second);
@ -242,15 +166,26 @@ auto solve(std::vector<std::pair<std::string, std::string>> repos,
repo_to_file_map[fn.first] = std::map<Id, std::string>();
Repo* repo = repo_create(pool, fn.first.c_str());
ParsedJson pj = build_parsed_json(p);
if (strict_priority)
{
repo->priority = priority--;
}
else
{
repo->subpriority = priority--;
}
chan_to_json.emplace(fn.first, build_parsed_json(p));
auto& pj = chan_to_json[fn.first];
if (!pj.isValid())
{
std::cout << "Found invalid json!";
throw std::runtime_error("Invalid JSON detected!");
}
else
{
std::cout << "Parsing " << fn.second << std::endl;
}
ParsedJson::iterator pjh(pj);
parse_repo(pjh, repo, repo_to_file_map[fn.first]);
std::cout << repo->nsolvables << " packages in " << fn.first << std::endl;
@ -262,8 +197,7 @@ auto solve(std::vector<std::pair<std::string, std::string>> repos,
solver_set_flag(solvy, SOLVER_FLAG_ALLOW_DOWNGRADE, 1);
std::cout << "Allowing downgrade: " << solver_get_flag(solvy, SOLVER_FLAG_ALLOW_DOWNGRADE) << std::endl;
std::cout << "Creating the solver...\n" << std::endl;
std::cout << "Creating the solver..." << std::endl;
Queue q;
queue_init(&q);
@ -291,52 +225,29 @@ auto solve(std::vector<std::pair<std::string, std::string>> repos,
}
if (cnt > 0)
{
exit(1);
throw std::runtime_error("Encountered problems while solving.");
}
queue_free(&problem_queue);
transaction_print(transy);
Queue q2;
int cut;
queue_init(&q2);
cut = transaction_installedresult(transy, &q2);
queue_truncate(&q2, cut);
std::cout << "Solution: \n" << std::endl;
std::vector<std::pair<std::string, std::string>> to_install_structured;
std::vector<std::pair<std::string, std::string>> to_remove_structured;
std::vector<std::string> to_install;
for (int i = 0; i < q2.count; ++i)
{
to_install.emplace_back(pool_id2str(global_pool, global_pool->solvables[q2.elements[i]].name));
to_install.back() += " ==";
std::string version = pool_id2str(global_pool, global_pool->solvables[q2.elements[i]].evr);
auto vsplit = pystring::split(version, ".");
vsplit.pop_back();
version = pystring::join(".", vsplit);
to_install.back() += version;
auto& s = global_pool->solvables[q2.elements[i]];
// std::pair<std::string, std::string> elem = {std::string(s.repo->name), repo_to_file_map[s.repo->name][q2.elements[i]]};
// to_install_structured.push_back(elem);
}
queue_free(&q2);
std::vector<std::tuple<std::string, std::string, std::string>> to_install_structured;
std::vector<std::tuple<std::string, std::string>> to_remove_structured;
{
Queue classes, pkgs;
int i, j, mode;
const char *n;
queue_init(&classes);
queue_init(&pkgs);
mode = SOLVER_TRANSACTION_SHOW_OBSOLETES |
SOLVER_TRANSACTION_OBSOLETE_IS_UPGRADE;
transaction_classify(transy, mode, &classes);
Id cls, cnt;
Id cls;
for (i = 0; i < classes.count; i += 4) {
cls = classes.elements[i];
cnt = classes.elements[i + 1];
@ -353,7 +264,7 @@ auto solve(std::vector<std::pair<std::string, std::string>> repos,
case SOLVER_TRANSACTION_UPGRADED:
s2 = pool->solvables + transaction_obs_pkg(transy, p);
to_remove_structured.emplace_back(s->repo->name, repo_to_file_map[s->repo->name][p]);
to_install_structured.emplace_back(s2->repo->name, repo_to_file_map[s2->repo->name][transaction_obs_pkg(transy, p)]);
to_install_structured.emplace_back(s2->repo->name, repo_to_file_map[s2->repo->name][transaction_obs_pkg(transy, p)], "");
break;
case SOLVER_TRANSACTION_VENDORCHANGE:
case SOLVER_TRANSACTION_ARCHCHANGE:
@ -363,7 +274,7 @@ auto solve(std::vector<std::pair<std::string, std::string>> repos,
to_remove_structured.emplace_back(s->repo->name, repo_to_file_map[s->repo->name][p]);
break;
case SOLVER_TRANSACTION_INSTALL:
to_install_structured.emplace_back(s->repo->name, repo_to_file_map[s->repo->name][p]);
to_install_structured.emplace_back(s->repo->name, repo_to_file_map[s->repo->name][p], "");
break;
default:
std::cout << "CASE NOT HANDLED." << std::endl;
@ -375,12 +286,11 @@ auto solve(std::vector<std::pair<std::string, std::string>> repos,
queue_free(&pkgs);
}
std::sort(to_install.begin(), to_install.end());
std::string result;
for (auto& line : to_install)
for (auto& el : to_install_structured)
{
result += "- " + line + "\n";
auto& json = chan_to_json[std::get<0>(el)];
ParsedJson::iterator pjh(json);
std::get<2>(el) = get_package_info(pjh, std::get<1>(el));
}
transaction_free(transy);

9
include/solver.hpp Normal file
View File

@ -0,0 +1,9 @@
// solver.hpp -- public declaration of the native solve entry point.
//
// Fixes over the original: the header used std::tuple and std::pair
// without including <tuple>/<utility>, and had no include guard.
#pragma once

#include <string>
#include <tuple>
#include <utility>
#include <vector>

// Solve the dependency problem described by `jobs`.
//
// repos:           (channel name, path to repodata.json) pairs.
// installed:       path to a JSON dump of installed packages
//                  (may be empty -- see solver.cpp's size() check).
// jobs:            match specs, e.g. "xtensor ==0.18.2".
// strict_priority: use strict channel priority when true, otherwise
//                  sub-priorities only.
//
// Returns (to_link, to_unlink); to_link entries are
// (channel, filename, package repodata JSON) triples and to_unlink
// entries are (channel, filename) pairs, matching bin/mamba's unpacking.
std::tuple<std::vector<std::tuple<std::string, std::string, std::string>>,
           std::vector<std::tuple<std::string, std::string>>>
solve(std::vector<std::pair<std::string, std::string>> repos,
      std::string installed,
      std::vector<std::string> jobs,
      bool strict_priority);

View File

@ -88,7 +88,7 @@ using namespace std::string_literals;
namespace {
void split_whitespace(const string_view& str, std::vector<string_view>& result, std::ptrdiff_t maxsplit)
inline void split_whitespace(const string_view& str, std::vector<string_view>& result, std::ptrdiff_t maxsplit)
{
std::size_t i, j, len = str.size();
@ -124,7 +124,7 @@ using namespace std::string_literals;
}
}
void rsplit_whitespace(const string_view& str, std::vector<string_view>& result, std::ptrdiff_t maxsplit)
inline void rsplit_whitespace(const string_view& str, std::vector<string_view>& result, std::ptrdiff_t maxsplit)
{
std::size_t len = str.size();
std::size_t i, j;
@ -162,7 +162,7 @@ using namespace std::string_literals;
}
} // namespace
std::vector<string_view> split(const string_view& str, const string_view& sep, std::ptrdiff_t maxsplit)
inline std::vector<string_view> split(const string_view& str, const string_view& sep, std::ptrdiff_t maxsplit)
{
std::vector<string_view> result;
@ -204,7 +204,7 @@ using namespace std::string_literals;
return result;
}
std::vector<string_view> rsplit(const string_view& str, const string_view& sep, std::ptrdiff_t maxsplit)
inline std::vector<string_view> rsplit(const string_view& str, const string_view& sep, std::ptrdiff_t maxsplit)
{
if (maxsplit < 0)
{
@ -249,7 +249,7 @@ using namespace std::string_literals;
}
template <class S = std::string>
string_view do_strip(const string_view& str, strip_dir striptype, const string_view& chars)
inline string_view do_strip(const string_view& str, strip_dir striptype, const string_view& chars)
{
std::ptrdiff_t i, j;
std::ptrdiff_t len = static_cast<std::ptrdiff_t>(str.size());
@ -316,7 +316,7 @@ using namespace std::string_literals;
}
template <class S>
std::array<string_view, 3> partition(const string_view& str, const string_view& sep)
inline std::array<string_view, 3> partition(const string_view& str, const string_view& sep)
{
std::array<string_view, 3> result;
std::ptrdiff_t index = find(str, sep);
@ -337,7 +337,7 @@ using namespace std::string_literals;
}
template <class S>
std::array<string_view, 3> rpartition(const string_view& str, const string_view& sep)
inline std::array<string_view, 3> rpartition(const string_view& str, const string_view& sep)
{
std::array<string_view, 3> result;
std::ptrdiff_t index = rfind(str, sep);
@ -358,23 +358,23 @@ using namespace std::string_literals;
return result;
}
string_view strip(const string_view& str, const string_view& chars)
inline string_view strip(const string_view& str, const string_view& chars)
{
return do_strip(str, strip_dir::both, chars);
}
string_view lstrip(const string_view& str, const string_view& chars)
inline string_view lstrip(const string_view& str, const string_view& chars)
{
return do_strip(str, strip_dir::left, chars);
}
string_view rstrip(const string_view& str, const string_view& chars)
inline string_view rstrip(const string_view& str, const string_view& chars)
{
return do_strip(str, strip_dir::right, chars);
}
template <class S, class C>
string join(const S& str, const C& seq)
inline string join(const S& str, const C& seq)
{
std::size_t seqlen = detail::size(seq), i;
@ -401,7 +401,7 @@ using namespace std::string_literals;
* -1 on error, 0 if not found and 1 if found.
********************************************************************/
template <class S, class C>
std::ptrdiff_t _string_tailmatch(const S& self, const C& substr,
inline std::ptrdiff_t _string_tailmatch(const S& self, const C& substr,
std::ptrdiff_t start, std::ptrdiff_t end, std::ptrdiff_t direction)
{
std::ptrdiff_t len = static_cast<std::ptrdiff_t>(detail::size(self));
@ -442,7 +442,7 @@ using namespace std::string_literals;
} // namespace
template <class S = std::string, class C = S>
bool endswith(const S& str, const C& suffix, std::ptrdiff_t start, std::ptrdiff_t end)
inline bool endswith(const S& str, const C& suffix, std::ptrdiff_t start, std::ptrdiff_t end)
{
std::ptrdiff_t result = _string_tailmatch(str, suffix, (std::ptrdiff_t)start, (std::ptrdiff_t)end, +1);
// if (result == -1) // TODO: Error condition
@ -451,7 +451,7 @@ using namespace std::string_literals;
}
template <class S = std::string, class C>
bool startswith(const S& str, const C& prefix, std::ptrdiff_t start, std::ptrdiff_t end)
inline bool startswith(const S& str, const C& prefix, std::ptrdiff_t start, std::ptrdiff_t end)
{
std::ptrdiff_t result = _string_tailmatch(str, prefix, (std::ptrdiff_t)start, (std::ptrdiff_t)end, -1);
// if (result == -1) // TODO: Error condition
@ -460,7 +460,7 @@ using namespace std::string_literals;
}
template <class S = std::string>
bool isalnum(const S& str)
inline bool isalnum(const S& str)
{
auto it = std::begin(str),
end = detail::fast_end(str);
@ -480,7 +480,7 @@ using namespace std::string_literals;
}
template <class S = std::string>
bool isalpha(const S& str)
inline bool isalpha(const S& str)
{
auto it = std::begin(str),
end = detail::fast_end(str);
@ -499,7 +499,7 @@ using namespace std::string_literals;
return true;
}
bool isdigit(const string_view& str)
inline bool isdigit(const string_view& str)
{
std::size_t len = str.size(), i;
if (len == 0)
@ -516,7 +516,7 @@ using namespace std::string_literals;
return true;
}
bool islower(const string_view& str)
inline bool islower(const string_view& str)
{
std::size_t len = str.size(), i;
if (len == 0)
@ -534,7 +534,7 @@ using namespace std::string_literals;
return true;
}
bool isspace(const string_view& str)
inline bool isspace(const string_view& str)
{
std::size_t len = str.size(), i;
if (len == 0)
@ -552,7 +552,7 @@ using namespace std::string_literals;
return true;
}
bool istitle(const string_view& str)
inline bool istitle(const string_view& str)
{
std::size_t len = str.size(), i;
@ -598,7 +598,7 @@ using namespace std::string_literals;
return cased;
}
bool isupper(const string_view& str)
inline bool isupper(const string_view& str)
{
std::size_t len = str.size(), i;
if (len == 0)
@ -617,7 +617,7 @@ using namespace std::string_literals;
}
template <class S>
S capitalize(const S& str)
inline S capitalize(const S& str)
{
S s(str);
std::size_t len = detail::size(s), i;
@ -639,7 +639,7 @@ using namespace std::string_literals;
}
template <class S>
S lower(const S& str)
inline S lower(const S& str)
{
S s(str);
std::size_t len = detail::size(s), i;
@ -656,7 +656,7 @@ using namespace std::string_literals;
}
template <class S>
S upper(const S& str)
inline S upper(const S& str)
{
S s(str);
std::size_t len = detail::size(s), i;
@ -673,7 +673,7 @@ using namespace std::string_literals;
}
template <class S>
S swapcase(const S& str)
inline S swapcase(const S& str)
{
S s(str);
std::size_t len = detail::size(s), i;
@ -694,7 +694,7 @@ using namespace std::string_literals;
}
template <class S>
S title(const S& str)
inline S title(const S& str)
{
S s(str);
std::size_t len = detail::size(s), i;
@ -729,7 +729,7 @@ using namespace std::string_literals;
}
template <class S>
string translate(const S& str, const string_view& table, const string_view& deletechars)
inline string translate(const S& str, const string_view& table, const string_view& deletechars)
{
string s;
std::size_t len = detail::size(str), del_len = deletechars.size();
@ -779,7 +779,7 @@ using namespace std::string_literals;
std::ptrdiff_t n;
};
std::ostream& operator<<(std::ostream& str, const fillstream& fs)
inline std::ostream& operator<<(std::ostream& str, const fillstream& fs)
{
for (std::ptrdiff_t i = 0; i < fs.n; ++i)
{
@ -788,7 +788,7 @@ using namespace std::string_literals;
return str;
}
string zfill(const string_view& str, std::ptrdiff_t width)
inline string zfill(const string_view& str, std::ptrdiff_t width)
{
std::ptrdiff_t len = (std::ptrdiff_t)str.size();
@ -813,7 +813,7 @@ using namespace std::string_literals;
return s;
}
string ljust(const string_view& str, std::ptrdiff_t width)
inline string ljust(const string_view& str, std::ptrdiff_t width)
{
std::ptrdiff_t len = static_cast<std::ptrdiff_t>(str.size());
std::ostringstream os;
@ -821,7 +821,7 @@ using namespace std::string_literals;
return os.str();
}
string rjust(const string_view& str, std::ptrdiff_t width)
inline string rjust(const string_view& str, std::ptrdiff_t width)
{
std::ptrdiff_t len = static_cast<std::ptrdiff_t>(str.size());
std::ostringstream os;
@ -829,7 +829,7 @@ using namespace std::string_literals;
return os.str();
}
string center(const string_view& str, std::ptrdiff_t width)
inline string center(const string_view& str, std::ptrdiff_t width)
{
std::ptrdiff_t len = static_cast<std::ptrdiff_t>(str.size());
std::ptrdiff_t marg, left;
@ -840,7 +840,7 @@ using namespace std::string_literals;
return os.str();
}
string_view slice(const string_view& str, std::ptrdiff_t start, std::ptrdiff_t end)
inline string_view slice(const string_view& str, std::ptrdiff_t start, std::ptrdiff_t end)
{
ADJUST_INDICES(start, end, (std::ptrdiff_t) str.size());
if (start >= end)
@ -849,7 +849,7 @@ using namespace std::string_literals;
}
template <class S>
std::ptrdiff_t find(const string_view& str, const S& sub, std::ptrdiff_t start, std::ptrdiff_t end)
inline std::ptrdiff_t find(const string_view& str, const S& sub, std::ptrdiff_t start, std::ptrdiff_t end)
{
ADJUST_INDICES(start, end, (std::ptrdiff_t) detail::size(str));
@ -870,13 +870,13 @@ using namespace std::string_literals;
}
template <class S>
std::ptrdiff_t index(const string_view& str, const S& sub, std::ptrdiff_t start, std::ptrdiff_t end)
inline std::ptrdiff_t index(const string_view& str, const S& sub, std::ptrdiff_t start, std::ptrdiff_t end)
{
return find(str, sub, start, end);
}
template <class S>
std::ptrdiff_t rfind(const string_view& str, const S& sub, std::ptrdiff_t start, std::ptrdiff_t end)
inline std::ptrdiff_t rfind(const string_view& str, const S& sub, std::ptrdiff_t start, std::ptrdiff_t end)
{
ADJUST_INDICES(start, end, (std::ptrdiff_t) str.size());
@ -891,12 +891,12 @@ using namespace std::string_literals;
}
template <class S>
std::ptrdiff_t rindex(const string_view& str, const S& sub, std::ptrdiff_t start, std::ptrdiff_t end)
inline std::ptrdiff_t rindex(const string_view& str, const S& sub, std::ptrdiff_t start, std::ptrdiff_t end)
{
return rfind(str, sub, start, end);
}
string expandtabs(const string_view& str, std::ptrdiff_t tabsize)
inline string expandtabs(const string_view& str, std::ptrdiff_t tabsize)
{
std::ostringstream os;
std::size_t len = str.size(), i = 0;
@ -927,7 +927,7 @@ using namespace std::string_literals;
return os.str();
}
std::ptrdiff_t count(const string_view& str, const string_view& substr, std::ptrdiff_t start, std::ptrdiff_t end)
inline std::ptrdiff_t count(const string_view& str, const string_view& substr, std::ptrdiff_t start, std::ptrdiff_t end)
{
std::ptrdiff_t nummatches = 0;
std::ptrdiff_t cursor = start;
@ -946,7 +946,7 @@ using namespace std::string_literals;
}
template <class S, class T, class U>
string replace(const S& str, const T& oldstr, const U& newstr, std::ptrdiff_t count)
inline string replace(const S& str, const T& oldstr, const U& newstr, std::ptrdiff_t count)
{
std::ptrdiff_t sofar = 0;
std::ptrdiff_t cursor = 0;
@ -976,7 +976,7 @@ using namespace std::string_literals;
return s;
}
std::vector<string_view> splitlines(const string_view& str, bool keepends)
inline std::vector<string_view> splitlines(const string_view& str, bool keepends)
{
std::vector<string_view> result;
std::size_t len = str.size(), i, j, eol;
@ -1011,7 +1011,7 @@ using namespace std::string_literals;
}
template <class S>
string mul(const string_view& str, std::ptrdiff_t n)
inline string mul(const string_view& str, std::ptrdiff_t n)
{
// Early exits
if (n <= 0)

View File

@ -1,3 +1,5 @@
#ifndef PYSTRING_HEADER
#define PYSTRING_HEADER
namespace pystring {
@ -398,3 +400,5 @@ namespace os {
} // namespace path
} // namespace os
#endif

View File

@ -50,13 +50,13 @@ namespace pystring
namespace detail
{
template <class S>
auto size(const S& s)
inline auto size(const S& s)
{
return s.size();
}
template <class T, std::size_t N>
auto size(const T (&str)[N])
inline auto size(const T (&str)[N])
{
return std::strlen(str);
}
@ -72,25 +72,25 @@ namespace detail
}
};
auto size(const char_substr& str)
inline auto size(const char_substr& str)
{
return std::distance(str.pos, str.end);
}
template <class T>
bool operator==(const char_substr& lhs, const T& rhs)
inline bool operator==(const char_substr& lhs, const T& rhs)
{
return std::equal(lhs.pos, lhs.end, std::begin(rhs)) && size(lhs) == size(rhs);
}
template <class S>
auto substr(S& str, std::size_t pos, std::size_t len)
inline auto substr(S& str, std::size_t pos, std::size_t len)
{
return str.substr(pos, len);
}
template <class T, std::size_t N>
auto substr(T (&str)[N], std::size_t pos, std::size_t len)
inline auto substr(T (&str)[N], std::size_t pos, std::size_t len)
{
return char_substr{str + pos, str + pos + len};
}
@ -99,7 +99,7 @@ namespace detail
using std::begin;
template <class C, std::size_t N>
auto end(const C (&c)[N])
inline auto end(const C (&c)[N])
{
return &c[0] + std::strlen(c);
}
@ -107,41 +107,40 @@ namespace detail
struct fast_end_t {};
template <class C>
auto fast_end(C& c)
inline auto fast_end(C& c)
{
return c.end();
}
template <class T, std::size_t N>
auto fast_end(T (&)[N])
inline auto fast_end(T (&)[N])
{
return fast_end_t();
}
template <class It>
bool operator==(It it, fast_end_t)
inline bool operator==(It it, fast_end_t)
{
return (*it == '\0');
}
template <class It>
bool operator!=(It it, fast_end_t)
inline bool operator!=(It it, fast_end_t)
{
return !(it == fast_end_t());
}
const char* c_str(const std::string& str)
inline const char* c_str(const std::string& str)
{
return str.c_str();
}
template <std::size_t N>
const char* c_str(const char(&str)[N])
inline const char* c_str(const char(&str)[N])
{
return str;
}
} // ns detail
}

View File

@ -1,19 +0,0 @@
/*
<%
cfg['compiler_args'] = ['-std=c++17', '-march=native']
cfg['libraries'] = ['solv']
cfg['include_dirs'] = ['/home/wolfv/miniconda3/include/']
setup_pybind11(cfg)
%>
*/
// The comment block above is a cppimport build recipe: compile as C++17,
// link against libsolv, and search a (machine-specific) conda include dir.
// NOTE(review): the hard-coded /home/wolfv include path only works on the
// author's machine -- confirm how this is meant to build elsewhere.
#include "include/api.cpp"  // single-translation-unit build: includes the .cpp directly
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>

namespace py = pybind11;

// Expose the solver entry point to Python as `interface.solve`.
PYBIND11_MODULE(interface, m) {
    m.def("solve", &solve);
}

194
mamba/FastSubdirData.py Normal file
View File

@ -0,0 +1,194 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import absolute_import, division, print_function, unicode_literals
import bz2
from collections import defaultdict
from contextlib import closing
from errno import EACCES, ENODEV, EPERM
from genericpath import getmtime, isfile
import hashlib
import json
from logging import DEBUG, getLogger
from mmap import ACCESS_READ, mmap
from os.path import dirname, isdir, join, splitext
import re
from time import time
import warnings
from io import open as io_open
from conda import CondaError
from conda._vendor.auxlib.ish import dals
from conda._vendor.auxlib.logz import stringify
from conda._vendor.toolz import concat, take
from conda.base.constants import CONDA_HOMEPAGE_URL
from conda.base.context import context
from conda.common.compat import (ensure_binary, ensure_text_type, ensure_unicode, iteritems,
string_types, text_type, with_metaclass)
from conda.common.io import ThreadLimitedThreadPoolExecutor, as_completed
from conda.common.url import join_url, maybe_unquote
from conda.core.package_cache_data import PackageCacheData
from conda.exceptions import (CondaDependencyError, CondaHTTPError, CondaUpgradeError,
NotWritableError, UnavailableInvalidChannel)
from conda.gateways.connection import (ConnectionError, HTTPError, InsecureRequestWarning,
InvalidSchema, SSLError)
from conda.gateways.connection.session import CondaSession
from conda.gateways.disk import mkdir_p, mkdir_p_sudo_safe
from conda.gateways.disk.delete import rm_rf
from conda.gateways.disk.update import touch
from conda.models.channel import Channel, all_channel_urls
from conda.models.match_spec import MatchSpec
from conda.models.records import PackageRecord
from conda.core.subdir_data import *
log = getLogger(__name__)
stderrlog = getLogger('conda.stderrlog')
REPODATA_PICKLE_VERSION = 28
MAX_REPODATA_VERSION = 1
REPODATA_HEADER_RE = b'"(_etag|_mod|_cache_control)":[ ]?"(.*?[^\\\\])"[,\}\s]' # NOQA
class SubdirDataType(type):
    """Metaclass that memoizes ``FastSubdirData`` construction.

    Calling ``FastSubdirData(channel)`` twice for the same credentialed
    channel URL returns the cached instance; ``file://`` channels are
    never served from the cache (local repodata may change on disk).
    """

    def __call__(cls, channel):
        assert channel.subdir
        assert not channel.package_filename
        assert type(channel) is Channel
        url_key = channel.url(with_credentials=True)
        cacheable = not url_key.startswith('file://')
        if cacheable and url_key in FastSubdirData._cache_:
            return FastSubdirData._cache_[url_key]
        instance = super(SubdirDataType, cls).__call__(channel)
        FastSubdirData._cache_[url_key] = instance
        return instance
@with_metaclass(SubdirDataType)
class FastSubdirData(object):
_cache_ = {}
def __init__(self, channel):
    """Bind this instance to *channel* and compute its cache paths.

    Construction is lazy: no repodata is read here; ``load`` /
    ``iter_records`` trigger the actual fetch or cache read.
    """
    assert channel.subdir
    if channel.package_filename:
        # Strip a package filename so the instance (and cache key) is
        # per-channel, not per-package.
        parts = channel.dump()
        del parts['package_filename']
        channel = Channel(**parts)
    self.channel = channel
    self.url_w_subdir = self.channel.url(with_credentials=False)
    self.url_w_credentials = self.channel.url(with_credentials=True)
    # On-disk cache basename derives from the credentialed URL.
    self.cache_path_base = join(create_cache_dir(),
                                splitext(cache_fn_url(self.url_w_credentials))[0])
    self._loaded = False
def reload(self):
    """Force a fresh ``load`` by clearing the loaded flag first.

    Returns ``self`` so calls can be chained.
    """
    self._loaded = False
    self.load()
    return self
@property
def cache_path_json(self):
    """Path of the on-disk repodata JSON cache for this channel."""
    return '{}.json'.format(self.cache_path_base)
def load(self):
    """Load repodata (from cache or network) and mark the instance loaded.

    Returns ``self`` so calls can be chained.
    """
    self._load()
    self._loaded = True
    return self
def iter_records(self):
    """Iterate over the loaded package records, loading on first use."""
    if not self._loaded:
        self.load()
    # NOTE(review): _package_records is never assigned in the visible code;
    # presumably _process_raw_repodata_str populates it -- confirm.
    return iter(self._package_records)
def _load(self):
    """Fetch or read this channel's repodata, using the local JSON cache.

    Decision order: no cache + offline/forced-cache -> empty skeleton;
    forced-cache or still-fresh cache -> read from disk; otherwise a
    conditional HTTP request (etag/last-modified), falling back to disk
    on 304 and writing the fresh download to disk otherwise.
    """
    try:
        mtime = getmtime(self.cache_path_json)
    except (IOError, OSError):
        # No cache file on disk yet.
        log.debug("No local cache found for %s at %s", self.url_w_subdir, self.cache_path_json)
        if context.use_index_cache or (context.offline
                and not self.url_w_subdir.startswith('file://')):
            # Offline (or cache forced) with nothing cached: return an empty
            # skeleton rather than touching the network.
            # NOTE(review): the other paths return the result of
            # _read_local_repdata / _process_raw_repodata_str -- confirm this
            # literal dict matches that shape.
            log.debug("Using cached data for %s at %s forced. Returning empty repodata.",
                      self.url_w_subdir, self.cache_path_json)
            return {
                '_package_records': (),
                '_names_index': defaultdict(list),
                '_track_features_index': defaultdict(list),
            }
        else:
            mod_etag_headers = {}
    else:
        # A cache file exists: recover the stored HTTP validators.
        mod_etag_headers = read_mod_and_etag(self.cache_path_json)
        if context.use_index_cache:
            log.debug("Using cached repodata for %s at %s because use_cache=True",
                      self.url_w_subdir, self.cache_path_json)
            _internal_state = self._read_local_repdata(mod_etag_headers.get('_etag'),
                                                       mod_etag_headers.get('_mod'))
            return _internal_state

        # How long does the local cache stay valid?
        if context.local_repodata_ttl > 1:
            max_age = context.local_repodata_ttl
        elif context.local_repodata_ttl == 1:
            max_age = get_cache_control_max_age(mod_etag_headers.get('_cache_control', ''))
        else:
            max_age = 0

        timeout = mtime + max_age - time()
        if (timeout > 0 or context.offline) and not self.url_w_subdir.startswith('file://'):
            # Cache still fresh (or offline): serve from disk.
            log.debug("Using cached repodata for %s at %s. Timeout in %d sec",
                      self.url_w_subdir, self.cache_path_json, timeout)
            _internal_state = self._read_local_repdata(mod_etag_headers.get('_etag'),
                                                       mod_etag_headers.get('_mod'))
            return _internal_state

        log.debug("Local cache timed out for %s at %s",
                  self.url_w_subdir, self.cache_path_json)

    try:
        # Conditional GET: the server may answer 304 via etag/last-modified.
        raw_repodata_str = fetch_repodata_remote_request(self.url_w_credentials,
                                                         mod_etag_headers.get('_etag'),
                                                         mod_etag_headers.get('_mod'))
    except Response304ContentUnchanged:
        log.debug("304 NOT MODIFIED for '%s'. Updating mtime and loading from disk",
                  self.url_w_subdir)
        touch(self.cache_path_json)
        _internal_state = self._read_local_repdata(mod_etag_headers.get('_etag'),
                                                   mod_etag_headers.get('_mod'))
        return _internal_state
    else:
        # Persist the fresh download before parsing it.
        if not isdir(dirname(self.cache_path_json)):
            mkdir_p(dirname(self.cache_path_json))
        try:
            with io_open(self.cache_path_json, 'w') as fh:
                fh.write(raw_repodata_str or '{}')
        except (IOError, OSError) as e:
            if e.errno in (EACCES, EPERM):
                raise NotWritableError(self.cache_path_json, e.errno, caused_by=e)
            else:
                raise
        _internal_state = self._process_raw_repodata_str(raw_repodata_str)
        self._internal_state = _internal_state
        return _internal_state
def _read_local_repdata(self, etag, mod_stamp):
    """Read the cached repodata JSON from disk and parse it.

    NOTE(review): *etag* and *mod_stamp* are unused in the visible body --
    presumably kept for signature parity with conda's SubdirData; confirm.
    """
    log.debug("Loading raw json for %s at %s", self.url_w_subdir, self.cache_path_json)
    with open(self.cache_path_json) as fh:
        try:
            raw_repodata_str = fh.read()
        except ValueError as e:
            # ValueError: Expecting object: line 11750 column 6 (char 303397)
            log.debug("Error for cache path: '%s'\n%r", self.cache_path_json, e)
            message = dals("""
            An error occurred when loading cached repodata. Executing
            `conda clean --index-cache` will remove cached repodata files
            so they can be downloaded again.
            """)
            raise CondaError(message)
        else:
            _internal_state = self._process_raw_repodata_str(raw_repodata_str)
            return _internal_state
def _process_raw_repodata_str(self, raw_repodata_str):
self.raw_repodata_str = raw_repodata_str

0
mamba/__init__.py Normal file
View File

365
test.py
View File

@ -1,365 +0,0 @@
import sys
CONDA_PATH = '/home/wolfv/Programs/conda/'
sys.path.insert(0, CONDA_PATH)
# orig, sys.path = sys.path, [CONDA_PATH]
# import conda
# from tests import helpers
# sys.path = orig + [CONDA_PATH]
# i, r = helpers.get_index_r_1()
# print(i, r)
from conda.models import match_spec
from conda.models import version as cndversion
M = match_spec.MatchSpec('test >=2.5*, <3.5')
from multiprocessing.pool import Pool as MPool
import bz2
import sys
import solv
import json
from packaging import version
from urllib import request
import pandas as pd
import numpy as np
import re
# from clint.textui import progress
pool = solv.Pool()
# what is installed in the system
installed = pool.add_repo('installed')
pool.installed = installed
def normalize_version(version, build_number, build):
    """Turn a conda version into a 4-component EVR string for libsolv.

    The version is zero-padded to at least three dot-separated parts, the
    build number becomes the next component, and the build string follows
    a dash, e.g. ('1.2', 0, 'py36_0') -> '1.2.0.0-py36_0'.
    """
    parts = version.split('.')
    padding = ['0'] * max(0, 3 - len(parts))
    evr = '.'.join(parts + padding + [str(build_number)])
    return '{}-{}'.format(evr, build)
def add_installed(f, installed_pool):
    """Register the packages listed in JSON file *f* as installed solvables.

    *f* is a path to a JSON list of package dicts with 'name', 'version',
    'build_number' and 'build_string' keys; each becomes a solvable in the
    libsolv *installed_pool* repo.
    """
    parsed = json.load(open(f, 'r'))
    for pkg in parsed:
        v = normalize_version(pkg['version'], pkg['build_number'], pkg['build_string'])
        # print(pkg['name'], v)
        s = installed_pool.add_solvable()
        s.name = pkg['name']
        s.evr = v
        # s.arch = sel_pkg.get('target-triplet')
add_installed('installed.json', installed)
def parse_to_rel(d):
    """Translate a conda dependency string into libsolv relation id(s).

    *d* is e.g. 'numpy >=1.7,<1.8' or 'python 2.7*' or just 'xtensor'.
    Returns a list of libsolv ids (usually one; two comma-separated
    constraints are combined with REL_WITH). Uses the module-level
    ``pool`` and ``solv``.

    NOTE(review): reconstructed from an indentation-stripped diff --
    confirm nesting against the original file.
    """
    pkg = d.split(' ')
    if len(pkg) > 1:
        # print(d)
        name = pkg[0]
        constraint = pkg[1]
        if len(pkg) > 2:
            # A third token is treated as a feature tag: 'name[feature]'.
            feature = pkg[2]
            name = "{}[{}]".format(name, feature)
        # print(constraint)
        # Implement OR operator :)
        constraints = constraint.split(',')
        pcs = []
        for c in constraints:
            if len(c) and c[0] == '<':
                rel = solv.REL_LT
                if c[1] == '=':
                    rel |= solv.REL_EQ
                    target_version = c[2:]
                else:
                    target_version = c[1:]
            elif len(c) and c[0] == '>':
                rel = solv.REL_GT
                if c[1] == '=':
                    target_version = c[2:]
                    rel |= solv.REL_EQ
                else:
                    target_version = c[1:]
            else:
                if '*' in c:
                    # Wildcard: build the half-open range [v1, v2) where v2
                    # is v1 with the starred component bumped by one.
                    rel1 = solv.REL_GT | solv.REL_EQ
                    rel2 = solv.REL_LT
                    fill_value = 0
                    v1, v2 = [], []
                    for el in c.split('.'):
                        matches = re.match(r'(\d+)?(\D+)?', el).groups()
                        # print(el, v1, v2)
                        if matches[0] is None:
                            if matches[1] == '*' and len(v2) > 0:
                                # increment previous
                                v2[-1] += 1
                                # if len(v2) < 3:
                                #     v1.append(fill_value)
                                #     v2.append(fill_value)
                            else:
                                # only * as version number
                                return [pool.str2id(name)]
                        elif matches[0] is not None and matches[1] == '*':
                            v1.append(int(matches[0]))
                            v2.append(int(matches[0]) + 1)
                        elif matches[1] is None:
                            v1.append(int(matches[0]))
                            v2.append(int(matches[0]))
                    # print(c, '.'.join([str(x) for x in v1]), '.'.join([str(x) for x in v2]))
                    r1 = pool.rel2id(pool.str2id(name), pool.str2id('.'.join([str(x) for x in v1])), rel1)
                    r2 = pool.rel2id(pool.str2id(name), pool.str2id('.'.join([str(x) for x in v2])), rel2)
                    return [pool.rel2id(r1, r2, solv.REL_WITH)]
                else:
                    vsplit = c.split('.')
                    if len(vsplit) == 4:
                        # Already 4 components: exact match.
                        rel = solv.REL_EQ
                        return [pool.rel2id(pool.str2id(name), pool.str2id(c), rel)]
                    else:
                        # Pad to 4 components so the build number can act as
                        # the 4th field, then match [v.0, v.9999) as a range.
                        while len(vsplit) < 4:
                            vsplit.append('0')
                        if len(vsplit) > 4:
                            print("WHAAAAAAAAAAAT ", vsplit)
                        rel1 = solv.REL_GT | solv.REL_EQ
                        rel2 = solv.REL_LT
                        r1 = pool.rel2id(pool.str2id(name), pool.str2id('.'.join([x for x in vsplit])), rel1)
                        vsplit[-1] = '9999'
                        r2 = pool.rel2id(pool.str2id(name), pool.str2id('.'.join([x for x in vsplit])), rel2)
                        return [pool.rel2id(r1, r2, solv.REL_WITH)]
            # NOTE(review): the '=' handling below looks unreachable -- the
            # branch above returns for every non-'<'/'>' constraint; confirm.
            start = 0
            if len(c) and c[0] == '=':
                start = 1
                if c[1] == '=':
                    start = 2
                target_version = c[start:]
            pcs.append(pool.rel2id(pool.str2id(name), pool.str2id(target_version), rel))
        if len(pcs) == 2:
            # Two comma-separated constraints become one AND-ed relation.
            pcs = [pool.rel2id(pcs[0], pcs[1], solv.REL_WITH)]
        return pcs
    else:
        # Bare package name: no version relation.
        return [pool.str2id(d)]
def parse_json(json_str, channel):
    """Load a repodata JSON string into libsolv repos.

    *channel* is a (label, dict) pair; the dict is filled with one solv
    repo per feature plus a 'main' repo. For each (name, version, build)
    group only the highest build number is added as a solvable, with
    PROVIDES/REQUIRES deparrays derived via ``parse_to_rel``. Uses the
    module-level ``pool``, ``pd`` and ``solv``.

    NOTE(review): reconstructed from an indentation-stripped diff --
    confirm nesting against the original file.
    """
    repo_json = json.loads(json_str)
    label, channel = channel
    print("Parsing JSON ", label)
    packages = repo_json.get('packages')
    if packages is None:
        packages = repo_json
    # Rows = packages; columns = repodata fields.
    df = pd.DataFrame(packages).T
    try:
        all_features = df['features'].dropna().unique()
    except:
        # No 'features' column in this repodata.
        all_features = []
    print("all features: ", all_features)
    print(df)
    channel['main'] = pool.add_repo(label)
    for f in all_features:
        # One extra repo per feature, named 'label[feature]'.
        channel[f] = pool.add_repo(label + '[' + f + ']')
    for name, package_df in df.groupby(['name']):
        for vers, package_v in package_df.groupby(['version', 'build']):
            # Keep only the highest build number for this (version, build).
            sel_pkg = package_v.loc[package_v['build_number'].astype(int).idxmax()]
            additional_name = None
            if len(all_features) and type(sel_pkg['features']) is str:
                s = channel[sel_pkg['features']].add_solvable()
                additional_name = "{}[{}]".format(name, sel_pkg['features'])
            else:
                s = channel['main'].add_solvable()
                if type(sel_pkg['build']) is str:
                    additional_name = "{}[{}]".format(name, sel_pkg['build'])
            # print("Adding ", sel_pkg['name'], sel_pkg['version'], sel_pkg['build_number'])
            # print(sel_pkg['name'])
            s.name = sel_pkg['name']
            s.evr = normalize_version(sel_pkg['version'], sel_pkg['build_number'], sel_pkg['build'])
            s.arch = sel_pkg.get('target-triplet')
            if s.name == 'unixodbc':
                # Debug tracing for one specific package.
                print(s.name, s.evr)
            s.add_deparray(solv.SOLVABLE_PROVIDES, pool.rel2id(pool.str2id(s.name), pool.str2id(s.evr), solv.REL_EQ))
            for d in sel_pkg['depends']:
                pcs = parse_to_rel(d)
                for p in pcs:
                    s.add_deparray(solv.SOLVABLE_REQUIRES, p)
            if additional_name:
                # print("additional name: ", additional_name)
                # Also provide under 'name[feature]' / 'name[build]'.
                s.add_deparray(solv.SOLVABLE_PROVIDES, pool.rel2id(pool.str2id(additional_name), pool.str2id(s.evr), solv.REL_EQ))
    print("Done")
# for key, package in repo_json['packages'].items():
# s = channel.add_solvable()
# s.name = package['name']
# s.evr = package['version']
# s.arch = package.get('target-triplet')
# s.add_deparray(solv.SOLVABLE_PROVIDES, pool.rel2id(pool.str2id(s.name), pool.str2id(s.evr), solv.REL_EQ))
# for d in package['depends']:
# parse_to_rel(d)
# s.add_deparray(solv.SOLVABLE_REQUIRES, pool.str2id(d.split(' ')[0]))
progress = []
progress_bars = []
url_template = 'https://conda.anaconda.org/{}/linux-64/repodata.json.bz2'
def ensure_dir(file_path):
    """Create the parent directory of *file_path* if it is missing.

    A bare filename (empty dirname) is a no-op; the original called
    ``os.makedirs('')`` in that case, which raises. ``exist_ok=True``
    also removes the check-then-create race of the original
    ``os.path.exists`` guard.
    """
    directory = os.path.dirname(file_path)
    if directory:
        os.makedirs(directory, exist_ok=True)
import os
import time
from datetime import datetime, timedelta
repos = {}
def download(args):
    """Download one channel's repodata into ./__cache__/<channel>.json.

    *args* is a (channel, url, idx) tuple (idx unused in the visible
    body). The cache is considered stale after 24 hours; fresh downloads
    are bz2-decompressed before being written. Always returns True.
    """
    channel, url, idx = args
    print("Downloading: ", args)
    cache_file = os.path.join('./__cache__/', channel + '.json')
    has_cache = os.path.isfile(cache_file)
    if has_cache:
        print(datetime.fromtimestamp(os.path.getmtime(cache_file)))
        # Invalidate caches older than one day.
        if datetime.now() - datetime.fromtimestamp(os.path.getmtime(cache_file)) > timedelta(hours=24):
            print("Cache invalidated...")
            has_cache = False
    if has_cache == False:
        req = request.urlopen(url)
        print("Downloading ", cache_file)
        with open(cache_file, 'w') as ftmp:
            ftmp.write(bz2.decompress(req.read()).decode('utf-8'))
    return True
def download_all(channels):
    """Download and parse repodata for every channel, in parallel.

    Rebinds the module-level ``repos`` to {channel: {}} and fills it via
    ``parse_json``. The 'xtest' first channel is a local-file test hook
    that skips the network entirely. Returns ``repos``.
    """
    global repos
    repos = {channel: {} for channel in channels}
    if (channels[0] == 'xtest'):
        # Test shortcut: read xtest.json from the working directory.
        c = channels[0]
        with open(c + '.json', 'r') as f:
            parse_json(f.read(), (c, repos[c]))
        return repos
    channel_args = [(channel, url_template.format(channel), i) for i, channel in enumerate(channels)]
    # 8-way parallel download into ./__cache__/.
    mpool = MPool(8)
    result = mpool.map(download, channel_args)
    for c in repos:
        # if os.path.isfile('__cache__/' + c + '.solv'):
        #     repos[c].add_solv('__cache__/' + c + '.solv')
        #     with open('__cache__/' + c + '.json', 'r') as f:
        #         repos[c].read(f)
        # else:
        #     # with open(c + '.json', 'r') as f:
        #     #     parse_json(f.read(), (c, repos[c]))
        with open('__cache__/' + c + '.json', 'r') as f:
            parse_json(f.read(), (c, repos[c]))
    # print(result)
    return repos
channels = ['conda-forge', 'r', 'anaconda']
# channels = ['r']
# channels = ['xtest']
# channels = ['xtest']
repos = download_all(channels)
def internalize_repos(repos):
    """Internalize every libsolv repo in the nested channel mapping.

    *repos* maps channel name -> {repo key -> solv repo}; ``internalize``
    is called on each inner repo exactly once.
    """
    for channel_repos in repos.values():
        for solv_repo in channel_repos.values():
            solv_repo.internalize()
def get_solver(specs, jobs, features):
    """Prepare the libsolv pool and return a Solver for *specs*.

    Bumps the priority of feature repos named in *features* to 100,
    internalizes all repos, builds the whatprovides index, and appends
    one SOLVER_INSTALL job per spec to the (mutated) *jobs* list. Uses
    the module-level ``repos``, ``pool`` and ``solv``.
    """
    for x in repos.values():
        for r in x.values():
            print("PRIO: {}, SUB: {}".format(r.priority, r.subpriority))
    for r in repos.values():
        for k in r.keys():
            if k in features:
                # Prefer solvables from requested feature repos.
                print("Setting priority to 100", r[k])
                r[k].priority = 100
            else:
                pass
                # if k != 'main':
                #     r[k].free()
    internalize_repos(repos)
    pool.createwhatprovides()
    solver = pool.Solver()
    for s in specs:
        jobs.append(pool.Job(solv.Job.SOLVER_INSTALL | solv.Job.SOLVER_SOLVABLE_NAME, parse_to_rel(s)[0]))
    return solver
# instl = ['iopro 1.4*', 'python 2.7*', 'numpy 1.7*']
# instl = ['xtensor']
instl = ['hdf5 >=1.10']
jobs = []
# jobs.append(pool.Job(solv.Job.SOLVER_INSTALL | solv.Job.SOLVER_SOLVABLE_NAME, pool.str2id("r-rcpp")))
# jobs.append(pool.Job(solv.Job.SOLVER_INSTALL | solv.Job.SOLVER_SOLVABLE_NAME, pool.rel2id(pool.str2id("r-rcpp"), pool.str2id('1.0.0'), solv.REL_EQ)))
# jobs.append(pool.Job(solv.Job.SOLVER_INSTALL | solv.Job.SOLVER_SOLVABLE_NAME, pool.str2id("jupyter")))
# jobs.append(pool.Job(solv.Job.SOLVER_INSTALL | solv.Job.SOLVER_SOLVABLE_NAME, parse_to_rel("iopro 1.4*")[0]))
# jobs.append(pool.Job(solv.Job.SOLVER_INSTALL | solv.Job.SOLVER_SOLVABLE_NAME, parse_to_rel("python 2.7*")[0]))
# jobs.append(pool.Job(solv.Job.SOLVER_INSTALL | solv.Job.SOLVER_SOLVABLE_NAME, parse_to_rel("numpy 1.7*")[0]))
# instl = ['iopro', 'python 2.7*', 'numpy 1.5*']
# solver = get_solver(instl, jobs, features=['mkl'])
solver = get_solver(instl, jobs, features=[])
problems = solver.solve(jobs)
if problems:
for p in problems:
print("problem:", p)
sys.exit(1)
transaction = solver.transaction()
# xls = [p for p in transaction.newsolvables()]
# print("LEN: ", len(xls))
yml_export = """
name: {env_name}
channels:
{channels}
dependencies:
{dependencies}
"""
yml_channels = ''
for c in channels:
yml_channels += '- {}\n'.format(c)
yml_deps = ''
for p in transaction.newsolvables():
yml_deps += '- {}::{}={}\n'.format(p.repo.name, p.name, p.evr)
print(yml_export.format(env_name='test', channels=yml_channels, dependencies='\n'.join(sorted(yml_deps.split('\n')))))