mirror of https://github.com/mamba-org/mamba.git

Add pre-commit configuration

Run pre-commit; fix small problems; fix linter problems; improve conditional expressions.

parent 5e801e3ece
commit 5c2951cc18
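For context, the commit wires the checks below into both a local git hook and a CI job. A hedged sketch of how a contributor would exercise the same checks (assuming `pre-commit` is installed and on PATH, which this commit does not itself guarantee):

```python
import subprocess

# Register the git hook once, then run every configured linter
# over the whole tree, mirroring the CI workflow added below.
subprocess.run(["pre-commit", "install"], check=True)
subprocess.run(["pre-commit", "run", "--all-files"], check=True)
```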
@@ -0,0 +1,4 @@
[flake8]
max-line-length=88
extend-ignore=E203,D104,D100,I004
exclude=test/*
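An aside on the `extend-ignore` list: E203 (whitespace before `:`) is commonly suppressed because black's slice formatting triggers it. A short illustrative snippet (not from this repository):

```python
# black formats compound slices with a space before ":",
# which flake8 reports as E203 unless that code is ignored.
items = list(range(10))
middle = items[2 : len(items) - 2]  # flagged as E203 without the ignore
print(middle)
```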
@@ -0,0 +1,27 @@
name: linter

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1
      - name: Set up Python 3.8
        uses: actions/setup-python@v1
        with:
          python-version: 3.8
      - name: Install dependencies
        shell: pwsh
        run: |
          python -m pip install pre-commit
      - name: Run all linters
        shell: pwsh
        run: |
          pre-commit run --all-files
@@ -59,4 +59,4 @@ _deps
# Executables
*.exe
*.out
*.app
@@ -0,0 +1,7 @@
[settings]
line_length=88
known_third_party=pybind11,conda,conda_env
multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0
use_parentheses=True
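These isort settings reproduce black-compatible import formatting: `multi_line_output=3` selects the vertical-hanging-indent style, combined here with parentheses and a trailing comma. The rewritten import blocks in the Python diffs below all follow this shape, e.g.:

```python
# Vertical hanging indent (multi_line_output=3) with
# use_parentheses and include_trailing_comma applied:
from conda.cli.common import (
    check_non_admin,
    confirm_yn,
    ensure_name_or_prefix,
    specs_from_url,
)
```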
@@ -0,0 +1,43 @@
repos:
  - repo: https://github.com/psf/black
    rev: 19.10b0
    hooks:
      - id: black
        args: [--safe, --quiet]
  - repo: https://github.com/asottile/blacken-docs
    rev: v1.7.0
    hooks:
      - id: blacken-docs
        additional_dependencies: [black==19.10b0]
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.2.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: fix-encoding-pragma
        args: [--remove]
      - id: check-yaml
        exclude: tests
      - id: check-toml
      - id: check-json
      - id: check-merge-conflict
      - id: pretty-format-json
        args: [--autofix]
      - id: debug-statements
        language_version: python3
  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v5.2.2
    hooks:
      - id: isort
        exclude: tests/data
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.3
    hooks:
      - id: flake8
        exclude: tests/data
        language_version: python3
        additional_dependencies:
          - flake8-typing-imports==1.9.0
          - flake8-builtins==1.5.3
          - flake8-bugbear==20.1.4
          - flake8-isort==3.0.1
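Most of the churn in the Python hunks below comes from just two of these hooks: black normalizes string quotes and wraps long calls, and `fix-encoding-pragma` with `--remove` deletes the `# -*- coding: utf-8 -*-` header. A small illustrative before/after sketch (not taken verbatim from the repository):

```python
# Before the hooks run, a module might start like this:
#     # -*- coding: utf-8 -*-
#     stderrlog = getLogger('conda.stderr')
#
# After `pre-commit run`, the pragma is gone and quotes are normalized:
from logging import getLogger

stderrlog = getLogger("conda.stderr")
```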
@@ -52,7 +52,7 @@ set(MAMBA_REQUIRED_LIBS
file (READ mamba/_version.py VERSION_FILE)

STRING(REGEX REPLACE "\r?\n" ";" TMP_VERSION_FILE_LIST "${VERSION_FILE}")
LIST(GET TMP_VERSION_FILE_LIST 1 VERSION_LINE)
LIST(GET TMP_VERSION_FILE_LIST 0 VERSION_LINE)
string(REGEX MATCHALL "version_info = \\(([0-9]*), ([0-9]*), ([0-9]*)\\)" MATCHES ${VERSION_LINE})

set(MAMBA_VERSION_MAJOR "${CMAKE_MATCH_1}")
@@ -96,13 +96,13 @@ With the `--tree` (or `-t`) flag, you can get the same information in a tree.

## micromamba

`micromamba` is a tiny version of the `mamba` package manager.
It is a pure C++ package with a separate command line interface.
It can be used to bootstrap environments (as an alternative to miniconda), but it's currently experimental.
The benefit is that it's very tiny and does not come with a default version of Python.

`micromamba` works in the bash & zsh shell on Linux & OS X.
It's completely statically linked, which allows you to drop it in some place and just execute it.

Note: it's advised to use micromamba in containers & CI only.
@@ -112,7 +112,7 @@ Download and unzip the executable (from the official conda-forge package):
wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
```

We can use `./micromamba shell init ... ` to initialize a shell (`.bashrc`) and a new root environment in `~/micromamba`:

```sh
./bin/micromamba shell init -s bash -p ~/micromamba
@@ -298,4 +298,4 @@ Export-ModuleMember `

# We don't export TabExpansion as it's currently not implemented for Micromamba
# TabExpansion
)MAMBARAW"
@@ -47,5 +47,5 @@ R"MAMBARAW(
@IF NOT "%CONDA_TEST_SAVE_TEMPS%x"=="x" @ECHO CONDA_TEST_SAVE_TEMPS :: retaining activate_batch %_TEMP_SCRIPT_PATH% 1>&2
@IF "%CONDA_TEST_SAVE_TEMPS%x"=="x" @DEL /F /Q "%_TEMP_SCRIPT_PATH%"
@SET _TEMP_SCRIPT_PATH=
@SET "PROMPT=%CONDA_PROMPT_MODIFIER%%PROMPT%"
)MAMBARAW"
@@ -3,4 +3,4 @@ R"MAMBARAW(
@REM SPDX-License-Identifier: BSD-3-Clause
@CALL "%~dp0..\condabin\conda_hook.bat"
conda.bat activate %*
)MAMBARAW"
@@ -29,4 +29,4 @@ __MAMBA_INSERT_ROOT_PREFIX__
@IF [%1]==[uninstall] "%~dp0_mamba_activate" reactivate

@EXIT /B %errorlevel%
)MAMBARAW"
@@ -112,4 +112,4 @@ __xonsh__.completers['mamba'] = _mamba_completer
# bump to top of list
__xonsh__.completers.move_to_end('mamba', last=False)

)MAMBARAW"
@@ -18,4 +18,4 @@ __MAMBA_INSERT_MAMBA_EXE__
@DOSKEY micromamba="%MAMBA_BAT%" $*

@SET MAMBA_SHLVL=0
)MAMBARAW"
@@ -1,3 +1,3 @@
R"MAMBARAW(
Import-Module "$Env:MAMBA_ROOT_PREFIX\condabin\Mamba.psm1"
)MAMBARAW"
@@ -42,12 +42,12 @@ namespace mamba
std::vector<std::string> urls(bool with_credential = true) const;
std::vector<std::string> urls(const std::vector<std::string>& platforms,
bool with_credential = true) const;

static Channel make_simple_channel(const Channel& channel_alias,
const std::string& channel_url,
const std::string& channel_name = "",
const std::string& multi_name = "");

static Channel& make_cached_channel(const std::string& value);
static void clear_cache();
@@ -70,7 +70,7 @@ namespace mamba
std::string env_prompt = "({default_env}) ";

// ssl verify can be either an empty string (regular SSL verification),
// the string "<false>" to indicate no SSL verification, or a path to
// a directory with cert files, or a cert file.
std::string ssl_verify = "";
@@ -78,7 +78,7 @@ namespace mamba
bool add_pip_as_python_dependency = true;

void set_verbosity(int lvl);

std::string platform() const;
std::vector<std::string> platforms() const;
@@ -39,7 +39,7 @@ namespace mamba
#ifndef _WIN32
// set permissions to 0o2775
fs::permissions(path, fs::perms::set_gid |
fs::perms::owner_all |
fs::perms::group_all |
fs::perms::others_read | fs::perms::others_exec);
#endif
@@ -229,7 +229,7 @@ namespace mamba
}

template <class G>
inline auto predecessor_recorder<G>::get_predecessors() const -> const predecessor_map&
{
return m_pred;
}
@@ -295,4 +295,3 @@ namespace mamba
}

#endif
@@ -45,4 +45,4 @@ namespace mamba
};
}

#endif
@@ -18,7 +18,7 @@
#include "context.hpp"
#include "nlohmann/json.hpp"

#define ENUM_FLAG_OPERATOR(T,X) inline T operator X (T lhs, T rhs) { return (T) (static_cast<std::underlying_type_t <T>>(lhs) X static_cast<std::underlying_type_t <T>>(rhs)); }
#define ENUM_FLAGS(T) \
enum class T; \
inline T operator ~ (T t) { return (T) (~static_cast<std::underlying_type_t <T>>(t)); } \
@@ -180,7 +180,7 @@ namespace mamba
};
}

// The next two functions / classes were ported from the awesome indicators library
// by p-ranav (MIT License)
// https://github.com/p-ranav/indicators
std::ostream& write_duration(std::ostream &os, std::chrono::nanoseconds ns);
@@ -208,7 +208,7 @@ namespace mamba
class ProgressBar
{
public:

ProgressBar(const std::string& prefix);

void set_start();
@@ -25,7 +25,7 @@ namespace mamba
std::string file_path;
};

enum class PathType
{
UNDEFINED,
HARDLINK,
@@ -88,7 +88,7 @@ namespace mamba
nl::json json() const;

private:

void reset_pkg_view_list();
std::string get_package_repr(const PackageInfo& pkg) const;
@@ -371,7 +371,7 @@ namespace mamba

std::string hook_content = mamba_hook_bat;
std::cout << "Self exe path: " << self_path << std::endl;
replace_all(hook_content, std::string("__MAMBA_INSERT_MAMBA_EXE__"),
std::string("@SET \"MAMBA_EXE=" + self_path.string() + "\""));

std::ofstream mamba_hook_bat_f(root_prefix / "condabin" / "mamba_hook.bat");
@@ -75,7 +75,7 @@ namespace mamba
// Contrary to conda original function, this one expects a full url
// (that is channel url + / + repodata_fn). It is not the
// responsibility of this function to decide whether it should
// concatenante base url and repodata depending on repodata value
// and old behavior support.
std::string cache_fn_url(const std::string& url);
std::string create_cache_dir();
@@ -226,7 +226,7 @@ public:
template <typename char_type>
constexpr char_type path_helper_base<char_type>::preferred_separator;
#endif

// 30.10.8 class path
class GHC_FS_API_CLASS path
#if defined(GHC_OS_WINDOWS) && defined(GHC_WIN_WSTRING_STRING_TYPE)
@@ -243,7 +243,7 @@ public:
#endif
using string_type = std::basic_string<value_type>;
using path_helper_base<value_type>::preferred_separator;

// 30.10.10.1 enumeration format
/// The path format in wich the constructor argument is given.
enum format {
@@ -1307,7 +1307,7 @@ GHC_INLINE std::error_code make_system_error(int err)
return std::error_code(err ? err : errno, std::system_category());
}
#endif

#endif // GHC_EXPAND_IMPL

template <typename Enum>
@@ -1430,7 +1430,7 @@ GHC_INLINE unsigned consumeUtf8Fragment(const unsigned state, const uint8_t frag
codepoint = (state ? (codepoint << 6) | (fragment & 0x3fu) : (0xffu >> category) & fragment);
return state == S_RJCT ? static_cast<unsigned>(S_RJCT) : static_cast<unsigned>((utf8_state_info[category + 16] >> (state << 2)) & 0xf);
}

GHC_INLINE bool validUtf8(const std::string& utf8String)
{
std::string::const_iterator iter = utf8String.begin();
@@ -1448,9 +1448,9 @@ GHC_INLINE bool validUtf8(const std::string& utf8String)
}

} // namespace detail

#endif

namespace detail {

template <class StringType, typename std::enable_if<(sizeof(typename StringType::value_type) == 1)>::type* = nullptr>
@@ -201,8 +201,8 @@ namespace termcolor
}
return stream;
}


#if defined(TERMCOLOR_OS_MACOS) || defined(TERMCOLOR_OS_LINUX)

template <uint8_t code>
@@ -215,7 +215,7 @@ namespace termcolor
std::snprintf(command, 14, "\033[38;5;%dm", code);
stream << command;
}

return stream;
}
@@ -184,4 +184,3 @@ namespace mamba
}

#endif
@@ -113,7 +113,7 @@ namespace mamba
Transaction* m_transaction;

bool m_force_reinstall = false;
};
}

#endif // MAMBA_TRANSACTION_HPP
@@ -65,7 +65,7 @@ namespace mamba
class URLHandler
{
public:

URLHandler(const std::string& url = "");
~URLHandler();
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from ._version import version_info, __version__
from ._version import __version__, version_info # noqa
@@ -1,3 +1,2 @@
# -*- coding: utf-8 -*-
version_info = (0, 4, 4)
__version__ = '.'.join(map(str, version_info))
__version__ = ".".join(map(str, version_info))
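Why the CMake hunk earlier switched `LIST(GET TMP_VERSION_FILE_LIST 1 VERSION_LINE)` to index 0 becomes clear here: removing the coding pragma moves `version_info` from the second line of `_version.py` to the first. A quick sanity check (illustrative):

```python
# _version.py after the hooks: version_info now sits on line 0.
new_version_py = 'version_info = (0, 4, 4)\n__version__ = ".".join(map(str, version_info))\n'
first_line = new_version_py.splitlines()[0]
assert first_line.startswith("version_info")
```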
504 mamba/mamba.py
@@ -1,59 +1,67 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2019, QuantStack
# SPDX-License-Identifier: BSD-3-Clause

from __future__ import absolute_import, division, print_function, unicode_literals

import codecs
import sys, os

import os
import sys
from logging import getLogger
from os.path import isdir, isfile, join

from conda.cli import common as cli_common
from conda.cli.main import generate_parser, init_loggers
from conda.base.context import context
from conda.models.match_spec import MatchSpec
from conda.core.prefix_data import PrefixData
from conda.misc import explicit, touch_nonadmin
from conda.cli.common import specs_from_url, confirm_yn, check_non_admin, ensure_name_or_prefix
from conda.history import History
from conda.core.link import UnlinkLinkTransaction, PrefixSetup
from conda.cli.install import check_prefix, clone, print_activate
from conda.base.constants import ChannelPriority, UpdateModifier, DepsModifier
from conda.core.solve import get_pinned_specs
from conda.core.envs_manager import unregister_env
from conda.core.package_cache_data import PackageCacheData
from conda.common.compat import on_win

# create support
from conda.base.constants import ChannelPriority, DepsModifier, UpdateModifier
from conda.base.context import context
from conda.cli import common as cli_common
from conda.cli.common import (
check_non_admin,
confirm_yn,
ensure_name_or_prefix,
specs_from_url,
)
from conda.cli.install import check_prefix, clone, get_revision, print_activate
from conda.cli.main import generate_parser, init_loggers
from conda.common.compat import on_win
from conda.common.path import paths_equal
from conda.exceptions import (CondaExitZero, CondaOSError, CondaSystemExit,
CondaValueError, DirectoryNotACondaEnvironmentError, CondaEnvironmentError,
DryRunExit, EnvironmentLocationNotFound,
NoBaseEnvironmentError, PackageNotInstalledError, PackagesNotFoundError,
TooManyArgumentsError)

from conda.core.envs_manager import unregister_env
from conda.core.link import PrefixSetup, UnlinkLinkTransaction
from conda.core.package_cache_data import PackageCacheData
from conda.core.prefix_data import PrefixData
from conda.core.solve import get_pinned_specs
from conda.exceptions import (
CondaEnvironmentError,
CondaExitZero,
CondaOSError,
CondaSystemExit,
CondaValueError,
DirectoryNotACondaEnvironmentError,
DryRunExit,
EnvironmentLocationNotFound,
NoBaseEnvironmentError,
PackageNotInstalledError,
PackagesNotFoundError,
TooManyArgumentsError,
)
from conda.gateways.disk.create import mkdir_p
from conda.gateways.disk.delete import rm_rf, delete_trash, path_is_clean
from conda.gateways.disk.delete import delete_trash, path_is_clean, rm_rf
from conda.gateways.disk.test import is_conda_environment

from logging import getLogger
from conda.misc import explicit, touch_nonadmin
from conda.models.match_spec import MatchSpec

import mamba
import mamba.mamba_api as api

from mamba.utils import get_index, to_package_record_from_subjson, init_api_context, get_installed_jsonfile, to_txn
from mamba.utils import get_index, get_installed_jsonfile, init_api_context, to_txn

if sys.version_info < (3, 2):
sys.stdout = codecs.lookup('utf-8')[-1](sys.stdout)
sys.stdout = codecs.lookup("utf-8")[-1](sys.stdout)
elif sys.version_info < (3, 7):
sys.stdout = codecs.getwriter("utf-8")(sys.stdout.detach())
else:
sys.stdout.reconfigure(encoding='utf-8')
sys.stdout.reconfigure(encoding="utf-8")


log = getLogger(__name__)
stderrlog = getLogger('conda.stderr')
stderrlog = getLogger("conda.stderr")

banner = f"""
__ __ __ __
@@ -79,33 +87,42 @@ banner = f"""
█████████████████████████████████████████████████████████████
"""


class MambaException(Exception):
pass


solver_options = [(api.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]

def specs_from_args(args, json=False):
def arg2spec(arg, json=False, update=False):
try:
spec = MatchSpec(arg)
except:
except Exception:
from ..exceptions import CondaValueError
raise CondaValueError('invalid package specification: %s' % arg)

raise CondaValueError("invalid package specification: %s" % arg)

name = spec.name
if not spec._is_simple() and update:
from ..exceptions import CondaValueError
raise CondaValueError("""version specifications not allowed with 'update'; use

raise CondaValueError(
"""version specifications not allowed with 'update'; use
conda update %s%s or
conda install %s""" % (name, ' ' * (len(arg) - len(name)), arg))
conda install %s"""
% (name, " " * (len(arg) - len(name)), arg)
)

return spec

return [arg2spec(arg, json=json) for arg in args]


use_mamba_experimental = False


def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False):
if unlink_link_transaction.nothing_to_do:
if remove_op:
@@ -113,7 +130,9 @@ def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False):
raise PackagesNotFoundError(args.package_names)
elif not newenv:
if context.json:
cli_common.stdout_json_success(message='All requested packages already installed.')
cli_common.stdout_json_success(
message="All requested packages already installed."
)
return

if context.dry_run:
@@ -125,12 +144,14 @@ def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False):
try:
unlink_link_transaction.download_and_extract()
if context.download_only:
raise CondaExitZero('Package caches prepared. UnlinkLinkTransaction cancelled with '
'--download-only option.')
raise CondaExitZero(
"Package caches prepared. UnlinkLinkTransaction cancelled with "
"--download-only option."
)
unlink_link_transaction.execute()

except SystemExit as e:
raise CondaSystemExit('Exiting', e)
raise CondaSystemExit("Exiting", e)

if newenv:
touch_nonadmin(prefix)
@@ -140,18 +161,23 @@ def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False):
actions = unlink_link_transaction._make_legacy_action_groups()[0]
cli_common.stdout_json_success(prefix=prefix, actions=actions)


def remove(args, parser):
if not (args.all or args.package_names):
raise CondaValueError('no package names supplied,\n'
' try "mamba remove -h" for more details')
raise CondaValueError(
"no package names supplied,\n"
' try "mamba remove -h" for more details'
)

prefix = context.target_prefix
check_non_admin()
init_api_context()

if args.all and prefix == context.default_prefix:
raise CondaEnvironmentError("cannot remove current environment. \
deactivate and run mamba remove again")
raise CondaEnvironmentError(
"cannot remove current environment. \
deactivate and run mamba remove again"
)

if args.all and path_is_clean(prefix):
# full environment removal was requested, but environment doesn't exist anyway
@@ -159,11 +185,13 @@ def remove(args, parser):

if args.all:
if prefix == context.root_prefix:
raise CondaEnvironmentError('cannot remove root environment,\n'
' add -n NAME or -p PREFIX option')
raise CondaEnvironmentError(
"cannot remove root environment,\n"
" add -n NAME or -p PREFIX option"
)
print("\nRemove all packages in environment %s:\n" % prefix, file=sys.stderr)

if 'package_names' in args:
if "package_names" in args:
stp = PrefixSetup(
target_prefix=prefix,
unlink_precs=tuple(PrefixData(prefix).iter_records()),
@@ -176,7 +204,9 @@ def remove(args, parser):
try:
handle_txn(txn, prefix, args, False, True)
except PackagesNotFoundError:
print("No packages found in %s. Continuing environment removal" % prefix)
print(
"No packages found in %s. Continuing environment removal" % prefix
)

rm_rf(prefix, clean_empty_parents=True)
unregister_env(prefix)
@@ -221,7 +251,9 @@ def remove(args, parser):

package_cache = api.MultiPackageCache(context.pkgs_dirs)
transaction = api.Transaction(solver, package_cache)
downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir, repos)
downloaded = transaction.prompt(
PackageCacheData.first_writable().pkgs_dir, repos
)
if not downloaded:
exit(0)
@@ -231,10 +263,18 @@ def remove(args, parser):
specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

conda_transaction = to_txn(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, installed_pkg_recs)
conda_transaction = to_txn(
specs_to_add,
specs_to_remove,
prefix,
to_link,
to_unlink,
installed_pkg_recs,
)
handle_txn(conda_transaction, prefix, args, False, True)

def install(args, parser, command='install'):

def install(args, parser, command="install"):
"""
mamba install, mamba update, and mamba create
"""
@@ -243,11 +283,11 @@ def install(args, parser, command='install'):

init_api_context(use_mamba_experimental)

newenv = bool(command == 'create')
isinstall = bool(command == 'install')
newenv = bool(command == "create")
isinstall = bool(command == "install")
solver_task = api.SOLVER_INSTALL

isupdate = bool(command == 'update')
isupdate = bool(command == "update")
if isupdate:
solver_task = api.SOLVER_UPDATE
solver_options.clear()
@@ -259,20 +299,26 @@ def install(args, parser, command='install'):
check_prefix(prefix, json=context.json)
if context.force_32bit and prefix == context.root_prefix:
raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
if isupdate and not (args.file or args.packages
or context.update_modifier == UpdateModifier.UPDATE_ALL):
raise CondaValueError("""no package names supplied
if isupdate and not (
args.file
or args.packages
or context.update_modifier == UpdateModifier.UPDATE_ALL
):
raise CondaValueError(
"""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)
"""
% prefix
)

if not newenv:
if isdir(prefix):
if on_win:
delete_trash(prefix)

if not isfile(join(prefix, 'conda-meta', 'history')):
if not isfile(join(prefix, "conda-meta", "history")):
if paths_equal(prefix, context.conda_prefix):
raise NoBaseEnvironmentError()
else:
@@ -286,7 +332,9 @@ def install(args, parser, command='install'):
try:
mkdir_p(prefix)
except EnvironmentError as e:
raise CondaOSError("Could not create directory: %s" % prefix, caused_by=e)
raise CondaOSError(
"Could not create directory: %s" % prefix, caused_by=e
)
else:
raise EnvironmentLocationNotFound(prefix)
@@ -296,33 +344,36 @@ def install(args, parser, command='install'):
# Get SPECS #
#############################

args_packages = [s.strip('"\'') for s in args.packages]
args_packages = [s.strip("\"'") for s in args.packages]
if newenv and not args.no_default_packages:
# Override defaults if they are specified at the command line
# TODO: rework in 4.4 branch using MatchSpec
args_packages_names = [pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages]
args_packages_names = [
pkg.replace(" ", "=").split("=", 1)[0] for pkg in args_packages
]
for default_pkg in context.create_default_packages:
default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
default_pkg_name = default_pkg.replace(" ", "=").split("=", 1)[0]
if default_pkg_name not in args_packages_names:
args_packages.append(default_pkg)

num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
num_cp = sum(s.endswith(".tar.bz2") for s in args_packages)
if num_cp:
if num_cp == len(args_packages):
explicit(args_packages, prefix, verbose=not (context.quiet or context.json))
return
else:
raise CondaValueError("cannot mix specifications with conda package"
" filenames")
raise CondaValueError(
"cannot mix specifications with conda package" " filenames"
)

specs = []

index_args = {
'use_cache': args.use_index_cache,
'channel_urls': context.channels,
'unknown': args.unknown,
'prepend': not args.override_channels,
'use_local': args.use_local
"use_cache": args.use_index_cache,
"channel_urls": context.channels,
"unknown": args.unknown,
"prepend": not args.override_channels,
"use_local": args.use_local,
}

if args.file:
@@ -331,10 +382,17 @@ def install(args, parser, command='install'):
try:
file_specs += specs_from_url(fpath, json=context.json)
except UnicodeError:
raise CondaValueError("Error reading file, file should be a text file containing"
" packages \nconda create --help for details")
if '@EXPLICIT' in file_specs:
explicit(file_specs, prefix, verbose=not (context.quiet or context.json), index_args=index_args)
raise CondaValueError(
"Error reading file, file should be a text file containing"
" packages \nconda create --help for details"
)
if "@EXPLICIT" in file_specs:
explicit(
file_specs,
prefix,
verbose=not (context.quiet or context.json),
index_args=index_args,
)
return
specs.extend([MatchSpec(s) for s in file_specs])
@@ -344,19 +402,24 @@ def install(args, parser, command='install'):
channels = [c for c in context.channels]
for spec in specs:
# CONDA TODO: correct handling for subdir isn't yet done
spec_channel = spec.get_exact_value('channel')
spec_channel = spec.get_exact_value("channel")
if spec_channel and spec_channel not in channels:
channels.append(spec_channel)

index_args['channel_urls'] = channels
index_args["channel_urls"] = channels

index = get_index(channel_urls=index_args['channel_urls'],
prepend=index_args['prepend'], platform=None,
use_local=index_args['use_local'], use_cache=index_args['use_cache'],
unknown=index_args['unknown'], prefix=prefix)
index = get_index(
channel_urls=index_args["channel_urls"],
prepend=index_args["prepend"],
platform=None,
use_local=index_args["use_local"],
use_cache=index_args["use_cache"],
unknown=index_args["unknown"],
prefix=prefix,
)

channel_json = []
strict_priority = (context.channel_priority == ChannelPriority.STRICT)
strict_priority = context.channel_priority == ChannelPriority.STRICT
subprio_index = len(index)
if strict_priority:
# first, count unique channels
@@ -374,12 +437,12 @@ def install(args, parser, command='install'):
else:
priority = 0
if strict_priority:
subpriority = 0 if chan.platform == 'noarch' else 1
subpriority = 0 if chan.platform == "noarch" else 1
else:
subpriority = subprio_index
subprio_index -= 1

if subdir.loaded() == False and chan.platform != 'noarch':
if not subdir.loaded() and chan.platform != "noarch":
# ignore non-loaded subdir if channel is != noarch
continue
@@ -394,25 +457,28 @@ def install(args, parser, command='install'):
if isinstall and args.revision:
get_revision(args.revision, json=context.json)
elif isinstall and not (args.file or args_packages):
raise CondaValueError("too few arguments, "
"must supply command line package specs or --file")
raise CondaValueError(
"too few arguments, " "must supply command line package specs or --file"
)

installed_names = [i_rec.name for i_rec in installed_pkg_recs]
# for 'conda update', make sure the requested specs actually exist in the prefix
# and that they are name-only specs
if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
for i in installed_names:
if i != 'python':
if i != "python":
specs.append(MatchSpec(i))

prefix_data = PrefixData(prefix)
for s in args_packages:
s = MatchSpec(s)
if s.name == 'python':
if s.name == "python":
specs.append(s)
if not s.is_name_only_spec:
raise CondaValueError("Invalid spec for 'conda update': %s\n"
"Use 'conda install' instead." % s)
raise CondaValueError(
"Invalid spec for 'conda update': %s\n"
"Use 'conda install' instead." % s
)
if not prefix_data.get(s.name, None):
raise PackageNotInstalledError(prefix, s.name)
@@ -424,21 +490,30 @@ def install(args, parser, command='install'):
prec = installed_pkg_recs[installed_names.index(spec.name)]
for dep in prec.depends:
ms = MatchSpec(dep)
if ms.name != 'python':
if ms.name != "python":
final_specs.append(MatchSpec(ms.name))
specs = set(final_specs)

if newenv and args.clone:
if args.packages:
raise TooManyArgumentsError(0, len(args.packages), list(args.packages),
'did not expect any arguments for --clone')
raise TooManyArgumentsError(
0,
len(args.packages),
list(args.packages),
"did not expect any arguments for --clone",
)

clone(args.clone, prefix, json=context.json, quiet=(context.quiet or context.json), index_args=index_args)
clone(
args.clone,
prefix,
json=context.json,
quiet=(context.quiet or context.json),
index_args=index_args,
)
touch_nonadmin(prefix)
print_activate(args.name if args.name else prefix)
return


if not (context.quiet or context.json):
print("\nLooking for: {}\n".format([str(s) for s in specs]))
@@ -447,13 +522,12 @@ def install(args, parser, command='install'):
# If python was not specified, check if it is installed.
# If yes, add the installed python to the specs to prevent updating it.
python_constraint = None
additional_specs = []

if 'python' not in spec_names:
if 'python' in installed_names:
i = installed_names.index('python')
if "python" not in spec_names:
if "python" in installed_names:
i = installed_names.index("python")
version = installed_pkg_recs[i].version
python_constraint = MatchSpec('python==' + version).conda_build_form()
python_constraint = MatchSpec("python==" + version).conda_build_form()

mamba_solve_specs = [s.__str__() for s in specs]
@@ -474,7 +548,7 @@ def install(args, parser, command='install'):
repo.set_installed()
repos.append(repo)

for channel, subdir, priority, subpriority in channel_json:
for _, subdir, priority, subpriority in channel_json:
repo = subdir.create_repo(pool)
repo.set_priority(priority, subpriority)
repos.append(repo)
@@ -485,9 +559,11 @@ def install(args, parser, command='install'):
solver = api.Solver(pool, solver_options)

solver.set_postsolve_flags(
[(api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
(api.MAMBA_ONLY_DEPS, context.deps_modifier == DepsModifier.ONLY_DEPS),
(api.MAMBA_FORCE_REINSTALL, context.force_reinstall)]
[
(api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
(api.MAMBA_ONLY_DEPS, context.deps_modifier == DepsModifier.ONLY_DEPS),
(api.MAMBA_FORCE_REINSTALL, context.force_reinstall),
]
)
solver.add_jobs(mamba_solve_specs, solver_task)
@@ -508,7 +584,10 @@ def install(args, parser, command='install'):
if x:
for el in x:
if not s.match(el):
print("Your pinning does not match what's currently installed. Please remove the pin and fix your installation")
print(
"Your pinning does not match what's currently installed."
" Please remove the pin and fix your installation"
)
print(" Pin: {}".format(s))
print(" Currently installed: {}".format(el))
exit(1)
@@ -534,49 +613,68 @@ def install(args, parser, command='install'):
exit(0)
PackageCacheData.first_writable().reload()

if use_mamba_experimental and not os.name == 'nt':
if use_mamba_experimental and not os.name == "nt":
if newenv and not isdir(context.target_prefix) and not context.dry_run:
mkdir_p(prefix)

transaction.execute(prefix_data, PackageCacheData.first_writable().pkgs_dir)
else:
conda_transaction = to_txn(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, installed_pkg_recs, index)
conda_transaction = to_txn(
specs_to_add,
specs_to_remove,
prefix,
to_link,
to_unlink,
installed_pkg_recs,
index,
)
handle_txn(conda_transaction, prefix, args, newenv)

try:
installed_json_f.close()
os.unlink(installed_json_f.name)
except:
except Exception:
pass


def create(args, parser):
if is_conda_environment(context.target_prefix):
if paths_equal(context.target_prefix, context.root_prefix):
raise CondaValueError("The target prefix is the base prefix. Aborting.")
confirm_yn("WARNING: A conda environment already exists at '%s'\n"
"Remove existing environment" % context.target_prefix,
default='no',
dry_run=False)
confirm_yn(
"WARNING: A conda environment already exists at '%s'\n"
"Remove existing environment" % context.target_prefix,
default="no",
dry_run=False,
)
log.info("Removing existing environment %s", context.target_prefix)
rm_rf(context.target_prefix)
elif isdir(context.target_prefix):
confirm_yn("WARNING: A directory already exists at the target location '%s'\n"
"but it is not a conda environment.\n"
"Continue creating environment" % context.target_prefix,
default='no',
dry_run=False)
install(args, parser, 'create')
confirm_yn(
"WARNING: A directory already exists at the target location '%s'\n"
"but it is not a conda environment.\n"
"Continue creating environment" % context.target_prefix,
default="no",
dry_run=False,
)
install(args, parser, "create")


def update(args, parser):
if context.force:
print("\n\n"
"WARNING: The --force flag will be removed in a future conda release.\n"
" See 'conda update --help' for details about the --force-reinstall\n"
" and --clobber flags.\n"
"\n", file=sys.stderr)
print(
"\n\n"
"WARNING: The --force flag will be removed in a future conda release.\n"
" See 'conda update --help'"
" for details about the --force-reinstall\n"
" and --clobber flags.\n"
"\n",
file=sys.stderr,
)

# need to implement some modifications on the update function
install(args, parser, 'update')
install(args, parser, "update")


def repoquery(args, parser):
if not args.subcmd:
@@ -586,18 +684,18 @@ def repoquery(args, parser):
exit(1)

if args.platform:
context._subdirs = (args.platform, 'noarch')
context._subdirs = (args.platform, "noarch")

prefix = context.target_prefix

init_api_context()

index_args = {
'use_cache': args.use_index_cache,
'channel_urls': context.channels,
'unknown': args.unknown,
'prepend': not args.override_channels,
'use_local': args.use_local
"use_cache": args.use_index_cache,
"channel_urls": context.channels,
"unknown": args.unknown,
"prepend": not args.override_channels,
"use_local": args.use_local,
}

installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
@@ -607,7 +705,7 @@ def repoquery(args, parser):

only_installed = True
channels = args.channel or []
if args.subcmd == "search" and args.installed == False:
if args.subcmd == "search" and not args.installed:
only_installed = False
elif args.all_channels or len(channels):
only_installed = False
@@ -623,19 +721,28 @@ def repoquery(args, parser):
repo.set_installed()
repos.append(repo)


if not only_installed:
index = get_index(channel_urls=index_args['channel_urls'],
prepend=index_args['prepend'], platform=None,
use_local=index_args['use_local'], use_cache=index_args['use_cache'],
unknown=index_args['unknown'], prefix=prefix)
index = get_index(
channel_urls=index_args["channel_urls"],
prepend=index_args["prepend"],
platform=None,
use_local=index_args["use_local"],
use_cache=index_args["use_cache"],
unknown=index_args["unknown"],
prefix=prefix,
)

for subdir, channel in index:
if subdir.loaded() == False and channel.platform != 'noarch':
if not subdir.loaded() and channel.platform != "noarch":
# ignore non-loaded subdir if channel is != noarch
continue

repo = api.Repo(pool, str(channel), subdir.cache_path(), channel.url(with_credentials=True))
repo = api.Repo(
pool,
str(channel),
subdir.cache_path(),
channel.url(with_credentials=True),
)
repo.set_priority(0, 0)
repos.append(repo)
@@ -652,111 +759,117 @@ def repoquery(args, parser):


def do_call(args, parser):
relative_mod, func_name = args.func.rsplit('.', 1)
relative_mod, func_name = args.func.rsplit(".", 1)

# func_name should always be 'execute'
if relative_mod in ['.main_list', '.main_search', '.main_run', '.main_clean', '.main_info']:
if relative_mod in [
".main_list",
".main_search",
".main_run",
".main_clean",
".main_info",
]:
from importlib import import_module
module = import_module('conda.cli' + relative_mod, __name__.rsplit('.', 1)[0])

module = import_module("conda.cli" + relative_mod, __name__.rsplit(".", 1)[0])
exit_code = getattr(module, func_name)(args, parser)
elif relative_mod == '.main_install':
exit_code = install(args, parser, 'install')
elif relative_mod == '.main_remove':
elif relative_mod == ".main_install":
exit_code = install(args, parser, "install")
elif relative_mod == ".main_remove":
exit_code = remove(args, parser)
elif relative_mod == '.main_create':
elif relative_mod == ".main_create":
exit_code = create(args, parser)
elif relative_mod == '.main_update':
elif relative_mod == ".main_update":
exit_code = update(args, parser)
elif relative_mod == '.main_repoquery':
elif relative_mod == ".main_repoquery":
exit_code = repoquery(args, parser)
else:
print("Currently, only install, create, list, search, run, info and clean are supported through mamba.")
print(
"Currently, only install, create, list, search, run,"
" info and clean are supported through mamba."
)

return 0
return exit_code

def configure_parser_repoquery(sub_parsers):
help = "Query repositories using mamba. "
descr = (help)

example = ("""

def configure_parser_repoquery(sub_parsers):
help_cli = "Query repositories using mamba. "
descr = help_cli

example = """
Examples:

mamba repoquery search xtensor>=0.18
mamba repoquery depends xtensor
mamba repoquery whoneeds xtl

""")
"""

import argparse
from argparse import SUPPRESS

p = sub_parsers.add_parser(
'repoquery',
description=descr,
help=help,
epilog=example
"repoquery", description=descr, help=help_cli, epilog=example
)
subsub_parser = p.add_subparsers(dest='subcmd')
subsub_parser = p.add_subparsers(dest="subcmd")
package_cmds = argparse.ArgumentParser(add_help=False)
package_cmds.add_argument('package_query', help='the target package')
package_cmds.add_argument(
"-i", "--installed",
action="store_true",
help=SUPPRESS
)
package_cmds.add_argument("package_query", help="the target package")
package_cmds.add_argument("-i", "--installed", action="store_true", help=SUPPRESS)

package_cmds.add_argument('-p', '--platform')
package_cmds.add_argument('--no-installed', action='store_true')
package_cmds.add_argument("-p", "--platform")
package_cmds.add_argument("--no-installed", action="store_true")

package_cmds.add_argument(
"-a", "--all-channels",
"-a",
"--all-channels",
action="store_true",
help="Look at all channels (for depends / whoneeds)"
help="Look at all channels (for depends / whoneeds)",
)

view_cmds = argparse.ArgumentParser(add_help=False)
view_cmds.add_argument(
"-t", "--tree",
action="store_true"
view_cmds.add_argument("-t", "--tree", action="store_true")

c1 = subsub_parser.add_parser(
"whoneeds",
help="shows packages that depends on this package",
parents=[package_cmds, view_cmds],
)

c1 = subsub_parser.add_parser('whoneeds',
help='shows packages that depends on this package',
parents=[package_cmds, view_cmds]
c2 = subsub_parser.add_parser(
"depends",
help="shows packages that depends on this package",
parents=[package_cmds, view_cmds],
)

c2 = subsub_parser.add_parser('depends',
help='shows packages that depends on this package',
parents=[package_cmds, view_cmds]
)

c3 = subsub_parser.add_parser('search',
help='shows packages that depends on this package',
parents=[package_cmds]
c3 = subsub_parser.add_parser(
"search",
help="shows packages that depends on this package",
parents=[package_cmds],
)

from conda.cli import conda_argparse

for cmd in (c1, c2, c3):
conda_argparse.add_parser_channels(cmd)
conda_argparse.add_parser_networking(cmd)
conda_argparse.add_parser_known(cmd)
conda_argparse.add_parser_json(cmd)

p.set_defaults(func='.main_repoquery.execute')
p.set_defaults(func=".main_repoquery.execute")
return p


def _wrapped_main(*args, **kwargs):
if len(args) == 1:
args = args + ('-h',)
args = args + ("-h",)

import copy
argv = list(args)

if "--mamba-experimental" in argv:
global use_mamba_experimental
use_mamba_experimental = True
argv.remove('--mamba-experimental')
argv.remove("--mamba-experimental")

args = argv
@@ -771,21 +884,25 @@ def _wrapped_main(*args, **kwargs):
init_loggers(context)

result = do_call(args, p)
exit_code = getattr(result, 'rc', result) # may be Result objects with code in rc field
exit_code = getattr(
result, "rc", result
) # may be Result objects with code in rc field
if isinstance(exit_code, int):
return exit_code


# Main entry point!
def main(*args, **kwargs):
# Set to false so we don't allow uploading our issues to conda!
context.report_errors = False

from conda.common.compat import ensure_text_type, init_std_stream_encoding

init_std_stream_encoding()

if 'activate' in sys.argv or 'deactivate' in sys.argv:
if "activate" in sys.argv or "deactivate" in sys.argv:
print("Use conda to activate / deactivate the environment.")
print('\n $ conda ' + ' '.join(sys.argv[1:]) + '\n')
print("\n $ conda " + " ".join(sys.argv[1:]) + "\n")
return sys.exit(-1)

if not args:
@@ -793,13 +910,15 @@ def main(*args, **kwargs):

if "--version" in args:
from mamba._version import __version__

print("mamba {}".format(__version__))

args = tuple(ensure_text_type(s) for s in args)

if len(args) > 2 and args[1] == 'env' and args[2] in ('create', 'update'):
if len(args) > 2 and args[1] == "env" and args[2] in ("create", "update"):
# special handling for conda env create!
from mamba import mamba_env

return mamba_env.main()

def exception_converter(*args, **kwargs):
@@ -815,4 +934,5 @@ def main(*args, **kwargs):
return exit_code

from conda.exceptions import conda_exception_handler

return conda_exception_handler(exception_converter, *args, **kwargs)
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2019, QuantStack
# SPDX-License-Identifier: BSD-3-Clause
@@ -6,37 +5,25 @@

from __future__ import absolute_import, print_function

from os.path import basename
import os, sys

from conda._vendor.boltons.setutils import IndexedSet
from conda.base.context import context
from conda.core.solve import Solver
from conda.models.channel import Channel, prioritize_channels
from conda.models.match_spec import MatchSpec
from conda.core.link import UnlinkLinkTransaction, PrefixSetup
from conda.cli.install import handle_txn
from conda_env.installers import conda
from conda.core.prefix_data import PrefixData
from conda.core.solve import diff_for_unlink_link_precs
from conda.models.prefix_graph import PrefixGraph
from conda.common.url import split_anaconda_token

from mamba.utils import get_index, to_package_record_from_subjson, init_api_context, get_installed_jsonfile, to_txn
import mamba.mamba_api as api

import tempfile
import threading
import sys

from conda.base.context import context
from conda.models.channel import prioritize_channels
from conda.models.match_spec import MatchSpec
from conda_env.installers import conda

import mamba.mamba_api as api
from mamba.utils import get_index, get_installed_jsonfile, init_api_context, to_txn


def mamba_install(prefix, specs, args, env, *_, **kwargs):
# TODO: support all various ways this happens
init_api_context()
api.Context().target_prefix = prefix
# Including 'nodefaults' in the channels list disables the defaults
channel_urls = [chan for chan in env.channels if chan != 'nodefaults']
channel_urls = [chan for chan in env.channels if chan != "nodefaults"]

if 'nodefaults' not in env.channels:
if "nodefaults" not in env.channels:
channel_urls.extend(context.channels)
_channel_priority_map = prioritize_channels(channel_urls)
@@ -46,9 +33,12 @@ def mamba_install(prefix, specs, args, env, *_, **kwargs):

for subdir, chan in index:
# add priority here
priority = len(_channel_priority_map) - _channel_priority_map[chan.url(with_credentials=True)][1]
subpriority = 0 if chan.platform == 'noarch' else 1
if subdir.loaded() == False and chan.platform != 'noarch':
priority = (
len(_channel_priority_map)
- _channel_priority_map[chan.url(with_credentials=True)][1]
)
subpriority = 0 if chan.platform == "noarch" else 1
if not subdir.loaded() and chan.platform != "noarch":
# ignore non-loaded subdir if channel is != noarch
continue
@@ -65,7 +55,7 @@ def mamba_install(prefix, specs, args, env, *_, **kwargs):
# if using update
installed_pkg_recs = []
python_constraint = None
if 'update' in args.func:
if "update" in args.func:
installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)
repo = api.Repo(pool, "installed", installed_json_f.name, "")
repo.set_installed()
@@ -74,14 +64,14 @@ def mamba_install(prefix, specs, args, env, *_, **kwargs):
# Also pin the Python version if it's installed
# If python was not specified, check if it is installed.
# If yes, add the installed python to the specs to prevent updating it.
if 'python' not in [MatchSpec(s).name for s in specs]:
if "python" not in [MatchSpec(s).name for s in specs]:
installed_names = [i_rec.name for i_rec in installed_pkg_recs]
if 'python' in installed_names:
i = installed_names.index('python')
if "python" in installed_names:
i = installed_names.index("python")
version = installed_pkg_recs[i].version
python_constraint = MatchSpec('python==' + version).conda_build_form()
python_constraint = MatchSpec("python==" + version).conda_build_form()

for channel, subdir, priority, subpriority in channel_json:
for _, subdir, priority, subpriority in channel_json:
repo = subdir.create_repo(pool)
repo.set_priority(priority, subpriority)
repos.append(repo)
@@ -105,17 +95,20 @@ def mamba_install(prefix, specs, args, env, *_, **kwargs):

specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]

final_precs = IndexedSet()

conda_transaction = to_txn(specs_to_add, [], prefix, to_link, to_unlink, installed_pkg_recs, index)
conda_transaction = to_txn(
specs_to_add, [], prefix, to_link, to_unlink, installed_pkg_recs, index
)

pfe = conda_transaction._get_pfe()
pfe.execute()
conda_transaction.execute()


conda.install = mamba_install


def main():
from conda_env.cli.main import main

sys.argv = sys.argv[0:1] + sys.argv[2:]
return main()
145 mamba/utils.py
@@ -1,66 +1,70 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2019, QuantStack
# SPDX-License-Identifier: BSD-3-Clause

from __future__ import absolute_import, division, print_function, unicode_literals

from conda.models.channel import Channel, prioritize_channels
from conda.core.index import calculate_channel_urls, check_whitelist #, get_index
from conda.models.records import PackageRecord
from conda.models.enums import PackageType
from conda.common.url import join_url
from conda.base.context import context
from conda.core.subdir_data import cache_fn_url, create_cache_dir
from conda.core.prefix_data import PrefixData
from conda.core.index import _supplement_index_with_system
from conda.common.serialize import json_dump
import tempfile
from conda._vendor.boltons.setutils import IndexedSet

from conda.common.url import split_anaconda_token
from conda.gateways.connection.session import CondaHttpAuth
from conda.core.solve import diff_for_unlink_link_precs
from conda.models.prefix_graph import PrefixGraph
from conda.core.link import UnlinkLinkTransaction, PrefixSetup


import threading
import json
import os
import tempfile

from conda._vendor.boltons.setutils import IndexedSet
from conda.base.context import context
from conda.common.serialize import json_dump
from conda.common.url import join_url, split_anaconda_token
from conda.core.index import (
_supplement_index_with_system,
calculate_channel_urls,
check_whitelist,
)
from conda.core.link import PrefixSetup, UnlinkLinkTransaction
from conda.core.prefix_data import PrefixData
from conda.core.solve import diff_for_unlink_link_precs
from conda.gateways.connection.session import CondaHttpAuth
from conda.models.channel import Channel
from conda.models.prefix_graph import PrefixGraph
from conda.models.records import PackageRecord

import mamba.mamba_api as api


def load_channel(subdir_data, result_container):
if not context.quiet:
print("Getting ", subdir_data.channel.name, subdir_data.channel.platform)
return result_container.append(subdir_data.load())

def get_index(channel_urls=(), prepend=True, platform=None,
use_local=False, use_cache=False, unknown=None, prefix=None,
repodata_fn="repodata.json"):

def get_index(
channel_urls=(),
prepend=True,
platform=None,
use_local=False,
use_cache=False,
unknown=None,
prefix=None,
repodata_fn="repodata.json",
):

real_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local)
check_whitelist(real_urls)

dlist = api.DownloadTargetList()

sddata = []
index = []

for idx, url in enumerate(real_urls):
for url in real_urls:
channel = Channel(url)
full_url = CondaHttpAuth.add_binstar_token(channel.url(with_credentials=True) + '/' + repodata_fn)
full_url = CondaHttpAuth.add_binstar_token(
channel.url(with_credentials=True) + "/" + repodata_fn
)

full_path_cache = os.path.join(
api.create_cache_dir(),
api.cache_fn_url(full_url))
api.create_cache_dir(), api.cache_fn_url(full_url)
)
if channel.name:
channel_name = channel.name + '/' + channel.subdir
channel_name = channel.name + "/" + channel.subdir
else:
channel_name = channel.url(with_credentials=False)
sd = api.SubdirData(channel_name,
full_url,
full_path_cache)
sd = api.SubdirData(channel_name, full_url, full_path_cache)

sd.load()
index.append((sd, channel))
@ -73,6 +77,7 @@ def get_index(channel_urls=(), prepend=True, platform=None,
|
|||
|
||||
return index
|
||||
|
||||
|
||||
def init_api_context(use_mamba_experimental=False):
|
||||
api_ctx = api.Context()
|
||||
|
||||
|
@ -83,7 +88,6 @@ def init_api_context(use_mamba_experimental=False):
|
|||
context.quiet = True
|
||||
if use_mamba_experimental:
|
||||
context.json = False
|
||||
# context.dry_run = False
|
||||
api_ctx.set_verbosity(context.verbosity)
|
||||
api_ctx.quiet = context.quiet
|
||||
api_ctx.offline = context.offline
|
||||
|
@ -92,9 +96,9 @@ def init_api_context(use_mamba_experimental=False):
|
|||
api_ctx.always_yes = context.always_yes
|
||||
api_ctx.channels = context.channels
|
||||
|
||||
if context.ssl_verify == False:
|
||||
if context.ssl_verify is False:
|
||||
api_ctx.ssl_verify = "<false>"
|
||||
elif context.ssl_verify is not True:
|
||||
elif context.ssl_verify is None or context.ssl_verify == "":
|
||||
api_ctx.ssl_verify = context.ssl_verify
|
||||
api_ctx.target_prefix = context.target_prefix
|
||||
api_ctx.root_prefix = context.root_prefix
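The first branch above was tightened from `== False` to `is False` (flake8's E712): equality matches anything that compares equal to `False`, such as `0`, while the identity check only matches the `False` singleton itself. A minimal illustration, with a hypothetical `ssl_verify` value:

    ssl_verify = 0                   # falsy and equal to False, but not the False singleton
    assert ssl_verify == False       # an equality test would wrongly take the "<false>" branch
    assert ssl_verify is not False   # the identity test correctly rejects it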
@@ -102,31 +106,34 @@ def init_api_context(use_mamba_experimental=False):
    api_ctx.pkgs_dirs = context.pkgs_dirs
    api_ctx.envs_dirs = context.envs_dirs

    # api_ctx.read_timeout_secs = int(round(context.remote_read_timeout_secs))
    api_ctx.connect_timeout_secs = int(round(context.remote_connect_timeout_secs))
    api_ctx.max_retries = context.remote_max_retries
    api_ctx.retry_backoff = context.remote_backoff_factor
    api_ctx.add_pip_as_python_dependency = context.add_pip_as_python_dependency


def to_package_record_from_subjson(channel, pkg, jsn_string):
    channel = channel
    # print(channel, pkg, jsn_string)
    channel_url = channel.url(with_credentials=True)
    info = json.loads(jsn_string)
    info['fn'] = pkg
    info['channel'] = channel
    info['url'] = join_url(channel_url, pkg)
    info["fn"] = pkg
    info["channel"] = channel
    info["url"] = join_url(channel_url, pkg)
    package_record = PackageRecord(**info)
    return package_record


def get_installed_packages(prefix, show_channel_urls=None):
    result = {'packages': {}}
    result = {"packages": {}}

    # Currently, we need to have pip interop disabled :/
    installed = {rec: rec for rec in PrefixData(prefix, pip_interop_enabled=False).iter_records()}
    installed = {
        rec: rec for rec in PrefixData(prefix, pip_interop_enabled=False).iter_records()
    }

    # add virtual packages as installed packages
    # they are packages installed on the system that conda can do nothing about (e.g. glibc)
    # they are packages installed on the system that conda can do nothing
    # about (e.g. glibc)
    # if another version is needed, installation just fails
    # they don't exist anywhere (they start with __)
    _supplement_index_with_system(installed)
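The comments above describe conda's virtual packages: solver-visible records for system properties (glibc, the OS, the CPU) that no channel provides and nothing can install. A hedged sketch of what `_supplement_index_with_system` conceptually contributes, with abbreviated fields (a real `PackageRecord` carries much more metadata):

    # A virtual package is only a record for the solver; nothing is ever
    # downloaded or linked for it, and the leading "__" marks it as virtual.
    virtual_glibc = {
        "name": "__glibc",
        "version": "2.31",   # detected from the running system
        "build": "0",
    }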
@@ -134,23 +141,36 @@ def get_installed_packages(prefix, show_channel_urls=None):
    for prec in installed:
        json_rec = prec.dist_fields_dump()
        json_rec['depends'] = prec.depends
        json_rec['build'] = prec.build
        result['packages'][prec.fn] = json_rec
        json_rec["depends"] = prec.depends
        json_rec["build"] = prec.build
        result["packages"][prec.fn] = json_rec

    return installed, result


installed_pkg_recs = None


def get_installed_jsonfile(prefix):
    global installed_pkg_recs
    installed_pkg_recs, output = get_installed_packages(prefix, show_channel_urls=True)
    installed_json_f = tempfile.NamedTemporaryFile('w', delete=False)
    installed_json_f = tempfile.NamedTemporaryFile("w", delete=False)
    installed_json_f.write(json_dump(output))
    installed_json_f.flush()
    return installed_json_f, installed_pkg_recs


def to_txn(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, installed_pkg_recs, index=[]):
def to_txn(
    specs_to_add,
    specs_to_remove,
    prefix,
    to_link,
    to_unlink,
    installed_pkg_recs,
    index=None,
):
    if index is None:
        index = []
    to_link_records, to_unlink_records = [], []

    prefix_data = PrefixData(prefix)

@@ -160,7 +180,7 @@ def to_txn(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, installed_
    for _, c in index:
        lookup_dict[c.url(with_credentials=True)] = c

    for c, pkg in to_unlink:
    for _, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)

@@ -175,20 +195,21 @@ def to_txn(specs_to_add, specs_to_remove, prefix, to_link, to_unlink, installed_
            final_precs.add(rec)
            to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(prefix,
                                                          final_precs=IndexedSet(PrefixGraph(final_precs).graph),
                                                          specs_to_add=specs_to_add,
                                                          force_reinstall=context.force_reinstall)
    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs_to_add,
        force_reinstall=context.force_reinstall,
    )

    pref_setup = PrefixSetup(
        target_prefix = prefix,
        unlink_precs = unlink_precs,
        link_precs = link_precs,
        remove_specs = specs_to_remove,
        update_specs = specs_to_add,
        neutered_specs = ()
        target_prefix=prefix,
        unlink_precs=unlink_precs,
        link_precs=link_precs,
        remove_specs=specs_to_remove,
        update_specs=specs_to_add,
        neutered_specs=(),
    )

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    return conda_transaction
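Beyond the black reformatting, the `to_txn` signature above replaces the mutable default `index=[]` with `index=None` plus an explicit guard (flake8-bugbear's B006): a default list is created once, at function definition, and silently shared across calls. The classic pitfall in miniature, with hypothetical helper names:

    def append_bad(item, acc=[]):     # one list, created at def time
        acc.append(item)
        return acc

    def append_good(item, acc=None):  # a fresh list per call unless one is passed
        if acc is None:
            acc = []
        acc.append(item)
        return acc

    assert append_bad(1) == [1]
    assert append_bad(2) == [1, 2]    # state leaked from the previous call
    assert append_good(2) == [2]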
171 setup.py
@@ -6,30 +6,35 @@
# -*- coding: utf-8 -*-

from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
import sys, os, platform
import os
import sys

import setuptools
from setuptools import Extension, setup
from setuptools.command.build_ext import build_ext

here = os.path.dirname(os.path.abspath(__file__))

version_ns = {}
with open(os.path.join(here, 'mamba', '_version.py')) as f:
with open(os.path.join(here, "mamba", "_version.py")) as f:
    exec(f.read(), {}, version_ns)

__version__ = version_ns['__version__']
__version__ = version_ns["__version__"]

with open("include/version.hpp.in", "r") as fi:
    cpp_version_template = fi.read()

v = version_ns['version_info']
cpp_version_template = cpp_version_template.replace('@MAMBA_VERSION_MAJOR@', str(v[0])) \
                                           .replace('@MAMBA_VERSION_MINOR@', str(v[1])) \
                                           .replace('@MAMBA_VERSION_PATCH@', str(v[2]))
v = version_ns["version_info"]
cpp_version_template = (
    cpp_version_template.replace("@MAMBA_VERSION_MAJOR@", str(v[0]))
    .replace("@MAMBA_VERSION_MINOR@", str(v[1]))
    .replace("@MAMBA_VERSION_PATCH@", str(v[2]))
)

with open("include/version.hpp", "w") as fo:
    fo.write(cpp_version_template)


class get_pybind_include(object):
    """Helper class to determine the pybind11 include path
    The purpose of this class is to postpone importing pybind11

@@ -41,84 +46,87 @@ class get_pybind_include(object):
    def __str__(self):
        import pybind11

        return pybind11.get_include(self.user)


if sys.platform.startswith('win'):
    libsolv_prefix = os.path.join(sys.prefix, 'Library\\')
if sys.platform.startswith("win"):
    libsolv_prefix = os.path.join(sys.prefix, "Library\\")
else:
    libsolv_prefix = sys.prefix

print("Looking for libsolv in: ", libsolv_prefix)

extra_link_args = []
if sys.platform == 'darwin':
    extra_link_args = ['-Wl,-rpath', '-Wl,%s' % os.path.abspath(libsolv_prefix)]
if sys.platform == "darwin":
    extra_link_args = ["-Wl,-rpath", "-Wl,%s" % os.path.abspath(libsolv_prefix)]

library_dir = []
if sys.platform == 'win32':
if sys.platform == "win32":
    try:
        conda_prefix = os.getenv('CONDA_PREFIX')
        conda_prefix = os.getenv("CONDA_PREFIX")
        if not conda_prefix:
            conda_prefix = os.getenv('MINICONDA')
            conda_prefix = os.getenv("MINICONDA")
        if not conda_prefix:
            raise RuntimeError("No conda prefix found")

        library_dir = [os.path.join(conda_prefix, 'Library\\lib\\')]
        library_dir = [os.path.join(conda_prefix, "Library\\lib\\")]
        print("Looking for libsolv library in ", library_dir)
    except:
    except Exception:
        print("could not find conda prefix")

    CURL_LIB = 'libcurl'
    CRYPTO_LIB = 'libcrypto'
    CURL_LIB = "libcurl"
    CRYPTO_LIB = "libcrypto"
else:
    CURL_LIB = 'curl'
    CRYPTO_LIB = 'crypto'
    CURL_LIB = "curl"
    CRYPTO_LIB = "crypto"

ext_modules = [
    Extension(
        'mamba.mamba_api',
        "mamba.mamba_api",
        [
            'src/py_interface.cpp',
            'src/activation.cpp',
            'src/channel.cpp',
            'src/context.cpp',
            'src/fetch.cpp',
            'src/history.cpp',
            'src/match_spec.cpp',
            'src/output.cpp',
            'src/package_handling.cpp',
            'src/package_cache.cpp',
            'src/package_paths.cpp',
            'src/prefix_data.cpp',
            'src/package_info.cpp',
            'src/pool.cpp',
            'src/query.cpp',
            'src/repo.cpp',
            'src/solver.cpp',
            'src/subdirdata.cpp',
            'src/thread_utils.cpp',
            'src/transaction.cpp',
            'src/transaction_context.cpp',
            'src/url.cpp',
            'src/util.cpp',
            'src/validate.cpp',
            'src/version.cpp',
            'src/link.cpp'
            "src/py_interface.cpp",
            "src/activation.cpp",
            "src/channel.cpp",
            "src/context.cpp",
            "src/fetch.cpp",
            "src/history.cpp",
            "src/match_spec.cpp",
            "src/output.cpp",
            "src/package_handling.cpp",
            "src/package_cache.cpp",
            "src/package_paths.cpp",
            "src/prefix_data.cpp",
            "src/package_info.cpp",
            "src/pool.cpp",
            "src/query.cpp",
            "src/repo.cpp",
            "src/solver.cpp",
            "src/subdirdata.cpp",
            "src/thread_utils.cpp",
            "src/transaction.cpp",
            "src/transaction_context.cpp",
            "src/url.cpp",
            "src/util.cpp",
            "src/validate.cpp",
            "src/version.cpp",
            "src/link.cpp",
        ],
        include_dirs=[
            get_pybind_include(),
            get_pybind_include(user=True),
            os.path.join(libsolv_prefix, 'include'),
            os.path.join(libsolv_prefix, "include"),
            "include/",
            "include/thirdparty/"
            "include/thirdparty/",
        ],
        library_dirs=library_dir,
        extra_link_args=extra_link_args,
        libraries=['archive', 'solv', 'solvext', CURL_LIB, CRYPTO_LIB],
        language='c++'
        libraries=["archive", "solv", "solvext", CURL_LIB, CRYPTO_LIB],
        language="c++",
    ),
]


# As of Python 3.6, CCompiler has a `has_flag` method.
# cf http://bugs.python.org/issue26689
def has_flag(compiler, flagname):

@@ -126,60 +134,59 @@ def has_flag(compiler, flagname):
    the specified compiler.
    """
    import tempfile

    with tempfile.NamedTemporaryFile('w', suffix='.cpp') as f:
        f.write('int main (int argc, char **argv) { return 0; }')
    with tempfile.NamedTemporaryFile("w", suffix=".cpp") as f:
        f.write("int main (int argc, char **argv) { return 0; }")
        try:
            compiler.compile([f.name], extra_postargs=[flagname])
        except setuptools.distutils.errors.CompileError:
            return False
    return True
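`has_flag` probes whether the active compiler accepts a flag by compiling a throwaway translation unit; `BuildExt` below relies on it to fall back from `-std=c++17` to `-std=c++1z` on older toolchains. A hedged usage sketch (`first_supported_flag` is a hypothetical helper; `compiler` is the distutils `CCompiler` available inside a `build_ext` subclass at build time):

    def first_supported_flag(compiler, candidates=("-std=c++17", "-std=c++1z")):
        """Return the first flag the given compiler accepts, or None."""
        for flag in candidates:
            if has_flag(compiler, flag):  # has_flag as defined above
                return flag
        return None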
class BuildExt(build_ext):
    """A custom build extension for adding compiler-specific options."""

    c_opts = {
        'msvc': ['/EHsc', '/std:c++17', '/Ox', '/DNOMINMAX'],
        'unix': ['-std=c++17', '-O3'],
        # 'unix': ['-std=c++17', '-Og', '-g'],
        "msvc": ["/EHsc", "/std:c++17", "/Ox", "/DNOMINMAX"],
        "unix": ["-std=c++17", "-O3"],
    }

    def build_extensions(self):
        ct = self.compiler.compiler_type

        if sys.platform == 'darwin':
            self.c_opts['unix'] += ['-stdlib=libc++', '-mmacosx-version-min=10.7']
            if not has_flag(self.compiler, '-std=c++17'):
                self.c_opts['unix'].remove('-std=c++17')
                self.c_opts['unix'].append('-std=c++1z')
        if sys.platform == "darwin":
            self.c_opts["unix"] += ["-stdlib=libc++", "-mmacosx-version-min=10.7"]
            if not has_flag(self.compiler, "-std=c++17"):
                self.c_opts["unix"].remove("-std=c++17")
                self.c_opts["unix"].append("-std=c++1z")

        opts = self.c_opts.get(ct, [])
        if ct == 'unix':
        if ct == "unix":
            opts.append('-DVERSION_INFO="%s"' % self.distribution.get_version())
            if has_flag(self.compiler, '-fvisibility=hidden'):
                opts.append('-fvisibility=hidden')
        elif ct == 'msvc':
            if has_flag(self.compiler, "-fvisibility=hidden"):
                opts.append("-fvisibility=hidden")
        elif ct == "msvc":
            opts.append('/DVERSION_INFO=\\"%s\\"' % self.distribution.get_version())

        for ext in self.extensions:
            ext.extra_compile_args = opts
        build_ext.build_extensions(self)


setup(
    name='mamba',
    name="mamba",
    version=__version__,
    author='Wolf Vollprecht',
    author_email='w.vollprecht@gmail.com',
    url='https://github.com/wolfv/mamba',
    description='A fast, libsolv based solver and installer for conda packages.',
    packages=['mamba'],
    entry_points={
        'console_scripts': [
            'mamba = mamba.mamba:main'
        ]
    },
    long_description='A (hopefully faster) reimplementation of the slow bits of conda.',
    author="Wolf Vollprecht",
    author_email="w.vollprecht@gmail.com",
    url="https://github.com/wolfv/mamba",
    description="A fast, libsolv based solver and installer for conda packages.",
    packages=["mamba"],
    entry_points={"console_scripts": ["mamba = mamba.mamba:main"]},
    long_description="A (hopefully faster) reimplementation of the slow bits of conda.",
    ext_modules=ext_modules,
    install_requires=['pybind11>=2.2'],
    extras_require = {'test': ['pytest']},
    cmdclass={'build_ext': BuildExt},
    install_requires=["pybind11>=2.2"],
    extras_require={"test": ["pytest"]},
    cmdclass={"build_ext": BuildExt},
    zip_safe=False,
)

@@ -38,7 +38,7 @@ namespace mamba
    const std::string LOCAL_CHANNELS_NAME = "local";
    const std::string DEFAULT_CHANNELS_NAME = "defaults";

    const std::vector<std::string> DEFAULT_CHANNELS =
    {
#ifdef _WIN32
        "https://repo.anaconda.com/pkgs/main",

@@ -744,4 +744,3 @@ namespace mamba
        }
    }
}

@@ -159,7 +159,7 @@ namespace mamba
            ++colon_idx;
        }
        // remove \r\n header ending

        value = header.substr(colon_idx, header.size() - colon_idx - 2);
        if (key == "ETag")
        {

@@ -232,16 +232,16 @@ namespace mamba
            return std::string(key + ": " + value);
        };

        if (mod_etag.find("_etag") != mod_etag.end())
        {
            m_headers = curl_slist_append(m_headers, to_header("If-None-Match", mod_etag["_etag"]).c_str());
        }
        if (mod_etag.find("_mod") != mod_etag.end())
        {
            m_headers = curl_slist_append(m_headers, to_header("If-Modified-Since", mod_etag["_mod"]).c_str());
        }
    }
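These branches attach the cached `_etag` and `_mod` values as `If-None-Match` and `If-Modified-Since` request headers, so a server holding an unchanged repodata.json can answer with `304 Not Modified` instead of a full transfer. The same conditional-request idea sketched in Python, using `requests` rather than mamba's curl wrapper; `mod_etag` and `url` are assumed inputs:

    import requests

    headers = {}
    if "_etag" in mod_etag:
        headers["If-None-Match"] = mod_etag["_etag"]
    if "_mod" in mod_etag:
        headers["If-Modified-Since"] = mod_etag["_mod"]

    resp = requests.get(url, headers=headers)
    if resp.status_code == 304:
        pass  # the cached copy is still valid; skip the download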

    void DownloadTarget::set_progress_bar(ProgressProxy progress_proxy)
    {
        m_has_progress_bar = true;

@@ -302,7 +302,7 @@ namespace mamba
            }
        }
    }

    bool DownloadTarget::finalize()
    {
        char* effective_url = nullptr;

@@ -447,7 +447,7 @@ namespace mamba
        const long max_wait_msecs = 1000;
        do
        {
            CURLMcode code = curl_multi_perform(m_handle, &still_running);

            if(code != CURLM_OK)
            {

@@ -518,4 +518,3 @@ namespace mamba
        return true;
    }
}
16 src/link.cpp
@@ -288,14 +288,14 @@ namespace mamba
    }

    std::unique_ptr<TemporaryFile> wrap_call(const fs::path& root_prefix,
                                             const fs::path& prefix,
                                             bool dev_mode,
                                             bool debug_wrapper_scripts,
                                             const std::vector<std::string>& arguments)
    {
        // todo add abspath here
        fs::path tmp_prefix = prefix / ".tmp";

#ifdef _WIN32
        ensure_comspec_set();
        std::string comspec = env::get("COMSPEC");

@@ -720,7 +720,7 @@ namespace mamba
        all_py_files_f.close();
        // TODO use the real python file here?!
        std::vector<std::string> command = {
            m_context->target_prefix / m_context->python_path,
            "-Wi", "-m", "compileall", "-q", "-l", "-i",
            all_py_files.path()
        };

@@ -828,9 +828,9 @@ namespace mamba
            paths_json["paths"].push_back(json_record);
        }

        std::string f_name = index_json["name"].get<std::string>() + "-" +
                             index_json["version"].get<std::string>() + "-" +
                             index_json["build"].get<std::string>();

        out_json = index_json;

@@ -879,7 +879,7 @@ namespace mamba
            out_json["files"].push_back(pyc_path);
        }

        if (link_json.find("noarch") != link_json.end() &&
            link_json["noarch"].find("entry_points") != link_json["noarch"].end())
        {
            for (auto& ep : link_json["noarch"]["entry_points"])

@@ -889,7 +889,7 @@ namespace mamba
            auto entry_point_path = get_bin_directory_short_path() / entry_point_parsed.command;
            LOG_INFO << "entry point path: " << entry_point_path << std::endl;
            auto files = create_python_entry_point(entry_point_path, entry_point_parsed);

#ifdef _WIN32
            out_json["paths_data"]["paths"].push_back(
            {

@@ -84,7 +84,7 @@ namespace mamba
        {
            auto key = kv_match[1].str();
            auto value = kv_match[3].str();
            if (key.size() == 0 || value.size() == 0)
            {
                throw std::runtime_error("key-value mismatch in brackets " + spec_str);
            }

@@ -114,7 +114,7 @@ namespace mamba
        {
            throw std::runtime_error(std::string(file) + ": Could not open archive for reading.");
        }

        for (;;)
        {
            interruption_point();

@@ -253,4 +253,3 @@ namespace mamba
        }
    }
}

@@ -40,4 +40,3 @@ namespace mamba
        return m_pool;
    }
}

@@ -126,7 +126,7 @@ PYBIND11_MODULE(mamba_api, m) {
        .def("loaded", &MSubdirData::loaded)
        .def("cache_path", &MSubdirData::cache_path)
        ;

    m.def("cache_fn_url", &cache_fn_url);
    m.def("create_cache_dir", &create_cache_dir);

@@ -327,8 +327,8 @@ namespace mamba
    nlohmann::json MSubdirData::read_mod_and_etag()
    {
        // parse json at the beginning of the stream such as
        // {"_url": "https://conda.anaconda.org/conda-forge/linux-64",
        // "_etag": "W/\"6092e6a2b6cec6ea5aade4e177c3edda-8\"",
        // "_mod": "Sat, 04 Apr 2020 03:29:49 GMT",
        // "_cache_control": "public, max-age=1200"

@@ -355,7 +355,7 @@ namespace mamba
            {
                return result + "\"}";
            }
        }
        else if (next == '\\')
        {
            escaped = true;

@@ -414,4 +414,3 @@ namespace mamba
        return MRepo(pool, m_name, cache_path(), meta);
    }
}

@@ -153,7 +153,7 @@ namespace mamba
    {
        sigset_t sigset;
    }

    interruption_guard::~interruption_guard()
    {
        if (is_sig_interrupted())

@@ -181,4 +181,3 @@ namespace mamba
#endif

}

@@ -352,7 +352,7 @@ namespace mamba
        queue_free(&pkgs);
    }

    std::string MTransaction::find_python_version()
    {
        // We need to find the python version that will be there after this Transaction is finished
        // in order to compile the noarch packages correctly, for example

@@ -483,11 +483,11 @@ namespace mamba
        Console::stream() << "Changing " << PackageInfo(s).str() << " ==> " << PackageInfo(s2).str();
        PackageInfo p_unlink(s);
        PackageInfo p_link(s2);

        UnlinkPackage up(p_unlink, fs::path(cache_dir), &m_transaction_context);
        up.execute();
        rollback.record(up);

        LinkPackage lp(p_link, fs::path(cache_dir), &m_transaction_context);
        lp.execute();

@@ -307,7 +307,7 @@ namespace mamba
    std::string string_transform(const std::string_view& input, int (*functor)(int))
    {
        std::string res(input);
        std::transform(res.begin(), res.end(), res.begin(),
                       [&](unsigned char c) { return functor(c); }
        );
        return res;

@@ -30,7 +30,7 @@ namespace validate
        {
            infile.read(buffer.data(), BUFSIZE);
            size_t count = infile.gcount();
            if (!count)
                break;
            SHA256_Update(&sha256, buffer.data(), count);
        }

@@ -56,7 +56,7 @@ namespace validate
        {
            infile.read(buffer.data(), BUFSIZE);
            size_t count = infile.gcount();
            if (!count)
                break;
            MD5_Update(&md5, buffer.data(), count);
        }

@@ -81,4 +81,3 @@ namespace validate
        return fs::file_size(path) == validation;
    }
}
@@ -1 +1 @@
{}

@@ -6,8 +6,7 @@
        "A_0.1.0.tar.bz2": {
            "build": "abc",
            "build_number": 0,
            "depends": [
            ],
            "depends": [],
            "license": "BSD",
            "license_family": "BSD",
            "md5": "85107fc10154734ef34a5a75685be684",

@@ -1 +1 @@
{}

@@ -6,8 +6,7 @@
        "A_0.1.0.tar.bz2": {
            "build": "abc",
            "build_number": 0,
            "depends": [
            ],
            "depends": [],
            "license": "BSD",
            "license_family": "BSD",
            "md5": "85107fc10154734ef34a5a75685be684",

@@ -21,8 +20,7 @@
        "A_0.2.0.tar.bz2": {
            "build": "abc",
            "build_number": 0,
            "depends": [
            ],
            "depends": [],
            "license": "BSD",
            "license_family": "BSD",
            "md5": "85107fc10154734ef34a5a75685be684",

@@ -37,7 +35,7 @@
            "build": "abc",
            "build_number": 0,
            "depends": [
                "A"
            ],
            "license": "BSD",
            "license_family": "BSD",

@@ -6,8 +6,7 @@
        "A_0.1.0.tar.bz2": {
            "build": "abc",
            "build_number": 0,
            "depends": [
            ],
            "depends": [],
            "license": "BSD",
            "license_family": "BSD",
            "md5": "85107fc10154734ef34a5a75685be684",
@@ -1,21 +1,26 @@
from distutils.version import StrictVersion
from utils import Environment, add_glibc_virtual_package, copy_channels_osx, run_mamba_conda

import json
import pytest
import subprocess
import uuid
from distutils.version import StrictVersion

import pytest
from utils import (
    Environment,
    add_glibc_virtual_package,
    copy_channels_osx,
    run_mamba_conda,
)


def test_install():
    add_glibc_virtual_package()
    copy_channels_osx()

    channels = ['./test/channel_b', './test/channel_a']
    package = 'a'
    channels = ["./test/channel_b", "./test/channel_a"]
    package = "a"
    run_mamba_conda(channels, package)

    package = 'b'
    package = "b"
    run_mamba_conda(channels, package)

    channels = channels[::-1]

@@ -26,14 +31,14 @@ def test_update():
    # check updating a package when a newer version
    with Environment() as env:
        # first install an older version
        version = '1.25.7'
        env.execute(f'$MAMBA install -q -y urllib3={version}')
        version = "1.25.7"
        env.execute(f"$MAMBA install -q -y urllib3={version}")
        out = env.execute('python -c "import urllib3; print(urllib3.__version__)"')
        # check that the installed version is the old one
        assert out[-1] == version

        # then update package
        env.execute('$MAMBA update -q -y urllib3')
        env.execute("$MAMBA update -q -y urllib3")
        out = env.execute('python -c "import urllib3; print(urllib3.__version__)"')
        # check that the installed version is newer
        assert StrictVersion(out[-1]) > StrictVersion(version)

@@ -42,21 +47,25 @@ def test_update():
def test_track_features():
    with Environment() as env:
        # should install CPython since PyPy has track features
        version = '3.6.9'
        env.execute(f'$MAMBA install -q -y "python={version}" --strict-channel-priority')
        version = "3.6.9"
        env.execute(
            f'$MAMBA install -q -y "python={version}" --strict-channel-priority'
        )
        out = env.execute('python -c "import sys; print(sys.version)"')
        assert out[-2].startswith(version)
        assert out[-1].startswith('[GCC')
        assert out[-1].startswith("[GCC")

        # now force PyPy install
        env.execute(f'$MAMBA install -q -y "python={version}=*pypy" --strict-channel-priority')
        env.execute(
            f'$MAMBA install -q -y "python={version}=*pypy" --strict-channel-priority'
        )
        out = env.execute('python -c "import sys; print(sys.version)"')
        assert out[-2].startswith(version)
        assert out[-1].startswith('[PyPy')
        assert out[-1].startswith("[PyPy")


@pytest.mark.parametrize('experimental', [True, False])
@pytest.mark.parametrize('use_json', [True, False])
@pytest.mark.parametrize("experimental", [True, False])
@pytest.mark.parametrize("use_json", [True, False])
def test_create_dry_run(experimental, use_json, tmpdir):
    env_dir = tmpdir / str(uuid.uuid1())

@@ -85,24 +94,26 @@ def test_create_files(tmpdir):
    """Check that multiple --file arguments are respected."""
    (tmpdir / "1.txt").write(b"a")
    (tmpdir / "2.txt").write(b"b")
    output = subprocess.check_output([
        'mamba',
        'create',
        '-p',
        str(tmpdir / 'env'),
        '--json',
        '--override-channels',
        '--strict-channel-priority',
        '--dry-run',
        '-c',
        './test/channel_b',
        '-c',
        './test/channel_a',
        '--file',
        str(tmpdir / "1.txt"),
        '--file',
        str(tmpdir / "2.txt")
    ])
    output = subprocess.check_output(
        [
            "mamba",
            "create",
            "-p",
            str(tmpdir / "env"),
            "--json",
            "--override-channels",
            "--strict-channel-priority",
            "--dry-run",
            "-c",
            "./test/channel_b",
            "-c",
            "./test/channel_a",
            "--file",
            str(tmpdir / "1.txt"),
            "--file",
            str(tmpdir / "2.txt"),
        ]
    )
    output = json.loads(output)
    names = {x['name'] for x in output['actions']['FETCH']}
    assert names == {'a', 'b'}
    names = {x["name"] for x in output["actions"]["FETCH"]}
    assert names == {"a", "b"}
@@ -85,14 +85,14 @@ namespace mamba
        EXPECT_EQ(ms.name, "ipykernel");
    }
    {
        MatchSpec ms("numpy 1.7*");
        EXPECT_EQ(ms.version, "1.7*");
        EXPECT_EQ(ms.name, "numpy");
        EXPECT_EQ(ms.conda_build_form(), "numpy 1.7*");
        EXPECT_EQ(ms.str(), "numpy=1.7");
    }
    {
        MatchSpec ms("numpy[version='1.7|1.8']");
        // TODO!
        // EXPECT_EQ(ms.version, "1.7|1.8");
        EXPECT_EQ(ms.name, "numpy");

@@ -100,7 +100,7 @@ namespace mamba
        EXPECT_EQ(ms.str(), "numpy[version='1.7|1.8']");
    }
    {
        MatchSpec ms("conda-forge/linux64::xtensor==0.12.3");
        EXPECT_EQ(ms.version, "0.12.3");
        EXPECT_EQ(ms.name, "xtensor");
        EXPECT_EQ(ms.channel, "conda-forge/linux64");

@@ -243,4 +243,4 @@ namespace mamba
        EXPECT_THROW(path::is_writable("/tmp/this/path/doesnt/exist"), std::runtime_error);
        }
    }
}

@@ -13,4 +13,4 @@ namespace mamba
        // the prefix should be cleaned out, because it doesn't have the `conda-meta/history` file
        EXPECT_EQ(new_prefixes.size(), prefixes.size());
    }
}

@@ -125,4 +125,3 @@ namespace mamba
        EXPECT_TRUE(vis.get_cross_edge_map().empty());
    }
}

@@ -23,4 +23,4 @@ namespace mamba
        EXPECT_TRUE(starts_with(expanded.string(), "/home/"));
        }
    }
}

@@ -15,7 +15,7 @@ namespace mamba
    EXPECT_EQ(current_command, "mamba");

    Console::instance().init_multi_progress();
    {
        interruption_guard g([&res]()
        {
            // Test for double free (segfault if that happends)

@@ -37,7 +37,6 @@ namespace mamba
        std::this_thread::sleep_for(std::chrono::seconds(1));
    }
    EXPECT_EQ(res, -1);
#endif
#endif
    }
}

@@ -37,4 +37,4 @@ namespace mamba
    Context::instance().quiet = false;
#endif
    }
}
@@ -1,29 +1,33 @@
import subprocess
import shutil
import os
import shutil
import subprocess
import uuid


def get_lines(std_pipe):
    '''Generator that yields lines from a standard pipe as they are printed.'''
    for line in iter(std_pipe.readline, ''):
    """Generator that yields lines from a standard pipe as they are printed."""
    for line in iter(std_pipe.readline, ""):
        yield line
    #std_pipe.close()
    # std_pipe.close()


class Shell:

    def __init__(self):
        self.process = subprocess.Popen(['bash'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True)
        self.sentinel = '__command_done__'
        self.echo_sentinel = 'echo ' + self.sentinel + '\n'
        self.process = subprocess.Popen(
            ["bash"],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            universal_newlines=True,
        )
        self.sentinel = "__command_done__"
        self.echo_sentinel = "echo " + self.sentinel + "\n"

    def execute(self, commands):
        if type(commands) == str:
            commands = [commands]
        for cmd in commands:
            if not cmd.endswith('\n'):
                cmd += '\n'
            if not cmd.endswith("\n"):
                cmd += "\n"
            self.process.stdin.write(cmd)
        self.process.stdin.flush()
        self.process.stdin.write(self.echo_sentinel)

@@ -32,7 +36,7 @@ class Shell:
        out = []
        for line in get_lines(self.process.stdout):
            if not self.sentinel in line:
                print(line, end='')
                print(line, end="")
                out.append(line[:-1])
            else:
                break
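`Shell` keeps one long-lived bash process and marks the end of each command by echoing a sentinel string; the loop above reads output until the sentinel shows up, so the pipe never has to be closed between commands. The pattern in isolation, as a sketch that assumes a POSIX system with bash on PATH:

    import subprocess

    proc = subprocess.Popen(
        ["bash"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True
    )
    SENTINEL = "__done__"
    proc.stdin.write("echo hello\n")
    proc.stdin.write(f"echo {SENTINEL}\n")
    proc.stdin.flush()

    lines = []
    for line in iter(proc.stdout.readline, ""):
        if SENTINEL in line:
            break  # the command finished; keep the shell alive for the next one
        lines.append(line.rstrip("\n"))
    assert lines == ["hello"]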

@@ -44,67 +48,76 @@ class Shell:

class Environment:

    def __init__(self):
        self.shell = Shell()
        self.name = 'env_' + str(uuid.uuid1())
        self.shell.execute('MAMBA=$CONDA_PREFIX/bin/mamba')
        self.shell.execute('conda create -q -y -n ' + self.name)
        self.shell.execute('CONDA_BASE=$(conda info --base)')
        self.shell.execute('source $CONDA_BASE/etc/profile.d/conda.sh')
        self.shell.execute('conda activate ' + self.name)
        self.name = "env_" + str(uuid.uuid1())
        self.shell.execute("MAMBA=$CONDA_PREFIX/bin/mamba")
        self.shell.execute("conda create -q -y -n " + self.name)
        self.shell.execute("CONDA_BASE=$(conda info --base)")
        self.shell.execute("source $CONDA_BASE/etc/profile.d/conda.sh")
        self.shell.execute("conda activate " + self.name)

    def __enter__(self):
        return self.shell

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shell.execute('conda deactivate')
        self.shell.execute(f'conda remove -q -y --name {self.name} --all')
        self.shell.execute("conda deactivate")
        self.shell.execute(f"conda remove -q -y --name {self.name} --all")
        self.shell.exit()


def get_glibc_version():
    try:
        output = subprocess.check_output(['ldd', '--version'])
        output = subprocess.check_output(["ldd", "--version"])
    except:
        return
    output.splitlines()
    version = output.splitlines()[0].split()[-1]
    return version.decode('ascii')
    return version.decode("ascii")


def run(exe, channels, package):
    cmd = [exe, 'create', '-n', 'xxx', '--override-channels', '--strict-channel-priority', '--dry-run']
    cmd = [
        exe,
        "create",
        "-n",
        "xxx",
        "--override-channels",
        "--strict-channel-priority",
        "--dry-run",
    ]
    for channel in channels:
        cmd += ['-c', channel]
        cmd += ["-c", channel]
    cmd.append(package)
    subprocess.run(cmd, check=True)


def run_mamba_conda(channels, package):
    run('conda', channels, package)
    run('mamba', channels, package)
    run("conda", channels, package)
    run("mamba", channels, package)


def add_glibc_virtual_package():
    version = get_glibc_version()
    with open('test/channel_a/linux-64/repodata.tpl') as f:
    with open("test/channel_a/linux-64/repodata.tpl") as f:
        repodata = f.read()
    with open('test/channel_a/linux-64/repodata.json', 'w') as f:
    with open("test/channel_a/linux-64/repodata.json", "w") as f:
        if version is not None:
            glibc_placeholder = ', "__glibc=' + version + '"'
        else:
            glibc_placeholder = ''
        repodata = repodata.replace('GLIBC_PLACEHOLDER', glibc_placeholder)
            glibc_placeholder = ""
        repodata = repodata.replace("GLIBC_PLACEHOLDER", glibc_placeholder)
        f.write(repodata)


def copy_channels_osx():
    for channel in ['a', 'b']:
        if not os.path.exists(f'test/channel_{channel}/osx-64'):
            shutil.copytree(f'test/channel_{channel}/linux-64', f'test/channel_{channel}/osx-64')
            with open(f'test/channel_{channel}/osx-64/repodata.json') as f:
    for channel in ["a", "b"]:
        if not os.path.exists(f"test/channel_{channel}/osx-64"):
            shutil.copytree(
                f"test/channel_{channel}/linux-64", f"test/channel_{channel}/osx-64"
            )
            with open(f"test/channel_{channel}/osx-64/repodata.json") as f:
                repodata = f.read()
            with open(f'test/channel_{channel}/osx-64/repodata.json', 'w') as f:
                repodata = repodata.replace('linux', 'osx')
            with open(f"test/channel_{channel}/osx-64/repodata.json", "w") as f:
                repodata = repodata.replace("linux", "osx")
                f.write(repodata)