├── .bumpversion.cfg ├── .clang-format ├── .gitignore ├── .pre-commit-config.yaml ├── CMakeLists.txt ├── LICENCE.txt ├── README.md ├── bin ├── CMakeLists.txt └── main.cpp ├── cmake └── modules │ └── FindLibSolv.cmake ├── include └── picomamba │ ├── conda_meta.hpp │ ├── picomamba.hpp │ ├── picomamba_config.hpp │ └── picomamba_core.hpp ├── picomamba.pc.in ├── picomambaConfig.cmake.in └── python ├── CMakeLists.txt ├── module └── picomamba │ ├── __init__.py │ ├── browser.py │ ├── conda_env.py │ ├── logging.py │ ├── picomamba.py │ ├── pkg_spec.py │ ├── tar.py │ └── transaction.py ├── src ├── CMakeLists.txt ├── def_build_config.cpp ├── def_picomamba_core.cpp └── main.cpp └── tests └── test_picomamba.py /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.3.0 3 | commit = True 4 | tag = True 5 | 6 | message = Bump version: {current_version} → {new_version} 7 | 8 | 9 | [bumpversion:file:include/picomamba/picomamba_version_major.hpp] 10 | 11 | parse = (?P\d+) 12 | serialize = {major} 13 | 14 | search = PICOMAMBA_VERSION_MAJOR {current_version} 15 | replace = PICOMAMBA_VERSION_MAJOR {new_version} 16 | 17 | 18 | [bumpversion:file:include/picomamba/picomamba_version_minor.hpp] 19 | 20 | parse = (?P\d+) 21 | serialize = {minor} 22 | search = PICOMAMBA_VERSION_MINOR {current_version} 23 | replace = PICOMAMBA_VERSION_MINOR {new_version} 24 | 25 | 26 | [bumpversion:file:include/picomamba/picomamba_version_patch.hpp] 27 | 28 | parse = (?P\d+) 29 | serialize = {patch} 30 | search = PICOMAMBA_VERSION_PATCH {current_version} 31 | replace = PICOMAMBA_VERSION_PATCH {new_version} -------------------------------------------------------------------------------- /.clang-format: -------------------------------------------------------------------------------- 1 | BasedOnStyle: Mozilla 2 | AccessModifierOffset: '-4' 3 | AlignAfterOpenBracket: Align 4 | AlignEscapedNewlinesLeft: 'false' 5 | 
AllowAllParametersOfDeclarationOnNextLine: 'true' 6 | AllowShortBlocksOnASingleLine: 'false' 7 | AllowShortCaseLabelsOnASingleLine: 'false' 8 | AllowShortFunctionsOnASingleLine: 'false' 9 | AllowShortIfStatementsOnASingleLine: 'false' 10 | AllowShortLoopsOnASingleLine: 'false' 11 | AlwaysBreakTemplateDeclarations: 'true' 12 | SpaceAfterTemplateKeyword: 'true' 13 | BreakBeforeBinaryOperators: All 14 | BreakBeforeBraces: Allman 15 | BreakBeforeTernaryOperators: 'true' 16 | BreakConstructorInitializersBeforeComma: 'true' 17 | BreakStringLiterals: 'false' 18 | ColumnLimit: '100' 19 | ConstructorInitializerAllOnOneLineOrOnePerLine: 'false' 20 | ConstructorInitializerIndentWidth: '4' 21 | ContinuationIndentWidth: '4' 22 | Cpp11BracedListStyle: 'false' 23 | DerivePointerAlignment: 'false' 24 | DisableFormat: 'false' 25 | ExperimentalAutoDetectBinPacking: 'true' 26 | IndentCaseLabels: 'true' 27 | IndentWidth: '4' 28 | IndentWrappedFunctionNames: 'false' 29 | JavaScriptQuotes: Single 30 | KeepEmptyLinesAtTheStartOfBlocks: 'false' 31 | Language: Cpp 32 | MaxEmptyLinesToKeep: '2' 33 | NamespaceIndentation: All 34 | ObjCBlockIndentWidth: '4' 35 | ObjCSpaceAfterProperty: 'false' 36 | ObjCSpaceBeforeProtocolList: 'false' 37 | PointerAlignment: Left 38 | ReflowComments: 'true' 39 | SortIncludes: 'false' 40 | SpaceAfterCStyleCast: 'true' 41 | SpaceBeforeAssignmentOperators: 'true' 42 | SpaceBeforeParens: ControlStatements 43 | SpaceInEmptyParentheses: 'false' 44 | SpacesBeforeTrailingComments: '2' 45 | SpacesInAngles: 'false' 46 | SpacesInCStyleCastParentheses: 'false' 47 | SpacesInContainerLiterals: 'false' 48 | SpacesInParentheses: 'false' 49 | SpacesInSquareBrackets: 'false' 50 | Standard: c++17 51 | TabWidth: '4' 52 | UseTab: Never 53 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | build_host 3 | *.pyc 4 | __pycache__/** 5 | 
**/__pycache__/**/* 6 | **/__pycache__/* 7 | .pytest_cache -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: libmamba/tests/repodata_json_cache* 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v4.1.0 5 | hooks: 6 | - id: trailing-whitespace 7 | - id: end-of-file-fixer 8 | - id: fix-encoding-pragma 9 | args: [--remove] 10 | - id: check-yaml 11 | exclude: tests 12 | - id: check-toml 13 | - id: check-json 14 | - id: check-merge-conflict 15 | - id: pretty-format-json 16 | args: [--autofix] 17 | - id: debug-statements 18 | language_version: python3 19 | - repo: https://github.com/pre-commit/mirrors-clang-format 20 | rev: v13.0.1 21 | hooks: 22 | - id: clang-format 23 | args: [--style=file, -i] 24 | exclude: "(.json$|.js$)" 25 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | cmake_minimum_required(VERSION 3.1) 3 | set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules) 4 | project (picomamba) 5 | 6 | 7 | set(PROJECT_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/include) 8 | set(CPP_ROOT_FOLDER_NAME picomamba) 9 | include_directories(${PROJECT_INCLUDE_DIR}) 10 | 11 | # C++ 17 12 | # =========== 13 | 14 | set(CMAKE_CXX_STANDARD 17 CACHE STRING "C++ version selection") # or 11, 14, 17, 20 15 | set(CMAKE_CXX_STANDARD_REQUIRED ON) # optional, ensure standard is supported 16 | set(CMAKE_CXX_EXTENSIONS OFF) # optional, keep compiler extensions off 17 | 18 | include(CheckCXXCompilerFlag) 19 | if (CMAKE_CXX_COMPILER_ID MATCHES "Clang" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Intel") 20 | CHECK_CXX_COMPILER_FLAG("-std=c++17" HAS_CPP17_FLAG) 21 | if (HAS_CPP17_FLAG) 22 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") 23 | 
else() 24 | message(FATAL_ERROR "Unsupported compiler -- C++17 support required!") 25 | endif() 26 | endif() 27 | 28 | 29 | # Emscripten 30 | if(EMSCRIPTEN) 31 | set_property(GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS TRUE) 32 | set(CMAKE_SHARED_LIBRARY_CREATE_C_FLAGS "-s SIDE_MODULE=1") 33 | set(CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS "-s SIDE_MODULE=1") 34 | set(CMAKE_STRIP FALSE) # used by default in pybind11 on .so modules 35 | endif() 36 | 37 | # Versionning 38 | # =========== 39 | set(${PROJECT_NAME}_VERSION 0.2.0) 40 | message(STATUS "Building project v${${PROJECT_NAME}_VERSION}") 41 | 42 | # Dependencies 43 | # ============ 44 | find_package(nlohmann_json REQUIRED) 45 | find_package(LibSolv REQUIRED) 46 | # ind_package(xtl REQUIRED) 47 | 48 | # # Build 49 | # # ===== 50 | 51 | 52 | SET(INTERFACE_LIB_NAME picomamba) 53 | 54 | file(GLOB_RECURSE ${PROJECT_NAME}_HEADERS ${PROJECT_INCLUDE_DIR}/*.hpp) 55 | 56 | 57 | add_library(${INTERFACE_LIB_NAME} INTERFACE) 58 | 59 | 60 | target_compile_features(${INTERFACE_LIB_NAME} INTERFACE cxx_std_17) 61 | 62 | 63 | message(STATUS "INSTALL_INTERFACE: ${CMAKE_INSTALL_INCLUDEDIR}") 64 | target_include_directories(${INTERFACE_LIB_NAME} INTERFACE 65 | $ 66 | $) 67 | 68 | 69 | 70 | target_link_libraries(${INTERFACE_LIB_NAME} 71 | INTERFACE ${LibSolv_LIBRARIES} nlohmann_json::nlohmann_json) 72 | 73 | 74 | # Python 75 | # ============ 76 | OPTION(BUILD_PYTHON "${PROJECT_NAME} python binding" ON) 77 | if(BUILD_PYTHON) 78 | add_subdirectory(python) 79 | endif() 80 | 81 | 82 | # binary 83 | # ============ 84 | OPTION(BUILD_BINARY "build binary" ON) 85 | if(BUILD_BINARY) 86 | add_subdirectory(bin) 87 | endif() 88 | 89 | 90 | 91 | # Installation 92 | # ============ 93 | 94 | include(GNUInstallDirs) 95 | include(CMakePackageConfigHelpers) 96 | 97 | 98 | set(${PROJECT_NAME}_CMAKECONFIG_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}" CACHE 99 | STRING "install path for ${PROJECT_NAME}Config.cmake") 100 | 101 | 102 | 
message(STATUS "DEST ${${PROJECT_NAME}_CMAKECONFIG_INSTALL_DIR}" ) 103 | 104 | 105 | install(TARGETS ${INTERFACE_LIB_NAME} 106 | EXPORT ${INTERFACE_LIB_NAME}-targets) 107 | 108 | install(EXPORT ${INTERFACE_LIB_NAME}-targets 109 | FILE ${INTERFACE_LIB_NAME}Targets.cmake 110 | #NAMESPACE ${PROJECT_NAME}:: 111 | DESTINATION lib/cmake/${PROJECT_NAME} 112 | ) 113 | 114 | 115 | install(DIRECTORY ${PROJECT_INCLUDE_DIR}/${CPP_ROOT_FOLDER_NAME} 116 | DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) 117 | 118 | 119 | 120 | 121 | configure_package_config_file(${PROJECT_NAME}Config.cmake.in 122 | "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" 123 | INSTALL_DESTINATION ${${PROJECT_NAME}_CMAKECONFIG_INSTALL_DIR}) 124 | 125 | 126 | # ${PROJECT_NAME} is header-only and does not depend on the architecture. 127 | # Remove CMAKE_SIZEOF_VOID_P from ${PROJECT_NAME}ConfigVersion.cmake so that an ${PROJECT_NAME}Config.cmake 128 | # generated for a 64 bit target can be used for 32 bit targets and vice versa. 
129 | set(_${PROJECT_NAME}_CMAKE_SIZEOF_VOID_P ${CMAKE_SIZEOF_VOID_P}) 130 | unset(CMAKE_SIZEOF_VOID_P) 131 | write_basic_package_version_file(${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake 132 | VERSION ${${PROJECT_NAME}_VERSION} 133 | COMPATIBILITY AnyNewerVersion) 134 | set(CMAKE_SIZEOF_VOID_P ${_${PROJECT_NAME}_CMAKE_SIZEOF_VOID_P}) 135 | install(FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake 136 | ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake 137 | DESTINATION ${${PROJECT_NAME}_CMAKECONFIG_INSTALL_DIR}) 138 | 139 | install(EXPORT ${PROJECT_NAME}-targets 140 | FILE ${PROJECT_NAME}Targets.cmake 141 | DESTINATION ${${PROJECT_NAME}_CMAKECONFIG_INSTALL_DIR}) 142 | 143 | configure_file(${PROJECT_NAME}.pc.in 144 | "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc" 145 | @ONLY) 146 | install(FILES "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc" 147 | DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig/") 148 | 149 | 150 | 151 | -------------------------------------------------------------------------------- /LICENCE.txt: -------------------------------------------------------------------------------- 1 | 2 | MIT License 3 | 4 | Copyright (c) 2022, Thorsten Beier 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 
15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Picomamba 2 | ========== 3 | 4 | 5 | 6 | 7 | 8 | Mamba in the browser can be used in conjunction with python compiled to wasm from `emscripten-forge` 9 | The usage looks like: 10 | ```python 11 | import picomamba 12 | 13 | def callback(name, done, total): 14 | percent = 100.0 * done / total 15 | print(f"{name} {percent:.2f}% ({done}/{total})") 16 | 17 | pm = picomamba.PicoMamba( 18 | env_prefix=env_prefix, # the name of the env 19 | arch_root_url=arch_url, # root url for arch pkgs 20 | noarch_template=noarch_template, # templated url for norach pkgs 21 | progress_callback=callback, # report download progress 22 | ) 23 | await pm.fetch_repodata( 24 | arch_url=arch_url, # url for arch repodata tar.bz2 file 25 | noarch_url=noarch_url # url for noarch repodata tar.bz2 file 26 | ) 27 | transaction = pm.solve( 28 | ["regex", "imageio", "numpy", "networkx"] 29 | ) 30 | await pm.install_transaction(transaction) 31 | 32 | 33 | import numpy 34 | import regex 35 | import networkx 36 | ``` 37 | the output would be 38 | ``` 39 | 2022-09-15 11:08:11,235 INFO -- download_repodata 40 | repodata 0.00% (0/13688) 41 | repodata 0.00% (0/3511132) 42 | repodata 0.39% (13688/3511132) 43 | repodata 0.39% (13688/3511132) 44 | repodata 30.25% (1062264/3511132) 45 | repodata 60.12% (2110840/3511132) 46 | repodata 100.00% 
(3511132/3511132) 47 | repodata 100.00% (3511132/3511132) 48 | 2022-09-15 11:08:12,095 INFO -- download_repodata DONE! took 0.86s 49 | 2022-09-15 11:08:12,096 INFO -- untar repodata 50 | 2022-09-15 11:08:13,648 INFO -- untar repodata DONE! took 1.55s 51 | 2022-09-15 11:08:13,649 INFO -- load installed packages 52 | 2022-09-15 11:08:13,654 INFO -- load installed packages DONE! took 0.01s 53 | 2022-09-15 11:08:13,655 INFO -- solve 54 | 2022-09-15 11:08:13,665 INFO -- solve DONE! took 0.01s 55 | 2022-09-15 11:08:13,667 INFO -- install transaction 56 | 2022-09-15 11:08:13,668 INFO -- install-noarch [networkx=2.8.6=pyhd8ed1ab_0, imageio=2.21.2=pyhfa7a67d_0] 57 | packages 0.00% (0/1629208) 58 | packages 0.00% (0/4973031) 59 | packages 32.76% (1629208/4973031) 60 | packages 32.76% (1629208/4973031) 61 | packages 74.93% (3726360/4973031) 62 | packages 84.16% (4185112/4973031) 63 | packages 100.00% (4973031/4973031) 64 | packages 100.00% (4973031/4973031) 65 | 2022-09-15 11:08:13,799 INFO -- install noarch networkx=2.8.6=pyhd8ed1ab_0 66 | 2022-09-15 11:08:16,246 INFO -- install noarch networkx=2.8.6=pyhd8ed1ab_0 DONE! took 2.45s 67 | 2022-09-15 11:08:16,247 INFO -- install noarch imageio=2.21.2=pyhfa7a67d_0 68 | 2022-09-15 11:08:16,854 INFO -- install noarch imageio=2.21.2=pyhfa7a67d_0 DONE! took 0.61s 69 | 2022-09-15 11:08:16,855 INFO -- wait for deps 70 | 2022-09-15 11:08:16,857 INFO -- install-arch [regex=2022.1.18=py310h672cd09_0, pillow=9.1.0=h8b4d581_0, clapack=3.2.1=h1a65802_0, scipy=1.8.1=py310h7c23efa_0] 71 | ``` 72 | 73 | 74 | 75 | Warnings 76 | ========= 77 | This is all very experimental and far from usable!!! 
78 | -------------------------------------------------------------------------------- /bin/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | # add binary 3 | add_executable(picomamba_core main.cpp) 4 | 5 | # link libraries 6 | target_link_libraries(picomamba_core PRIVATE picomamba) -------------------------------------------------------------------------------- /bin/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | #include 6 | #include "picomamba/picomamba_core.hpp" 7 | 8 | int main(const int argc, const char ** argv) 9 | { 10 | if (argc < 4) 11 | { 12 | std::cerr << "Usage: picomamba " << std::endl; 13 | return 1; 14 | } 15 | 16 | std::string noarch_conda_forge_repodata_path = argv[1]; 17 | std::string noarch_emscripten_forge = argv[2]; 18 | std::string arch_emscripten_forge = argv[3]; 19 | 20 | picomamba::PicoMambaCore picomamba_core; 21 | picomamba_core.load_repodata_from_file(noarch_conda_forge_repodata_path, "noarch_conda_forge"); 22 | picomamba_core.load_repodata_from_file(noarch_emscripten_forge, "noarch_emscripten_forge"); 23 | picomamba_core.load_repodata_from_file(arch_emscripten_forge, "arch_emscripten_forge"); 24 | 25 | std::vector specs = {"python", "numpy"}; 26 | 27 | picomamba_core.solve(specs.begin(), specs.end(), picomamba::PicoMambaCore::SolveConfig{false, true}, 28 | 29 | [&](Transaction * transaction){ 30 | std::cout << "Solving done: " << std::endl; 31 | 32 | Pool* pool = transaction->pool; 33 | 34 | auto as_tuple = [&pool](Solvable* s) 35 | { 36 | const char* name = pool_id2str(pool, s->name); 37 | const char* evr = pool_id2str(pool, s->evr); 38 | const char* build_string; 39 | build_string = solvable_lookup_str(s, SOLVABLE_BUILDFLAVOR); 40 | int build_number = std::stoi(solvable_lookup_str(s, SOLVABLE_BUILDVERSION)); 41 | 42 | return std::make_tuple(name, evr, build_string, build_number, 
s->repo->name); 43 | }; 44 | 45 | for (int i = 0; i < transaction->steps.count; i++) 46 | { 47 | Id p = transaction->steps.elements[i]; 48 | Id ttype = transaction_type(transaction, p, SOLVER_TRANSACTION_SHOW_ALL); 49 | Solvable* s = pool_id2solvable(transaction->pool, p); 50 | Id i2; 51 | Solvable* s2; 52 | switch (ttype) 53 | { 54 | case SOLVER_TRANSACTION_DOWNGRADED: 55 | case SOLVER_TRANSACTION_UPGRADED: 56 | case SOLVER_TRANSACTION_CHANGED: 57 | case SOLVER_TRANSACTION_REINSTALLED: 58 | { 59 | 60 | s2 = transaction->pool->solvables + transaction_obs_pkg(transaction, p); 61 | s2 = pool_id2solvable(pool, i2); 62 | auto t = as_tuple(s); 63 | std::cout << "Removing " << std::get<0>(t) << std::get<1>(t) << std::get<2>(t) << std::get<3>(t) << std::get<4>(t) << std::endl; 64 | break; 65 | } 66 | case SOLVER_TRANSACTION_ERASE: 67 | { 68 | auto t = as_tuple(s); 69 | std::cout << "Removing " << std::get<0>(t) << std::get<1>(t) << std::get<2>(t) << std::get<3>(t) << std::get<4>(t) << std::endl; 70 | break; 71 | } 72 | case SOLVER_TRANSACTION_INSTALL: 73 | { 74 | auto t = as_tuple(s); 75 | std::cout << "Installing " << std::get<0>(t) << std::get<1>(t) << std::get<2>(t) << std::get<3>(t) << std::get<4>(t) << std::endl; 76 | break; 77 | } 78 | case SOLVER_TRANSACTION_IGNORE: 79 | { 80 | auto t = as_tuple(s); 81 | std::cout << "Ignoring " << std::get<0>(t) << std::get<1>(t) << std::get<2>(t) << std::get<3>(t) << std::get<4>(t) << std::endl; 82 | break; 83 | } 84 | default: 85 | std::cout << "Some weird case not handled" << std::endl; 86 | break; 87 | } 88 | } 89 | 90 | 91 | 92 | 93 | 94 | 95 | }); 96 | 97 | } -------------------------------------------------------------------------------- /cmake/modules/FindLibSolv.cmake: -------------------------------------------------------------------------------- 1 | # FindLibSolv - Find libsolv headers and libraries. 
2 | # 3 | # Sample: 4 | # 5 | # SET( LibSolv_USE_STATIC_LIBS OFF ) 6 | # FIND_PACKAGE( LibSolv REQUIRED ext ) 7 | # IF( LibSolv_FOUND ) 8 | # INCLUDE_DIRECTORIES( ${LibSolv_INCLUDE_DIRS} ) 9 | # TARGET_LINK_LIBRARIES( ... ${LibSolv_LIBRARIES} ) 10 | # ENDIF() 11 | # 12 | # Variables used by this module need to be set before calling find_package 13 | # (not that they are cmale cased like the modiulemane itself): 14 | # 15 | # LibSolv_USE_STATIC_LIBS Can be set to ON to force the use of the static 16 | # libsolv libraries. Defaults to OFF. 17 | # 18 | # Supported components: 19 | # 20 | # ext Also include libsolvext 21 | # 22 | # Variables provided by this module: 23 | # 24 | # LibSolv_FOUND Include dir, libsolv and all extra libraries 25 | # specified in the COMPONENTS list were found. 26 | # 27 | # LibSolv_LIBRARIES Link to these to use all the libraries you specified. 28 | # 29 | # LibSolv_INCLUDE_DIRS Include directories. 30 | # 31 | # For each component you specify in find_package(), the following (UPPER-CASE) 32 | # variables are set to pick and choose components instead of just using LibSolv_LIBRARIES: 33 | # 34 | # LIBSOLV_FOUND TRUE if libsolv was found 35 | # LIBSOLV_LIBRARY libsolv libraries 36 | # 37 | # LIBSOLV_${COMPONENT}_FOUND TRUE if the library component was found 38 | # LIBSOLV_${COMPONENT}_LIBRARY The libraries for the specified component 39 | # 40 | include(FindPackageHandleStandardArgs) 41 | 42 | # Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES 43 | IF(LibSolv_USE_STATIC_LIBS) 44 | SET( _ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES}) 45 | SET(CMAKE_FIND_LIBRARY_SUFFIXES .a ) 46 | ENDIF() 47 | 48 | # Look for the header files 49 | # UNSET(LibSolv_INCLUDE_DIRS CACHE) 50 | FIND_PATH(LibSolv_INCLUDE_DIRS NAMES solv/solvable.h) 51 | 52 | # Look for the core library 53 | # UNSET(LIBSOLV_LIBRARY CACHE) 54 | FIND_LIBRARY(LIBSOLV_LIBRARY NAMES solv) 55 | FIND_LIBRARY(LIBSOLV_EXT_LIBRARY NAMES solvext) 56 | 
FIND_PACKAGE_HANDLE_STANDARD_ARGS(LibSolv DEFAULT_MSG LIBSOLV_LIBRARY LibSolv_INCLUDE_DIRS) 57 | MARK_AS_ADVANCED( 58 | LIBSOLV_FOUND 59 | LIBSOLV_LIBRARY 60 | LIBSOLV_EXT_LIBRARY 61 | ) 62 | 63 | # Prepare return values and collectiong more components 64 | SET(LibSolv_FOUND ${LIBSOLV_FOUND}) 65 | SET(LibSolv_LIBRARIES ${LIBSOLV_LIBRARY} ${LIBSOLV_EXT_LIBRARY}) 66 | MARK_AS_ADVANCED( 67 | LibSolv_FOUND 68 | LibSolv_LIBRARIES 69 | LibSolv_INCLUDE_DIRS 70 | ) 71 | 72 | # Look for components 73 | FOREACH(COMPONENT ${LibSolv_FIND_COMPONENTS}) 74 | STRING(TOUPPER ${COMPONENT} _UPPERCOMPONENT) 75 | UNSET(LIBSOLV_${_UPPERCOMPONENT}_LIBRARY CACHE) 76 | FIND_LIBRARY(LIBSOLV_${_UPPERCOMPONENT}_LIBRARY NAMES solv${COMPONENT}) 77 | SET(LibSolv_${COMPONENT}_FIND_REQUIRED ${LibSolv_FIND_REQUIRED}) 78 | SET(LibSolv_${COMPONENT}_FIND_QUIETLY ${LibSolv_FIND_QUIETLY}) 79 | FIND_PACKAGE_HANDLE_STANDARD_ARGS(LibSolv_${COMPONENT} DEFAULT_MSG LIBSOLV_${_UPPERCOMPONENT}_LIBRARY) 80 | MARK_AS_ADVANCED( 81 | LIBSOLV_${_UPPERCOMPONENT}_FOUND 82 | LIBSOLV_${_UPPERCOMPONENT}_LIBRARY 83 | ) 84 | IF(LIBSOLV_${_UPPERCOMPONENT}_FOUND) 85 | SET(LibSolv_LIBRARIES ${LibSolv_LIBRARIES} ${LIBSOLV_${_UPPERCOMPONENT}_LIBRARY}) 86 | ELSE() 87 | SET(LibSolv_FOUND FALSE) 88 | ENDIF() 89 | ENDFOREACH() 90 | 91 | # restore CMAKE_FIND_LIBRARY_SUFFIXES 92 | IF(Solv_USE_STATIC_LIBS) 93 | SET(CMAKE_FIND_LIBRARY_SUFFIXES ${_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES} ) 94 | ENDIF() 95 | 96 | IF(LibSolv_FOUND AND NOT LibSolv_FIND_QUIETLY) 97 | MESSAGE(STATUS "Found LibSolv: ${LibSolv_INCLUDE_DIRS} ${LibSolv_LIBRARIES}") 98 | ENDIF() 99 | -------------------------------------------------------------------------------- /include/picomamba/conda_meta.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #ifndef PICOMAMBA_CONDA_META_HPP 3 | #define PICOMAMBA_CONDA_META_HPP 4 | 5 | #include 6 | #include 7 | #include 8 | 9 | 10 | 11 | namespace picomamba { 12 | 13 | template 14 
| void for_each_pkg_meta(const std::string & pkg_prefix, FUNCTOR && functor) 15 | { 16 | 17 | auto conda_meta_path = std::filesystem::path(pkg_prefix) / std::filesystem::path("conda-meta"); 18 | 19 | for (auto &p : std::filesystem::recursive_directory_iterator(conda_meta_path)) 20 | { 21 | if (p.path().extension() == ".json") 22 | { 23 | std::ifstream fs(p.path().string()); 24 | nlohmann::json pkg_meta = nlohmann::json::parse(fs); 25 | functor(pkg_meta); 26 | } 27 | } 28 | } 29 | 30 | 31 | } 32 | 33 | #endif // PICOMAMBA_CONDA_META_HPP -------------------------------------------------------------------------------- /include/picomamba/picomamba.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #ifndef PICOMAMBA_PICOMAMBA_HPP 3 | #define PICOMAMBA_PICOMAMBA_HPP 4 | 5 | #include 6 | #include 7 | 8 | namespace picomamba { 9 | 10 | 11 | } // end namespace picomamba 12 | 13 | 14 | #endif // PICOMAMBA_PICOMAMBA_HPP -------------------------------------------------------------------------------- /include/picomamba/picomamba_config.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #ifndef PICOMAMBA_PICOMAMBA_CONFIG_HPP 3 | #define PICOMAMBA_PICOMAMBA_CONFIG_HPP 4 | 5 | #define PICOMAMBA_VERSION_PATCH 0 6 | #define PICOMAMBA_VERSION_MINOR 4 7 | #define PICOMAMBA_VERSION_MAJOR 0 8 | 9 | namespace picomamba { 10 | 11 | 12 | } // end namespace picomamba 13 | 14 | 15 | #endif // PICOMAMBA_PICOMAMBA_CONFIG_HPP -------------------------------------------------------------------------------- /include/picomamba/picomamba_core.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #ifndef PICOMAMBA_PICOMAMBA_HPP 3 | #define PICOMAMBA_PICOMAMBA_HPP 4 | 5 | #include 6 | #include 7 | 8 | 9 | #include 10 | 11 | #include "picomamba/conda_meta.hpp" 12 | 13 | 14 | extern "C" 15 | { 16 | #include "solv/solver.h" 17 | #include 
"solv/transaction.h" 18 | #include "solv/solverdebug.h" 19 | #include "solv/repo_conda.h" 20 | #include "solv/conda.h" 21 | #include "solv/repo_solv.h" 22 | #include "solv/pool.h" 23 | #include "solv/repo_write.h" 24 | } 25 | 26 | namespace picomamba { 27 | 28 | 29 | class PicoMambaCore 30 | { 31 | public: 32 | 33 | struct SolveConfig{ 34 | bool silent_errors; 35 | bool throw_on_errors; 36 | }; 37 | 38 | PicoMambaCore(); 39 | ~PicoMambaCore(); 40 | Pool* get(); 41 | void load_repodata_from_file(const std::string& fname, const std::string& url); 42 | 43 | // make pool aware of already installed packages 44 | int load_installed(const std::string & pkg_prefix); 45 | 46 | void add_to_installed(const std::string name, const std::string version, const std::string build, const int build_number); 47 | 48 | template 49 | bool solve(SPECS_ITER specs_begin, SPECS_ITER specs_end, const PicoMambaCore::SolveConfig & config,FUNCTOR && ); 50 | 51 | 52 | Pool* m_pool; 53 | Repo* m_repo; 54 | Repodata * m_data_installed; 55 | }; 56 | 57 | PicoMambaCore::PicoMambaCore() 58 | : m_pool(pool_create()), 59 | m_repo(nullptr) 60 | { 61 | } 62 | 63 | PicoMambaCore::~PicoMambaCore() 64 | { 65 | pool_free(m_pool); 66 | } 67 | 68 | 69 | inline void PicoMambaCore::load_repodata_from_file(const std::string& fname, const std::string& name) 70 | { 71 | FILE* f = fopen(fname.c_str(), "r"); 72 | Repo* r = repo_create(m_pool, name.c_str()); 73 | repo_add_conda(r, f, 0); 74 | fclose(f); 75 | } 76 | 77 | template 78 | bool PicoMambaCore::solve( 79 | SPECS_ITER specs_begin, 80 | SPECS_ITER specs_end, 81 | const PicoMambaCore::SolveConfig & config, 82 | FUNCTOR && functor 83 | ) 84 | { 85 | Solver* s = solver_create(m_pool); 86 | 87 | Queue q; 88 | queue_init(&q); 89 | 90 | for (auto iter=specs_begin; iter!=specs_end; ++iter) 91 | { 92 | Id inst_id = pool_conda_matchspec(m_pool, iter->c_str()); 93 | queue_push2(&q, SOLVER_INSTALL | SOLVER_SOLVABLE_PROVIDES, inst_id); 94 | } 95 | 96 | solver_solve(s, &q); 
97 | 98 | const int problem_count = solver_problem_count(s); 99 | const bool success = problem_count == 0; 100 | if (!success) 101 | { 102 | if(!config.silent_errors) 103 | { 104 | Id problem = 0; 105 | int pcnt; 106 | while ((problem = solver_next_problem(s, problem)) != 0) 107 | { 108 | solver_printcompleteprobleminfo(s, problem); 109 | } 110 | } 111 | if(!config.throw_on_errors) 112 | { 113 | throw std::runtime_error("cannot solve problem"); 114 | } 115 | } 116 | else 117 | { 118 | Transaction* transaction = solver_create_transaction(s); 119 | transaction_order(transaction, 0); 120 | //py::dict res = transaction_to_py(transaction); 121 | 122 | functor(transaction); 123 | 124 | transaction_free(transaction); 125 | } 126 | solver_free(s); 127 | queue_free(&q); 128 | return success; 129 | } 130 | 131 | // THIS IS NOT WORKING :/ 132 | // void PicoMambaCore::add_to_installed(const std::string name, const std::string version, const std::string build, const int build_number) 133 | // { 134 | // std::cout<<"add_to_installed name "<name = pool_str2id(m_pool, name.c_str(), 1); 145 | // s->evr = pool_str2id(m_pool, version.c_str(), 1); 146 | 147 | // s->provides = repo_addid_dep(m_repo, s->provides, pool_rel2id(m_pool, s->name, s->evr, REL_EQ, 1), 0); 148 | // repodata_internalize(data); 149 | // // pool_set_installed(m_repo->pool, m_repo); 150 | // } 151 | 152 | int PicoMambaCore::load_installed(const std::string & pkg_prefix) 153 | { 154 | if(m_repo != nullptr) 155 | { 156 | repo_free(m_repo, true); 157 | } 158 | // create new repo for the installed pkgs 159 | m_repo = repo_create(m_pool, "installed"); 160 | m_repo->appdata = this; 161 | int flags = REPO_REUSE_REPODATA; 162 | Repodata* data = repo_add_repodata(m_repo, flags); 163 | 164 | // static Id real_repo_key = pool_str2id(m_pool, "solvable:real_repo_url", 1); 165 | // static Id noarch_repo_key = pool_str2id(m_pool, "solvable:noarch_type", 1); 166 | 167 | auto n_packages = 0; 168 | 
for_each_pkg_meta(pkg_prefix,[&](auto && pkg_meta){ 169 | n_packages += 1; 170 | 171 | 172 | // basic info of the pkg 173 | const auto name = pkg_meta["name"]. template get(); 174 | const auto version = pkg_meta["version"]. template get(); 175 | const auto build = pkg_meta["build"]. template get(); 176 | const auto build_number = pkg_meta["build_number"]. template get(); 177 | 178 | // libsolv handle for that pkg instance 179 | Id handle = repo_add_solvable(m_repo); 180 | Solvable* s = pool_id2solvable(m_pool, handle); 181 | 182 | repodata_set_str(data, handle, SOLVABLE_BUILDVERSION, std::to_string(build_number).c_str()); 183 | repodata_add_poolstr_array(data, handle, SOLVABLE_BUILDFLAVOR, build.c_str()); 184 | 185 | s->name = pool_str2id(m_pool, name.c_str(), 1); 186 | s->evr = pool_str2id(m_pool, version.c_str(), 1); 187 | 188 | s->provides = repo_addid_dep(m_repo, s->provides, pool_rel2id(m_pool, s->name, s->evr, REL_EQ, 1), 0); 189 | 190 | }); 191 | 192 | repodata_internalize(data); 193 | pool_set_installed(m_repo->pool, m_repo); 194 | return n_packages; 195 | } 196 | 197 | 198 | } // end namespace picomamba 199 | 200 | 201 | #endif // PICOMAMBA_PICOMAMBA_HPP -------------------------------------------------------------------------------- /picomamba.pc.in: -------------------------------------------------------------------------------- 1 | prefix=@CMAKE_INSTALL_PREFIX@ 2 | libdir=${prefix}/@CMAKE_INSTALL_LIBDIR@ 3 | includedir=${prefix}/include 4 | 5 | Name: picomamba 6 | Description: picomambas is a conda/mamba flavoured package manager. 
It allows to download conda and emscripten-forge packages to be downloaded from python-wasm 7 | Version: @picomamba_VERSION@ 8 | Cflags: -I${includedir} -------------------------------------------------------------------------------- /picomambaConfig.cmake.in: -------------------------------------------------------------------------------- 1 | set(${PN}_INCLUDE_DIR "${INC_DIRS}") 2 | 3 | 4 | get_filename_component(SELF_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH) 5 | get_filename_component(TOP_DIR "${SELF_DIR}/../../../" ABSOLUTE) 6 | SET(INC_DIRS "${TOP_DIR}/include/") 7 | 8 | 9 | 10 | 11 | 12 | @PACKAGE_INIT@ 13 | if(NOT TARGET @PROJECT_NAME@) 14 | find_package(xtensor REQUIRED) 15 | include("${CMAKE_CURRENT_LIST_DIR}/@PROJECT_NAME@Targets.cmake") 16 | set_target_properties( picomamba PROPERTIES 17 | INTERFACE_INCLUDE_DIRECTORIES ${INC_DIRS} 18 | ) 19 | SET(@PROJECT_NAME@_INCLUDE_DIRS ${INC_DIRS}) 20 | endif() -------------------------------------------------------------------------------- /python/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | #------------------------------------------------------------------------------------------------------------------- 3 | # Setup python bindings 4 | #------------------------------------------------------------------------------------------------------------------- 5 | find_package(pybind11 CONFIG REQUIRED) 6 | 7 | IF(NOT DEFINED PYTHON_MODULE_INSTALL_DIR) 8 | execute_process( 9 | COMMAND "${PYTHON_EXECUTABLE}" -c "from __future__ import print_function; from distutils.sysconfig import get_python_lib; print(get_python_lib())" 10 | OUTPUT_VARIABLE PYTHON_SITE 11 | OUTPUT_STRIP_TRAILING_WHITESPACE) 12 | SET(PYTHON_MODULE_INSTALL_DIR ${PYTHON_SITE}) 13 | 14 | endif() 15 | 16 | SET(PYTHON_MODULE_INSTALL_DIR ${PYTHON_MODULE_INSTALL_DIR} 17 | CACHE PATH "where to install the Python package" FORCE) 18 | 19 | # temporary python module dir 20 | file(MAKE_DIRECTORY 
import pyjs
import tarfile
import io
import json


def set_emscripten_module_locate_file_base_url(base_url):
    """Point Emscripten's ``locateFile`` hook at ``base_url``.

    Emscripten resolves side-module filenames through ``locateFile``;
    after this call every file ``f`` is fetched from ``{base_url}/f``.
    """
    pyjs.js.Function(
        f"""
        globalThis.EmscriptenForgeModule['locateFile'] = function(f){{
            let path = "{base_url}/" + f;
            return path
        }}
        """
    )()


def make_js_func(py_func):
    """Wrap a Python callable as a JavaScript function.

    Returns ``(js_func, handle)``; the caller must eventually call
    ``handle.delete()`` to release the pyjs-side reference.
    """
    jspy = pyjs.JsValue(py_func)
    f = jspy.py_call.bind(jspy)
    return f, jspy


def make_js_array(arr):
    """Copy the items of a Python iterable into a new JavaScript Array."""
    js_arr = pyjs.js_array()
    for item in arr:
        js_arr.push(item)
    return js_arr


# fetch several URLs concurrently, resolving to an array of ArrayBuffers
_parallel_fetch_array_buffer = pyjs.js.Function(
    """
    return async function(urls){
        let promises = urls.map(url => fetch(url).then(response => response.arrayBuffer()));
        return await Promise.all(promises);
    }
    """
)()

# same as above, but streams each response body so a progress callback
# (received, total, n_finished, n_urls) can be invoked while downloading
_parallel_fetch_arraybuffers_with_progress_bar = pyjs.js.Function(
    """
    return async function(urls, callback){

        if(callback===undefined || callback===null)
        {
            let futures = urls.map((url) => {
                return fetch(url).then(response => response.arrayBuffer());
            })
            return await Promise.all(futures);
        }

        async function fetch_arraybuffer_with_progress_bar(url,index, report_total_length,report_progress, report_finished){
            let response = await fetch(url);
            const reader = response.body.getReader();

            // Step 2: get total length
            const contentLength = +response.headers.get('Content-Length');

            report_total_length(index, contentLength);

            // Step 3: read the data
            let receivedLength = 0; // received that many bytes at the moment
            let chunks = []; // array of received binary chunks (comprises the body)
            while(true) {
                const {done, value} = await reader.read();

                if (done) {
                    report_finished(index);
                    break;
                }
                chunks.push(value);
                receivedLength += value.length;

                report_progress(index, receivedLength);
            }

            // Step 4: concatenate chunks into single Uint8Array
            let chunksAll = new Uint8Array(receivedLength); // (4.1)
            let position = 0;
            for(let chunk of chunks) {
                chunksAll.set(chunk, position); // (4.2)
                position += chunk.length;
            }

            return chunksAll
        }

        let n_urls = urls.length;
        let receivedArr = Array(n_urls).fill(0);
        let totalArr = Array(n_urls).fill(0);
        let finishedArr = Array(n_urls).fill(0);

        function on_progress(){
            let total = totalArr.reduce((partialSum, a) => partialSum + a, 0);
            let recived = receivedArr.reduce((partialSum, a) => partialSum + a, 0);
            let n_finished = finishedArr.reduce((partialSum, a) => partialSum + a, 0);

            if(callback !== undefined){
                callback(recived,total,n_finished, n_urls);
            }
        }

        function report_finished(index){
            finishedArr[index] = 1;
            on_progress();
        }

        function report_total_length(index, total){
            totalArr[index] = total;
            on_progress();
        }
        function report_progress(index, p){
            receivedArr[index] = p;
            on_progress();
        }

        let futures = urls.map((url, index) => {
            return fetch_arraybuffer_with_progress_bar(url,index, report_total_length,report_progress, report_finished)
        })
        return await Promise.all(futures);
    }
    """
)()


async def parallel_fetch_bytes(urls, callback=None):
    """Fetch ``urls`` concurrently and return a list of ``bytes`` payloads.

    When ``callback`` is given it is invoked as
    ``callback(received, total, n_finished, n_urls)`` during the download.
    """
    js_urls = make_js_array(urls)
    if callback is None:
        return pyjs.to_py(await _parallel_fetch_array_buffer(js_urls))
    j_callback, handle = make_js_func(callback)
    try:
        # FIX: release the pyjs handle even when a fetch fails (the old
        # code skipped handle.delete() on exceptions and leaked it)
        return pyjs.to_py(
            await _parallel_fetch_arraybuffers_with_progress_bar(js_urls, j_callback)
        )
    finally:
        handle.delete()


async def parallel_fetch_tarfiles(urls, callback=None):
    """Fetch ``urls`` and open each payload as a ``tar.bz2`` archive."""
    arrays = await parallel_fetch_bytes(urls, callback)
    return [tarfile.open(fileobj=io.BytesIO(array), mode="r:bz2") for array in arrays]


async def parallel_fetch_jsons(urls, callback=None):
    """Fetch ``urls`` and parse each payload as JSON."""
    arrays = await parallel_fetch_bytes(urls, callback)
    return [json.load(io.BytesIO(array)) for array in arrays]


async def parallel_imports(urls):
    """Import several JavaScript files concurrently.

    FIX: dropped the unused ``make_js_array(urls)`` call the old code
    performed before building the promise list.
    """
    promises = [pyjs.async_import_javascript(url) for url in urls]
    await pyjs.js.Promise.all(make_js_array(promises))
from pathlib import Path
import json


def ensure_is_conda_env(env_prefix):
    """Raise ``RuntimeError`` unless ``env_prefix`` is a conda environment.

    A conda environment is recognized by a ``conda-meta`` directory
    directly below the prefix.
    """
    if not (Path(env_prefix) / "conda-meta").is_dir():
        raise RuntimeError(f"{env_prefix} is not a conda environment")


def create_conda_meta(package, env_prefix):
    """Write the ``conda-meta`` JSON record for an installed ``package``.

    ``package`` must expose ``name``/``version``/``build``/``build_number``
    attributes and a ``filename(extension=...)`` method.  ``env_prefix``
    may be a ``Path`` or a plain string.
    """
    pkg_conda_meta = dict(
        name=package.name,
        version=package.version,
        build=package.build,
        build_number=package.build_number,
    )

    # FIX: coerce to Path so string prefixes work too -- the old code used
    # the `/` operator on env_prefix directly and crashed on a plain str.
    meta_path = Path(env_prefix) / "conda-meta" / package.filename(extension="json")
    with open(meta_path, "w") as f:
        json.dump(pkg_conda_meta, f)
from ._picomamba import _PicoMambaCore, PicoMambaCoreSolveConfig

from .browser import (
    set_emscripten_module_locate_file_base_url,
    make_js_array,
    parallel_fetch_tarfiles,
    parallel_fetch_jsons,
    parallel_imports,
    make_js_func,
)
from .logging import logger, logged
from .tar import untar_to_temporary, untar_repodata
from .transaction import Transaction
from .conda_env import (
    ensure_is_conda_env,
    create_conda_meta,
)

import logging
import json
from pathlib import Path
import pyjs
import shutil
import sysconfig
from itertools import chain
from functools import partial
from contextlib import contextmanager


class PicoMamba(_PicoMambaCore):
    """A tiny mamba-like package manager that runs inside the browser.

    Environments are solved with libsolv via ``_PicoMambaCore``; "arch"
    packages are installed as emscripten side modules, "noarch" packages
    by unpacking their ``site-packages`` payload into ``side_path``.
    """

    def __init__(
        self,
        env_prefix,
        noarch_template,
        arch_root_url,
        side_path=None,
        progress_callback=None,
        use_indexded_db_cache=False,
    ):
        """
        Args:
            env_prefix: prefix of the conda environment to manage.
            noarch_template: URL template for noarch packages with
                ``{name}``/``{version}``/``{build}`` placeholders.
            arch_root_url: base URL for arch packages / side modules.
            side_path: target for noarch ``site-packages`` payloads;
                defaults to ``sysconfig.get_path("purelib")``.
            progress_callback: ``callback(kind, downloaded, total)`` with
                kind in ``{"repodata", "packages"}``.
            use_indexded_db_cache: persist repodata in IndexedDB
                (kwarg name kept as-is, typo included, for backwards
                compatibility with existing callers).
        """
        super(PicoMamba, self).__init__()

        set_emscripten_module_locate_file_base_url(arch_root_url)

        self.progress_callback = progress_callback
        self.noarch_template = noarch_template
        self.arch_root_url = arch_root_url
        # where is the environment located
        self._env_prefix = Path(env_prefix)

        if side_path is None:
            self.side_path = Path(sysconfig.get_path("purelib"))
        else:
            self.side_path = Path(side_path)

        # where downloaded repo-data will be stored
        self.indexeddb_mount_path = "/indexeddb"
        self._repodata_dir = Path(self.indexeddb_mount_path)

        # for progress bar
        self._progress = dict()
        self.use_indexded_db_cache = use_indexded_db_cache
        if self.use_indexded_db_cache:
            self._setup_indexeddb()

    async def initialize(self):
        # fetch from indexed db and store to filesystem
        await self.syncfs(polulate=True)

    def _setup_indexeddb(self):
        """Create the IndexedDB mount point and mount IDBFS on it."""
        if self.use_indexded_db_cache:
            pyjs.js.Function(
                "indexeddb_mount_path",
                """
                globalThis.pyjs.FS.mkdir(indexeddb_mount_path)
                globalThis.pyjs.FS.mkdir(indexeddb_mount_path + "/arch")
                globalThis.pyjs.FS.mkdir(indexeddb_mount_path + "/noarch")
                globalThis.pyjs.FS.mount(globalThis.pyjs.IDBFS, {}, indexeddb_mount_path)
                """,
            )(str(self.indexeddb_mount_path))

    def _register_installed_packages(self):
        """Tell libsolv about packages already present in the env."""
        # ensure that this is really a path to an env
        ensure_is_conda_env(self._env_prefix)
        with logged("load installed packages"):
            self._load_installed(str(self._env_prefix))

    def _load_repodata(self, repodata, filename, url):
        """Persist ``repodata`` (bytes or a dict) to ``filename`` and load it."""
        if isinstance(repodata, bytes):
            with open(filename, "wb") as f:
                f.write(repodata)
        else:
            with open(filename, "w") as f:
                json.dump(repodata, f)

        self._load_repodata_from_file(filename, url)

    async def syncfs(self, polulate):
        """Sync the IndexedDB-backed filesystem.

        ``polulate=True`` reads IndexedDB into the in-memory FS,
        ``polulate=False`` writes the FS back to IndexedDB.  No-op unless
        the IndexedDB cache is enabled.  (Parameter name kept, typo
        included, for backwards compatibility.)
        """
        if self.use_indexded_db_cache:
            await pyjs.js.Function(
                "polulate",
                """
                return new Promise((resolve, reject) => {
                    globalThis.pyjs.FS.syncfs(polulate, function (err) {
                        if(err === null){
                            resolve();
                        }
                        else{
                            reject(err);
                        }
                    })
                });
                """,
            )(bool(polulate))

    async def fetch_repodata(self, arch_url, noarch_url):
        """Download (or reuse cached) repodata and load it into libsolv."""
        repo_data_urls = {"arch": arch_url, "noarch": noarch_url}

        # skip repos whose repodata is already cached on the filesystem
        for repo_name in list(repo_data_urls.keys()):
            repodata_dir = self._repodata_dir / repo_name
            if repodata_dir.is_dir():
                n = sum(1 for _ in repodata_dir.iterdir())
                if n == 1:
                    logger.info(f"{repo_name}-repodata exists! Skipping download")
                    del repo_data_urls[repo_name]

        if repo_data_urls:
            with logged("download_repodata"):
                # download tarfiles which contain repodata as compressed json.
                # FIX: the old code first built a
                # partial(self.progress_callback, "repodata") callback and
                # then unconditionally overwrote it; the dead assignment is
                # gone and only on_repodata_progress is used.
                callback = (
                    self.on_repodata_progress
                    if self.progress_callback is not None
                    else None
                )
                tars = await parallel_fetch_tarfiles(
                    list(repo_data_urls.values()), callback
                )

            with logged("untar repodata"):
                # untar and load repodata into libsolv
                for tar, repo_name in zip(tars, repo_data_urls.keys()):
                    json_path = untar_repodata(tar, path=self._repodata_dir / repo_name)
                    self._load_repodata_from_file(str(json_path), repo_name)

            # persist the freshly downloaded repodata to IndexedDB
            await self.syncfs(polulate=False)

    def solve(self, specs, dry_run=False, pin_installed=False):
        """Solve ``specs`` against the loaded repodata.

        ``dry_run`` and ``pin_installed`` are currently unused -- kept in
        the signature for forward/backward compatibility (TODO: wire them
        into the solver config).
        """
        # make libsolv aware of pkgs already present in the environment
        self._register_installed_packages()

        with logged("solve"):
            config = PicoMambaCoreSolveConfig()
            raw_transaction = self._solve(specs, config)
            return Transaction(raw_transaction=raw_transaction)

    # this is triggered from on_arch_progress and on_noarch_progress
    # and adds up both for a total progress bar
    def on_progress(self):
        downloaded = (
            self._progress["arch_downloaded"] + self._progress["noarch_downloaded"]
        )
        total = self._progress["arch_total"] + self._progress["noarch_total"]
        if self.progress_callback is not None:
            self.progress_callback("packages", downloaded, total)

    # this is called when we make progress on downloading arch packages
    def on_arch_progress(self, downloaded, total):
        self._progress["arch_downloaded"] = downloaded
        self._progress["arch_total"] = total
        self.on_progress()

    # this is called when we make progress on downloading noarch packages
    def on_noarch_progress(self, downloaded, total, finished_items, n_items):
        self._progress["noarch_downloaded"] = downloaded
        self._progress["noarch_total"] = total
        self.on_progress()

    # this is called when we make progress on downloading the repodata
    def on_repodata_progress(self, downloaded, total, finished_items, n_items):
        if self.progress_callback is not None:
            self.progress_callback("repodata", downloaded, total)

    @contextmanager
    def _progress_reporting(self):
        """Temporarily hook empack's status callback onto our progress bar."""
        offset = [None]

        # we need to do a tiny hack and subtract the first reported
        # "downloaded" value, since it accounts for what was already
        # downloaded in previous runs
        def progress_callback(name, pkg_name, downloaded, total):
            if offset[0] is None:
                offset[0] = downloaded
            downloaded -= offset[0]
            total -= offset[0]
            self.on_arch_progress(downloaded, total)

        js_on_progress, js_on_progress_handle = make_js_func(progress_callback)
        pyjs.js.globalThis.EmscriptenForgeModule.empackSetStatus = js_on_progress
        try:
            yield
        finally:
            # FIX: release the pyjs handle even when the body raises (the
            # old code leaked it on exceptions)
            js_on_progress_handle.delete()

    async def install_transaction(self, transaction):
        """Install all packages of a solved ``Transaction``."""
        self._progress = dict(
            arch_downloaded=0, arch_total=0, noarch_downloaded=0, noarch_total=0
        )

        with self._progress_reporting():
            logger.info("install transaction")

            # arch: kick off the side-module imports concurrently
            arch_promise = self.install_arch_packages(transaction.install["arch"])

            # noarch: download then unpack
            install_noarch = transaction.install["noarch"]
            tarfiles = await self.download_noarch_packages(install_noarch)
            self.install_noarch_packages(install_noarch, tarfiles)

            await arch_promise
            await self.wait_for_emscripten()

    def install_noarch_package(self, package, tar):
        """Unpack one noarch package and record it in conda-meta."""
        with logged(f"install noarch {package}"):
            with untar_to_temporary(tar) as temp_dir:
                create_conda_meta(package=package, env_prefix=self._env_prefix)

                # copy site-packages payload into the side path
                if (temp_dir / "site-packages").is_dir():
                    shutil.copytree(
                        str(temp_dir / "site-packages"),
                        self.side_path,
                        dirs_exist_ok=True,
                    )

    async def install_arch_packages(self, packages):
        """Fetch the JS import lists for ``packages`` and import them all."""
        logger.info(f"install-arch {packages}")

        json_urls = [f"{self.arch_root_url}/{pkg.filename()}.json" for pkg in packages]

        urls = await parallel_fetch_jsons(json_urls)

        # each json file holds a list of relative script urls
        urls = [f"{self.arch_root_url}/{url}" for sublist in urls for url in sublist]

        await parallel_imports(urls)

    def install_noarch_packages(self, packages, tarfiles):
        """Install each (package, tarfile) pair and close the archives."""
        for package, tar in zip(packages, tarfiles):
            self.install_noarch_package(package=package, tar=tar)
            tar.close()

    async def download_noarch_packages(self, packages):
        """Download the tar.bz2 archives for all noarch ``packages``."""
        logger.info(f"install-noarch {packages}")
        urls = [pkg.format_url(self.noarch_template) for pkg in packages]
        callback = (
            self.on_noarch_progress if self.progress_callback is not None else None
        )
        return await parallel_fetch_tarfiles(urls, callback)

    async def wait_for_emscripten(self):
        # wait until emscripten has finished loading all side modules
        await pyjs._module._wait_run_dependencies()
class PkgSpec(object):
    """A single concrete package: name, version, build string and number."""

    def __init__(self, name, version, build, build_number):
        self.name = name
        self.version = version
        self.build = build
        self.build_number = build_number

    def __repr__(self):
        # conda-style "name=version=build" spec string
        return f"{self.name}={self.version}={self.build}"

    def filename(self, extension="tar.bz2"):
        """Archive filename, e.g. ``numpy-1.24.0-py_0.tar.bz2``."""
        stem = f"{self.name}-{self.version}-{self.build}"
        return f"{stem}.{extension}"

    def format_url(self, url_template):
        """Fill the ``{name}``/``{version}``/``{build}`` placeholders."""
        fields = dict(name=self.name, version=self.version, build=self.build)
        return url_template.format(**fields)
from .pkg_spec import PkgSpec


class Transaction(object):
    """Groups the install entries of a raw solver transaction by repo."""

    def __init__(self, raw_transaction):
        # one bucket per known repository kind
        self.install = dict(arch=[], noarch=[], installed=[], mocked=[])
        for entry in raw_transaction["install"]:
            name, version, build, build_number, repo_name = entry
            spec = PkgSpec(
                name=name, version=version, build=build, build_number=build_number
            )
            self.install[repo_name].append(spec)
DESTINATION ${PYTHON_MODULE_INSTALL_DIR}/${PY_MOD_NAME}/) -------------------------------------------------------------------------------- /python/src/def_build_config.cpp: -------------------------------------------------------------------------------- 1 | #include "pybind11/pybind11.h" 2 | #include "pybind11/numpy.h" 3 | 4 | #include 5 | #include 6 | 7 | 8 | // our headers 9 | #include "picomamba/picomamba.hpp" 10 | #include "picomamba/picomamba_config.hpp" 11 | 12 | 13 | namespace py = pybind11; 14 | 15 | 16 | 17 | namespace picomamba { 18 | 19 | void def_build_config(py::module & m) 20 | { 21 | 22 | struct BuildConfiguration { 23 | 24 | }; 25 | 26 | 27 | py::class_(m, "BuildConfiguration", 28 | "This class show the compile/build configuration\n" 29 | "Of picomamba\n" 30 | ) 31 | .def_property_readonly_static("VERSION_MAJOR", [](py::object /* self */) { 32 | return PICOMAMBA_VERSION_MAJOR ; 33 | }) 34 | .def_property_readonly_static("VERSION_MINOR", [](py::object /* self */) { 35 | return PICOMAMBA_VERSION_MINOR ; 36 | }) 37 | .def_property_readonly_static("VERSION_PATCH", [](py::object /* self */) { 38 | return PICOMAMBA_VERSION_MAJOR ; 39 | }) 40 | .def_property_readonly_static("DEBUG", [](py::object /* self */) { 41 | #ifdef NDEBUG 42 | return false; 43 | #else 44 | return true; 45 | #endif 46 | }) 47 | 48 | ; 49 | 50 | 51 | 52 | } 53 | 54 | } 55 | -------------------------------------------------------------------------------- /python/src/def_picomamba_core.cpp: -------------------------------------------------------------------------------- 1 | #include "pybind11/pybind11.h" 2 | #include "pybind11/numpy.h" 3 | #include "pybind11/stl.h" 4 | 5 | #include 6 | #include 7 | 8 | 9 | // our headers 10 | #include "picomamba/picomamba_core.hpp" 11 | 12 | namespace py = pybind11; 13 | 14 | 15 | 16 | namespace picomamba { 17 | 18 | 19 | inline py::dict transaction_to_py(Transaction* transaction) 20 | { 21 | Pool* pool = transaction->pool; 22 | py::dict res; 23 | 24 | 
py::list remove_list; 25 | py::list install_list; 26 | py::list ignore_list; 27 | 28 | res["remove"] = remove_list; 29 | res["install"] = install_list; 30 | res["ignore"] = ignore_list; 31 | 32 | auto as_tuple = [&pool](Solvable* s) 33 | { 34 | const char* name = pool_id2str(pool, s->name); 35 | const char* evr = pool_id2str(pool, s->evr); 36 | const char* build_string; 37 | build_string = solvable_lookup_str(s, SOLVABLE_BUILDFLAVOR); 38 | int build_number = std::stoi(solvable_lookup_str(s, SOLVABLE_BUILDVERSION)); 39 | 40 | return std::make_tuple(name, evr, build_string, build_number, s->repo->name); 41 | }; 42 | 43 | for (int i = 0; i < transaction->steps.count; i++) 44 | { 45 | Id p = transaction->steps.elements[i]; 46 | Id ttype = transaction_type(transaction, p, SOLVER_TRANSACTION_SHOW_ALL); 47 | Solvable* s = pool_id2solvable(transaction->pool, p); 48 | Id i2; 49 | Solvable* s2; 50 | switch (ttype) 51 | { 52 | case SOLVER_TRANSACTION_DOWNGRADED: 53 | case SOLVER_TRANSACTION_UPGRADED: 54 | case SOLVER_TRANSACTION_CHANGED: 55 | case SOLVER_TRANSACTION_REINSTALLED: 56 | { 57 | // std::cout << "Removing " << as_tuple(s) << std::endl; 58 | 59 | remove_list.append(as_tuple(s)); 60 | s2 = transaction->pool->solvables + transaction_obs_pkg(transaction, p); 61 | s2 = pool_id2solvable(pool, i2); 62 | install_list.append(as_tuple(s)); 63 | break; 64 | } 65 | case SOLVER_TRANSACTION_ERASE: 66 | { 67 | // std::cout << "Removing " << as_tuple(s) << std::endl; 68 | remove_list.append(as_tuple(s)); 69 | break; 70 | } 71 | case SOLVER_TRANSACTION_INSTALL: 72 | { 73 | // std::cout << "Installing " << as_tuple(s) << std::endl; 74 | install_list.append(as_tuple(s)); 75 | break; 76 | } 77 | case SOLVER_TRANSACTION_IGNORE: 78 | ignore_list.append(as_tuple(s)); 79 | break; 80 | default: 81 | std::cout << "Some weird case not handled" << std::endl; 82 | break; 83 | } 84 | } 85 | return res; 86 | } 87 | 88 | 89 | 90 | void def_pikomamba_core(py::module & m) 91 | { 92 | 93 | 
py::class_(m, "PicoMambaCoreSolveConfig") 94 | .def(py::init<>()) 95 | ; 96 | 97 | py::class_(m, "_PicoMambaCore") 98 | .def(py::init<>()) 99 | .def("_load_repodata_from_file", &PicoMambaCore::load_repodata_from_file, py::arg("path"),py::arg("name")) 100 | .def("_load_installed", &PicoMambaCore::load_installed, py::arg("prefix")) 101 | // .def("_add_to_installed", &PicoMambaCore::add_to_installed) 102 | .def("_solve",[](PicoMambaCore & self, const std::vector& match_specs, const PicoMambaCore::SolveConfig & config) 103 | { 104 | 105 | py::dict res; 106 | self.solve( 107 | match_specs.begin(), 108 | match_specs.end(), 109 | config, 110 | [&](Transaction * transaction){ 111 | res = transaction_to_py(transaction); 112 | } 113 | ); 114 | return res; 115 | }) 116 | ; 117 | } 118 | 119 | } 120 | -------------------------------------------------------------------------------- /python/src/main.cpp: -------------------------------------------------------------------------------- 1 | #include "pybind11/pybind11.h" 2 | 3 | 4 | 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | 11 | // our headers 12 | #include "picomamba/picomamba.hpp" 13 | #include "picomamba/picomamba_config.hpp" 14 | 15 | namespace py = pybind11; 16 | 17 | 18 | 19 | namespace picomamba { 20 | 21 | 22 | // implementation in def_myclass.cpp 23 | void def_pikomamba_core(py::module & m); 24 | 25 | // implementation in def.cpp 26 | void def_build_config(py::module & m); 27 | 28 | } 29 | 30 | 31 | // Python Module and Docstrings 32 | PYBIND11_MODULE(_picomamba , module) 33 | { 34 | module.doc() = R"pbdoc( 35 | _picomamba python bindings 36 | 37 | .. currentmodule:: _picomamba 38 | 39 | .. autosummary:: 40 | :toctree: _generate 41 | 42 | BuildConfiguration 43 | PikoMambaCore 44 | )pbdoc"; 45 | 46 | picomamba::def_build_config(module); 47 | picomamba::def_pikomamba_core(module); 48 | 49 | // make version string 50 | std::stringstream ss; 51 | ss<