├── .flake8 ├── .github └── FUNDING.yml ├── .gitignore ├── .isort.cfg ├── .pre-commit-config.yaml ├── LICENSE ├── MANIFEST.in ├── README.md ├── setup.cfg ├── setup.py └── vinca ├── __init__.py ├── azure_templates ├── unix.sh ├── win_build.bat └── win_preconfig.bat ├── config.py ├── distro.py ├── generate_azure.py ├── generate_gha.py ├── generate_gitlab.py ├── main.py ├── migrate.py ├── resolve.py ├── snapshot.py ├── template.py ├── templates ├── activate.bat.in ├── activate.ps1.in ├── activate.sh.in ├── bld_ament_cmake.bat.in ├── bld_ament_python.bat.in ├── bld_catkin.bat.in ├── bld_catkin_merge.bat.in ├── bld_colcon_merge.bat.in ├── build_ament_cmake.sh.in ├── build_ament_python.sh.in ├── build_catkin.sh.in ├── deactivate.bat.in ├── deactivate.ps1.in └── deactivate.sh.in └── utils.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length=88 3 | extend-ignore=E203,D104,D100,I004,E501 4 | exclude=tests/data/* 5 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: RoboStack 2 | open_collective: robostack 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | pip-wheel-metadata/ 25 | share/python-wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, 
so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .nox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | *.py,cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | cover/ 55 | 56 | # Translations 57 | *.mo 58 | *.pot 59 | 60 | # Django stuff: 61 | *.log 62 | local_settings.py 63 | db.sqlite3 64 | db.sqlite3-journal 65 | 66 | # Flask stuff: 67 | instance/ 68 | .webassets-cache 69 | 70 | # Scrapy stuff: 71 | .scrapy 72 | 73 | # Sphinx documentation 74 | docs/_build/ 75 | 76 | # PyBuilder 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | # For a library or package, you might want to ignore these files since the code is 88 | # intended to run in multiple environments; otherwise, check them in: 89 | # .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 99 | __pypackages__/ 100 | 101 | # Celery stuff 102 | celerybeat-schedule 103 | celerybeat.pid 104 | 105 | # SageMath parsed files 106 | *.sage.py 107 | 108 | # Environments 109 | .env 110 | .venv 111 | env/ 112 | venv/ 113 | ENV/ 114 | env.bak/ 115 | venv.bak/ 116 | 117 | # Spyder project settings 118 | .spyderproject 119 | .spyproject 120 | 121 | # Rope project settings 122 | .ropeproject 123 | 124 | # mkdocs documentation 125 | /site 126 | 127 | # mypy 128 | .mypy_cache/ 129 | .dmypy.json 130 | dmypy.json 131 | 132 | # Pyre type checker 133 | .pyre/ 134 | 135 | # pytype static type analyzer 136 | .pytype/ 137 | 138 | # Visual Studio Code 139 | .vscode/ 140 | 141 | # Generated examples 142 | # examples/ 143 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | line_length=1000 3 | known_third_party=requests,ruamel,yaml,pytest,rapidfuzz,opensource,colorama,progressbar,progressbar2 4 | multi_line_output=3 5 | include_trailing_comma=True 6 | force_grid_wrap=0 7 | use_parentheses=True 8 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 24.3.0 4 | hooks: 5 | - id: black 6 | args: [--safe, --quiet] 7 | - repo: https://github.com/asottile/blacken-docs 8 | rev: 1.16.0 9 | hooks: 10 | - id: blacken-docs 11 | additional_dependencies: [black] 12 | - repo: https://github.com/pre-commit/pre-commit-hooks 13 | rev: v4.6.0 14 | hooks: 15 | - id: trailing-whitespace 16 | exclude: ^examples/ 17 | - id: end-of-file-fixer 18 | exclude: ^examples/ 19 | # - repo: https://github.com/pre-commit/mirrors-isort 20 | # rev: v5.2.2 21 | # hooks: 22 | # - id: isort 23 | # exclude: tests/data 24 | # - repo: 
https://github.com/PyCQA/flake8 25 | # rev: 7.0.0 26 | # hooks: 27 | # - id: flake8 28 | # exclude: tests/data 29 | # language_version: python3 30 | # additional_dependencies: 31 | # - flake8-typing-imports==1.9.0 32 | # - flake8-builtins==1.5.3 33 | # - flake8-bugbear==20.1.4 34 | # # - flake8-isort==3.0.1 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Sean Yen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include vinca/templates * 2 | recursive-include vinca/azure_templates * -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # vinca 2 | 3 | rattler-build recipe (i.e. conda recipe v1) generator for ROS packages 4 | 5 | **WARNING**: 6 | This project is actively maintained and can frequently change based on the needs of the RoboStack project. 7 | 8 | ## Concept 9 | 10 | The tool generates `conda` rattler-build recipes to capture all the selected ROS packages. 11 | 12 | ## Example 13 | 14 | The repo contains a `vinca` tool that reads a `vinca.yaml` file that contains all its metadata. 15 | 16 | For an up-to-date example of how to write a `vinca.yaml`, check the repos of the maintained RoboStack distros: 17 | * https://github.com/RoboStack/ros-noetic/ 18 | * https://github.com/RoboStack/ros-humble 19 | * https://github.com/RoboStack/ros-jazzy/ -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = vinca 3 | version = attr: vinca.__version__ 4 | url = https://github.com/RoboStack/vinca 5 | project_urls = 6 | Changelog = https://github.com/RoboStack/vinca/milestones?direction=desc&sort=due_date&state=closed 7 | GitHub = https://github.com/RoboStack/vinca 8 | author = Sean Yen 9 | author_email = seanyen@microsoft.com 10 | maintainer = Sean Yen 11 | maintainer_email = seanyen@microsoft.com 12 | classifiers = 13 | Development Status :: 3 - Alpha 14 | Intended Audience :: Developers 15 | License :: OSI Approved :: MIT License 16 | Programming Language :: Python 17 | Operating System :: OS Independent 18 | Topic 
:: Software Development :: Build Tools 19 | license = MIT 20 | description = Conda recipe generator for ROS packages. 21 | long_description = file: README.md 22 | keywords = ros 23 | 24 | [options] 25 | python_requires = >=3.6 26 | setup_requires = setuptools 27 | install_requires = 28 | catkin_pkg >=0.4.16 29 | ruamel.yaml >=0.16.6,<0.18.0 30 | rosdistro >=0.8.0 31 | empy >=3.3.4,<4.0.0 32 | requests >=2.24.0 33 | networkx >=2.5 34 | rich >=10 35 | packages = find: 36 | zip_safe = false 37 | 38 | [options.entry_points] 39 | console_scripts = 40 | vinca = vinca.main:main 41 | vinca-glab = vinca.generate_gitlab:main 42 | vinca-gha = vinca.generate_gha:main 43 | vinca-azure = vinca.generate_azure:main 44 | vinca-migrate = vinca.migrate:main 45 | vinca-snapshot = vinca.snapshot:main 46 | 47 | [flake8] 48 | import-order-style = google 49 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | 3 | args = { 4 | "include_package_data": True, 5 | "packages": find_packages(), 6 | "package_data": {"vinca": ["templates/*", "azure_templates/*"]}, 7 | } 8 | 9 | setup(**args) 10 | -------------------------------------------------------------------------------- /vinca/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.1.0" 2 | -------------------------------------------------------------------------------- /vinca/azure_templates/unix.sh: -------------------------------------------------------------------------------- 1 | export CI=azure 2 | export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME 3 | export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) 4 | .scripts/build_unix.sh --target $BUILD_TARGET 5 | -------------------------------------------------------------------------------- /vinca/azure_templates/win_build.bat: 
-------------------------------------------------------------------------------- 1 | setlocal EnableExtensions EnableDelayedExpansion 2 | call activate base 3 | 4 | set "FEEDSTOCK_ROOT=%cd%" 5 | 6 | call conda config --add channels conda-forge 7 | call conda config --add channels robostack-staging 8 | call conda config --set channel_priority strict 9 | 10 | :: Enable long path names on Windows 11 | reg add HKLM\SYSTEM\CurrentControlSet\Control\FileSystem /v LongPathsEnabled /t REG_DWORD /d 1 /f 12 | 13 | :: conda remove --force m2-git 14 | 15 | for %%X in (%CURRENT_RECIPES%) do ( 16 | echo "BUILDING RECIPE %%X" 17 | cd %FEEDSTOCK_ROOT%\\recipes\\%%X\\ 18 | copy %FEEDSTOCK_ROOT%\\conda_build_config.yaml .\\conda_build_config.yaml 19 | boa build . 20 | if errorlevel 1 exit 1 21 | ) 22 | 23 | anaconda -t %ANACONDA_API_TOKEN% upload "C:\\bld\\win-64\\*.tar.bz2" --force 24 | if errorlevel 1 exit 1 25 | -------------------------------------------------------------------------------- /vinca/azure_templates/win_preconfig.bat: -------------------------------------------------------------------------------- 1 | set "CI=true" 2 | 3 | :: 4 cores available on GHA: https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners 4 | :: CPU_COUNT is passed through conda build: https://github.com/conda/conda-build/pull/1149 5 | set CPU_COUNT=4 6 | 7 | set PYTHONUNBUFFERED=1 8 | 9 | call setup_x64 10 | 11 | :: Set the conda-build working directory to a smaller path 12 | if "%CONDA_BLD_PATH%" == "" ( 13 | set "CONDA_BLD_PATH=C:\\bld\\" 14 | ) 15 | 16 | :: On azure, there are libcrypto*.dll & libssl*.dll under 17 | :: C:\\Windows\\System32, which should not be there (no vendor dlls in windows folder). 18 | :: They would be found before the openssl libs of the conda environment, so we delete them. 
19 | if defined CI ( 20 | DEL C:\\Windows\\System32\\libcrypto-1_1-x64.dll || (Echo Ignoring failure to delete C:\\Windows\\System32\\libcrypto-1_1-x64.dll) 21 | DEL C:\\Windows\\System32\\libssl-1_1-x64.dll || (Echo Ignoring failure to delete C:\\Windows\\System32\\libssl-1_1-x64.dll) 22 | ) 23 | 24 | :: Make paths like C:\\hostedtoolcache\\windows\\Ruby\\2.5.7\\x64\\bin garbage 25 | set "PATH=%PATH:ostedtoolcache=%" 26 | -------------------------------------------------------------------------------- /vinca/config.py: -------------------------------------------------------------------------------- 1 | selected_platform = None 2 | ros_distro = None 3 | skip_testing = None 4 | parsed_args = None 5 | -------------------------------------------------------------------------------- /vinca/distro.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from rosdistro import get_cached_distribution, get_index, get_index_url 4 | from rosdistro.dependency_walker import DependencyWalker 5 | from rosdistro.manifest_provider import get_release_tag 6 | 7 | 8 | class Distro(object): 9 | def __init__(self, distro_name, python_version=None, snapshot=None): 10 | index = get_index(get_index_url()) 11 | self._distro = get_cached_distribution(index, distro_name) 12 | self.distro_name = distro_name 13 | self.snapshot = snapshot 14 | # set up ROS environments 15 | if python_version is None: 16 | python_version = index.distributions[distro_name]["python_version"] 17 | os.environ["ROS_PYTHON_VERSION"] = "{0}".format(python_version) 18 | os.environ["ROS_DISTRO"] = "{0}".format(distro_name) 19 | if "ROS_ROOT" in os.environ: 20 | os.environ.pop("ROS_ROOT") 21 | if "ROS_PACKAGE_PATH" in os.environ: 22 | os.environ.pop("ROS_PACKAGE_PATH") 23 | self._walker = DependencyWalker( 24 | self._distro, evaluate_condition_context=os.environ 25 | ) 26 | 27 | # cache distribution type 28 | self._distribution_type = 
index.distributions[distro_name]["distribution_type"] 29 | self._python_version = index.distributions[distro_name]["python_version"] 30 | self.build_packages = set() 31 | 32 | os.environ["ROS_VERSION"] = "1" if self.check_ros1() else "2" 33 | 34 | @property 35 | def name(self): 36 | return self.distro_name 37 | 38 | def add_packages(self, packages): 39 | self.build_packages = set(packages) 40 | 41 | def get_depends(self, pkg, ignore_pkgs=None): 42 | dependencies = set() 43 | if pkg not in self._distro.release_packages: 44 | print(f"{pkg} not in released packages anymore") 45 | return dependencies 46 | 47 | dependencies |= self._walker.get_recursive_depends( 48 | pkg, 49 | [ 50 | "buildtool", 51 | "buildtool_export", 52 | "build", 53 | "build_export", 54 | "run", 55 | "test", 56 | "exec", 57 | ], 58 | ros_packages_only=True, 59 | ignore_pkgs=ignore_pkgs, 60 | ) 61 | return dependencies 62 | 63 | def get_released_repo(self, pkg_name): 64 | if self.snapshot and pkg_name in self.snapshot: 65 | return ( 66 | self.snapshot[pkg_name].get("url", None), 67 | self.snapshot[pkg_name].get("tag", None), 68 | ) 69 | 70 | pkg = self._distro.release_packages[pkg_name] 71 | repo = self._distro.repositories[pkg.repository_name].release_repository 72 | release_tag = get_release_tag(repo, pkg_name) 73 | return repo.url, release_tag 74 | 75 | def check_package(self, pkg_name): 76 | if pkg_name in self._distro.release_packages: 77 | return self.snapshot is None or pkg_name in self.snapshot 78 | elif pkg_name in self.build_packages: 79 | return True 80 | else: 81 | return False 82 | 83 | def get_version(self, pkg_name): 84 | if self.snapshot and pkg_name in self.snapshot: 85 | return self.snapshot[pkg_name].get("version", None) 86 | 87 | pkg = self._distro.release_packages[pkg_name] 88 | repo = self._distro.repositories[pkg.repository_name].release_repository 89 | return repo.version.split("-")[0] 90 | 91 | def get_release_package_xml(self, pkg_name): 92 | return 
self._distro.get_release_package_xml(pkg_name) 93 | 94 | def check_ros1(self): 95 | return self._distribution_type == "ros1" 96 | 97 | def get_python_version(self): 98 | return self._python_version 99 | 100 | def get_package_names(self): 101 | return self._distro.release_packages.keys() 102 | -------------------------------------------------------------------------------- /vinca/generate_azure.py: -------------------------------------------------------------------------------- 1 | import networkx as nx 2 | import yaml 3 | import re 4 | import glob 5 | import sys 6 | import os 7 | import argparse 8 | import pkg_resources 9 | from distutils.dir_util import copy_tree 10 | 11 | from rich import print 12 | 13 | from vinca.utils import get_repodata 14 | from vinca.utils import literal_unicode as lu 15 | from vinca.distro import Distro 16 | from vinca.main import ( 17 | get_selected_packages, 18 | generate_outputs, 19 | read_vinca_yaml, 20 | get_conda_subdir, 21 | ) 22 | from vinca import config 23 | 24 | 25 | def read_azure_script(fn): 26 | template_in = pkg_resources.resource_filename("vinca", f"azure_templates/{fn}") 27 | with open(template_in, "r") as fi: 28 | return fi.read() 29 | 30 | 31 | azure_linux_script = lu(read_azure_script("linux.sh")) 32 | azure_osx_script = lu(read_azure_script("osx_64.sh")) 33 | azure_osx_arm64_script = lu(read_azure_script("osx_arm64.sh")) 34 | azure_win_preconfig_script = lu(read_azure_script("win_preconfig.bat")) 35 | azure_win_script = lu(read_azure_script("win_build.bat")) 36 | 37 | 38 | def parse_command_line(argv): 39 | parser = argparse.ArgumentParser( 40 | description="Conda recipe Azure pipeline generator for ROS packages" 41 | ) 42 | 43 | default_dir = "./recipes" 44 | parser.add_argument( 45 | "-d", 46 | "--dir", 47 | dest="dir", 48 | default=default_dir, 49 | help="The recipes directory to process (default: {}).".format(default_dir), 50 | ) 51 | 52 | parser.add_argument( 53 | "-t", "--trigger-branch", dest="trigger_branch", 
help="Trigger branch for Azure" 54 | ) 55 | 56 | parser.add_argument( 57 | "-p", 58 | "--platform", 59 | dest="platform", 60 | default="linux-64", 61 | help="Platform to emit build pipeline for", 62 | ) 63 | 64 | parser.add_argument( 65 | "-a", 66 | "--additional-recipes", 67 | action="store_true", 68 | help="search for additional_recipes folder?", 69 | ) 70 | 71 | arguments = parser.parse_args(argv[1:]) 72 | config.parsed_args = arguments 73 | return arguments 74 | 75 | 76 | def normalize_name(s): 77 | s = s.replace("-", "_") 78 | return re.sub("[^a-zA-Z0-9_]+", "", s) 79 | 80 | 81 | def batch_stages(stages, max_batch_size=5): 82 | with open("vinca.yaml", "r") as vinca_yaml: 83 | vinca_conf = yaml.safe_load(vinca_yaml) 84 | 85 | # this reduces the number of individual builds to try to save some time 86 | stage_lengths = [len(s) for s in stages] 87 | merged_stages = [] 88 | curr_stage = [] 89 | build_individually = vinca_conf.get("build_in_own_azure_stage", []) 90 | 91 | def chunks(lst, n): 92 | """Yield successive n-sized chunks from lst.""" 93 | for i in range(0, len(lst), n): 94 | yield lst[i : i + n] 95 | 96 | i = 0 97 | while i < len(stages): 98 | for build_individually_pkg in build_individually: 99 | if build_individually_pkg in stages[i]: 100 | merged_stages.append([[build_individually_pkg]]) 101 | stages[i].remove(build_individually_pkg) 102 | 103 | if ( 104 | stage_lengths[i] < max_batch_size 105 | and len(curr_stage) + stage_lengths[i] < max_batch_size 106 | ): 107 | # merge with previous stage 108 | curr_stage += stages[i] 109 | else: 110 | if len(curr_stage): 111 | merged_stages.append([curr_stage]) 112 | curr_stage = [] 113 | if stage_lengths[i] < max_batch_size: 114 | curr_stage += stages[i] 115 | else: 116 | # split this stage into multiple 117 | merged_stages.append(list(chunks(stages[i], max_batch_size))) 118 | i += 1 119 | if len(curr_stage): 120 | merged_stages.append([curr_stage]) 121 | return merged_stages 122 | 123 | 124 | def 
get_skip_existing(vinca_conf, platform): 125 | fn = vinca_conf.get("skip_existing") 126 | repodatas = [] 127 | if fn is not None: 128 | fns = list(fn) 129 | else: 130 | fns = [] 131 | 132 | for fn in fns: 133 | print(f"Fetching repodata: {fn}") 134 | repodata = get_repodata(fn, platform) 135 | repodatas.append(repodata) 136 | 137 | return repodatas 138 | 139 | 140 | def get_all_ancestors(graph, node): 141 | ancestors = set() 142 | visited = set() 143 | current_node = node 144 | 145 | while True: 146 | a = { 147 | a 148 | for a in graph.get(node, []) 149 | if a.startswith("ros-") or a.startswith("ros2") 150 | } 151 | if not graph.get(node): 152 | print(f"[yellow]{node} not found") 153 | 154 | ancestors |= a 155 | visited.add(current_node) 156 | 157 | if len(ancestors - visited) == 0: 158 | print(f"Returning all ancestors for {node} : {ancestors}") 159 | return ancestors 160 | else: 161 | current_node = list(ancestors - visited)[0] 162 | 163 | 164 | def add_additional_recipes(args): 165 | additional_recipes_path = os.path.abspath( 166 | os.path.join(args.dir, "..", "additional_recipes") 167 | ) 168 | 169 | print("Searching additional recipes in ", additional_recipes_path) 170 | 171 | if not os.path.exists(additional_recipes_path): 172 | return 173 | 174 | with open("vinca.yaml", "r") as vinca_yaml: 175 | vinca_conf = yaml.safe_load(vinca_yaml) 176 | 177 | repodatas = get_skip_existing(vinca_conf, args.platform) 178 | 179 | additional_recipes = [] 180 | for recipe_path in glob.glob(additional_recipes_path + "/**/recipe.yaml"): 181 | with open(recipe_path) as recipe: 182 | additional_recipe = yaml.safe_load(recipe) 183 | 184 | name, version, bnumber = ( 185 | additional_recipe["package"]["name"], 186 | additional_recipe["package"]["version"], 187 | additional_recipe["build"]["number"], 188 | ) 189 | print("Checking if ", name, version, bnumber, " exists") 190 | skip = False 191 | for repo in repodatas: 192 | for _, pkg in repo.get("packages", {}).items(): 193 | if ( 
194 | pkg["name"] == name 195 | and pkg["version"] == version 196 | and pkg["build_number"] == bnumber 197 | ): 198 | skip = True 199 | print(f"{name}=={version}=={bnumber} already exists. Skipping.") 200 | break 201 | 202 | if not skip: 203 | print("Adding ", os.path.dirname(recipe_path)) 204 | goal_folder = os.path.join(args.dir, name) 205 | os.makedirs(goal_folder, exist_ok=True) 206 | copy_tree(os.path.dirname(recipe_path), goal_folder) 207 | additional_recipes.append(additional_recipe) 208 | 209 | return additional_recipes 210 | 211 | 212 | def build_linux_pipeline( 213 | stages, 214 | trigger_branch, 215 | script=azure_linux_script, 216 | azure_template=None, 217 | docker_image=None, 218 | outfile="linux.yml", 219 | ): 220 | # Build Linux pipeline 221 | if azure_template is None: 222 | azure_template = {"pool": {"vmImage": "ubuntu-latest"}} 223 | 224 | if docker_image is None: 225 | docker_image = "condaforge/linux-anvil-cos7-x86_64" 226 | azure_stages = [] 227 | 228 | stage_names = [] 229 | for i, s in enumerate(stages): 230 | stage_name = f"stage_{i}" 231 | stage = {"stage": stage_name, "jobs": []} 232 | stage_names.append(stage_name) 233 | 234 | for batch in s: 235 | stage["jobs"].append( 236 | { 237 | "job": f"stage_{i}_job_{len(stage['jobs'])}", 238 | "steps": [ 239 | { 240 | "script": script, 241 | "env": { 242 | "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)", 243 | "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}", 244 | "DOCKER_IMAGE": docker_image, 245 | }, 246 | "displayName": f"Build {' '.join([pkg for pkg in batch])}", 247 | } 248 | ], 249 | } 250 | ) 251 | 252 | if len(stage["jobs"]) != 0: 253 | # all packages skipped ... 
254 | azure_stages.append(stage) 255 | 256 | azure_template["trigger"] = [trigger_branch] 257 | azure_template["pr"] = "none" 258 | if azure_stages: 259 | azure_template["stages"] = azure_stages 260 | 261 | if not len(azure_stages): 262 | return 263 | 264 | with open(outfile, "w") as fo: 265 | fo.write(yaml.dump(azure_template, sort_keys=False)) 266 | 267 | 268 | def build_osx_pipeline( 269 | stages, 270 | trigger_branch, 271 | vm_imagename="macOS-10.15", 272 | outfile="osx.yml", 273 | script=azure_osx_script, 274 | ): 275 | # Build OSX pipeline 276 | azure_template = {"pool": {"vmImage": vm_imagename}} 277 | 278 | azure_stages = [] 279 | 280 | stage_names = [] 281 | for i, s in enumerate(stages): 282 | stage_name = f"stage_{i}" 283 | stage = {"stage": stage_name, "jobs": []} 284 | stage_names.append(stage_name) 285 | 286 | for batch in s: 287 | stage["jobs"].append( 288 | { 289 | "job": f"stage_{i}_job_{len(stage['jobs'])}", 290 | "steps": [ 291 | { 292 | "script": script, 293 | "env": { 294 | "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)", 295 | "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}", 296 | }, 297 | "displayName": f"Build {' '.join([pkg for pkg in batch])}", 298 | } 299 | ], 300 | } 301 | ) 302 | 303 | if len(stage["jobs"]) != 0: 304 | # all packages skipped ... 305 | azure_stages.append(stage) 306 | 307 | azure_template["trigger"] = [trigger_branch] 308 | azure_template["pr"] = "none" 309 | if azure_stages: 310 | azure_template["stages"] = azure_stages 311 | 312 | if not len(azure_stages): 313 | return 314 | 315 | with open(outfile, "w") as fo: 316 | fo.write(yaml.dump(azure_template, sort_keys=False)) 317 | 318 | 319 | def build_win_pipeline(stages, trigger_branch, outfile="win.yml"): 320 | azure_template = {"pool": {"vmImage": "windows-2019"}} 321 | 322 | azure_stages = [] 323 | script = azure_win_script 324 | 325 | # overwrite with what we're finding in the repo! 
326 | if os.path.exists(".scripts/build_win.bat"): 327 | with open(".scripts/build_win.bat", "r") as fi: 328 | script = lu(fi.read()) 329 | 330 | stage_names = [] 331 | for i, s in enumerate(stages): 332 | stage_name = f"stage_{i}" 333 | stage = {"stage": stage_name, "jobs": []} 334 | stage_names.append(stage_name) 335 | 336 | for batch in s: 337 | stage["jobs"].append( 338 | { 339 | "job": f"stage_{i}_job_{len(stage['jobs'])}", 340 | "variables": {"CONDA_BLD_PATH": "C:\\\\bld\\\\"}, 341 | "steps": [ 342 | { 343 | "task": "PythonScript@0", 344 | "displayName": "Download Miniforge", 345 | "inputs": { 346 | "scriptSource": "inline", 347 | "script": lu( 348 | """import urllib.request 349 | url = 'https://github.com/conda-forge/miniforge/releases/latest/download/Mambaforge-Windows-x86_64.exe' 350 | path = r"$(Build.ArtifactStagingDirectory)/Miniforge.exe" 351 | urllib.request.urlretrieve(url, path)""" 352 | ), 353 | }, 354 | }, 355 | { 356 | "script": lu( 357 | """start /wait "" %BUILD_ARTIFACTSTAGINGDIRECTORY%\\Miniforge.exe /InstallationType=JustMe /RegisterPython=0 /S /D=C:\\Miniforge""" 358 | ), 359 | "displayName": "Install Miniforge", 360 | }, 361 | { 362 | "powershell": 'Write-Host "##vso[task.prependpath]C:\\Miniforge\\Scripts"', 363 | "displayName": "Add conda to PATH", 364 | }, 365 | { 366 | "script": lu( 367 | """call activate base 368 | mamba.exe install -c conda-forge --yes --quiet conda-build pip ruamel.yaml anaconda-client""" 369 | ), 370 | "displayName": "Install conda-build, boa and activate environment", 371 | }, 372 | { 373 | "script": azure_win_preconfig_script, 374 | "displayName": "conda-forge build setup", 375 | }, 376 | { 377 | "script": script, 378 | "env": { 379 | "ANACONDA_API_TOKEN": "$(ANACONDA_API_TOKEN)", 380 | "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}", 381 | "PYTHONUNBUFFERED": 1, 382 | }, 383 | "displayName": f"Build {' '.join([pkg for pkg in batch])}", 384 | }, 385 | ], 386 | } 387 | ) 388 | 389 | if len(stage["jobs"]) 
!= 0: 390 | # all packages skipped ... 391 | azure_stages.append(stage) 392 | 393 | azure_template["trigger"] = [trigger_branch] 394 | azure_template["pr"] = "none" 395 | if azure_stages: 396 | azure_template["stages"] = azure_stages 397 | 398 | if not len(azure_stages): 399 | return 400 | 401 | with open(outfile, "w") as fo: 402 | fo.write(yaml.dump(azure_template, sort_keys=False)) 403 | 404 | 405 | def get_full_tree(): 406 | recipes_dir = config.parsed_args.dir 407 | 408 | vinca_yaml = os.path.join(os.path.dirname(recipes_dir), "vinca.yaml") 409 | 410 | temp_vinca_conf = read_vinca_yaml(vinca_yaml) 411 | temp_vinca_conf["build_all"] = True 412 | temp_vinca_conf["skip_built_packages"] = [] 413 | config.selected_platform = get_conda_subdir() 414 | 415 | python_version = temp_vinca_conf.get("python_version", None) 416 | distro = Distro(temp_vinca_conf["ros_distro"], python_version) 417 | 418 | all_packages = get_selected_packages(distro, temp_vinca_conf) 419 | temp_vinca_conf["_selected_pkgs"] = all_packages 420 | 421 | all_outputs = generate_outputs(distro, temp_vinca_conf) 422 | return all_outputs 423 | 424 | 425 | def main(): 426 | 427 | args = parse_command_line(sys.argv) 428 | 429 | full_tree = get_full_tree() 430 | 431 | metas = [] 432 | 433 | additional_recipes = [] 434 | if args.additional_recipes: 435 | additional_recipes = add_additional_recipes(args) 436 | 437 | if not os.path.exists(args.dir): 438 | print(f"{args.dir} not found. 
Not generating a pipeline.") 439 | 440 | all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml")) 441 | for f in all_recipes: 442 | with open(f) as fi: 443 | metas.append(yaml.safe_load(fi.read())) 444 | 445 | if len(metas) >= 1: 446 | requirements = {} 447 | 448 | for pkg in full_tree + additional_recipes: 449 | requirements[pkg["package"]["name"]] = pkg["requirements"].get( 450 | "host", [] 451 | ) + pkg["requirements"].get("run", []) 452 | 453 | # sort out requirements that are not built in this run 454 | for pkg_name, reqs in requirements.items(): 455 | requirements[pkg_name] = [ 456 | r.split()[0] for r in reqs if (isinstance(r, str) and r in reqs) 457 | ] 458 | 459 | G = nx.DiGraph() 460 | for pkg, reqs in requirements.items(): 461 | G.add_node(pkg) 462 | for r in reqs: 463 | if r.startswith("ros-") or r.startswith("ros2-"): 464 | G.add_edge(pkg, r) 465 | 466 | # print(requirements) 467 | # import matplotlib.pyplot as plt 468 | # nx.draw(G, with_labels=True, font_weight='bold') 469 | # plt.show() 470 | 471 | tg = list(reversed(list(nx.topological_sort(G)))) 472 | 473 | names_to_build = {pkg["package"]["name"] for pkg in metas} 474 | print("Names to build: ", names_to_build) 475 | tg_slimmed = [el for el in tg if el in names_to_build] 476 | 477 | stages = [] 478 | current_stage = [] 479 | for pkg in tg_slimmed: 480 | reqs = get_all_ancestors(requirements, pkg) 481 | 482 | sort_in_stage = 0 483 | for r in reqs: 484 | # sort up the stages, until first stage found where all requirements are fulfilled. 
485 | for sidx, _ in enumerate(stages): 486 | if r in stages[sidx]: 487 | sort_in_stage = max(sidx + 1, sort_in_stage) 488 | 489 | if sort_in_stage >= len(stages): 490 | stages.append([pkg]) 491 | else: 492 | stages[sort_in_stage].append(pkg) 493 | 494 | if len(current_stage): 495 | stages.append(current_stage) 496 | 497 | elif len(metas) == 1: 498 | fn_wo_yaml = os.path.splitext(os.path.basename(all_recipes[0]))[0] 499 | stages = [[fn_wo_yaml]] 500 | requirements = [fn_wo_yaml] 501 | else: 502 | stages = [] 503 | requirements = [] 504 | 505 | # filter out packages that we are not actually building 506 | filtered_stages = [] 507 | for stage in stages: 508 | filtered = [pkg for pkg in stage if pkg in requirements] 509 | if len(filtered): 510 | filtered_stages.append(filtered) 511 | 512 | stages = batch_stages(filtered_stages) 513 | print(stages) 514 | 515 | with open("buildorder.txt", "w") as fo: 516 | order = [] 517 | for stage in filtered_stages: 518 | for el in stage: 519 | print(el) 520 | order.append(el) 521 | 522 | fo.write("\n".join(order)) 523 | 524 | if args.platform == "linux-64": 525 | build_linux_pipeline(stages, args.trigger_branch, outfile="linux.yml") 526 | 527 | if args.platform == "osx-64": 528 | build_osx_pipeline( 529 | stages, 530 | args.trigger_branch, 531 | script=azure_osx_script, 532 | ) 533 | 534 | if args.platform == "osx-arm64": 535 | build_osx_pipeline( 536 | stages, 537 | args.trigger_branch, 538 | vm_imagename="macOS-11", 539 | outfile="osx_arm64.yml", 540 | script=azure_osx_arm64_script, 541 | ) 542 | 543 | if args.platform == "linux-aarch64": 544 | # Build aarch64 pipeline 545 | aarch64_azure_template = { 546 | "pool": { 547 | "name": "Default", 548 | "demands": [ 549 | "Agent.OS -equals linux", 550 | "Agent.OSArchitecture -equals ARM64", 551 | ], 552 | } 553 | } 554 | 555 | build_linux_pipeline( 556 | stages, 557 | args.trigger_branch, 558 | azure_template=aarch64_azure_template, 559 | docker_image="condaforge/linux-anvil-aarch64", 
def read_azure_script(fn):
    """Return the text of a CI template script bundled in ``vinca/azure_templates``.

    Parameters
    ----------
    fn : str
        File name of the template, e.g. ``"unix.sh"`` or ``"win_build.bat"``.

    Returns
    -------
    str
        The full contents of the template file.
    """
    # importlib.resources is the stdlib replacement for the deprecated
    # pkg_resources API; imported locally so the module-level import list
    # of this file does not need to change.
    from importlib import resources

    template = resources.files("vinca") / "azure_templates" / fn
    return template.read_text()
def normalize_name(s):
    """Turn *s* into an identifier-like token.

    Dashes become underscores, and any remaining character outside
    ``[a-zA-Z0-9_]`` is dropped.
    """
    underscored = s.replace("-", "_")
    return re.sub(r"[^a-zA-Z0-9_]+", "", underscored)
def get_all_ancestors(graph, node):
    """Return the transitive set of ``ros-``/``ros2`` requirements of *node*.

    Parameters
    ----------
    graph : dict
        Maps a package name to the list of its requirement names.
    node : str
        The package whose ancestry is requested.

    Returns
    -------
    set
        Every requirement reachable from *node* whose name starts with
        ``ros-`` or ``ros2``; *node* itself is not included.

    Note: the previous implementation expanded ``graph.get(node)`` on every
    loop iteration instead of the node currently being visited, so it only
    ever returned the *direct* dependencies.  This version walks the whole
    ancestry with an explicit worklist.
    """
    ancestors = set()
    visited = set()
    to_visit = [node]

    while to_visit:
        current_node = to_visit.pop()
        if current_node in visited:
            continue
        visited.add(current_node)

        if current_node not in graph:
            # Mirror the old diagnostic for packages missing from the graph.
            print(f"[yellow]{current_node} not found")

        for dep in graph.get(current_node, []):
            if dep.startswith("ros-") or dep.startswith("ros2"):
                ancestors.add(dep)
                if dep not in visited:
                    to_visit.append(dep)

    print(f"Returning all ancestors for {node} : {ancestors}")
    return ancestors
def get_stage_name(batch):
    """Build a human-readable job name from a batch of package names.

    ``ros-<distro>-`` style prefixes (the first two dash-separated tokens)
    are stripped; names with two or fewer tokens are kept verbatim.  The
    shortened names are joined with single spaces.
    """

    def shorten(pkg):
        parts = pkg.split("-")
        return "-".join(parts[2:]) if len(parts) > 2 else pkg

    return " ".join(shorten(pkg) for pkg in batch)
def build_linux_pipeline(
    stages,
    trigger_branch,
    script=azure_unix_script,
    azure_template=None,
    runs_on="ubuntu-latest",
    outfile="linux.yml",
    pipeline_name="build_linux",
):
    """Emit a GitHub Actions workflow for linux-64.

    Thin wrapper that forwards every argument to :func:`build_unix_pipeline`
    with the build target pinned to ``linux-64``.
    """
    forwarded = {
        "script": script,
        "azure_template": azure_template,
        "runs_on": runs_on,
        "outfile": outfile,
        "pipeline_name": pipeline_name,
        "target": "linux-64",
    }
    build_unix_pipeline(stages, trigger_branch, **forwarded)
355 | if os.path.exists(".scripts/build_win.bat"): 356 | with open(".scripts/build_win.bat", "r") as fi: 357 | script = lu(fi.read()) 358 | 359 | jobs = [] 360 | job_names = [] 361 | prev_batch_keys = [] 362 | for i, s in enumerate(stages): 363 | stage_name = f"stage_{i}" 364 | batch_keys = [] 365 | for batch in s: 366 | batch_key = f"{stage_name}_job_{len(azure_template['jobs'])}" 367 | batch_keys.append(batch_key) 368 | 369 | pretty_stage_name = get_stage_name(batch) 370 | azure_template["jobs"][batch_key] = { 371 | "name": pretty_stage_name, 372 | "runs-on": vm_imagename, 373 | "strategy": {"fail-fast": False}, 374 | "needs": prev_batch_keys, 375 | "env": {"CONDA_BLD_PATH": "C:\\\\bld\\\\"}, 376 | "steps": [ 377 | {"name": "Checkout code", "uses": "actions/checkout@v4"}, 378 | { 379 | "name": "Setup pixi", 380 | "uses": "prefix-dev/setup-pixi@v0.8.1", 381 | "with": { 382 | "pixi-version": "v0.40.3", 383 | "cache": "true", 384 | }, 385 | }, 386 | { 387 | "uses": "egor-tensin/cleanup-path@v4", 388 | "with": { 389 | "dirs": "C:\\Program Files\\Git\\usr\\bin;C:\\Program Files\\Git\\bin;C:\\Program Files\\Git\\cmd;C:\\Program Files\\Git\\mingw64\\bin" 390 | }, 391 | }, 392 | { 393 | "shell": "cmd", 394 | "run": azure_win_preconfig_script, 395 | "name": "conda-forge build setup", 396 | }, 397 | { 398 | "shell": "cmd", 399 | "run": script, 400 | "env": { 401 | "ANACONDA_API_TOKEN": "${{ secrets.ANACONDA_API_TOKEN }}", 402 | "CURRENT_RECIPES": f"{' '.join([pkg for pkg in batch])}", 403 | "PYTHONUNBUFFERED": 1, 404 | }, 405 | "name": f"Build {' '.join([pkg for pkg in batch])}", 406 | }, 407 | ], 408 | } 409 | 410 | prev_batch_keys = batch_keys 411 | 412 | if len(azure_template.get("jobs", [])) == 0: 413 | return 414 | 415 | azure_template["on"] = {"push": {"branches": [trigger_branch]}} 416 | 417 | dump_for_gha(azure_template, outfile) 418 | 419 | 420 | def get_full_tree(): 421 | recipes_dir = config.parsed_args.dir 422 | 423 | vinca_yaml = 
def main():
    """Entry point: compute a staged build order for the generated recipes and
    emit the GitHub Actions workflow for the requested platform.

    Reads recipes from ``args.dir``, topologically sorts them by their
    ``ros-``/``ros2-`` requirements, batches the stages, writes
    ``buildorder.txt`` and the per-platform workflow YAML.

    Fixes over the previous revision:
    * bail out (``return``) when the recipe directory does not exist — before,
      the warning was printed but the function carried on and wrote an empty
      ``buildorder.txt``;
    * removed the unreachable ``elif len(metas) == 1`` branch (shadowed by the
      preceding ``len(metas) >= 1`` test) and the dead ``current_stage``
      bookkeeping that was never filled;
    * simplified the tautological ``r in reqs`` filter condition.
    """
    args = parse_command_line(sys.argv)

    full_tree = get_full_tree()

    metas = []

    additional_recipes = []
    if args.additional_recipes:
        additional_recipes = add_additional_recipes(args)

    if not os.path.exists(args.dir):
        print(f"{args.dir} not found. Not generating a pipeline.")
        return

    all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml"))
    for f in all_recipes:
        with open(f) as fi:
            metas.append(yaml.safe_load(fi.read()))

    platform = args.platform

    if metas:
        requirements = {}

        for pkg in full_tree + additional_recipes:
            # Multi-output recipes keep their requirements on the first output.
            if "outputs" in pkg:
                req_section = pkg["outputs"][0]["requirements"]
            else:
                req_section = pkg["requirements"]
            requirements[pkg["package"]["name"]] = req_section.get(
                "host", []
            ) + req_section.get("run", [])

        # Keep only plain string requirements, stripped of version constraints.
        for pkg_name, reqs in requirements.items():
            requirements[pkg_name] = [
                r.split()[0] for r in reqs if isinstance(r, str)
            ]
            if platform == "emscripten-wasm32":
                # Hot fix to add the only ros package inside a if else statement
                if "ros-humble-rmw-wasm-cpp" in str(reqs):
                    requirements[pkg_name].append("ros-humble-rmw-wasm-cpp")

        # Dependency graph: edge pkg -> ros requirement.
        G = nx.DiGraph()
        for pkg, reqs in requirements.items():
            G.add_node(pkg)
            for r in reqs:
                if r.startswith("ros-") or r.startswith("ros2-"):
                    G.add_edge(pkg, r)

        # Reversed topological order => dependencies come first.
        tg = list(reversed(list(nx.topological_sort(G))))

        names_to_build = {pkg["package"]["name"] for pkg in metas}
        print("Names to build: ", names_to_build)
        tg_slimmed = [el for el in tg if el in names_to_build]

        stages = []
        for pkg in tg_slimmed:
            reqs = get_all_ancestors(requirements, pkg)

            # Place pkg in the first stage after every stage containing one of
            # its requirements.
            sort_in_stage = 0
            for r in reqs:
                for sidx, stage in enumerate(stages):
                    if r in stage:
                        sort_in_stage = max(sidx + 1, sort_in_stage)

            if sort_in_stage >= len(stages):
                stages.append([pkg])
            else:
                stages[sort_in_stage].append(pkg)
    else:
        stages = []
        requirements = []

    # Filter out packages that we are not actually building.
    filtered_stages = []
    for stage in stages:
        filtered = [pkg for pkg in stage if pkg in requirements]
        if filtered:
            filtered_stages.append(filtered)

    stages = batch_stages(filtered_stages, args.batch_size)
    print(stages)

    with open("buildorder.txt", "w") as fo:
        order = []
        for stage in filtered_stages:
            for el in stage:
                print(el)
                order.append(el)

        fo.write("\n".join(order))

    if args.platform == "linux-64":
        build_unix_pipeline(stages, args.trigger_branch, outfile="linux.yml")

    if args.platform == "osx-64":
        build_osx_pipeline(
            stages,
            args.trigger_branch,
        )

    if args.platform == "osx-arm64":
        build_osx_pipeline(
            stages,
            args.trigger_branch,
            vm_imagename="macos-14",
            outfile="osx_arm64.yml",
            script=azure_unix_script,
            target=platform,
            pipeline_name="build_osx_arm64",
        )

    if args.platform == "linux-aarch64":
        # Build aarch64 pipeline on native ARM runners.
        build_unix_pipeline(
            stages,
            args.trigger_branch,
            runs_on="ubuntu-24.04-arm",
            outfile="linux_aarch64.yml",
            target=platform,
            pipeline_name="build_linux_aarch64",
        )

    # windows
    if args.platform == "win-64":
        build_win_pipeline(stages, args.trigger_branch, outfile="win.yml")

    if args.platform == "emscripten-wasm32":
        build_unix_pipeline(
            stages,
            args.trigger_branch,
            outfile="emscripten_wasm32.yml",
            pipeline_name="build_emscripten_wasm32",
            target="emscripten-wasm32",
        )
23 | # see https://hub.docker.com/_/gcc/ 24 | image: ubuntu:20.04 25 | 26 | build: 27 | stage: build 28 | script: 29 | - echo "Hello" 30 | """ 31 | 32 | 33 | def main(): 34 | metas = [] 35 | 36 | for f in glob.glob(os.path.join(sys.argv[1], "*.yaml")): 37 | print(f) 38 | with open(f) as fi: 39 | metas.append(yaml.load(fi.read(), Loader=Loader)) 40 | 41 | requirements = {} 42 | 43 | for pkg in metas: 44 | requirements[pkg["package"]["name"]] = ( 45 | pkg["requirements"]["host"] + pkg["requirements"]["run"] 46 | ) 47 | 48 | print(requirements) 49 | 50 | G = nx.DiGraph() 51 | for pkg, reqs in requirements.items(): 52 | G.add_node(pkg) 53 | for r in reqs: 54 | if r.startswith("ros-"): 55 | G.add_edge(pkg, r) 56 | 57 | # import matplotlib.pyplot as plt 58 | # nx.draw(G, with_labels=True, font_weight='bold') 59 | # plt.show() 60 | 61 | tg = list(reversed(list(nx.topological_sort(G)))) 62 | print(tg) 63 | print(requirements["ros-melodic-ros-core"]) 64 | 65 | stages = [] 66 | current_stage = [] 67 | for pkg in tg: 68 | for r in requirements[pkg]: 69 | if r in current_stage: 70 | stages.append(current_stage) 71 | current_stage = [] 72 | current_stage.append(pkg) 73 | 74 | stages.append(current_stage) 75 | 76 | print(stages) 77 | 78 | gitlab_template = {"image": "condaforge/linux-anvil-cos7-x86_64"} 79 | 80 | stage_names = [] 81 | for i, s in enumerate(stages): 82 | stage_name = f"stage_{i}" 83 | stage_names.append(stage_name) 84 | for pkg in s: 85 | gitlab_template[pkg] = { 86 | "stage": stage_name, 87 | "script": [ 88 | 'export FEEDSTOCK_ROOT="$CI_BUILDS_DIR"', 89 | "export GIT_BRANCH=$CI_COMMIT_REF_NAME", 90 | 'export RECIPE_ROOT="$FEEDSTOCK_ROOT/recipe"', 91 | "sed -i '$ichown -R conda:conda \"$FEEDSTOCK_ROOT\"' /opt/docker/bin/entrypoint", 92 | ".scripts/build_linux.sh", 93 | ], 94 | "variables": {"CURRENT_BUILD_PKG_NAME": pkg}, 95 | # 'needs': [r for r in requirements[pkg] if r.startswith('ros-')] 96 | } 97 | 98 | gitlab_template["stages"] = stage_names 99 | 100 | with 
def ensure_list(obj):
    """Coerce any falsy value to ``[]``; otherwise assert *obj* is already a
    list and hand it back unchanged."""
    if obj:
        assert isinstance(obj, list)
        return obj
    return []
63 | 64 | example = textwrap.dedent( 65 | """ 66 | Examples: 67 | {0} -d ./examples/ 68 | See: https://github.com/RoboStack/vinca 69 | """ 70 | ).format(os.path.basename(argv[0])) 71 | formatter_class = argparse.RawDescriptionHelpFormatter 72 | parser = argparse.ArgumentParser( 73 | description="Conda recipe generator for ROS packages", 74 | epilog=example, 75 | formatter_class=formatter_class, 76 | ) 77 | parser.add_argument( 78 | "-V", "--version", action="version", version="%(prog)s {}".format(__version__) 79 | ) 80 | parser.add_argument( 81 | "-d", 82 | "--dir", 83 | dest="dir", 84 | default=default_dir, 85 | help="The directory to process (default: {}).".format(default_dir), 86 | ) 87 | parser.add_argument( 88 | "-s", 89 | "--skip", 90 | dest="skip_already_built_repodata", 91 | default=[], 92 | help="Skip already built from repodata.", 93 | ) 94 | parser.add_argument( 95 | "-m", 96 | "--multiple", 97 | dest="multiple_file", 98 | action="store_const", 99 | const=True, 100 | default=False, 101 | help="Create one recipe for package.", 102 | ) 103 | parser.add_argument( 104 | "-n", 105 | "--trigger-new-versions", 106 | dest="trigger_new_versions", 107 | action="store_const", 108 | const=True, 109 | default=False, 110 | help="Trigger the build of packages that have new versions available.", 111 | ) 112 | parser.add_argument( 113 | "--source", 114 | dest="source", 115 | action="store_const", 116 | const=True, 117 | default=False, 118 | help="Create recipe with develop repo.", 119 | ) 120 | parser.add_argument( 121 | "-p", "--package", dest="package", default=None, help="The package.xml path." 
def get_depmods(vinca_conf, pkg_name):
    """Collect per-package dependency modifications from the vinca config.

    Looks up ``vinca_conf["depmods"][pkg_name]`` and gathers the
    ``remove_<section>`` / ``add_<section>`` entries for the ``build``,
    ``host`` and ``run`` sections.

    Returns
    -------
    tuple(dict, dict)
        ``(removals, additions)``, each keyed by section name.  Dict-valued
        entries are shallow-copied so mutating the result cannot leak back
        into the configuration.
    """
    depmods = vinca_conf["depmods"].get(pkg_name, {})

    def collect(prefix, dep_type):
        entries = depmods.get(prefix + dep_type) or []
        return [dict(el) if isinstance(el, dict) else el for el in entries]

    rm_deps = {t: collect("remove_", t) for t in ("build", "host", "run")}
    add_deps = {t: collect("add_", t) for t in ("build", "host", "run")}
    return rm_deps, add_deps
def read_snapshot(vinca_conf):
    """Load the rosdistro snapshot file referenced by *vinca_conf*.

    Returns
    -------
    The parsed YAML document, or ``None`` when the config has no
    ``rosdistro_snapshot`` entry.

    Fixes: idiomatic ``not in`` membership test, and the file handle is now
    closed deterministically via ``with`` instead of being leaked by
    ``yaml.load(open(...))``.
    """
    if "rosdistro_snapshot" not in vinca_conf:
        return None

    yaml = ruamel.yaml.YAML()
    with open(vinca_conf["rosdistro_snapshot"], "r") as fi:
        return yaml.load(fi)
vinca_conf["skip_built_packages"]: 244 | return None 245 | else: 246 | if pkg_names[0] in vinca_conf["skip_built_packages"]: 247 | return None 248 | 249 | output = { 250 | "package": {"name": pkg_names[0], "version": version}, 251 | "requirements": { 252 | "build": [ 253 | "${{ compiler('cxx') }}", 254 | "${{ compiler('c') }}", 255 | {"if": "target_platform!='emscripten-wasm32'", "then": ["${{ stdlib('c') }}"]}, 256 | "ninja", 257 | "python", 258 | "setuptools", 259 | "git", 260 | {"if": "unix", "then": ["patch", "make", "coreutils"]}, 261 | {"if": "win", "then": ["m2-patch"]}, 262 | {"if": "osx", "then": ["tapi"]}, 263 | {"if": "build_platform != target_platform", "then": ["pkg-config"]}, 264 | "cmake", 265 | "cython", 266 | {"if": "build_platform != target_platform", "then": ["python", "cross-python_${{ target_platform }}", "numpy"]}, 267 | ], 268 | "host": [ 269 | {"if": "build_platform == target_platform", "then": ["pkg-config"]}, 270 | "python", 271 | "numpy", 272 | "pip", 273 | ], 274 | "run": [], 275 | }, 276 | "build": {"script": ""}, 277 | } 278 | 279 | pkg = catkin_pkg.package.parse_package_string( 280 | distro.get_release_package_xml(pkg_shortname) 281 | ) 282 | 283 | pkg.evaluate_conditions(os.environ) 284 | 285 | resolved_python = resolve_pkgname("python", vinca_conf, distro) 286 | output["requirements"]["run"].extend(resolved_python) 287 | output["requirements"]["host"].extend(resolved_python) 288 | if pkg.get_build_type() in ["cmake", "catkin"]: 289 | output["build"][ 290 | "script" 291 | ] = "${{ '$RECIPE_DIR/build_catkin.sh' if unix or wasm32 else '%RECIPE_DIR%\\\\bld_catkin.bat' }}" 292 | elif pkg.get_build_type() in ["ament_cmake"]: 293 | output["build"][ 294 | "script" 295 | ] = "${{ '$RECIPE_DIR/build_ament_cmake.sh' if unix or wasm32 else '%RECIPE_DIR%\\\\bld_ament_cmake.bat' }}" 296 | elif pkg.get_build_type() in ["ament_python"]: 297 | output["build"][ 298 | "script" 299 | ] = "${{ '$RECIPE_DIR/build_ament_python.sh' if unix or wasm32 else 
'%RECIPE_DIR%\\\\bld_ament_python.bat' }}" 300 | resolved_setuptools = resolve_pkgname("python-setuptools", vinca_conf, distro) 301 | output["requirements"]["host"].extend(resolved_setuptools) 302 | else: 303 | print(f"Unknown build type for {pkg_shortname}: {pkg.get_build_type()}") 304 | return None 305 | 306 | if vinca_conf.get("mutex_package"): 307 | output["requirements"]["host"].append(vinca_conf["mutex_package"]) 308 | output["requirements"]["run"].append(vinca_conf["mutex_package"]) 309 | 310 | if not distro.check_ros1() and pkg_shortname not in [ 311 | "ament_cmake_core", 312 | "ament_package", 313 | "ros_workspace", 314 | "ros_environment", 315 | ]: 316 | output["requirements"]["host"].append( 317 | f"ros-{config.ros_distro}-ros-environment" 318 | ) 319 | output["requirements"]["host"].append(f"ros-{config.ros_distro}-ros-workspace") 320 | output["requirements"]["run"].append(f"ros-{config.ros_distro}-ros-workspace") 321 | 322 | rm_deps, add_deps = get_depmods(vinca_conf, pkg.name) 323 | gdeps = [] 324 | if pkg.group_depends: 325 | for gdep in pkg.group_depends: 326 | gdep.extract_group_members(all_pkgs) 327 | gdeps += gdep.members 328 | 329 | build_tool_deps = pkg.buildtool_depends 330 | build_tool_deps += pkg.buildtool_export_depends 331 | build_tool_deps = [d.name for d in build_tool_deps if d.evaluated_condition] 332 | 333 | build_deps = pkg.build_depends 334 | build_deps += pkg.build_export_depends 335 | build_deps += pkg.test_depends 336 | build_deps = [d.name for d in build_deps if d.evaluated_condition] 337 | build_deps += gdeps 338 | 339 | # we stick some build tools into the `build` section to make cross compilation work 340 | # right now it's only `git`. 
341 | for dep in build_tool_deps: 342 | resolved_dep = resolve_pkgname(dep, vinca_conf, distro) 343 | if not resolved_dep: 344 | unsatisfied_deps.add(dep) 345 | continue 346 | 347 | if "git" in resolved_dep: 348 | output["requirements"]["build"].extend(resolved_dep) 349 | else: 350 | # remove duplicate cmake 351 | if dep not in ["cmake"]: 352 | build_deps.append(dep) 353 | 354 | # Hack to add cyclonedds into build for cross compilation 355 | if pkg_shortname == "cyclonedds" or "cyclonedds" in ( 356 | build_deps + build_tool_deps 357 | ): 358 | output["requirements"]["build"].append( 359 | { 360 | "if": "build_platform != target_platform", 361 | "then": [f"ros-{config.ros_distro}-cyclonedds"], 362 | } 363 | ) 364 | 365 | for dep in build_deps: 366 | if dep in ["REQUIRE_OPENGL", "REQUIRE_GL"]: 367 | output["requirements"]["host"].append(dep) 368 | continue 369 | 370 | resolved_dep = resolve_pkgname(dep, vinca_conf, distro) 371 | if not resolved_dep: 372 | unsatisfied_deps.add(dep) 373 | continue 374 | output["requirements"]["host"].extend(resolved_dep) 375 | 376 | run_deps = pkg.run_depends 377 | run_deps += pkg.exec_depends 378 | run_deps += pkg.build_export_depends 379 | run_deps += pkg.buildtool_export_depends 380 | run_deps = [d.name for d in run_deps if d.evaluated_condition] 381 | run_deps += gdeps 382 | 383 | for dep in run_deps: 384 | if dep in ["REQUIRE_OPENGL", "REQUIRE_GL"]: 385 | output["requirements"]["host"].append(dep) 386 | continue 387 | 388 | resolved_dep = resolve_pkgname(dep, vinca_conf, distro, is_rundep=True) 389 | if not resolved_dep: 390 | unsatisfied_deps.add(dep) 391 | continue 392 | output["requirements"]["run"].extend(resolved_dep) 393 | 394 | for dep_type in ["build", "host", "run"]: 395 | for dep in add_deps[dep_type]: 396 | output["requirements"][dep_type].append(dep) 397 | for dep in rm_deps[dep_type]: 398 | while dep in output["requirements"][dep_type]: 399 | output["requirements"][dep_type].remove(dep) 400 | 401 | def sortkey(k): 402 
| if isinstance(k, dict): 403 | return list(k.values())[0] 404 | return k 405 | 406 | # For Emscripten, only install cmake as a build dependency. 407 | # This should be ok as cmake is only really needed during builds, not when running packages. 408 | if "cmake" in output["requirements"]["run"]: 409 | output["requirements"]["run"].remove("cmake") 410 | output["requirements"]["run"].append({"if": "target_platform != 'emscripten-wasm32'", "then": ["cmake"]}) 411 | 412 | if "cmake" in output["requirements"]["host"]: 413 | output["requirements"]["host"].remove("cmake") 414 | if "cmake" not in output["requirements"]["build"]: 415 | output["requirements"]["build"].append("cmake") 416 | 417 | if f"ros-{config.ros_distro}-mimick-vendor" in output["requirements"]["build"]: 418 | output["requirements"]["build"].remove(f"ros-{config.ros_distro}-mimick-vendor") 419 | output["requirements"]["build"].append({"if": "target_platform != 'emscripten-wasm32'", "then": [f"ros-{config.ros_distro}-mimick-vendor"]}) 420 | 421 | if f"ros-{config.ros_distro}-mimick-vendor" in output["requirements"]["host"]: 422 | output["requirements"]["host"].remove(f"ros-{config.ros_distro}-mimick-vendor") 423 | output["requirements"]["build"].append({"if": "target_platform != 'emscripten-wasm32'", "then": [f"ros-{config.ros_distro}-mimick-vendor"]}) 424 | 425 | if f"ros-{config.ros_distro}-rosidl-default-generators" in output["requirements"]["host"]: 426 | output["requirements"]["build"].append({"if": "target_platform == 'emscripten-wasm32'", "then": [f"ros-{config.ros_distro}-rosidl-default-generators"]}) 427 | 428 | output["requirements"]["run"] = sorted(output["requirements"]["run"], key=sortkey) 429 | output["requirements"]["host"] = sorted(output["requirements"]["host"], key=sortkey) 430 | 431 | output["requirements"]["run"] += [ 432 | { 433 | "if": "osx and x86_64", 434 | "then": ["__osx >=${{ MACOSX_DEPLOYMENT_TARGET|default('10.14') }}"], 435 | } 436 | ] 437 | 438 | if 
f"ros-{config.ros_distro}-pybind11-vendor" in output["requirements"]["host"]: 439 | output["requirements"]["host"] += ["pybind11"] 440 | if "pybind11" in output["requirements"]["host"]: 441 | output["requirements"]["build"] += [ 442 | {"if": "build_platform != target_platform", "then": ["pybind11"]} 443 | ] 444 | if "qt-main" in output["requirements"]["host"]: 445 | output["requirements"]["build"] += [ 446 | {"if": "build_platform != target_platform", "then": ["qt-main"]} 447 | ] 448 | # pyqt-builder + git + doxygen must be in build, not host for cross-compile 449 | pkgs_move_to_build = ["pyqt-builder", "git", "doxygen"] 450 | for pkg_move_to_build in pkgs_move_to_build: 451 | if pkg_move_to_build in output["requirements"]["host"]: 452 | output["requirements"]["build"] += [ 453 | {"if": "build_platform != target_platform", "then": [pkg_move_to_build]} 454 | ] 455 | while pkg_move_to_build in output["requirements"]["host"]: 456 | output["requirements"]["host"].remove(pkg_move_to_build) 457 | output["requirements"]["host"] += [ 458 | {"if": "build_platform == target_platform", "then": [pkg_move_to_build]} 459 | ] 460 | 461 | # fix up OPENGL support for Unix 462 | if ( 463 | "REQUIRE_OPENGL" in output["requirements"]["run"] 464 | or "REQUIRE_OPENGL" in output["requirements"]["host"] 465 | ): 466 | # add requirements for opengl 467 | while "REQUIRE_OPENGL" in output["requirements"]["run"]: 468 | output["requirements"]["run"].remove("REQUIRE_OPENGL") 469 | while "REQUIRE_OPENGL" in output["requirements"]["host"]: 470 | output["requirements"]["host"].remove("REQUIRE_OPENGL") 471 | 472 | output["requirements"]["host"] += [ 473 | { 474 | "if": "linux", 475 | "then": ["libgl-devel", "libopengl-devel"], 476 | } 477 | ] 478 | 479 | output["requirements"]["host"] += [ 480 | {"if": "unix", "then": ["xorg-libx11", "xorg-libxext"]}, 481 | ] 482 | output["requirements"]["run"] += [ 483 | {"if": "unix", "then": ["xorg-libx11", "xorg-libxext"]}, 484 | ] 485 | 486 | # fix up GL 
def generate_outputs(distro, vinca_conf):
    """Generate a recipe output for every selected package in the distro.

    Packages that are unknown to the distro or whose release XML / version
    cannot be parsed are reported and skipped.
    """
    outputs = []

    def get_pkg(pkg_name):
        # Parse the release package.xml and evaluate ROS conditions against
        # the current environment (e.g. ROS_VERSION / ROS_PYTHON_VERSION).
        pkg = catkin_pkg.package.parse_package_string(
            distro.get_release_package_xml(pkg_name)
        )
        pkg.evaluate_conditions(os.environ)
        return pkg

    # Parsed package objects for the whole ros_base dependency set; passed to
    # generate_output for cross-package lookups.
    all_pkgs = [get_pkg(pkg) for pkg in distro.get_depends("ros_base")]

    for pkg_shortname in vinca_conf["_selected_pkgs"]:
        if not distro.check_package(pkg_shortname):
            print(f"Could not generate output for {pkg_shortname}")
            continue

        try:
            output = generate_output(
                pkg_shortname,
                vinca_conf,
                distro,
                distro.get_version(pkg_shortname),
                all_pkgs,
            )
        except AttributeError:
            print("Skip " + pkg_shortname + " due to invalid version / XML.")
            # BUG FIX: without this `continue`, `output` was unbound on the
            # first failing package (NameError) or stale from the previous
            # iteration, appending a duplicate entry to `outputs`.
            continue
        if output is not None:
            outputs.append(output)
    return outputs
def generate_source(distro, vinca_conf):
    """Build the recipe ``source`` mapping (git url/tag, target dir, patches)
    for every selected package, honoring the skip-built-packages list.

    Returns a dict keyed by the resolved conda package name.
    """
    source = {}
    for pkg_shortname in vinca_conf["_selected_pkgs"]:
        if not distro.check_package(pkg_shortname):
            print(f"Could not generate source for {pkg_shortname}")
            continue

        url, version = distro.get_released_repo(pkg_shortname)
        entry = {}
        entry["git"] = url
        entry["tag"] = version
        pkg_names = resolve_pkgname(pkg_shortname, vinca_conf, distro)
        pkg_version = distro.get_version(pkg_shortname)
        print("Checking ", pkg_shortname, pkg_version)
        if not pkg_names:
            continue
        # With trigger_new_versions, skipping is keyed on (name, version) so a
        # new upstream version is rebuilt; otherwise skipping is name-only.
        if vinca_conf.get("trigger_new_versions"):
            if (pkg_names[0], pkg_version) in vinca_conf["skip_built_packages"]:
                continue
        else:
            if pkg_names[0] in vinca_conf["skip_built_packages"]:
                continue
        pkg_name = pkg_names[0]
        entry["target_directory"] = "%s/src/work" % pkg_name

        patches = []
        pd = vinca_conf["_patches"].get(pkg_name)
        if pd:
            patches.extend(pd["any"])

            # find specific patches
            plat = get_conda_subdir().split("-")[0]
            # ROBUSTNESS FIX: use .get() — a patch entry with no list for this
            # platform previously raised KeyError and aborted generation.
            patches.extend(pd.get(plat, []))
        if len(patches):
            print(patches)
            # Emit patch paths relative to the common prefix with the cwd so
            # the recipe does not embed absolute paths.
            common_prefix = os.path.commonprefix((os.getcwd(), patches[0]))
            print(common_prefix)
            entry["patches"] = [os.path.relpath(p, common_prefix) for p in patches]

        source[pkg_name] = entry

    return source
def generate_fat_source(distro, vinca_conf):
    """Build a flat list of git source entries (one per selected package)
    for a single "fat" recipe that checks everything out under ``src/<pkg>``.
    """
    entries = []
    for pkg_shortname in vinca_conf["_selected_pkgs"]:
        if not distro.check_package(pkg_shortname):
            print(f"Could not generate source for {pkg_shortname}")
            continue

        url, version = distro.get_released_repo(pkg_shortname)
        entry = {"git": url, "tag": version}

        resolved = resolve_pkgname(pkg_shortname, vinca_conf, distro)
        if not resolved:
            continue
        pkg_name = resolved[0]
        entry["target_directory"] = "src/%s" % pkg_name

        patch_file = "%s.patch" % pkg_name
        patch_path = os.path.join(vinca_conf["_patch_dir"], patch_file)
        if os.path.exists(patch_path):
            # NOTE(review): existence is checked against "_patch_dir" but the
            # recipe entry is built from "patch_dir" — possibly intentional
            # (local path vs. recipe-relative path), worth confirming.
            entry["patches"] = ["%s/%s" % (vinca_conf["patch_dir"], patch_file)]

        entries.append(entry)
    return entries
def get_selected_packages(distro, vinca_conf):
    """Compute the sorted set of package short names to build.

    Either every released package (``build_all``) or the transitive closure of
    ``packages_select_by_deps`` minus ``packages_skip_by_deps``.
    """
    selected_packages = set()
    skipped_packages = set()

    if vinca_conf.get("build_all", False):
        selected_packages = set(distro._distro.release_packages.keys())
    elif vinca_conf["packages_select_by_deps"]:

        if (
            "packages_skip_by_deps" in vinca_conf
            and vinca_conf["packages_skip_by_deps"] is not None
        ):
            # Register both dash and underscore spellings of each skip entry.
            for i in vinca_conf["packages_skip_by_deps"]:
                skipped_packages = skipped_packages.union([i, i.replace("-", "_")])
        print("Skipped pkgs: ", skipped_packages)
        for i in vinca_conf["packages_select_by_deps"]:
            i = i.replace("-", "_")
            selected_packages = selected_packages.union([i])
            if i in skipped_packages:
                continue
            try:
                pkgs = distro.get_depends(i, ignore_pkgs=skipped_packages)
            except KeyError:
                # handle (rare) package names that use "-" as separator
                # CONSISTENCY FIX: forward ignore_pkgs here too, so the skip
                # list is not silently dropped for dash-named packages.
                pkgs = distro.get_depends(
                    i.replace("_", "-"), ignore_pkgs=skipped_packages
                )
                selected_packages.remove(i)
                selected_packages.add(i.replace("_", "-"))
            selected_packages = selected_packages.union(pkgs)

    result = sorted(list(selected_packages))
    return result
"build_platform != target_platform", "then": ["python"]}, 738 | { 739 | "if": "build_platform != target_platform", 740 | "then": ["cross-python_${{ target_platform }}"], 741 | }, 742 | {"if": "build_platform != target_platform", "then": ["cython"]}, 743 | {"if": "build_platform != target_platform", "then": ["numpy"]}, 744 | {"if": "build_platform != target_platform", "then": ["pybind11"]}, 745 | ], 746 | "host": [], 747 | "run": [], 748 | }, 749 | } 750 | 751 | if test := vinca_conf.get("_tests", {}).get(final_name): 752 | # parse as yaml 753 | text = test.read_text() 754 | test_content = ruamel.yaml.safe_load(text) 755 | recipe["test"] = test_content 756 | 757 | for p in pkg["authors"]: 758 | name = p.name + " (" + p.email + ")" if p.email else p.name 759 | recipe["about"]["maintainers"].append(name) 760 | 761 | for u in pkg["urls"]: 762 | # if u.type == 'repository' : 763 | # recipe['source']['git'] = u.url 764 | # recipe['source']['tag'] = recipe['package']['version'] 765 | if u.type == "website": 766 | recipe["about"]["homepage"] = u.url 767 | 768 | # if u.type == 'bugtracker' : 769 | # recipe['about']['url_issues'] = u.url 770 | 771 | if not recipe["source"].get("git", None): 772 | aux = path.split("/") 773 | print(aux[: len(aux) - 1]) 774 | recipe["source"]["path"] = "/".join(aux[: len(aux) - 1]) 775 | recipe["source"]["target_directory"] = f"{final_name}/src/work" 776 | 777 | for d in pkg["buildtool_depends"]: 778 | recipe["requirements"]["host"].extend( 779 | resolve_pkgname(d.name, vinca_conf, distro) 780 | ) 781 | 782 | for d in pkg["build_depends"]: 783 | recipe["requirements"]["host"].extend( 784 | resolve_pkgname(d.name, vinca_conf, distro) 785 | ) 786 | 787 | for d in pkg["build_export_depends"]: 788 | recipe["requirements"]["host"].extend( 789 | resolve_pkgname(d.name, vinca_conf, distro) 790 | ) 791 | recipe["requirements"]["run"].extend( 792 | resolve_pkgname(d.name, vinca_conf, distro) 793 | ) 794 | 795 | for d in pkg["buildtool_export_depends"]: 
796 | recipe["requirements"]["host"].extend( 797 | resolve_pkgname(d.name, vinca_conf, distro) 798 | ) 799 | recipe["requirements"]["run"].extend( 800 | resolve_pkgname(d.name, vinca_conf, distro) 801 | ) 802 | 803 | for d in pkg["test_depends"]: 804 | recipe["requirements"]["host"].extend( 805 | resolve_pkgname(d.name, vinca_conf, distro) 806 | ) 807 | 808 | for d in pkg["exec_depends"]: 809 | recipe["requirements"]["run"].extend( 810 | resolve_pkgname(d.name, vinca_conf, distro) 811 | ) 812 | 813 | if pkg.get_build_type() in ["cmake", "catkin"]: 814 | recipe["build"][ 815 | "script" 816 | ] = "${{ '$RECIPE_DIR/build_catkin.sh' if unix or wasm32 else '%RECIPE_DIR%\\\\bld_catkin.bat' }}" 817 | 818 | # fix up OPENGL support for Unix 819 | if ( 820 | "REQUIRE_OPENGL" in recipe["requirements"]["run"] 821 | or "REQUIRE_OPENGL" in recipe["requirements"]["host"] 822 | ): 823 | # add requirements for opengl 824 | while "REQUIRE_OPENGL" in recipe["requirements"]["run"]: 825 | recipe["requirements"]["run"].remove("REQUIRE_OPENGL") 826 | while "REQUIRE_OPENGL" in recipe["requirements"]["host"]: 827 | recipe["requirements"]["host"].remove("REQUIRE_OPENGL") 828 | 829 | recipe["requirements"]["host"] += [ 830 | { 831 | "if": "linux", 832 | "then": ["libgl-devel", "libopengl-devel"], 833 | } 834 | ] 835 | recipe["requirements"]["host"] += [ 836 | {"if": "unix", "then": ["xorg-libx11", "xorg-libxext"]}, 837 | ] 838 | recipe["requirements"]["run"] += [ 839 | {"if": "unix", "then": ["xorg-libx11", "xorg-libxext"]}, 840 | ] 841 | 842 | # fix up GL support for Unix 843 | if ( 844 | "REQUIRE_GL" in recipe["requirements"]["run"] 845 | or "REQUIRE_GL" in recipe["requirements"]["host"] 846 | ): 847 | # add requirements for gl 848 | while "REQUIRE_GL" in recipe["requirements"]["run"]: 849 | recipe["requirements"]["run"].remove("REQUIRE_GL") 850 | while "REQUIRE_GL" in recipe["requirements"]["host"]: 851 | recipe["requirements"]["host"].remove("REQUIRE_GL") 852 | 853 | 
recipe["requirements"]["host"] += [ 854 | { 855 | "if": "linux", 856 | "then": ["libgl-devel"], 857 | } 858 | ] 859 | 860 | return recipe 861 | 862 | 863 | def main(): 864 | global distro, unsatisfied_deps 865 | 866 | arguments = parse_command_line(sys.argv) 867 | 868 | base_dir = os.path.abspath(arguments.dir) 869 | vinca_yaml = os.path.join(base_dir, "vinca.yaml") 870 | vinca_conf = read_vinca_yaml(vinca_yaml) 871 | snapshot = read_snapshot(vinca_conf) 872 | 873 | from .template import generate_bld_ament_cmake 874 | from .template import generate_bld_ament_python 875 | from .template import generate_bld_catkin 876 | from .template import generate_activate_hook 877 | from .template import generate_bld_colcon_merge 878 | from .template import generate_bld_catkin_merge 879 | 880 | generate_bld_ament_cmake() 881 | generate_bld_ament_python() 882 | generate_bld_catkin() 883 | generate_bld_colcon_merge() 884 | generate_bld_catkin_merge() 885 | generate_activate_hook() 886 | 887 | if arguments.trigger_new_versions: 888 | vinca_conf["trigger_new_versions"] = True 889 | else: 890 | vinca_conf["trigger_new_versions"] = vinca_conf.get("trigger_new_versions", False) 891 | 892 | if arguments.package: 893 | pkg_files = glob.glob(arguments.package) 894 | 895 | python_version = None 896 | if "python_version" in vinca_conf: 897 | python_version = vinca_conf["python_version"] 898 | 899 | distro = Distro(vinca_conf["ros_distro"], python_version, snapshot) 900 | additional_pkgs, parsed_pkgs = [], [] 901 | for f in pkg_files: 902 | parsed_pkg = catkin_pkg.package.parse_package(f) 903 | additional_pkgs.append(parsed_pkg.name) 904 | parsed_pkgs.append(parsed_pkg) 905 | 906 | distro.add_packages(additional_pkgs) 907 | 908 | outputs = [] 909 | for f in pkg_files: 910 | pkg = catkin_pkg.package.parse_package(f) 911 | recipe = parse_package(pkg, distro, vinca_conf, f) 912 | 913 | if arguments.multiple_file: 914 | write_recipe_package(recipe) 915 | else: 916 | outputs.append(recipe) 917 | 
918 | if not arguments.multiple_file: 919 | sources = {} 920 | for o in outputs: 921 | sources[o["package"]["name"]] = o["source"] 922 | del o["source"] 923 | write_recipe(sources, outputs, vinca_conf) 924 | 925 | else: 926 | if arguments.skip_already_built_repodata or vinca_conf.get("skip_existing"): 927 | skip_built_packages = set() 928 | fn = arguments.skip_already_built_repodata 929 | if not fn: 930 | fn = vinca_conf.get("skip_existing") 931 | 932 | yaml = ruamel.yaml.YAML() 933 | additional_recipe_names = set() 934 | for add_rec in glob.glob( 935 | os.path.join(base_dir, "additional_recipes", "**", "recipe.yaml") 936 | ): 937 | with open(add_rec) as fi: 938 | add_rec_y = yaml.load(fi) 939 | if config.parsed_args.platform == 'emscripten-wasm32': 940 | additional_recipe_names.add(add_rec_y["package"]["name"]) 941 | else: 942 | if add_rec_y["package"]["name"] not in ["ros-humble-rmw-wasm-cpp", "ros-humble-wasm-cpp", "ros-humble-dynmsg", "ros-humble-test-wasm"]: 943 | additional_recipe_names.add(add_rec_y["package"]["name"]) 944 | 945 | print("Found additional recipes: ", additional_recipe_names) 946 | 947 | fns = list(fn) 948 | for fn in fns: 949 | selected_bn = None 950 | 951 | print(f"Fetching repodata: {fn}") 952 | repodata = get_repodata(fn, get_conda_subdir()) 953 | # currently we don't check the build numbers of local repodatas, 954 | # only URLs 955 | if "://" in fn: 956 | selected_bn = vinca_conf.get("build_number", 0) 957 | if not vinca_conf.get("use_explicit_build_number", True): 958 | distro = vinca_conf["ros_distro"] 959 | all_pkgs = repodata.get("packages", {}) 960 | all_pkgs.update(repodata.get("packages.conda", {})) 961 | for pkg_name, pkg in all_pkgs.items(): 962 | if pkg_name.startswith(f"ros-{distro}"): 963 | if pkg_name.rsplit("-", 2)[0] in additional_recipe_names: 964 | print( 965 | f"Skipping additional recipe for build number computation {pkg_name}" 966 | ) 967 | continue 968 | selected_bn = max(selected_bn, pkg["build_number"]) 969 | 970 | 
971 | explicitly_selected_pkgs = [ 972 | f"ros-{distro}-{pkg.replace('_', '-')}" 973 | for pkg in ensure_list(vinca_conf["packages_select_by_deps"]) 974 | ] 975 | all_pkgs = repodata.get("packages", {}) 976 | all_pkgs.update(repodata.get("packages.conda", {})) 977 | for _, pkg in all_pkgs.items(): 978 | is_built = False 979 | if selected_bn is not None: 980 | pkg_build_number = get_pkg_build_number(selected_bn, pkg["name"], vinca_conf) 981 | if pkg["build_number"] == pkg_build_number: 982 | is_built = True 983 | else: 984 | is_built = True 985 | 986 | if is_built: 987 | print(f"Skipping {pkg['name']}") 988 | if vinca_conf["trigger_new_versions"]: 989 | skip_built_packages.add((pkg["name"], pkg["version"])) 990 | else: 991 | skip_built_packages.add(pkg["name"]) 992 | 993 | vinca_conf["skip_built_packages"] = skip_built_packages 994 | else: 995 | vinca_conf["skip_built_packages"] = [] 996 | print("Skip built packages!", vinca_conf["skip_built_packages"]) 997 | python_version = None 998 | if "python_version" in vinca_conf: 999 | python_version = vinca_conf["python_version"] 1000 | 1001 | distro = Distro(vinca_conf["ros_distro"], python_version, snapshot) 1002 | 1003 | selected_pkgs = get_selected_packages(distro, vinca_conf) 1004 | 1005 | vinca_conf["_selected_pkgs"] = selected_pkgs 1006 | 1007 | if arguments.source: 1008 | source = generate_source_version(distro, vinca_conf) 1009 | outputs = generate_outputs_version(distro, vinca_conf) 1010 | else: 1011 | source = generate_source(distro, vinca_conf) 1012 | outputs = generate_outputs(distro, vinca_conf) 1013 | 1014 | if arguments.multiple_file: 1015 | write_recipe(source, outputs, vinca_conf, False) 1016 | else: 1017 | write_recipe(source, outputs, vinca_conf) 1018 | 1019 | if unsatisfied_deps: 1020 | print("Unsatisfied dependencies:", unsatisfied_deps) 1021 | 1022 | print("build scripts are created successfully.") 1023 | -------------------------------------------------------------------------------- 
def to_ros_name(distro, pkg_name):
    """Convert a conda package name (``ros-<distro>-foo-bar``) back to the
    corresponding ROS short name, preferring the dash form, then the
    underscore form; raise if neither exists in the distro."""
    shortname = pkg_name[len(ros_prefix) + 1 :]
    for candidate in (shortname, shortname.replace("-", "_")):
        if distro.check_package(candidate):
            return candidate
    raise RuntimeError(f"Couldnt convert {pkg_name} to ROS pkg name")
def create_migration_instructions(arch, packages_to_migrate, trigger_branch):
    """Compute which ros-* packages depend on ``packages_to_migrate``, write
    them (topologically sorted) into ``vinca.yaml``, and kick off the vinca /
    vinca-azure pipeline generation for ``arch``.

    Side effects: rewrites vinca.yaml, deletes ./recipes, copies the
    ros-distro-mutex recipe, and runs the ``vinca`` / ``vinca-azure`` CLIs.
    """
    url = "https://conda.anaconda.org/robostack/"

    yaml = ruamel.yaml.YAML()
    with open("vinca.yaml", "r") as fi:
        vinca_conf = yaml.load(fi)

    global distro_version, ros_prefix
    distro_version = vinca_conf["ros_distro"]
    ros_prefix = f"ros-{distro_version}"

    repodata = get_repodata(url, arch)

    packages = repodata["packages"]
    to_migrate = set()
    ros_pkgs = set()
    for pkey in packages:
        if not pkey.startswith(ros_prefix):
            continue

        # "name-version-build" -> package name.
        pname = pkey.rsplit("-", 2)[0]
        ros_pkgs.add(pname)

        p = packages[pkey]

        for d in p.get("depends", []):
            if d.split()[0] in packages_to_migrate:
                # print(f"need to migrate {pkey}")
                to_migrate.add(pname)

    # For each ros package, find the repodata key of its latest version.
    latest = {}
    for pkg in ros_pkgs:
        current = current_version = None
        for pkey in packages:
            if packages[pkey]["name"] == pkg:
                tmp = packages[pkey]["version"].split(".")
                version = []
                for el in tmp:
                    if el.isdecimal():
                        version.append(int(el))
                    else:
                        # BUG FIX: search the segment `el`, not the list
                        # `version` — re.search() on a list raises TypeError,
                        # so any non-decimal version segment crashed here.
                        x = re.search(r"[^0-9]", el).start()
                        version.append(int(el[:x]))

                version = tuple(version)

                if not current or version > current_version:
                    current_version = version
                    current = pkey
        latest[pkg] = current

    # now we can build the graph ...

    G = nx.DiGraph()
    for pkg, pkgkey in latest.items():
        full_pkg = packages[pkgkey]
        for dep in full_pkg.get("depends", []):
            req = dep.split(" ")[0]
            G.add_node(pkg)
            if req.startswith(ros_prefix):
                G.add_edge(pkg, req)

    # Reverse topological order: dependencies first.
    gsorted = nx.topological_sort(G)
    gsorted = list(reversed([g for g in gsorted]))

    to_migrate = sorted(to_migrate, key=lambda x: gsorted.index(x))

    print("Sorted to migrate: ", to_migrate)

    distro = Distro(distro_version)

    ros_names = []
    for pkg in to_migrate:
        ros_names.append(to_ros_name(distro, pkg))
    print("Final names: ", ros_names)

    vinca_conf["packages_select_by_deps"] = ros_names
    vinca_conf["is_migration"] = True
    vinca_conf["skip_existing"] = []

    with open("vinca.yaml", "w") as fo:
        yaml.dump(vinca_conf, fo)

    if os.path.exists("recipes"):
        shutil.rmtree("recipes")

    mutex_path = os.path.join(
        config.parsed_args.dir, "additional_recipes/ros-distro-mutex"
    )
    if os.path.exists(mutex_path):
        goal_folder = os.path.join(
            config.parsed_args.dir, "recipes", "ros-distro-mutex"
        )
        os.makedirs(goal_folder, exist_ok=True)
        copy_tree(mutex_path, goal_folder)

    subprocess.check_call(
        ["vinca", "-d", config.parsed_args.dir, "--multiple", "--platform", arch]
    )

    # TODO remove hard coded build branch here!
    # NOTE(review): the `trigger_branch` parameter is unused; the branch below
    # is hard-coded — confirm before wiring it through.
    recipe_dir = os.path.join(config.parsed_args.dir, "recipes")
    subprocess.check_call(
        [
            "vinca-azure",
            "--platform",
            arch,
            "--trigger-branch",
            "buildbranch_linux",
            "-d",
            recipe_dir,
            "--additional-recipes",
        ]
    )
138 | 139 | # TODO remove hard coded build branch here! 140 | recipe_dir = os.path.join(config.parsed_args.dir, "recipes") 141 | subprocess.check_call( 142 | [ 143 | "vinca-azure", 144 | "--platform", 145 | arch, 146 | "--trigger-branch", 147 | "buildbranch_linux", 148 | "-d", 149 | recipe_dir, 150 | "--additional-recipes", 151 | ] 152 | ) 153 | 154 | 155 | def parse_command_line(argv): 156 | parser = argparse.ArgumentParser( 157 | description="Conda recipe Azure pipeline generator for ROS packages" 158 | ) 159 | 160 | default_dir = "./recipes" 161 | parser.add_argument( 162 | "-d", 163 | "--dir", 164 | dest="dir", 165 | default=default_dir, 166 | help="The recipes directory to process (default: {}).".format(default_dir), 167 | ) 168 | 169 | parser.add_argument( 170 | "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure" 171 | ) 172 | 173 | parser.add_argument( 174 | "-p", 175 | "--platform", 176 | dest="platform", 177 | default="linux-64", 178 | help="Platform to emit build pipeline for", 179 | ) 180 | 181 | parser.add_argument( 182 | "-a", 183 | "--additional-recipes", 184 | action="store_true", 185 | help="search for additional_recipes folder?", 186 | ) 187 | 188 | arguments = parser.parse_args(argv[1:]) 189 | config.parsed_args = arguments 190 | return arguments 191 | 192 | 193 | def main(): 194 | args = parse_command_line(sys.argv) 195 | 196 | mfile = os.path.join(args.dir + "/migration.yaml") 197 | with open(mfile, "r") as fi: 198 | migration = yaml.safe_load(fi) 199 | print(migration) 200 | create_migration_instructions( 201 | args.platform, migration.get("packages", []), args.trigger_branch 202 | ) 203 | -------------------------------------------------------------------------------- /vinca/resolve.py: -------------------------------------------------------------------------------- 1 | import os 2 | from urllib.request import urlopen 3 | from vinca import config 4 | 5 | map_platform_python_to_conda = { 6 | "linux-64": "linux", 7 | 
"linux-aarch64": "linux", 8 | "osx-64": "osx", 9 | "osx-arm64": "osx", 10 | "win-64": "win64", 11 | "emscripten-wasm32": "emscripten", 12 | } 13 | 14 | 15 | def get_conda_index(vinca_conf, base_dir): 16 | import ruamel.yaml 17 | 18 | yaml = ruamel.yaml.YAML() 19 | conda_index = [] 20 | for i in vinca_conf["conda_index"]: 21 | ip = os.path.join(base_dir, i) 22 | if os.path.isfile(ip): 23 | rawdata = yaml.load(open(ip, "r")) 24 | else: 25 | rawdata = yaml.load(urlopen(i)) 26 | conda_index.append(rawdata) 27 | return conda_index 28 | 29 | 30 | def resolve_pkgname_from_indexes(pkg_shortname, conda_index): 31 | for i in conda_index: 32 | if pkg_shortname in i: 33 | sys_platform = map_platform_python_to_conda[config.selected_platform] 34 | if "robostack" in i[pkg_shortname].keys(): 35 | if config.selected_platform in i[pkg_shortname]["robostack"]: 36 | return i[pkg_shortname]["robostack"][config.selected_platform] 37 | elif sys_platform in i[pkg_shortname]["robostack"]: 38 | return i[pkg_shortname]["robostack"][sys_platform] 39 | else: 40 | return i[pkg_shortname]["robostack"] 41 | raise KeyError( 42 | "Missing package for platform {}: {}\nCheck your conda metadata!".format( 43 | sys_platform, pkg_shortname 44 | ) 45 | ) 46 | 47 | return None 48 | 49 | 50 | def should_skip_pkg(pkg_shortname, vinca_conf): 51 | skip = vinca_conf.get("packages_remove_from_deps", []) 52 | if not skip: 53 | return False 54 | 55 | if pkg_shortname in skip: 56 | return True 57 | if pkg_shortname.replace("_", "-") in skip: 58 | return True 59 | 60 | 61 | def resolve_pkgname(pkg_shortname, vinca_conf, distro, is_rundep=False): 62 | pkg_names = resolve_pkgname_from_indexes( 63 | pkg_shortname, vinca_conf["_conda_indexes"] 64 | ) 65 | if pkg_names is None: 66 | if not distro.check_package(pkg_shortname) or should_skip_pkg( 67 | pkg_shortname, vinca_conf 68 | ): 69 | return [] 70 | else: 71 | return [ 72 | "ros-%s-%s" 73 | % (vinca_conf["ros_distro"], pkg_shortname.replace("_", "-")) 74 | ] 75 | 
def resolve_pkgname(pkg_shortname, vinca_conf, distro, is_rundep=False):
    """Map a ROS package short name to its conda package name(s).

    Consults the conda indexes first; otherwise falls back to the canonical
    ``ros-<distro>-<name>`` form (or nothing, if unknown/skipped). For run
    dependencies, version pins are stripped and "python" itself is dropped.
    """
    pkg_names = resolve_pkgname_from_indexes(
        pkg_shortname, vinca_conf["_conda_indexes"]
    )

    if pkg_names is None:
        # Not mapped by any index: emit the canonical ROS conda name, or
        # nothing if the package is unknown or explicitly removed from deps.
        if not distro.check_package(pkg_shortname) or should_skip_pkg(
            pkg_shortname, vinca_conf
        ):
            return []
        canonical = "ros-%s-%s" % (
            vinca_conf["ros_distro"],
            pkg_shortname.replace("_", "-"),
        )
        return [canonical]

    if not is_rundep:  # host deps keep their version pins untouched
        return pkg_names

    # for run dependencies, remove the version
    stripped = []
    for candidate in pkg_names:
        # "pkg >=1.2" -> "pkg", but leave Jinja expressions ("${{ ... }}")
        # alone since they contain spaces by construction.
        if " " in candidate and "${{" not in candidate:
            candidate = candidate.split(" ")[0]
        if candidate != "python":
            stripped.append(candidate)
    return stripped
def main():
    """CLI entry point: snapshot the released url/version/tag of every
    package in a ROS distro (or one package plus its dependencies) and write
    the result to a YAML file."""
    parser = argparse.ArgumentParser(
        description="Dependency snapshotting tool for ROS packages"
    )
    parser.add_argument(
        "-d",
        "--distro",
        type=str,
        dest="distro",
        default="humble",
        help="ROS distribution to use (default: humble)",
        required=False,
    )
    parser.add_argument(
        "-p",
        "--package",
        type=str,
        dest="package",
        default=None,
        help="ROS package to get dependencies for (default: ALL)",
        required=False,
    )
    parser.add_argument(
        "-o",
        "--output",
        type=str,
        dest="output",
        default="rosdistro_snapshot.yaml",
        help="Output file to write dependencies to",
        required=False,
    )
    parser.add_argument(
        "-q",
        "--quiet",
        dest="quiet",
        action="store_true",
        help="Suppress output to stdout",
        required=False,
    )
    args = parser.parse_args()

    # Get the current UTC time
    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
    # consider datetime.now(datetime.timezone.utc) once the minimum
    # supported Python allows — confirm before changing.
    utc_time = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")

    distro = Distro(args.distro)

    if args.package is None:
        # No package given: snapshot every package in the distro.
        deps = distro.get_package_names()
    else:
        # Snapshot the requested package plus its dependency set.
        deps = distro.get_depends(args.package)
        deps.add(args.package)

    if not args.quiet:
        # Column width for the aligned "Package  Version" table header.
        max_len = max([len(dep) for dep in deps])
        print("\033[1m{0:{2}} {1}\033[0m".format("Package", "Version", max_len + 2))

    output = {}

    for dep in deps:
        try:
            url, tag = distro.get_released_repo(dep)
            version = distro.get_version(dep)
        except AttributeError:
            # Package has no release entry in the rosdistro index — skip it.
            print("\033[93mPackage '{}' has no version set, skipping...\033[0m".format(dep))
            continue

        output[dep] = {"url": url, "version": version, "tag": tag}

        if not args.quiet:
            print("{0:{2}} {1}".format(dep, version, max_len + 2))

    with open(args.output, "w") as f:
        # Header comment records when and for which distro the snapshot was taken.
        f.write(f"# Snapshot generated by vinca-snapshot on {utc_time} UTC for distro {args.distro}\n")
        yaml.dump(output, f)
| 55 | def write_recipe_package(recipe): 56 | file = yaml.YAML() 57 | file.width = 4096 58 | file.indent(mapping=2, sequence=4, offset=2) 59 | 60 | os.makedirs(recipe["package"]["name"], exist_ok=True) 61 | recipe_path = os.path.join(recipe["package"]["name"], "recipe.yaml") 62 | with open(recipe_path, "w") as stream: 63 | file.dump(recipe, stream) 64 | 65 | def copyfile_with_exec_permissions(source_file, destination_file): 66 | shutil.copyfile(source_file, destination_file) 67 | 68 | # It seems that rattler-build requires script to have executable permissions 69 | if os.name == 'posix': 70 | # Retrieve current permissions 71 | current_permissions = os.stat(destination_file).st_mode 72 | # Set executable permissions for user, group, and others 73 | os.chmod(destination_file, current_permissions | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) 74 | 75 | def write_recipe(source, outputs, vinca_conf, single_file=True): 76 | # single_file = False 77 | if single_file: 78 | file = yaml.YAML() 79 | file.width = 4096 80 | file.indent(mapping=2, sequence=4, offset=2) 81 | meta = file.load(TEMPLATE) 82 | 83 | meta["source"] = [source[k] for k in source] 84 | meta["outputs"] = outputs 85 | meta["package"]["version"] = f"{datetime.datetime.now():%Y.%m.%d}" 86 | meta["recipe"] = meta["package"] 87 | del meta["package"] 88 | meta["build"]["number"] = vinca_conf.get("build_number", 0) 89 | meta["build"]["post_process"] = post_process_items 90 | with open("recipe.yaml", "w") as stream: 91 | file.dump(meta, stream) 92 | else: 93 | for o in outputs: 94 | file = yaml.YAML() 95 | file.width = 4096 96 | file.indent(mapping=2, sequence=4, offset=2) 97 | meta = file.load(TEMPLATE) 98 | 99 | meta["source"] = source[o["package"]["name"]] 100 | for k, v in o.items(): 101 | meta[k] = v 102 | 103 | meta["package"]["name"] = o["package"]["name"] 104 | meta["package"]["version"] = o["package"]["version"] 105 | 106 | meta["build"]["number"] = get_pkg_build_number(vinca_conf.get("build_number", 0), 
o["package"]["name"], vinca_conf) 107 | meta["build"]["post_process"] = post_process_items 108 | 109 | if test := vinca_conf["_tests"].get(o["package"]["name"]): 110 | print("Using test: ", test) 111 | text = test.read_text() 112 | test_content = yaml.safe_load(text) 113 | meta["tests"] = test_content["tests"] 114 | 115 | recipe_dir = (Path("recipes") / o["package"]["name"]).absolute() 116 | os.makedirs(recipe_dir, exist_ok=True) 117 | with open(recipe_dir / "recipe.yaml", "w") as stream: 118 | file.dump(meta, stream) 119 | 120 | if meta["source"].get("patches"): 121 | for p in meta["source"]["patches"]: 122 | patch_dir, _ = os.path.split(p) 123 | os.makedirs(recipe_dir / patch_dir, exist_ok=True) 124 | shutil.copyfile(p, recipe_dir / p) 125 | 126 | build_scripts = re.findall(r"'(.*?)'", meta["build"]["script"]) 127 | baffer = meta["build"]["script"] 128 | for script in build_scripts: 129 | script_filename = script.replace("$RECIPE_DIR", "").replace("%RECIPE_DIR%", "").replace("/", "").replace("\\", "") 130 | copyfile_with_exec_permissions(script_filename, recipe_dir / script_filename) 131 | if "catkin" in o["package"]["name"] or "workspace" in o["package"]["name"]: 132 | shutil.copyfile("activate.sh", recipe_dir / "activate.sh") 133 | shutil.copyfile("activate.bat", recipe_dir / "activate.bat") 134 | shutil.copyfile("activate.ps1", recipe_dir / "activate.ps1") 135 | shutil.copyfile("deactivate.sh", recipe_dir / "deactivate.sh") 136 | shutil.copyfile("deactivate.bat", recipe_dir / "deactivate.bat") 137 | shutil.copyfile("deactivate.ps1", recipe_dir / "deactivate.ps1") 138 | 139 | 140 | def generate_template(template_in, template_out): 141 | import em 142 | from vinca.config import skip_testing, ros_distro 143 | 144 | g = {"ros_distro": ros_distro, "skip_testing": "ON" if skip_testing else "OFF"} 145 | interpreter = em.Interpreter( 146 | output=template_out, options={em.RAW_OPT: True, em.BUFFERED_OPT: True} 147 | ) 148 | interpreter.updateGlobals(g) 149 | 
interpreter.file(open(template_in)) 150 | interpreter.shutdown() 151 | 152 | # It seems that rattler-build requires script to have executable permissions 153 | # See https://github.com/RoboStack/ros-humble/pull/229#issuecomment-2549988298 154 | if os.name == 'posix': 155 | # Retrieve current permissions 156 | current_permissions = os.stat(template_out.name).st_mode 157 | # Set executable permissions for user, group, and others 158 | os.chmod(template_out.name, current_permissions | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) 159 | 160 | def generate_bld_ament_cmake(): 161 | import pkg_resources 162 | 163 | template_in = pkg_resources.resource_filename( 164 | "vinca", "templates/bld_ament_cmake.bat.in" 165 | ) 166 | generate_template(template_in, open("bld_ament_cmake.bat", "w")) 167 | template_in = pkg_resources.resource_filename( 168 | "vinca", "templates/build_ament_cmake.sh.in" 169 | ) 170 | generate_template(template_in, open("build_ament_cmake.sh", "w")) 171 | 172 | 173 | def generate_bld_ament_python(): 174 | import pkg_resources 175 | 176 | template_in = pkg_resources.resource_filename( 177 | "vinca", "templates/bld_ament_python.bat.in" 178 | ) 179 | generate_template(template_in, open("bld_ament_python.bat", "w")) 180 | template_in = pkg_resources.resource_filename( 181 | "vinca", "templates/build_ament_python.sh.in" 182 | ) 183 | generate_template(template_in, open("build_ament_python.sh", "w")) 184 | 185 | 186 | def generate_bld_catkin(): 187 | import pkg_resources 188 | 189 | template_in = pkg_resources.resource_filename( 190 | "vinca", "templates/bld_catkin.bat.in" 191 | ) 192 | generate_template(template_in, open("bld_catkin.bat", "w")) 193 | template_in = pkg_resources.resource_filename( 194 | "vinca", "templates/build_catkin.sh.in" 195 | ) 196 | generate_template(template_in, open("build_catkin.sh", "w")) 197 | 198 | 199 | def generate_bld_colcon_merge(): 200 | import pkg_resources 201 | 202 | template_in = pkg_resources.resource_filename( 203 | 
"vinca", "templates/bld_colcon_merge.bat.in" 204 | ) 205 | generate_template(template_in, open("bld_colcon_merge.bat", "w")) 206 | 207 | 208 | def generate_bld_catkin_merge(): 209 | import pkg_resources 210 | 211 | template_in = pkg_resources.resource_filename( 212 | "vinca", "templates/bld_catkin_merge.bat.in" 213 | ) 214 | generate_template(template_in, open("bld_catkin_merge.bat", "w")) 215 | 216 | 217 | def generate_activate_hook(): 218 | import pkg_resources 219 | 220 | template_in = pkg_resources.resource_filename("vinca", "templates/activate.bat.in") 221 | generate_template(template_in, open("activate.bat", "w")) 222 | template_in = pkg_resources.resource_filename( 223 | "vinca", "templates/deactivate.bat.in" 224 | ) 225 | generate_template(template_in, open("deactivate.bat", "w")) 226 | 227 | template_in = pkg_resources.resource_filename("vinca", "templates/activate.ps1.in") 228 | generate_template(template_in, open("activate.ps1", "w")) 229 | template_in = pkg_resources.resource_filename( 230 | "vinca", "templates/deactivate.ps1.in" 231 | ) 232 | generate_template(template_in, open("deactivate.ps1", "w")) 233 | 234 | template_in = pkg_resources.resource_filename("vinca", "templates/activate.sh.in") 235 | generate_template(template_in, open("activate.sh", "w")) 236 | template_in = pkg_resources.resource_filename("vinca", "templates/deactivate.sh.in") 237 | generate_template(template_in, open("deactivate.sh", "w")) 238 | -------------------------------------------------------------------------------- /vinca/templates/activate.bat.in: -------------------------------------------------------------------------------- 1 | :: Generated by vinca http://github.com/RoboStack/vinca. 2 | :: DO NOT EDIT! 3 | @@if not defined CONDA_PREFIX goto:eof 4 | 5 | @@REM Don't do anything when we are in conda build. 
6 | @@if defined SYS_PREFIX exit /b 0 7 | 8 | @@set "QT_PLUGIN_PATH=%CONDA_PREFIX%\Library\plugins" 9 | 10 | @@call "%CONDA_PREFIX%\Library\local_setup.bat" 11 | @@set PYTHONHOME= 12 | @@set "ROS_OS_OVERRIDE=conda:win64" 13 | @@set "ROS_ETC_DIR=%CONDA_PREFIX%\Library\etc\ros" 14 | @@set "AMENT_PREFIX_PATH=%CONDA_PREFIX%\Library" 15 | @@set "AMENT_PYTHON_EXECUTABLE=%CONDA_PREFIX%\python.exe" 16 | -------------------------------------------------------------------------------- /vinca/templates/activate.ps1.in: -------------------------------------------------------------------------------- 1 | # Generated by vinca http://github.com/RoboStack/vinca. 2 | # DO NOT EDIT! 3 | if ($null -eq ${env:CONDA_PREFIX}) { Exit } 4 | 5 | # Don't do anything when we are in conda build. 6 | if ($null -ne ${env:SYS_PREFIX}) { Exit 0 } 7 | 8 | $Env:QT_PLUGIN_PATH="${env:CONDA_PREFIX}\Library\plugins" 9 | 10 | & "${env:CONDA_PREFIX}\Library\local_setup.ps1" 11 | 12 | $Env:PYTHONHOME='' 13 | $Env:ROS_OS_OVERRIDE='conda:win64' 14 | $Env:ROS_ETC_DIR="${env:CONDA_PREFIX}\Library\etc\ros" 15 | $Env:AMENT_PREFIX_PATH="${env:CONDA_PREFIX}\Library" 16 | $Env:AMENT_PYTHON_EXECUTABLE="${env:CONDA_PREFIX}\python.exe" 17 | -------------------------------------------------------------------------------- /vinca/templates/activate.sh.in: -------------------------------------------------------------------------------- 1 | # Generated by vinca http://github.com/RoboStack/vinca. 2 | # DO NOT EDIT! 3 | # if [ -z "${CONDA_PREFIX}" ]; then 4 | # exit 0; 5 | # fi 6 | 7 | # Not sure if this is necessary on UNIX? 
8 | # export QT_PLUGIN_PATH=$CONDA_PREFIX\plugins 9 | 10 | if [ "$CONDA_BUILD" = "1" -a "$target_platform" != "$build_platform" ]; then 11 | # ignore sourcing 12 | echo "Not activating ROS when cross-compiling"; 13 | else 14 | source $CONDA_PREFIX/setup.sh 15 | fi 16 | 17 | case "$OSTYPE" in 18 | darwin*) export ROS_OS_OVERRIDE="conda:osx";; 19 | linux*) export ROS_OS_OVERRIDE="conda:linux";; 20 | esac 21 | 22 | export ROS_ETC_DIR=$CONDA_PREFIX/etc/ros 23 | export AMENT_PREFIX_PATH=$CONDA_PREFIX 24 | 25 | # Looks unnecessary for UNIX 26 | # unset PYTHONHOME= 27 | -------------------------------------------------------------------------------- /vinca/templates/bld_ament_cmake.bat.in: -------------------------------------------------------------------------------- 1 | :: Generated by vinca http://github.com/RoboStack/vinca. 2 | :: DO NOT EDIT! 3 | setlocal EnableDelayedExpansion 4 | 5 | set "PYTHONPATH=%LIBRARY_PREFIX%\lib\site-packages;%SP_DIR%" 6 | 7 | :: MSVC is preferred. 8 | set CC=cl.exe 9 | set CXX=cl.exe 10 | 11 | rd /s /q build 12 | mkdir build 13 | pushd build 14 | 15 | :: set "CMAKE_GENERATOR=Ninja" 16 | :: We use the Visual Studio generator as a workaround for 17 | :: problems in Ninja when using long paths, see https://github.com/RoboStack/ros-humble/pull/229#issuecomment-2564856467 18 | :: Once those are solved, we can switch back to use Ninja 19 | set "CMAKE_GENERATOR=Visual Studio %VS_MAJOR% %VS_YEAR%" 20 | 21 | :: PYTHON_INSTALL_DIR should be a relative path, see 22 | :: https://github.com/ament/ament_cmake/blob/2.3.2/ament_cmake_python/README.md 23 | :: So we compute the relative path of %SP_DIR% w.r.t. 
to LIBRARY_PREFIX, 24 | :: but it is not trivial to do this in Command Prompt scripting, so let's do it via 25 | :: python 26 | 27 | :: This line is scary, but it basically assigns the output of the command inside (` and `) 28 | :: to the variable specified after DO SET 29 | :: The equivalent in bash is PYTHON_INSTALL_DIR=`python -c ...` 30 | FOR /F "tokens=* USEBACKQ" %%i IN (`python -c "import os;print(os.path.relpath(os.environ['SP_DIR'],os.environ['LIBRARY_PREFIX']).replace('\\','/'))"`) DO SET PYTHON_INSTALL_DIR=%%i 31 | 32 | cmake ^ 33 | -G "%CMAKE_GENERATOR%" ^ 34 | -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% ^ 35 | -DCMAKE_BUILD_TYPE=Release ^ 36 | -DCMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP=True ^ 37 | -DPYTHON_EXECUTABLE=%PYTHON% ^ 38 | -DPython_EXECUTABLE=%PYTHON% ^ 39 | -DPython3_EXECUTABLE=%PYTHON% ^ 40 | -DSETUPTOOLS_DEB_LAYOUT=OFF ^ 41 | -DBUILD_SHARED_LIBS=ON ^ 42 | -DBUILD_TESTING=OFF ^ 43 | -DCMAKE_OBJECT_PATH_MAX=255 ^ 44 | --compile-no-warning-as-error ^ 45 | -DPYTHON_INSTALL_DIR=%PYTHON_INSTALL_DIR% ^ 46 | %SRC_DIR%\%PKG_NAME%\src\work 47 | if errorlevel 1 exit 1 48 | 49 | :: We explicitly pass %CPU_COUNT% to cmake --build as we are not using Ninja, 50 | :: see the comment before setting the CMAKE_GENERATOR env variable 51 | cmake --build . --config Release --parallel %CPU_COUNT% --target install 52 | if errorlevel 1 exit 1 53 | -------------------------------------------------------------------------------- /vinca/templates/bld_ament_python.bat.in: -------------------------------------------------------------------------------- 1 | :: Generated by vinca http://github.com/RoboStack/vinca. 2 | :: DO NOT EDIT! 
3 | setlocal 4 | 5 | set "PYTHONPATH=%LIBRARY_PREFIX%\lib\site-packages;%SP_DIR%" 6 | 7 | pushd %SRC_DIR%\%PKG_NAME%\src\work 8 | set "PKG_NAME_SHORT=%PKG_NAME:*ros-@(ros_distro)-=%" 9 | set "PKG_NAME_SHORT=%PKG_NAME_SHORT:-=_%" 10 | 11 | :: If there is a setup.cfg that contains install-scripts then use pip to install 12 | findstr install[-_]scripts setup.cfg 13 | if "%errorlevel%" == "0" ( 14 | %PYTHON% setup.py install --single-version-externally-managed --record=files.txt ^ 15 | --prefix=%LIBRARY_PREFIX% ^ 16 | --install-lib=%SP_DIR% ^ 17 | --install-scripts=%LIBRARY_PREFIX%\lib\%PKG_NAME_SHORT% 18 | ) else ( 19 | %PYTHON% setup.py install --single-version-externally-managed --record=files.txt ^ 20 | --prefix=%LIBRARY_PREFIX% ^ 21 | --install-lib=%SP_DIR% ^ 22 | --install-scripts=%LIBRARY_PREFIX%\bin 23 | ) 24 | 25 | if errorlevel 1 exit 1 26 | -------------------------------------------------------------------------------- /vinca/templates/bld_catkin.bat.in: -------------------------------------------------------------------------------- 1 | :: Generated by vinca http://github.com/RoboStack/vinca. 2 | :: DO NOT EDIT! 3 | setlocal 4 | set "PYTHONPATH=%LIBRARY_PREFIX%\lib\site-packages;%SP_DIR%" 5 | 6 | :: MSVC is preferred. 7 | set CC=cl.exe 8 | set CXX=cl.exe 9 | 10 | :: ROS_BUILD_SHARED_LIBS is always defined in CMake by catkin 11 | :: if ROS (1) is build as shared library . 
However, some packages are not 12 | :: passing compilation flags from CMake to other build systems (such as qmake), 13 | :: so we enable it explicitly via the CL environment variable, see 14 | :: https://learn.microsoft.com/en-us/cpp/build/reference/cl-environment-variables?view=msvc-170 15 | set CL=/DROS_BUILD_SHARED_LIBS=1 /DNOGDI=1 16 | 17 | set "CATKIN_BUILD_BINARY_PACKAGE_ARGS=-DCATKIN_BUILD_BINARY_PACKAGE=1" 18 | if "%PKG_NAME%" == "ros-@(ros_distro)-catkin" ( 19 | :: create catkin cookie to make it is a catkin workspace 20 | type NUL > %LIBRARY_PREFIX%\.catkin 21 | :: keep the workspace activation scripts (e.g., local_setup.bat) 22 | set CATKIN_BUILD_BINARY_PACKAGE_ARGS= 23 | ) 24 | 25 | rd /s /q build 26 | mkdir build 27 | pushd build 28 | 29 | set SKIP_TESTING=@(skip_testing) 30 | 31 | cmake ^ 32 | -G "Ninja" ^ 33 | --compile-no-warning-as-error ^ 34 | -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% ^ 35 | -DCMAKE_BUILD_TYPE=Release ^ 36 | -DCMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP=ON ^ 37 | -DBUILD_SHARED_LIBS=ON ^ 38 | -DPYTHON_EXECUTABLE=%PYTHON% ^ 39 | -DPython_EXECUTABLE=%PYTHON% ^ 40 | -DPython3_EXECUTABLE=%PYTHON% ^ 41 | -DSETUPTOOLS_DEB_LAYOUT=OFF ^ 42 | -DBoost_USE_STATIC_LIBS=OFF ^ 43 | %CATKIN_BUILD_BINARY_PACKAGE_ARGS% ^ 44 | -DCATKIN_SKIP_TESTING=%SKIP_TESTING% ^ 45 | %SRC_DIR%\%PKG_NAME%\src\work 46 | if errorlevel 1 exit 1 47 | 48 | if "%PKG_NAME%" == "ros-@(ros_distro)-eigenpy" ( 49 | cmake --build . --config Release --target all --parallel 1 50 | if errorlevel 1 exit 1 51 | ) else ( 52 | cmake --build . --config Release --target all 53 | if errorlevel 1 exit 1 54 | ) 55 | 56 | if "%SKIP_TESTING%" == "OFF" ( 57 | cmake --build . --config Release --target run_tests 58 | if errorlevel 1 exit 1 59 | ) 60 | 61 | cmake --build . --config Release --target install 62 | if errorlevel 1 exit 1 63 | 64 | if "%PKG_NAME%" == "ros-@(ros_distro)-catkin" ( 65 | :: Copy the [de]activate scripts to %PREFIX%\etc\conda\[de]activate.d. 
66 | :: This will allow them to be run on environment activation. 67 | for %%F in (activate deactivate) DO ( 68 | if not exist %PREFIX%\etc\conda\%%F.d mkdir %PREFIX%\etc\conda\%%F.d 69 | copy %RECIPE_DIR%\%%F.bat %PREFIX%\etc\conda\%%F.d\%PKG_NAME%_%%F.bat 70 | ) 71 | ) 72 | 73 | if "%PKG_NAME%" == "ros-@(ros_distro)-ros-workspace" ( 74 | :: Copy the [de]activate scripts to %PREFIX%\etc\conda\[de]activate.d. 75 | :: This will allow them to be run on environment activation. 76 | for %%F in (activate deactivate) DO ( 77 | if not exist %PREFIX%\etc\conda\%%F.d mkdir %PREFIX%\etc\conda\%%F.d 78 | copy %RECIPE_DIR%\%%F.bat %PREFIX%\etc\conda\%%F.d\%PKG_NAME%_%%F.bat 79 | copy %RECIPE_DIR%\%%F.ps1 %PREFIX%\etc\conda\%%F.d\%PKG_NAME%_%%F.ps1 80 | ) 81 | ) 82 | -------------------------------------------------------------------------------- /vinca/templates/bld_catkin_merge.bat.in: -------------------------------------------------------------------------------- 1 | :: Generated by vinca http://github.com/RoboStack/vinca. 2 | :: DO NOT EDIT! 3 | setlocal 4 | 5 | :: MSVC is preferred. 6 | set CC=cl.exe 7 | set CXX=cl.exe 8 | 9 | :: ROS_BUILD_SHARED_LIBS is always defined in CMake by catkin 10 | :: if ROS (1) is build as shared library . 
However, some packages are not 11 | :: passing compilation flags from CMake to other build systems (such as qmake), 12 | :: so we enable it explicitly via the CL environment variable, see 13 | :: https://learn.microsoft.com/en-us/cpp/build/reference/cl-environment-variables?view=msvc-170 14 | set CL=/DROS_BUILD_SHARED_LIBS=1 /DNOGDI=1 15 | 16 | set CATKIN_MAKE_ISOLATED=src\ros-@(ros_distro)-catkin\bin\catkin_make_isolated 17 | set CMAKE_PREFIX_PATH=%CMAKE_PREFIX_PATH:\=/% 18 | 19 | %PYTHON% %CATKIN_MAKE_ISOLATED% ^ 20 | --install-space %LIBRARY_PREFIX% ^ 21 | --use-ninja ^ 22 | --install ^ 23 | -DCMAKE_BUILD_TYPE=Release ^ 24 | -DBUILD_SHARED_LIBS=ON ^ 25 | -DPYTHON_EXECUTABLE=%PYTHON% ^ 26 | -DCATKIN_SKIP_TESTING=ON 27 | if errorlevel 1 exit 1 28 | 29 | :: Copy the [de]activate scripts to %PREFIX%\etc\conda\[de]activate.d. 30 | :: This will allow them to be run on environment activation. 31 | for %%F in (activate deactivate) DO ( 32 | if not exist %PREFIX%\etc\conda\%%F.d mkdir %PREFIX%\etc\conda\%%F.d 33 | copy %RECIPE_DIR%\%%F.bat %PREFIX%\etc\conda\%%F.d\%PKG_NAME%_%%F.bat 34 | ) 35 | -------------------------------------------------------------------------------- /vinca/templates/bld_colcon_merge.bat.in: -------------------------------------------------------------------------------- 1 | :: Generated by vinca http://github.com/RoboStack/vinca. 2 | :: DO NOT EDIT! 3 | setlocal 4 | 5 | :: MSVC is preferred. 6 | set CC=cl.exe 7 | set CXX=cl.exe 8 | 9 | :: PYTHON_INSTALL_DIR should be a relative path, see 10 | :: https://github.com/ament/ament_cmake/blob/2.3.2/ament_cmake_python/README.md 11 | :: So we compute the relative path of %SP_DIR% w.r.t. 
to LIBRARY_PREFIX, 12 | :: but it is not trivial to do this in Command Prompt scripting, so let's do it via 13 | :: python 14 | 15 | :: This line is scary, but it basically assigns the output of the command inside (` and `) 16 | :: to the variable specified after DO SET 17 | :: The equivalent in bash is PYTHON_INSTALL_DIR=`python -c ...` 18 | FOR /F "tokens=* USEBACKQ" %%i IN (`python -c "import os;print(os.path.relpath(os.environ['SP_DIR'],os.environ['LIBRARY_PREFIX']).replace('\\','/'))"`) DO SET PYTHON_INSTALL_DIR=%%i 19 | 20 | colcon build ^ 21 | --event-handlers console_cohesion+ ^ 22 | --merge-install ^ 23 | --install-base %LIBRARY_PREFIX% ^ 24 | --cmake-args ^ 25 | --compile-no-warning-as-error ^ 26 | -G Ninja ^ 27 | -DCMAKE_BUILD_TYPE=Release ^ 28 | -DBUILD_TESTING=OFF ^ 29 | -DPYTHON_INSTALL_DIR=%PYTHON_INSTALL_DIR% ^ 30 | -DPYTHON_EXECUTABLE=%PYTHON% 31 | if errorlevel 1 exit 1 32 | 33 | :: Copy the [de]activate scripts to %PREFIX%\etc\conda\[de]activate.d. 34 | :: This will allow them to be run on environment activation. 35 | for %%F in (activate deactivate) DO ( 36 | if not exist %PREFIX%\etc\conda\%%F.d mkdir %PREFIX%\etc\conda\%%F.d 37 | copy %RECIPE_DIR%\%%F.bat %PREFIX%\etc\conda\%%F.d\%PKG_NAME%_%%F.bat 38 | ) 39 | -------------------------------------------------------------------------------- /vinca/templates/build_ament_cmake.sh.in: -------------------------------------------------------------------------------- 1 | # Generated by vinca http://github.com/RoboStack/vinca. 2 | # DO NOT EDIT! 
3 | 4 | set -eo pipefail 5 | 6 | rm -rf build 7 | mkdir build 8 | cd build 9 | 10 | # necessary for correctly linking SIP files (from python_qt_bindings) 11 | export LINK=$CXX 12 | 13 | if [[ "$CONDA_BUILD_CROSS_COMPILATION" != "1" ]]; then 14 | PYTHON_EXECUTABLE=$PREFIX/bin/python 15 | PKG_CONFIG_EXECUTABLE=$PREFIX/bin/pkg-config 16 | OSX_DEPLOYMENT_TARGET="10.15" 17 | else 18 | PYTHON_EXECUTABLE=$BUILD_PREFIX/bin/python 19 | PKG_CONFIG_EXECUTABLE=$BUILD_PREFIX/bin/pkg-config 20 | OSX_DEPLOYMENT_TARGET="11.0" 21 | fi 22 | 23 | if [[ "${CONDA_BUILD_CROSS_COMPILATION:-}" == "1" ]]; then 24 | export QT_HOST_PATH="$BUILD_PREFIX" 25 | else 26 | export QT_HOST_PATH="$PREFIX" 27 | fi 28 | 29 | echo "USING PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE}" 30 | echo "USING PKG_CONFIG_EXECUTABLE=${PKG_CONFIG_EXECUTABLE}" 31 | 32 | export ROS_PYTHON_VERSION=`$PYTHON_EXECUTABLE -c "import sys; print('%i.%i' % (sys.version_info[0:2]))"` 33 | echo "Using Python ${ROS_PYTHON_VERSION}" 34 | 35 | # see https://github.com/conda-forge/cross-python-feedstock/issues/24 36 | if [[ "$CONDA_BUILD_CROSS_COMPILATION" == "1" ]]; then 37 | find $PREFIX/lib/cmake -type f -exec sed -i "s~\${_IMPORT_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~${BUILD_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~g" {} + || true 38 | find $PREFIX/share/rosidl* -type f -exec sed -i "s~${PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~${BUILD_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~g" {} + || true 39 | find $PREFIX/share/rosidl* -type f -exec sed -i "s~\${_IMPORT_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~${BUILD_PREFIX}/lib/python${ROS_PYTHON_VERSION}/site-packages~g" {} + || true 40 | find $PREFIX/lib/cmake -type f -exec sed -i "s~message(FATAL_ERROR \"The imported target~message(WARNING \"The imported target~g" {} + || true 41 | fi 42 | 43 | if [[ $target_platform =~ linux.* ]]; then 44 | export CFLAGS="${CFLAGS} -D__STDC_FORMAT_MACROS=1" 45 | export CXXFLAGS="${CXXFLAGS} 
-D__STDC_FORMAT_MACROS=1" 46 | fi; 47 | 48 | # Needed for qt-gui-cpp .. 49 | if [[ $target_platform =~ linux.* ]]; then 50 | ln -s $GCC ${BUILD_PREFIX}/bin/gcc 51 | ln -s $GXX ${BUILD_PREFIX}/bin/g++ 52 | fi; 53 | 54 | # PYTHON_INSTALL_DIR should be a relative path, see 55 | # https://github.com/ament/ament_cmake/blob/2.3.2/ament_cmake_python/README.md 56 | # So we compute the relative path of $SP_DIR w.r.t. to $PREFIX, 57 | # but it is not trivial to do this in bash scripting, so let's do it via python 58 | export PYTHON_INSTALL_DIR=`python -c "import os;print(os.path.relpath(os.environ['SP_DIR'],os.environ['PREFIX']))"` 59 | echo "Using PYTHON_INSTALL_DIR: $PYTHON_INSTALL_DIR" 60 | 61 | if [[ $target_platform =~ emscripten.* ]]; then 62 | export CONDA_BUILD_CROSS_COMPILATION="1" 63 | PYTHON_EXECUTABLE=$BUILD_PREFIX/bin/python$PY_VER 64 | echo "set_property(GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS TRUE)"> $SRC_DIR/__vinca_shared_lib_patch.cmake 65 | echo "set(CMAKE_STRIP FALSE) # used by default in pybind11 on .so modules">> $SRC_DIR/__vinca_shared_lib_patch.cmake 66 | echo "set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH) # fixes an error where numpy header files are not found correctly">> $SRC_DIR/__vinca_shared_lib_patch.cmake 67 | 68 | # if [ "${PKG_NAME}" == "ros-humble-examples-rclcpp-minimal-publisher" ] || [ "${PKG_NAME}" == "ros-humble-examples-rclcpp-minimal-subscriber" ] || [ "${PKG_NAME}" == "ros-humble-rclcpp-components" ]; then 69 | # echo "set(CMAKE_SHARED_LIBRARY_CREATE_C_FLAGS \"-s ASSERTIONS=1 -s SIDE_MODULE=1 -sWASM_BIGINT -s USE_PTHREADS=0 -s DEMANGLE_SUPPORT=1 -s ALLOW_MEMORY_GROWTH=1 \")">> $SRC_DIR/__vinca_shared_lib_patch.cmake 70 | # echo "set(CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS \"-s ASSERTIONS=1 -s SIDE_MODULE=1 -sWASM_BIGINT -s USE_PTHREADS=0 -s DEMANGLE_SUPPORT=1 -s ALLOW_MEMORY_GROWTH=1 -sASYNCIFY -O3 -s ASYNCIFY_STACK_SIZE=24576 \")">> $SRC_DIR/__vinca_shared_lib_patch.cmake 71 | # echo "set(CMAKE_EXE_LINKER_FLAGS \"-sMAIN_MODULE=1 
-sASSERTIONS=1 -fexceptions -lembind -sWASM_BIGINT -s USE_PTHREADS=0 -s DEMANGLE_SUPPORT=1 -sALLOW_MEMORY_GROWTH=1 -sASYNCIFY -O3 -s ASYNCIFY_STACK_SIZE=24576 -L$SRC_DIR/build -L$PREFIX/lib\") # remove SIDE_MODULE from exe linker flags">> $SRC_DIR/__vinca_shared_lib_patch.cmake 72 | # else 73 | echo "set(CMAKE_SHARED_LIBRARY_CREATE_C_FLAGS \"-s ASSERTIONS=1 -s SIDE_MODULE=1 -sWASM_BIGINT -s USE_PTHREADS=0 -s ALLOW_MEMORY_GROWTH=1 -s DEMANGLE_SUPPORT=1 \")">> $SRC_DIR/__vinca_shared_lib_patch.cmake 74 | echo "set(CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS \"-s ASSERTIONS=1 -s SIDE_MODULE=1 -sWASM_BIGINT -s USE_PTHREADS=0 -s ALLOW_MEMORY_GROWTH=1 -s DEMANGLE_SUPPORT=1 \")">> $SRC_DIR/__vinca_shared_lib_patch.cmake 75 | echo "set(CMAKE_EXE_LINKER_FLAGS \"-sMAIN_MODULE=1 -sASSERTIONS=1 -fexceptions -lembind -sWASM_BIGINT -s USE_PTHREADS=0 -sALLOW_MEMORY_GROWTH=1 -s DEMANGLE_SUPPORT=1 -L$SRC_DIR/build -L$PREFIX/lib\") # remove SIDE_MODULE from exe linker flags">> $SRC_DIR/__vinca_shared_lib_patch.cmake 76 | # fi 77 | 78 | export BUILD_TYPE="Debug" 79 | export EXTRA_CMAKE_ARGS=" \ 80 | -DPYTHON_SOABI="cpython-${ROS_PYTHON_VERSION//./}-wasm32-emscripten" \ 81 | -DRMW_IMPLEMENTATION=rmw_wasm_cpp \ 82 | -DCMAKE_FIND_ROOT_PATH=$PREFIX \ 83 | -DCMAKE_POSITION_INDEPENDENT_CODE=TRUE \ 84 | -DCMAKE_PROJECT_INCLUDE=$SRC_DIR/__vinca_shared_lib_patch.cmake \ 85 | " 86 | 87 | unset -f cmake 88 | export CMAKE_GEN="emcmake cmake" 89 | export CMAKE_BLD="cmake" 90 | 91 | export STATIC_ROSIDL_TYPESUPPORT_C=rosidl_typesupport_introspection_c 92 | export STATIC_ROSIDL_TYPESUPPORT_CPP=rosidl_typesupport_introspection_cpp 93 | else 94 | export BUILD_TYPE="Release" 95 | export CMAKE_GEN="cmake" 96 | export CMAKE_BLD="cmake" 97 | fi; 98 | 99 | if [ "${PKG_NAME}" == "ros-humble-rmw-wasm-cpp" ]; then 100 | WORK_DIR=$SRC_DIR/$PKG_NAME/src/work/rmw_wasm_cpp 101 | elif [ "${PKG_NAME}" == "ros-humble-wasm-cpp" ]; then 102 | WORK_DIR=$SRC_DIR/$PKG_NAME/src/work/wasm_cpp 103 | elif [ "${PKG_NAME}" == 
"dynmsg" ]; then 104 | WORK_DIR=$SRC_DIR/$PKG_NAME/src/work/dynmsg 105 | else 106 | WORK_DIR=$SRC_DIR/$PKG_NAME/src/work 107 | fi; 108 | 109 | $CMAKE_GEN \ 110 | -G "Ninja" \ 111 | -DCMAKE_BUILD_TYPE=$BUILD_TYPE \ 112 | -DCMAKE_INSTALL_PREFIX=$PREFIX \ 113 | -DCMAKE_PREFIX_PATH=$PREFIX \ 114 | -DAMENT_PREFIX_PATH=$PREFIX \ 115 | -DCMAKE_INSTALL_LIBDIR=lib \ 116 | -DPYTHON_EXECUTABLE=$PYTHON_EXECUTABLE \ 117 | -DPython_EXECUTABLE=$PYTHON_EXECUTABLE \ 118 | -DPython3_EXECUTABLE=$PYTHON_EXECUTABLE \ 119 | -DPython3_FIND_STRATEGY=LOCATION \ 120 | -DPKG_CONFIG_EXECUTABLE=$PKG_CONFIG_EXECUTABLE \ 121 | -DPYTHON_INSTALL_DIR=$PYTHON_INSTALL_DIR \ 122 | -DSETUPTOOLS_DEB_LAYOUT=OFF \ 123 | -DCATKIN_SKIP_TESTING=$SKIP_TESTING \ 124 | -DCMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP=True \ 125 | -DBUILD_SHARED_LIBS=ON \ 126 | -DBUILD_TESTING=OFF \ 127 | -DCMAKE_IGNORE_PREFIX_PATH="/opt/homebrew;/usr/local/homebrew" \ 128 | -DCMAKE_OSX_DEPLOYMENT_TARGET=$OSX_DEPLOYMENT_TARGET \ 129 | --compile-no-warning-as-error \ 130 | $EXTRA_CMAKE_ARGS \ 131 | $WORK_DIR 132 | 133 | $CMAKE_BLD --build . --config $BUILD_TYPE --target install 134 | -------------------------------------------------------------------------------- /vinca/templates/build_ament_python.sh.in: -------------------------------------------------------------------------------- 1 | # Generated by vinca http://github.com/RoboStack/vinca. 2 | # DO NOT EDIT! 3 | 4 | set -eo pipefail 5 | 6 | pushd $SRC_DIR/$PKG_NAME/src/work 7 | 8 | # If there is a setup.cfg that contains install-scripts then we should not set it here 9 | if [ -f setup.cfg ] && grep -q "install[-_]scripts" setup.cfg; then 10 | # Remove e.g. 
ros-humble- from PKG_NAME 11 | PKG_NAME_SHORT=${PKG_NAME#*ros-@(ros_distro)-} 12 | # Substitute "-" with "_" 13 | PKG_NAME_SHORT=${PKG_NAME_SHORT//-/_} 14 | INSTALL_SCRIPTS_ARG="--install-scripts=$PREFIX/lib/$PKG_NAME_SHORT" 15 | echo "WARNING: setup.cfg not set, will set INSTALL_SCRIPTS_ARG to: $INSTALL_SCRIPTS_ARG" 16 | $PYTHON setup.py install --prefix="$PREFIX" --install-lib="$SP_DIR" $INSTALL_SCRIPTS_ARG --single-version-externally-managed --record=files.txt 17 | else 18 | $PYTHON -m pip install . --no-deps -vvv 19 | fi 20 | -------------------------------------------------------------------------------- /vinca/templates/build_catkin.sh.in: -------------------------------------------------------------------------------- 1 | # Generated by vinca http://github.com/RoboStack/vinca. 2 | # DO NOT EDIT! 3 | 4 | set -eo pipefail 5 | 6 | CATKIN_BUILD_BINARY_PACKAGE="ON" 7 | 8 | if [ "${PKG_NAME}" == "ros-@(ros_distro)-catkin" ]; then 9 | # create catkin cookie to mark it as a catkin workspace 10 | touch $PREFIX/.catkin 11 | # keep the workspace activation scripts (e.g., local_setup.bat) 12 | CATKIN_BUILD_BINARY_PACKAGE="OFF" 13 | fi 14 | 15 | rm -rf build 16 | mkdir build 17 | cd build 18 | 19 | # necessary for correctly linking SIP files (from python_qt_bindings) 20 | export LINK=$CXX 21 | 22 | if [[ "$CONDA_BUILD_CROSS_COMPILATION" != "1" ]]; then 23 | PYTHON_EXECUTABLE=$PREFIX/bin/python 24 | PKG_CONFIG_EXECUTABLE=$PREFIX/bin/pkg-config 25 | OSX_DEPLOYMENT_TARGET="10.15" 26 | else 27 | PYTHON_EXECUTABLE=$BUILD_PREFIX/bin/python 28 | PKG_CONFIG_EXECUTABLE=$BUILD_PREFIX/bin/pkg-config 29 | OSX_DEPLOYMENT_TARGET="11.0" 30 | fi 31 | 32 | if [[ "${CONDA_BUILD_CROSS_COMPILATION:-}" == "1" ]]; then 33 | export QT_HOST_PATH="$BUILD_PREFIX" 34 | else 35 | export QT_HOST_PATH="$PREFIX" 36 | fi 37 | 38 | echo "USING PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE}" 39 | echo "USING PKG_CONFIG_EXECUTABLE=${PKG_CONFIG_EXECUTABLE}" 40 | 41 | export ROS_PYTHON_VERSION=`$PYTHON_EXECUTABLE 
-c "import sys; print('%i.%i' % (sys.version_info[0:2]))"` 42 | echo "Using Python $ROS_PYTHON_VERSION" 43 | # Fix up SP_DIR which for some reason might contain a path to a wrong Python version 44 | FIXED_SP_DIR=$(echo $SP_DIR | sed -E "s/python[0-9]+\.[0-9]+/python$ROS_PYTHON_VERSION/") 45 | echo "Using site-package dir ${FIXED_SP_DIR}" 46 | 47 | # see https://github.com/conda-forge/cross-python-feedstock/issues/24 48 | if [[ "$CONDA_BUILD_CROSS_COMPILATION" == "1" ]]; then 49 | find $PREFIX/lib/cmake -type f -exec sed -i "s~\${_IMPORT_PREFIX}/lib/python$ROS_PYTHON_VERSION/site-packages~$BUILD_PREFIX/lib/python$ROS_PYTHON_VERSION/site-packages~g" {} + || true 50 | find $PREFIX/share/rosidl* -type f -exec sed -i "s~$PREFIX/lib/python$ROS_PYTHON_VERSION/site-packages~$BUILD_PREFIX/lib/python$ROS_PYTHON_VERSION/site-packages~g" {} + || true 51 | find $PREFIX/share/rosidl* -type f -exec sed -i "s~\${_IMPORT_PREFIX}/lib/python$ROS_PYTHON_VERSION/site-packages~$BUILD_PREFIX/lib/python$ROS_PYTHON_VERSION/site-packages~g" {} + || true 52 | find $PREFIX/lib/cmake -type f -exec sed -i "s~message(FATAL_ERROR \"The imported target~message(WARNING \"The imported target~g" {} + || true 53 | # workaround for bad CPU type issues (disabled for now) 54 | # rm $PREFIX/bin/doxygen || echo "doxygen not found" 55 | # rm $PREFIX/bin/dia || echo "dia not found" 56 | # rm $PREFIX/bin/dot || echo "dot not found" 57 | fi 58 | 59 | # NOTE: there might be undefined references occurring 60 | # in the Boost.system library, depending on the C++ versions 61 | # used to compile Boost. We can avoid them by forcing the use of 62 | # the header-only version of the library. 63 | export CXXFLAGS="$CXXFLAGS -DBOOST_ERROR_CODE_HEADER_ONLY" 64 | 65 | if [[ $target_platform =~ linux.* ]]; then 66 | export CFLAGS="$CFLAGS -D__STDC_FORMAT_MACROS=1"; 67 | export CXXFLAGS="$CXXFLAGS -D__STDC_FORMAT_MACROS=1"; 68 | # I am too scared to turn this on for now ... 
69 | # export LDFLAGS="$LDFLAGS -lrt"; 70 | # Some qt stuff uses g++ directly - fix these use cases 71 | ln -s $GXX $BUILD_PREFIX/bin/g++ 72 | fi 73 | 74 | if [[ $target_platform =~ emscripten.* ]]; then 75 | export CONDA_BUILD_CROSS_COMPILATION="1" 76 | echo "set_property(GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS TRUE)"> $SRC_DIR/__vinca_shared_lib_patch.cmake 77 | echo "set(CMAKE_STRIP FALSE) # used by default in pybind11 on .so modules">> $SRC_DIR/__vinca_shared_lib_patch.cmake 78 | echo "set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE BOTH) # fixes an error where numpy header files are not found correctly">> $SRC_DIR/__vinca_shared_lib_patch.cmake 79 | export EXTRA_CMAKE_ARGS=" \ 80 | -DCMAKE_FIND_ROOT_PATH=$PREFIX \ 81 | -DCMAKE_POSITION_INDEPENDENT_CODE=TRUE \ 82 | -DCMAKE_PROJECT_INCLUDE=$SRC_DIR/__vinca_shared_lib_patch.cmake \ 83 | " 84 | fi 85 | 86 | export SKIP_TESTING=@(skip_testing) 87 | 88 | if [ "${PKG_NAME}" == "ros-noetic-euslisp" ] || [ "${PKG_NAME}" = "ros-noetic-jskeus" ] || [ "${PKG_NAME}" = "ros-noetic-roseus" ]; then 89 | GENERATOR="Unix Makefiles" 90 | else 91 | GENERATOR="Ninja" 92 | fi 93 | 94 | cmake ${CMAKE_ARGS} --compile-no-warning-as-error \ 95 | -DCMAKE_INSTALL_PREFIX=$PREFIX \ 96 | -DCMAKE_PREFIX_PATH=$PREFIX \ 97 | -DCMAKE_BUILD_TYPE=Release \ 98 | -DCMAKE_INSTALL_LIBDIR=lib \ 99 | -DCMAKE_NO_SYSTEM_FROM_IMPORTED=ON \ 100 | -DCMAKE_FIND_FRAMEWORK=LAST \ 101 | -DCMAKE_AUTOMOC_EXECUTABLE=$CMAKE_AUTOMOC_EXECUTABLE \ 102 | -DBUILD_SHARED_LIBS=ON \ 103 | -DPYTHON_EXECUTABLE=$PYTHON_EXECUTABLE \ 104 | -DPython_EXECUTABLE=$PYTHON_EXECUTABLE \ 105 | -DPython3_EXECUTABLE=$PYTHON_EXECUTABLE \ 106 | -DPython3_FIND_STRATEGY=LOCATION \ 107 | -DPYTHON_INSTALL_DIR=$FIXED_SP_DIR \ 108 | -DPKG_CONFIG_EXECUTABLE=$PKG_CONFIG_EXECUTABLE \ 109 | -DSETUPTOOLS_DEB_LAYOUT=OFF \ 110 | -DCATKIN_SKIP_TESTING=$SKIP_TESTING \ 111 | -DCATKIN_BUILD_BINARY_PACKAGE=$CATKIN_BUILD_BINARY_PACKAGE \ 112 | -DCMAKE_OSX_DEPLOYMENT_TARGET=$OSX_DEPLOYMENT_TARGET \ 113 | 
$EXTRA_CMAKE_ARGS \ 114 | -G "$GENERATOR" \ 115 | $SRC_DIR/$PKG_NAME/src/work 116 | 117 | cmake --build . --config Release --target all 118 | 119 | if [[ "$SKIP_TESTING" == "OFF" ]]; then 120 | cmake --build . --config Release --target run_tests 121 | fi 122 | 123 | cmake --build . --config Release --target install 124 | 125 | if [ "${PKG_NAME}" == "ros-@(ros_distro)-catkin" ]; then 126 | # Copy the [de]activate scripts to $PREFIX/etc/conda/[de]activate.d. 127 | # This will allow them to be run on environment activation. 128 | for CHANGE in "activate" "deactivate" 129 | do 130 | mkdir -p "${PREFIX}/etc/conda/${CHANGE}.d" 131 | cp "${RECIPE_DIR}/${CHANGE}.sh" "${PREFIX}/etc/conda/${CHANGE}.d/${PKG_NAME}_${CHANGE}.sh" 132 | done 133 | fi 134 | 135 | if [ "${PKG_NAME}" == "ros-@(ros_distro)-environment" ]; then 136 | for SCRIPT in "1.ros_distro.sh" "1.ros_etc_dir.sh" "1.ros_package_path.sh" "1.ros_python_version.sh" "1.ros_version.sh" 137 | do 138 | mkdir -p "${PREFIX}/etc/conda/activate.d" 139 | cp "${PREFIX}/etc/catkin/profile.d/${SCRIPT}" "${PREFIX}/etc/conda/activate.d/${SCRIPT}" 140 | done 141 | fi 142 | 143 | if [ "${PKG_NAME}" == "ros-@(ros_distro)-ros-workspace" ]; then 144 | # Copy the [de]activate scripts to $PREFIX/etc/conda/[de]activate.d. 145 | # This will allow them to be run on environment activation. 146 | for CHANGE in "activate" "deactivate" 147 | do 148 | mkdir -p "${PREFIX}/etc/conda/${CHANGE}.d" 149 | cp "${RECIPE_DIR}/${CHANGE}.sh" "${PREFIX}/etc/conda/${CHANGE}.d/${PKG_NAME}_${CHANGE}.sh" 150 | done 151 | fi 152 | -------------------------------------------------------------------------------- /vinca/templates/deactivate.bat.in: -------------------------------------------------------------------------------- 1 | :: Generated by vinca http://github.com/RoboStack/vinca. 2 | :: DO NOT EDIT!
3 | @@if not defined CONDA_PREFIX goto:eof 4 | 5 | @@set ROS_OS_OVERRIDE= 6 | @@set ROS_DISTRO= 7 | @@set ROS_ETC_DIR= 8 | @@set ROS_PACKAGE_PATH= 9 | @@set ROS_PYTHON_VERSION= 10 | @@set ROS_VERSION= 11 | @@set PYTHONHOME= 12 | @@set PYTHONPATH= 13 | @@set CMAKE_PREFIX_PATH= 14 | @@set AMENT_PREFIX_PATH= 15 | @@set COLCON_PREFIX_PATH= 16 | @@set QT_PLUGIN_PATH= 17 | @@set ROS_LOCALHOST_ONLY= 18 | @@set ament_python_executable= 19 | -------------------------------------------------------------------------------- /vinca/templates/deactivate.ps1.in: -------------------------------------------------------------------------------- 1 | # Generated by vinca http://github.com/RoboStack/vinca. 2 | # DO NOT EDIT! Clears ROS-related environment variables on conda env deactivation. 3 | if ($null -eq ${env:CONDA_PREFIX}) { Exit } 4 | 5 | $Env:ROS_OS_OVERRIDE='' 6 | $Env:ROS_DISTRO='' 7 | $Env:ROS_ETC_DIR='' 8 | $Env:ROS_PACKAGE_PATH='' 9 | $Env:ROS_PYTHON_VERSION='' 10 | $Env:ROS_VERSION='' 11 | $Env:PYTHONHOME='' 12 | $Env:PYTHONPATH='' 13 | $Env:CMAKE_PREFIX_PATH='' 14 | $Env:AMENT_PREFIX_PATH='' 15 | $Env:COLCON_PREFIX_PATH='' 16 | $Env:QT_PLUGIN_PATH='' 17 | $Env:ROS_LOCALHOST_ONLY='' 18 | $Env:ament_python_executable='' 19 | -------------------------------------------------------------------------------- /vinca/templates/deactivate.sh.in: -------------------------------------------------------------------------------- 1 | # Generated by vinca http://github.com/RoboStack/vinca. 2 | # DO NOT EDIT!
3 | if [ -z "${CONDA_PREFIX}" ]; then 4 | exit 0 5 | fi 6 | 7 | unset ROS_DISTRO 8 | unset ROS_ETC_DIR 9 | unset ROS_PACKAGE_PATH 10 | unset ROS_PYTHON_VERSION 11 | unset CMAKE_PREFIX_PATH 12 | unset AMENT_PREFIX_PATH 13 | unset COLCON_PREFIX_PATH 14 | unset ROS_VERSION 15 | unset ROS_OS_OVERRIDE 16 | # unset PYTHONPATH 17 | # unset PYTHONHOME 18 | # unset QT_PLUGIN_PATH 19 | unset ROS_LOCALHOST_ONLY 20 | unset ament_python_executable 21 | unset RMW_IMPLEMENTATION 22 | -------------------------------------------------------------------------------- /vinca/utils.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import hashlib 3 | import os 4 | import time 5 | import json 6 | import requests 7 | 8 | 9 | class folded_unicode(str): 10 | pass 11 | 12 | 13 | class literal_unicode(str): 14 | pass 15 | 16 | 17 | def folded_unicode_representer(dumper, data): 18 | return dumper.represent_scalar("tag:yaml.org,2002:str", data, style=">") 19 | 20 | 21 | def literal_unicode_representer(dumper, data): 22 | return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|") 23 | 24 | 25 | yaml.add_representer(folded_unicode, folded_unicode_representer) 26 | yaml.add_representer(literal_unicode, literal_unicode_representer) 27 | 28 | 29 | class NoAliasDumper(yaml.SafeDumper): 30 | def __init__(self, *args, **kwargs): 31 | super().__init__(*args, **kwargs) 32 | 33 | self.add_representer(folded_unicode, folded_unicode_representer) 34 | self.add_representer(literal_unicode, literal_unicode_representer) 35 | 36 | def ignore_aliases(self, data): 37 | return True 38 | 39 | 40 | def get_repodata(url_or_path, platform=None): 41 | if platform: 42 | if not url_or_path.endswith("/"): 43 | url_or_path += "/" 44 | url_or_path += f"{platform}/repodata.json" 45 | 46 | if "://" not in url_or_path: 47 | with open(url_or_path) as fi: 48 | return json.load(fi) 49 | print("Downloading repodata from ", url_or_path) 50 | 51 | m = 
hashlib.md5(url_or_path.encode("utf-8")).hexdigest()[:10] 52 | # print(tempfile.gettempdir()) 53 | fn = f"vinca_{m}.json" 54 | if os.path.exists(fn): 55 | st = os.stat(fn) 56 | age = time.time() - st.st_mtime 57 | print(f"Found cached repodata, age: {age}") 58 | max_age = 100_000 # seconds == 27 hours 59 | if age < max_age: 60 | with open(fn) as fi: 61 | return json.load(fi) 62 | 63 | repodata = requests.get(url_or_path) 64 | content = repodata.content 65 | with open(fn, "w") as fcache: 66 | fcache.write(content.decode("utf-8")) 67 | return json.loads(content) 68 | 69 | def ensure_name_is_without_distro_prefix_and_with_underscores(name, vinca_conf): 70 | """ 71 | Ensure that the name is without distro prefix and with underscores 72 | e.g. "ros-humble-pkg-name" -> "pkg_name" 73 | """ 74 | newname = name.replace("-", "_") 75 | distro_prefix = "ros_" + vinca_conf.get("ros_distro") + "_" 76 | if (newname.startswith(distro_prefix) ): 77 | newname = newname.replace(distro_prefix, "") 78 | 79 | return newname 80 | 81 | def get_pkg_build_number(default_build_number, pkg_name, vinca_conf): 82 | normalized_name = ensure_name_is_without_distro_prefix_and_with_underscores(pkg_name, vinca_conf) 83 | pkg_additional_info = vinca_conf["_pkg_additional_info"].get(normalized_name, {}) 84 | return pkg_additional_info.get("build_number", default_build_number) 85 | --------------------------------------------------------------------------------