├── patches ├── nestegg │ └── nestegg-stdint.h ├── libnestegg.py └── libmodplug_771ee75bb8bfcfe95eae434ed1f3b2c5b63b2cb3.patch ├── .gitignore ├── LICENSE ├── bootstrap.json ├── README.md └── bootstrap.py /patches/nestegg/nestegg-stdint.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | src 3 | archives 4 | snapshots 5 | .*.json 6 | .idea 7 | orig 8 | xcuserdata 9 | -------------------------------------------------------------------------------- /patches/libnestegg.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import shutil 5 | 6 | base_dir=os.getcwd() 7 | lib_dir=os.path.join(base_dir, "src", "nestegg") 8 | 9 | shutil.copyfile(os.path.join(base_dir, "patches", "nestegg", "nestegg-stdint.h"), os.path.join(lib_dir, "include", "nestegg", "nestegg-stdint.h")) 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2016-2025, Sergey Kosarevsky 3 | 4 | --- 5 | Based on https://bitbucket.org/blippar/bootstrapping-external-libs 6 | 7 | The MIT License (MIT) 8 | Copyright (c) 2016 Blippar.com Ltd 9 | 10 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and 
this permission notice shall be included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 15 | -------------------------------------------------------------------------------- /bootstrap.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "libjpeg", 4 | "source": { 5 | "type": "archive", 6 | "url": "http://www.ijg.org/files/jpegsrc.v9d.tar.gz", 7 | "sha1": "19b32a12988eae920d142243373841ed78cd4374", 8 | "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36" 9 | } 10 | }, 11 | { 12 | "name": "nestegg", 13 | "source": { 14 | "type": "git", 15 | "url": "https://github.com/kinetiknz/nestegg", 16 | "revision": "8374e436ad90afd61919ffe27aa5ff2887feacba" 17 | }, 18 | "postprocess": { 19 | "type": "script", 20 | "file": "libnestegg.py" 21 | } 22 | }, 23 | { 24 | "name": "libmodplug", 25 | "source": { 26 | "type": "archive", 27 | "url": "http://downloads.sourceforge.net/project/modplug-xmms/libmodplug/0.8.8.5/libmodplug-0.8.8.5.tar.gz", 28 | "sha1": "771ee75bb8bfcfe95eae434ed1f3b2c5b63b2cb3" 29 | }, 30 | "postprocess": { 31 | "type": "patch", 32 | "file": "libmodplug_771ee75bb8bfcfe95eae434ed1f3b2c5b63b2cb3.patch" 33 | } 34 | }, 35 | { 36 | "name": "stb", 37 | "predicate": "platform.system() != 'Windows' or os.getenv('DOWNLOAD_STB') != None", 38 | "source": { 39 | "type": "git", 40 | "url": "https://github.com/nothings/stb.git", 41 | "revision": 
"ae721c50eaf761660b4f90cc590453cdb0c2acd0" 42 | } 43 | } 44 | ] 45 | -------------------------------------------------------------------------------- /patches/libmodplug_771ee75bb8bfcfe95eae434ed1f3b2c5b63b2cb3.patch: -------------------------------------------------------------------------------- 1 | diff --exclude=.git --exclude=.hg -rupN ./src/libmodplug/config.h ./src/libmodplug-patched/config.h 2 | --- ./src/libmodplug/config.h Thu Jan 1 01:00:00 1970 3 | +++ ./src/libmodplug-patched/config.h Tue Aug 25 12:09:05 2015 4 | @@ -0,0 +1,139 @@ 5 | +/* src/config.h.in. Generated from configure.in by autoheader. */ 6 | + 7 | +/* Define if building universal (internal helper macro) */ 8 | +#undef AC_APPLE_UNIVERSAL_BUILD 9 | + 10 | +/* Define to 1 if you have the header file. */ 11 | +#define HAVE_DLFCN_H 1 12 | + 13 | +/* Define to 1 if you have the header file. */ 14 | +#undef HAVE_INTTYPES_H 15 | + 16 | +/* Define to 1 if you have the header file. */ 17 | +#ifndef __APPLE__ 18 | +#define HAVE_MALLOC_H 1 19 | +#endif 20 | + 21 | +/* Define to 1 if you have the header file. */ 22 | +#define HAVE_MEMORY_H 1 23 | + 24 | +/* Define to 1 if you have the `setenv' function. */ 25 | +#undef HAVE_SETENV 26 | + 27 | +/* Define to 1 if you have the `sinf' function. */ 28 | +#define HAVE_SINF 1 29 | + 30 | +#if defined(BLACKBERRY) 31 | +# define HAVE_STDINT_H 32 | +#else 33 | +/* Define to 1 if you have the header file. */ 34 | +# undef HAVE_STDINT_H 35 | +#endif 36 | + 37 | +/* Define to 1 if you have the header file. */ 38 | +#undef HAVE_STDLIB_H 39 | + 40 | +/* Define to 1 if you have the header file. */ 41 | +#undef HAVE_STRINGS_H 42 | + 43 | +/* Define to 1 if you have the header file. */ 44 | +#undef HAVE_STRING_H 45 | + 46 | +/* Define to 1 if you have the header file. */ 47 | +#define HAVE_SYS_STAT_H 1 48 | + 49 | +/* Define to 1 if you have the header file. */ 50 | +#define HAVE_SYS_TYPES_H 1 51 | + 52 | +/* Define to 1 if you have the header file. 
*/ 53 | +#undef HAVE_UNISTD_H 54 | + 55 | +/* Define to the sub-directory in which libtool stores uninstalled libraries. 56 | + */ 57 | +#undef LT_OBJDIR 58 | + 59 | +/* Name of package */ 60 | +#undef PACKAGE 61 | + 62 | +/* Define to the address where bug reports for this package should be sent. */ 63 | +#undef PACKAGE_BUGREPORT 64 | + 65 | +/* Define to the full name of this package. */ 66 | +#undef PACKAGE_NAME 67 | + 68 | +/* Define to the full name and version of this package. */ 69 | +#undef PACKAGE_STRING 70 | + 71 | +/* Define to the one symbol short name of this package. */ 72 | +#undef PACKAGE_TARNAME 73 | + 74 | +/* Define to the home page for this package. */ 75 | +#undef PACKAGE_URL 76 | + 77 | +/* Define to the version of this package. */ 78 | +#undef PACKAGE_VERSION 79 | + 80 | +/* Define to 1 if you have the ANSI C header files. */ 81 | +#undef STDC_HEADERS 82 | + 83 | +/* Version number of package */ 84 | +#undef VERSION 85 | + 86 | +/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most 87 | + significant byte first (like Motorola and SPARC, unlike Intel). */ 88 | +#if defined(AC_APPLE_UNIVERSAL_BUILD) || defined(__ppc__) || defined(__ppc64__) 89 | +# if defined __BIG_ENDIAN__ 90 | +# define WORDS_BIGENDIAN 1 91 | +# endif 92 | +#else 93 | +# ifndef WORDS_BIGENDIAN 94 | +# undef WORDS_BIGENDIAN 95 | +# endif 96 | +#endif 97 | + 98 | +/* Define for Solaris 2.5.1 so the uint32_t typedef from , 99 | + , or is not used. If the typedef were allowed, the 100 | + #define below would cause a syntax error. */ 101 | +#undef _UINT32_T 102 | + 103 | +/* Define for Solaris 2.5.1 so the uint64_t typedef from , 104 | + , or is not used. If the typedef were allowed, the 105 | + #define below would cause a syntax error. */ 106 | +#undef _UINT64_T 107 | + 108 | +/* Define for Solaris 2.5.1 so the uint8_t typedef from , 109 | + , or is not used. If the typedef were allowed, the 110 | + #define below would cause a syntax error. 
*/ 111 | +#undef _UINT8_T 112 | + 113 | +/* Define to the type of a signed integer type of width exactly 16 bits if 114 | + such a type exists and the standard includes do not define it. */ 115 | +#undef int16_t 116 | + 117 | +/* Define to the type of a signed integer type of width exactly 32 bits if 118 | + such a type exists and the standard includes do not define it. */ 119 | +#undef int32_t 120 | + 121 | +/* Define to the type of a signed integer type of width exactly 64 bits if 122 | + such a type exists and the standard includes do not define it. */ 123 | +#undef int64_t 124 | + 125 | +/* Define to the type of a signed integer type of width exactly 8 bits if such 126 | + a type exists and the standard includes do not define it. */ 127 | +#undef int8_t 128 | + 129 | +/* Define to the type of an unsigned integer type of width exactly 16 bits if 130 | + such a type exists and the standard includes do not define it. */ 131 | +#undef uint16_t 132 | + 133 | +/* Define to the type of an unsigned integer type of width exactly 32 bits if 134 | + such a type exists and the standard includes do not define it. */ 135 | +#undef uint32_t 136 | + 137 | +/* Define to the type of an unsigned integer type of width exactly 64 bits if 138 | + such a type exists and the standard includes do not define it. */ 139 | +#undef uint64_t 140 | + 141 | +/* Define to the type of an unsigned integer type of width exactly 8 bits if 142 | + such a type exists and the standard includes do not define it. 
*/ 143 | +#undef uint8_t 144 | diff --exclude=.git --exclude=.hg -rupN ./src/libmodplug/src/libmodplug/stdafx.h ./src/libmodplug-patched/src/libmodplug/stdafx.h 145 | --- ./src/libmodplug/src/libmodplug/stdafx.h Mon Mar 10 00:52:07 2014 146 | +++ ./src/libmodplug-patched/src/libmodplug/stdafx.h Tue Aug 25 12:09:05 2015 147 | @@ -89,7 +89,9 @@ inline LONG MulDiv (long a, long b, long 148 | return ((uint64_t) a * (uint64_t) b ) / c; 149 | } 150 | 151 | -#define MODPLUG_NO_FILESAVE 152 | +#if !defined(MODPLUG_NO_FILESAVE) 153 | +# define MODPLUG_NO_FILESAVE 154 | +#endif 155 | #define NO_AGC 156 | #define LPCTSTR LPCSTR 157 | #define lstrcpyn strncpy 158 | diff --exclude=.git --exclude=.hg -rupN ./src/libmodplug/src/sndfile.cpp ./src/libmodplug-patched/src/sndfile.cpp 159 | --- ./src/libmodplug/src/sndfile.cpp Mon Feb 3 01:14:41 2014 160 | +++ ./src/libmodplug-patched/src/sndfile.cpp Tue Aug 25 12:09:05 2015 161 | @@ -1140,7 +1140,7 @@ UINT CSoundFile::ReadSample(MODINSTRUMEN 162 | for (UINT j=0; j= MAX_ORDERS)) 176 | { 177 | - //if (!m_nRepeatCount) 178 | + if (!m_nRepeatCount) 179 | return FALSE; //never repeat entire song 180 | if (!m_nRestartPos) 181 | { 182 | @@ -398,7 +398,7 @@ BOOL CSoundFile::ProcessRow() 183 | } 184 | } 185 | } 186 | -// if (m_nRepeatCount > 0) m_nRepeatCount--; 187 | + if (m_nRepeatCount > 0) m_nRepeatCount--; 188 | m_nCurrentPattern = m_nRestartPos; 189 | m_nRow = 0; 190 | if ((Order[m_nCurrentPattern] >= MAX_PATTERNS) || (!Patterns[Order[m_nCurrentPattern]])) return FALSE; 191 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 👢 Bootstrap 2 | 3 | The Bootstrap script is a versatile dependencies manager for your C++ 4 | projects. You can think of it as a portable (Windows, Linux, OSX) and a more 5 | feature-complete alternative to Google's Repo tool. 
The script itself is written 6 | in Python and should "just work" using any standard Python 3 installation. 7 | 8 | ## Introduction 9 | 10 | Main features of Bootstrap: 11 | - One-button-download philosophy. Just run the script to download and update all your dependencies. 12 | - Cross-platform. Runs on Windows, Linux, and OSX. 13 | - Full support of Git, Mercurial, SVN repositories. 14 | - Full support of `.zip`, `.tar`, `.gz`, `.bz2`, `.xz` archives. 15 | - Caching and fallback mechanisms. 16 | - Rich error reporting. 17 | 18 | ## Usage 19 | 20 | For instance, this minimalistic JSON snippet will clone the GLFW library from its Git repository 21 | and check out the revision which is tagged `3.3`. 22 | 23 | ```JSON 24 | [{ 25 | "name": "glfw", 26 | "source": { 27 | "type": "git", 28 | "url": "https://github.com/glfw/glfw.git", 29 | "revision": "3.3" 30 | } 31 | }] 32 | ``` 33 | 34 | This simple JSON snippet will download the `libjpeg` library from 35 | the specified URL (via a custom `user-agent` string), check the archive integrity 36 | via SHA1, unpack the archive, and put its content into the `src/libjpeg` folder: 37 | 38 | ```JSON 39 | [{ 40 | "name": "libjpeg", 41 | "source": { 42 | "type": "archive", 43 | "url": "http://www.ijg.org/files/jpegsrc.v9a.tar.gz", 44 | "sha1": "d65ed6f88d318f7380a3a5f75d578744e732daca", 45 | "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36" 46 | } 47 | }] 48 | ``` 49 | 50 | This JSON snippet will download the `nestegg` library from the Git repository, 51 | checkout the specified revision, and apply a patch via a custom Python script. 52 | You can also provide an optional `predicate` string to run custom Python code 53 | to determine whether the library needs to be downloaded. 
54 | 55 | ```JSON 56 | [{ 57 | "name": "nestegg", 58 | "predicate": "platform.system() != 'Windows' or os.getenv('DOWNLOAD_STB') != None", 59 | "source": { 60 | "type": "git", 61 | "url": "https://github.com/kinetiknz/nestegg", 62 | "revision": "8374e436ad90afd61919ffe27aa5ff2887feacba" 63 | }, 64 | "postprocess": { 65 | "type": "script", 66 | "file": "libnestegg.py" 67 | } 68 | }] 69 | ``` 70 | 71 | Read the comprehensive documentation below for further details. 72 | 73 | # Projects using this script 74 | 75 | - **[IGL](https://github.com/facebook/igl/)** - Meta's Intermediate Graphics Library (IGL) is a cross-platform library that commands the GPU. License: MIT. 76 | - **[LightweightVK](https://github.com/corporateshark/lightweightvk)** - lightweight C++ bindless Vulkan 1.3 wrapper. License: MIT. 77 | - **[Mastering Graphics Programming with Vulkan](https://github.com/PacktPublishing/Mastering-Graphics-Programming-with-Vulkan)** 78 | - **[3D Graphics Rendering Cookbook](https://github.com/PacktPublishing/3D-Graphics-Rendering-Cookbook)** 79 | - **[Vulkan 3D Graphics Rendering Cookbook](https://github.com/PacktPublishing/3D-Graphics-Rendering-Cookbook-Second-Edition)** 80 | 81 | 82 | ## P.S. 83 | 84 | This is a fork of an abandoned library https://bitbucket.org/blippar/bootstrapping-external-libs 85 | 86 | ------------------------------------------------------------------------------ 87 | Original documentation: 88 | ------------------------------------------------------------------------------ 89 | 90 | This repository holds our external (i.e. third party) libraries. After a fresh 91 | clone, the repository contains *only* metadata about the libraries, i.e. their 92 | names, where to retrieve them from, etc. In order to actually obtain or update 93 | the libraries, the user must run a bootstrapping script, which downloads all 94 | third-party libraries and places them into a src/ directory. 
95 | 96 | 97 | Prerequisites 98 | ------------- 99 | 100 | The script itself is written in Python and should "just work" using any standard 101 | Python 2 or 3 installation. The version control tools Git, Mercurial and 102 | Subversion must be installed and available on the environment path; in addition, 103 | the 'patch' program must be present on the user's system. On Windows, the script 104 | can be run from the command line (for patching to work, ensure you have the Cygwin 105 | patch tool installed). 106 | 107 | 108 | Obtaining the libraries 109 | ----------------------- 110 | 111 | Run the bootstrapping script from the repository's top-level directory: 112 | > python bootstrap.py 113 | or just: 114 | > ./bootstrap.py 115 | 116 | The script should run without any errors or exceptions raised. Third-party 117 | library sources are either downloaded as a packaged archive file (e.g. .zip, 118 | .tar.gz, .tar.bz2) and then uncompressed, or cloned directly from the original 119 | repository. All source code is obtained from the respective authorative sources, 120 | i.e. directly from the authors' websites or repositories. 121 | 122 | After script execution has finished, the following files and directories should 123 | have been added to the repository folder: 124 | 125 | ``` 126 | |- external 127 | |- .bootstrap.json 128 | |- archives/ 129 | |- ... 130 | |- snapshots/ 131 | |- ... 132 | |- src/ 133 | |- ... 134 | ``` 135 | 136 | - The file .bootstrap.json contains the cached state of the last bootstrapping/ 137 | pdating operation. The script always compares against this state, in order to 138 | decide whether to update a library. For example, if boostrap.py has executed 139 | successfully and is then immediately re-run, no further action will take place. 140 | 141 | - The directory archives/ contains the archive files of all libraries that have 142 | been downloaded as archives. It serves as a cache to prevent multiple downloads. 
143 | 144 | - The directory snapshots/ serves as a cache for snapshots of a complete 145 | repository. It will only be created if the --repo-snapshots option was specified 146 | on the command line. This will enable the respective copy of the 'external' 147 | repository to serve as a fallback location. 148 | 149 | - The directory src/ contains all third-party library sources. Libraries that 150 | were obtained in archive form will have been uncompressed into this directory. 151 | Libraries that were obtained from a repository (Git, Mercurial or SVN) will have 152 | been cloned into this directory. 153 | 154 | 155 | Adding or changing the version of a library 156 | ------------------------------------------- 157 | 158 | All metadata about the third-party libraries and their versions is contained in 159 | a single JSON file (bootstrap.json) that is being read by the script. 160 | 161 | The file should contain exactly one JSON array of objects, where each object 162 | contained in this array describes one library. This JSON schema gives an 163 | overview of the format: 164 | 165 | ``` 166 | [ 167 | { 168 | "name": "LibraryName", 169 | "source": { 170 | "type": "archive|git|hg|svn", 171 | "url": "http://...", 172 | "sha1": "0123456789...0123456789", # for type == archive 173 | "revision": "0123456789" # for type == git|hg|svn 174 | }, 175 | "postprocess": { 176 | "type": "patch|script", 177 | "file": "filename" 178 | } 179 | }, 180 | { 181 | ... 182 | }, 183 | ... 184 | ] 185 | ``` 186 | 187 | The library "name" specifies the name of the library, which in turn specifies 188 | the subdirectory name under the src/ directory. The name should be the common 189 | name of the library (e.g. "libjpeg") and *not* contain any particular version 190 | numbers or other information that may change between versions. 191 | 192 | For each library, the "source" field contains information about where to obtain 193 | the library from, in the form of a JSON object. 
194 | 195 | The source "type" field can be one of the following types: "archive", "git", 196 | "hg", or "svn". The first type describes an archive file (such as .zip, .tar.gz, 197 | .tar.bz2 files), while the last three types describe different repository types. 198 | 199 | The "url" value contains the URL of the archive to be downloaded in case the 200 | type is 'archive', and the respository URL otherwise. 201 | 202 | If the source type is 'archive', then the optional "sha1" field can (and should) 203 | be used to add the SHA1 hash of the archive, for verification purposes. 204 | 205 | For repositories (i.e. type is 'git|hg|svn'), an optional "revision" field can 206 | specify the particular revision/commit to be checked out. If the revision field 207 | is omitted, the default is to check out the HEAD revision of the master branch 208 | (for Git repositories), or the tip of the default branch (for Mercurial 209 | repositories). 210 | 211 | The "postprocess" field contains an object which describes any optional post- 212 | processing steps, once the original library sources have been put into the src/ 213 | directory. Post-processing can be of type "patch" or "script"; in both cases, 214 | the filename has to be given in the "file" field. 215 | 216 | For type 'patch', the file field specifies a patch file to be contained in the 217 | patches/ directory. For type 'file', the file field specified a script that is 218 | run from the patches/ directory. Patches can be used to make minor modifications 219 | to a library, such as silencing warning or to fix bugs which have not been 220 | included in the upstream version of the library. Scripts can embody any more 221 | complex operations, but should mainly be used for simple library-specific 222 | prerequisites (such as copying header prototype files). All scripts have to 223 | be written in Python, in order to be compatible with all platforms, including 224 | Windows. 
225 | 226 | Patches should be created using the `diff` program from the external/ directory, 227 | similar to this example: 228 | ``` 229 | > diff --exclude=".git" --exclude=".hg" -rupN \ 230 | ./src/AGAST/ ./src/AGAST_patched/ > ./patches/agast.patch 231 | ``` 232 | The default -p argument for the patch command is 3, but this can be changed by 233 | an optional "pnum" field inside the postprocessing JSON object, containing a 234 | numeric value. 235 | For example, for patches created using `git diff` or `hg diff`, a "pnum" 236 | argument of "1" is likely needed. This method of creating a patch is 237 | discouraged, however, in favor of the cleaner method using the plain 'diff' 238 | command described above. 239 | 240 | In general, only ever add or modify libraries in a way that is compatible with 241 | how it's already done. See also the next section. 242 | 243 | 244 | Policies for adding or updating a library 245 | ----------------------------------------- 246 | 247 | - IMPORTANT: There shall be NO committed source code of any kind in this 248 | repository (besides patches). All source code shall be downloaded from the 249 | respective authorative sources, i.e. the library authors' web sites (zipped or 250 | tarred packages) or repositories. 251 | 252 | - IMPORTANT: There shall be NO committed binary files of any kind in this 253 | repository. Binary files are to be built using the respective platform-specific 254 | build system, and are to not be committed anywhere in the first place. 255 | 256 | - The bootstrapping script should always run cleanly, with no further action 257 | required from the user. 258 | 259 | - All repositories should be in their final usable state after running the 260 | bootstrapping script, i.e. the committed makefiles should be usable. 261 | 262 | - Any patches to a library should be kept minimal. Larger changes to a 263 | third-party code base are discouraged. 264 | 265 | - If we really have to patch a library, e.g. 
to fix a bug or to silence a 266 | warning, let's try to get our patch accepted upstream. If we then update to a 267 | newer version of the library, we won't need a patch file anymore. 268 | 269 | - All patch files should adhere to the naming 270 | _.patch 271 | where is the hash of either the archive file or the repository. 272 | This enables to keep multiple patches for the same library, in case different 273 | project need different versions of a library (via using local bootstrapping 274 | files). 275 | 276 | - The canonical bootstrapping JSON file should always contain the respective 277 | latest version of each library that is used across our codebase. If a library 278 | is updated, it should be updated to the respective latest version available. 279 | 280 | - We should be keeping the contained library versions reasonably up-to-date. 281 | 282 | 283 | License 284 | ------- 285 | See the LICENSE file 286 | 287 | Bug reports, comments 288 | --------------------- 289 | 290 | Should go to omar@blippar.com. 
291 | -------------------------------------------------------------------------------- /bootstrap.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | # 4 | # https://github.com/corporateshark/bootstrapping.git 5 | # sk@linderdaum.com 6 | # 7 | # The MIT License (MIT) 8 | # Copyright (c) 2016-2025, Sergey Kosarevsky 9 | # 10 | # --- 11 | # Based on https://bitbucket.org/blippar/bootstrapping-external-libs 12 | # 13 | # The MIT License (MIT) 14 | # Copyright (c) 2016 Blippar.com Ltd 15 | # 16 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 17 | # 18 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 19 | # 20 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
21 | 22 | from __future__ import print_function 23 | import platform 24 | import os 25 | import sys 26 | import io 27 | import shutil 28 | import subprocess 29 | import zipfile 30 | import tarfile 31 | import hashlib 32 | import json 33 | import getopt 34 | import traceback 35 | import urllib 36 | import ssl 37 | import ctypes 38 | 39 | ssl._create_default_https_context = ssl._create_unverified_context 40 | 41 | try: 42 | from urllib.request import urlparse 43 | from urllib.request import urlunparse 44 | from urllib.request import quote 45 | except ImportError: 46 | from urlparse import urlparse 47 | from urlparse import urlunparse 48 | from urllib import URLopener 49 | from urllib import quote 50 | 51 | try: 52 | import paramiko 53 | import scp 54 | scp_available = True 55 | except: 56 | scp_available = False 57 | print("WARNING: Please install the Python packages [paramiko, scp] for full script operation.") 58 | 59 | try: 60 | import lzma 61 | lzma_available = True 62 | except: 63 | print("WARNING: Python lzma library not available; extraction of .tar.xz files may not be supported.") 64 | print("Installation on Ubuntu:") 65 | print("> apt-get install python-lzma") 66 | print("Installation on Mac OS X:") 67 | print("> brew install xz") 68 | print("> pip install pyliblzma") 69 | lzma_available = False 70 | 71 | BOOTSTRAP_VERSION = "1.0.7 (2025)" 72 | 73 | class Colors: 74 | GREEN = '\033[92m' 75 | WARNING = '\033[91m' 76 | NORMAL = '\033[0m' 77 | 78 | SRC_DIR_BASE = "src" 79 | ARCHIVE_DIR_BASE = "archives" 80 | SNAPSHOT_DIR_BASE = "snapshots" 81 | 82 | BASE_DIR = os.getcwd() 83 | SRC_DIR = os.path.join(BASE_DIR, SRC_DIR_BASE) 84 | ARCHIVE_DIR = os.path.join(BASE_DIR, ARCHIVE_DIR_BASE) 85 | SNAPSHOT_DIR = os.path.join(BASE_DIR, SNAPSHOT_DIR_BASE) 86 | 87 | DEFAULT_PNUM = 3 88 | DEBUG_OUTPUT = False 89 | FALLBACK_URL = "" 90 | 91 | USE_TAR = False 92 | USE_UNZIP = False 93 | 94 | TOOL_COMMAND_PYTHON = sys.executable if not " " in sys.executable else 
'"{}"'.format(sys.executable) 95 | TOOL_COMMAND_GIT = "git" 96 | TOOL_COMMAND_HG = "hg" 97 | TOOL_COMMAND_SVN = "svn" 98 | TOOL_COMMAND_PATCH = "patch" 99 | TOOL_COMMAND_TAR = "tar" 100 | TOOL_COMMAND_UNZIP = "unzip" 101 | 102 | ansi_console = True 103 | 104 | if platform.system() == "Windows": 105 | os.environ['CYGWIN'] = "nodosfilewarning" 106 | ansi_console = False 107 | if sys.getwindowsversion().major >= 10: 108 | ansi_console = True 109 | kernel32 = ctypes.windll.kernel32 110 | handle = kernel32.GetStdHandle(-11) 111 | mode = ctypes.c_uint32() 112 | kernel32.GetConsoleMode(handle, ctypes.byref(mode)) 113 | kernel32.SetConsoleMode(handle, mode.value | 0x0004) 114 | 115 | if not sys.version_info[0] >= 3: 116 | raise ValueError("I require Python 3.0 or a later version") 117 | 118 | def log(string): 119 | print("--- " + string) 120 | 121 | def warning(string): 122 | if ansi_console: 123 | print(Colors.WARNING, "--- " + string, Colors.NORMAL) 124 | else: 125 | print("--- " + string) 126 | 127 | def dlog(string): 128 | if DEBUG_OUTPUT: 129 | print("*** " + string) 130 | 131 | def executeCommand(command, printCommand = False, quiet = False): 132 | 133 | printCommand = printCommand or DEBUG_OUTPUT 134 | out = None 135 | err = None 136 | 137 | if quiet: 138 | out = open(os.devnull, 'w') 139 | err = subprocess.STDOUT 140 | 141 | if printCommand: 142 | if DEBUG_OUTPUT: 143 | dlog(">>> " + command) 144 | else: 145 | log(">>> " + command) 146 | 147 | return subprocess.call(command, shell = True, stdout=out, stderr=err); 148 | 149 | 150 | def dieIfNonZero(res): 151 | if res != 0: 152 | raise ValueError("Command returned non-zero status: " + str(res)); 153 | 154 | def escapifyPath(path): 155 | if path.find(" ") == -1: 156 | return path 157 | if platform.system() == "Windows": 158 | return "\"" + path + "\"" 159 | return path.replace("\\ ", " ") 160 | 161 | def cloneRepository(type, url, target_name, revision, try_only_local_operations = False, recursive = True): 162 | 
def cloneRepository(type, url, target_name, revision, try_only_local_operations, recursive):
    """Clone or update a source repository into SRC_DIR/target_name.

    type: one of "hg", "git", "svn".
    url: remote repository URL.
    revision: revision/changeset to check out, or None for the default head.
    try_only_local_operations: if True, never touch the network; raise if the
        repository is not already present locally.
    recursive: git only -- clone/fetch submodules as well.

    Raises RuntimeError for unobtainable repositories or unsupported SVN
    revision updates, ValueError for unknown repository types.
    """
    # NOTE(review): the original 'def' header sits just above this chunk; the
    # parameter list is reconstructed from the names used in the body and the
    # call sites in main() -- confirm against the full file.
    target_dir = escapifyPath(os.path.join(SRC_DIR, target_name))
    target_dir_exists = os.path.exists(target_dir)
    log("Cloning " + url + " to " + target_dir)

    if type == "hg":
        repo_exists = os.path.exists(os.path.join(target_dir, ".hg"))

        if not repo_exists:
            if try_only_local_operations:
                raise RuntimeError("Repository for " + target_name + " not found; cannot execute local operations only")
            if target_dir_exists:
                dlog("Removing directory " + target_dir + " before cloning")
                shutil.rmtree(target_dir)
            dieIfNonZero(executeCommand(TOOL_COMMAND_HG + " clone " + url + " " + target_dir))
        elif not try_only_local_operations:
            log("Repository " + target_dir + " already exists; pulling instead of cloning")
            dieIfNonZero(executeCommand(TOOL_COMMAND_HG + " pull -R " + target_dir))

        if revision is None:
            revision = ""
        dieIfNonZero(executeCommand(TOOL_COMMAND_HG + " update -R " + target_dir + " -C " + revision))
        dieIfNonZero(executeCommand(TOOL_COMMAND_HG + " purge -R " + target_dir + " --config extensions.purge="))

    elif type == "git":
        repo_exists = os.path.exists(os.path.join(target_dir, ".git"))

        if not repo_exists:
            if try_only_local_operations:
                raise RuntimeError("Repository for " + target_name + " not found; cannot execute local operations only")
            if target_dir_exists:
                dlog("Removing directory " + target_dir + " before cloning")
                shutil.rmtree(target_dir)
            if recursive:
                dieIfNonZero(executeCommand(TOOL_COMMAND_GIT + " clone --recursive " + url + " " + target_dir))
            else:
                dieIfNonZero(executeCommand(TOOL_COMMAND_GIT + " clone " + url + " " + target_dir))
        elif not try_only_local_operations:
            log("Repository " + target_dir + " already exists; fetching instead of cloning")
            if recursive:
                dieIfNonZero(executeCommand(TOOL_COMMAND_GIT + " -C " + target_dir + " fetch --recurse-submodules"))
            else:
                dieIfNonZero(executeCommand(TOOL_COMMAND_GIT + " -C " + target_dir + " fetch"))

        if revision is None:
            revision = "HEAD"
        dieIfNonZero(executeCommand(TOOL_COMMAND_GIT + " -C " + target_dir + " reset --hard " + revision))
        dieIfNonZero(executeCommand(TOOL_COMMAND_GIT + " -C " + target_dir + " clean -fxd"))

    elif type == "svn":
        if not try_only_local_operations:  # we can't do much without a server connection when dealing with SVN
            if target_dir_exists:
                dlog("Removing directory " + target_dir + " before cloning")
                shutil.rmtree(target_dir)
            dieIfNonZero(executeCommand(TOOL_COMMAND_SVN + " checkout " + url + " " + target_dir))

        if revision is not None and revision != "":
            raise RuntimeError("Updating to revision not implemented for SVN.")

    else:
        raise ValueError("Cloning " + type + " repositories not implemented.")


def decompressTarXZFile(src_filename, dst_filename):
    """Decompress an .xz file into dst_filename using the lzma module."""
    if not lzma_available:
        raise RuntimeError("lzma extraction not available; please install package lzma (pyliblzma) and try again")

    # BUGFIX: the previous try/finally called fd.close() even when opening the
    # source file failed, raising NameError; 'with' closes both safely.
    with open(src_filename, "rb") as fs:
        decompressed = lzma.decompress(fs.read())
    with open(dst_filename, "wb") as fd:
        fd.write(decompressed)


def extractFile(filename, target_dir):
    """Extract a .zip / .tar[.gz|.bz2|.xz] archive below SRC_DIR and rename
    the extracted top-level directory to target_dir.

    Raises RuntimeError for invalid ZIPs, non-.tar .xz files, and unknown
    archive extensions.
    """
    if os.path.exists(target_dir):
        shutil.rmtree(target_dir)

    log("Extracting file " + filename)
    stem, extension = os.path.splitext(os.path.basename(filename))

    if extension == ".zip" or extension == "":
        try:
            zfile = zipfile.ZipFile(filename)
        except zipfile.BadZipFile:
            warning("WARNING: Invalid ZIP file '" + filename + "'")
            if os.path.exists(filename) and os.path.getsize(filename) == 0:
                warning("WARNING: Zero-sized file was deleted. Run the script again.")
                os.remove(filename)
            else:
                warning("WARNING: Try deleting the cached file and run the script again.")
            raise RuntimeError("Invalid ZIP file '" + filename + "'") from None
        extract_dir = os.path.commonprefix(zfile.namelist())
        hasFolder = False
        for fname in zfile.namelist():
            if fname.find('/') != -1:
                hasFolder = True
        extract_dir_local = ""
        if not hasFolder:  # special case, there are no folders in the archive
            extract_dir = ""
        if extract_dir == "":  # deal with stupid zip files that don't contain a base directory
            extract_dir, extension2 = os.path.splitext(os.path.basename(filename))
            extract_dir_local = extract_dir
        extract_dir_abs = os.path.join(SRC_DIR, extract_dir_local)

        # BUGFIX: was 'os.mkdirs' (nonexistent attribute) wrapped in a bare
        # except that silently swallowed the AttributeError.
        os.makedirs(extract_dir_abs, exist_ok=True)

        if not USE_UNZIP:
            zfile.extractall(extract_dir_abs)
            zfile.close()
        else:
            zfile.close()
            dieIfNonZero(executeCommand(TOOL_COMMAND_UNZIP + " " + filename + " -d " + extract_dir_abs))

    elif extension == ".tar" or extension == ".gz" or extension == ".bz2" or extension == ".xz":

        if extension == ".xz":
            stem2, extension2 = os.path.splitext(os.path.basename(stem))
            if extension2 == ".tar":
                # we extract the .tar.xz file to a .tar file before we uncompress that
                tar_filename = os.path.join(os.path.dirname(filename), stem)
                decompressTarXZFile(filename, tar_filename)
                filename = tar_filename
            else:
                raise RuntimeError("Unable to extract .xz file that is not a .tar.xz file.")

        tfile = tarfile.open(filename)
        extract_dir = os.path.commonprefix(tfile.getnames())
        extract_dir_local = ""
        if extract_dir == "":  # deal with stupid tar files that don't contain a base directory
            extract_dir, extension2 = os.path.splitext(os.path.basename(filename))
            extract_dir_local = extract_dir
        extract_dir_abs = os.path.join(SRC_DIR, extract_dir_local)

        os.makedirs(extract_dir_abs, exist_ok=True)  # BUGFIX: was 'os.mkdirs'

        if not USE_TAR:
            tfile.extractall(extract_dir_abs)
            tfile.close()
        else:
            tfile.close()
            dieIfNonZero(executeCommand(TOOL_COMMAND_TAR + " -x -f " + filename + " -C " + extract_dir_abs))

    else:
        raise RuntimeError("Unknown compressed file format " + extension)

    if platform.system() == "Windows":
        extract_dir = extract_dir.replace('/', '\\')
        target_dir = target_dir.replace('/', '\\')
        if extract_dir[-1::] == '\\':
            extract_dir = extract_dir[:-1]

    # rename extracted folder to target_dir
    extract_dir_abs = os.path.join(SRC_DIR, extract_dir)

    needRename = True
    if platform.system() == "Windows":
        # on case-insensitive filesystems a same-name rename would fail
        needRename = extract_dir_abs.lower() != target_dir.lower()
    if needRename:
        os.rename(extract_dir_abs, target_dir)


def createArchiveFromDirectory(src_dir_name, archive_name, delete_existing_archive=False):
    """Pack src_dir_name into a .tar.gz snapshot archive named archive_name."""
    if delete_existing_archive and os.path.exists(archive_name):
        dlog("Removing snapshot file " + archive_name + " before creating new one")
        os.remove(archive_name)

    archive_dir = os.path.dirname(archive_name)
    if not os.path.isdir(archive_dir):
        os.mkdir(archive_dir)

    with tarfile.open(archive_name, "w:gz") as tar:
        tar.add(src_dir_name, arcname=os.path.basename(src_dir_name))


def downloadSCP(hostname, username, path, target_dir):
    """Fetch a remote file into target_dir via SSH/SCP (paramiko + scp)."""
    if not scp_available:
        warning("ERROR: missing Python packages [paramiko, scp]; cannot continue.")
        raise RuntimeError("Missing Python packages [paramiko, scp]; cannot continue.")
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.connect(hostname=hostname, username=username)
    scpc = scp.SCPClient(ssh.get_transport())
    scpc.get(path, local_path=target_dir)


def downloadProgress(cur_size, total_size):
    """Render a one-line textual progress bar (cur_size of total_size bytes)."""
    percent = int((cur_size / total_size) * 100)
    print("[", end="")
    for i in range(int(percent / 2)):
        print("*", end="")
    for i in range(int(percent / 2), 50):
        print(".", end="")
    print("] " + str(percent) + "% --- ", end="")
    print("%.2f" % (cur_size / (1024 * 1024)), "Mb", end="\r")


def computeFileHash(filename):
    """Return the SHA1 hex digest of the given file, read in 64 KiB blocks."""
    blocksize = 65536
    hasher = hashlib.sha1()
    with open(filename, 'rb') as afile:
        buf = afile.read(blocksize)
        while len(buf) > 0:
            hasher.update(buf)
            buf = afile.read(blocksize)
    return hasher.hexdigest()


def downloadFile(url, download_dir, target_dir_name, sha1_hash=None, force_download=False, user_agent=None):
    """Download url into download_dir, verifying an optional SHA1 hash.

    Returns the local path of the downloaded file. A cached copy is reused
    unless its hash mismatches or force_download is set; ssh:// URLs are
    fetched via SCP. Raises RuntimeError on a final hash mismatch.
    """
    if not os.path.isdir(download_dir):
        os.mkdir(download_dir)

    p = urlparse(url)
    url = urlunparse([p[0], p[1], quote(p[2]), p[3], p[4], p[5]])  # replace special characters in the URL path

    filename_rel = os.path.split(p.path)[1]  # get original filename
    target_filename = os.path.join(download_dir, filename_rel)

    # check SHA1 hash, if file already exists
    if os.path.exists(target_filename) and sha1_hash is not None and sha1_hash != "":
        hash_file = computeFileHash(target_filename)
        if hash_file != sha1_hash:
            log("Hash of " + target_filename + " (" + hash_file + ") does not match expected hash (" + sha1_hash + "); forcing download")
            force_download = True

    # download file
    if (not os.path.exists(target_filename)) or force_download:
        log("Downloading " + url + " to " + target_filename)
        if p.scheme == "ssh":
            downloadSCP(p.hostname, p.username, p.path, download_dir)
        else:
            opener = urllib.request.build_opener()
            if user_agent is not None:
                opener.addheaders = [('User-agent', user_agent)]
            with open(target_filename, 'wb') as f, opener.open(url) as response:
                length = response.getheader('content-length')
                block_size = 128 * 1024  # default value
                if length:
                    length = int(length)
                    # keep roughly 1000 progress updates regardless of size
                    block_size = max(block_size, length // 1000)
                    size = 0
                    while True:
                        buffer = response.read(block_size)
                        if not buffer:
                            break
                        f.write(buffer)
                        size += len(buffer)
                        downloadProgress(size, length)
                    print()
                else:
                    # no content-length header -- read everything at once
                    f.write(response.read())
    else:
        log("Skipping download of " + url + "; already downloaded")

    # check SHA1 hash
    if sha1_hash is not None and sha1_hash != "":
        hash_file = computeFileHash(target_filename)
        if hash_file != sha1_hash:
            errorStr = "Hash of " + target_filename + " (" + hash_file + ") differs from expected hash (" + sha1_hash + ")"
            log(errorStr)
            raise RuntimeError(errorStr)

    return target_filename


def downloadAndExtractFile(url, download_dir, target_dir_name, sha1_hash=None, force_download=False, user_agent=None):
    """Download an archive and extract it to SRC_DIR/target_dir_name."""
    target_filename = downloadFile(url, download_dir, target_dir_name, sha1_hash, force_download, user_agent)
    extractFile(target_filename, os.path.join(SRC_DIR, target_dir_name))


def applyPatchFile(patch_name, dir_name, pnum):
    """Apply a patch from the 'patches' directory to SRC_DIR/dir_name.

    Tries a normal dry-run first, then a --binary dry-run; exits the script
    with status 255 if neither applies (e.g. already patched).
    """
    # we're assuming the patch was applied like in this example:
    # diff --exclude=".git" --exclude=".hg" -rupN ./src/AGAST/ ./src/AGAST_patched/ > ./patches/agast.patch
    # where the first given location is the unpatched directory, and the second location is the patched directory.
    log("Applying patch to " + dir_name)
    patch_dir = os.path.join(BASE_DIR, "patches")
    arguments = "-d " + os.path.join(SRC_DIR, dir_name) + " -p" + str(pnum) + " < " + os.path.join(patch_dir, patch_name)
    argumentsBinary = "-d " + os.path.join(SRC_DIR, dir_name) + " -p" + str(pnum) + " --binary < " + os.path.join(patch_dir, patch_name)
    res = executeCommand(TOOL_COMMAND_PATCH + " --dry-run " + arguments, quiet=True)
    if res != 0:
        arguments = argumentsBinary
        res = executeCommand(TOOL_COMMAND_PATCH + " --dry-run " + arguments, quiet=True)
    if res != 0:
        warning("ERROR: patch application failure; has this patch already been applied?")
        executeCommand(TOOL_COMMAND_PATCH + " --dry-run " + arguments, printCommand=True)
        exit(255)
    else:
        dieIfNonZero(executeCommand(TOOL_COMMAND_PATCH + " " + arguments, quiet=True))


def runPythonScript(script_name):
    """Run a post-processing Python script from the 'patches' directory."""
    log("Running Python script " + script_name)
    patch_dir = os.path.join(BASE_DIR, "patches")
    filename = os.path.join(patch_dir, script_name)
    dieIfNonZero(executeCommand(TOOL_COMMAND_PYTHON + " " + escapifyPath(filename), False))


def findToolCommand(command, paths_to_search, required=False):
    """Search paths_to_search for command; return its absolute path if found,
    otherwise the bare command name (warning if it was required)."""
    command_res = command
    found = False

    for path in paths_to_search:
        command_abs = os.path.join(path, command)
        if os.path.exists(command_abs):
            command_res = command_abs
            found = True
            break

    if required and not found:
        warning("WARNING: command " + command + " not found, but required by script")

    dlog("Found '" + command + "' as " + command_res)
    return command_res


def readJSONData(filename):
    """Read and parse a JSON file; return the data, or None on any failure."""
    try:
        with open(filename) as f:
            json_data = f.read()
    except OSError:
        warning("ERROR: Could not read JSON file: " + filename)
        return None

    try:
        data = json.loads(json_data)
    except json.JSONDecodeError as e:
        warning("ERROR: Could not parse JSON document: {}\n  {} (line {}:{})\n".format(filename, e.msg, e.lineno, e.colno))
        return None
    except Exception:
        warning("ERROR: Could not parse JSON document: " + filename)
        return None

    return data


def writeJSONData(data, filename):
    """Serialize data as JSON into filename (overwriting it)."""
    with open(filename, 'w') as outfile:
        json.dump(data, outfile)


def listLibraries(data):
    """Print the name of every library entry in the bootstrap data."""
    for library in data:
        name = library.get('name', None)
        if name is not None:
            print(name)


def printOptions():
    """Print the usage/help text for the script."""
    print("--------------------------------------------------------------------------------")
    print(BOOTSTRAP_VERSION)
    print("--------------------------------------------------------------------------------")
    print("Downloads external libraries, and applies patches or scripts if necessary.")
    print("If the --name argument is not provided, all available libraries will be")
    print("downloaded.")
    print("")
    print("Options:")
    print("  --list, -l              List all available libraries")
    print("  --name, -n              Specifies the name of a single library to be")
    print("                          downloaded")
    print("  --name-file, -N         Specifies a file that contains a (sub)set of libraries")
    print("                          to be downloaded. One library name per line; lines")
    print("                          starting with '#' are considered comments.")
    print("  --skip                  Specifies a name of a single library to be skipped")
    print("  --clean, -c             Remove library directory before obtaining library")
    print("  --clean-all, -C         Implies --clean, and also forces re-download of cached")
    print("                          archive files")
    print("  --base-dir, -b          Base directory, if script is called from outside of")
    print("                          its directory")
    print("  --bootstrap-file        Specifies the file containing the canonical bootstrap")
    print("                          JSON data (default: bootstrap.json)")
    print("  --local-bootstrap-file  Specifies the file containing local bootstrap JSON")
    print("                          data (e.g. for a particular project). The data in this")
    print("                          file will have higher precedence than the data from")
    print("                          the canonical bootstrap file.")
    print("  --use-tar               Use 'tar' command instead of Python standard library")
    print("                          to extract tar archives")
    print("  --use-unzip             Use 'unzip' command instead of Python standard library")
    print("                          to extract zip archives")
    print("  --repo-snapshots        Create a snapshot archive of a repository when its")
    print("                          state changes, e.g. on a fallback location")
    print("  --fallback-url          Fallback URL that points to an existing and already")
    print("                          bootstrapped `external` repository that may be used to")
    print("                          retrieve otherwise unobtainable archives or")
    print("                          repositories. The --repo-snapshots option must be")
    print("                          active on the fallback server. Allowed URL schemes are")
    print("                          file://, ssh://, http://, https://, ftp://.")
    print("  --force-fallback        Force using the fallback URL instead of the original")
    print("                          sources")
    print("  --debug-output          Enables extra debugging output")
    print("  --break-on-first-error  Terminate script once the first error is encountered")
    print("  --version               Print the script version")
    print("--------------------------------------------------------------------------------")


def main(argv):
    """Entry point: parse options, merge bootstrap data, and obtain, patch,
    and post-process every selected library. Returns 0 on success, -1 on any
    failure."""
    global BASE_DIR, SRC_DIR, ARCHIVE_DIR, DEBUG_OUTPUT, FALLBACK_URL, USE_TAR, USE_UNZIP
    global TOOL_COMMAND_PYTHON, TOOL_COMMAND_GIT, TOOL_COMMAND_HG, TOOL_COMMAND_SVN, TOOL_COMMAND_PATCH, TOOL_COMMAND_TAR, TOOL_COMMAND_UNZIP

    try:
        opts, args = getopt.getopt(
            argv,
            "ln:N:cCb:h",
            # BUGFIX: "base-dir" was listed without '=' although its handler
            # consumes an argument (and short form is declared as "b:");
            # getopt would otherwise stop parsing at '--base-dir <path>'.
            ["list", "name=", "name-file=", "skip=", "clean", "clean-all", "base-dir=", "bootstrap-file=",
             "local-bootstrap-file=", "use-tar", "use-unzip", "repo-snapshots", "fallback-url=",
             "force-fallback", "debug-output", "help", "break-on-first-error", "version"])
    except getopt.GetoptError:
        printOptions()
        return 0

    opt_names = []
    name_files = []
    skip_libs = []
    opt_clean = False
    opt_clean_archives = False
    list_libraries = False

    default_bootstrap_filename = "bootstrap.json"
    bootstrap_filename = os.path.abspath(os.path.join(BASE_DIR, default_bootstrap_filename))
    local_bootstrap_filename = ""
    create_repo_snapshots = False
    force_fallback = False
    break_on_first_error = False

    base_dir_path = ""

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            printOptions()
            return 0
        # BUGFIX: was 'opt in ("--version")' -- a substring test on a plain
        # string, not tuple membership.
        if opt in ("--version",):
            print(BOOTSTRAP_VERSION)
            return 0
        if opt in ("-l", "--list"):
            list_libraries = True
        if opt in ("-n", "--name"):
            opt_names.append(arg)
        if opt in ("-N", "--name-file"):
            name_files.append(os.path.abspath(arg))
        if opt in ("--skip",):
            skip_libs.append(arg)
        if opt in ("-c", "--clean"):
            opt_clean = True
        if opt in ("-C", "--clean-all"):
            opt_clean = True
            opt_clean_archives = True
        if opt in ("-b", "--base-dir"):
            base_dir_path = os.path.abspath(arg)
            BASE_DIR = base_dir_path
            SRC_DIR = os.path.join(BASE_DIR, SRC_DIR_BASE)
            ARCHIVE_DIR = os.path.join(BASE_DIR, ARCHIVE_DIR_BASE)
            bootstrap_filename = os.path.join(BASE_DIR, default_bootstrap_filename)
            log("Using " + arg + " as base directory")
        if opt in ("--bootstrap-file",):
            bootstrap_filename = os.path.abspath(arg)
            log("Using main bootstrap file " + bootstrap_filename)
        if opt in ("--local-bootstrap-file",):
            local_bootstrap_filename = os.path.abspath(arg)
            log("Using local bootstrap file " + local_bootstrap_filename)
        if opt in ("--use-tar",):
            USE_TAR = True
        if opt in ("--use-unzip",):
            USE_UNZIP = True
        if opt in ("--repo-snapshots",):
            create_repo_snapshots = True
            log("Will create repository snapshots")
        if opt in ("--fallback-url",):
            FALLBACK_URL = arg
        if opt in ("--force-fallback",):
            force_fallback = True
            log("Using fallback URL to fetch all libraries")
        if opt in ("--break-on-first-error",):
            break_on_first_error = True
        if opt in ("--debug-output",):
            DEBUG_OUTPUT = True

    if platform.system() != "Windows":
        # Unfortunately some IDEs do not have a proper PATH environment variable set,
        # so we search manually for the required tools in some obvious locations.
        paths_to_search = os.environ["PATH"].split(":") + ["/usr/local/bin", "/opt/local/bin", "/usr/bin"]
        TOOL_COMMAND_PYTHON = findToolCommand(TOOL_COMMAND_PYTHON, paths_to_search, required=True)
        TOOL_COMMAND_GIT = findToolCommand(TOOL_COMMAND_GIT, paths_to_search, required=True)
        TOOL_COMMAND_HG = findToolCommand(TOOL_COMMAND_HG, paths_to_search, required=True)
        TOOL_COMMAND_SVN = findToolCommand(TOOL_COMMAND_SVN, paths_to_search, required=True)
        TOOL_COMMAND_PATCH = findToolCommand(TOOL_COMMAND_PATCH, paths_to_search, required=True)
        TOOL_COMMAND_TAR = findToolCommand(TOOL_COMMAND_TAR, paths_to_search, required=USE_TAR)
        TOOL_COMMAND_UNZIP = findToolCommand(TOOL_COMMAND_UNZIP, paths_to_search, required=USE_UNZIP)

    if base_dir_path:
        os.chdir(base_dir_path)

    if name_files:
        for name_file in name_files:
            try:
                with open(name_file) as f:
                    opt_names_local = [l for l in (line.strip() for line in f) if l]
                opt_names_local = [l for l in opt_names_local if l[0] != '#']
                opt_names += opt_names_local
                dlog("Name file contains: " + ", ".join(opt_names_local))
            except Exception:
                warning("ERROR: cannot parse name file '" + name_file + "'")
                return -1

    if force_fallback and not FALLBACK_URL:
        warning("Error: cannot force usage of the fallback location without specifying a fallback URL")
        return -1

    # the state cache lives next to the bootstrap file, with a '.' prefix
    state_filename = os.path.join(os.path.dirname(os.path.splitext(bootstrap_filename)[0]),
                                  "." + os.path.basename(os.path.splitext(bootstrap_filename)[0])) \
                     + os.path.splitext(bootstrap_filename)[1]

    dlog("bootstrap_filename = " + bootstrap_filename)
    dlog("state_filename = " + state_filename)

    # read canonical libraries data
    data = readJSONData(bootstrap_filename)
    if data is None:
        return -1

    # some sanity checking
    for library in data:
        if library.get('name', None) is None:
            warning("ERROR: Invalid schema: library object does not have a 'name'")
            return -1

    # read local libraries data, if available
    local_data = None
    if local_bootstrap_filename:
        local_data = readJSONData(local_bootstrap_filename)

        if local_data is None:
            return -1

        # some sanity checking
        for local_library in local_data:
            if local_library.get('name', None) is None:
                warning("ERROR: Invalid schema: local library object does not have a 'name'")
                return -1

    # merge canonical and local library data, if applicable; local libraries take precedence
    if local_data is not None:
        for local_library in local_data:
            local_name = local_library.get('name', None)
            found_canonical_library = False
            for n, library in enumerate(data):
                name = library.get('name', None)
                if local_name == name:
                    data[n] = local_library  # overwrite library
                    found_canonical_library = True
            if not found_canonical_library:
                data.append(local_library)

    if list_libraries:
        listLibraries(data)
        return 0

    sdata = []
    if os.path.exists(state_filename):
        sdata = readJSONData(state_filename)

    # create source directory
    if not os.path.isdir(SRC_DIR):
        log("Creating directory " + SRC_DIR)
        os.mkdir(SRC_DIR)

    # create archive files directory
    if not os.path.isdir(ARCHIVE_DIR):
        log("Creating directory " + ARCHIVE_DIR)
        os.mkdir(ARCHIVE_DIR)

    failed_libraries = []

    for library in data:
        name = library.get('name', None)
        source = library.get('source', None)
        post = library.get('postprocess', None)
        predicate = library.get('predicate', None)

        if skip_libs and (name in skip_libs):
            continue

        if opt_names and (name not in opt_names):
            continue

        if predicate is not None:
            log("Running predicate code for '" + name + "'")
            # NOTE(review): eval() executes arbitrary Python taken from the
            # bootstrap JSON -- only run bootstrap files from trusted sources.
            if eval(predicate) is not True:
                log("Predicate is not True for '" + name + "'; skipping library")
                continue

        lib_dir = os.path.join(SRC_DIR, name)
        lib_dir = lib_dir.replace(os.path.sep, '/')

        dlog("********** LIBRARY " + name + " **********")
        dlog("lib_dir = " + lib_dir + ")")

        # compare against cached state
        cached_state_ok = False
        if not opt_clean:
            for slibrary in sdata:
                sname = slibrary.get('name', None)
                if sname is not None and sname == name and slibrary == library and os.path.exists(lib_dir):
                    cached_state_ok = True
                    break

        if cached_state_ok:
            log("Cached state for '" + name + "' equals expected state; skipping library")
            continue
        else:
            # remove cached state for library (every entry has a non-None
            # 'name'; see the sanity check above)
            sdata[:] = [s for s in sdata if s.get('name', None) != name]

        # create library directory, if necessary
        if opt_clean:
            log("Cleaning directory for " + name)
            if os.path.exists(lib_dir):
                shutil.rmtree(lib_dir)
        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)

        try:
            # download source
            if source is not None:
                if 'type' not in source:
                    warning("ERROR: Invalid schema for '" + name + "': 'source' object must have a 'type'")
                    return -1
                if 'url' not in source:
                    warning("ERROR: Invalid schema for '" + name + "': 'source' object must have a 'url'")
                    return -1
                src_type = source['type']
                src_url = source['url']

                if src_type == "sourcefile":
                    sha1 = source.get('sha1', None)
                    user_agent = source.get('user-agent', None)
                    try:
                        if force_fallback:
                            raise RuntimeError
                        downloadFile(src_url, ARCHIVE_DIR, name, sha1, force_download=opt_clean_archives, user_agent=user_agent)
                        filename_rel = os.path.basename(src_url)
                        shutil.copyfile(os.path.join(ARCHIVE_DIR, filename_rel), os.path.join(lib_dir, filename_rel))
                    except Exception:
                        if FALLBACK_URL:
                            if not force_fallback:
                                log("WARNING: Downloading of file " + src_url + " failed; trying fallback")

                            p = urlparse(src_url)
                            filename_rel = os.path.split(p.path)[1]  # get original filename
                            p = urlparse(FALLBACK_URL)
                            fallback_src_url = urlunparse([p[0], p[1], p[2] + "/" + ARCHIVE_DIR_BASE + "/" + filename_rel, p[3], p[4], p[5]])
                            downloadFile(fallback_src_url, ARCHIVE_DIR, name, sha1, force_download=True)
                            shutil.copyfile(os.path.join(ARCHIVE_DIR, filename_rel), os.path.join(lib_dir, filename_rel))
                        else:
                            shutil.rmtree(lib_dir)
                            raise
                elif src_type == "archive":
                    sha1 = source.get('sha1', None)
                    user_agent = source.get('user-agent', None)
                    try:
                        if force_fallback:
                            raise RuntimeError
                        downloadAndExtractFile(src_url, ARCHIVE_DIR, name, sha1, force_download=opt_clean_archives, user_agent=user_agent)
                    except Exception:
                        if FALLBACK_URL:
                            if not force_fallback:
                                log("WARNING: Downloading of file " + src_url + " failed; trying fallback")

                            p = urlparse(src_url)
                            filename_rel = os.path.split(p.path)[1]  # get original filename
                            p = urlparse(FALLBACK_URL)
                            fallback_src_url = urlunparse([p[0], p[1], p[2] + "/" + ARCHIVE_DIR_BASE + "/" + filename_rel, p[3], p[4], p[5]])
                            downloadAndExtractFile(fallback_src_url, ARCHIVE_DIR, name, sha1, force_download=True)
                        else:
                            raise

                else:
                    revision = source.get('revision', None)
                    recursive = source.get('recursive', True)

                    archive_name = name + ".tar.gz"  # for reading or writing of snapshot archives
                    if revision is not None:
                        archive_name = name + "_" + revision + ".tar.gz"

                    try:
                        if force_fallback:
                            raise RuntimeError
                        cloneRepository(src_type, src_url, name, revision, False, recursive)

                        if create_repo_snapshots:
                            log("Creating snapshot of library repository '" + name + "'")
                            repo_dir = os.path.join(SRC_DIR, name)
                            archive_filename = os.path.join(SNAPSHOT_DIR, archive_name)

                            dlog("Snapshot will be saved as " + archive_filename)
                            createArchiveFromDirectory(repo_dir, archive_filename, revision is None)

                    except Exception:
                        if FALLBACK_URL:
                            if not force_fallback:
                                log("WARNING: Cloning of repository " + src_url + " failed; trying fallback")

                            # copy archived snapshot from fallback location
                            p = urlparse(FALLBACK_URL)
                            fallback_src_url = urlunparse([p[0], p[1], p[2] + "/" + SNAPSHOT_DIR_BASE + "/" + archive_name, p[3], p[4], p[5]])
                            dlog("Looking for snapshot " + fallback_src_url + " of library repository " + name)

                            # create snapshots files directory
                            downloadAndExtractFile(fallback_src_url, SNAPSHOT_DIR, name, force_download=True)

                            # reset repository state to particular revision (only using local operations inside the function)
                            cloneRepository(src_type, src_url, name, revision, True, True)
                        else:
                            raise
            else:
                # set up clean directory for potential patch application
                shutil.rmtree(lib_dir)
                os.mkdir(lib_dir)

            # post-processing
            if post is not None:
                if 'type' not in post:
                    warning("ERROR: Invalid schema for '" + name + "': 'postprocess' object must have a 'type'")
                    return -1
                if 'file' not in post:
                    warning("ERROR: Invalid schema for '" + name + "': 'postprocess' object must have a 'file'")
                    return -1
                post_type = post['type']
                post_file = post['file']

                if post_type == "patch":
                    applyPatchFile(post_file, name, post.get('pnum', DEFAULT_PNUM))
                elif post_type == "script":
                    runPythonScript(post_file)
                else:
                    warning("ERROR: Unknown post-processing type '" + post_type + "' for " + name)
                    return -1

            # add to cached state
            sdata.append(library)

            # write out cached state
            writeJSONData(sdata, state_filename)
        except urllib.error.URLError as e:
            warning("ERROR: Failure to bootstrap library '" + name + "' (urllib.error.URLError: reason " + str(e.reason) + ")")
            if break_on_first_error:
                exit(-1)
            traceback.print_exc()
            failed_libraries.append(name)
        except Exception:
            warning("ERROR: Failure to bootstrap library '" + name + "' (reason: " + str(sys.exc_info()[0]) + ")")
            if break_on_first_error:
                exit(-1)
            traceback.print_exc()
            failed_libraries.append(name)

    if failed_libraries:
        warning("***************************************")
        warning("FAILURE to bootstrap the following libraries:")
        warning(', '.join(failed_libraries))
        warning("***************************************")
        return -1

    log("Finished")

    # touch the state cache file (BUGFIX: guard against it never having been
    # written, e.g. when no library was processed)
    if os.path.exists(state_filename):
        os.utime(state_filename, None)

    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))