├── tests ├── __init__.py ├── unit │ ├── __init__.py │ ├── backend │ │ ├── __init__.py │ │ ├── test_json_data │ │ │ └── compile_commands.json │ │ └── test_json.py │ ├── includedb │ │ ├── depth │ │ │ ├── aa.h │ │ │ ├── a.cpp │ │ │ └── a.h │ │ ├── repeat │ │ │ ├── a.h │ │ │ └── a.cpp │ │ ├── simple │ │ │ ├── a.h │ │ │ └── a.cpp │ │ ├── both-leaf-and-node │ │ │ ├── b.h │ │ │ ├── a.cpp │ │ │ ├── a.h │ │ │ └── b.cpp │ │ ├── multiple-parents │ │ │ ├── ab.h │ │ │ ├── a.cpp │ │ │ └── b.cpp │ │ ├── mutual │ │ │ ├── a.cpp │ │ │ └── a.h │ │ ├── mutual2 │ │ │ ├── a1.h │ │ │ ├── a2.h │ │ │ └── a.cpp │ │ └── self │ │ │ └── a.cpp │ ├── headerdb │ │ ├── test_06 │ │ │ ├── a.cpp │ │ │ └── include │ │ │ │ └── a │ │ │ │ └── a │ │ ├── test_01 │ │ │ ├── b.cpp │ │ │ ├── a.hpp │ │ │ └── a.cpp │ │ ├── test_04 │ │ │ ├── a.cpp │ │ │ ├── b.cpp │ │ │ ├── a.hpp │ │ │ ├── b.hpp │ │ │ ├── a.ipp │ │ │ └── b.ipp │ │ ├── test_05 │ │ │ ├── utf-8-á.cpp │ │ │ ├── utf-8-á.hpp │ │ │ ├── latin-1-á.cpp │ │ │ └── latin-1-á.hpp │ │ ├── test_02 │ │ │ ├── src │ │ │ │ ├── a.cpp │ │ │ │ └── b.cpp │ │ │ └── include │ │ │ │ ├── a │ │ │ │ └── a.hpp │ │ │ │ └── b │ │ │ │ └── b.hpp │ │ ├── test_03 │ │ │ ├── a.hpp │ │ │ ├── b.hpp │ │ │ ├── a.cpp │ │ │ ├── b.cpp │ │ │ ├── a_private.hpp │ │ │ ├── b_private.hpp │ │ │ └── a_b.cpp │ │ └── test_07 │ │ │ ├── a.hpp │ │ │ ├── a.cpp │ │ │ ├── quoted_a.hpp │ │ │ └── b.cpp │ ├── __main__.py │ ├── test_models.py │ ├── test_dump.py │ └── test_headerdb.py ├── regression │ └── headerdb │ │ ├── .gitignore │ │ ├── irony.supp │ │ ├── README.md │ │ ├── cmake.supp │ │ ├── llvm.supp │ │ ├── filelist_verify.py │ │ └── Makefile ├── integration │ ├── docker │ │ ├── ubuntu-trusty │ │ │ ├── entrypoint.sh │ │ │ └── Dockerfile │ │ └── ubuntu-trusty.sh │ └── packaging-trusty.sh └── __main__.py ├── compdb ├── backend │ ├── __init__.py │ ├── memory.py │ └── json.py ├── __main__.py ├── __about__.py ├── __init__.py ├── complementer │ ├── __init__.py │ └── headerdb.py ├── filelist.py ├── models.py ├── utils.py ├── core.py ├── config.py ├── includedb.py └── cli.py ├── setup.cfg ├── contrib └── zsh │ ├── compdb.plugin.zsh │ ├── README.md │ ├── check-all-helps │ ├── all-helps.txt │ └── functions │ └── _compdb ├── .gitignore ├── requirements.txt ├── requirements-dev.txt ├── .travis.yml ├── tox.ini ├── LICENSE.txt ├── setup.py └── README.rst /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /compdb/backend/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/backend/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/includedb/depth/aa.h: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/includedb/repeat/a.h: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/includedb/simple/a.h: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1 3 | -------------------------------------------------------------------------------- /tests/unit/includedb/both-leaf-and-node/b.h: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/includedb/multiple-parents/ab.h: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/regression/headerdb/.gitignore: -------------------------------------------------------------------------------- 1 | out/ 2 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_06/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a" 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/depth/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a.h" 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/depth/a.h: -------------------------------------------------------------------------------- 1 | #include "aa.h" 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/mutual/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a.h" 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/simple/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a.h" 2 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_01/b.cpp: -------------------------------------------------------------------------------- 1 | #include "a.hpp" 2 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_04/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a.hpp" 2 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_04/b.cpp: -------------------------------------------------------------------------------- 1 | #include "b.hpp" 2 | -------------------------------------------------------------------------------- /contrib/zsh/compdb.plugin.zsh: -------------------------------------------------------------------------------- 1 | fpath+=${0:A:h}/functions 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/multiple-parents/a.cpp: -------------------------------------------------------------------------------- 1 | #include "ab.h" 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/multiple-parents/b.cpp: -------------------------------------------------------------------------------- 1 | #include "ab.h" 2 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.pyc 3 | .tox/ 4 | compdb.egg-info/ 5 | -------------------------------------------------------------------------------- /tests/unit/includedb/both-leaf-and-node/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a.h" 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/both-leaf-and-node/a.h: -------------------------------------------------------------------------------- 1 | #include "b.cpp" 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/both-leaf-and-node/b.cpp: -------------------------------------------------------------------------------- 1 | #include "b.h" 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/mutual2/a1.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "a2.h" 3 | -------------------------------------------------------------------------------- /tests/unit/includedb/mutual2/a2.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "a1.h" 3 | -------------------------------------------------------------------------------- /tests/unit/includedb/repeat/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a.h" 2 | #include "a.h" 3 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_05/utf-8-á.cpp: -------------------------------------------------------------------------------- 1 | #include "utf-8-á.hpp" // háló 2 | -------------------------------------------------------------------------------- /tests/unit/includedb/mutual2/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a1.h" 2 | #include "a2.h" 3 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_02/src/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a/a.hpp" 2 | #include "b/b.hpp" 3 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_02/src/b.cpp: -------------------------------------------------------------------------------- 1 | #include "a/a.hpp" 2 | #include "b/b.hpp" 3 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_06/include/a/a: -------------------------------------------------------------------------------- 1 | #ifndef A_A 2 | #define A_A 3 | 4 | #endif 5 | -------------------------------------------------------------------------------- /tests/unit/includedb/mutual/a.h: -------------------------------------------------------------------------------- 1 | #ifdef EMBED_SOURCE 2 | #include "a.cpp" 3 | #endif 4 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_01/a.hpp: -------------------------------------------------------------------------------- 1 | #ifndef A_HPP_ 2 | #define A_HPP_ 3 | 4 | #endif /* !A_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_03/a.hpp: -------------------------------------------------------------------------------- 1 | #ifndef A_HPP_ 2 | 
#define A_HPP_ 3 | 4 | #endif /* !A_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_03/b.hpp: -------------------------------------------------------------------------------- 1 | #ifndef B_HPP_ 2 | #define B_HPP_ 3 | 4 | #endif /* !B_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_07/a.hpp: -------------------------------------------------------------------------------- 1 | #ifndef A_HPP_ 2 | #define A_HPP_ 3 | 4 | #endif /* !A_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/regression/headerdb/irony.supp: -------------------------------------------------------------------------------- 1 | # -*-conf-*- 2 | 3 | # unused 4 | server/src/support/arraysize.h 5 | -------------------------------------------------------------------------------- /tests/unit/includedb/self/a.cpp: -------------------------------------------------------------------------------- 1 | #ifndef INCLUDED 2 | #define INCLUDED 3 | #include "a.cpp" 4 | #endif 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_02/include/a/a.hpp: -------------------------------------------------------------------------------- 1 | #ifndef A_HPP_ 2 | #define A_HPP_ 3 | 4 | #endif /* !A_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_02/include/b/b.hpp: -------------------------------------------------------------------------------- 1 | #ifndef B_HPP_ 2 | #define B_HPP_ 3 | 4 | #endif /* !B_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_07/a.cpp: -------------------------------------------------------------------------------- 1 | // check that comments aren't wrongly matched 2 | 3 | // #include "a.hpp" 4 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_03/a.cpp: -------------------------------------------------------------------------------- 1 | #include "a.hpp" 2 | 3 | #include "a_private.hpp" 4 | #include "b_private.hpp" 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_03/b.cpp: -------------------------------------------------------------------------------- 1 | #include "b.hpp" 2 | 3 | #include "a_private.hpp" 4 | #include "b_private.hpp" 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Note: the use of environment marker requires pip >= 7.1.0 2 | configparser; python_version < '3.0' 3 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_04/a.hpp: -------------------------------------------------------------------------------- 1 | #ifndef A_HPP_ 2 | #define A_HPP_ 3 | 4 | #include "a.ipp" 5 | 6 | #endif /* !A_HPP_ */ 7 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_04/b.hpp: -------------------------------------------------------------------------------- 1 | #ifndef B_HPP_ 2 | #define B_HPP_ 3 | 4 | #include "b.ipp" 5 | 6 | #endif /* !B_HPP_ */ 7 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_05/utf-8-á.hpp: 
-------------------------------------------------------------------------------- 1 | #ifndef UTF_8_A_HPP_ 2 | #define UTF_8_A_HPP_ 3 | 4 | #endif /* !UTF_8_A_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_03/a_private.hpp: -------------------------------------------------------------------------------- 1 | #ifndef A_PRIVATE_HPP_ 2 | #define A_PRIVATE_HPP_ 3 | 4 | #endif /* !A_PRIVATE_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_03/b_private.hpp: -------------------------------------------------------------------------------- 1 | #ifndef A_PRIVATE_HPP_ 2 | #define A_PRIVATE_HPP_ 3 | 4 | #endif /* !A_PRIVATE_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_05/latin-1-á.cpp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Sarcasm/compdb/HEAD/tests/unit/headerdb/test_05/latin-1-á.cpp -------------------------------------------------------------------------------- /tests/unit/headerdb/test_05/latin-1-á.hpp: -------------------------------------------------------------------------------- 1 | #ifndef LATIN_1_A_HPP_ 2 | #define LATIN_1_A_HPP_ 3 | 4 | #endif /* !LATIN_1_A_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_07/quoted_a.hpp: -------------------------------------------------------------------------------- 1 | #ifndef QUOTED_A_HPP_ 2 | #define QUOTED_A_HPP_ 3 | 4 | #endif /* !QUOTED_A_HPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_03/a_b.cpp: -------------------------------------------------------------------------------- 1 | #include "a.hpp" 2 | #include "b.hpp" 3 | 4 | #include "a_private.hpp" 5 | #include "b_private.hpp" 6 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_04/a.ipp: -------------------------------------------------------------------------------- 1 | #ifndef A_PRIVATE_IPP_ // -*- C++ -*- 2 | #define A_PRIVATE_IPP_ 3 | 4 | #endif /* !A_PRIVATE_IPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_04/b.ipp: -------------------------------------------------------------------------------- 1 | #ifndef B_PRIVATE_IPP_ // -*- C++ -*- 2 | #define B_PRIVATE_IPP_ 3 | 4 | #endif /* !B_PRIVATE_IPP_ */ 5 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_01/a.cpp: -------------------------------------------------------------------------------- 1 | // The comments contain quotes in case the regexp used 2 | // to match #include ".+" is greedy 3 | #include "a.hpp" // "greedy?" 
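The fixture above guards against a greedy `#include "..."` pattern. As an illustrative aside (not part of the test data, and not necessarily the pattern compdb itself uses — the headerdb implementation is not shown in this listing), the difference is easy to see on that exact line:

    import re

    line = '#include "a.hpp" // "greedy?"'

    # A greedy pattern runs to the last double quote on the line...
    greedy = re.search(r'#include "(.+)"', line).group(1)
    assert greedy == 'a.hpp" // "greedy?'

    # ...while the non-greedy variant stops at the first closing quote,
    # which is the behavior the fixture expects.
    lazy = re.search(r'#include "(.+?)"', line).group(1)
    assert lazy == 'a.hpp'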
4 | -------------------------------------------------------------------------------- /tests/unit/headerdb/test_07/b.cpp: -------------------------------------------------------------------------------- 1 | // check that multiple kinds of spaces are accepted 2 | 3 | # include "a.hpp" 4 | # include 5 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | # NOTE: When updating this file, regenerate the tox environment with: 2 | # tox --recreate 3 | 4 | -r requirements.txt 5 | 6 | flake8==3.2.0 7 | pep8-naming==0.4.1 8 | readme-renderer==17.1 9 | tox==2.5.0 10 | yapf==0.19.0 11 | -------------------------------------------------------------------------------- /tests/regression/headerdb/README.md: -------------------------------------------------------------------------------- 1 | Test the headerdb command on a few open source projects. 2 | 3 | The make invocation is self-documented; to start, type: 4 | 5 | make 6 | 7 | These tests may be sensitive to the platform and software versions they run on, 8 | don't panic if manual tweaking is necessary. 9 | -------------------------------------------------------------------------------- /compdb/__main__.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import os 4 | import sys 5 | 6 | # allow invocation of the style 'python /path/to/compdb' 7 | if __package__ == '': 8 | sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) 9 | 10 | from compdb.cli import main # noqa: E402 11 | 12 | if __name__ == '__main__': 13 | sys.exit(main()) 14 | -------------------------------------------------------------------------------- /compdb/__about__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | '__author__', 3 | '__desc__', 4 | '__prog__', 5 | '__url__', 6 | '__version__', 7 | ] 8 | 9 | # these variables are used by the module and also by the setup.py 10 | __author__ = 'Guillaume Papin' 11 | __desc__ = '''The compilation database Swiss army knife''' 12 | __prog__ = 'compdb' 13 | __url__ = 'https://github.com/Sarcasm/compdb' 14 | __version__ = '0.2.0' 15 | -------------------------------------------------------------------------------- /tests/integration/docker/ubuntu-trusty/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | : "${USER?Please set USER}" 6 | : "${GROUP?Please set GROUP to the user group using $(id -gn)}" 7 | : "${UID?Please set UID}" 8 | : "${GID?Please set GID}" 9 | 10 | groupadd --gid "$GID" "$GROUP" 11 | useradd --create-home --shell /bin/bash --gid "$GROUP" --uid "$UID" "$USER" 12 | 13 | export HOME="/home/$USER" 14 | cd "$HOME" 15 | exec chpst -u "$USER" "$@" 16 | 17 | -------------------------------------------------------------------------------- /contrib/zsh/README.md: -------------------------------------------------------------------------------- 1 | # ZSH completion plugin 2 | 3 | To install, add the `functions/` directory to your `fpath`. 4 | 5 | This is done automatically if you use one of the numerous ZSH plugin managers. 6 | 7 | 8 | ## check-all-helps: monitor CLI interface changes 9 | 10 | The `check-all-helps` script monitors the `compdb` help outputs. 11 | When a change is detected, it suggests updating files that may depend on it. 
12 | 13 | To run, type: 14 | 15 | ./check-all-helps 16 | -------------------------------------------------------------------------------- /tests/integration/docker/ubuntu-trusty/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:trusty 2 | 3 | MAINTAINER Guillaume Papin "guillaume.papin@epitech.eu" 4 | 5 | RUN apt-get update && apt-get install -y --no-install-recommends \ 6 | python-pip \ 7 | python-virtualenv \ 8 | python3-pip \ 9 | runit \ 10 | \ 11 | && apt-get clean \ 12 | && rm -rf /var/lib/apt/lists/* 13 | 14 | COPY entrypoint.sh / 15 | 16 | ENTRYPOINT ["/entrypoint.sh"] 17 | 18 | -------------------------------------------------------------------------------- /tests/__main__.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import os 4 | import unittest 5 | 6 | # alternatively: one can use the command line like this: 7 | # python -m unittest discover '--pattern=*.py' 8 | if __name__ == '__main__': 9 | local_path = os.path.abspath(os.path.dirname(__file__)) 10 | top_level = os.path.dirname(local_path) 11 | testsuite = unittest.TestLoader().discover( 12 | local_path, top_level_dir=top_level, pattern="test_*.py") 13 | unittest.TextTestRunner(verbosity=1).run(testsuite) 14 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - 2.7 4 | - 3.3 5 | - 3.4 6 | - 3.5 7 | - 3.6 8 | - nightly 9 | - pypy 10 | 11 | env: TOXENV=py 12 | 13 | matrix: 14 | fast_finish: true 15 | allow_failures: 16 | - python: nightly 17 | include: 18 | - python: 3.6 19 | env: TOXENV=lint,docs 20 | - python: 2.7 21 | script: ./tests/integration/packaging-trusty.sh 22 | - python: 3.6 23 | install: python setup.py install 24 | script: ./contrib/zsh/check-all-helps 25 | 26 | install: pip install tox 27 | 28 | script: 29 | - tox 30 | -------------------------------------------------------------------------------- /tests/unit/__main__.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import os 4 | import unittest 5 | 6 | # alternatively: one can use the command line like this: 7 | # python -m unittest discover '--pattern=*.py' 8 | if __name__ == '__main__': 9 | local_path = os.path.abspath(os.path.dirname(__file__)) 10 | top_level = os.path.dirname(os.path.dirname(local_path)) 11 | testsuite = unittest.TestLoader().discover( 12 | local_path, top_level_dir=top_level, pattern="test_*.py") 13 | unittest.TextTestRunner(verbosity=1).run(testsuite) 14 | -------------------------------------------------------------------------------- /tests/unit/backend/test_json_data/compile_commands.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "directory": "/tmp/", 4 | "command": "clang -DA=1", 5 | "file": "/tmp/a.cpp" 6 | }, 7 | { 8 | "directory": "/tmp/", 9 | "arguments": [ "clang", "-DB=1" ], 10 | "file": "/tmp/b.cpp" 11 | }, 12 | { 13 | "directory": "/tmp/", 14 | "arguments": [ "clang", "-DB=2" ], 15 | "file": "/tmp/b.cpp" 16 | }, 17 | { 18 | "directory": "/tmp/", 19 | "arguments": [ "clang", "-DC=1" ], 20 | "file": "/tmp/c.cpp", 21 | "output": "c.o" 22 | } 23 | , 24 | { 25 | "directory": "/tmp/", 26 | "command": "clang -I 'a b c' -I 
\"d e h\"", 27 | "file": "/tmp/d.cpp" 28 | } 29 | ] 30 | -------------------------------------------------------------------------------- /tests/unit/test_models.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import unittest 4 | 5 | from compdb.models import CompileCommand 6 | 7 | 8 | class CompileCommandTest(unittest.TestCase): 9 | def test_comparable(self): 10 | a1 = CompileCommand("/", "a.c", ["cc"]) 11 | a2 = CompileCommand("/", "a.c", ["cc"]) 12 | b = CompileCommand("/", "b.c", ["cc"]) 13 | self.assertTrue(a1 == a2) 14 | self.assertFalse(a1 == b) 15 | self.assertTrue(a1 != b) 16 | self.assertFalse(a1 != a2) 17 | self.assertEqual(a1, a2) 18 | 19 | 20 | if __name__ == "__main__": 21 | unittest.main() 22 | -------------------------------------------------------------------------------- /compdb/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | from compdb.__about__ import ( 4 | __author__, 5 | __url__, 6 | __desc__, 7 | __prog__, 8 | __version__, 9 | ) 10 | 11 | __all__ = [ 12 | '__author__', 13 | '__desc__', 14 | '__prog__', 15 | '__url__', 16 | '__version__', 17 | ] 18 | 19 | 20 | class CompdbError(Exception): 21 | '''Base exception for errors raised by compdb''' 22 | 23 | def __init__(self, message, cause=None): 24 | super(CompdbError, self).__init__(message) 25 | self.cause = cause 26 | 27 | 28 | class NotImplementedError(NotImplementedError, CompdbError): 29 | pass 30 | -------------------------------------------------------------------------------- /contrib/zsh/check-all-helps: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | cd $(dirname "${BASH_SOURCE[0]}") 6 | 7 | compdb --help > all-helps.txt.new 8 | 9 | compdb help | grep "^ [a-z]" | while read command descr; do 10 | echo 11 | echo 12 | echo "# $command" 13 | echo 14 | compdb $command --help 15 | done >> all-helps.txt.new 16 | 17 | if ! 
diff -u all-helps.txt{,.new}; then 18 | cat < 11 | deps = -rrequirements-dev.txt 12 | 13 | [testenv:docs] 14 | commands = python setup.py check -m -r -s 15 | 16 | [testenv:lint] 17 | commands = 18 | yapf --diff --recursive \ 19 | --exclude 'tests/integration/*' \ 20 | --exclude 'tests/regression/headerdb/*' \ 21 | compdb tests setup.py 22 | flake8 compdb tests setup.py 23 | 24 | [flake8] 25 | exclude = 26 | *.egg, 27 | .tox, 28 | tests/integration, 29 | tests/regression/headerdb 30 | -------------------------------------------------------------------------------- /tests/regression/headerdb/cmake.supp: -------------------------------------------------------------------------------- 1 | # -*-conf-*- 2 | 3 | # not built 4 | Modules/CMakeCompilerABI.h 5 | 6 | # not built 7 | Source/CPack/WiX/* 8 | Source/QtDialog/* 9 | Source/cmGhsMultiTargetGenerator.h 10 | Source/cmIDEFlagTable.h 11 | Source/cmIDEOptions.h 12 | Source/cmLocalGhsMultiGenerator.h 13 | Source/cm*VisualStudio* 14 | # Source/cmVisualStudio* 15 | Source/cmVS* 16 | Source/cm*XCode*.h 17 | 18 | # these are not really cmake sources 19 | Tests/* 20 | 21 | # unused 22 | Utilities/KWStyle/Headers/CMakeHeader.h 23 | 24 | # not an include 25 | Utilities/Release/WiX/CustomAction/exports.def 26 | Utilities/cmbzip2/libbz2.def 27 | Utilities/cmcurl/lib/Makefile.inc 28 | 29 | # unused 30 | Utilities/cmjsoncpp/include/json/json.h 31 | Utilities/cmjsoncpp/include/json/version.h 32 | 33 | # not built 34 | Utilities/cmlibarchive/libarchive/config_freebsd.h 35 | 36 | # not an include 37 | Utilities/cmzlib/zlib.def 38 | -------------------------------------------------------------------------------- /compdb/backend/memory.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import os 4 | 5 | from compdb.models import CompilationDatabaseInterface 6 | 7 | 8 | class InMemoryCompilationDatabase(CompilationDatabaseInterface): 9 | def __init__(self, compile_commands=None): 10 | if compile_commands is None: 11 | self.compile_commands = [] 12 | else: 13 | self.compile_commands = compile_commands 14 | 15 | def get_compile_commands(self, filepath): 16 | filepath = os.path.abspath(filepath) 17 | for compile_command in self.compile_commands: 18 | if compile_command.normfile == filepath: 19 | yield compile_command 20 | 21 | def get_all_files(self): 22 | return (c.normfile for c in self.compile_commands) 23 | 24 | def get_all_compile_commands(self): 25 | return iter(self.compile_commands) 26 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Guillaume Papin 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /tests/integration/docker/ubuntu-trusty.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit 4 | 5 | LOCAL_PATH=$(cd $(dirname "${BASH_SOURCE[0]}") && pwd) 6 | COMPDB_PATH=$(cd ${LOCAL_PATH}/../../.. && pwd) 7 | IMAGE_NAME=sarcasm/compdb-ubuntu-trusty 8 | 9 | if [[ $# -ne 1 ]]; then 10 | 1>&2 cat </dev/null docker build -q -t ${IMAGE_NAME} "${LOCAL_PATH}/ubuntu-trusty" 21 | 22 | tempdir=$(mktemp -d --tmpdir -t compdb_docker_trusty.XXXXXXXXXX) 23 | >/dev/null pushd "$tempdir" 24 | 25 | cp "$USER_SCRIPT" script.sh 26 | 27 | cat <<'EOF' > wrapper.sh 28 | #!/bin/bash 29 | set -o errexit 30 | tar xaf /data/compdb.tar.gz 31 | cd compdb 32 | exec /data/script.sh 33 | EOF 34 | chmod +x wrapper.sh 35 | 36 | # TODO: verify that removed files (in the index) are not present in the archive 37 | >/dev/null pushd "$COMPDB_PATH" 38 | gitref=$(git stash create) 39 | git archive --prefix=compdb/ -o "${tempdir}/compdb.tar.gz" ${gitref:-HEAD} 40 | >/dev/null popd 41 | 42 | docker run \ 43 | --interactive \ 44 | --tty \ 45 | --rm \ 46 | --env USER=user \ 47 | --env GROUP=grp \ 48 | --env UID=$(id -u) \ 49 | --env GID=$(id -g) \ 50 | --volume $(pwd):/data \ 51 | ${IMAGE_NAME} \ 52 | /data/wrapper.sh 53 | 54 | >/dev/null popd 55 | 56 | rm -rf "$tempdir" 57 | -------------------------------------------------------------------------------- /contrib/zsh/all-helps.txt: -------------------------------------------------------------------------------- 1 | usage: compdb [general options] command [command options] [command arguments] 2 | 3 | compdb: the compilation database Swiss army knife 4 | 5 | general options: 6 | -h, --help show this help message and exit 7 | --debug MODULE turn on debug logs for the specified modules 8 | --trace trace execution 9 | -p BUILD_DIR build path(s) 10 | 11 | available commands: 12 | help show general or command help 13 | list list database entries 14 | version display this version of compdb 15 | 16 | 17 | # help 18 | 19 | usage: compdb help [-h] [command] 20 | 21 | show general or command help 22 | 23 | positional arguments: 24 | command show help information for COMMAND, i.e. 
`compdb COMMAND --help` 25 | 26 | optional arguments: 27 | -h, --help show this help message and exit 28 | 29 | 30 | # list 31 | 32 | usage: compdb list [-h] [-1] [file [file ...]] 33 | 34 | list database entries 35 | 36 | positional arguments: 37 | file restrict results to a list of files 38 | 39 | optional arguments: 40 | -h, --help show this help message and exit 41 | -1, --unique restrict results to a single entry per file 42 | 43 | 44 | # version 45 | 46 | usage: compdb version [-h] [--short] 47 | 48 | display this version of compdb 49 | 50 | optional arguments: 51 | -h, --help show this help message and exit 52 | --short machine readable version 53 | -------------------------------------------------------------------------------- /compdb/complementer/__init__.py: -------------------------------------------------------------------------------- 1 | import compdb 2 | 3 | 4 | class ComplementerInterface(object): 5 | """Provides a method to compute a compilation database complement. 6 | 7 | .. seealso:: complement() 8 | """ 9 | 10 | def complement(self, layers): 11 | """Compute the complements of multiple layers of databases. 12 | 13 | This method should provide compile commands of files not present in the 14 | compilation databases but that are part of the same project. 15 | 16 | Multiple databases are passed as arguments so that the complementer has 17 | the opportunity to reduce duplicates and assign each file to the most 18 | fitting database. 19 | 20 | Example use case #1: 21 | Imagine a build system with one build directory/compdb per target, 22 | 3 targets: 23 | 1. libfoo Foo.h (header-only, no foo.cpp to take options from) 24 | 2. foo-test FooTest.cpp (tests Foo.h, best candidate for the 25 | compile options) 26 | 3. foo-example main.cpp 27 | Includes Foo.h but is not a very good fit compared to 28 | FooTest.cpp in #2 29 | 30 | Example use case #2: 31 | A multi-compdb project has: 32 | - headers in project A 33 | - project B includes project A headers 34 | 35 | In this multi-project setup, the complementer should have 36 | the opportunity to complement project A's database with the headers 37 | over project B which uses the headers "more indirectly". 
38 | """ 39 | raise compdb.NotImplementedError 40 | -------------------------------------------------------------------------------- /tests/regression/headerdb/llvm.supp: -------------------------------------------------------------------------------- 1 | # -*-conf-*- 2 | 3 | # not built 4 | bindings/go/* 5 | 6 | # not built 7 | lib/Target/WebAssembly/* 8 | 9 | # not used by LLVM itself (OTOH Clang uses this) 10 | include/llvm/ADT/ImmutableList.h 11 | 12 | # unused 13 | include/llvm/CodeGen/MachORelocation.h 14 | 15 | # unused 16 | include/llvm/DebugInfo/CodeView/CodeViewOStream.h 17 | include/llvm/DebugInfo/CodeView/FunctionId.h 18 | include/llvm/DebugInfo/CodeView/TypeSymbolEmitter.h 19 | 20 | # not built 21 | include/llvm/DebugInfo/PDB/DIA/DIADataStream.h 22 | include/llvm/DebugInfo/PDB/DIA/DIAEnumDebugStreams.h 23 | include/llvm/DebugInfo/PDB/DIA/DIAEnumLineNumbers.h 24 | include/llvm/DebugInfo/PDB/DIA/DIAEnumSourceFiles.h 25 | include/llvm/DebugInfo/PDB/DIA/DIAEnumSymbols.h 26 | include/llvm/DebugInfo/PDB/DIA/DIALineNumber.h 27 | include/llvm/DebugInfo/PDB/DIA/DIARawSymbol.h 28 | include/llvm/DebugInfo/PDB/DIA/DIASourceFile.h 29 | 30 | # not built 31 | include/llvm/ExecutionEngine/OProfileWrapper.h 32 | 33 | # unused 34 | include/llvm/Support/Capacity.h 35 | include/llvm/Support/RegistryParser.h 36 | include/llvm/Support/Solaris.h 37 | 38 | # not built 39 | lib/ExecutionEngine/IntelJITEvents/* 40 | 41 | # unused 42 | lib/ExecutionEngine/MCJIT/ObjectBuffer.h 43 | 44 | # not built 45 | lib/Fuzzer/* 46 | 47 | # not built 48 | lib/Target/AVR/* 49 | 50 | # unused 51 | lib/Transforms/Instrumentation/MaximumSpanningTree.h 52 | 53 | # not built 54 | test/DebugInfo/Inputs/* 55 | 56 | # tablegen include, not C 57 | test/TableGen/Include.inc 58 | 59 | # not built 60 | test/tools/llvm-cov/Inputs/test.h 61 | 62 | # not an include 63 | unittests/ExecutionEngine/MCJIT/MCJITTests.def 64 | -------------------------------------------------------------------------------- /compdb/filelist.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import fnmatch 4 | import itertools 5 | import os 6 | 7 | 8 | class FileScanner(object): 9 | def __init__(self): 10 | self.extensions = [] 11 | self.suppressions = [] 12 | self.source_exts = [ 13 | '.c', 14 | '.C', 15 | '.cc', 16 | '.c++', 17 | '.C++', 18 | '.cxx', 19 | '.cpp', 20 | ] 21 | self.header_exts = [ 22 | '.h', 23 | '.H', 24 | '.hh', 25 | '.h++', 26 | '.H++', 27 | '.hxx', 28 | '.hpp', 29 | ] 30 | 31 | def enable_group(self, group): 32 | if group == 'source': 33 | self.extensions += self.source_exts 34 | elif group == 'header': 35 | self.extensions += self.header_exts 36 | 37 | def add_suppressions(self, suppressions): 38 | # filter out suppressions 39 | # could convert the fnmatch expression to regex 40 | # and use re.search() instead of prefixing */ pattern 41 | self.suppressions.extend( 42 | ['*/{}'.format(supp) for supp in suppressions]) 43 | 44 | def _accept_path(self, path): 45 | if os.path.splitext(path)[1] not in self.extensions: 46 | return False 47 | for suppression in self.suppressions: 48 | if fnmatch.fnmatchcase(path, suppression): 49 | return False 50 | return True 51 | 52 | def scan(self, path): 53 | for root, dirnames, filenames in os.walk(os.path.abspath(path)): 54 | for filename in filenames: 55 | out_path = os.path.join(root, filename) 56 | if self._accept_path(out_path): 57 | yield out_path 58 | 59 | def scan_many(self, 
paths): 60 | return itertools.chain.from_iterable((self.scan(path) 61 | for path in paths)) 62 | -------------------------------------------------------------------------------- /contrib/zsh/functions/_compdb: -------------------------------------------------------------------------------- 1 | #compdef compdb 2 | 3 | # ZSH completion for compdb 4 | 5 | (( $+functions[__compdb-help] )) || 6 | _compdb-help() { 7 | _arguments '(- :)'{-h,--help}'[show help message and exit]' 8 | } 9 | 10 | (( $+functions[_compdb-version] )) || 11 | _compdb-version() { 12 | _arguments -C \ 13 | '(- :)'{-h,--help}'[show help message and exit]' \ 14 | '--short[machine readable version]' 15 | } 16 | 17 | (( $+functions[_compdb-list] )) || 18 | _compdb-list() { 19 | _arguments \ 20 | '(- :)'{-h,--help}'[show help message and exit]' \ 21 | '(-1 --unique)'{-1,--unique}'[restrict results to a single entry per file]' \ 22 | '(-)*:source file:_files -g \*.\(c\|h\|cc\|hh\|cpp\|hpp\|cxx\|hxx\|c\+\+\|h\+\+\)' 23 | } 24 | 25 | (( $+functions[__compdb_commands] )) || 26 | _compdb_commands() { 27 | local -a commands 28 | 29 | commands=( 30 | help:"display this help" 31 | list:"list database entries" 32 | version:"display this version of compdb" 33 | ) 34 | 35 | _describe -t compdb-commands "command" commands 36 | } 37 | 38 | _compdb() { 39 | local -a base_opts 40 | base_opts=( 41 | '(- :)'{-h,--help}'[show help message and exit]' 42 | '*-p[build path]:build directory:_files -/' 43 | '*--debug[turn on debug logs for the specified modules]:module:' 44 | '--trace[trace execution]' 45 | ) 46 | 47 | local curcontext="$curcontext" state line 48 | typeset -A opt_args 49 | _arguments -C -S : \ 50 | "${base_opts[@]}" \ 51 | '(-): :_compdb_commands' \ 52 | '(-)*:: :->option-or-argument' && return 53 | 54 | # complete arguments 55 | # if a function exists _compdb- call it, 56 | # otherwise do like git and use 'use-fallback' 57 | if [[ $state = option-or-argument ]]; then 58 | integer ret=1 59 | curcontext=${curcontext%:*:*}:compdb-$words[1]: 60 | 61 | if ! 
_call_function ret _compdb-$words[1]; then 62 | if zstyle -T :completion:$curcontext: use-fallback; then 63 | _default && ret=0 64 | else 65 | _message "unknown command: $words[1]" 66 | fi 67 | fi 68 | return $ret 69 | fi 70 | 71 | return 0 72 | } 73 | 74 | _compdb "$@" 75 | 76 | # Local Variables: 77 | # mode: Shell-Script 78 | # sh-indentation: 2 79 | # indent-tabs-mode: nil 80 | # sh-basic-offset: 2 81 | # End: 82 | # vim: ft=zsh sw=2 ts=2 et 83 | -------------------------------------------------------------------------------- /tests/unit/backend/test_json.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import os 4 | import unittest 5 | 6 | from compdb.backend.json import JSONCompilationDatabase 7 | from compdb.models import CompileCommand 8 | 9 | 10 | class JSONCompilationDatabaseTest(unittest.TestCase): 11 | LOCAL_PATH = os.path.abspath(os.path.dirname(__file__)) 12 | TEST_DIR = os.path.join(LOCAL_PATH, 'test_json_data') 13 | 14 | def setUp(self): 15 | self.db = JSONCompilationDatabase.probe_directory(self.TEST_DIR) 16 | 17 | def tearDown(self): 18 | self.db = None 19 | 20 | def test_get_compile_commands(self): 21 | a_commands = list(self.db.get_compile_commands("/tmp/a.cpp")) 22 | self.assertEqual(len(a_commands), 1) 23 | self.assertEqual(a_commands[0], 24 | CompileCommand("/tmp/", "/tmp/a.cpp", 25 | ["clang", "-DA=1"])) 26 | b_commands = list(self.db.get_compile_commands("/tmp/b.cpp")) 27 | self.assertEqual(len(b_commands), 2) 28 | self.assertEqual(b_commands[0], 29 | CompileCommand("/tmp/", "/tmp/b.cpp", 30 | ["clang", "-DB=1"])) 31 | self.assertEqual(b_commands[1], 32 | CompileCommand("/tmp/", "/tmp/b.cpp", 33 | ["clang", "-DB=2"])) 34 | c_commands = list(self.db.get_compile_commands("/tmp/c.cpp")) 35 | self.assertEqual(len(c_commands), 1) 36 | self.assertEqual(c_commands[0], 37 | CompileCommand("/tmp/", "/tmp/c.cpp", 38 | ["clang", "-DC=1"], "c.o")) 39 | 40 | def test_quoted(self): 41 | d_commands = list(self.db.get_compile_commands("/tmp/d.cpp")) 42 | self.assertEqual(len(d_commands), 1) 43 | self.assertEqual( 44 | d_commands[0], 45 | CompileCommand("/tmp/", "/tmp/d.cpp", 46 | ["clang", "-I", "a b c", "-I", "d e h"])) 47 | 48 | def test_get_all_files(self): 49 | files = list(sorted(self.db.get_all_files())) 50 | self.assertEqual( 51 | files, 52 | [ 53 | '/tmp/a.cpp', 54 | '/tmp/b.cpp', 55 | # note: it's debatable whether duplicates should be present 56 | '/tmp/b.cpp', 57 | '/tmp/c.cpp', 58 | '/tmp/d.cpp', 59 | ]) 60 | -------------------------------------------------------------------------------- /tests/unit/test_dump.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import unittest 4 | 5 | try: 6 | from StringIO import StringIO 7 | except ImportError: 8 | from io import StringIO 9 | 10 | from compdb.backend.json import ( 11 | arguments_to_json, 12 | compile_commands_to_json, 13 | ) 14 | from compdb.models import CompileCommand 15 | 16 | # command: The compile command executed. After JSON unescaping, this must be a 17 | # valid command to rerun the exact compilation step for the translation unit in 18 | # the environment the build system uses. Parameters use shell quoting and shell 19 | # escaping of quotes, with '"' and '\' being the only special characters. Shell 20 | # expansion is not supported. 
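As an aside on the quoting rules described in the comment above (the source of the quote is referenced just below), a small sketch with the standard library's `shlex` shows the round trip between a single shell-quoted "command" string and its argument vector; the re-quoting helper is an assumption for illustration only and deliberately ignores embedded double quotes, which the test data that follows does cover:

    import shlex

    # A JSON "command" entry is one shell-quoted string; splitting it
    # recovers the argument vector seen by the compiler.
    command = 'clang++ "-DFOO=a b" -DBAR=1'
    arguments = shlex.split(command)
    assert arguments == ['clang++', '-DFOO=a b', '-DBAR=1']

    # Going back to a single string, arguments containing spaces must be
    # re-quoted (naively here: embedded quotes are not handled).
    rejoined = ' '.join('"{}"'.format(a) if ' ' in a else a for a in arguments)
    assert rejoined == command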
21 | # 22 | # -- http://clang.llvm.org/docs/JSONCompilationDatabase.html 23 | 24 | ARGUMENTS_TO_JSON_DATA = [ 25 | (['clang++'], r'"clang++"'), 26 | (['clang++', '-std=c++11'], r'"clang++ -std=c++11"'), 27 | (['clang++', '-DFOO=a b'], r'"clang++ \"-DFOO=a b\""'), 28 | (['clang++', '-DFOO="str"'], r'"clang++ -DFOO=\\\"str\\\""'), 29 | (['clang++', '-DFOO="string with spaces"'], 30 | r'"clang++ \"-DFOO=\\\"string with spaces\\\"\""'), 31 | (['clang++', '-DFOO="string with spaces and \\-slash"'], 32 | r'"clang++ \"-DFOO=\\\"string with spaces and \\\\-slash\\\"\""'), 33 | (['clang++', "-DBAR='c'"], '"clang++ \\"-DBAR=\'c\'\\""'), 34 | ] 35 | 36 | COMPILE_COMMANDS_TO_JSON_DATA = ([ 37 | CompileCommand("/tmp", "foo.cpp", ["clang++"]), 38 | CompileCommand("/tmp/bar", "bar.cpp", ["clang++", "-std=c++11"]), 39 | CompileCommand("/tmp/foo", "foo.cpp", ["clang++", "-std=c++11"], "foo.o"), 40 | ], r"""[ 41 | { 42 | "directory": "/tmp", 43 | "command": "clang++", 44 | "file": "foo.cpp" 45 | }, 46 | 47 | { 48 | "directory": "/tmp/bar", 49 | "command": "clang++ -std=c++11", 50 | "file": "bar.cpp" 51 | }, 52 | 53 | { 54 | "directory": "/tmp/foo", 55 | "command": "clang++ -std=c++11", 56 | "file": "foo.cpp", 57 | "output": "foo.o" 58 | } 59 | ] 60 | """) 61 | 62 | 63 | class ToJSON(unittest.TestCase): 64 | def test_arguments_to_json(self): 65 | for tpl in ARGUMENTS_TO_JSON_DATA: 66 | self.assertEqual(tpl[1], arguments_to_json(tpl[0])) 67 | 68 | def test_compile_commands_to_json(self): 69 | output = StringIO() 70 | compile_commands_to_json(COMPILE_COMMANDS_TO_JSON_DATA[0], output) 71 | self.assertEqual(COMPILE_COMMANDS_TO_JSON_DATA[1], output.getvalue()) 72 | 73 | 74 | if __name__ == "__main__": 75 | unittest.main() 76 | -------------------------------------------------------------------------------- /compdb/models.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import os 4 | import pprint 5 | 6 | import compdb 7 | 8 | 9 | class ProbeError(LookupError, compdb.CompdbError): 10 | """Raised when probing a compilation database failed""" 11 | 12 | def __init__(self, message, cause=None): 13 | super(ProbeError, self).__init__(message) 14 | self.cause = cause 15 | 16 | 17 | class CompileCommand: 18 | def __init__(self, directory, file, arguments, output=None): 19 | self.directory = directory 20 | self.file = file 21 | self.arguments = arguments 22 | self.output = output 23 | 24 | @property 25 | def normfile(self): 26 | return os.path.normpath(os.path.join(self.directory, self.file)) 27 | 28 | def __repr__(self): 29 | return "{{directory: {}, file: {}, arguments: {}, output: {}}}".format( 30 | repr(self.directory), 31 | repr(self.file), pprint.pformat(self.arguments), repr(self.output)) 32 | 33 | def __str__(self): 34 | return self.__repr__() 35 | 36 | def _as_tuple(self): 37 | return (self.directory, self.file, self.arguments, self.output) 38 | 39 | def __eq__(self, other): 40 | if isinstance(other, self.__class__): 41 | return self._as_tuple() == other._as_tuple() 42 | return NotImplemented 43 | 44 | def __ne__(self, other): 45 | return not self == other 46 | 47 | 48 | class CompilationDatabaseInterface(object): 49 | @classmethod 50 | def probe_directory(cls, directory): 51 | """Probe compilation database for a specific directory. 52 | 53 | Should return an instance of the compilation database 54 | if the directory contains a database. 
55 | If the directory does not contain a database, 56 | a ProbeError should be raised (the default action if not overriden). 57 | """ 58 | raise ProbeError( 59 | "{}: compilation databases not found".format(directory)) 60 | 61 | def get_compile_commands(self, filepath): 62 | """Get the compile commands for the given file. 63 | 64 | Return an iterable of CompileCommand. 65 | """ 66 | raise compdb.NotImplementedError 67 | 68 | def get_all_files(self): 69 | """Return an iterable of path strings. 70 | 71 | It is ok to return the same path multiple times 72 | unless all_files_unique() returns True. 73 | """ 74 | raise compdb.NotImplementedError 75 | 76 | def all_files_unique(self): 77 | """Whether or not get_all_files() returns unique paths. 78 | 79 | Override this if get_all_files() is guaranteed to return unique paths, 80 | this fact can be used for optimization. 81 | """ 82 | return False 83 | 84 | def get_all_compile_commands(self): 85 | """Return an iterable of CompileCommand.""" 86 | raise compdb.NotImplementedError 87 | -------------------------------------------------------------------------------- /tests/regression/headerdb/filelist_verify.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from __future__ import print_function, unicode_literals, absolute_import 4 | 5 | import argparse 6 | import fnmatch 7 | import io 8 | import json 9 | import os 10 | import sys 11 | 12 | 13 | def get_suppressions_patterns_from_file(path): 14 | patterns = [] 15 | with io.open(path, 'r', encoding='utf-8') as f: 16 | for line in f: 17 | pattern = line.partition('#')[0].rstrip() 18 | if pattern: 19 | patterns.append(pattern) 20 | return patterns 21 | 22 | 23 | def print_set_summary(files, name): 24 | print("Only in {}:".format(name)) 25 | cwd = os.getcwd() 26 | for path in sorted(files): 27 | if path.startswith(cwd): 28 | pretty_filename = os.path.relpath(path) 29 | else: 30 | pretty_filename = path 31 | print(' {}'.format(pretty_filename)) 32 | 33 | 34 | def main(): 35 | parser = argparse.ArgumentParser( 36 | description='Verify headerdb contains specified files.') 37 | parser.add_argument('headerdb', help='A header compilation database') 38 | parser.add_argument('filelist', help='List of file to check against') 39 | parser.add_argument('--suppressions', 40 | action='append', 41 | default=[], 42 | help='Add suppression file') 43 | 44 | args = parser.parse_args() 45 | 46 | with open(args.headerdb) as f: 47 | db_files = [ 48 | os.path.normpath(os.path.join(entry['directory'], entry['file'])) 49 | for entry in json.load(f) 50 | ] 51 | 52 | with open(args.filelist) as f: 53 | list_files = [os.path.abspath(line.rstrip('\n')) for line in f] 54 | 55 | suppressions = [] 56 | for supp in args.suppressions: 57 | suppressions.extend(get_suppressions_patterns_from_file(supp)) 58 | 59 | db_files = frozenset(db_files) 60 | list_files = frozenset(list_files) 61 | 62 | # this only is not a hard error, files may be in system paths or build 63 | # directory for example 64 | db_only = db_files - list_files 65 | if db_only: 66 | print_set_summary(db_only, args.headerdb) 67 | 68 | list_only = list_files - db_files 69 | # filter out suppressions 70 | # could convert the fnmatch expression to mutex and use re.search() 71 | # instead of prefixing */ pattern 72 | suppressions = ['*/{}'.format(supp) for supp in suppressions] 73 | for supp in suppressions: 74 | filterred = set(fnmatch.filter(list_only, supp)) 75 | list_only -= filterred 76 | 77 | if not list_only: 
78 | sys.exit(0) 79 | 80 | # print the difference and exit with an error 81 | print_set_summary(list_only, args.filelist) 82 | print("error: some files are missing from the header databases", 83 | file=sys.stderr) 84 | sys.exit(1) 85 | 86 | 87 | if __name__ == '__main__': 88 | main() 89 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import setuptools 5 | import sys 6 | 7 | from codecs import open 8 | from distutils.version import LooseVersion 9 | from os import path 10 | from setuptools import setup, find_packages 11 | 12 | local_path = path.abspath(path.dirname(__file__)) 13 | 14 | # find_packages()'s 'include' parameter was introduced in setuptools 3.3. 15 | # 16 | # Ubuntu 14.04 comes with 3.3 for the system wide installation, 17 | # but when using virtualenv the setuptools version is 2.2. 18 | # The solution is to upgrade setuptools in the virtualenv. 19 | if LooseVersion(setuptools.__version__) < LooseVersion('3.3'): 20 | print("setuptools version:", str(LooseVersion(setuptools.__version__))) 21 | print("to upgrade with pip, type: pip install -U setuptools") 22 | raise AssertionError("compdb requires setuptools 3.3 or higher") 23 | 24 | with open(path.join(local_path, 'README.rst'), encoding='utf-8') as f: 25 | long_desc = f.read() 26 | 27 | about = {} 28 | with open(path.join(local_path, "compdb", "__about__.py")) as f: 29 | exec(f.read(), about) 30 | 31 | install_requires = [] 32 | extras_require = {} 33 | 34 | # Depending on the setuptools version, 35 | # fill in install_requires or extras_require. 36 | # 37 | # The idea comes from the following article: 38 | # - https://hynek.me/articles/conditional-python-dependencies/ 39 | # 40 | # This handles Ubuntu 14.04, which comes with setuptools 3.3. 41 | # But not everything is handled; a more recent version of setuptools 42 | # is still required to support bdist_wheel. 
43 | if LooseVersion(setuptools.__version__) < LooseVersion('18'): 44 | if "bdist_wheel" in sys.argv: 45 | print("setuptools version:", str(LooseVersion(setuptools.__version__))) 46 | print("to upgrade with pip, type: pip install -U setuptools") 47 | raise AssertionError("setuptools >= 18 required for wheels") 48 | if sys.version_info[0] < 3: 49 | install_requires.append('configparser') 50 | else: # setuptools >= 18 51 | extras_require[":python_version<'3.0'"] = ['configparser'] 52 | 53 | setup( 54 | name=about['__prog__'], 55 | version=about['__version__'], 56 | description=about['__desc__'], 57 | long_description=long_desc, 58 | url=about['__url__'], 59 | author=about['__author__'], 60 | author_email='guillaume.papin@epitech.eu', 61 | license='MIT', 62 | classifiers=[ 63 | 'Development Status :: 3 - Alpha', 64 | 'Intended Audience :: Developers', 65 | 'Topic :: Software Development :: Build Tools', 66 | 'License :: OSI Approved :: MIT License', 67 | 'Programming Language :: Python :: 2', 68 | 'Programming Language :: Python :: 2.7', 69 | 'Programming Language :: Python :: 3', 70 | 'Programming Language :: Python :: 3.3', 71 | 'Programming Language :: Python :: 3.4', 72 | 'Programming Language :: Python :: 3.5', 73 | 'Programming Language :: Python :: 3.6', 74 | "Programming Language :: Python :: Implementation :: PyPy", 75 | ], 76 | keywords=['Clang', 'compilation-database', 'compdb'], 77 | packages=find_packages(include=['compdb', 'compdb.*']), 78 | test_suite="tests", 79 | entry_points={ 80 | "console_scripts": [ 81 | "compdb=compdb.cli:main", 82 | ], 83 | }, 84 | python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*', 85 | install_requires=install_requires, 86 | extras_require=extras_require) 87 | -------------------------------------------------------------------------------- /compdb/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import codecs 4 | import contextlib 5 | import itertools 6 | import os 7 | import re 8 | import sys 9 | 10 | try: 11 | from StringIO import StringIO 12 | except ImportError: 13 | from io import StringIO 14 | 15 | 16 | # Check if a generator has at least one element. 17 | # 18 | # Since we don't want to consume the element the function return a tuple. 19 | # The first element is a boolean telling whether or not the generator is empty. 20 | # The second element is a new generator where the first element has been 21 | # put back. 22 | def empty_iterator_wrap(iterator): 23 | try: 24 | first = next(iterator) 25 | except StopIteration: 26 | return True, None 27 | return False, itertools.chain([first], iterator) 28 | 29 | 30 | # compatibility function, 31 | # not as smart as the version of the Python standard library 32 | @contextlib.contextmanager 33 | def suppress(*exceptions): 34 | """Context manager to suppress specified exceptions 35 | with suppress(OSError): 36 | os.remove(somefile) 37 | """ 38 | try: 39 | yield 40 | except exceptions: 41 | pass 42 | 43 | 44 | def re_fullmatch(regex, string, flags=0): 45 | """Emulate python-3.4 re.fullmatch().""" 46 | return re.match("(?:" + regex + r")\Z", string, flags=flags) 47 | 48 | 49 | # The issue this function tries to solve is to have a text writer where unicode 50 | # data can be written without decoding error. 
It should work in the following 51 | # conditions: 52 | # - python 2 & 3, output to terminal 53 | # - python 2 & 3, output to a pipe or shell redirection 54 | # - python 2 & 3, output to a StringIO 55 | # 56 | # When using python 2, if the program output is redirected to a pipe or file, 57 | # the output encoding may be set to 'ascii', 58 | # potentially producing UnicodeEncodeError. 59 | # Redirections do not seem to cause such issues with python 3 60 | # but explicit utf-8 encoding seems a sensible choice to output data to be 61 | # consumed by other programs (e.g. JSON). 62 | def stdout_unicode_writer(): 63 | stream = sys.stdout 64 | if isinstance(stream, StringIO): 65 | return stream 66 | if hasattr(stream, 'buffer'): 67 | stream = stream.buffer 68 | return codecs.getwriter('utf-8')(stream) 69 | 70 | 71 | def get_friendly_path(path): 72 | full_path = os.path.normpath(path) 73 | try: 74 | rel_path = os.path.relpath(full_path) 75 | except ValueError: 76 | # on Windows, we can get a ValueError 77 | # if the current directory is on another drive: 78 | # > ValueError: path is on drive D:, start on drive C: 79 | # > -- https://github.com/Sarcasm/compdb/issues/16 80 | return full_path 81 | if rel_path.startswith(os.path.join(os.pardir, os.pardir)): 82 | friendly_path = full_path 83 | else: 84 | friendly_path = rel_path 85 | return friendly_path 86 | 87 | 88 | def logical_abspath(p): 89 | """Same as os.path.abspath, 90 | but use the logical current working directory to expand relative paths. 91 | """ 92 | if os.path.isabs(p): 93 | return os.path.normpath(p) 94 | cwd = os.getenv('PWD') 95 | if cwd and os.path.isabs(cwd) and os.path.samefile(cwd, '.'): 96 | return os.path.normpath(os.path.join(cwd, p)) 97 | return os.path.abspath(p) 98 | 99 | 100 | def locate_dominating_file(name, start_dir=os.curdir): 101 | curdir = os.path.abspath(start_dir) 102 | olddir = None 103 | while not curdir == olddir: 104 | if os.path.exists(os.path.join(curdir, name)): 105 | return curdir 106 | olddir = curdir 107 | curdir = os.path.dirname(curdir) 108 | return None 109 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | compdb: the compilation database Swiss army knife 2 | ================================================= 3 | 4 | .. contents:: :local: 5 | 6 | 7 | Introduction 8 | ------------ 9 | 10 | compdb_ is a command line tool to manipulate compilation databases. 11 | A compilation database is a database of compile options; 12 | it records which compile options are used to build the files in a project. 13 | An example of a compilation database is the `JSON Compilation Database`_. 14 | 15 | ``compdb`` aims to make it easier for you to run tools on your codebase 16 | by spoon-feeding you the right compile options. 17 | 18 | ``compdb`` is not so much about generating the initial compilation database; 19 | this is left to other tools, such as ``cmake`` and ``ninja``. 20 | It is only the glue between the initial compilation database and your tool(s). 21 | 22 | 23 | Motivation 24 | ---------- 25 | 26 | With the proliferation of Clang-based tools, 27 | it has become apparent that the compile options 28 | are no longer useful solely to the compiler. 29 | 30 | Standalone tools such as clang-tidy_ 31 | or text editors with libclang_ integration have to deal with compile options. 32 | 33 | Examples of such tools dealing with compilation databases are: 34 | irony-mode_, rtags_ and ycmd_. 

Based on this evidence, ``compdb`` came to life:
a tool that has knowledge of the compile options and can share it,
both to inform the text editor and to run Clang-based tools from the shell.


Getting started
---------------

Installation
~~~~~~~~~~~~

Install with pip_::

    pip install compdb

From GitHub, as a per-user install::

    pip install --user git+https://github.com/Sarcasm/compdb.git#egg=compdb


Generate a compilation database with header files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Assuming a build directory ``build/`` containing a ``compile_commands.json``,
a new compilation database, which also contains the header files,
can be generated with::

    compdb -p build/ list > compile_commands.json


Running the tests
~~~~~~~~~~~~~~~~~

To run the tests, type::

    python -m tests

Or::

    tox --skip-missing-interpreters

For regression tests on a few open source projects::

    cd tests/regression/headerdb
    make [all|help]


Contribute
----------

Contributions are always welcome!

Try to be consistent with the existing code; it will ease the review.


License
-------

This project is licensed under the MIT License.
See LICENSE.txt for details.


Acknowledgments
---------------

* repo_: for its ubiquitous command line interface,
  which served as an inspiration
* scan-build_: for the clear Python package design
* git_: for the ``git-config`` API
* `julio.meroh.net`_: for the interesting article series on `CLI design`_


.. _clang-tidy: http://clang.llvm.org/extra/clang-tidy/
.. _CLI design: https://julio.meroh.net/2013/09/cli-design-series-wrap-up.html
.. _compdb: https://github.com/Sarcasm/compdb
.. _git: https://git-scm.com/
.. _irony-mode: https://github.com/Sarcasm/irony-mode
.. _julio.meroh.net: https://julio.meroh.net/
.. _JSON Compilation Database: http://clang.llvm.org/docs/JSONCompilationDatabase.html
.. _libclang: http://clang.llvm.org/doxygen/group__CINDEX.html
.. _pip: https://pip.pypa.io/
.. _repo: https://gerrit.googlesource.com/git-repo/
.. _rtags: https://github.com/Andersbakken/rtags
.. _scan-build: https://github.com/rizsotto/scan-build
.. _ycmd: https://github.com/Valloric/ycmd
--------------------------------------------------------------------------------
/tests/regression/headerdb/Makefile:
--------------------------------------------------------------------------------
LOCAL_PATH := $(abspath $(patsubst %/,%,$(dir $(lastword $(MAKEFILE_LIST)))))

# Customizable variables start here

OUT_DIR=out
COMPDB=python $(LOCAL_PATH)/../../../compdb

# Customizable variables end here

# Optional user configuration can override the default settings
-include local.mk

BUILD_DIR := $(OUT_DIR)/build
DOWNLOAD_DIR := $(OUT_DIR)/dl
SOURCE_DIR := $(OUT_DIR)/src
STAMP_DIR := $(OUT_DIR)/stamp

STAMP_TYPES := download source configure headerdb verify

# Make sure none of the targets are considered intermediate
.PRECIOUS: $(foreach X,$(STAMP_TYPES),$(STAMP_DIR)/%.$X)

# Disable builtin suffix rules.
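# (Note: an empty .SUFFIXES target clears the suffix list, so make's
# old-fashioned implicit suffix rules no longer apply; the explicit pattern
# rules defined below are unaffected.)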
.SUFFIXES:

abbreviate_path = $(patsubst $(CURDIR)/%,./%,$1)
abbreviate_exe_path = $(patsubst $(CURDIR)/%,./%,$1)

PROJECTS := \
	cmake \
	irony \
	llvm

# Project specific variables
#
# Mandatory variables:
#   _URL
# Optional variables:
#   _ARCHIVE_NAME
#   _CMAKELIST_SUBDIR
#   _SUPPRESSIONS

cmake_URL := https://cmake.org/files/v3.6/cmake-3.6.1.tar.gz
cmake_SUPPRESSIONS := $(LOCAL_PATH)/cmake.supp

irony_URL := https://github.com/Sarcasm/irony-mode/archive/v0.2.0.tar.gz
irony_ARCHIVE_NAME := irony-mode-0.2.0.tar.gz
irony_CMAKELIST_SUBDIR := server/
irony_SUPPRESSIONS := $(LOCAL_PATH)/irony.supp

llvm_URL := http://llvm.org/releases/3.8.1/llvm-3.8.1.src.tar.xz
llvm_SUPPRESSIONS := $(LOCAL_PATH)/llvm.supp

archive_name = $(firstword $($1_ARCHIVE_NAME) $(notdir $($1_URL)))
archive_path = $(DOWNLOAD_DIR)/$(call archive_name,$1)
archive_noext = $(basename $(subst .tar.,.tar,$(call archive_name,$1)))
source_path = $(SOURCE_DIR)/$(call archive_noext,$1)
build_path = $(BUILD_DIR)/$(call archive_noext,$1)

define USAGE
Usage: $(MAKE) [all|project]

Test headerdb on a handful of open source projects.

Available projects: $(PROJECTS)

Targets:
  all       Download, configure, generate compilation database, generate
            header compilation database and verify the header compilation
            database results.
  clean     Clean but preserve downloaded artifacts.
  cleanall  Clean everything.
  help      [Default] Show this message.

Customizable variables (local.mk and command line):
  OUT_DIR   Where to put generated files (default: out)
endef

.PHONY: help
help:
	@: $(info $(USAGE))

.PHONY: all
all: $(PROJECTS)

# New line definition; please keep the two (and only two) empty lines in the macro
define endl


endef

# Declare phony targets for each step of each project,
# e.g. make irony-download, make irony-configure, ...
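#
# For illustration only (this is not generated verbatim), for the "irony"
# project the template below expands to roughly:
#
#   .PHONY: irony
#   irony: $(STAMP_DIR)/irony.verify
#   .PHONY: irony-download irony-source irony-configure irony-headerdb irony-verify
#   irony-download: $(STAMP_DIR)/irony.download
#   irony-source: $(STAMP_DIR)/irony.source
#   ...and so on for each stamp type.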
define project_targets_template
.PHONY: $1
$1: $(STAMP_DIR)/$(1).verify
.PHONY: $(foreach X,$(STAMP_TYPES),$(1)-$(X))
$(foreach X,$(STAMP_TYPES),
.PHONY: $(1)-$(X)$(endl)
$(1)-$(X): $(STAMP_DIR)/$(1).$(X)$(endl))
endef
$(foreach P,$(PROJECTS),$(eval $(call project_targets_template,$(P))))

.PHONY: clean cleanall
clean:
	rm -rf $(BUILD_DIR) $(SOURCE_DIR) $(STAMP_DIR)

cleanall:
	rm -rf $(OUT_DIR)

# order-only prerequisite targets
$(BUILD_DIR) $(DOWNLOAD_DIR) $(SOURCE_DIR) $(STAMP_DIR):
	@mkdir -p $@

$(STAMP_DIR)/%.verify: $(STAMP_DIR)/%.headerdb
	$(COMPDB) -p $(call build_path,$*) -c compdb.complementers=headerdb check -g header \
		$(foreach SUPP,$($*_SUPPRESSIONS),\
			--suppressions $(call abbreviate_path,$(SUPP))) \
		$(call source_path,$*)
	@touch $@

$(STAMP_DIR)/%.headerdb: $(STAMP_DIR)/%.configure
	$(COMPDB) -p $(call build_path,$*) -c compdb.complementers=headerdb update
	@touch $@

$(STAMP_DIR)/%.configure: $(STAMP_DIR)/%.source
	@mkdir -p $(call build_path,$*)
	cd $(call build_path,$*) && \
		cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
			$(abspath $(call source_path,$*))/$($*_CMAKELIST_SUBDIR)
	@touch $@

$(STAMP_DIR)/%.source: $(STAMP_DIR)/%.download | $(SOURCE_DIR)
	tar xf $(call archive_path,$*) -C $(SOURCE_DIR)
	@touch $@

# TODO: could verify md5sum
$(STAMP_DIR)/%.download: | $(STAMP_DIR) $(DOWNLOAD_DIR)
	wget --quiet --continue "$($*_URL)" -O $(call archive_path,$*)
	@touch $@
--------------------------------------------------------------------------------
/tests/integration/packaging-trusty.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# This script tests the packaging of compdb on Ubuntu Trusty.

# It has been made to run on Travis CI and inside a Docker image of Ubuntu 14.04.
# For the Docker image, one can use docker/ubuntu-trusty.sh.
# At this time it is not recommended to run this locally
# because files are created in the user home directory.
# The dependencies can be found in docker/ubuntu-trusty/Dockerfile.

if [[ ! -f compdb/__init__.py ]]; then
    1>&2 echo "error: this script expects to run in the compdb root directory!"
    exit 1
fi

set -o errexit
set -o xtrace

# The initial goal for this script was to run in a "pristine" Ubuntu Trusty,
# with a stock installation of Python;
# unfortunately Travis CI uses isolated virtualenvs:
# - https://docs.travis-ci.com/user/languages/python/#Travis-CI-Uses-Isolated-virtualenvs
#
# This means one has to accommodate the 'pip install' commands
# to not use the --user option when run under a virtualenv.
# Doing otherwise triggers the following error:
#   $ pip install --user .
#   Can not perform a '--user' install. User site-packages are not visible in this virtualenv.
#
# virtualenv detection logic copied from pip:
# - https://github.com/pypa/pip/blob/ccd75d4daf7753b6587cffbb1ba52e7dfa5e9915/pip/locations.py#L41-L51
USER_OPTS=""
if python -c 'import sys; sys.exit(hasattr(sys, "real_prefix"))' &&
   python -c 'import sys; sys.exit(sys.prefix != getattr(sys, "base_prefix", sys.prefix))'
then
    USER_OPTS="--user"
fi

# First generate release files to ~/dist
virtualenv .venv
source .venv/bin/activate
pip install -U "setuptools>=18"
pip install wheel
python setup.py sdist bdist_wheel
deactivate
mv dist ~/dist
rm -r .venv

# Install from source
mkdir ~/userbase
env PYTHONPATH=$(PYTHONUSERBASE=~/userbase python -m site --user-site) \
    PYTHONUSERBASE=~/userbase \
    python setup.py install --user
env PYTHONPATH=$(PYTHONUSERBASE=~/userbase python -m site --user-site) \
    PYTHONUSERBASE=~/userbase \
    PATH="$HOME/userbase/bin:$PATH" \
    compdb version
rm -r ~/userbase

# Install from source with pip
mkdir ~/userbase
PYTHONUSERBASE=~/userbase pip install ${USER_OPTS} .
PYTHONUSERBASE=~/userbase PATH="$HOME/userbase/bin:$PATH" compdb version
rm -r ~/userbase

# Install from source in virtualenv
# On Ubuntu 14.04, the system-wide setuptools version is 3.3,
# but in a virtualenv it is 2.2, which is insufficient.
virtualenv .venv
source .venv/bin/activate
pip install -U "setuptools>=3.3"
python setup.py install
compdb version
deactivate
rm -r .venv

# Wheel
mkdir ~/userbase
PYTHONUSERBASE=~/userbase pip install ${USER_OPTS} ~/dist/compdb-*.whl
PYTHONUSERBASE=~/userbase PATH="$HOME/userbase/bin:$PATH" compdb version
rm -r ~/userbase

# Wheel in virtualenv
# Seems to work out of the box on Ubuntu 14.04, with setuptools 2.2
# and pip 1.5.x.
# I assume wheels have had support for 'extras_require'
# longer than source distributions.
virtualenv .venv
source .venv/bin/activate
pip install ~/dist/compdb-*.whl
compdb version
deactivate
rm -r .venv

# pip install source distribution
mkdir ~/userbase
PYTHONUSERBASE=~/userbase pip install ${USER_OPTS} ~/dist/compdb-*.tar.gz
PYTHONUSERBASE=~/userbase PATH="$HOME/userbase/bin:$PATH" compdb version
rm -r ~/userbase

# pip install source distribution in virtualenv
#
# 2 alternatives:
# pip vendors setuptools; in pip 7.1.0 the vendored setuptools was bumped to version 18.
# Starting from this version, 'extras_require' is supported in setup().
#
# 1. works but does not use extras_require
virtualenv .venv
source .venv/bin/activate
pip install -U 'setuptools>=3.3,<18'
pip install -U 'pip<7.1.0'
pip install ~/dist/compdb-*.tar.gz
compdb version
deactivate
rm -r .venv
# 2. works by using extras_require
virtualenv .venv
source .venv/bin/activate
pip install -U 'setuptools>=18'
pip install -U 'pip>=7.1.0'
pip install ~/dist/compdb-*.tar.gz
compdb version
deactivate
rm -r .venv

# requirements.txt depends on a pip version that vendors setuptools>=18
virtualenv .venv
source .venv/bin/activate
pip install -U 'pip==7.1.0'
pip install -r requirements.txt
deactivate
rm -r .venv
--------------------------------------------------------------------------------
/compdb/backend/json.py:
--------------------------------------------------------------------------------
from __future__ import print_function, unicode_literals, absolute_import

import json
import os
import re
import shlex

import compdb.utils

from compdb.models import (CompileCommand, CompilationDatabaseInterface)


class JSONCompilationDatabase(CompilationDatabaseInterface):
    def __init__(self, json_db_path):
        self.json_db_path = json_db_path
        self.__data = None

    @classmethod
    def probe_directory(cls, directory):
        """Automatically create a CompilationDatabase from a build directory."""
        db_path = os.path.join(directory, 'compile_commands.json')
        if os.path.exists(db_path):
            return cls(db_path)
        return super(JSONCompilationDatabase, cls).probe_directory(directory)

    def get_compile_commands(self, filepath):
        filepath = compdb.utils.logical_abspath(filepath)
        for elem in self._data:
            if os.path.normpath(os.path.join(elem['directory'],
                                             elem['file'])) == filepath:
                yield self._dict_to_compile_command(elem)

    def get_all_files(self):
        for entry in self._data:
            yield os.path.normpath(
                os.path.join(entry['directory'], entry['file']))

    def get_all_compile_commands(self):
        return map(self._dict_to_compile_command, self._data)

    @staticmethod
    def _dict_to_compile_command(d):
        if 'arguments' in d:
            arguments = d['arguments']
        else:
            # PERFORMANCE: I think shlex is inherently slow,
            # something performing better may be necessary
            arguments = shlex.split(d['command'],
                                    # XXX: os.name is "posix" on msys2/cygwin,
                                    # is that correct?
                                    posix=os.name == "posix")
        return CompileCommand(d['directory'], d['file'], arguments,
                              d.get('output'))

    @property
    def _data(self):
        if self.__data is None:
            with open(self.json_db_path) as f:
                self.__data = json.load(f)
        return self.__data


def arguments_to_json(arguments):
    cmd_line = '"'
    for i, argument in enumerate(arguments):
        if i != 0:
            cmd_line += ' '
        has_space = re.search(r"\s", argument) is not None
        # the reader now accepts single quotes, so we need to support them here too
        has_simple_quote = "'" in argument
        need_quoting = has_space or has_simple_quote
        if need_quoting:
            cmd_line += r'\"'
        cmd_line += argument.replace("\\", r'\\\\').replace(r'"', r'\\\"')
        if need_quoting:
            cmd_line += r'\"'
    return cmd_line + '"'


def str_to_json(s):
    return '"{}"'.format(s.replace("\\", "\\\\").replace('"', r'\"'))


def compile_command_to_json(compile_command):
    output_str = ""
    if compile_command.output:
        output_str = ',\n  "output": {}'.format(
            str_to_json(compile_command.output))
    return r'''{{
  "directory": {},
  "command": {},
  "file": {}{}
}}'''.format(
        str_to_json(compile_command.directory),
        arguments_to_json(compile_command.arguments),
        str_to_json(compile_command.file), output_str)


class JSONCompileCommandSerializer(object):
    def __init__(self, fp):
        self.fp = fp
        self.__count = 0

    def __enter__(self):
        self.fp.write('[\n')
        return self

    def serialize(self, compile_command):
        if self.__count != 0:
            self.fp.write(',\n\n')
        self.fp.write(compile_command_to_json(compile_command))
        self.__count += 1

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.__count != 0:
            self.fp.write('\n')
        self.fp.write(']\n')


def compile_commands_to_json(compile_commands, fp):
    """
    Dump JSON.

    Parameters
    ----------
    compile_commands : CompileCommand iterable
    fp
        A file-like object; JSON is written to it.
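
    Examples
    --------
    A minimal illustrative call (the directory, file and arguments below are
    made up):

        import io

        buf = io.StringIO()
        cmd = CompileCommand('/tmp/build', 'a.cpp', ['cc', '-c', 'a.cpp'], None)
        compile_commands_to_json([cmd], buf)
        # buf.getvalue() now contains a JSON array with a single entry.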
129 | """ 130 | with JSONCompileCommandSerializer(fp) as serializer: 131 | for compile_command in compile_commands: 132 | serializer.serialize(compile_command) 133 | -------------------------------------------------------------------------------- /tests/unit/test_headerdb.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from __future__ import print_function, unicode_literals, absolute_import 4 | 5 | import operator 6 | import os 7 | import unittest 8 | 9 | from compdb.backend.memory import InMemoryCompilationDatabase 10 | from compdb.complementer.headerdb import ( 11 | Complementer, 12 | subword_split, 13 | ) 14 | from compdb.models import CompileCommand 15 | 16 | 17 | class Utils(unittest.TestCase): 18 | def test_subword_split(self): 19 | self.assertEqual(["Camel", "Case"], subword_split("CamelCase")) 20 | self.assertEqual(["camel", "Back"], subword_split("camelBack")) 21 | self.assertEqual(["String", "Ref"], subword_split("StringRef")) 22 | self.assertEqual(["Gst", "Buffer"], subword_split("GstBuffer")) 23 | self.assertEqual(["NS", "String"], subword_split("NSString")) 24 | self.assertEqual(["ALLCAP"], subword_split("ALLCAP")) 25 | self.assertEqual(["nocap"], subword_split("nocap")) 26 | self.assertEqual(["One", "Two", "Three", "Four"], 27 | subword_split("OneTwoThreeFour")) 28 | self.assertEqual(["Foo1", "Bar2"], subword_split("Foo1Bar2")) 29 | self.assertEqual(["123"], subword_split("123")) 30 | self.assertEqual(["lowercase", "underscore"], 31 | subword_split("lowercase_underscore")) 32 | self.assertEqual(["Funny", "Case", "dash"], 33 | subword_split("FunnyCase-dash")) 34 | # this one is debatable, we could have empty strings too 35 | self.assertEqual(["underscore"], subword_split("_underscore_")) 36 | self.assertEqual(["with", "dot"], subword_split("with.dot")) 37 | self.assertEqual(["with", "space"], subword_split("with space")) 38 | 39 | 40 | class HeaderDB(unittest.TestCase): 41 | LOCAL_PATH = os.path.abspath(os.path.dirname(__file__)) 42 | TEST_DIR = os.path.join(LOCAL_PATH, 'headerdb') 43 | 44 | def srcdir(self, dirname): 45 | return os.path.join(self.TEST_DIR, dirname) 46 | 47 | def complement(self, compile_commands): 48 | ''' 49 | The output is returned sorted in the following order: file, directory, 50 | arguments. 
51 | ''' 52 | database = InMemoryCompilationDatabase(compile_commands) 53 | result = list(Complementer().complement([[database]])[0] 54 | .get_all_compile_commands()) 55 | result.sort(key=operator.attrgetter('file', 'directory', 'arguments')) 56 | return result 57 | 58 | def test_01(self): 59 | test_srcdir = self.srcdir('test_01') 60 | result = self.complement([ 61 | CompileCommand( 62 | directory=test_srcdir, 63 | arguments=['clang++', '-DA=1'], 64 | file='a.cpp'), 65 | CompileCommand( 66 | directory=test_srcdir, 67 | arguments=['clang++', '-DB=1'], 68 | file='b.cpp'), 69 | ]) 70 | 71 | self.assertEqual(1, len(result)) 72 | self.assertEqual('a.hpp', result[0].file) 73 | self.assertEqual(['clang++', '-DA=1', '-c', 'a.hpp'], 74 | result[0].arguments) 75 | 76 | def test_02(self): 77 | test_srcdir = self.srcdir('test_02') 78 | result = self.complement([ 79 | CompileCommand( 80 | directory=test_srcdir, 81 | arguments=['clang++', '-iquote', 'include', '-DA=1'], 82 | file='src/a.cpp'), 83 | CompileCommand( 84 | directory=test_srcdir, 85 | arguments=['clang++', '-iquoteinclude', '-DB=1'], 86 | file='src/b.cpp'), 87 | ]) 88 | self.assertEqual(2, len(result)) 89 | self.assertEqual('include/a/a.hpp', result[0].file) 90 | self.assertEqual( 91 | ['clang++', '-iquote', 'include', '-DA=1', '-c', 'include/a/a.hpp'], 92 | result[0].arguments) 93 | self.assertEqual('include/b/b.hpp', result[1].file) 94 | self.assertEqual( 95 | ['clang++', '-iquoteinclude', '-DB=1', '-c', 'include/b/b.hpp'], 96 | result[1].arguments) 97 | 98 | def test_03(self): 99 | test_srcdir = self.srcdir('test_03') 100 | result = self.complement([ 101 | CompileCommand( 102 | directory=test_srcdir, 103 | arguments=['clang++', '-DAB=1'], 104 | file='a_b.cpp'), 105 | CompileCommand( 106 | directory=test_srcdir, 107 | arguments=['clang++', '-DA=1'], 108 | file='a.cpp'), 109 | CompileCommand( 110 | directory=test_srcdir, 111 | arguments=['clang++', '-DB=1'], 112 | file='b.cpp'), 113 | ]) 114 | self.assertEqual(4, len(result)) 115 | self.assertEqual('a.hpp', result[0].file) 116 | self.assertEqual(['clang++', '-DA=1', '-c', 'a.hpp'], 117 | result[0].arguments) 118 | self.assertEqual('a_private.hpp', result[1].file) 119 | self.assertEqual(['clang++', '-DA=1', '-c', 'a_private.hpp'], 120 | result[1].arguments) 121 | self.assertEqual('b.hpp', result[2].file) 122 | self.assertEqual(['clang++', '-DB=1', '-c', 'b.hpp'], 123 | result[2].arguments) 124 | self.assertEqual('b_private.hpp', result[3].file) 125 | self.assertEqual(['clang++', '-DB=1', '-c', 'b_private.hpp'], 126 | result[3].arguments) 127 | 128 | def test_04(self): 129 | test_srcdir = self.srcdir('test_04') 130 | result = self.complement([ 131 | CompileCommand( 132 | directory=test_srcdir, 133 | arguments=['clang++', '-DA=1'], 134 | file='a.cpp'), 135 | CompileCommand( 136 | directory=test_srcdir, 137 | arguments=['clang++', '-DB=1'], 138 | file='b.cpp'), 139 | ]) 140 | self.assertEqual(4, len(result)) 141 | self.assertEqual('a.hpp', result[0].file) 142 | self.assertEqual(['clang++', '-DA=1', '-c', 'a.hpp'], 143 | result[0].arguments) 144 | self.assertEqual('a.ipp', result[1].file) 145 | self.assertEqual(['clang++', '-DA=1', '-c', 'a.ipp'], 146 | result[1].arguments) 147 | self.assertEqual('b.hpp', result[2].file) 148 | self.assertEqual(['clang++', '-DB=1', '-c', 'b.hpp'], 149 | result[2].arguments) 150 | self.assertEqual('b.ipp', result[3].file) 151 | self.assertEqual(['clang++', '-DB=1', '-c', 'b.ipp'], 152 | result[3].arguments) 153 | 154 | def test_05(self): 155 | test_srcdir = 
self.srcdir('test_05') 156 | result = self.complement([ 157 | CompileCommand( 158 | directory=test_srcdir, 159 | arguments=['clang++', '-DLATIN=1'], 160 | file='latin-1-á.cpp'), 161 | CompileCommand( 162 | directory=test_srcdir, 163 | arguments=['clang++', '-DUTF=8'], 164 | file='utf-8-á.cpp'), 165 | ]) 166 | self.assertEqual(2, len(result)) 167 | self.assertEqual('latin-1-á.hpp', result[0].file) 168 | self.assertEqual(['clang++', '-DLATIN=1', '-c', 'latin-1-á.hpp'], 169 | result[0].arguments) 170 | self.assertEqual('utf-8-á.hpp', result[1].file) 171 | self.assertEqual(['clang++', '-DUTF=8', '-c', 'utf-8-á.hpp'], 172 | result[1].arguments) 173 | 174 | def test_06(self): 175 | test_srcdir = self.srcdir('test_06') 176 | result = self.complement([ 177 | CompileCommand( 178 | directory=test_srcdir, 179 | arguments=['clang++', '-Iinclude', '-Iinclude/a'], 180 | file='a.cpp'), 181 | ]) 182 | self.assertEqual(1, len(result)) 183 | self.assertEqual('include/a/a', result[0].file) 184 | self.assertEqual( 185 | ['clang++', '-Iinclude', '-Iinclude/a', '-c', 'include/a/a'], 186 | result[0].arguments) 187 | 188 | def test_07(self): 189 | test_srcdir = self.srcdir('test_07') 190 | result = self.complement([ 191 | CompileCommand( 192 | directory=test_srcdir, 193 | arguments=['clang++', '-DA=1', '-I.'], 194 | file='a.cpp'), 195 | CompileCommand( 196 | directory=test_srcdir, 197 | arguments=['clang++', '-DB=1', '-I.'], 198 | file='b.cpp'), 199 | ]) 200 | self.assertEqual(2, len(result)) 201 | self.assertEqual('a.hpp', result[0].file) 202 | self.assertEqual(['clang++', '-DB=1', '-I.', '-c', 'a.hpp'], 203 | result[0].arguments) 204 | self.assertEqual('quoted_a.hpp', result[1].file) 205 | self.assertEqual(['clang++', '-DB=1', '-I.', '-c', 'quoted_a.hpp'], 206 | result[1].arguments) 207 | 208 | 209 | if __name__ == "__main__": 210 | unittest.main() 211 | -------------------------------------------------------------------------------- /compdb/core.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, unicode_literals, absolute_import 2 | 3 | import glob 4 | import io 5 | import itertools 6 | import os 7 | 8 | import compdb 9 | from compdb.backend.json import (JSONCompilationDatabase, 10 | compile_commands_to_json) 11 | from compdb.backend.memory import InMemoryCompilationDatabase 12 | from compdb.models import (CompilationDatabaseInterface, ProbeError) 13 | from compdb.utils import (suppress, re_fullmatch, empty_iterator_wrap) 14 | 15 | 16 | class ComplementerError(compdb.CompdbError): 17 | '''Base exception for complementer-related errors''' 18 | 19 | def __init__(self, complementer, message): 20 | super(ComplementerError, self).__init__(message) 21 | self.complementer = complementer 22 | 23 | 24 | class ComplementerCacheNotFound(ComplementerError): 25 | def __init__(self, complementer, directory): 26 | super(ComplementerCacheNotFound, self).__init__( 27 | complementer, "Could not find '{}' complementer in '{}'".format( 28 | complementer.name, directory)) 29 | self.directory = directory 30 | 31 | 32 | class ComplementerNameError(ComplementerError): 33 | def __init__(self, complementer): 34 | super(ComplementerNameError, self).__init__( 35 | complementer, "Invalid complementer name: '{}'".format( 36 | complementer.name)) 37 | 38 | 39 | def _chain_get_compile_commands(databases, filepath): 40 | return itertools.chain.from_iterable((db.get_compile_commands(filepath) 41 | for db in databases)) 42 | 43 | 44 | def 
_chain_get_all_files(databases):
    return itertools.chain.from_iterable((db.get_all_files()
                                          for db in databases))


def _chain_get_all_compile_commands(databases):
    return itertools.chain.from_iterable((db.get_all_compile_commands()
                                          for db in databases))


class _ComplementerWrapper(object):
    def __init__(self, name, complementer):
        if not self._valid_name(name):
            raise ComplementerNameError(complementer)
        self.name = name
        self.complementer = complementer

    @staticmethod
    def _valid_name(name):
        return re_fullmatch('[a-z][a-z0-9]*(?:_[a-z0-9]+)*', name)

    @property
    def cache_filename(self):
        return self.name + '.json'

    def complement(self, databases):
        return self.complementer.complement(databases)


class CompilationDatabase(object):
    def __init__(self):
        self._registry = []
        self._complementers = []
        self._layers = [[]]
        self._directories = []
        self.raise_on_missing_cache = True

    def register_backend(self, db_cls):
        if db_cls not in self._registry:
            self._registry.append(db_cls)

    def add_complementer(self, name, complementer):
        complementer = _ComplementerWrapper(name, complementer)
        self._complementers.append(complementer)
        self._layers.append([])

    def _add_databases(self, probe_results):
        for complemented_database, directory in probe_results:
            for i, db in enumerate(complemented_database):
                self._layers[i].append(db)
            self._directories.append(directory)

    def _add_database(self, probe_result):
        self._add_databases([probe_result])

    def _probe_dir1(self, directory):
        for compdb_cls in self._registry:
            with suppress(ProbeError):
                yield compdb_cls.probe_directory(directory)
                break
        else:
            # no compilation database found,
            # calling the interface's probe_directory() function
            # should raise a good probe error
            CompilationDatabaseInterface.probe_directory(directory)
            # make sure to raise something,
            # in case probe_directory() no longer asserts
            raise AssertionError
        for complementer in self._complementers:
            cache_path = os.path.join(directory, complementer.cache_filename)
            if os.path.exists(cache_path):
                yield JSONCompilationDatabase(cache_path)
            elif self.raise_on_missing_cache:
                raise ComplementerCacheNotFound(complementer, directory)
            else:
                yield InMemoryCompilationDatabase()

    def _probe_dir(self, directory):
        return (list(self._probe_dir1(directory)), directory)

    def add_directory(self, directory):
        self._add_database(self._probe_dir(directory))

    def add_directories(self, directories):
        """Either all directories are added successfully,
        or none if an exception is raised."""
        databases = []
        for directory in directories:
            databases.append(self._probe_dir(directory))
        self._add_databases(databases)

    def _add_directory_pattern1(self, path_pattern):
        # we are interested only in directories,
        # glob() will list only directories if the pattern ends with os.sep
        dir_pattern = os.path.join(path_pattern, '')
        databases = []
        # sorting makes the order predictable and reproducible
        for directory in sorted(glob.glob(dir_pattern)):
            with suppress(ProbeError):
                databases.append(self._probe_dir(directory))
        if not databases:
            raise ProbeError(
                "{}: no compilation databases found".format(path_pattern))
        return databases

    def add_directory_pattern(self, path_pattern):
        """If no compilation database is found, a ProbeError is raised."""
        self._add_databases(self._add_directory_pattern1(path_pattern))

    def add_directory_patterns(self, path_patterns):
        databases = []
        for path_pattern in path_patterns:
            databases.extend(self._add_directory_pattern1(path_pattern))
        self._add_databases(databases)

    def update_complements(self):
        # clear all complementary databases but keep the initial database
        del self._layers[1:]
        # incrementally compute the complements,
        # each complement depends on its predecessors
        for complementer in self._complementers:
            yield ('begin', {'complementer': complementer.name})
            layer = complementer.complement(self._layers)
            self._layers.append(layer)
            for db, directory in zip(layer, self._directories):
                cache_path = os.path.join(directory,
                                          complementer.cache_filename)
                yield ('saving', {'file': cache_path})
                with io.open(cache_path, 'w', encoding='utf8') as f:
                    compile_commands_to_json(db.get_all_compile_commands(), f)
            yield ('end', {'complementer': complementer.name})

    def get_compile_commands(self, filepath, **kwargs):
        def uniquify(compile_commands):
            for compile_command in compile_commands:
                yield compile_command
                break

        for key in kwargs:
            assert key in ['unique'], "invalid named argument: {}".format(key)
        ret = iter(())
        for layer in self._layers:
            is_empty, compile_commands = empty_iterator_wrap(
                _chain_get_compile_commands(layer, filepath))
            # The complementary databases aren't supposed to contain files
            # from the main or preceding databases.
            # This allows us to exit early as soon as a match is found.
            if not is_empty:
                ret = compile_commands
                break
        if kwargs.get('unique', False):
            ret = uniquify(ret)
        return ret

    def get_all_files(self):
        return itertools.chain.from_iterable((_chain_get_all_files(layer)
                                              for layer in self._layers))

    def get_all_compile_commands(self, **kwargs):
        def uniquify(compile_commands):
            serialized_files = set()
            for compile_command in compile_commands:
                normpath = compile_command.normfile
                if normpath in serialized_files:
                    continue
                serialized_files.add(normpath)
                yield compile_command

        for key in kwargs:
            assert key in ['unique'], "invalid named argument: {}".format(key)
        ret = itertools.chain.from_iterable(
            (_chain_get_all_compile_commands(layer) for layer in self._layers))
        if kwargs.get('unique', False):
            ret = uniquify(ret)
        return ret
--------------------------------------------------------------------------------
/compdb/config.py:
--------------------------------------------------------------------------------
from __future__ import print_function, unicode_literals, absolute_import

import configparser
import os
import sys

import compdb.utils


def _xdg_config_home():
    """Return a path under the XDG_CONFIG_HOME directory (defaults to '~/.config').

    https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
    """
    return os.getenv('XDG_CONFIG_HOME', os.path.expanduser('~/.config'))


def _win32_config_dir():
    """Return a resource under the APPDATA directory.

    https://technet.microsoft.com/en-us/library/cc749104(v=ws.10).aspx
    """
    # Purposefully use a syntax that triggers an error
    # if the APPDATA environment variable does not exist.
    # It's not clear what the default should be.
    return os.environ['APPDATA']


def _macos_config_dir():
    """Return a path under the macOS-specific configuration directory.
    """
    # What should the directory be?
    # ~/Library/Application Support/
    #   https://developer.apple.com/library/content/documentation/General/Conceptual/MOSXAppProgrammingGuide/AppRuntime/AppRuntime.html#//apple_ref/doc/uid/TP40010543-CH2-SW13
    # ~/Library/Preferences/
    #   Someone said so on Stack Overflow.
    # ~/.config/:
    #   Same as Linux when XDG_CONFIG_HOME is not defined.
    #
    # Choose the Linux way until someone with more knowledge complains.
    return os.path.expanduser('~/.config')


def get_user_conf():
    if sys.platform.startswith('win32'):
        config_dir = _win32_config_dir()
    elif sys.platform.startswith('darwin'):
        config_dir = _macos_config_dir()
    else:
        # Assume Linux-like behavior for other platforms;
        # platforms like FreeBSD should have the same behavior as Linux.
        #
        # A few platforms would be nice to test:
        # - cygwin
        # - msys2
        config_dir = _xdg_config_home()
    return os.path.join(config_dir, 'compdb', 'config')


def get_local_conf():
    compdb_dir = compdb.utils.locate_dominating_file('.compdb')
    if compdb_dir:
        return os.path.join(compdb_dir, '.compdb')
    return None


class OptionInvalidError(ValueError):
    '''Raise when a key string of the form '
.