├── .cmake-format.py ├── .gitattributes ├── .gitchangelog-keepachangelog.tpl ├── .gitchangelog.rc ├── .github ├── ISSUE_TEMPLATE │ ├── automation-change.md │ ├── bug-report.md │ └── feature-request.md ├── codeql │ ├── codeql-config.yml │ ├── default.qls │ └── qlpack.yml ├── dependabot.yml └── workflows │ ├── autobot.yml │ ├── choke.yml │ ├── codeql.yml │ ├── conda-dev.yml │ ├── conda.yml │ ├── cov-test.yml │ ├── coverage.yml │ ├── cpplint.yml │ ├── debs.yml │ ├── env.yml │ ├── mark-stale.yml │ ├── release.yml │ └── smoke.yml ├── .gitignore ├── .lcovrc ├── .pre-commit-config.yaml ├── .repolite.yml ├── AUTHORS ├── CMakeLists.txt ├── CPPLINT.cfg ├── ChangeLog.rst ├── INSTALL ├── LICENSE ├── Makefile.am ├── NEWS ├── README.rst ├── autogen.sh ├── clang_toolchain.cmake ├── cmake ├── FindJSONC.cmake ├── FindPython.cmake ├── TestCoverage.cmake └── coverage.cmake ├── conda ├── bld.bat ├── build.sh └── meta.yaml ├── configure.ac ├── debian ├── changelog ├── compat ├── control ├── copyright ├── libredis-ipc-dev.install ├── libredis-ipc0.install ├── rules └── source │ ├── format │ └── lintian-overrides ├── docs └── dev │ ├── generate_changelog.rst │ ├── pre-commit-config.rst │ └── pre-commit-usage.rst ├── environment.devenv.yml ├── gcovr.cfg ├── inc └── json.hh ├── redis-ipc.pc.in ├── requirements-sync.txt ├── scripts ├── fix_pkg_name.sh └── run_redis.sh ├── src ├── Makefile.am ├── redis_ipc.c └── redis_ipc.h ├── test ├── Makefile.am ├── README.rst ├── command_result_test.c ├── command_result_test.out ├── json_test.cpp ├── json_test.out ├── multithread_test.c ├── multithread_test.out ├── pub_sub_test.c ├── pub_sub_test.out ├── settings_status_test.c └── settings_status_test.out ├── tox-deps.ini ├── tox.ini └── toxfile.py /.cmake-format.py: -------------------------------------------------------------------------------- 1 | # ----------------------------- 2 | # Options affecting formatting. 
3 | # ----------------------------- 4 | with section("format"): 5 | 6 | # If a statement is wrapped to more than one line, than dangle the closing 7 | # parenthesis on its own line. 8 | dangle_parens = True 9 | 10 | # How wide to allow formatted cmake files 11 | line_width = 80 12 | 13 | # How many spaces to tab for indent 14 | tab_size = 4 15 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Set default behaviour to automatically normalize line endings. 2 | * text=auto 3 | 4 | # Force batch scripts to always use CRLF line endings so that if a repo is 5 | # accessed in Windows via a file share from Linux, the scripts will work. 6 | *.{cmd,[cC][mM][dD]} text eol=crlf 7 | *.{bat,[bB][aA][tT]} text eol=crlf 8 | 9 | # Force bash scripts to always use LF line endings so that if a repo is 10 | # accessed in Unix via a file share from Windows, the scripts will work. 11 | *.sh text eol=lf 12 | -------------------------------------------------------------------------------- /.gitchangelog-keepachangelog.tpl: -------------------------------------------------------------------------------- 1 | # ChangeLog 2 | 3 | {{#versions}} 4 | ## {{#tag}}{{{tag}}}{{/tag}}{{^tag}}_(unreleased)_{{/tag}} 5 | 6 | {{#sections}} 7 | ### {{{label}}} 8 | 9 | {{#commits}} 10 | - {{{subject}}} [{{{author}}}] 11 | {{#body}} 12 | 13 | {{{body_indented}}} 14 | {{/body}} 15 | {{/commits}} 16 | 17 | {{/sections}} 18 | 19 | {{/versions}} 20 | -------------------------------------------------------------------------------- /.gitchangelog.rc: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8; mode: python -*- 2 | ## 3 | ## Message Format 4 | ## 5 | ## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...] 6 | ## 7 | ## Description 8 | ## 9 | ## ACTION is one of 'chg', 'fix', 'new' 10 | ## 11 | ## Is WHAT the change is about. 
12 | ## 13 | ## 'chg' is for refactor, small improvement, cosmetic changes... 14 | ## 'fix' is for bug fixes 15 | ## 'new' is for new features, big improvement 16 | ## 17 | ## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc' 18 | ## 19 | ## Is WHO is concerned by the change. 20 | ## 21 | ## 'dev' is for developers (API changes, refactors...) 22 | ## 'usr' is for final users (UI changes) 23 | ## 'pkg' is for packagers (packaging changes) 24 | ## 'test' is for testers (test only related changes) 25 | ## 'doc' is for doc guys (doc only changes) 26 | ## 27 | ## COMMIT_MSG is ... well ... the commit message itself. 28 | ## 29 | ## TAGs are additional adjectives such as 'refactor' 'minor' 'cosmetic' 30 | ## 31 | ## They are preceded with a '!' or a '@' (prefer the former, as the 32 | ## latter is wrongly interpreted in github.) Commonly used tags are: 33 | ## 34 | ## 'refactor' is obviously for refactoring code only 35 | ## 'minor' is for a very meaningless change (a typo, adding a comment) 36 | ## 'cosmetic' is for cosmetic driven change (re-indentation, 80-col...) 37 | ## 'wip' is for partial functionality but complete subfunctionality. 38 | ## 39 | ## Example: 40 | ## 41 | ## new: usr: support of bazaar implemented 42 | ## chg: re-indented some lines !cosmetic 43 | ## new: dev: updated code to be compatible with last version of killer lib. 44 | ## fix: pkg: updated year of licence coverage. 45 | ## new: test: added a bunch of test around user usability of feature X. 46 | ## fix: typo in spelling my name in comment. !minor 47 | ## 48 | ## Please note that multi-line commit messages are supported, and only the 49 | ## first line will be considered as the "summary" of the commit message. So 50 | ## tags, and other rules only apply to the summary. The body of the commit 51 | ## message will be displayed in the changelog without reformatting. 
52 | 53 | 54 | ## 55 | ## ``ignore_regexps`` is a line of regexps 56 | ## 57 | ## Any commit having its full commit message matching any regexp listed here 58 | ## will be ignored and won't be reported in the changelog. 59 | ## 60 | #ignore_regexps = [] 61 | ignore_regexps = [ 62 | r'@minor', r'!minor', 63 | r'@cosmetic', r'!cosmetic', 64 | r'@refactor', r'!refactor', 65 | r'@wip', r'!wip', 66 | r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:', 67 | r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$', 68 | r'^$', ## ignore commits with empty messages 69 | ] 70 | 71 | 72 | ## ``section_regexps`` is a list of 2-tuples associating a string label and a 73 | ## list of regexp 74 | ## 75 | ## Commit messages will be classified in sections thanks to this. Section 76 | ## titles are the label, and a commit is classified under this section if any 77 | ## of the regexps associated is matching. 78 | ## 79 | ## Please note that ``section_regexps`` will only classify commits and won't 80 | ## make any changes to the contents. So you'll probably want to go check 81 | ## ``subject_process`` (or ``body_process``) to do some changes to the subject, 82 | ## whenever you are tweaking this variable. 83 | ## 84 | section_regexps = [ 85 | ('New', [ 86 | r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 87 | ]), 88 | ('Features', [ 89 | r'^([nN]ew|[fF]eat)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 90 | ]), 91 | ('Changes', [ 92 | r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 93 | ]), 94 | ('Fixes', [ 95 | r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 96 | ]), 97 | 98 | ('Other', None ## Match all lines 99 | ), 100 | ] 101 | 102 | 103 | ## ``body_process`` is a callable 104 | ## 105 | ## This callable will be given the original body and result will 106 | ## be used in the changelog. 107 | ## 108 | ## Available constructs are: 109 | ## 110 | ## - any python callable that take one txt argument and return txt argument. 
111 | ## 112 | ## - ReSub(pattern, replacement): will apply regexp substitution. 113 | ## 114 | ## - Indent(chars=" "): will indent the text with the prefix 115 | ## Please remember that template engines gets also to modify the text and 116 | ## will usually indent themselves the text if needed. 117 | ## 118 | ## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraph to fill 80-Columns 119 | ## 120 | ## - noop: do nothing 121 | ## 122 | ## - ucfirst: ensure the first letter is uppercase. 123 | ## (usually used in the ``subject_process`` pipeline) 124 | ## 125 | ## - final_dot: ensure text finishes with a dot 126 | ## (usually used in the ``subject_process`` pipeline) 127 | ## 128 | ## - strip: remove any spaces before or after the content of the string 129 | ## 130 | ## - SetIfEmpty(msg="No commit message."): will set the text to 131 | ## whatever given ``msg`` if the current text is empty. 132 | ## 133 | ## Additionally, you can `pipe` the provided filters, for instance: 134 | #body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars=" ") 135 | #body_process = Wrap(regexp=r'\n(?=\w+\s*:)') 136 | #body_process = noop 137 | body_process = ReSub(r'((^|\n)[A-Z]\w+(-\w+)*: .*(\n\s+.*)*)+$', r'') | strip 138 | #body_process = lambda text: "" 139 | #body_process = ReSub(r'.*', '') 140 | 141 | 142 | ## ``subject_process`` is a callable 143 | ## 144 | ## This callable will be given the original subject and result will 145 | ## be used in the changelog. 146 | ## 147 | ## Available constructs are those listed in ``body_process`` doc. 148 | subject_process = (strip | 149 | ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') | 150 | SetIfEmpty("No commit message.") | ucfirst | final_dot) 151 | 152 | 153 | ## ``tag_filter_regexp`` is a regexp 154 | ## 155 | ## Tags that will be used for the changelog must match this regexp. 
156 | ## 157 | #tag_filter_regexp = r'^v?[0-9]+\.[0-9]+(\.[0-9]+)?$' 158 | tag_filter_regexp = r"^.*$" 159 | 160 | ## ``unreleased_version_label`` is a string or a callable that outputs a string 161 | ## 162 | ## This label will be used as the changelog Title of the last set of changes 163 | ## between last valid tag and HEAD if any. 164 | # custom template (.tpl file below) overrides this setting 165 | unreleased_version_label = lambda: swrap( 166 | ["git", "describe", "--tags"], 167 | shell=False) 168 | #unreleased_version_label = "(unreleased)" 169 | 170 | 171 | ## ``output_engine`` is a callable 172 | ## 173 | ## This will change the output format of the generated changelog file 174 | ## 175 | ## Available choices are: 176 | ## 177 | ## - rest_py 178 | ## 179 | ## Legacy pure python engine, outputs ReSTructured text. 180 | ## This is the default. 181 | ## 182 | ## - mustache() 183 | ## 184 | ## Template name could be any of the available templates in 185 | ## ``templates/mustache/*.tpl``. 186 | ## Requires python package ``pystache``. 187 | ## Examples: 188 | ## - mustache("markdown") 189 | ## - mustache("restructuredtext") 190 | ## 191 | ## - makotemplate() 192 | ## 193 | ## Template name could be any of the available templates in 194 | ## ``templates/mako/*.tpl``. 195 | ## Requires python package ``mako``. 196 | ## Examples: 197 | ## - makotemplate("restructuredtext") 198 | ## 199 | output_engine = rest_py 200 | #output_engine = mustache("restructuredtext") 201 | #output_engine = mustache(".gitchangelog-keepachangelog.tpl") 202 | #output_engine = mustache("markdown") 203 | #output_engine = makotemplate("restructuredtext") 204 | 205 | 206 | ## ``include_merge`` is a boolean 207 | ## 208 | ## This option tells git-log whether to include merge commits in the log. 209 | ## The default is to include them. 
210 | include_merge = False 211 | 212 | 213 | ## ``log_encoding`` is a string identifier 214 | ## 215 | ## This option tells gitchangelog what encoding is output by ``git log``. 216 | ## The default is to be clever about it: it checks ``git config`` for 217 | ## ``i18n.logOutputEncoding``, and if not found will default to git's own 218 | ## default: ``utf-8``. 219 | #log_encoding = 'utf-8' 220 | 221 | 222 | ## ``publish`` is a callable 223 | ## 224 | ## Sets what ``gitchangelog`` should do with the output generated by 225 | ## the output engine. ``publish`` is a callable taking one argument 226 | ## that is an iterator on lines from the output engine. 227 | ## 228 | ## Some helper callables are provided: 229 | ## 230 | ## Available choices are: 231 | ## 232 | ## - stdout 233 | ## 234 | ## Outputs directly to standard output 235 | ## (This is the default) 236 | ## 237 | ## - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start(), flags) 238 | ## 239 | ## Creates a callable that will parse given file for the given 240 | ## regex pattern and will insert the output in the file. 241 | ## ``idx`` is a callable that receives the matching object and 242 | ## must return an integer index point where to insert 243 | ## the output in the file. Default is to return the position of 244 | ## the start of the matched string. 245 | ## 246 | ## - FileRegexSubst(file, pattern, replace, flags) 247 | ## 248 | ## Apply a replace in place in the given file. Your regex pattern must 249 | ## take care of everything and might be more complex. Check the README 250 | ## for a complete copy-pastable example. 
251 | ## 252 | # publish = FileInsertAtFirstRegexMatch( 253 | # "CHANGELOG.rst", 254 | # r'/(?P[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n/', 255 | # idx=lambda m: m.start(1) 256 | # ) 257 | 258 | publish = stdout 259 | 260 | 261 | ## ``revs`` is a list of callable or a list of string 262 | ## 263 | ## callable will be called to resolve as strings and allow dynamic 264 | ## computation of these. The result will be used as revisions for 265 | ## gitchangelog (as if directly stated on the command line). This allows 266 | ## to filter exactly which commits will be read by gitchangelog. 267 | ## 268 | ## To get full documentation on the format of these strings, please 269 | ## refer to the ``git rev-list`` arguments. There are many examples. 270 | ## 271 | ## Using callables is especially useful, for instance, if you 272 | ## are using gitchangelog to generate incrementally your changelog. 273 | ## 274 | ## Some helpers are provided, you can use them:: 275 | ## 276 | ## - FileFirstRegexMatch(file, pattern): will return a callable that will 277 | ## return the first string match for the given pattern in the given file. 278 | ## If you use named sub-patterns in your regex pattern, it'll output only 279 | ## the string matching the regex pattern named "rev". 280 | ## 281 | ## - Caret(rev): will return the rev prefixed by a "^", which is a 282 | ## way to remove the given revision and all its ancestors. 283 | ## 284 | ## Please note that if you provide a rev-list on the command line, it'll 285 | ## replace this value (which will then be ignored). 286 | ## 287 | ## If empty, then ``gitchangelog`` will act as if it had to generate a full 288 | ## changelog. 289 | ## 290 | ## The default is to use all commits to make the changelog. 
291 | #revs = ["^1.0.3", ] 292 | #revs = [ 293 | # Caret( 294 | # FileFirstRegexMatch( 295 | # "CHANGELOG.rst", 296 | # r"(?P[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")), 297 | # "HEAD" 298 | #] 299 | revs = [] 300 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/automation-change.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Automation Change Request 3 | about: Please request automation changes here. 4 | title: '' 5 | labels: Automation 6 | assignees: sarnold 7 | 8 | --- 9 | 10 | ## Automation Change Request 11 | 12 | Provide a short description of the requested automation change (both 13 | new and existing workflows). 14 | 15 | ### Additional Context 16 | Add any other context about the request here. 17 | 18 | ### Linked To 19 | List any issues linked to this request. 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | about: Please report any bugs here. 4 | title: '' 5 | labels: Bug 6 | assignees: SJLC 7 | 8 | --- 9 | 10 | ## Bug Report 11 | 12 | Provide a short description of the bug with any error message. 13 | 14 | ### Log Messages 15 | ``` 16 | Add​ the ​relevant part of any redis/client logs, if applicable. 17 | ``` 18 | 19 | ### Expected Behavior 20 | A clear and concise description of what you expected to happen. 21 | 22 | ### Additional Context 23 | Add any other context about the problem here. 24 | 25 | ### Linked To 26 | List any issues linked to this bug. 
27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | about: Please suggest new or improved features here. 4 | title: '' 5 | labels: Enhancement 6 | assignees: SJLC 7 | 8 | --- 9 | 10 | ## Feature Request 11 | 12 | Provide a short description of the problem you are trying to solve or feature you would like to see. 13 | 14 | ### Expected Behavior 15 | Provide a detailed description of how the feature should work. 16 | 17 | ### Additional Context 18 | Add any other context about the feature here. 19 | 20 | ### Linked To 21 | List any issues linked to this feature request. 22 | -------------------------------------------------------------------------------- /.github/codeql/codeql-config.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL config" 2 | 3 | queries: 4 | - name: Use custom query pack (security-and-quality plus critical minus dev) 5 | uses: ./.github/codeql/default.qls 6 | -------------------------------------------------------------------------------- /.github/codeql/default.qls: -------------------------------------------------------------------------------- 1 | # https://github.com/zbazztian/custom-queries/blob/master/cpp/default.qls 2 | 3 | # add standard security and quality query set 4 | - import: codeql-suites/cpp-security-and-quality.qls 5 | from: codeql/cpp-queries 6 | - exclude: 7 | id: 8 | - cpp/fixme-comment 9 | - cpp/short-global-name 10 | 11 | # add non-standard queries, which are normally disabled 12 | - queries: '.' 
13 | from: codeql/cpp-queries 14 | - include: 15 | id: 16 | - cpp/descriptor-may-not-be-closed 17 | - cpp/descriptor-never-closed 18 | - cpp/file-may-not-be-closed 19 | - cpp/file-never-closed 20 | - cpp/memory-may-not-be-freed 21 | - cpp/memory-never-freed 22 | -------------------------------------------------------------------------------- /.github/codeql/qlpack.yml: -------------------------------------------------------------------------------- 1 | name: custom-cpp-query-pack 2 | version: 1.0.0 3 | libraryPathDependencies: codeql-cpp 4 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "daily" 8 | commit-message: 9 | prefix: "ci:" 10 | labels: ["actions"] 11 | # only needed for non-default branch 12 | #target-branch: "develop" 13 | -------------------------------------------------------------------------------- /.github/workflows/autobot.yml: -------------------------------------------------------------------------------- 1 | name: Dependabot automation 2 | 3 | on: # yamllint disable-line rule:truthy 4 | pull_request: 5 | types: 6 | - opened 7 | - synchronize 8 | 9 | permissions: 10 | contents: write 11 | pull-requests: write 12 | 13 | jobs: 14 | dependabot: 15 | env: 16 | PR_URL: ${{ github.event.pull_request.html_url }} 17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - name: Fetch Dependabot metadata 22 | id: metadata 23 | if: ${{ github.event_name == 'pull_request' && github.actor == 'dependabot[bot]' }} 24 | uses: dependabot/fetch-metadata@08eff52bf64351f401fb50d4972fa95b9f2c2d1b # v2.4.0 25 | with: 26 | github-token: "${{ secrets.GITHUB_TOKEN }}" 27 | 28 | - name: Enable auto-approve for Dependabot PRs 29 | if: 
${{ github.event_name == 'pull_request' && github.actor == 'dependabot[bot]' }} 30 | run: gh pr review --approve "${PR_URL}" 31 | 32 | - name: Enable auto-approve for Dependabot PRs 33 | if: ${{ github.event_name == 'pull_request' && github.actor == 'dependabot[bot]' }} 34 | run: gh pr merge --squash --auto "${PR_URL}" || gh pr merge --merge --auto "${PR_URL}" || gh pr merge --rebase --auto "${PR_URL}" 35 | -------------------------------------------------------------------------------- /.github/workflows/choke.yml: -------------------------------------------------------------------------------- 1 | name: choke 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: [ master, develop ] 7 | pull_request: 8 | branches: [ master, develop ] 9 | # schedule: 10 | # - cron: '15 1 * * *' 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ${{ matrix.os }} 16 | permissions: 17 | contents: read 18 | defaults: 19 | run: 20 | shell: bash 21 | 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | os: [ubuntu-22.04] 26 | 27 | steps: 28 | - uses: actions/checkout@v4 29 | 30 | - uses: actions/setup-python@v5 31 | with: 32 | python-version: 3.9 33 | 34 | - name: Environment 35 | run: | 36 | bash -c set 37 | 38 | - name: Backports 39 | run: | 40 | sudo apt-get -qq update 41 | sudo apt-get install -y software-properties-common 42 | sudo add-apt-repository -y -s ppa:nerdboy/embedded 43 | sudo apt-get install -y libjson-c-dev lcov 44 | sudo apt-get install -y libhiredis-dev gcovr redis-server 45 | sudo apt-get install -y valgrind libc6-dbg 46 | 47 | - name: Stop redis-server system service 48 | run: | 49 | sudo systemctl stop redis 50 | 51 | - name: Add python requirements 52 | run: | 53 | python -m pip install --upgrade pip 54 | pip install tox tox-gh-actions 55 | 56 | - name: Run tests with valgrind 57 | run: | 58 | tox -e grind 59 | tox -e tests 60 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: 
-------------------------------------------------------------------------------- 1 | name: "CodeQL Advanced" 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: [ master, develop ] 7 | pull_request: 8 | branches: [ master, develop ] 9 | #schedule: 10 | #- cron: '21 20 * * 5' 11 | 12 | jobs: 13 | analyze: 14 | name: Analyze Code 15 | runs-on: ubuntu-22.04 16 | env: 17 | LLVM_VER: 15 18 | permissions: 19 | # required for all workflows 20 | security-events: write 21 | # required to fetch internal or private CodeQL packs 22 | packages: read 23 | # only required for workflows in private repositories 24 | actions: read 25 | contents: read 26 | 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | include: 31 | - language: c-cpp 32 | build-mode: manual 33 | - language: python 34 | build-mode: none 35 | - language: actions 36 | build-mode: none 37 | 38 | steps: 39 | - name: Checkout repository 40 | uses: actions/checkout@v4 41 | with: 42 | fetch-depth: 0 43 | 44 | - name: Environment 45 | run: | 46 | bash -c set 47 | 48 | - name: Initialize CodeQL 49 | uses: github/codeql-action/init@v3 50 | with: 51 | languages: ${{ matrix.language }} 52 | build-mode: ${{ matrix.build-mode }} 53 | config-file: ./.github/codeql/codeql-config.yml 54 | 55 | - name: Install dependencies 56 | if: matrix.build-mode == 'manual' 57 | run: | 58 | sudo apt-get -qq update 59 | sudo apt-get install -yqq software-properties-common redis-server 60 | sudo add-apt-repository -y -s ppa:ubuntu-toolchain-r/ppa 61 | sudo apt-get -qq update 62 | sudo apt-get install -yqq libjson-c-dev libhiredis-dev libgtest-dev libgmock-dev lcov 63 | sudo systemctl stop redis 64 | sudo apt-get install -y clang-${{ env.LLVM_VER }} llvm-${{ env.LLVM_VER }} lld-${{ env.LLVM_VER }} llvm-${{ env.LLVM_VER }}-tools g++-multilib 65 | echo "CC=clang-${{ env.LLVM_VER }}" >> $GITHUB_ENV 66 | echo "CXX=clang++-${{ env.LLVM_VER }}" >> $GITHUB_ENV 67 | echo "LLVM_VER_DIR=/usr/lib/llvm-${{ env.LLVM_VER }}" >> $GITHUB_ENV 68 | 69 | 
- if: matrix.build-mode == 'manual' 70 | uses: actions/setup-python@v5 71 | with: 72 | python-version: "3.11" 73 | 74 | - if: matrix.build-mode == 'manual' 75 | name: Install Tox 76 | run: | 77 | python -m pip install --upgrade pip 78 | pip install tox 79 | 80 | - if: matrix.build-mode == 'manual' 81 | name: Build 82 | env: 83 | CC: ${{ env.CC }} 84 | CXX: ${{ env.CXX }} 85 | LLVM_VER_DIR: ${{ env.LLVM_VER_DIR }} 86 | run: | 87 | tox -e clang 88 | 89 | - name: Perform CodeQL Analysis 90 | uses: github/codeql-action/analyze@v3 91 | with: 92 | category: "/language:${{matrix.language}}" 93 | -------------------------------------------------------------------------------- /.github/workflows/conda-dev.yml: -------------------------------------------------------------------------------- 1 | name: CondaDev 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: 7 | - master 8 | - develop 9 | 10 | jobs: 11 | build: 12 | name: redis-ipc ${{ matrix.python-version }} ${{ matrix.os }} 13 | runs-on: ${{ matrix.os }} 14 | permissions: 15 | contents: read 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: ['ubuntu-22.04', 'ubuntu-24.04'] 20 | python-version: ['3.7', '3.9'] 21 | include: 22 | - os: 'ubuntu-22.04' 23 | generator: 'Ninja' 24 | build_type: 'Debug' 25 | - os: 'ubuntu-24.04' 26 | generator: 'Ninja' 27 | build_type: 'RelWithDebInfo' 28 | #- os: 'macOS-11' 29 | # generator: 'Ninja' 30 | # build_type: 'Debug' 31 | env: 32 | OS: ${{ matrix.os }} 33 | PYTHON: ${{ matrix.python-version }} 34 | PYTHONIOENCODING: utf-8 35 | 36 | steps: 37 | - uses: actions/checkout@v4 38 | 39 | - name: Setup base python 40 | uses: actions/setup-python@v5 41 | with: 42 | python-version: '3.x' 43 | 44 | - name: Cache conda 45 | id: cache 46 | uses: actions/cache@v4 47 | env: 48 | # Increase this value to reset cache if environment.devenv.yml has not changed 49 | CACHE_NUMBER: 1 50 | with: 51 | path: ~/conda_pkgs_dir 52 | key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ 
hashFiles('environment.devenv.yml') }} 53 | 54 | - uses: conda-incubator/setup-miniconda@v3.2.0 55 | with: 56 | auto-update-conda: true 57 | python-version: ${{ matrix.python-version }} 58 | channels: conda-forge 59 | channel-priority: strict 60 | use-only-tar-bz2: true 61 | 62 | - name: Configure condadev environment 63 | shell: bash -l {0} 64 | env: 65 | PY_VER: ${{ matrix.python-version }} 66 | run: | 67 | conda config --set always_yes yes --set changeps1 no 68 | conda config --add channels conda-forge 69 | conda install conda-devenv=2.1.1 70 | conda devenv 71 | 72 | - name: Build and test 73 | shell: bash -l {0} 74 | env: 75 | PY_VER: ${{ matrix.python-version }} 76 | run: | 77 | source activate redis-ipc-test 78 | ctest --build-config "${{ matrix.build_type }}" \ 79 | --build-generator "${{ matrix.generator }}" \ 80 | --build-and-test . build \ 81 | --build-options -DRIPC_DISABLE_SOCK_TESTS=1 \ 82 | --test-command ctest -V \ 83 | --build-config "${{ matrix.build_type }}" 84 | -------------------------------------------------------------------------------- /.github/workflows/conda.yml: -------------------------------------------------------------------------------- 1 | name: Conda 2 | 3 | on: 4 | workflow_dispatch: 5 | #pull_request: 6 | #push: 7 | #branches: 8 | #- master 9 | #- develop 10 | paths-ignore: 11 | - '**.md' 12 | - '**.rst' 13 | - '**.sh' 14 | 15 | jobs: 16 | build: 17 | strategy: 18 | fail-fast: false 19 | matrix: 20 | platform: [ubuntu-latest, macos-latest] 21 | runs-on: ${{ matrix.platform }} 22 | permissions: 23 | actions: read 24 | contents: read 25 | # The setup-miniconda action needs this to activate miniconda 26 | defaults: 27 | run: 28 | shell: "bash -l {0}" 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | with: 33 | fetch-depth: 0 34 | 35 | - name: Cache conda 36 | uses: actions/cache@v4 37 | with: 38 | path: ~/conda_pkgs_dir 39 | key: ${{matrix.os}}-conda-pkgs-${{hashFiles('**/conda/meta.yaml')}} 40 | 41 | - name: Get conda 42 | 
uses: conda-incubator/setup-miniconda@v3.2.0 43 | with: 44 | python-version: 3.9 45 | channels: conda-forge 46 | channel-priority: strict 47 | use-only-tar-bz2: true 48 | auto-activate-base: true 49 | 50 | - name: Prepare 51 | run: conda install conda-build conda-verify 52 | 53 | - name: Build and test 54 | run: conda build conda/ 55 | 56 | - name: Install 57 | run: conda install -c ${CONDA_PREFIX}/conda-bld/ redis-ipc 58 | -------------------------------------------------------------------------------- /.github/workflows/cov-test.yml: -------------------------------------------------------------------------------- 1 | name: "Coverity" 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: [ master, develop ] 7 | pull_request: 8 | branches: [ master, develop ] 9 | # schedule: 10 | # - cron: '21 20 * * 5' 11 | 12 | jobs: 13 | convert: 14 | name: Convert and upload coverity results 15 | runs-on: ubuntu-22.04 16 | permissions: 17 | actions: read 18 | contents: read 19 | 20 | steps: 21 | - name: Checkout repository 22 | uses: actions/checkout@v4 23 | 24 | - name: Check for new scan data 25 | env: 26 | FILE: 'coverity-results.json' 27 | shell: bash 28 | run: | 29 | if [ -f "${FILE}" ] 30 | then 31 | echo "HAVE_SCAN_DATA=true" >> $GITHUB_ENV 32 | echo "Found data file: ${FILE}" 33 | else 34 | echo "HAVE_SCAN_DATA=false" >> $GITHUB_ENV 35 | echo "Data file NOT found!!" 
36 | fi 37 | 38 | - name: Convert Coverity Results to SARIF 39 | if: env.HAVE_SCAN_DATA == 'true' 40 | uses: gautambaghel/coverity-scan-results-to-sarif@master 41 | with: 42 | pipeline-results-json: coverity-results.json 43 | output-results-sarif: coverity-results.sarif 44 | 45 | - name: Upload SARIF file to GitHub UI 46 | if: env.HAVE_SCAN_DATA == 'true' 47 | uses: github/codeql-action/upload-sarif@v3 48 | with: 49 | sarif_file: coverity-results.sarif 50 | -------------------------------------------------------------------------------- /.github/workflows/coverage.yml: -------------------------------------------------------------------------------- 1 | # internal coverage with PR comment and badge v0.0.6 2 | # Note this works for public orgs but is not guaranteed for all 3 | # environments. 4 | # 5 | # This version has updated actions and coverage value regex, no fork isolation 6 | # yet. Badge and comment job logic should be tuned for personal vs org use 7 | # (fork PRs still need the big refactor; see badge job comments below). 
8 | name: Coverage 9 | 10 | on: 11 | workflow_dispatch: 12 | pull_request: 13 | push: 14 | branches: 15 | - master 16 | - develop 17 | 18 | jobs: 19 | pre_ci: 20 | name: Prepare CI environment 21 | runs-on: ubuntu-24.04 22 | permissions: 23 | # only required for workflows in private repositories 24 | actions: read 25 | contents: read 26 | outputs: 27 | #commit_message: ${{ steps.get_commit_message.outputs.commit_message }} 28 | branch: ${{ steps.extract_branch.outputs.branch }} 29 | 30 | steps: 31 | - name: Checkout Project 32 | uses: actions/checkout@v4 33 | with: 34 | # We need to fetch with a depth of 2 for pull_request so we can do HEAD^2 35 | fetch-depth: 2 36 | 37 | - name: Environment 38 | run: | 39 | bash -c set 40 | 41 | #- name: "Get commit message" 42 | #id: get_commit_message 43 | #env: 44 | #COMMIT_PUSH: ${{ github.event.head_commit.message }} 45 | #run: | 46 | #COMMIT_MESSAGE="${COMMIT_PUSH:-$(git log --format=%B -n 1 HEAD^2)}" 47 | #echo "commit_message=${COMMIT_MESSAGE}" >> $GITHUB_OUTPUT 48 | 49 | - name: Extract branch name 50 | id: extract_branch 51 | shell: bash 52 | run: | 53 | TMP_PULL_HEAD_REF="${{ github.head_ref }}" 54 | TMP_GITHUB_REF="${GITHUB_REF#refs/heads/}" 55 | EXPORT_VALUE="" 56 | if [ "${TMP_PULL_HEAD_REF}" != "" ] 57 | then 58 | EXPORT_VALUE="${TMP_PULL_HEAD_REF}" 59 | else 60 | EXPORT_VALUE="${TMP_GITHUB_REF}" 61 | fi 62 | echo "branch=${EXPORT_VALUE}" >> $GITHUB_OUTPUT 63 | 64 | base: 65 | name: Base coverage 66 | runs-on: ubuntu-24.04 67 | permissions: 68 | # only required for workflows in private repositories 69 | actions: read 70 | contents: read 71 | outputs: 72 | base_branch: ${{ steps.get_base.outputs.base_branch }} 73 | base_cov: ${{ steps.get_base.outputs.base_cov }} 74 | 75 | steps: 76 | - uses: actions/checkout@v4 77 | with: 78 | ref: badges 79 | path: badges 80 | 81 | - name: Get base ref and coverage score 82 | id: get_base 83 | env: 84 | FILE: 'test-coverage.txt' 85 | working-directory: ./badges 86 | shell: bash 87 
| run: | 88 | TMP_PULL_BASE_REF="${{ github.base_ref }}" 89 | TMP_GITHUB_REF="${GITHUB_REF#refs/heads/}" 90 | EXPORT_VALUE="" 91 | if [ "${TMP_PULL_BASE_REF}" != "" ] 92 | then 93 | EXPORT_VALUE="${TMP_PULL_BASE_REF}" 94 | else 95 | EXPORT_VALUE="${TMP_GITHUB_REF}" 96 | fi 97 | echo "base_branch=${EXPORT_VALUE}" >> $GITHUB_OUTPUT 98 | if [ -f "${EXPORT_VALUE}/${FILE}" ] 99 | then 100 | echo "Base coverage found on ${EXPORT_VALUE}" 101 | BASE_COV=$(cat "${EXPORT_VALUE}/${FILE}") 102 | echo "Base coverage is: ${BASE_COV}" 103 | echo "base_cov=${BASE_COV}" >> $GITHUB_OUTPUT 104 | else 105 | echo "Base coverage NOT found on ${EXPORT_VALUE}!!" 106 | fi 107 | 108 | check: 109 | name: Pre CI check 110 | runs-on: ubuntu-24.04 111 | permissions: 112 | # only required for workflows in private repositories 113 | actions: read 114 | contents: read 115 | needs: [pre_ci, base] 116 | 117 | steps: 118 | - name: Check github variables 119 | # NOTE base coverage env var may be empty here 120 | env: 121 | #COMMIT_MESSAGE: ${{ needs.pre_ci.outputs.commit_message }} 122 | EXPORT_VALUE: ${{ needs.pre_ci.outputs.branch }} 123 | BASE_BRANCH: ${{ needs.base.outputs.base_branch }} 124 | BASE_COVERAGE: ${{ needs.base.outputs.base_cov }} 125 | run: | 126 | #echo "Commit message: ${COMMIT_MESSAGE}" 127 | echo "Export value (head_ref): ${EXPORT_VALUE}" 128 | echo "Base value (base_ref): ${BASE_BRANCH}" 129 | echo "Base coverage (percent): ${{ env.BASE_COVERAGE }}" 130 | 131 | cov_data: 132 | name: Generate test coverage data 133 | runs-on: ubuntu-24.04 134 | permissions: 135 | # only required for workflows in private repositories 136 | actions: read 137 | contents: read 138 | pull-requests: write 139 | needs: [check] 140 | defaults: 141 | run: 142 | shell: bash 143 | outputs: 144 | coverage: ${{ steps.coverage.outputs.coverage }} 145 | coverage-rounded-display: ${{ steps.coverage.outputs.coverage-rounded-display }} 146 | env: 147 | LLVM_VER: 17 148 | PYTHONIOENCODING: utf-8 149 | 
PIP_DOWNLOAD_CACHE: ${{ github.workspace }}/../.pip_download_cache 150 | 151 | steps: 152 | - name: Checkout repo 153 | uses: actions/checkout@v4 154 | with: 155 | fetch-depth: 0 156 | 157 | - name: Environment 158 | run: | 159 | bash -c set 160 | 161 | - uses: actions/setup-python@v5 162 | with: 163 | python-version: '3.11' 164 | 165 | - name: Set platform server (socket) dir 166 | id: set_temp_dirs 167 | run: | 168 | MKTEMP=$(mktemp -d) 169 | echo "Platform temp dir is: ${MKTEMP}" 170 | echo "GH CI temp dir is: ${RUNNER_TEMP}" 171 | if [ "${ImageOS}" != "ubuntu18" ] 172 | then 173 | TEMP_DIR="${MKTEMP}" 174 | else 175 | TEMP_DIR="${RUNNER_TEMP}" 176 | fi 177 | echo "Setting runtime dir: ${TEMP_DIR}" 178 | echo "TEMP_DIR=${TEMP_DIR}" >> $GITHUB_ENV 179 | bash -c set 180 | 181 | - name: Deps 182 | run: | 183 | sudo apt-get -qq update 184 | sudo apt-get install -yqq software-properties-common redis-server 185 | sudo add-apt-repository -y -s ppa:nerdboy/embedded 186 | sudo add-apt-repository -y -s ppa:ubuntu-toolchain-r/ppa 187 | sudo apt-get -qq update 188 | sudo apt-get install -yqq libjson-c-dev libhiredis-dev libgtest-dev libgmock-dev lcov 189 | sudo systemctl stop redis 190 | sudo apt-get install -y clang-${{ env.LLVM_VER }} llvm-${{ env.LLVM_VER }} lld-${{ env.LLVM_VER }} llvm-${{ env.LLVM_VER }}-tools g++-multilib 191 | echo "CC=clang-${{ env.LLVM_VER }}" >> $GITHUB_ENV 192 | echo "CXX=clang++-${{ env.LLVM_VER }}" >> $GITHUB_ENV 193 | echo "LLVM_VER_DIR=/usr/lib/llvm-${{ env.LLVM_VER }}" >> $GITHUB_ENV 194 | 195 | - name: Add python requirements 196 | run: | 197 | python -m pip install --upgrade pip 198 | pip install tox 199 | 200 | - name: Run tests 201 | env: 202 | CC: ${{ env.CC }} 203 | CXX: ${{ env.CXX }} 204 | LLVM_VER_DIR: ${{ env.LLVM_VER_DIR }} 205 | run: | 206 | tox -e clang,lcov 207 | 208 | - uses: actions/upload-artifact@v4 209 | with: 210 | name: coverage_data 211 | path: coverage.xml 212 | 213 | - name: Code Coverage Summary Report (data) 214 | 
uses: irongut/CodeCoverageSummary@51cc3a756ddcd398d447c044c02cb6aa83fdae95 # v1.3.0 215 | with: 216 | filename: coverage.xml 217 | output: 'both' 218 | 219 | - uses: actions/upload-artifact@v4 220 | with: 221 | name: src_coverage_rpts 222 | path: | 223 | coverage.xml 224 | code-coverage-results.txt 225 | retention-days: 30 226 | 227 | - name: Check code coverage 228 | id: coverage 229 | env: 230 | VALUE: "Branch Rate" 231 | run: | 232 | COVERAGE=$( cat code-coverage-results.txt | grep -e ^Summary | grep -o -E "${VALUE} = .{3}" | egrep -o '([0-9]+)' ) 233 | echo "coverage=${COVERAGE}" >> $GITHUB_OUTPUT 234 | echo "coverage-rounded-display=${COVERAGE}%" >> $GITHUB_OUTPUT 235 | echo "Current coverage is: ${COVERAGE}%" 236 | 237 | - name: Code Coverage Summary Report 238 | uses: irongut/CodeCoverageSummary@51cc3a756ddcd398d447c044c02cb6aa83fdae95 # v1.3.0 239 | if: ${{ github.event_name == 'pull_request' }} 240 | with: 241 | filename: coverage.xml 242 | format: 'markdown' 243 | output: 'both' 244 | 245 | - name: Add Coverage PR Comment 246 | uses: marocchino/sticky-pull-request-comment@67d0dec7b07ed060a405f9b2a64b8ab319fdd7db # v2.9.2 247 | if: github.event_name == 'pull_request' 248 | with: 249 | header: coverage 250 | recreate: true 251 | path: code-coverage-results.md 252 | 253 | test: 254 | name: Coverage check 255 | runs-on: ubuntu-24.04 256 | needs: [cov_data, base] 257 | permissions: 258 | # required for workflows in private repositories 259 | actions: read 260 | contents: read 261 | outputs: 262 | coverage: ${{ needs.cov_data.outputs.coverage }} 263 | coverage-base: ${{ needs.base.outputs.base_cov }} 264 | coverage-rounded-display: ${{ needs.cov_data.outputs.coverage-rounded-display }} 265 | 266 | steps: 267 | - name: Check test coverage 268 | env: 269 | COVERAGE: ${{ needs.cov_data.outputs.coverage }} 270 | COVERAGE_ROUNDED: ${{ needs.cov_data.outputs.coverage-rounded-display }} 271 | BASE_COVERAGE: ${{ needs.base.outputs.base_cov }} 272 | MEMBER: ${{ 
github.event.pull_request.author_association }} 273 | run: | 274 | echo "Coverage: ${COVERAGE}" 275 | echo "Coverage Rounded: ${COVERAGE_ROUNDED}" 276 | echo "Coverage on Base Branch: ${BASE_COVERAGE}" 277 | echo "Author assoc: ${MEMBER}" 278 | 279 | comment_cov_change: 280 | name: Comment on PR with coverage delta 281 | runs-on: ubuntu-24.04 282 | needs: [test, base] 283 | permissions: 284 | pull-requests: write 285 | # required for workflows in private repositories 286 | actions: read 287 | contents: read 288 | 289 | steps: 290 | - name: Environment 291 | run: | 292 | bash -c set 293 | 294 | - name: Set whether base coverage was found 295 | shell: bash 296 | env: 297 | BASE: ${{ needs.test.outputs.coverage-base }} 298 | run: | 299 | if [ -n "${BASE}" ] 300 | then 301 | BASE_RESULT="true" 302 | else 303 | BASE_RESULT="false" 304 | fi 305 | echo "HAVE_BASE_COVERAGE is ${BASE_RESULT}" 306 | echo "HAVE_BASE_COVERAGE=${BASE_RESULT}" >> $GITHUB_ENV 307 | echo "BASE_COVERAGE=${BASE}" >> $GITHUB_ENV 308 | 309 | - name: Collect variables and construct comment for delta message 310 | if: env.HAVE_BASE_COVERAGE == 'true' 311 | shell: bash 312 | env: 313 | BASE_BRANCH: ${{ needs.base.outputs.base_branch }} 314 | COVERAGE: ${{ needs.test.outputs.coverage }} 315 | BASE_COVERAGE: ${{ needs.test.outputs.coverage-base }} 316 | DELTA_WORD: "not change" 317 | RATE: "Branch Rate" 318 | 319 | run: | 320 | if [ "${COVERAGE}" -gt "${BASE_COVERAGE}" ] 321 | then 322 | DELTA_WORD="increase" 323 | elif [ "${COVERAGE}" -lt "${BASE_COVERAGE}" ] 324 | then 325 | DELTA_WORD="decrease" 326 | fi 327 | CHG=$(( COVERAGE - BASE_COVERAGE )) 328 | CHG="${CHG/-/}" 329 | echo "" > coverage-delta.md 330 | echo "Hello @${{ github.actor }}! Thanks for opening this PR. 
We found the following information based on analysis of the coverage report:" >> coverage-delta.md 331 | echo "" >> coverage-delta.md 332 | echo "__Base__ ${RATE} coverage is __${BASE_COVERAGE}%__" >> coverage-delta.md 333 | if [ "${CHG}" = "0" ] 334 | then 335 | echo "Merging ${{ github.sha }} into ${BASE_BRANCH} will __${DELTA_WORD}__ coverage" >> coverage-delta.md 336 | else 337 | echo "Merging ${{ github.sha }} into ${BASE_BRANCH} will __${DELTA_WORD}__ coverage by __${CHG}%__" >> coverage-delta.md 338 | fi 339 | if ! [ "${DELTA_WORD}" = "decrease" ] 340 | then 341 | echo "" >> coverage-delta.md 342 | echo "Nice work, @${{ github.actor }}. Cheers! :rocket:" >> coverage-delta.md 343 | fi 344 | 345 | - name: Comment PR with test coverage delta 346 | uses: marocchino/sticky-pull-request-comment@67d0dec7b07ed060a405f9b2a64b8ab319fdd7db # v2.9.2 347 | if: env.HAVE_BASE_COVERAGE == 'true' 348 | with: 349 | header: delta 350 | recreate: true 351 | path: coverage-delta.md 352 | 353 | badge: 354 | # Only generate and publish if these conditions are met: 355 | # - The test step ended successfully 356 | # - One of these is met: 357 | # - This is a push event and the push event is on branch 'master' or 'develop' 358 | # Note: if this repo is personal (ie, not an org repo) then you can 359 | # use the following to change the scope of the next 2 jobs 360 | # instead of running on branch push as shown below: 361 | # - This is a pull request event and the pull actor is the same as the repo owner 362 | # if: ${{ ( github.event_name == 'pull_request' && github.actor == github.repository_owner ) || github.ref == 'refs/heads/master' }} 363 | name: Generate badge image with test coverage value 364 | runs-on: ubuntu-24.04 365 | permissions: 366 | pull-requests: write 367 | # required for workflows in private repositories 368 | actions: read 369 | contents: write 370 | needs: [test, pre_ci] 371 | if: github.event_name == 'push' 372 | outputs: 373 | url: ${{ steps.url.outputs.url }} 
374 | markdown: ${{ steps.url.outputs.markdown }} 375 | 376 | steps: 377 | - uses: actions/checkout@v4 378 | with: 379 | ref: badges 380 | path: badges 381 | 382 | # Use the output from the `coverage` step 383 | - name: Generate the badge SVG image 384 | uses: emibcn/badge-action@808173dd03e2f30c980d03ee49e181626088eee8 # v2.0.3 385 | id: badge 386 | with: 387 | label: 'Branch coverage' 388 | status: ${{ needs.test.outputs.coverage-rounded-display }} 389 | color: ${{ 390 | needs.test.outputs.coverage > 90 && 'green' || 391 | needs.test.outputs.coverage > 80 && 'yellow,green' || 392 | needs.test.outputs.coverage > 70 && 'yellow' || 393 | needs.test.outputs.coverage > 60 && 'orange,yellow' || 394 | needs.test.outputs.coverage > 50 && 'orange' || 395 | needs.test.outputs.coverage > 40 && 'red,orange' || 396 | needs.test.outputs.coverage > 30 && 'red,red,orange' || 397 | needs.test.outputs.coverage > 20 && 'red,red,red,orange' || 398 | 'red' }} 399 | path: badges/test-coverage.svg 400 | 401 | - name: Commit badge and data 402 | env: 403 | BRANCH: ${{ needs.pre_ci.outputs.branch }} 404 | COVERAGE: ${{ needs.test.outputs.coverage }} 405 | FILE: 'test-coverage.svg' 406 | DATA: 'test-coverage.txt' 407 | working-directory: ./badges 408 | run: | 409 | git config --local user.email "action@github.com" 410 | git config --local user.name "GitHub Action" 411 | mkdir -p "${BRANCH}" 412 | mv "${FILE}" "${BRANCH}" 413 | echo "${COVERAGE}" > "${BRANCH}/${DATA}" 414 | git add "${BRANCH}/${FILE}" "${BRANCH}/${DATA}" 415 | # Will give error if badge has not changed 416 | git commit -m "Add/Update badge" || true 417 | 418 | - name: Push badge commit 419 | uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df # v0.8.0 420 | with: 421 | github_token: ${{ secrets.GITHUB_TOKEN }} 422 | branch: badges 423 | directory: badges 424 | -------------------------------------------------------------------------------- /.github/workflows/cpplint.yml: 
-------------------------------------------------------------------------------- 1 | name: cpplint 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | push: 7 | branches: [ master, develop ] 8 | 9 | jobs: 10 | cpplint: 11 | 12 | runs-on: ubuntu-latest 13 | permissions: 14 | contents: read 15 | defaults: 16 | run: 17 | shell: bash 18 | 19 | steps: 20 | - uses: actions/checkout@v4 21 | 22 | - name: Setup Python 3.11 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: '3.11' 26 | 27 | - name: Install Tox 28 | run: | 29 | python -m pip install --upgrade pip 30 | pip install tox 31 | 32 | - name: Run cpplint 33 | run: | 34 | tox -e lint 35 | -------------------------------------------------------------------------------- /.github/workflows/debs.yml: -------------------------------------------------------------------------------- 1 | name: Debian packages 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | push: 7 | branches: 8 | - master 9 | 10 | jobs: 11 | get_version: 12 | name: Get version info 13 | runs-on: ubuntu-22.04 14 | permissions: 15 | contents: read 16 | defaults: 17 | run: 18 | shell: bash 19 | outputs: 20 | version: ${{ steps.git_ver.outputs.version }} 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | with: 25 | fetch-depth: 0 26 | 27 | - name: Get package version 28 | id: git_ver 29 | run: | 30 | version=$(git describe --tags | sed -e "s|v||" -e "s|-g|+g|") 31 | echo "Version from git: ${version}" 32 | echo "version=${version}" >> $GITHUB_OUTPUT 33 | 34 | build_debs: 35 | name: ${{ matrix.name }} 36 | runs-on: ubuntu-22.04 37 | permissions: 38 | contents: write 39 | needs: [get_version] 40 | 41 | strategy: 42 | fail-fast: false 43 | matrix: 44 | name: [ 45 | x64_bookworm, 46 | x64_trixie, 47 | x64_sid, 48 | ] 49 | 50 | include: 51 | - name: x64_bookworm 52 | dist: bookworm 53 | 54 | - name: x64_trixie 55 | dist: trixie 56 | 57 | - name: x64_sid 58 | dist: sid 59 | 60 | steps: 61 | - name: Check github variables 62 | env: 63 | 
VERSION: ${{ needs.get_version.outputs.version }} 64 | run: | 65 | echo "Package version from git: ${VERSION}" 66 | 67 | - uses: actions/checkout@v4 68 | with: 69 | fetch-depth: 0 70 | 71 | - name: Install deps and update debian changelog 72 | run: | 73 | sudo apt-get update 74 | sudo apt-get install devscripts 75 | debchange -v ${{ needs.get_version.outputs.version }}-${{ matrix.dist }} -b -M --distribution ${{ matrix.dist }} "ci build" 76 | 77 | - name: Build deb packages 78 | uses: jtdor/build-deb-action@v1 79 | env: 80 | DEB_BUILD_OPTIONS: noautodbgsym 81 | with: 82 | apt-opts: --install-recommends 83 | docker-image: "debian:${{ matrix.dist }}-slim" 84 | buildpackage-opts: --build=binary --no-sign 85 | extra-build-deps: git cmake 86 | 87 | - name: Upload deb files 88 | uses: actions/upload-artifact@v4 89 | with: 90 | name: "redis-ipc_${{ needs.get_version.outputs.version }}-${{ matrix.dist }}" 91 | path: ./debian/artifacts/*.deb 92 | -------------------------------------------------------------------------------- /.github/workflows/env.yml: -------------------------------------------------------------------------------- 1 | name: Runtime 2 | 3 | on: 4 | workflow_dispatch: 5 | #push: 6 | #branches: [ master, develop ] 7 | #pull_request: 8 | #branches: [ master, develop ] 9 | 10 | jobs: 11 | build: 12 | 13 | runs-on: ${{ matrix.os }} 14 | permissions: 15 | contents: read 16 | defaults: 17 | run: 18 | shell: bash 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | os: [ubuntu-22.04, ubuntu-24.04] 24 | 25 | steps: 26 | - uses: actions/checkout@v4 27 | 28 | - uses: actions/setup-python@v5 29 | with: 30 | python-version: '3.9' 31 | 32 | - name: Set platform server (socket) dir 33 | id: set_temp_dirs 34 | run: | 35 | MKTEMP=$(mktemp -d) 36 | echo "Platform temp dir is: ${MKTEMP}" 37 | echo "GH CI temp dir is: ${RUNNER_TEMP}" 38 | if [ "${ImageOS}" != "ubuntu18" ] 39 | then 40 | TEMP_DIR="${MKTEMP}" 41 | else 42 | TEMP_DIR="${RUNNER_TEMP}" 43 | fi 44 | echo 
"Setting runtime dir: ${TEMP_DIR}" 45 | echo "TEMP_DIR=${TEMP_DIR}" >> $GITHUB_ENV 46 | bash -c set 47 | 48 | - name: Add python requirements 49 | run: | 50 | python -m pip install --upgrade pip 51 | pip install gcovr 52 | 53 | - name: Common dependencies 54 | run: | 55 | sudo apt-get -qq update 56 | sudo apt-get install -y libhiredis-dev redis-server 57 | 58 | - name: Backports (jammy) 59 | if: matrix.os == 'ubuntu-22.04' 60 | run: | 61 | sudo apt-get install -y software-properties-common 62 | sudo add-apt-repository -y -s ppa:nerdboy/embedded 63 | sudo apt-get install -y libjson-c-dev lcov 64 | 65 | - name: Backports (noble) 66 | if: matrix.os == 'ubuntu-24.04' 67 | run: | 68 | sudo apt-get install -y software-properties-common autoconf automake lcov 69 | sudo add-apt-repository -y -s ppa:nerdboy/embedded 70 | sudo apt-get install -y libjson-c-dev 71 | 72 | - name: Configure and build (autotools) 73 | if: matrix.os == 'ubuntu-24.04' 74 | run: | 75 | autoreconf -fiv 76 | ./configure --with-coverage 77 | make cov || true 78 | 79 | - name: Configure and build (cmake) 80 | if: matrix.os == 'ubuntu-22.04' 81 | run: | 82 | cmake -S . -B build -DWITH_COVERAGE=1 -DCMAKE_BUILD_TYPE=Debug 83 | cmake --build build -j 2 84 | make cov || true 85 | 86 | - name: Message bus 87 | env: 88 | RIPC_RUNTIME_DIR: "${{ env.TEMP_DIR }}" 89 | run: | 90 | ./scripts/run_redis.sh start 91 | 92 | - name: Test (autotools) 93 | if: matrix.os == 'ubuntu-20.04' 94 | env: 95 | RIPC_SERVER_PATH: "${{ env.TEMP_DIR }}/socket" 96 | run: | 97 | make cov 98 | gcovr --gcov-ignore-parse-errors --config gcovr.cfg -s -b src/.libs/ test/ 99 | 100 | - name: Test (ctest) 101 | if: matrix.os == 'ubuntu-22.04' 102 | env: 103 | RIPC_SERVER_PATH: "${{ env.TEMP_DIR }}/socket" 104 | run: | 105 | ctest --build-target cov --test-dir build/ 106 | gcovr --gcov-ignore-parse-errors --config gcovr.cfg -r . 
-s -b build/ 107 | 108 | - name: Cleanup 109 | env: 110 | RIPC_RUNTIME_DIR: "${{ env.TEMP_DIR }}" 111 | run: | 112 | ./scripts/run_redis.sh stop 113 | -------------------------------------------------------------------------------- /.github/workflows/mark-stale.yml: -------------------------------------------------------------------------------- 1 | name: Mark Stale Issues and PRs 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: "30 1 * * *" 7 | 8 | jobs: 9 | stale: 10 | runs-on: ubuntu-latest 11 | permissions: 12 | actions: read 13 | contents: read 14 | 15 | steps: 16 | - name: Mark Stale 17 | uses: actions/stale@v9 18 | with: 19 | repo-token: ${{ secrets.GITHUB_TOKEN }} 20 | exempt-all-milestones: true 21 | stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove stale label or comment or this issue will be closed in 30 days.' 22 | stale-pr-message: 'This PR is stale because it has been open 45 days with no activity. Remove stale label or comment or this PR will be closed in 14 days.' 23 | close-issue-message: 'This issue was closed because it has been stale for 30 days with no activity.' 24 | close-pr-message: 'This PR was closed because it has been stale for 14 days with no activity.' 
25 | days-before-issue-stale: 90 26 | days-before-pr-stale: 45 27 | days-before-issue-close: 30 28 | days-before-pr-close: 14 29 | stale-issue-label: 'stale' 30 | stale-pr-label: 'stale' 31 | close-issue-label: 'closed-stale' 32 | close-pr-label: 'closed-stale' 33 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | # release on tag push 6 | tags: 7 | - '*' 8 | 9 | jobs: 10 | get_version: 11 | name: Get version info 12 | runs-on: ubuntu-22.04 13 | permissions: 14 | contents: read 15 | defaults: 16 | run: 17 | shell: bash 18 | outputs: 19 | version: ${{ steps.git_ver.outputs.version }} 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | with: 24 | fetch-depth: 0 25 | 26 | - name: Get package version 27 | id: git_ver 28 | run: | 29 | version=$(git describe --tags | sed -e "s|v||" -e "s|-g|+g|") 30 | echo "Version from git: ${version}" 31 | echo "version=${version}" >> $GITHUB_OUTPUT 32 | 33 | packaging: 34 | name: ${{ matrix.name }} 35 | runs-on: ubuntu-22.04 36 | permissions: 37 | contents: write 38 | needs: [get_version] 39 | 40 | strategy: 41 | fail-fast: false 42 | matrix: 43 | name: [ 44 | x64_bookworm, 45 | x64_trixie, 46 | x64_sid, 47 | ] 48 | 49 | include: 50 | - name: x64_bookworm 51 | dist: bookworm 52 | 53 | - name: x64_trixie 54 | dist: trixie 55 | 56 | - name: x64_sid 57 | dist: sid 58 | 59 | steps: 60 | - name: Check github variables 61 | env: 62 | VERSION: ${{ needs.get_version.outputs.version }} 63 | run: | 64 | echo "Package version from git: ${VERSION}" 65 | 66 | - uses: actions/checkout@v4 67 | with: 68 | fetch-depth: 0 69 | 70 | - name: Install deps and update debian changelog 71 | run: | 72 | sudo apt-get update 73 | sudo apt-get install devscripts 74 | debchange -v ${{ needs.get_version.outputs.version }}-${{ matrix.dist }} -b -M --distribution ${{ 
matrix.dist }} "ci build" 75 | 76 | - name: Build deb packages 77 | uses: jtdor/build-deb-action@v1 78 | env: 79 | DEB_BUILD_OPTIONS: noautodbgsym 80 | with: 81 | apt-opts: --install-recommends 82 | docker-image: "debian:${{ matrix.dist }}-slim" 83 | buildpackage-opts: --build=binary --no-sign 84 | extra-build-deps: git 85 | 86 | - name: Upload deb files 87 | uses: actions/upload-artifact@v4 88 | with: 89 | name: "redis-ipc_${{ needs.get_version.outputs.version }}-${{ matrix.dist }}" 90 | path: ./debian/artifacts/*.deb 91 | 92 | create_release: 93 | name: Create Release 94 | needs: [packaging] 95 | runs-on: ubuntu-22.04 96 | permissions: 97 | contents: write 98 | 99 | steps: 100 | - name: Get version 101 | id: get_version 102 | run: | 103 | echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV 104 | echo ${{ env.VERSION }} 105 | 106 | - uses: actions/checkout@v4 107 | with: 108 | fetch-depth: 0 109 | 110 | - name: Set up Python ${{ matrix.python-version }} 111 | uses: actions/setup-python@v5 112 | with: 113 | python-version: 3.9 114 | 115 | - name: Install dependencies 116 | run: | 117 | sudo apt-get install -yqq redis-server autoconf automake 118 | sudo apt-get install -yqq libjson-c-dev libhiredis-dev libgtest-dev libgmock-dev 119 | sudo systemctl stop redis 120 | python -m pip install --upgrade pip wheel 121 | pip install tox 122 | 123 | - name: Build dist pkg 124 | run: | 125 | tox -e dist 126 | 127 | # download all artifacts to project dir 128 | - uses: actions/download-artifact@v4 129 | 130 | - name: Generate changes file 131 | uses: sarnold/gitchangelog-action@master 132 | with: 133 | github_token: ${{ secrets.GITHUB_TOKEN}} 134 | 135 | - name: Create draft release 136 | id: create_release 137 | uses: softprops/action-gh-release@v2 138 | env: 139 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 140 | with: 141 | tag_name: ${{ env.VERSION }} 142 | name: Release v${{ env.VERSION }} 143 | body_path: CHANGES.md 144 | draft: false 145 | prerelease: false 146 | 
files: | 147 | packages/*.deb 148 | redis-ipc*.tar.gz 149 | -------------------------------------------------------------------------------- /.github/workflows/smoke.yml: -------------------------------------------------------------------------------- 1 | name: smoke 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: [ master, develop ] 7 | pull_request: 8 | branches: [ master, develop ] 9 | 10 | jobs: 11 | ci_build: 12 | 13 | runs-on: ${{ matrix.os }} 14 | permissions: 15 | actions: read 16 | contents: read 17 | defaults: 18 | run: 19 | shell: bash 20 | 21 | strategy: 22 | fail-fast: false 23 | matrix: 24 | os: [ubuntu-22.04, ubuntu-24.04] 25 | include: 26 | - os: ubuntu-24.04 27 | compiler: gcc 28 | version: "13" 29 | test_cmd: 'auto' 30 | - os: ubuntu-22.04 31 | compiler: clang 32 | version: "15" 33 | test_cmd: clang 34 | 35 | steps: 36 | - uses: actions/checkout@v4 37 | 38 | - uses: actions/setup-python@v5 39 | with: 40 | python-version: '3.11' 41 | 42 | - name: Environment 43 | run: | 44 | bash -c set 45 | 46 | # use gtest/gmock 1.10 from PPA in the following step 47 | - name: Deps plus PPA/msg bus 48 | run: | 49 | sudo apt-get -qq update 50 | sudo apt-get install -yqq software-properties-common 51 | sudo add-apt-repository ppa:ubuntu-toolchain-r/ppa 52 | sudo apt-get -qq update 53 | sudo apt-get install -yqq redis-server autoconf automake lcov 54 | sudo apt-get install -yqq libjson-c-dev libhiredis-dev libgtest-dev libgmock-dev 55 | sudo systemctl stop redis 56 | if [ "${{ matrix.compiler }}" = gcc ]; then 57 | sudo apt-get install -y g++-${{ matrix.version }} g++-${{ matrix.version }}-multilib libltdl-dev 58 | echo "CC=gcc-${{ matrix.version }}" >> $GITHUB_ENV 59 | echo "CXX=g++-${{ matrix.version }}" >> $GITHUB_ENV 60 | else 61 | sudo apt-get install -y clang-${{ matrix.version }} llvm-${{ matrix.version }} lld-${{ matrix.version }} llvm-${{ matrix.version }}-tools g++-multilib 62 | echo "CC=clang-${{ matrix.version }}" >> $GITHUB_ENV 63 | echo 
"CXX=clang++-${{ matrix.version }}" >> $GITHUB_ENV 64 | echo "LLVM_VER_DIR=/usr/lib/llvm-${{ matrix.version }}" >> $GITHUB_ENV 65 | fi 66 | 67 | - name: Add python requirements 68 | run: | 69 | python -m pip install --upgrade pip 70 | pip install tox 71 | 72 | - name: Test 73 | env: 74 | CC: ${{ env.CC }} 75 | CXX: ${{ env.CXX }} 76 | LLVM_VER_DIR: ${{ env.LLVM_VER_DIR }} 77 | run: | 78 | tox -e ${{ matrix.test_cmd }} 79 | 80 | - uses: actions/upload-artifact@v4 81 | if: matrix.os == 'ubuntu-22.04' 82 | with: 83 | name: src_coverage_data 84 | path: | 85 | build/coverage/html 86 | build/coverage/lcov.info 87 | 88 | ci_metrics: 89 | name: Collect metrics 90 | runs-on: ubuntu-22.04 91 | permissions: 92 | actions: read 93 | contents: read 94 | needs: [ci_build] 95 | steps: 96 | - uses: actions/checkout@v4 97 | with: 98 | fetch-depth: 0 99 | 100 | - uses: sarnold/cccc-action@855d440abe1a4229896f36b4d01d88decae0a1f2 # v1.0 101 | with: 102 | github_token: ${{ secrets.GITHUB_TOKEN }} 103 | source_dirs: | 104 | src 105 | inc 106 | 107 | - uses: actions/upload-artifact@v4 108 | with: 109 | name: metrics_redis-ipc 110 | path: ./metrics 111 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Local files generated by IDEs 2 | .vs/* 3 | .vscode/* 4 | *.code-workspace 5 | ~AutoRecover.* 6 | *.sln 7 | *.vcxproj* 8 | __pycache__/* 9 | /*.config 10 | /*.creator 11 | /*.creator.user 12 | /*.files 13 | /*.idea 14 | /*.includes 15 | # Eclipse 16 | .cproject 17 | .project 18 | .settings 19 | # Visual Studio 20 | Debug/* 21 | Release/* 22 | # cmake 23 | build/ 24 | # Conda 25 | environment.yml 26 | # repolite 27 | ext/ 28 | # virtual env 29 | .env 30 | .tox 31 | .venv 32 | env/ 33 | venv/ 34 | ENV/ 35 | env.bak/ 36 | venv.bak/ 37 | 38 | # coverage files 39 | test/*.out 40 | *.gcov 41 | # compilation files 42 | *.lo 43 | *.od 44 | *.d 45 | *.o 46 | *.obj 47 | 
*.a 48 | *.lib 49 | 50 | # files stored by editors 51 | *~ 52 | 53 | *.pyc 54 | 55 | # auto generated data (docs should go on gh-pages branch) 56 | coverage/ 57 | coverage.xml 58 | doc/html/ 59 | doc/metrics/ 60 | -------------------------------------------------------------------------------- /.lcovrc: -------------------------------------------------------------------------------- 1 | # LCOV configuration file 2 | # 3 | #genhtml_css_file = gcov.css 4 | 5 | branch_coverage = 1 6 | function_coverage = 1 7 | checksum = 0 8 | 9 | genhtml_charset=UTF-8 10 | #genhtml_demangle_cpp=1 11 | 12 | geninfo_compat_libtool = 1 13 | #geninfo_external = 0 14 | #geninfo_gcov_all_blocks = 0 15 | genhtml_precision = 4 16 | #geninfo_auto_base = 1 17 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # To install the git pre-commit hook run: 2 | # pre-commit install 3 | # To update the pre-commit hooks run: 4 | # pre-commit install-hooks 5 | 6 | exclude: '^(debian/.*|.tox/|test/)$' 7 | repos: 8 | - repo: meta 9 | hooks: 10 | - id: check-useless-excludes 11 | - id: check-hooks-apply 12 | - repo: https://github.com/pre-commit/pre-commit-hooks 13 | rev: v4.4.0 14 | hooks: 15 | - id: trailing-whitespace 16 | - id: end-of-file-fixer 17 | - id: mixed-line-ending 18 | # exclude: '\.patch$' 19 | args: [--fix=lf] 20 | - id: check-added-large-files 21 | - id: check-case-conflict 22 | - id: check-executables-have-shebangs 23 | - id: check-shebang-scripts-are-executable 24 | - id: check-merge-conflict 25 | # - id: check-symlinks 26 | - id: detect-private-key 27 | # - id: check-xml 28 | # - id: check-json 29 | - id: check-yaml 30 | exclude: 'conda/meta.yaml' 31 | 32 | - repo: https://github.com/PyCQA/doc8 33 | rev: v1.1.1 34 | hooks: 35 | - id: doc8 36 | args: 37 | - '--max-line-length=90' 38 | - '--ignore=D001' 39 | exclude: ChangeLog\.rst$ 40 | 41 | - 
repo: https://github.com/pre-commit/pygrep-hooks 42 | rev: v1.10.0 43 | hooks: 44 | - id: rst-backticks 45 | exclude: ChangeLog\.rst$ 46 | - id: rst-directive-colons 47 | - id: rst-inline-touching-normal 48 | 49 | # - repo: https://github.com/adrienverge/yamllint 50 | # rev: v1.26.3 51 | # hooks: 52 | # - id: yamllint 53 | # exclude: 'conda/meta.yaml' 54 | 55 | - repo: https://github.com/iconmaster5326/cmake-format-pre-commit-hook 56 | rev: v0.6.9 57 | hooks: 58 | - id: cmake-format 59 | 60 | - repo: https://github.com/lovesegfault/beautysh 61 | rev: v6.2.1 62 | hooks: 63 | - id: beautysh 64 | 65 | # - repo: https://github.com/pocc/pre-commit-hooks 66 | # rev: v1.1.1 67 | # hooks: 68 | # - id: clang-format 69 | # args: [-i] 70 | 71 | # current cpplint hook ignores top-level CPPLINT.cfg 72 | - repo: https://github.com/cpplint/cpplint 73 | rev: 1.6.1 74 | hooks: 75 | - id: cpplint 76 | args: 77 | - '--output=gsed' 78 | - '--linelength=120' 79 | - '--filter=-build/include_subdir,-build/header_guard,-whitespace/braces,-whitespace/newline' 80 | - '--exclude=test/*.c' 81 | - '--exclude=test/*.cpp' 82 | files: \.(h|hh|hpp|hxx|h|c|cc|cpp|cxx|c)$ 83 | 84 | - repo: https://github.com/jorisroovers/gitlint 85 | rev: v0.19.1 86 | hooks: 87 | - id: gitlint 88 | -------------------------------------------------------------------------------- /.repolite.yml: -------------------------------------------------------------------------------- 1 | prog_name: repolite 2 | top_dir: ext # local directory path for enabled repositories 3 | pull_with_rebase: false # use --ff-only if false 4 | # add new repo_name sections as needed 5 | repos: 6 | - repo_name: hiredis 7 | repo_alias: null 8 | repo_url: https://github.com/redis/hiredis.git 9 | repo_depth: 0 10 | repo_remote: origin 11 | repo_opts: [] 12 | repo_branch: v0.14.1 13 | repo_hash: null 14 | repo_use_rebase: false 15 | repo_has_lfs_files: false 16 | repo_init_submodules: false 17 | repo_install: false 18 | repo_enable: true 19 | - 
repo_name: json-c 20 | repo_alias: json 21 | repo_url: https://github.com/json-c/json-c.git 22 | repo_depth: 0 23 | repo_remote: origin 24 | repo_opts: [] 25 | repo_branch: json-c-0.16-20220414 26 | repo_hash: null 27 | repo_use_rebase: false 28 | repo_has_lfs_files: false 29 | repo_init_submodules: false 30 | repo_install: false 31 | repo_enable: true 32 | - repo_name: redis 33 | repo_alias: null 34 | repo_url: https://github.com/redis/redis.git 35 | repo_depth: 0 36 | repo_remote: origin 37 | repo_opts: [] 38 | repo_branch: 6.0.16 39 | repo_hash: null 40 | repo_use_rebase: false 41 | repo_has_lfs_files: false 42 | repo_init_submodules: false 43 | repo_install: false 44 | repo_enable: true 45 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Authors of redis-ipc 2 | 3 | Stephanie Lockwood-Childs 4 | 5 | - pretty much everything 6 | 7 | Stephen Arnold 8 | 9 | - occasional build fixes and autotools port 10 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.18) 2 | 3 | # Add cmake modules of this project to the module path 4 | list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake) 5 | 6 | # used for both library and pkgconfig file 7 | set(PACKAGE_VERSION 0.2.1) 8 | set(LIBRARY_SOVERSION 0) 9 | 10 | if(SCM_VERSION_INFO) 11 | set(PACKAGE_VERSION ${SCM_VERSION_INFO}) 12 | endif() 13 | 14 | set(RIPC_SONAME ${PACKAGE_VERSION}) 15 | 16 | project( 17 | redis-ipc 18 | LANGUAGES C CXX 19 | VERSION ${PACKAGE_VERSION} 20 | ) 21 | 22 | set(CMAKE_EXPORT_COMPILE_COMMANDS ON) 23 | set(CMAKE_VERBOSE_MAKEFILE ON) 24 | 25 | if(NOT CMAKE_CXX_STANDARD) 26 | set(CMAKE_CXX_STANDARD 17) 27 | set(CMAKE_CXX_STANDARD_REQUIRED ON) 28 | set(CMAKE_CXX_EXTENSIONS OFF) 29 | endif() 30 | 31 | 
set(CMAKE_C_STANDARD 11) 32 | set(CMAKE_DEBUG_POSTFIX d) 33 | 34 | if(NOT CMAKE_BUILD_TYPE) 35 | set(CMAKE_BUILD_TYPE 36 | "RelWithDebInfo" 37 | CACHE STRING "Default build type: RelWithDebInfo" FORCE 38 | ) 39 | endif() 40 | 41 | include(CMakeParseArguments) 42 | include(CheckCCompilerFlag) 43 | include(CheckCXXCompilerFlag) 44 | include(GNUInstallDirs) 45 | include(CTest) 46 | 47 | # LTO requires cmake min version 3.9 and clang LTO requires lld 48 | if(RIPC_ENABLE_LTO AND CMAKE_BUILD_TYPE MATCHES "Release") 49 | if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") 50 | find_package(LLVM CONFIG HINTS ENV{LLVM_VER_DIR}) 51 | endif() 52 | 53 | include(CheckIPOSupported) 54 | check_ipo_supported(RESULT ipo_supported OUTPUT error) 55 | 56 | if(ipo_supported) 57 | message(STATUS "IPO / LTO supported") 58 | set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE) 59 | else() 60 | message(WARNING "IPO / LTO not supported: <${error}>") 61 | endif() 62 | endif() 63 | 64 | option(BUILD_SHARED_LIBS "build shared libraries" ON) 65 | option(BUILD_STATIC_LIBS "Build static libraries" OFF) 66 | option(RIPC_BUILD_TESTING "build and run tests" OFF) 67 | option(RIPC_DISABLE_SOCK_TESTS "disable tests requiring redis socket" OFF) 68 | 69 | set(RIPC_RUNTIME_DIR 70 | "" 71 | CACHE PATH "path to directory containing redis server socket" 72 | ) 73 | 74 | set(WITH_COVERAGE 75 | "" 76 | CACHE PATH "build with test coverage enabled" 77 | ) 78 | 79 | set(INSTALL_PKGCONFIG_DIR 80 | "${CMAKE_INSTALL_PREFIX}/share/pkgconfig" 81 | CACHE PATH "Install directory for pkgconfig (.pc) files" 82 | ) 83 | set(EXTRA_TARGET_LINK_LIBRARIES) 84 | 85 | # accept cmake option -or- environment variable 86 | if(DEFINED ENV{RIPC_RUNTIME_DIR}) 87 | message(STATUS "Found socket path in ENV: $ENV{RIPC_RUNTIME_DIR}") 88 | set(RUNTIME_DIR "$ENV{RIPC_RUNTIME_DIR}") 89 | add_compile_definitions(RIPC_RUNTIME_DIR="${RUNTIME_DIR}") 90 | elseif(RIPC_RUNTIME_DIR) 91 | message(STATUS "Got cmake option RIPC_RUNTIME_DIR: ${RIPC_RUNTIME_DIR}") 92 | 
set(RUNTIME_DIR "${RIPC_RUNTIME_DIR}") 93 | add_compile_definitions(RIPC_RUNTIME_DIR="${RUNTIME_DIR}") 94 | endif() 95 | 96 | # ~~~ 97 | # Adds custom target for lcov report generation (does not build, only 98 | # runs test cmd). Uses the same .lcovrc and command args as autotools. 99 | # Note: "make cov" target must be run both with/without RIPC_SERVER_PATH 100 | # override in order to generate full coverage data. 101 | # ~~~ 102 | if(WITH_COVERAGE) 103 | set(RIPC_BUILD_TESTING ON) 104 | include(TestCoverage) 105 | endif() 106 | 107 | find_package(PkgConfig) 108 | 109 | if(NOT PkgConfig_FOUND) 110 | find_package(json-c) 111 | find_package(hiredis) 112 | list(APPEND THIRD_PARTY_LIBS json-c hiredis) 113 | else() 114 | pkg_check_modules(HIREDIS IMPORTED_TARGET hiredis) 115 | if(HIREDIS_FOUND) 116 | list(APPEND THIRD_PARTY_LIBS PkgConfig::HIREDIS) 117 | endif() 118 | pkg_check_modules(JSONC IMPORTED_TARGET json-c) 119 | if(JSONC_FOUND) 120 | list(APPEND THIRD_PARTY_LIBS PkgConfig::JSONC) 121 | endif() 122 | endif() 123 | 124 | list(APPEND EXTRA_TARGET_LINK_LIBRARIES ${THIRD_PARTY_LIBS}) 125 | 126 | if(CMAKE_CXX_COMPILER_ID MATCHES "MSVC") 127 | if(MSVC_VERSION LESS 1900) 128 | message(FATAL_ERROR "you need Visual Studio 2015 or later") 129 | endif() 130 | if(BUILD_SHARED_LIBS) 131 | # See http://www.kitware.com/blog/home/post/939 for details. 132 | set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) 133 | endif() 134 | # CMake defaults to /W3, but some users like /W4 (or /Wall) and /WX, so we 135 | # disable various warnings that aren't particularly helpful. 136 | add_compile_options(/wd4100 /wd4201 /wd4456 /wd4457 /wd4702 /wd4815) 137 | # Without a byte order mark (BOM), Visual Studio assumes that the source 138 | # file is encoded using the current user code page, so we specify UTF-8. 
139 | add_compile_options(/utf-8) 140 | endif() 141 | 142 | if(WIN32) 143 | add_compile_definitions(_CRT_SECURE_NO_WARNINGS _SCL_SECURE_NO_WARNINGS) 144 | add_compile_definitions( 145 | WIN32_LEAN_AND_MEAN UNICODE _UNICODE STRICT NOMINMAX 146 | ) 147 | elseif(UNIX) 148 | add_compile_options(-pthread) 149 | list(APPEND EXTRA_TARGET_LINK_LIBRARIES -pthread) 150 | endif() 151 | 152 | include_directories( 153 | ${CMAKE_CURRENT_SOURCE_DIR}/inc ${CMAKE_CURRENT_SOURCE_DIR}/src 154 | ) 155 | 156 | set(RIPC_SOURCES src/redis_ipc.c) 157 | 158 | if(BUILD_STATIC_LIBS) 159 | add_library(ripcstatic STATIC ${RIPC_SOURCES}) 160 | set_target_properties(ripcstatic PROPERTIES OUTPUT_NAME redis_ipc) 161 | endif() 162 | 163 | add_library(redis_ipc ${RIPC_SOURCES}) 164 | add_library(redis_ipc::redis_ipc ALIAS redis_ipc) 165 | target_link_libraries(redis_ipc ${EXTRA_TARGET_LINK_LIBRARIES}) 166 | 167 | # SCM_VERSION_INFO can be defined by cmake args and passed into the code as a 168 | # define here (see PACKAGE_VERSION near the top of this file) 169 | target_compile_definitions(redis_ipc PRIVATE VERSION_INFO=${PACKAGE_VERSION}) 170 | 171 | # this looks weird, but needed for correct SOVERSION links 172 | set_target_properties( 173 | redis_ipc PROPERTIES VERSION ${RIPC_SONAME} SOVERSION ${LIBRARY_SOVERSION} 174 | ) 175 | 176 | if(RIPC_BUILD_TESTING) 177 | enable_testing() 178 | if(NOT RIPC_DISABLE_SOCK_TESTS) 179 | set(TEST_TARGETS command_result_test pub_sub_test settings_status_test 180 | multithread_test 181 | ) 182 | 183 | foreach(target ${TEST_TARGETS}) 184 | add_executable(${target} test/${target}.c) 185 | target_link_libraries( 186 | ${target} redis_ipc ${EXTRA_TARGET_LINK_LIBRARIES} 187 | ${CMAKE_THREAD_LIBS_INIT} 188 | ) 189 | add_test(NAME ${target} COMMAND ${target}) 190 | endforeach(target) 191 | endif() 192 | 193 | add_executable(json_test test/json_test.cpp) 194 | target_link_libraries( 195 | json_test redis_ipc ${EXTRA_TARGET_LINK_LIBRARIES} 196 | ${CMAKE_THREAD_LIBS_INIT} 
197 | ) 198 | add_test(NAME json_test COMMAND json_test) 199 | 200 | # Add source-based code coverage targets instead of using WITH_COVERAGE 201 | # above. Note this requires recent clang/llvm tooling. 202 | if(COVERAGE_BUILD AND NOT WITH_COVERAGE) 203 | include(coverage) 204 | add_coverage(redis_ipc) 205 | add_coverage(json_test) 206 | endif() 207 | endif() 208 | 209 | set(RIPC_HEADERS inc/json.hh src/redis_ipc.h) 210 | 211 | install(FILES ${RIPC_HEADERS} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) 212 | 213 | if(BUILD_SHARED_LIBS) 214 | list(APPEND RIPC_LIBS redis_ipc) 215 | endif() 216 | 217 | if(BUILD_STATIC_LIBS) 218 | list(APPEND RIPC_LIBS ripcstatic) 219 | endif() 220 | 221 | install( 222 | TARGETS ${RIPC_LIBS} 223 | EXPORT redis_ipcConfig 224 | ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} 225 | LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} 226 | RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} 227 | INCLUDES 228 | DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} 229 | ) 230 | 231 | install( 232 | EXPORT redis_ipcConfig 233 | DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/redis_ipc 234 | NAMESPACE redis_ipc:: 235 | ) 236 | 237 | set(RIPC_PC ${CMAKE_CURRENT_BINARY_DIR}/redis-ipc.pc) 238 | set(prefix ${CMAKE_INSTALL_PREFIX}) 239 | configure_file(${CMAKE_CURRENT_SOURCE_DIR}/redis-ipc.pc.in ${RIPC_PC} @ONLY) 240 | install(FILES ${RIPC_PC} DESTINATION ${INSTALL_PKGCONFIG_DIR}) 241 | -------------------------------------------------------------------------------- /CPPLINT.cfg: -------------------------------------------------------------------------------- 1 | linelength=120 2 | filter=-build/include_subdir,-build/header_guard,-whitespace/braces,-whitespace/newline 3 | exclude_files=test/*.c 4 | -------------------------------------------------------------------------------- /INSTALL: -------------------------------------------------------------------------------- 1 | Installation Instructions 2 | ************************* 3 | 4 | Copyright (C) 1994-1996, 1999-2002, 2004-2013 Free 
Software Foundation, 5 | Inc. 6 | 7 | Copying and distribution of this file, with or without modification, 8 | are permitted in any medium without royalty provided the copyright 9 | notice and this notice are preserved. This file is offered as-is, 10 | without warranty of any kind. 11 | 12 | Basic Installation 13 | ================== 14 | 15 | Briefly, the shell command `./configure && make && make install' 16 | should configure, build, and install this package. The following 17 | more-detailed instructions are generic; see the `README' file for 18 | instructions specific to this package. Some packages provide this 19 | `INSTALL' file but do not implement all of the features documented 20 | below. The lack of an optional feature in a given package is not 21 | necessarily a bug. More recommendations for GNU packages can be found 22 | in *note Makefile Conventions: (standards)Makefile Conventions. 23 | 24 | The `configure' shell script attempts to guess correct values for 25 | various system-dependent variables used during compilation. It uses 26 | those values to create a `Makefile' in each directory of the package. 27 | It may also create one or more `.h' files containing system-dependent 28 | definitions. Finally, it creates a shell script `config.status' that 29 | you can run in the future to recreate the current configuration, and a 30 | file `config.log' containing compiler output (useful mainly for 31 | debugging `configure'). 32 | 33 | It can also use an optional file (typically called `config.cache' 34 | and enabled with `--cache-file=config.cache' or simply `-C') that saves 35 | the results of its tests to speed up reconfiguring. Caching is 36 | disabled by default to prevent problems with accidental use of stale 37 | cache files. 
38 | 39 | If you need to do unusual things to compile the package, please try 40 | to figure out how `configure' could check whether to do them, and mail 41 | diffs or instructions to the address given in the `README' so they can 42 | be considered for the next release. If you are using the cache, and at 43 | some point `config.cache' contains results you don't want to keep, you 44 | may remove or edit it. 45 | 46 | The file `configure.ac' (or `configure.in') is used to create 47 | `configure' by a program called `autoconf'. You need `configure.ac' if 48 | you want to change it or regenerate `configure' using a newer version 49 | of `autoconf'. 50 | 51 | The simplest way to compile this package is: 52 | 53 | 1. `cd' to the directory containing the package's source code and type 54 | `./configure' to configure the package for your system. 55 | 56 | Running `configure' might take a while. While running, it prints 57 | some messages telling which features it is checking for. 58 | 59 | 2. Type `make' to compile the package. 60 | 61 | 3. Optionally, type `make check' to run any self-tests that come with 62 | the package, generally using the just-built uninstalled binaries. 63 | 64 | 4. Type `make install' to install the programs and any data files and 65 | documentation. When installing into a prefix owned by root, it is 66 | recommended that the package be configured and built as a regular 67 | user, and only the `make install' phase executed with root 68 | privileges. 69 | 70 | 5. Optionally, type `make installcheck' to repeat any self-tests, but 71 | this time using the binaries in their final installed location. 72 | This target does not install anything. Running this target as a 73 | regular user, particularly if the prior `make install' required 74 | root privileges, verifies that the installation completed 75 | correctly. 76 | 77 | 6. You can remove the program binaries and object files from the 78 | source code directory by typing `make clean'. 
To also remove the 79 | files that `configure' created (so you can compile the package for 80 | a different kind of computer), type `make distclean'. There is 81 | also a `make maintainer-clean' target, but that is intended mainly 82 | for the package's developers. If you use it, you may have to get 83 | all sorts of other programs in order to regenerate files that came 84 | with the distribution. 85 | 86 | 7. Often, you can also type `make uninstall' to remove the installed 87 | files again. In practice, not all packages have tested that 88 | uninstallation works correctly, even though it is required by the 89 | GNU Coding Standards. 90 | 91 | 8. Some packages, particularly those that use Automake, provide `make 92 | distcheck', which can by used by developers to test that all other 93 | targets like `make install' and `make uninstall' work correctly. 94 | This target is generally not run by end users. 95 | 96 | Compilers and Options 97 | ===================== 98 | 99 | Some systems require unusual options for compilation or linking that 100 | the `configure' script does not know about. Run `./configure --help' 101 | for details on some of the pertinent environment variables. 102 | 103 | You can give `configure' initial values for configuration parameters 104 | by setting variables in the command line or in the environment. Here 105 | is an example: 106 | 107 | ./configure CC=c99 CFLAGS=-g LIBS=-lposix 108 | 109 | *Note Defining Variables::, for more details. 110 | 111 | Compiling For Multiple Architectures 112 | ==================================== 113 | 114 | You can compile the package for more than one kind of computer at the 115 | same time, by placing the object files for each architecture in their 116 | own directory. To do this, you can use GNU `make'. `cd' to the 117 | directory where you want the object files and executables to go and run 118 | the `configure' script. 
`configure' automatically checks for the 119 | source code in the directory that `configure' is in and in `..'. This 120 | is known as a "VPATH" build. 121 | 122 | With a non-GNU `make', it is safer to compile the package for one 123 | architecture at a time in the source code directory. After you have 124 | installed the package for one architecture, use `make distclean' before 125 | reconfiguring for another architecture. 126 | 127 | On MacOS X 10.5 and later systems, you can create libraries and 128 | executables that work on multiple system types--known as "fat" or 129 | "universal" binaries--by specifying multiple `-arch' options to the 130 | compiler but only a single `-arch' option to the preprocessor. Like 131 | this: 132 | 133 | ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ 134 | CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ 135 | CPP="gcc -E" CXXCPP="g++ -E" 136 | 137 | This is not guaranteed to produce working output in all cases, you 138 | may have to build one architecture at a time and combine the results 139 | using the `lipo' tool if you have problems. 140 | 141 | Installation Names 142 | ================== 143 | 144 | By default, `make install' installs the package's commands under 145 | `/usr/local/bin', include files under `/usr/local/include', etc. You 146 | can specify an installation prefix other than `/usr/local' by giving 147 | `configure' the option `--prefix=PREFIX', where PREFIX must be an 148 | absolute file name. 149 | 150 | You can specify separate installation prefixes for 151 | architecture-specific files and architecture-independent files. If you 152 | pass the option `--exec-prefix=PREFIX' to `configure', the package uses 153 | PREFIX as the prefix for installing programs and libraries. 154 | Documentation and other data files still use the regular prefix. 
155 | 156 | In addition, if you use an unusual directory layout you can give 157 | options like `--bindir=DIR' to specify different values for particular 158 | kinds of files. Run `configure --help' for a list of the directories 159 | you can set and what kinds of files go in them. In general, the 160 | default for these options is expressed in terms of `${prefix}', so that 161 | specifying just `--prefix' will affect all of the other directory 162 | specifications that were not explicitly provided. 163 | 164 | The most portable way to affect installation locations is to pass the 165 | correct locations to `configure'; however, many packages provide one or 166 | both of the following shortcuts of passing variable assignments to the 167 | `make install' command line to change installation locations without 168 | having to reconfigure or recompile. 169 | 170 | The first method involves providing an override variable for each 171 | affected directory. For example, `make install 172 | prefix=/alternate/directory' will choose an alternate location for all 173 | directory configuration variables that were expressed in terms of 174 | `${prefix}'. Any directories that were specified during `configure', 175 | but not in terms of `${prefix}', must each be overridden at install 176 | time for the entire installation to be relocated. The approach of 177 | makefile variable overrides for each directory variable is required by 178 | the GNU Coding Standards, and ideally causes no recompilation. 179 | However, some platforms have known limitations with the semantics of 180 | shared libraries that end up requiring recompilation when using this 181 | method, particularly noticeable in packages that use GNU Libtool. 182 | 183 | The second method involves providing the `DESTDIR' variable. For 184 | example, `make install DESTDIR=/alternate/directory' will prepend 185 | `/alternate/directory' before all installation names. 
The approach of 186 | `DESTDIR' overrides is not required by the GNU Coding Standards, and 187 | does not work on platforms that have drive letters. On the other hand, 188 | it does better at avoiding recompilation issues, and works well even 189 | when some directory options were not specified in terms of `${prefix}' 190 | at `configure' time. 191 | 192 | Optional Features 193 | ================= 194 | 195 | If the package supports it, you can cause programs to be installed 196 | with an extra prefix or suffix on their names by giving `configure' the 197 | option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'. 198 | 199 | Some packages pay attention to `--enable-FEATURE' options to 200 | `configure', where FEATURE indicates an optional part of the package. 201 | They may also pay attention to `--with-PACKAGE' options, where PACKAGE 202 | is something like `gnu-as' or `x' (for the X Window System). The 203 | `README' should mention any `--enable-' and `--with-' options that the 204 | package recognizes. 205 | 206 | For packages that use the X Window System, `configure' can usually 207 | find the X include and library files automatically, but if it doesn't, 208 | you can use the `configure' options `--x-includes=DIR' and 209 | `--x-libraries=DIR' to specify their locations. 210 | 211 | Some packages offer the ability to configure how verbose the 212 | execution of `make' will be. For these packages, running `./configure 213 | --enable-silent-rules' sets the default to minimal output, which can be 214 | overridden with `make V=1'; while running `./configure 215 | --disable-silent-rules' sets the default to verbose, which can be 216 | overridden with `make V=0'. 217 | 218 | Particular systems 219 | ================== 220 | 221 | On HP-UX, the default C compiler is not ANSI C compatible. 
If GNU 222 | CC is not installed, it is recommended to use the following options in 223 | order to use an ANSI C compiler: 224 | 225 | ./configure CC="cc -Ae -D_XOPEN_SOURCE=500" 226 | 227 | and if that doesn't work, install pre-built binaries of GCC for HP-UX. 228 | 229 | HP-UX `make' updates targets which have the same time stamps as 230 | their prerequisites, which makes it generally unusable when shipped 231 | generated files such as `configure' are involved. Use GNU `make' 232 | instead. 233 | 234 | On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot 235 | parse its `' header file. The option `-nodtk' can be used as 236 | a workaround. If GNU CC is not installed, it is therefore recommended 237 | to try 238 | 239 | ./configure CC="cc" 240 | 241 | and if that doesn't work, try 242 | 243 | ./configure CC="cc -nodtk" 244 | 245 | On Solaris, don't put `/usr/ucb' early in your `PATH'. This 246 | directory contains several dysfunctional programs; working variants of 247 | these programs are available in `/usr/bin'. So, if you need `/usr/ucb' 248 | in your `PATH', put it _after_ `/usr/bin'. 249 | 250 | On Haiku, software installed for all users goes in `/boot/common', 251 | not `/usr/local'. It is recommended to use the following options: 252 | 253 | ./configure --prefix=/boot/common 254 | 255 | Specifying the System Type 256 | ========================== 257 | 258 | There may be some features `configure' cannot figure out 259 | automatically, but needs to determine by the type of machine the package 260 | will run on. Usually, assuming the package is built to be run on the 261 | _same_ architectures, `configure' can figure that out, but if it prints 262 | a message saying it cannot guess the machine type, give it the 263 | `--build=TYPE' option. 
TYPE can either be a short name for the system 264 | type, such as `sun4', or a canonical name which has the form: 265 | 266 | CPU-COMPANY-SYSTEM 267 | 268 | where SYSTEM can have one of these forms: 269 | 270 | OS 271 | KERNEL-OS 272 | 273 | See the file `config.sub' for the possible values of each field. If 274 | `config.sub' isn't included in this package, then this package doesn't 275 | need to know the machine type. 276 | 277 | If you are _building_ compiler tools for cross-compiling, you should 278 | use the option `--target=TYPE' to select the type of system they will 279 | produce code for. 280 | 281 | If you want to _use_ a cross compiler, that generates code for a 282 | platform different from the build platform, you should specify the 283 | "host" platform (i.e., that on which the generated programs will 284 | eventually be run) with `--host=TYPE'. 285 | 286 | Sharing Defaults 287 | ================ 288 | 289 | If you want to set default values for `configure' scripts to share, 290 | you can create a site shell script called `config.site' that gives 291 | default values for variables like `CC', `cache_file', and `prefix'. 292 | `configure' looks for `PREFIX/share/config.site' if it exists, then 293 | `PREFIX/etc/config.site' if it exists. Or, you can set the 294 | `CONFIG_SITE' environment variable to the location of the site script. 295 | A warning: not all `configure' scripts look for a site script. 296 | 297 | Defining Variables 298 | ================== 299 | 300 | Variables not defined in a site shell script can be set in the 301 | environment passed to `configure'. However, some packages may run 302 | configure again during the build, and the customized values of these 303 | variables may be lost. In order to avoid this problem, you should set 304 | them in the `configure' command line, using `VAR=value'. 
For example: 305 | 306 | ./configure CC=/usr/local2/bin/gcc 307 | 308 | causes the specified `gcc' to be used as the C compiler (unless it is 309 | overridden in the site shell script). 310 | 311 | Unfortunately, this technique does not work for `CONFIG_SHELL' due to 312 | an Autoconf limitation. Until the limitation is lifted, you can use 313 | this workaround: 314 | 315 | CONFIG_SHELL=/bin/bash ./configure CONFIG_SHELL=/bin/bash 316 | 317 | `configure' Invocation 318 | ====================== 319 | 320 | `configure' recognizes the following options to control how it 321 | operates. 322 | 323 | `--help' 324 | `-h' 325 | Print a summary of all of the options to `configure', and exit. 326 | 327 | `--help=short' 328 | `--help=recursive' 329 | Print a summary of the options unique to this package's 330 | `configure', and exit. The `short' variant lists options used 331 | only in the top level, while the `recursive' variant lists options 332 | also present in any nested packages. 333 | 334 | `--version' 335 | `-V' 336 | Print the version of Autoconf used to generate the `configure' 337 | script, and exit. 338 | 339 | `--cache-file=FILE' 340 | Enable the cache: use and save the results of the tests in FILE, 341 | traditionally `config.cache'. FILE defaults to `/dev/null' to 342 | disable caching. 343 | 344 | `--config-cache' 345 | `-C' 346 | Alias for `--cache-file=config.cache'. 347 | 348 | `--quiet' 349 | `--silent' 350 | `-q' 351 | Do not print messages saying which checks are being made. To 352 | suppress all normal output, redirect it to `/dev/null' (any error 353 | messages will still be shown). 354 | 355 | `--srcdir=DIR' 356 | Look for the package's source code in directory DIR. Usually 357 | `configure' can determine that directory automatically. 358 | 359 | `--prefix=DIR' 360 | Use DIR as the installation prefix. *note Installation Names:: 361 | for more details, including other options available for fine-tuning 362 | the installation locations. 
363 | 364 | `--no-create' 365 | `-n' 366 | Run the configure checks, but stop before creating any output 367 | files. 368 | 369 | `configure' also accepts some other, not widely useful, options. Run 370 | `configure --help' for more details. 371 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 2, June 1991 3 | 4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc., 5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 8 | 9 | Preamble 10 | 11 | The licenses for most software are designed to take away your 12 | freedom to share and change it. By contrast, the GNU General Public 13 | License is intended to guarantee your freedom to share and change free 14 | software--to make sure the software is free for all its users. This 15 | General Public License applies to most of the Free Software 16 | Foundation's software and to any other program whose authors commit to 17 | using it. (Some other Free Software Foundation software is covered by 18 | the GNU Lesser General Public License instead.) You can apply it to 19 | your programs, too. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | this service if you wish), that you receive source code or can get it 25 | if you want it, that you can change the software or use pieces of it 26 | in new free programs; and that you know you can do these things. 27 | 28 | To protect your rights, we need to make restrictions that forbid 29 | anyone to deny you these rights or to ask you to surrender the rights. 
30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. 
The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 
97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 
128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. (This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. 
However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. 
Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. 
Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. 
For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 
279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 292 | 293 | 294 | Copyright (C) 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 305 | 306 | You should have received a copy of the GNU General Public License along 307 | with this program; if not, write to the Free Software Foundation, Inc., 308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 309 | 310 | Also add information on how to contact you by electronic and paper mail. 311 | 312 | If the program is interactive, make it output a short notice like this 313 | when it starts in an interactive mode: 314 | 315 | Gnomovision version 69, Copyright (C) year name of author 316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 317 | This is free software, and you are welcome to redistribute it 318 | under certain conditions; type `show c' for details. 
319 | 320 | The hypothetical commands `show w' and `show c' should show the appropriate 321 | parts of the General Public License. Of course, the commands you use may 322 | be called something other than `show w' and `show c'; they could even be 323 | mouse-clicks or menu items--whatever suits your program. 324 | 325 | You should also get your employer (if you work as a programmer) or your 326 | school, if any, to sign a "copyright disclaimer" for the program, if 327 | necessary. Here is a sample; alter the names: 328 | 329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 331 | 332 | , 1 April 1989 333 | Ty Coon, President of Vice 334 | 335 | This General Public License does not permit incorporating your program into 336 | proprietary programs. If your program is a subroutine library, you may 337 | consider it more useful to permit linking proprietary applications with the 338 | library. If this is what you want to do, use the GNU Lesser General 339 | Public License instead of this License. 340 | -------------------------------------------------------------------------------- /Makefile.am: -------------------------------------------------------------------------------- 1 | SUBDIRS = src test 2 | 3 | #AM_TESTS_ENVIRONMENT = top_srcdir="$(top_srcdir)" 4 | 5 | ACLOCAL_AMFLAGS = -I m4 --install 6 | 7 | EXTRA_DIST = autogen.sh README.rst debian inc/json.hh 8 | pkgconfig_DATA = redis-ipc.pc 9 | 10 | export top_srcdir 11 | 12 | ${top_srcdir}/ChangeLog: 13 | (cd $(top_srcdir); git log) >> $@ 14 | 15 | # Clear dependency_libs for multiarch 16 | all-local: 17 | sed -i "/dependency_libs/ s/'.*'/''/" `find . 
-name '*.la' -o -name '*.lai'` 18 | 19 | 20 | if WITH_COVERAGE 21 | COV_INFO_FILE = $(top_builddir)/coverage.info 22 | COV_DIR = $(top_builddir)/coverage 23 | cov: 24 | $(MAKE) $(AM_MAKEFLAGS) \ 25 | CFLAGS="$(CFLAGS) $(COVERAGE_CFLAGS) $(COVERAGE_OPTFLAGS)" \ 26 | CXXFLAGS="$(CXXFLAGS) $(COVERAGE_CFLAGS) $(COVERAGE_OPTFLAGS)" 27 | $(MAKE) $(AM_MAKEFLAGS) check \ 28 | CFLAGS="$(CFLAGS) $(COVERAGE_CFLAGS) $(COVERAGE_OPTFLAGS)" \ 29 | CXXFLAGS="$(CXXFLAGS) $(COVERAGE_CFLAGS) $(COVERAGE_OPTFLAGS)" 30 | @echo "Generating coverage report..." 31 | $(LCOV) --config-file $(top_srcdir)/.lcovrc --capture \ 32 | --directory "$${PWD}/src" \ 33 | --directory "$${PWD}/test" \ 34 | --include "$${PWD}/src/" \ 35 | --include "$${PWD}/inc/json.hh" \ 36 | --output-file $(COV_INFO_FILE) \ 37 | --gcov-tool $(GCOV) 38 | $(GENHTML) --prefix "$(top_builddir)" \ 39 | --output-directory $(COV_DIR) \ 40 | --title $(PACKAGE_NAME) \ 41 | --legend --show-details \ 42 | $(COV_INFO_FILE) 43 | 44 | clean-local: 45 | @echo "Cleaning lcov files." 46 | @find $(top_builddir) -name "*.gcno" -exec rm -v {} \; 47 | @find $(top_builddir) -name "*.gcda" -exec rm -v {} \; 48 | @rm -rf $(top_builddir)/coverage* 49 | endif 50 | -------------------------------------------------------------------------------- /NEWS: -------------------------------------------------------------------------------- 1 | redis-ipc NEWS -- history of user-visible changes. 2 | 3 | * Changes in redis-ipc 4 | 5 | Initial version, static Makefile, test programs 6 | -------------------------------------------------------------------------------- /autogen.sh: -------------------------------------------------------------------------------- 1 | #! /bin/sh 2 | 3 | set -e 4 | 5 | srcdir=`dirname $0` 6 | test -z "$srcdir" && srcdir=. 7 | 8 | THEDIR="`pwd`" 9 | cd "$srcdir" 10 | DIE=0 11 | 12 | abort () { 13 | echo "$1 not found or command failed. Aborting!" 
14 | exit 1 15 | } 16 | 17 | #set -x 18 | libtoolize --ltdl --force --copy || abort "libtoolize" 19 | aclocal || abort "aclocal" 20 | autoheader || abort "autoheader" 21 | automake --gnu --add-missing --copy || abort "automake" 22 | autoconf || abort "autoconf" 23 | 24 | if test -z "$*"; then 25 | echo "You still need to run ./configure - if you wish to pass any arguments" 26 | echo "to it, please specify them on the configure command line." 27 | fi 28 | 29 | #set +x 30 | 31 | echo "After running ./configure, type:" 32 | echo 33 | echo "make" 34 | echo "make install" 35 | echo 36 | echo "have fun." 37 | -------------------------------------------------------------------------------- /clang_toolchain.cmake: -------------------------------------------------------------------------------- 1 | set(CMAKE_ASM_COMPILER "clang") 2 | set(CMAKE_ASM-ATT_COMPILER "clang") 3 | set(CMAKE_C_COMPILER "clang") 4 | set(CMAKE_CXX_COMPILER "clang++") 5 | set(CMAKE_Fortran_COMPILER "gfortran") 6 | set(CMAKE_AR 7 | ar 8 | CACHE FILEPATH "Archive manager" FORCE 9 | ) 10 | set(CMAKE_RANLIB 11 | ranlib 12 | CACHE FILEPATH "Archive index generator" FORCE 13 | ) 14 | -------------------------------------------------------------------------------- /cmake/FindJSONC.cmake: -------------------------------------------------------------------------------- 1 | # ~~~ 2 | # This module finds headers and libjson-c library. 
3 | # Results are reported in variables: 4 | # JSONC_FOUND - True if headers and library were found 5 | # JSONC_INCLUDE_DIRS - libjson-c include directories 6 | # JSONC_LIBRARIES - libjson-c library to be linked 7 | # ~~~ 8 | 9 | find_path( 10 | JSONC_INCLUDE_DIR 11 | NAMES json-c/json.h 12 | HINTS ENV VCPKG_ROOT ENV CONDA_PREFIX 13 | PATH_SUFFIXES include include/json-c 14 | PATHS ~/Library/Frameworks /Library/Frameworks /opt/local /opt /usr 15 | /usr/local/ 16 | ) 17 | 18 | find_library( 19 | JSONC_LIBRARY 20 | NAMES json-c libjson-c 21 | HINTS ENV VCPKG_ROOT ENV CONDA_PREFIX 22 | PATH_SUFFIXES lib lib64 lib32 23 | PATHS ~/Library/Frameworks /Library/Frameworks /opt/local /opt /usr 24 | /usr/local/ 25 | ) 26 | 27 | mark_as_advanced(JSONC_INCLUDE_DIR JSONC_LIBRARY) 28 | 29 | include(FindPackageHandleStandardArgs) 30 | find_package_handle_standard_args( 31 | JSONC REQUIRED_VARS JSONC_LIBRARY JSONC_INCLUDE_DIR 32 | ) 33 | 34 | if(JSONC_FOUND) 35 | # need if _FOUND guard to allow project to autobuild; can't overwrite 36 | # imported target even if bad 37 | set(JSONC_INCLUDE_DIRS ${JSONC_INCLUDE_DIR}) 38 | set(JSONC_LIBRARIES ${JSONC_LIBRARY}) 39 | 40 | if(NOT TARGET json-c::json-c) 41 | add_library(json-c::json-c INTERFACE IMPORTED) 42 | set_target_properties( 43 | json-c::json-c 44 | PROPERTIES INTERFACE_LINK_LIBRARIES "${JSONC_LIBRARIES}" 45 | INTERFACE_INCLUDE_DIRECTORIES "${JSONC_INCLUDE_DIR}" 46 | ) 47 | endif() 48 | endif(JSONC_FOUND) 49 | -------------------------------------------------------------------------------- /cmake/FindPython.cmake: -------------------------------------------------------------------------------- 1 | find_package(PythonInterp) 2 | find_package(PythonLibs) 3 | 4 | if(PYTHONINTERP_FOUND) 5 | execute_process( 6 | COMMAND ${PYTHON_EXECUTABLE} --version 7 | ERROR_VARIABLE PYTHON_VERSION_FULL 8 | OUTPUT_STRIP_TRAILING_WHITESPACE 9 | ) 10 | 11 | string(REGEX MATCH "[0-9]+.[0-9]+" PYTHON_VERSION_MAJOR_MINOR 12 | "${PYTHON_VERSION_FULL}" 13 | 
) 14 | if(UNIX) 15 | set(PYTHON_PACKAGES_PATH 16 | lib/python${PYTHON_VERSION_MAJOR_MINOR}/site-packages 17 | CACHE PATH "Where to install the python packages." 18 | ) 19 | endif() 20 | if(WIN32) 21 | get_filename_component( 22 | PYTHON_PATH 23 | "[HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\${PYTHON_VERSION_MAJOR_MINOR}\\InstallPath]" 24 | ABSOLUTE 25 | CACHE 26 | ) 27 | set(PYTHON_PACKAGES_PATH "${PYTHON_PATH}/Lib/site-packages") 28 | endif() 29 | 30 | if("${PYTHON_VERSION_MAJOR_MINOR}" VERSION_GREATER 2.5) 31 | set(PYTHON_EXECUTABLE ${PYTHON_EXECUTABLE} -B) 32 | endif() 33 | endif(PYTHONINTERP_FOUND) 34 | -------------------------------------------------------------------------------- /cmake/TestCoverage.cmake: -------------------------------------------------------------------------------- 1 | set(CMAKE_C_FLAGS "-g -O0 --coverage") 2 | set(CMAKE_CXX_FLAGS "-g -O0 --coverage") 3 | set(CMAKE_EXE_LINKER_FLAGS "--coverage") 4 | set(CMAKE_SHARED_LINKER_FLAGS "--coverage") 5 | 6 | set(COVERAGE_OUTPUT_DIR "${CMAKE_SOURCE_DIR}/coverage") 7 | set(TRACEFILE "${CMAKE_SOURCE_DIR}/coverage.info") 8 | set(REPORT_DIR "${COVERAGE_OUTPUT_DIR}") 9 | 10 | # cmake-format: off 11 | add_custom_command( 12 | OUTPUT "${TRACEFILE}" always 13 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" 14 | COMMAND ${CMAKE_COMMAND} -E remove "${TRACEFILE}" 15 | COMMAND ${CMAKE_COMMAND} -E remove_directory "${COVERAGE_OUTPUT_DIR}" 16 | COMMAND ${CMAKE_CTEST_COMMAND} # execute default test suite 17 | 18 | COMMAND lcov 19 | --config-file "${CMAKE_SOURCE_DIR}/.lcovrc" 20 | --capture 21 | --directory "${CMAKE_BINARY_DIR}" 22 | --include "${CMAKE_SOURCE_DIR}/src/redis_ipc.c" 23 | --include "${CMAKE_SOURCE_DIR}/src/redis_ipc.h" 24 | --include "${CMAKE_SOURCE_DIR}/inc/json.hh" 25 | --output-file "${TRACEFILE}" 26 | 27 | COMMAND genhtml ${TRACEFILE} 28 | --prefix "." 
29 | --title "${CMAKE_PROJECT_NAME}" 30 | --legend --show-details 31 | --output-directory ${REPORT_DIR} 32 | 33 | VERBATIM # for correct handling of wildcards in command line parameters 34 | ) 35 | # cmake-format: on 36 | 37 | add_custom_target(cov DEPENDS ${TRACEFILE} always) 38 | -------------------------------------------------------------------------------- /cmake/coverage.cmake: -------------------------------------------------------------------------------- 1 | option(COVERAGE_BUILD "Enable code coverage" OFF) 2 | option(COVERAGE_TEXT "Show text summary of the coverage" ON) 3 | option(COVERAGE_LCOV "Export coverage data in lcov trace file" ON) 4 | option(COVERAGE_HTML "Detailed html report of the coverage" OFF) 5 | 6 | set(COVERAGE_EXCLUDE_REGEX "(test/)") 7 | set(COVERAGE_PATH ${PROJECT_BINARY_DIR}/coverage) 8 | set(LLVM_DIRECTORY "$ENV{LLVM_VER_DIR}") 9 | 10 | if(COVERAGE_BUILD) 11 | message( 12 | STATUS 13 | "Source coverage is enabled. TEXT=${COVERAGE_TEXT}, LCOV=${COVERAGE_LCOV}, HTML=${COVERAGE_HTML}" 14 | ) 15 | 16 | find_package( 17 | LLVM REQUIRED CONFIG 18 | HINTS ${LLVM_DIRECTORY} 19 | ) 20 | #get_filename_component(LLVM_PREFIX "${LLVM_DIR}" DIRECTORY) 21 | message(STATUS "Using llvm directory: ${LLVM_DIRECTORY}") 22 | 23 | find_program( 24 | LLVM_COV_PATH 25 | NAMES llvm-cov 26 | HINTS ${LLVM_DIRECTORY} 27 | PATH_SUFFIXES bin 28 | ) 29 | find_program( 30 | LLVM_PROFDATA_PATH 31 | NAMES llvm-profdata 32 | HINTS ${LLVM_DIRECTORY} 33 | PATH_SUFFIXES bin 34 | ) 35 | 36 | if(LLVM_COV_PATH AND LLVM_PROFDATA_PATH) 37 | set(RIPC_HAVE_LLVM_COVERAGE_TOOLS TRUE) 38 | 39 | message(STATUS "Found llvm-cov: ${LLVM_COV_PATH}") 40 | message(STATUS "Found llvm-profdata: ${LLVM_PROFDATA_PATH}") 41 | else() 42 | message(FATAL_ERROR "llvm-cov stack required for coverage!") 43 | endif() 44 | 45 | set(CMAKE_C_FLAGS 46 | "${CMAKE_C_FLAGS} -fprofile-instr-generate -fcoverage-mapping" 47 | ) 48 | set(CMAKE_CXX_FLAGS 49 | "${CMAKE_CXX_FLAGS} -fprofile-instr-generate 
-fcoverage-mapping" 50 | ) 51 | 52 | set(COVERAGE_TARGETS ${COVERAGE_PATH}/targets.list) 53 | set(COVERAGE_PROFDATA ${COVERAGE_PATH}/all.profdata) 54 | mark_as_advanced(COVERAGE_TARGETS COVERAGE_PROFDATA) 55 | endif() 56 | 57 | function(add_coverage TARGET) 58 | if(NOT COVERAGE_BUILD) 59 | return() 60 | endif() 61 | 62 | if(NOT TARGET coverage) 63 | add_custom_target( 64 | coverage-clear 65 | COMMAND ${CMAKE_COMMAND} -E rm -rf ${COVERAGE_PATH} 66 | COMMAND ${CMAKE_COMMAND} -E make_directory ${COVERAGE_PATH} 67 | ) 68 | 69 | add_custom_target( 70 | coverage-profdata 71 | COMMAND 72 | ${CMAKE_COMMAND} -E env 73 | LLVM_PROFILE_FILE="${COVERAGE_PATH}/test_%p.profraw" 74 | ${CMAKE_CTEST_COMMAND} ${CMAKE_CTEST_ARGUMENTS} 75 | COMMAND ${LLVM_PROFDATA_PATH} merge -sparse 76 | ${COVERAGE_PATH}/*.profraw -o ${COVERAGE_PROFDATA} 77 | WORKING_DIRECTORY ${PROJECT_BINARY_DIR} 78 | ) 79 | 80 | add_custom_target(coverage) 81 | 82 | if(COVERAGE_TEXT) 83 | add_custom_target( 84 | coverage-text 85 | COMMAND 86 | ${LLVM_COV_PATH} report `cat ${COVERAGE_TARGETS}` 87 | -instr-profile=${COVERAGE_PROFDATA} 88 | -ignore-filename-regex="${COVERAGE_EXCLUDE_REGEX}" 89 | DEPENDS coverage-profdata 90 | ) 91 | add_dependencies(coverage coverage-text) 92 | endif() 93 | 94 | if(COVERAGE_HTML) 95 | add_custom_target( 96 | coverage-html 97 | COMMAND 98 | ${LLVM_COV_PATH} show `cat ${COVERAGE_TARGETS}` 99 | -instr-profile=${COVERAGE_PROFDATA} 100 | -show-line-counts-or-regions 101 | -output-dir=${COVERAGE_PATH}/html -format="html" 102 | -ignore-filename-regex="${COVERAGE_EXCLUDE_REGEX}" 103 | DEPENDS coverage-profdata 104 | ) 105 | add_dependencies(coverage coverage-html) 106 | endif() 107 | 108 | if(COVERAGE_LCOV) 109 | add_custom_target( 110 | coverage-lcov 111 | COMMAND 112 | ${LLVM_COV_PATH} export `cat ${COVERAGE_TARGETS}` 113 | -format="lcov" -instr-profile=${COVERAGE_PROFDATA} 114 | -ignore-filename-regex="${COVERAGE_EXCLUDE_REGEX}" > 115 | ${COVERAGE_PATH}/lcov.info 116 | DEPENDS 
coverage-profdata 117 | ) 118 | add_dependencies(coverage coverage-lcov) 119 | endif() 120 | endif() 121 | 122 | add_custom_target( 123 | coverage-${TARGET} 124 | COMMAND ${CMAKE_COMMAND} -E echo "-object=$" >> 125 | ${COVERAGE_TARGETS} 126 | DEPENDS coverage-clear 127 | ) 128 | add_dependencies(coverage-profdata coverage-${TARGET}) 129 | 130 | endfunction() 131 | -------------------------------------------------------------------------------- /conda/bld.bat: -------------------------------------------------------------------------------- 1 | :: MSVC is preferred. 2 | ECHO "redis-ipc library" 3 | 4 | set CC=cl.exe 5 | set CXX=cl.exe 6 | 7 | mkdir build 8 | cd build 9 | cmake ^ 10 | -G "Ninja" ^ 11 | -DRIPC_BUILD_TESTING=0 ^ 12 | -DCMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% ^ 13 | -DCMAKE_BUILD_TYPE=Release ^ 14 | -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=True ^ 15 | -DBUILD_SHARED_LIBS=ON ^ 16 | %SRC_DIR% 17 | if errorlevel 1 exit 1 18 | 19 | :: Build. 20 | cmake --build . --config Release 21 | if errorlevel 1 exit 1 22 | 23 | :: Install. 24 | cmake --build . --config Release --target install 25 | if errorlevel 1 exit 1 26 | -------------------------------------------------------------------------------- /conda/build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | if [[ "$target_platform" == "osx-64" ]]; then 6 | MACOS_ARGS="-DCMAKE_MACOSX_RPATH=ON" 7 | fi 8 | 9 | mkdir build 10 | cd build 11 | 12 | cmake \ 13 | -DRIPC_BUILD_TESTING=OFF \ 14 | -DCMAKE_INSTALL_PREFIX=$PREFIX \ 15 | -DCMAKE_INSTALL_LIBDIR=lib \ 16 | -DCMAKE_BUILD_TYPE=RelWithDebInfo \ 17 | ${MACOS_ARGS} \ 18 | .. 19 | 20 | cmake --build . --config RelWithDebInfo -- -j$CPU_COUNT 21 | cmake --build . 
--config RelWithDebInfo --target install 22 | -------------------------------------------------------------------------------- /conda/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set name = "redis-ipc" %} 2 | {% set version = "0.2.1" %} 3 | 4 | package: 5 | name: {{ name|lower }} 6 | version: {{ version }} 7 | 8 | source: 9 | path: .. 10 | 11 | build: 12 | number: 0 13 | skip: true # [win] 14 | error_overdepending: true 15 | run_exports: # [not win] 16 | - {{ pin_subpackage('redis-ipc', max_pin='x.x') }} 17 | ignore_run_exports: # [not win] 18 | - libstdcxx-ng 19 | - libcxx 20 | 21 | requirements: 22 | build: 23 | - {{ compiler('c') }} 24 | - {{ compiler('cxx') }} 25 | - cmake 26 | - pkg-config # [unix] 27 | - make # [unix] 28 | - json-c 29 | - libhiredis 30 | 31 | host: 32 | # this is needed outside conda smithy env 33 | - json-c 34 | - libhiredis 35 | 36 | test: 37 | requires: 38 | - pkg-config 39 | #- conda-build 40 | commands: 41 | - ldd ${PREFIX}/lib/libredis_ipc${SHLIB_EXT} # [linux] 42 | - test -f ${PREFIX}/include/redis_ipc.h # [unix] 43 | - test -f ${PREFIX}/lib/libredis_ipc${SHLIB_EXT} # [osx] 44 | - test -f ${PREFIX}`pkg-config --variable=libdir --dont-define-prefix redis-ipc`/libredis_ipc${SHLIB_EXT} # [unix] 45 | #- conda inspect linkages -p $PREFIX $PKG_NAME # [unix] 46 | #- conda inspect objects -p $PREFIX $PKG_NAME # [osx] 47 | - echo "TODO - Pkg tests require redis server." 48 | 49 | about: 50 | home: https://github.com/VCTLabs/redis-ipc 51 | license: GPL-2.0-only 52 | license_family: GPL 53 | license_file: LICENSE 54 | summary: advanced IPC mechanism using redis 55 | 56 | extra: 57 | recipe-maintainers: 58 | - sarnold 59 | -------------------------------------------------------------------------------- /configure.ac: -------------------------------------------------------------------------------- 1 | # -*- Autoconf -*- 2 | # Process this file with autoconf to produce a configure script. 
3 | 4 | AC_PREREQ([2.68]) 5 | AC_INIT([redis-ipc], [0.2.1], [sjl@vctlabs.com]) 6 | AC_CONFIG_SRCDIR([src/redis_ipc.c]) 7 | AC_CONFIG_MACRO_DIR([m4]) 8 | m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([yes])]) 9 | 10 | AM_INIT_AUTOMAKE([foreign subdir-objects]) 11 | 12 | if test "x$prefix" = "xNONE"; then 13 | prefix=$ac_default_prefix 14 | fi 15 | if test "x$exec_prefix" = "xNONE"; then 16 | exec_prefix='${prefix}' 17 | fi 18 | 19 | AC_CONFIG_HEADERS([config.h]) 20 | 21 | AM_PROG_AR 22 | LT_PREREQ([2.2.6]) 23 | LT_CONFIG_LTDL_DIR([libltdl]) 24 | LT_INIT([dlopen]) 25 | LTDL_INIT([subproject]) 26 | 27 | AM_MAINTAINER_MODE([enable]) 28 | 29 | AC_PROG_CC 30 | AC_PROG_CXX 31 | AC_PROG_INSTALL 32 | AC_PROG_MAKE_SET 33 | 34 | # Checks for libraries. 35 | 36 | # Checks for header files. 37 | AC_HEADER_STDC 38 | AC_CHECK_HEADERS([memory.h stdint.h stdlib.h string.h sys/types.h sys/syscall.h unistd.h]) 39 | 40 | # Checks for typedefs, structures, and compiler characteristics. 41 | AC_HEADER_STDBOOL 42 | AC_TYPE_SIZE_T 43 | AC_TYPE_SSIZE_T 44 | AC_TYPE_UINT32_T 45 | 46 | # Checks for library functions. 
47 | AC_FUNC_MALLOC 48 | AC_CHECK_FUNCS([memset strdup]) 49 | 50 | AC_CHECK_LIB(pthread, pthread_create, [LIBS="-lpthread ${LIBS}"], [ 51 | echo "pthreads required, failing" 52 | exit -1 53 | ]) 54 | 55 | # Check for hiredis and json-c 56 | JSON_C_MIN_VERSION="0.9" 57 | 58 | PKG_CHECK_MODULES(JSONC, libjson-c >= $JSON_C_MIN_VERSION, , 59 | AC_CHECK_HEADER(json-c/json.h, [JSONC_LIBS="-ljson-c"], [json-c=no]) 60 | AC_CHECK_HEADER(json-c/json.h, [JSONC_CFLAGS="-I$includedir" 61 | JSONC_LIBS="-L$libdir -ljson-c"], [json-c=no]) 62 | AC_SUBST([JSONC_CFLAGS]) 63 | AC_SUBST([JSONC_LIBS]) 64 | ) 65 | 66 | PKG_CHECK_MODULES(HIREDIS, libhiredis, , 67 | AC_CHECK_HEADER(hiredis/hiredis.h, [HIREDIS_LIBS="-lhiredis"], [hiredis=no]) 68 | AC_CHECK_HEADER(hiredis/hiredis.h, [HIREDIS_CFLAGS="-I$includedir" 69 | HIREDIS_LIBS="-L$libdir -lhiredis"], [hiredis=no]) 70 | AC_SUBST([HIREDIS_CFLAGS]) 71 | AC_SUBST([HIREDIS_LIBS]) 72 | ) 73 | #[ 74 | # AC_MSG_ERROR(libHIREDIS not found) 75 | #]) 76 | 77 | AC_ARG_ENABLE(debug, 78 | [ --enable-debug Enable debugging code.],, enable_debug="no") 79 | 80 | AC_ARG_ENABLE(gprof, 81 | [ --enable-gprof Enable gcc profiling.],, enable_gprof="no") 82 | 83 | # Test coverage generate. 
AS_HELP_STRING([--with-coverage], [Generate test coverage report with lcov.]),
redis-ipc.pc]) 136 | 137 | AC_OUTPUT 138 | -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | redis-ipc (0.0.6-1ubuntu2) focal; urgency=medium 2 | 3 | * rebuild for focal (no src changes) 4 | 5 | -- Stephen L Arnold Sun, 21 Nov 2021 13:03:52 -0800 6 | 7 | redis-ipc (0.0.6-1ubuntu1) bionic; urgency=medium 8 | 9 | * new upstream release 10 | 11 | -- Stephen L Arnold Sun, 21 Nov 2021 12:59:00 -0800 12 | 13 | redis-ipc (0.0.5-2ubuntu4) focal; urgency=medium 14 | 15 | * rebuild for focal (no src changes) 16 | 17 | -- Stephen L Arnold Fri, 19 Nov 2021 12:42:17 -0800 18 | 19 | redis-ipc (0.0.5-2ubuntu3) bionic; urgency=medium 20 | 21 | * install cmake packaging files along with .pc files 22 | 23 | -- Stephen L Arnold Fri, 19 Nov 2021 12:36:56 -0800 24 | 25 | redis-ipc (0.0.5-2ubuntu2) focal; urgency=medium 26 | 27 | * rebuild for focal (no src changes) 28 | 29 | -- Stephen L Arnold Fri, 19 Nov 2021 12:01:15 -0800 30 | 31 | redis-ipc (0.0.5-2ubuntu1) bionic; urgency=medium 32 | 33 | * bump revision for cmake-built pkgs, update build deps 34 | 35 | -- Stephen L Arnold Fri, 19 Nov 2021 11:35:10 -0800 36 | 37 | redis-ipc (0.0.5-1ubuntu5) bionic; urgency=medium 38 | 39 | * switch to cmake build system 40 | 41 | -- Stephen L Arnold Fri, 19 Nov 2021 10:12:17 -0800 42 | 43 | redis-ipc (0.0.5-1ubuntu4) focal; urgency=medium 44 | 45 | * rebuild for focal (no src changes) 46 | 47 | -- Stephen L Arnold Thu, 18 Nov 2021 21:47:14 -0800 48 | 49 | redis-ipc (0.0.5-1ubuntu3) bionic; urgency=medium 50 | 51 | * rebuild with missing dev package deps 52 | 53 | -- Stephen L Arnold Thu, 18 Nov 2021 21:33:19 -0800 54 | 55 | redis-ipc (0.0.5-1ubuntu2) focal; urgency=medium 56 | 57 | * rebuild for focal (no src changes) 58 | 59 | -- Stephen L Arnold Fri, 12 Nov 2021 18:34:36 -0800 60 | 61 | redis-ipc (0.0.5-1ubuntu1) bionic; urgency=medium 62 | 63 | * new upstream 
release 64 | 65 | -- Stephen L Arnold Fri, 12 Nov 2021 18:14:37 -0800 66 | 67 | redis-ipc (0.0.4-1ubuntu1) bionic; urgency=medium 68 | 69 | * include pkgconfig file in -dev 70 | * python module has moved to separate repo 71 | 72 | -- S. Lockwood-Childs Thu, 30 Sep 2021 19:23:54 -0700 73 | 74 | redis-ipc (0.0.2-1ubuntu5) bionic; urgency=medium 75 | 76 | * add yet-another-missing-build-dep 77 | 78 | -- Stephen L Arnold Sun, 08 Aug 2021 16:17:41 -0700 79 | 80 | redis-ipc (0.0.2-1ubuntu4) bionic; urgency=medium 81 | 82 | * disable maint flags (configure bombs in launchpad) 83 | 84 | -- Stephen L Arnold Sun, 25 Jul 2021 23:50:42 -0700 85 | 86 | redis-ipc (0.0.2-1ubuntu3) bionic; urgency=medium 87 | 88 | * remove extra configure args, relax cflags 89 | 90 | -- Stephen L Arnold Sun, 25 Jul 2021 23:18:23 -0700 91 | 92 | redis-ipc (0.0.2-1ubuntu2) bionic; urgency=medium 93 | 94 | * add missing libltdl-dev (build dep) 95 | 96 | -- Stephen L Arnold Sun, 25 Jul 2021 22:40:31 -0700 97 | 98 | redis-ipc (0.0.2-1ubuntu1) bionic; urgency=medium 99 | 100 | * new upstream release (plus packaging/QA updates) 101 | 102 | -- Stephen L Arnold Sun, 25 Jul 2021 16:36:21 -0700 103 | 104 | redis-ipc (0.0.1-1) trusty; urgency=medium 105 | 106 | * Initial release. 
107 | * https://github.com/VCTLabs/redis-ipc 108 | 109 | -- Steve Arnold (nerdboy) Fri, 10 Mar 2017 23:03:44 -0800 110 | 111 | -------------------------------------------------------------------------------- /debian/compat: -------------------------------------------------------------------------------- 1 | 9 2 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: redis-ipc 2 | Maintainer: Steve Arnold 3 | Section: utils 4 | Priority: optional 5 | Standards-Version: 3.9.8 6 | Build-Depends: debhelper (>= 9), 7 | cmake, 8 | pkg-config, 9 | libhiredis-dev (>= 0.10), 10 | libjson-c-dev (>= 0.9) 11 | 12 | Package: libredis-ipc0 13 | Section: libs 14 | Architecture: any 15 | Multi-Arch: same 16 | Pre-Depends: ${misc:Pre-Depends} 17 | Depends: ${shlibs:Depends}, ${misc:Depends} 18 | Description: small library for using redis server and JSON as IPC mechanism 19 | Provides a high-performance substitute for the more common choice of dbus. 20 | 21 | Package: libredis-ipc-dev 22 | Section: libdevel 23 | Architecture: any 24 | Multi-Arch: same 25 | Depends: libredis-ipc0 (= ${binary:Version}), 26 | ${misc:Depends}, 27 | libhiredis-dev, 28 | libjson-c-dev, 29 | Description: redis-ipc library development files 30 | Provides a high-performance substitute for the more common choice of dbus. 31 | -------------------------------------------------------------------------------- /debian/copyright: -------------------------------------------------------------------------------- 1 | This package was debianized by Stephen Arnold on 2 | Fri, 10 Mar 2017 23:03:44 -0800 3 | 4 | This software is released under the GPL version 2 or later (see the file 5 | LICENSE for details). 
6 | 7 | Files: * 8 | Copyright: 2011-2021 Vanguard Computer Technology Labs 9 | License: GPL-2 10 | 11 | On Debian systems, the complete text of the GNU General 12 | Public License can be found in `/usr/share/common-licenses/GPL-2' 13 | 14 | -------------------------------------------------------------------------------- /debian/libredis-ipc-dev.install: -------------------------------------------------------------------------------- 1 | usr/include/ 2 | usr/lib/*/lib*.so 3 | usr/lib/*/lib*.a 4 | usr/lib/*/cmake/ 5 | usr/share/pkgconfig/ 6 | -------------------------------------------------------------------------------- /debian/libredis-ipc0.install: -------------------------------------------------------------------------------- 1 | usr/lib/*/libredis*.so.0* 2 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | export DH_VERBOSE = 1 3 | export V=1 # verbose mode for make 4 | 5 | export DEB_BUILD_MAINT_OPTIONS = hardening=+all 6 | 7 | include /usr/share/dpkg/architecture.mk 8 | 9 | export DEB_HOST_MULTIARCH 10 | 11 | multiarch_path = $(shell dpkg-architecture -qDEB_HOST_MULTIARCH) 12 | 13 | %: 14 | dh $@ --buildsystem=cmake --builddirectory=build 15 | 16 | override_dh_auto_configure: 17 | dh_auto_configure -- \ 18 | -DBUILD_STATIC_LIBS=ON \ 19 | -DRIPC_BUILD_TESTING=OFF 20 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (quilt) 2 | -------------------------------------------------------------------------------- /debian/source/lintian-overrides: -------------------------------------------------------------------------------- 1 | redis-ipc: python3-depends-but-no-python3-helper 2 | -------------------------------------------------------------------------------- 
/docs/dev/generate_changelog.rst: -------------------------------------------------------------------------------- 1 | Changelog Generation 2 | ==================== 3 | 4 | Changelogs help document important changes. 5 | 6 | To generate a (full) changelog from the repository root, run: 7 | 8 | .. code-block:: bash 9 | 10 | (venv) $ gitchangelog 11 | 12 | We use ``gitchangelog`` to create the changelog automatically. It 13 | examines git commit history and uses custom "filters" to produce its 14 | output. The configurations for this are in the files 15 | ``.gitchangelog.rc`` and ``.gitchangelog-keepachangelog.tpl``. 16 | 17 | To make your changelog even more useful/readable, you should use good 18 | commit messages and consider using the gitchangelog message modifiers. 19 | Since the ``.gitchangelog.rc`` is actually written in Python, it becomes 20 | quite dynamic, thus the configured modifiers and associated documentation 21 | are usually documented in the file itself (unless someone strips out all 22 | the comments). For this config, the message format uses 3 types of 23 | modifier:: 24 | 25 | Message Format 26 | ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...] 27 | 28 | Description 29 | ACTION is one of 'chg', 'fix', 'new' 30 | 31 | Is WHAT the change is about. 32 | 33 | 'chg' is for refactor, small improvement, cosmetic changes... 34 | 'fix' is for bug fixes 35 | 'new' is for new features, big improvement 36 | 37 | AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc' 38 | 39 | Is WHO is concerned by the change. 40 | 41 | 'dev' is for developers (API changes, refactors...) 42 | 'usr' is for final users (UI changes) 43 | 'pkg' is for packagers (packaging changes) 44 | 'test' is for testers (test only related changes) 45 | 'doc' is for doc guys (doc only changes) 46 | 47 | COMMIT_MSG is ... well ... the commit message itself. 48 | 49 | TAGs are additional adjective as 'refactor' 'minor' 'cosmetic' 50 | 51 | They are preceded with a '!' 
or a '@' (prefer the former, as the 52 | latter is wrongly interpreted in github.) Commonly used tags are: 53 | 54 | 'refactor' is obviously for refactoring code only 55 | 'minor' is for a very meaningless change (a typo, adding a comment) 56 | 'cosmetic' is for cosmetic driven change (re-indentation, 80-col...) 57 | 'wip' is for partial functionality but complete subfunctionality. 58 | 59 | Example: 60 | 61 | new: usr: support of bazaar implemented 62 | chg: re-indented some lines !cosmetic 63 | new: dev: updated code to be compatible with last version of killer lib. 64 | fix: pkg: updated year of license coverage. 65 | new: test: added a bunch of test around user usability of feature X. 66 | fix: typo in spelling my name in comment. !minor 67 | 68 | 69 | See the current `.gitchangelog.rc`_ in the repo for more details. 70 | 71 | Read more about ``gitchangelog`` here_. 72 | 73 | .. _.gitchangelog.rc: https://github.com/VCTLabs/redis-ipc/blob/develop/.gitchangelog.rc 74 | .. _here: https://github.com/sarnold/gitchangelog 75 | 76 | 77 | Git Tags 78 | -------- 79 | 80 | Git tags are a way to bookmark commits, and come in two varieties: 81 | lightweight and signed/annotated. Both signed and annotated tags 82 | contain author information and when used they will help organize the 83 | changelog. 84 | 85 | To create an annotated tag for a version ``0.1.1`` release: 86 | 87 | .. code-block:: bash 88 | 89 | $ git tag -a v0.1.1 -m "v0.1.1" 90 | 91 | Using tags like this will break the changelog into sections based on 92 | versions. If you forgot to make a tag you can checkout an old commit 93 | and make the tag (don't forget to adjust the date - you may want to 94 | google this...) 95 | 96 | 97 | Sections 98 | -------- 99 | 100 | The sections in the changelog are created from the git log commit 101 | messages, and are parsed using the regex defined in the 102 | ``.gitchangelog.rc`` configuration file. 
103 | -------------------------------------------------------------------------------- /docs/dev/pre-commit-config.rst: -------------------------------------------------------------------------------- 1 | ================================================== 2 | Contents of the ``.pre-commit-config.yaml`` file 3 | ================================================== 4 | 5 | The file ``.pre-commit-config.yaml`` is used to configure the program 6 | ``pre-commit``, which controls the setup and execution of `Git hooks`_. 7 | 8 | The ``.pre-commit-config.yaml`` file has a list of git repos, each repo may 9 | define one or more hooks. 10 | 11 | In this document we will review the various hooks. Some of the hooks will 12 | modify files, some will not. 13 | 14 | .. _pre-commit: https://pre-commit.com 15 | .. _Git hooks: https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks 16 | 17 | 18 | Hook Descriptions 19 | ================= 20 | 21 | Basic warning checks include: 22 | 23 | * ``check-added-large-files`` 24 | * ``check-case-conflict`` 25 | * ``check-executables-have-shebangs`` 26 | * ``check-shebang-scripts-are-executable`` 27 | * ``check-merge-conflict`` 28 | * ``detect-private-key`` 29 | 30 | 31 | ``end-of-file-fixer`` 32 | --------------------- 33 | 34 | This will modify files by making sure that each file ends in a blank line. 35 | 36 | If a commit fails due to this hook, just commit again. 37 | 38 | 39 | ``trailing-whitespace`` 40 | ----------------------- 41 | 42 | This will modify files by ensuring there is no trailing whitespace on any line. 43 | 44 | If a commit fails due to this hook, just commit again. 45 | 46 | ``mixed-line-ending`` 47 | --------------------- 48 | 49 | This will modify files by ensuring there are no mixed line endings in any file. 50 | 51 | If a commit fails due to this hook, just commit again. 52 | 53 | ``check-yaml`` 54 | -------------- 55 | 56 | This will NOT modify files. It will examine YAML files and report any 57 | issues. 
The rules for its configuration are defined in 58 | ``.pre-commit-config.yaml`` in the ``exclude`` section. 59 | 60 | If a commit fails due to this hook, all reported issues must be manually 61 | fixed before committing again. 62 | 63 | ``cmake-format`` 64 | ---------------- 65 | 66 | This will modify files. It will examine Cmake files and fix some 67 | formatting/style issues. The rules for its configuration are currently 68 | just upstream defaults. 69 | 70 | If a commit fails due to this hook, review the proposed changes in the 71 | console, and check the files using ``git diff ...`` 72 | 73 | ``cpplint`` 74 | ----------- 75 | 76 | This will NOT modify files. It will examine source files and report any 77 | issues. The rules for its configuration are defined in 78 | ``.pre-commit-config.yaml`` in the ``args`` section. 79 | 80 | If a commit fails due to this hook, all reported issues must be manually 81 | fixed before committing again. 82 | 83 | ``beautysh`` 84 | ------------ 85 | 86 | This will modify files. It will examine shell files and fix some 87 | formatting issues. The rules for its configuration are defined in 88 | ``.pre-commit-config.yaml`` in the ``args`` section. 89 | 90 | If a commit fails due to this hook, review the proposed changes in the 91 | console, and check the files using ``git diff ...`` 92 | 93 | Doc formatting (.rst files) 94 | --------------------------- 95 | 96 | * doc8 97 | * pygrep 98 | 99 | - rst-backticks 100 | - rst-directive-colons 101 | - rst-inline-touching-normal 102 | 103 | These checks will NOT modify files. They will examine all RST files 104 | (except ChangeLog.rst) and report any issues. 105 | 106 | If a commit fails due to any of these hooks, all reported issues must be 107 | manually fixed before committing again. 
108 | -------------------------------------------------------------------------------- /docs/dev/pre-commit-usage.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | Using Pre-Commit 3 | ================== 4 | 5 | `pre-commit`_ is a program used to configure and run Git hooks. These 6 | hooks can be triggered in different Git stages, though typically we use 7 | them in only commit and push stages. 8 | 9 | See the `pre-commit config contents`_ document for descriptions of the 10 | current hooks. 11 | 12 | Each of the hooks will run in its own small virtual environment. 13 | 14 | .. _pre-commit: https://pre-commit.com 15 | .. _pre-commit config contents: pre-commit-config.rst 16 | 17 | 18 | Setup 19 | ----- 20 | 21 | The program must be installed and the hooks must be configured. The 22 | program should be installed in your usual virtual environment, for 23 | example, "venv" (this could also be a conda environment). 24 | 25 | After activating your environment, run the following commands: 26 | 27 | .. code-block:: bash 28 | 29 | (venv) $ pip install pre-commit 30 | (venv) $ pre-commit autoupdate 31 | (venv) $ pre-commit install 32 | (venv) $ pre-commit install-hooks 33 | 34 | 35 | Automatic Usage 36 | --------------- 37 | 38 | In normal usage, ``pre-commit`` will trigger with every ``git commit`` 39 | and every ``git push``. The hooks that trigger in each stage can be 40 | configured by editing the ``.pre-commit-config.yaml`` file. The files 41 | that have changed will be passed to the various hooks before the git 42 | operation completes. If one of the hooks exits with a non-zero 43 | exit-code, then the commit (or push) will fail. 44 | 45 | Manual Usage 46 | ------------ 47 | 48 | To manually trigger ``pre-commit`` to run all hooks on CHANGED files: 49 | 50 | .. 
code-block:: bash 51 | 52 | (venv) $ pre-commit run 53 | 54 | To manually trigger ``pre-commit`` to run all hooks on ALL files, 55 | regardless if they are changed or not: 56 | 57 | .. code-block:: bash 58 | 59 | (venv) $ pre-commit run --all-files 60 | 61 | To manually trigger ``pre-commit`` to run a single hook on changed files: 62 | 63 | .. code-block:: bash 64 | 65 | (venv) $ pre-commit run 66 | 67 | To manually trigger ``pre-commit`` to run a single hook on all files: 68 | 69 | .. code-block:: bash 70 | 71 | (venv) $ pre-commit run --all-files 72 | 73 | For example, to run ``cpplint`` on all files: 74 | 75 | .. code-block:: bash 76 | 77 | (venv) $ pre-commit run cpplint --all-files 78 | -------------------------------------------------------------------------------- /environment.devenv.yml: -------------------------------------------------------------------------------- 1 | name: redis-ipc-test 2 | 3 | dependencies: 4 | - python ={{ get_env("PY_VER", default="3.7") }} 5 | - cmake>=3.18 6 | - ninja 7 | - c-compiler 8 | - cxx-compiler 9 | - pip 10 | - make # [unix] 11 | - pkg-config # [unix] 12 | - json-c=0.15 13 | - libhiredis=1.0.0 14 | - lcov 15 | - gcovr 16 | -------------------------------------------------------------------------------- /gcovr.cfg: -------------------------------------------------------------------------------- 1 | # Only show coverage for files included via regex 2 | filter = (.+/)?redis_ipc\.c$ 3 | filter = (.+/)?json\.hh$ 4 | 5 | # report options - somehow this does not respect the above filters 6 | #html = yes 7 | #html-details = yes 8 | #output = coverage/coverage.html 9 | -------------------------------------------------------------------------------- /inc/json.hh: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2011-2021 Vanguard Computer Technology Labs 2 | // 3 | // SPDX-License-Identifier: GPL-2.0-only 4 | 5 | #ifndef __JSON_HH__ 6 | #define __JSON_HH__ 7 | 8 | #include 9 | 
#include 10 | #include 11 | 12 | class json_parse_failure : public std::runtime_error 13 | { 14 | public: 15 | explicit json_parse_failure(const std::string &bad_text) : std::runtime_error("failed to parse text=" + bad_text) {} 16 | }; 17 | 18 | class json_missing_field : public std::runtime_error 19 | { 20 | public: 21 | explicit json_missing_field(const std::string &field_name) : std::runtime_error(field_name + " field is missing") {} 22 | }; 23 | 24 | class json { 25 | public: 26 | // normally want to take reference on underlying json_object*, 27 | // except for case of initializing from an existing raw json_object* 28 | // such as those returned by redis_ipc -- those start out with a reference 29 | explicit json(bool is_array = false) { 30 | if (is_array) obj = json_object_new_array(); 31 | else 32 | obj = json_object_new_object(); 33 | } 34 | json(const json ©) { obj = copy.obj; json_object_get(obj); } 35 | explicit json(const char *json_text) { 36 | if (json_text) obj = json_tokener_parse(json_text); 37 | if (obj == NULL) throw json_parse_failure(json_text); 38 | } 39 | // NOTE -- this constructor takes over ownership of raw json_object, 40 | // do *NOT* manually zero out reference count on parameter object 41 | // with json_object_put() 42 | explicit json(json_object *c_obj) : obj(c_obj) { 43 | if (obj == NULL) obj = json_object_new_object(); 44 | } 45 | // release reference on underlying json_object*, 46 | // if this was last reference it will get freed 47 | ~json() { json_object_put(obj); } 48 | 49 | json& operator=(const json ©) { 50 | if (obj != NULL) { 51 | json_object_put(obj); 52 | } 53 | obj = copy.obj; 54 | json_object_get(obj); 55 | return *this; 56 | } 57 | 58 | bool operator==(const json &other) { 59 | bool is_same = json_object_equal(this->obj, other.obj); 60 | return is_same; 61 | } 62 | 63 | bool operator!=(const json &other) { 64 | bool is_same = json_object_equal(this->obj, other.obj); 65 | return !is_same; 66 | } 67 | 68 | std::string 
dump(void) const { 69 | std::string repr = json_object_to_json_string_ext(obj, JSON_C_TO_STRING_PRETTY); 70 | return repr; 71 | } 72 | 73 | std::string to_string() const { 74 | std::string value; 75 | if (obj) value = json_object_get_string(obj); 76 | // use empty string to represent empty object rather than '{ }' 77 | if (value == std::string("{ }")) value = std::string(""); 78 | return value; 79 | } 80 | 81 | double to_double() const { 82 | double value = -1; 83 | if (obj) value = json_object_get_double(obj); 84 | return value; 85 | } 86 | 87 | int to_int() const { 88 | int value = -1; 89 | if (obj) value = json_object_get_int(obj); 90 | return value; 91 | } 92 | 93 | bool to_bool() const { 94 | bool value = false; 95 | if (obj) value = json_object_get_int(obj); 96 | return value; 97 | } 98 | 99 | json_object * to_json_c_obj() { return obj; } 100 | const json_object * to_json_c_obj() const { return obj; } 101 | 102 | bool has_field(const char *field_name) { 103 | bool field_present = false; 104 | if (json_object_is_type(obj, json_type_object) && 105 | json_object_object_get(obj, field_name)) 106 | { 107 | field_present = true; 108 | } 109 | return field_present; 110 | } 111 | 112 | json get_field(const char *field_name) { 113 | if (!json_object_is_type(obj, json_type_object)) 114 | throw std::runtime_error("Not a hash-type object!"); 115 | json_object *field_obj = json_object_object_get(obj, field_name); 116 | if (field_obj == NULL) throw json_missing_field(field_name); 117 | json_object_get(field_obj); 118 | return json(field_obj); 119 | } 120 | 121 | const json get_field(const char *field_name) const { 122 | if (!json_object_is_type(obj, json_type_object)) 123 | throw std::runtime_error("Not a hash-type object!"); 124 | json_object *field_obj = json_object_object_get(obj, field_name); 125 | if (field_obj == NULL) throw json_missing_field(field_name); 126 | json_object_get(field_obj); 127 | return json(field_obj); 128 | } 129 | 130 | void set_field(const char 
*field_name, const std::string &value) { 131 | if (!json_object_is_type(obj, json_type_object)) 132 | throw std::runtime_error("Not a hash-type object!"); 133 | json_object *string_obj = json_object_new_string(value.c_str()); 134 | json_object_object_add(obj, field_name, string_obj); 135 | } 136 | 137 | void set_field(const char *field_name, const char *value) { 138 | if (!json_object_is_type(obj, json_type_object)) 139 | throw std::runtime_error("Not a hash-type object!"); 140 | json_object *string_obj = json_object_new_string(value); 141 | json_object_object_add(obj, field_name, string_obj); 142 | } 143 | 144 | void set_field(const char *field_name, const double &value) { 145 | if (!json_object_is_type(obj, json_type_object)) 146 | throw std::runtime_error("Not a hash-type object!"); 147 | json_object *double_obj = json_object_new_double(value); 148 | json_object_object_add(obj, field_name, double_obj); 149 | } 150 | 151 | void set_field(const char *field_name, const int &value) { 152 | if (!json_object_is_type(obj, json_type_object)) 153 | throw std::runtime_error("Not a hash-type object!"); 154 | json_object *int_obj = json_object_new_int(value); 155 | json_object_object_add(obj, field_name, int_obj); 156 | } 157 | 158 | void set_field(const char *field_name, const bool &value) { 159 | if (!json_object_is_type(obj, json_type_object)) 160 | throw std::runtime_error("Not a hash-type object!"); 161 | json_object *bool_obj = json_object_new_boolean(value); 162 | json_object_object_add(obj, field_name, bool_obj); 163 | } 164 | 165 | void set_field(const char *field_name, const json &value) { 166 | if (!json_object_is_type(obj, json_type_object)) 167 | throw std::runtime_error("Not a hash-type object!"); 168 | // take extra reference so that value object will not be destroyed 169 | // just because this object gets destroyed (add does not bump reference count) 170 | json_object_get(value.obj); 171 | json_object_object_add(obj, field_name, value.obj); 172 | } 173 | 174 
| json get_element(int idx) { 175 | if (!json_object_is_type(obj, json_type_array)) 176 | throw std::runtime_error("Not an array-type object!"); 177 | json_object *element_obj = json_object_array_get_idx(obj, idx); 178 | if (element_obj == NULL) throw std::runtime_error("No such element!"); 179 | json_object_get(element_obj); 180 | return json(element_obj); 181 | } 182 | 183 | const json get_element(int idx) const { 184 | if (!json_object_is_type(obj, json_type_array)) 185 | throw std::runtime_error("Not an array-type object!"); 186 | json_object *element_obj = json_object_array_get_idx(obj, idx); 187 | if (element_obj == NULL) throw std::runtime_error("No such element!"); 188 | json_object_get(element_obj); 189 | return json(element_obj); 190 | } 191 | 192 | void set_element(int idx, const std::string &value) { 193 | if (!json_object_is_type(obj, json_type_array)) 194 | throw std::runtime_error("Not an array-type object!"); 195 | json_object *string_obj = json_object_new_string(value.c_str()); 196 | json_object_array_put_idx(obj, idx, string_obj); 197 | } 198 | 199 | void set_element(int idx, const char *value) { 200 | if (!json_object_is_type(obj, json_type_array)) 201 | throw std::runtime_error("Not an array-type object!"); 202 | json_object *string_obj = json_object_new_string(value); 203 | json_object_array_put_idx(obj, idx, string_obj); 204 | } 205 | 206 | void set_element(int idx, const int &value) { 207 | if (!json_object_is_type(obj, json_type_array)) 208 | throw std::runtime_error("Not an array-type object!"); 209 | json_object *int_obj = json_object_new_int(value); 210 | json_object_array_put_idx(obj, idx, int_obj); 211 | } 212 | 213 | void set_element(int idx, const bool &value) { 214 | if (!json_object_is_type(obj, json_type_array)) 215 | throw std::runtime_error("Not an array-type object!"); 216 | json_object *bool_obj = json_object_new_boolean(value); 217 | json_object_array_put_idx(obj, idx, bool_obj); 218 | } 219 | 220 | // caller must make sure 
the 'value' object stays alive while it 221 | // is still used as a field 222 | void set_element(int idx, const json &value) { 223 | if (!json_object_is_type(obj, json_type_array)) 224 | throw std::runtime_error("Not an array-type object!"); 225 | json_object_array_put_idx(obj, idx, value.obj); 226 | } 227 | 228 | private: 229 | json_object *obj; 230 | }; 231 | 232 | #endif // __JSON_HH__ 233 | -------------------------------------------------------------------------------- /redis-ipc.pc.in: -------------------------------------------------------------------------------- 1 | prefix=@prefix@ 2 | exec_prefix=${prefix} 3 | libdir=${prefix}/lib 4 | includedir=${prefix}/include 5 | 6 | Name: redis-ipc 7 | Description: redis-ipc is an advanced IPC client using redis 8 | Version: @PACKAGE_VERSION@ 9 | Cflags: -std=c++11 -pthread -I${includedir} 10 | Libs: -pthread -L${libdir} -lredis_ipc 11 | -------------------------------------------------------------------------------- /requirements-sync.txt: -------------------------------------------------------------------------------- 1 | # tool requirements, useful for tox/pip/git 2 | #gitchangelog @ https://github.com/sarnold/gitchangelog/releases/download/3.1.2/gitchangelog-3.1.2-py3-none-any.whl 3 | repolite @ https://github.com/sarnold/repolite/releases/download/0.4.0/repolite-0.4.0-py3-none-any.whl 4 | -------------------------------------------------------------------------------- /scripts/fix_pkg_name.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # This fixes package name="" in coverage.xml or another coverage filename 4 | # as the only optional argument: ./fix_pkg_name.sh other-name.xml 5 | # We default to grepping pkg name from (python) setup.cfg 6 | # otherwise you should set the REAL_NAME environment override, eg: 7 | # 8 | # REAL_NAME="re2" ./fix_pkg_name.sh 9 | # 10 | # or export it first in your shell env. 
11 | 12 | set -euo pipefail 13 | 14 | failures=0 15 | trap 'failures=$((failures+1))' ERR 16 | 17 | COV_FILE=${1:-coverage.xml} 18 | REAL_NAME=${REAL_NAME:-""} 19 | VERBOSE="false" # set to "true" for extra output 20 | 21 | NAME_CHECK=$(grep -o 'name=""' "${COV_FILE}" || true) 22 | 23 | # extra fix for autotools ?? 24 | sed -i -e "s|src..libs|src|" $COV_FILE 25 | 26 | [[ -z "$NAME_CHECK" ]] && echo "No name to fix ..." && exit 0 27 | [[ -n $REAL_NAME ]] || REAL_NAME=$(grep ^name setup.cfg | cut -d" " -f3) 28 | [[ -n $REAL_NAME ]] && sed -i -e "s|name=\"\"|name=\"${REAL_NAME}\"|" $COV_FILE 29 | [[ -n $REAL_NAME ]] && echo "Replaced \"\" with ${REAL_NAME} in ${COV_FILE} ..." 30 | 31 | if ((failures != 0)); then 32 | echo "Something went wrong !!!" 33 | exit 1 34 | fi 35 | -------------------------------------------------------------------------------- /scripts/run_redis.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # this runs a local redis-server and accepts start|stop|status args; 4 | # we use this in tox pre/post commands to run tests that require 5 | # a redis server listening on the redis-ipc socket 6 | # note: the default command is status 7 | 8 | set -euo pipefail 9 | 10 | failures=0 11 | trap 'failures=$((failures+1))' ERR 12 | 13 | CMD_ARG=${1:-status} 14 | PORT=${PORT:-0} 15 | VERBOSE="false" # set to "true" for extra output 16 | export RIPC_RUNTIME_DIR=${RIPC_RUNTIME_DIR:-/tmp/redis-ipc} 17 | 18 | echo "Using socket runtime dir: ${RIPC_RUNTIME_DIR}" 19 | 20 | if [[ "${CMD_ARG}" = "status" ]]; then 21 | [[ "${VERBOSE}" = "true" ]] && echo "pinging redis-server on local socket..." 22 | redis-cli -s ${RIPC_RUNTIME_DIR}/socket ping 23 | fi 24 | 25 | if [[ "${CMD_ARG}" = "start" ]]; then 26 | [[ "${VERBOSE}" = "true" ]] && echo "starting redis-server on local socket..." 
27 | mkdir -p ${RIPC_RUNTIME_DIR} 28 | redis-server --port ${PORT} --pidfile ${RIPC_RUNTIME_DIR}/redis.pid --unixsocket ${RIPC_RUNTIME_DIR}/socket --unixsocketperm 600 & 29 | sleep 1 30 | redis-cli -s ${RIPC_RUNTIME_DIR}/socket config set save "" 31 | fi 32 | 33 | if [[ "${CMD_ARG}" = "stop" ]]; then 34 | [[ "${VERBOSE}" = "true" ]] && echo "killing redis-server on local socket in 1 sec..." 35 | sleep 1 36 | cat ${RIPC_RUNTIME_DIR}/redis.pid | xargs kill 37 | fi 38 | 39 | if ((failures == 0)); then 40 | echo "Success" 41 | else 42 | echo "Something went wrong" 43 | exit 1 44 | fi 45 | -------------------------------------------------------------------------------- /src/Makefile.am: -------------------------------------------------------------------------------- 1 | if RIPC_RUNTIME_DIR 2 | PATH_FLAG = -DRIPC_RUNTIME_DIR=\"$(RIPC_RUNTIME_DIR)\" 3 | endif 4 | 5 | AM_CPPFLAGS = -I$(top_srcdir)/src $(PATH_FLAG) 6 | 7 | lib_LTLIBRARIES = libredis_ipc.la 8 | 9 | libredis_ipc_la_SOURCES = redis_ipc.c 10 | include_HEADERS = redis_ipc.h 11 | 12 | libredis_ipc_la_CPPFLAGS = -I$(includedir) 13 | libredis_ipc_la_CFLAGS = -Wall -std=gnu99 $(HIREDIS_CFLAGS) $(JSONC_CFLAGS) 14 | libredis_ipc_la_LIBADD = $(HIREDIS_LIBS) $(JSONC_LIBS) 15 | -------------------------------------------------------------------------------- /src/redis_ipc.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2011-2021 Vanguard Computer Technology Labs 2 | // 3 | // SPDX-License-Identifier: GPL-2.0-only 4 | // LINT_C_FILE 5 | 6 | #ifndef __REDIS_IPC_H__ 7 | #define __REDIS_IPC_H__ 8 | 9 | #include 10 | #include 11 | 12 | #ifdef __cplusplus 13 | extern "C" { 14 | #endif 15 | 16 | 17 | //************* 18 | // constants 19 | //************* 20 | 21 | // debug levels for redis_ipc_send_debug() 22 | enum { RIPC_DBG_ERROR, 23 | RIPC_DBG_ALERT, 24 | RIPC_DBG_WARN, 25 | RIPC_DBG_INFO, 26 | RIPC_DBG_NOISY, 27 | RIPC_DBG_EXTRA_NOISY }; 28 | 29 | // return codes > 0 
are also OK 30 | #define RIPC_OK 0 31 | #define RIPC_FAIL (-1) 32 | 33 | // location of config file 34 | // #define RIPC_CONF_PATH "/etc/redis_ipc.conf" 35 | #define RIPC_CONF_PATH "./redis_ipc.conf" 36 | 37 | //*********** things that might move to config file when it is implemented 38 | 39 | // max length of formatted debug message (will be truncated if longer) 40 | #define RIPC_MAX_DEBUG_LEN 1024 41 | 42 | // max length of constructed redis key name 43 | #define RIPC_MAX_IPC_PATH_LEN 128 44 | 45 | // redis server address 46 | #ifdef RIPC_RUNTIME_DIR 47 | #pragma message "RIPC_RUNTIME_DIR was set" 48 | #else 49 | #pragma message "RIPC_RUNTIME_DIR was *not* set" 50 | #define RIPC_RUNTIME_DIR "/tmp/redis-ipc" 51 | #endif 52 | #define RIPC_SERVER_PATH RIPC_RUNTIME_DIR "/socket" 53 | 54 | //*********** defaults for settings that can be optionally configured via functions 55 | 56 | // default verbosity level for debug channel messages 57 | #define RIPC_DEFAULT_DEBUG_VERBOSITY 5 58 | 59 | // default stderr debug messages enabled 60 | #define RIPC_DEFAULT_STDERR_DEBUG 1 61 | 62 | // component allowed to write settings (or "*" if any component is allowed) 63 | #define RIPC_COMPONENT_ANY "*" 64 | #define RIPC_DEFAULT_SETTINGS_WRITER "db" 65 | // #define RIPC_DEFAULT_SETTINGS_WRITER RIPC_COMPONENT_ANY 66 | 67 | //************* 68 | // functions 69 | //************* 70 | 71 | // When a function returns an int, return value RIPC_OK indicates success; 72 | // when a function returns a json_object*, non-null return indicates success. 73 | 74 | // NOTE: whenever a json_object* is used as a return value or a parameter, 75 | // the caller is responsible for cleaning it up with json_object_put(). 76 | // This cleanup function is safe to call on a NULL pointer. 77 | // When a char* is used as a return value, caller is responsible for 78 | // cleaning it up with free(). 
79 | 80 | // Init should be called from each thread that will be doing redis IPC, 81 | // with all threads getting same component name but unique thread name. 82 | // Cleanup is provided to prevent a slow leak from turnover of short-lived 83 | // threads that each call init to allocate a few per-thread variables. 84 | // 85 | // The thread name should indicate the purpose of the thread and be 86 | // predictable, e.g. live-http-worker-3, rather than randomly generated or 87 | // based on TID number, because it will be used to generate name of results 88 | // queue -- and we want same results queues to be re-used if component gets 89 | // restarted (avoid having to garbage-collect stale, abandoned redis queues) 90 | // 91 | // Global redis-ipc settings are normally cached in each thread during 92 | // init, but threads can explicitly ask for a config reload if desired 93 | // (to pick up changes some other thread made to global settings). 94 | // 95 | // Cleanup is done based on thread ID so that main thread can clean up for 96 | // terminated threads, as long as it is tracking their IDs. For a 97 | // single-threaded process, tid == pid. 98 | 99 | int redis_ipc_init(const char *this_component, const char *this_thread); 100 | void redis_ipc_config_load(void); 101 | int redis_ipc_cleanup(pid_t tid); 102 | 103 | 104 | // A component can send a command to any other component, 105 | // but it can only receive commands from its own command queue(s). 106 | // 107 | // A component sending a command should supply NULL as subqueue unless 108 | // it has multiple command queues defined in redis_ipc.conf, 109 | // in which case it should supply one of those queues. The same goes 110 | // for a component waiting for commands -- if it only has one command 111 | // queue, subqueue should be NULL. 112 | // 113 | // The timeout field is in seconds, or use zero to indicate no timeout 114 | // for blocking. 
115 | // 116 | // When submitting a command, the name of submitter's results queue 117 | // and a unique command ID will automatically be inserted into the command 118 | // as fields "results_queue" and "command_id" respectively. 119 | // 120 | // The submitter will block on its results until result has been received 121 | // or timeout has been exceeded. If a result with _different_ command ID 122 | // is received, it will be logged as an error to submitter component's 123 | // debug channel then freed, and the wait on results queue will restart 124 | // (expiration time will be reset to original). 125 | 126 | int get_debug_verbosity(); 127 | int stderr_debug_is_enabled(); 128 | 129 | // After receiving and executing a command, the receiving component 130 | // should submit a result. The original command is passed in as a parameter 131 | // to provide the command ID and path to results queue. The command ID 132 | // will automatically be added to the result object before pushing it to the 133 | // result queue so that the command submitter will only get back the 134 | // expected result (as opposed to a stale result, belonging to a command that 135 | // took so long that submitter timed out before seeing the result). 136 | 137 | json_object * redis_ipc_send_command_blocking(const char *dest_component, 138 | const char *subqueue, 139 | json_object *command, 140 | unsigned int timeout); 141 | json_object * redis_ipc_receive_command_blocking(const char *subqueue, 142 | unsigned int timeout); 143 | int redis_ipc_send_result(const json_object *completed_command, json_object *result); 144 | 145 | 146 | // A component can only write a setting if it has been authorized, 147 | // but it can read any setting. 148 | // 149 | // Each setting is a set of fields that are key-value pairs, where both field name and 150 | // values are stored as strings (of course values could be JSON text if needed). 
151 | // 152 | // A setting can be written or read in its entirety using the first pair of functions: 153 | // when writing, the JSON object parameter can hold all the component's setting fields 154 | // as key-value pairs (although it would also work with a smaller set of fields); 155 | // when reading, the JSON object returned *will* hold all existing setting fields. 156 | // 157 | // A setting can also be written or read a single field at a time using the second 158 | // pair of functions, which works on strings rather than JSON hashes. If multiple setting 159 | // fields are being accessed, consider accessing the full thing rather than multiple 160 | // calls to individual fields -- it may be more efficient. 161 | 162 | int redis_ipc_write_setting(const char *owner_component, const json_object *fields); 163 | json_object * redis_ipc_read_setting(const char *owner_component); 164 | int redis_ipc_write_setting_field(const char *owner_component, const char *field_name, 165 | const char *field_value); 166 | char * redis_ipc_read_setting_field(const char *owner_component, const char *field_name); 167 | 168 | 169 | // A component can only write its own status, 170 | // but it can read any status. 171 | // 172 | // Each status is a set of fields that are key-value pairs, where both field name and 173 | // values are stored as strings (of course values could be JSON text if needed). 174 | // 175 | // A status can be written or read in its entirety using the first pair of functions: 176 | // when writing, the JSON object parameter can hold all the component's status fields 177 | // as key-value pairs (although it would also work with a smaller set of fields); 178 | // when reading, the JSON object returned *will* hold all existing status fields. 179 | // 180 | // A status can also be written or read a single field at a time using the second 181 | // pair of functions, which works on strings rather than JSON hashes. 
If multiple status 182 | // fields are being accessed, consider accessing the full thing rather than multiple 183 | // calls to individual fields -- it may be more efficient. 184 | 185 | int redis_ipc_write_status(const json_object *fields); 186 | json_object * redis_ipc_read_status(const char *owner_component); 187 | int redis_ipc_write_status_field(const char *field_name, const char *field_value); 188 | const char * redis_ipc_read_status_field(const char *owner_component, const char *field_name); 189 | 190 | 191 | // Each component can only send event messages to its own event channel(s), 192 | // but can subscribe to any (or all) event channels. 193 | // 194 | // A component sending an event should supply NULL as subchannel unless 195 | // it has multiple subchannels defined in redis_ipc.conf, 196 | // in which case it should supply one of those subchannels. 197 | // 198 | // A component subscribing to events can use NULL as component parameter 199 | // to watch all event channels for all components. Or, to watch all events 200 | // from a single component, use NULL for subchannel (as mentioned above, 201 | // not all components even have subchannels). 202 | // 203 | // The send function will automatically append the following fields, 204 | // plus the standard ones (timestamp, etc) to the event: 205 | // channel (full channel name) 206 | // 207 | // The unsubscribe function stops watching all event channels. 208 | 209 | int redis_ipc_send_event(const char *subchannel, json_object *message); 210 | int redis_ipc_subscribe_events(const char *component, const char *subchannel); 211 | int redis_ipc_unsubscribe_events(void); 212 | 213 | 214 | // Each component will send debug messages to its own debug channel, 215 | // but can subscribe to any (or all) debug channels. Use NULL as 216 | // component to subscribe to all debug channels. 
217 | // 218 | // Debug send can serve as drop-in replacement for printf-style logging, 219 | // which is why it doesn't take a JSON object. The send function internally 220 | // generates a json object containing following fields, plus the standard 221 | // ones (timestamp, etc) 222 | // message 223 | // level 224 | // channel (full channel name) 225 | // 226 | // When sending debug messages, low debug level indicates 227 | // high message priority, since only messages of lower or equal level 228 | // to configured component debug verbosity will actually get sent. 229 | // 230 | // The unsubscribe function stops watching all debug channels. 231 | 232 | int redis_ipc_send_debug(unsigned int debug_level, const char *format, ...); 233 | int redis_ipc_subscribe_debug(const char *component); 234 | int redis_ipc_unsubscribe_debug(void); 235 | 236 | // Each component can receive notifications for changes to its own settings. 237 | // These notifications are generated by the redis server 238 | // https://redis.io/docs/manual/keyspace-notifications/ 239 | // 240 | // The unsubscribe function stops receiving notifications for settings changes. 241 | int redis_ipc_subscribe_setting_notifications(void); 242 | int redis_ipc_unsubscribe_setting_notifications(void); 243 | 244 | // This function is the counterpart to both redis_ipc_send_event() 245 | // and redis_ipc_send_debug() because a received message can come from any 246 | // subscribed channel. This also will receive setting notifications as well 247 | // as events or debug messages. 248 | // 249 | // At least one type of message should have been subscribed before listening. 
250 | 251 | json_object * redis_ipc_get_message_timeout(struct timeval timeout); 252 | json_object * redis_ipc_get_message_blocking(void); 253 | 254 | // Functions for configuring behavior of redis-ipc itself 255 | // see RIPC_DEFAULT_* definitions for default values if not called 256 | 257 | // configure verbosity level for debug channel 258 | int redis_ipc_config_debug_verbosity(int verbosity); 259 | 260 | // configure whether debug messages will be shown on stderr 261 | int redis_ipc_config_stderr_debug(int enable_stderr); 262 | 263 | // configure which component will be authorized to write settings 264 | // (RIPC_COMPONENT_ANY works as wildcard for "any") 265 | int redis_ipc_config_settings_writer(const char *writer_component); 266 | 267 | #ifdef __cplusplus 268 | } 269 | #endif 270 | 271 | #endif // __REDIS_IPC_H__ 272 | -------------------------------------------------------------------------------- /test/Makefile.am: -------------------------------------------------------------------------------- 1 | if RIPC_RUNTIME_DIR 2 | PATH_FLAG = -DRIPC_RUNTIME_DIR=\"$(RIPC_RUNTIME_DIR)\" 3 | endif 4 | 5 | AM_CPPFLAGS = -I$(top_srcdir)/inc -I$(top_srcdir)/src -I$(includedir) $(PATH_FLAG) 6 | 7 | LIBREDISIPC = $(top_srcdir)/src/.libs/libredis_ipc.la 8 | 9 | BUILT_TESTS = command_result_test \ 10 | json_test \ 11 | multithread_test\ 12 | settings_status_test \ 13 | pub_sub_test 14 | 15 | #TESTS = $(BUILT_TESTS) tests/runtests.sh 16 | TESTS = $(check_PROGRAMS) 17 | check_PROGRAMS = $(BUILT_TESTS) 18 | include_HEADERS = $(top_srcdir)/inc/json.hh 19 | 20 | command_result_test_SOURCES = command_result_test.c 21 | command_result_test_LDADD = -lhiredis -ljson-c $(LIBREDISIPC) -lpthread 22 | command_result_test_LDFLAGS = -Wl,--hash-style=gnu -no-install 23 | 24 | json_test_SOURCES = json_test.cpp 25 | json_test_LDADD = -lhiredis -ljson-c $(LIBREDISIPC) -lpthread 26 | json_test_LDFLAGS = -Wl,--hash-style=gnu -no-install 27 | 28 | multithread_test_SOURCES = multithread_test.c 29 
| multithread_test_LDADD = -lhiredis -ljson-c $(LIBREDISIPC) -lpthread 30 | multithread_test_LDFLAGS = -Wl,--hash-style=gnu -no-install 31 | 32 | pub_sub_test_SOURCES = pub_sub_test.c 33 | pub_sub_test_LDADD = -lhiredis -ljson-c $(LIBREDISIPC) -lpthread 34 | pub_sub_test_LDFLAGS = -Wl,--hash-style=gnu -no-install 35 | 36 | settings_status_test_SOURCES = settings_status_test.c 37 | settings_status_test_LDADD = -lhiredis -ljson-c $(LIBREDISIPC) -lpthread 38 | settings_status_test_LDFLAGS = -Wl,--hash-style=gnu -no-install 39 | -------------------------------------------------------------------------------- /test/README.rst: -------------------------------------------------------------------------------- 1 | examples 2 | ======== 3 | 4 | The following are currently more "ultra-simple examples" than tests, 5 | in that they mostly don't catch errors to return failures. 6 | 7 | In order to observe these examples working, you need to have a redis server 8 | configured with a unix socket (see toplevel README). 
9 | 10 | multithread_test.c 11 | demo debug messages from two different threads, 12 | can use 'redis-cli -s /tmp/redis-ipc/socket monitor' to view them 13 | 14 | command_result_test.c : 15 | demo command queues 16 | 17 | pub_sub_test.c : 18 | demo events (including "debug" events with priority level) 19 | 20 | settings_status_test.c : 21 | demo settings and status (status is like settings but only owner component 22 | can write them) 23 | -------------------------------------------------------------------------------- /test/command_result_test.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include "redis_ipc.h" 6 | 7 | void spawn_command_handler_process(void) 8 | { 9 | json_object *command = NULL, *result = NULL; 10 | pid_t pid = fork(); 11 | 12 | // parent waits for child to start listening, then returns 13 | if (pid > 0) 14 | { 15 | sleep(1); 16 | return; 17 | } 18 | 19 | // child continues on 20 | redis_ipc_init("streaming", "recorder"); 21 | 22 | command = redis_ipc_receive_command_blocking("video", 0); 23 | 24 | result = json_object_new_object(); 25 | json_object_object_add(result, "code", 26 | json_object_new_int(-2)); 27 | json_object_object_add(result, "message", 28 | json_object_new_string("Next time, say 'please'")); 29 | redis_ipc_send_result(command, result); 30 | json_object_put(command); 31 | json_object_put(result); 32 | 33 | redis_ipc_cleanup(getpid()); 34 | 35 | _exit(0); 36 | } 37 | 38 | int main(int argc, char **argv) 39 | { 40 | json_object *command = NULL, *result = NULL; 41 | int timeout = 10; 42 | int child_status = -1; 43 | 44 | spawn_command_handler_process(); 45 | 46 | redis_ipc_init("web", "requestor"); 47 | 48 | command = json_object_new_object(); 49 | json_object_object_add(command, "method", 50 | json_object_new_string("control_recording")); 51 | json_object_object_add(command, "params", 52 | json_object_new_string("start")); 53 | result = 
redis_ipc_send_command_blocking("streaming", "video", command, timeout); 54 | json_object_put(command); 55 | 56 | if (result != NULL) 57 | { 58 | printf("Received result: %s\n", json_object_to_json_string(result)); 59 | json_object_put(result); 60 | } 61 | else 62 | { 63 | printf("Timed out waiting for result\n"); 64 | return 42; 65 | } 66 | 67 | redis_ipc_cleanup(getpid()); 68 | wait(&child_status); 69 | 70 | return 0; 71 | } 72 | -------------------------------------------------------------------------------- /test/command_result_test.out: -------------------------------------------------------------------------------- 1 | (streaming) CONFIG SET notify-keyspace-events Kh 2 | (streaming) BLPOP queues.commands.streaming.video 0 3 | (web) CONFIG SET notify-keyspace-events Kh 4 | (web) RPUSH queues.commands.streaming.video { "method": "control_recording", "params": "start", "results_queue": "queues.results.web.requestor", "command_id": "web-requestor-18824-0", "timestamp": "1668592134.787857", "component": "web", "thread": "requestor", "tid": 18824 } 5 | (web) BLPOP queues.results.web.requestor 10 6 | (streaming) [ENTRY:queues.commands.streaming.video] { "method": "control_recording", "params": "start", "results_queue": "queues.results.web.requestor", "command_id": "web-requestor-18824-0", "timestamp": "1668592134.787857", "component": "web", "thread": "requestor", "tid": 18824 } 7 | (streaming) RPUSH queues.results.web.requestor { "code": -2, "message": "Next time, say 'please'", "command_id": "web-requestor-18824-0", "timestamp": "1668592134.788300", "component": "streaming", "thread": "recorder", "tid": 18825 } 8 | (web) [ENTRY:queues.results.web.requestor] { "code": -2, "message": "Next time, say 'please'", "command_id": "web-requestor-18824-0", "timestamp": "1668592134.788300", "component": "streaming", "thread": "recorder", "tid": 18825 } 9 | Received result: { "code": -2, "message": "Next time, say 'please'", "command_id": "web-requestor-18824-0", "timestamp": 
"1668592134.788300", "component": "streaming", "thread": "recorder", "tid": 18825 } 10 | -------------------------------------------------------------------------------- /test/json_test.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "json.hh" 3 | 4 | using namespace std; 5 | 6 | int main(int argc, char **argv) 7 | { 8 | json_object *hum = json_object_new_object(); 9 | json_object *de = json_object_new_object(); 10 | 11 | json snook(hum), took(de); 12 | // snook and took should be holding references now, let go of original refs 13 | took.set_field("baggins", 9999); 14 | took.set_field("bilbo", "hungry"); 15 | snook.set_field("betook", took); 16 | { 17 | json wooka = snook; 18 | cout << wooka.get_field("betook").get_field("baggins").to_string() << endl; 19 | } 20 | snook.set_field("blueness", 1.5e9); 21 | cout << "Display individual fields..." << endl; 22 | cout << snook.get_field("betook").get_field("baggins").to_string() << endl; 23 | cout << snook.get_field("betook").get_field("bilbo").to_string() << endl; 24 | cout << snook.get_field("blueness").to_string() << endl; 25 | cout << "Display whole thing..." 
<< endl; 26 | cout << snook.dump() << endl; 27 | 28 | double blue = snook.get_field("blueness").to_double(); 29 | double blue_ratio = blue / 1.5e9; 30 | double blue_error = 1.0 - blue_ratio; 31 | if (blue_error < 1e-4 && blue_error > -1e-4) 32 | cout << "OK double value is close enough" << endl; 33 | else 34 | cout << "ERROR double value seems fishy" << endl; 35 | 36 | if (snook.has_field("betook")) 37 | cout << "OK this field exists" << endl; 38 | else 39 | cout << "ERROR could not find field that should exist" << endl; 40 | 41 | if (snook.has_field("NOT-HERE")) 42 | cout << "ERROR found field that should NOT exist" << endl; 43 | else 44 | cout << "OK this field does not exist" << endl; 45 | 46 | json took_twin; 47 | took_twin.set_field("bilbo", "hungry"); 48 | took_twin.set_field("baggins", 9999); 49 | 50 | if (took == took_twin) 51 | cout << "OK these objects match" << endl; 52 | else 53 | cout << "ERROR these objects were supposed to match" << endl; 54 | 55 | if (took != took_twin) 56 | cout << "ERROR these objects were supposed to match" << endl; 57 | else 58 | cout << "OK these objects match" << endl; 59 | 60 | if (took == snook) 61 | cout << "ERROR these objects were not supposed to match" << endl; 62 | else 63 | cout << "OK these objects don't match" << endl; 64 | 65 | if (took != snook) 66 | cout << "OK these objects don't match" << endl; 67 | else 68 | cout << "ERROR these objects were not supposed to match" << endl; 69 | 70 | bool parse_failed = false; 71 | try 72 | { 73 | json bork("\"whatisthis"); 74 | } 75 | catch (json_parse_failure &exc) 76 | { 77 | parse_failed = true; 78 | cout << "OK got exception for invalid JSON text: " << exc.what() << endl; 79 | } 80 | 81 | if (!parse_failed) 82 | { 83 | cout << "ERROR invalid JSON text was not noticed" << endl; 84 | } 85 | 86 | } 87 | -------------------------------------------------------------------------------- /test/json_test.out: 
-------------------------------------------------------------------------------- 1 | 9999 2 | Display individual fields... 3 | 9999 4 | hungry 5 | 1500000000.0 6 | Display whole thing... 7 | { 8 | "betook":{ 9 | "baggins":9999, 10 | "bilbo":"hungry" 11 | }, 12 | "blueness":1500000000.0 13 | } 14 | OK double value is close enough 15 | OK this field exists 16 | OK this field does not exist 17 | OK these objects match 18 | OK these objects match 19 | OK these objects don't match 20 | OK these objects don't match 21 | OK got exception for invalid JSON text: failed to parse text="whatisthis 22 | -------------------------------------------------------------------------------- /test/multithread_test.c: -------------------------------------------------------------------------------- 1 | #define _GNU_SOURCE // for gettid() 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include "redis_ipc.h" 7 | 8 | // gettid() is missing a libc wrapper before glibc 2.30 9 | // (manpage even mentions it) 10 | #if (__GLIBC_MINOR__ < 30) 11 | pid_t gettid() 12 | { 13 | return syscall(SYS_gettid); 14 | } 15 | #endif 16 | 17 | void *run_printer_thread(void *data) 18 | { 19 | redis_ipc_init("printer", "monitor"); 20 | redis_ipc_send_debug(1, "printer starting to smoke"); 21 | redis_ipc_send_debug(0, "printer on fire!!"); 22 | redis_ipc_cleanup(gettid()); 23 | 24 | return 0; 25 | } 26 | 27 | void *run_button_thread(void *data) 28 | { 29 | redis_ipc_init("button", "watcher"); 30 | redis_ipc_send_debug(1, "NO don't press that button..."); 31 | redis_ipc_send_debug(0, "I told you not to press it!!"); 32 | redis_ipc_cleanup(gettid()); 33 | 34 | return 0; 35 | } 36 | 37 | int main(int argc, char **argv) 38 | { 39 | pthread_t printer_thread_info, button_thread_info; 40 | 41 | pthread_create(&printer_thread_info, NULL, run_printer_thread, NULL); 42 | pthread_create(&button_thread_info, NULL, run_button_thread, NULL); 43 | 44 | pthread_join(printer_thread_info, NULL); 45 | 
pthread_join(button_thread_info, NULL); 46 | 47 | return 0; 48 | } 49 | -------------------------------------------------------------------------------- /test/multithread_test.out: -------------------------------------------------------------------------------- 1 | (button) CONFIG SET notify-keyspace-events Kh 2 | (printer) CONFIG SET notify-keyspace-events Kh 3 | (button) PUBLISH channel.debug.button { "message": "NO don't press that button...", "level": 1, "channel": "channel.debug.button", "timestamp": "1668592241.554110", "component": "button", "thread": "watcher", "tid": 19190 } 4 | (printer) PUBLISH channel.debug.printer { "message": "printer starting to smoke", "level": 1, "channel": "channel.debug.printer", "timestamp": "1668592241.554110", "component": "printer", "thread": "monitor", "tid": 19189 } 5 | (printer) PUBLISH channel.debug.printer { "message": "printer on fire!!", "level": 0, "channel": "channel.debug.printer", "timestamp": "1668592241.554176", "component": "printer", "thread": "monitor", "tid": 19189 } 6 | (button) PUBLISH channel.debug.button { "message": "I told you not to press it!!", "level": 0, "channel": "channel.debug.button", "timestamp": "1668592241.554177", "component": "button", "thread": "watcher", "tid": 19190 } 7 | -------------------------------------------------------------------------------- /test/pub_sub_test.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include "redis_ipc.h" 6 | 7 | void spawn_listener_process(void) 8 | { 9 | json_object *message; 10 | pid_t pid = fork(); 11 | int i; 12 | 13 | // parent waits for child to start listening, then returns 14 | if (pid > 0) 15 | { 16 | sleep(1); 17 | return; 18 | } 19 | 20 | // child continues on 21 | redis_ipc_init("web", "listener"); 22 | 23 | redis_ipc_subscribe_events("printer", NULL); 24 | redis_ipc_subscribe_debug("printer"); 25 | redis_ipc_subscribe_setting_notifications(); 26 | 
27 | for (i = 0; i < 7; i++) 28 | { 29 | message = redis_ipc_get_message_blocking(); 30 | json_object_put(message); 31 | } 32 | 33 | struct timeval timeout = {2, 0}; 34 | fprintf(stderr, "** This wait for message should time out in 2 sec...\n"); 35 | message = redis_ipc_get_message_timeout(timeout); 36 | 37 | redis_ipc_unsubscribe_events(); 38 | redis_ipc_unsubscribe_debug(); 39 | redis_ipc_unsubscribe_setting_notifications(); 40 | 41 | redis_ipc_cleanup(getpid()); 42 | 43 | _exit(0); 44 | } 45 | 46 | int main(int argc, char **argv) 47 | { 48 | json_object *event = NULL; 49 | json_object *setting = NULL; 50 | int child_status = -1; 51 | 52 | spawn_listener_process(); 53 | 54 | redis_ipc_init("printer", "monitor"); 55 | redis_ipc_send_debug(1, "printer starting to smoke"); 56 | sleep(1); 57 | redis_ipc_send_debug(0, "printer on fire!!"); 58 | sleep(1); 59 | 60 | event = json_object_new_object(); 61 | json_object_object_add(event, "severity", 62 | json_object_new_string("warning")); 63 | json_object_object_add(event, "message", 64 | json_object_new_string("printer is down for the count")); 65 | redis_ipc_send_event("state", event); 66 | json_object_put(event); 67 | sleep(1); 68 | 69 | event = json_object_new_object(); 70 | json_object_object_add(event, "severity", 71 | json_object_new_string("alert")); 72 | json_object_object_add(event, "message", 73 | json_object_new_string("there went our expensive paper")); 74 | json_object_object_add(event, "pages_remaining", json_object_new_int(0)); 75 | redis_ipc_send_event("media", event); 76 | json_object_put(event); 77 | sleep(1); 78 | 79 | event = json_object_new_object(); 80 | json_object_object_add(event, "severity", 81 | json_object_new_string("info")); 82 | json_object_object_add(event, "message", 83 | json_object_new_string("save trees, go digital")); 84 | redis_ipc_send_event(NULL, event); 85 | json_object_put(event); 86 | sleep(1); 87 | 88 | // try to generate notifications for settings changes in web component 89 | 
redis_ipc_config_settings_writer(RIPC_COMPONENT_ANY); 90 | fprintf(stderr, "** This full setting write should generate 'hset' message...\n"); 91 | setting = json_object_new_object(); 92 | json_object_object_add(setting, "colorspace", 93 | json_object_new_string("purple and more purple")); 94 | json_object_object_add(setting, "theme", 95 | json_object_new_string("boisterous")); 96 | redis_ipc_write_setting("web", setting); 97 | json_object_put(setting); 98 | sleep(1); 99 | 100 | fprintf(stderr, "** This single setting field write should generate 'hset' message...\n"); 101 | redis_ipc_write_setting_field("web", "theme", "stealth"); 102 | redis_ipc_config_settings_writer(RIPC_DEFAULT_SETTINGS_WRITER); 103 | 104 | redis_ipc_cleanup(getpid()); 105 | wait(&child_status); 106 | 107 | return 0; 108 | } 109 | -------------------------------------------------------------------------------- /test/pub_sub_test.out: -------------------------------------------------------------------------------- 1 | (web) CONFIG SET notify-keyspace-events Kh 2 | (web) PSUBSCRIBE channel.events.printer* 3 | (web) PSUBSCRIBE channel.debug.printer 4 | (web) PSUBSCRIBE __keyspace*__:settings.web 5 | (printer) CONFIG SET notify-keyspace-events Kh 6 | (printer) PUBLISH channel.debug.printer { "message": "printer starting to smoke", "level": 1, "channel": "channel.debug.printer", "timestamp": "1668597824.839004", "component": "printer", "thread": "monitor", "tid": 26522 } 7 | (web) [MESSAGE] { "message": "printer starting to smoke", "level": 1, "channel": "channel.debug.printer", "timestamp": "1668597824.839004", "component": "printer", "thread": "monitor", "tid": 26522 } 8 | (printer) PUBLISH channel.debug.printer { "message": "printer on fire!!", "level": 0, "channel": "channel.debug.printer", "timestamp": "1668597825.839200", "component": "printer", "thread": "monitor", "tid": 26522 } 9 | (web) [MESSAGE] { "message": "printer on fire!!", "level": 0, "channel": "channel.debug.printer", "timestamp": 
"1668597825.839200", "component": "printer", "thread": "monitor", "tid": 26522 } 10 | (printer) PUBLISH channel.events.printer.state { "severity": "warning", "message": "printer is down for the count", "channel": "channel.events.printer.state", "timestamp": "1668597826.839420", "component": "printer", "thread": "monitor", "tid": 26522 } 11 | (web) [MESSAGE] { "severity": "warning", "message": "printer is down for the count", "channel": "channel.events.printer.state", "timestamp": "1668597826.839420", "component": "printer", "thread": "monitor", "tid": 26522 } 12 | (printer) PUBLISH channel.events.printer.media { "severity": "alert", "message": "there went our expensive paper", "pages_remaining": 0, "channel": "channel.events.printer.media", "timestamp": "1668597827.839633", "component": "printer", "thread": "monitor", "tid": 26522 } 13 | (web) [MESSAGE] { "severity": "alert", "message": "there went our expensive paper", "pages_remaining": 0, "channel": "channel.events.printer.media", "timestamp": "1668597827.839633", "component": "printer", "thread": "monitor", "tid": 26522 } 14 | (printer) PUBLISH channel.events.printer { "severity": "info", "message": "save trees, go digital", "channel": "channel.events.printer", "timestamp": "1668597828.839953", "component": "printer", "thread": "monitor", "tid": 26522 } 15 | (web) [MESSAGE] { "severity": "info", "message": "save trees, go digital", "channel": "channel.events.printer", "timestamp": "1668597828.839953", "component": "printer", "thread": "monitor", "tid": 26522 } 16 | (printer) HSET settings.redis-ipc settings_writer * 17 | ** This full setting write should generate 'hset' message... 18 | (printer) HMSET settings.web colorspace purple and more purple theme boisterous 19 | (web) [MESSAGE] hset 20 | ** This single setting field write should generate 'hset' message... 
21 | (printer) HSET settings.web theme stealth 22 | (printer) HSET settings.redis-ipc settings_writer db 23 | (web) [MESSAGE] hset 24 | ** This wait for message should time out in 2 sec... 25 | (web) PUNSUBSCRIBE channel.events.* 26 | (web) PUNSUBSCRIBE channel.debug.* 27 | (web) PUNSUBSCRIBE __keyspace*__:settings.web 28 | -------------------------------------------------------------------------------- /test/settings_status_test.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include "redis_ipc.h" 4 | 5 | int main(int argc, char **argv) 6 | { 7 | json_object *setting = NULL; 8 | json_object *status = NULL; 9 | char *field = NULL; 10 | 11 | redis_ipc_init("session", "main"); 12 | 13 | status = json_object_new_object(); 14 | json_object_object_add(status, "open", 15 | json_object_new_string("until closed")); 16 | json_object_object_add(status, "procedure", 17 | json_object_new_string("complicated")); 18 | redis_ipc_write_status(status); 19 | json_object_put(status); 20 | 21 | setting = json_object_new_object(); 22 | json_object_object_add(setting, "auto_finalize", 23 | json_object_new_string("no way")); 24 | // this should fail, component is not authorized to write settings 25 | // (not even its own) 26 | redis_ipc_config_settings_writer("db"); 27 | fprintf(stderr, "** This attempt to write settings should fail...\n"); 28 | redis_ipc_write_setting("session", setting); 29 | json_object_put(setting); 30 | fprintf(stderr, "** This attempt to write single setting should fail...\n"); 31 | redis_ipc_write_setting_field("session", "location", "right here"); 32 | 33 | // should come back empty since above write failed 34 | setting = redis_ipc_read_setting("session"); 35 | json_object_put(setting); 36 | field = redis_ipc_read_setting_field("session", "location"); 37 | if (field) free(field); 38 | 39 | // authorize this component to write settings and try again 40 | redis_ipc_config_settings_writer("session"); 
41 | fprintf(stderr, "** This attempt to write single setting should work...\n"); 42 | redis_ipc_write_setting_field("session", "location", "still right here"); 43 | 44 | // put back to "db" as authorized component 45 | redis_ipc_config_settings_writer("db"); 46 | 47 | redis_ipc_cleanup(getpid()); 48 | 49 | redis_ipc_init("db", "main"); 50 | 51 | status = redis_ipc_read_status("session"); 52 | json_object_put(status); 53 | 54 | fprintf(stderr, "** This attempt to write settings should work...\n"); 55 | setting = json_object_new_object(); 56 | json_object_object_add(setting, "num_copies", 57 | json_object_new_string("until ink runs out")); 58 | json_object_object_add(setting, "paper_type", 59 | json_object_new_string("wrinkled")); 60 | redis_ipc_write_setting("printer", setting); 61 | json_object_put(setting); 62 | 63 | fprintf(stderr, "** This attempt to write single setting should work...\n"); 64 | redis_ipc_write_setting_field("printer", "contrast", "none"); 65 | 66 | setting = redis_ipc_read_setting("printer"); 67 | json_object_put(setting); 68 | 69 | redis_ipc_config_stderr_debug(0); 70 | fprintf(stderr, "** This attempt to read single setting should *not* print debug...\n"); 71 | field = redis_ipc_read_setting_field("printer", "paper_type"); 72 | if (field) free(field); 73 | redis_ipc_config_stderr_debug(1); 74 | fprintf(stderr, "** This attempt to read single setting *should* print debug...\n"); 75 | field = redis_ipc_read_setting_field("printer", "paper_type"); 76 | if (field) free(field); 77 | 78 | redis_ipc_cleanup(getpid()); 79 | 80 | return 0; 81 | } 82 | -------------------------------------------------------------------------------- /test/settings_status_test.out: -------------------------------------------------------------------------------- 1 | (session) CONFIG SET notify-keyspace-events Kh 2 | (session) HMSET status.session open until closed procedure complicated 3 | (session) HSET settings.redis-ipc settings_writer db 4 | ** This attempt to write 
settings should fail... 5 | (session) [ERROR] component session is not authorized to write settings 6 | ** This attempt to write single setting should fail... 7 | (session) [ERROR] component session is not authorized to write settings 8 | (session) HGETALL settings.session 9 | (session) [HASH] 10 | (session) HGET settings.session location 11 | (session) [HASH_FIELD] 12 | (session) HSET settings.redis-ipc settings_writer session 13 | ** This attempt to write single setting should work... 14 | (session) HSET settings.session location still right here 15 | (session) HSET settings.redis-ipc settings_writer db 16 | (db) CONFIG SET notify-keyspace-events Kh 17 | (db) HGETALL status.session 18 | (db) [HASH] open='until closed' procedure='complicated' 19 | ** This attempt to write settings should work... 20 | (db) HMSET settings.printer num_copies until ink runs out paper_type wrinkled 21 | ** This attempt to write single setting should work... 22 | (db) HSET settings.printer contrast none 23 | (db) HGETALL settings.printer 24 | (db) [HASH] num_copies='until ink runs out' paper_type='wrinkled' contrast='none' 25 | (db) HSET settings.redis-ipc stderr_debug 0 26 | ** This attempt to read single setting should *not* print debug... 27 | ** This attempt to read single setting *should* print debug... 
28 | (db) HGET settings.printer paper_type 29 | (db) [HASH_FIELD] paper_type='wrinkled' 30 | -------------------------------------------------------------------------------- /tox-deps.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | skip_missing_interpreters = true 3 | skipsdist = true 4 | 5 | [testenv:{sync,tools,build}] 6 | skip_install = true 7 | install_command = pip install {opts} {packages} 8 | envdir = {toxworkdir}/.env 9 | 10 | passenv = 11 | REPO_CFG 12 | DISPLAY 13 | XAUTHORITY 14 | HOME 15 | USERNAME 16 | USER 17 | CI 18 | LANG 19 | LC_COLLATE 20 | SSH_* 21 | GID 22 | UID 23 | XDG_* 24 | PIP_DOWNLOAD_CACHE 25 | 26 | deps = 27 | pip>=21.3 28 | cmake 29 | ninja 30 | -r requirements-sync.txt 31 | 32 | commands = 33 | sync: repolite {posargs:--quiet} 34 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py3{6,7,8,9,10,11}-tests 3 | skip_missing_interpreters = true 4 | skipsdist = true 5 | 6 | [testenv] 7 | # uncomment next line to force older system python for local testing 8 | #basepython = python3.8 9 | install_command = pip install {opts} {packages} 10 | skip_install = true 11 | 12 | [testenv:{tests,clang,ctest,bionic,lcov,lint,grind,clean,cover}] 13 | envdir = {toxworkdir}/build 14 | runner = ignore_env_name_mismatch 15 | 16 | passenv = 17 | pythonLocation 18 | CC 19 | CXX 20 | LD 21 | AR 22 | NM 23 | PYTHON 24 | DISPLAY 25 | XAUTHORITY 26 | HOME 27 | USERNAME 28 | USER 29 | CI 30 | XDG_* 31 | GITHUB* 32 | PIP_DOWNLOAD_CACHE 33 | 34 | setenv = 35 | clang: CC = {env:CC:clang} 36 | clang: CXX = {env:CXX:clang++} 37 | bionic: ENV_RIPC_RUNTIME_DIR = {env:ENV_RIPC_RUNTIME_DIR:{envtmpdir}} 38 | LLVM_VER_DIR = {env:LLVM_VER_DIR:llvm-15} 39 | 40 | allowlist_externals = 41 | {tests,clang,ctest,bionic,lint,grind,clean}: bash 42 | {tests,clang,bionic,grind,cover}: mkdir 
43 | 44 | changedir = 45 | {tests,bionic,clang,grind}: build 46 | 47 | deps = 48 | {tests,bionic,clang,ctest,grind,lcov,lint,cover}: pip>=21.0.1 49 | {tests,bionic,clang,ctest,grind,lcov,cover}: gcovr 50 | {tests,bionic,clang,ctest,grind,lcov,cover}: lcov_cobertura 51 | {tests,bionic,clang,ctest,grind,lcov,cover}: cmake 52 | {tests,bionic,clang,ctest,grind,lcov,cover}: ninja 53 | {tests,bionic,clang,ctest,grind,lcov,cover}: ValgrindCI 54 | #lint: cpplint 55 | lint: https://github.com/sarnold/cpplint/releases/download/2.0.1/cpplint-2.0.0-py3-none-any.whl 56 | lint: beautysh 57 | 58 | commands_pre = 59 | bionic: mkdir -p {toxinidir}/coverage 60 | {tests,clang,bionic,grind}: mkdir -p {toxinidir}/build 61 | {tests,clang,ctest,grind}: bash -c '{toxinidir}/scripts/run_redis.sh start > /dev/null' 62 | {tests,clang,ctest,grind}: bash -c '{toxinidir}/scripts/run_redis.sh status' 63 | 64 | commands = 65 | bionic: bash -c 'cmake -G {posargs:"Unix Makefiles"} -DWITH_COVERAGE=1 -DCMAKE_BUILD_TYPE=Debug ..' 66 | bionic: bash -c 'cmake --build .' 67 | bionic: bash -c 'make cov || true' 68 | bionic: bash -c 'RIPC_RUNTIME_DIR=$ENV_RIPC_RUNTIME_DIR {toxinidir}/scripts/run_redis.sh start > /dev/null' 69 | bionic: bash -c 'RIPC_RUNTIME_DIR=$ENV_RIPC_RUNTIME_DIR {toxinidir}/scripts/run_redis.sh status' 70 | bionic: bash -c 'RIPC_SERVER_PATH=$ENV_RIPC_RUNTIME_DIR/socket make cov' 71 | clang: bash -c 'cmake -G {posargs:"Unix Makefiles"} -DRIPC_BUILD_TESTING=ON -DCOVERAGE_BUILD=ON -DCOVERAGE_HTML=ON ..' 72 | tests: bash -c 'cmake -G {posargs:"Unix Makefiles"} -DWITH_COVERAGE=1 -DCMAKE_BUILD_TYPE=Debug ..' 73 | grind: bash -c 'cmake -DRIPC_BUILD_TESTING=ON -DCMAKE_BUILD_TYPE=Debug ..' 74 | {tests,clang,grind}: bash -c 'cmake --build . -j $(nproc)' 75 | tests: bash -c 'ctest -V --test-dir ./' 76 | clang: bash -c 'cmake --build . 
--target coverage' 77 | lcov: lcov_cobertura build/coverage/lcov.info --base-dir {toxinidir} --output coverage.xml 78 | lint: bash -c 'cpplint --output=gsed {toxinidir}/src/* {toxinidir}/inc/*' 79 | {bionic,tests}: gcovr --gcov-ignore-parse-errors=negative_hits.warn -s --txt-metric branch -r {toxinidir} . 80 | bionic: gcovr -r {toxinidir} --xml-pretty -o coverage.xml . 81 | bionic: gcovr -r {toxinidir} --html --html-details -o {toxinidir}/coverage/coverage.html . 82 | {bionic}: bash -c 'RIPC_RUNTIME_DIR=$ENV_RIPC_RUNTIME_DIR {toxinidir}/scripts/run_redis.sh stop' 83 | ctest: bash -c 'ctest --build-generator {posargs:"Ninja"} --build-and-test . build --build-options -DWITH_COVERAGE=ON -DCMAKE_BUILD_TYPE=Debug --test-command ctest --rerun-failed --output-on-failure -V' 84 | ctest: gcovr --gcov-ignore-parse-errors=negative_hits.warn -s --txt-metric branch build/ 85 | cover: gcovr --xml-pretty -o coverage.xml build/ 86 | # runtime assertion error without || true => (SIGSEGV)) (exited with code -11) 87 | grind: bash -c 'valgrind --tool=memcheck --xml=yes --xml-file=json_check.xml --leak-check=full --show-leak-kinds=definite,possible --error-exitcode=127 ./json_test || true' 88 | # valgrind error exit without || true => (exited with code 127) 89 | grind: bash -c 'valgrind --tool=memcheck --xml=yes --xml-file=multithread_check.xml --leak-check=full --show-leak-kinds=definite,possible --error-exitcode=127 ./multithread_test || true' 90 | grind: bash -c 'valgrind --tool=memcheck --xml=yes --xml-file=command_check.xml --leak-check=full --show-leak-kinds=definite,possible --error-exitcode=127 ./command_result_test || true' 91 | grind: bash -c '[[ -f json_check.xml ]] && valgrind-ci json_check.xml --number-of-errors' 92 | grind: bash -c '[[ -f json_check.xml ]] && valgrind-ci json_check.xml --summary' 93 | grind: valgrind-ci multithread_check.xml --number-of-errors 94 | grind: valgrind-ci multithread_check.xml --summary 95 | # xml exception (no errors in report) => junk after 
document element 96 | #grind: bash -c '[[ -f command_check.xml ]] && valgrind-ci command_check.xml --number-of-errors || true' 97 | #grind: bash -c '[[ -f command_check.xml ]] && valgrind-ci command_check.xml --summary || true' 98 | clean: bash -c 'rm -rf build/ coverage/ coverage.xml *.gcov __pycache__/' 99 | 100 | commands_post = 101 | {tests,clang,ctest,grind,dist}: bash -c '{toxinidir}/scripts/run_redis.sh stop > /dev/null' 102 | 103 | [testenv:{auto,autoclean,dist}] 104 | envdir = {toxworkdir}/auto 105 | runner = ignore_env_name_mismatch 106 | 107 | passenv = 108 | pythonLocation 109 | CC 110 | CXX 111 | CI 112 | GITHUB* 113 | PIP_DOWNLOAD_CACHE 114 | 115 | setenv = 116 | auto: ENV_RIPC_RUNTIME_DIR = {env:ENV_RIPC_RUNTIME_DIR:{envtmpdir}} 117 | 118 | allowlist_externals = 119 | {auto,autoclean,dist}: bash 120 | 121 | deps = 122 | {auto,autoclean,dist}: pip>=21.0.1 123 | {auto,autoclean,dist}: this-cli 124 | {auto,autoclean,dist}: gcovr 125 | 126 | commands_pre = 127 | {dist}: bash -c '{toxinidir}/scripts/run_redis.sh start > /dev/null' 128 | {dist}: bash -c '{toxinidir}/scripts/run_redis.sh status' 129 | 130 | commands = 131 | # sadly this-cli cannot pass args to configure 132 | dist: this check 133 | dist: bash -c 'make clean' 134 | dist: bash -c 'make dist' 135 | auto: bash -c 'autoreconf -fiv' 136 | auto: bash -c './configure {posargs:"--with-coverage"}' 137 | auto: bash -c 'make cov || true' 138 | auto: bash -c 'RIPC_RUNTIME_DIR=$ENV_RIPC_RUNTIME_DIR {toxinidir}/scripts/run_redis.sh start > /dev/null' 139 | auto: bash -c 'RIPC_RUNTIME_DIR=$ENV_RIPC_RUNTIME_DIR {toxinidir}/scripts/run_redis.sh status' 140 | auto: bash -c 'RIPC_SERVER_PATH=$ENV_RIPC_RUNTIME_DIR/socket make cov' 141 | auto: gcovr --gcov-ignore-parse-errors=negative_hits.warn -s --txt-metric branch src/.libs/ test/ 142 | auto: gcovr --gcov-ignore-parse-errors=negative_hits.warn --xml-pretty -o coverage.xml src/.libs/ test/ 143 | auto: bash -c 'RIPC_RUNTIME_DIR=$ENV_RIPC_RUNTIME_DIR 
{toxinidir}/scripts/run_redis.sh stop' 144 | autoclean: -bash -c 'make distclean-recursive' 145 | autoclean: bash -c 'rm -rf Makefile Makefile.in aclocal.m4 ar-lib autom4te.cache/ compile config.* coverage* configure configure~ depcomp install-sh libltdl/ ltmain.sh m4/ missing src/Makefile.in test-driver test/gmon.out test/Makefile.in' 146 | -------------------------------------------------------------------------------- /toxfile.py: -------------------------------------------------------------------------------- 1 | """ 2 | https://github.com/masenf/tox-ignore-env-name-mismatch 3 | 4 | MIT License 5 | Copyright (c) 2023 Masen Furer 6 | """ 7 | from contextlib import contextmanager 8 | from typing import Any, Iterator, Optional, Sequence, Tuple 9 | 10 | from tox.plugin import impl 11 | from tox.tox_env.api import ToxEnv 12 | from tox.tox_env.info import Info 13 | from tox.tox_env.python.virtual_env.runner import VirtualEnvRunner 14 | from tox.tox_env.register import ToxEnvRegister 15 | 16 | 17 | class FilteredInfo(Info): 18 | """Subclass of Info that optionally filters specific keys during compare().""" 19 | 20 | def __init__( 21 | self, 22 | *args: Any, 23 | filter_keys: Optional[Sequence[str]] = None, 24 | filter_section: Optional[str] = None, 25 | **kwargs: Any, 26 | ): 27 | """ 28 | :param filter_keys: key names to pop from value 29 | :param filter_section: if specified, only pop filter_keys when the compared section matches 30 | 31 | All other args and kwargs are passed to super().__init__ 32 | """ 33 | self.filter_keys = filter_keys 34 | self.filter_section = filter_section 35 | super().__init__(*args, **kwargs) 36 | 37 | @contextmanager 38 | def compare( 39 | self, 40 | value: Any, 41 | section: str, 42 | sub_section: Optional[str] = None, 43 | ) -> Iterator[Tuple[bool, Optional[Any]]]: 44 | """Perform comparison and update cached info after filtering `value`.""" 45 | if self.filter_section is None or section == self.filter_section: 46 | try: 47 | value = 
value.copy() 48 | except AttributeError: # pragma: no cover 49 | pass 50 | else: 51 | for fkey in self.filter_keys or []: 52 | value.pop(fkey, None) 53 | with super().compare(value, section, sub_section) as rv: 54 | yield rv 55 | 56 | 57 | class IgnoreEnvNameMismatchVirtualEnvRunner(VirtualEnvRunner): 58 | """EnvRunner that does NOT save the env name as part of the cached info.""" 59 | 60 | @staticmethod 61 | def id() -> str: 62 | return "ignore_env_name_mismatch" 63 | 64 | @property 65 | def cache(self) -> Info: 66 | """Return a modified Info class that does NOT pass "name" key to `Info.compare`.""" 67 | return FilteredInfo( 68 | self.env_dir, 69 | filter_keys=["name"], 70 | filter_section=ToxEnv.__name__, 71 | ) 72 | 73 | 74 | @impl 75 | def tox_register_tox_env(register: ToxEnvRegister) -> None: 76 | """tox4 entry point: add IgnoreEnvNameMismatchVirtualEnvRunner to registry.""" 77 | register.add_run_env(IgnoreEnvNameMismatchVirtualEnvRunner) 78 | --------------------------------------------------------------------------------