├── redcap ├── py.typed ├── methods │ ├── __init__.py │ ├── version.py │ ├── project_info.py │ ├── field_names.py │ ├── repeating.py │ ├── reports.py │ ├── logging.py │ ├── metadata.py │ ├── arms.py │ ├── users.py │ ├── events.py │ ├── files.py │ ├── instruments.py │ ├── surveys.py │ ├── user_roles.py │ ├── data_access_groups.py │ ├── file_repository.py │ └── base.py ├── __init__.py ├── conftest.py ├── project.py └── request.py ├── tests ├── __init__.py ├── unit │ ├── __init__.py │ ├── data.txt │ ├── conftest.py │ ├── test_survey_project.py │ └── test_long_project.py ├── integration │ ├── __init__.py │ ├── conftest.py │ ├── test_simple_project.py │ └── test_long_project.py └── data │ └── doctest_project.xml ├── docs ├── img │ └── logo.png ├── api_reference │ ├── arms.md │ ├── files.md │ ├── project.md │ ├── users.md │ ├── events.md │ ├── logging.md │ ├── records.md │ ├── reports.md │ ├── surveys.md │ ├── metadata.md │ ├── repeating.md │ ├── version.md │ ├── user_roles.md │ ├── field_names.md │ ├── instruments.md │ ├── project_info.md │ ├── file_repository.md │ └── data_access_groups.md ├── using-in-app-or-package.md ├── quickstart.md └── index.md ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── release.md │ └── unexpected-behavior-issue-template.md └── workflows │ └── ci.yml ├── .env.example ├── codecov.yml ├── mypy.ini ├── .coveragerc ├── .gitignore ├── pytest.ini ├── LICENSE ├── .pylintrc ├── mkdocs.yml ├── pyproject.toml ├── CONTRIBUTING.md ├── README.md └── HISTORY.md /redcap/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/data.txt: -------------------------------------------------------------------------------- 1 | This is a file used to test uploading. 
-------------------------------------------------------------------------------- /docs/img/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redcap-tools/PyCap/HEAD/docs/img/logo.png -------------------------------------------------------------------------------- /docs/api_reference/arms.md: -------------------------------------------------------------------------------- 1 | # Arms 2 | 3 | ::: redcap.methods.arms 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [pwildenhain] 4 | patreon: pwildenhain 5 | -------------------------------------------------------------------------------- /docs/api_reference/files.md: -------------------------------------------------------------------------------- 1 | # Files 2 | 3 | ::: redcap.methods.files 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/project.md: -------------------------------------------------------------------------------- 1 | # Project 2 | 3 | ::: redcap.project 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/users.md: -------------------------------------------------------------------------------- 1 | # Users 2 | 3 | ::: redcap.methods.users 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/events.md: -------------------------------------------------------------------------------- 1 | # Events 2 | 3 | ::: redcap.methods.events 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/logging.md: -------------------------------------------------------------------------------- 1 | # Logging 2 | 3 | ::: redcap.methods.logging 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/records.md: -------------------------------------------------------------------------------- 1 | # Records 2 | 3 | ::: redcap.methods.records 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/reports.md: -------------------------------------------------------------------------------- 1 | # Reports 2 | 3 | ::: redcap.methods.reports 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/surveys.md: -------------------------------------------------------------------------------- 1 | # Surveys 2 | 3 | ::: redcap.methods.surveys 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/metadata.md: -------------------------------------------------------------------------------- 1 | # Metadata 2 | 3 | ::: redcap.methods.metadata 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/repeating.md: 
-------------------------------------------------------------------------------- 1 | # Repeating 2 | 3 | ::: redcap.methods.repeating 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/version.md: -------------------------------------------------------------------------------- 1 | # Version 2 | 3 | 4 | ::: redcap.methods.version 5 | selection: 6 | inherited_members: true 7 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # REDCap API credentials: 2 | REDCAP_API_URL=https://redcap.example.edu/api/ 3 | REDCAP_API_KEY=replace-with-your-api-token 4 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | project: 4 | default: 5 | target: 100% 6 | comment: 7 | layout: "diff, files" 8 | -------------------------------------------------------------------------------- /docs/api_reference/user_roles.md: -------------------------------------------------------------------------------- 1 | # User Roles 2 | 3 | ::: redcap.methods.user_roles 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/field_names.md: -------------------------------------------------------------------------------- 1 | # Field Names 2 | 3 | ::: redcap.methods.field_names 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/instruments.md: -------------------------------------------------------------------------------- 1 | # Instruments 2 | 3 | ::: redcap.methods.instruments 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/project_info.md: -------------------------------------------------------------------------------- 1 | # Project Info 2 | 3 | ::: redcap.methods.project_info 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/file_repository.md: -------------------------------------------------------------------------------- 1 | # File Repository 2 | 3 | ::: redcap.methods.file_repository 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /docs/api_reference/data_access_groups.md: -------------------------------------------------------------------------------- 1 | # Data Access Groups 2 | 3 | ::: redcap.methods.data_access_groups 4 | selection: 5 | inherited_members: true 6 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | warn_unused_configs = True 3 | 4 | [mypy-tests/unit/callback_utils.*] 5 | ignore_errors = True 6 | 7 | [mypy-semantic_version] 8 | ignore_missing_imports = True 9 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | # the doctests aren't run in forks 3 | omit = 4 | redcap/conftest.py 5 | 6 | exclude_lines 
= 7 | pragma: no cover 8 | if TYPE_CHECKING: 9 | # overloaded functions 10 | [.]{3} 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.ipynb 3 | .DS_Store 4 | MANIFEST 5 | .gitignore 6 | dist/ 7 | README 8 | *.egg-info 9 | build/ 10 | docs/_build/ 11 | _site/ 12 | site/ 13 | .idea/ 14 | *.swp 15 | .vscode/ 16 | ve/ 17 | .venv 18 | coverage.xml 19 | .coverage 20 | .noseids -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/release.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Release 3 | about: Checklist for releases 4 | title: 'Release version:' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | ## To do 11 | - [ ] Draft GitHub release 12 | - [ ] Bump version: `poetry version minor` 13 | - [ ] Run all tests/styling/linting: `pytest` 14 | - [ ] Merge release PR 15 | - [ ] Update canonical documentation: `mkdocs gh-deploy` 16 | - [ ] Publish to pypi: `poetry publish` 17 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | doctest_optionflags = NORMALIZE_WHITESPACE ELLIPSIS FAIL_FAST REPORT_NDIFF 3 | addopts = -rsxX -l --tb=short --strict --pylint --black --cov=redcap --cov-report=xml --mypy 4 | markers = 5 | integration: test connects to redcapdemo.vumc.org server 6 | # Keep current format for future version of pytest 7 | junit_family=xunit1 8 | # Ignore unimportant warnings 9 | filterwarnings = 10 | ignore::UserWarning 11 | ignore::DeprecationWarning 12 | -------------------------------------------------------------------------------- /redcap/methods/__init__.py: -------------------------------------------------------------------------------- 1 | """Make the method modules available upon import""" 2 | 3 | import redcap.methods.arms 4 | import redcap.methods.data_access_groups 5 | import redcap.methods.events 6 | import redcap.methods.field_names 7 | import redcap.methods.file_repository 8 | import redcap.methods.files 9 | import redcap.methods.instruments 10 | import redcap.methods.logging 11 | import redcap.methods.metadata 12 | import redcap.methods.project_info 13 | import redcap.methods.records 14 | import redcap.methods.repeating 15 | import redcap.methods.reports 16 | import redcap.methods.surveys 17 | import redcap.methods.users 18 | import redcap.methods.user_roles 19 | import redcap.methods.version 20 | -------------------------------------------------------------------------------- /redcap/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | This module exposes the REDCap API through the Project class. Instantiate the 6 | class with the URL to your REDCap system along with an API key, probably 7 | generated for you by a REDCap administrator. 8 | 9 | With a Project object, you can view the metadata and export/import data. 10 | 11 | Other API requests are available, such as exporting users & Form-Event Mappings.
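A minimal usage sketch (the URL and token below are placeholders, not real credentials):

    from redcap import Project

    project = Project("https://redcap.example.edu/api/", "YOUR_API_TOKEN")
    metadata = project.export_metadata()  # the project's data dictionary
    records = project.export_records()    # all records in the project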
12 | 13 | """ 14 | 15 | from redcap.project import Project 16 | from redcap.request import _RCRequest, RedcapError 17 | 18 | __author__ = "Scott Burns " 19 | __license__ = "MIT" 20 | __copyright__ = "2014, Vanderbilt University" 21 | -------------------------------------------------------------------------------- /tests/unit/conftest.py: -------------------------------------------------------------------------------- 1 | """Test fixtures for unit tests only""" 2 | 3 | from typing import Dict, Generator 4 | 5 | import pytest 6 | import responses 7 | 8 | 9 | @pytest.fixture(scope="module") 10 | def project_token() -> str: 11 | """Project API token""" 12 | return "0" * 32 13 | 14 | 15 | @pytest.fixture(scope="module") 16 | def project_urls() -> Dict[str, str]: 17 | """Different urls for different mock projects""" 18 | return { 19 | "bad_url": "https://redcap.badproject.edu/api", 20 | "long_project": "https://redcap.longproject.edu/api/", 21 | "simple_project": "https://redcap.simpleproject.edu/api/", 22 | "survey_project": "https://redcap.surveyproject.edu/api/", 23 | } 24 | 25 | 26 | # See here for docs: https://github.com/getsentry/responses#responses-as-a-pytest-fixture 27 | @pytest.fixture(scope="module") 28 | def mocked_responses() -> Generator: 29 | """Base fixture for all mocked responses""" 30 | with responses.RequestsMock() as resps: 31 | yield resps 32 | -------------------------------------------------------------------------------- /redcap/methods/version.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project REDCap version""" 2 | 3 | from typing import Optional 4 | 5 | import semantic_version 6 | 7 | from redcap.methods.base import Base 8 | 9 | 10 | class Version(Base): 11 | """Responsible for all API methods under 'REDCap' in the API Playground""" 12 | 13 | def export_version(self) -> Optional[semantic_version.Version]: 14 | """ 15 | Get the REDCap version 16 | 17 | Returns: 18 | REDCap version running on the url provided 19 | 20 | Examples: 21 | >>> import semantic_version 22 | >>> redcap_version = proj.export_version() 23 | >>> assert redcap_version >= semantic_version.Version("12.0.1") 24 | """ 25 | payload = self._initialize_payload("version") 26 | resp = None 27 | 28 | redcap_version = self._call_api(payload, return_type="str") 29 | 30 | if semantic_version.validate(redcap_version): 31 | resp = semantic_version.Version(redcap_version) 32 | 33 | return resp 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Vanderbilt University 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /redcap/conftest.py: -------------------------------------------------------------------------------- 1 | """Test fixtures for doctests only 2 | 3 | I don't think this is the ideal workflow, but it's the best I 4 | could come up with for having great, tested, examples 5 | """ 6 | 7 | from pathlib import Path 8 | 9 | import pytest 10 | 11 | from redcap.project import Project 12 | from tests.integration.conftest import ( 13 | add_files_to_repository, 14 | create_project, 15 | grant_superuser_rights, 16 | redcapdemo_url, 17 | SUPER_TOKEN, 18 | ) 19 | 20 | 21 | @pytest.fixture(scope="session", autouse=True) 22 | def add_doctest_objects(doctest_namespace): 23 | """Add the doctest project instance to the doctest_namespace""" 24 | doctest_project_xml = Path("tests/data/doctest_project.xml") 25 | doctest_token = create_project( 26 | url=redcapdemo_url(), 27 | super_token=SUPER_TOKEN, 28 | project_xml_path=doctest_project_xml, 29 | ) 30 | doctest_project = Project(redcapdemo_url(), doctest_token) 31 | doctest_project = grant_superuser_rights(doctest_project) 32 | doctest_project = add_files_to_repository(doctest_project) 33 | 34 | doctest_namespace["proj"] = doctest_project 35 | doctest_namespace["TOKEN"] = doctest_token 36 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | ignore=docs 3 | 4 | [DESIGN] 5 | # Our Base class needs a few more attributes than the default allows 6 | max-attributes=9 7 | 8 | [MESSAGES CONTROL] 9 | 10 | # Disable the message, report, category or checker with the given id(s). You 11 | # can either give multiple identifiers separated by comma (,) or put this 12 | # option multiple times (only on the command line, not in the configuration 13 | # file where it should appear only once).You can also use "--disable=all" to 14 | # disable everything first and then reenable specific checks. For example, if 15 | # you want to run only the similarities checker, you can use "--disable=all 16 | # --enable=similarities". 
If you want to run only the classes checker, but have 17 | # no Warning level messages displayed, use "--disable=all --enable=classes 18 | # --disable=W" 19 | 20 | # we're going to support as many arguments as the API calls have 21 | disable=too-many-arguments, 22 | too-many-positional-arguments, 23 | # unfortunately this has to be disabled because of similar 24 | # import statements across different methods (which are 25 | # separated by different files by design) 26 | duplicate-code 27 | 28 | # make sure we're only disabling what we need to disable 29 | enable=useless-suppression 30 | -------------------------------------------------------------------------------- /docs/using-in-app-or-package.md: -------------------------------------------------------------------------------- 1 | # Using PyCap in an app/package 2 | 3 | If you're using PyCap for a small script or ad-hoc data pull, then the `Project` class has all the necessary functionality. Similarly, the `Project` class is a good choice if you need access to a wide array of functionality (exporting records, surveys, users, etc.). 4 | 5 | However, if you are only using one piece of the REDCap API, then you might want to consider using one of the more _focused_ and _simpler_ classes. 6 | 7 | For example, if all you want to do is export/import records from your project, then the `Records` class can meet all of your needs, with its [`Records.export_records`](../api_reference/records/#redcap.methods.records.Records.export_records) and [`Records.import_records`](../api_reference/records/#redcap.methods.records.Records.import_records) methods. 8 | 9 | In fact, these methods are exactly the same as the `Project.export_records` and `Project.import_records` methods. The `Project` class inherits them directly from the `Records` class. 10 | 11 | The benefit of using the `Records` class over the `Project` class in this case is that your application or package gets to use a simpler class (easier for the developer) and only has to depend on a simpler class (better for the app). 12 | 13 | For a full list of all `Project` subclasses, see the [API Reference](../api_reference/project).
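As a rough sketch of what a records-only setup could look like (this assumes the `Records` class accepts the same `url`/`token` constructor arguments as `Project`; the URL and token below are placeholders):

```python
from redcap.methods.records import Records

API_URL = "https://redcap.example.edu/api/"  # placeholder URL
API_TOKEN = "YOUR_API_TOKEN"  # placeholder token

records_api = Records(API_URL, API_TOKEN)

# The same methods that Project inherits from Records
data = records_api.export_records()
response = records_api.import_records([{"record_id": "1", "field_1": "new value"}])
```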
14 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | # Need to also run on pushes to master since PRs from a fork won't have 5 | # access to the super user token, thereby skipping the integration tests 6 | # and doctests 7 | push: 8 | branches: 9 | - master 10 | 11 | pull_request: 12 | branches: 13 | - master 14 | 15 | jobs: 16 | build: 17 | runs-on: ${{ matrix.os }} 18 | env: 19 | REDCAPDEMO_SUPERUSER_TOKEN: ${{ secrets.REDCAPDEMO_SUPERUSER_TOKEN }} 20 | strategy: 21 | matrix: 22 | os: [ubuntu-latest, macos-latest, windows-latest] 23 | python-version: ['3.10', '3.11', '3.12'] 24 | 25 | steps: 26 | - uses: actions/checkout@v2 27 | - name: Set up Python 28 | uses: actions/setup-python@v2 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | - name: Run poetry image 32 | uses: abatilo/actions-poetry@v2.0.0 33 | with: 34 | poetry-version: 1.3.1 35 | - name: Install dependencies 36 | run: | 37 | poetry install -E data_science 38 | - name: Run doctests 39 | # Forks can't run doctests, requires super user token 40 | if: github.triggering_actor == 'pwildenhain' 41 | run: | 42 | poetry run pytest --doctest-only --doctest-plus 43 | - name: Run tests 44 | run: | 45 | poetry run pytest 46 | - name: Build docs 47 | run: | 48 | poetry run mkdocs build 49 | - name: Upload coverage to Codecov 50 | uses: codecov/codecov-action@v2.1.0 51 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: PyCap 2 | site_url: http://redcap-tools.github.io/PyCap/ 3 | 4 | repo_name: redcap-tools/PyCap 5 | repo_url: https://github.com/redcap-tools/PyCap 6 | 7 | nav: 8 | - Home: index.md 9 | - Quick Start: quickstart.md 10 | - Using PyCap in an app/package: using-in-app-or-package.md 11 | - API Reference: 12 | - Project: api_reference/project.md 13 | - Arms: api_reference/arms.md 14 | - Data Access Groups: api_reference/data_access_groups.md 15 | - Events: api_reference/events.md 16 | - Field Names: api_reference/field_names.md 17 | - File Repository: api_reference/file_repository.md 18 | - Files: api_reference/files.md 19 | - Instruments: api_reference/instruments.md 20 | - Logging: api_reference/logging.md 21 | - Metadata: api_reference/metadata.md 22 | - Project Info: api_reference/project_info.md 23 | - Repeating: api_reference/repeating.md 24 | - Records: api_reference/records.md 25 | - Reports: api_reference/reports.md 26 | - Surveys: api_reference/surveys.md 27 | - Users: api_reference/users.md 28 | - User Roles: api_reference/user_roles.md 29 | - Version: api_reference/version.md 30 | theme: 31 | name: material 32 | logo: img/logo.png 33 | favicon: img/logo.png 34 | palette: 35 | primary: red 36 | accent: red 37 | markdown_extensions: 38 | - pymdownx.highlight: 39 | anchor_linenums: true 40 | - pymdownx.inlinehilite 41 | - pymdownx.snippets 42 | - pymdownx.superfences 43 | plugins: 44 | - search 45 | - mkdocstrings 46 | -------------------------------------------------------------------------------- /tests/unit/test_survey_project.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | """Test suite for Project class with survey against mocked REDCap server""" 3 | # pylint: disable=missing-function-docstring 4 | # pylint: disable=redefined-outer-name 5 | 6 | import pytest 7 | import responses 8 | 9 | from redcap import Project 10 | from tests.unit.callback_utils import get_survey_project_request_handler, parse_request 11 | 12 | 13 | @pytest.fixture(scope="module") 14 | def survey_project(project_urls, project_token, mocked_responses) -> Project: 15 | """Mocked simple REDCap project, with survey fields""" 16 | 17 | def request_callback_survey(req): 18 | request_data, request_headers, request_type = parse_request(req) 19 | request_handler = get_survey_project_request_handler(request_type) 20 | response = request_handler(data=request_data, headers=request_headers) 21 | return response 22 | 23 | survey_project_url = project_urls["survey_project"] 24 | mocked_responses.add_callback( 25 | responses.POST, 26 | survey_project_url, 27 | callback=request_callback_survey, 28 | content_type="application/json", 29 | ) 30 | 31 | return Project(survey_project_url, project_token, verify_ssl=False) 32 | 33 | 34 | def test_init(survey_project): 35 | assert isinstance(survey_project, Project) 36 | 37 | 38 | def test_export_survey_fields(survey_project): 39 | records = survey_project.export_records(export_survey_fields=True) 40 | 41 | for record in records: 42 | assert "redcap_survey_identifier" in record 43 | assert "demographics_timestamp" in record 44 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "PyCap" 3 | version = "2.7.0" 4 | description = "PyCap: Python interface to REDCap" 5 | authors = ["Scott Burns "] 6 | maintainers = ["Paul Wildenhain "] 7 | license = "MIT" 8 | readme = "README.md" 9 | documentation = "http://redcap-tools.github.io/PyCap/" 10 | repository = "https://github.com/redcap-tools/PyCap" 11 | classifiers = [ 12 | "Development Status :: 5 - Production/Stable", 13 | "Intended Audience :: Developers", 14 | "Intended Audience :: Science/Research", 15 | "License :: OSI Approved :: MIT License", 16 | "License :: OSI Approved", 17 | "Topic :: Software Development", 18 | "Topic :: Scientific/Engineering", 19 | "Operating System :: Microsoft :: Windows", 20 | "Operating System :: POSIX", 21 | "Operating System :: Unix", 22 | "Operating System :: MacOS", 23 | "Programming Language :: Python", 24 | ] 25 | packages = [ 26 | { include = "redcap" } 27 | ] 28 | 29 | [tool.poetry.dependencies] 30 | python = "^3.10" 31 | requests = "^2.20" 32 | semantic-version = "^2.8.5" 33 | pandas = {version = "^2.0.0", optional = true} 34 | 35 | [tool.poetry.extras] 36 | data_science = ["pandas"] 37 | 38 | [tool.poetry.group.dev.dependencies] 39 | pytest = "^7.0.0" 40 | pytest-cov = "^3.0.0" 41 | pytest-black = "^0.3.12" 42 | pytest-mypy = "^0.10.3" 43 | pytest-pylint = "^0.20.0" 44 | responses = "^0.14.0" 45 | pytest-mock = "^3.6.1" 46 | types-requests = "^2.26.1" 47 | pandas-stubs = "^1.2.0" 48 | mkdocs = "^1.2.3" 49 | mkdocs-material = "^8.1.3" 50 | mkdocstrings = "^0.17.0" 51 | pytest-doctestplus = "^0.11.2" 52 | Jinja2 = "~3.0.0" 53 | 54 | [build-system] 55 | requires = ["poetry-core>=1.0.0"] 56 | build-backend = "poetry.core.masonry.api" 57 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: 
-------------------------------------------------------------------------------- 1 | # Issues & Contributing 2 | 3 | If you have an issue with `PyCap` or the REDCap API, please raise an issue on the [issues page](https://github.com/redcap-tools/PyCap/issues). We'll do our best to help where we can. 4 | 5 | PyCap follows the [Fork-Pull workflow](https://help.github.com/articles/using-pull-requests#fork--pull) for accepting contributions. If you'd like to contribute code to `PyCap`, please use the following workflow: 6 | 7 | 1. If you don't already have an account on GitHub, please make one. 8 | 2. Fork [this repo](https://github.com/redcap-tools/PyCap) to your own account. 9 | 3. Check out a branch & commit your changes. See the section on `poetry` below for instructions on how to set up your local development environment. Tests are definitely appreciated :100:! 10 | 4. Push those changes to your repo & submit a Pull-Request to this repository. 11 | 12 | If any of these steps are unclear, please peruse the helpful [GitHub Guide on Forking](https://guides.github.com/activities/forking/) or file an issue, and we'll try to help out! 13 | 14 | ## Using `poetry` 15 | 16 | This package uses [`poetry`](https://python-poetry.org/docs/master/#installation) for dependency management and publishing. It is required for local development with `PyCap`. 17 | 18 | 1. Install `poetry` 19 | 20 | ```sh 21 | $ curl -sSL https://install.python-poetry.org | python3 - 22 | ``` 23 | 24 | 2. Install all project dependencies (including development/optional dependencies). 25 | 26 | ```sh 27 | $ poetry install -E data_science 28 | ``` 29 | 30 | 3. Add your changes and make sure they pass all tests. 31 | 32 | ```sh 33 | $ poetry run pytest 34 | ``` 35 | 36 | If you make changes to the dependencies, you'll need to handle 37 | them with the [`poetry add/remove`](https://python-poetry.org/docs/master/basic-usage/#installing-dependencies) commands.
38 | -------------------------------------------------------------------------------- /docs/quickstart.md: -------------------------------------------------------------------------------- 1 | # Quickstart 2 | 3 | PyCap makes it very simple to interact with the data stored in your REDCap projects 4 | 5 | > First, install python-dotenv (`pip install python-dotenv` or `poetry add python-dotenv`) and create a `.env` file in your project root with your REDCap credentials: 6 | 7 | ```dotenv 8 | REDCAP_API_URL=https://redcap.example.edu/api/ 9 | REDCAP_API_KEY=SomeSuperSecretAPIKeyThatNobodyElseShouldHave 10 | ``` 11 | 12 | Then update your code to load the values from the environment: 13 | 14 | ```python 15 | from dotenv import load_dotenv 16 | import os 17 | from redcap import Project 18 | 19 | load_dotenv() # Load variables from .env file 20 | 21 | api_url = os.getenv('REDCAP_API_URL') 22 | api_key = os.getenv('REDCAP_API_KEY') 23 | project = Project(api_url, api_key) 24 | ``` 25 | 26 | Export all the data 27 | 28 | ```python 29 | data = project.export_records() 30 | ``` 31 | 32 | Import all the data 33 | 34 | ```python 35 | to_import = [{'record': 'foo', 'test_score': 'bar'}] 36 | response = project.import_records(to_import) 37 | ``` 38 | 39 | Import a file 40 | 41 | ```python 42 | fname = 'something_to_upload.txt' 43 | with open(fname, 'r') as fobj: 44 | project.import_file('1', 'file', fname, fobj) 45 | ``` 46 | 47 | Export a file 48 | 49 | ```python 50 | content, headers = project.export_file('1', 'file') 51 | with open(headers['name'], 'wb') as fobj: 52 | fobj.write(content) 53 | ``` 54 | 55 | Delete a file 56 | ```python 57 | try: 58 | project.delete_file('1', 'file') 59 | except redcap.RedcapError: 60 | # Throws this if file wasn't successfully deleted 61 | pass 62 | except ValueError: 63 | # You screwed up and gave it a bad field name, etc 64 | pass 65 | ``` 66 | 67 | Export a PDF file of all instruments (blank) 68 | 69 | ```python 70 | content, _headers = project.export_pdf() 71 | with open('all_instruments_blank.pdf', 'wb') as fobj: 72 | fobj.write(content) 73 | ``` 74 | -------------------------------------------------------------------------------- /redcap/methods/project_info.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project info""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, Json 6 | 7 | if TYPE_CHECKING: 8 | import pandas as pd 9 | 10 | 11 | class ProjectInfo(Base): 12 | """Responsible for all API methods under 'Projects' in the API Playground""" 13 | 14 | def export_project_info( 15 | self, 16 | format_type: Literal["json", "csv", "xml", "df"] = "json", 17 | df_kwargs: Optional[Dict[str, Any]] = None, 18 | ): 19 | """ 20 | Export Project Information 21 | 22 | Args: 23 | format_type: Format of returned data 24 | df_kwargs: 25 | Passed to `pandas.read_csv` to control construction of 26 | returned DataFrame. By default, nothing 27 | 28 | Returns: 29 | Union[str, List[Dict[str, Any]], pandas.DataFrame]: Project information 30 | 31 | Examples: 32 | >>> proj.export_project_info() 33 | {'project_id': ... 34 | ... 35 | 'in_production': 0, 36 | 'project_language': 'English', 37 | 'purpose': 0, 38 | 'purpose_other': '', 39 | ... 40 | 'project_grant_number': '', 41 | 'project_pi_firstname': '', 42 | 'project_pi_lastname': '', 43 | ... 
44 | 'bypass_branching_erase_field_prompt': 0} 45 | """ 46 | 47 | payload = self._initialize_payload(content="project", format_type=format_type) 48 | return_type = self._lookup_return_type(format_type, request_type="export") 49 | 50 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 51 | 52 | return self._return_data( 53 | response=response, 54 | content="project", 55 | format_type=format_type, 56 | df_kwargs=df_kwargs, 57 | ) 58 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/unexpected-behavior-issue-template.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Unexpected behavior issue template 3 | about: Please report instances of PyCap not working as you expect 4 | title: 'Unexpected Behavior: {short description here}' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | *!! Note about security !! If your question/issue involves sensitive information, please do not post it in this public location. You might assist someone in compromising your institution's server or network. The [REDCap Community](https://community.projectredcap.org/spaces/20/index.html) is the appropriate forum. Please ask your institution's REDCap administrator to post a question on this site, with the tags [api](https://community.projectredcap.org/topics/337/api.html) and [pycap](https://community.projectredcap.org/topics/2803/pycap.html).* 11 | 12 | Thank you for taking the time to file an issue. Here are some suggestions for writing your issue that should let us respond and help you more quickly. 13 | 14 | **Describe the behavior**: Please provide a clear and concise description of the scenario and the behavior. *Be careful not to include tokens, PHI (protected health information), or other information that should not be public!* 15 | 16 | 17 | **Expected behavior**: A clear and concise description of what you expected to happen. 18 | 19 | 20 | **Desktop (please complete the following information):** 21 | - OS: [e.g. Windows 10] 22 | - REDCap version [e.g. 9.0.1 --look at the bottom of the REDCap web page] 23 | - PyCap Version [e.g. 1.1.3] 24 | 25 | 26 | **Additional context**: Add any other information about the specific problem, such as a data dictionary or screenshot. 27 | 28 | 29 | **Thank you and reminder**: 30 | * Many people have contributed to this package and it's documentation, and it's continually improving. If you have advice for improving the code, documentation, or this issue template, we'd love to hear it. 31 | * *Remember, don't include tokens, PHI, or other information that should not be public.* 32 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # PyCap 2 | 3 | PyCap is an interface to the [REDCap](http://www.project-redcap.org) Application Programming Interface (API). PyCap is designed to be a minimal interface exposing all required and optional API parameters. Our hope is that it makes simple things easy & hard things possible. 
4 | 5 | ## Installation 6 | 7 | Install the latest version with [`pip`](https://pypi.python.org/pypi/pip) 8 | 9 | ```sh 10 | $ pip install PyCap 11 | ``` 12 | 13 | If you want to load REDCap data into [`pandas`](https://pandas.pydata.org/) dataframes, this will make sure you have `pandas` installed 14 | 15 | ```sh 16 | $ pip install PyCap[all] 17 | ``` 18 | 19 | For secure credential management, install [`python-dotenv`](https://pypi.org/project/python-dotenv/) (`pip install python-dotenv`) and create a `.env` file in your project root with your REDCap credentials: 20 | 21 | ```dotenv 22 | REDCAP_API_URL=https://redcap.example.edu/api/ 23 | REDCAP_API_KEY=YourSuperSecretAPIKeyHere 24 | ``` 25 | 26 | To install the bleeding edge version from the github repo, use the following 27 | 28 | ```sh 29 | $ pip install -e git+https://github.com/redcap-tools/PyCap.git#egg=PyCap 30 | ``` 31 | 32 | ## Philosophy 33 | 34 | The REDCap API is pretty simple. There is no built-in search or pagination, for example. However, it does expose all the functionality required to build advanced data management services on top of the API. 35 | 36 | In the same way, PyCap is minimal by design. It doesn't do anything fancy behind the scenes and will not prevent you from shooting yourself in the foot. However, it should be very easy to understand and mentally-map PyCap functionality to the REDCap API. 37 | 38 | ## License 39 | 40 | PyCap is licensed under the [MIT license](http://opensource.org/licenses/MIT). 41 | 42 | ## Citing 43 | 44 | If you use PyCap in your research, please consider citing the software: 45 | 46 | ``` 47 | Burns, S. S., Browne, A., Davis, G. N., Rimrodt, S. L., & Cutting, L. E. PyCap (Version 1.0) [Computer Software]. 48 | Nashville, TN: Vanderbilt University and Philadelphia, PA: Childrens Hospital of Philadelphia. 49 | Available from https://github.com/redcap-tools/PyCap. doi:10.5281/zenodo.9917 50 | ``` 51 | -------------------------------------------------------------------------------- /redcap/methods/field_names.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project field names""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, Json 6 | 7 | if TYPE_CHECKING: 8 | import pandas as pd 9 | 10 | 11 | class FieldNames(Base): 12 | """Responsible for all API methods under 'Field Names' in the API Playground""" 13 | 14 | def export_field_names( 15 | self, 16 | format_type: Literal["json", "csv", "xml", "df"] = "json", 17 | field: Optional[str] = None, 18 | df_kwargs: Optional[Dict[str, Any]] = None, 19 | ): 20 | # pylint: disable=line-too-long 21 | """ 22 | Export the project's export field names 23 | 24 | Args: 25 | format_type: 26 | Return the metadata in native objects, csv or xml. 27 | `'df'` will return a `pandas.DataFrame` 28 | field: 29 | Limit exported field name to this field (only single field supported). 30 | When not provided, all fields returned 31 | df_kwargs: 32 | Passed to `pandas.read_csv` to control construction of 33 | returned DataFrame. 34 | by default `{'index_col': 'original_field_name'}` 35 | 36 | Returns: 37 | Union[str, List[Dict[str, Any]], "pd.DataFrame"]: Metadata structure for the project. 
38 | 39 | Examples: 40 | >>> proj.export_field_names() 41 | [{'original_field_name': 'record_id', 'choice_value': '', 'export_field_name': 'record_id'}, 42 | {'original_field_name': 'field_1', 'choice_value': '', 'export_field_name': 'field_1'}, 43 | {'original_field_name': 'checkbox_field', 'choice_value': '1', 'export_field_name': 'checkbox_field___1'}, 44 | {'original_field_name': 'checkbox_field', 'choice_value': '2', 'export_field_name': 'checkbox_field___2'}, 45 | {'original_field_name': 'form_1_complete', 'choice_value': '', 'export_field_name': 'form_1_complete'}] 46 | """ 47 | # pylint: enable=line-too-long 48 | payload = self._initialize_payload( 49 | content="exportFieldNames", format_type=format_type 50 | ) 51 | 52 | if field: 53 | payload["field"] = field 54 | 55 | return_type = self._lookup_return_type(format_type, request_type="export") 56 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 57 | 58 | return self._return_data( 59 | response=response, 60 | content="exportFieldNames", 61 | format_type=format_type, 62 | df_kwargs=df_kwargs, 63 | ) 64 | -------------------------------------------------------------------------------- /redcap/project.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """User facing class for interacting with a REDCap Project""" 4 | 5 | from typing import Optional 6 | 7 | import semantic_version 8 | 9 | from redcap import methods 10 | 11 | __author__ = "Scott Burns " 12 | __license__ = "MIT" 13 | __copyright__ = "2014, Vanderbilt University" 14 | 15 | # We're designing this class to be lazy by default, and not hit the API unless 16 | # explicitly requested by the user. We also want to keep the methods separated, 17 | # which means multi-layered inheritance is our best bet. 18 | # pylint: disable=attribute-defined-outside-init,too-many-ancestors 19 | 20 | 21 | class Project( 22 | methods.arms.Arms, 23 | methods.data_access_groups.DataAccessGroups, 24 | methods.events.Events, 25 | methods.field_names.FieldNames, 26 | methods.file_repository.FileRepository, 27 | methods.files.Files, 28 | methods.instruments.Instruments, 29 | methods.logging.Logging, 30 | methods.metadata.Metadata, 31 | methods.project_info.ProjectInfo, 32 | methods.records.Records, 33 | methods.repeating.Repeating, 34 | methods.reports.Reports, 35 | methods.surveys.Surveys, 36 | methods.users.Users, 37 | methods.user_roles.UserRoles, 38 | methods.version.Version, 39 | ): 40 | """Main class for interacting with REDCap projects 41 | 42 | Attributes: 43 | verify_ssl: Verify SSL, default True. Can pass path to CA_BUNDLE 44 | 45 | Note: 46 | Your REDCap token should be kept **secret**! Treat it like a password 47 | and NEVER save it directly in your script/application. Rather it should be obscured 48 | and retrieved 'behind the scenes'. For example, saving the token as an environment 49 | variable and retrieving it with `os.getenv`. 
The creation of the `TOKEN` string in 50 | the example is not shown, for the above reasons 51 | 52 | Examples: 53 | >>> from redcap import Project 54 | >>> URL = "https://redcapdemo.vumc.org/api/" 55 | >>> proj = Project(URL, TOKEN) 56 | >>> proj.field_names 57 | ['record_id', 'field_1', 'checkbox_field', 'upload_field'] 58 | >>> proj.is_longitudinal 59 | True 60 | >>> proj.def_field 61 | 'record_id' 62 | 63 | The url and token attributes are read-only, to prevent users from accidentally 64 | overwriting them 65 | >>> proj.url = "whoops" 66 | Traceback (most recent call last): 67 | ... 68 | AttributeError: ... 69 | """ 70 | 71 | @property 72 | def redcap_version(self) -> Optional[semantic_version.Version]: 73 | """REDCap version of the Project""" 74 | self._redcap_version: Optional[semantic_version.Version] 75 | try: 76 | return self._redcap_version 77 | except AttributeError: 78 | # weird pylint bug on windows where it can't find Version.export_version() 79 | # possible too many parents it's inheriting from? We also need to disable 80 | # useless-supression since this is a windows only issue 81 | # pylint: disable=no-member,useless-suppression 82 | self._redcap_version = self.export_version() 83 | # pylint: enable=no-member,useless-suppression 84 | return self._redcap_version 85 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PyCap 2 | 3 | [![CI](https://github.com/redcap-tools/PyCap/actions/workflows/ci.yml/badge.svg)](https://github.com/redcap-tools/PyCap/actions/workflows/ci.yml) 4 | [![Codecov](https://codecov.io/gh/redcap-tools/PyCap/branch/master/graph/badge.svg?token=IRgcPzANxU)](https://codecov.io/gh/redcap-tools/PyCap) 5 | [![PyPI version](https://badge.fury.io/py/pycap.svg)](https://badge.fury.io/py/pycap) 6 | [![black](https://img.shields.io/badge/code%20style-black-black)](https://pypi.org/project/black/) 7 | 8 | ## Intro 9 | 10 | `PyCap` is a python module exposing the REDCap API through some helpful abstractions. Information about the REDCap project can be found at https://project-redcap.org/. 11 | 12 | Available under the MIT license. 13 | 14 | ## Installation 15 | 16 | Install the latest version with [`pip`](https://pypi.python.org/pypi/pip) 17 | 18 | ```sh 19 | $ pip install PyCap 20 | ``` 21 | 22 | If you want to load REDCap data into [`pandas`](https://pandas.pydata.org/) dataframes, this will make sure you have `pandas` installed 23 | 24 | ```sh 25 | $ pip install PyCap[all] 26 | ``` 27 | 28 | To install the bleeding edge version from the github repo, use the following 29 | 30 | ```sh 31 | $ pip install -e git+https://github.com/redcap-tools/PyCap.git#egg=PyCap 32 | ``` 33 | 34 | ## Quickstart 35 | 36 | 1. Copy `.env.example` to `.env` and update the values with your REDCap endpoint and API token. 37 | 2. Install [`python-dotenv`](https://pypi.org/project/python-dotenv/) with `pip install python-dotenv` (or add it to your Poetry environment). 38 | 39 | Then load your credentials and connect: 40 | 41 | ```python 42 | from dotenv import load_dotenv 43 | import os 44 | from redcap import Project 45 | 46 | load_dotenv() # reads .env from the project root 47 | 48 | api_url = os.environ["REDCAP_API_URL"] 49 | api_key = os.environ["REDCAP_API_KEY"] 50 | project = Project(api_url, api_key) 51 | ``` 52 | 53 | ## Documentation 54 | 55 | Canonical documentation and usage examples can be found [here](https://redcap-tools.github.io/PyCap/). 
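Those docs cover each method in detail. For a quick taste of the API surface listed under Features below, here are a few export calls using the `project` object created in the Quickstart above (a sketch; the exact output depends on your project):

```python
# Each call maps to a single REDCap API endpoint
print(project.export_version())       # REDCap version running on your server
users = project.export_users()        # list of dicts, one per user
metadata = project.export_metadata()  # the project's data dictionary
```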
56 | 57 | ## Features 58 | 59 | Currently, these API calls are available: 60 | 61 | ### Export 62 | 63 | * Arms 64 | * Data Access Groups 65 | * Events 66 | * Field names 67 | * Instruments 68 | * Instrument-event mapping 69 | * File 70 | * File Repository 71 | * Logging 72 | * Metadata 73 | * Project Info 74 | * PDF of instruments 75 | * Records 76 | * Repeating instruments and events 77 | * Report 78 | * Surveys 79 | * Users 80 | * User-DAG assignment 81 | * User Roles 82 | * User-Role assignment 83 | * Version 84 | 85 | ### Import 86 | 87 | * Arms 88 | * Data Access Groups 89 | * Events 90 | * File 91 | * File Repository 92 | * Instrument-event mapping 93 | * Metadata 94 | * Records 95 | * Repeating instruments and events 96 | * Users 97 | * User-DAG assignment 98 | * User Roles 99 | * User-Role assignment 100 | 101 | ### Delete 102 | 103 | * Arms 104 | * Data Access Groups 105 | * Events 106 | * File 107 | * File Repository 108 | * Records 109 | * Users 110 | * User Roles 111 | 112 | ### Other 113 | 114 | * Generate next record name 115 | * Switch data access group 116 | 117 | ## Citing 118 | 119 | If you use PyCap in your research, please consider citing the software: 120 | 121 | > Burns, S. S., Browne, A., Davis, G. N., Rimrodt, S. L., & Cutting, L. E. PyCap (Version 1.0) [Computer Software]. 122 | > Nashville, TN: Vanderbilt University and Philadelphia, PA: Childrens Hospital of Philadelphia. 123 | > Available from https://github.com/redcap-tools/PyCap. doi:10.5281/zenodo.9917 124 | -------------------------------------------------------------------------------- /redcap/methods/repeating.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project repeating instruments""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, Literal, cast 4 | 5 | from redcap.methods.base import Base 6 | from redcap.request import Json 7 | 8 | if TYPE_CHECKING: 9 | import pandas as pd 10 | 11 | 12 | class Repeating(Base): 13 | """Responsible for all API methods under 'Repeating Instruments and Events' 14 | in the API Playground 15 | """ 16 | 17 | def export_repeating_instruments_events( 18 | self, 19 | format_type: Literal["json", "csv", "xml", "df"] = "json", 20 | df_kwargs: Optional[Dict[str, Any]] = None, 21 | ): 22 | """ 23 | Export the project's repeating instruments and events settings 24 | 25 | Args: 26 | format_type: 27 | Return the repeating instruments and events in native objects, 28 | csv or xml, `'df''` will return a `pandas.DataFrame` 29 | df_kwargs: 30 | Passed to pandas.read_csv to control construction of 31 | returned DataFrame 32 | 33 | Returns: 34 | Union[str, List[Dict[str, Any]], pd.DataFrame]: Repeating instruments and events 35 | for the project 36 | 37 | Examples: 38 | >>> proj.export_repeating_instruments_events() 39 | [{'event_name': 'event_1_arm_1', 'form_name': '', 'custom_form_label': ''}] 40 | """ 41 | payload = self._initialize_payload( 42 | content="repeatingFormsEvents", format_type=format_type 43 | ) 44 | 45 | return_type = self._lookup_return_type(format_type, request_type="export") 46 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 47 | 48 | return self._return_data( 49 | response=response, 50 | content="repeatingFormsEvents", 51 | format_type=format_type, 52 | df_kwargs=df_kwargs, 53 | ) 54 | 55 | def import_repeating_instruments_events( 56 | self, 57 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 58 | return_format_type: Literal["json", 
"csv", "xml"] = "json", 59 | import_format: Literal["json", "csv", "xml", "df"] = "json", 60 | ): 61 | """ 62 | Import repeating instrument and event settings into the REDCap Project 63 | 64 | Args: 65 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 66 | Note: 67 | If you pass a csv or xml string, you should use the 68 | `import format` parameter appropriately. 69 | return_format_type: 70 | Response format. By default, response will be json-decoded. 71 | import_format: 72 | Format of incoming data. By default, to_import will be json-encoded 73 | 74 | Returns: 75 | Union[int, str]: The number of repeated instruments activated 76 | 77 | Examples: 78 | >>> rep_instruments = proj.export_repeating_instruments_events(format_type="csv") 79 | >>> proj.import_repeating_instruments_events(rep_instruments, import_format="csv") 80 | 1 81 | """ 82 | payload = self._initialize_import_payload( 83 | to_import=to_import, 84 | import_format=import_format, 85 | return_format_type=return_format_type, 86 | content="repeatingFormsEvents", 87 | ) 88 | 89 | return_type = self._lookup_return_type( 90 | format_type=return_format_type, request_type="import" 91 | ) 92 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 93 | 94 | return response 95 | -------------------------------------------------------------------------------- /redcap/methods/reports.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project reports""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, Json 6 | 7 | if TYPE_CHECKING: 8 | import pandas as pd 9 | 10 | 11 | class Reports(Base): 12 | """Responsible for all API methods under 'Reports' in the API Playground""" 13 | 14 | def export_report( 15 | self, 16 | report_id: str, 17 | format_type: Literal["json", "csv", "xml", "df"] = "json", 18 | raw_or_label: Literal["raw", "label"] = "raw", 19 | raw_or_label_headers: Literal["raw", "label"] = "raw", 20 | export_checkbox_labels: bool = False, 21 | csv_delimiter: Literal[",", "tab", ";", "|", "^"] = ",", 22 | df_kwargs: Optional[Dict[str, Any]] = None, 23 | ): 24 | """ 25 | Export a report of the Project 26 | 27 | Args: 28 | report_id: 29 | The report ID number provided next to the report name 30 | on the report list page 31 | format_type: 32 | Format of returned data. `'json'` returns json-decoded 33 | objects while `'csv'` and `'xml'` return strings. 34 | `'df'` will attempt to return a `pandas.DataFrame`. 35 | raw_or_label: 36 | Export the raw coded values or 37 | labels for the options of multiple choice fields 38 | raw_or_label_headers: 39 | For the CSV headers, export the variable/field names 40 | (raw) or the field labels (label) 41 | export_checkbox_labels: 42 | Specifies the format of 43 | checkbox field values specifically when exporting the data as labels 44 | (i.e. when `rawOrLabel=label`). When exporting labels, by default 45 | (without providing the exportCheckboxLabel flag or if 46 | exportCheckboxLabel=false), all checkboxes will either have a value 47 | 'Checked' if they are checked or 'Unchecked' if not checked. 48 | But if exportCheckboxLabel is set to true, it will instead export 49 | the checkbox value as the checkbox option's label (e.g., 'Choice 1') 50 | if checked or it will be blank/empty (no value) if not checked 51 | csv_delimiter: 52 | For the csv format, choose how the csv delimiter. 
53 | 54 | Raises: 55 | ValueError: Unsupported format specified 56 | 57 | Returns: 58 | Union[List[Dict[str, Any]], str, pd.DataFrame]: Data from the report ordered by 59 | the record (primary key of project) and then by event id 60 | 61 | Examples: 62 | >>> proj.export_report(report_id="4292") # doctest: +SKIP 63 | [{'record_id': '1', 'redcap_event_name': 'event_1_arm_1', 64 | 'checkbox_field___1': '0', 'checkbox_field___2': '1'}] 65 | """ 66 | payload = self._initialize_payload(content="report", format_type=format_type) 67 | keys_to_add = ( 68 | report_id, 69 | raw_or_label, 70 | raw_or_label_headers, 71 | export_checkbox_labels, 72 | csv_delimiter, 73 | ) 74 | str_keys = ( 75 | "report_id", 76 | "rawOrLabel", 77 | "rawOrLabelHeaders", 78 | "exportCheckboxLabel", 79 | "csvDelimiter", 80 | ) 81 | for key, data in zip(str_keys, keys_to_add): 82 | data = cast(str, data) 83 | if data: 84 | payload[key] = data 85 | 86 | return_type = self._lookup_return_type(format_type, request_type="export") 87 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 88 | 89 | return self._return_data( 90 | response=response, 91 | content="report", 92 | format_type=format_type, 93 | df_kwargs=df_kwargs, 94 | ) 95 | -------------------------------------------------------------------------------- /redcap/methods/logging.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project logs""" 2 | 3 | from datetime import datetime 4 | from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union, cast 5 | 6 | from redcap.methods.base import Base, Json 7 | 8 | if TYPE_CHECKING: 9 | import pandas as pd 10 | 11 | 12 | class Logging(Base): 13 | """Responsible for all API methods under 'Logging' in the API Playground""" 14 | 15 | # pylint: disable=too-many-locals 16 | 17 | def export_logging( 18 | self, 19 | format_type: Literal["json", "csv", "xml", "df"] = "json", 20 | return_format_type: Optional[Literal["json", "csv", "xml"]] = None, 21 | log_type: Optional[ 22 | Literal[ 23 | "export", 24 | "manage", 25 | "user", 26 | "record", 27 | "record_add", 28 | "record_edit", 29 | "record_delete", 30 | "lock_record", 31 | "page_view", 32 | ] 33 | ] = None, 34 | user: Optional[str] = None, 35 | record: Optional[str] = None, 36 | dag: Optional[str] = None, 37 | begin_time: Optional[datetime] = None, 38 | end_time: Optional[datetime] = None, 39 | df_kwargs: Optional[Dict[str, Any]] = None, 40 | ): 41 | """ 42 | Export the project's logs 43 | 44 | Args: 45 | format_type: 46 | Return the metadata in native objects, csv or xml. 47 | `'df'` will return a `pandas.DataFrame` 48 | return_format_type: 49 | Response format. By default, response will be json-decoded. 50 | log_type: 51 | Filter by specific event types 52 | user: 53 | Filter by events created by a certain user 54 | record: 55 | Filter by events created for a certain record 56 | dag: 57 | Filter by events created by a certain data access group (group ID) 58 | begin_time: 59 | Filter by events created after a given timestamp 60 | end_time: 61 | Filter by events created before a given timestamp 62 | df_kwargs: 63 | Passed to `pandas.read_csv` to control construction of 64 | returned DataFrame. 
65 | Returns: 66 | Union[str, List[Dict[str, Any]], "pd.DataFrame"]: 67 | List of all changes made to this project, including data exports, 68 | data changes, and the creation or deletion of users 69 | 70 | Examples: 71 | >>> proj.export_logging() 72 | [{'timestamp': ..., 'username': ..., 'action': 'Manage/Design ', 73 | 'details': 'Create project ...'}, ...] 74 | """ 75 | payload: Dict[str, Any] = self._initialize_payload( 76 | content="log", format_type=format_type 77 | ) 78 | optional_args = [ 79 | ("returnFormat", return_format_type), 80 | ("logtype", log_type), 81 | ("user", user), 82 | ("record", record), 83 | ("dag", dag), 84 | ("beginTime", begin_time), 85 | ("endTime", end_time), 86 | ] 87 | 88 | for arg in optional_args: 89 | arg_name, arg_value = arg 90 | if arg_value: 91 | if arg_name in ["beginTime", "endTime"]: 92 | arg_value = cast(datetime, arg_value) 93 | arg_value = arg_value.strftime("%Y-%m-%d %H:%M:%S") 94 | 95 | payload[arg_name] = arg_value 96 | 97 | return_type = self._lookup_return_type(format_type, request_type="export") 98 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 99 | 100 | return self._return_data( 101 | response=response, 102 | content="log", 103 | format_type=format_type, 104 | df_kwargs=df_kwargs, 105 | ) 106 | # pylint: enable=too-many-locals 107 | -------------------------------------------------------------------------------- /tests/integration/conftest.py: -------------------------------------------------------------------------------- 1 | """Test fixtures for integration tests only""" 2 | 3 | # pylint: disable=redefined-outer-name 4 | import os 5 | import tempfile 6 | 7 | from datetime import datetime 8 | from pathlib import Path 9 | from typing import cast 10 | 11 | import pytest 12 | import requests 13 | 14 | from redcap import Project 15 | 16 | SUPER_TOKEN = os.getenv("REDCAPDEMO_SUPERUSER_TOKEN") 17 | 18 | 19 | def create_project(url: str, super_token: str, project_xml_path: Path) -> str: 20 | """Create a project for testing on redcapdemo.vumc.org 21 | This API method returns the token for the newly created project, which 22 | used for the integration tests 23 | """ 24 | current_time = datetime.now().strftime("%m-%d %H:%M:%S") 25 | project_title = f"PyCap { project_xml_path.stem }: { current_time }" 26 | project_info = f""" 27 | 28 | {project_title} 29 | 0 30 | """ 31 | 32 | with open(project_xml_path, encoding="UTF-8") as proj_xml_file: 33 | project_data = proj_xml_file.read() 34 | 35 | res = requests.post( 36 | url=url, 37 | data={ 38 | "token": super_token, 39 | "content": "project", 40 | "format": "xml", 41 | "data": project_info, 42 | "odm": project_data, 43 | }, 44 | timeout=60, 45 | ) 46 | # Response includes a bunch of SQL statements before we get to the token 47 | # This limits the return value to just the token 48 | return res.text[-32:] 49 | 50 | 51 | def redcapdemo_url() -> str: 52 | """API url for redcapdemo testing site""" 53 | return "https://redcapdemo.vumc.org/api/" 54 | 55 | 56 | @pytest.fixture(scope="module") 57 | def redcapdemo_url_fixture() -> str: 58 | """API url for redcapdemo testing site, as a testing fixture""" 59 | return redcapdemo_url() 60 | 61 | 62 | @pytest.fixture(scope="module") 63 | def simple_project_token(redcapdemo_url_fixture) -> str: 64 | """Create a simple project and return it's API token""" 65 | simple_project_xml_path = Path("tests/data/test_simple_project.xml") 66 | super_token = cast(str, SUPER_TOKEN) 67 | project_token = create_project( # type: ignore 68 | 
redcapdemo_url_fixture, super_token, simple_project_xml_path 69 | ) 70 | 71 | return project_token 72 | 73 | 74 | def grant_superuser_rights(proj: Project) -> Project: 75 | """Given a newly created project, give the superuser 76 | the highest level of user rights 77 | """ 78 | superuser = proj.export_users()[0] 79 | 80 | superuser["record_delete"] = 1 81 | superuser["record_rename"] = 1 82 | superuser["lock_records_all_forms"] = 1 83 | superuser["lock_records"] = 1 84 | 85 | res = proj.import_users([superuser]) 86 | assert res == 1 87 | 88 | return proj 89 | 90 | 91 | def add_files_to_repository(proj: Project) -> Project: 92 | """Given a project, fill out it's file repository 93 | For some reason, this doesn't carry over in the XML file so 94 | it has to be done after project creation 95 | """ 96 | new_folder = proj.create_folder_in_repository("test").pop() 97 | 98 | with tempfile.NamedTemporaryFile(mode="w+t") as tmp_file: 99 | tmp_file.write("hello") 100 | tmp_file.seek(0) 101 | 102 | proj.import_file_into_repository(file_name="test.txt", file_object=tmp_file) 103 | proj.import_file_into_repository( 104 | file_name="test_in_folder.txt", 105 | file_object=tmp_file, 106 | folder_id=new_folder["folder_id"], 107 | ) 108 | 109 | return proj 110 | 111 | 112 | @pytest.fixture(scope="module") 113 | def simple_project(redcapdemo_url_fixture, simple_project_token): 114 | """A simple REDCap project""" 115 | simple_proj = Project(redcapdemo_url_fixture, simple_project_token) 116 | simple_proj = grant_superuser_rights(simple_proj) 117 | simple_proj = add_files_to_repository(simple_proj) 118 | 119 | return simple_proj 120 | 121 | 122 | @pytest.fixture(scope="module") 123 | def long_project_token(redcapdemo_url_fixture) -> str: 124 | """Create a long project and return it's API token""" 125 | long_project_xml_path = Path("tests/data/test_long_project.xml") 126 | super_token = cast(str, SUPER_TOKEN) 127 | project_token = create_project( 128 | redcapdemo_url_fixture, super_token, long_project_xml_path 129 | ) 130 | 131 | return project_token 132 | 133 | 134 | @pytest.fixture(scope="module") 135 | def long_project(redcapdemo_url_fixture, long_project_token): 136 | """A long REDCap project""" 137 | long_proj = Project(redcapdemo_url_fixture, long_project_token) 138 | long_proj = grant_superuser_rights(long_proj) 139 | return long_proj 140 | -------------------------------------------------------------------------------- /redcap/methods/metadata.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project metadata""" 2 | 3 | from typing import ( 4 | TYPE_CHECKING, 5 | Any, 6 | Dict, 7 | List, 8 | Literal, 9 | Optional, 10 | Union, 11 | cast, 12 | ) 13 | 14 | from redcap.methods.base import Base, Json 15 | 16 | if TYPE_CHECKING: 17 | import pandas as pd 18 | 19 | 20 | class Metadata(Base): 21 | """Responsible for all API methods under 'Metadata' in the API Playground""" 22 | 23 | def export_metadata( 24 | self, 25 | format_type: Literal["json", "csv", "xml", "df"] = "json", 26 | fields: Optional[List[str]] = None, 27 | forms: Optional[List[str]] = None, 28 | df_kwargs: Optional[Dict[str, Any]] = None, 29 | ): 30 | """ 31 | Export the project's metadata 32 | 33 | Args: 34 | format_type: 35 | Return the metadata in native objects, csv, or xml. 
36 | `'df'` will return a `pandas.DataFrame` 37 | fields: Limit exported metadata to these fields 38 | forms: Limit exported metadata to these forms 39 | df_kwargs: 40 | Passed to `pandas.read_csv` to control construction of 41 | returned DataFrame. 42 | By default `{'index_col': 'field_name'}` 43 | 44 | Returns: 45 | Union[str, List[Dict], pd.DataFrame]: Metadata structure for the project. 46 | 47 | Examples: 48 | >>> proj.export_metadata(format_type="df") 49 | form_name section_header ... matrix_ranking field_annotation 50 | field_name ... 51 | record_id form_1 NaN ... NaN NaN 52 | field_1 form_1 NaN ... NaN NaN 53 | checkbox_field form_1 NaN ... NaN NaN 54 | upload_field form_1 NaN ... NaN NaN 55 | ... 56 | """ 57 | payload = self._initialize_payload(content="metadata", format_type=format_type) 58 | to_add = [fields, forms] 59 | str_add = ["fields", "forms"] 60 | for key, data in zip(str_add, to_add): 61 | if data: 62 | for i, value in enumerate(data): 63 | payload[f"{key}[{i}]"] = value 64 | 65 | return_type = self._lookup_return_type(format_type, request_type="export") 66 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 67 | 68 | return self._return_data( 69 | response=response, 70 | content="metadata", 71 | format_type=format_type, 72 | df_kwargs=df_kwargs, 73 | ) 74 | 75 | def import_metadata( 76 | self, 77 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 78 | return_format_type: Literal["json", "csv", "xml"] = "json", 79 | import_format: Literal["json", "csv", "xml", "df"] = "json", 80 | date_format: Literal["YMD", "DMY", "MDY"] = "YMD", 81 | ): 82 | """ 83 | Import metadata (Data Dictionary) into the REDCap Project 84 | 85 | Args: 86 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 87 | Note: 88 | If you pass a csv or xml string, you should use the 89 | `import_format` parameter appropriately. 90 | return_format_type: 91 | Response format. By default, response will be json-decoded. 92 | import_format: 93 | Format of incoming data. By default, to_import will be json-encoded 94 | date_format: 95 | Describes the formatting of dates. By default, date strings 96 | are formatted as 'YYYY-MM-DD' corresponding to 'YMD'. If date 97 | strings are formatted as 'MM/DD/YYYY' set this parameter as 98 | 'MDY' and if formatted as 'DD/MM/YYYY' set as 'DMY'. No 99 | other formattings are allowed. 
100 | 101 | Returns: 102 | Union[int, str]: The number of imported fields 103 | 104 | Examples: 105 | >>> metadata = proj.export_metadata(format_type="csv") 106 | >>> proj.import_metadata(metadata, import_format="csv") 107 | 4 108 | """ 109 | payload = self._initialize_import_payload( 110 | to_import=to_import, 111 | import_format=import_format, 112 | return_format_type=return_format_type, 113 | content="metadata", 114 | ) 115 | payload["dateFormat"] = date_format 116 | 117 | return_type = self._lookup_return_type( 118 | format_type=return_format_type, request_type="import" 119 | ) 120 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 121 | 122 | return response 123 | -------------------------------------------------------------------------------- /redcap/methods/arms.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project arms""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, Json 6 | 7 | if TYPE_CHECKING: 8 | import pandas as pd 9 | 10 | 11 | class Arms(Base): 12 | """Responsible for all API methods under 'Arms' in the API Playground""" 13 | 14 | def export_arms( 15 | self, 16 | format_type: Literal["json", "csv", "xml", "df"] = "json", 17 | arms: Optional[List[str]] = None, 18 | ): 19 | """ 20 | Export the Arms of the Project 21 | 22 | Note: 23 | This only works for longitudinal projects. 24 | 25 | Args: 26 | format_type: 27 | Response return format 28 | arms: 29 | An array of arm numbers that you wish to pull arms for 30 | (by default, all arms are pulled) 31 | 32 | Returns: 33 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: List of Arms 34 | 35 | Examples: 36 | >>> proj.export_arms() 37 | [{'arm_num': 1, 'name': 'Arm 1'}] 38 | """ 39 | payload = self._initialize_payload(content="arm", format_type=format_type) 40 | if arms: 41 | # Turn list of arms into dict, and append to payload 42 | arms_dict = {f"arms[{ idx }]": arm for idx, arm in enumerate(arms)} 43 | payload.update(arms_dict) 44 | return_type = self._lookup_return_type(format_type, request_type="export") 45 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 46 | 47 | return self._return_data( 48 | response=response, 49 | content="arm", 50 | format_type=format_type, 51 | ) 52 | 53 | def import_arms( 54 | self, 55 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 56 | return_format_type: Literal["json", "csv", "xml"] = "json", 57 | import_format: Literal["json", "csv", "xml", "df"] = "json", 58 | override: Optional[int] = 0, 59 | ): 60 | """ 61 | Import Arms into the REDCap Project 62 | 63 | Note: 64 | This only works for longitudinal projects. 65 | 66 | Args: 67 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 68 | Note: 69 | If you pass a csv or xml string, you should use the 70 | `import format` parameter appropriately. 71 | return_format_type: 72 | Response format. By default, response will be json-decoded. 73 | import_format: 74 | Format of incoming data. By default, to_import will be json-encoded 75 | override: 76 | 0 - false [default], 1 - true 77 | You may use override=1 as a 'delete all + import' action in order to 78 | erase all existing Arms in the project while importing new Arms. 79 | If override=0, then you can only add new Arms or rename existing ones. 
80 | 81 | Returns: 82 | Union[int, str]: Number of Arms added or updated 83 | 84 | Examples: 85 | Create a new arm 86 | >>> new_arm = [{"arm_num": 2, "name": "Arm 2"}] 87 | >>> proj.import_arms(new_arm) 88 | 1 89 | """ 90 | payload = self._initialize_import_payload( 91 | to_import=to_import, 92 | import_format=import_format, 93 | return_format_type=return_format_type, 94 | content="arm", 95 | ) 96 | payload["action"] = "import" 97 | payload["override"] = override 98 | 99 | return_type = self._lookup_return_type( 100 | format_type=return_format_type, request_type="import" 101 | ) 102 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 103 | 104 | return response 105 | 106 | def delete_arms( 107 | self, 108 | arms: List[str], 109 | return_format_type: Literal["json", "csv", "xml"] = "json", 110 | ): 111 | """ 112 | Delete Arms from the Project 113 | 114 | Note: 115 | Because of this method's destructive nature, it is only available 116 | for use for projects in Development status. 117 | Additionally, please be aware that deleting an arm also automatically 118 | deletes all events that belong to that arm, and will also automatically 119 | delete any records/data that have been collected under that arm 120 | (this is non-reversible data loss). 121 | This only works for longitudinal projects. 122 | 123 | Args: 124 | arms: List of arm numbers to delete from the project 125 | return_format_type: 126 | Response format. By default, response will be json-decoded. 127 | 128 | Returns: 129 | Union[int, str]: Number of arms deleted 130 | 131 | Examples: 132 | Create a new arm 133 | >>> new_arm = [{"arm_num": 2, "name": "Arm 2"}] 134 | >>> proj.import_arms(new_arm) 135 | 1 136 | 137 | Delete the new arm 138 | >>> proj.delete_arms([2]) 139 | 1 140 | """ 141 | payload = self._initialize_payload( 142 | content="arm", return_format_type=return_format_type 143 | ) 144 | payload["action"] = "delete" 145 | # Turn list of arms into dict, and append to payload 146 | arms_dict = {f"arms[{ idx }]": arm for idx, arm in enumerate(arms)} 147 | payload.update(arms_dict) 148 | 149 | return_type = self._lookup_return_type( 150 | format_type=return_format_type, request_type="delete" 151 | ) 152 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 153 | 154 | return response 155 | -------------------------------------------------------------------------------- /redcap/methods/users.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project users""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, Json 6 | 7 | if TYPE_CHECKING: 8 | import pandas as pd 9 | 10 | 11 | class Users(Base): 12 | """Responsible for all API methods under 'Users & User Privileges' in the API Playground""" 13 | 14 | def export_users( 15 | self, 16 | format_type: Literal["json", "csv", "xml", "df"] = "json", 17 | df_kwargs: Optional[Dict[str, Any]] = None, 18 | ): 19 | """ 20 | Export the users of the Project 21 | 22 | Args: 23 | format_type: 24 | Response return format 25 | df_kwargs: 26 | Passed to `pandas.read_csv` to control construction of 27 | returned DataFrame. 
By default, nothing 28 | 29 | Returns: 30 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: List of users with metadata 31 | 32 | Examples: 33 | >>> proj.export_users() 34 | [{'username': ..., 'email': ..., 'expiration': '', 'data_access_group': '', 35 | 'data_access_group_id': '', 'design': 1, 'alerts': 1, 'user_rights': 1, 36 | 'data_access_groups': 1, 'reports': 1, ...}] 37 | """ 38 | payload = self._initialize_payload(content="user", format_type=format_type) 39 | return_type = self._lookup_return_type(format_type, request_type="export") 40 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 41 | 42 | return self._return_data( 43 | response=response, 44 | content="user", 45 | format_type=format_type, 46 | df_kwargs=df_kwargs, 47 | ) 48 | 49 | def import_users( 50 | self, 51 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 52 | return_format_type: Literal["json", "csv", "xml"] = "json", 53 | import_format: Literal["json", "csv", "xml", "df"] = "json", 54 | ): 55 | """ 56 | Import users/user rights into the REDCap Project 57 | 58 | Args: 59 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 60 | Note: 61 | If you pass a csv or xml string, you should use the 62 | `import format` parameter appropriately. 63 | return_format_type: 64 | Response format. By default, response will be json-decoded. 65 | import_format: 66 | Format of incoming data. By default, to_import will be json-encoded 67 | 68 | Returns: 69 | Union[int, str]: Number of users added or updated 70 | 71 | Examples: 72 | Add test user. Only username is required 73 | >>> test_user = [{"username": "pandeharris@gmail.com"}] 74 | >>> proj.import_users(test_user) 75 | 1 76 | 77 | All currently valid options for user rights 78 | >>> test_user = [ 79 | ... {"username": "pandeharris@gmail.com", "email": "pandeharris@gmail.com", 80 | ... "firstname": "REDCap Trial", "lastname": "User", "expiration": "", 81 | ... "data_access_group": "", "data_access_group_id": "", "design": 0, 82 | ... "user_rights": 0, "data_export": 2, "reports": 1, "stats_and_charts": 1, 83 | ... "manage_survey_participants": 1, "calendar": 1, "data_access_groups": 0, 84 | ... "data_import_tool": 0, "data_comparison_tool": 0, "logging": 0, 85 | ... "file_repository": 1, "data_quality_create": 0, "data_quality_execute": 0, 86 | ... "api_export": 0, "api_import": 0, "mobile_app": 0, 87 | ... "mobile_app_download_data": 0, "record_create": 1, "record_rename": 0, 88 | ... "record_delete": 0, "lock_records_all_forms": 0, "lock_records": 0, 89 | ... "lock_records_customization": 0, "forms": {"form_1": 3}} 90 | ... ] 91 | >>> proj.import_users(test_user) 92 | 1 93 | """ 94 | payload = self._initialize_import_payload( 95 | to_import=to_import, 96 | import_format=import_format, 97 | return_format_type=return_format_type, 98 | content="user", 99 | ) 100 | 101 | return_type = self._lookup_return_type( 102 | format_type=return_format_type, request_type="import" 103 | ) 104 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 105 | 106 | return response 107 | 108 | def delete_users( 109 | self, 110 | users: List[str], 111 | return_format_type: Literal["json", "csv", "xml"] = "json", 112 | ): 113 | """ 114 | Delete users from the project. 115 | 116 | Args: 117 | users: List of usernames to delete from the project 118 | return_format_type: 119 | Response format. By default, response will be json-decoded. 
120 | 121 | Returns: 122 | Union[int, str]: Number of users deleted 123 | 124 | Examples: 125 | >>> new_user = [{"username": "pandeharris@gmail.com"}] 126 | >>> proj.import_users(new_user) 127 | 1 128 | >>> proj.delete_users(["pandeharris@gmail.com"], return_format_type="xml") 129 | '1' 130 | """ 131 | payload = self._initialize_payload( 132 | content="user", return_format_type=return_format_type 133 | ) 134 | payload["action"] = "delete" 135 | # Turn list of users into dict, and append to payload 136 | users_dict = {f"users[{ idx }]": user for idx, user in enumerate(users)} 137 | payload.update(users_dict) 138 | 139 | return_type = self._lookup_return_type( 140 | format_type=return_format_type, request_type="delete" 141 | ) 142 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 143 | return response 144 | -------------------------------------------------------------------------------- /redcap/methods/events.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project events""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, Json 6 | 7 | if TYPE_CHECKING: 8 | import pandas as pd 9 | 10 | 11 | class Events(Base): 12 | """Responsible for all API methods under 'Events' in the API Playground""" 13 | 14 | def export_events( 15 | self, 16 | format_type: Literal["json", "csv", "xml", "df"] = "json", 17 | arms: Optional[List[str]] = None, 18 | ): 19 | """ 20 | Export the Events of the Project 21 | 22 | Note: 23 | This only works for longitudinal projects. 24 | 25 | Args: 26 | format_type: 27 | Response return format 28 | arms: 29 | An array of arm numbers that you wish to pull events for 30 | (by default, all events are pulled) 31 | 32 | Returns: 33 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: List of Events 34 | 35 | Examples: 36 | >>> proj.export_events() 37 | [{'event_name': 'Event 1', 'arm_num': 1, 'unique_event_name': 'event_1_arm_1', 38 | 'custom_event_label': '', 'event_id': ...}, {'event_name': 'Event 2', ...}] 39 | """ 40 | payload = self._initialize_payload(content="event", format_type=format_type) 41 | if arms: 42 | # Turn list of arms into dict, and append to payload 43 | arms_dict = {f"arms[{ idx }]": arm for idx, arm in enumerate(arms)} 44 | payload.update(arms_dict) 45 | return_type = self._lookup_return_type(format_type, request_type="export") 46 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 47 | 48 | return self._return_data( 49 | response=response, 50 | content="event", 51 | format_type=format_type, 52 | ) 53 | 54 | def import_events( 55 | self, 56 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 57 | return_format_type: Literal["json", "csv", "xml"] = "json", 58 | import_format: Literal["json", "csv", "xml", "df"] = "json", 59 | override: Optional[int] = 0, 60 | ): 61 | """ 62 | Import Events into the REDCap Project 63 | 64 | Note: 65 | This only works for longitudinal projects. 66 | 67 | Args: 68 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 69 | Note: 70 | If you pass a csv or xml string, you should use the 71 | `import format` parameter appropriately. 72 | return_format_type: 73 | Response format. By default, response will be json-decoded. 74 | import_format: 75 | Format of incoming data. 
By default, to_import will be json-encoded 76 | override: 77 | 0 - false [default], 1 - true 78 | You may use override=1 as a 'delete all + import' action in order to 79 | erase all existing Events in the project while importing new Events. 80 | If override=0, then you can only add new Events or rename existing ones. 81 | 82 | Returns: 83 | Union[int, str]: Number of Events added or updated 84 | 85 | Examples: 86 | Create a new event 87 | >>> new_event = [{"event_name": "Event 2", "arm_num": "1"}] 88 | >>> proj.import_events(new_event) 89 | 1 90 | """ 91 | payload = self._initialize_import_payload( 92 | to_import=to_import, 93 | import_format=import_format, 94 | return_format_type=return_format_type, 95 | content="event", 96 | ) 97 | payload["action"] = "import" 98 | payload["override"] = override 99 | 100 | return_type = self._lookup_return_type( 101 | format_type=return_format_type, request_type="import" 102 | ) 103 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 104 | 105 | return response 106 | 107 | def delete_events( 108 | self, 109 | events: List[str], 110 | return_format_type: Literal["json", "csv", "xml"] = "json", 111 | ): 112 | """ 113 | Delete Events from the Project 114 | 115 | Note: 116 | Because of this method's destructive nature, it is only available 117 | for use for projects in Development status. 118 | Additionally, please be aware that deleting an event will automatically 119 | delete any records/data that have been collected under that event 120 | (this is non-reversible data loss). 121 | This only works for longitudinal projects. 122 | 123 | Args: 124 | events: List of unique event names to delete from the project 125 | return_format_type: 126 | Response format. By default, response will be json-decoded. 
127 | 128 | Returns: 129 | Union[int, str]: Number of events deleted 130 | 131 | Examples: 132 | Create a new event 133 | >>> new_event = [{"event_name": "Event 2", "arm_num": "1"}] 134 | >>> proj.import_events(new_event) 135 | 1 136 | 137 | Delete the new event 138 | >>> proj.delete_events(["event_2_arm_1"]) 139 | 1 140 | """ 141 | payload = self._initialize_payload( 142 | content="event", return_format_type=return_format_type 143 | ) 144 | payload["action"] = "delete" 145 | # Turn list of events into dict, and append to payload 146 | events_dict = {f"events[{ idx }]": event for idx, event in enumerate(events)} 147 | payload.update(events_dict) 148 | 149 | return_type = self._lookup_return_type( 150 | format_type=return_format_type, request_type="delete" 151 | ) 152 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 153 | 154 | return response 155 | -------------------------------------------------------------------------------- /redcap/request.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """Low-level HTTP functionality""" 4 | 5 | from collections import namedtuple 6 | from typing import ( 7 | Any, 8 | Dict, 9 | List, 10 | IO, 11 | Literal, 12 | Optional, 13 | Tuple, 14 | TypedDict, 15 | Union, 16 | overload, 17 | ) 18 | 19 | from requests import RequestException, Response, Session 20 | 21 | 22 | Json = List[Dict[str, Any]] 23 | EmptyJson = List[dict] 24 | 25 | __author__ = "Scott Burns " 26 | __license__ = "MIT" 27 | __copyright__ = "2014, Vanderbilt University" 28 | 29 | RedcapError = RequestException 30 | 31 | _session = Session() 32 | 33 | 34 | class FileUpload(TypedDict): 35 | """Typing for the file upload API""" 36 | 37 | file: Tuple[str, IO] 38 | 39 | 40 | _ContentConfig = namedtuple("_ContentConfig", ["return_empty_json", "return_bytes"]) 41 | 42 | 43 | class _RCRequest: 44 | """ 45 | Private class wrapping the REDCap API. Decodes response from redcap 46 | and returns it. 47 | """ 48 | 49 | def __init__( 50 | self, 51 | url: str, 52 | payload: Dict[str, Any], 53 | config: _ContentConfig, 54 | session=_session, 55 | ): 56 | """Constructor 57 | 58 | Args: 59 | url: REDCap API URL 60 | payload: Keys and values corresponding to the REDCap API 61 | config: Configuration values for getting content 62 | """ 63 | self.url = url 64 | self.payload = payload 65 | self.config = config 66 | self.session = session 67 | self.fmt = self._get_format_key(payload) 68 | 69 | @staticmethod 70 | def _get_format_key( 71 | payload: Dict[str, Any], 72 | ) -> Optional[Literal["json", "csv", "xml"]]: 73 | """Determine format of the response 74 | 75 | Args: 76 | payload: Payload to be sent in POST request 77 | 78 | Returns: 79 | The expected format of the response, if a format 80 | key was provided. Otherwise returns None to signal 81 | a non-standard response format e.g bytes, empty json, etc. 82 | 83 | Raises: 84 | ValueError: Unsupported format 85 | """ 86 | if "returnFormat" in payload: 87 | fmt_key = "returnFormat" 88 | elif "format" in payload: 89 | fmt_key = "format" 90 | else: 91 | return None 92 | 93 | return payload[fmt_key] 94 | 95 | @overload 96 | @staticmethod 97 | def get_content( 98 | response: Response, 99 | format_type: None, 100 | return_empty_json: Literal[True], 101 | return_bytes: Literal[False], 102 | ) -> EmptyJson: ... 
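    # Typing note (added comment): the @overload stubs above and below narrow the
    # return type of get_content() for static type checkers -- bytes when
    # return_bytes is True, EmptyJson when return_empty_json is True, decoded JSON
    # (or an error dict) when format_type == "json", and a plain str for csv/xml.
    # Only the final get_content() definition without @overload carries the
    # implementation.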
103 | 104 | @overload 105 | @staticmethod 106 | def get_content( 107 | response: Response, 108 | format_type: None, 109 | return_empty_json: Literal[False], 110 | return_bytes: Literal[True], 111 | ) -> bytes: ... 112 | 113 | @overload 114 | @staticmethod 115 | def get_content( 116 | response: Response, 117 | format_type: Literal["json"], 118 | return_empty_json: Literal[False], 119 | return_bytes: Literal[False], 120 | ) -> Union[Json, Dict[str, str]]: 121 | # This should return json, but might also return an error dict 122 | ... 123 | 124 | @overload 125 | @staticmethod 126 | def get_content( 127 | response: Response, 128 | format_type: Literal["csv", "xml"], 129 | return_empty_json: Literal[False], 130 | return_bytes: Literal[False], 131 | ) -> str: ... 132 | 133 | @staticmethod 134 | def get_content( 135 | response: Response, 136 | format_type: Optional[Literal["json", "csv", "xml"]], 137 | return_empty_json: bool, 138 | return_bytes: bool, 139 | ): 140 | """Abstraction for grabbing content from a returned response""" 141 | if return_bytes: 142 | return response.content 143 | 144 | if return_empty_json: 145 | return [{}] 146 | 147 | if format_type == "json": 148 | return response.json() 149 | 150 | # don't do anything to csv/xml strings 151 | return response.text 152 | 153 | def execute( 154 | self, 155 | verify_ssl: Union[bool, str], 156 | return_headers: bool, 157 | file: Optional[FileUpload], 158 | **kwargs, 159 | ): 160 | """Execute the API request and return data 161 | 162 | Args: 163 | verify_ssl: Verify SSL. Can also be a path to CA_BUNDLE 164 | return_headers: 165 | Whether or not response headers should be returned along 166 | with the request content 167 | file: A file object to send along with the request 168 | **kwargs: passed to requesets.request() to control 169 | the configuration to perform requests to the api 170 | 171 | Returns: 172 | Data object from JSON decoding process if format=='json', 173 | else return raw string (ie format=='csv'|'xml') 174 | 175 | Raises: 176 | RedcapError: 177 | Badly formed request i.e record doesn't 178 | exist, field doesn't exist, etc. 
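        Examples:
            A minimal sketch of driving this low-level class directly; normally
            `Project` builds the payload and config for you. The URL and token
            below are placeholders, so the calls are skipped.

            >>> config = _ContentConfig(return_empty_json=False, return_bytes=False)
            >>> req = _RCRequest(  # doctest: +SKIP
            ...     url="https://redcap.example.org/api/",
            ...     payload={"token": "...", "content": "version", "format": "csv"},
            ...     config=config,
            ... )
            >>> req.execute(verify_ssl=True, return_headers=False, file=None)  # doctest: +SKIP
            '...'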
179 | """ 180 | response = self.session.post( 181 | self.url, data=self.payload, verify=verify_ssl, files=file, **kwargs 182 | ) 183 | 184 | content = self.get_content( 185 | response, 186 | format_type=self.fmt, 187 | return_empty_json=self.config.return_empty_json, 188 | return_bytes=self.config.return_bytes, 189 | ) 190 | 191 | bad_request = False 192 | 193 | if self.fmt == "json": 194 | try: 195 | bad_request = "error" in content.keys() # type: ignore 196 | except AttributeError: 197 | # we're not dealing with an error dict 198 | pass 199 | elif self.fmt == "csv": 200 | bad_request = content.lower().startswith("error:") # type: ignore 201 | # xml is the default returnFormat for error messages 202 | elif self.fmt == "xml" or self.fmt is None: 203 | bad_request = "" in str(content).lower() 204 | 205 | if bad_request: 206 | raise RedcapError(content) 207 | 208 | if return_headers: 209 | return content, response.headers 210 | 211 | return content 212 | -------------------------------------------------------------------------------- /redcap/methods/files.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project files""" 2 | 3 | from typing import Any, Dict, IO, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, FileMap 6 | from redcap.request import EmptyJson, FileUpload 7 | 8 | 9 | class Files(Base): 10 | """Responsible for all API methods under 'Files' in the API Playground""" 11 | 12 | def _check_file_field(self, field: str) -> None: 13 | """Check that field exists and is a file field""" 14 | is_field = field in self.field_names 15 | is_file = self._filter_metadata(key="field_type", field_name=field) == "file" 16 | if not (is_field and is_file): 17 | msg = f"'{ field }' is not a field or not a 'file' field" 18 | raise ValueError(msg) 19 | 20 | def export_file( 21 | self, 22 | record: str, 23 | field: str, 24 | event: Optional[str] = None, 25 | repeat_instance: Optional[int] = None, 26 | ) -> FileMap: 27 | """ 28 | Export the contents of a file stored for a particular record 29 | 30 | Note: 31 | Unlike other export methods, this only works on a single record. 32 | 33 | Args: 34 | record: Record ID 35 | field: Field name containing the file to be exported. 36 | event: For longitudinal projects, the unique event name 37 | repeat_instance: 38 | (Only for projects with repeating instruments/events) 39 | The repeat instance number of the repeating event (if longitudinal) 40 | or the repeating instrument (if classic or longitudinal). 41 | 42 | Returns: 43 | Content of the file and content-type dictionary 44 | 45 | Raises: 46 | ValueError: Incorrect file field 47 | RedcapError: Bad Request e.g. invalid record_id 48 | 49 | Examples: 50 | If your project has events, then you must specifiy the event of interest. 
51 | Otherwise, you can leave the event parameter blank 52 | 53 | >>> proj.export_file(record="1", field="upload_field", event="event_1_arm_1") 54 | (b'test upload\\n', {'name': 'test_upload.txt', 'charset': 'UTF-8'}) 55 | """ 56 | self._check_file_field(field) 57 | # load up payload 58 | payload = self._initialize_payload(content="file") 59 | # there's no format field in this call 60 | payload["action"] = "export" 61 | payload["field"] = field 62 | payload["record"] = record 63 | if event: 64 | payload["event"] = event 65 | if repeat_instance: 66 | payload["repeat_instance"] = str(repeat_instance) 67 | content, headers = cast( 68 | FileMap, self._call_api(payload=payload, return_type="file_map") 69 | ) 70 | # REDCap adds some useful things in content-type 71 | content_map = {} 72 | if "content-type" in headers: 73 | splat = [ 74 | key_values.strip() for key_values in headers["content-type"].split(";") 75 | ] 76 | key_values = [ 77 | (key_values.split("=")[0], key_values.split("=")[1].replace('"', "")) 78 | for key_values in splat 79 | if "=" in key_values 80 | ] 81 | content_map = dict(key_values) 82 | 83 | return content, content_map 84 | 85 | def import_file( 86 | self, 87 | record: str, 88 | field: str, 89 | file_name: str, 90 | file_object: IO, 91 | event: Optional[str] = None, 92 | repeat_instance: Optional[Union[int, str]] = None, 93 | ) -> EmptyJson: 94 | """ 95 | Import the contents of a file represented by file_object to a 96 | particular records field 97 | 98 | Args: 99 | record: Record ID 100 | field: Field name where the file will go 101 | file_name: File name visible in REDCap UI 102 | file_object: File object as returned by `open` 103 | event: For longitudinal projects, the unique event name 104 | repeat_instance: 105 | (Only for projects with repeating instruments/events) 106 | The repeat instance number of the repeating event (if longitudinal) 107 | or the repeating instrument (if classic or longitudinal). 108 | 109 | Returns: 110 | Empty JSON object 111 | 112 | Raises: 113 | ValueError: Incorrect file field 114 | RedcapError: Bad Request e.g. invalid record_id 115 | 116 | Examples: 117 | If your project has events, then you must specifiy the event of interest. 118 | Otherwise, you can leave the event parameter blank 119 | 120 | >>> import tempfile 121 | >>> tmp_file = tempfile.TemporaryFile() 122 | >>> proj.import_file( 123 | ... record="2", 124 | ... field="upload_field", 125 | ... file_name="myupload.txt", 126 | ... file_object=tmp_file, 127 | ... event="event_1_arm_1", 128 | ... ) 129 | [{}] 130 | """ 131 | self._check_file_field(field) 132 | # load up payload 133 | payload: Dict[str, Any] = self._initialize_payload(content="file") 134 | payload["action"] = "import" 135 | payload["field"] = field 136 | payload["record"] = record 137 | if event: 138 | payload["event"] = event 139 | if repeat_instance: 140 | payload["repeat_instance"] = repeat_instance 141 | file_upload_dict: FileUpload = {"file": (file_name, file_object)} 142 | 143 | return cast( 144 | EmptyJson, 145 | self._call_api( 146 | payload=payload, return_type="empty_json", file=file_upload_dict 147 | ), 148 | ) 149 | 150 | def delete_file( 151 | self, 152 | record: str, 153 | field: str, 154 | event: Optional[str] = None, 155 | ) -> EmptyJson: 156 | """ 157 | Delete a file from REDCap 158 | 159 | Note: 160 | There is no undo button to this. 
161 | 162 | Args: 163 | record: Record ID 164 | field: Field name 165 | event: For longitudinal projects, the unique event name 166 | 167 | Returns: 168 | Empty JSON object 169 | 170 | Raises: 171 | ValueError: Incorrect file field 172 | RedcapError: Bad Request e.g. invalid record_id 173 | 174 | Examples: 175 | Import a tempfile and then delete it 176 | 177 | >>> import tempfile 178 | >>> tmp_file = tempfile.TemporaryFile() 179 | >>> proj.import_file( 180 | ... record="2", 181 | ... field="upload_field", 182 | ... file_name="myupload.txt", 183 | ... file_object=tmp_file, 184 | ... event="event_1_arm_1", 185 | ... ) 186 | [{}] 187 | >>> proj.delete_file(record="2", field="upload_field", event="event_1_arm_1") 188 | [{}] 189 | """ 190 | self._check_file_field(field) 191 | # Load up payload 192 | payload = self._initialize_payload(content="file") 193 | payload["action"] = "delete" 194 | payload["record"] = record 195 | payload["field"] = field 196 | if event: 197 | payload["event"] = event 198 | 199 | return cast( 200 | EmptyJson, self._call_api(payload=payload, return_type="empty_json") 201 | ) 202 | -------------------------------------------------------------------------------- /redcap/methods/instruments.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project instruments""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, FileMap 6 | from redcap.request import Json 7 | 8 | if TYPE_CHECKING: 9 | import pandas as pd 10 | 11 | 12 | class Instruments(Base): 13 | """Responsible for all API methods under 'Instruments' in the API Playground""" 14 | 15 | def export_instruments( 16 | self, 17 | format_type: Literal["json", "csv", "xml", "df"] = "json", 18 | ): 19 | """ 20 | Export the Instruments of the Project 21 | 22 | Args: 23 | format_type: 24 | Response return format 25 | 26 | Returns: 27 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: List of Instruments 28 | 29 | Examples: 30 | >>> proj.export_instruments() 31 | [{'instrument_name': 'form_1', 'instrument_label': 'Form 1'}] 32 | """ 33 | payload = self._initialize_payload( 34 | content="instrument", format_type=format_type 35 | ) 36 | return_type = self._lookup_return_type(format_type, request_type="export") 37 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 38 | 39 | return self._return_data( 40 | response=response, 41 | content="instrument", 42 | format_type=format_type, 43 | ) 44 | 45 | #### pylint: disable=too-many-locals 46 | 47 | def export_pdf( 48 | self, 49 | record: Optional[str] = None, 50 | event: Optional[str] = None, 51 | instrument: Optional[str] = None, 52 | repeat_instance: Optional[int] = None, 53 | all_records: Optional[bool] = None, 54 | compact_display: Optional[bool] = None, 55 | ) -> FileMap: 56 | """ 57 | Export PDF file of instruments, either as blank or with data 58 | 59 | Args: 60 | record: Record ID 61 | event: For longitudinal projects, the unique event name 62 | instrument: Unique instrument name 63 | repeat_instance: 64 | (Only for projects with repeating instruments/events) 65 | The repeat instance number of the repeating event (if longitudinal) 66 | or the repeating instrument (if classic or longitudinal). 67 | all_records: 68 | If True, then all records will be exported as a single PDF file. 69 | Note: If this is True, then record, event, and instrument parameters 70 | are all ignored. 
71 | compact_display: 72 | If True, then the PDF will be exported in compact display mode. 73 | 74 | Returns: 75 | Content of the file and dictionary of useful metadata 76 | 77 | Examples: 78 | >>> proj.export_pdf() 79 | (b'%PDF-1.3\\n3 0 obj\\n..., {...}) 80 | """ 81 | # load up payload 82 | payload = self._initialize_payload(content="pdf", return_format_type="json") 83 | keys_to_add = ( 84 | record, 85 | event, 86 | instrument, 87 | repeat_instance, 88 | all_records, 89 | compact_display, 90 | ) 91 | str_keys = ( 92 | "record", 93 | "event", 94 | "instrument", 95 | "repeat_instance", 96 | "allRecords", 97 | "compactDisplay", 98 | ) 99 | for key, data in zip(str_keys, keys_to_add): 100 | data = cast(str, data) 101 | if data: 102 | payload[key] = data 103 | payload["action"] = "export" 104 | 105 | content, headers = cast( 106 | FileMap, self._call_api(payload=payload, return_type="file_map") 107 | ) 108 | # REDCap adds some useful things in content-type 109 | content_map = {} 110 | if "content-type" in headers: 111 | splat = [ 112 | key_values.strip() for key_values in headers["content-type"].split(";") 113 | ] 114 | key_values = [ 115 | (key_values.split("=")[0], key_values.split("=")[1].replace('"', "")) 116 | for key_values in splat 117 | if "=" in key_values 118 | ] 119 | content_map = dict(key_values) 120 | 121 | return content, content_map 122 | 123 | #### pylint: enable=too-many-locals 124 | 125 | def export_instrument_event_mappings( 126 | self, 127 | format_type: Literal["json", "csv", "xml", "df"] = "json", 128 | arms: Optional[List[str]] = None, 129 | df_kwargs: Optional[Dict[str, Any]] = None, 130 | ): 131 | """ 132 | Export the project's instrument to event mapping 133 | 134 | Args: 135 | format_type: 136 | Return the form event mappings in native objects, 137 | csv or xml, `'df''` will return a `pandas.DataFrame` 138 | arms: Limit exported form event mappings to these arms 139 | df_kwargs: 140 | Passed to pandas.read_csv to control construction of 141 | returned DataFrame 142 | 143 | Returns: 144 | Union[str, List[Dict[str, Any]], pd.DataFrame]: Instrument-event mapping for the project 145 | 146 | Examples: 147 | >>> proj.export_instrument_event_mappings() 148 | [{'arm_num': 1, 'unique_event_name': 'event_1_arm_1', 'form': 'form_1'}] 149 | """ 150 | payload = self._initialize_payload( 151 | content="formEventMapping", format_type=format_type 152 | ) 153 | 154 | if arms: 155 | for i, value in enumerate(arms): 156 | payload[f"arms[{ i }]"] = value 157 | 158 | return_type = self._lookup_return_type(format_type, request_type="export") 159 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 160 | 161 | return self._return_data( 162 | response=response, 163 | content="formEventMapping", 164 | format_type=format_type, 165 | df_kwargs=df_kwargs, 166 | ) 167 | 168 | def import_instrument_event_mappings( 169 | self, 170 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 171 | return_format_type: Literal["json", "csv", "xml"] = "json", 172 | import_format: Literal["json", "csv", "xml", "df"] = "json", 173 | ): 174 | # pylint: disable=line-too-long 175 | """ 176 | Import the project's instrument to event mapping 177 | 178 | Note: 179 | This only works for longitudinal projects. 180 | 181 | Args: 182 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 183 | Note: 184 | If you pass a csv or xml string, you should use the 185 | `import format` parameter appropriately. 186 | return_format_type: 187 | Response format. 
By default, response will be json-decoded. 188 | import_format: 189 | Format of incoming data. By default, import_format 190 | will be json-encoded 191 | 192 | Returns: 193 | Union[int, str]: Number of instrument-event mappings imported 194 | 195 | Examples: 196 | Import instrument-event mappings 197 | >>> instrument_event_mappings = [{"arm_num": "1", "unique_event_name": "event_1_arm_1", "form": "form_1"}] 198 | >>> proj.import_instrument_event_mappings(instrument_event_mappings) 199 | 1 200 | """ 201 | payload = self._initialize_import_payload( 202 | to_import=to_import, 203 | import_format=import_format, 204 | return_format_type=return_format_type, 205 | content="formEventMapping", 206 | ) 207 | payload["action"] = "import" 208 | 209 | return_type = self._lookup_return_type( 210 | format_type=return_format_type, request_type="import" 211 | ) 212 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 213 | 214 | return response 215 | -------------------------------------------------------------------------------- /redcap/methods/surveys.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project surveys""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, Json 6 | 7 | if TYPE_CHECKING: 8 | import pandas as pd 9 | 10 | 11 | class Surveys(Base): 12 | """Responsible for all API methods under 'Surveys' in the API Playground""" 13 | 14 | def export_survey_link( 15 | self, 16 | record: str, 17 | instrument: str, 18 | event: Optional[str] = None, 19 | repeat_instance: int = 1, 20 | ) -> str: 21 | """ 22 | Export one survey link 23 | 24 | Note: 25 | The passed instrument must be set up as a survey instrument. 26 | 27 | Args: 28 | record: 29 | Name of the record 30 | instrument: 31 | Name of instrument as seen in the Data Dictionary (metadata). 32 | event: 33 | Unique event name, only used in longitudinal projects 34 | repeat_instance: 35 | only for projects with repeating instruments/events) 36 | The repeat instance number of the repeating event (if longitudinal) 37 | or the repeating instrument (if classic or longitudinal). 38 | Default value is '1'. 39 | 40 | Returns: 41 | URL of survey link requested 42 | 43 | Examples: 44 | >>> proj.export_survey_link(record="1", instrument="form_1", event="event_1_arm_1") 45 | 'https://redcapdemo.vumc.org/surveys/?s=...' 46 | """ 47 | payload = self._initialize_payload( 48 | content="surveyLink", 49 | # Hard-coded due to the nature of the response 50 | return_format_type="csv", 51 | ) 52 | 53 | payload["record"] = record 54 | payload["instrument"] = instrument 55 | payload["repeat_instance"] = repeat_instance 56 | 57 | if event: 58 | payload["event"] = event 59 | 60 | return cast(str, self._call_api(payload, return_type="str")) 61 | 62 | def export_survey_queue_link( 63 | self, 64 | record: str, 65 | ) -> str: 66 | """ 67 | Export one survey queue link 68 | 69 | Note: 70 | The passed instrument must be set up as a survey instrument. The 71 | survey queue must be enabled for the project. 72 | 73 | Args: 74 | record: 75 | Name of the record 76 | 77 | Returns: 78 | URL of survey queue link requested 79 | 80 | Examples: 81 | >>> proj.export_survey_queue_link(record="1") 82 | 'https://redcapdemo.vumc.org/surveys/?sq=...' 
83 | """ 84 | payload = self._initialize_payload( 85 | content="surveyQueueLink", 86 | # Hard-coded due to the nature of the response 87 | return_format_type="csv", 88 | ) 89 | 90 | payload["record"] = record 91 | 92 | return cast(str, self._call_api(payload, return_type="str")) 93 | 94 | def export_survey_access_code( 95 | self, 96 | record: str, 97 | instrument: str, 98 | event: Optional[str] = None, 99 | repeat_instance: int = 1, 100 | ) -> str: 101 | # pylint: disable=line-too-long 102 | """ 103 | Export a Survey Access Code for a Participant 104 | 105 | Note: 106 | The passed instrument must be set up as a survey instrument. 107 | 108 | Args: 109 | record: 110 | Name of the record 111 | instrument: 112 | Name of instrument as seen in the Data Dictionary (metadata). 113 | event: 114 | Unique event name, only used in longitudinal projects 115 | repeat_instance: 116 | only for projects with repeating instruments/events) 117 | The repeat instance number of the repeating event (if longitudinal) 118 | or the repeating instrument (if classic or longitudinal). 119 | Default value is '1'. 120 | 121 | Returns: 122 | A survey access code for a specified record and data collection 123 | instrument 124 | 125 | Examples: 126 | >>> proj.export_survey_access_code(record="1", instrument="form_1", event="event_1_arm_1") 127 | '...' 128 | """ 129 | # pylint: enable=line-too-long 130 | payload = self._initialize_payload( 131 | content="surveyAccessCode", 132 | # Hard-coded due to the nature of the response 133 | return_format_type="csv", 134 | ) 135 | 136 | payload["record"] = record 137 | payload["instrument"] = instrument 138 | payload["repeat_instance"] = repeat_instance 139 | 140 | if event: 141 | payload["event"] = event 142 | 143 | return cast(str, self._call_api(payload, return_type="str")) 144 | 145 | def export_survey_return_code( 146 | self, 147 | record: str, 148 | instrument: str, 149 | event: Optional[str] = None, 150 | repeat_instance: int = 1, 151 | ) -> str: 152 | # pylint: disable=line-too-long 153 | """ 154 | Export a Survey Return Code for a Participant 155 | 156 | Note: 157 | The passed instrument must be set up as a survey instrument, which has return codes enabled. 158 | 159 | Args: 160 | record: 161 | Name of the record 162 | instrument: 163 | Name of instrument as seen in the Data Dictionary (metadata). 164 | event: 165 | Unique event name, only used in longitudinal projects 166 | repeat_instance: 167 | only for projects with repeating instruments/events) 168 | The repeat instance number of the repeating event (if longitudinal) 169 | or the repeating instrument (if classic or longitudinal). 170 | Default value is '1'. 171 | 172 | Returns: 173 | A survey return code for a specified record and data collection 174 | instrument 175 | 176 | Examples: 177 | >>> proj.export_survey_return_code(record="1", instrument="form_1", event="event_1_arm_1") 178 | '...' 
179 | """ 180 | # pylint: enable=line-too-long 181 | payload = self._initialize_payload( 182 | content="surveyReturnCode", 183 | # Hard-coded due to the nature of the response 184 | return_format_type="csv", 185 | ) 186 | 187 | payload["record"] = record 188 | payload["instrument"] = instrument 189 | payload["repeat_instance"] = repeat_instance 190 | 191 | if event: 192 | payload["event"] = event 193 | 194 | return cast(str, self._call_api(payload, return_type="str")) 195 | 196 | def export_survey_participant_list( 197 | self, 198 | instrument: str, 199 | format_type: Literal["json", "csv", "xml", "df"] = "json", 200 | event: Optional[str] = None, 201 | df_kwargs: Optional[Dict[str, Any]] = None, 202 | ): 203 | """ 204 | Export the Survey Participant List 205 | 206 | Note: 207 | The passed instrument must be set up as a survey instrument. 208 | 209 | Args: 210 | instrument: 211 | Name of instrument as seen in the Data Dictionary (metadata). 212 | format_type: 213 | Format of returned data 214 | event: 215 | Unique event name, only used in longitudinal projects 216 | df_kwargs: 217 | Passed to `pandas.read_csv` to control construction of 218 | returned DataFrame. By default, nothing 219 | 220 | Returns: 221 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: 222 | List of survey participants, 223 | along with other useful 224 | metadata such as the record, response status, etc. 225 | 226 | Examples: 227 | >>> proj.export_survey_participant_list(instrument="form_1", event="event_1_arm_1") 228 | [{'email': '', 229 | ... 230 | 'survey_access_code': ...}, 231 | {'email': '', 232 | ... 233 | 'survey_access_code': ...}] 234 | """ 235 | payload = self._initialize_payload( 236 | content="participantList", 237 | format_type=format_type, 238 | ) 239 | payload["instrument"] = instrument 240 | if event: 241 | payload["event"] = event 242 | 243 | return_type = self._lookup_return_type(format_type, request_type="export") 244 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 245 | 246 | return self._return_data( 247 | response=response, 248 | content="participantList", 249 | format_type=format_type, 250 | df_kwargs=df_kwargs, 251 | ) 252 | -------------------------------------------------------------------------------- /redcap/methods/user_roles.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project user roles""" 2 | 3 | from typing import ( 4 | TYPE_CHECKING, 5 | Any, 6 | Dict, 7 | List, 8 | Literal, 9 | Optional, 10 | Union, 11 | cast, 12 | ) 13 | 14 | from redcap.methods.base import Base, Json 15 | 16 | if TYPE_CHECKING: 17 | import pandas as pd 18 | 19 | 20 | class UserRoles(Base): 21 | """Responsible for all API methods under 'Users Roles' in the API Playground""" 22 | 23 | def export_user_roles( 24 | self, 25 | format_type: Literal["json", "csv", "xml", "df"] = "json", 26 | df_kwargs: Optional[Dict[str, Any]] = None, 27 | ): 28 | """ 29 | Export the user roles of the Project 30 | 31 | Args: 32 | format_type: 33 | Response return format 34 | df_kwargs: 35 | Passed to `pandas.read_csv` to control construction of 36 | returned DataFrame. 
By default, nothing 37 | 38 | Returns: 39 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: 40 | List of user roles with assigned user rights 41 | 42 | Examples: 43 | >>> proj.export_user_roles() 44 | [{'unique_role_name': ..., 'role_label': 'Test role', 'design': '0', 'alerts': '0', 45 | 'user_rights': '0', 'data_access_groups': '0', 'reports': '0', 'stats_and_charts': '0', 46 | 'manage_survey_participants': '0', 'calendar': '0', 'data_import_tool': '0', 47 | 'data_comparison_tool': '0', 'logging': '0', 'email_logging': '0', 48 | 'file_repository': '0', 'data_quality_create': '0', 'data_quality_execute': '0', 49 | 'api_export': '0', 'api_import': '0', 'api_modules': '0', 'mobile_app': '0', 50 | 'mobile_app_download_data': '0', 'record_create': '0', 'record_rename': '0', 51 | 'record_delete': '0', 'lock_records_customization': '0', 'lock_records': '0', ..., 52 | 'forms': {'form_1': 2}, 'forms_export': {'form_1': 0}}] 53 | """ 54 | payload = self._initialize_payload(content="userRole", format_type=format_type) 55 | return_type = self._lookup_return_type(format_type, request_type="export") 56 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 57 | 58 | return self._return_data( 59 | response=response, 60 | content="userRole", 61 | format_type=format_type, 62 | df_kwargs=df_kwargs, 63 | ) 64 | 65 | def import_user_roles( 66 | self, 67 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 68 | return_format_type: Literal["json", "csv", "xml"] = "json", 69 | import_format: Literal["json", "csv", "xml", "df"] = "json", 70 | ): 71 | """ 72 | Import user roles into the REDCap Project 73 | 74 | Args: 75 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 76 | Note: 77 | If you pass a csv or xml string, you should use the 78 | `import format` parameter appropriately. 79 | return_format_type: 80 | Response format. By default, response will be json-decoded. 81 | import_format: 82 | Format of incoming data. By default, to_import will be json-encoded 83 | 84 | Returns: 85 | Union[int, str]: Number of user roles added or updated 86 | 87 | Examples: 88 | >>> roles = proj.export_user_roles() 89 | >>> proj.import_user_roles(roles) 90 | 1 91 | """ 92 | payload = self._initialize_import_payload( 93 | to_import=to_import, 94 | import_format=import_format, 95 | return_format_type=return_format_type, 96 | content="userRole", 97 | ) 98 | 99 | return_type = self._lookup_return_type( 100 | format_type=return_format_type, request_type="import" 101 | ) 102 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 103 | 104 | return response 105 | 106 | def delete_user_roles( 107 | self, 108 | roles: List[str], 109 | return_format_type: Literal["json", "csv", "xml"] = "json", 110 | ): 111 | """ 112 | Delete user roles from the project. 113 | 114 | Args: 115 | roles: List of user roles to delete from the project 116 | return_format_type: 117 | Response format. By default, response will be json-decoded. 118 | 119 | Returns: 120 | Union[int, str]: Number of user roles deleted 121 | 122 | Examples: 123 | Create a new user role 124 | >>> new_role = [{"role_label": "New Role"}] 125 | >>> proj.import_user_roles(new_role) 126 | 1 127 | 128 | We don't know what the 'unique_role_name' is for the newly created role, 129 | so we have to look it up by 'role_label' 130 | >>> roles = proj.export_user_roles() 131 | >>> new_role_id = [ 132 | ... role for role in roles 133 | ... if role["role_label"] == "New Role" 134 | ... 
][0]["unique_role_name"] 135 | 136 | Delete the role 137 | >>> proj.delete_user_roles([new_role_id]) 138 | 1 139 | """ 140 | payload = self._initialize_payload( 141 | content="userRole", return_format_type=return_format_type 142 | ) 143 | payload["action"] = "delete" 144 | # Turn list of user roles into dict, and append to payload 145 | roles_dict = {f"roles[{ idx }]": role for idx, role in enumerate(roles)} 146 | payload.update(roles_dict) 147 | 148 | return_type = self._lookup_return_type( 149 | format_type=return_format_type, request_type="delete" 150 | ) 151 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 152 | return response 153 | 154 | def export_user_role_assignment( 155 | self, 156 | format_type: Literal["json", "csv", "xml", "df"] = "json", 157 | df_kwargs: Optional[Dict[str, Any]] = None, 158 | ): 159 | """ 160 | Export the User-Role assignments of the Project 161 | 162 | Args: 163 | format_type: 164 | Response return format 165 | df_kwargs: 166 | Passed to `pandas.read_csv` to control construction of 167 | returned DataFrame. By default, nothing 168 | 169 | Returns: 170 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: 171 | List of user-role assignments 172 | 173 | Examples: 174 | >>> proj.export_user_role_assignment() 175 | [{'username': ..., 'unique_role_name': '', 'data_access_group': ''}] 176 | """ 177 | payload = self._initialize_payload( 178 | content="userRoleMapping", format_type=format_type 179 | ) 180 | return_type = self._lookup_return_type(format_type, request_type="export") 181 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 182 | 183 | return self._return_data( 184 | response=response, 185 | content="userRoleMapping", 186 | format_type=format_type, 187 | df_kwargs=df_kwargs, 188 | ) 189 | 190 | def import_user_role_assignment( 191 | self, 192 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 193 | return_format_type: Literal["json", "csv", "xml"] = "json", 194 | import_format: Literal["json", "csv", "xml", "df"] = "json", 195 | ): 196 | """ 197 | Import User-Role assignments into the REDCap Project 198 | 199 | Args: 200 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 201 | Note: 202 | If you pass a csv or xml string, you should use the 203 | `import format` parameter appropriately. 204 | return_format_type: 205 | Response format. By default, response will be json-decoded. 206 | import_format: 207 | Format of incoming data. 
By default, to_import will be json-encoded 208 | 209 | Returns: 210 | Union[int, str]: Number of user-role assignments added or updated 211 | 212 | Examples: 213 | >>> user_role_assignments = proj.export_user_role_assignment() 214 | >>> proj.import_user_role_assignment(user_role_assignments) 215 | 1 216 | """ 217 | payload = self._initialize_import_payload( 218 | to_import=to_import, 219 | import_format=import_format, 220 | return_format_type=return_format_type, 221 | content="userRoleMapping", 222 | ) 223 | 224 | return_type = self._lookup_return_type( 225 | format_type=return_format_type, request_type="import" 226 | ) 227 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 228 | 229 | return response 230 | -------------------------------------------------------------------------------- /HISTORY.md: -------------------------------------------------------------------------------- 1 | # HISTORY 2 | 3 | ## 2.2.0 (2022-11-17) 4 | 5 | ### API Support :robot: 6 | 7 | - Add support for `delete_roles` 8 | 9 | ### Package Improvements :muscle: 10 | 11 | - Add check for missing token and url 12 | 13 | ### Documentation :memo: 14 | 15 | - Modify instructions for installing `pandas` (optional package dependency) 16 | 17 | ## 2.1.0 (2022-04-05) 18 | 19 | ### API Support :robot: 20 | 21 | - Add logging methods (#222) 22 | - Add user methods (#225) 23 | - Add DAG methods (#226) 24 | - Add user role methods (#228) 25 | - Add new args for export records (#223) 26 | 27 | ### Package Improvements :muscle: 28 | 29 | - Add docs tests for return format type (#224) 30 | 31 | ## 2.0.0 (2022-03-29) 32 | 33 | ### API Support :robot: 34 | 35 | - Add support for `export_repeating_instruments_events` and `import_repeating_instruments_events` (#210 @JuliaSprenger) 36 | 37 | ### Package Improvements :muscle: 38 | 39 | - `Project` class loads lazily by default 40 | - All `Project.export_*` methods that return JSON now can return `DataFrame`'s as well 41 | - `Project` class was broken up into smaller utility classes, see the `redcap.methods` module or the API reference on the new docs site 42 | - Robust testing infrastructure (`pytest`, `doctest-plus`) with both unit and integration tests, maintained at 100% test coverage, with automated styling and linting checks in CI (`black`, `pylint`) 43 | - Gradual typing added, but not yet enforced in CI 44 | - Add _complete fields to payload when requesting survey fields (#149 @forsakendaemon) 45 | 46 | ### Breaking changes :boom: 47 | 48 | - Dropped support for Python 2, requires python 3.8 or above 49 | - Many extraneous `Project` attributes were removed. See the API reference for remaining attributes 50 | - `RedcapError` is raised for all endpoints when API errors are encountered. Errors are never returned in the response 51 | - `generate_next_record_name` now returns a `str` instead of an `int`. This fixes a bug that occurs when a project uses DAGs 52 | - `export_fem` renamed to `export_instrument_event_mapping` to be more consistent with other endpoints 53 | - Common parameter name changes including: `format` --> `format_type`, `return_format` --> `return_format_type`, `type` --> `record_type`. 
Most of the reason for this change was to avoid the use of reserved keywords such as `format` and `type` 54 | 55 | ### Documentation :memo: 56 | 57 | - Revamp documentation to `mkdocs-material` style on GitHub pages 58 | - Add comprehensive docstrings and doctests to all methods 59 | - Update `delete_records` documentation (#173 @andyjessen) 60 | 61 | ## 1.1.3 (2021-03-30) 62 | 63 | ### API Support :robot: 64 | 65 | - Add support for `import_metadata` endpoint (#145 @JuliaSprenger) 66 | 67 | ### Documentation :memo: 68 | 69 | - Update `contributing.rst` with new installation instructions (#135 @njvack) 70 | 71 | ## 1.1.2 (2020-11-05) 72 | 73 | ### API Support :robot: 74 | 75 | - Add support for `exportFieldNames` call (#125 @chgreer) 76 | - Add `dateRangeBegin` and `dateRangeEnd` parameters to `Project.export_records` (#124 @chgreer) 77 | 78 | ### Package Improvements :muscle: 79 | 80 | - Use `pytest` for full test suite (#132) 81 | - Enforce `black` and `pylint` style and formatting on project (#132) 82 | - Deprecate support for Python 2 (#132) 83 | - Add `pandas` as an `extra_requires` (#132) 84 | 85 | ### Documentation :memo: 86 | 87 | - Update README with new community support model and how to contribute (#132) 88 | 89 | ## 1.1.1 (2020-08-18) 90 | 91 | ### Bug Fixes :bug: 92 | 93 | - Fix package version parsing for UNIX (#122 @fissell) 94 | 95 | ## 1.1.0 (2020-07-16) 96 | 97 | ### API Support :robot: 98 | 99 | - Add `rec_type` support in `import_records()` (#40 @dckc) 100 | - Add `export_checkbox_labels` keyword arg to `export_records()` (#48 Tyler Rivera) 101 | - Properly backfill requested fields for \>6.X servers (#55) 102 | - Add Export Survey Participant List method (#71) 103 | - Add `filter_logic` to export_records (#85 @erikh360) 104 | - Add `forceAutoNumber` parameter to `import_records()` (#86 @CarlosBorroto) 105 | - Add Export Project Information (#106 @martinburchell) 106 | - Add Generate Next Record Name (#107 @martinburchell) 107 | - Add `repeat_instance` parameter to `imp_file` request (#104 @martinburchell) 108 | - Add Delete Record (#77 @damonms) 109 | - Add Export Reports (#91 @mcarmack) 110 | 111 | ### Package Improvements :muscle: 112 | 113 | - Add redcap_version attribute to Project (#44 Tyler Rivera) 114 | - Support lazy loading of Projects (#53 Tyler Rivera) 115 | - Add Python 3 support (#67, #92 @jmillxyz, @fonnesbeck) 116 | - Remove obsolete Project.filter() (#105 @martinburchell) 117 | - Change API parameters from comma-separated to arrays (#110 @martinburchell) 118 | - Use single `requests.Session()` for connections (#120 @KarthikMasi) 119 | 120 | ### Bug Fixes :bug: 121 | 122 | - Allow later versions of semantic-version (#108 @martinburchell) 123 | - Fix package version when installing from GitHub (#113) 124 | - Handle EmptyData error from pandas read_csv (#118 @martinburchell) 125 | 126 | ### Documentation :memo: 127 | 128 | - Added REDCap API changelog from 6.0.0 - 6.12.1 (#64 @SlightlyUnorthodox) 129 | - Python 3 updates (#115 @sujaypatil96) 130 | 131 | ## 1.0.2 (2016-10-05) 132 | 133 | - Fix issue in new survey participant export method. 134 | 135 | ## 1.0.1 (2016-10-05) 136 | 137 | - Add a `Project` method to export the survey participant list. 138 | - Update author email. 139 | 140 | ## 1.0 (2014-05-16) 141 | 142 | - Normalize all `format` argument to default to `json`, not `obj`. This better follows the official REDCap API. This breaks backwards compatibility, hence the 1.0 release. 143 | - Remove the `redcap.query` and associated tests. 
If you need filtering functionality, [Pandas](http://pandas.pydata.org) is **highly** recommended. 144 | - Update documentation re: how PyCap implicitly decodes JSON responses. 145 | 146 | ## 0.9 (2014-02-27) 147 | 148 | - Update docs about passing CA_BUNDLE through `verify_ssl`. 149 | - Canonical URL for docs is now . 150 | - Add `date_format` argument for `.import_records` 151 | - Sphinxification of docs 152 | - Add MIT license 153 | - Add `export_survey_fields` & `export_data_access_groups` arguments for `.import_records` 154 | - Raise for 5XX responses 155 | - Raise exception for failed imports 156 | - Deprecate the entire `redcap.Query` module. It was a bad idea to begin with. 157 | - Raise exception during `Project` instantiation when the metadata call fails. This is usually indicative of bad credentials. 158 | 159 | ## 0.8.1 (2013-05-16) 160 | 161 | - By default, in longitudinal projects when exporting records as a data frame, the index will be a MultiIndex of the project's primary field and `redcap_event_name`. 162 | - DataFrames can be passed to `Project.import_records`. 163 | - Added `Project.export_fem` to export Form-Event Mappings from the `Project`. 164 | - The SSL certificate on REDCap server can be ignored if need be. 165 | 166 | ## 0.8.0 (2013-02-14) 167 | 168 | - Added rest of API methods: `Project.export_users`, `Project.delete_file`. Almost all API methods are implemented within `Project` in some way, shape or form. 169 | - Fix file import bug. 170 | - Now use relaxed JSON decoding because REDCap doesn't always send strict JSON. 171 | - File export, import and delete methods will raise `redcap.RedcapError` when the methods don't succeed on the server. 172 | - Low-level content handling has been cleaned up. 173 | 174 | ## 0.7.0 (2013-01-18) 175 | 176 | - Added `Project.export_file` and `Project.import_file` methods for exporting/importing files from/to REDCap databases 177 | - Fixed a dependency issue that would cause new installations to fail 178 | - Fixed an issue where newline characters in the project's Data Dictionary would cause Projects to fail instantiation. 179 | 180 | ## 0.6.1 (2012-11-16) 181 | 182 | - Add ability to alter `DataFrame` construction with the `df_kwargs` arg in `Project.export_records` and `.export_metadata` 183 | 184 | ## 0.6 (2012-11-06) 185 | 186 | - Add `export_metadata` function on redcap.Project class 187 | - Add `'df'` as an option for the `format` argument on the `redcap.Project` export methods to return a `pandas.DataFrame` 188 | 189 | ## 0.5.2 (2012-10-12) 190 | 191 | - Update `setup.py` for more graceful building 192 | 193 | ## 0.5.1 (2012-10-04) 194 | 195 | - Fix potential issue when exporting strange characters 196 | 197 | ## 0.5 (2012-09-19) 198 | 199 | - Add initial support for longitudinal databases 200 | - Add helper attributes on `redcap.Project` class 201 | - Improve testing 202 | - Add Travis-CI testing on github 203 | 204 | ## 0.4.2 (2012-03-15) 205 | 206 | - 0.4.1 didn't play well with pypi?
207 | 208 | ## 0.4.1 (2012-03-15) 209 | 210 | - Defend against non-unicode characters in Redcap `Project` 211 | 212 | ## 0.3.4 (2012-01-12) 213 | 214 | - New documentation 215 | 216 | ## 0.3.3 (2011-11-21) 217 | 218 | - Bug fix when exporting all fields 219 | 220 | ## 0.3.2 (2011-11-21) 221 | 222 | - Works with current version of `requests` 223 | - Under-the-hood changes (only json is used for `RCRequest`) 224 | - Bug fix in `Project.filter` 225 | 226 | ## 0.3.1 (2011-11-02) 227 | 228 | - Bug fix in `import_records` 229 | 230 | ## 0.3 (2011-09-27) 231 | 232 | - Using Kenneth Reitz's `requests` module, greatly simplifying request code. 233 | 234 | ## 0.21 (2011-09-14) 235 | 236 | - First public release on PyPI 237 | - Version bump 238 | 239 | ## 0.1 (2011-09-14) 240 | 241 | - Basic import, export, metadata 242 | -------------------------------------------------------------------------------- /redcap/methods/data_access_groups.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project data access groups""" 2 | 3 | from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, Json 6 | 7 | if TYPE_CHECKING: 8 | import pandas as pd 9 | 10 | 11 | class DataAccessGroups(Base): 12 | """Responsible for all API methods under 'Data Access Groups' in the API Playground""" 13 | 14 | def export_dags( 15 | self, 16 | format_type: Literal["json", "csv", "xml", "df"] = "json", 17 | df_kwargs: Optional[Dict[str, Any]] = None, 18 | ): 19 | # pylint: disable=line-too-long 20 | """ 21 | Export the DAGs of the Project 22 | 23 | Args: 24 | format_type: 25 | Response return format 26 | df_kwargs: 27 | Passed to `pandas.read_csv` to control construction of 28 | returned DataFrame. By default, nothing 29 | 30 | Returns: 31 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: List of DAGs 32 | 33 | Examples: 34 | >>> proj.export_dags() 35 | [{'data_access_group_name': 'Test DAG', 'unique_group_name': 'test_dag', 'data_access_group_id': ...}] 36 | """ 37 | # pylint:enable=line-too-long 38 | payload = self._initialize_payload(content="dag", format_type=format_type) 39 | return_type = self._lookup_return_type(format_type, request_type="export") 40 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 41 | 42 | return self._return_data( 43 | response=response, 44 | content="dag", 45 | format_type=format_type, 46 | df_kwargs=df_kwargs, 47 | ) 48 | 49 | def import_dags( 50 | self, 51 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 52 | return_format_type: Literal["json", "csv", "xml"] = "json", 53 | import_format: Literal["json", "csv", "xml", "df"] = "json", 54 | ): 55 | """ 56 | Import DAGs into the REDCap Project 57 | 58 | Note: 59 | DAGs can be renamed by simply changing the group name (data_access_group_name). 60 | DAGs can be created by providing a group name value while the unique group name should 61 | be set to blank. 62 | 63 | Args: 64 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 65 | Note: 66 | If you pass a csv or xml string, you should use the 67 | `import_format` parameter appropriately. 68 | return_format_type: 69 | Response format. By default, response will be json-decoded. 70 | import_format: 71 | Format of incoming data.
By default, to_import will be json-encoded 72 | 73 | Returns: 74 | Union[int, str]: Number of DAGs added or updated 75 | 76 | Examples: 77 | Create a new data access group 78 | >>> new_dag = [{"data_access_group_name": "New DAG", "unique_group_name": ""}] 79 | >>> proj.import_dags(new_dag) 80 | 1 81 | """ 82 | payload = self._initialize_import_payload( 83 | to_import=to_import, 84 | import_format=import_format, 85 | return_format_type=return_format_type, 86 | content="dag", 87 | ) 88 | payload["action"] = "import" 89 | 90 | return_type = self._lookup_return_type( 91 | format_type=return_format_type, request_type="import" 92 | ) 93 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 94 | 95 | return response 96 | 97 | def delete_dags( 98 | self, 99 | dags: List[str], 100 | return_format_type: Literal["json", "csv", "xml"] = "json", 101 | ): 102 | """ 103 | Delete DAGs from the project. 104 | 105 | Args: 106 | dags: List of DAGs to delete from the project 107 | return_format_type: 108 | Response format. By default, response will be json-decoded. 109 | 110 | Returns: 111 | Union[int, str]: Number of DAGs deleted 112 | 113 | Examples: 114 | Create a new data access group 115 | >>> new_dag = [{"data_access_group_name": "New DAG", "unique_group_name": ""}] 116 | >>> proj.import_dags(new_dag) 117 | 1 118 | 119 | We know that 'New DAG' will automatically be assigned 'new_dag' as its 120 | unique group name 121 | >>> proj.delete_dags(["new_dag"]) 122 | 1 123 | """ 124 | payload = self._initialize_payload( 125 | content="dag", return_format_type=return_format_type 126 | ) 127 | payload["action"] = "delete" 128 | # Turn list of dags into dict, and append to payload 129 | dags_dict = {f"dags[{ idx }]": dag for idx, dag in enumerate(dags)} 130 | payload.update(dags_dict) 131 | 132 | return_type = self._lookup_return_type( 133 | format_type=return_format_type, request_type="delete" 134 | ) 135 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 136 | return response 137 | 138 | def switch_dag( 139 | self, 140 | dag: str, 141 | ) -> Literal["1"]: 142 | """ 143 | Allows the current API user to switch (assign/reassign/unassign) 144 | their current Data Access Group assignment. 145 | 146 | The current user must have been assigned to multiple DAGs via the 147 | DAG Switcher page in the project 148 | 149 | Args: 150 | dag: The unique group name of the Data Access Group to which you wish to switch 151 | 152 | Returns: 153 | "1" if the user successfully switched DAGs 154 | 155 | Examples: 156 | >>> proj.switch_dag("test_dag") # doctest: +SKIP 157 | '1' 158 | """ 159 | # API docs say that "1" is the only valid value 160 | payload = self._initialize_payload(content="dag", return_format_type="csv") 161 | payload["action"] = "switch" 162 | payload["dag"] = dag 163 | 164 | response = cast(Literal["1"], self._call_api(payload, return_type="str")) 165 | return response 166 | 167 | def export_user_dag_assignment( 168 | self, 169 | format_type: Literal["json", "csv", "xml", "df"] = "json", 170 | df_kwargs: Optional[Dict[str, Any]] = None, 171 | ): 172 | """ 173 | Export the User-DAG assignment of the Project 174 | 175 | Args: 176 | format_type: 177 | Response return format 178 | df_kwargs: 179 | Passed to `pandas.read_csv` to control construction of 180 | returned DataFrame.
By default, nothing 181 | 182 | Returns: 183 | Union[List[Dict[str, Any]], str, pandas.DataFrame]: 184 | List of User-DAG assignments 185 | 186 | Examples: 187 | >>> proj.export_user_dag_assignment() 188 | [{'username': ..., 'redcap_data_access_group': ''}] 189 | """ 190 | payload = self._initialize_payload( 191 | content="userDagMapping", format_type=format_type 192 | ) 193 | return_type = self._lookup_return_type(format_type, request_type="export") 194 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 195 | 196 | return self._return_data( 197 | response=response, 198 | content="userDagMapping", 199 | format_type=format_type, 200 | df_kwargs=df_kwargs, 201 | ) 202 | 203 | def import_user_dag_assignment( 204 | self, 205 | to_import: Union[str, List[Dict[str, Any]], "pd.DataFrame"], 206 | return_format_type: Literal["json", "csv", "xml"] = "json", 207 | import_format: Literal["json", "csv", "xml", "df"] = "json", 208 | ): 209 | """ 210 | Import User-DAG assignments into the REDCap Project 211 | 212 | Args: 213 | to_import: array of dicts, csv/xml string, `pandas.DataFrame` 214 | Note: 215 | If you pass a csv or xml string, you should use the 216 | `import_format` parameter appropriately. 217 | return_format_type: 218 | Response format. By default, response will be json-decoded. 219 | import_format: 220 | Format of incoming data. By default, to_import will be json-encoded 221 | 222 | Returns: 223 | Union[int, str]: 224 | Number of User-DAG assignments added or updated 225 | 226 | Examples: 227 | Create a new user 228 | >>> new_user = "pandeharris@gmail.com" 229 | >>> proj.import_users([{"username": new_user}]) 230 | 1 231 | 232 | Add that user to a DAG 233 | >>> dag_mapping = [ 234 | ... {"username": new_user, "redcap_data_access_group": "test_dag"} 235 | ...
] 236 | >>> proj.import_user_dag_assignment(dag_mapping) 237 | 1 238 | 239 | New user-DAG mapping 240 | >>> proj.export_user_dag_assignment() 241 | [{'username': 'pandeharris@gmail.com', 'redcap_data_access_group': 'test_dag'}, 242 | {'username': ..., 'redcap_data_access_group': ''}] 243 | 244 | Remove the user 245 | >>> proj.delete_users([new_user]) 246 | 1 247 | """ 248 | payload = self._initialize_import_payload( 249 | to_import=to_import, 250 | import_format=import_format, 251 | return_format_type=return_format_type, 252 | content="userDagMapping", 253 | ) 254 | payload["action"] = "import" 255 | 256 | return_type = self._lookup_return_type( 257 | format_type=return_format_type, request_type="import" 258 | ) 259 | response = cast(Union[Json, str], self._call_api(payload, return_type)) 260 | 261 | return response 262 | -------------------------------------------------------------------------------- /redcap/methods/file_repository.py: -------------------------------------------------------------------------------- 1 | """REDCap API methods for Project file repository""" 2 | 3 | from typing import Any, Dict, IO, Literal, Optional, Union, cast 4 | 5 | from redcap.methods.base import Base, FileMap, Json 6 | from redcap.request import EmptyJson, FileUpload 7 | 8 | 9 | class FileRepository(Base): 10 | """Responsible for all API methods under 'File Repository' in the API Playground""" 11 | 12 | def create_folder_in_repository( 13 | self, 14 | name: str, 15 | folder_id: Optional[int] = None, 16 | dag_id: Optional[int] = None, 17 | role_id: Optional[int] = None, 18 | format_type: Literal["json", "csv", "xml"] = "json", 19 | return_format_type: Literal["json", "csv", "xml"] = "json", 20 | ): 21 | """ 22 | Create a New Folder in the File Repository 23 | 24 | Args: 25 | name: 26 | The desired name of the folder to be created (max length = 150 characters) 27 | folder_id: 28 | The folder_id of a specific folder in the File Repository for which you wish 29 | to create this sub-folder. If none is provided, the folder will be created in 30 | the top-level directory of the File Repository. 31 | dag_id: 32 | The dag_id of the DAG (Data Access Group) to which you wish to restrict 33 | access for this folder. If none is provided, the folder will be accessible to 34 | users in all DAGs and users in no DAGs. 35 | role_id: 36 | The role_id of the User Role to which you wish to restrict access for this 37 | folder. If none is provided, the folder will be accessible to users in all 38 | User Roles and users in no User Roles. 39 | format_type: 40 | Return the metadata in native objects, csv or xml. 41 | return_format_type: 42 | Response format. By default, response will be json-decoded.
43 | Returns: 44 | Union[str, List[Dict[str, Any]]]: 45 | The folder_id of the newly created folder in the 46 | File Repository 47 | 48 | Examples: 49 | >>> proj.create_folder_in_repository(name="New Folder") 50 | [{'folder_id': ...}] 51 | """ 52 | payload: Dict[str, Any] = self._initialize_payload( 53 | content="fileRepository", 54 | format_type=format_type, 55 | return_format_type=return_format_type, 56 | ) 57 | 58 | payload["action"] = "createFolder" 59 | payload["name"] = name 60 | 61 | if folder_id: 62 | payload["folder_id"] = folder_id 63 | 64 | if dag_id: 65 | payload["dag_id"] = dag_id 66 | 67 | if role_id: 68 | payload["role_id"] = role_id 69 | 70 | return_type = self._lookup_return_type(format_type, request_type="export") 71 | 72 | return cast(Union[Json, str], self._call_api(payload, return_type)) 73 | 74 | def export_file_repository( 75 | self, 76 | folder_id: Optional[int] = None, 77 | format_type: Literal["json", "csv", "xml"] = "json", 78 | return_format_type: Literal["json", "csv", "xml"] = "json", 79 | ): 80 | """ 81 | Export a list of files/folders in the File Repository 82 | 83 | Only exports the top-level of files/folders. To see which files are contained 84 | within a folder, use the `folder_id` parameter 85 | 86 | Args: 87 | folder_id: 88 | The folder_id of a specific folder in the File Repository for which you wish 89 | to search for files/folders. If none is provided, the search will be conducted 90 | in the top-level directory of the File Repository. 91 | format_type: 92 | Return the metadata in native objects, csv or xml. 93 | return_format_type: 94 | Response format. By default, response will be json-decoded. 95 | Returns: 96 | Union[str, List[Dict[str, Any]]]: 97 | List of files/folders in the requested File Repository 98 | directory 99 | 100 | Examples: 101 | >>> proj.export_file_repository() 102 | [{'folder_id': ..., 'name': 'New Folder'}, ...] 103 | """ 104 | payload: Dict[str, Any] = self._initialize_payload( 105 | content="fileRepository", 106 | format_type=format_type, 107 | return_format_type=return_format_type, 108 | ) 109 | 110 | payload["action"] = "list" 111 | 112 | if folder_id: 113 | payload["folder_id"] = folder_id 114 | 115 | return_type = self._lookup_return_type(format_type, request_type="export") 116 | 117 | return cast(Union[Json, str], self._call_api(payload, return_type)) 118 | 119 | def export_file_from_repository( 120 | self, 121 | doc_id: int, 122 | return_format_type: Literal["json", "csv", "xml"] = "json", 123 | ) -> FileMap: 124 | """ 125 | Export the contents of a file stored in the File Repository 126 | 127 | Args: 128 | doc_id: The doc_id of the file in the File Repository 129 | return_format_type: 130 | Response format. By default, response will be json-decoded.
131 | 132 | Returns: 133 | Content of the file and content-type dictionary 134 | 135 | Examples: 136 | >>> file_dir = proj.export_file_repository() 137 | >>> text_file = [file for file in file_dir if file["name"] == "test.txt"].pop() 138 | >>> proj.export_file_from_repository(doc_id=text_file["doc_id"]) 139 | (b'hello', {'name': 'test.txt', 'charset': 'UTF-8'}) 140 | """ 141 | payload = self._initialize_payload( 142 | content="fileRepository", return_format_type=return_format_type 143 | ) 144 | # there's no format field in this call 145 | payload["action"] = "export" 146 | payload["doc_id"] = doc_id 147 | 148 | content, headers = cast( 149 | FileMap, self._call_api(payload=payload, return_type="file_map") 150 | ) 151 | # REDCap adds some useful things in content-type 152 | content_map = {} 153 | if "content-type" in headers: 154 | splat = [ 155 | key_values.strip() for key_values in headers["content-type"].split(";") 156 | ] 157 | key_values = [ 158 | (key_values.split("=")[0], key_values.split("=")[1].replace('"', "")) 159 | for key_values in splat 160 | if "=" in key_values 161 | ] 162 | content_map = dict(key_values) 163 | 164 | return content, content_map 165 | 166 | def import_file_into_repository( 167 | self, 168 | file_name: str, 169 | file_object: IO, 170 | folder_id: Optional[int] = None, 171 | ) -> EmptyJson: 172 | """ 173 | Import the contents of a file represented by file_object into 174 | the file repository 175 | 176 | Args: 177 | file_name: File name visible in REDCap UI 178 | file_object: File object as returned by `open` 179 | folder_id: 180 | The folder_id of a specific folder in the File Repository where 181 | you wish to store the file. If none is provided, the file will 182 | be stored in the top-level directory of the File Repository. 183 | 184 | Returns: 185 | Empty JSON object 186 | 187 | Examples: 188 | >>> import tempfile 189 | >>> tmp_file = tempfile.TemporaryFile() 190 | >>> proj.import_file_into_repository( 191 | ... file_name="myupload.txt", 192 | ... file_object=tmp_file, 193 | ... ) 194 | [{}] 195 | """ 196 | payload: Dict[str, Any] = self._initialize_payload(content="fileRepository") 197 | payload["action"] = "import" 198 | 199 | if folder_id: 200 | payload["folder_id"] = folder_id 201 | 202 | file_upload_dict: FileUpload = {"file": (file_name, file_object)} 203 | 204 | return cast( 205 | EmptyJson, 206 | self._call_api( 207 | payload=payload, return_type="empty_json", file=file_upload_dict 208 | ), 209 | ) 210 | 211 | def delete_file_from_repository( 212 | self, 213 | doc_id: int, 214 | return_format_type: Literal["json", "csv", "xml"] = "json", 215 | ) -> EmptyJson: 216 | # pylint: disable=line-too-long 217 | """ 218 | Delete a File from the File Repository 219 | 220 | Once deleted, the file will remain in the Recycle Bin folder for up to 30 days. 221 | 222 | Args: 223 | doc_id: The doc_id of the file in the File Repository 224 | return_format_type: 225 | Response format. By default, response will be json-decoded. 
226 | 227 | Returns: 228 | Empty JSON object 229 | 230 | Examples: 231 | >>> file_dir = proj.export_file_repository() 232 | >>> test_folder = [folder for folder in file_dir if folder["name"] == "test"].pop() 233 | >>> test_dir = proj.export_file_repository(folder_id=test_folder["folder_id"]) 234 | >>> test_file = [file for file in test_dir if file["name"] == "test_in_folder.txt"].pop() 235 | >>> proj.delete_file_from_repository(doc_id=test_file["doc_id"]) 236 | [{}] 237 | """ 238 | # pylint: enable=line-too-long 239 | payload = self._initialize_payload( 240 | content="fileRepository", return_format_type=return_format_type 241 | ) 242 | # there's no format field in this call 243 | payload["action"] = "delete" 244 | payload["doc_id"] = doc_id 245 | 246 | return cast( 247 | EmptyJson, self._call_api(payload=payload, return_type="empty_json") 248 | ) 249 | -------------------------------------------------------------------------------- /tests/unit/test_long_project.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | """Test suite for Project class, with long project, against mocked REDCap server""" 3 | # pylint: disable=missing-function-docstring 4 | # pylint: disable=redefined-outer-name 5 | import os 6 | import tempfile 7 | 8 | import pandas as pd 9 | import pytest 10 | import responses 11 | 12 | from redcap import Project, RedcapError 13 | from tests.unit.callback_utils import ( 14 | is_json, 15 | get_long_project_request_handler, 16 | parse_request, 17 | ) 18 | 19 | 20 | @pytest.fixture(scope="module") 21 | def long_project(project_urls, project_token, mocked_responses) -> Project: 22 | """Mocked longitudinal REDCap project""" 23 | 24 | def request_callback_long(req): 25 | request_data, request_headers, request_type = parse_request(req) 26 | request_handler = get_long_project_request_handler(request_type) 27 | response = request_handler(data=request_data, headers=request_headers) 28 | return response 29 | 30 | long_project_url = project_urls["long_project"] 31 | mocked_responses.add_callback( 32 | responses.POST, 33 | long_project_url, 34 | callback=request_callback_long, 35 | content_type="application/json", 36 | ) 37 | 38 | return Project(long_project_url, project_token) 39 | 40 | 41 | def test_init(long_project): 42 | assert isinstance(long_project, Project) 43 | 44 | 45 | def test_file_export(long_project): 46 | record, field = "1", "file" 47 | content, _ = long_project.export_file(record, field, event="raw", repeat_instance=1) 48 | assert isinstance(content, bytes) 49 | 50 | 51 | def test_file_import(long_project): 52 | this_dir, _ = os.path.split(__file__) 53 | upload_fname = os.path.join(this_dir, "data.txt") 54 | with open(upload_fname, "r", encoding="UTF-8") as fobj: 55 | content = long_project.import_file( 56 | "1", "file", upload_fname, fobj, event="raw", repeat_instance=1 57 | ) 58 | 59 | assert content == [{}] 60 | 61 | 62 | def test_file_delete(long_project): 63 | record, field = "1", "file" 64 | content = long_project.delete_file(record, field, event="raw") 65 | assert content == [{}] 66 | 67 | 68 | def test_export_survey_participants_list(long_project): 69 | res = long_project.export_survey_participant_list(instrument="test", event="raw") 70 | 71 | assert is_json(res) 72 | 73 | 74 | def test_export_survey_link(long_project): 75 | res = long_project.export_survey_link(instrument="test", record="1", event="raw") 76 | assert res.startswith("https://redcapdemo.vumc.org/surveys/?s=") 77 | 78 | 79 | def
test_export_survey_queue_link(long_project): 80 | res = long_project.export_survey_queue_link(record="1") 81 | assert res.startswith("https://redcapdemo.vumc.org/surveys/?sq=") 82 | 83 | 84 | def test_export_survey_access_code(long_project): 85 | res = long_project.export_survey_access_code( 86 | instrument="test", record="1", event="raw" 87 | ) 88 | assert len(res) == 9 89 | 90 | 91 | def test_export_survey_return_code(long_project): 92 | res = long_project.export_survey_return_code( 93 | instrument="test", record="1", event="raw" 94 | ) 95 | assert len(res) == 8 96 | 97 | 98 | def test_metadata_import_handles_api_error(long_project): 99 | metadata = long_project.export_metadata() 100 | 101 | with pytest.raises(RedcapError): 102 | long_project.import_metadata(metadata) 103 | 104 | 105 | def test_is_longitudinal(long_project): 106 | assert long_project.is_longitudinal 107 | 108 | 109 | def test_instruments_export(long_project): 110 | response = long_project.export_instruments() 111 | 112 | assert len(response) == 3 113 | 114 | 115 | def test_pdf_export(long_project): 116 | content, _ = long_project.export_pdf() 117 | 118 | assert isinstance(content, bytes) 119 | 120 | 121 | def test_pdf_export_specify(long_project): 122 | content, _ = long_project.export_pdf( 123 | record="1", event="raw", instrument="test", repeat_instance=1 124 | ) 125 | 126 | assert isinstance(content, bytes) 127 | 128 | 129 | def test_pdf_export_all_records(long_project): 130 | content, _ = long_project.export_pdf(all_records=True) 131 | 132 | assert isinstance(content, bytes) 133 | 134 | 135 | def test_pdf_export_compact_display(long_project): 136 | content, _ = long_project.export_pdf(compact_display=True) 137 | 138 | assert isinstance(content, bytes) 139 | 140 | 141 | def test_export_with_events(long_project): 142 | events = long_project.export_instrument_event_mappings() 143 | unique_event = events[0]["unique_event_name"] 144 | data = long_project.export_records(events=[unique_event]) 145 | 146 | assert isinstance(data, list) 147 | 148 | for record in data: 149 | assert isinstance(record, dict) 150 | 151 | 152 | def test_delete_records_from_event(long_project): 153 | res = long_project.delete_records( 154 | records=["1"], 155 | arm="1", 156 | instrument="form_1", 157 | event="enrollment_arm_1", 158 | repeat_instance=1, 159 | delete_logging=True, 160 | ) 161 | 162 | assert res == 1 163 | 164 | 165 | def test_fem_export(long_project): 166 | fem = long_project.export_instrument_event_mappings(format_type="json") 167 | 168 | assert isinstance(fem, list) 169 | 170 | for arm in fem: 171 | assert isinstance(arm, dict) 172 | 173 | assert len(fem) == 1 174 | 175 | 176 | def test_fem_export_strictly_enforces_format(long_project): 177 | with pytest.raises(ValueError): 178 | long_project.export_instrument_event_mappings(format_type="unsupported") 179 | 180 | 181 | def test_fem_import(long_project): 182 | instrument_event_mappings = [ 183 | {"arm_num": "1", "unique_event_name": "event_1_arm_1", "form": "form_2"} 184 | ] 185 | res = long_project.import_instrument_event_mappings(instrument_event_mappings) 186 | 187 | assert res == 1 188 | 189 | 190 | def test_export_to_df_gives_multi_index(long_project): 191 | long_dataframe = long_project.export_records(format_type="df", event_name="raw") 192 | 193 | assert hasattr(long_dataframe.index, "names") 194 | 195 | 196 | def test_import_dataframe(long_project): 197 | long_dataframe = long_project.export_records(event_name="raw", format_type="df") 198 | response =
long_project.import_records(long_dataframe, import_format="df") 199 | 200 | assert "count" in response 201 | assert "error" not in response 202 | 203 | 204 | def test_reports_df_export(long_project): 205 | report = long_project.export_report(report_id="1", format_type="df") 206 | 207 | assert isinstance(report, pd.DataFrame) 208 | 209 | 210 | def test_repeating_export(long_project): 211 | rep = long_project.export_repeating_instruments_events(format_type="json") 212 | 213 | assert isinstance(rep, list) 214 | 215 | 216 | def test_import_export_repeating_forms(long_project): 217 | rep = long_project.export_repeating_instruments_events(format_type="json") 218 | res = long_project.import_repeating_instruments_events( 219 | to_import=rep, import_format="json" 220 | ) 221 | assert res == 1 222 | 223 | 224 | def test_arms_export(long_project): 225 | response = long_project.export_arms() 226 | 227 | assert len(response) == 1 228 | 229 | 230 | def test_arms_import(long_project): 231 | new_arms = [{"arm_num": 2, "name": "test_2"}] 232 | response = long_project.import_arms(new_arms) 233 | 234 | assert response == 1 235 | 236 | 237 | def test_arms_export_specify_arm(long_project): 238 | response = long_project.export_arms(arms=[2]) 239 | 240 | assert len(response) == 1 241 | 242 | assert any(arm["name"] == "test_2" for arm in response) 243 | 244 | 245 | def test_arms_import_override(long_project): 246 | new_arms = [{"arm_num": 3, "name": "test_3"}, {"arm_num": 4, "name": "test_4"}] 247 | response = long_project.import_arms(new_arms, override=1) 248 | 249 | assert response == 2 250 | 251 | 252 | def test_arms_delete(long_project): 253 | arms = [3] 254 | response = long_project.delete_arms(arms) 255 | 256 | assert response == 1 257 | 258 | 259 | def test_events_export(long_project): 260 | response = long_project.export_events() 261 | 262 | assert len(response) == 1 263 | 264 | 265 | def test_events_import(long_project): 266 | new_events = [{"event_name": "Event 2", "arm_num": "1"}] 267 | response = long_project.import_events(new_events) 268 | 269 | assert response == 1 270 | 271 | 272 | def test_events_export_specify_arm(long_project): 273 | response = long_project.export_events(arms=[1]) 274 | 275 | assert len(response) == 2 276 | 277 | assert any(event["arm_num"] == 1 for event in response) 278 | 279 | 280 | def test_events_import_override(long_project): 281 | new_events = [ 282 | {"event_name": "Event 3", "arm_num": "1"}, 283 | {"event_name": "Event 4", "arm_num": "1"}, 284 | ] 285 | response = long_project.import_events(new_events, override=1) 286 | 287 | assert response == 2 288 | 289 | 290 | def test_events_delete(long_project): 291 | events = ["event_4_arm_1"] 292 | response = long_project.delete_events(events) 293 | 294 | assert response == 1 295 | 296 | 297 | def test_file_repo_folder_create(long_project): 298 | response = long_project.create_folder_in_repository( 299 | name="test", folder_id=1, dag_id=2, role_id=3 300 | ) 301 | assert response[0]["folder_id"] 302 | 303 | 304 | def test_export_file_repo(long_project): 305 | response = long_project.export_file_repository(folder_id=1) 306 | assert is_json(response) 307 | 308 | 309 | def test_export_file_from_repo(long_project): 310 | resp, headers = long_project.export_file_from_repository(doc_id=1) 311 | assert isinstance(resp, bytes) 312 | assert headers["name"] == "test.txt" 313 | 314 | 315 | def test_import_file_into_file_repo(long_project): 316 | tmp_file = tempfile.TemporaryFile() 317 | resp = 
long_project.import_file_into_repository("test.txt", tmp_file, folder_id=1) 318 | assert resp 319 | 320 | 321 | def test_delete_file_from_file_repo(long_project): 322 | resp = long_project.delete_file_from_repository(doc_id=1) 323 | assert resp 324 | -------------------------------------------------------------------------------- /tests/integration/test_simple_project.py: -------------------------------------------------------------------------------- 1 | """Test suite for simple REDCap Project against real REDCap server""" 2 | 3 | # pylint: disable=missing-function-docstring 4 | import os 5 | import tempfile 6 | 7 | from io import StringIO 8 | 9 | import pandas as pd 10 | import pytest 11 | import semantic_version 12 | 13 | from redcap import RedcapError 14 | 15 | if not os.getenv("REDCAPDEMO_SUPERUSER_TOKEN"): 16 | pytest.skip( 17 | "Super user token not found, skipping integration tests", 18 | allow_module_level=True, 19 | ) 20 | 21 | 22 | @pytest.mark.integration 23 | def test_is_not_longitudinal(simple_project): 24 | assert not simple_project.is_longitudinal 25 | 26 | 27 | @pytest.mark.integration 28 | def test_export_records(simple_project): 29 | proj_records_export = simple_project.export_records() 30 | assert len(proj_records_export) == 3 31 | 32 | 33 | @pytest.mark.integration 34 | def test_export_records_always_has_record_id(simple_project): 35 | proj_records_export = simple_project.export_records(fields=["first_name"]) 36 | assert "record_id" in proj_records_export[0].keys() 37 | 38 | 39 | @pytest.mark.integration 40 | def test_export_records_df(simple_project): 41 | proj_records_export = simple_project.export_records(format_type="df") 42 | assert len(proj_records_export) == 3 43 | 44 | 45 | @pytest.mark.integration 46 | def test_export_records_df_eav(simple_project): 47 | proj_records_export = simple_project.export_records( 48 | format_type="df", record_type="eav" 49 | ) 50 | assert len(proj_records_export) == 30 51 | 52 | 53 | @pytest.mark.integration 54 | def test_import_and_delete_records(simple_project): 55 | new_record_ids = [4, 5, 6] 56 | test_records = [{"record_id": i} for i in new_record_ids] 57 | 58 | res = simple_project.import_records(test_records) 59 | assert res["count"] == len(test_records) 60 | 61 | res = simple_project.import_records(test_records, return_content="ids") 62 | assert len(res) == len(test_records) 63 | 64 | res = simple_project.import_records(test_records, return_content="nothing") 65 | assert res == [{}] 66 | 67 | res = simple_project.delete_records(new_record_ids) 68 | assert res == 3 69 | 70 | 71 | @pytest.mark.integration 72 | @pytest.mark.parametrize( 73 | ["return_format_type", "import_output", "delete_output"], 74 | [ 75 | ("csv", "3", "3"), 76 | ("xml", '3', "3"), 77 | ], 78 | ) 79 | def test_import_and_delete_records_non_json( 80 | simple_project, return_format_type, import_output, delete_output 81 | ): 82 | new_record_ids = ["4", "5", "6"] 83 | test_records_csv = "record_id\n" + "\n".join(new_record_ids) 84 | test_records_df = pd.read_csv(StringIO(test_records_csv)) 85 | 86 | res = simple_project.import_records( 87 | test_records_df, import_format="df", return_format_type=return_format_type 88 | ) 89 | assert res == import_output 90 | 91 | res = simple_project.delete_records( 92 | new_record_ids, return_format_type=return_format_type 93 | ) 94 | assert res == delete_output 95 | 96 | 97 | @pytest.mark.integration 98 | def test_import_df_no_index(simple_project): 99 | # declare df_kwargs without specifying index, which returns a df with no 
index 100 | proj_records_export = simple_project.export_records( 101 | format_type="df", df_kwargs={"sep": ","} 102 | ).convert_dtypes() 103 | 104 | res = simple_project.import_records(proj_records_export, import_format="df") 105 | 106 | assert res["count"] == 3 107 | 108 | 109 | @pytest.mark.integration 110 | def test_export_version(simple_project): 111 | version = simple_project.export_version() 112 | assert version >= semantic_version.Version("12.0.1") 113 | 114 | 115 | @pytest.mark.integration 116 | def test_export_users(simple_project): 117 | users = simple_project.export_users() 118 | # no need to create a test project with more than one user 119 | assert len(users) == 1 120 | # any user in this test project would by necessity have API access 121 | assert users[0]["api_export"] == 1 122 | 123 | 124 | @pytest.mark.integration 125 | def test_export_user_roles(simple_project): 126 | user_roles = simple_project.export_user_roles() 127 | assert len(user_roles) == 1 128 | assert user_roles[0]["role_label"] == "Example Role" 129 | 130 | 131 | @pytest.mark.integration 132 | def test_import_delete_user_roles(simple_project): 133 | new_role = [{"role_label": "New Role"}] 134 | 135 | res = simple_project.import_user_roles(new_role) 136 | assert res == 1 137 | 138 | new_role_id = simple_project.export_user_roles()[-1]["unique_role_name"] 139 | 140 | res = simple_project.delete_user_roles([new_role_id]) 141 | assert res == 1 142 | 143 | 144 | @pytest.mark.integration 145 | def test_export_import_user_role_assignments(simple_project): 146 | new_user = "pandeharris@gmail.com" 147 | simple_project.import_users([{"username": new_user}]) 148 | 149 | example_role_name = simple_project.export_user_roles()[0]["unique_role_name"] 150 | 151 | res = simple_project.import_user_role_assignment( 152 | [{"username": new_user, "unique_role_name": example_role_name}] 153 | ) 154 | assert res == 1 155 | 156 | user_role_assignments = simple_project.export_user_role_assignment() 157 | test_user_role_name = [ 158 | user_role["unique_role_name"] 159 | for user_role in user_role_assignments 160 | if user_role["username"] == new_user 161 | ][0] 162 | assert test_user_role_name == example_role_name 163 | # cleanup 164 | res = simple_project.delete_users([new_user]) 165 | assert res == 1 166 | 167 | 168 | @pytest.mark.integration 169 | def test_export_dags(simple_project): 170 | dags = simple_project.export_dags(format_type="df") 171 | 172 | assert len(dags) == 1 173 | 174 | 175 | @pytest.mark.integration 176 | def test_import_delete_dags(simple_project): 177 | new_dag = [{"data_access_group_name": "New DAG", "unique_group_name": ""}] 178 | 179 | res = simple_project.import_dags(new_dag, return_format_type="csv") 180 | assert res == "1" 181 | 182 | res = simple_project.delete_dags(["new_dag"]) 183 | assert res == 1 184 | 185 | 186 | @pytest.mark.integration 187 | def test_export_user_dag_assignment(simple_project): 188 | res = simple_project.export_user_dag_assignment() 189 | 190 | assert len(res) == 1 191 | 192 | 193 | @pytest.mark.integration 194 | def test_import_user_dag_assignment(simple_project): 195 | dag_mapping = simple_project.export_user_dag_assignment() 196 | res = simple_project.import_user_dag_assignment( 197 | dag_mapping, return_format_type="csv" 198 | ) 199 | 200 | assert res == "1" 201 | 202 | 203 | @pytest.mark.integration 204 | def test_export_field_names(simple_project): 205 | field_names = simple_project.export_field_names() 206 | assert len(field_names) == 16 207 | 208 | 209 | 
@pytest.mark.integration 210 | def test_export_one_field_name(simple_project): 211 | field_names = simple_project.export_field_names(field="first_name") 212 | assert len(field_names) == 1 213 | 214 | 215 | @pytest.mark.integration 216 | def test_export_field_names_df(simple_project): 217 | field_names = simple_project.export_field_names(format_type="df") 218 | assert all(field_names.columns == ["choice_value", "export_field_name"]) 219 | 220 | 221 | @pytest.mark.integration 222 | def test_export_instruments(simple_project): 223 | instruments = simple_project.export_instruments() 224 | assert len(instruments) == 1 225 | 226 | 227 | @pytest.mark.integration 228 | def test_export_pdf(simple_project): 229 | content, _ = simple_project.export_pdf() 230 | 231 | assert isinstance(content, bytes) 232 | 233 | 234 | @pytest.mark.integration 235 | def test_export_and_import_metadata(simple_project): 236 | original_metadata = simple_project.export_metadata() 237 | assert len(original_metadata) == 15 238 | 239 | reduced_metadata = original_metadata[:14] 240 | res = simple_project.import_metadata(reduced_metadata) 241 | assert res == len(reduced_metadata) 242 | # then "restore" it (though won't have data for the previously removed fields) 243 | res = simple_project.import_metadata(original_metadata) 244 | assert res == len(original_metadata) 245 | 246 | 247 | @pytest.mark.integration 248 | def test_export_and_import_metadata_csv(simple_project): 249 | metadata = simple_project.export_metadata("csv") 250 | assert "field_name,form_name" in metadata 251 | res = simple_project.import_metadata(to_import=metadata, import_format="csv") 252 | assert res == 15 253 | 254 | 255 | @pytest.mark.integration 256 | def test_export_and_import_metadata_df(simple_project): 257 | metadata = simple_project.export_metadata( 258 | format_type="df", 259 | # We don't want to convert these to floats (what pandas does by default) 260 | # since we need them to stay integers when re-importing into REDCap 261 | df_kwargs={ 262 | "index_col": "field_name", 263 | "dtype": { 264 | "text_validation_min": pd.Int64Dtype(), 265 | "text_validation_max": pd.Int64Dtype(), 266 | }, 267 | }, 268 | ) 269 | assert metadata.index.name == "field_name" 270 | res = simple_project.import_metadata(to_import=metadata, import_format="df") 271 | assert res == 15 272 | 273 | 274 | @pytest.mark.integration 275 | def test_export_project_info(simple_project): 276 | project_info = simple_project.export_project_info() 277 | assert project_info["is_longitudinal"] == 0 278 | 279 | 280 | @pytest.mark.integration 281 | def test_export_logging(simple_project): 282 | logs = simple_project.export_logging(log_type="manage") 283 | first_log = logs.pop() 284 | assert "manage/design" in first_log["action"].lower() 285 | 286 | 287 | @pytest.mark.integration 288 | def test_export_arms(simple_project): 289 | with pytest.raises(RedcapError): 290 | simple_project.export_arms() 291 | 292 | 293 | @pytest.mark.integration 294 | def test_export_events(simple_project): 295 | with pytest.raises(RedcapError): 296 | simple_project.export_events() 297 | 298 | 299 | @pytest.mark.integration 300 | def test_export_instrument_event_mapping(simple_project): 301 | with pytest.raises(RedcapError): 302 | simple_project.export_instrument_event_mappings() 303 | 304 | 305 | @pytest.mark.integration 306 | def test_create_folder_in_repository(simple_project): 307 | folder_name = "New Folder" 308 | new_folder = simple_project.create_folder_in_repository(name=folder_name) 309 | assert
new_folder[0]["folder_id"] > 0 310 | 311 | 312 | @pytest.mark.integration 313 | def test_export_file_repository(simple_project): 314 | directory = simple_project.export_file_repository() 315 | assert len(directory) > 0 316 | 317 | 318 | @pytest.mark.integration 319 | def test_export_file_from_repository(simple_project): 320 | file_dir = simple_project.export_file_repository() 321 | text_file = [file for file in file_dir if file["name"] == "test.txt"].pop() 322 | file_contents, _ = simple_project.export_file_from_repository( 323 | doc_id=text_file["doc_id"] 324 | ) 325 | assert isinstance(file_contents, bytes) 326 | 327 | 328 | @pytest.mark.integration 329 | def test_import_file_repository(simple_project): 330 | initial_len = len(simple_project.export_file_repository()) 331 | 332 | tmp_file = tempfile.TemporaryFile() 333 | simple_project.import_file_into_repository( 334 | file_name="new_upload.txt", file_object=tmp_file 335 | ) 336 | 337 | new_len = len(simple_project.export_file_repository()) 338 | 339 | assert new_len > initial_len 340 | 341 | 342 | @pytest.mark.integration 343 | def test_delete_file_from_repository(simple_project): 344 | file_dir = simple_project.export_file_repository() 345 | text_file = [file for file in file_dir if file["name"] == "test.txt"].pop() 346 | resp = simple_project.delete_file_from_repository(doc_id=text_file["doc_id"]) 347 | assert resp == [{}] 348 | -------------------------------------------------------------------------------- /tests/integration/test_long_project.py: -------------------------------------------------------------------------------- 1 | """Test suite for longitudinal REDCap Project against real REDCap server""" 2 | 3 | # pylint: disable=missing-function-docstring 4 | import os 5 | 6 | import pytest 7 | 8 | if not os.getenv("REDCAPDEMO_SUPERUSER_TOKEN"): 9 | pytest.skip( 10 | "Super user token not found, skipping integration tests", 11 | allow_module_level=True, 12 | ) 13 | 14 | 15 | @pytest.mark.integration 16 | def test_is_longitudinal(long_project): 17 | assert long_project.is_longitudinal 18 | 19 | 20 | @pytest.mark.integration 21 | def test_export_survey_link(long_project): 22 | link = long_project.export_survey_link( 23 | instrument="contact_info", event="enrollment_arm_1", record="1" 24 | ) 25 | assert link.startswith("https://redcapdemo.vumc.org/surveys/?s=") 26 | 27 | 28 | @pytest.mark.integration 29 | def test_export_survey_queue_link(long_project): 30 | link = long_project.export_survey_queue_link(record="1") 31 | assert link.startswith("https://redcapdemo.vumc.org/surveys/?sq=") 32 | 33 | 34 | @pytest.mark.integration 35 | def test_export_survey_access_code(long_project): 36 | code = long_project.export_survey_access_code( 37 | record="1", instrument="contact_info", event="enrollment_arm_1" 38 | ) 39 | assert len(code) == 9 40 | 41 | 42 | @pytest.mark.integration 43 | def test_export_survey_return_code(long_project): 44 | code = long_project.export_survey_return_code( 45 | record="1", instrument="contact_info", event="enrollment_arm_1" 46 | ) 47 | assert len(code) == 8 48 | 49 | 50 | @pytest.mark.integration 51 | def test_survey_participant_export(long_project): 52 | data = long_project.export_survey_participant_list( 53 | instrument="contact_info", event="enrollment_arm_1" 54 | ) 55 | assert len(data) == 1 56 | 57 | data = long_project.export_survey_participant_list( 58 | instrument="contact_info", format_type="df", event="enrollment_arm_1" 59 | ) 60 | assert "email" in data.columns 61 | 62 | 63 | @pytest.mark.integration 64 | 
def test_project_info_export(long_project): 65 | data = long_project.export_project_info() 66 | assert data["purpose"] == 0 67 | 68 | 69 | @pytest.mark.integration 70 | def test_users_export(long_project): 71 | data = long_project.export_users(format_type="df", df_kwargs={"index_col": "email"}) 72 | assert data.index.name == "email" 73 | 74 | 75 | @pytest.mark.integration 76 | def test_users_import_and_delete(long_project): 77 | test_user = "pandeharris@gmail.com" 78 | test_user_json = [{"username": test_user}] 79 | res = long_project.import_users(test_user_json, return_format_type="csv") 80 | 81 | assert res == "1" 82 | 83 | res = long_project.delete_users([test_user]) 84 | 85 | assert res == 1 86 | 87 | 88 | @pytest.mark.integration 89 | def test_records_export_labeled_headers(long_project): 90 | data = long_project.export_records(format_type="csv", raw_or_label_headers="label") 91 | assert "Study ID" in data 92 | 93 | 94 | @pytest.mark.integration 95 | def test_repeating_export(long_project): 96 | rep = long_project.export_repeating_instruments_events(format_type="json") 97 | 98 | assert isinstance(rep, list) 99 | 100 | 101 | @pytest.mark.integration 102 | def test_repeating_export_strictly_enforces_format(long_project): 103 | with pytest.raises(ValueError): 104 | long_project.export_repeating_instruments_events(format_type="unsupported") 105 | 106 | 107 | @pytest.mark.integration 108 | def test_import_export_repeating_forms(long_project): 109 | for format_type in ["xml", "json", "csv", "df"]: 110 | rep = long_project.export_repeating_instruments_events(format_type=format_type) 111 | res = long_project.import_repeating_instruments_events( 112 | to_import=rep, import_format=format_type 113 | ) 114 | assert res == 1 115 | 116 | 117 | @pytest.mark.integration 118 | def test_limit_export_records_forms_and_fields(long_project): 119 | # only request forms 120 | records_df = long_project.export_records( 121 | forms=["demographics", "baseline_data"], format_type="df" 122 | ) 123 | complete_cols = [col for col in records_df.columns if col.endswith("_complete")] 124 | 125 | assert long_project.def_field in records_df.index.names 126 | assert complete_cols == ["demographics_complete", "baseline_data_complete"] 127 | # only request fields 128 | records_df = long_project.export_records( 129 | fields=["study_comments"], format_type="df" 130 | ) 131 | assert long_project.def_field in records_df.index.names 132 | # request forms and fields 133 | records_df = long_project.export_records( 134 | forms=["baseline_data"], fields=["study_comments"], format_type="df" 135 | ) 136 | complete_cols = [col for col in records_df.columns if col.endswith("_complete")] 137 | 138 | assert long_project.def_field in records_df.index.names 139 | assert complete_cols == ["baseline_data_complete"] 140 | 141 | 142 | def test_delete_records_from_one_instrument_only(long_project): 143 | # Add new record to test partial deletion 144 | new_record = [ 145 | { 146 | "study_id": "3", 147 | "redcap_event_name": "enrollment_arm_1", 148 | "redcap_repeat_instrument": "", 149 | "redcap_repeat_instance": "", 150 | }, 151 | { 152 | "study_id": "3", 153 | "redcap_event_name": "visit_1_arm_1", 154 | "redcap_repeat_instrument": "", 155 | "redcap_repeat_instance": "", 156 | }, 157 | ] 158 | res = long_project.import_records(new_record) 159 | assert res["count"] == 1 160 | 161 | res = long_project.export_records(records=["3"]) 162 | assert len(res) == 2 163 | 164 | res = long_project.delete_records(records=["3"], event="visit_1_arm_1") 165 |
assert res == 1 166 | 167 | res = long_project.export_records(records=["3"]) 168 | assert len(res) == 1 169 | # restore project to original state pre-test 170 | res = long_project.delete_records(["3"]) 171 | assert res == 1 172 | 173 | 174 | @pytest.mark.integration 175 | def test_arms_export(long_project): 176 | response = long_project.export_arms() 177 | 178 | assert len(response) == 2 179 | 180 | arm_nums = [arm["arm_num"] for arm in response] 181 | arm_names = [arm["name"] for arm in response] 182 | 183 | assert arm_nums == [1, 2] 184 | assert arm_names == ["Drug A", "Drug B"] 185 | 186 | 187 | @pytest.mark.integration 188 | def test_arms_import(long_project): 189 | new_arms = [{"arm_num": 3, "name": "Drug C"}] 190 | response = long_project.import_arms(new_arms) 191 | 192 | assert response == 1 193 | 194 | # REDCap will not return an Arm unless it has an event associated with it 195 | # Need to add an event to the newly created Arm 196 | new_events = [{"event_name": "new_event", "arm_num": "3"}] 197 | response = long_project.import_events(new_events) 198 | 199 | response = long_project.export_arms() 200 | assert len(response) == 3 201 | 202 | arm_nums = [arm["arm_num"] for arm in response] 203 | arm_names = [arm["name"] for arm in response] 204 | 205 | assert arm_nums == [1, 2, 3] 206 | assert arm_names == ["Drug A", "Drug B", "Drug C"] 207 | 208 | 209 | @pytest.mark.integration 210 | def test_arms_import_rename(long_project): 211 | new_arms = [{"arm_num": 1, "name": "Drug Alpha"}] 212 | response = long_project.import_arms(new_arms) 213 | 214 | assert response == 1 215 | 216 | response = long_project.export_arms() 217 | 218 | assert len(response) == 3 219 | 220 | arm_nums = [arm["arm_num"] for arm in response] 221 | arm_names = [arm["name"] for arm in response] 222 | 223 | assert arm_nums == [1, 2, 3] 224 | assert arm_names == ["Drug Alpha", "Drug B", "Drug C"] 225 | 226 | 227 | @pytest.mark.integration 228 | def test_arms_delete(long_project): 229 | arms = [3] 230 | response = long_project.delete_arms(arms) 231 | 232 | assert response == 1 233 | 234 | response = long_project.export_arms() 235 | 236 | assert len(response) == 2 237 | 238 | arm_nums = [arm["arm_num"] for arm in response] 239 | arm_names = [arm["name"] for arm in response] 240 | 241 | assert arm_nums == [1, 2] 242 | assert arm_names == ["Drug Alpha", "Drug B"] 243 | 244 | 245 | @pytest.mark.integration 246 | def test_arms_import_override(long_project): 247 | # Cache current events, so they can be restored for subsequent tests, because arms, events, 248 | # and mappings are deleted when the 'override' parameter is used. 
249 | state_dict = { 250 | "events": long_project.export_events(), 251 | "form_event_map": long_project.export_instrument_event_mappings(), 252 | } 253 | 254 | new_arms = [{"arm_num": 3, "name": "Drug C"}] 255 | response = long_project.import_arms(new_arms) 256 | assert response == 1 257 | # Add event for new arm 258 | new_event = [{"event_name": "new_event", "arm_num": "3"}] 259 | response = long_project.import_events(new_event) 260 | 261 | response = long_project.export_arms() 262 | 263 | assert len(response) == 3 264 | 265 | new_arms = [{"arm_num": 1, "name": "Drug A"}, {"arm_num": 2, "name": "Drug B"}] 266 | response = long_project.import_arms(new_arms, override=1) 267 | 268 | assert response == 2 269 | 270 | # Restore project state 271 | response = long_project.import_events(state_dict["events"]) 272 | assert response == 16 273 | 274 | response = long_project.import_instrument_event_mappings( 275 | state_dict["form_event_map"] 276 | ) 277 | assert response == 44 278 | 279 | response = long_project.export_arms() 280 | assert len(response) == 2 281 | 282 | arm_nums = [arm["arm_num"] for arm in response] 283 | arm_names = [arm["name"] for arm in response] 284 | 285 | assert arm_nums == [1, 2] 286 | assert arm_names == ["Drug A", "Drug B"] 287 | 288 | 289 | @pytest.mark.integration 290 | def test_events_export(long_project): 291 | response = long_project.export_events() 292 | 293 | assert len(response) == 16 294 | 295 | 296 | @pytest.mark.integration 297 | def test_events_import(long_project): 298 | new_events = [{"event_name": "XYZ", "arm_num": "2"}] 299 | response = long_project.import_events(new_events) 300 | 301 | assert response == 1 302 | 303 | response = long_project.export_events() 304 | 305 | assert len(response) == 17 306 | 307 | 308 | @pytest.mark.integration 309 | def test_events_delete(long_project): 310 | events = ["xyz_arm_2"] 311 | response = long_project.delete_events(events) 312 | 313 | assert response == 1 314 | 315 | response = long_project.export_events() 316 | 317 | assert len(response) == 16 318 | 319 | 320 | @pytest.mark.integration 321 | def test_export_instruments(long_project): 322 | response = long_project.export_instruments() 323 | assert len(response) == 9 324 | 325 | 326 | @pytest.mark.integration 327 | def test_export_pdf(long_project): 328 | content, _ = long_project.export_pdf() 329 | 330 | assert isinstance(content, bytes) 331 | 332 | 333 | @pytest.mark.integration 334 | def test_fem_export(long_project): 335 | response = long_project.export_instrument_event_mappings() 336 | 337 | assert len(response) == 44 338 | 339 | 340 | @pytest.mark.integration 341 | def test_fem_import(long_project): 342 | # Cache current instrument-event mappings, so they can be restored for subsequent tests 343 | current_fem = long_project.export_instrument_event_mappings() 344 | 345 | instrument_event_mappings = [ 346 | { 347 | "arm_num": "1", 348 | "unique_event_name": "enrollment_arm_1", 349 | "form": "demographics", 350 | } 351 | ] 352 | response = long_project.import_instrument_event_mappings(instrument_event_mappings) 353 | assert response == 1 354 | 355 | response = long_project.export_instrument_event_mappings() 356 | assert len(response) == 1 357 | 358 | fem_arm_nums = [fem["arm_num"] for fem in response] 359 | fem_unique_event_names = [fem["unique_event_name"] for fem in response] 360 | fem_forms = [fem["form"] for fem in response] 361 | 362 | assert fem_arm_nums == [1] 363 | assert fem_unique_event_names == ["enrollment_arm_1"] 364 | assert fem_forms == 
["demographics"] 365 | 366 | response = long_project.import_instrument_event_mappings(current_fem) 367 | assert response == 44 368 | -------------------------------------------------------------------------------- /redcap/methods/base.py: -------------------------------------------------------------------------------- 1 | """The Base class for all REDCap methods""" 2 | 3 | from __future__ import annotations 4 | 5 | import json 6 | 7 | from typing import ( 8 | Any, 9 | Dict, 10 | List, 11 | Literal, 12 | Optional, 13 | cast, 14 | overload, 15 | Tuple, 16 | TYPE_CHECKING, 17 | Union, 18 | ) 19 | 20 | from io import StringIO 21 | 22 | from redcap.request import ( 23 | _ContentConfig, 24 | _RCRequest, 25 | RedcapError, 26 | FileUpload, 27 | Json, 28 | ) 29 | 30 | if TYPE_CHECKING: 31 | import pandas as pd 32 | 33 | # We're designing the class to be lazy by default, and not hit the API unless 34 | # explicitly requested by the user 35 | 36 | # return_type type aliases 37 | FileMap = Tuple[bytes, dict] 38 | 39 | 40 | class Base: 41 | """Base attributes and methods for the REDCap API""" 42 | 43 | def __init__( 44 | self, 45 | url: str, 46 | token: str, 47 | verify_ssl: Union[bool, str] = True, 48 | **request_kwargs, 49 | ): 50 | """Initialize a Project, validate url and token""" 51 | self._validate_url_and_token(url, token) 52 | self._url = url 53 | self._token = token 54 | self.verify_ssl = verify_ssl 55 | 56 | self._validate_request_kwargs(**request_kwargs) 57 | self._request_kwargs = request_kwargs 58 | 59 | # attributes which require API calls 60 | self._metadata: Optional[Json] = None 61 | self._forms: Optional[List[str]] = None 62 | self._field_names: Optional[List[str]] = None 63 | self._def_field: Optional[str] = None 64 | self._is_longitudinal: Optional[bool] = None 65 | 66 | @property 67 | def url(self) -> str: 68 | """API URL to a REDCap server""" 69 | return self._url 70 | 71 | @property 72 | def token(self) -> str: 73 | """API token to a project""" 74 | return self._token 75 | 76 | @property 77 | def metadata(self) -> Json: 78 | """Project metadata in JSON format""" 79 | if self._metadata is None: 80 | payload = self._initialize_payload("metadata", format_type="json") 81 | self._metadata = cast(Json, self._call_api(payload, return_type="json")) 82 | 83 | return self._metadata 84 | 85 | @property 86 | def forms(self) -> List[str]: 87 | """Project form names""" 88 | if self._forms is None: 89 | self._forms = list(set(self._filter_metadata(key="form_name"))) 90 | 91 | return self._forms 92 | 93 | @property 94 | def field_names(self) -> List[str]: 95 | """Project field names 96 | 97 | Note: 98 | These are survey field names, not export field names 99 | """ 100 | if self._field_names is None: 101 | self._field_names = self._filter_metadata(key="field_name") 102 | 103 | return self._field_names 104 | 105 | @property 106 | def def_field(self) -> str: 107 | """The 'record_id' field equivalent for a project""" 108 | if self._def_field is None: 109 | self._def_field = self.field_names[0] 110 | 111 | return self._def_field 112 | 113 | @property 114 | def is_longitudinal(self) -> bool: 115 | """Whether or not this project is longitudinal""" 116 | if self._is_longitudinal is None: 117 | try: 118 | payload = self._initialize_payload( 119 | content="formEventMapping", format_type="json" 120 | ) 121 | self._call_api(payload, return_type="json") 122 | self._is_longitudinal = True 123 | except RedcapError: 124 | # we should only get an error back if there were no events defined 125 | # for the project 126 
| self._is_longitudinal = False 127 | 128 | return self._is_longitudinal 129 | 130 | @staticmethod 131 | def _validate_url_and_token(url: str, token: str) -> None: 132 | """Run basic validation on user supplied url and token""" 133 | assert url, "Error! REDCap URL is missing" 134 | assert token, "Error! REDCap token is missing" 135 | url_actual_last_5 = url[-5:] 136 | url_expected_last_5 = "/api/" 137 | 138 | assert url_actual_last_5 == url_expected_last_5, ( 139 | f"Incorrect url format '{ url }', url must end with " 140 | f"{ url_expected_last_5 }" 141 | ) 142 | 143 | actual_token_len = len(token) 144 | expected_token_len = 32 145 | 146 | assert actual_token_len == expected_token_len, ( 147 | f"Incorrect token format '{ token }', token must be " 148 | f"{ expected_token_len } characters long" 149 | ) 150 | 151 | @staticmethod 152 | def _validate_request_kwargs(**request_kwargs): 153 | """Run basic validation on user supplied kwargs for requests""" 154 | # list of kwargs hardcoded in _RCRequest.execute(...) and self._call_api(...) 155 | hardcoded_kwargs = [ 156 | "url", 157 | "data", 158 | "verify", "verify_ssl", 159 | "return_headers", 160 | "files", 161 | "file", 162 | ] 163 | unallowed_kwargs = [ 164 | kwarg for kwarg in request_kwargs if kwarg in hardcoded_kwargs 165 | ] 166 | assert ( 167 | len(unallowed_kwargs) == 0 168 | ), f"Not allowed to define {unallowed_kwargs} when initiating object" 169 | 170 | # pylint: disable=import-outside-toplevel 171 | @staticmethod 172 | def _read_csv(buf: StringIO, **df_kwargs) -> "pd.DataFrame": 173 | """Wrapper around pandas read_csv that handles EmptyDataError""" 174 | import pandas as pd 175 | from pandas.errors import EmptyDataError 176 | 177 | try: 178 | dataframe = pd.read_csv(buf, **df_kwargs) 179 | except EmptyDataError: 180 | dataframe = pd.DataFrame() 181 | 182 | return dataframe 183 | 184 | # pylint: enable=import-outside-toplevel 185 | @staticmethod 186 | def _lookup_return_type( 187 | format_type: Literal["json", "csv", "xml", "df"], 188 | request_type: Literal["export", "import", "delete"], 189 | import_records_format: Optional[ 190 | Literal["count", "ids", "auto_ids", "nothing"] 191 | ] = None, 192 | ) -> Literal["json", "str", "int", "count_dict", "ids_list", "empty_json"]: 193 | """Look up a common return type based on format 194 | 195 | Non-standard return types will need to be passed directly 196 | to _call_api() via the return_type parameter. 197 | 198 | Args: 199 | format_type: The provided format for the API call 200 | request_type: 201 | The type of API request. Exports behave very differently 202 | from imports/deletes 203 | import_records_format: 204 | Format options from the import_records method. We 205 | need to use custom logic, because that method has 206 | different possible return types compared to all other 207 | methods 208 | """ 209 | if format_type in ["csv", "xml", "df"]: 210 | return "str" 211 | 212 | if format_type == "json": 213 | if request_type == "export": 214 | return "json" 215 | if request_type in ["import", "delete"] and not import_records_format: 216 | return "int" 217 | if import_records_format in ["count", "auto_ids"]: 218 | return "count_dict" 219 | if import_records_format == "ids": 220 | return "ids_list" 221 | if import_records_format == "nothing": 222 | return "empty_json" 223 | 224 | raise ValueError(f"Invalid format_type: { format_type }") 225 | 226 | @overload 227 | def _filter_metadata( 228 | self, 229 | key: str, 230 | field_name: None = None, 231 | ) -> list: ...
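# Illustrative calls (the key/field names below are hypothetical examples, not
# values from this project) showing how the _filter_metadata overloads above
# and below resolve:
#   self._filter_metadata(key="field_name")                     -> list of every field name
#   self._filter_metadata(key="field_label", field_name="age")  -> the single matching label, as str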
232 | 233 | @overload 234 | def _filter_metadata(self, key: str, field_name: str) -> str: ... 235 | 236 | def _filter_metadata(self, key: str, field_name: Optional[str] = None): 237 | """Safely filter project metadata based on the requested column and field_name""" 238 | res: Union[list, str] 239 | 240 | if field_name: 241 | try: 242 | res = str( 243 | [ 244 | row[key] 245 | for row in self.metadata 246 | if row["field_name"] == field_name 247 | ][0] 248 | ) 249 | except IndexError: # pragma: no cover 250 | print(f"{ key } not in metadata field: { field_name }") 251 | return "" 252 | else: 253 | res = [row[key] for row in self.metadata] 254 | 255 | return res 256 | 257 | def _initialize_payload( 258 | self, 259 | content: str, 260 | format_type: Optional[Literal["json", "csv", "xml", "df"]] = None, 261 | return_format_type: Optional[Literal["json", "csv", "xml"]] = None, 262 | record_type: Literal["flat", "eav"] = "flat", 263 | ) -> Dict[str, Any]: 264 | """Create the default dictionary for payloads 265 | 266 | This can be used as is for simple API requests or added to 267 | for more complex API requests. 268 | 269 | Args: 270 | content: 271 | The 'content' parameter documented in the REDCap API. 272 | e.g. 'record', 'metadata', 'file', 'event', etc. 273 | format_type: Format of the data returned for export methods 274 | return_format_type: Format of the data returned for import/delete methods 275 | record_type: The type of records being exported/imported 276 | """ 277 | payload = {"token": self.token, "content": content} 278 | 279 | if format_type: 280 | if format_type == "df": 281 | payload["format"] = "csv" 282 | else: 283 | payload["format"] = format_type 284 | 285 | if return_format_type: 286 | payload["returnFormat"] = return_format_type 287 | 288 | if content == "record": 289 | payload["type"] = record_type 290 | 291 | return payload 292 | 293 | def _initialize_import_payload( 294 | self, 295 | to_import: Union[List[dict], str, "pd.DataFrame"], 296 | import_format: Literal["json", "csv", "xml", "df"], 297 | return_format_type: Literal["json", "csv", "xml"], 298 | content: str, 299 | ) -> Dict[str, Any]: 300 | """Standardize the data to be imported and add it to the payload 301 | 302 | Args: 303 | to_import: array of dicts, csv/xml string, ``pandas.DataFrame`` 304 | import_format: Format of incoming data 305 | return_format_type: Format of outgoing (returned) data 306 | content: The kind of data that are imported 307 | 308 | Returns: 309 | payload: The initialized payload dictionary and updated format 310 | """ 311 | 312 | payload = self._initialize_payload( 313 | content=content, return_format_type=return_format_type 314 | ) 315 | if import_format == "df": 316 | to_import = cast("pd.DataFrame", to_import) 317 | 318 | buf = StringIO() 319 | has_named_index = to_import.index.name is not None 320 | to_import.to_csv(buf, index=has_named_index) 321 | payload["data"] = buf.getvalue() 322 | buf.close() 323 | import_format = "csv" 324 | elif import_format == "json": 325 | payload["data"] = json.dumps(to_import, separators=(",", ":")) 326 | else: 327 | # don't do anything to csv/xml 328 | to_import = cast("str", to_import) 329 | payload["data"] = to_import 330 | 331 | payload["format"] = import_format 332 | return payload 333 | 334 | def _return_data( 335 | self, 336 | response: Union[Json, str], 337 | content: Literal[ 338 | "arm", 339 | "dag", 340 | "event", 341 | "exportFieldNames", 342 | "fileRepository", 343 | "formEventMapping", 344 | "instrument", 345 | "log", 346 | "metadata", 347 | 
"participantList", 348 | "project", 349 | "record", 350 | "report", 351 | "user", 352 | "userDagMapping", 353 | "userRole", 354 | "userRoleMapping", 355 | "repeatingFormsEvents", 356 | ], 357 | format_type: Literal["json", "csv", "xml", "df"], 358 | df_kwargs: Optional[Dict[str, Any]] = None, 359 | record_type: Literal["flat", "eav"] = "flat", 360 | ): 361 | """Handle returning data for export methods 362 | 363 | This mostly just stores the logic for the default 364 | `df_kwargs` value for export methods, when returning 365 | a dataframe. 366 | 367 | Args: 368 | response: Output from _call_api 369 | content: 370 | The 'content' parameter for the API call. 371 | Same one used in _initialize_payload 372 | format_type: 373 | The format of the response. 374 | Same one used in _initialize_payload 375 | df_kwargs: 376 | Passed to `pandas.read_csv` to control construction of 377 | returned DataFrame. Different defaults exist for 378 | different content 379 | record_type: 380 | Database output structure type. 381 | Used only for records content 382 | """ 383 | if format_type != "df": 384 | return response 385 | 386 | if not df_kwargs: 387 | df_kwargs = {} 388 | 389 | if "index_col" not in df_kwargs.keys() and record_type != "eav": 390 | if content == "exportFieldNames": 391 | df_kwargs["index_col"] = "original_field_name" 392 | elif content == "metadata": 393 | df_kwargs["index_col"] = "field_name" 394 | elif content in ["report", "record"]: 395 | if self.is_longitudinal: 396 | df_kwargs["index_col"] = [self.def_field, "redcap_event_name"] 397 | else: 398 | df_kwargs["index_col"] = self.def_field 399 | 400 | response = cast(str, response) 401 | 402 | buf = StringIO(response) 403 | dataframe = self._read_csv(buf, **df_kwargs) 404 | buf.close() 405 | 406 | return dataframe 407 | 408 | def _call_api( 409 | self, 410 | payload: Dict[str, Any], 411 | return_type: Literal[ 412 | "file_map", "json", "empty_json", "count_dict", "ids_list", "str", "int" 413 | ], 414 | file: Optional[FileUpload] = None, 415 | ) -> Union[ 416 | FileMap, Json, Dict[str, int], List[dict], List[str], int, str, Literal["1"] 417 | ]: 418 | """Make a POST Request to the REDCap API 419 | 420 | Args: 421 | payload: Payload to send in POST request 422 | return_type: 423 | The data type of the return value. Used 424 | primarily for static typing, and developer 425 | understanding of the REDCap API 426 | file: 427 | File data to send with file-related API requests 428 | """ 429 | config = _ContentConfig( 430 | return_empty_json=return_type == "empty_json", 431 | return_bytes=return_type == "file_map", 432 | ) 433 | 434 | return_headers = return_type == "file_map" 435 | 436 | rcr = _RCRequest(url=self.url, payload=payload, config=config) 437 | return rcr.execute( 438 | verify_ssl=self.verify_ssl, 439 | return_headers=return_headers, 440 | file=file, 441 | **self._request_kwargs, 442 | ) 443 | -------------------------------------------------------------------------------- /tests/data/doctest_project.xml: --------------------------------------------------------------------------------
1 | [REDCap project XML export of "doctest project". The XML markup did not survive flattening to plain text; the recoverable text nodes are the project description ("This file contains the metadata, events, and data for REDCap project 'doctest project'."), the field labels (Record ID, Data Access Group, Survey Identifier, Survey Timestamp, Field 1, Checkbox Field, File Upload, Complete?), and the choice labels (Yes/No, Checked/Unchecked, Incomplete/Unverified/Complete).] --------------------------------------------------------------------------------
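A minimal usage sketch tying the Base attributes and the methods exercised in the integration tests together. It assumes the public entry point is `from redcap import Project` and that `Project` forwards `url`/`token` to `Base.__init__`; the URL, token, and record id below are placeholders, not values from this repository.

from redcap import Project

# Placeholders: a real URL must end with "/api/" and a real token is exactly
# 32 characters (see Base._validate_url_and_token above).
project = Project(
    url="https://redcap.example.org/api/",
    token="0123456789ABCDEF0123456789ABCDEF",
)

# Properties are lazy: the first access makes the API call, after which the
# result is cached on the instance (the attributes set to None in __init__).
print(project.def_field)
print(project.is_longitudinal)

# Mirrors the integration tests above: export a single record, then import
# a new arm; REDCap reports how many arms were imported.
records = project.export_records(records=["1"])
new_arms = [{"arm_num": 3, "name": "Drug C"}]
assert project.import_arms(new_arms) == 1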