├── .flake8
├── .github
│   └── workflows
│       └── cicd.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .python-version
├── .readthedocs.yml
├── CHANGELOG.md
├── CITATION.cff
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── docs
│   ├── Makefile
│   ├── make.bat
│   ├── requirements.txt
│   └── source
│       ├── api.rst
│       ├── conf.py
│       ├── dto.rst
│       ├── index.rst
│       └── scripts.rst
├── examples
│   ├── models.ipynb
│   └── tutorial.ipynb
├── pyproject.toml
├── src
│   └── fathomnet
│       ├── __init__.py
│       ├── api
│       │   ├── __init__.py
│       │   ├── activity.py
│       │   ├── boundingboxes.py
│       │   ├── comments.py
│       │   ├── darwincore.py
│       │   ├── firebase.py
│       │   ├── geoimages.py
│       │   ├── images.py
│       │   ├── imagesetuploads.py
│       │   ├── regions.py
│       │   ├── stats.py
│       │   ├── tags.py
│       │   ├── taxa.py
│       │   ├── topics.py
│       │   ├── users.py
│       │   ├── worms.py
│       │   └── xapikey.py
│       ├── dto.py
│       ├── models
│       │   ├── __init__.py
│       │   ├── bases.py
│       │   └── yolov5.py
│       ├── scripts
│       │   ├── __init__.py
│       │   └── fathomnet_generate.py
│       └── util.py
└── test
    ├── __init__.py
    ├── test_activity.py
    ├── test_boundingboxes.py
    ├── test_comments.py
    ├── test_darwincore.py
    ├── test_firebase.py
    ├── test_geoimages.py
    ├── test_images.py
    ├── test_imagesetuploads.py
    ├── test_regions.py
    ├── test_stats.py
    ├── test_tags.py
    ├── test_taxa.py
    ├── test_topics.py
    ├── test_users.py
    ├── test_worms.py
    └── test_xapikey.py
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 88
3 | extend-ignore = E203, E501, E704
--------------------------------------------------------------------------------
/.github/workflows/cicd.yml:
--------------------------------------------------------------------------------
1 | # GitHub action for CI/CD: linting/testing, releasing, and publishing
2 |
3 | name: ci
4 |
5 | on:
6 | push:
7 | branches: [ main ]
8 | pull_request:
9 | branches: [ main ]
10 | workflow_dispatch:
11 |
12 | jobs:
13 | test:
14 | runs-on: ubuntu-latest
15 | strategy:
16 | fail-fast: false
17 | matrix:
18 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
19 |
20 | steps:
21 | - name: Checkout
22 | uses: actions/checkout@v3
23 |
24 | - name: Set up Python ${{ matrix.python-version }}
25 | uses: actions/setup-python@v3
26 | with:
27 | python-version: ${{ matrix.python-version }}
28 |
29 | - name: Install flake8 and pytest
30 | run: |
31 | python -m pip install --upgrade pip
32 | python -m pip install flake8 pytest
33 |
34 | - name: Lint with flake8
35 | run: |
36 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
37 | flake8 . --count --ignore=E501,F401,C901 --exit-zero --max-line-length=127 --show-source --statistics
38 |
39 | - name: Install fathomnet
40 | run: |
41 | python -m pip install .
42 |
43 | - name: Test with pytest
44 | run: |
45 | pytest -v test
46 |
47 | release:
48 | needs: test
49 | runs-on: ubuntu-latest
50 | concurrency: release
51 | permissions:
52 | id-token: write
53 | contents: write
54 |
55 | steps:
56 | - name: Checkout
57 | uses: actions/checkout@v3
58 | with:
59 | fetch-depth: 0
60 |
61 | - name: Python Semantic Release
62 | id: semantic_release
63 | uses: python-semantic-release/python-semantic-release@master
64 | with:
65 | github_token: ${{ secrets.GITHUB_TOKEN }}
66 |
67 | outputs:
68 | released: ${{ steps.semantic_release.outputs.released }}
69 | version: ${{ steps.semantic_release.outputs.version }}
70 |
71 | publish:
72 | needs: release
73 | if: needs.release.outputs.released == 'true'
74 | runs-on: ubuntu-latest
75 |
76 | steps:
77 | - name: Checkout
78 | uses: actions/checkout@v3
79 | with:
80 | ref: v${{ needs.release.outputs.version }}
81 |
82 | - name: Set up Python 3.12
83 | uses: actions/setup-python@v3
84 | with:
85 | python-version: 3.12
86 |
87 | - name: Install uv
88 | run: |
89 | curl -LsSf https://astral.sh/uv/install.sh | sh
90 |
91 | - name: Build
92 | run: |
93 | uv build
94 |
95 | - name: Publish to PyPI
96 | run: |
97 | uv publish --token ${{ secrets.PYPI_TOKEN }}
98 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Caches
2 | *__pycache__/
3 | .pytest_cache/
4 |
5 | # Build
6 | build/
7 |
8 | # Distribution
9 | dist/
10 | *.egg-info/
11 |
12 | # MISC
13 | .vscode/settings.json
14 | .gitignore
15 | .idea
16 | demo*
17 | requirements*.lock
18 | .venv
19 | uv.lock
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/astral-sh/ruff-pre-commit
3 | rev: v0.6.1
4 | hooks:
5 | # Lint
6 | - id: ruff
7 | # Format
8 | - id: ruff-format
9 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.12.4
2 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 |
4 | sphinx:
5 | configuration: docs/source/conf.py
6 |
7 |
8 | build:
9 | os: ubuntu-22.04
10 | tools:
11 | python: "3.12"
12 |
13 |
14 | python:
15 | install:
16 | - requirements: docs/requirements.txt
17 | - method: pip
18 | path: .
19 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # CHANGELOG
2 |
3 |
4 | ## v1.8.1 (2025-04-08)
5 |
6 | ### Bug Fixes
7 |
8 | - **fathomnet-generate**: Use new DTO BaseModel syntax for model copy and JSON dump
9 | ([`8611d68`](https://github.com/fathomnet/fathomnet-py/commit/8611d68e8a8df66bf1ce1a429776e10f0e348dbc))
10 |
11 | ### Chores
12 |
13 | - Add CITATION.cff
14 | ([`d4cc5a5`](https://github.com/fathomnet/fathomnet-py/commit/d4cc5a55881f35a1c4e0e8900f5d8f7d365b89f8))
15 |
16 | ### Testing
17 |
18 | - Temporarily disable tests for broken endpoints
19 | ([`efdbede`](https://github.com/fathomnet/fathomnet-py/commit/efdbede410560ad068bffb5e611b1f482e9d94ad))
20 |
21 |
22 | ## v1.8.0 (2025-03-03)
23 |
24 | ### Bug Fixes
25 |
26 | - Fix typing for Python 3.8, 3.9
27 | ([`bb29e5f`](https://github.com/fathomnet/fathomnet-py/commit/bb29e5f3bf892ddf993fee4679d6ab71d5437ef4))
28 |
29 | ### Chores
30 |
31 | - Correct syntax for setting PATH in CI/CD workflow
32 | ([`a694975`](https://github.com/fathomnet/fathomnet-py/commit/a6949752624ae318b07c56a817948d556e9d7707))
33 |
34 | ### Features
35 |
36 | - Migrate from fathomnet.org to database.fathomnet.org; rye -> uv; dataclasses_json -> pydantic
37 | ([`fc86d07`](https://github.com/fathomnet/fathomnet-py/commit/fc86d070cb6696fa8e86803c5c48fe951baa52e6))
38 |
39 |
40 | ## v1.7.1 (2024-11-11)
41 |
42 | ### Bug Fixes
43 |
44 | - Remove debug print statements from find_by_display_name function, worms test
45 | ([`092d807`](https://github.com/fathomnet/fathomnet-py/commit/092d807b223d7c3c8718c7c40d42332a5577a6db))
46 |
47 | ### Chores
48 |
49 | - Update CI/CD workflow to set PATH for Rye installation
50 | ([`6959642`](https://github.com/fathomnet/fathomnet-py/commit/6959642b5264ec8d60d97ad8140a19d078d60642))
51 |
52 |
53 | ## v1.7.0 (2024-11-04)
54 |
55 | ### Chores
56 |
57 | - Add `docs` rye script for building docs
58 | ([`66e97f5`](https://github.com/fathomnet/fathomnet-py/commit/66e97f5485b64aed8e4c917a7de7591097f7e55a))
59 |
60 | - Swap Poetry for Rye build system
61 | ([`e698cd6`](https://github.com/fathomnet/fathomnet-py/commit/e698cd67841e9cb0c63e0d90d57ee31dcfc4aa28))
62 |
63 | ### Code Style
64 |
65 | - Apply ruff linting & formatting
66 | ([`1bdc3d8`](https://github.com/fathomnet/fathomnet-py/commit/1bdc3d81ec7134a0f05129bc5d642a4ddbb8eb48))
67 |
68 | ### Documentation
69 |
70 | - Add CONTRIBUTING.md
71 | ([`befbcf0`](https://github.com/fathomnet/fathomnet-py/commit/befbcf0071ef6967729ca11d5233e404e0c601a1))
72 |
73 | - Fix badge display in README.md
74 | ([`7c6dd4b`](https://github.com/fathomnet/fathomnet-py/commit/7c6dd4ba56358c25e99612bca0526bdfbd667cbb))
75 |
76 | - Slight tweaks to README.md
77 | ([`0895d5c`](https://github.com/fathomnet/fathomnet-py/commit/0895d5c276fe00276ae4b5220401aaeb8257497a))
78 |
79 | - Update README.md with badges
80 | ([`2fe5256`](https://github.com/fathomnet/fathomnet-py/commit/2fe52561d94172a345598aa6a925d33c6035a756))
81 |
82 | ### Features
83 |
84 | - Add function to retrieve owner institution codes by image UUID
85 | ([`773290a`](https://github.com/fathomnet/fathomnet-py/commit/773290a46f93970371129bfc9c7596044f91814c))
86 |
87 |
88 | ## v1.6.1 (2024-08-01)
89 |
90 | ### Bug Fixes
91 |
92 | - Fix bbox conversion to Pascal VOC to use 1-based pixel index
93 | ([`440afb0`](https://github.com/fathomnet/fathomnet-py/commit/440afb0f02ab5a88d3ea6827fb2b3d3e244f7653))
94 |
95 |
96 | ## v1.6.0 (2024-07-18)
97 |
98 | ### Features
99 |
100 | - Allow a start page number in fathomnet.util.page
101 | ([`62ca914`](https://github.com/fathomnet/fathomnet-py/commit/62ca91421765328c7c47c63962e144ce074463b1))
102 |
103 |
104 | ## v1.5.1 (2024-07-16)
105 |
106 | ### Bug Fixes
107 |
108 | - Fix typo in -f option for fathomnet-generate
109 | ([`c304ce1`](https://github.com/fathomnet/fathomnet-py/commit/c304ce1d165e1afe9cecce932dbcd3ebe82aea2b))
110 |
111 |
112 | ## v1.5.0 (2024-07-16)
113 |
114 | ### Bug Fixes
115 |
116 | - Make TEST_X_API_KEY use an environment variable of the same name
117 | ([`22508cf`](https://github.com/fathomnet/fathomnet-py/commit/22508cf16695621183c0a5f5fe76d14ffaf48bae))
118 |
119 | ### Chores
120 |
121 | - Add formatting/linting pre-commit setup
122 | ([`ca07b19`](https://github.com/fathomnet/fathomnet-py/commit/ca07b193aa8562161fd02375641ea050d7d632d7))
123 |
124 | - Add Python 3.12 to CI pipeline targets
125 | ([`cf979cf`](https://github.com/fathomnet/fathomnet-py/commit/cf979cf18d5db307f6adea5d7b632045e1c6e353))
126 |
127 | - Apply pre-commit hook on all files
128 | ([`0190345`](https://github.com/fathomnet/fathomnet-py/commit/01903453d9e16c7716d91e7aacf165857c982dbf))
129 |
130 | ### Documentation
131 |
132 | - Fix broken readthedocs build #27
133 | ([`3154b79`](https://github.com/fathomnet/fathomnet-py/commit/3154b790399aa7ce6def6c00b639679f6ac3bdb9))
134 |
135 | Install the current package (fathomnet-py) prior to building the Sphinx docs via RTD
136 |
137 | - Move tutorial notebook into examples dir
138 | ([`c4db176`](https://github.com/fathomnet/fathomnet-py/commit/c4db176c211235a1ce7479dacba237c68c0da6fc))
139 |
140 | - Switch order of imports/pip install in tutorial notebook
141 | ([`48afc49`](https://github.com/fathomnet/fathomnet-py/commit/48afc496c0792d662709b2b9c28826afb78914b7))
142 |
143 | switched order of imports to avoid ipyleaflet error with fathomnet install and added explanation on
144 | running outside of colab. (#28)
145 |
146 | ### Features
147 |
148 | - Add YOLO dataset generation to fathomnet-generate
149 | ([`1bacb1f`](https://github.com/fathomnet/fathomnet-py/commit/1bacb1fbbaf8802de23351fdaa9af7e8de6df73c))
150 |
151 |
152 | ## v1.4.0 (2024-04-09)
153 |
154 | ### Features
155 |
156 | - Add accepted flag to `worms.get_descendants_names`
157 | ([`de0aec2`](https://github.com/fathomnet/fathomnet-py/commit/de0aec2e04915a922b7026189e7cfc0b55aabfd6))
158 |
159 | Add support for worms-server 0.5.2
160 |
161 |
162 | ## v1.3.0 (2024-02-29)
163 |
164 | ### Features
165 |
166 | - Support worms-server 0.5.1
167 | ([`f1989bc`](https://github.com/fathomnet/fathomnet-py/commit/f1989bcfea8225d38749241709b9c98a9e47bae0))
168 |
169 | Add support for worms-server 0.5.1. This adds the `acceptedAphiaId` field to the `WormsNode` DTOs as
170 | well as a new endpoint for getting a `WormsNames` DTO by Aphia ID.
171 |
172 |
173 | ## v1.2.2 (2024-02-21)
174 |
175 | ### Bug Fixes
176 |
177 | - Correct typo in WormsNode.aphiaId
178 | ([`5feeeb0`](https://github.com/fathomnet/fathomnet-py/commit/5feeeb0fa5fd95efd22fdc0e67e5dd69b9302408))
179 |
180 | Fixes #25
181 |
182 | ### Chores
183 |
184 | - Add missing v for ref spec
185 | ([`3212b13`](https://github.com/fathomnet/fathomnet-py/commit/3212b13afcb285bb7fea482c16150bbd1971cb4c))
186 |
187 | ### Documentation
188 |
189 | - Add worms API module documentation
190 | ([`11c0112`](https://github.com/fathomnet/fathomnet-py/commit/11c0112e08afc4050bce8877cf43e1bb47419e63))
191 |
192 |
193 | ## v1.2.1 (2024-02-06)
194 |
195 |
196 | ## v1.2.0 (2024-02-06)
197 |
198 | ### Bug Fixes
199 |
200 | - Make the linter happy
201 | ([`6cc52f6`](https://github.com/fathomnet/fathomnet-py/commit/6cc52f6b6abbd1cbe1b14a435d82a9acb91e4f58))
202 |
203 | - Use worms API directly for fathomnet taxa provider
204 | ([`f887a95`](https://github.com/fathomnet/fathomnet-py/commit/f887a950cfe4741869b667410a1ceee9c92a11c9))
205 |
206 | This is a temporary workaround for a bug in Micronaut that causes the fathomnet taxa provider to
207 | fail for large trees (e.g., Scleractinia).
208 |
209 | ### Chores
210 |
211 | - Merge CI/CD workflows
212 | ([`8e0d760`](https://github.com/fathomnet/fathomnet-py/commit/8e0d76006cec956cab009bf330a068f6e7a6adf4))
213 |
214 | - Use released ref for publish in CI/CD pipeline
215 | ([`d39c482`](https://github.com/fathomnet/fathomnet-py/commit/d39c4827ea612c6ea7e8b3eabfea4f08638c98e8))
216 |
217 | ### Documentation
218 |
219 | - Add Sphinx RTD theme build requirement
220 | ([`fccd808`](https://github.com/fathomnet/fathomnet-py/commit/fccd8086997330059094baeadac5162b1d17a7e4))
221 |
222 | - Add sphinx-rtd-theme as a dev dependency
223 | ([`9be9dee`](https://github.com/fathomnet/fathomnet-py/commit/9be9deeb42636a17f0d36d37100e5406dd0778f6))
224 |
225 | - Update copyright year to 2024
226 | ([`1650bc5`](https://github.com/fathomnet/fathomnet-py/commit/1650bc5d836d80c6b6d8e2238e184834952cdf20))
227 |
228 | - Update Python version and build configuration for readthedocs
229 | ([`ef96d8d`](https://github.com/fathomnet/fathomnet-py/commit/ef96d8d46cbf83c9c5d5d0eb03b8b927be487383))
230 |
231 | ### Features
232 |
233 | - Add functions to call fast WoRMS API directly
234 | ([`21942b8`](https://github.com/fathomnet/fathomnet-py/commit/21942b837b817de9aea02d4abc399802aa55ce8f))
235 |
236 |
237 | ## v1.1.5 (2024-01-10)
238 |
239 | ### Bug Fixes
240 |
241 | - Quote boundingboxes.audit_by_concepts URL fragment, add test case
242 | ([`5ea7b45`](https://github.com/fathomnet/fathomnet-py/commit/5ea7b459278c540a31fbe63e263f9208c234e123))
243 |
244 | - Quote display/org names in users find functions
245 | ([`591e67a`](https://github.com/fathomnet/fathomnet-py/commit/591e67a3e43af9e15ca0a0140de465a1da8018f3))
246 |
247 | - Quote observer name in boundingboxes.audit_by_observer
248 | ([`b94c87e`](https://github.com/fathomnet/fathomnet-py/commit/b94c87e9351fbde116dc8ef22c58867059999bec))
249 |
250 | - Quote provider name and concept in taxa find functions, remove print from test
251 | ([`dde9eb5`](https://github.com/fathomnet/fathomnet-py/commit/dde9eb51a5da79a07a94fe5d93a7604681c035d6))
252 |
253 | - Update activity find functions for new DTO
254 | ([`cdf5617`](https://github.com/fathomnet/fathomnet-py/commit/cdf56179cc802085ff1f8adbba2f717be550f08b))
255 |
256 |
257 | ## v1.1.4 (2024-01-09)
258 |
259 | ### Bug Fixes
260 |
261 | - Quote concept in images.find_by_concept, add test
262 | ([`33aa430`](https://github.com/fathomnet/fathomnet-py/commit/33aa4301a30dd15f64aebfef57dd21914ed060e1))
263 |
264 | ### Documentation
265 |
266 | - Fix broken tests badge
267 | ([`dbfbea8`](https://github.com/fathomnet/fathomnet-py/commit/dbfbea8ab261d285a691c6f92a740ce8ba893166))
268 |
269 |
270 | ## v1.1.3 (2023-09-13)
271 |
272 | ### Bug Fixes
273 |
274 | - **api**: Improve client error exception
275 | ([`6b0b911`](https://github.com/fathomnet/fathomnet-py/commit/6b0b91110be90d7e41e7c2a8dab87cd29b331395))
276 |
277 | Add the fathomnet.util.debug_format_response to the ValueError exception raised when a status code <
278 | 500 and != 401 / 403 is returned. Fixes #16
279 |
280 | ### Documentation
281 |
282 | - Add script documentation
283 | ([`1eadc8f`](https://github.com/fathomnet/fathomnet-py/commit/1eadc8ff627307fec7cefde19918da45c086195e))
284 |
285 | Closes #5
286 |
287 |
288 | ## v1.1.2 (2023-09-12)
289 |
290 | ### Bug Fixes
291 |
292 | - Add CD
293 | ([`8979d75`](https://github.com/fathomnet/fathomnet-py/commit/8979d75b049d39c673feb0d78153d4e5a4de11c5))
294 |
295 | - Add condition to CD workflow
296 | ([`df3432d`](https://github.com/fathomnet/fathomnet-py/commit/df3432d1b5685414c64d6a8a21b020497d1b6313))
297 |
298 |
299 | ## v1.1.1 (2023-09-12)
300 |
301 | ### Bug Fixes
302 |
303 | - Remove publish step from CI (for now)
304 | ([`3ba596b`](https://github.com/fathomnet/fathomnet-py/commit/3ba596b2b240ba841cecf98f20c74d30a5e386a5))
305 |
306 | - Version in pyproject.toml
307 | ([`dd2d9a2`](https://github.com/fathomnet/fathomnet-py/commit/dd2d9a2d3e9a27610e17c119337e821fcea8bcbc))
308 |
309 |
310 | ## v1.1.0 (2023-09-12)
311 |
312 | ### Features
313 |
314 | - Set up python-semantic-release
315 | ([`4019f18`](https://github.com/fathomnet/fathomnet-py/commit/4019f1811cabc207f5972eb39b8a9a7720345f95))
316 |
317 |
318 | ## v1.0.2 (2023-09-12)
319 |
320 |
321 | ## v1.0.1 (2023-08-29)
322 |
323 |
324 | ## v1.0.0 (2023-08-03)
325 |
326 |
327 | ## v0.7.0 (2023-08-03)
328 |
329 |
330 | ## v0.6.0 (2023-02-07)
331 |
332 |
333 | ## v0.5.2 (2022-04-07)
334 |
335 |
336 | ## v0.5.1 (2022-03-31)
337 |
338 |
339 | ## v0.5.0 (2022-03-31)
340 |
341 |
342 | ## v0.4.2 (2022-03-09)
343 |
344 |
345 | ## v0.4.1 (2022-02-09)
346 |
347 |
348 | ## v0.4.0 (2022-02-09)
349 |
350 |
351 | ## v0.3.0 (2022-02-08)
352 |
353 |
354 | ## v0.2.1 (2021-11-30)
355 |
356 |
357 | ## v0.2.0 (2021-11-29)
358 |
359 |
360 | ## v0.1.0 (2021-10-19)
361 |
362 |
363 | ## v0.0.2 (2021-09-29)
364 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: 1.2.0
2 | title: "fathomnet-py"
3 | authors:
4 | - family-names: Barnard
5 | given-names: Kevin
6 | type: software
7 | message: "If you use this software, please cite both the article from preferred-citation and the software itself."
8 | preferred-citation:
9 | type: article
10 | authors:
11 | - family-names: Katija
12 | given-names: Kakani
13 | - family-names: Orenstein
14 | given-names: Eric
15 | - family-names: Schlining
16 | given-names: Brian
17 | - family-names: Lundsten
18 | given-names: Lonny
19 | - family-names: Barnard
20 | given-names: Kevin
21 | - family-names: Sainz
22 | given-names: Giovanna
23 | - family-names: Boulais
24 | given-names: Oceane
25 | - family-names: Cromwell
26 | given-names: Megan
27 | - family-names: Butler
28 | given-names: Erin
29 | - family-names: Woodward
30 | given-names: Benjamin
31 | - family-names: Bell
32 | given-names: Katherine L. C.
33 | title: "FathomNet: A global image database for enabling artificial intelligence in the ocean"
34 | journal: "Scientific Reports"
35 | volume: "12"
36 | issue: "1"
37 | year: 2022
38 | date-released: "2022-09-23"
39 | pages: "15914"
40 | doi: "10.1038/s41598-022-19939-2"
41 | url: "https://doi.org/10.1038/s41598-022-19939-2"
42 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to fathomnet-py
2 |
3 | Thanks in advance for contributing to fathomnet-py! We appreciate your help in making this project better.
4 |
5 | ## The basics
6 |
7 | The FathomNet team welcomes contributions in the form of pull requests. If you're new to the project, you may want to start by reading the [README](README.md) to get an overview of the project.
8 |
9 | For small changes (e.g., bug fixes), feel free to open a pull request right away. For larger changes, we recommend opening an issue first to discuss the proposed changes.
10 |
11 | ### Prerequisites
12 |
13 | fathomnet-py is written in Python and uses the [uv](https://docs.astral.sh/uv/) project management system. To contribute to fathomnet-py, you'll need to have Python 3.8 or later installed on your system. You can download Python from the [official website](https://www.python.org/downloads/).
14 |
15 | ### :hammer_and_wrench: Setting up your development environment
16 |
17 | To set up your development environment, follow these steps:
18 |
19 | #### 1. Install `uv`
20 |
21 | First, install `uv` by running the following command:
22 |
23 | ```bash
24 | curl -LsSf https://astral.sh/uv/install.sh | sh
25 | ```
26 |
27 | #### 2. Clone the repository
28 |
29 | Next, clone the fathomnet-py repository to your local machine:
30 |
31 | ```bash
32 | git clone git@github.com:fathomnet/fathomnet-py.git
33 | ```
34 |
35 | #### 3. Install the project dependencies
36 |
37 | Navigate to the project directory and install the project dependencies by running:
38 |
39 | ```bash
40 | uv sync
41 | ```
42 |
43 | This command creates a virtual environment at `.venv` and installs the project dependencies, including the development dependencies needed to run the tests, build the documentation, lint and format the code, and manage the pre-commit hooks.
44 |
45 | #### 4. Activate the virtual environment
46 |
47 | Activate the virtual environment by running:
48 |
49 | ```bash
50 | . .venv/bin/activate
51 | ```
52 |
53 | This will activate the virtual environment with the installed packages from the previous step.
54 |
55 | #### 5. Install the pre-commit hooks
56 |
57 | Install the pre-commit hooks by running:
58 |
59 | ```bash
60 | pre-commit install
61 | ```
62 |
63 | You can run the pre-commit hooks at any time with:
64 |
65 | ```bash
66 | pre-commit run [--all-files]
67 | ```
68 |
69 | Using the `--all-files` flag will run the pre-commit hooks on all files in the repository. If you don't use the flag, the pre-commit hooks will only run on the files you've staged for commit.
70 |
71 | ### :rocket: Development
72 |
73 | Now that you have your development environment set up, you can:
74 | 1. Make changes to the code and run the code in a consistent environment.
75 | 2. Run the tests.
76 | 3. Build the documentation.
77 | 4. Lint and format the code.
78 | 5. Commit your changes.
79 |
80 | #### Running tests
81 |
82 | To run the tests, use the following command:
83 |
84 | ```bash
85 | pytest
86 | ```
87 |
88 | This command will run the tests in the `test` directory.
89 |
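If you only need a subset of the tests while iterating on a change, standard pytest selection works as well (the module below is just one of the files under `test/`):

```bash
# Run a single test module
pytest test/test_worms.py

# Run only tests whose names match a keyword expression
pytest -k taxa
```
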
90 | #### Building the documentation
91 |
92 | To build the documentation, use the following command:
93 |
94 | ```bash
95 | make -C docs html
96 | ```
97 |
98 | This command builds the documentation using [Sphinx](https://www.sphinx-doc.org/en/master/). The documentation will be built in the `docs/build/html` directory.
99 |
100 | #### Linting and formatting the code
101 |
102 | To lint and format the code, run the pre-commit hooks:
103 |
104 | ```bash
105 | pre-commit run --all-files
106 | ```
107 |
108 | The first step in the pre-commit hook is to lint the code using [`ruff`](https://docs.astral.sh/ruff/). `ruff` is installed as a development dependency and is used to enforce code quality standards as well as format the code. If `ruff` finds any issues, it will print them to the console and exit with a non-zero status code.
109 |
110 | If you want to automatically fix the issues printed in this stage, you can run:
111 |
112 | ```bash
113 | ruff check --fix
114 | ```
115 |
116 | This will fix any issues that `ruff` can fix automatically; any remaining issues will need to be fixed manually.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Monterey Bay Aquarium Research Institute
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # fathomnet-py
2 |
3 |
4 |
5 | [](https://pypi.python.org/pypi/fathomnet)
6 | [](https://github.com/fathomnet/fathomnet-py/blob/main/LICENSE)
7 | [](https://github.com/fathomnet/fathomnet-py/actions/workflows/cicd.yml)
8 | [](https://fathomnet-py.readthedocs.io/en/latest/?badge=latest)
9 | [](https://github.com/astral-sh/uv)
10 | [](https://docs.astral.sh/ruff/)
11 |
12 |
13 |
14 | **`fathomnet-py`** is a client-side API to help scientists, researchers, and developers interact with [FathomNet Database](https://database.fathomnet.org/) data.
15 |
16 | ```python
17 | >>> from fathomnet.api import boundingboxes
18 | >>> boundingboxes.find_concepts()
19 | ['2G Robotics structured light laser', '55-gallon drum', ...]
20 | >>> from fathomnet.api import images
21 | >>> images.find_by_concept('Nanomia')
22 | [
23 | AImageDTO(
24 | id=2274942,
25 | uuid='cdbfca66-284f-48ac-a36f-7b2ac2b43533',
26 | url='https://database.fathomnet.org/static/m3/framegrabs/MiniROV/images/0056/02_18_37_20.png',
27 | ...
28 | ),
29 | ...
30 | ]
31 | >>> from fathomnet.api import taxa
32 | >>> taxa.find_children('mbari', 'Bathochordaeus')
33 | [
34 | Taxa(name='Bathochordaeus stygius', rank='species'),
35 | Taxa(name='Bathochordaeus charon', rank='species'),
36 | Taxa(name='Bathochordaeus mcnutti', rank='species')
37 | ]
38 | >>> from fathomnet.api import xapikey
39 | >>> xapikey.auth('NuCLjlNUlgHchtgDB01Sp1fABJVcWR') # your API key here
40 | AuthHeader(
41 | type='Bearer',
42 | token='eyJhbGciOiJI...'
43 | )
44 | ```
45 |
46 | The `fathomnet-py` API offers native Python interaction with the FathomNet REST API, abstracting away the underlying HTTP requests.
47 |
48 | ## Installing `fathomnet-py`
49 |
50 | `fathomnet-py` is available on PyPI:
51 |
52 | ```bash
53 | $ python -m pip install fathomnet
54 | ```
55 |
56 | ## Examples
57 |
58 | ### API Tutorial: [](https://colab.research.google.com/github/fathomnet/fathomnet-py/blob/main/examples/tutorial.ipynb)
59 |
60 | ### FathomNet Models: [](https://colab.research.google.com/github/fathomnet/fathomnet-py/blob/main/examples/models.ipynb)
61 |
62 | ## API Reference available on [Read the Docs](https://fathomnet-py.readthedocs.io/)
63 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx_rtd_theme==2.0.0
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | API Modules
2 | ===========
3 |
4 | The ``fathomnet-py`` API is provided via the ``fathomnet.api`` package, and is broken into separate modules for the various FathomNet REST servlets.
5 |
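A minimal sketch of the usage pattern shared by these modules (the function names come from the sections below; the concept string is illustrative):

.. code-block:: python

    from fathomnet.api import boundingboxes, images

    # List every concept that currently has bounding boxes
    concepts = boundingboxes.find_concepts()

    # Count the bounding boxes for a single concept
    count = boundingboxes.count_by_concept("Bathochordaeus")

    # Fetch the image records (with bounding boxes) for that concept
    records = images.find_by_concept("Bathochordaeus")
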
6 | ----
7 |
8 | Bounding Boxes
9 | --------------
10 |
11 | .. module:: fathomnet.api.boundingboxes
12 |
13 | The ``fathomnet.api.boundingboxes`` module supports bounding box operations.
14 |
15 | Create, update, & delete
16 | ^^^^^^^^^^^^^^^^^^^^^^^^
17 |
18 | .. autofunction:: create_with_dto
19 | .. autofunction:: update
20 | .. autofunction:: delete
21 |
22 | Bulk create
23 | ^^^^^^^^^^^
24 |
25 | .. autofunction:: upload_csv
26 |
27 | Find
28 | ^^^^
29 |
30 | .. autofunction:: find_by_user_defined_key
31 | .. autofunction:: find_by_uuid
32 |
33 | List & count
34 | ^^^^^^^^^^^^
35 |
36 | .. autofunction:: find_concepts
37 | .. autofunction:: find_observers
38 | .. autofunction:: find_all_user_defined_keys
39 | .. autofunction:: count_all
40 | .. autofunction:: count_by_concept
41 | .. autofunction:: count_total_by_concept
42 |
43 | Audit
44 | ^^^^^
45 |
46 | .. autofunction:: audit_by_uuid
47 | .. autofunction:: audit_by_user_defined_key
48 |
49 | ----
50 |
51 | Darwin Core
52 | -----------
53 |
54 | .. module:: fathomnet.api.darwincore
55 |
56 | The ``fathomnet.api.darwincore`` module supports owner institution Darwin Core operations.
57 |
58 | .. note:: The ``darwincore`` servlet is a work in progress; this subsection may be updated as new functionality is added.
59 |
60 | .. automodule:: fathomnet.api.darwincore
61 | :members:
62 | :noindex:
63 |
64 | ----
65 |
66 | Firebase
67 | --------
68 |
69 | .. module:: fathomnet.api.firebase
70 |
71 | The ``fathomnet.api.firebase`` module supports firebase authentication operations.
72 |
73 | .. warning:: The ``firebase`` servlet that this module wraps is designed for application-level authentication. Typical users will not need to use this module.
74 |
75 | .. automodule:: fathomnet.api.firebase
76 | :members:
77 | :noindex:
78 |
79 | ----
80 |
81 | Geo-images
82 | ----------
83 |
84 | .. module:: fathomnet.api.geoimages
85 |
86 | The ``fathomnet.api.geoimages`` module supports geo-image operations.
87 |
88 | .. automodule:: fathomnet.api.geoimages
89 | :members:
90 | :noindex:
91 |
92 | ----
93 |
94 | Images
95 | ------
96 |
97 | .. module:: fathomnet.api.images
98 |
99 | The ``fathomnet.api.images`` module supports image operations.
100 |
101 | Create, update, & delete
102 | ^^^^^^^^^^^^^^^^^^^^^^^^
103 |
104 | .. autofunction:: create_if_not_exists
105 | .. autofunction:: update
106 | .. autofunction:: delete
107 |
108 | Find
109 | ^^^^
110 |
111 | .. autofunction:: find
112 | .. autofunction:: find_all
113 | .. autofunction:: find_all_alt
114 | .. autofunction:: find_by_concept
115 | .. autofunction:: find_by_contributors_email
116 | .. autofunction:: find_by_observer
117 | .. autofunction:: find_by_sha256
118 | .. autofunction:: find_by_tag_key
119 | .. autofunction:: find_by_url
120 | .. autofunction:: find_by_uuid
121 | .. autofunction:: find_by_uuid_in_list
122 |
123 | List & count
124 | ^^^^^^^^^^^^
125 |
126 | .. autofunction:: find_distinct_submitter
127 | .. autofunction:: list_imaging_types
128 | .. autofunction:: count_all
129 | .. autofunction:: count_by_submitter
130 |
131 | ----
132 |
133 | Image Set Uploads
134 | -----------------
135 |
136 | .. module:: fathomnet.api.imagesetuploads
137 |
138 | The ``fathomnet.api.imagesetuploads`` module supports image set upload operations.
139 |
140 |
141 | Find
142 | ^^^^
143 |
144 | .. autofunction:: find_collections
145 | .. autofunction:: find_by_image_uuid
146 | .. autofunction:: find_by_contributor
147 | .. autofunction:: find_by_uuid
148 |
149 | List & count
150 | ^^^^^^^^^^^^
151 |
152 | .. autofunction:: find_rejection_reasons
153 | .. autofunction:: find_contributors
154 | .. autofunction:: count_all
155 |
156 | Compute stats
157 | ^^^^^^^^^^^^^
158 |
159 | .. autofunction:: stats
160 |
161 | ----
162 |
163 | Regions
164 | -------
165 |
166 | .. module:: fathomnet.api.regions
167 |
168 | The ``fathomnet.api.regions`` module supports marine region operations.
169 |
170 | .. automodule:: fathomnet.api.regions
171 | :members:
172 | :noindex:
173 |
174 | ----
175 |
176 | Stats
177 | -----
178 |
179 | .. module:: fathomnet.api.stats
180 |
181 | The ``fathomnet.api.stats`` module supports summary statistic operations.
182 |
183 | .. automodule:: fathomnet.api.stats
184 | :members:
185 | :noindex:
186 |
187 | ----
188 |
189 | Tags
190 | ----
191 |
192 | .. module:: fathomnet.api.tags
193 |
194 | .. note:: Tags API added in v0.4.0
195 |
196 | The ``fathomnet.api.tags`` module supports tag operations.
197 |
198 | Create, update, & delete
199 | ^^^^^^^^^^^^^^^^^^^^^^^^
200 |
201 | .. autofunction:: create_with_dto
202 | .. autofunction:: update
203 | .. autofunction:: delete
204 |
205 | Find
206 | ^^^^
207 |
208 | .. autofunction:: find_by_uuid
209 | .. autofunction:: find_by_image_uuid_and_key
210 |
211 | ----
212 |
213 | Taxa
214 | ----
215 |
216 | .. module:: fathomnet.api.taxa
217 |
218 | The ``fathomnet.api.taxa`` module supports taxonomic (phylogenetic) lookup operations.
219 |
220 | .. automodule:: fathomnet.api.taxa
221 | :members:
222 | :noindex:
223 |
224 | ----
225 |
226 | Users
227 | -----
228 |
229 | .. module:: fathomnet.api.users
230 |
231 | The ``fathomnet.api.users`` module supports user account operations.
232 |
233 | Account operations
234 | ^^^^^^^^^^^^^^^^^^
235 |
236 | .. autofunction:: create_new_api_key
237 | .. autofunction:: delete_api_key
238 | .. autofunction:: update_user_data
239 |
240 |
241 | List & count
242 | ^^^^^^^^^^^^
243 |
244 | .. autofunction:: find_all
245 | .. autofunction:: find_by_authentication
246 | .. autofunction:: find_by_email
247 | .. autofunction:: find_by_firebase_uid
248 | .. autofunction:: find_contributors_names
249 | .. autofunction:: find_expertise
250 | .. autofunction:: find_roles
251 | .. autofunction:: count_all
252 |
253 | Miscellaneous
254 | ^^^^^^^^^^^^^
255 |
256 | .. autofunction:: get_api_key
257 | .. autofunction:: verify
258 |
259 | Admin only
260 | ^^^^^^^^^^
261 |
262 | .. autofunction:: disable_by_uuid
263 | .. autofunction:: update_user_data_admin
264 |
265 | ----
266 |
267 | WoRMS
268 | -----
269 |
270 | .. module:: fathomnet.api.worms
271 |
272 | The ``fathomnet.api.worms`` module supports World Register of Marine Species (WoRMS) lookup operations via the `fast WoRMS name service `_.
273 |
274 | .. automodule:: fathomnet.api.worms
275 | :members:
276 | :noindex:
277 |
278 | ----
279 |
280 | X-API-Key
281 | ---------
282 |
283 | .. module:: fathomnet.api.xapikey
284 |
285 | The ``fathomnet.api.xapikey`` module supports X-API-Key authentication operations.
286 |
287 | .. automodule:: fathomnet.api.xapikey
288 | :members:
289 | :noindex:
290 |
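A minimal sketch of authenticating with an API key and passing the resulting header to another module (the key string is a placeholder; ``activity.find_all`` is one of several functions that accept an ``auth_header`` argument):

.. code-block:: python

    from fathomnet.api import activity, xapikey

    auth_header = xapikey.auth("YOUR-API-KEY")  # returns an AuthHeader
    recent = activity.find_all(auth_header=auth_header, limit=10)
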
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 |
16 | sys.path.insert(0, os.path.abspath(".."))
17 |
18 |
19 | # -- Project information -----------------------------------------------------
20 |
21 | project = "fathomnet-py"
22 | copyright = "2022-2024, Monterey Bay Aquarium Research Institute"
23 | author = "Kevin Barnard"
24 |
25 |
26 | # -- General configuration ---------------------------------------------------
27 |
28 | # Add any Sphinx extension module names here, as strings. They can be
29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
30 | # ones.
31 | extensions = ["sphinx.ext.autodoc"]
32 |
33 | # Add any paths that contain templates here, relative to this directory.
34 | templates_path = []
35 |
36 | # List of patterns, relative to source directory, that match files and
37 | # directories to ignore when looking for source files.
38 | # This pattern also affects html_static_path and html_extra_path.
39 | exclude_patterns = []
40 |
41 |
42 | # -- Options for HTML output -------------------------------------------------
43 |
44 | # The theme to use for HTML and HTML Help pages. See the documentation for
45 | # a list of builtin themes.
46 | #
47 | html_theme = "sphinx_rtd_theme"
48 |
49 | # Add any paths that contain custom static files (such as style sheets) here,
50 | # relative to this directory. They are copied after the builtin static files,
51 | # so a file named "default.css" will overwrite the builtin "default.css".
52 | html_static_path = []
53 |
--------------------------------------------------------------------------------
/docs/source/dto.rst:
--------------------------------------------------------------------------------
1 | Data classes
2 | ============
3 |
4 | ``fathomnet-py`` uses Pydantic models (formerly native Python dataclasses) to represent FathomNet entities.
5 |
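Assuming the DTOs are Pydantic v2 models (per the v1.8.0 migration noted in the changelog), a minimal sketch of inspecting and serializing a returned record:

.. code-block:: python

    from fathomnet.api import images

    record = images.find_by_concept("Nanomia")[0]  # an AImageDTO
    print(record.url)                 # fields are plain attributes
    print(record.model_dump_json())   # standard Pydantic v2 JSON dump
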
6 | ----
7 |
8 | .. automodule:: fathomnet.dto
9 | :members:
10 | :undoc-members:
11 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. fathomnet-py documentation master file, created by
2 | sphinx-quickstart on Tue Sep 7 16:25:28 2021.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | fathomnet-py
7 | ============
8 |
9 | ``fathomnet-py`` is a client-side API to help scientists, researchers, and developers interact with `FathomNet Database`_ data.
10 |
11 | .. code-block:: python
12 |
13 | >>> from fathomnet.api import boundingboxes
14 | >>> boundingboxes.find_concepts()
15 | ['2G Robotics structured light laser', '55-gallon drum', ...]
16 | >>> from fathomnet.api import images
17 | >>> images.find_by_concept('Nanomia')
18 | [
19 | AImageDTO(
20 | id=2274942,
21 | uuid='cdbfca66-284f-48ac-a36f-7b2ac2b43533',
22 | url='https://database.fathomnet.org/static/m3/framegrabs/MiniROV/images/0056/02_18_37_20.png',
23 | ...
24 | ),
25 | ...
26 | ]
27 | >>> from fathomnet.api import taxa
28 | >>> taxa.find_children('mbari', 'Bathochordaeus')
29 | [
30 | Taxa(name='Bathochordaeus stygius', rank='species'),
31 | Taxa(name='Bathochordaeus charon', rank='species'),
32 | Taxa(name='Bathochordaeus mcnutti', rank='species')
33 | ]
34 | >>> from fathomnet.api import xapikey
35 | >>> xapikey.auth('NuCLjlNUlgHchtgDB01Sp1fABJVcWR') # your API key here
36 | AuthHeader(
37 | type='Bearer',
38 | token='eyJhbGciOiJI...'
39 | )
40 |
41 | The ``fathomnet-py`` API offers native Python interaction with the FathomNet REST API, abstracting away the underlying HTTP requests.
42 |
43 | .. image:: https://github.com/fathomnet/fathomnet-py/actions/workflows/cicd.yml/badge.svg
44 | :target: https://github.com/fathomnet/fathomnet-py/actions/workflows/cicd.yml
45 | :alt: cicd
46 |
47 | .. image:: https://readthedocs.org/projects/fathomnet-py/badge/?version=latest
48 | :target: https://fathomnet-py.readthedocs.io/en/latest/?badge=latest
49 | :alt: Documentation Status
50 |
51 | Installing fathomnet-py
52 | -----------------------
53 |
54 | ``fathomnet-py`` is available on PyPI:
55 |
56 | .. code-block:: bash
57 |
58 | $ python -m pip install fathomnet
59 |
60 |
61 | .. toctree::
62 | :maxdepth: 2
63 | :caption: API Documentation
64 |
65 | api
66 | dto
67 | scripts
68 |
69 | .. Indices and tables
70 | .. ==================
71 | .. * :ref:`genindex`
72 | .. * :ref:`modindex`
73 | .. * :ref:`search`
74 |
75 |
76 | .. _FathomNet Database: https://database.fathomnet.org/
--------------------------------------------------------------------------------
/docs/source/scripts.rst:
--------------------------------------------------------------------------------
1 | Scripts
2 | =======
3 |
4 | This page documents the scripts that are included with ``fathomnet-py``.
5 |
6 | ----
7 |
8 | ``fathomnet-generate``
9 | ----------------------
10 |
11 | The ``fathomnet-generate`` script generates object detection datasets in common formats (COCO, Pascal VOC) from FathomNet data.
12 | It is installed by default with ``fathomnet-py``.
13 |
14 | There are two modes of invoking ``fathomnet-generate``: **output** and **count**.
15 |
16 | Output
17 | ^^^^^^
18 |
19 | **Output** mode generates the dataset and writes it to disk.
20 |
21 | Targets
22 | """""""
23 |
24 | For example, to generate a Pascal VOC dataset for the *Abraliopsis* concept, we would run:
25 |
26 | .. code-block:: bash
27 |
28 | fathomnet-generate --output /path/to/output --concepts 'Abraliopsis'
29 |
30 | This will write Pascal VOC XML files containing all FathomNet bounding boxes for *Abraliopsis* to ``/path/to/output/*.xml``.
31 |
32 | If we run the command again with the ``-v`` flag, we can see the progress of the dataset generation:
33 |
34 | .. code-block:: bash
35 |
36 | fathomnet-generate --output /path/to/output --concepts 'Abraliopsis' -v
37 |
38 | .. code-block:: text
39 |
40 | INFO:root:Successfully parsed flags
41 | INFO:root:Concept(s) specified:
42 | INFO:root:- Abraliopsis
43 | INFO:root:Fetching image records for 1 concept(s)...
44 | INFO:root:Found 59 unique images with bounding boxes
45 | INFO:root:Wrote 59 VOC files to /path/to/output
46 |
47 | The ``--concepts`` flag accepts a comma-separated list of concepts. For example, if we want both *Abraliopsis* and *Bathochordaeus*:
48 |
49 | .. code-block:: bash
50 |
51 | fathomnet-generate --output /path/to/output --concepts 'Abraliopsis,Bathochordaeus' -v
52 |
53 | .. code-block:: text
54 |
55 | INFO:root:Successfully parsed flags
56 | INFO:root:Concept(s) specified:
57 | INFO:root:- Abraliopsis
58 | INFO:root:- Bathochordaeus
59 | INFO:root:Fetching image records for 2 concept(s)...
60 | INFO:root:Found 1360 unique images with bounding boxes
61 | INFO:root:Wrote 1360 VOC files to /path/to/output
62 |
63 | It's worth noting: **the dataset will only include bounding boxes of the exact concepts you specify.**
64 | If we want to include the species in both the *Abraliopsis* and *Bathochordaeus* genera, we need to specify a taxonomy provider that will extend the concept list to include the species in those genera.
65 | For example, we can use the ``fathomnet`` taxonomy provider to do this, which includes the World Register of Marine Species (WoRMS) taxonomy and the Monterey Bay Aquarium Research Institute (MBARI) Deep-Sea Guide (DSG) taxonomy:
66 |
67 | .. code-block:: bash
68 |
69 | fathomnet-generate --output /path/to/output --concepts 'Abraliopsis,Bathochordaeus' -v --taxa fathomnet
70 |
71 | .. code-block:: text
72 |
73 | INFO:root:Successfully parsed flags
74 | INFO:root:Concept(s) specified:
75 | INFO:root:- Abraliopsis
76 | INFO:root:- Abraliopsis (Abraliopsis)
77 | INFO:root:- Abraliopsis (Abraliopsis) hoylei
78 | INFO:root:- Abraliopsis (Abraliopsis) morisii
79 | INFO:root:- Abraliopsis (Abraliopsis) pacificus
80 | INFO:root:- Abraliopsis (Abraliopsis) tui
81 | INFO:root:- Abraliopsis (Boreabraliopsis)
82 | INFO:root:- Abraliopsis (Boreabraliopsis) felis
83 | INFO:root:- Abraliopsis (Micrabralia)
84 | INFO:root:- Abraliopsis (Micrabralia) atlantica
85 | INFO:root:- Abraliopsis (Micrabralia) chuni
86 | INFO:root:- Abraliopsis (Micrabralia) gilchristi
87 | INFO:root:- Abraliopsis (Micrabralia) lineata
88 | INFO:root:- Abraliopsis (Pfefferiteuthis)
89 | INFO:root:- Abraliopsis (Pfefferiteuthis) affinis
90 | INFO:root:- Abraliopsis (Pfefferiteuthis) atlantica
91 | INFO:root:- Abraliopsis (Pfefferiteuthis) chuni
92 | INFO:root:- Abraliopsis (Pfefferiteuthis) falco
93 | INFO:root:- Abraliopsis (Watasenia)
94 | INFO:root:- Abraliopsis (Watasenia) felis
95 | INFO:root:- Abraliopsis affinis
96 | INFO:root:- Abraliopsis atlantica
97 | INFO:root:- Abraliopsis chuni
98 | INFO:root:- Abraliopsis falco
99 | INFO:root:- Abraliopsis felis
100 | INFO:root:- Abraliopsis gilchristi
101 | INFO:root:- Abraliopsis hoylei
102 | INFO:root:- Abraliopsis joubini
103 | INFO:root:- Abraliopsis lineata
104 | INFO:root:- Abraliopsis morisii
105 | INFO:root:- Abraliopsis pacificus
106 | INFO:root:- Abraliopsis pfefferi
107 | INFO:root:- Abraliopsis scintillans
108 | INFO:root:- Abraliopsis tui
109 | INFO:root:- Bathochordaeus
110 | INFO:root:- Bathochordaeus charon
111 | INFO:root:- Bathochordaeus mcnutti
112 | INFO:root:- Bathochordaeus stygius
113 | INFO:root:Fetching image records for 38 concept(s)...
114 | INFO:root:Found 3376 unique images with bounding boxes
115 | INFO:root:Wrote 3376 VOC files to /path/to/output
116 |
117 | For larger queries, it's recommended to write a file containing the concepts you want to query, one per line, and pass that file to ``fathomnet-generate`` using the ``--concepts-file`` flag.
118 | For example, we can write a file called ``concepts.txt`` containing the following:
119 |
120 | .. code-block:: text
121 |
122 | Bathochordaeus charon
123 | Bathochordaeus mcnutti
124 | Bathochordaeus stygius
125 |
126 | and then run:
127 |
128 | .. code-block:: bash
129 |
130 | fathomnet-generate --output /path/to/output --concepts-file concepts.txt -v --taxa fathomnet
131 |
132 | .. code-block:: text
133 |
134 | INFO:root:Successfully parsed flags
135 | INFO:root:Concept(s) specified:
136 | INFO:root:- Bathochordaeus charon
137 | INFO:root:- Bathochordaeus mcnutti
138 | INFO:root:- Bathochordaeus stygius
139 | INFO:root:Fetching image records for 3 concept(s)...
140 | INFO:root:Found 2013 unique images with bounding boxes
141 | INFO:root:Wrote 2013 VOC files to /path/to/output
142 |
143 | In some contexts, we want to gather all of the bounding boxes in each image, instead of only the bounding boxes for our specified concepts. We can do this by passing the ``--all`` flag:
144 |
145 | .. code-block:: bash
146 |
147 | fathomnet-generate --output /path/to/output --concepts 'Bathochordaeus' -v --all
148 |
149 | If we look at a generated XML file, we can note the inclusion of other concepts:
150 |
151 | .. code-block:: xml
152 |
153 | <annotation>
154 |     <folder>3007</folder>
155 |     <filename>00_10_24_26.png</filename>
156 |     <path>https://database.fathomnet.org/static/m3/framegrabs/Ventana/images/3007/00_10_24_26.png</path>
157 |     <source>
158 |         <database>FathomNet</database>
159 |     </source>
160 |     <size>
161 |         <width>720</width>
162 |         <height>368</height>
163 |         <depth>3</depth>
164 |     </size>
165 |     <segmented>0</segmented>
166 |     <!-- <object> entries (one per bounding box, including other concepts) elided -->
192 | </annotation>
193 |
194 | Output format
195 | """""""""""""
196 |
197 | By default, ``fathomnet-generate`` will output Pascal VOC XML files. This can be changed by passing the ``--format`` flag:
198 |
199 | .. code-block:: bash
200 |
201 | fathomnet-generate --output /path/to/output --concepts 'Bathochordaeus' -v --format coco
202 |
203 | .. code-block:: text
204 |
205 | INFO:root:Successfully parsed flags
206 | INFO:root:Concept(s) specified:
207 | INFO:root:- Bathochordaeus
208 | INFO:root:Fetching image records for 1 concept(s)...
209 | INFO:root:Found 1301 unique images with bounding boxes
210 | INFO:root:Wrote COCO dataset to /path/to/output/dataset.json
211 |
212 | The ``--format`` flag currently accepts ``coco`` and ``voc``.
213 |
214 | Image downloading
215 | """""""""""""""""
216 |
217 | By default, ``fathomnet-generate`` will not download images. Images can be downloaded to a specified directory by passing the ``--img-download`` option:
218 |
219 | .. code-block:: bash
220 |
221 | fathomnet-generate --output /path/to/output --img-download /path/to/output/images --concepts 'Abraliopsis' -v
222 |
223 | .. code-block:: text
224 |
225 | INFO:root:Creating output directory /path/to/output/images
226 | INFO:root:Successfully parsed flags
227 | INFO:root:Concept(s) specified:
228 | INFO:root:- Abraliopsis
229 | INFO:root:Fetching image records for 1 concept(s)...
230 | INFO:root:Found 59 unique images with bounding boxes
231 | INFO:root:Wrote 59 VOC files to /path/to/output
232 | 100% (59 of 59) |################################| Elapsed Time: 0:00:03 Time: 0:00:03
233 | INFO:root:Downloaded 59 new images to /path/to/output/images
234 |
235 | Note that for efficiency, ``fathomnet-generate`` will not re-download images that already exist in the specified directory. Images are renamed according to their FathomNet image UUID.
236 |
237 | Constraints
238 | """""""""""
239 |
240 | Once targets are specified, we can further constrain the dataset by passing a variety of flags; an example combining several of them follows the list. These are self-descriptive, and include:
241 |
242 | * ``--contributor-email``
243 | * ``--start`` / ``--end`` (`ISO-8601 `_ date strings)
244 | * ``--imaging-types`` (comma-separated list of imaging types to include)
245 | * ``--exclude-unverified``
246 | * ``--exclude-verified``
247 | * ``--min-longitude`` / ``--max-longitude``
248 | * ``--min-latitude`` / ``--max-latitude``
249 | * ``--min-depth`` / ``--max-depth``
250 | * ``--institutions`` (comma-separated list of institutions to include)
251 |
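For example, a query constrained by depth and date range (the values below are illustrative):

.. code-block:: bash

    fathomnet-generate --output /path/to/output --concepts 'Bathochordaeus' -v \
        --min-depth 200 --max-depth 1000 \
        --start 2015-01-01 --end 2020-12-31
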
252 | Count
253 | ^^^^^
254 |
255 | **Count** mode is effectively a dry run that prints the number of annotations that would be generated for a given query.
256 |
257 | For example, to count the number of annotations for the *Bathochordaeus* genus and its descendants:
258 |
259 | .. code-block:: bash
260 |
261 | fathomnet-generate --count --concepts 'Bathochordaeus' --taxa fathomnet
262 |
263 | .. code-block:: text
264 |
265 | concept | # boxes
266 | -----------------------|---------
267 | Bathochordaeus | 1901
268 | Bathochordaeus charon | 99
269 | Bathochordaeus mcnutti | 1259
270 | Bathochordaeus stygius | 2471
271 |
272 | All other flags described in **output** mode are available in **count** mode.
273 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "fathomnet"
3 | version = "1.8.1"
4 | description = "fathomnet-py is a client-side API to help scientists, researchers, and developers interact with FathomNet data."
5 | license = "MIT"
6 | authors = [
7 | { name = "Kevin Barnard", email = "kbarnard@mbari.org" }
8 | ]
9 | maintainers = [
10 | { name = "Kevin Barnard", email = "kbarnard@mbari.org" }
11 | ]
12 | requires-python = ">= 3.8.1"
13 | dependencies = [
14 | "requests>=2.20.0",
15 | "lxml>=4.6.0",
16 | "coco-lib>=0.1.2",
17 | "progressbar2>=3.37.0",
18 | "pyyaml>=6.0.1",
19 | "pydantic>=2.10.6",
20 | ]
21 | readme = "README.md"
22 | classifiers = [
23 | "License :: OSI Approved :: MIT License",
24 | "Operating System :: OS Independent"
25 | ]
26 |
27 | [project.optional-dependencies]
28 | models = [
29 | "appdirs>=1.4.4",
30 | "torch>=2.4.0",
31 | "opencv-python>=4.10.0.84",
32 | "pillow>=10.4.0",
33 | "torchvision>=0.19.0",
34 | "ultralytics>=8.2.79",
35 | ]
36 |
37 | [project.scripts]
38 | fathomnet-generate = "fathomnet.scripts.fathomnet_generate:main"
39 |
40 | [project.urls]
41 | Homepage = "https://database.fathomnet.org/fathomnet/#/about"
42 | Documentation = "https://fathomnet-py.readthedocs.io"
43 | Repository = "https://github.com/fathomnet/fathomnet-py.git"
44 | Issues = "https://github.com/fathomnet/fathomnet-py/issues"
45 | Changelog = "https://github.com/fathomnet/fathomnet-py/blob/main/CHANGELOG.md"
46 |
47 | [dependency-groups]
48 | dev = [
49 | "pre-commit>=3.5.0",
50 | "pytest>=8.3.4",
51 | "ruff>=0.9.7",
52 | "sphinx-rtd-theme>=3.0.2",
53 | ]
54 |
55 | [tool.semantic_release]
56 | assets = []
57 | commit_message = "{version}\n\nAutomatically generated by python-semantic-release"
58 | commit_parser = "angular"
59 | logging_use_named_masks = false
60 | major_on_zero = true
61 | tag_format = "v{version}"
62 | version_toml = [
63 | "pyproject.toml:project.version",
64 | ]
65 |
66 | [tool.semantic_release.branches.main]
67 | match = "(main|master)"
68 | prerelease_token = "rc"
69 | prerelease = false
70 |
71 | [tool.semantic_release.changelog]
72 | template_dir = "templates"
73 | changelog_file = "CHANGELOG.md"
74 | exclude_commit_patterns = []
75 |
76 | [tool.semantic_release.changelog.environment]
77 | block_start_string = "{%"
78 | block_end_string = "%}"
79 | variable_start_string = "{{"
80 | variable_end_string = "}}"
81 | comment_start_string = "{#"
82 | comment_end_string = "#}"
83 | trim_blocks = false
84 | lstrip_blocks = false
85 | newline_sequence = "\n"
86 | keep_trailing_newline = false
87 | extensions = []
88 | autoescape = true
89 |
90 | [tool.semantic_release.commit_author]
91 | env = "GIT_COMMIT_AUTHOR"
92 | default = "semantic-release "
93 |
94 | [tool.semantic_release.commit_parser_options]
95 | allowed_tags = ["build", "chore", "ci", "docs", "feat", "fix", "perf", "style", "refactor", "test"]
96 | minor_tags = ["feat"]
97 | patch_tags = ["fix", "perf"]
98 |
99 | [tool.semantic_release.remote]
100 | name = "origin"
101 | type = "github"
102 | ignore_token_for_push = false
103 |
104 | [tool.semantic_release.remote.token]
105 | env = "GH_TOKEN"
106 |
107 | [tool.semantic_release.publish]
108 | dist_glob_patterns = ["dist/*"]
109 | upload_to_vcs_release = true
110 |
111 | [tool.hatch.metadata]
112 | allow-direct-references = true
113 |
114 | [tool.hatch.build.targets.wheel]
115 | packages = ["src/fathomnet"]
116 |
117 | [build-system]
118 | requires = ["hatchling"]
119 | build-backend = "hatchling.build"
120 |
121 | [tool.pytest.ini_options]
122 | addopts = "--verbose"
123 | pythonpath = "src"
124 |
--------------------------------------------------------------------------------
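The `[project.optional-dependencies]` table above defines a `models` extra; a sketch of installing the package with and without it:

```bash
# Base install
python -m pip install fathomnet

# With the optional model-related dependencies (torch, torchvision, ultralytics, ...)
python -m pip install 'fathomnet[models]'
```
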
/src/fathomnet/__init__.py:
--------------------------------------------------------------------------------
1 | # __init__.py (fathomnet-py)
2 |
--------------------------------------------------------------------------------
/src/fathomnet/api/__init__.py:
--------------------------------------------------------------------------------
1 | # __init__.py (fathomnet-py)
2 | from typing import Union
3 | from os import getenv
4 |
5 | import requests
6 |
7 | from fathomnet.util import debug_format_response
8 |
9 | SESSION = requests.Session()
10 | FATHOMNET_API_URL_DEFAULT = "https://database.fathomnet.org/api"
11 |
12 |
13 | def get_api_url() -> str:
14 | """
15 | Get the API URL from the `FATHOMNET_API_URL` environment variable or use the default value.
16 |
17 | Returns:
18 | str: The API base URL.
19 | """
20 | return getenv("FATHOMNET_API_URL", FATHOMNET_API_URL_DEFAULT)
21 |
22 |
23 | class EndpointManager:
24 | ROOT = get_api_url()
25 | PATH = None
26 |
27 | @classmethod
28 | def url(cls, endpoint: str) -> str:
29 | if cls.PATH is None:
30 | raise NotImplementedError
31 | return "/".join([cls.ROOT, cls.PATH, endpoint])
32 |
33 | @classmethod
34 | def request(
35 | cls, method: str, endpoint: str, parse_json: bool = True, **kwargs
36 | ) -> Union[requests.Response, dict, list]:
37 | url = cls.url(endpoint)
38 | res = SESSION.request(method, url, **kwargs)
39 | if res.ok: # Status code < 400
40 | return res.json() if parse_json else res
41 | elif res.status_code == 401: # Not authorized, need to authenticate
42 | raise ValueError("Unauthorized: please authenticate first.")
43 | elif (
44 | res.status_code == 403
45 | ): # Forbidden, can't access this endpoint with given authentication
46 | raise ValueError("Forbidden: you cannot access this resource.")
47 | elif res.status_code < 500: # User error
48 | raise ValueError(
49 | "Client error ({}), please check your usage (docs: https://fathomnet-py.rtfd.io/) or open an issue at https://github.com/fathomnet/fathomnet-py/issues/new with the details below.\n{}".format(
50 | res.status_code, debug_format_response(res)
51 | )
52 | )
53 | else: # Server error, debug the response
54 | raise ValueError(
55 | "Server error ({}), please contact the FathomNet administrators with the details below.\n\n{}".format(
56 | res.status_code, debug_format_response(res)
57 | )
58 | )
59 |
60 | @classmethod
61 | def get(
62 | cls, endpoint: str, parse_json: bool = True, **kwargs
63 | ) -> Union[requests.Response, dict, list]:
64 | return cls.request("GET", endpoint, parse_json=parse_json, **kwargs)
65 |
66 | @classmethod
67 | def put(
68 | cls, endpoint: str, parse_json: bool = True, **kwargs
69 | ) -> Union[requests.Response, dict, list]:
70 | return cls.request("PUT", endpoint, parse_json=parse_json, **kwargs)
71 |
72 | @classmethod
73 | def post(
74 | cls, endpoint: str, parse_json: bool = True, **kwargs
75 | ) -> Union[requests.Response, dict, list]:
76 | return cls.request("POST", endpoint, parse_json=parse_json, **kwargs)
77 |
78 | @classmethod
79 | def delete(
80 | cls, endpoint: str, parse_json: bool = True, **kwargs
81 | ) -> Union[requests.Response, dict, list]:
82 | return cls.request("DELETE", endpoint, parse_json=parse_json, **kwargs)
83 |
--------------------------------------------------------------------------------
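The module above provides the shared request machinery: each endpoint group subclasses EndpointManager and sets PATH, and the base URL comes from the FATHOMNET_API_URL environment variable. Because ROOT is evaluated when the module is imported, the variable has to be set beforehand. A minimal sketch, assuming the package is installed (the staging URL and the Widgets endpoint group below are hypothetical, used only for illustration):

import os

# Must be set before importing fathomnet.api, since EndpointManager.ROOT is
# read from the environment at import time.
os.environ["FATHOMNET_API_URL"] = "https://staging.example.org/api"  # hypothetical URL

from fathomnet.api import EndpointManager, get_api_url

print(get_api_url())  # https://staging.example.org/api


class Widgets(EndpointManager):
    # Hypothetical endpoint group, defined only to show how url() composes paths.
    PATH = "widgets"


print(Widgets.url("count"))  # https://staging.example.org/api/widgets/count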
/src/fathomnet/api/activity.py:
--------------------------------------------------------------------------------
1 | # activity.py (fathomnet-py)
2 |
3 | from typing import List, Optional
4 |
5 | from fathomnet import dto
6 | from fathomnet.api import EndpointManager
7 |
8 |
9 | class Activity(EndpointManager):
10 | PATH = "activity"
11 |
12 |
13 | def find_all(
14 | auth_header: Optional[dto.AuthHeader] = None,
15 | start_timestamp: Optional[str] = None,
16 | end_timestamp: Optional[str] = None,
17 | limit: Optional[int] = None,
18 | offset: Optional[int] = None,
19 | include_self: Optional[bool] = None,
20 | ) -> List[dto.Activity]:
21 | """Get a list of all activity."""
22 | params = {}
23 | if start_timestamp:
24 | params["startTimestamp"] = start_timestamp
25 | if end_timestamp:
26 | params["endTimestamp"] = end_timestamp
27 | if limit:
28 | params["limit"] = limit
29 | if offset:
30 | params["offset"] = offset
31 | if include_self:
32 | params["includeSelf"] = include_self
33 | res_json = Activity.get("", params=params, auth=auth_header)
34 | return list(map(dto.Activity.from_dict, res_json))
35 |
36 |
37 | def find_by_email(
38 | email: str,
39 | auth_header: Optional[dto.AuthHeader] = None,
40 | start_timestamp: Optional[str] = None,
41 | end_timestamp: Optional[str] = None,
42 | limit: Optional[int] = None,
43 | offset: Optional[int] = None,
44 | ) -> List[dto.Activity]:
45 | """Get a list of activity by email."""
46 | params = {}
47 | if start_timestamp:
48 | params["startTimestamp"] = start_timestamp
49 | if end_timestamp:
50 | params["endTimestamp"] = end_timestamp
51 | if limit:
52 | params["limit"] = limit
53 | if offset:
54 | params["offset"] = offset
55 | res_json = Activity.get(
56 | "query/email/{}".format(email), params=params, auth=auth_header
57 | )
58 | return list(map(dto.Activity.from_dict, res_json))
59 |
60 |
61 | def find_by_email_admin(
62 | email: str,
63 | auth_header: Optional[dto.AuthHeader] = None,
64 | start_timestamp: Optional[str] = None,
65 | end_timestamp: Optional[str] = None,
66 | limit: Optional[int] = None,
67 | offset: Optional[int] = None,
68 | ) -> List[dto.Activity]:
69 | """(Admin) Get a list of activity by email. Used to support notification applications."""
70 | params = {}
71 | if start_timestamp:
72 | params["startTimestamp"] = start_timestamp
73 | if end_timestamp:
74 | params["endTimestamp"] = end_timestamp
75 | if limit:
76 | params["limit"] = limit
77 | if offset:
78 | params["offset"] = offset
79 | res_json = Activity.get(
80 | "admin/query/email/{}".format(email), params=params, auth=auth_header
81 | )
82 | return list(map(dto.Activity.from_dict, res_json))
83 |
--------------------------------------------------------------------------------
/src/fathomnet/api/boundingboxes.py:
--------------------------------------------------------------------------------
1 | # boundingboxes.py (fathomnet-py)
2 | from typing import BinaryIO, List, Optional
3 | from urllib.parse import quote
4 |
5 | from fathomnet import dto
6 | from fathomnet.api import EndpointManager
7 |
8 |
9 | class BoundingBoxes(EndpointManager):
10 | PATH = "boundingboxes"
11 |
12 |
13 | def create_with_dto(
14 | bounding_box: dto.BoundingBoxDTO, auth_header: Optional[dto.AuthHeader] = None
15 | ) -> dto.BoundingBoxDTO:
16 | """Create a bounding box."""
17 | res_json = BoundingBoxes.post("", json=bounding_box.to_dict(), auth=auth_header)
18 | return dto.BoundingBoxDTO.from_dict(res_json)
19 |
20 |
21 | def count_all() -> dto.Count:
22 | """Get a count of all bounding boxes."""
23 | res_json = BoundingBoxes.get("count")
24 | return dto.Count.from_dict(res_json)
25 |
26 |
27 | def find_concepts() -> List[str]:
28 | """Get a list of all concepts."""
29 | res_json = BoundingBoxes.get("list/concepts")
30 | return res_json
31 |
32 |
33 | def count_total_by_concept() -> List[dto.ByConceptCount]:
34 | """Get a count of bounding boxes for each concept."""
35 | res_json = BoundingBoxes.get("list/counts")
36 | return list(map(dto.ByConceptCount.from_dict, res_json))
37 |
38 |
39 | def find_observers() -> List[str]:
40 | """Get a list of all observers."""
41 | res_json = BoundingBoxes.get("list/observers")
42 | return res_json
43 |
44 |
45 | def count_by_concept(concept: str) -> dto.ByConceptCount:
46 | """Get a count of bounding boxes for a concept."""
47 | res_json = BoundingBoxes.get("query/count/{}".format(concept))
48 | return dto.ByConceptCount.from_dict(res_json)
49 |
50 |
51 | def find_by_user_defined_key(user_defined_key: str) -> List[dto.BoundingBoxDTO]:
52 | """Get a list of bounding boxes by a user-defined key."""
53 | res_json = BoundingBoxes.get("query/userdefinedkey/{}".format(user_defined_key))
54 | return list(map(dto.BoundingBoxDTO.from_dict, res_json))
55 |
56 |
57 | def find_all_user_defined_keys() -> List[str]:
58 | """Get a list of all user-defined keys."""
59 | res_json = BoundingBoxes.get("query/userdefinedkeys")
60 | return res_json
61 |
62 |
63 | def upload_csv(
64 | csv_fp: BinaryIO, auth_header: Optional[dto.AuthHeader] = None
65 | ) -> dto.Message:
66 | """Upload a CSV of bounding boxes."""
67 | res_json = BoundingBoxes.post("upload/csv", files={"csv": csv_fp}, auth=auth_header)
68 | return dto.Message.from_dict(res_json)
69 |
70 |
71 | def find_by_uuid(uuid: str) -> dto.BoundingBoxDTO:
72 | """Get a bounding box by UUID."""
73 | res_json = BoundingBoxes.get(uuid)
74 | return dto.BoundingBoxDTO.from_dict(res_json)
75 |
76 |
77 | def update(
78 | uuid: str,
79 | bounding_box: dto.ABoundingBoxDTO,
80 | auth_header: Optional[dto.AuthHeader] = None,
81 | ) -> dto.BoundingBoxDTO:
82 | """Update a bounding box."""
83 | res_json = BoundingBoxes.put(uuid, json=bounding_box.to_dict(), auth=auth_header)
84 | return dto.BoundingBoxDTO.from_dict(res_json)
85 |
86 |
87 | def delete(uuid: str, auth_header: Optional[dto.AuthHeader] = None):
88 | """Delete a bounding box."""
89 | BoundingBoxes.delete(uuid, auth=auth_header)
90 |
91 |
92 | def audit_by_uuid(uuid: str) -> List[dto.BoundingBoxDTO]:
93 | """Get an audit of a bounding box by UUID."""
94 | res_json = BoundingBoxes.get("audit/uuid/{}".format(uuid))
95 | return list(map(dto.BoundingBoxDTO.from_dict, res_json))
96 |
97 |
98 | def audit_by_user_defined_key(user_defined_key: str) -> List[dto.BoundingBoxDTO]:
99 | """Get an audit of a bounding box by user-defined key."""
100 | res_json = BoundingBoxes.get("audit/userdefinedkey/{}".format(user_defined_key))
101 | return list(map(dto.BoundingBoxDTO.from_dict, res_json))
102 |
103 |
104 | def find_searchable_concepts() -> List[str]:
105 | """Get a list of searchable concepts."""
106 | res_json = BoundingBoxes.get("list/searchable")
107 | return res_json
108 |
109 |
110 | def find_by_observer_uuid(
111 | uuid: str, pageable: Optional[dto.Pageable] = None
112 | ) -> List[dto.BoundingBoxDTO]:
113 | """Get a list of bounding boxes by observer UUID."""
114 | res_json = BoundingBoxes.get(
115 | "query/observer/{}".format(uuid),
116 | params=pageable.to_params() if pageable else None,
117 | )
118 | return list(map(dto.BoundingBoxDTO.from_dict, res_json.get("content", [])))
119 |
120 |
121 | def find_by_verifier_uuid(
122 | uuid: str, pageable: Optional[dto.Pageable] = None
123 | ) -> List[dto.BoundingBoxDTO]:
124 | """Get a list of bounding boxes by verifier UUID."""
125 | res_json = BoundingBoxes.get(
126 | "query/verifier/{}".format(uuid),
127 | params=pageable.to_params() if pageable else None,
128 | )
129 | return list(map(dto.BoundingBoxDTO.from_dict, res_json.get("content", [])))
130 |
131 |
132 | def audit_by_concepts(
133 | concepts: List[str],
134 | start_timestamp: Optional[str] = None,
135 | end_timestamp: Optional[str] = None,
136 | limit: Optional[int] = None,
137 | offset: Optional[int] = None,
138 | ) -> List[dto.BoundingBoxDTO]:
139 | """Get an audit of bounding boxes by concepts."""
140 | params = {}
141 | if start_timestamp:
142 | params["startTimestamp"] = start_timestamp
143 | if end_timestamp:
144 | params["endTimestamp"] = end_timestamp
145 | if limit:
146 | params["limit"] = limit
147 | if offset:
148 | params["offset"] = offset
149 | res_json = BoundingBoxes.get(
150 | "audit/concepts/{}".format(quote(",".join(concepts))), params=params
151 | )
152 | return list(map(dto.BoundingBoxDTO.from_dict, res_json))
153 |
154 |
155 | def audit_by_verifier(
156 | uuid: str,
157 | start_timestamp: Optional[str] = None,
158 | end_timestamp: Optional[str] = None,
159 | limit: Optional[int] = None,
160 | offset: Optional[int] = None,
161 | ) -> List[dto.BoundingBoxDTO]:
162 | """Get an audit of bounding boxes by verifier UUID."""
163 | params = {}
164 | if start_timestamp:
165 | params["startTimestamp"] = start_timestamp
166 | if end_timestamp:
167 | params["endTimestamp"] = end_timestamp
168 | if limit:
169 | params["limit"] = limit
170 | if offset:
171 | params["offset"] = offset
172 | res_json = BoundingBoxes.get("audit/verifier/{}".format(uuid), params=params)
173 | return list(map(dto.BoundingBoxDTO.from_dict, res_json))
174 |
175 |
176 | def audit_by_observer(
177 | observer: str,
178 | start_timestamp: Optional[str] = None,
179 | end_timestamp: Optional[str] = None,
180 | limit: Optional[int] = None,
181 | offset: Optional[int] = None,
182 | ) -> List[dto.BoundingBoxDTO]:
183 | """Get an audit of bounding boxes by observer."""
184 | params = {}
185 | if start_timestamp:
186 | params["startTimestamp"] = start_timestamp
187 | if end_timestamp:
188 | params["endTimestamp"] = end_timestamp
189 | if limit:
190 | params["limit"] = limit
191 | if offset:
192 | params["offset"] = offset
193 | res_json = BoundingBoxes.get(
194 | "audit/observer/{}".format(quote(observer)), params=params
195 | )
196 | return list(map(dto.BoundingBoxDTO.from_dict, res_json))
197 |
--------------------------------------------------------------------------------
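A short usage sketch for the read-only endpoints above, assuming the package is installed and the public FathomNet API is reachable:

from fathomnet.api import boundingboxes

# Overall count of bounding boxes in the database.
total = boundingboxes.count_all()
print(total.objectType, total.count)

# Per-concept counts; show the five most-annotated concepts.
by_concept = boundingboxes.count_total_by_concept()
for entry in sorted(by_concept, key=lambda c: c.count or 0, reverse=True)[:5]:
    print(entry.concept, entry.count)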
/src/fathomnet/api/comments.py:
--------------------------------------------------------------------------------
1 | # comments.py (fathomnet-py)
2 |
3 | from typing import List, Optional
4 |
5 | from fathomnet import dto
6 | from fathomnet.api import EndpointManager
7 |
8 |
9 | class Comments(EndpointManager):
10 | PATH = "comments"
11 |
12 |
13 | def create(
14 | uuid: str,
15 | comment_content: dto.BoundingBoxCommentContent,
16 | auth_header: Optional[dto.AuthHeader] = None,
17 | ) -> dto.BoundingBoxComment:
18 | """Create a comment."""
19 | res_json = Comments.post(
20 | "boundingbox/{}".format(uuid), json=comment_content.to_dict(), auth=auth_header
21 | )
22 | return dto.BoundingBoxComment.from_dict(res_json)
23 |
24 |
25 | def find_by_uuid(uuid: str) -> dto.BoundingBoxComment:
26 | """Get a comment by uuid."""
27 | res_json = Comments.get(uuid)
28 | return dto.BoundingBoxComment.from_dict(res_json)
29 |
30 |
31 | def update(
32 | uuid: str,
33 | comment_content: dto.BoundingBoxCommentContent,
34 | auth_header: Optional[dto.AuthHeader] = None,
35 | ) -> dto.BoundingBoxComment:
36 | """Update a comment."""
37 | res_json = Comments.put(uuid, json=comment_content.to_dict(), auth=auth_header)
38 | return dto.BoundingBoxComment.from_dict(res_json)
39 |
40 |
41 | def delete(uuid: str, auth_header: Optional[dto.AuthHeader] = None) -> bool:
42 | """Delete a comment. Returns True if the deletion succeeded."""
43 | res = Comments.delete(uuid, parse_json=False, auth=auth_header)
44 | return res.status_code == 200
45 |
46 |
47 | def find_by_bounding_box_uuid(
48 | uuid: str, auth_header: Optional[dto.AuthHeader] = None
49 | ) -> List[dto.BoundingBoxComment]:
50 | """Get a list of comments by bounding box uuid."""
51 | res_json = Comments.get("boundingbox/{}".format(uuid), auth=auth_header)
52 | return list(map(dto.BoundingBoxComment.from_dict, res_json))
53 |
54 |
55 | def find_by_email(
56 | email: str,
57 | pageable: Optional[dto.Pageable] = None,
58 | auth_header: Optional[dto.AuthHeader] = None,
59 | ) -> List[dto.BoundingBoxComment]:
60 | """Get a list of comments by email."""
61 | params = {"email": email}
62 | if pageable:
63 | params.update(dict(pageable.to_params()))
64 | res_json = Comments.get("query/email", params=params, auth=auth_header)
65 | return list(map(dto.BoundingBoxComment.from_dict, res_json.get("content", [])))
66 |
67 |
68 | def flag(
69 | uuid: str, value: bool, auth_header: Optional[dto.AuthHeader] = None
70 | ) -> dto.BoundingBoxComment:
71 | """Flag a comment."""
72 | res_json = Comments.post(
73 | "flag/{}".format(uuid), params={"flag": value}, auth=auth_header
74 | )
75 | return dto.BoundingBoxComment.from_dict(res_json)
76 |
--------------------------------------------------------------------------------
/src/fathomnet/api/darwincore.py:
--------------------------------------------------------------------------------
1 | # darwincore.py (fathomnet-py)
2 | from typing import List
3 |
4 | from fathomnet.api import EndpointManager
5 |
6 |
7 | class DarwinCore(EndpointManager):
8 | PATH = "darwincore"
9 |
10 |
11 | def index() -> str:
12 | """Get the darwin core index page."""
13 | res = DarwinCore.get("", parse_json=False)
14 | return res.text
15 |
16 |
17 | def find_owner_institution_codes() -> List[str]:
18 | """Get a list of owner institutions."""
19 | res_json = DarwinCore.get("list/ownerinstitutions")
20 | return res_json
21 |
22 |
23 | def find_owner_institutions_by_image_uuid(image_uuid: str) -> List[str]:
24 | """Get a list of owner institutions by image UUID."""
25 | res_json = DarwinCore.get(f"query/ownerinstitutions/{image_uuid}")
26 | return res_json
27 |
--------------------------------------------------------------------------------
/src/fathomnet/api/firebase.py:
--------------------------------------------------------------------------------
1 | # firebase.py (fathomnet-py)
2 | from typing import Optional
3 |
4 | from fathomnet import dto
5 | from fathomnet.api import EndpointManager
6 |
7 |
8 | class Firebase(EndpointManager):
9 | PATH = "firebase"
10 |
11 |
12 | def auth() -> dto.AuthHeader:
13 | """Authenticate via firebase and get a JWT."""
14 | raise NotImplementedError # TODO figure out firebase authentication
15 |
16 | res_json = Firebase.post("auth", json={})
17 | return dto.AuthHeader.from_dict(res_json)
18 |
19 |
20 | def test(auth_header: Optional[dto.AuthHeader] = None) -> dto.Message:
21 | """Test an authorization token."""
22 | res_json = Firebase.get("test", auth=auth_header)
23 | return dto.Message.from_dict(res_json)
24 |
--------------------------------------------------------------------------------
/src/fathomnet/api/geoimages.py:
--------------------------------------------------------------------------------
1 | # geoimages.py (fathomnet-py)
2 | from typing import List, Optional
3 |
4 | from fathomnet import dto
5 | from fathomnet.api import EndpointManager
6 |
7 |
8 | class GeoImages(EndpointManager):
9 | PATH = "geoimages"
10 |
11 |
12 | def find_all(pageable: Optional[dto.Pageable] = None) -> List[dto.GeoImage]:
13 | """Get a paged list of all geo images."""
14 | res_json = GeoImages.get("", params=pageable.to_params() if pageable else None)
15 | # Note: schema inconsistent with response, need to grab the 'content' object
16 | return list(map(dto.GeoImage.from_dict, res_json.get("content", [])))
17 |
18 |
19 | def count(
20 | geo_image_constraints: dto.GeoImageConstraints,
21 | ) -> dto.GeoImageConstraintsCount:
22 | """Get a constrained count of geo images."""
23 | res_json = GeoImages.post("count", json=geo_image_constraints.to_dict())
24 | return dto.GeoImageConstraintsCount.from_dict(res_json)
25 |
26 |
27 | def find(geo_image_constraints: dto.GeoImageConstraints) -> List[dto.GeoImage]:
28 | """Get a constrained list of geo images."""
29 | res_json = GeoImages.post("query", json=geo_image_constraints.to_dict())
30 | return list(map(dto.GeoImage.from_dict, res_json))
31 |
32 |
33 | def find_by_image_set_upload_uuid(
34 | image_set_upload_uuid: str,
35 | limit: Optional[int] = None,
36 | offset: Optional[int] = None,
37 | ) -> List[dto.GeoImage]:
38 | """Get a list of geo images corresponding to an image set upload UUID."""
39 | res_json = GeoImages.get(
40 | "query/imagesetupload/{}".format(image_set_upload_uuid),
41 | params={"limit": limit, "offset": offset},
42 | )
43 | return list(map(dto.GeoImage.from_dict, res_json))
44 |
--------------------------------------------------------------------------------
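A sketch of a constrained geo-image query using the GeoImageConstraints DTO defined in dto.py; the concept and depth range below are illustrative values, not a recommendation:

from fathomnet import dto
from fathomnet.api import geoimages

# Example constraints: a single concept within a depth band, capped at 10 results.
constraints = dto.GeoImageConstraints(
    concept="Bathochordaeus",  # illustrative concept
    minDepth=100.0,
    maxDepth=1000.0,
    limit=10,
)

print(geoimages.count(constraints).count, "matching geo images")

for geo_image in geoimages.find(constraints):
    print(geo_image.url, geo_image.latitude, geo_image.longitude, geo_image.depthMeters)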
/src/fathomnet/api/images.py:
--------------------------------------------------------------------------------
1 | # images.py (fathomnet-py)
2 | from typing import List, Optional
3 | from urllib.parse import quote, quote_plus
4 |
5 | from fathomnet import dto
6 | from fathomnet.api import EndpointManager
7 |
8 |
9 | class Images(EndpointManager):
10 | PATH = "images"
11 |
12 |
13 | def find_all_alt(pageable: Optional[dto.Pageable] = None) -> List[dto.AImageDTO]:
14 | """Get a paged list of all images. (alternative endpoint)"""
15 | res_json = Images.get("", params=pageable.to_params() if pageable else None)
16 | return list(map(dto.AImageDTO.from_dict, res_json.get("content", [])))
17 |
18 |
19 | def create_if_not_exists(
20 | images: List[dto.Image], auth_header: Optional[dto.AuthHeader] = None
21 | ) -> List[dto.AImageDTO]:
22 | """Create the given images if they don't already exist."""
23 | res_json = Images.post(
24 | "", json=list(map(dto.Image.to_dict, images)), auth=auth_header
25 | )
26 | return list(map(dto.AImageDTO.from_dict, res_json))
27 |
28 |
29 | def count_all() -> dto.Count:
30 | """Get a count of all images."""
31 | res_json = Images.get("count")
32 | return dto.Count.from_dict(res_json)
33 |
34 |
35 | def find_all(pageable: Optional[dto.Pageable] = None) -> List[dto.AImageDTO]:
36 | """Get a paged list of all images."""
37 | res_json = Images.get("list/all", params=pageable.to_params() if pageable else None)
38 | # Note: schema inconsistent with response, need to grab the 'content' object
39 | return list(map(dto.AImageDTO.from_dict, res_json.get("content", [])))
40 |
41 |
42 | def find_distinct_submitter() -> List[str]:
43 | """Get a list of all submitters."""
44 | res_json = Images.get("list/contributors")
45 | return res_json
46 |
47 |
48 | def list_imaging_types() -> List[str]:
49 | """Get a list of all imaging types."""
50 | res_json = Images.get("list/imagingtypes")
51 | return res_json
52 |
53 |
54 | def find(geo_image_constraints: dto.GeoImageConstraints) -> List[dto.AImageDTO]:
55 | """Get a constrained list of images."""
56 | res_json = Images.post("query", json=geo_image_constraints.to_dict())
57 | return list(map(dto.AImageDTO.from_dict, res_json))
58 |
59 |
60 | def find_by_concept(concept: str, taxa: Optional[str] = None) -> List[dto.AImageDTO]:
61 | """Get a list of images by concept (and optionally taxa provider)."""
62 | res_json = Images.get(
63 | "query/concept/{}".format(quote(concept)),
64 | params={"taxa": taxa} if taxa else None,
65 | )
66 | return list(map(dto.AImageDTO.from_dict, res_json))
67 |
68 |
69 | def find_by_contributors_email(contributors_email: str) -> List[dto.AImageDTO]:
70 | """Get a list of images by contributor."""
71 | res_json = Images.get("query/contributor/{}".format(contributors_email))
72 | return list(map(dto.AImageDTO.from_dict, res_json))
73 |
74 |
75 | def count_by_submitter(contributors_email: str) -> dto.ByContributorCount:
76 | """Get a count of images by contributor."""
77 | res_json = Images.get("query/count/contributor/{}".format(contributors_email))
78 | return dto.ByContributorCount.from_dict(res_json)
79 |
80 |
81 | def find_by_observer(observer: str) -> List[dto.AImageDTO]:
82 | """Get a list of images by observer."""
83 | res_json = Images.get("query/observer/{}".format(observer))
84 | return list(map(dto.AImageDTO.from_dict, res_json))
85 |
86 |
87 | def find_by_sha256(sha256: str) -> List[dto.AImageDTO]:
88 | """Get a list of images by SHA256 hash."""
89 | res_json = Images.get("query/sha256/{}".format(sha256))
90 | return list(map(dto.AImageDTO.from_dict, res_json))
91 |
92 |
93 | def find_by_tag_key(key: str, value: str) -> List[dto.AImageDTO]:
94 | """Get a list of images by a specified tag key-value pair."""
95 | res_json = Images.get("query/tags", params={"key": key, "value": value})
96 | return list(map(dto.AImageDTO.from_dict, res_json))
97 |
98 |
99 | def find_by_url(url: str) -> dto.AImageDTO:
100 | """Get an image by URL."""
101 | res_json = Images.get("query/url/{}".format(quote_plus(url)))
102 | return dto.AImageDTO.from_dict(res_json)
103 |
104 |
105 | def find_by_uuid_in_list(uuids: List[str]) -> List[dto.AImageDTO]:
106 | """Get a list of images corresponding to a specified list of UUIDs."""
107 | res_json = Images.post("query/uuids", json=uuids)
108 | return list(map(dto.AImageDTO.from_dict, res_json))
109 |
110 |
111 | def find_by_uuid(uuid: str) -> dto.AImageDTO:
112 | """Get an image by UUID."""
113 | res_json = Images.get(uuid)
114 | return dto.AImageDTO.from_dict(res_json)
115 |
116 |
117 | def update(
118 | uuid: str, image: dto.AImageDTO, auth_header: Optional[dto.AuthHeader] = None
119 | ) -> dto.AImageDTO:
120 | """Update an image."""
121 | res_json = Images.put(uuid, json=image.to_dict(), auth=auth_header)
122 | return dto.AImageDTO.from_dict(res_json)
123 |
124 |
125 | def delete(uuid: str, auth_header: Optional[dto.AuthHeader] = None):
126 | """Delete an image."""
127 | Images.delete(uuid, auth=auth_header)
128 |
--------------------------------------------------------------------------------
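A sketch that combines find_by_concept with the AImageDTO.to_pascal_voc helper from dto.py; the concept is an illustrative example and may return no results:

from fathomnet.api import images

results = images.find_by_concept("Aurelia aurita")  # illustrative concept
print(len(results), "images found")

if results:
    # Export the first image's bounding boxes as Pascal VOC XML; to_pascal_voc
    # falls back to the image URL when no local path is given.
    print(results[0].to_pascal_voc(pretty_print=True))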
/src/fathomnet/api/imagesetuploads.py:
--------------------------------------------------------------------------------
1 | # imagesetuploads.py (fathomnet-py)
2 | from typing import List, Optional
3 |
4 | from fathomnet import dto
5 | from fathomnet.api import EndpointManager
6 |
7 |
8 | class ImageSetUploads(EndpointManager):
9 | PATH = "imagesetuploads"
10 |
11 |
12 | def count_all() -> dto.Count:
13 | """Count all image set uploads."""
14 | res_json = ImageSetUploads.get("count")
15 | return dto.Count.from_dict(res_json)
16 |
17 |
18 | def find_collections(
19 | pageable: Optional[dto.Pageable] = None,
20 | ) -> List[dto.BImageSetUploadDTO]:
21 | """Get a paged list of all image set uploads."""
22 | res_json = ImageSetUploads.get(
23 | "list/all", params=pageable.to_params() if pageable else None
24 | )
25 | # Note: schema inconsistent with response, need to grab the 'content' object
26 | return list(map(dto.BImageSetUploadDTO.from_dict, res_json.get("content", [])))
27 |
28 |
29 | def find_contributors() -> List[str]:
30 | """Get a list of all contributors."""
31 | res_json = ImageSetUploads.get("list/contributors")
32 | return res_json
33 |
34 |
35 | def find_rejection_reasons() -> List[str]:
36 | """Get a list of all rejection reasons."""
37 | res_json = ImageSetUploads.get("list/rejectionreasons")
38 | return res_json
39 |
40 |
41 | def find_by_contributor(contributors_email: str) -> List[dto.BImageSetUploadDTO]:
42 | """Get a list of image set uploads by contributor."""
43 | res_json = ImageSetUploads.get("query/contributor/{}".format(contributors_email))
44 | return list(map(dto.BImageSetUploadDTO.from_dict, res_json))
45 |
46 |
47 | def find_by_image_uuid(image_uuid: str) -> List[dto.BImageSetUploadDTO]:
48 | """Get an image set upload by UUID."""
49 | res_json = ImageSetUploads.get("query/image/{}".format(image_uuid))
50 | return list(map(dto.BImageSetUploadDTO.from_dict, res_json))
51 |
52 |
53 | def stats(image_set_upload_uuid: str) -> dto.ImageSetUploadStats:
54 | """Get image set upload statistics for a corresponding image set upload UUID."""
55 | res_json = ImageSetUploads.get("stats/{}".format(image_set_upload_uuid))
56 | return dto.ImageSetUploadStats.from_dict(res_json)
57 |
58 |
59 | def find_by_uuid(uuid: str) -> dto.BImageSetUploadDTO:
60 | """Get an image set upload by UUID."""
61 | res_json = ImageSetUploads.get(uuid)
62 | return dto.BImageSetUploadDTO.from_dict(res_json)
63 |
--------------------------------------------------------------------------------
/src/fathomnet/api/regions.py:
--------------------------------------------------------------------------------
1 | # regions.py (fathomnet-py)
2 | from typing import List, Optional
3 |
4 | from fathomnet import dto
5 | from fathomnet.api import EndpointManager
6 |
7 |
8 | class Regions(EndpointManager):
9 | PATH = "regions"
10 |
11 |
12 | def find_all() -> List[dto.MarineRegion]:
13 | """Get a list of all marine regions."""
14 | res_json = Regions.get("")
15 | return list(map(dto.MarineRegion.from_dict, res_json))
16 |
17 |
18 | def count_all() -> int:
19 | """Get a count of all marine regions."""
20 | res = Regions.get("count", parse_json=False)
21 | return int(res.content)
22 |
23 |
24 | def find_all_paged(pageable: Optional[dto.Pageable] = None) -> List[dto.MarineRegion]:
25 | """Get a paged list of all marine regions."""
26 | res_json = Regions.get(
27 | "list/all", params=pageable.to_params() if pageable else None
28 | )
29 | return list(map(dto.MarineRegion.from_dict, res_json.get("content", [])))
30 |
31 |
32 | def sync(auth_header: Optional[dto.AuthHeader] = None) -> int:
33 | """Synchronize."""
34 | res = Regions.get("sync", parse_json=False, auth=auth_header)
35 | return int(res.content)
36 |
37 |
38 | def find_at(latitude: float, longitude: float) -> List[dto.MarineRegion]:
39 | """Get the marine regions at the given latitude and longitude."""
40 | res_json = Regions.get("at", params={"latitude": latitude, "longitude": longitude})
41 | return list(map(dto.MarineRegion.from_dict, res_json))
42 |
--------------------------------------------------------------------------------
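A sketch of a point-in-region lookup; the coordinates (roughly Monterey Bay) are only an example:

from fathomnet.api import regions

# Which marine regions contain this latitude/longitude?
for region in regions.find_at(36.7253, -121.7840):
    print(region.MRGID, region.name)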
/src/fathomnet/api/stats.py:
--------------------------------------------------------------------------------
1 | # stats.py (fathomnet-py)
2 | from typing import List
3 |
4 | from fathomnet.api import EndpointManager
5 |
6 |
7 | class Stats(EndpointManager):
8 | PATH = "stats"
9 |
10 |
11 | def most_popular_searches() -> List[str]:
12 | """Get a list of the most popular searches."""
13 | res_json = Stats.get("list/popular/searches")
14 | return res_json
15 |
--------------------------------------------------------------------------------
/src/fathomnet/api/tags.py:
--------------------------------------------------------------------------------
1 | # tags.py (fathomnet-py)
2 | from typing import List, Optional
3 |
4 | from fathomnet import dto
5 | from fathomnet.api import EndpointManager
6 |
7 |
8 | class Tags(EndpointManager):
9 | PATH = "tags"
10 |
11 |
12 | def create_with_dto(
13 | tag: dto.TagDTO, auth_header: Optional[dto.AuthHeader] = None
14 | ) -> dto.TagDTO:
15 | """Create a tag."""
16 | res_json = Tags.post("", json=tag.to_dict(), auth=auth_header)
17 | return dto.TagDTO.from_dict(res_json)
18 |
19 |
20 | def find_by_uuid(uuid: str) -> dto.TagDTO:
21 | """Get a tag by UUID."""
22 | res_json = Tags.get(uuid)
23 | return dto.TagDTO.from_dict(res_json)
24 |
25 |
26 | def find_by_image_uuid_and_key(image_uuid: str, key: str) -> List[dto.TagDTO]:
27 | """Get a tag by image UUID and key."""
28 | res_json = Tags.get("query/bykey/{}/{}".format(image_uuid, key))
29 | return list(map(dto.TagDTO.from_dict, res_json))
30 |
31 |
32 | def update(
33 | uuid: str, tag: dto.TagDTO, auth_header: Optional[dto.AuthHeader] = None
34 | ) -> dto.TagDTO:
35 | """Update a tag."""
36 | res_json = Tags.put(uuid, json=tag.to_dict(), auth=auth_header)
37 | return dto.TagDTO.from_dict(res_json)
38 |
39 |
40 | def delete(uuid: str, auth_header: Optional[dto.AuthHeader] = None):
41 | """Delete a tag."""
42 | Tags.delete(uuid, auth=auth_header)
43 |
--------------------------------------------------------------------------------
/src/fathomnet/api/taxa.py:
--------------------------------------------------------------------------------
1 | # taxa.py (fathomnet-py)
2 | from typing import List
3 | from urllib.parse import quote
4 |
5 | from fathomnet import dto
6 | from fathomnet.api import EndpointManager
7 |
8 |
9 | class Taxa(EndpointManager):
10 | PATH = "taxa"
11 |
12 |
13 | def index() -> str:
14 | """Get the taxa index page."""
15 | res = Taxa.get("", parse_json=False)
16 | return res.text
17 |
18 |
19 | def list_taxa_providers() -> List[str]:
20 | """Get a list of all taxa providers."""
21 | res_json = Taxa.get("list/providers")
22 | return res_json
23 |
24 |
25 | def find_children(provider_name: str, concept: str) -> List[dto.Taxa]:
26 | """Find the taxonomic children for a concept according to a taxa provider."""
27 | res_json = Taxa.get(
28 | "query/children/{}/{}".format(quote(provider_name), quote(concept))
29 | )
30 | return list(map(dto.Taxa.from_dict, res_json))
31 |
32 |
33 | def find_parent(provider_name: str, concept: str) -> dto.Taxa:
34 | """Find the taxonomic parent for a concept according to a taxa provider."""
35 | res_json = Taxa.get(
36 | "query/parent/{}/{}".format(quote(provider_name), quote(concept))
37 | )
38 | return dto.Taxa.from_dict(res_json)
39 |
40 |
41 | def find_taxa(provider_name: str, concept: str) -> List[dto.Taxa]:
42 | """Get a list of all taxonomic descendants of a concept (including the concept itself) according to a taxa provider."""
43 | res_json = Taxa.get("query/{}/{}".format(quote(provider_name), quote(concept)))
44 | return list(map(dto.Taxa.from_dict, res_json))
45 |
--------------------------------------------------------------------------------
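A sketch of the taxa helpers above, assuming the package is installed; the provider is taken from list_taxa_providers and the concept is illustrative:

from fathomnet.api import taxa

providers = taxa.list_taxa_providers()
print(providers)

if providers:
    # Descendants of a concept (including the concept itself) per the first provider.
    for node in taxa.find_taxa(providers[0], "Octopoda"):
        print(node)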
/src/fathomnet/api/topics.py:
--------------------------------------------------------------------------------
1 | # topics.py (fathomnet-py)
2 |
3 | from typing import List, Optional
4 |
5 | from fathomnet import dto
6 | from fathomnet.api import EndpointManager
7 |
8 |
9 | class Topics(EndpointManager):
10 | PATH = "topics"
11 |
12 |
13 | def create(
14 | topic: dto.Topic, auth_header: Optional[dto.AuthHeader] = None
15 | ) -> dto.FollowedTopic:
16 | """Follow a topic."""
17 | res_json = Topics.post("", json=topic.to_dict(), auth=auth_header)
18 | return dto.FollowedTopic.from_dict(res_json)
19 |
20 |
21 | def find_by_uuid(
22 | uuid: str, auth_header: Optional[dto.AuthHeader] = None
23 | ) -> dto.FollowedTopic:
24 | """Get a followed topic by uuid."""
25 | res_json = Topics.get(uuid, auth=auth_header)
26 | return dto.FollowedTopic.from_dict(res_json)
27 |
28 |
29 | def update(
30 | uuid: str, topic: dto.Topic, auth_header: Optional[dto.AuthHeader] = None
31 | ) -> dto.FollowedTopic:
32 | """Update a followed topic."""
33 | res_json = Topics.put(uuid, json=topic.to_dict(), auth=auth_header)
34 | return dto.FollowedTopic.from_dict(res_json)
35 |
36 |
37 | def delete(
38 | uuid: str, auth_header: Optional[dto.AuthHeader] = None
39 | ) -> dto.FollowedTopic:
40 | """Unfollow a topic."""
41 | res_json = Topics.delete(uuid, auth=auth_header)
42 | return dto.FollowedTopic.from_dict(res_json)
43 |
44 |
45 | def find(auth_header: Optional[dto.AuthHeader] = None) -> List[dto.FollowedTopic]:
46 | """Get a list of followed topics."""
47 | res_json = Topics.get("", auth=auth_header)
48 | return list(map(dto.FollowedTopic.from_dict, res_json))
49 |
50 |
51 | def find_by_email(
52 | email: str, auth_header: Optional[dto.AuthHeader] = None
53 | ) -> List[dto.FollowedTopic]:
54 | """(Admin) Get a list of followed topics by email."""
55 | res_json = Topics.get("query/email/{}".format(email), auth=auth_header)
56 | return list(map(dto.FollowedTopic.from_dict, res_json))
57 |
--------------------------------------------------------------------------------
/src/fathomnet/api/users.py:
--------------------------------------------------------------------------------
1 | # users.py (fathomnet-py)
2 | from typing import List, Optional
3 | from urllib.parse import quote
4 |
5 | from fathomnet import dto
6 | from fathomnet.api import EndpointManager
7 |
8 |
9 | class Users(EndpointManager):
10 | PATH = "users"
11 |
12 |
13 | def find_all(
14 | pageable: Optional[dto.Pageable] = None,
15 | auth_header: Optional[dto.AuthHeader] = None,
16 | ) -> List[dto.FathomnetIdentity]:
17 | """Get a paged list of all users."""
18 | res_json = Users.get(
19 | "list/users",
20 | params=pageable.to_params() if pageable else None,
21 | auth=auth_header,
22 | )
23 | return list(map(dto.FathomnetIdentity.from_dict, res_json.get("content", [])))
24 |
25 |
26 | def find_all_admin(
27 | pageable: Optional[dto.Pageable] = None,
28 | auth_header: Optional[dto.AuthHeader] = None,
29 | ) -> List[dto.FathomnetIdentity]:
30 | """(Admin) Get a paged list of all users."""
31 | res_json = Users.get(
32 | "", params=pageable.to_params() if pageable else None, auth=auth_header
33 | )
34 | return list(map(dto.FathomnetIdentity.from_dict, res_json.get("content", [])))
35 |
36 |
37 | def update_user_data(
38 | fathomnet_id_mutation: dto.FathomnetIdMutation,
39 | auth_header: Optional[dto.AuthHeader] = None,
40 | ) -> dto.FathomnetIdentity:
41 | """Update a user's account data."""
42 | res_json = Users.put("", json=fathomnet_id_mutation.to_dict(), auth=auth_header)
43 | return dto.FathomnetIdentity.from_dict(res_json)
44 |
45 |
46 | def update_user_data_admin(
47 | uuid: str,
48 | fathomnet_id_admin_mutation: dto.FathomnetIdAdminMutation,
49 | auth_header: Optional[dto.AuthHeader] = None,
50 | ) -> dto.FathomnetIdentity:
51 | """(Admin) Update a user's account data."""
52 | res_json = Users.put(
53 | "admin/{}".format(uuid),
54 | json=fathomnet_id_admin_mutation.to_dict(),
55 | auth=auth_header,
56 | )
57 | return dto.FathomnetIdentity.from_dict(res_json)
58 |
59 |
60 | def get_api_key(auth_header: Optional[dto.AuthHeader] = None) -> dto.ApiKey:
61 | """Get a user's API key."""
62 | res_json = Users.get("apikey", auth=auth_header)
63 | return dto.ApiKey.from_dict(res_json)
64 |
65 |
66 | def create_new_api_key(auth_header: Optional[dto.AuthHeader] = None) -> dto.ApiKey:
67 | """Create a new API key for a user."""
68 | res_json = Users.post("apikey", json=None, auth=auth_header)
69 | return dto.ApiKey.from_dict(res_json)
70 |
71 |
72 | def delete_api_key(auth_header: Optional[dto.AuthHeader] = None):
73 | """Delete a user's API key."""
74 | Users.delete("apikey", auth=auth_header)
75 |
76 |
77 | def count_all() -> dto.Count:
78 | """Get a count of all users."""
79 | res_json = Users.get("count")
80 | return dto.Count.from_dict(res_json)
81 |
82 |
83 | def disable_by_uuid(uuid: str, auth_header: Optional[dto.AuthHeader] = None) -> dto.FathomnetIdentity:
84 | """(Admin) Disable an account by its UUID."""
85 | res_json = Users.put("disable/{}".format(uuid), auth=auth_header)
86 | return dto.FathomnetIdentity.from_dict(res_json)
87 |
88 |
89 | def find_expertise() -> List[str]:
90 | """Get a list of all expertise levels."""
91 | res_json = Users.get("list/expertise")
92 | return res_json
93 |
94 |
95 | def find_contributors_names() -> List[str]:
96 | """Get a list of all contributor names."""
97 | res_json = Users.get("list/names")
98 | return res_json
99 |
100 |
101 | def find_roles() -> List[str]:
102 | """Get a list of all user roles."""
103 | res_json = Users.get("list/roles")
104 | return res_json
105 |
106 |
107 | def find_by_authentication(
108 | auth_header: Optional[dto.AuthHeader] = None,
109 | ) -> dto.FathomnetIdentity:
110 | """Find a user by authentication."""
111 | res_json = Users.get("query", auth=auth_header)
112 | return dto.FathomnetIdentity.from_dict(res_json)
113 |
114 |
115 | def find_by_email(
116 | email: str, auth_header: Optional[dto.AuthHeader] = None
117 | ) -> dto.FathomnetIdentity:
118 | """Find a user by email."""
119 | res_json = Users.get("query/email/{}".format(email), auth=auth_header)
120 | return dto.FathomnetIdentity.from_dict(res_json)
121 |
122 |
123 | def find_by_firebase_uid(
124 | uid: str, auth_header: Optional[dto.AuthHeader] = None
125 | ) -> dto.FathomnetIdentity:
126 | """Find a user by Firebase UID."""
127 | res_json = Users.get("query/uid/{}".format(uid), auth=auth_header)
128 | return dto.FathomnetIdentity.from_dict(res_json)
129 |
130 |
131 | def verify(auth_header: Optional[dto.AuthHeader] = None) -> dto.Authentication:
132 | """Get the contents of an authorization token."""
133 | res_json = Users.get("verification", auth=auth_header)
134 | return dto.Authentication.from_dict(res_json)
135 |
136 |
137 | def find_by_display_name(
138 | display_name: str, pageable: Optional[dto.Pageable] = None
139 | ) -> List[dto.FathomnetIdentity]:
140 | """Find a user by display name."""
141 | res_json = Users.get(
142 | "query/name/{}".format(quote(display_name)),
143 | params=pageable.to_params() if pageable else None,
144 | )
145 | return list(map(dto.FathomnetIdentity.from_dict, res_json))
146 |
147 |
148 | def find_by_organization(
149 | organization: str, pageable: Optional[dto.Pageable] = None
150 | ) -> List[dto.FathomnetIdentity]:
151 | """Find a user by organization."""
152 | res_json = Users.get(
153 | "query/organization/{}".format(quote(organization)),
154 | params=pageable.to_params() if pageable else None,
155 | )
156 | return list(map(dto.FathomnetIdentity.from_dict, res_json))
157 |
158 |
159 | def find_by_uuid(uuid: str) -> dto.FathomnetIdentity:
160 | """Find a user by UUID."""
161 | res_json = Users.get("query/uuid/{}".format(uuid))
162 | return dto.FathomnetIdentity.from_dict(res_json)
163 |
164 |
165 | def find_badges_by_uuid(uuid: str) -> List[dto.Badge]:
166 | """Find a user's badges by UUID."""
167 | res_json = Users.get("badges/{}".format(uuid))
168 | return list(map(dto.Badge.from_dict, res_json))
169 |
--------------------------------------------------------------------------------
/src/fathomnet/api/worms.py:
--------------------------------------------------------------------------------
1 | from os import getenv
2 | from typing import List, Optional
3 |
4 | from fathomnet.api import EndpointManager
5 | from fathomnet.dto import WormsNames, WormsNode
6 |
7 | FATHOMNET_WORMS_API_URL_DEFAULT = "https://database.fathomnet.org/worms"
8 |
9 |
10 | def get_worms_api_url() -> str:
11 | """
12 | Get the Fast WoRMS API URL from the `FATHOMNET_WORMS_API_URL` environment variable or use the default value.
13 |
14 | Returns:
15 | str: The Fast WoRMS API base URL.
16 | """
17 | return getenv("FATHOMNET_WORMS_API_URL", FATHOMNET_WORMS_API_URL_DEFAULT)
18 |
19 |
20 | class Worms(EndpointManager):
21 | ROOT = get_worms_api_url()
22 | PATH = ""
23 |
24 |
25 | def count_names() -> int:
26 | """Get the total number of names available."""
27 | return int(Worms.get("names/count"))
28 |
29 |
30 | def get_all_names(limit: int = 100, offset: int = 0) -> List[str]:
31 | """Get all names."""
32 | res_json = Worms.get("names", params={"limit": limit, "offset": offset})
33 | return res_json["items"]
34 |
35 |
36 | def get_names_by_aphia_id(aphia_id: int) -> WormsNames:
37 | """Get the names data for a given Aphia ID."""
38 | res_json = Worms.get(f"names/aphiaid/{aphia_id}")
39 | return WormsNames.from_dict(res_json)
40 |
41 |
42 | def get_ancestors_names(name: str) -> List[str]:
43 | """Get all ancestors' names of a given name."""
44 | return Worms.get(f"ancestors/{name}")
45 |
46 |
47 | def get_children_names(name: str) -> List[str]:
48 | """Get all children's names of a given name."""
49 | return Worms.get(f"children/{name}")
50 |
51 |
52 | def get_descendants_names(name: str, accepted: bool = False) -> List[str]:
53 | """Get all descendants' names of a given name."""
54 | return Worms.get(f"descendants/{name}", params={"accepted": accepted})
55 |
56 |
57 | def get_parent_name(name: str) -> str:
58 | """Get the parent's name of a given name."""
59 | return Worms.get(f"parent/{name}")
60 |
61 |
62 | def find_names_containing(fragment: str) -> List[str]:
63 | """Get all names that contain a fragment."""
64 | return Worms.get(f"query/contains/{fragment}")
65 |
66 |
67 | def find_names_by_prefix(prefix: str) -> List[str]:
68 | """Get all names that start with a prefix."""
69 | return Worms.get(f"query/startswith/{prefix}")
70 |
71 |
72 | def get_synonyms_for_name(name: str) -> List[str]:
73 | """Get all synonyms for a name."""
74 | return Worms.get(f"synonyms/{name}")
75 |
76 |
77 | def get_ancestors(name: str) -> WormsNode:
78 | """Get a taxa tree from the root node to the node for the given name."""
79 | res_json = Worms.get(f"taxa/ancestors/{name}")
80 | return WormsNode.from_dict(res_json)
81 |
82 |
83 | def get_children(name: str) -> List[WormsNode]:
84 | """Get the child taxa nodes of a given name."""
85 | res_json = Worms.get(f"taxa/children/{name}")
86 | return [WormsNode.from_dict(item) for item in res_json]
87 |
88 |
89 | def get_descendants(name: str) -> WormsNode:
90 | """Get a taxa tree from the given name to the leaves."""
91 | res_json = Worms.get(f"taxa/descendants/{name}")
92 | return WormsNode.from_dict(res_json)
93 |
94 |
95 | def get_parent(name: str) -> WormsNode:
96 | """Get the parent taxa node of a given name."""
97 | res_json = Worms.get(f"taxa/parent/{name}")
98 | return WormsNode.from_dict(res_json)
99 |
100 |
101 | def get_info(name: str) -> WormsNode:
102 | """Get a taxa node for a given name."""
103 | res_json = Worms.get(f"taxa/info/{name}")
104 | return WormsNode.from_dict(res_json)
105 |
106 |
107 | def find_taxa_by_prefix(
108 | prefix: str, rank: Optional[str] = None, parent: Optional[str] = None
109 | ) -> List[WormsNode]:
110 | """Get all taxa nodes that start with a prefix."""
111 | params = {}
112 | if rank is not None:
113 | params["rank"] = rank
114 | if parent is not None:
115 | params["parent"] = parent
116 |
117 | res_json = Worms.get(f"taxa/query/startswith/{prefix}", params=params)
118 | return [WormsNode.from_dict(item) for item in res_json]
119 |
--------------------------------------------------------------------------------
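A sketch of the fast-WoRMS helpers above, assuming the worms service at the default URL is reachable; the prefix is illustrative:

from fathomnet.api import worms

names = worms.find_names_by_prefix("Bathochordaeus")
print(names)

if names:
    # Fetch the taxon node and its parent node for the first match.
    print(worms.get_info(names[0]))
    print(worms.get_parent(names[0]))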
/src/fathomnet/api/xapikey.py:
--------------------------------------------------------------------------------
1 | # xapikey.py (fathomnet-py)
2 | from typing import Optional
3 |
4 | from fathomnet import dto
5 | from fathomnet.api import SESSION, EndpointManager
6 |
7 |
8 | class XApiKey(EndpointManager):
9 | PATH = "xapikey"
10 |
11 |
12 | def auth(x_api_key_token: str) -> dto.AuthHeader:
13 | """Exchange an X-API-key token for a JWT."""
14 | res_json = XApiKey.post(
15 | "auth", headers={"X-API-Key": x_api_key_token}
16 | ) # TODO figure out request body
17 | auth_header = dto.AuthHeader.from_dict(res_json)
18 | SESSION.auth = auth_header # Update session auth
19 | return auth_header
20 |
21 |
22 | def index(auth_header: Optional[dto.AuthHeader] = None) -> dto.Message:
23 | """Test a JWT to ensure it's valid."""
24 | res_json = XApiKey.get("test", auth=auth_header)
25 | return dto.Message.from_dict(res_json)
26 |
--------------------------------------------------------------------------------
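A sketch of the X-API-Key authentication flow; FATHOMNET_API_KEY is a placeholder environment variable chosen here for illustration, not one read by the package itself. Because xapikey.auth also installs the returned header on the shared SESSION, subsequent calls in the same process are authenticated automatically, but the header can also be passed explicitly:

import os

from fathomnet.api import users, xapikey

# Exchange a FathomNet X-API-Key for a JWT-backed AuthHeader.
auth_header = xapikey.auth(os.environ["FATHOMNET_API_KEY"])  # placeholder variable

# Explicitly authenticated call; SESSION-level auth would also cover this.
me = users.find_by_authentication(auth_header=auth_header)
print(me.email, me.roles)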
/src/fathomnet/dto.py:
--------------------------------------------------------------------------------
1 | # dto.py (fathomnet-py)
2 | import os
3 | from enum import Enum
4 | from typing import Any, Dict, List, Optional
5 |
6 | from pydantic import BaseModel
7 | from lxml import etree
8 | from lxml.builder import E
9 | from requests.auth import AuthBase
10 |
11 |
12 | class DTO(BaseModel):
13 | @classmethod
14 | def from_dict(cls, d: Dict[str, Any]) -> "DTO":
15 | return cls(**d)
16 |
17 | def to_dict(self) -> Dict[str, Any]:
18 | return self.model_dump()
19 |
20 |
21 | class AImageDTO(DTO):
22 | id: Optional[int] = None
23 | uuid: Optional[str] = None
24 | url: Optional[str] = None
25 | valid: Optional[bool] = None
26 | imagingType: Optional[str] = None
27 | depthMeters: Optional[float] = None
28 | height: Optional[int] = None
29 | lastValidation: Optional[str] = None
30 | latitude: Optional[float] = None
31 | longitude: Optional[float] = None
32 | altitude: Optional[float] = None
33 | salinity: Optional[float] = None
34 | temperatureCelsius: Optional[float] = None
35 | oxygenMlL: Optional[float] = None
36 | pressureDbar: Optional[float] = None
37 | mediaType: Optional[str] = None
38 | modified: Optional[str] = None
39 | sha256: Optional[str] = None
40 | contributorsEmail: Optional[str] = None
41 | tags: Optional[List["ATagDTO"]] = None
42 | timestamp: Optional[str] = None
43 | width: Optional[int] = None
44 | boundingBoxes: Optional[List["ABoundingBoxDTO"]] = None
45 | createdTimestamp: Optional[str] = None
46 | lastUpdatedTimestamp: Optional[str] = None
47 |
48 | def to_pascal_voc(
49 | self, path: Optional[str] = None, pretty_print: bool = False
50 | ) -> str:
51 | """Convert to a Pascal VOC.
52 |
53 | :param path: Path to the image file, defaults to using the image URL if available
54 | :type path: Optional[str], optional
55 | :param pretty_print: Set true to add indentation and newlines in XML, defaults to False
56 | :type pretty_print: bool, optional
57 | :raises ValueError: Raised if both the path and image URL are unspecified
58 | :return: Pascal VOC encoded string
59 | :rtype: str
60 | """
61 | if path is None: # If no path provided, use URL
62 | if self.url is None:
63 | raise ValueError(
64 | "Either the path argument or the image URL must be specified."
65 | )
66 | path = self.url
67 |
68 | # Parse the folder name and file name
69 | dir_path, base_name = os.path.split(path)
70 | folder_name = os.path.basename(dir_path)
71 |
72 | # Encode bounding box data into object tags
73 | boxes = self.boundingBoxes or []
74 | objects = [
75 | E.object(
76 | E.name(
77 | box.concept
78 | + (
79 | " {}".format(box.altConcept)
80 | if box.altConcept is not None
81 | else ""
82 | )
83 | ),
84 | E.pose("Unspecified"),
85 | E.truncated(
86 | str(int(box.truncated) if box.truncated is not None else 0)
87 | ),
88 | E.difficult("0"),
89 | E.occluded(str(int(box.occluded) if box.occluded is not None else 0)),
90 | E.bndbox(
91 | E.xmin(str(1 + box.x)),
92 | E.xmax(str(1 + box.x + box.width)),
93 | E.ymin(str(1 + box.y)),
94 | E.ymax(str(1 + box.y + box.height)),
95 | ),
96 | )
97 | for box in boxes
98 | ]
99 |
100 | # Encode annotation data
101 | annotation = E.annotation(
102 | E.folder(folder_name),
103 | E.filename(base_name),
104 | E.path(path),
105 | E.source(E.database("FathomNet")),
106 | E.size(E.width(str(self.width)), E.height(str(self.height)), E.depth("3")),
107 | E.segmented("0"),
108 | *objects,
109 | )
110 |
111 | return etree.tostring(annotation, pretty_print=pretty_print).decode()
112 |
113 |
114 | class ATagDTO(DTO):
115 | id: Optional[int] = None
116 | uuid: Optional[str] = None
117 | key: Optional[str] = None
118 | mediaType: Optional[str] = None
119 | value: Optional[str] = None
120 | createdTimestamp: Optional[str] = None
121 | lastUpdatedTimestamp: Optional[str] = None
122 | image: Optional[AImageDTO] = None
123 |
124 |
125 | class TagDTO(ATagDTO):
126 | imageUuid: Optional[str] = None
127 |
128 |
129 | class ABoundingBoxDTO(DTO):
130 | id: Optional[int] = None
131 | uuid: Optional[str] = None
132 | userDefinedKey: Optional[str] = None
133 | concept: Optional[str] = None
134 | altConcept: Optional[str] = None
135 | image: Optional[AImageDTO] = None
136 | groupOf: Optional[bool] = None
137 | height: Optional[int] = None
138 | occluded: Optional[bool] = None
139 | observer: Optional[str] = None
140 | truncated: Optional[bool] = None
141 | width: Optional[int] = None
142 | x: Optional[int] = None
143 | y: Optional[int] = None
144 | rejected: Optional[bool] = None
145 | verified: Optional[bool] = None
146 | verifier: Optional[str] = None
147 | verificationTimestamp: Optional[str] = None
148 | createdTimestamp: Optional[str] = None
149 | lastUpdatedTimestamp: Optional[str] = None
150 |
151 |
152 | class ApiKey(DTO):
153 | uuid: Optional[str] = None
154 | apiKey: Optional[str] = None
155 |
156 |
157 | class AuthHeader(AuthBase):
158 | type: Optional[str] = None
159 | token: Optional[str] = None
160 |
161 | @property
162 | def auth_dict(self):
163 | return {"Authorization": "{} {}".format(self.type, self.token)}
164 |
165 | def __call__(self, r):
166 | r.headers.update(self.auth_dict)
167 | return r
168 |
169 |
170 | class Authentication(DTO):
171 | attributes: Optional[object] = None
172 |
173 |
174 | class BoundingBoxDTO(ABoundingBoxDTO):
175 | imageUuid: Optional[str] = None
176 |
177 |
178 | class BoundingBox(DTO):
179 | uuid: Optional[str] = None
180 | id: Optional[int] = None
181 | userDefinedKey: Optional[str] = None
182 | concept: Optional[str] = None
183 | altConcept: Optional[str] = None
184 | image: Optional["Image"] = None
185 | groupOf: Optional[bool] = None
186 | height: Optional[int] = None
187 | occluded: Optional[bool] = None
188 | observer: Optional[str] = None
189 | truncated: Optional[bool] = None
190 | width: Optional[int] = None
191 | x: Optional[int] = None
192 | y: Optional[int] = None
193 | createdTimestamp: Optional[str] = None
194 | lastUpdatedTimestamp: Optional[str] = None
195 | verified: Optional[bool] = None
196 | verifier: Optional[str] = None
197 | verificationTimestamp: Optional[str] = None
198 |
199 |
200 | class ByConceptCount(DTO):
201 | concept: Optional[str] = None
202 | count: Optional[int] = None
203 |
204 |
205 | class ByContributorCount(DTO):
206 | contributorsEmail: Optional[str] = None
207 | count: Optional[int] = None
208 |
209 |
210 | class BDarwinCore(DTO):
211 | uuid: Optional[str] = None
212 | recordType: Optional[str] = None
213 | basisOfRecord: Optional[str] = None
214 | datasetID: Optional[str] = None
215 | recordLanguage: Optional[str] = None
216 | license: Optional[str] = None
217 | modified: Optional[str] = None
218 | ownerInstitutionCode: Optional[str] = None
219 | accessRights: Optional[str] = None
220 | bibliographicCitation: Optional[str] = None
221 | collectionCode: Optional[str] = None
222 | collectionID: Optional[str] = None
223 | dataGeneralizations: Optional[str] = None
224 | datasetName: Optional[str] = None
225 | dynamicProperties: Optional[str] = None
226 | informationWithheld: Optional[str] = None
227 | institutionCode: Optional[str] = None
228 | institutionID: Optional[str] = None
229 | recordReferences: Optional[str] = None
230 | rightsHolder: Optional[str] = None
231 |
232 |
233 | class BImageSetUploadDTO(DTO):
234 | uuid: Optional[str] = None
235 | localPath: Optional[str] = None
236 | remoteUri: Optional[str] = None
237 | sha256: Optional[str] = None
238 | format: Optional["ImageSetUpload.UploadFormat"] = None
239 | contributorsEmail: Optional[str] = None
240 | status: Optional["ImageSetUpload.Status"] = None
241 | statusUpdaterEmail: Optional[str] = None
242 | statusUpdateTimestamp: Optional[str] = None
243 | rejectionReason: Optional[str] = None
244 | rejectionDetails: Optional[str] = None
245 | darwinCore: Optional[BDarwinCore] = None
246 | createdTimestamp: Optional[str] = None
247 | lastUpdatedTimestamp: Optional[str] = None
248 |
249 |
250 | class Count(DTO):
251 | objectType: Optional[str] = None
252 | count: Optional[int] = None
253 |
254 |
255 | class DarwinCore(DTO):
256 | id: Optional[int] = None
257 | uuid: Optional[str] = None
258 | recordType: Optional[str] = None
259 | basisOfRecord: Optional[str] = None
260 | datasetID: Optional[str] = None
261 | recordLanguage: Optional[str] = None
262 | license: Optional[str] = None
263 | modified: Optional[str] = None
264 | ownerInstitutionCode: Optional[str] = None
265 | accessRights: Optional[str] = None
266 | bibliographicCitation: Optional[str] = None
267 | collectionCode: Optional[str] = None
268 | collectionID: Optional[str] = None
269 | dataGeneralizations: Optional[str] = None
270 | datasetName: Optional[str] = None
271 | dynamicProperties: Optional[str] = None
272 | informationWithheld: Optional[str] = None
273 | institutionCode: Optional[str] = None
274 | institutionID: Optional[str] = None
275 | recordReferences: Optional[str] = None
276 | rightsHolder: Optional[str] = None
277 | imageSetUpload: Optional["ImageSetUpload"] = None
278 |
279 |
280 | class FathomnetIdAdminMutation(DTO):
281 | disabled: Optional[bool] = None
282 | expertiseRank: Optional[str] = None
283 | roleData: Optional[str] = None
284 | organization: Optional[str] = None
285 |
286 |
287 | class FathomnetIdMutation(DTO):
288 | jobTitle: Optional[str] = None
289 | organization: Optional[str] = None
290 | profile: Optional[str] = None
291 | displayName: Optional[str] = None
292 |
293 |
294 | class FathomnetIdentity(DTO):
295 | id: Optional[int] = None
296 | uuid: Optional[str] = None
297 | email: Optional[str] = None
298 | firebaseUid: Optional[str] = None
299 | roleData: Optional[str] = None
300 | organization: Optional[str] = None
301 | jobTitle: Optional[str] = None
302 | profile: Optional[str] = None
303 | apiKey: Optional[str] = None
304 | avatarUrl: Optional[str] = None
305 | createdTimestamp: Optional[str] = None
306 | lastUpdatedTimestamp: Optional[str] = None
307 | disabled: Optional[bool] = None
308 | expertiseRank: Optional[str] = None
309 | displayName: Optional[str] = None
310 | roles: Optional[List["Roles"]] = None
311 | orcid: Optional[str] = None
312 | notificationFrequency: Optional[str] = None
313 |
314 |
315 | class GeoImage(DTO):
316 | uuid: Optional[str] = None
317 | url: Optional[str] = None
318 | latitude: Optional[float] = None
319 | longitude: Optional[float] = None
320 | depthMeters: Optional[float] = None
321 | contributorsEmail: Optional[str] = None
322 | timestamp: Optional[str] = None
323 | valid: Optional[bool] = None
324 | lastValidation: Optional[str] = None
325 |
326 |
327 | class GeoImageConstraints(DTO):
328 | concept: Optional[str] = None
329 | taxaProviderName: Optional[str] = None
330 | contributorsEmail: Optional[str] = None
331 | startTimestamp: Optional[str] = None
332 | endTimestamp: Optional[str] = None
333 | imagingTypes: Optional[List[str]] = None
334 | includeUnverified: Optional[bool] = None
335 | includeVerified: Optional[bool] = None
336 | minLongitude: Optional[float] = None
337 | maxLongitude: Optional[float] = None
338 | minLatitude: Optional[float] = None
339 | maxLatitude: Optional[float] = None
340 | minDepth: Optional[float] = None
341 | maxDepth: Optional[float] = None
342 | ownerInstitutionCodes: Optional[List[str]] = None
343 | limit: Optional[int] = None
344 | offset: Optional[int] = None
345 |
346 |
347 | class GeoImageConstraintsCount(DTO):
348 | constraints: Optional[GeoImageConstraints] = None
349 | count: Optional[int] = None
350 |
351 |
352 | class Image(DTO):
353 | id: Optional[int] = None
354 | uuid: Optional[str] = None
355 | url: Optional[str] = None
356 | valid: Optional[bool] = None
357 | imagingType: Optional[str] = None
358 | depthMeters: Optional[float] = None
359 | height: Optional[int] = None
360 | lastValidation: Optional[str] = None
361 | latitude: Optional[float] = None
362 | longitude: Optional[float] = None
363 | altitude: Optional[float] = None
364 | salinity: Optional[float] = None
365 | temperatureCelsius: Optional[float] = None
366 | oxygenMlL: Optional[float] = None
367 | pressureDbar: Optional[float] = None
368 | mediaType: Optional[str] = None
369 | modified: Optional[str] = None
370 | sha256: Optional[str] = None
371 | contributorsEmail: Optional[str] = None
372 | timestamp: Optional[str] = None
373 | width: Optional[int] = None
374 | tags: Optional[List["Tag"]] = None
375 | boundingBoxes: Optional[List[BoundingBox]] = None
376 | createdTimestamp: Optional[str] = None
377 | lastUpdatedTimestamp: Optional[str] = None
378 | imageSetUploads: Optional[List["ImageSetUpload"]] = None
379 |
380 |
381 | class ImageSetUpload(DTO):
382 | class Status(Enum):
383 | PENDING = "PENDING"
384 | ACCEPTED = "ACCEPTED"
385 | REJECTED = "REJECTED"
386 |
387 | class UploadFormat(Enum):
388 | CSV = "CSV"
389 | UNSUPPORTED = "UNSUPPORTED"
390 |
391 | id: Optional[int] = None
392 | uuid: Optional[str] = None
393 | localPath: Optional[str] = None
394 | remoteUri: Optional[str] = None
395 | sha256: Optional[str] = None
396 | contributorsEmail: Optional[str] = None
397 | status: Optional[Status] = None
398 | rejectionReason: Optional[str] = None
399 | rejectionDetails: Optional[str] = None
400 | statusUpdaterEmail: Optional[str] = None
401 | statusUpdateTimestamp: Optional[str] = None
402 | format: Optional[UploadFormat] = None
403 | darwinCore: Optional[DarwinCore] = None
404 | images: Optional[List[Image]] = None
405 | createdTimestamp: Optional[str] = None
406 | lastUpdatedTimestamp: Optional[str] = None
407 |
408 |
409 | class ImageSetUploadStats(DTO):
410 | imageSetUploadUuid: Optional[str] = None
411 | imageCount: Optional[int] = None
412 | boundingBoxCount: Optional[int] = None
413 | verifiedBoundingBoxCount: Optional[int] = None
414 |
415 |
416 | class MarineRegion(DTO):
417 | id: Optional[int] = None
418 | MRGID: Optional[int] = None
419 | name: Optional[str] = None
420 | minLatitude: Optional[float] = None
421 | maxLatitude: Optional[float] = None
422 | minLongitude: Optional[float] = None
423 | maxLongitude: Optional[float] = None
424 | createdTimestamp: Optional[str] = None
425 | lastUpdatedTimestamp: Optional[str] = None
426 |
427 |
428 | class Message(DTO):
429 | message: Optional[str] = None
430 |
431 |
432 | class Sort(DTO):
433 | class Order(DTO):
434 | class Direction(Enum):
435 | ASC = "ASC"
436 | DESC = "DESC"
437 |
438 | ignoreCase: Optional[bool] = None
439 | direction: Optional[Direction] = None
440 | property: Optional[str] = None
441 | ascending: Optional[bool] = None
442 |
443 | sorted: Optional[bool] = None
444 | orderBy: Optional[List[Order]] = None
445 |
446 |
447 | class Pageable(DTO):
448 | number: Optional[int] = None
449 | size: Optional[int] = None
450 | offset: Optional[int] = None
451 | sort: Optional[Sort] = None
452 | sorted: Optional[bool] = None
453 |
454 | def to_params(self) -> List[tuple]:
455 | """Make a list of paging parameters to be passed into a request."""
456 | params = []
457 | if self.size is not None:
458 | params.append(("size", self.size))
459 | if self.number is not None:
460 | params.append(("page", self.number))
461 | if self.sort is not None:
462 | for order in self.sort.orderBy:
463 | params.append(("sort", order.property))
464 | return params
465 |
466 | @classmethod
467 | def from_params(
468 | cls,
469 | size: Optional[int] = None,
470 | page: Optional[int] = None,
471 | sort_keys: Optional[List[str]] = None,
472 | ):
473 | """Make a Pageable instance from paging parameters."""
474 | pageable = cls()
475 | pageable.size = size
476 | pageable.number = page
477 | if sort_keys is not None:
478 | sort = Sort(orderBy=[])
479 | for sort_key in sort_keys:
480 | sort.orderBy.append(Sort.Order(property=sort_key))
481 | pageable.sort = sort
482 |         return pageable
483 |
484 | class Roles(Enum):
485 | ADMIN = "ADMIN"
486 | MODERATOR = "MODERATOR"
487 | READ = "READ"
488 | UNKNOWN = "UNKNOWN"
489 | UPDATE = "UPDATE"
490 | WRITE = "WRITE"
491 |
492 |
493 | class Tag(DTO):
494 | uuid: Optional[str] = None
495 | id: Optional[int] = None
496 | key: Optional[str] = None
497 | mediaType: Optional[str] = None
498 | value: Optional[str] = None
499 | image: Optional[Image] = None
500 | createdTimestamp: Optional[str] = None
501 | lastUpdatedTimestamp: Optional[str] = None
502 |
503 |
504 | class Taxa(DTO):
505 | name: Optional[str] = None
506 | rank: Optional[str] = None
507 |
508 |
509 | class Badge(DTO):
510 | name: Optional[str] = None
511 | since: Optional[str] = None
512 | data: Optional[dict] = None
513 |
514 |
515 | class BoundingBoxCommentContent(DTO):
516 | text: Optional[str] = None
517 | alternateConcept: Optional[str] = None
518 |
519 |
520 | class BoundingBoxComment(DTO):
521 | uuid: Optional[str] = None
522 | boundingBoxUuid: Optional[str] = None
523 | email: Optional[str] = None
524 | text: Optional[str] = None
525 | alternateConcept: Optional[str] = None
526 | flagged: Optional[bool] = None
527 | createdTimestamp: Optional[str] = None
528 | lastUpdatedTimestamp: Optional[str] = None
529 |
530 |
531 | class Topic(DTO):
532 | topic: Optional[str] = None
533 | target: Optional[str] = None
534 | notification: Optional[bool] = None
535 |
536 |
537 | class Activity(DTO):
538 | topic: Optional[Topic] = None
539 | attributedTo: Optional[str] = None
540 | lastUpdated: Optional[str] = None
541 | content: Optional[str] = None
542 | data: Optional[dict] = None
543 |
544 |
545 | class FollowedTopic(Topic):
546 | uuid: Optional[str] = None
547 | email: Optional[str] = None
548 | createdTimestamp: Optional[str] = None
549 | lastUpdatedTimestamp: Optional[str] = None
550 |
551 |
552 | class WormsNode(DTO):
553 | name: Optional[str] = None
554 | rank: Optional[str] = None
555 | aphiaId: Optional[int] = None
556 | acceptedAphiaId: Optional[int] = None
557 | alternateNames: Optional[List[str]] = None
558 | children: Optional[List["WormsNode"]] = None
559 |
560 |
561 | class WormsNames(DTO):
562 | aphiaId: Optional[int] = None
563 | name: Optional[str] = None
564 | acceptedName: Optional[str] = None
565 | alternateNames: Optional[List[str]] = None
566 |
--------------------------------------------------------------------------------
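Note: a minimal usage sketch for the paging DTOs defined above. Pageable.from_params builds an instance from plain values, and to_params flattens it back into (key, value) tuples for a request; the size, page, and sort key below are arbitrary illustration values.

    from fathomnet.dto import Pageable

    # Equivalent of requesting page 2 with 10 items, sorted by uuid
    pageable = Pageable.from_params(size=10, page=2, sort_keys=["uuid"])

    # to_params() produces the query parameters a request helper can send
    print(pageable.to_params())  # [('size', 10), ('page', 2), ('sort', 'uuid')]
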
/src/fathomnet/models/__init__.py:
--------------------------------------------------------------------------------
1 | # __init__.py (fathomnet-py)
2 |
3 | try:
4 | import torch # noqa: F401
5 | except ImportError:
6 | raise ImportError(
7 | "You must install the 'models' extra to use the models subpackage: pip install fathomnet[models]"
8 | )
9 |
10 | from fathomnet.models.yolov5 import MBARIMBBenthicModel
11 |
12 | __all__ = ["MBARIMBBenthicModel"]
13 |
--------------------------------------------------------------------------------
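Note: the import guard above means this subpackage is only importable when the optional model dependencies are installed. A hedged sketch of the intended flow, using the extra named in the error message:

    # Install the optional dependencies first, e.g.: pip install "fathomnet[models]"
    from fathomnet.models import MBARIMBBenthicModel

    model = MBARIMBBenthicModel()  # weights are downloaded and cached on first construction
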
/src/fathomnet/models/bases.py:
--------------------------------------------------------------------------------
1 | # bases.py (fathomnet-py)
2 |
3 | from abc import ABC, abstractmethod
4 | from pathlib import Path
5 | from typing import TypeVar, Union
6 |
7 | import cv2
8 | import numpy as np
9 |
10 | T = TypeVar("T")
11 |
12 |
13 | class ImageModel(ABC):
14 | """
15 | Abstract base class for image models. Defines the interface for models that can be used to predict on images.
16 | """
17 |
18 | @abstractmethod
19 | def _predict(self, image: Union[np.ndarray, Path]) -> T:
20 | raise NotImplementedError
21 |
22 | def predict(self, image: Union[np.ndarray, Path]) -> T:
23 | return self._predict(self._load(image))
24 |
25 | def _load(self, image: Union[np.ndarray, Path]) -> np.ndarray:
26 | if isinstance(image, Path):
27 |             image = cv2.imread(str(image))
28 |
29 |         return image
30 |
--------------------------------------------------------------------------------
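Note: ImageModel is a template, predict() loads the input (a numpy array or a Path) via _load and hands it to the subclass's _predict. A toy, hypothetical subclass, purely to illustrate the contract (MeanPixelModel and example.jpg are not part of the package):

    from pathlib import Path

    import numpy as np

    from fathomnet.models.bases import ImageModel  # importing this triggers the 'models' extra check


    class MeanPixelModel(ImageModel):
        """Toy model: 'predicts' the mean pixel value of an image."""

        def _predict(self, image: np.ndarray) -> float:
            return float(image.mean())


    model = MeanPixelModel()
    print(model.predict(Path("example.jpg")))  # hypothetical image file on disk
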
/src/fathomnet/models/yolov5.py:
--------------------------------------------------------------------------------
1 | # yolov5.py (fathomnet-py)
2 |
3 |
4 | from pathlib import Path
5 | from typing import Iterable
6 |
7 | import numpy as np
8 | import requests
9 | from appdirs import user_cache_dir
10 | from torch.hub import load
11 |
12 | from fathomnet.dto import BoundingBox
13 | from fathomnet.models.bases import ImageModel
14 |
15 |
16 | class YOLOv5Model(ImageModel):
17 | """
18 | YOLOv5 object detection model. Uses a .pt file and performs object detection on images.
19 | """
20 |
21 | def __init__(self, weights: Path) -> None:
22 | super().__init__()
23 |
24 | self._model = load("ultralytics/yolov5", "custom", path=weights)
25 |
26 | def _predict(self, image: np.ndarray) -> Iterable[BoundingBox]:
27 | detections = self._model(image)
28 |
29 | for detection in detections.xyxy[0]:
30 | x1, y1, x2, y2, confidence, class_id = detection
31 | yield BoundingBox(
32 | concept=self._model.names[int(class_id)],
33 | x=int(x1),
34 | y=int(y1),
35 | width=int(x2 - x1),
36 | height=int(y2 - y1),
37 | )
38 |
39 |
40 | class MBARIMBBenthicModel(YOLOv5Model):
41 | """
42 | MBARI Monterey Bay Benthic Object Detector.
43 | """
44 |
45 | WEIGHTS_URL = "https://zenodo.org/record/5539915/files/mbari-mb-benthic-33k.pt"
46 |
47 | def __init__(self) -> None:
48 | cache_dir = Path(user_cache_dir("fathomnet"))
49 | mbari_mb_benthic_dir = cache_dir / "mbari-mb-benthic"
50 | weights = mbari_mb_benthic_dir / "mbari-mb-benthic-33k.pt"
51 | if not weights.exists():
52 | print(
53 | "Downloading MBARI Monterey Bay Benthic Object Detector weights to {}".format(
54 | weights
55 | )
56 | )
57 | weights.parent.mkdir(parents=True, exist_ok=True)
58 | with requests.get(self.WEIGHTS_URL, stream=True) as r:
59 | r.raise_for_status()
60 | with weights.open("wb") as f:
61 | for chunk in r.iter_content(chunk_size=8192):
62 | f.write(chunk)
63 |
64 | super().__init__(weights)
65 |
--------------------------------------------------------------------------------
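Note: a hedged usage sketch for the detector above. The first construction downloads the weights from the Zenodo URL and caches them; example.jpg stands in for any local image:

    from pathlib import Path

    from fathomnet.models import MBARIMBBenthicModel

    model = MBARIMBBenthicModel()

    # predict() accepts a numpy array or a Path and yields BoundingBox DTOs
    for box in model.predict(Path("example.jpg")):
        print(box.concept, box.x, box.y, box.width, box.height)
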
/src/fathomnet/scripts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fathomnet/fathomnet-py/42f0c151dec6467b26f3a7817e51c3a7da70c710/src/fathomnet/scripts/__init__.py
--------------------------------------------------------------------------------
/src/fathomnet/scripts/fathomnet_generate.py:
--------------------------------------------------------------------------------
1 | """
2 | fathomnet-generate: Generate a dataset from FathomNet localizations.
3 | """
4 |
5 | import argparse
6 | import datetime
7 | import logging
8 | import os
9 | from dataclasses import dataclass
10 | from shutil import copyfileobj
11 | from typing import Iterable, List, Optional
12 |
13 | import progressbar
14 | import requests
15 | import yaml
16 | from coco_lib.common import Image as COCOImage
17 | from coco_lib.common import Info as COCOInfo
18 | from coco_lib.common import License as COCOLicense
19 | from coco_lib.objectdetection import (
20 | ObjectDetectionAnnotation,
21 | ObjectDetectionCategory,
22 | ObjectDetectionDataset,
23 | )
24 |
25 | from ..api import darwincore, images, taxa, worms
26 | from ..dto import AImageDTO, GeoImageConstraints
27 |
28 |
29 | @dataclass
30 | class Arguments:
31 | """Parsed command-line arguments"""
32 |
33 | output: str
34 | concepts: List[str]
35 | base_constraints: GeoImageConstraints
36 | include_all: bool
37 | format: str
38 | img_dir: str
39 |
40 |
41 | def comma_list(s: str) -> List[str]:
42 | """Parse a comma-separated list of strings"""
43 | return s.split(",")
44 |
45 |
46 | def lowercase_str(s: str) -> str:
47 | """Convert a string to lowercase"""
48 | return s.lower()
49 |
50 |
51 | def find_images_paged(
52 | constraints: GeoImageConstraints, page_size: int = 100
53 | ) -> Iterable[AImageDTO]:
54 | """Find images for a given constraints object, paginating requests at a given size"""
55 | offset = 0
56 | while True:
57 | constraints_page = constraints.model_copy(
58 | update={"limit": page_size, "offset": offset}
59 | )
60 | images_page = images.find(constraints_page)
61 |
62 | for image in images_page:
63 | yield image
64 |
65 | if len(images_page) < page_size:
66 | break
67 | offset += page_size
68 |
69 |
70 | def generate_constraints(
71 | concepts: List[str], base_constraints: GeoImageConstraints
72 | ) -> Iterable[GeoImageConstraints]:
73 | """Generate GeoImageConstraints instances for a list of concepts from a base set of constraints"""
74 | for concept in concepts:
75 | yield base_constraints.model_copy(update={"concept": concept})
76 |
77 |
78 | def write_voc(image: AImageDTO, filename: str):
79 | """Write a single image to a file"""
80 | with open(filename, "w") as f:
81 | f.write(image.to_pascal_voc(pretty_print=True))
82 |
83 |
84 | def download_imgs(args: Arguments, ims: List[AImageDTO]):
85 |     """Download images to the output directory"""
86 |     flag = 0  # keep track of how many images were downloaded
87 | for image in progressbar.progressbar(ims):
88 | file_name = os.path.join(
89 | args.img_dir, f"{image.uuid}.{image.url.split('.')[-1]}"
90 | )
91 |
92 | # only download if the image does not exist in the outdir
93 | if not os.path.exists(file_name):
94 | resp = requests.get(image.url, stream=True)
95 | resp.raw.decode_content = True
96 | with open(file_name, "wb") as f:
97 | copyfileobj(resp.raw, f)
98 | flag += 1
99 |
100 | logging.info(f"Downloaded {flag} new images to {args.img_dir}")
101 |
102 |
103 | def get_images(args: Arguments) -> Optional[List[AImageDTO]]:
104 | """Get images for the dataset as specified"""
105 | # Are we counting only?
106 | counting = args.output is None
107 |
108 | image_uuid_dict = {}
109 | if args.concepts: # Concepts specified, generate constraints for each
110 | # Print concepts specified
111 | logging.info("Concept(s) specified:")
112 | for concept in args.concepts:
113 | logging.info("- {}".format(concept))
114 |
115 | # Get the image data
116 | logging.info(
117 | "Fetching image records for {} concept(s)...".format(len(args.concepts))
118 | )
119 | for constraints in generate_constraints(args.concepts, args.base_constraints):
120 | logging.debug(
121 | "Constraints: {}".format(constraints.model_dump_json(indent=2))
122 | )
123 | concept_images = find_images_paged(constraints)
124 | for image in concept_images:
125 | image_uuid_dict[image.uuid] = image
126 |
127 | # Remove any unspecified bounding boxes from the images
128 | if not args.include_all:
129 | for image in image_uuid_dict.values():
130 | if image.boundingBoxes:
131 | image.boundingBoxes = [
132 | box
133 | for box in image.boundingBoxes
134 | if box.concept in args.concepts
135 | ]
136 | else: # No concepts specified, use the base constraints
137 | logging.info("Fetching image records...")
138 | noconcept_images = find_images_paged(args.base_constraints)
139 | for image in noconcept_images:
140 | image_uuid_dict[image.uuid] = image
141 |
142 | # Remove any images that don't have bounding boxes
143 | image_uuid_dict = {
144 | uuid: image for uuid, image in image_uuid_dict.items() if image.boundingBoxes
145 | }
146 |
147 | logging.info(
148 | "Found {} unique images with bounding boxes".format(len(image_uuid_dict))
149 | )
150 |
151 | # Compute the number of bounding boxes per concept
152 | concept_counts = {}
153 | for image in image_uuid_dict.values():
154 | for box in image.boundingBoxes:
155 | concept_counts[box.concept] = concept_counts.get(box.concept, 0) + 1
156 |
157 | # Print table of bounding box counts for each concept
158 | if counting:
159 | if not concept_counts:
160 | print("No bounding boxes found")
161 | else:
162 | longest_concept = max(concept_counts.keys(), key=len)
163 | concept_header = "concept"
164 | concept_len = len(longest_concept) + 1
165 | concept_len = max(concept_len, len(concept_header) + 1)
166 |
167 | count_header = "# boxes"
168 | count_len = 9
169 | count_len = max(count_len, len(count_header) + 1)
170 |
171 | format_str = "{:<" + str(concept_len) + "}|{:>" + str(count_len) + "}"
172 |
173 | print(format_str.format(concept_header, count_header))
174 |             print(format_str.format("-" * concept_len, "-" * count_len))
175 | for concept in sorted(concept_counts.keys()):
176 | count = concept_counts[concept]
177 | print(format_str.format(concept, count))
178 |
179 | return None
180 | else:
181 | return list(image_uuid_dict.values())
182 |
183 |
184 | def generate_voc_dataset(ims: List[AImageDTO], output_dir: str) -> bool:
185 | """Generate a Pascal VOC dataset (folder of annotation XMLs)"""
186 | error_flag = False
187 |
188 | # Write images to output directory
189 | for image in ims:
190 | filename = "{}.{}".format(image.uuid, "xml")
191 | filename = os.path.join(output_dir, filename)
192 | logging.debug("Writing VOC {}".format(filename))
193 | try:
194 | write_voc(image, filename)
195 | except OSError as e:
196 | logging.error("Error writing {}: {}".format(filename, e))
197 | error_flag = True
198 | logging.info("Wrote {} VOC files to {}".format(len(ims), output_dir))
199 |
200 | return error_flag
201 |
202 |
203 | def generate_coco_dataset(ims: List[AImageDTO], output_dir: str) -> bool:
204 | # Describe the dataset
205 | coco_info = COCOInfo(
206 | year=datetime.datetime.now().year,
207 | version="0",
208 | description="Generated by FathomNet",
209 | contributor="FathomNet",
210 | url="https://fathomnet.org",
211 | date_created=datetime.datetime.now(),
212 | )
213 |
214 | # Set the FathomNet license
215 | fathomnet_license = COCOLicense(
216 | id=0, name="FathomNet", url="http://fathomnet.org/fathomnet/#/license"
217 | )
218 |
219 | # Encode categories in sorted order
220 | concepts = sorted(set(box.concept for image in ims for box in image.boundingBoxes))
221 | coco_categories = [
222 | ObjectDetectionCategory(id=idx, name=concept, supercategory="")
223 | for idx, concept in enumerate(concepts, start=1)
224 | ]
225 |
226 | # Encode images and annotations
227 | coco_images = []
228 | coco_annotations = []
229 | for image in ims:
230 | image_id = len(coco_images) + 1
231 | coco_image = COCOImage(
232 | id=image_id,
233 | width=image.width,
234 | height=image.height,
235 | file_name=f"{image.uuid}.{image.url.split('.')[-1]}",
236 | license=fathomnet_license.id,
237 | flickr_url=image.url,
238 | coco_url=image.url,
239 | date_captured=datetime.datetime.fromisoformat(image.timestamp.rstrip("Z"))
240 | if image.timestamp is not None
241 | else None,
242 | )
243 | coco_images.append(coco_image)
244 |
245 | for box in image.boundingBoxes:
246 | box_id = len(coco_annotations) + 1
247 | coco_annotation = ObjectDetectionAnnotation(
248 | id=box_id,
249 | image_id=image_id,
250 | category_id=concepts.index(box.concept) + 1,
251 | segmentation=[],
252 | area=float(box.width * box.height),
253 | bbox=[float(box.x), float(box.y), float(box.width), float(box.height)],
254 | iscrowd=0,
255 | )
256 | coco_annotations.append(coco_annotation)
257 |
258 | # Combine them into a dataset
259 | coco_dataset = ObjectDetectionDataset(
260 | info=coco_info,
261 | licenses=[fathomnet_license],
262 | images=coco_images,
263 | annotations=coco_annotations,
264 | categories=coco_categories,
265 | )
266 |
267 | # Write
268 | output_path = os.path.join(output_dir, "dataset.json")
269 | try:
270 | coco_dataset.save(output_path, indent=2)
271 | logging.info("Wrote COCO dataset to {}".format(output_path))
272 | except OSError as e:
273 | logging.error("Error writing {}: {}".format(output_path, e))
274 | return True
275 |
276 | return False
277 |
278 |
279 | def generate_yolo_dataset(ims: List[AImageDTO], output_dir: str) -> bool:
280 | """Generate a YOLO dataset (folder of annotation TXTs)"""
281 | error_flag = False
282 |
283 | # Create the concept -> index mapping
284 | concepts = sorted(set(box.concept for image in ims for box in image.boundingBoxes))
285 | concept_to_index = {concept: idx for idx, concept in enumerate(concepts)}
286 |
287 | labels_dir = os.path.join(output_dir, "labels")
288 | if not os.path.exists(labels_dir):
289 | os.makedirs(labels_dir)
290 |
291 | # Assume images are in a directory called "images"
292 | images_dir = os.path.join(output_dir, "images")
293 | if not os.path.exists(images_dir):
294 | os.makedirs(images_dir)
295 |
296 | # Write annotation YAML file to output directory
297 | yaml_data = {
298 | "path": output_dir,
299 | "train": "images",
300 | "val": "images",
301 | "names": {idx: concept for idx, concept in enumerate(concepts)},
302 | }
303 | yaml_path = os.path.join(output_dir, "dataset.yaml")
304 | try:
305 | with open(yaml_path, "w") as f:
306 | f.write(yaml.dump(yaml_data, indent=2, sort_keys=False))
307 | except OSError as e:
308 | logging.error("Error writing {}: {}".format(yaml_path, e))
309 |         return True
310 | logging.info("Wrote dataset YAML to {}".format(yaml_path))
311 |
312 | # Write annotation files to output directory
313 | for image in ims:
314 | filename = "{}.{}".format(image.uuid, "txt")
315 | filename = os.path.join(labels_dir, filename)
316 | logging.debug("Writing YOLO {}".format(filename))
317 | try:
318 | with open(filename, "w") as f:
319 | for box in image.boundingBoxes:
320 | x_center = box.x + box.width / 2
321 | y_center = box.y + box.height / 2
322 | x_center_norm = x_center / image.width
323 | y_center_norm = y_center / image.height
324 | width_norm = box.width / image.width
325 | height_norm = box.height / image.height
326 | f.write(
327 | "{} {} {} {} {}\n".format(
328 | concept_to_index[box.concept],
329 | x_center_norm,
330 | y_center_norm,
331 | width_norm,
332 | height_norm,
333 | )
334 | )
335 | except OSError as e:
336 | logging.error("Error writing {}: {}".format(filename, e))
337 | error_flag = True
338 | logging.info("Wrote {} YOLO files to {}".format(len(ims), labels_dir))
339 |
340 | return error_flag
341 |
342 |
343 | def generate_dataset(args: Arguments, ims: List[AImageDTO]) -> bool:
344 | """Call the specified dataset generation function according to the format specified"""
345 | dataset_func = {
346 | "voc": generate_voc_dataset,
347 | "coco": generate_coco_dataset,
348 | "yolo": generate_yolo_dataset,
349 | }
350 |
351 | return dataset_func[args.format](ims, args.output)
352 |
353 |
354 | def get_parser() -> argparse.ArgumentParser:
355 |     """Set up the argument parser"""
356 |     return argparse.ArgumentParser(description=__doc__)
357 |
358 | def parse_args() -> Arguments:
359 | """Parse command-line arguments"""
360 | parser = argparse.ArgumentParser(description=__doc__)
361 |
362 | valid_imaging_types = [t for t in images.list_imaging_types() if t is not None]
363 | valid_taxa_providers = taxa.list_taxa_providers()
364 | valid_contributor_emails = images.find_distinct_submitter()
365 | valid_owner_institution_codes = darwincore.find_owner_institution_codes()
366 | valid_dataset_formats = ["voc", "coco", "yolo"]
367 |
368 | parser.add_argument("-v", action="count", default=0, help="Increase verbosity")
369 | parser.add_argument(
370 | "-t",
371 | "--taxa",
372 | dest="taxa",
373 | type=str,
374 | help="Taxonomy provider (to include descendants). Options: {}".format(
375 | ", ".join(valid_taxa_providers)
376 | ),
377 | )
378 | parser.add_argument(
379 | "--contributor-email",
380 | dest="contributor_email",
381 | type=str,
382 | help="Contributor email",
383 | )
384 | parser.add_argument(
385 | "--start",
386 | dest="start_timestamp",
387 | type=datetime.datetime.fromisoformat,
388 | help="Start timestamp (formatted as ISO-8601)",
389 | )
390 | parser.add_argument(
391 | "--end",
392 | dest="end_timestamp",
393 | type=datetime.datetime.fromisoformat,
394 | help="End timestamp (formatted as ISO-8601)",
395 | )
396 | parser.add_argument(
397 | "--imaging-types",
398 | dest="imaging_types",
399 | type=comma_list,
400 | help="Comma-separated list of imaging types to include. Options: {}".format(
401 | ", ".join(valid_imaging_types)
402 | ),
403 | )
404 | parser.add_argument(
405 | "--exclude-unverified",
406 | dest="include_unverified",
407 | action="store_false",
408 | help="Flag to exclude unverified images",
409 | )
410 | parser.add_argument(
411 | "--exclude-verified",
412 | dest="include_verified",
413 | action="store_false",
414 | help="Flag to exclude verified images",
415 | )
416 | parser.add_argument(
417 | "--min-longitude", dest="min_longitude", type=float, help="Minimum longitude"
418 | )
419 | parser.add_argument(
420 | "--max-longitude", dest="max_longitude", type=float, help="Maximum longitude"
421 | )
422 | parser.add_argument(
423 | "--min-latitude", dest="min_latitude", type=float, help="Minimum latitude"
424 | )
425 | parser.add_argument(
426 | "--max-latitude", dest="max_latitude", type=float, help="Maximum latitude"
427 | )
428 | parser.add_argument(
429 | "--min-depth", dest="min_depth", type=float, help="Minimum depth"
430 | )
431 | parser.add_argument(
432 | "--max-depth", dest="max_depth", type=float, help="Maximum depth"
433 | )
434 | parser.add_argument(
435 | "--institutions",
436 | dest="owner_institution_codes",
437 | type=comma_list,
438 | help="Comma-separated list of owner institution codes to include",
439 | )
440 | parser.add_argument(
441 | "-a",
442 | "--all",
443 | dest="all",
444 | action="store_true",
445 | help="Flag to include all bounding boxes of other concepts in specified images",
446 | )
447 | parser.add_argument(
448 | "-f",
449 | "--format",
450 | dest="format",
451 | type=lowercase_str,
452 | default="voc",
453 | choices=valid_dataset_formats,
454 | help="Dataset format. Options: {}".format(", ".join(valid_dataset_formats)),
455 | )
456 | parser.add_argument(
457 | "--img-download",
458 | dest="img_dir",
459 | default=None,
460 | type=str,
461 | help="Local directory to download images",
462 | )
463 |
464 | list_or_file = parser.add_mutually_exclusive_group(required=False)
465 | list_or_file.add_argument(
466 | "-c",
467 | "--concepts",
468 | dest="concepts",
469 | type=comma_list,
470 | help="Comma-separated list of concepts to include",
471 | )
472 | list_or_file.add_argument(
473 | "--concepts-file",
474 | dest="concepts_file",
475 | type=str,
476 | help="File containing newline-delimited list of concepts to include",
477 | )
478 |
479 | count_or_output = parser.add_mutually_exclusive_group(required=True)
480 | count_or_output.add_argument(
481 | "--count",
482 | dest="count",
483 | action="store_true",
484 | help="Count images and bounding boxes instead of generating a dataset",
485 | )
486 | count_or_output.add_argument(
487 | "-o", "--output", dest="output", type=str, help="Output directory"
488 | )
489 |
490 | # Parse arguments
491 | args = parser.parse_args()
492 |
493 | # Set log level
494 | level = logging.WARNING
495 | if args.v == 1:
496 | level = logging.INFO
497 | elif args.v >= 2:
498 | level = logging.DEBUG
499 |
500 | logging.basicConfig(level=level)
501 |
502 | # Parse list of concepts
503 | concepts = []
504 | if args.concepts:
505 | concepts = args.concepts
506 | elif args.concepts_file:
507 | if os.path.isfile(args.concepts_file):
508 | with open(args.concepts_file, "r") as f:
509 | concepts = f.read().splitlines()
510 |                 concepts = [line.strip() for line in concepts]  # strip whitespace from each concept name
511 |
512 | if not concepts:
513 | concepts = []
514 |
515 | # Parse taxonomy provider, updating concepts if necessary
516 | taxa_provider = args.taxa
517 | if taxa_provider is not None:
518 | taxa_provider_lower = taxa_provider.lower()
519 |         providers_lower = [p.lower() for p in valid_taxa_providers]
520 |         if taxa_provider_lower not in providers_lower:
521 |             parser.error("Invalid taxonomy provider: {}".format(taxa_provider))
522 |         else:
523 |             taxa_provider = valid_taxa_providers[providers_lower.index(taxa_provider_lower)]
524 |
525 | # Update concepts with all descendants
526 | new_concepts = []
527 | for concept in concepts:
528 | logging.debug("Finding taxa for {}".format(concept))
529 | if (
530 | taxa_provider == "fathomnet"
531 | ): # SPECIAL CASE: due to a bug in Micronaut, the fast worms provider is used directly for now
532 | descendants_names = worms.get_descendants_names(concept)
533 | for new_concept in descendants_names:
534 | if new_concept not in new_concepts:
535 | new_concepts.append(new_concept)
536 | else:
537 | concept_taxa = taxa.find_taxa(
538 | taxa_provider, concept
539 | ) # Includes the concept itself
540 | for new_taxa in concept_taxa:
541 | new_concept = new_taxa.name
542 | if new_concept not in new_concepts:
543 | new_concepts.append(new_concept)
544 |
545 | logging.debug("Old concepts: {}".format(concepts))
546 | logging.debug("New concepts: {}".format(new_concepts))
547 | concepts = new_concepts
548 |
549 | # Parse contributor email
550 | contributor_email = args.contributor_email
551 | if contributor_email is not None:
552 | if contributor_email not in valid_contributor_emails:
553 | parser.error("Invalid contributor email: {}".format(contributor_email))
554 |
555 | # Parse start timestamp
556 | start_timestamp = args.start_timestamp
557 | start_timestamp_str = None
558 | if start_timestamp is not None:
559 | if start_timestamp > datetime.datetime.now():
560 | parser.error("Start timestamp cannot be in the future")
561 | start_timestamp_str = start_timestamp.isoformat(timespec="milliseconds") + "Z"
562 |
563 | # Parse end timestamp
564 | end_timestamp = args.end_timestamp
565 | end_timestamp_str = None
566 | if end_timestamp is not None:
567 | if end_timestamp > datetime.datetime.now():
568 | parser.error("End timestamp cannot be in the future")
569 | end_timestamp_str = end_timestamp.isoformat(timespec="milliseconds") + "Z"
570 |
571 | # Parse imaging types
572 | imaging_types = args.imaging_types
573 | if imaging_types:
574 | for imaging_type in imaging_types:
575 | if imaging_type not in valid_imaging_types:
576 | parser.error("Invalid imaging type: {}".format(imaging_type))
577 |
578 | # Parse unverified/verified flags
579 | include_unverified = args.include_unverified
580 | include_verified = args.include_verified
581 |
582 | # Parse longitude/latitude/depth ranges
583 | min_longitude = args.min_longitude
584 | max_longitude = args.max_longitude
585 | min_latitude = args.min_latitude
586 | max_latitude = args.max_latitude
587 | min_depth = args.min_depth
588 | max_depth = args.max_depth
589 |
590 | # Parse list of owner institution codes
591 | owner_institution_codes = args.owner_institution_codes
592 | if owner_institution_codes is not None:
593 | for owner_institution_code in owner_institution_codes:
594 | if owner_institution_code not in valid_owner_institution_codes:
595 | parser.error(
596 | "Invalid owner institution code: {}".format(owner_institution_code)
597 | )
598 |
599 | # Pack specified constraints into base constraint instance
600 | base_constraints = GeoImageConstraints(
601 | contributorsEmail=contributor_email,
602 | startTimestamp=start_timestamp_str,
603 | endTimestamp=end_timestamp_str,
604 | imagingTypes=imaging_types,
605 | includeUnverified=include_unverified,
606 | includeVerified=include_verified,
607 | minLongitude=min_longitude,
608 | maxLongitude=max_longitude,
609 | minLatitude=min_latitude,
610 | maxLatitude=max_latitude,
611 | minDepth=min_depth,
612 | maxDepth=max_depth,
613 | ownerInstitutionCodes=owner_institution_codes,
614 | )
615 |
616 | # Create output directory (if it doesn't exist)
617 | output = args.output # None if --count flag is set
618 | if output is not None:
619 | if not os.path.exists(output):
620 | logging.info("Creating output directory {}".format(output))
621 | os.makedirs(output)
622 | elif not os.path.isdir(output):
623 | parser.error(
624 | "Output directory {} exists and is not a directory".format(output)
625 | )
626 |
627 | # Create image output directory (if it doesn't exist)
628 |     img_dir = args.img_dir  # None if the --img-download flag is not specified
629 | if img_dir is not None:
630 | if not os.path.exists(img_dir):
631 | logging.info("Creating output directory {}".format(img_dir))
632 | os.makedirs(img_dir)
633 | elif not os.path.isdir(img_dir):
634 | parser.error(
635 | "Image download output directory {} exists and is not a directory".format(
636 | img_dir
637 | )
638 | )
639 |
640 | logging.info("Successfully parsed flags")
641 |
642 | # Pack everything into an arguments instance
643 | return Arguments(
644 | output=output,
645 | concepts=concepts,
646 | base_constraints=base_constraints,
647 | include_all=args.all,
648 | format=args.format,
649 | img_dir=args.img_dir,
650 | )
651 |
652 |
653 | def main():
654 | """Entry point for the script."""
655 | args = parse_args() # Will exit the script on error
656 | try:
657 | dataset_images = get_images(args) # Get the images
658 | except KeyboardInterrupt:
659 | logging.info("Image querying interrupted by user")
660 | exit(0)
661 |
662 | if not dataset_images: # Ensure there are images to use
663 | exit(0)
664 |
665 | try:
666 | error = generate_dataset(args, dataset_images) # Generate the dataset
667 | except KeyboardInterrupt:
668 | logging.info("Dataset generation interrupted by user")
669 | exit(0)
670 |
671 |     if args.img_dir:
672 |         try:
673 |             download_imgs(
674 |                 args, dataset_images
675 |             )  # download the images to the specified output directory
676 |         except KeyboardInterrupt:
677 |             logging.info("Image download interrupted by user")
678 |             exit(0)
679 |
680 |     if error:
681 |         logging.error("Error generating dataset.")
682 |         exit(1)
685 |
--------------------------------------------------------------------------------
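Note: besides the fathomnet-generate command line (for example: fathomnet-generate -c Bathochordaeus -f yolo -o ./dataset), the script's pieces can be driven programmatically. A minimal sketch, assuming live API access; the concept, format, and output path are illustration values:

    from fathomnet.dto import GeoImageConstraints
    from fathomnet.scripts.fathomnet_generate import Arguments, generate_dataset, get_images

    args = Arguments(
        output="./dataset",              # where annotation files are written
        concepts=["Bathochordaeus"],
        base_constraints=GeoImageConstraints(),
        include_all=False,               # keep only boxes for the requested concepts
        format="yolo",
        img_dir=None,                    # no image download
    )

    dataset_images = get_images(args)    # queries the FathomNet API (network access required)
    if dataset_images:
        generate_dataset(args, dataset_images)
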
/src/fathomnet/util.py:
--------------------------------------------------------------------------------
1 | # util.py (fathomnet-py)
2 |
3 | import requests
4 |
5 | from fathomnet.dto import Pageable
6 |
7 |
8 | def debug_format_response(response: requests.Response) -> str:
9 | """
10 | Formats a response object into a string for debugging purposes.
11 |
12 | Args:
13 | response: The response object to format.
14 |
15 | Returns:
16 | A string representation of the response object.
17 | """
18 | request = response.request
19 | formatted_str = """REQUEST:
20 | Method: {}
21 | URL: {}
22 | Headers:
23 | {}
24 |
25 | Body:
26 | {}
27 |
28 | RESPONSE:
29 | Status: {}
30 | Headers:
31 | {}
32 | Content:
33 | {}
34 | """.format(
35 | request.method,
36 | request.url,
37 | "\n\t".join(
38 | ["{}: {}".format(key, val) for key, val in request.headers.items()]
39 | ),
40 | request.body,
41 | response.status_code,
42 | "\n\t".join(
43 | ["{}: {}".format(key, val) for key, val in response.headers.items()]
44 | ),
45 | response.content,
46 | )
47 |
48 | return formatted_str
49 |
50 |
51 | def page(f: callable, size: int, start_page_number: int = 0, *args, **kwargs):
52 | """
53 | Calls the given function with the given arguments, and returns a generator that will yield all results, page by page, until there are no more results.
54 |
55 | Assumes that the given function takes a Pageable as its first argument, and that the return value of the function is a list-like object that evaluates to False when there are no more results.
56 |
57 | Args:
58 | f: The function to call
59 | size: The page size to use
60 | start_page_number: The page number to start on (default 0)
61 | *args: The arguments to pass to the function
62 | **kwargs: The keyword arguments to pass to the function
63 |
64 | Returns:
65 | A generator that will yield all results, page by page, until there are no more results.
66 | """
67 | number = start_page_number
68 | while True:
69 | pageable = Pageable(size=size, number=number)
70 | page = f(pageable, *args, **kwargs)
71 |
72 | if not page:
73 | break
74 |
75 | yield from page
76 | number += 1
77 |
--------------------------------------------------------------------------------
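Note: a small sketch of the page helper in action, assuming an endpoint that takes a Pageable as its first argument and returns an empty (falsy) page once results are exhausted; find_collections is used here because the tests below call it that way:

    from fathomnet.api import imagesetuploads
    from fathomnet.util import page

    # Iterate over every image set upload, 50 records per request, without manual paging
    for upload in page(imagesetuploads.find_collections, size=50):
        print(upload.uuid)
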
/test/__init__.py:
--------------------------------------------------------------------------------
1 | from os import getenv
2 | from unittest import skipIf
3 |
4 | from fathomnet.api import xapikey
5 |
6 | # Set the TEST_X_API_KEY environment variable to an API key in order to perform authentication and enable test cases decorated by @skipIfNoAuth
7 | TEST_X_API_KEY = getenv("TEST_X_API_KEY")
8 |
9 | # If test X-API-Key is provided, authenticate session to enable cases
10 | if TEST_X_API_KEY is not None:
11 | xapikey.auth(TEST_X_API_KEY)
12 |
13 |
14 | def skipIfNoAuth(f: callable) -> callable:
15 | """
16 |     Decorator to skip a test case that requires authentication when no API key is configured.
17 |
18 | Args:
19 | f (callable): Test case function.
20 |
21 | Returns:
22 | callable: Test case function
23 | """
24 | return skipIf(
25 | TEST_X_API_KEY is None,
26 | "TEST_X_API_KEY environment variable not specified",
27 | )(f)
28 |
--------------------------------------------------------------------------------
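Note: the same x-api-key hook used here can authenticate a session outside the test suite. A hedged sketch, reading the key from the environment as the tests do:

    import os

    from fathomnet.api import xapikey

    api_key = os.getenv("TEST_X_API_KEY")  # or any FathomNet API key of your own
    if api_key is not None:
        xapikey.auth(api_key)  # authenticates subsequent fathomnet.api calls in this session
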
/test/test_activity.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet.api import activity
4 |
5 | from . import skipIfNoAuth
6 |
7 |
8 | class TestActivityAPI(TestCase):
9 | @skipIfNoAuth
10 | def test_find_all(self):
11 | activities = activity.find_all()
12 | self.assertIsNotNone(activities)
13 |
14 | @skipIfNoAuth
15 | def test_find_by_email(self):
16 | activities = activity.find_by_email("kbarnard@mbari.org")
17 | self.assertIsNotNone(activities)
18 |
19 | @skipIfNoAuth
20 | def test_find_by_email_admin(self):
21 | raise SkipTest("Not implemented")
22 |
--------------------------------------------------------------------------------
/test/test_boundingboxes.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet.api import boundingboxes
4 |
5 | from . import skipIfNoAuth
6 |
7 |
8 | class TestBoundingBoxesAPI(TestCase):
9 | @skipIfNoAuth
10 | def test_create_with_dto(self):
11 | raise SkipTest("Write tests not yet implemented") # TODO create_with_dto test
12 |
13 | def test_count_all(self):
14 | count = boundingboxes.count_all()
15 | self.assertIsNotNone(count)
16 | self.assertEqual(count.objectType, "BoundingBoxEntity")
17 | self.assertGreater(count.count, 0)
18 |
19 | def test_find_concepts(self):
20 | concepts = boundingboxes.find_concepts()
21 | self.assertIsNotNone(concepts)
22 | self.assertIn("Bathochordaeus", concepts)
23 |
24 | def test_count_total_by_concept(self):
25 | concept_counts = boundingboxes.count_total_by_concept()
26 | self.assertIsNotNone(concept_counts)
27 | for concept_count in concept_counts:
28 | if concept_count.concept == "Bathochordaeus":
29 | self.assertGreater(concept_count.count, 0)
30 | break
31 | else:
32 | self.fail()
33 |
34 | def test_find_observers(self):
35 | observers = boundingboxes.find_observers()
36 | self.assertIsNotNone(observers)
37 | self.assertIn("kakani", observers)
38 |
39 | def test_count_by_concept(self):
40 | concept_count = boundingboxes.count_by_concept("Bathochordaeus")
41 | self.assertIsNotNone(concept_count)
42 | self.assertEqual(concept_count.concept, "Bathochordaeus")
43 | self.assertGreater(concept_count.count, 0)
44 |
45 | def test_find_by_user_defined_key(self):
46 | user_defined_key = "00005716-ef67-44b9-0967-27ced2aab21e"
47 | boxes = boundingboxes.find_by_user_defined_key(user_defined_key)
48 | self.assertIsNotNone(boxes)
49 | self.assertEqual(boxes[0].userDefinedKey, user_defined_key)
50 |
51 | def test_find_all_user_defined_keys(self):
52 | user_defined_keys = boundingboxes.find_all_user_defined_keys()
53 | self.assertIsNotNone(user_defined_keys)
54 |
55 | @skipIfNoAuth
56 | def test_upload_csv(self):
57 | raise SkipTest("Write tests not yet implemented") # TODO upload_csv test
58 |
59 | def test_find_by_uuid(self):
60 | uuid = "eb05c713-9cd9-4cd9-bcaa-71f8e500825d"
61 | box = boundingboxes.find_by_uuid(uuid)
62 | self.assertIsNotNone(box)
63 | self.assertEqual(box.uuid, uuid)
64 |
65 | @skipIfNoAuth
66 | def test_update(self):
67 | raise SkipTest("Write tests not yet implemented") # TODO update test
68 |
69 | @skipIfNoAuth
70 | def test_delete(self):
71 | raise SkipTest("Write tests not yet implemented") # TODO delete test
72 |
73 | def test_audit_by_uuid(self):
74 | uuid = "9f31b626-b118-4819-860c-3c1cfc04be3f"
75 | boxes = boundingboxes.audit_by_uuid(uuid)
76 | self.assertIsNotNone(boxes)
77 | self.assertEqual(boxes[0].uuid, uuid)
78 |
79 | def test_audit_by_user_defined_key(self):
80 | user_defined_key = "285aa889-f771-46c0-6763-c0398712ba1e"
81 | boxes = boundingboxes.audit_by_user_defined_key(user_defined_key)
82 | self.assertIsNotNone(boxes)
83 | self.assertEqual(boxes[0].userDefinedKey, user_defined_key)
84 |
85 | def test_find_searchable_concepts(self):
86 | searchable_concepts = boundingboxes.find_searchable_concepts()
87 | self.assertIsNotNone(searchable_concepts)
88 | self.assertIn("Bathochordaeus", searchable_concepts)
89 |
90 | def test_find_by_observer_uuid(self):
91 | observer_uuid = "9dba65e1-5974-46df-9276-98c461beba9f"
92 | boxes = boundingboxes.find_by_observer_uuid(observer_uuid)
93 | self.assertIsNotNone(boxes)
94 |
95 | def test_find_by_verifier_uuid(self):
96 | verifier_uuid = "9dba65e1-5974-46df-9276-98c461beba9f"
97 | boxes = boundingboxes.find_by_verifier_uuid(verifier_uuid)
98 | self.assertIsNotNone(boxes)
99 |
100 | # def test_audit_by_concepts(self):
101 | # concepts = ["Bathochordaeus", "a'a", "Abraliopsis (Boreabraliopsis) felis"]
102 | # boxes = boundingboxes.audit_by_concepts(concepts)
103 | # self.assertIsNotNone(boxes)
104 |
105 | def test_audit_by_verifier(self):
106 | verifier = "brian@mbari.org"
107 | boxes = boundingboxes.audit_by_verifier(verifier)
108 | self.assertIsNotNone(boxes)
109 |
110 | def test_audit_by_observer(self):
111 | observer = "brian@mbari.org"
112 | boxes = boundingboxes.audit_by_observer(observer)
113 | self.assertIsNotNone(boxes)
114 |
--------------------------------------------------------------------------------
/test/test_comments.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet.api import comments
4 |
5 | from . import skipIfNoAuth
6 |
7 |
8 | class TestCommentsAPI(TestCase):
9 | @skipIfNoAuth
10 | def test_create(self):
11 | raise SkipTest("Write tests not yet implemented")
12 |
13 | def test_find_by_uuid(self):
14 | uuid = "c3e98572-89ab-40ac-8ec1-2cc388b129dc"
15 | comment = comments.find_by_uuid(uuid)
16 | self.assertIsNotNone(comment)
17 |
18 | @skipIfNoAuth
19 | def test_update(self):
20 | raise SkipTest("Write tests not yet implemented")
21 |
22 | @skipIfNoAuth
23 | def test_delete(self):
24 | raise SkipTest("Write tests not yet implemented")
25 |
26 | @skipIfNoAuth
27 | def find_by_bounding_box_uuid(self):
28 | bounding_box_uuid = "c4822967-13b7-435d-9cba-5a7f52f7457f"
29 | res_comments = comments.find_by_bounding_box_uuid(bounding_box_uuid)
30 | self.assertIsNotNone(res_comments)
31 |
32 | @skipIfNoAuth
33 | def test_find_by_email(self):
34 | email = "erm.butler@gmail.com"
35 | res_comments = comments.find_by_email(email)
36 | self.assertIsNotNone(res_comments)
37 |
38 | @skipIfNoAuth
39 | def test_flag(self):
40 | raise SkipTest("Write tests not yet implemented")
41 |
--------------------------------------------------------------------------------
/test/test_darwincore.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from fathomnet.api import darwincore
4 |
5 |
6 | class TestDarwinCoreAPI(TestCase):
7 | def test_find_owner_institution_codes(self):
8 | owner_institution_codes = darwincore.find_owner_institution_codes()
9 | self.assertIsNotNone(owner_institution_codes)
10 | self.assertGreater(len(owner_institution_codes), 0)
11 |
12 | def test_find_owner_institutions_by_image_uuid(self):
13 | owner_institutions = darwincore.find_owner_institutions_by_image_uuid(
14 | "b7736c31-0b78-4761-840c-e3781d6845be"
15 | )
16 | self.assertIsNotNone(owner_institutions)
17 | self.assertIn("MBARI", owner_institutions)
18 |
--------------------------------------------------------------------------------
/test/test_firebase.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet.api import firebase
4 |
5 | from . import skipIfNoAuth
6 |
7 |
8 | class TestFirebaseAPI(TestCase):
9 | def test_auth(self):
10 | raise SkipTest("Firebase authentication not yet implemented")
11 |
12 | auth_header = firebase.auth()
13 | self.assertIsNotNone(auth_header)
14 | self.assertEqual(auth_header.type, "Bearer")
15 |
16 | @skipIfNoAuth
17 | def test_test(self):
18 | message = firebase.test()
19 | self.assertIsNotNone(message)
20 |
--------------------------------------------------------------------------------
/test/test_geoimages.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from fathomnet import dto
4 | from fathomnet.api import geoimages
5 |
6 |
7 | class TestGeoImagesAPI(TestCase):
8 | def test_find_all(self):
9 | n_images = 5
10 | pageable = dto.Pageable(size=n_images)
11 | results = geoimages.find_all(pageable)
12 | self.assertIsNotNone(results)
13 | self.assertEqual(len(results), n_images)
14 |
15 | def test_count(self):
16 | geo_image_constraints = dto.GeoImageConstraints(
17 | concept="Bathochordaeus", limit=10
18 | )
19 | count = geoimages.count(geo_image_constraints)
20 | self.assertIsNotNone(count)
21 | self.assertGreater(count.count, 0)
22 |
23 | def test_find(self):
24 | geo_image_constraints = dto.GeoImageConstraints(
25 | concept="Bathochordaeus", limit=10
26 | )
27 | results = geoimages.find(geo_image_constraints)
28 | self.assertIsNotNone(results)
29 | self.assertGreater(len(results), 0)
30 |
31 | def test_find_by_image_set_upload_uuid(self):
32 | image_set_upload_uuid = "9c891f7a-976b-4376-acf9-31681e1b3a15"
33 | results = geoimages.find_by_image_set_upload_uuid(image_set_upload_uuid)
34 | self.assertIsNotNone(results)
35 | self.assertGreater(len(results), 0)
36 |
--------------------------------------------------------------------------------
/test/test_images.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet import dto
4 | from fathomnet.api import images
5 |
6 | from . import skipIfNoAuth
7 |
8 |
9 | class TestImagesAPI(TestCase):
10 | def test_find_all_alt(self):
11 | n_images = 5
12 | pageable = dto.Pageable(size=n_images)
13 | results = images.find_all_alt(pageable)
14 | self.assertIsNotNone(results)
15 | self.assertEqual(len(results), n_images)
16 |
17 | @skipIfNoAuth
18 | def test_create_if_not_exists(self):
19 | raise SkipTest(
20 | "Write tests not yet implemented"
21 | ) # TODO create_if_not_exists test
22 |
23 | def test_count_all(self):
24 | count = images.count_all()
25 | self.assertIsNotNone(count)
26 | self.assertEqual(count.objectType, "ImageEntity")
27 | self.assertGreater(count.count, 0)
28 |
29 | def test_find_all(self):
30 | n_images = 5
31 | pageable = dto.Pageable(size=n_images)
32 | results = images.find_all(pageable)
33 | self.assertIsNotNone(results)
34 | self.assertEqual(len(results), n_images)
35 |
36 | def test_find_distinct_submitter(self):
37 | submitters = images.find_distinct_submitter()
38 | self.assertIsNotNone(submitters)
39 | self.assertIn("brian@mbari.org", submitters)
40 |
41 | def test_list_imaging_types(self):
42 | imaging_types = images.list_imaging_types()
43 | self.assertIsNotNone(imaging_types)
44 | self.assertIn("ROV", imaging_types)
45 |
46 | def test_find(self):
47 | geo_image_constraints = dto.GeoImageConstraints(
48 | concept="Bathochordaeus charon", taxaProviderName="mbari", limit=10
49 | )
50 | results = images.find(geo_image_constraints)
51 | self.assertIsNotNone(results)
52 | for image in results:
53 | for bounding_box in image.boundingBoxes:
54 | if bounding_box.concept == geo_image_constraints.concept:
55 | break
56 | else:
57 | self.fail()
58 |
59 | # def test_find_by_concept(self):
60 | # for concept in (
61 | # "Bathochordaeus",
62 | # "a'a",
63 | # "Abraliopsis (Boreabrealiopsis) felis",
64 | # ):
65 | # results = images.find_by_concept(concept)
66 | # self.assertIsNotNone(results)
67 | # for image in results:
68 | # for bounding_box in image.boundingBoxes:
69 | # if bounding_box.concept == concept:
70 | # break
71 | # else:
72 | # self.fail()
73 |
74 | def test_find_by_contributors_email(self):
75 | contributors_email = "kbarnard@mbari.org"
76 | results = images.find_by_contributors_email(contributors_email)
77 | self.assertIsNotNone(results)
78 | for image in results:
79 | self.assertEqual(image.contributorsEmail, contributors_email)
80 |
81 | def test_count_by_submitter(self):
82 | contributors_email = "brian@mbari.org"
83 | count = images.count_by_submitter(contributors_email)
84 | self.assertIsNotNone(count)
85 | self.assertEqual(count.contributorsEmail, contributors_email)
86 | self.assertGreater(count.count, 0)
87 |
88 | def test_find_by_observer(self):
89 | observer = "kakani"
90 | results = images.find_by_observer(observer)
91 | self.assertIsNotNone(results)
92 | for image in results:
93 | for bounding_box in image.boundingBoxes:
94 | if bounding_box.observer == observer:
95 | break
96 | else:
97 | self.fail()
98 |
99 | def test_find_by_sha256(self):
100 | sha256 = "b572f8ca40b5af19972d8c63ac5fa4e33df215132c4c16c47b6b483ac9d07299"
101 | results = images.find_by_sha256(sha256)
102 | self.assertIsNotNone(results)
103 | for image in results:
104 | self.assertEqual(image.sha256, sha256)
105 |
106 | def test_find_by_tag_key(self):
107 | key = "source"
108 | value = "TEST"
109 | results = images.find_by_tag_key(key, value)
110 | self.assertIsNotNone(results)
111 | for image in results:
112 | for tag in image.tags:
113 | if tag.key == key and tag.value == value:
114 | break
115 | else:
116 | self.fail()
117 |
118 | # def test_find_by_url(self):
119 | # url = "https://database.fathomnet.org/static/m3/framegrabs/Ventana/images/3069/00_34_35_02.png"
120 | # image = images.find_by_url(url)
121 | # self.assertIsNotNone(image)
122 | # self.assertEqual(image.url, url)
123 |
124 | def test_find_by_uuid_in_list(self):
125 | uuids = [
126 | "b7736c31-0b78-4761-840c-e3781d6845be",
127 | "9b0bc09b-85b2-4b72-99db-bf91b36a9f89",
128 | "8bf45f3c-4d11-418c-b384-2dfdc2e6c01c",
129 | "bfa62293-1723-4643-8954-a60786f10ad5",
130 | "a1b4d4ff-a22c-417b-921f-a1dd98c21f7a",
131 | ]
132 | results = images.find_by_uuid_in_list(uuids)
133 | self.assertIsNotNone(results)
134 | self.assertSetEqual(set(image.uuid for image in results), set(uuids))
135 |
136 | def test_find_by_uuid(self):
137 | uuid = "b7736c31-0b78-4761-840c-e3781d6845be"
138 | image = images.find_by_uuid(uuid)
139 | self.assertIsNotNone(image)
140 | self.assertEqual(image.uuid, uuid)
141 |
142 | @skipIfNoAuth
143 | def test_update(self):
144 | raise SkipTest("Write tests not yet implemented") # TODO update test
145 |
146 | @skipIfNoAuth
147 | def test_delete(self):
148 | raise SkipTest("Write tests not yet implemented") # TODO delete test
149 |
--------------------------------------------------------------------------------
/test/test_imagesetuploads.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from fathomnet import dto
4 | from fathomnet.api import imagesetuploads
5 |
6 |
7 | class TestImageSetUploadsAPI(TestCase):
8 | def test_count_all(self):
9 | count = imagesetuploads.count_all()
10 | self.assertIsNotNone(count)
11 | self.assertEqual(count.objectType, "ImageSetUploadEntity")
12 | self.assertGreater(count.count, 0)
13 |
14 | def test_find_collections(self):
15 | n_image_sets = 3
16 | pageable = dto.Pageable(size=n_image_sets)
17 | results = imagesetuploads.find_collections(pageable)
18 | self.assertIsNotNone(results)
19 | self.assertEqual(len(results), n_image_sets)
20 |
21 | def test_find_contributors(self):
22 | contributors = imagesetuploads.find_contributors()
23 | self.assertIsNotNone(contributors)
24 | self.assertIn("brian@mbari.org", contributors)
25 |
26 | def test_find_rejection_reasons(self):
27 | rejection_reasons = imagesetuploads.find_rejection_reasons()
28 | self.assertIsNotNone(rejection_reasons)
29 | self.assertGreater(len(rejection_reasons), 0)
30 |
31 | def test_find_by_contributor(self):
32 | contributors_email = "brian@mbari.org"
33 | results = imagesetuploads.find_by_contributor(contributors_email)
34 | self.assertIsNotNone(results)
35 | self.assertGreater(len(results), 0)
36 | for image_set in results:
37 | self.assertEqual(image_set.contributorsEmail, contributors_email)
38 |
39 | def test_find_by_image_uuid(self):
40 | image_uuid = "4f5265f7-31cd-490d-a807-bc350356435d"
41 | results = imagesetuploads.find_by_image_uuid(image_uuid)
42 | self.assertIsNotNone(results)
43 |
44 | def test_stats(self):
45 | image_set_upload_uuid = "9da52a10-f7db-4897-a886-2e3fbf6b9d36"
46 | stats = imagesetuploads.stats(image_set_upload_uuid)
47 | self.assertIsNotNone(stats)
48 | self.assertEqual(stats.imageSetUploadUuid, image_set_upload_uuid)
49 |
50 | def test_find_by_uuid(self):
51 | uuid = "9c891f7a-976b-4376-acf9-31681e1b3a15"
52 | image_set = imagesetuploads.find_by_uuid(uuid)
53 | self.assertIsNotNone(image_set)
54 | self.assertEqual(image_set.uuid, uuid)
55 |
--------------------------------------------------------------------------------
/test/test_regions.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet import dto
4 | from fathomnet.api import regions
5 |
6 | from . import skipIfNoAuth
7 |
8 |
9 | class TestRegionsAPI(TestCase):
10 | def test_find_all(self):
11 | results = regions.find_all()
12 | self.assertIsNotNone(results)
13 |
14 | def test_count_all(self):
15 | count = regions.count_all()
16 | self.assertGreater(count, 0)
17 |
18 | def test_find_all_paged(self):
19 | n_regions = 10
20 | pageable = dto.Pageable(size=n_regions)
21 | results = regions.find_all_paged(pageable)
22 | self.assertIsNotNone(results)
23 | self.assertEqual(len(results), n_regions)
24 |
25 | @skipIfNoAuth
26 | def test_sync(self):
27 | raise SkipTest("Sync endpoint not yet implemented") # TODO sync test
28 |
29 | def test_find_at(self):
30 | latitude = 35.6
31 | longitude = -121.3
32 | results = regions.find_at(latitude, longitude)
33 | self.assertIsNotNone(results)
34 | self.assertGreaterEqual(len(results), 10)
35 | for region in results:
36 | self.assertLessEqual(latitude, region.maxLatitude)
37 | self.assertGreaterEqual(latitude, region.minLatitude)
38 | self.assertLessEqual(longitude, region.maxLongitude)
39 | self.assertGreaterEqual(longitude, region.minLongitude)
40 |
--------------------------------------------------------------------------------
/test/test_stats.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from fathomnet.api import stats
4 |
5 |
6 | class TestStatsAPI(TestCase):
7 | def test_most_popular_searches(self):
8 | results = stats.most_popular_searches()
9 | self.assertIsNotNone(results)
10 | self.assertGreater(len(results), 0)
11 |
--------------------------------------------------------------------------------
/test/test_tags.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet.api import tags
4 |
5 | from . import skipIfNoAuth
6 |
7 |
8 | class TestTagsAPI(TestCase):
9 | @skipIfNoAuth
10 | def test_create_with_dto(self):
11 | raise SkipTest("Write tests not yet implemented") # TODO create_with_dto test
12 |
13 | def test_find_by_uuid(self):
14 | uuid = "4c7f468c-ab41-4003-a048-d194a5f4ff4a"
15 | tag = tags.find_by_uuid(uuid)
16 | self.assertIsNotNone(tag)
17 | self.assertEqual(tag.uuid, uuid)
18 |
19 | def test_find_by_image_uuid_and_key(self):
20 | image_uuid = "70df75b3-02ad-4a33-a5c6-6ed90313f752"
21 | key = "source"
22 | tag_list = tags.find_by_image_uuid_and_key(image_uuid, key)
23 | self.assertIsNotNone(tag_list)
24 | tag = tag_list[0]
25 | self.assertEqual(tag.key, key)
26 | self.assertEqual(tag.value, "MBARI/VARS")
27 | self.assertEqual(tag.imageUuid, image_uuid)
28 |
29 | @skipIfNoAuth
30 | def test_update(self):
31 | raise SkipTest("Write tests not yet implemented") # TODO update test
32 |
33 | @skipIfNoAuth
34 | def test_delete(self):
35 | raise SkipTest("Write tests not yet implemented") # TODO delete test
36 |
--------------------------------------------------------------------------------
/test/test_taxa.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from fathomnet.api import taxa
4 |
5 |
6 | class TestTaxaAPI(TestCase):
7 | def test_index(self):
8 | result = taxa.index()
9 | self.assertIsNotNone(result)
10 |
11 | def test_list_taxa_providers(self):
12 | taxa_providers = taxa.list_taxa_providers()
13 | self.assertIsNotNone(taxa_providers)
14 | self.assertGreater(len(taxa_providers), 0)
15 |
16 | def test_find_children(self):
17 | children = taxa.find_children("fathomnet", "Bathochordaeus")
18 | self.assertIsNotNone(children)
19 | self.assertIn("Bathochordaeus mcnutti", set(child.name for child in children))
20 |
21 | def test_find_parent(self):
22 | parent = taxa.find_parent("fathomnet", "Bathochordaeus mcnutti")
23 | self.assertIsNotNone(parent)
24 | self.assertEqual(parent.name, "Bathochordaeus")
25 |
26 | def test_find_taxa(self):
27 | concept = "Bathochordaeus"
28 | rank = "Genus"
29 | results = taxa.find_taxa("fathomnet", concept)
30 | self.assertIsNotNone(results)
31 | self.assertGreater(len(results), 0)
32 | for taxa_item in results:
33 | if taxa_item.name == concept and taxa_item.rank == rank:
34 | break
35 | else:
36 | self.fail(f'No taxon named "{concept}" with rank "{rank}" found')
37 |
--------------------------------------------------------------------------------
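The taxa calls above take a provider name plus a concept; a minimal sketch using the same "fathomnet" provider and only the fields (name, rank) the tests check:

    from fathomnet.api import taxa

    providers = taxa.list_taxa_providers()

    children = taxa.find_children("fathomnet", "Bathochordaeus")
    print([child.name for child in children])

    parent = taxa.find_parent("fathomnet", "Bathochordaeus mcnutti")
    print(parent.name)

    for item in taxa.find_taxa("fathomnet", "Bathochordaeus"):
        print(item.name, item.rank)
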
/test/test_topics.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet.api import topics
4 |
5 | from . import skipIfNoAuth
6 |
7 |
8 | class TestTopicsAPI(TestCase):
9 | @skipIfNoAuth
10 | def test_create(self):
11 | raise SkipTest("Write tests not yet implemented")
12 |
13 | def test_find_by_uuid(self):
14 | uuid = "411fa644-4f24-49c3-bdc1-b40f91aecaab"
15 | topic = topics.find_by_uuid(uuid)
16 | self.assertIsNotNone(topic)
17 |
18 | @skipIfNoAuth
19 | def test_update(self):
20 | raise SkipTest("Write tests not yet implemented")
21 |
22 | @skipIfNoAuth
23 | def test_delete(self):
24 | raise SkipTest("Write tests not yet implemented")
25 |
26 | @skipIfNoAuth
27 | def test_find(self):
28 | res_topics = topics.find()
29 | self.assertIsNotNone(res_topics)
30 |
31 | @skipIfNoAuth
32 | def test_find_by_email(self):
33 | raise SkipTest("Not implemented")
34 |
--------------------------------------------------------------------------------
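Most topic write operations are still skipped above; the two read calls can be sketched as follows (topics.find() requires authentication, per its skipIfNoAuth decorator):

    from fathomnet.api import topics

    topic = topics.find_by_uuid("411fa644-4f24-49c3-bdc1-b40f91aecaab")

    # Requires an authenticated session (see the X-API-Key tests below)
    my_topics = topics.find()
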
/test/test_users.py:
--------------------------------------------------------------------------------
1 | from unittest import SkipTest, TestCase
2 |
3 | from fathomnet.api import users
4 | from fathomnet.dto import Pageable
5 |
6 | from . import skipIfNoAuth
7 |
8 |
9 | class TestUsersAPI(TestCase):
10 | @skipIfNoAuth
11 | def test_create_with_dto(self):
12 | raise SkipTest("Write tests not yet implemented") # TODO create_with_dto test
13 |
14 | @skipIfNoAuth
15 | def test_find_all(self):
16 | pageable = Pageable(size=5)
17 | res_users = users.find_all(pageable)
18 | self.assertIsNotNone(res_users)
19 |
20 | @skipIfNoAuth
21 | def test_find_all_admin(self):
22 | raise SkipTest("Not implemented")
23 |
24 | @skipIfNoAuth
25 | def test_update_user_data(self):
26 | raise SkipTest("Write tests not yet implemented")
27 |
28 | @skipIfNoAuth
29 | def test_update_user_data_admin(self):
30 | raise SkipTest("Write tests not yet implemented")
31 |
32 | @skipIfNoAuth
33 | def test_get_api_key(self):
34 | api_key = users.get_api_key()
35 | self.assertIsNotNone(api_key)
36 |
37 | @skipIfNoAuth
38 | def test_create_new_api_key(self):
39 | raise SkipTest("Write tests not yet implemented")
40 |
41 | @skipIfNoAuth
42 | def test_delete_api_key(self):
43 | raise SkipTest("Write tests not yet implemented")
44 |
45 | def test_count_all(self):
46 | count = users.count_all()
47 | self.assertIsNotNone(count)
48 | self.assertEqual(count.objectType, "FathomnetIdentityEntity")
49 | self.assertGreater(count.count, 0)
50 |
51 | @skipIfNoAuth
52 | def test_disable_by_uuid(self):
53 | raise SkipTest("Write tests not yet implemented")
54 |
55 | def test_find_expertise(self):
56 | expertise = users.find_expertise()
57 | self.assertIsNotNone(expertise)
58 |
59 | def test_find_contributors_names(self):
60 | contributors = users.find_contributors_names()
61 | self.assertIsNotNone(contributors)
62 |
63 | def test_find_roles(self):
64 | roles = users.find_roles()
65 | self.assertIsNotNone(roles)
66 |
67 | @skipIfNoAuth
68 | def test_find_by_authentication(self):
69 | user = users.find_by_authentication()
70 | self.assertIsNotNone(user)
71 |
72 | def test_find_by_firebase_uid(self):
73 | raise SkipTest("Not yet implemented")
74 |
75 | @skipIfNoAuth
76 | def test_verify(self):
77 | auth = users.verify()
78 | self.assertIsNotNone(auth)
79 |
80 | def test_find_by_display_name(self):
81 | res_users = users.find_by_display_name("Brian")
82 | self.assertIsNotNone(res_users)
83 |
84 | @skipIfNoAuth
85 | def test_find_by_organization(self):
86 | res_users = users.find_by_organization("mbari")
87 | self.assertIsNotNone(res_users)
88 |
89 | def test_find_by_uuid(self):
90 | res_user = users.find_by_uuid("9dba65e1-5974-46df-9276-98c461beba9f")
91 | self.assertIsNotNone(res_user)
92 |
93 | def test_find_badges_by_uuid(self):
94 | res_badges = users.find_badges_by_uuid("9dba65e1-5974-46df-9276-98c461beba9f")
95 | self.assertIsNotNone(res_badges)
96 |
--------------------------------------------------------------------------------
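The user endpoints split into anonymous reads and authenticated calls; a short sketch of each kind, limited to the functions and fields the tests touch:

    from fathomnet.api import users
    from fathomnet.dto import Pageable

    # Anonymous reads
    count = users.count_all()
    print(count.objectType, count.count)
    contributors = users.find_contributors_names()
    matches = users.find_by_display_name("Brian")

    # Authenticated calls (flagged with skipIfNoAuth in the tests)
    me = users.find_by_authentication()
    page = users.find_all(Pageable(size=5))
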
/test/test_worms.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from fathomnet.api import worms
4 | from fathomnet.dto import WormsNode
5 |
6 |
7 | def check_for_name(node: WormsNode, name: str) -> bool:
8 | """Recursively check if a node or one of its descendants has a given name."""
9 |
10 | def recurse_check(node: WormsNode) -> bool:
11 | if node.name == name:
12 | return True
13 | if not node.children:
14 | return False
15 | return any(recurse_check(child) for child in node.children)
16 |
17 | return recurse_check(node)
18 |
19 |
20 | class TestWormsAPI(TestCase):
21 | def test_count_names(self):
22 | count = worms.count_names()
23 | self.assertIsNotNone(count)
24 | self.assertGreater(count, 0)
25 |
26 | def test_get_all_names(self):
27 | names = worms.get_all_names()
28 | self.assertIsNotNone(names)
29 |
30 | def test_get_names_by_aphia_id(self):
31 | names_obj = worms.get_names_by_aphia_id(2)
32 | self.assertIsNotNone(names_obj)
33 | self.assertEqual(2, names_obj.aphiaId)
34 |
35 | def test_get_ancestors_names(self):
36 | ancestors = worms.get_ancestors_names("Animalia")
37 | self.assertIsNotNone(ancestors)
38 | self.assertIn("object", ancestors)
39 |
40 | def test_get_children_names(self):
41 | children = worms.get_children_names("Bathochordaeus")
42 | self.assertIsNotNone(children)
43 | self.assertIn("Bathochordaeus charon", children)
44 |
45 | def test_get_descendants_names(self):
46 | descendants = worms.get_descendants_names("Bathochordaeus")
47 | self.assertIsNotNone(descendants)
48 | self.assertIn("Bathochordaeus charon", descendants)
49 |
50 | siph_all_descendants = worms.get_descendants_names(
51 | "Siphonophorae", accepted=False
52 | )
53 | siph_accepted_descendants = worms.get_descendants_names(
54 | "Siphonophorae", accepted=True
55 | )
56 | self.assertIsNotNone(siph_all_descendants)
57 | self.assertIsNotNone(siph_accepted_descendants)
58 | self.assertGreater(len(siph_all_descendants), len(siph_accepted_descendants))
59 |
70 | def test_get_parent_name(self):
71 | parent = worms.get_parent_name("Bathochordaeus charon")
72 | self.assertIsNotNone(parent)
73 | self.assertEqual("Bathochordaeus", parent)
74 |
75 | def test_find_names_containing(self):
76 | names = worms.find_names_containing("pendicula")
77 | self.assertIsNotNone(names)
78 | self.assertIn("Appendicularia", names)
79 |
80 | def test_find_names_by_prefix(self):
81 | names = worms.find_names_by_prefix("Appendicula")
82 | self.assertIsNotNone(names)
83 | self.assertIn("Appendicularia", names)
84 |
85 | def test_get_synonyms_for_name(self):
86 | synonyms = worms.get_synonyms_for_name("Appendicularia")
87 | self.assertIsNotNone(synonyms)
88 | self.assertIn("larvaceans", synonyms)
89 |
90 | def test_get_ancestors(self):
91 | root_node = worms.get_ancestors("Animalia")
92 | self.assertIsNotNone(root_node)
93 | self.assertTrue(
94 | check_for_name(root_node, "object"), 'No "object" node found in ancestors'
95 | )
96 |
97 | def test_get_children(self):
98 | children = worms.get_children("Bathochordaeus")
99 | self.assertIsNotNone(children)
100 | for child in children:
101 | if child.name == "Bathochordaeus charon":
102 | return
103 | self.fail('No "Bathochordaeus charon" child found')
104 |
105 | def test_get_descendants(self):
106 | taxa_node = worms.get_descendants("Bathochordaeus")
107 | self.assertIsNotNone(taxa_node)
108 | self.assertTrue(
109 | check_for_name(taxa_node, "Bathochordaeus charon"),
110 | 'No "Bathochordaeus charon" descendant found',
111 | )
112 |
113 | def test_get_parent(self):
114 | parent = worms.get_parent("Bathochordaeus charon")
115 | self.assertIsNotNone(parent)
116 | self.assertEqual("Bathochordaeus", parent.name)
117 |
118 | def test_get_info(self):
119 | info = worms.get_info("Bathochordaeus charon")
120 | self.assertIsNotNone(info)
121 | self.assertEqual("Bathochordaeus charon", info.name)
122 |
123 | def test_find_taxa_by_prefix(self):
124 | taxa = worms.find_taxa_by_prefix("Appendicula")
125 | self.assertIsNotNone(taxa)
126 | for taxon in taxa:
127 | if taxon.name == "Appendicularia":
128 | return
129 | self.fail('No "Appendicularia" taxon found')
130 |
--------------------------------------------------------------------------------
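The WoRMS helpers come in name-based and node-based flavors; a condensed sketch of both, mirroring the calls above (WormsNode exposes at least name and children, as check_for_name relies on):

    from fathomnet.api import worms

    # Name-based queries return names
    print(worms.get_parent_name("Bathochordaeus charon"))   # "Bathochordaeus"
    print(worms.get_children_names("Bathochordaeus"))
    print(worms.get_descendants_names("Siphonophorae", accepted=True))

    # Node-based queries return WormsNode trees
    node = worms.get_descendants("Bathochordaeus")
    print(node.name, [child.name for child in (node.children or [])])
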
/test/test_xapikey.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 |
3 | from fathomnet.api import xapikey
4 |
5 | from . import TEST_X_API_KEY, skipIfNoAuth
6 |
7 |
8 | class TestXAPIKeyAPI(TestCase):
9 | @skipIfNoAuth
10 | def test_auth(self):
11 | auth_header = xapikey.auth(TEST_X_API_KEY)
12 | self.assertIsNotNone(auth_header)
13 | self.assertEqual(auth_header.type, "Bearer")
14 |
15 | message = xapikey.index(auth_header)
16 | self.assertIsNotNone(message)
17 | self.assertEqual(message.message, "Authentication successful")
18 |
--------------------------------------------------------------------------------
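The X-API-Key flow tested above can be sketched as follows; the placeholder string stands in for a real API key (the tests import theirs as TEST_X_API_KEY from the test package fixtures):

    from fathomnet.api import xapikey

    auth_header = xapikey.auth("<your-api-key>")  # bearer-style authorization header
    print(auth_header.type)                       # "Bearer"

    message = xapikey.index(auth_header)
    print(message.message)                        # "Authentication successful"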