├── .github └── workflows │ ├── gradle.yml │ ├── publish-release.yml │ └── publish-snapshot.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── build.gradle.kts ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle.kts └── src ├── main ├── java │ └── qupath │ │ └── ext │ │ └── stardist │ │ ├── OpCreators.java │ │ ├── StarDist2D.java │ │ ├── StarDistBioimageIo.java │ │ └── StarDistExtension.java └── resources │ ├── .gitkeep │ ├── META-INF │ └── services │ │ └── qupath.lib.gui.extensions.QuPathExtension │ └── scripts │ ├── StarDistDeconvolved.groovy │ ├── StarDistFluorescence.groovy │ ├── StarDistHE.groovy │ └── StarDistTemplate.groovy └── test └── java └── .gitkeep /.github/workflows/gradle.yml: -------------------------------------------------------------------------------- 1 | name: Run gradle build 2 | 3 | on: 4 | push: 5 | branches: 6 | - "main" 7 | pull_request: 8 | branches: 9 | - "main" 10 | workflow_dispatch: 11 | workflow_call: 12 | 13 | permissions: 14 | contents: read 15 | 16 | jobs: 17 | build: 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | 22 | - uses: actions/checkout@v4 23 | 24 | - name: Set up JDK 21 25 | uses: actions/setup-java@v4 26 | with: 27 | java-version: '21' 28 | distribution: 'temurin' 29 | 30 | - name: Setup Gradle 31 | uses: gradle/actions/setup-gradle@v4 32 | 33 | - name: Build with Gradle 34 | run: | 35 | ./gradlew build 36 | 37 | - uses: actions/upload-artifact@v4 38 | with: 39 | path: build/libs/*.jar 40 | name: build 41 | retention-days: 7 42 | -------------------------------------------------------------------------------- /.github/workflows/publish-release.yml: -------------------------------------------------------------------------------- 1 | name: Publish release to SciJava Maven 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | publish: 9 | 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up JDK 21 15 | uses: actions/setup-java@v3 16 | with: 17 | java-version: '21' 18 | distribution: 'temurin' 19 | - name: Validate Gradle wrapper 20 | uses: gradle/wrapper-validation-action@v1 21 | - name: Publish snapshot 22 | uses: gradle/gradle-build-action@v2.4.2 23 | with: 24 | arguments: publish -P release=true 25 | env: 26 | MAVEN_USER: ${{ secrets.MAVEN_USER }} 27 | MAVEN_PASS: ${{ secrets.MAVEN_PASS }} 28 | - uses: actions/upload-artifact@v3 29 | with: 30 | name: qupath-stardist-extension-release-jar 31 | path: build/libs 32 | retention-days: 7 33 | -------------------------------------------------------------------------------- /.github/workflows/publish-snapshot.yml: -------------------------------------------------------------------------------- 1 | name: Publish snapshot to SciJava Maven 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | publish: 8 | 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - uses: actions/checkout@v3 13 | - name: Set up JDK 21 14 | uses: actions/setup-java@v3 15 | with: 16 | java-version: '21' 17 | distribution: 'temurin' 18 | - name: Validate Gradle wrapper 19 | uses: gradle/wrapper-validation-action@v1 20 | - name: Publish snapshot 21 | uses: gradle/gradle-build-action@v2.4.2 22 | with: 23 | arguments: publish 24 | env: 25 | MAVEN_USER: ${{ secrets.MAVEN_USER }} 26 | MAVEN_PASS: ${{ secrets.MAVEN_PASS }} 27 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | # Javadocs 2 | docs/ 3 | 4 | # Maven 5 | deploy/ 6 | target/ 7 | log/ 8 | 9 | # IntelliJ 10 | .idea/ 11 | *.iml 12 | out/ 13 | 14 | # Gradle 15 | # Use local properties (e.g. to set a specific JDK) 16 | gradle.properties 17 | build/ 18 | .gradle/ 19 | gradle.properties 20 | 21 | # Eclipse 22 | .settings/ 23 | .project 24 | .classpath 25 | 26 | # Mac 27 | .DS_Store 28 | 29 | # Java 30 | hs_err*.log 31 | 32 | # Other 33 | *.tmp 34 | *.bak 35 | *.swp 36 | *~.nib 37 | *thumbs.db 38 | bin/ 39 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## v0.6.0 2 | 3 | ### Enhancements 4 | 5 | * Fix default layout with TensorFlow models (https://github.com/qupath/qupath-extension-stardist/issues/34) 6 | 7 | 8 | ## v0.5.0 9 | 10 | ### Enhancements 11 | 12 | * Support for QuPath v0.5.0 13 | * Improved support for TensorFlow via Deep Java Library 14 | * Optionally use the model name for a model stored in the user directory 15 | * May be in a 'stardist' or 'models' subdirectory 16 | * Provides an alternative to specifying the full model path 17 | * Reduce non-essential logging messages to 'debug' level 18 | 19 | 20 | ## v0.4.0 21 | 22 | ### Enhancements 23 | 24 | * Support for QuPath v0.4.0 and Deep Java Library (via the [Deep Java Library extension](https://github.com/qupath/qupath-extension-djl/blob/main/README.md)) 25 | * Support for initializing the model builder from a Bioimage Model Zoo spec 26 | * Support for preprocessing based upon the full (downsampled) image (https://github.com/qupath/qupath-extension-stardist/issues/20) 27 | * Support for both per-channel and joint channel normalization (https://github.com/qupath/qupath-extension-stardist/issues/14) 28 | * The documentation is now [here](https://qupath.readthedocs.io/en/0.4/docs/deep/stardist.html) 29 | 30 | ## v0.3.2 31 | 32 | ### Bug fixes 33 | 34 | * Fix bug applying detection to a ROI on a z-stack / time series, whereby the default plane would be used for detection (at least if padding > 0). 35 | * See https://forum.image.sc/t/cell-detection-with-stardist-on-2d-stack-images/73264/5 for details. 36 | 37 | ## v0.3.1 38 | 39 | ### Bug fixes 40 | 41 | * Invalid operation for scaling (https://github.com/qupath/qupath-extension-stardist/issues/17) 42 | * QuPath with CUDA doesn’t release GPU memory after StarDist segmentation Usage & Issues qupath (https://github.com/qupath/qupath-extension-stardist/issues/11) 43 | * You'll need to add `stardist.close()` at the end of any scripts (assuming you've used `stardist` as the variable name, as it is [here](https://qupath.readthedocs.io/en/stable/docs/advanced/stardist.html)) 44 | * Reduces some occurrences of the dreaded `TopologyException` 45 | * More will hopefully be removed in the next main QuPath release 46 | 47 | ## v0.3.0 48 | 49 | * First version, written to be compatible with QuPath v0.3.0 -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Extension docs](https://img.shields.io/badge/docs-qupath_stardist-red)](https://qupath.readthedocs.io/en/stable/docs/deep/stardist.html) 2 | [![Forum](https://img.shields.io/badge/forum-image.sc-green)](https://forum.image.sc/tag/qupath) 3 | [![Downloads (latest release)](https://img.shields.io/github/downloads-pre/qupath/qupath-extension-stardist/latest/total)](https://github.com/qupath/qupath-extension-stardist/releases/latest) 4 | [![Downloads (all releases)](https://img.shields.io/github/downloads/qupath/qupath-extension-stardist/total)](https://github.com/qupath/qupath-extension-stardist/releases) 5 | 6 | # QuPath StarDist extension 7 | 8 | Welcome to the StarDist extension for [QuPath](http://qupath.github.io)! 9 | 10 | This adds support for running the 2D version of StarDist nucleus detection developed by Uwe Schmidt and Martin Weigert. 11 | 12 | The current version is written for QuPath v0.5.0 - the documentation is [here](https://qupath.readthedocs.io/en/0.5/docs/deep/stardist.html). 13 | 14 | See what's new in the [changelog](CHANGELOG.md); 15 | 16 | ## Installing 17 | 18 | **Better extension support in QuPath v0.5!** 19 | See [readthedocs](https://qupath.readthedocs.io/en/0.5/docs/intro/extensions.html) for details. 20 | 21 | To install the StarDist extension, download the latest `qupath-extension-stardist-[version].jar` file from [releases](https://github.com/qupath/qupath-extension-stardist/releases) and drag it onto the main QuPath window. 22 | 23 | If you haven't installed any extensions before, you'll be prompted to select a QuPath user directory. 24 | The extension will then be copied to a location inside that directory. 25 | 26 | You might then need to restart QuPath (but not your computer). 27 | 28 | 29 | ## Citing 30 | 31 | If you use this extension, you should cite the original StarDist publication 32 | 33 | - Uwe Schmidt, Martin Weigert, Coleman Broaddus, and Gene Myers. 34 | [*Cell Detection with Star-convex Polygons*](https://arxiv.org/abs/1806.03535). 35 | International Conference on Medical Image Computing and Computer-Assisted Intervention (MICCAI), Granada, Spain, September 2018. 36 | 37 | You should also cite the QuPath publication, as described [here](https://qupath.readthedocs.io/en/stable/docs/intro/citing.html). 38 | 39 | 40 | ## Building 41 | 42 | You can build the QuPath StarDist extension from source with 43 | 44 | ```bash 45 | gradlew clean build 46 | ``` 47 | 48 | The output will be under `build/libs`. 
49 | 50 | * `clean` removes anything old 51 | * `build` builds the QuPath extension as a *.jar* file and adds it to `libs` 52 | -------------------------------------------------------------------------------- /build.gradle.kts: -------------------------------------------------------------------------------- 1 | plugins { 2 | id("qupath-conventions") 3 | `maven-publish` 4 | } 5 | 6 | qupathExtension { 7 | name = "qupath-extension-stardist" 8 | version = "0.6.0-SNAPSHOT" 9 | group = "io.github.qupath" 10 | description = "A QuPath extension to run StarDist for nucleus detection" 11 | automaticModule = "qupath.extension.stardist" 12 | } 13 | 14 | dependencies { 15 | 16 | implementation(libs.bundles.qupath) 17 | implementation(libs.bundles.logging) 18 | implementation(libs.qupath.fxtras) 19 | 20 | // For testing 21 | testImplementation(libs.junit) 22 | 23 | } 24 | 25 | publishing { 26 | repositories { 27 | maven { 28 | name = "SciJava" 29 | val releasesRepoUrl = uri("https://maven.scijava.org/content/repositories/releases") 30 | val snapshotsRepoUrl = uri("https://maven.scijava.org/content/repositories/snapshots") 31 | // Use gradle -Prelease publish 32 | url = if (project.hasProperty("release")) releasesRepoUrl else snapshotsRepoUrl 33 | credentials { 34 | username = System.getenv("MAVEN_USER") 35 | password = System.getenv("MAVEN_PASS") 36 | } 37 | } 38 | } 39 | 40 | publications { 41 | create("mavenJava") { 42 | from(components["java"]) 43 | pom { 44 | licenses { 45 | license { 46 | name = "Apache License v2.0" 47 | url = "https://www.apache.org/licenses/LICENSE-2.0" 48 | } 49 | } 50 | } 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/qupath/qupath-extension-stardist/02421060e699574ab83abc502cc60e18de0cac24/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-bin.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | # SPDX-License-Identifier: Apache-2.0 19 | # 20 | 21 | ############################################################################## 22 | # 23 | # Gradle start up script for POSIX generated by Gradle. 
24 | # 25 | # Important for running: 26 | # 27 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 28 | # noncompliant, but you have some other compliant shell such as ksh or 29 | # bash, then to run this script, type that shell name before the whole 30 | # command line, like: 31 | # 32 | # ksh Gradle 33 | # 34 | # Busybox and similar reduced shells will NOT work, because this script 35 | # requires all of these POSIX shell features: 36 | # * functions; 37 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 38 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 39 | # * compound commands having a testable exit status, especially «case»; 40 | # * various built-in commands including «command», «set», and «ulimit». 41 | # 42 | # Important for patching: 43 | # 44 | # (2) This script targets any POSIX shell, so it avoids extensions provided 45 | # by Bash, Ksh, etc; in particular arrays are avoided. 46 | # 47 | # The "traditional" practice of packing multiple parameters into a 48 | # space-separated string is a well documented source of bugs and security 49 | # problems, so this is (mostly) avoided, by progressively accumulating 50 | # options in "$@", and eventually passing that to Java. 51 | # 52 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 53 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 54 | # see the in-line comments for details. 55 | # 56 | # There are tweaks for specific operating systems such as AIX, CygWin, 57 | # Darwin, MinGW, and NonStop. 58 | # 59 | # (3) This script is generated from the Groovy template 60 | # https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 61 | # within the Gradle project. 62 | # 63 | # You can find Gradle at https://github.com/gradle/gradle/. 64 | # 65 | ############################################################################## 66 | 67 | # Attempt to set APP_HOME 68 | 69 | # Resolve links: $0 may be a link 70 | app_path=$0 71 | 72 | # Need this for daisy-chained symlinks. 73 | while 74 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 75 | [ -h "$app_path" ] 76 | do 77 | ls=$( ls -ld "$app_path" ) 78 | link=${ls#*' -> '} 79 | case $link in #( 80 | /*) app_path=$link ;; #( 81 | *) app_path=$APP_HOME$link ;; 82 | esac 83 | done 84 | 85 | # This is normally unused 86 | # shellcheck disable=SC2034 87 | APP_BASE_NAME=${0##*/} 88 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 89 | APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s 90 | ' "$PWD" ) || exit 91 | 92 | # Use the maximum available, or set MAX_FD != -1 to use that value. 93 | MAX_FD=maximum 94 | 95 | warn () { 96 | echo "$*" 97 | } >&2 98 | 99 | die () { 100 | echo 101 | echo "$*" 102 | echo 103 | exit 1 104 | } >&2 105 | 106 | # OS specific support (must be 'true' or 'false'). 107 | cygwin=false 108 | msys=false 109 | darwin=false 110 | nonstop=false 111 | case "$( uname )" in #( 112 | CYGWIN* ) cygwin=true ;; #( 113 | Darwin* ) darwin=true ;; #( 114 | MSYS* | MINGW* ) msys=true ;; #( 115 | NONSTOP* ) nonstop=true ;; 116 | esac 117 | 118 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 119 | 120 | 121 | # Determine the Java command to use to start the JVM. 
122 | if [ -n "$JAVA_HOME" ] ; then 123 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 124 | # IBM's JDK on AIX uses strange locations for the executables 125 | JAVACMD=$JAVA_HOME/jre/sh/java 126 | else 127 | JAVACMD=$JAVA_HOME/bin/java 128 | fi 129 | if [ ! -x "$JAVACMD" ] ; then 130 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 131 | 132 | Please set the JAVA_HOME variable in your environment to match the 133 | location of your Java installation." 134 | fi 135 | else 136 | JAVACMD=java 137 | if ! command -v java >/dev/null 2>&1 138 | then 139 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 140 | 141 | Please set the JAVA_HOME variable in your environment to match the 142 | location of your Java installation." 143 | fi 144 | fi 145 | 146 | # Increase the maximum file descriptors if we can. 147 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 148 | case $MAX_FD in #( 149 | max*) 150 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 151 | # shellcheck disable=SC2039,SC3045 152 | MAX_FD=$( ulimit -H -n ) || 153 | warn "Could not query maximum file descriptor limit" 154 | esac 155 | case $MAX_FD in #( 156 | '' | soft) :;; #( 157 | *) 158 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 159 | # shellcheck disable=SC2039,SC3045 160 | ulimit -n "$MAX_FD" || 161 | warn "Could not set maximum file descriptor limit to $MAX_FD" 162 | esac 163 | fi 164 | 165 | # Collect all arguments for the java command, stacking in reverse order: 166 | # * args from the command line 167 | # * the main class name 168 | # * -classpath 169 | # * -D...appname settings 170 | # * --module-path (only if needed) 171 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 172 | 173 | # For Cygwin or MSYS, switch paths to Windows format before running java 174 | if "$cygwin" || "$msys" ; then 175 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 176 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 177 | 178 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 179 | 180 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 181 | for arg do 182 | if 183 | case $arg in #( 184 | -*) false ;; # don't mess with options #( 185 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 186 | [ -e "$t" ] ;; #( 187 | *) false ;; 188 | esac 189 | then 190 | arg=$( cygpath --path --ignore --mixed "$arg" ) 191 | fi 192 | # Roll the args list around exactly as many times as the number of 193 | # args, so each arg winds up back in the position where it started, but 194 | # possibly modified. 195 | # 196 | # NB: a `for` loop captures its iteration list before it begins, so 197 | # changing the positional parameters here affects neither the number of 198 | # iterations, nor the values presented in `arg`. 199 | shift # remove old arg 200 | set -- "$@" "$arg" # push replacement arg 201 | done 202 | fi 203 | 204 | 205 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 206 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 207 | 208 | # Collect all arguments for the java command: 209 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 210 | # and any embedded shellness will be escaped. 
211 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 212 | # treated as '${Hostname}' itself on the command line. 213 | 214 | set -- \ 215 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 216 | -classpath "$CLASSPATH" \ 217 | org.gradle.wrapper.GradleWrapperMain \ 218 | "$@" 219 | 220 | # Stop when "xargs" is not available. 221 | if ! command -v xargs >/dev/null 2>&1 222 | then 223 | die "xargs is not available" 224 | fi 225 | 226 | # Use "xargs" to parse quoted args. 227 | # 228 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 229 | # 230 | # In Bash we could simply go: 231 | # 232 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 233 | # set -- "${ARGS[@]}" "$@" 234 | # 235 | # but POSIX shell has neither arrays nor command substitution, so instead we 236 | # post-process each arg (as a line of input to sed) to backslash-escape any 237 | # character that might be a shell metacharacter, then use eval to reverse 238 | # that process (while maintaining the separation between arguments), and wrap 239 | # the whole thing up as a single "set" statement. 240 | # 241 | # This will of course break if any of these variables contains a newline or 242 | # an unmatched quote. 243 | # 244 | 245 | eval "set -- $( 246 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 247 | xargs -n1 | 248 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 249 | tr '\n' ' ' 250 | )" '"$@"' 251 | 252 | exec "$JAVACMD" "$@" 253 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | @rem SPDX-License-Identifier: Apache-2.0 17 | @rem 18 | 19 | @if "%DEBUG%"=="" @echo off 20 | @rem ########################################################################## 21 | @rem 22 | @rem Gradle startup script for Windows 23 | @rem 24 | @rem ########################################################################## 25 | 26 | @rem Set local scope for the variables with windows NT shell 27 | if "%OS%"=="Windows_NT" setlocal 28 | 29 | set DIRNAME=%~dp0 30 | if "%DIRNAME%"=="" set DIRNAME=. 31 | @rem This is normally unused 32 | set APP_BASE_NAME=%~n0 33 | set APP_HOME=%DIRNAME% 34 | 35 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 36 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 37 | 38 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 39 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 40 | 41 | @rem Find java.exe 42 | if defined JAVA_HOME goto findJavaFromJavaHome 43 | 44 | set JAVA_EXE=java.exe 45 | %JAVA_EXE% -version >NUL 2>&1 46 | if %ERRORLEVEL% equ 0 goto execute 47 | 48 | echo. 
1>&2 49 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 50 | echo. 1>&2 51 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 52 | echo location of your Java installation. 1>&2 53 | 54 | goto fail 55 | 56 | :findJavaFromJavaHome 57 | set JAVA_HOME=%JAVA_HOME:"=% 58 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 59 | 60 | if exist "%JAVA_EXE%" goto execute 61 | 62 | echo. 1>&2 63 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 64 | echo. 1>&2 65 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 66 | echo location of your Java installation. 1>&2 67 | 68 | goto fail 69 | 70 | :execute 71 | @rem Setup the command line 72 | 73 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 74 | 75 | 76 | @rem Execute Gradle 77 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 78 | 79 | :end 80 | @rem End local scope for the variables with windows NT shell 81 | if %ERRORLEVEL% equ 0 goto mainEnd 82 | 83 | :fail 84 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 85 | rem the _cmd.exe /c_ return code! 86 | set EXIT_CODE=%ERRORLEVEL% 87 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 88 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 89 | exit /b %EXIT_CODE% 90 | 91 | :mainEnd 92 | if "%OS%"=="Windows_NT" endlocal 93 | 94 | :omega 95 | -------------------------------------------------------------------------------- /settings.gradle.kts: -------------------------------------------------------------------------------- 1 | pluginManagement { 2 | repositories { 3 | gradlePluginPortal() 4 | maven { 5 | url = uri("https://maven.scijava.org/content/repositories/releases") 6 | } 7 | } 8 | } 9 | 10 | qupath { 11 | version = "0.6.0-SNAPSHOT" 12 | } 13 | 14 | // Apply QuPath Gradle settings plugin to handle configuration 15 | plugins { 16 | id("io.github.qupath.qupath-extension-settings") version "0.2.1" 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/qupath/ext/stardist/OpCreators.java: -------------------------------------------------------------------------------- 1 | /*- 2 | * Copyright 2022 QuPath developers, University of Edinburgh 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | 18 | package qupath.ext.stardist; 19 | 20 | import java.awt.image.BufferedImage; 21 | import java.io.IOException; 22 | import java.util.Arrays; 23 | import java.util.List; 24 | 25 | import org.bytedeco.javacpp.PointerScope; 26 | import org.bytedeco.opencv.global.opencv_core; 27 | import org.bytedeco.opencv.opencv_core.Mat; 28 | import org.slf4j.Logger; 29 | import org.slf4j.LoggerFactory; 30 | 31 | import qupath.lib.awt.common.BufferedImageTools; 32 | import qupath.lib.images.ImageData; 33 | import qupath.lib.regions.ImagePlane; 34 | import qupath.lib.regions.RegionRequest; 35 | import qupath.lib.roi.interfaces.ROI; 36 | import qupath.opencv.ops.ImageDataOp; 37 | import qupath.opencv.ops.ImageOp; 38 | import qupath.opencv.ops.ImageOps; 39 | import qupath.opencv.ops.ImageOps.Normalize; 40 | import qupath.opencv.tools.OpenCVTools; 41 | 42 | /** 43 | * Helper class for creating new {@linkplain ImageOp ImageOps} based upon other image properties. 44 | *

45 | * This addresses the problem that every {@link ImageOp} only knows about the image tile that it 46 | * 'sees' at runtime. 47 | * This means that all processing needs to be local. 48 | *

49 | * Often, we want ops to use information from across the entire image - particularly for 50 | * normalization as a preprocessing step, such as normalizing to zero mean and unit variance 51 | * using statistics computed across the whole image. 52 | *
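 * <p>
 * For example, the normalization creators in this class ultimately reduce to a subtract op followed by
 * a multiply op - roughly {@code output = (input - offset) * scale} - where {@code offset} and
 * {@code scale} are computed once from the full (downsampled) image, using either min/max percentiles
 * or the image-wide mean and standard deviation.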

53 | * Before this class, this was problematic because either the parameters needed to be calculated 54 | * elsewhere (which was awkward), or else normalization would always treat each image tile independently - 55 | * which could result in tiles within the same image being normalized in very different ways. 56 | * 57 | * Warning! This is currently in development. 58 | * If it proves useful enough, it is likely to be refined and moved to the core QuPath software. 59 | * 60 | * @author Pete Bankhead 61 | * @since v0.4.0 62 | */ 63 | public class OpCreators { 64 | 65 | /** 66 | * Helper class for creating (tile-based) ImageOps with parameters that are derived from an entire image or ROI. 67 | *

68 | * This is most useful for normalization, where statistics may need to be calculated across the image 69 | * even if they are then applied locally (e.g. an offset and scaling factor). 70 | * 71 | * @author Pete Bankhead 72 | */ 73 | public static interface TileOpCreator { 74 | 75 | /** 76 | * Compute the (tile-based) ops from the image. 77 | * @param op the data op, which determines how to extract channels from the image data 78 | * @param imageData the image data to process 79 | * @param mask ROI mask that may be used to restrict the region being considered (optional) 80 | * @param plane the 2D image plane to use; if not provided, the plane from any ROI will be used, or otherwise the default plane 81 | * @return 82 | * @throws IOException 83 | */ 84 | public List createOps(ImageDataOp op, ImageData imageData, ROI mask, ImagePlane plane) throws IOException; 85 | 86 | } 87 | 88 | abstract static class DownsampledOpCreator implements TileOpCreator { 89 | 90 | private static final Logger logger = LoggerFactory.getLogger(DownsampledOpCreator.class); 91 | 92 | private boolean useMask = false; 93 | 94 | private double downsample = Double.NaN; 95 | private int maxDimension = 2048; 96 | 97 | DownsampledOpCreator(int maxDimension, double downsample, boolean useMask) { 98 | this.maxDimension = maxDimension; 99 | this.downsample = downsample; 100 | this.useMask = useMask; 101 | } 102 | 103 | DownsampledOpCreator() { 104 | this(2048, Double.NaN, false); 105 | } 106 | 107 | @Override 108 | public List createOps(ImageDataOp op, ImageData imageData, ROI mask, ImagePlane plane) throws IOException { 109 | var server = imageData.getServer(); 110 | double downsample = this.downsample; 111 | 112 | int x = 0, y = 0, width = server.getWidth(), height = server.getHeight(); 113 | if (useMask && mask != null) { 114 | x = (int)Math.round(mask.getBoundsX()); 115 | y = (int)Math.round(mask.getBoundsY()); 116 | width = (int)Math.round(mask.getBoundsWidth()); 117 | height = (int)Math.round(mask.getBoundsHeight()); 118 | } 119 | if (plane == null) { 120 | if (mask == null) { 121 | logger.warn("Plane not specified - will use the default plane"); 122 | plane = ImagePlane.getDefaultPlane(); 123 | } else { 124 | logger.debug("Plane not specified - will use the ROI mask plane"); 125 | plane = mask.getImagePlane(); 126 | } 127 | } 128 | 129 | 130 | if (Double.isNaN(downsample)) { 131 | downsample = Math.max(width, height) / (double)maxDimension; 132 | downsample = Math.max(downsample, 1.0); 133 | logger.debug("Computed downsample for global op: {}", downsample); 134 | } 135 | 136 | var request = RegionRequest.createInstance(server.getPath(), downsample, 137 | x, y, width, height, plane.getZ(), plane.getT()); 138 | 139 | try (var scope = new PointerScope()) { 140 | var mat = op.apply(imageData, request); 141 | 142 | if (useMask && mask != null) { 143 | var img = BufferedImageTools.createROIMask(mat.cols(), mat.rows(), mask, request); 144 | var matMask = OpenCVTools.imageToMat(img); 145 | opencv_core.bitwise_not(matMask, matMask); 146 | if (mat.depth() != opencv_core.CV_32F && mat.depth() != opencv_core.CV_64F) 147 | mat.convertTo(mat, opencv_core.CV_32F); 148 | mat.setTo(OpenCVTools.scalarMat(Double.NaN, mat.depth()), matMask); 149 | matMask.close(); 150 | // Show image for debugging 151 | // OpenCVTools.matToImagePlus("Masked input", mat).show(); 152 | } 153 | 154 | return compute(mat); 155 | } 156 | } 157 | 158 | protected abstract List compute(Mat mat); 159 | 160 | } 161 | 162 | /** 163 | * Tile op creator that computes 
offset and scale values across the full image 164 | * to normalize using min and max percentiles. 165 | */ 166 | public static class PercentileTileOpCreator extends DownsampledOpCreator { 167 | 168 | private static final Logger logger = LoggerFactory.getLogger(PercentileTileOpCreator.class); 169 | 170 | private double percentileMin = 0; 171 | private double percentileMax = 99.8; 172 | private boolean perChannel = false; 173 | 174 | private double eps = 1e-6; 175 | 176 | private PercentileTileOpCreator(int maxSize, double downsample, boolean useMask, double percentileMin, double percentileMax, boolean perChannel, double eps) { 177 | super(maxSize, downsample, useMask); 178 | this.percentileMin = percentileMin; 179 | this.percentileMax = percentileMax; 180 | this.perChannel = perChannel; 181 | this.eps = eps; 182 | } 183 | 184 | @Override 185 | protected List compute(Mat mat) { 186 | if (perChannel) { 187 | int nChannels = mat.channels(); 188 | double[] toSubtract = new double[nChannels]; 189 | double[] toScale = new double[nChannels]; 190 | int c = 0; 191 | try (var scope = new PointerScope()) { 192 | for (var matChannel : OpenCVTools.splitChannels(mat)) { 193 | double[] percentiles = OpenCVTools.percentiles(matChannel, percentileMin, percentileMax); 194 | toSubtract[c] = percentiles[0]; 195 | toScale[c] = 1.0/Math.max(percentiles[1] - percentiles[0], eps); 196 | c++; 197 | } 198 | } 199 | logger.debug("Computed percentile normalization offsets={}, scales={}", Arrays.toString(toSubtract), Arrays.toString(toScale)); 200 | return List.of( 201 | ImageOps.Core.subtract(toSubtract), 202 | ImageOps.Core.multiply(toScale) ); 203 | } else { 204 | double[] percentiles = OpenCVTools.percentiles(mat, percentileMin, percentileMax); 205 | logger.debug("Computed percentiles {}, {}", percentiles[0], percentiles[1]); 206 | return List.of( 207 | ImageOps.Core.subtract(percentiles[0]), 208 | ImageOps.Core.multiply(1.0/Math.max(percentiles[1] - percentiles[0], 1e-6)) ); 209 | } 210 | } 211 | 212 | } 213 | 214 | /** 215 | * Tile op creator that computes offset and scale values across the full image 216 | * to normalize to zero mean and unit variance. 
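 * In practice this amounts to {@code output = (input - mean) / (stdDev + eps)}, with the mean and
 * standard deviation computed from the downsampled image - either jointly or per channel.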
217 | */ 218 | public static class ZeroMeanVarianceTileOpCreator extends DownsampledOpCreator { 219 | 220 | private static final Logger logger = LoggerFactory.getLogger(ZeroMeanVarianceTileOpCreator.class); 221 | 222 | private boolean perChannel = false; 223 | private double eps = 1e-6; 224 | 225 | private ZeroMeanVarianceTileOpCreator(int maxSize, double downsample, boolean useMask, boolean perChannel, double eps) { 226 | super(maxSize, downsample, useMask); 227 | this.perChannel = perChannel; 228 | this.eps = eps; 229 | } 230 | 231 | @Override 232 | protected List compute(Mat mat) { 233 | if (perChannel) { 234 | int nChannels = mat.channels(); 235 | double[] toSubtract = new double[nChannels]; 236 | double[] toScale = new double[nChannels]; 237 | int c = 0; 238 | try (var scope = new PointerScope()) { 239 | for (var matChannel : OpenCVTools.splitChannels(mat)) { 240 | toSubtract[c] = OpenCVTools.mean(matChannel); 241 | toScale[c] = 1.0/(OpenCVTools.stdDev(matChannel) + eps); 242 | c++; 243 | } 244 | } 245 | logger.debug("Computed mean/variance normalization offsets={}, scales={}", Arrays.toString(toSubtract), Arrays.toString(toScale)); 246 | return List.of( 247 | ImageOps.Core.subtract(toSubtract), 248 | ImageOps.Core.multiply(toScale) 249 | ); 250 | } else { 251 | double toSubtract = OpenCVTools.mean(mat); 252 | double toScale = 1.0/(OpenCVTools.stdDev(mat) + eps); 253 | logger.debug("Computed mean/variance normalization offset={}, scale={}", toSubtract, toScale); 254 | return List.of( 255 | ImageOps.Core.subtract(toSubtract), 256 | ImageOps.Core.multiply(toScale) 257 | ); 258 | } 259 | } 260 | 261 | } 262 | 263 | 264 | 265 | /** 266 | * Builder for a {@link TileOpCreator} that can be used for image preprocessing 267 | * using min/max percentiles or zero-mean-unit-variance normalization. 268 | */ 269 | public static class ImageNormalizationBuilder { 270 | 271 | private static final Logger logger = LoggerFactory.getLogger(ImageNormalizationBuilder.class); 272 | 273 | private boolean zeroMeanUnitVariance = false; 274 | 275 | private double minPercentile = 0; 276 | private double maxPercentile = 100; 277 | 278 | private boolean perChannel = false; 279 | private double eps = 1e-6; // 1e-6 - update javadoc if this changes 280 | 281 | private double downsample = Double.NaN; 282 | private int maxDimension = 2048; // 2048 - update javadoc if this changes 283 | private boolean useMask = false; 284 | 285 | /** 286 | * Specify min and max percentiles to calculate normalization values. 287 | * See {@link Normalize#percentile(double, double)}. 288 | * @param minPercentile 289 | * @param maxPercentile 290 | * @return this builder 291 | */ 292 | public ImageNormalizationBuilder percentiles(double minPercentile, double maxPercentile) { 293 | this.minPercentile = minPercentile; 294 | this.maxPercentile = maxPercentile; 295 | if (zeroMeanUnitVariance) { 296 | logger.warn("Specifying percentiles overrides previous zero-mean-unit-variance request"); 297 | zeroMeanUnitVariance = false; 298 | } 299 | return this; 300 | } 301 | 302 | /** 303 | * Error constant used for numerical stability and avoid dividing by zero. 304 | * Default is 1e-6; 305 | * @param eps 306 | * @return this builder 307 | */ 308 | public ImageNormalizationBuilder eps(double eps) { 309 | this.eps = eps; 310 | return this; 311 | } 312 | 313 | /** 314 | * Compute the normalization values separately per channel; if false, values are computed 315 | * jointly across channels. 
316 | * @param perChannel 317 | * @return this builder 318 | */ 319 | public ImageNormalizationBuilder perChannel(boolean perChannel) { 320 | this.perChannel = perChannel; 321 | return this; 322 | } 323 | 324 | /** 325 | * Specify the downsample factor to use when calculating the normalization. 326 | * If this is not provided, then {@link #maxDimension(int)} will be used to calculate 327 | * a downsample value automatically. 328 | *

329 | * The downsample should be ≥ 1.0 and high enough to ensure that the entire image 330 | * can fit in memory. A downsample of 1.0 for a whole slide image will probably 331 | * fail due to memory or array size limits. 332 | * 333 | * @param downsample 334 | * @return this builder 335 | * @see #maxDimension(int) 336 | */ 337 | public ImageNormalizationBuilder downsample(double downsample) { 338 | this.downsample = downsample; 339 | return this; 340 | } 341 | 342 | /** 343 | * The maximum width or height, which is used to calculate a downsample factor for 344 | * the image if {@link #downsample(double)} is not specified. 345 | *
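 * <p>
 * When the downsample is derived automatically it is approximately {@code max(width, height) / maxDimension},
 * and never less than 1.0 - so, for example, with the default of 2048 a 40960 x 30720 pixel image
 * would be read at a downsample of 20.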

346 | * The current default value is 2048. 347 | * 348 | * @param maxDimension 349 | * @return this builder 350 | */ 351 | public ImageNormalizationBuilder maxDimension(int maxDimension) { 352 | this.maxDimension = maxDimension; 353 | return this; 354 | } 355 | 356 | /** 357 | * Optionally use any ROI mask provided for the calculation. 358 | * This can restrict the region that is considered. 359 | * 360 | * @param useMask 361 | * @return this builder 362 | */ 363 | public ImageNormalizationBuilder useMask(boolean useMask) { 364 | this.useMask = useMask; 365 | return this; 366 | } 367 | 368 | /** 369 | * Normalize for zero mean and unit variance. 370 | * This is an alternative to using {@link #percentiles(double, double)}. 371 | * @return this builder 372 | */ 373 | public ImageNormalizationBuilder zeroMeanUnitVariance() { 374 | return zeroMeanUnitVariance(true); 375 | } 376 | 377 | /** 378 | * Optionally normalize for zero mean and unit variance. 379 | * This is an alternative to using {@link #percentiles(double, double)}. 380 | * @param doZeroMeanUnitVariance 381 | * @return this builder 382 | */ 383 | public ImageNormalizationBuilder zeroMeanUnitVariance(boolean doZeroMeanUnitVariance) { 384 | this.zeroMeanUnitVariance = doZeroMeanUnitVariance; 385 | if (zeroMeanUnitVariance && (minPercentile != 0 || maxPercentile != 100)) 386 | logger.warn("Setting zero-mean-unit-variance will override previous percentiles that were set"); 387 | return this; 388 | } 389 | 390 | /** 391 | * Build a {@link TileOpCreator} according to the builder's parameters. 392 | * @return the new {@link TileOpCreator} 393 | */ 394 | public TileOpCreator build() { 395 | if (zeroMeanUnitVariance) { 396 | logger.debug("Creating zero-mean-unit-variance normalization op"); 397 | return new ZeroMeanVarianceTileOpCreator(maxDimension, downsample, useMask, perChannel, eps); 398 | } else { 399 | logger.debug("Creating percentile normalization op"); 400 | return new PercentileTileOpCreator(maxDimension, downsample, useMask, minPercentile, maxPercentile, perChannel, eps); 401 | } 402 | } 403 | 404 | } 405 | 406 | 407 | /** 408 | * Build a normalization op that can be based upon the entire (2D) image, rather than only local tiles. 409 | *
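 * <p>
 * For illustration, a minimal usage sketch (all methods shown are defined on {@link ImageNormalizationBuilder}):
 * <pre>{@code
 * var normalizer = OpCreators.imageNormalizationBuilder()
 *                            .percentiles(1, 99)   // image-wide percentile normalization
 *                            .perChannel(true)     // compute values separately for each channel
 *                            .maxDimension(2048)   // limit the size of the downsampled image used for the calculation
 *                            .build();
 * }</pre>
 * The resulting {@link TileOpCreator} can then be supplied to the StarDist builder (e.g. via its
 * {@code preprocessGlobal} method) so that normalization is based on the whole image rather than individual tiles.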

410 | * Note that currently this requires downsampling the image to a manageable size. 411 | * 412 | * @return 413 | */ 414 | public static ImageNormalizationBuilder imageNormalizationBuilder() { 415 | return new ImageNormalizationBuilder(); 416 | } 417 | 418 | } 419 | -------------------------------------------------------------------------------- /src/main/java/qupath/ext/stardist/StarDist2D.java: -------------------------------------------------------------------------------- 1 | /*- 2 | * Copyright 2020-2022 QuPath developers, University of Edinburgh 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package qupath.ext.stardist; 18 | 19 | import java.awt.image.BufferedImage; 20 | import java.io.Closeable; 21 | import java.io.File; 22 | import java.io.IOException; 23 | import java.nio.file.Files; 24 | import java.nio.file.Path; 25 | import java.nio.file.Paths; 26 | import java.util.ArrayList; 27 | import java.util.Arrays; 28 | import java.util.Collection; 29 | import java.util.Collections; 30 | import java.util.Comparator; 31 | import java.util.HashMap; 32 | import java.util.HashSet; 33 | import java.util.LinkedHashSet; 34 | import java.util.List; 35 | import java.util.Map; 36 | import java.util.Objects; 37 | import java.util.concurrent.ExecutionException; 38 | import java.util.concurrent.ForkJoinPool; 39 | import java.util.concurrent.TimeUnit; 40 | import java.util.concurrent.atomic.AtomicBoolean; 41 | import java.util.function.Function; 42 | import java.util.stream.Collectors; 43 | 44 | import org.bytedeco.javacpp.PointerScope; 45 | import org.bytedeco.javacpp.indexer.FloatIndexer; 46 | import org.bytedeco.opencv.global.opencv_core; 47 | import org.bytedeco.opencv.global.opencv_imgproc; 48 | import org.bytedeco.opencv.opencv_core.Mat; 49 | import org.bytedeco.opencv.opencv_core.Size; 50 | import org.locationtech.jts.algorithm.Centroid; 51 | import org.locationtech.jts.algorithm.locate.SimplePointInAreaLocator; 52 | import org.locationtech.jts.geom.Coordinate; 53 | import org.locationtech.jts.geom.Envelope; 54 | import org.locationtech.jts.geom.Geometry; 55 | import org.locationtech.jts.geom.GeometryCollection; 56 | import org.locationtech.jts.geom.Location; 57 | import org.locationtech.jts.geom.Polygon; 58 | import org.locationtech.jts.geom.prep.PreparedGeometry; 59 | import org.locationtech.jts.geom.prep.PreparedGeometryFactory; 60 | import org.locationtech.jts.index.strtree.STRtree; 61 | import org.locationtech.jts.simplify.VWSimplifier; 62 | import org.slf4j.Logger; 63 | import org.slf4j.LoggerFactory; 64 | 65 | import qupath.ext.stardist.OpCreators.ImageNormalizationBuilder; 66 | import qupath.ext.stardist.OpCreators.TileOpCreator; 67 | import qupath.lib.analysis.features.ObjectMeasurements; 68 | import qupath.lib.analysis.features.ObjectMeasurements.Compartments; 69 | import qupath.lib.analysis.features.ObjectMeasurements.Measurements; 70 | import qupath.lib.common.GeneralTools; 71 | import qupath.lib.common.LogTools; 72 | 
import qupath.lib.gui.UserDirectoryManager; 73 | import qupath.lib.images.ImageData; 74 | import qupath.lib.images.servers.ColorTransforms; 75 | import qupath.lib.images.servers.ColorTransforms.ColorTransform; 76 | import qupath.lib.images.servers.PixelCalibration; 77 | import qupath.lib.images.servers.PixelType; 78 | import qupath.lib.images.servers.TransformedServerBuilder; 79 | import qupath.lib.objects.CellTools; 80 | import qupath.lib.objects.PathCellObject; 81 | import qupath.lib.objects.PathObject; 82 | import qupath.lib.objects.PathObjects; 83 | import qupath.lib.objects.classes.PathClass; 84 | import qupath.lib.regions.ImagePlane; 85 | import qupath.lib.regions.Padding; 86 | import qupath.lib.regions.RegionRequest; 87 | import qupath.lib.roi.GeometryTools; 88 | import qupath.lib.roi.interfaces.ROI; 89 | import qupath.opencv.dnn.DnnModel; 90 | import qupath.opencv.dnn.DnnModelParams; 91 | import qupath.opencv.dnn.DnnModels; 92 | import qupath.opencv.ops.ImageDataOp; 93 | import qupath.opencv.ops.ImageOp; 94 | import qupath.opencv.ops.ImageOps; 95 | 96 | /** 97 | * Cell detection based on the following method: 98 | *

  99 |  *   Uwe Schmidt, Martin Weigert, Coleman Broaddus, and Gene Myers.
 100 |  *     "Cell Detection with Star-convex Polygons."
 101 |  *   International Conference on Medical Image Computing and Computer-Assisted Intervention (MICCAI), Granada, Spain, September 2018.
 102 |  * 
103 | * See the main repo at https://github.com/mpicbg-csbd/stardist 104 | *

105 | * Very much inspired by stardist-imagej at https://github.com/mpicbg-csbd/stardist-imagej but re-written from scratch to use OpenCV and 106 | * adapt the method of converting predictions to contours (very slightly) to be more QuPath-friendly. 107 | *
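 * For example, a typical script might look like the following sketch; the model path, the
 * parameter values and the {@code imageData}/{@code pathObjects} variables (e.g. the current
 * image and its selected annotations) are illustrative assumptions rather than fixed requirements:
 *   var stardist = StarDist2D.builder("/path/to/model.pb")   // hypothetical model location
 *           .threshold(0.5)                 // probability threshold
 *           .normalizePercentiles(1, 99)    // per-tile percentile normalization
 *           .pixelSize(0.5)                 // requested pixel size, in calibrated units
 *           .build();
 *   stardist.detectObjects(imageData, pathObjects);
 *   stardist.close();                       // release model resources when finished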

108 | * Models are expected in the same format as required by the Fiji plugin, or converted to a frozen .pb file for use with OpenCV. 109 | * 110 | * @author Pete Bankhead (this implementation, but based on the others) 111 | */ 112 | public class StarDist2D implements AutoCloseable { 113 | 114 | private static final Logger logger = LoggerFactory.getLogger(StarDist2D.class); 115 | 116 | /** 117 | * Default tile width and height. 118 | */ 119 | public static int defaultTileSize = 1024; 120 | 121 | /** 122 | * Builder to help create a {@link StarDist2D} with custom parameters. 123 | */ 124 | public static class Builder { 125 | 126 | private boolean doLog; 127 | 128 | private int nThreads = -1; 129 | 130 | private String modelPath = null; 131 | private DnnModel dnn = null; 132 | private ColorTransform[] channels = new ColorTransform[0]; 133 | 134 | private double threshold = 0.5; 135 | 136 | private int pad = 32; 137 | 138 | private double simplifyDistance = 1.4; 139 | private double cellExpansion = Double.NaN; 140 | private double cellConstrainScale = Double.NaN; 141 | private boolean ignoreCellOverlaps = false; 142 | 143 | private double pixelSize = Double.NaN; 144 | 145 | private int tileWidth = -1; 146 | private int tileHeight = -1; 147 | 148 | // Optional layout string, following the bioimage.io spec 149 | private String layout; 150 | 151 | private Function creatorFun; 152 | 153 | private PathClass globalPathClass; 154 | private Map classifications; 155 | 156 | private boolean measureShape = false; 157 | private Collection compartments = Arrays.asList(Compartments.values()); 158 | private Collection measurements; 159 | 160 | private boolean keepClassifiedBackground = false; 161 | 162 | private boolean constrainToParent = true; 163 | 164 | private TileOpCreator globalPreprocessing; 165 | private List preprocessing = new ArrayList<>(); 166 | 167 | private boolean includeProbability = false; 168 | 169 | private Builder(String modelPath) { 170 | this.modelPath = modelPath; 171 | } 172 | 173 | private Builder(DnnModel dnn) { 174 | this.dnn = dnn; 175 | } 176 | 177 | /** 178 | * Probability threshold to apply for detection, between 0 and 1. 179 | * @param threshold 180 | * @return this builder 181 | * @see #includeProbability(boolean) 182 | */ 183 | public Builder threshold(double threshold) { 184 | this.threshold = threshold; 185 | return this; 186 | } 187 | 188 | /** 189 | * Add preprocessing operations, if required. 190 | * @param ops 191 | * @return this builder 192 | */ 193 | public Builder preprocess(ImageOp... ops) { 194 | this.preprocessing.addAll(Arrays.asList(ops)); 195 | return this; 196 | } 197 | 198 | /** 199 | * Delegate to {@link #preprocessGlobal(TileOpCreator)}. 200 | * @param global 201 | * @return 202 | * @deprecated since v0.6.0 203 | */ 204 | @Deprecated 205 | public Builder preprocess(TileOpCreator global) { 206 | // See https://github.com/qupath/qupath-extension-stardist/issues/20 207 | LogTools.warnOnce(logger, 208 | "The preprocess(TileOpCreator) method is deprecated; use preprocessGlobal(TileOpCreator) instead"); 209 | return preprocessGlobal(global); 210 | } 211 | 212 | /** 213 | * Add an {@link TileOpCreator} to generate preprocessing operations based upon the 214 | * entire image, rather than per tile. 215 | *

216 | * Note that only a single such operation is permitted, which is applied after 217 | * channel extraction but before any other preprocessing. 218 | *
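 * A minimal sketch of the expected wiring, where {@code builder} stands for a {@link Builder}
 * instance and the percentile values are examples only:
 *   builder.preprocessGlobal(
 *           OpCreators.imageNormalizationBuilder()
 *                   .percentiles(0.2, 99.8)   // computed across the (downsampled) image
 *                   .perChannel(true)
 *                   .build()
 *   )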

219 | * The intended use is with {@link OpCreators#imageNormalizationBuilder()} to perform 220 | * normalization based upon percentiles computed across the image, rather than per tile. 221 | * 222 | * @param global preprocessing operation 223 | * @return this builder 224 | */ 225 | public Builder preprocessGlobal(TileOpCreator global) { 226 | this.globalPreprocessing = global; 227 | return this; 228 | } 229 | 230 | 231 | /** 232 | * Request that progress is logged at the INFO level. 233 | * If this is not specified, progress is only logged at the DEBUG level. 234 | * @return this builder 235 | */ 236 | public Builder doLog() { 237 | this.doLog = true; 238 | return this; 239 | } 240 | 241 | 242 | /** 243 | * Optional layout string giving the axes of the input required 244 | * by the model, following the Bioimage Model Zoo spec for axes. 245 | *

246 | * Generally it should be possible to leave this unspecified, 247 | * but the option exists for cases where the model format might be 248 | * different from what is expected. 249 | *

250 | * An example string would be {@code "yxc"} indicating channels-last, 251 | * or {@code "byxc"} indicating that a batch index is required. 252 | * 253 | * @param layout 254 | * @return 255 | */ 256 | public Builder layout(String layout) { 257 | this.layout = layout; 258 | return this; 259 | } 260 | 261 | /** 262 | * Customize the extent to which contours are simplified. 263 | * Simplification reduces the number of vertices, which in turn can reduce memory requirements and 264 | * improve performance. 265 | *

266 | * Implementation note: this currently uses the Visvalingam-Whyatt algorithm. 267 | * 268 | * @param distance simplify distance threshold; set ≤ 0 to turn off additional simplification 269 | * @return this builder 270 | */ 271 | public Builder simplify(double distance) { 272 | this.simplifyDistance = distance; 273 | return this; 274 | } 275 | 276 | 277 | /** 278 | * Specify channels. Useful for detecting nuclei for one channel 279 | * within a multi-channel image, or potentially for trained models that 280 | * support multi-channel input. 281 | * @param channels 0-based indices of the channels to use 282 | * @return this builder 283 | */ 284 | public Builder channels(int... channels) { 285 | return channels(Arrays.stream(channels) 286 | .mapToObj(c -> ColorTransforms.createChannelExtractor(c)) 287 | .toArray(ColorTransform[]::new)); 288 | } 289 | 290 | /** 291 | * Specify channels by name. Useful for detecting nuclei for one channel 292 | * within a multi-channel image, or potentially for trained models that 293 | * support multi-channel input. 294 | * @param channels names of the channels to use 295 | * @return this builder 296 | */ 297 | public Builder channels(String... channels) { 298 | return channels(Arrays.stream(channels) 299 | .map(c -> ColorTransforms.createChannelExtractor(c)) 300 | .toArray(ColorTransform[]::new)); 301 | } 302 | 303 | /** 304 | * Define the channels (or color transformers) to apply to the input image. 305 | *
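 * As a sketch, a single named channel or (assuming color deconvolution stains have been set
 * for the image) a deconvolved stain might be supplied; the channel name is illustrative:
 *   builder.channels("DAPI")
 *   builder.channels(ColorTransforms.createColorDeconvolvedChannel(imageData.getColorDeconvolutionStains(), 1))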

306 | * This makes it possible to supply color deconvolved channels, for example. 307 | * @param channels 308 | * @return this builder 309 | */ 310 | public Builder channels(ColorTransform... channels) { 311 | this.channels = channels.clone(); 312 | return this; 313 | } 314 | 315 | /** 316 | * Amount by which to expand detected nuclei to approximate the cell area. 317 | * Units are the same as for the {@link PixelCalibration} of the input image. 318 | *
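 * For example, for an image calibrated in microns the following sketch requests roughly a
 * 5 µm expansion (the value is purely illustrative):
 *   builder.cellExpansion(5.0)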

319 | * Warning! This is rather experimental, relying heavily on JTS and a convoluted method of 320 | * resolving overlaps using a Voronoi tessellation. 321 | *

322 | * In short, be wary. 323 | * @param distance 324 | * @return this builder 325 | */ 326 | public Builder cellExpansion(double distance) { 327 | this.cellExpansion = distance; 328 | return this; 329 | } 330 | 331 | /** 332 | * Constrain any cell expansion defined using {@link #cellExpansion(double)} based upon 333 | * the nucleus size. Only meaningful for values > 1; the nucleus is expanded according 334 | * to the scale factor, and used to define the maximum permitted cell expansion. 335 | * 336 | * @param scale 337 | * @return this builder 338 | */ 339 | public Builder cellConstrainScale(double scale) { 340 | this.cellConstrainScale = scale; 341 | return this; 342 | } 343 | 344 | /** 345 | * Create annotations rather than detections (the default). 346 | * If cell expansion is not zero, the nucleus will be included as a child object. 347 | * 348 | * @return this builder 349 | */ 350 | public Builder createAnnotations() { 351 | this.creatorFun = r -> PathObjects.createAnnotationObject(r); 352 | return this; 353 | } 354 | 355 | /** 356 | * Specify a mapping between StarDist predicted classifications (if available) and QuPath classifications. 357 | * 358 | * @param classifications 359 | * @return this builder 360 | * 361 | * @see #classify(PathClass) 362 | */ 363 | public Builder classifications(Map classifications) { 364 | this.classifications = new HashMap<>(classifications); 365 | return this; 366 | } 367 | 368 | /** 369 | * Specify a mapping between StarDist predicted classifications (if available) and QuPath classification names. 370 | * This is a convenience method that creates {@link PathClass} objects from Strings, then passes them to {@link #classifications(Map)}. 371 | * 372 | * @param classifications 373 | * @return this builder 374 | */ 375 | public Builder classificationNames(Map classifications) { 376 | return classifications(classifications.entrySet().stream().collect(Collectors.toMap(e -> e.getKey(), e -> PathClass.fromString(e.getValue())))); 377 | } 378 | 379 | /** 380 | * When using {@link #classifications(Map)}, optionally keep objects classified as background (i.e. 0). 381 | * The default is to remove such objects immediately, before resolving overlaps. 382 | * @param keep 383 | * @return this builder 384 | */ 385 | public Builder keepClassifiedBackground(boolean keep) { 386 | this.keepClassifiedBackground = keep; 387 | return this; 388 | } 389 | 390 | /** 391 | * Request that a classification is applied to all created objects. 392 | * Note that if a StarDist model supporting classifications is used and {@link #classifications(Map)} is specified, 393 | * the StarDist prediction will take precedence. The classification specified here will be applied only to objects 394 | * that have not already been classified based upon the prediction and mapping. 395 | * 396 | * @param pathClass 397 | * @return this builder 398 | * 399 | * @see #classifications(Map) 400 | * @see #classificationNames(Map) 401 | */ 402 | public Builder classify(PathClass pathClass) { 403 | this.globalPathClass = pathClass; 404 | return this; 405 | } 406 | 407 | /** 408 | * Request that a classification is applied to all created objects. 409 | * This is a convenience method that get a {@link PathClass} from a String representation. 
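 * A sketch, with an illustrative class name:
 *   builder.classify("Tumor")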
410 | * 411 | * @param pathClassName 412 | * @return this builder 413 | * @see #classifications(Map) 414 | * @see #classificationNames(Map) 415 | */ 416 | public Builder classify(String pathClassName) { 417 | return classify(PathClass.fromString(pathClassName, (Integer)null)); 418 | } 419 | 420 | /** 421 | * If true, ignore overlaps when computing cell expansion. 422 | * @param ignore 423 | * @return this builder 424 | */ 425 | public Builder ignoreCellOverlaps(boolean ignore) { 426 | this.ignoreCellOverlaps = ignore; 427 | return this; 428 | } 429 | 430 | /** 431 | * If true, constrain nuclei and cells to any parent annotation (default is true). 432 | * @param constrainToParent 433 | * @return this builder 434 | */ 435 | public Builder constrainToParent(boolean constrainToParent) { 436 | this.constrainToParent = constrainToParent; 437 | return this; 438 | } 439 | 440 | /** 441 | * Specify the number of threads to use for processing. 442 | * If you encounter problems, setting this to 1 may help to resolve them by preventing 443 | * multithreading. 444 | * @param nThreads 445 | * @return this builder 446 | */ 447 | public Builder nThreads(int nThreads) { 448 | this.nThreads = nThreads; 449 | return this; 450 | } 451 | 452 | /** 453 | * Request default intensity measurements are made for all available cell compartments. 454 | * @return this builder 455 | */ 456 | public Builder measureIntensity() { 457 | this.measurements = Arrays.asList( 458 | Measurements.MEAN, 459 | Measurements.MEDIAN, 460 | Measurements.MIN, 461 | Measurements.MAX, 462 | Measurements.STD_DEV); 463 | return this; 464 | } 465 | 466 | /** 467 | * Request specified intensity measurements are made for all available cell compartments. 468 | * @param measurements the measurements to make 469 | * @return this builder 470 | */ 471 | public Builder measureIntensity(Collection measurements) { 472 | this.measurements = new ArrayList<>(measurements); 473 | return this; 474 | } 475 | 476 | /** 477 | * Request shape measurements are made for the detected cell or nucleus. 478 | * @return this builder 479 | */ 480 | public Builder measureShape() { 481 | measureShape = true; 482 | return this; 483 | } 484 | 485 | /** 486 | * Specify the compartments within which intensity measurements are made. 487 | * Only effective if {@link #measureIntensity()} and {@link #cellExpansion(double)} have been selected. 488 | * @param compartments cell compartments for intensity measurements 489 | * @return this builder 490 | */ 491 | public Builder compartments(Compartments...compartments) { 492 | this.compartments = Arrays.asList(compartments); 493 | return this; 494 | } 495 | 496 | /** 497 | * Optionally include the prediction probability as a measurement for the object. 498 | * This can be helpful if detection is applied with a low (generous) probability threshold, 499 | * with the intention of filtering out less likely detections later. 500 | * 501 | * @param include true if the probability should be included, false otherwise 502 | * @return this builder 503 | * @see #threshold(double) 504 | */ 505 | public Builder includeProbability(boolean include) { 506 | this.includeProbability = include; 507 | return this; 508 | } 509 | 510 | /** 511 | * Resolution at which the cell detection should be run. 512 | * The units depend upon the {@link PixelCalibration} of the input image. 513 | *

514 | * The default is to use the full resolution of the input image. 515 | *

516 | * For an image calibrated in microns, the recommended default is approximately 0.5. 517 | * 518 | * @param pixelSize 519 | * @return this builder 520 | */ 521 | public Builder pixelSize(double pixelSize) { 522 | this.pixelSize = pixelSize; 523 | return this; 524 | } 525 | 526 | /** 527 | * Size in pixels of a tile used for detection. 528 | * Note that tiles are independently normalized, and therefore tiling can impact 529 | * the results. Default is 1024. 530 | * @param tileSize 531 | * @return this builder 532 | */ 533 | public Builder tileSize(int tileSize) { 534 | return tileSize(tileSize, tileSize); 535 | } 536 | 537 | /** 538 | * Size in pixels of a tile used for detection. 539 | * Note that tiles are independently normalized, and therefore tiling can impact 540 | * the results. Default is 1024. 541 | * @param tileWidth 542 | * @param tileHeight 543 | * @return this builder 544 | */ 545 | public Builder tileSize(int tileWidth, int tileHeight) { 546 | this.tileWidth = tileWidth; 547 | this.tileHeight = tileHeight; 548 | return this; 549 | } 550 | 551 | /** 552 | * Amount to pad tiles to reduce boundary artifacts. 553 | * @param pad padding in pixels; width and height of tiles will be increased by pad x 2. 554 | * @return this builder 555 | */ 556 | public Builder padding(int pad) { 557 | this.pad = pad; 558 | return this; 559 | } 560 | 561 | /** 562 | * Apply percentile normalization separately to the input image channels. 563 | *

564 | * Note that this can be used in combination with {@link #preprocess(ImageOp...)}, 565 | * in which case the order in which the operations are applied depends upon the order 566 | * in which the methods of the builder are called. 567 | *

568 | * Warning! This is applied on a per-tile basis. This can result in artifacts and false detections 569 | * without background/constant regions. 570 | * Consider using {@link #inputAdd(double...)} and {@link #inputScale(double...)} as alternative 571 | * normalization strategies, if appropriate constants can be determined to apply globally. 572 | * 573 | * @param min minimum percentile 574 | * @param max maximum percentile 575 | * @return this builder 576 | * @see #normalizePercentiles(double, double, boolean, double) 577 | */ 578 | public Builder normalizePercentiles(double min, double max) { 579 | return normalizePercentiles(min, max, true, 0.0); 580 | } 581 | 582 | 583 | /** 584 | * Apply percentile normalization to the input image channels, or across all channels jointly. 585 | *
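 * For example, a sketch of joint (all-channel) normalization with a small epsilon; the values
 * are illustrative:
 *   builder.normalizePercentiles(0.1, 99.8, false, 1e-6)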

586 | * Note that this can be used in combination with {@link #preprocess(ImageOp...)}, 587 | * in which case the order in which the operations are applied depends upon the order 588 | * in which the methods of the builder are called. 589 | *

590 | * Warning! This is applied on a per-tile basis. This can result in artifacts and false detections 591 | * without background/constant regions. 592 | * Consider using {@link #inputAdd(double...)} and {@link #inputScale(double...)} as alternative 593 | * normalization strategies, if appropriate constants can be determined to apply globally. 594 | * 595 | * @param min minimum percentile 596 | * @param max maximum percentile 597 | * @param perChannel if true, normalize each channel separately; if false, normalize channels jointly 598 | * @param eps small constant to apply 599 | * @return this builder 600 | * @since v0.4.0 601 | */ 602 | public Builder normalizePercentiles(double min, double max, boolean perChannel, double eps) { 603 | this.preprocessing.add(ImageOps.Normalize.percentile(min, max, perChannel, eps)); 604 | return this; 605 | } 606 | 607 | 608 | /** 609 | * Add an offset as a preprocessing step. 610 | * Usually the value will be negative. Along with {@link #inputScale(double...)} this can be used as an alternative (global) normalization. 611 | *
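 * For example, to apply a fixed, image-wide normalization of roughly (x - 100) / 50 to every
 * pixel value x (the constants are illustrative and would normally be measured for the image):
 *   builder.inputAdd(-100.0)
 *          .inputScale(1.0/50.0)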

612 | * Note that this can be used in combination with {@link #preprocess(ImageOp...)}, 613 | * in which case the order in which the operations are applied depends upon the order 614 | * in which the methods of the builder are called. 615 | * 616 | * @param values either a single value to add to all channels, or an array of values equal to the number of channels 617 | * @return this builder 618 | * @see #inputSubtract(double...) 619 | * @see #inputScale(double...) 620 | */ 621 | public Builder inputAdd(double... values) { 622 | this.preprocessing.add(ImageOps.Core.add(values)); 623 | return this; 624 | } 625 | 626 | /** 627 | * Subtract an offset as a preprocessing step. 628 | *

629 | * Note that this can be used in combination with {@link #preprocess(ImageOp...)}, 630 | * in which case the order in which the operations are applied depends upon the order 631 | * in which the methods of the builder are called. 632 | * 633 | * @param values either a single value to subtract from all channels, or an array of values equal to the number of channels 634 | * @return this builder 635 | * @since v0.4.0 636 | * @see #inputAdd(double...) 637 | * @see #inputScale(double...) 638 | */ 639 | public Builder inputSubtract(double... values) { 640 | this.preprocessing.add(ImageOps.Core.subtract(values)); 641 | return this; 642 | } 643 | 644 | /** 645 | * Multiply by a scale factor as a preprocessing step. 646 | * Along with {@link #inputAdd(double...)} this can be used as an alternative (global) normalization. 647 | *

648 | * Note that this can be used in combination with {@link #preprocess(ImageOp...)}, 649 | * in which case the order in which the operations are applied depends upon the order 650 | * in which the methods of the builder are called. 651 | * 652 | * @param values either a single value to add to all channels, or an array of values equal to the number of channels 653 | * @return this builder 654 | * @see #inputAdd(double...) 655 | * @see #inputSubtract(double...) 656 | */ 657 | public Builder inputScale(double... values) { 658 | this.preprocessing.add(ImageOps.Core.multiply(values)); 659 | return this; 660 | } 661 | 662 | /** 663 | * Create a {@link StarDist2D}, all ready for detection. 664 | * @return 665 | */ 666 | public StarDist2D build() { 667 | var stardist = new StarDist2D(); 668 | 669 | // var padding = pad > 0 ? Padding.symmetric(pad) : Padding.empty(); 670 | var dnn = this.dnn; 671 | if (dnn == null) { 672 | // Search for the model file - permitting a search in the user directory 673 | var file = findModelFile(modelPath); 674 | if (file == null || !file.exists()) { 675 | throw new IllegalArgumentException("I couldn't find the model file " + modelPath); 676 | } 677 | try { 678 | var ndLayout = layout; 679 | // If we have a saved model, default to channels-last with a batch dimension 680 | if (file.isDirectory() && ndLayout == null) 681 | ndLayout = "BYXC"; 682 | var builder = DnnModelParams.builder() 683 | .files(file) 684 | .layout(ndLayout); 685 | var params = builder.build(); 686 | dnn = DnnModels.buildModel(params); 687 | if (dnn != null) 688 | logger.debug("Loaded model {} as {}", modelPath, dnn); 689 | } catch (Exception e) { 690 | logger.error("Unable to load model file: {}", e.getMessage(), e); 691 | throw new RuntimeException("Unable to load StarDist model from " + modelPath, e); 692 | } 693 | // Report if we have no model 694 | if (dnn == null) { 695 | throw new IllegalArgumentException("No StarDist model found for path " + modelPath); 696 | } 697 | } 698 | 699 | stardist.op = ImageOps.buildImageDataOp(channels); 700 | 701 | stardist.globalPreprocess = globalPreprocessing; 702 | stardist.preprocess = new ArrayList<>(preprocessing); 703 | 704 | stardist.dnn = dnn; 705 | stardist.threshold = threshold; 706 | stardist.pixelSize = pixelSize; 707 | stardist.cellConstrainScale = cellConstrainScale; 708 | stardist.cellExpansion = cellExpansion; 709 | stardist.tileWidth = tileWidth; 710 | stardist.tileHeight = tileHeight; 711 | stardist.pad = pad; 712 | stardist.includeProbability = includeProbability; 713 | stardist.ignoreCellOverlaps = ignoreCellOverlaps; 714 | stardist.measureShape = measureShape; 715 | stardist.doLog = doLog; 716 | stardist.simplifyDistance = simplifyDistance; 717 | stardist.nThreads = nThreads; 718 | stardist.constrainToParent = constrainToParent; 719 | stardist.creatorFun = creatorFun; 720 | stardist.globalPathClass = globalPathClass; 721 | stardist.classifications = classifications; 722 | stardist.keepClassifiedBackground = keepClassifiedBackground; 723 | 724 | stardist.compartments = new LinkedHashSet<>(compartments); 725 | 726 | if (measurements != null) 727 | stardist.measurements = new LinkedHashSet<>(measurements); 728 | else 729 | stardist.measurements = Collections.emptyList(); 730 | 731 | return stardist; 732 | } 733 | 734 | } 735 | 736 | /** 737 | * Try to get a model file. First assume we have an absolute path, then check the user directory. 
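 * If several matches are found under the user directory, the sorting below prefers a file whose
 * parent directory is named 'stardist', then one whose parent is 'models', before falling back
 * to lexicographic path order.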
738 | * @param path 739 | * @return 740 | */ 741 | private static File findModelFile(String path) { 742 | if (path == null || path.isEmpty()) 743 | return null; 744 | var file = new File(path); 745 | if (file.exists()) 746 | return file; 747 | var userPath = UserDirectoryManager.getInstance().getUserPath(); 748 | if (userPath != null && Files.isDirectory(userPath)) { 749 | try { 750 | var potentialFiles = Files.walk(userPath) 751 | .filter(p -> p.getFileName().toString().equals(path)) 752 | .sorted(Comparator.comparingInt((Path p) -> "stardist".equalsIgnoreCase(parentDirName(p)) ? -1 : 0) 753 | .thenComparing((Path p) -> "models".equalsIgnoreCase(parentDirName(p)) ? -1 : 1) 754 | .thenComparing(p -> p.toString())) 755 | .map(p -> p.toFile()) 756 | .toList(); 757 | if (potentialFiles.isEmpty()) 758 | return null; 759 | else if (potentialFiles.size() == 1) { 760 | logger.debug("Found model file {}", file.getAbsolutePath()); 761 | return potentialFiles.get(0); 762 | } else { 763 | file = potentialFiles.get(0); 764 | logger.warn("Found {} potential models for {}, will use {}", 765 | potentialFiles.size(), 766 | path, 767 | file.getAbsolutePath()); 768 | return file; 769 | } 770 | } catch (IOException e) { 771 | logger.error("Exception searching for model file: " + e.getMessage(), e); 772 | } 773 | } 774 | return null; 775 | } 776 | 777 | private static String parentDirName(Path path) { 778 | if (path == null || path.getParent() == null) 779 | return null; 780 | return path.getParent().getFileName().toString(); 781 | } 782 | 783 | private boolean doLog = false; 784 | 785 | private double simplifyDistance = 1.4; 786 | 787 | private double threshold; 788 | 789 | private ImageDataOp op; 790 | private TileOpCreator globalPreprocess; 791 | private List preprocess; 792 | private DnnModel dnn; 793 | 794 | private double pixelSize; 795 | private double cellExpansion; 796 | private double cellConstrainScale; 797 | private boolean ignoreCellOverlaps; 798 | 799 | private Function creatorFun; 800 | private PathClass globalPathClass; 801 | private Map classifications; 802 | private boolean keepClassifiedBackground = false; 803 | 804 | private boolean constrainToParent = true; 805 | 806 | private int nThreads = -1; 807 | 808 | private boolean includeProbability = false; 809 | 810 | private int tileWidth = 1024; 811 | private int tileHeight = 1024; 812 | 813 | private int pad = 0; 814 | 815 | private boolean measureShape = false; 816 | 817 | private Collection compartments; 818 | private Collection measurements; 819 | 820 | private final AtomicBoolean firstRun = new AtomicBoolean(true); 821 | private boolean cancelRuns = false; 822 | 823 | 824 | /** 825 | * Detect cells within one or more parent objects, firing update events upon completion. 826 | * 827 | * @param imageData the image data containing the object 828 | * @param parents the parent objects; existing child objects will be removed, and replaced by the detected cells 829 | */ 830 | public void detectObjects(ImageData imageData, Collection parents) { 831 | runInPool(() -> detectObjectsImpl(imageData, parents)); 832 | } 833 | 834 | /** 835 | * Detect cells within a parent object. 
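 * A sketch of processing several parents while deferring the hierarchy update (the collection
 * overload above does effectively the same thing; variable names are illustrative):
 *   for (var annotation : annotations)
 *       stardist.detectObjects(imageData, annotation, false);
 *   imageData.getHierarchy().fireHierarchyChangedEvent(imageData.getHierarchy());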
836 | * 837 | * @param imageData the image data containing the object 838 | * @param parent the parent object; existing child objects will be removed, and replaced by the detected cells 839 | * @param fireUpdate if true, a hierarchy update will be fired on completion 840 | */ 841 | public void detectObjects(ImageData imageData, PathObject parent, boolean fireUpdate) { 842 | runInPool(() -> detectObjectsImpl(imageData, parent, fireUpdate)); 843 | } 844 | 845 | /** 846 | * Optionally submit runnable to a thread pool. This limits the parallelization used by parallel streams. 847 | * @param runnable 848 | */ 849 | private void runInPool(Runnable runnable) { 850 | if (nThreads > 0) { 851 | if (nThreads == 1) 852 | log("Processing with {} thread", nThreads); 853 | else 854 | log("Processing with {} threads", nThreads); 855 | // Using an outer thread poll impacts any parallel streams created inside 856 | try (var pool = new ForkJoinPool(nThreads)) { 857 | var completion = pool.submit(runnable); 858 | completion.get(); 859 | } catch (ExecutionException e) { 860 | logger.error("Exception running StarDist: {}", e.getMessage(), e); 861 | } catch (InterruptedException e) { 862 | logger.warn("StarDist interrupted!"); 863 | logger.debug(e.getMessage(), e); 864 | } 865 | } else { 866 | runnable.run(); 867 | } 868 | } 869 | 870 | 871 | private void detectObjectsImpl(ImageData imageData, Collection parents) { 872 | 873 | if (parents.isEmpty()) 874 | return; 875 | if (parents.size() == 1) { 876 | detectObjectsImpl(imageData, parents.iterator().next(), true); 877 | return; 878 | } 879 | log("Processing {} parent objects", parents.size()); 880 | if (nThreads >= 0) 881 | parents.forEach(p -> detectObjectsImpl(imageData, p, false)); 882 | else 883 | parents.parallelStream().forEach(p -> detectObjectsImpl(imageData, p, false)); 884 | 885 | // Fire a global update event 886 | imageData.getHierarchy().fireHierarchyChangedEvent(imageData.getHierarchy()); 887 | } 888 | 889 | 890 | /** 891 | * Detect cells within a parent object. 892 | * 893 | * @param imageData the image data containing the object 894 | * @param parent the parent object; existing child objects will be removed, and replaced by the detected cells 895 | * @param fireUpdate if true, a hierarchy update will be fired on completion 896 | */ 897 | private void detectObjectsImpl(ImageData imageData, PathObject parent, boolean fireUpdate) { 898 | Objects.requireNonNull(parent); 899 | // Lock early, so the user doesn't make modifications 900 | boolean wasLocked = parent.isLocked(); 901 | parent.setLocked(true); 902 | 903 | List detections = detectObjects(imageData, parent.getROI()); 904 | 905 | if (cancelRuns) { 906 | logger.warn("StarDist detection cancelled for {}", parent); 907 | if (!wasLocked) 908 | parent.setLocked(false); 909 | return; 910 | } 911 | 912 | parent.clearChildObjects(); 913 | parent.addChildObjects(detections); 914 | if (fireUpdate) 915 | imageData.getHierarchy().fireHierarchyChangedEvent(imageData.getHierarchy(), parent); 916 | } 917 | 918 | 919 | 920 | /** 921 | * Detect cells within a {@link ROI}. 922 | * @param imageData image to which the ROI belongs 923 | * @param roi region of interest which which to detect cells. If null, the entire image will be used. 924 | * @return the detected objects. Note that these will not automatically be added to the object hierarchy. 
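 *         For example, a sketch of adding them manually (assuming the hierarchy's
 *         {@code addObjects} method, and illustrative variable names):
 *           var detections = stardist.detectObjects(imageData, annotation.getROI());
 *           imageData.getHierarchy().addObjects(detections);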
925 | */ 926 | public List detectObjects(ImageData imageData, ROI roi) { 927 | 928 | var resolution = imageData.getServer().getPixelCalibration(); 929 | if (Double.isFinite(pixelSize) && pixelSize > 0) { 930 | double downsample = pixelSize / resolution.getAveragedPixelSize().doubleValue(); 931 | resolution = resolution.createScaledInstance(downsample, downsample); 932 | } 933 | int tw = tileWidth <= 0 ? defaultTileSize : tileWidth; 934 | int th = tileHeight <= 0 ? tw : tileHeight; 935 | 936 | if (tw <= pad*2 || th <= pad*2) { 937 | throw new IllegalArgumentException("Tile width & height must be > padding * 2"); 938 | } 939 | 940 | // The opServer is needed only to get tile requests, or calculate global normalization percentiles 941 | var opServer = ImageOps.buildServer(imageData, op, resolution, tw - pad*2, th - pad*2); 942 | // var opServer = ImageOps.buildServer(imageData, op, resolution, tileWidth-pad*2, tileHeight-pad*2); 943 | 944 | RegionRequest request; 945 | if (roi == null) 946 | request = RegionRequest.createInstance(opServer); 947 | else 948 | request = RegionRequest.createInstance( 949 | opServer.getPath(), 950 | opServer.getDownsampleForResolution(0), 951 | roi); 952 | 953 | // Get all the required tiles that intersect with the mask ROI 954 | var mask = roi == null ? null : roi.getGeometry(); 955 | var tiles = opServer.getTileRequestManager().getTileRequests(request) 956 | .stream() 957 | .filter(t -> mask == null || mask.intersects(GeometryTools.createRectangle(t.getImageX(), t.getImageY(), t.getImageWidth(), t.getImageHeight()))) 958 | .collect(Collectors.toList()); 959 | 960 | // Detect all potential nuclei 961 | var server = imageData.getServer(); 962 | var cal = server.getPixelCalibration(); 963 | double expansion = cellExpansion / cal.getAveragedPixelSize().doubleValue(); 964 | var plane = request.getImagePlane(); 965 | 966 | // Compute op with preprocessing 967 | var fullPreprocess = new ArrayList(); 968 | fullPreprocess.add(ImageOps.Core.ensureType(PixelType.FLOAT32)); 969 | 970 | // Do global preprocessing calculations, if required 971 | if (globalPreprocess != null) { 972 | try { 973 | var normalizeOps = globalPreprocess.createOps(op, imageData, roi, request.getImagePlane()); 974 | fullPreprocess.addAll(normalizeOps); 975 | } catch (IOException e) { 976 | throw new RuntimeException("Exception computing global normalization", e); 977 | } 978 | } 979 | 980 | if (!preprocess.isEmpty()) { 981 | fullPreprocess.addAll(preprocess); 982 | } 983 | if (fullPreprocess.size() > 1) 984 | fullPreprocess.add(ImageOps.Core.ensureType(PixelType.FLOAT32)); 985 | 986 | var opWithPreprocessing = op.appendOps(fullPreprocess.toArray(ImageOp[]::new)); 987 | 988 | // Detect all potential nuclei 989 | if (tiles.size() > 1) 990 | log("Detecting nuclei for {} tiles", tiles.size()); 991 | else 992 | log("Detecting nuclei"); 993 | var nuclei = tiles.parallelStream() 994 | .flatMap(t -> detectObjectsForTile(opWithPreprocessing, dnn, imageData, t.getRegionRequest(), tiles.size() > 1, mask).stream()) 995 | .collect(Collectors.toList()); 996 | 997 | if (cancelRuns) 998 | return Collections.emptyList(); 999 | 1000 | // Filter nuclei again if we need to for resolving tile overlaps 1001 | if (tiles.size() > 1) { 1002 | log("Resolving nucleus overlaps"); 1003 | nuclei = filterNuclei(nuclei); 1004 | } 1005 | 1006 | // Convert to detections, dilating to approximate cells if necessary 1007 | // Drop cells if they fail (rather than catastrophically give up) 1008 | var detections = nuclei.parallelStream() 
1009 | .map(n -> { 1010 | try { 1011 | return convertToObject(n, plane, expansion, constrainToParent ? mask : null); 1012 | } catch (Exception e) { 1013 | logger.warn("Error converting to object: {}", e.getMessage(), e); 1014 | return null; 1015 | } 1016 | }).filter(n -> n != null) 1017 | .collect(Collectors.toList()); 1018 | 1019 | // Resolve cell overlaps, if needed 1020 | if (expansion > 0 && !ignoreCellOverlaps) { 1021 | log("Resolving cell overlaps"); 1022 | if (creatorFun != null) { 1023 | // It's awkward, but we need to temporarily convert to cells and back 1024 | var cells = detections.stream().map(c -> objectToCell(c)).collect(Collectors.toList()); 1025 | cells = CellTools.constrainCellOverlaps(cells); 1026 | detections = cells.stream().map(c -> cellToObject(c, creatorFun)).collect(Collectors.toList()); 1027 | } else 1028 | detections = CellTools.constrainCellOverlaps(detections); 1029 | } 1030 | 1031 | // Add shape measurements, if needed 1032 | if (measureShape) 1033 | detections.parallelStream().forEach(c -> ObjectMeasurements.addShapeMeasurements(c, cal)); 1034 | 1035 | // Add intensity measurements, if needed 1036 | if (!detections.isEmpty() && !measurements.isEmpty()) { 1037 | log("Making measurements"); 1038 | var stains = imageData.getColorDeconvolutionStains(); 1039 | var builder = new TransformedServerBuilder(server); 1040 | if (stains != null) { 1041 | List stainNumbers = new ArrayList<>(); 1042 | for (int s = 1; s <= 3; s++) { 1043 | if (!stains.getStain(s).isResidual()) 1044 | stainNumbers.add(s); 1045 | } 1046 | builder.deconvolveStains(stains, stainNumbers.stream().mapToInt(i -> i).toArray()); 1047 | } 1048 | 1049 | var server2 = builder.build(); 1050 | double downsample = resolution.getAveragedPixelSize().doubleValue() / cal.getAveragedPixelSize().doubleValue(); 1051 | 1052 | detections.parallelStream().forEach(cell -> { 1053 | try { 1054 | ObjectMeasurements.addIntensityMeasurements(server2, cell, downsample, measurements, compartments); 1055 | } catch (IOException e) { 1056 | log(e.getMessage(), e); 1057 | } 1058 | }); 1059 | 1060 | } 1061 | 1062 | log("Detected {} cells", detections.size()); 1063 | 1064 | return detections; 1065 | } 1066 | 1067 | 1068 | 1069 | private static PathObject objectToCell(PathObject pathObject) { 1070 | ROI roiNucleus = null; 1071 | var children = pathObject.getChildObjects(); 1072 | if (children.size() == 1) 1073 | roiNucleus = children.iterator().next().getROI(); 1074 | else if (children.size() > 1) 1075 | throw new IllegalArgumentException("Cannot convert object with multiple child objects to a cell!"); 1076 | return PathObjects.createCellObject(pathObject.getROI(), roiNucleus, pathObject.getPathClass(), pathObject.getMeasurementList()); 1077 | } 1078 | 1079 | private static PathObject cellToObject(PathObject cell, Function creator) { 1080 | var parent = creator.apply(cell.getROI()); 1081 | var nucleusROI = cell instanceof PathCellObject ? ((PathCellObject)cell).getNucleusROI() : null; 1082 | if (nucleusROI != null) { 1083 | var nucleus = creator.apply(nucleusROI); 1084 | nucleus.setPathClass(cell.getPathClass()); 1085 | parent.addChildObject(nucleus); 1086 | } 1087 | parent.setPathClass(cell.getPathClass()); 1088 | var cellMeasurements = cell.getMeasurementList(); 1089 | if (!cellMeasurements.isEmpty()) { 1090 | try (var ml = parent.getMeasurementList()) { 1091 | ml.putAll(cellMeasurements); 1092 | } 1093 | } 1094 | return parent; 1095 | } 1096 | 1097 | 1098 | 1099 | private void log(String message, Object... 
arguments) { 1100 | if (doLog) 1101 | logger.info(message, arguments); 1102 | else 1103 | logger.debug(message, arguments); 1104 | } 1105 | 1106 | 1107 | private PathObject convertToObject(PotentialNucleus nucleus, ImagePlane plane, double cellExpansion, Geometry mask) { 1108 | var geomNucleus = simplify(nucleus.geometry); 1109 | PathObject pathObject; 1110 | if (cellExpansion > 0) { 1111 | // cellExpansion = geomNucleus.getPrecisionModel().makePrecise(cellExpansion); 1112 | // cellExpansion = Math.round(cellExpansion); 1113 | // Note that prior to QuPath v0.4.0 an extra fix was needed here 1114 | var geomCell = CellTools.estimateCellBoundary(geomNucleus, cellExpansion, cellConstrainScale); 1115 | if (mask != null) { 1116 | geomCell = GeometryTools.attemptOperation(geomCell, g -> g.intersection(mask)); 1117 | // Fix nucleus overlaps (added v0.4.0) 1118 | var geomCell2 = geomCell; 1119 | geomNucleus = GeometryTools.attemptOperation(geomNucleus, g -> g.intersection(geomCell2)); 1120 | geomNucleus = GeometryTools.ensurePolygonal(geomNucleus); 1121 | } 1122 | geomCell = simplify(geomCell); 1123 | 1124 | // Intersection with complex mask could give linestrings - which we want to remove 1125 | geomCell = GeometryTools.ensurePolygonal(geomCell); 1126 | 1127 | if (geomCell.isEmpty()) { 1128 | logger.warn("Empty cell boundary at {} will be skipped", nucleus.geometry.getCentroid()); 1129 | return null; 1130 | } 1131 | if (geomNucleus.isEmpty()) { 1132 | logger.warn("Empty nucleus at {} will be skipped", nucleus.geometry.getCentroid()); 1133 | return null; 1134 | } 1135 | var roiCell = GeometryTools.geometryToROI(geomCell, plane); 1136 | var roiNucleus = GeometryTools.geometryToROI(geomNucleus, plane); 1137 | if (creatorFun == null) 1138 | pathObject = PathObjects.createCellObject(roiCell, roiNucleus, null, null); 1139 | else { 1140 | pathObject = creatorFun.apply(roiCell); 1141 | if (roiNucleus != null) { 1142 | pathObject.addChildObject(creatorFun.apply(roiNucleus)); 1143 | } 1144 | } 1145 | } else { 1146 | if (mask != null) { 1147 | geomNucleus = GeometryTools.attemptOperation(geomNucleus, g -> g.intersection(mask)); 1148 | geomNucleus = GeometryTools.ensurePolygonal(geomNucleus); 1149 | if (geomNucleus.isEmpty()) { 1150 | return null; 1151 | } 1152 | } 1153 | var roiNucleus = GeometryTools.geometryToROI(geomNucleus, plane); 1154 | if (creatorFun == null) 1155 | pathObject = PathObjects.createDetectionObject(roiNucleus); 1156 | else 1157 | pathObject = creatorFun.apply(roiNucleus); 1158 | } 1159 | if (includeProbability) { 1160 | try (var ml = pathObject.getMeasurementList()) { 1161 | ml.put("Detection probability", nucleus.getProbability()); 1162 | } 1163 | } 1164 | 1165 | // Set classification, if available 1166 | PathClass pathClass; 1167 | if (classifications == null) 1168 | pathClass = globalPathClass; 1169 | else 1170 | pathClass = classifications.getOrDefault(nucleus.getClassification(), globalPathClass); 1171 | 1172 | if (pathClass != null && pathClass.isValid()) 1173 | pathObject.setPathClass(pathClass); 1174 | return pathObject; 1175 | } 1176 | 1177 | 1178 | private Geometry simplify(Geometry geom) { 1179 | if (simplifyDistance <= 0) 1180 | return geom; 1181 | try { 1182 | return VWSimplifier.simplify(geom, simplifyDistance); 1183 | } catch (Exception e) { 1184 | return geom; 1185 | } 1186 | } 1187 | 1188 | 1189 | private static int[] range(int startInclusive, int endExclusive) { 1190 | int n = endExclusive - startInclusive; 1191 | int[] output = new int[n]; 1192 | for (int i = 0; i < n; 
i++) 1193 | output[i] = startInclusive + i; 1194 | return output; 1195 | } 1196 | 1197 | 1198 | private static Mat extractChannels(Mat mat, int... channels) { 1199 | Mat output; 1200 | int n = channels.length; 1201 | if (n == 0) { 1202 | output = new Mat(); 1203 | } else if (n == 1) { 1204 | output = new Mat(); 1205 | opencv_core.extractChannel(mat, output, channels[0]); 1206 | } else { 1207 | int[] pairs = new int[n * 2]; 1208 | for (int i = 0; i < n; i++) { 1209 | pairs[i*2] = channels[i]; 1210 | pairs[i*2+1] = i; 1211 | } 1212 | output = new Mat(mat.rows(), mat.cols(), opencv_core.CV_MAKE_TYPE(mat.depth(), n)); 1213 | opencv_core.mixChannels(mat, 1, output, 1, pairs, n); 1214 | } 1215 | return output; 1216 | } 1217 | 1218 | 1219 | private static Padding ensureSize(Mat mat, int width, int height, int borderType) { 1220 | int x1 = 0, x2 = 0, y1 = 0, y2 = 0; 1221 | int w = mat.cols(); 1222 | int h = mat.rows(); 1223 | 1224 | Padding padding = Padding.empty(); 1225 | 1226 | boolean pad = false; 1227 | if (w < width) { 1228 | x1 = (width - w) / 2; 1229 | x2 = (width - w - x1); 1230 | w = width; 1231 | pad = true; 1232 | } 1233 | if (h < height) { 1234 | y1 = (height - h) / 2; 1235 | y2 = (height - h - y1); 1236 | h = height; 1237 | pad = true; 1238 | } 1239 | if (pad) { 1240 | opencv_core.copyMakeBorder(mat, mat, y1, y2, x1, x2, borderType); 1241 | padding = Padding.getPadding(x1, x2, y1, y2); 1242 | } 1243 | if (w != width && h != height) 1244 | opencv_imgproc.resize(mat, mat, new Size(width, height)); 1245 | return padding; 1246 | } 1247 | 1248 | 1249 | private List detectObjectsForTile(ImageDataOp op, DnnModel dnn, ImageData imageData, RegionRequest request, boolean excludeOnBounds, Geometry mask) { 1250 | 1251 | List nuclei; 1252 | 1253 | if (Thread.interrupted()) 1254 | cancelRuns = true; 1255 | 1256 | if (cancelRuns) 1257 | return Collections.emptyList(); 1258 | 1259 | // Create a mask around pixels we can use 1260 | var regionMask = GeometryTools.createRectangle(request.getX(), request.getY(), request.getWidth(), request.getHeight()); 1261 | if (mask == null) 1262 | mask = regionMask; 1263 | else 1264 | mask = GeometryTools.attemptOperation(mask, m -> m.intersection(regionMask)); 1265 | 1266 | // Create a padded request, if we need one 1267 | RegionRequest requestPadded = request; 1268 | if (pad > 0) { 1269 | double downsample = request.getDownsample(); 1270 | var server = imageData.getServer(); 1271 | int x1 = (int)Math.max(0, Math.round(request.getX() - downsample * pad)); 1272 | int y1 = (int)Math.max(0, Math.round(request.getY() - downsample * pad)); 1273 | int x2 = (int)Math.min(server.getWidth(), Math.round(request.getMaxX() + downsample * pad)); 1274 | int y2 = (int)Math.min(server.getHeight(), Math.round(request.getMaxY() + downsample * pad)); 1275 | requestPadded = RegionRequest.createInstance(server.getPath(), downsample, x1, y1, x2-x1, y2-y1, request.getZ(), request.getT()); 1276 | } 1277 | 1278 | // // Hack to visualize the tiles that are computed (for debugging) 1279 | // imageData.getHierarchy().addPathObject( 1280 | // PathObjects.createAnnotationObject( 1281 | // ROIs.createRectangleROI(request), 1282 | // PathClassFactory.getPathClass("Temporary") 1283 | // )); 1284 | 1285 | try (var scope = new PointerScope()) { 1286 | Mat mat; 1287 | try { 1288 | mat = op.apply(imageData, requestPadded); 1289 | } catch (IOException e) { 1290 | logger.error(e.getMessage(), e); 1291 | return Collections.emptyList(); 1292 | } 1293 | 1294 | // Calculate image width & height. 
1295 | // These need to be consistent with the expected maximum number of pooling operations 1296 | // to avoid shape problems. 1297 | int expectedPooling = 6; // A generous estimate (usually 3 or 4 expected) 1298 | int multiple = (int)Math.pow(2, expectedPooling); 1299 | int tw = (int)Math.ceil(mat.cols()/(double)multiple) * multiple; 1300 | int th = (int)Math.ceil(mat.rows()/(double)multiple) * multiple; 1301 | // 1302 | // Ensure we have a Mat of the right size 1303 | var padding = ensureSize(mat, tw, th, opencv_core.BORDER_REFLECT); 1304 | 1305 | boolean isFirstRun = firstRun.getAndSet(false); 1306 | 1307 | Map output; 1308 | // synchronized(dnn) { 1309 | output = dnn.predict(Map.of(DnnModel.DEFAULT_INPUT_NAME, mat)); 1310 | // } 1311 | Mat matProb = null; 1312 | Mat matRays = null; 1313 | Mat matClassifications = null; 1314 | if (output.size() == 1) { 1315 | // Split channels to extract probability, ray and (possibly) classification images 1316 | var matOutput = output.values().iterator().next(); 1317 | int nChannels = matOutput.channels(); 1318 | int nClassifications = classifications == null ? 0 : classifications.size(); 1319 | int nRays = nChannels - 1 - nClassifications; 1320 | matProb = extractChannels(matOutput, 0); 1321 | matRays = extractChannels(matOutput, range(1, nRays+1)); 1322 | matClassifications = nClassifications == 0 ? null : extractChannels(matOutput, range(nRays+1, nChannels)); 1323 | } else { 1324 | // Split output as needed 1325 | // We require that probabilities are single-channel, and there are more rays than classifications 1326 | for (var entry : output.entrySet()) { 1327 | var temp = entry.getValue(); 1328 | if (temp.channels() == 1) 1329 | matProb = temp; 1330 | else if (matRays == null) 1331 | matRays = temp; 1332 | else { 1333 | if (temp.channels() > matRays.channels()) { 1334 | matClassifications = matRays; 1335 | matRays = temp; 1336 | } else 1337 | matClassifications = temp; 1338 | } 1339 | } 1340 | } 1341 | 1342 | // Warn if we have weird dimensions on the first run 1343 | if (isFirstRun) { 1344 | if (classifications != null && !classifications.isEmpty()) { 1345 | int nClassifications = classifications.size(); 1346 | int nChannels = matClassifications == null ? 
0 : matClassifications.channels(); 1347 | // We might not specify a background classification, but if we have very different numbers from the prediction we should report that 1348 | if (nClassifications > nChannels || nClassifications < nChannels-1) 1349 | logger.warn("{} classifications provided, {} available in the prediction", nClassifications, nChannels); 1350 | else 1351 | logger.debug("{} classifications provided, {} available in the prediction", nClassifications, nChannels); 1352 | } 1353 | } 1354 | 1355 | // Depending upon model export, we might have a half resolution prediction that needs to be rescaled 1356 | long inputWidth = mat.cols(); 1357 | long inputHeight = mat.rows(); 1358 | if (inputWidth <= 0 || inputHeight <= 0) 1359 | throw new RuntimeException("Mat dimensions are unknown!"); 1360 | double scaleX = Math.round((double)inputWidth / matProb.cols()); 1361 | double scaleY = Math.round((double)inputHeight / matProb.rows()); 1362 | if (scaleX != 1.0 || scaleY != 1.0) { 1363 | if (scaleX != 2.0 || scaleY != 2.0) 1364 | logger.warn("Unexpected StarDist rescaling x={}, y={}", scaleX, scaleY); 1365 | else 1366 | logger.debug("StarDist rescaling x={}, y={}", scaleX, scaleY); 1367 | } 1368 | 1369 | // Convert predictions to potential nuclei 1370 | FloatIndexer indexerProb = matProb.createIndexer(); 1371 | FloatIndexer indexerRays = matRays.createIndexer(); 1372 | FloatIndexer indexerClassifications = matClassifications == null ? null : matClassifications.createIndexer(); 1373 | nuclei = createNuclei(indexerProb, indexerRays, indexerClassifications, 1374 | requestPadded.getDownsample(), 1375 | requestPadded.getX() - requestPadded.getDownsample() * padding.getX1(), 1376 | requestPadded.getY() - requestPadded.getDownsample() * padding.getY1(), 1377 | scaleX, 1378 | scaleY, 1379 | mask); 1380 | 1381 | // Exclude anything that overlaps the right/bottom boundary of a region 1382 | if (excludeOnBounds) { 1383 | var iter = nuclei.iterator(); 1384 | while (iter.hasNext()) { 1385 | var n = iter.next(); 1386 | var env = n.geometry.getEnvelopeInternal(); 1387 | if (env.getMaxX() >= requestPadded.getMaxX() || env.getMaxY() >= requestPadded.getMaxY()) 1388 | iter.remove(); 1389 | } 1390 | } 1391 | 1392 | } 1393 | 1394 | return filterNuclei(nuclei); 1395 | } 1396 | 1397 | 1398 | // private static void cropInPlace(Mat mat, Padding padding, double scaleX, double scaleY) { 1399 | // if (mat == null || padding.isEmpty()) 1400 | // return; 1401 | // 1402 | // int x = (int)Math.round(padding.getX1() / scaleX); 1403 | // int y = (int)Math.round(padding.getY1() / scaleY); 1404 | // int w = mat.cols() - (int)Math.round(padding.getXSum() / scaleX); 1405 | // int h = mat.rows() - (int)Math.round(padding.getYSum() / scaleY); 1406 | // 1407 | // mat.put(OpenCVTools.crop(mat, x, y, w, h)); 1408 | // } 1409 | 1410 | 1411 | 1412 | /** 1413 | * Create a builder to customize detection parameters. 1414 | * This accepts either TensorFlow's savedmodel format (if TensorFlow is available) or alternatively a frozen 1415 | * .pb file compatible with OpenCV's DNN module. 1416 | * @param modelPath path to the StarDist/TensorFlow model to use for prediction. 1417 | * @return 1418 | */ 1419 | public static Builder builder(String modelPath) { 1420 | var builder = maybeCreateBioimageIoBuilder(modelPath); 1421 | if (builder == null) 1422 | return new Builder(modelPath); 1423 | else { 1424 | return builder; 1425 | } 1426 | } 1427 | 1428 | 1429 | /** 1430 | * Maybe initialize the builder from a BioimageIO model spec... 
if we can 1431 | * @param path 1432 | * @return 1433 | */ 1434 | private static Builder maybeCreateBioimageIoBuilder(String path) { 1435 | var p = Paths.get(path); 1436 | if (!Files.exists(p)) 1437 | return null; 1438 | try { 1439 | if (isYamlFile(p) || (Files.isDirectory(p) && Files.list(p).anyMatch(StarDist2D::isYamlFile))) { 1440 | return StarDistBioimageIo.builder(p); 1441 | } 1442 | } catch (IOException e) { 1443 | logger.debug("Exception attempting to parse BioimageIOSpec: {}", e.getMessage(), e); 1444 | } catch (UnsatisfiedLinkError e) { 1445 | logger.debug("Unable to parse BioimageIOSpec: {}", e.getMessage(), e); 1446 | } 1447 | return null; 1448 | } 1449 | 1450 | private static boolean isYamlFile(Path path) { 1451 | if (Files.isRegularFile(path)) { 1452 | var name = path.getFileName().toString().toLowerCase(); 1453 | return name.endsWith(".yml") || name.endsWith(".yaml"); 1454 | } 1455 | return false; 1456 | } 1457 | 1458 | 1459 | 1460 | /** 1461 | * Create a builder to customize detection parameters, using a provided {@link DnnModel} for prediction. 1462 | * This provides a way to use an alternative machine learning library and model file, rather than the default 1463 | * (OpenCV or TensorFlow). 1464 | * @param dnn the model to use for prediction 1465 | * @return 1466 | */ 1467 | public static Builder builder(DnnModel dnn) { 1468 | return new Builder(dnn); 1469 | } 1470 | 1471 | 1472 | /** 1473 | * Build a normalization op that can be based upon the entire (2D) image, rather than only local tiles. 1474 | *

1475 | * Example: 1476 | *

1477 | 	 * 
1478 | 	 *   var builder = StarDist2D.builder(modelPath)   // modelPath: file path, or a model name in the user directory
1479 | 	 *   	.preprocessGlobal(
1480 | 	 *   		StarDist2D.imageNormalizationBuilder()
1481 | 	 *   			.percentiles(0, 99.8)
1482 | 	 *   			.perChannel(false)
1483 | 	 *   			.downsample(10)
1484 | 	 *   			.build()
1485 | 	 *   	).pixelSize(0.5) // Any other options to customize StarDist2D
1486 | 	 *   	.build()
1487 | 	 * 
1488 | 	 * 
1489 | *

1490 | * Note that currently this requires downsampling the image to a manageable size. 1491 | * 1492 | * @return 1493 | */ 1494 | public static ImageNormalizationBuilder imageNormalizationBuilder() { 1495 | return new ImageNormalizationBuilder(); 1496 | } 1497 | 1498 | 1499 | /** 1500 | * Create a potential nucleus. 1501 | * @param indexerProb probability values 1502 | * @param indexerRays ray values 1503 | * @param indexerClass classification probabilities (optional) 1504 | * @param downsample downsample for the region request, used to convert coordinates 1505 | * @param originX x-coordinate for the top left of the image, used to convert coordinates 1506 | * @param originY y-coordinate for the top left of the image, used to convert coordinates 1507 | * @param scaleX scaling to apply to x pixel index; normally 1.0, but may be 2.0 if passing downsampled output 1508 | * @param scaleY scaling to apply to y pixel index; normally 1.0, but may be 2.0 if passing downsampled output 1509 | * @param mask optional geometry mask, in the full image space 1510 | * @return list of potential nuclei, sorted in descending order of probability 1511 | */ 1512 | private List createNuclei(FloatIndexer indexerProb, FloatIndexer indexerRays, FloatIndexer indexerClass, double downsample, double originX, double originY, double scaleX, double scaleY, Geometry mask) { 1513 | long[] sizes = indexerProb.sizes(); 1514 | long[] inds = new long[3]; 1515 | int h = (int)sizes[0]; 1516 | int w = (int)sizes[1]; 1517 | 1518 | int nRays = (int)indexerRays.size(2); 1519 | double[][] rays = sinCosAngles(nRays); 1520 | double[] raySine = rays[0]; 1521 | double[] rayCosine = rays[1]; 1522 | 1523 | int nClasses = indexerClass == null ? 0 : (int)indexerClass.size(2); 1524 | 1525 | var nuclei = new ArrayList(); 1526 | 1527 | var locator = mask == null ? 
null : new SimplePointInAreaLocator(mask); 1528 | 1529 | var factory = GeometryTools.getDefaultFactory(); 1530 | var precisionModel = factory.getPrecisionModel(); 1531 | for (int y = 0; y < h; y++) { 1532 | inds[0] = y; 1533 | for (int x = 0; x < w; x++) { 1534 | inds[1] = x; 1535 | inds[2] = 0; 1536 | double prob = indexerProb.get(inds); 1537 | if (prob < threshold) 1538 | continue; 1539 | var coords = new ArrayList(); 1540 | Coordinate lastCoord = null; 1541 | for (int a = 0; a < nRays; a++) { 1542 | inds[2] = a; 1543 | double val = indexerRays.get(inds); 1544 | // We can get NaN 1545 | if (!Double.isFinite(val)) 1546 | continue; 1547 | // Python implementation imposes a minimum value 1548 | val = Math.max(1e-3, val); 1549 | // Create coordinate & add if it is distinct 1550 | double xx = precisionModel.makePrecise(originX + (x * scaleX + val * rayCosine[a]) * downsample); 1551 | double yy = precisionModel.makePrecise(originY + (y * scaleY + val * raySine[a]) * downsample); 1552 | var coord = new Coordinate(xx, yy); 1553 | if (!Objects.equals(coord, lastCoord)) 1554 | coords.add(coord); 1555 | } 1556 | // We need at least 3 for a reasonable nucleus 1557 | if (coords.size() < 3) 1558 | continue; 1559 | else if (!coords.get(0).equals(coords.get(coords.size()-1))) 1560 | coords.add(coords.get(0)); 1561 | try { 1562 | var polygon = factory.createPolygon(coords.toArray(Coordinate[]::new)); 1563 | if (locator == null || locator.locate(new Centroid(polygon).getCentroid()) != Location.EXTERIOR) { 1564 | var geom = simplify(polygon); 1565 | // Get classification, if available 1566 | int classification = -1; 1567 | if (indexerClass != null) { 1568 | double maxProb = Double.NEGATIVE_INFINITY; 1569 | for (int c = 0; c < nClasses; c++) { 1570 | inds[2] = c; 1571 | double probClass = indexerClass.get(inds); 1572 | if (probClass > maxProb) { 1573 | classification = c; 1574 | maxProb = probClass; 1575 | } 1576 | } 1577 | } 1578 | if (classification != 0 || keepClassifiedBackground) 1579 | nuclei.add(new PotentialNucleus(geom, prob, classification)); 1580 | } 1581 | } catch (Exception e) { 1582 | logger.warn("Error creating nucleus: {}", e.getMessage(), e); 1583 | } 1584 | } 1585 | } 1586 | return nuclei; 1587 | } 1588 | 1589 | 1590 | private static List filterNuclei(List potentialNuclei) { 1591 | 1592 | // Sort in descending order of probability 1593 | Collections.sort(potentialNuclei, Comparator.comparingDouble((PotentialNucleus n) -> n.getProbability()).reversed()); 1594 | 1595 | // Create array of nuclei to keep & to skip 1596 | var nuclei = new LinkedHashSet(); 1597 | var skippedNucleus = new HashSet(); 1598 | int skipErrorCount = 0; 1599 | 1600 | // Create a spatial cache to find overlaps more quickly 1601 | // (Because of later tests, we don't need to update envelopes even though geometries may be modified) 1602 | Map envelopes = new HashMap<>(); 1603 | var tree = new STRtree(); 1604 | for (var nuc : potentialNuclei) { 1605 | var env = nuc.geometry.getEnvelopeInternal(); 1606 | envelopes.put(nuc.geometry, env); 1607 | tree.insert(env, nuc); 1608 | } 1609 | 1610 | var preparingFactory = new PreparedGeometryFactory(); 1611 | 1612 | for (var nucleus : potentialNuclei) { 1613 | if (skippedNucleus.contains(nucleus)) 1614 | continue; 1615 | 1616 | nuclei.add(nucleus); 1617 | var envelope = envelopes.computeIfAbsent(nucleus.geometry, g -> g.getEnvelopeInternal()); 1618 | 1619 | @SuppressWarnings("unchecked") 1620 | var overlaps = (List)tree.query(envelope); 1621 | 1622 | // Remove the overlaps that we can 
be sure don't apply using quick tests, to avoid expensive ones 1623 | var iter = overlaps.iterator(); 1624 | while (iter.hasNext()) { 1625 | var nucleus2 = iter.next(); 1626 | if (nucleus2 == nucleus || skippedNucleus.contains(nucleus2) || nuclei.contains(nucleus2)) 1627 | iter.remove(); 1628 | else { 1629 | // Envelope test needed because nuclei can have been modified 1630 | var env = envelopes.computeIfAbsent(nucleus2.geometry, g -> g.getEnvelopeInternal()); 1631 | if (!envelope.intersects(env)) 1632 | iter.remove(); 1633 | } 1634 | } 1635 | 1636 | // If we need to compare a lot of intersections, preparing the geometry can speed things up 1637 | PreparedGeometry prepared = null; 1638 | if (overlaps.size() > 5) { 1639 | prepared = preparingFactory.create(nucleus.geometry); 1640 | } 1641 | for (var nucleus2 : overlaps) { 1642 | // If we have an overlap, retain the higher-probability nucleus only (i.e. the one we met first) 1643 | // Try to refine other nuclei 1644 | try { 1645 | boolean checkDifference = true; 1646 | if (prepared == null) { 1647 | // We could check for intersection, but it seems faster to just compute difference 1648 | // (this would warrant some more systematic checking though) 1649 | checkDifference = true;//nucleus.geometry.intersects(nucleus2.geometry); 1650 | } else 1651 | checkDifference = prepared.intersects(nucleus2.geometry); 1652 | if (checkDifference) { 1653 | // Keep the lower-probability nucleus only if removing the overlap leaves a single polygon covering more than half its original area 1654 | var difference = nucleus2.geometry.difference(nucleus.geometry); 1655 | 1656 | // Discard linestrings 1657 | if (difference instanceof GeometryCollection) 1658 | difference = GeometryTools.ensurePolygonal(difference); 1659 | 1660 | if (difference instanceof Polygon && difference.getArea() > nucleus2.fullArea / 2.0) 1661 | nucleus2.geometry = difference; 1662 | else { 1663 | skippedNucleus.add(nucleus2); 1664 | } 1665 | } 1666 | } catch (Exception e) { 1667 | logger.debug("Exception resolving nuclei: " + e.getMessage()); 1668 | logger.trace(e.getMessage(), e); 1669 | skippedNucleus.add(nucleus2); 1670 | skipErrorCount++; 1671 | } 1672 | 1673 | } 1674 | } 1675 | if (skipErrorCount > 0) { 1676 | // Reduce warning to debug - this happens often for 1 or 2 nuclei but isn't necessarily 1677 | // a serious problem that the user should be aware of 1678 | int skipCount = skippedNucleus.size(); 1679 | String s = skipErrorCount == 1 ? 
"1 nucleus" : skipErrorCount + " nuclei"; 1680 | logger.debug("Skipped {} due to error in resolving overlaps ({}% of all skipped)", 1681 | s, GeneralTools.formatNumber(skipErrorCount*100.0/skipCount, 1)); 1682 | } 1683 | return new ArrayList<>(nuclei); 1684 | } 1685 | 1686 | 1687 | private static double[][] sinCosAngles(int n) { 1688 | double[][] angles = new double[2][n]; 1689 | for (int i = 0; i < n; i++) { 1690 | double theta = 2 * Math.PI / n * i; 1691 | angles[0][i] = Math.sin(theta); 1692 | angles[1][i] = Math.cos(theta); 1693 | } 1694 | return angles; 1695 | } 1696 | 1697 | 1698 | private static class PotentialNucleus { 1699 | 1700 | private Geometry geometry; 1701 | private double fullArea; 1702 | private double probability; 1703 | private int classification; 1704 | 1705 | PotentialNucleus(Geometry geom, double prob, int classification) { 1706 | this.geometry = geom; 1707 | this.probability = prob; 1708 | this.classification = classification; 1709 | this.fullArea = geom.getArea(); 1710 | } 1711 | 1712 | double getProbability() { 1713 | return probability; 1714 | }; 1715 | 1716 | int getClassification() { 1717 | return classification; 1718 | } 1719 | 1720 | } 1721 | 1722 | 1723 | /** 1724 | * Close and cleanup resources. 1725 | *
<p>
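 * Since this method overrides {@link AutoCloseable#close()}, one convenient way to make sure
 * it is called is to create the detector in a try-with-resources block. A minimal sketch
 * (the model path is a placeholder, and {@code imageData}/{@code pathObjects} are assumed to
 * be defined by the caller, as in the bundled example scripts):
 * <pre>{@code
 * try (var stardist = StarDist2D.builder("/path/to/model.pb").build()) {
 *     stardist.detectObjects(imageData, pathObjects);
 * }
 * }</pre>
 * <p>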
1726 | * In practice, this means close any {@link DnnModel} stored if it is an instance of 1727 | * {@link Closeable} or {@link AutoCloseable}. 1728 | * This can be important to avoid memory leaks, particularly if using a GPU. 1729 | */ 1730 | @Override 1731 | public void close() throws Exception { 1732 | if (dnn instanceof Closeable) { 1733 | ((Closeable) dnn).close(); 1734 | } else if (dnn instanceof AutoCloseable) 1735 | ((AutoCloseable) dnn).close(); 1736 | } 1737 | 1738 | 1739 | } 1740 | -------------------------------------------------------------------------------- /src/main/java/qupath/ext/stardist/StarDistBioimageIo.java: -------------------------------------------------------------------------------- 1 | /*- 2 | * Copyright 2022 QuPath developers, University of Edinburgh 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | 18 | package qupath.ext.stardist; 19 | 20 | import java.io.File; 21 | import java.io.IOException; 22 | import java.nio.file.Files; 23 | import java.nio.file.Path; 24 | import java.nio.file.Paths; 25 | import java.util.ArrayList; 26 | import java.util.Collections; 27 | import java.util.List; 28 | import java.util.Map; 29 | import org.slf4j.Logger; 30 | import org.slf4j.LoggerFactory; 31 | 32 | import qupath.bioimageio.spec.tensor.Shape; 33 | import qupath.bioimageio.spec.Model; 34 | import qupath.bioimageio.spec.tensor.axes.Axes; 35 | import qupath.bioimageio.spec.tensor.Processing.ScaleLinear; 36 | import qupath.bioimageio.spec.tensor.Processing.ScaleRange; 37 | import qupath.bioimageio.spec.tensor.Processing.ZeroMeanUnitVariance; 38 | import qupath.bioimageio.spec.Weights.WeightsEntry; 39 | 40 | import qupath.ext.stardist.OpCreators.TileOpCreator; 41 | import qupath.lib.common.GeneralTools; 42 | import qupath.lib.io.GsonTools; 43 | import qupath.opencv.ml.BioimageIoTools; 44 | import qupath.opencv.ops.ImageOp; 45 | 46 | /** 47 | * Helper class to create a StarDist2D builder, initializing it from a BioimageIO Model Zoo file. 48 | * 49 | * @author Pete Bankhead 50 | * @since v0.4.0 51 | */ 52 | class StarDistBioimageIo { 53 | 54 | private static final Logger logger = LoggerFactory.getLogger(StarDistBioimageIo.class); 55 | 56 | private static int DEFAULT_MAX_DIM = 4096; 57 | private static double DEFAULT_DOWNSAMPLE = Double.NaN; 58 | 59 | /** 60 | * Create a builder by parsing the model spec given by the file path. 61 | * The syntax is intended to be 'Groovy-friendly', using the map as first argument to request optional named parameters. 62 | * @param params optional named parameters for global normalization; supports "maxDim" and "downsample" 63 | * @param path 64 | * @return 65 | * @throws IOException 66 | */ 67 | static StarDist2D.Builder builder(Map params, String path) throws IOException { 68 | return builder(params, Paths.get(path)); 69 | } 70 | 71 | /** 72 | * Create a builder by parsing the model spec given by the file path. 
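 * <p>
 * A minimal usage sketch (the spec path is a placeholder; further builder options can be
 * set before calling {@code build()}):
 * <pre>{@code
 * StarDist2D stardist = StarDistBioimageIo.builder("/models/my-stardist-model/rdf.yaml")
 *         .threshold(0.5)
 *         .build();
 * }</pre>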
73 | * @param path 74 | * @return 75 | * @throws IOException 76 | */ 77 | static StarDist2D.Builder builder(String path) throws IOException { 78 | return builder(Collections.emptyMap(), Paths.get(path)); 79 | } 80 | 81 | /** 82 | * Create a builder by parsing the model spec given by the file. 83 | * The syntax is intended to be 'Groovy-friendly', using the map as first argument to request optional named parameters. 84 | * @param params optional named parameters for global normalization; supports "maxDim" and "downsample" 85 | * @param file 86 | * @return 87 | * @throws IOException 88 | */ 89 | static StarDist2D.Builder builder(Map params, File file) throws IOException { 90 | return builder(params, file.toPath()); 91 | } 92 | 93 | /** 94 | * Create a builder by parsing the model spec given by the file. 95 | * @param file 96 | * @return 97 | * @throws IOException 98 | */ 99 | static StarDist2D.Builder builder(File file) throws IOException { 100 | return builder(Collections.emptyMap(), file); 101 | } 102 | 103 | /** 104 | * Create a builder by parsing the model spec given by the path. 105 | * @param path 106 | * @return 107 | * @throws IOException 108 | */ 109 | static StarDist2D.Builder builder(Path path) throws IOException { 110 | return builder(Collections.emptyMap(), path); 111 | } 112 | 113 | /** 114 | * Create a builder by parsing the model spec given by the path. 115 | * The syntax is intended to be 'Groovy-friendly', using the map as first argument to request optional named parameters. 116 | * @param params optional named parameters for global normalization; supports "maxDim" (int) and "downsample" (double). 117 | * @param path 118 | * @return 119 | * @throws IOException 120 | */ 121 | static StarDist2D.Builder builder(Map params, Path path) throws IOException { 122 | 123 | int maxDim = DEFAULT_MAX_DIM; 124 | double downsample = DEFAULT_DOWNSAMPLE; 125 | 126 | if (params != null && !params.isEmpty()) { 127 | Object val = params.getOrDefault("maxDim", null); 128 | if (val instanceof Number) 129 | maxDim = ((Number)val).intValue(); 130 | else if (val != null) 131 | logger.warn("Unsupported value for maxDim {} (must be an integer)", val); 132 | 133 | val = params.getOrDefault("downsample", null); 134 | if (val instanceof Number) 135 | downsample = ((Number)val).doubleValue(); 136 | else if (val != null) 137 | logger.warn("Unsupported value for downsample {} (must be an integer)", val); 138 | } 139 | 140 | logger.debug("Creating builder from {} with maxDim={}, downsample={}", path.getFileName(), maxDim, downsample); 141 | return builder(Model.parse(path), maxDim, downsample); 142 | } 143 | 144 | 145 | static StarDist2D.Builder builder(Model model, int globalMaxDim, double globalDownsample) { 146 | 147 | logger.info("Initializing builder from BioImage Model Zoo spec"); 148 | 149 | // Handle input 150 | var inputs = model.getInputs(); 151 | if (inputs.size() != 1) { 152 | throw new IllegalArgumentException("Expected 1 input, but found " + inputs.size()); 153 | } 154 | var input = inputs.get(0); 155 | var inputAxes = Axes.getAxesString(input.getAxes()).toLowerCase(); 156 | int indX = inputAxes.indexOf("x"); 157 | int indY = inputAxes.indexOf("y"); 158 | var shape = input.getShape(); 159 | int tileWidth = StarDist2D.defaultTileSize, tileHeight = StarDist2D.defaultTileSize; 160 | var inputShapeArray = shape.getTargetShape( 161 | Shape.createShapeArray(inputAxes, Map.of('x', tileWidth, 'y', tileHeight), 1)); 162 | tileWidth = inputShapeArray[inputAxes.indexOf('x')]; 163 | tileHeight = 
inputShapeArray[inputAxes.indexOf('y')]; 164 | 165 | // Handle preprocessing 166 | // Here, we make it global if we can 167 | TileOpCreator globalOpCreator = null; 168 | List<ImageOp> preprocessing = new ArrayList<>(); 169 | boolean warnLogged = false; 170 | for (var preprocess : input.getPreprocessing()) { 171 | if (preprocessing.isEmpty()) { 172 | if (preprocess instanceof ZeroMeanUnitVariance) { 173 | var zeroMean = (ZeroMeanUnitVariance)preprocess; 174 | var axes = Axes.getAxesString(zeroMean.getAxes()); 175 | boolean perChannel = axes == null ? true : !axes.contains("c"); 176 | logger.info("Normalization by zero-mean-unit-variance (perChannel={}, maxDim={}, downsample={})", perChannel, globalMaxDim, globalDownsample); 177 | globalOpCreator = OpCreators.imageNormalizationBuilder() 178 | .zeroMeanUnitVariance(true) 179 | .perChannel(perChannel) 180 | .maxDimension(globalMaxDim) 181 | .downsample(globalDownsample) 182 | .build(); 183 | continue; 184 | } else if (preprocess instanceof ScaleRange) { 185 | var scaleRange = (ScaleRange)preprocess; 186 | var axes = Axes.getAxesString(scaleRange.getAxes()); 187 | boolean perChannel = axes == null ? true : !axes.contains("c"); 188 | logger.info("Normalization by percentile (min={}, max={}, perChannel={}; maxDim={}, downsample={})", 189 | scaleRange.getMinPercentile(), scaleRange.getMaxPercentile(), 190 | perChannel, 191 | globalMaxDim, globalDownsample); 192 | globalOpCreator = OpCreators.imageNormalizationBuilder() 193 | .percentiles(scaleRange.getMinPercentile(), 194 | scaleRange.getMaxPercentile()) 195 | .eps(scaleRange.getEps()) 196 | .maxDimension(globalMaxDim) 197 | .downsample(globalDownsample) 198 | .perChannel(perChannel) 199 | .build(); 200 | continue; 201 | } else { 202 | var op = BioimageIoTools.transformToOp(preprocess); 203 | if (op != null) { 204 | if (!warnLogged) { 205 | logger.warn("Adding local preprocessing operation {}", preprocess); warnLogged = true; 206 | } 207 | preprocessing.add(op); 208 | } else { 209 | logger.warn("Unsupported preprocessing {}", preprocess); 210 | } 211 | } 212 | } 213 | } 214 | 215 | 216 | // Get halo from output 217 | for (var output : model.getOutputs()) { 218 | if (!output.getPostprocessing().isEmpty()) { 219 | logger.warn("Custom postprocessing not supported for StarDist2D - will be ignored"); 220 | break; 221 | } 222 | } 223 | var output = model.getOutputs().get(0); 224 | var axes = Axes.getAxesString(output.getAxes()).toLowerCase(); 225 | indX = axes.indexOf("x"); 226 | indY = axes.indexOf("y"); 227 | int[] halo = output.getHalo(); 228 | int padding = -1; 229 | if (halo.length > 0) { 230 | int padX = halo[indX]; 231 | int padY = halo[indY]; 232 | if (padX == padY) { 233 | padding = padX; 234 | } else { 235 | logger.warn("Halo should be the same in x and y, cannot use {} and {}", padX, padY); 236 | padding = 0; 237 | } 238 | } 239 | 240 | // Create the builder 241 | var weights = model.getWeights(WeightsEntry.TENSORFLOW_SAVED_MODEL_BUNDLE); 242 | 243 | // Try to build a model path - taking any unzipped version if we can 244 | var modelUri = model.getBaseURI().resolve(weights.getSource()); 245 | var modelPath = GeneralTools.toPath(modelUri); 246 | if (modelPath != null) { 247 | String absolutePath = modelPath.toAbsolutePath().toString(); 248 | if (absolutePath.toLowerCase().endsWith(".zip")) { 249 | var pathUnzipped = Paths.get(absolutePath.substring(0, absolutePath.length()-4)); 250 | if (Files.isDirectory(pathUnzipped)) { 251 | logger.debug("Replacing {} with unzipped version {}", modelPath.getFileName(), 
pathUnzipped.getFileName()); 252 | modelPath = pathUnzipped; 253 | } else { 254 | logger.warn("Zipped model directories not supported! Please unzip {} and try again", modelPath); 255 | } 256 | } 257 | } 258 | var builder = StarDist2D.builder(modelPath.toString()); 259 | 260 | // Try to parse custom config - this mostly provides thresholds 261 | var config = model.getConfig().getOrDefault("stardist", null); 262 | if (config == null) { 263 | logger.warn("No StarDist-specific configuration found in the model"); 264 | } else if (config instanceof Map) { 265 | var map = (Map)config; 266 | var version = map.get("stardist_version"); 267 | if (version instanceof String) 268 | logger.debug("StarDist version: {}", version); 269 | var thresholds = map.getOrDefault("thresholds", null); 270 | if (thresholds instanceof Map) { 271 | var thresholdsMap = (Map)thresholds; 272 | var nms = thresholdsMap.getOrDefault("nms", null); 273 | if (nms != null) 274 | logger.warn("NMS threshold {} will be ignored (custom NMS threshold not supported)", nms); 275 | var prob = thresholdsMap.getOrDefault("prob", null); 276 | if (prob instanceof Number) { 277 | logger.info("Setting probability threshold to {}", prob); 278 | builder.threshold(((Number)prob).doubleValue()); 279 | } 280 | } 281 | } else { 282 | logger.warn("StarDist config is not a map - cannot parse {}", config); 283 | } 284 | 285 | // Handle normalization 286 | if (globalOpCreator != null) 287 | builder.preprocess(globalOpCreator); 288 | if (!preprocessing.isEmpty()) 289 | builder.preprocess(preprocessing.toArray(ImageOp[]::new)); 290 | 291 | // Set padding from halo 292 | if (padding >= 0) 293 | builder.padding(padding); 294 | 295 | if (tileWidth > 0 && tileHeight > 0) 296 | builder.tileSize(tileWidth, tileHeight); 297 | 298 | if (axes != null) 299 | builder.layout(axes); 300 | 301 | return builder; 302 | } 303 | 304 | 305 | /** 306 | * Main method to test models can be parsed. 307 | * @param args 308 | */ 309 | public static void main(String[] args) { 310 | 311 | if (args.length == 0) { 312 | System.err.println("No model paths found!"); 313 | return; 314 | } 315 | 316 | for (var arg : args) { 317 | try { 318 | var builder = builder(null, arg); 319 | 320 | System.out.println( 321 | GsonTools.getInstance(true).toJson(builder) 322 | ); 323 | 324 | } catch (IOException e) { 325 | e.printStackTrace(); 326 | } 327 | } 328 | 329 | } 330 | 331 | 332 | } 333 | -------------------------------------------------------------------------------- /src/main/java/qupath/ext/stardist/StarDistExtension.java: -------------------------------------------------------------------------------- 1 | /*- 2 | * Copyright 2020-2021 QuPath developers, University of Edinburgh 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package qupath.ext.stardist; 18 | 19 | import java.io.IOException; 20 | import java.nio.charset.StandardCharsets; 21 | import java.util.Map; 22 | 23 | import org.controlsfx.control.action.Action; 24 | import org.slf4j.Logger; 25 | import org.slf4j.LoggerFactory; 26 | 27 | import qupath.lib.common.Version; 28 | import qupath.lib.gui.QuPathGUI; 29 | import qupath.lib.gui.extensions.GitHubProject; 30 | import qupath.lib.gui.extensions.QuPathExtension; 31 | import qupath.lib.gui.tools.MenuTools; 32 | 33 | /** 34 | * Install StarDist as an extension. 35 | *
<p>
36 | * Currently this doesn't really do anything much, beyond including a reference 37 | * in the listed extensions of QuPath and enabling some compatibility/update checks. 38 | * StarDist itself is only accessible via scripting. 39 | * In the future, the extension may also add a UI. 40 | * 41 | * @author Pete Bankhead 42 | */ 43 | public class StarDistExtension implements QuPathExtension, GitHubProject { 44 | 45 | private static final Logger logger = LoggerFactory.getLogger(StarDistExtension.class); 46 | 47 | private boolean isInstalled = false; 48 | 49 | private static final Map SCRIPTS = Map.of( 50 | "StarDist H&E nucleus detection script", "scripts/StarDistHE.groovy", 51 | "StarDist brightfield cell detection script", "scripts/StarDistDeconvolved.groovy", 52 | "StarDist fluorescence cell detection script", "scripts/StarDistFluorescence.groovy", 53 | "StarDist full cell detection script", "scripts/StarDistTemplate.groovy" 54 | ); 55 | 56 | @Override 57 | public void installExtension(QuPathGUI qupath) { 58 | if (isInstalled) 59 | return; 60 | 61 | for (var entry : SCRIPTS.entrySet()) { 62 | var name = entry.getValue(); 63 | var command = entry.getKey(); 64 | try (var stream = StarDist2D.class.getClassLoader().getResourceAsStream(name)) { 65 | if (stream != null) { 66 | var script = new String(stream.readAllBytes(), StandardCharsets.UTF_8); 67 | MenuTools.addMenuItems( 68 | qupath.getMenu("Extensions>StarDist", true), 69 | new Action(command, e -> openScript(qupath, script))); 70 | } else { 71 | logger.warn("Resource not found: {}", name); 72 | } 73 | } catch (IOException e) { 74 | logger.error(e.getLocalizedMessage(), e); 75 | } 76 | } 77 | } 78 | 79 | @Override 80 | public String getName() { 81 | return "StarDist extension"; 82 | } 83 | 84 | @Override 85 | public String getDescription() { 86 | return "Run StarDist nucleus detection via scripting.\n" 87 | + "See the extension repository for citation information."; 88 | } 89 | 90 | @Override 91 | public Version getQuPathVersion() { 92 | return Version.parse("0.5.0"); 93 | } 94 | 95 | @Override 96 | public GitHubRepo getRepository() { 97 | return GitHubRepo.create(getName(), "qupath", "qupath-extension-stardist"); 98 | } 99 | 100 | 101 | private static void openScript(QuPathGUI qupath, String script) { 102 | var editor = qupath.getScriptEditor(); 103 | if (editor == null) { 104 | logger.error("No script editor is available!"); 105 | return; 106 | } 107 | qupath.getScriptEditor().showScript("StarDist detection", script); 108 | } 109 | 110 | 111 | } 112 | -------------------------------------------------------------------------------- /src/main/resources/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/qupath/qupath-extension-stardist/02421060e699574ab83abc502cc60e18de0cac24/src/main/resources/.gitkeep -------------------------------------------------------------------------------- /src/main/resources/META-INF/services/qupath.lib.gui.extensions.QuPathExtension: -------------------------------------------------------------------------------- 1 | qupath.ext.stardist.StarDistExtension -------------------------------------------------------------------------------- /src/main/resources/scripts/StarDistDeconvolved.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * This script provides a general template for cell detection using StarDist in QuPath. 
3 | * 4 | * This example assumes you have brightfield image, but want to apply preprocessing 5 | * to separate the stains before running a model trained for fluorescence images. 6 | * One reason to do this is to handle some arbitrary IHC staining if you don't have 7 | * a model trained specifically for your kind of images. 8 | * 9 | * If you use this in published work, please remember to cite *both*: 10 | * - the original StarDist paper (https://doi.org/10.48550/arXiv.1806.03535) 11 | * - the original QuPath paper (https://doi.org/10.1038/s41598-017-17204-5) 12 | * 13 | * There are lots of options to customize the detection - this script shows some 14 | * of the main ones. Check out other scripts and the QuPath docs for more info. 15 | */ 16 | 17 | import qupath.ext.stardist.StarDist2D 18 | import qupath.lib.scripting.QP 19 | import qupath.opencv.ops.ImageOps 20 | 21 | // IMPORTANT! Replace this with the path to your StarDist model 22 | // that takes a single channel as input (e.g. dsb2018_heavy_augment.pb) 23 | // You can find some at https://github.com/qupath/models 24 | // (Check credit & reuse info before downloading) 25 | def modelPath = "/path/to/model.pb" 26 | 27 | // Get current image - assumed to have color deconvolution stains set 28 | var imageData = QP.getCurrentImageData() 29 | var stains = imageData.getColorDeconvolutionStains() 30 | 31 | // Customize how the StarDist detection should be applied 32 | // Here some reasonable default options are specified 33 | def stardist = StarDist2D 34 | .builder(modelPath) 35 | .preprocess( // Extra preprocessing steps, applied sequentially (per-tile) 36 | ImageOps.Channels.deconvolve(stains), // Color deconvolution 37 | ImageOps.Channels.extract(0), // Extract the first stain (indexing starts at 0) 38 | ImageOps.Filters.median(2) // Apply a small median filter (optional!) 39 | ) 40 | .normalizePercentiles(1, 99) // Percentile normalization 41 | .threshold(0.5) // Probability (detection) threshold 42 | .pixelSize(0.5) // Resolution for detection 43 | .cellExpansion(5) // Expand nuclei to approximate cell boundaries 44 | .measureShape() // Add shape measurements 45 | .measureIntensity() // Add cell measurements (in all compartments) 46 | .build() 47 | 48 | // Define which objects will be used as the 'parents' for detection 49 | // Use QP.getAnnotationObjects() if you want to use all annotations, rather than selected objects 50 | def pathObjects = QP.getSelectedObjects() 51 | 52 | // Run detection for the selected objects 53 | if (pathObjects.isEmpty()) { 54 | QP.getLogger().error("No parent objects are selected!") 55 | return 56 | } 57 | stardist.detectObjects(imageData, pathObjects) 58 | stardist.close() // This can help clean up & regain memory 59 | println('Done!') -------------------------------------------------------------------------------- /src/main/resources/scripts/StarDistFluorescence.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * This script provides a general template for cell detection using StarDist in QuPath. 3 | * This example assumes you have fluorescence image, which has a channel called 'DAPI' 4 | * showing nuclei. 5 | * 6 | * If you use this in published work, please remember to cite *both*: 7 | * - the original StarDist paper (https://doi.org/10.48550/arXiv.1806.03535) 8 | * - the original QuPath paper (https://doi.org/10.1038/s41598-017-17204-5) 9 | * 10 | * There are lots of options to customize the detection - this script shows some 11 | * of the main ones. 
Check out other scripts and the QuPath docs for more info. 12 | */ 13 | 14 | import qupath.ext.stardist.StarDist2D 15 | import qupath.lib.scripting.QP 16 | 17 | // IMPORTANT! Replace this with the path to your StarDist model 18 | // that takes a single channel as input (e.g. dsb2018_heavy_augment.pb) 19 | // You can find some at https://github.com/qupath/models 20 | // (Check credit & reuse info before downloading) 21 | def modelPath = "/path/to/model.pb" 22 | 23 | // Customize how the StarDist detection should be applied 24 | // Here some reasonable default options are specified 25 | def stardist = StarDist2D 26 | .builder(modelPath) 27 | .channels('DAPI') // Extract channel called 'DAPI' 28 | .normalizePercentiles(1, 99) // Percentile normalization 29 | .threshold(0.5) // Probability (detection) threshold 30 | .pixelSize(0.5) // Resolution for detection 31 | .cellExpansion(5) // Expand nuclei to approximate cell boundaries 32 | .measureShape() // Add shape measurements 33 | .measureIntensity() // Add cell measurements (in all compartments) 34 | .build() 35 | 36 | // Define which objects will be used as the 'parents' for detection 37 | // Use QP.getAnnotationObjects() if you want to use all annotations, rather than selected objects 38 | def pathObjects = QP.getSelectedObjects() 39 | 40 | // Run detection for the selected objects 41 | def imageData = QP.getCurrentImageData() 42 | if (pathObjects.isEmpty()) { 43 | QP.getLogger().error("No parent objects are selected!") 44 | return 45 | } 46 | stardist.detectObjects(imageData, pathObjects) 47 | stardist.close() // This can help clean up & regain memory 48 | println('Done!') -------------------------------------------------------------------------------- /src/main/resources/scripts/StarDistHE.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * This script provides a general template for nucleus detection using StarDist in QuPath. 3 | * This example assumes you have an RGB color image, e.g. a brightfield H&E slide. 4 | * 5 | * If you use this in published work, please remember to cite *both*: 6 | * - the original StarDist paper (https://doi.org/10.48550/arXiv.1806.03535) 7 | * - the original QuPath paper (https://doi.org/10.1038/s41598-017-17204-5) 8 | * 9 | * There are lots of options to customize the detection - this script shows some 10 | * of the main ones. Check out other scripts and the QuPath docs for more info. 11 | */ 12 | 13 | import qupath.ext.stardist.StarDist2D 14 | import qupath.lib.scripting.QP 15 | 16 | // IMPORTANT! Replace this with the path to your StarDist model 17 | // that takes 3 channel RGB as input (e.g. 
he_heavy_augment.pb) 18 | // You can find some at https://github.com/qupath/models 19 | // (Check credit & reuse info before downloading) 20 | def modelPath = "/path/to/model.pb" 21 | 22 | // Customize how the StarDist detection should be applied 23 | // Here some reasonable default options are specified 24 | def stardist = StarDist2D 25 | .builder(modelPath) 26 | .normalizePercentiles(1, 99) // Percentile normalization 27 | .threshold(0.5) // Probability (detection) threshold 28 | .pixelSize(0.5) // Resolution for detection 29 | .measureShape() // Add shape measurements 30 | .measureIntensity() // Add nucleus measurements 31 | .build() 32 | 33 | // Define which objects will be used as the 'parents' for detection 34 | // Use QP.getAnnotationObjects() if you want to use all annotations, rather than selected objects 35 | def pathObjects = QP.getSelectedObjects() 36 | 37 | // Run detection for the selected objects 38 | def imageData = QP.getCurrentImageData() 39 | if (pathObjects.isEmpty()) { 40 | QP.getLogger().error("No parent objects are selected!") 41 | return 42 | } 43 | stardist.detectObjects(imageData, pathObjects) 44 | stardist.close() // This can help clean up & regain memory 45 | println('Done!') -------------------------------------------------------------------------------- /src/main/resources/scripts/StarDistTemplate.groovy: -------------------------------------------------------------------------------- 1 | /** 2 | * This script provides a general template for cell detection using StarDist in QuPath. 3 | * 4 | * If you use this in published work, please remember to cite *both*: 5 | * - the original StarDist paper (https://doi.org/10.48550/arXiv.1806.03535) 6 | * - the original QuPath paper (https://doi.org/10.1038/s41598-017-17204-5) 7 | * 8 | * There are lots of options to customize the detection - and this script shows most of them. 9 | * 10 | * Please read what each option means, then remove the ones you don't want - 11 | * and adjust the ones that you care about. 12 | */ 13 | 14 | import qupath.ext.stardist.StarDist2D 15 | import qupath.lib.scripting.QP 16 | 17 | // IMPORTANT! Replace this with the path to your StarDist model 18 | // You can find some at https://github.com/qupath/models 19 | def modelPath = "/path/to/model.pb" 20 | 21 | // Customize how the StarDist detection should be applied. 22 | // This uses a 'builder' to make it easier to add lots of options. 23 | // IMPORTANT! 
You probably don't need all these - 24 | // read the descriptions & remove the lines you don't want 25 | def stardist = StarDist2D 26 | .builder(modelPath) 27 | .preprocessGlobal( // Apply normalization, calculating values across the whole image 28 | StarDist2D.imageNormalizationBuilder() 29 | .maxDimension(4096) // Figure out how much to downsample large images to make sure the width & height are <= this value 30 | // .downsample(1) // Optional alternative to maxDimension to use the full-resolution image for normalization 31 | // (this is a good idea for small images, but a very bad idea for large images) 32 | .percentiles(0, 99.8) // Calculate image percentiles to use for normalization 33 | .build() 34 | ) 35 | // .channels('DAPI') // Select detection channel (usually useful for fluorescence, not needed for RGB); 36 | // the channel can be selected by name or index/number (where 0 is the first channel) 37 | .threshold(0.5) // Probability (detection) threshold 38 | .pixelSize(0.5) // Resolution for detection 39 | .tileSize(1024) // Specify width & height of the tile used for prediction 40 | .cellExpansion(5.0) // Approximate cells based upon nucleus expansion 41 | .cellConstrainScale(1.5) // Constrain cell expansion using nucleus size 42 | .ignoreCellOverlaps(false) // Set to true if you don't care if cells expand into one another 43 | .measureShape() // Add shape measurements 44 | .measureIntensity() // Add cell measurements (in all compartments) 45 | .includeProbability(true) // Add probability as a measurement (enables later filtering) 46 | .nThreads(4) // Limit the number of threads used for (possibly parallel) processing 47 | .simplify(1) // Control how polygons are 'simplified' to remove unnecessary vertices 48 | .doLog() // Use this to log a bit more information while running the script 49 | .createAnnotations() // Generate annotation objects using StarDist, rather than detection objects 50 | .constrainToParent(false) // Prevent nuclei/cells expanding beyond any parent annotations (default is true) 51 | .classify("Tumor") // Automatically classify all created objects as 'Tumor' 52 | .build() 53 | 54 | // Define which objects will be used as the 'parents' for detection 55 | // Use QP.getAnnotationObjects() if you want to use all annotations, rather than selected objects 56 | def pathObjects = QP.getSelectedObjects() 57 | 58 | // Run detection for the selected objects 59 | def imageData = QP.getCurrentImageData() 60 | if (pathObjects.isEmpty()) { 61 | QP.getLogger().error("No parent objects are selected!") 62 | return 63 | } 64 | stardist.detectObjects(imageData, pathObjects) 65 | stardist.close() // This can help clean up & regain memory 66 | println('Done!') -------------------------------------------------------------------------------- /src/test/java/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/qupath/qupath-extension-stardist/02421060e699574ab83abc502cc60e18de0cac24/src/test/java/.gitkeep --------------------------------------------------------------------------------