├── .github ├── dependabot.yml └── workflows │ ├── build-docker-images.yml │ ├── build-windows-executable-app-with-pyinstaller.yaml │ ├── build-windows-executable-app.yaml │ ├── ci.yml │ ├── pylint.yml │ ├── test-win-exe-w-embed-py.yaml │ ├── test-win-exe-w-pyinstaller.yaml │ └── workflow-tests.yml ├── .gitignore ├── .streamlit ├── config.toml └── credentials.toml ├── Dockerfile ├── Dockerfile_simple ├── LICENSE ├── README.md ├── app.py ├── assets ├── OpenMS.png ├── openms.ico ├── openms_license.rtf ├── openms_transparent_bg_logo.svg └── pyopenms_transparent_background.png ├── clean-up-workspaces.py ├── content ├── documentation.py ├── download_section.py ├── file_upload.py ├── quickstart.py ├── raw_data_viewer.py ├── run_example_workflow.py ├── run_subprocess.py ├── simple_workflow.py ├── topp_workflow_execution.py ├── topp_workflow_file_upload.py ├── topp_workflow_parameter.py └── topp_workflow_results.py ├── default-parameters.json ├── docker-compose.yml ├── docs ├── build_app.md ├── deployment.md ├── installation.md ├── toppframework.py ├── user_guide.md ├── win_exe_with_embed_py.md └── win_exe_with_pyinstaller.md ├── example-data └── mzML │ ├── Blank.mzML │ ├── Control.mzML │ ├── Pool.mzML │ └── Treatment.mzML ├── gdpr_consent ├── README.md ├── dist │ └── bundle.js ├── index.html ├── package-lock.json ├── package.json ├── src │ └── main.ts ├── tsconfig.json └── webpack.config.js ├── hooks ├── hook-analytics.py └── hook-streamlit.py ├── requirements.txt ├── run_app.py ├── run_app_temp.spec ├── settings.json ├── src ├── Workflow.py ├── common │ ├── captcha_.py │ └── common.py ├── fileupload.py ├── mzmlfileworkflow.py ├── python-tools │ ├── example.py │ └── export_consensus_feature_df.py ├── run_subprocess.py ├── simpleworkflow.py ├── view.py └── workflow │ ├── .gitignore │ ├── CommandExecutor.py │ ├── FileManager.py │ ├── Logger.py │ ├── ParameterManager.py │ ├── StreamlitUI.py │ ├── WorkflowManager.py │ └── __init__.py ├── test.py ├── test_gui.py └── tests ├── 
test_run_subprocess.py ├── test_simple_workflow.py └── test_topp_workflow_parameter.py /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" # Location of your pyproject.toml or requirements.txt 5 | schedule: 6 | interval: "weekly" # Checks for updates every week 7 | commit-message: 8 | prefix: "deps" # Prefix for pull request titles 9 | open-pull-requests-limit: 5 # Limit the number of open PRs at a time 10 | -------------------------------------------------------------------------------- /.github/workflows/build-docker-images.yml: -------------------------------------------------------------------------------- 1 | name: Docker Image CI 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | 11 | build-full-app: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v3 17 | - name: Build the full Docker image 18 | run: docker build . --file Dockerfile --tag streamlitapp:latest 19 | 20 | build-simple-app: 21 | 22 | runs-on: ubuntu-latest 23 | 24 | steps: 25 | - uses: actions/checkout@v3 26 | - name: Build the Docker image (pyOpenMS only) 27 | run: docker build . 
--file Dockerfile_simple --tag streamlitapp-simple:latest -------------------------------------------------------------------------------- /.github/workflows/build-windows-executable-app-with-pyinstaller.yaml: -------------------------------------------------------------------------------- 1 | name: Build executable for Windows with pyinstaller 2 | on: 3 | workflow_dispatch: 4 | env: 5 | OPENMS_VERSION: 3.2.0 6 | # Define needed TOPP tools here 7 | TOPP_TOOLS: "FeatureFinderMetabo MetaboliteAdductDecharger SiriusExport" 8 | 9 | jobs: 10 | build-openms: 11 | runs-on: windows-latest 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v3 16 | with: 17 | repository: OpenMS/OpenMS 18 | ref: release/${{ env.OPENMS_VERSION }} 19 | path: 'OpenMS' 20 | 21 | # Temporary fix - until seqan is back online or new OpenMS release (3.4) 22 | - name: Get latest cibuild.cmake 23 | working-directory: OpenMS 24 | run: | 25 | git config user.name "GitHub Actions" 26 | git config user.email "actions@github.com" 27 | git fetch origin develop 28 | git checkout origin/develop -- tools/ci/cibuild.cmake 29 | git checkout origin/develop -- tools/ci/citest.cmake 30 | git checkout origin/develop -- tools/ci/cipackage.cmake 31 | 32 | - name: Install Qt 33 | uses: jurplel/install-qt-action@v3 34 | with: 35 | version: '5.15.2' # 5.12.7 is broken https://bugreports.qt.io/browse/QTBUG-81715, > 5.15.2 is not available on official archives (https://github.com/miurahr/aqtinstall/issues/636) 36 | host: 'windows' # default: win64_msvc2017_64 37 | target: 'desktop' 38 | install-deps: 'true' 39 | cache: 'false' 40 | aqtversion: '==3.1.*' 41 | 42 | # https://github.com/marketplace/actions/visual-studio-shell 43 | - name: Set up Visual Studio shell 44 | uses: egor-tensin/vs-shell@v2 45 | with: 46 | arch: x64 47 | 48 | - name: Setup build tools 49 | shell: bash 50 | run: | 51 | choco install ccache ninja cmake 52 | ## GH CLI "SHOULD BE" installed. Sometimes I had to manually install nonetheless. 
Super weird. 53 | # https://github.com/actions/runner-images/blob/main/images/win/scripts/Installers/Install-GitHub-CLI.ps1 54 | echo "C:/Program Files (x86)/GitHub CLI" >> $GITHUB_PATH 55 | 56 | - name: Extract branch/PR infos 57 | shell: bash 58 | run: | 59 | cd OpenMS 60 | RUN_NAME_LOCAL=$(echo ${GITHUB_REF#refs/heads/} | tr / -) 61 | echo "RUN_NAME=${RUN_NAME_LOCAL}" >> $GITHUB_ENV 62 | echo "BASE_REF=$(gh pr view --json baseRefName -q .baseRefName || echo ${RUN_NAME_LOCAL})" >> $GITHUB_ENV 63 | id: extract_branch 64 | env: 65 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 66 | 67 | - name: Cache contrib 68 | id: cache-contrib-win 69 | uses: actions/cache@v3 70 | with: 71 | path: ${{ github.workspace }}/OpenMS/contrib 72 | key: ${{ runner.os }}-contrib3 73 | 74 | - name: Load contrib build 75 | if: steps.cache-contrib-win.outputs.cache-hit != 'true' 76 | env: 77 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 78 | run: | 79 | cd OpenMS/contrib 80 | # Download the file using the URL fetched from GitHub 81 | gh release download -R OpenMS/contrib --pattern 'contrib_build-Windows.tar.gz' 82 | # Extract the archive 83 | 7z x -so contrib_build-Windows.tar.gz | 7z x -si -ttar 84 | rm contrib_build-Windows.tar.gz 85 | ls 86 | 87 | - name: Setup ccache cache 88 | uses: actions/cache@v3 89 | with: 90 | path: .ccache 91 | key: ${{ runner.os }}-ccache-${{ env.RUN_NAME }}-${{ github.run_number }} 92 | # Restoring: From current branch, otherwise from base branch, otherwise from any branch. 93 | restore-keys: | 94 | ${{ runner.os }}-ccache-${{ env.RUN_NAME }} 95 | ${{ runner.os }}-ccache-${{ env.BASE_REF }} 96 | ${{ runner.os }}-ccache- 97 | 98 | - name: Add THIRDPARTY 99 | shell: bash 100 | run: | 101 | # initialize THIRDPARTY 102 | cd OpenMS 103 | git submodule update --init THIRDPARTY 104 | cd .. 
105 | # add third-party binaries to PATH 106 | # use flat THIRDPARTY structure 107 | mkdir -p _thirdparty 108 | cp -R OpenMS/THIRDPARTY/Windows/64bit/* _thirdparty/ 109 | cp -R OpenMS/THIRDPARTY/All/* _thirdparty/ 110 | # add third-party binaries to PATH 111 | for thirdpartytool in ${{ github.workspace }}/_thirdparty/* 112 | do 113 | echo $thirdpartytool >> $GITHUB_PATH 114 | done 115 | 116 | - name: Build Windows 117 | shell: bash 118 | run: | 119 | mkdir $GITHUB_WORKSPACE/OpenMS/bld/ 120 | ctest --output-on-failure -V -S $GITHUB_WORKSPACE/OpenMS/tools/ci/cibuild.cmake 121 | env: 122 | #OS_PREFIX_PATH: "${{ env.Qt5_DIR }}/lib/cmake;${{ env.Qt5_DIR }}" 123 | CONTRIB_BUILD_DIRECTORY: "${{ github.workspace }}/OpenMS/contrib" 124 | CI_PROVIDER: "GitHub-Actions" 125 | CMAKE_GENERATOR: "Ninja" 126 | SOURCE_DIRECTORY: "${{ github.workspace }}/OpenMS" 127 | BUILD_NAME: "${{ env.RUN_NAME }}-Win64-class-topp-${{ github.run_number }}" 128 | ENABLE_STYLE_TESTING: "OFF" 129 | ENABLE_TOPP_TESTING: "ON" 130 | ENABLE_CLASS_TESTING: "ON" 131 | WITH_GUI: "ON" 132 | ADDRESS_SANITIZER: "Off" 133 | BUILD_TYPE: "Release" 134 | OPENMP: "Off" 135 | USE_STATIC_BOOST: "On" 136 | # BUILD_FLAGS: "-p:CL_MPCount=2" # For VS Generator and MSBuild 137 | BUILD_FLAGS: "-j2" # Ninja will otherwise use all cores (doesn't go well in GHA) 138 | CMAKE_CCACHE_EXE: "ccache" 139 | CCACHE_BASEDIR: ${{ github.workspace }} 140 | CCACHE_DIR: ${{ github.workspace }}/.ccache 141 | CCACHE_COMPRESS: true 142 | CCACHE_COMPRESSLEVEL: 12 143 | CCACHE_MAXSIZE: 400M 144 | 145 | - name: Package 146 | shell: bash 147 | run: | 148 | ctest --output-on-failure -V -S $GITHUB_WORKSPACE/OpenMS/tools/ci/cipackage.cmake 149 | env: 150 | SOURCE_DIRECTORY: "${{ github.workspace }}/OpenMS" 151 | PACKAGE_TYPE: zip 152 | SEARCH_ENGINES_DIRECTORY: "${{ github.workspace }}/_thirdparty" 153 | CI_PROVIDER: "GitHub-Actions" 154 | CPACK_PACKAGE_FILE_NAME: "openms-package" 155 | 156 | - name: Test Windows 157 | shell: bash 158 | run: 
$LAUNCHER ctest --output-on-failure -V -S $GITHUB_WORKSPACE/OpenMS/tools/ci/citest.cmake 159 | env: 160 | LAUNCHER: "" 161 | SOURCE_DIRECTORY: "${{ github.workspace }}/OpenMS" 162 | CI_PROVIDER: "GitHub-Actions" 163 | BUILD_NAME: "${{ env.RUN_NAME }}-Win64-class-topp-${{ github.run_number }}" 164 | 165 | - name: Upload package as artifact 166 | uses: actions/upload-artifact@v4 167 | with: 168 | name: openms-package 169 | path: ${{ github.workspace }}/OpenMS/bld/*.zip 170 | 171 | build-executable: 172 | runs-on: windows-latest 173 | needs: build-openms 174 | 175 | env: 176 | PYTHON_VERSION: 3.11.0 177 | 178 | steps: 179 | - name: Checkout 180 | uses: actions/checkout@v3 181 | 182 | - name: Download package as artifact 183 | uses: actions/download-artifact@v4 184 | with: 185 | name: openms-package 186 | path: openms-package 187 | 188 | - name: Extract bin and share from package 189 | run: | 190 | cd openms-package 191 | unzip "*.zip" -d . 192 | cp -r openms-package/bin ../openms-bin 193 | cp -r openms-package/share ../share 194 | 195 | - name: Set up Python 196 | uses: actions/setup-python@v4 197 | with: 198 | python-version: ${{ env.PYTHON_VERSION }} 199 | 200 | - name: Setup virtual environment 201 | shell: cmd 202 | run: | 203 | python -m venv myenv 204 | 205 | call myenv\Scripts\activate.bat 206 | 207 | pip install -r requirements.txt 208 | 209 | pip install pyinstaller 210 | 211 | - name: Write function to cli.py 212 | shell: bash 213 | run: | 214 | head -n -2 D:/a/streamlit-template/streamlit-template/myenv/Lib/site-packages/streamlit/web/cli.py > temp_cli.py 215 | 216 | cat << EOF >> temp_cli.py 217 | def _main_run_clExplicit(file, command_line, args=[], flag_options=[]): 218 | main._is_running_with_streamlit = True 219 | bootstrap.run(file, command_line, args, flag_options) 220 | 221 | 222 | if __name__ == "__main__": 223 | main() 224 | EOF 225 | 226 | mv temp_cli.py D:/a/streamlit-template/streamlit-template/myenv/Lib/site-packages/streamlit/web/cli.py 227 | 
228 | - name: Compile app with pyinstaller 229 | shell: cmd 230 | run: | 231 | call myenv\Scripts\activate.bat 232 | pyinstaller --onefile --additional-hooks-dir ./hooks run_app.py --clean 233 | 234 | - name: Set to offline deployment 235 | run: | 236 | $content = Get-Content -Raw settings.json | ConvertFrom-Json 237 | $content.online_deployment = $false 238 | $content | ConvertTo-Json -Depth 100 | Set-Content settings.json 239 | 240 | - name: Copy everything to dist directory 241 | run: | 242 | mkdir streamlit_exe 243 | 244 | mv python-${{ env.PYTHON_VERSION }} streamlit_exe 245 | mv run_app.bat streamlit_exe 246 | cp -r src streamlit_exe 247 | cp -r content streamlit_exe 248 | cp -r docs streamlit_exe 249 | cp -r assets streamlit_exe 250 | cp -r example-data streamlit_exe 251 | cp openms-bin/*.dll streamlit_exe 252 | cp -r .streamlit streamlit_exe 253 | cp -r share streamlit_exe/share 254 | cp app.py streamlit_exe 255 | cp settings.json streamlit_exe 256 | cp default-parameters.json streamlit_exe 257 | 258 | $files = $env:TOPP_TOOLS -split ' ' 259 | foreach ($file in $files) { 260 | Copy-Item "openms-bin/${file}.exe" -Destination "streamlit_exe/${file}.exe" 261 | } 262 | 263 | - name: Modify .spec file 264 | shell: bash 265 | run: | 266 | cp run_app_temp.spec run_app.spec 267 | 268 | - name: Make executable 269 | shell: cmd 270 | run: | 271 | call myenv\Scripts\activate.bat 272 | pyinstaller run_app.spec --clean 273 | 274 | - name: Delete OpenMS package artifact 275 | uses: geekyeggo/delete-artifact@v5 276 | with: 277 | name: openms-package 278 | 279 | - name: Upload artifact 280 | uses: actions/upload-artifact@v4 281 | with: 282 | name: OpenMS-App 283 | path: dist 284 | 285 | - name: Upload Artifact as Release Asset 286 | if: github.event_name == 'release' 287 | uses: softprops/action-gh-release@v2 288 | with: 289 | files: OpenMS-App.zip 290 | env: 291 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 
-------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: continuous-integration 2 | 3 | on: [push] 4 | 5 | jobs: 6 | test: 7 | runs-on: ${{ matrix.os }} 8 | strategy: 9 | matrix: 10 | os: [ubuntu-latest] 11 | # Requirements file generated with python=3.11 12 | python-version: ["3.11"] 13 | steps: 14 | - uses: actions/checkout@v4 15 | - uses: conda-incubator/setup-miniconda@v3 16 | with: 17 | activate-environment: openms 18 | python-version: ${{ matrix.python-version }} 19 | channels: defaults,bioconda,conda-forge 20 | 21 | - name: Install OpenMS 22 | run: | 23 | conda install openms -y 24 | - name: Install dependencies 25 | run: | 26 | python -m pip install --upgrade pip 27 | pip install -r requirements.txt # test with requirements file so can easily bump with dependabot 28 | pip install pytest 29 | - name: Test 30 | run: | 31 | python -m pytest test_gui.py tests/ 32 | 33 | -------------------------------------------------------------------------------- /.github/workflows/pylint.yml: -------------------------------------------------------------------------------- 1 | name: Pylint 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up Python 15 | uses: actions/setup-python@v3 16 | with: 17 | python-version: "3.10" 18 | - name: Install dependencies 19 | run: | 20 | python -m pip install --upgrade pip 21 | pip install pylint 22 | - name: Analysing the code with pylint 23 | run: | 24 | pylint $(git ls-files '*.py') --disable=C0103,C0114,C0301,C0411,W0212,W0631,W0602,W1514,W2402,E0401,E1101,F0001,R1732 --errors-only 25 | -------------------------------------------------------------------------------- /.github/workflows/test-win-exe-w-embed-py.yaml: 
-------------------------------------------------------------------------------- 1 | name: Test streamlit executable for Windows with embeddable python 2 | on: 3 | push: 4 | branches: [ "main" ] 5 | workflow_dispatch: 6 | 7 | jobs: 8 | build-win-executable-with-embeddable-python: 9 | runs-on: windows-latest 10 | 11 | env: 12 | PYTHON_VERSION: 3.11.9 13 | APP_UpgradeCode: 4abc2e23-3ba5-40e4-95c9-09e6cb8ecaeb 14 | APP_NAME: OpenMS-StreamlitTemplateApp-Test 15 | 16 | steps: 17 | - name: Checkout code 18 | uses: actions/checkout@v4 19 | 20 | - name: Download python embeddable version 21 | run: | 22 | mkdir python-${{ env.PYTHON_VERSION }} 23 | curl -O https://www.python.org/ftp/python/${{ env.PYTHON_VERSION }}/python-${{ env.PYTHON_VERSION }}-embed-amd64.zip 24 | unzip python-${{ env.PYTHON_VERSION }}-embed-amd64.zip -d python-${{ env.PYTHON_VERSION }} 25 | rm python-${{ env.PYTHON_VERSION }}-embed-amd64.zip 26 | 27 | - name: Install pip 28 | run: | 29 | curl -O https://bootstrap.pypa.io/get-pip.py 30 | ./python-${{ env.PYTHON_VERSION }}/python get-pip.py --no-warn-script-location 31 | rm get-pip.py 32 | 33 | - name: Uncomment 'import site' in python311._pth file 34 | run: | 35 | sed -i 's/#import site/import site/' python-${{ env.PYTHON_VERSION }}/python311._pth 36 | 37 | - name: Print content of python311._pth file 38 | run: | 39 | cat python-${{ env.PYTHON_VERSION }}/python311._pth 40 | 41 | - name: Install Required Packages 42 | run: .\python-${{ env.PYTHON_VERSION }}\python -m pip install -r requirements.txt --no-warn-script-location 43 | 44 | - name: Create .bat file 45 | run: | 46 | echo " start /min .\python-${{ env.PYTHON_VERSION }}\python -m streamlit run app.py local" > ${{ env.APP_NAME }}.bat 47 | 48 | - name: Create All-in-one executable folder 49 | run: | 50 | mkdir streamlit_exe 51 | mv python-${{ env.PYTHON_VERSION }} streamlit_exe 52 | cp -r src streamlit_exe 53 | cp -r content streamlit_exe 54 | cp -r docs streamlit_exe 55 | cp -r assets streamlit_exe 
56 | cp -r example-data streamlit_exe 57 | cp -r .streamlit streamlit_exe 58 | cp app.py streamlit_exe 59 | cp settings.json streamlit_exe 60 | cp default-parameters.json streamlit_exe 61 | cp ${{ env.APP_NAME }}.bat streamlit_exe 62 | 63 | - name: Generate Readme.txt 64 | shell: bash 65 | run: | 66 | cat << EOF > streamlit_exe/Readme.txt 67 | Welcome to ${{ env.APP_NAME }} app! 68 | 69 | To launch the application: 70 | 1. Navigate to the installation directory. 71 | 2. Double-click on the file: ${{ env.APP_NAME }}.bat or ${{ env.APP_NAME }} shortcut. 72 | 73 | Additional Information: 74 | - If multiple Streamlit apps are running, you can change the port in the .streamlit/config.toml file. 75 | Example: 76 | [server] 77 | port = 8502 78 | 79 | Reach out to us: 80 | - Join our Discord server for support and community discussions: https://discord.com/invite/4TAGhqJ7s5 81 | - Contribute or stay updated with the latest OpenMS web app developments on GitHub: https://github.com/OpenMS/streamlit-template 82 | - Visit our website for more information: https://openms.de/ 83 | 84 | Thank you for using ${{ env.APP_NAME }}! 
85 | EOF 86 | 87 | - name: Install WiX Toolset 88 | run: | 89 | curl -LO https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip 90 | unzip wix311-binaries.zip -d wix 91 | rm wix311-binaries.zip 92 | 93 | - name: Build .wxs for streamlit_exe folder 94 | run: | 95 | ./wix/heat.exe dir streamlit_exe -gg -sfrag -sreg -srd -template component -cg StreamlitExeFiles -dr AppSubFolder -out streamlit_exe_files.wxs 96 | 97 | - name: Generate VBScript file 98 | shell: bash 99 | run: | 100 | cat << EOF > ShowSuccessMessage.vbs 101 | MsgBox " The ${{ env.APP_NAME }} application is successfully installed.", vbInformation, "Installation Complete" 102 | EOF 103 | 104 | - name: Prepare SourceDir 105 | run: | 106 | mkdir SourceDir 107 | mv streamlit_exe/* SourceDir 108 | cp ShowSuccessMessage.vbs SourceDir 109 | cp assets/openms_license.rtf SourceDir 110 | # Logo of app 111 | cp assets/openms.ico SourceDir 112 | 113 | - name: Generate WiX XML file 114 | shell: bash 115 | run: | 116 | cat << EOF > streamlit_exe.wxs 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | NOT Installed 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | EOF 189 | 190 | - name: Build .wixobj file with candle.exe 191 | run: | 192 | ./wix/candle.exe streamlit_exe.wxs streamlit_exe_files.wxs 193 | 194 | - name: Link .wixobj file into .msi with light.exe 195 | run: | 196 | ./wix/light.exe -ext WixUIExtension -sice:ICE60 -o ${{ env.APP_NAME }}.msi streamlit_exe_files.wixobj streamlit_exe.wixobj 197 | 198 | - name: Archive build artifacts 199 | uses: actions/upload-artifact@v4 200 | with: 201 | name: OpenMS-App-Test 202 | path: | 203 | ${{ env.APP_NAME }}.msi 
-------------------------------------------------------------------------------- /.github/workflows/test-win-exe-w-pyinstaller.yaml: -------------------------------------------------------------------------------- 1 | name: Test streamlit executable for Windows with pyinstaller 2 | on: 3 | workflow_dispatch: 4 | 5 | jobs: 6 | build-win-executable-with-pyinstaller: 7 | runs-on: windows-latest 8 | 9 | env: 10 | PYTHON_VERSION: 3.11.0 11 | 12 | steps: 13 | - name: Checkout 14 | uses: actions/checkout@v3 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: ${{ env.PYTHON_VERSION }} 20 | 21 | - name: Setup virtual environment 22 | shell: cmd 23 | run: | 24 | python -m venv myenv 25 | call myenv\Scripts\activate.bat 26 | pip install -r requirements.txt 27 | pip install pyinstaller 28 | 29 | - name: Check if myenv is activated 30 | shell: cmd 31 | run: | 32 | call myenv\Scripts\activate.bat 33 | if "%VIRTUAL_ENV%" == "" ( 34 | echo myenv is not activated 35 | ) else ( 36 | echo myenv is activated 37 | ) 38 | 39 | - name: Debug Environment 40 | shell: cmd 41 | run: | 42 | echo Current directory: %CD% 43 | echo Path to myenv: %CD%\myenv 44 | dir %CD%\myenv\Scripts 45 | 46 | - name: Write function to cli.py 47 | shell: bash 48 | run: | 49 | head -n -2 D:/a/streamlit-template/streamlit-template/myenv/Lib/site-packages/streamlit/web/cli.py > temp_cli.py 50 | 51 | cat << EOF >> temp_cli.py 52 | def _main_run_clExplicit(file, command_line, args=[], flag_options=[]): 53 | main._is_running_with_streamlit = True 54 | bootstrap.run(file, command_line, args, flag_options) 55 | 56 | 57 | if __name__ == "__main__": 58 | main() 59 | EOF 60 | 61 | mv temp_cli.py D:/a/streamlit-template/streamlit-template/myenv/Lib/site-packages/streamlit/web/cli.py 62 | 63 | - name: Compile app with pyinstaller 64 | shell: cmd 65 | run: | 66 | call myenv\Scripts\activate.bat 67 | pyinstaller --onefile --additional-hooks-dir ./hooks run_app.py --clean 68 | 69 
| - name: Copy everything to dist directory 70 | shell: bash 71 | run: | 72 | cp -r .streamlit dist/.streamlit 73 | cp -r content dist/content 74 | cp -r src dist/src 75 | cp -r example-data dist/example-data 76 | cp -r assets dist/assets 77 | cp app.py dist/ 78 | 79 | - name: Modify .spec file 80 | shell: bash 81 | run: | 82 | cp run_app_temp.spec run_app.spec 83 | 84 | - name: Make executable 85 | shell: cmd 86 | run: | 87 | call myenv\Scripts\activate.bat 88 | pyinstaller run_app.spec --clean 89 | 90 | - name: Copy artifacts 91 | shell: bash 92 | run: | 93 | mkdir artifacts 94 | cp -r dist artifacts/ 95 | cp -r build artifacts/ 96 | cp run_app.spec artifacts/ 97 | cp D:/a/streamlit-template/streamlit-template/myenv/Lib/site-packages/streamlit/web/cli.py artifacts/ 98 | 99 | - name: Upload artifacts 100 | uses: actions/upload-artifact@v3 101 | with: 102 | name: Outfolders 103 | path: artifacts 104 | -------------------------------------------------------------------------------- /.github/workflows/workflow-tests.yml: -------------------------------------------------------------------------------- 1 | name: Test workflow functions 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up Python 15 | uses: actions/setup-python@v3 16 | with: 17 | python-version: "3.10" 18 | - name: Install dependencies 19 | run: | 20 | python -m pip install --upgrade pip 21 | pip install -r requirements.txt 22 | pip install pytest 23 | - name: Running test cases 24 | run: | 25 | pytest test.py 26 | - name: Running GUI tests 27 | run: | 28 | pytest test_gui.py 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | workspaces-streamlit-template 2 | build 3 | myenv 4 | build.log 5 | run_app.spec 6 | 
clean-up-workspaces.log 7 | *-embed-amd64 8 | get-pip.py 9 | run_app.bat 10 | python* 11 | **/__pycache__/ 12 | gdpr_consent/node_modules/ 13 | *~ 14 | -------------------------------------------------------------------------------- /.streamlit/config.toml: -------------------------------------------------------------------------------- 1 | [browser] 2 | gatherUsageStats = false 3 | 4 | [global] 5 | developmentMode = false 6 | 7 | [server] 8 | maxUploadSize = 200 #MB 9 | port = 8501 # should be same as configured in deployment repo 10 | 11 | [theme] 12 | # The preset Streamlit theme that your custom theme inherits from. One of "light" or "dark". 13 | # base = 14 | 15 | # Primary accent color for interactive elements. 16 | primaryColor = "#29379b" 17 | 18 | # Background color for the main content area. 19 | # backgroundColor = 20 | 21 | # Background color used for the sidebar and most interactive widgets. 22 | # secondaryBackgroundColor = 23 | 24 | # Color used for almost all text. 25 | # textColor = 26 | 27 | # Font family for all text in the app, except code blocks. One of "sans serif", "serif", or "monospace". 28 | # font = 29 | -------------------------------------------------------------------------------- /.streamlit/credentials.toml: -------------------------------------------------------------------------------- 1 | [general] 2 | email = "" -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # This Dockerfile builds OpenMS, the TOPP tools, pyOpenMS and thidparty tools. 2 | # It also adds a basic streamlit server that serves a pyOpenMS-based app. 3 | # hints: 4 | # build image and give it a name (here: streamlitapp) with: docker build --no-cache -t streamlitapp:latest --build-arg GITHUB_TOKEN= . 
2>&1 | tee build.log 5 | # check if image was build: docker image ls 6 | # run container: docker run -p 8501:8501 streamlitappsimple:latest 7 | # debug container after build (comment out ENTRYPOINT) and run container with interactive /bin/bash shell 8 | # prune unused images/etc. to free disc space (e.g. might be needed on gitpod). Use with care.: docker system prune --all --force 9 | 10 | FROM ubuntu:22.04 AS setup-build-system 11 | ARG OPENMS_REPO=https://github.com/OpenMS/OpenMS.git 12 | ARG OPENMS_BRANCH=release/3.2.0 13 | ARG PORT=8501 14 | # GitHub token to download latest OpenMS executable for Windows from Github action artifact. 15 | ARG GITHUB_TOKEN 16 | ENV GH_TOKEN=${GITHUB_TOKEN} 17 | # Streamlit app Gihub user name (to download artifact from). 18 | ARG GITHUB_USER=OpenMS 19 | # Streamlit app Gihub repository name (to download artifact from). 20 | ARG GITHUB_REPO=streamlit-template 21 | 22 | USER root 23 | 24 | # Install required Ubuntu packages. 25 | RUN apt-get -y update 26 | RUN apt-get install -y --no-install-recommends --no-install-suggests g++ autoconf automake patch libtool make git gpg wget ca-certificates curl jq libgtk2.0-dev openjdk-8-jdk cron 27 | RUN update-ca-certificates 28 | RUN apt-get install -y --no-install-recommends --no-install-suggests libsvm-dev libeigen3-dev coinor-libcbc-dev libglpk-dev libzip-dev zlib1g-dev libxerces-c-dev libbz2-dev libomp-dev libhdf5-dev 29 | RUN apt-get install -y --no-install-recommends --no-install-suggests libboost-date-time1.74-dev \ 30 | libboost-iostreams1.74-dev \ 31 | libboost-regex1.74-dev \ 32 | libboost-math1.74-dev \ 33 | libboost-random1.74-dev 34 | RUN apt-get install -y --no-install-recommends --no-install-suggests qtbase5-dev libqt5svg5-dev libqt5opengl5-dev 35 | 36 | # Install Github CLI 37 | RUN (type -p wget >/dev/null || (apt-get update && apt-get install wget -y)) \ 38 | && mkdir -p -m 755 /etc/apt/keyrings \ 39 | && wget -qO- 
https://cli.github.com/packages/githubcli-archive-keyring.gpg | tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \ 40 | && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ 41 | && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ 42 | && apt-get update \ 43 | && apt-get install gh -y 44 | 45 | # Download and install miniforge. 46 | ENV PATH="/root/miniforge3/bin:${PATH}" 47 | RUN wget -q \ 48 | https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh \ 49 | && bash Miniforge3-Linux-x86_64.sh -b \ 50 | && rm -f Miniforge3-Linux-x86_64.sh 51 | RUN mamba --version 52 | 53 | # Setup mamba environment. 54 | RUN mamba create -n streamlit-env python=3.10 55 | RUN echo "mamba activate streamlit-env" >> ~/.bashrc 56 | SHELL ["/bin/bash", "--rcfile", "~/.bashrc"] 57 | SHELL ["mamba", "run", "-n", "streamlit-env", "/bin/bash", "-c"] 58 | 59 | # Install up-to-date cmake via mamba and packages for pyOpenMS build. 60 | RUN mamba install cmake 61 | RUN pip install --upgrade pip && python -m pip install -U setuptools nose 'cython<3.1' autowrap pandas numpy pytest 62 | 63 | # Clone OpenMS branch and the associcated contrib+thirdparties+pyOpenMS-doc submodules. 64 | RUN git clone --recursive --depth=1 -b ${OPENMS_BRANCH} --single-branch ${OPENMS_REPO} && cd /OpenMS 65 | 66 | # Pull Linux compatible third-party dependencies and store them in directory thirdparty. 
67 | WORKDIR /OpenMS 68 | RUN mkdir /thirdparty && \ 69 | git submodule update --init THIRDPARTY && \ 70 | cp -r THIRDPARTY/All/* /thirdparty && \ 71 | cp -r THIRDPARTY/Linux/64bit/* /thirdparty && \ 72 | chmod -R +x /thirdparty 73 | ENV PATH="/thirdparty/LuciPHOr2:/thirdparty/MSGFPlus:/thirdparty/Sirius:/thirdparty/ThermoRawFileParser:/thirdparty/Comet:/thirdparty/Fido:/thirdparty/MaRaCluster:/thirdparty/MyriMatch:/thirdparty/OMSSA:/thirdparty/Percolator:/thirdparty/SpectraST:/thirdparty/XTandem:/thirdparty/crux:${PATH}" 74 | 75 | # Build OpenMS and pyOpenMS. 76 | FROM setup-build-system AS compile-openms 77 | WORKDIR / 78 | 79 | # Set up build directory. 80 | RUN mkdir /openms-build 81 | WORKDIR /openms-build 82 | 83 | # Configure. 84 | RUN /bin/bash -c "cmake -DCMAKE_BUILD_TYPE='Release' -DCMAKE_PREFIX_PATH='/OpenMS/contrib-build/;/usr/;/usr/local' -DHAS_XSERVER=OFF -DBOOST_USE_STATIC=OFF -DPYOPENMS=ON ../OpenMS -DPY_MEMLEAK_DISABLE=On" 85 | 86 | # Build TOPP tools and clean up. 87 | RUN make -j4 TOPP 88 | RUN rm -rf src doc CMakeFiles 89 | 90 | # Build pyOpenMS wheels and install via pip. 91 | RUN make -j4 pyopenms 92 | WORKDIR /openms-build/pyOpenMS 93 | RUN pip install dist/*.whl 94 | 95 | # Install other dependencies (excluding pyopenms) 96 | COPY requirements.txt ./requirements.txt 97 | RUN grep -Ev '^pyopenms([=<>!~].*)?$' requirements.txt > requirements_cleaned.txt && mv requirements_cleaned.txt requirements.txt 98 | RUN pip install -r requirements.txt 99 | 100 | WORKDIR / 101 | RUN mkdir openms 102 | 103 | # Copy TOPP tools bin directory, add to PATH. 104 | RUN cp -r openms-build/bin /openms/bin 105 | ENV PATH="/openms/bin/:${PATH}" 106 | 107 | # Copy TOPP tools bin directory, add to PATH. 108 | RUN cp -r openms-build/lib /openms/lib 109 | ENV LD_LIBRARY_PATH="/openms/lib/:${LD_LIBRARY_PATH}" 110 | 111 | # Copy share folder, add to PATH, remove source directory. 
112 | RUN cp -r OpenMS/share/OpenMS /openms/share 113 | RUN rm -rf OpenMS 114 | ENV OPENMS_DATA_PATH="/openms/share/" 115 | 116 | # Remove build directory. 117 | RUN rm -rf openms-build 118 | 119 | # Prepare and run streamlit app. 120 | FROM compile-openms AS run-app 121 | # Create workdir and copy over all streamlit related files/folders. 122 | 123 | # note: specifying folder with slash as suffix and repeating the folder name seems important to preserve directory structure 124 | WORKDIR /app 125 | COPY assets/ /app/assets 126 | COPY content/ /app/content 127 | COPY docs/ /app/docs 128 | COPY example-data/ /app/example-data 129 | COPY gdpr_consent/ /app/gdpr_consent 130 | COPY hooks/ /app/hooks 131 | COPY src/ /app/src 132 | COPY app.py /app/app.py 133 | COPY settings.json /app/settings.json 134 | COPY default-parameters.json /app/default-parameters.json 135 | 136 | # For streamlit configuration 137 | COPY .streamlit/config.toml /app/.streamlit/config.toml 138 | COPY clean-up-workspaces.py /app/clean-up-workspaces.py 139 | 140 | # add cron job to the crontab 141 | RUN echo "0 3 * * * /root/miniforge3/envs/streamlit-env/bin/python /app/clean-up-workspaces.py >> /app/clean-up-workspaces.log 2>&1" | crontab - 142 | 143 | # create entrypoint script to start cron service and launch streamlit app 144 | RUN echo "#!/bin/bash" > /app/entrypoint.sh && \ 145 | echo "source /root/miniforge3/bin/activate streamlit-env" >> /app/entrypoint.sh && \ 146 | echo "service cron start" >> /app/entrypoint.sh && \ 147 | echo "streamlit run app.py" >> /app/entrypoint.sh 148 | # make the script executable 149 | RUN chmod +x /app/entrypoint.sh 150 | 151 | # Patch Analytics 152 | RUN mamba run -n streamlit-env python hooks/hook-analytics.py 153 | 154 | # Set Online Deployment 155 | RUN jq '.online_deployment = true' settings.json > tmp.json && mv tmp.json settings.json 156 | 157 | # Download latest OpenMS App executable as a ZIP file 158 | RUN if [ -n "$GH_TOKEN" ]; then \ 159 | echo 
"GH_TOKEN is set, proceeding to download the release asset..."; \ 160 | gh release download -R ${GITHUB_USER}/${GITHUB_REPO} -p "OpenMS-App.zip" -D /app; \ 161 | else \ 162 | echo "GH_TOKEN is not set, skipping the release asset download."; \ 163 | fi 164 | 165 | 166 | # Run app as container entrypoint. 167 | EXPOSE $PORT 168 | ENTRYPOINT ["/app/entrypoint.sh"] 169 | -------------------------------------------------------------------------------- /Dockerfile_simple: -------------------------------------------------------------------------------- 1 | # This Dockerfile creates a container with pyOpenMS 2 | # It also adds a basic streamlit server that serves a pyOpenMS-based app. 3 | # hints: 4 | # build image with: docker build -f Dockerfile_simple --no-cache -t streamlitapp:latest --build-arg GITHUB_TOKEN= . 2>&1 | tee build.log 5 | # check if image was built: docker image ls 6 | # run container: docker run -p 8501:8501 streamlitapp:latest 7 | # debug container after build (comment out ENTRYPOINT) and run container with interactive /bin/bash shell 8 | # prune unused images/etc. to free disc space (e.g. might be needed on gitpod). Use with care.: docker system prune --all --force 9 | 10 | FROM ubuntu:22.04 AS stage1 11 | ARG OPENMS_REPO=https://github.com/OpenMS/OpenMS.git 12 | ARG OPENMS_BRANCH=develop 13 | ARG PORT=8501 14 | # GitHub token to download latest OpenMS executable for Windows from GitHub Actions artifact. 15 | ARG GITHUB_TOKEN 16 | ENV GH_TOKEN=${GITHUB_TOKEN} 17 | # Streamlit app GitHub user name (to download artifact from). 18 | ARG GITHUB_USER=OpenMS 19 | # Streamlit app GitHub repository name (to download artifact from). 
20 | ARG GITHUB_REPO=streamlit-template 21 | 22 | 23 | # Step 1: set up a sane build system 24 | USER root 25 | 26 | RUN apt-get -y update 27 | # note: streamlit in docker needs libgtk2.0-dev (see https://yugdamor.medium.com/importerror-libgthread-2-0-so-0-cannot-open-shared-object-file-no-such-file-or-directory-895b94a7827b) 28 | RUN apt-get install -y --no-install-recommends --no-install-suggests wget ca-certificates libgtk2.0-dev curl jq cron 29 | RUN update-ca-certificates 30 | 31 | # Install Github CLI 32 | RUN (type -p wget >/dev/null || (apt-get update && apt-get install wget -y)) \ 33 | && mkdir -p -m 755 /etc/apt/keyrings \ 34 | && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg | tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \ 35 | && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ 36 | && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ 37 | && apt-get update \ 38 | && apt-get install gh -y 39 | 40 | # Download and install miniforge. 41 | ENV PATH="/root/miniforge3/bin:${PATH}" 42 | RUN wget -q \ 43 | https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh \ 44 | && bash Miniforge3-Linux-x86_64.sh -b \ 45 | && rm -f Miniforge3-Linux-x86_64.sh 46 | RUN mamba --version 47 | 48 | # Setup mamba environment. 
49 | RUN mamba create -n streamlit-env python=3.10 50 | RUN echo "mamba activate streamlit-env" >> ~/.bashrc 51 | SHELL ["/bin/bash", "--rcfile", "~/.bashrc"] 52 | SHELL ["mamba", "run", "-n", "streamlit-env", "/bin/bash", "-c"] 53 | 54 | #################################### install streamlit 55 | # install packages 56 | COPY requirements.txt requirements.txt 57 | RUN mamba install pip 58 | RUN python -m pip install --upgrade pip 59 | RUN python -m pip install -r requirements.txt 60 | 61 | 62 | # create workdir and copy over all streamlit related files/folders 63 | WORKDIR /app 64 | # note: specifying folder with slash as suffix and repeating the folder name seems important to preserve directory structure 65 | WORKDIR /app 66 | COPY assets/ /app/assets 67 | COPY content/ /app/content 68 | COPY docs/ /app/docs 69 | COPY example-data/ /app/example-data 70 | COPY gdpr_consent/ /app/gdpr_consent 71 | COPY hooks/ /app/hooks 72 | COPY src/ /app/src 73 | COPY app.py /app/app.py 74 | COPY settings.json /app/settings.json 75 | COPY default-parameters.json /app/default-parameters.json 76 | 77 | # For streamlit configuration 78 | COPY .streamlit/config.toml /app/.streamlit/config.toml 79 | 80 | COPY clean-up-workspaces.py /app/clean-up-workspaces.py 81 | 82 | # add cron job to the crontab 83 | RUN echo "0 3 * * * /root/miniforge3/envs/streamlit-env/bin/python /app/clean-up-workspaces.py >> /app/clean-up-workspaces.log 2>&1" | crontab - 84 | 85 | # create entrypoint script to start cron service and launch streamlit app 86 | RUN echo "#!/bin/bash" > /app/entrypoint.sh 87 | RUN echo "source /root/miniforge3/bin/activate streamlit-env" >> /app/entrypoint.sh && \ 88 | echo "service cron start" >> /app/entrypoint.sh && \ 89 | echo "streamlit run app.py" >> /app/entrypoint.sh 90 | # make the script executable 91 | RUN chmod +x /app/entrypoint.sh 92 | 93 | # Patch Analytics 94 | RUN mamba run -n streamlit-env python hooks/hook-analytics.py 95 | 96 | # Set Online Deployment 97 | RUN 
jq '.online_deployment = true' settings.json > tmp.json && mv tmp.json settings.json 98 | 99 | # Download latest OpenMS App executable as a ZIP file 100 | RUN if [ -n "$GH_TOKEN" ]; then \ 101 | echo "GH_TOKEN is set, proceeding to download the release asset..."; \ 102 | gh release download -R ${GITHUB_USER}/${GITHUB_REPO} -p "OpenMS-App.zip" -D /app; \ 103 | else \ 104 | echo "GH_TOKEN is not set, skipping the release asset download."; \ 105 | fi 106 | 107 | # make sure that mamba environment is used 108 | SHELL ["mamba", "run", "-n", "streamlit-env", "/bin/bash", "-c"] 109 | 110 | EXPOSE $PORT 111 | ENTRYPOINT ["/app/entrypoint.sh"] 112 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------- 2 | OpenMS -- Open-Source Mass Spectrometry 3 | -------------------------------------------------------------------------- 4 | Copyright OpenMS Inc. -- Eberhard Karls University Tuebingen, 5 | ETH Zurich, and Freie Universitaet Berlin 2002-present. 6 | 7 | This software is released under a three-clause BSD license: 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | * Neither the name of any author or any participating institution 14 | may be used to endorse or promote products derived from this software 15 | without specific prior written permission. 16 | For a full list of authors, refer to the git contributions. 
17 | -------------------------------------------------------------------------- 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 21 | ARE DISCLAIMED. IN NO EVENT SHALL ANY OF THE AUTHORS OR THE CONTRIBUTING 22 | INSTITUTIONS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 23 | EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 24 | PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 25 | OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 26 | WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 27 | OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 28 | ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # OpenMS streamlit template 2 | 3 | [![Open Template!](https://static.streamlit.io/badges/streamlit_badge_black_white.svg)](https://abi-services.cs.uni-tuebingen.de/streamlit-template/) 4 | 5 | This repository contains a template app for OpenMS workflows in a web application using the **streamlit** framework. It serves as a foundation for apps ranging from simple workflows with **pyOpenMS** to complex workflows utilizing **OpenMS TOPP tools** with parallel execution. It includes solutions for handling user data and parameters in workspaces as well as deployment with docker-compose. 
6 | 7 | ## Features 8 | 9 | - Workspaces for user data with unique shareable IDs 10 | - Persistent parameters and input files within a workspace 11 | - Local and online mode 12 | - Captcha control 13 | - Packaged executables for Windows 14 | - Framework for workflows with OpenMS TOPP tools 15 | - Deployment [with docker-compose](https://github.com/OpenMS/streamlit-deployment) 16 | 17 | ## Documentation 18 | 19 | Documentation for **users** and **developers** is included as pages in [this template app](https://abi-services.cs.uni-tuebingen.de/streamlit-template/), indicated by the 📖 icon. 20 | 21 | ## Citation 22 | 23 | Please cite: 24 | Müller, T. D., Siraj, A., et al. OpenMS WebApps: Building User-Friendly Solutions for MS Analysis. Journal of Proteome Research (2025). [https://doi.org/10.1021/acs.jproteome.4c00872](https://doi.org/10.1021/acs.jproteome.4c00872) 25 | 26 | ## References 27 | 28 | - Pfeuffer, J., Bielow, C., Wein, S. et al. OpenMS 3 enables reproducible analysis of large-scale mass spectrometry data. Nat Methods 21, 365–367 (2024). [https://doi.org/10.1038/s41592-024-02197-7](https://doi.org/10.1038/s41592-024-02197-7) 29 | 30 | - Röst HL, Schmitt U, Aebersold R, Malmström L. pyOpenMS: a Python-based interface to the OpenMS mass-spectrometry algorithm library. Proteomics. 2014 Jan;14(1):74-7. [https://doi.org/10.1002/pmic.201300246](https://doi.org/10.1002/pmic.201300246). PMID: [24420968](https://pubmed.ncbi.nlm.nih.gov/24420968/). 
31 | 32 | 33 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from pathlib import Path 3 | import json 4 | # For some reason the windows version only works if this is imported here 5 | import pyopenms 6 | 7 | if "settings" not in st.session_state: 8 | with open("settings.json", "r") as f: 9 | st.session_state.settings = json.load(f) 10 | 11 | if __name__ == '__main__': 12 | pages = { 13 | str(st.session_state.settings["app-name"]) : [ 14 | st.Page(Path("content", "quickstart.py"), title="Quickstart", icon="👋"), 15 | st.Page(Path("content", "documentation.py"), title="Documentation", icon="📖"), 16 | ], 17 | "TOPP Workflow Framework": [ 18 | st.Page(Path("content", "topp_workflow_file_upload.py"), title="File Upload", icon="📁"), 19 | st.Page(Path("content", "topp_workflow_parameter.py"), title="Configure", icon="⚙️"), 20 | st.Page(Path("content", "topp_workflow_execution.py"), title="Run", icon="🚀"), 21 | st.Page(Path("content", "topp_workflow_results.py"), title="Results", icon="📊"), 22 | ], 23 | "pyOpenMS Workflow" : [ 24 | st.Page(Path("content", "file_upload.py"), title="File Upload", icon="📂"), 25 | st.Page(Path("content", "raw_data_viewer.py"), title="View MS data", icon="👀"), 26 | st.Page(Path("content", "run_example_workflow.py"), title="Run Workflow", icon="⚙️"), 27 | st.Page(Path("content", "download_section.py"), title="Download Results", icon="⬇️"), 28 | ], 29 | "Others Topics": [ 30 | st.Page(Path("content", "simple_workflow.py"), title="Simple Workflow", icon="⚙️"), 31 | st.Page(Path("content", "run_subprocess.py"), title="Run Subprocess", icon="🖥️"), 32 | ] 33 | } 34 | 35 | pg = st.navigation(pages) 36 | pg.run() -------------------------------------------------------------------------------- /assets/OpenMS.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OpenMS/streamlit-template/57fccde390ef0ce95a625c7a427840a8d69860c3/assets/OpenMS.png -------------------------------------------------------------------------------- /assets/openms.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenMS/streamlit-template/57fccde390ef0ce95a625c7a427840a8d69860c3/assets/openms.ico -------------------------------------------------------------------------------- /assets/openms_transparent_bg_logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 9 | 11 | 24 | 31 | 35 | 39 | 43 | 47 | 51 | 55 | 59 | 63 | 67 | 68 | 69 | 71 | 75 | 77 | 79 | 81 | 86 | 87 | 90 | 95 | 100 | 101 | 105 | 110 | 115 | 116 | 119 | 124 | 129 | 130 | 131 | 134 | 137 | 142 | 147 | 148 | 151 | 156 | 161 | 162 | 163 | 164 | 165 | 166 | -------------------------------------------------------------------------------- /assets/pyopenms_transparent_background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenMS/streamlit-template/57fccde390ef0ce95a625c7a427840a8d69860c3/assets/pyopenms_transparent_background.png -------------------------------------------------------------------------------- /clean-up-workspaces.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from pathlib import Path 3 | import os 4 | import time 5 | import shutil 6 | from datetime import datetime 7 | 8 | # Define the workspaces directory 9 | workspaces_directory = Path("/workspaces-streamlit-template") 10 | 11 | # Get the current time in seconds 12 | current_time = time.time() 13 | 14 | # Define the time threshold (7 days ago) 86400 seconds in a day 15 | threshold = current_time - (86400 * 7) 16 | 17 | # Print current time 18 | print( 19 | f"Current Time: {datetime.utcfromtimestamp(current_time).strftime('%Y-%m-%d 
%H:%M:%S UTC')}\n" 20 | ) 21 | 22 | # Collect remaining dirctories to print out later 23 | remaining_directories = [] 24 | # Iterate through directories in workspaces_directory 25 | for directory in workspaces_directory.iterdir(): 26 | # Check if it's a directory 27 | if directory.is_dir(): 28 | # Get the directory's modification time 29 | modification_time = os.path.getmtime(directory) 30 | 31 | # Check if the modification time is less than the threshold 32 | if modification_time < threshold: 33 | # Calculate the time difference in seconds 34 | time_difference = current_time - modification_time 35 | 36 | # Print the directory name and the time difference in minutes 37 | print( 38 | f"Deleting directory: {directory.name}, Last Modified: {time_difference / 86400:.1f} days ago" 39 | ) 40 | 41 | # Remove workspace 42 | shutil.rmtree(directory) 43 | else: 44 | remaining_directories.append(directory) 45 | 46 | # Print info on remaining directories 47 | if remaining_directories: 48 | print(f"\nRemaining directories in {workspaces_directory.name}:") 49 | for directory in remaining_directories: 50 | print( 51 | f"{directory.name}, Last Modified: {(current_time - os.path.getmtime(directory)) / 60:.2f} minutes ago" 52 | ) 53 | else: 54 | print(f"\n{workspaces_directory.name} is empty.") 55 | 56 | 57 | # Print separator 58 | print(100 * "-") 59 | -------------------------------------------------------------------------------- /content/documentation.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from src.common.common import page_setup 3 | from pathlib import Path 4 | from docs.toppframework import content as topp_framework_content 5 | 6 | page_setup() 7 | 8 | 9 | st.title("Documentation") 10 | 11 | cols = st.columns(2) 12 | 13 | pages = [ 14 | "User Guide", 15 | "Installation", 16 | "Developers Guide: How to build app based on this template", 17 | "Developers Guide: TOPP Workflow Framework", 18 | "Developer 
Guide: Windows Executables", 19 | "Developers Guide: Deployment", 20 | ] 21 | page = cols[0].selectbox( 22 | "**Content**", 23 | pages, 24 | ) 25 | 26 | ############################################################################################# 27 | # User Guide 28 | ############################################################################################# 29 | 30 | if page == pages[0]: 31 | with open(Path("docs", "user_guide.md"), "r", encoding="utf-8") as f: 32 | content = f.read() 33 | st.markdown(content) 34 | 35 | ############################################################################################# 36 | # Installation 37 | ############################################################################################# 38 | 39 | if page == pages[1]: 40 | if Path("OpenMS-App.zip").exists(): 41 | st.markdown( 42 | """ 43 | Download the latest version for **Windows** here clicking the button below. 44 | """ 45 | ) 46 | with open("OpenMS-App.zip", "rb") as file: 47 | st.download_button( 48 | label="Download for Windows", 49 | data=file, 50 | file_name="OpenMS-App.zip", 51 | mime="archive/zip", 52 | type="primary", 53 | ) 54 | with open(Path("docs", "installation.md"), "r", encoding="utf-8") as f: 55 | content = f.read() 56 | st.markdown(content) 57 | 58 | ############################################################################################# 59 | # Developer Overview, how to build app based on Template 60 | ############################################################################################# 61 | 62 | if page == pages[2]: 63 | with open(Path("docs", "build_app.md"), "r", encoding="utf-8") as f: 64 | content = f.read() 65 | st.markdown(content) 66 | 67 | ############################################################################################# 68 | # TOPP Workflow Framework 69 | ############################################################################################# 70 | 71 | if page == pages[3]: 72 | topp_framework_content() 73 | 74 
| ############################################################################################# 75 | # Windows Executables 76 | ############################################################################################# 77 | 78 | if page == pages[4]: 79 | st.markdown( 80 | """ 81 | ## 💻 How to package everything for Windows executables 82 | 83 | This guide explains how to package OpenMS apps into Windows executables using two different methods: 84 | """ 85 | ) 86 | 87 | tabs = ["**embeddable Python**", "**PyInstaller**"] 88 | tabs = st.tabs(tabs) 89 | 90 | # window executable with embeddable python 91 | with tabs[0]: 92 | with open(Path("docs", "win_exe_with_embed_py.md"), "r", encoding="utf-8") as f: 93 | content = f.read() 94 | st.markdown(content) 95 | 96 | # window executable with pyinstaller 97 | with tabs[1]: 98 | with open(Path("docs", "win_exe_with_pyinstaller.md"), "r", encoding="utf-8") as f: 99 | content = f.read() 100 | st.markdown(content) 101 | 102 | ############################################################################################# 103 | # Deployment 104 | ############################################################################################# 105 | 106 | if page == pages[5]: 107 | with open(Path("docs", "deployment.md"), "r", encoding="utf-8") as f: 108 | content = f.read() 109 | st.markdown(content) -------------------------------------------------------------------------------- /content/download_section.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | 3 | from pathlib import Path 4 | import shutil 5 | 6 | from src.common.common import page_setup 7 | from zipfile import ZipFile, ZIP_DEFLATED 8 | 9 | page_setup() 10 | 11 | # Define output folder here; all subfolders will be handled as downloadable 12 | # directories 13 | output_folder = 'mzML-workflow-results' 14 | 15 | 16 | # Generate full path 17 | dirpath = Path(st.session_state["workspace"], output_folder) 18 | 
19 | # Detect downloadable content 20 | if dirpath.exists(): 21 | directories = sorted( 22 | [entry for entry in dirpath.iterdir() if not entry.is_file()] 23 | ) 24 | else: 25 | directories = [] 26 | 27 | # Show error if no content is available for download 28 | if len(directories) == 0: 29 | st.error('No results to show yet. Please run a workflow first!') 30 | else: 31 | # Table Header 32 | columns = st.columns(3) 33 | columns[0].write('**Run**') 34 | columns[1].write('**Download**') 35 | columns[2].write('**Delete Result Set**') 36 | 37 | # Table Body 38 | for i, directory in enumerate(directories): 39 | st.divider() 40 | columns = st.columns(3) 41 | columns[0].empty().write(directory.name) 42 | 43 | with columns[1]: 44 | button_placeholder = st.empty() 45 | 46 | # Show placeholder button before download is prepared 47 | clicked = button_placeholder.button('Prepare Download', key=i, use_container_width=True) 48 | if clicked: 49 | button_placeholder.empty() 50 | with st.spinner(): 51 | # Create ZIP file 52 | out_zip = Path(directory, 'output.zip') 53 | if not out_zip.exists(): 54 | with ZipFile(out_zip, 'w', ZIP_DEFLATED) as zip_file: 55 | for output in Path(directory).iterdir(): 56 | if output.name == 'output.zip': 57 | continue 58 | try: 59 | with open(output, 'r') as f: 60 | zip_file.writestr(output.name, f.read()) 61 | except: 62 | continue 63 | # Show download button after ZIP file was created 64 | with open(out_zip, 'rb') as f: 65 | button_placeholder.download_button( 66 | "Download ⬇️", f, 67 | file_name = f'{directory.name}.zip', 68 | use_container_width=True 69 | ) 70 | 71 | with columns[2]: 72 | if st.button(f"🗑️ {directory.name}", use_container_width=True): 73 | shutil.rmtree(directory) 74 | st.rerun() -------------------------------------------------------------------------------- /content/file_upload.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import streamlit as st 4 | 
import pandas as pd 5 | 6 | from src.common.common import ( 7 | page_setup, 8 | save_params, 9 | v_space, 10 | show_table, 11 | TK_AVAILABLE, 12 | tk_directory_dialog, 13 | ) 14 | from src import fileupload 15 | 16 | params = page_setup() 17 | 18 | st.title("File Upload") 19 | 20 | # Check if there are any files in the workspace 21 | mzML_dir = Path(st.session_state.workspace, "mzML-files") 22 | if not any(Path(mzML_dir).iterdir()): 23 | # No files present, load example data 24 | fileupload.load_example_mzML_files() 25 | 26 | tabs = ["File Upload"] 27 | if st.session_state.location == "local": 28 | tabs.append("Files from local folder") 29 | 30 | tabs = st.tabs(tabs) 31 | 32 | with tabs[0]: 33 | with st.form("mzML-upload", clear_on_submit=True): 34 | files = st.file_uploader( 35 | "mzML files", accept_multiple_files=(st.session_state.location == "local") 36 | ) 37 | cols = st.columns(3) 38 | if cols[1].form_submit_button("Add files to workspace", type="primary"): 39 | if files: 40 | fileupload.save_uploaded_mzML(files) 41 | else: 42 | st.warning("Select files first.") 43 | 44 | # Local file upload option: via directory path 45 | if st.session_state.location == "local": 46 | with tabs[1]: 47 | st_cols = st.columns([0.05, 0.95], gap="small") 48 | with st_cols[0]: 49 | st.write("\n") 50 | st.write("\n") 51 | dialog_button = st.button( 52 | "📁", 53 | key="local_browse", 54 | help="Browse for your local directory with MS data.", 55 | disabled=not TK_AVAILABLE, 56 | ) 57 | if dialog_button: 58 | st.session_state["local_dir"] = tk_directory_dialog( 59 | "Select directory with your MS data", 60 | st.session_state["previous_dir"], 61 | ) 62 | st.session_state["previous_dir"] = st.session_state["local_dir"] 63 | with st_cols[1]: 64 | # with st.form("local-file-upload"): 65 | local_mzML_dir = st.text_input( 66 | "path to folder with mzML files", value=st.session_state["local_dir"] 67 | ) 68 | # raw string for file paths 69 | local_mzML_dir = rf"{local_mzML_dir}" 70 | cols = 
st.columns([0.65, 0.3, 0.4, 0.25], gap="small") 71 | copy_button = cols[1].button( 72 | "Copy files to workspace", type="primary", disabled=(local_mzML_dir == "") 73 | ) 74 | use_copy = cols[2].checkbox( 75 | "Make a copy of files", 76 | key="local_browse-copy_files", 77 | value=True, 78 | help="Create a copy of files in workspace.", 79 | ) 80 | if not use_copy: 81 | st.warning( 82 | "**Warning**: You have deselected the `Make a copy of files` option. " 83 | "This **_assumes you know what you are doing_**. " 84 | "This means that the original files will be used instead. " 85 | ) 86 | if copy_button: 87 | fileupload.copy_local_mzML_files_from_directory(local_mzML_dir, use_copy) 88 | 89 | if any(Path(mzML_dir).iterdir()): 90 | v_space(2) 91 | # Display all mzML files currently in workspace 92 | df = pd.DataFrame( 93 | { 94 | "file name": [ 95 | f.name 96 | for f in Path(mzML_dir).iterdir() 97 | if "external_files.txt" not in f.name 98 | ] 99 | } 100 | ) 101 | 102 | # Check if local files are available 103 | external_files = Path(mzML_dir, "external_files.txt") 104 | if external_files.exists(): 105 | with open(external_files, "r") as f_handle: 106 | external_files = f_handle.readlines() 107 | external_files = [f.strip() for f in external_files] 108 | df = pd.concat( 109 | [df, pd.DataFrame({"file name": external_files})], ignore_index=True 110 | ) 111 | 112 | st.markdown("##### mzML files in current workspace:") 113 | show_table(df) 114 | v_space(1) 115 | # Remove files 116 | with st.expander("🗑️ Remove mzML files"): 117 | to_remove = st.multiselect( 118 | "select mzML files", options=[f.stem for f in sorted(mzML_dir.iterdir())] 119 | ) 120 | c1, c2 = st.columns(2) 121 | if c2.button( 122 | "Remove **selected**", type="primary", disabled=not any(to_remove) 123 | ): 124 | params = fileupload.remove_selected_mzML_files(to_remove, params) 125 | save_params(params) 126 | st.rerun() 127 | 128 | if c1.button("⚠️ Remove **all**", disabled=not any(mzML_dir.iterdir())): 129 | 
params = fileupload.remove_all_mzML_files(params) 130 | save_params(params) 131 | st.rerun() 132 | 133 | save_params(params) 134 | -------------------------------------------------------------------------------- /content/quickstart.py: -------------------------------------------------------------------------------- 1 | """ 2 | Main page for the OpenMS Template App. 3 | 4 | This module sets up and displays the Streamlit app for the OpenMS Template App. 5 | It includes: 6 | - Setting the app title. 7 | - Displaying a description. 8 | - Providing a download button for the Windows version of the app. 9 | 10 | Usage: 11 | Run this script to launch the OpenMS Template App. 12 | 13 | Note: 14 | - If run in local mode, the CAPTCHA control is not applied. 15 | - If not in local mode, CAPTCHA control is applied to verify the user. 16 | 17 | Returns: 18 | None 19 | """ 20 | 21 | from pathlib import Path 22 | import streamlit as st 23 | 24 | from src.common.common import page_setup, v_space 25 | 26 | page_setup(page="main") 27 | 28 | st.markdown("# 👋 Quick Start") 29 | st.markdown("## Template for OpenMS web apps using the **streamlit** framework") 30 | c1, c2 = st.columns(2) 31 | c1.markdown( 32 | """ 33 | ## ⭐ Features 34 | 35 | - Simple workflows with **pyOpenMS** 36 | - Complex workflows utilizing **OpenMS TOPP tools** with parallel execution. 37 | - Workspaces for user data with unique shareable IDs 38 | - Persistent parameters and input files within a workspace 39 | - Captcha control 40 | - Packaged executables for Windows 41 | - Deploy multiple apps easily with [docker-compose](https://github.com/OpenMS/streamlit-deployment) 42 | """ 43 | ) 44 | v_space(1, c2) 45 | c2.image("assets/openms_transparent_bg_logo.svg", width=300) 46 | if Path("OpenMS-App.zip").exists(): 47 | st.subheader( 48 | """ 49 | Download the latest version for Windows here by clicking the button below. 
50 | """ 51 | ) 52 | with open("OpenMS-App.zip", "rb") as file: 53 | st.download_button( 54 | label="Download for Windows", 55 | data=file, 56 | file_name="OpenMS-App.zip", 57 | mime="archive/zip", 58 | type="primary", 59 | ) 60 | st.markdown( 61 | """ 62 | Extract the zip file and run the installer (.msi) file to install the app. The app can then be launched using the corresponding desktop icon. 63 | """ 64 | ) 65 | 66 | st.markdown("## 📖 Documentation") 67 | st.markdown( 68 | f""" 69 | This template app includes documentation for **users** including **installation** and introduction to template specific concepts such as **workspaces** and developers with detailed instructions on **how to create and deploy your own app** based on this template. 70 | """ 71 | ) 72 | st.page_link( 73 | "content/documentation.py", 74 | label="Read documentation here, select chapter in the content menu.", 75 | icon="➡️", 76 | ) 77 | 78 | st.markdown( 79 | """## Workspaces and Settings 80 | The **sidebar** contains to boxes, one for **workspaces** (in local mode) and one for **settings**. 81 | 82 | 🖥️ **Workspaces** store user inputs, parameters and results for a specific session or analysis task. 83 | 84 | In **online mode** where the app is hosted on a remote server the workspace has a unique identifier number embedded within the URL. To share your data analysis with collaboration partners simply share the URL. 85 | 86 | In **local mode** where the app is run locally on a PC (e.g. via Windows executable) the user can create and delete separate workspaces for different projects. 87 | 88 | ⚙️ **Settings** contain global settings which are relevant for all pages, such as the image export format. 89 | """ 90 | ) 91 | 92 | 93 | st.markdown("## Example pages: workflows, visualization and more") 94 | st.markdown( 95 | """ 96 | This app serves both as documentation and showcase what's possible with OpenMS web apps. 97 | 98 | In general there are two options for building workflows. 
99 | 100 | ### 1. 🚀 **TOPP Workflow Framework** 101 | 102 | Use this option if you want a standardized framework for building your workflow. 103 | 104 | - **Pre-defined user interface** all in one streamlit page with all steps on different pages: 105 | - **File Upload**: upload, download and delete input files 106 | - **Configure**: Automatically display input widgets for all paramters in TOPP tools and custom Python scripts 107 | - **Run**: Start and stop workflow execution, includes continous log 108 | - **Results**: Interactive result dashboard 109 | - **Write less code**: everything from file upload, input widget generation and execution of tools is handled via convenient functions 110 | - **Fast and performant workflows**: Automatic parallel execution of TOPP tools ensures great speed, comparable with workflows written in bash 111 | - **Ideal for longer workflows**: Close the app and come back to the still running or finish workflow the next day, by entering your workspace again. 112 | """ 113 | ) 114 | st.page_link( 115 | "content/documentation.py", 116 | label="Check out extensive documentation on the TOPP tool framework.", 117 | icon="➡️", 118 | ) 119 | st.page_link( 120 | "content/topp_workflow_file_upload.py", 121 | label="Play around with the example workflow.", 122 | icon="➡️", 123 | ) 124 | st.markdown( 125 | """ 126 | ### 2. 🐍 **Flexible, custom workflow with pyOpenMS on multiple pages** 127 | 128 | Use this option if you want full control over your workflow implementation and user interface. 129 | 130 | Uses the integrated parameter handling with global parameters across pages, including uploaded files. 131 | 132 | To get an idea check out the following pages from the example worklfow (file upload first!). 
133 | """ 134 | ) 135 | st.page_link( 136 | "content/file_upload.py", 137 | label="Upload your own mzML files or use the provided example data set.", 138 | icon="➡️", 139 | ) 140 | st.page_link( 141 | "content/raw_data_viewer.py", 142 | label="Visualize mzML file content in an interactive dashboard.", 143 | icon="➡️", 144 | ) 145 | st.page_link( 146 | "content/run_example_workflow.py", 147 | label="Run a small example workflow with mzML files and check out results.", 148 | icon="➡️", 149 | ) 150 | 151 | st.markdown( 152 | """ 153 | ### Other Topics 154 | 155 | Includes other example pages which are independent to showcase other functionalities. 156 | """ 157 | ) 158 | st.page_link( 159 | "content/simple_workflow.py", 160 | label="A very simple worklfow explaining the concepts of data caching in streamlit.", 161 | icon="➡️", 162 | ) 163 | st.page_link( 164 | "content/run_subprocess.py", 165 | label="How to run any command line tool as subprocess from within the OpenMS web app.", 166 | icon="➡️", 167 | ) 168 | -------------------------------------------------------------------------------- /content/raw_data_viewer.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import streamlit as st 4 | 5 | from src.common.common import page_setup 6 | from src import view 7 | 8 | 9 | params = page_setup() 10 | 11 | st.title("View raw MS data") 12 | 13 | # File selection can not be in fragment since it influences the subsequent sections 14 | cols = st.columns(3) 15 | 16 | mzML_dir = Path(st.session_state.workspace, "mzML-files") 17 | file_options = [f.name for f in mzML_dir.iterdir() if "external_files.txt" not in f.name] 18 | 19 | # Check if local files are available 20 | external_files = Path(mzML_dir, "external_files.txt") 21 | if external_files.exists(): 22 | with open(external_files, "r") as f_handle: 23 | external_files = f_handle.readlines() 24 | external_files = [f.strip() for f in external_files] 25 | 
file_options += external_files 26 | 27 | selected_file = cols[0].selectbox( 28 | "choose file", 29 | file_options, 30 | key="view_selected_file" 31 | ) 32 | if selected_file: 33 | view.get_df(Path(st.session_state.workspace, "mzML-files", selected_file)) 34 | 35 | 36 | tabs = st.tabs( 37 | ["📈 Peak map (MS1)", "📈 Spectra (MS1 + MS2)", "📈 Chromatograms (MS1)"] 38 | ) 39 | with tabs[0]: 40 | view.view_peak_map() 41 | with tabs[1]: 42 | view.view_spectrum() 43 | with tabs[2]: 44 | view.view_bpc_tic() 45 | -------------------------------------------------------------------------------- /content/run_example_workflow.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | 3 | from pathlib import Path 4 | 5 | from src.common.common import page_setup, save_params 6 | from src import mzmlfileworkflow 7 | 8 | # Page name "workflow" will show mzML file selector in sidebar 9 | params = page_setup() 10 | 11 | st.title("Workflow") 12 | st.markdown( 13 | """ 14 | More complex workflow with mzML files and input form. 15 | 16 | Changing widgets within the form will not trigger the execution of the script immediatly. 17 | This is great for large parameter sections. 
18 | """ 19 | ) 20 | 21 | with st.form("workflow-with-mzML-form"): 22 | st.markdown("**Parameters**") 23 | 24 | file_options = [f.stem for f in Path(st.session_state.workspace, "mzML-files").glob("*.mzML") if "external_files.txt" not in f.name] 25 | 26 | # Check if local files are available 27 | external_files = Path(Path(st.session_state.workspace, "mzML-files"), "external_files.txt") 28 | if external_files.exists(): 29 | with open(external_files, "r") as f_handle: 30 | external_files = f_handle.readlines() 31 | external_files = [str(Path(f.strip()).with_suffix('')) for f in external_files] 32 | file_options += external_files 33 | 34 | st.multiselect( 35 | "**input mzML files**", 36 | file_options, 37 | params["example-workflow-selected-mzML-files"], 38 | key="example-workflow-selected-mzML-files", 39 | ) 40 | 41 | c1, _, c3 = st.columns(3) 42 | if c1.form_submit_button( 43 | "Save Parameters", help="Save changes made to parameter section." 44 | ): 45 | params = save_params(params) 46 | run_workflow_button = c3.form_submit_button("Run Workflow", type="primary") 47 | 48 | result_dir = Path(st.session_state["workspace"], "mzML-workflow-results") 49 | 50 | if run_workflow_button: 51 | params = save_params(params) 52 | if params["example-workflow-selected-mzML-files"]: 53 | mzmlfileworkflow.run_workflow(params, result_dir) 54 | else: 55 | st.warning("Select some mzML files.") 56 | 57 | 58 | 59 | mzmlfileworkflow.result_section(result_dir) -------------------------------------------------------------------------------- /content/run_subprocess.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | import threading 3 | import os 4 | 5 | from pathlib import Path 6 | 7 | from src.common.common import page_setup, save_params 8 | from src.run_subprocess import run_subprocess 9 | 10 | # Page name "workflow" will show mzML file selector in sidebar 11 | params = page_setup() 12 | 13 | st.title("Run subprocess") 14 | 
st.markdown( 15 | """ 16 | This example demonstrates how to run an external process (in this case, the Linux command 'grep' or 'findstr' for windows) as a subprocess to extract IDs from the selected mzML file while displaying the process output. 17 | It also works with longer-running processes, such as calling an OpenMS TOPP tool. 18 | """ 19 | ) 20 | 21 | # Define the directory where mzML files are located 22 | mzML_dir: Path = Path(st.session_state.workspace, "mzML-files") 23 | 24 | # Create two columns for the Streamlit app layout 25 | col1, col2 = st.columns(2) 26 | 27 | # Use the `glob` method to get a list of all files in the directory 28 | file_list = list(mzML_dir.glob("*")) 29 | 30 | # select box to select file from user 31 | file_name = st.selectbox("**Please select file**", [file.stem for file in file_list]) 32 | 33 | # full path of file 34 | mzML_file_path = os.path.join(mzML_dir, str(file_name) + ".mzML") 35 | 36 | # Create a dictionary to capture the output and status of the subprocess 37 | result_dict = {} 38 | result_dict["success"] = False 39 | result_dict["log"] = " " 40 | 41 | # Create a flag to terminate the subprocess 42 | terminate_flag = threading.Event() 43 | terminate_flag.set() 44 | 45 | 46 | # Function to terminate the subprocess 47 | def terminate_subprocess(): 48 | """Set flag to terminate subprocess.""" 49 | global terminate_flag 50 | terminate_flag.set() 51 | 52 | 53 | # Check if the "Extract ids" button is clicked 54 | if st.button("Extract ids"): 55 | # Check if the "Terminate/Clear" button is clicked to stop the subprocess and clear the form 56 | if st.button("Terminate/Clear"): 57 | # Terminate the subprocess 58 | terminate_subprocess() 59 | st.warning("Process terminated. 
The analysis may not be complete.") 60 | # Reset the page 61 | st.rerun() 62 | 63 | # Display a status message while running the analysis 64 | with st.status("Please wait until fetching all ids from mzML 😑"): 65 | 66 | # Define the command to run as a subprocess (example: grep or findstr (for windows)) 67 | # 'nt' indicates Windows 68 | if os.name == 'nt': 69 | args = ["findstr", "idRef", mzML_file_path] 70 | else: 71 | # Assume 'posix' for Linux and macOS 72 | args =["grep", "idRef", mzML_file_path] 73 | 74 | # Display the command that will be executed 75 | message = f"Running command: {' '.join(args)}" 76 | st.code(message) 77 | 78 | # Run the subprocess command 79 | run_subprocess(args, result_dict) 80 | 81 | # Check if the subprocess was successful 82 | if result_dict["success"]: 83 | # Here can add code here to handle the results, e.g., display them to the user 84 | 85 | pass # Placeholder for result handling 86 | 87 | 88 | # At the end of each page, always save parameters (including any changes via widgets with key) 89 | save_params(params) 90 | -------------------------------------------------------------------------------- /content/simple_workflow.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | 3 | from src.common.common import page_setup, save_params, show_table 4 | from src import simpleworkflow 5 | 6 | # Page name "workflow" will show mzML file selector in sidebar 7 | params = page_setup() 8 | 9 | st.title("Simple Workflow") 10 | st.markdown("Example for a simple workflow with quick execution times.") 11 | 12 | # Define two widgets with values from paramter file 13 | # To save them as parameters use the same key as in the json file 14 | 15 | # We access the x-dimension via local variable 16 | xdimension = st.number_input( 17 | label="x dimension", 18 | min_value=1, 19 | max_value=20, 20 | value=params["example-x-dimension"], 21 | step=1, 22 | key="example-x-dimension", 23 | ) 24 | 25 | 
st.number_input( 26 | label="y dimension", 27 | min_value=1, 28 | max_value=20, 29 | value=params["example-y-dimension"], 30 | step=1, 31 | key="example-y-dimension", 32 | ) 33 | 34 | # Get a dataframe with x and y dimensions via time consuming (sleep) cached function 35 | # If the input has been given before, the function does not run again 36 | # Input x from local variable, input y from session state via key 37 | df = simpleworkflow.generate_random_table( 38 | xdimension, st.session_state["example-y-dimension"] 39 | ) 40 | 41 | # Display dataframe via custom show_table function, which will render a download button as well 42 | show_table(df, download_name="random-table") 43 | 44 | # At the end of each page, always save parameters (including any changes via widgets with key) 45 | save_params(params) 46 | -------------------------------------------------------------------------------- /content/topp_workflow_execution.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from src.common.common import page_setup 3 | from src.Workflow import Workflow 4 | 5 | 6 | params = page_setup() 7 | 8 | wf = Workflow() 9 | 10 | wf.show_execution_section() 11 | 12 | 13 | -------------------------------------------------------------------------------- /content/topp_workflow_file_upload.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from src.common.common import page_setup 3 | from src.Workflow import Workflow 4 | 5 | 6 | params = page_setup() 7 | 8 | wf = Workflow() 9 | 10 | wf.show_file_upload_section() 11 | 12 | 13 | -------------------------------------------------------------------------------- /content/topp_workflow_parameter.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from src.common.common import page_setup 3 | from src.Workflow import Workflow 4 | 5 | 6 | params = 
page_setup() 7 | 8 | wf = Workflow() 9 | 10 | wf.show_parameter_section() 11 | -------------------------------------------------------------------------------- /content/topp_workflow_results.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from src.common.common import page_setup 3 | from src.Workflow import Workflow 4 | 5 | 6 | params = page_setup() 7 | 8 | wf = Workflow() 9 | 10 | wf.show_results_section() 11 | 12 | -------------------------------------------------------------------------------- /default-parameters.json: -------------------------------------------------------------------------------- 1 | { 2 | "example-workflow-selected-mzML-files": [], 3 | "image-format": "svg", 4 | "2D-map-intensity-cutoff": 5000, 5 | 6 | "example-x-dimension": 10, 7 | "example-y-dimension": 5, 8 | 9 | "controllo": false 10 | } 11 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | 3 | services: 4 | openms-streamlit-template: 5 | build: 6 | context: . 7 | dockerfile: Dockerfile 8 | args: 9 | GITHUB_TOKEN: $GITHUB_TOKEN 10 | image: openms_streamlit_template 11 | container_name: openms-streamlit-template 12 | restart: always 13 | ports: 14 | - 8501:8501 15 | volumes: 16 | - workspaces-streamlit-template:/workspaces-streamlit-template 17 | command: streamlit run openms-streamlit-template/app.py 18 | volumes: 19 | workspaces-streamlit-template: 20 | -------------------------------------------------------------------------------- /docs/build_app.md: -------------------------------------------------------------------------------- 1 | # Build your own app based on this template 2 | 3 | ## App layout 4 | 5 | *Pages* can be navigated via the *sidebar*, which also contains the OpenMS logo, settings panel and a workspace indicator. 
6 | 7 | ## Key concepts 8 | 9 | - **Workspaces** 10 | : Directories where all data is generated and uploaded can be stored as well as a workspace specific parameter file. 11 | - **Run the app locally and online** 12 | : Launching the app with online mode disabled in the settings.json lets the user create/remove workspaces. In the online the user gets a workspace with a specific ID. 13 | - **Parameters** 14 | : Parameters (defaults in `default-parameters.json`) store changing parameters for each workspace. Parameters are loaded via the page_setup function at the start of each page. To track a widget variable via parameters simply give them a key and add a matching entry in the default parameters file. Initialize a widget value from the params dictionary. 15 | 16 | ```python 17 | params = page_setup() 18 | 19 | st.number_input(label="x dimension", min_value=1, max_value=20, 20 | value=params["example-y-dimension"], step=1, key="example-y-dimension") 21 | 22 | save_params() 23 | ``` 24 | 25 | ## Code structure 26 | - The main file `app.py` defines page layout. 27 | - **Pages** must be placed in the `content` directory. 28 | - It is recommended to use a separate file for defining functions per page in the `src` directory. 29 | - The `src/common.py` file contains a set of useful functions for common use (e.g. rendering a table with download button). 30 | 31 | ## Modify the template to build your own app 32 | 33 | 1. In `src/common.py`, update the name of your app and the repository name 34 | ```python 35 | APP_NAME = "OpenMS Streamlit App" 36 | REPOSITORY_NAME = "streamlit-template" 37 | ``` 38 | 2. In `clean-up-workspaces.py`, update the name of the workspaces directory to `/workspaces-` 39 | ```python 40 | workspaces_directory = Path("/workspaces-streamlit-template") 41 | ``` 42 | 3. Update `README.md` accordingly 43 | 44 | 45 | **Dockerfile-related** 46 | 1. 
Choose one of the Dockerfiles depending on your use case: 47 | - `Dockerfile` builds OpenMS including TOPP tools 48 | - `Dockerfile_simple` uses pyOpenMS only 49 | 2. Update the Dockerfile: 50 | - with the `GITHUB_USER` owning the Streamlit app repository 51 | - with the `GITHUB_REPO` name of the Streamlit app repository 52 | - if your main page Python file is not called `app.py`, modify the following line 53 | ```dockerfile 54 | RUN echo "mamba run --no-capture-output -n streamlit-env streamlit run app.py" >> /app/entrypoint.sh 55 | ``` 56 | 3. Update Python package dependency files: 57 | - `requirements.txt` if using `Dockerfile_simple` 58 | - `environment.yml` if using `Dockerfile` 59 | 60 | ## How to build a workflow 61 | 62 | ### Simple workflow using pyOpenMS 63 | 64 | Take a look at the example pages `Simple Workflow` or `Workflow with mzML files` for examples (on the *sidebar*). Put Streamlit logic inside the pages and call the functions with workflow logic from from the `src` directory (for our examples `src/simple_workflow.py` and `src/mzmlfileworkflow.py`). 65 | 66 | ### Complex workflow using TOPP tools 67 | 68 | This template app features a module in `src/workflow` that allows for complex and long workflows to be built very efficiently. Check out the `TOPP Workflow Framework` page for more information (on the *sidebar*). 69 | -------------------------------------------------------------------------------- /docs/deployment.md: -------------------------------------------------------------------------------- 1 | # OpenMS streamlit app deployment 2 | 3 | Multiple streamlit apps based on the [OpenMS streamlit template](https://github.com/OpenMS/streamlit-template/) can be deployed together using docker compose. 
4 | 5 | ## Features 6 | 7 | - deploy all OpenMS apps at once 8 | - user data (in workspaces) is stored in persistent docker volumes for each app 9 | 10 | ## Requirements 11 | - Docker Compose 12 | 13 | ## Deployment (e.g., needed after one app changed) 14 | 15 | **1. Make sure submodules are up-to-date.** 16 | 17 | `git submodule init` 18 | 19 | `git submodule update` 20 | 21 | **2. Specify GitHub token (to download Windows executables).** 22 | 23 | > This is **important**! Omitting this step will result in all apps not having the option to download executables any more. 24 | 25 | Create a temporary `.env` file with your Github token. It should contain only one line: 26 | 27 | `GITHUB_TOKEN=` 28 | 29 | **3. Run docker-compose.** 30 | 31 | `docker-compose up --build -d` 32 | 33 | > Make sure to remove the `.env` file with your Github token after successful build 34 | 35 | ## Add new app 36 | 37 | This will add your app as a submodule to the streamlit deployment repository. 38 | 39 | **1. Enable online mode in the apps settings.json.** 40 | 41 | **2. Fork and clone the [OpenMS streamlit deployment](https://github.com/OpenMS/streamlit-deployment) repository locally.** 42 | 43 | **3. Add your app as submodule. Make sure the app name is not used already.** 44 | 45 | `git submodule add ` 46 | 47 | **4. Initialize and update submodules.** 48 | 49 | `git submodule init` 50 | 51 | `git submodule update` 52 | 53 | **5. Add your app to `docker-compose.yml` file as a new service.** 54 | 55 | Copy the last service as a template. 
56 | 57 | Check and update the following entries: 58 | 59 | - name of the service 60 | - the name of the submodule 61 | - build context 62 | - the relative path to the submodule 63 | - build dockerfile 64 | - the correct Dockerfile 65 | - image 66 | - name of the docker image (typically the service name with underscores) 67 | - ports 68 | - choose an incremental host port number from the last service pointing to the streamlit port in docker container (8501) 69 | - volumes 70 | - update the names of the workspace directories, user data is stored outside of the docker container in a docker volume 71 | - command 72 | - update command with your main streamlit file 73 | 74 | **6. Test everything works locally.** 75 | 76 | Run docker-compose to launch all services. 77 | 78 | `docker-compose up --build -d` 79 | 80 | - there should be no errors building all services 81 | - make sure all apps are accessible via their port from localhost 82 | - test functionality of your app 83 | 84 | **7. Make a pull request with your changes to OpenMS/streamlit-deployment main branch.** 85 | 86 | 87 | 88 | # Other Architectures 89 | In principle OpenMS runs on most processor architectures. The images are provided and tested for x86 but OpenMS can also be compiled on architectures like arm64. Please note that you might have to adjust the miniforge version according to the processor architecture. 90 | -------------------------------------------------------------------------------- /docs/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | ## Windows 4 | 5 | The app is available as pre-packaged Windows executable, including all dependencies. 6 | 7 | The windows executable is built by a GitHub action and can be downloaded [here](https://github.com/OpenMS/streamlit-template/actions/workflows/build-windows-executable-app.yaml). 
8 | Select the latest successful run and download the zip file from the artifacts section, while signed in to GitHub. 9 | 10 | ## Python 11 | 12 | Clone the [streamlit-template repository](https://github.com/OpenMS/streamlit-template). It includes files to install dependencies via pip or conda. 13 | 14 | ### via pip in a new Python environment 15 | 16 | Create a virtual environment 17 | 18 | `python3 -m venv ` 19 | 20 | Activate the virtual environment 21 | 22 | `source .venv/bin/activate` 23 | 24 | To install all required dependencies via pip in the new Python environment, run the following command in the terminal: 25 | 26 | `pip install -r requirements.txt` 27 | 28 | ### via pip in an existing Python environment 29 | 30 | To install all required dependencies via pip in an already existing Python environment, run the following command in the terminal: 31 | 32 | `pip install -r requirements.txt` 33 | 34 | ### create new environment via conda/mamba 35 | 36 | Create and activate the conda environment: 37 | 38 | `conda env create -f environment.yml` 39 | 40 | `conda activate streamlit-env` 41 | 42 | ### run the app 43 | 44 | Run the app via streamlit command in the terminal. *local* and *online* mode can be toggled in the settings.json. Learn more about *local* and *online* mode in the documentation page 📖 **OpenMS Template App**. 45 | 46 | `streamlit run app.py` 47 | 48 | ## Docker 49 | 50 | This repository contains two Dockerfiles. 51 | 52 | 1. `Dockerfile`: This Dockerfile builds all dependencies for the app including Python packages and the OpenMS TOPP tools. Recommended for more complex workflows where you want to use the OpenMS TOPP tools for instance with the **TOPP Workflow Framework**. 53 | 2. `Dockerfile_simple`: This Dockerfile builds only the Python packages. Recommended for simple apps using pyOpenMS only. 
54 | -------------------------------------------------------------------------------- /docs/user_guide.md: -------------------------------------------------------------------------------- 1 | # User Guide 2 | 3 | Welcome to the OpenMS Streamlit Web Application! This guide will help you understand how to use our tools effectively. 4 | 5 | ## Advantages of OpenMS Web Apps 6 | 7 | OpenMS web applications provide a user-friendly interface for accessing the powerful features of OpenMS. Here are a few advantages: 8 | - **Accessibility**: Access powerful OpenMS algorithms and TOPP tools from any device with a web browser. 9 | - **Ease of Use**: Simplified user interface makes it easy for both beginners and experts to perform complex analyses. 10 | - **No Installation Required**: Use the tools without the need to install OpenMS locally, saving time and system resources. 11 | 12 | ## Workspaces 13 | 14 | In the OpenMS web application, workspaces are designed to keep your analysis organized: 15 | - **Workspace Specific Parameters and Files**: Each workspace stores parameters and files (uploaded input files and results from workflows). 16 | - **Persistence**: Your workspaces and parameters are saved, so you can return to your analysis anytime and pick up where you left off. Simply bookmark the page! 17 | 18 | 19 | ### File Uploads 20 | - **Online Mode**: You can upload only one file at a time. This helps manage server load and optimizes performance. 21 | 22 | - **Local Mode**: Multiple file uploads are supported, giving you flexibility when working with large datasets. Additionally, the file size upload limit can be adjusted in the following ways: 23 | 1. **Using `.streamlit/config.toml`**: 24 | - You can modify the `.streamlit/config.toml` file and set the `maxUploadSize` parameter to your desired value. By default, this is set to 200MB. 25 | - Example: 26 | ```toml 27 | [server] 28 | maxUploadSize = 500 # Set the upload limit to 500MB 29 | ``` 30 | 2. 
**Using CLI Command**: 31 | - You can customize the file size upload limit directly when running the application using the `--server.maxUploadSize` argument. 32 | - Example: 33 | ```bash 34 | python run_app.py --server.maxUploadSize 500 35 | ``` 36 | - This sets the upload limit to 500MB for the current session. 37 | 38 | - **Workspace Access**: 39 | - In online mode, workspaces are stored temporarily and will be cleared after seven days of inactivity. 40 | - In local mode, workspaces are saved on your local machine, allowing for persistent storage. Workspace directory can be specified in the `settings.json`. Defaults to `..` (parent directory). 41 | 42 | ## Downloading Results 43 | 44 | You can download the results of your analyses, including data, figures and tables, directly from the application: 45 | - **Figures**: Click the camera icon button, appearing while hovering on the top right corner of the figure. Set the desired image format in the settings panel in the side bar. 46 | - **Tables**: Use the download button to save tables in *csv* format, appearing while hovering on the top right corner of the table. 47 | - **Data**: Use the download section in the sidebar to download the raw results of your analysis. 48 | 49 | ## Getting Started 50 | 51 | To get started: 52 | 1. Select or create a new workspace. 53 | 2. Upload your data file. 54 | 3. Set the necessary parameters for your analysis. 55 | 4. Run the analysis. 56 | 5. View and download your results. 57 | 58 | For more detailed information on each step, refer to the specific sections of this guide. -------------------------------------------------------------------------------- /docs/win_exe_with_embed_py.md: -------------------------------------------------------------------------------- 1 | ## 💻 Create a window executable of a Streamlit App with embeddable Python 2 | 3 | To create an executable for Streamlit app on Windows, we'll use an embeddable version of Python.
4 | Here's a step-by-step guide: 5 | 6 | ### Download and Extract Python Embeddable Version 7 | 8 | 1. Download a suitable Python embeddable version. For example, let's download Python 3.11.9: 9 | 10 | ```bash 11 | # use curl command or manually download 12 | curl -O https://www.python.org/ftp/python/3.11.9/python-3.11.9-embed-amd64.zip 13 | ``` 14 | 15 | 2. Extract the downloaded zip file: 16 | 17 | ```bash 18 | mkdir python-3.11.9 19 | 20 | unzip python-3.11.9-embed-amd64.zip -d python-3.11.9 21 | 22 | rm python-3.11.9-embed-amd64.zip 23 | ``` 24 | 25 | ### Install pip 26 | 27 | 1. Download `get-pip.py`: 28 | 29 | ```bash 30 | # use curl command or manually download 31 | curl -O https://bootstrap.pypa.io/get-pip.py 32 | ``` 33 | 34 | 2. Install pip: 35 | 36 | ```bash 37 | ./python-3.11.9/python get-pip.py --no-warn-script-location 38 | 39 | # no need anymore get-pip.py 40 | rm get-pip.py 41 | ``` 42 | 43 | ### Configure Python Environment 44 | 45 | 1. Uncomment 'import site' in the `._pth` file: 46 | 47 | ```bash 48 | # Uncomment to run site.main() automatically 49 | # Remove hash from python-3.11.9/python311._pth file 50 | import site 51 | 52 | # Or use command 53 | sed -i '/^\s*#\s*import\s\+site/s/^#//' python-3.11.9/python311._pth 54 | ``` 55 | 56 | ### Install Required Packages 57 | 58 | Install all required packages from `requirements.txt`: 59 | 60 | ```bash 61 | ./python-3.11.9/python -m pip install -r requirements.txt --no-warn-script-location 62 | ``` 63 | 64 | ### Test and create `run_app.bat` file 65 | 66 | 1. Test by running app 67 | 68 | ```batch 69 | .\python-3.11.9\python -m streamlit run app.py 70 | ``` 71 | 72 | 2. Create a Clickable Shortcut 73 | 74 | Create a `run_app.bat` file to make running the app easier: 75 | 76 | ```batch 77 | echo @echo off > run_app.bat 78 | echo .\\python-3.11.9\\python -m streamlit run app.py >> run_app.bat 79 | ``` 80 | 81 | ### Create one executable folder 82 | 83 | 1. 
Create a folder for your Streamlit app: 84 | 85 | ```bash 86 | mkdir ../streamlit_exe 87 | ``` 88 | 89 | 2. Copy environment and app files: 90 | 91 | ```bash 92 | # move Python environment folder 93 | mv python-3.11.9 ../streamlit_exe 94 | 95 | # move run_app.bat file 96 | mv run_app.bat ../streamlit_exe 97 | 98 | # copy streamlit app files 99 | cp -r src pages .streamlit assets example-data ../streamlit_exe 100 | cp app.py ../streamlit_exe 101 | ``` 102 | 103 | #### 🚀 After successfully completing all these steps, the Streamlit app will be available by running the run_app.bat file. 104 | 105 | :pencil: You can still change the configuration of Streamlit app with .streamlit/config.toml file, e.g., provide a different port, change upload size, etc. 106 | 107 | ## Build executable in github action automatically 108 | 109 | Automate the process of building executables for your project with the GitHub action example [Test streamlit executable for Windows with embeddable python](https://github.com/OpenMS/streamlit-template/blob/main/.github/workflows/test-win-exe-w-embed-py.yaml) 110 |
111 | 112 | ## Create MSI Installer using WiX Toolset 113 | 114 | After creating your executable folder, you can package it into an MSI installer using WiX Toolset. Here's how: 115 | 116 | ### 1. Set Environment Variables 117 | 118 | Set these variables for consistent naming throughout the process: 119 | 120 | ```batch 121 | APP_NAME=OpenMS-StreamlitTemplateApp 122 | APP_UpgradeCode= generate-new 123 | ``` 124 | 125 | To generate a new GUID for your application's UpgradeCode, you can use: 126 | 127 | - PowerShell: `[guid]::NewGuid().ToString()` 128 | - Online GUID generator: https://www.guidgen.com/ 129 | - Windows Command Prompt: `powershell -Command "[guid]::NewGuid().ToString()"` 130 | 131 | ### 2. Install WiX Toolset 132 | 133 | 1. Download WiX Toolset binaries: 134 | ```batch 135 | curl -LO https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip 136 | unzip wix311-binaries.zip -d wix 137 | ``` 138 | 139 | ### 3. Prepare Installation Files 140 | 141 | 1. Create a SourceDir structure: 142 | 143 | ```batch 144 | mkdir SourceDir 145 | move streamlit_exe\* SourceDir 146 | ``` 147 | 148 | 2. Create Readme.txt: 149 | 150 | ```batch 151 | # Create a Readme.txt file in the SourceDir folder with instructions 152 | # for launching the application 153 | ``` 154 | 155 | 3. Add necessary assets: 156 | - Copy license file: `copy assets\openms_license.rtf SourceDir\` 157 | - Copy app icon: `copy assets\openms.ico SourceDir\` 158 | - Create success message script: 159 | ```vbscript 160 | ' ShowSuccessMessage.vbs 161 | MsgBox "The " & "%APP_NAME%" & " application is successfully installed.", vbInformation, "Installation Complete" 162 | ``` 163 | 164 | ### 4. Generate WiX Source Files 165 | 166 | 1. Generate component list from your files: 167 | 168 | ```batch 169 | wix\heat.exe dir SourceDir -gg -sfrag -sreg -srd -template component -cg StreamlitExeFiles -dr AppSubFolder -out streamlit_exe_files.wxs 170 | ``` 171 | 172 | 2. 
Create main WiX configuration file (streamlit_exe.wxs): 173 | 174 | ```xml 175 | 176 | 177 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 213 | 215 | 216 | 217 | 218 | 219 | 224 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | NOT Installed 242 | 243 | 244 | 245 | 246 | 247 | 248 | ``` 249 | 250 | ### 5. Build the MSI 251 | 252 | 1. Compile WiX source files: 253 | 254 | ```batch 255 | # Generate wixobj files from the WiX source files 256 | wix\candle.exe streamlit_exe.wxs streamlit_exe_files.wxs 257 | ``` 258 | 259 | 2. Link and create MSI: 260 | ```batch 261 | # Create the MSI installer from the wixobj files 262 | # The -sice:ICE60 flag stops a warning about duplicate component GUIDs, which can happen when heat.exe auto-generates components 263 | wix\light.exe -ext WixUIExtension -sice:ICE60 -o %APP_NAME%.msi streamlit_exe_files.wixobj streamlit_exe.wixobj 264 | ``` 265 | 266 | ### 6. Additional Notes 267 | 268 | - The generated MSI will create desktop and start menu shortcuts 269 | - Installation requires elevated privileges 270 | - A success message will be shown after installation 271 | - The installer includes a proper license agreement page 272 | - All files will be installed in Program Files by default 273 | 274 | For more detailed customization options, refer to the [WiX Toolset documentation](https://wixtoolset.org/documentation/). 275 | 276 | :warning: The `APP_UpgradeCode` GUID should be unique for your application. Generate a new one if you're creating a different app. 
277 | -------------------------------------------------------------------------------- /docs/win_exe_with_pyinstaller.md: -------------------------------------------------------------------------------- 1 | ## 💻 Create a window executable of streamlit app with pyinstaller 2 | :heavy_check_mark: 3 | Tested with streamlit v1.29.0, python v3.11.4 4 | 5 | :warning: Support until streamlit version `1.29.0` 6 | :point_right: For higher version, try streamlit app with embeddable python #TODO add link 7 | 8 | To create an executable for Streamlit app on Windows, we'll use an pyinstaller. 9 | Here's a step-by-step guide: 10 | 11 | ### virtual environment 12 | 13 | ``` 14 | # create an environment 15 | python -m venv 16 | 17 | # activate an environment 18 | .\myenv\Scripts\Activate.bat 19 | 20 | # install require packages 21 | pip install -r requirements.txt 22 | 23 | #install pyinstaller 24 | pip install pyinstaller 25 | ``` 26 | 27 | ### streamlit files 28 | 29 | create a run_app.py and add this lines of codes 30 | ``` 31 | from streamlit.web import cli 32 | 33 | if __name__=='__main__': 34 | cli._main_run_clExplicit( 35 | file="app.py", command_line="streamlit run" 36 | ) 37 | # we will create this function inside our streamlit framework 38 | 39 | ``` 40 | 41 | ### write function in cli.py 42 | 43 | Now, navigate to the inside streamlit environment 44 | 45 | here you go 46 | 47 | ``` 48 | \Lib\site-packages\streamlit\web\cli.py 49 | ``` 50 | for using our virtual environment, add this magic function to cli.py file: 51 | ``` 52 | #can be modify name as given in run_app.py 53 | #use underscore at beginning 54 | def _main_run_clExplicit(file, command_line, args=[], flag_options=[]): 55 | main._is_running_with_streamlit = True 56 | bootstrap.run(file, command_line, args, flag_options) 57 | ``` 58 | 59 | ### Hook folder 60 | Now, need to hook to get streamlit metadata 61 | organized as folder, where the pycache infos will save 62 | like: \hooks\hook-streamlit.py 63 | 64 | ``` 
65 | from PyInstaller.utils.hooks import copy_metadata 66 | datas = [] 67 | datas += copy_metadata('streamlit') 68 | datas += copy_metadata('pyopenms') 69 | # can add new package e-g 70 | datas += copy_metadata('captcha') 71 | ``` 72 | 73 | ### compile the app 74 | Now, ready for compilation 75 | ``` 76 | pyinstaller --onefile --additional-hooks-dir ./hooks run_app.py --clean 77 | 78 | #--onefile create join binary file ?? 79 | #will create run_app.spec file 80 | #--clean delete cache and removed temporary files before building 81 | #--additional-hooks-dir path to search for hook 82 | ``` 83 | 84 | ### streamlit config 85 | To access streamlit config create file in root 86 | (or just can be in output folder) 87 | .streamlit\config.toml 88 | 89 | ``` 90 | # content of .streamlit\config.toml 91 | [global] 92 | developmentMode = false 93 | 94 | [server] 95 | port = 8502 96 | ``` 97 | 98 | ### copy necessary files to dist folder 99 | ``` 100 | cp -r .streamlit dist/.streamlit 101 | cp -r pages dist/pages 102 | cp -r src dist/src 103 | cp -r assets dist/assets 104 | cp app.py dist/ 105 | 106 | ``` 107 | 108 | 109 | ### add datas in run_app.spec (.spec file) 110 | Add DATAS to the run_app.spec just created by compilation 111 | 112 | ``` 113 | datas=[ 114 | ("myenv/Lib/site-packages/altair/vegalite/v4/schema/vega-lite-schema.json","./altair/vegalite/v4/schema/"), 115 | ("myenv/Lib/site-packages/streamlit/static", "./streamlit/static"), 116 | ("myenv/Lib/site-packages/streamlit/runtime", "./streamlit/runtime"), 117 | ("myenv/Lib/site-packages/pyopenms", "./pyopenms/"), 118 | # Add new datas e-g we add in hook captcha 119 | ("myenv/Lib/site-packages/captcha", "./captcha/") 120 | ] 121 | ``` 122 | ### run final step to make executable 123 | All the modifications in datas should be loaded with 124 | ``` 125 | pyinstaller run_app.spec --clean 126 | ``` 127 | #### 🚀 After successfully completing all these steps, the Windows executable will be available in the dist folder. 
128 | 129 | :pencil: you can still change the configuration of streamlit app with .streamlit/config.toml file e-g provide different port, change upload size etc 130 | 131 | ℹ️ if problem with altair, Try version altair==4.0.1, and again compile 132 | 133 | ## Build executable in github action automatically 134 | Automate the process of building executables for your project with the GitHub action example [Test streamlit executable for Windows with pyinstaller](https://github.com/OpenMS/streamlit-template/blob/main/.github/workflows/test-win-exe-w-pyinstaller.yaml) 135 | -------------------------------------------------------------------------------- /gdpr_consent/README.md: -------------------------------------------------------------------------------- 1 | 2 | # GDPR Consent Banner 3 | 4 | This streamlit component creates a consent banner using Klaro for 5 | tracking-cookies etc. 6 | 7 | 8 | ## Project Setup 9 | 10 | ```sh 11 | npm install 12 | ``` 13 | 14 | ## Build for production 15 | 16 | ```sh 17 | npm run build 18 | ``` -------------------------------------------------------------------------------- /gdpr_consent/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /gdpr_consent/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gdpr_consent", 3 | "version": "1.0.0", 4 | "main": "index.js", 5 | "scripts": { 6 | "build": "webpack" 7 | }, 8 | "keywords": [], 9 | "author": "", 10 | "license": "ISC", 11 | "description": "", 12 | "devDependencies": { 13 | "streamlit-component-lib": "^2.0.0", 14 | "ts-loader": "^9.5.1", 15 | "typescript": "^5.5.4", 16 | "webpack": "^5.93.0", 17 | "webpack-cli": "^5.1.4" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /gdpr_consent/src/main.ts: 
-------------------------------------------------------------------------------- 1 | import { Streamlit, RenderData } from "streamlit-component-lib" 2 | 3 | // Define service 4 | type Service = { 5 | name: string; 6 | purposes: string[]; 7 | onAccept: () => Promise; 8 | onDecline: () => Promise; 9 | cookies?: (string | RegExp)[]; 10 | }; 11 | 12 | // Defines the configuration for Klaro 13 | let klaroConfig: { 14 | mustConsent: boolean; 15 | acceptAll: boolean; 16 | services: Service[]; 17 | } = { 18 | mustConsent: true, 19 | acceptAll: true, 20 | services: [] 21 | }; 22 | 23 | // This will make klaroConfig globally accessible 24 | (window as any).klaroConfig = klaroConfig 25 | 26 | // Klaro creates global variable for access to manager 27 | declare global { 28 | interface Window { 29 | klaro: any 30 | } 31 | } 32 | 33 | // The confirmed field in the Klaro Manager shows if the callback is 34 | // based on user generated data 35 | interface klaroManager { 36 | confirmed : boolean 37 | } 38 | 39 | // Function to safely access the Klaro manager 40 | function getKlaroManager() : klaroManager { 41 | return window.klaro?.getManager ? 
window.klaro.getManager() : null 42 | } 43 | 44 | // Waits until Klaro Manager is available 45 | async function waitForKlaroManager (maxWaitTime: number = 5000, interval: number = 100): Promise { 46 | const startTime = Date.now() 47 | while (Date.now() - startTime < maxWaitTime) { 48 | const klaroManager = getKlaroManager() 49 | if (klaroManager) { 50 | return klaroManager 51 | } 52 | await new Promise(resolve => setTimeout(resolve, interval)) 53 | } 54 | throw new Error("Klaro manager did not become available within the allowed time.") 55 | } 56 | 57 | // Helper function to handle unknown errors 58 | function handleError(error: unknown): void { 59 | if (error instanceof Error) { 60 | console.error("Error:", error.message) 61 | } else { 62 | console.error("Unknown error:", error) 63 | } 64 | } 65 | 66 | // Tracking was accepted 67 | async function callback(): Promise { 68 | try { 69 | const manager = await waitForKlaroManager() 70 | if (manager.confirmed) { 71 | let return_vals : Record = {} 72 | for (const service of klaroConfig.services) { 73 | return_vals[service.name] = manager.getConsent(service.name) 74 | } 75 | Streamlit.setComponentValue(return_vals) 76 | } 77 | } catch (error) { 78 | handleError(error) 79 | } 80 | } 81 | 82 | // Stores if the component has been rendered before 83 | let rendered = false 84 | 85 | function onRender(event: Event): void { 86 | // Klaro does not work if embedded multiple times 87 | if (rendered) { 88 | return 89 | } 90 | rendered = true 91 | 92 | const data = (event as CustomEvent).detail 93 | 94 | if (data.args['google_analytics']) { 95 | klaroConfig.services.push( 96 | { 97 | name: 'google-analytics', 98 | cookies: [ 99 | /^_ga(_.*)?/ // we delete the Google Analytics cookies if the user declines its use 100 | ], 101 | purposes: ['analytics'], 102 | onAccept: callback, 103 | onDecline: callback, 104 | } 105 | ) 106 | } 107 | if (data.args['piwik_pro']) { 108 | klaroConfig.services.push( 109 | { 110 | name: 'piwik-pro', 111 | 
purposes: ['analytics'], 112 | onAccept: callback, 113 | onDecline: callback, 114 | } 115 | ) 116 | } 117 | 118 | // Create a new script element 119 | var script = document.createElement('script') 120 | 121 | // Set the necessary attributes 122 | script.defer = true 123 | script.type = 'application/javascript' 124 | script.src = 'https://cdn.kiprotect.com/klaro/v0.7/klaro.js' 125 | 126 | // Set the klaro config 127 | script.setAttribute('data-config', 'klaroConfig') 128 | 129 | // Append the script to the head or body 130 | document.head.appendChild(script) 131 | 132 | } 133 | 134 | // Attach our `onRender` handler to Streamlit's render event. 135 | Streamlit.events.addEventListener(Streamlit.RENDER_EVENT, onRender) 136 | 137 | // Tell Streamlit we're ready to start receiving data. We won't get our 138 | // first RENDER_EVENT until we call this function. 139 | Streamlit.setComponentReady() 140 | 141 | // Finally, tell Streamlit to update the initial height. 142 | Streamlit.setFrameHeight(1000) 143 | 144 | -------------------------------------------------------------------------------- /gdpr_consent/webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = { 4 | entry: './src/main.ts', 5 | module: { 6 | rules: [ 7 | { 8 | test: /\.tsx?$/, 9 | use: 'ts-loader', 10 | exclude: /node_modules/, 11 | }, 12 | ], 13 | }, 14 | resolve: { 15 | extensions: ['.tsx', '.ts', '.js'], 16 | }, 17 | output: { 18 | filename: 'bundle.js', 19 | path: path.resolve(__dirname, 'dist'), 20 | }, 21 | mode: 'development', 22 | }; 23 | -------------------------------------------------------------------------------- /hooks/hook-analytics.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import streamlit as st 4 | 5 | def patch_head(document, content): 6 | return document.replace('', '' + content) 7 | 8 | def 
patch_body(document, content): 9 | return document.replace('', '' + content) 10 | 11 | def google_analytics_head(gtm_tag): 12 | return f""" 13 | 27 | 28 | 33 | 34 | """ 35 | 36 | def google_analytics_body(gtm_tag): 37 | return f""" 38 | 39 | 41 | 42 | """ 43 | 44 | def piwik_pro_body(piwik_tag): 45 | return f""" 46 | 56 | """ 57 | 58 | 59 | if __name__ == '__main__': 60 | 61 | # Load configuration 62 | settings_path = os.path.join(os.path.dirname(__file__), '..', 'settings.json') 63 | with open(settings_path, 'r') as f: 64 | settings = json.load(f) 65 | 66 | # Load index.html 67 | index_path = os.path.join(os.path.dirname(st.__file__), 'static', 'index.html') 68 | with open(index_path, 'r') as f: 69 | index = f.read() 70 | 71 | # Configure google analytics 72 | if settings['analytics']['google-analytics']['enabled']: 73 | gtm_tag = settings['analytics']['google-analytics']['tag'] 74 | index = patch_head(index, google_analytics_head(gtm_tag)) 75 | index = patch_body(index, google_analytics_body(gtm_tag)) 76 | 77 | # Configure piwik pro 78 | if settings['analytics']['piwik-pro']['enabled']: 79 | piwik_tag = settings['analytics']['piwik-pro']['tag'] 80 | index = patch_body(index, piwik_pro_body(piwik_tag)) 81 | 82 | # Save index.html 83 | with open(index_path, 'w') as f: 84 | f.write(index) -------------------------------------------------------------------------------- /hooks/hook-streamlit.py: -------------------------------------------------------------------------------- 1 | from PyInstaller.utils.hooks import copy_metadata 2 | 3 | datas = [] 4 | datas += copy_metadata("streamlit") 5 | datas += copy_metadata("pyopenms") 6 | datas += copy_metadata("captcha") 7 | datas += copy_metadata("pyarrow") 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the 
following command: 4 | # 5 | # pip-compile --output-file=requirements.txt pyproject.toml 6 | # 7 | altair==5.5.0 8 | # via streamlit 9 | attrs==25.3.0 10 | # via 11 | # jsonschema 12 | # referencing 13 | blinker==1.9.0 14 | # via streamlit 15 | cachetools==5.5.2 16 | # via streamlit 17 | captcha==0.7.1 18 | # via src (pyproject.toml) 19 | certifi==2025.1.31 20 | # via requests 21 | charset-normalizer==3.4.1 22 | # via requests 23 | click==8.1.8 24 | # via streamlit 25 | contourpy==1.3.1 26 | # via matplotlib 27 | cycler==0.12.1 28 | # via matplotlib 29 | fonttools==4.56.0 30 | # via matplotlib 31 | gitdb==4.0.12 32 | # via gitpython 33 | gitpython==3.1.44 34 | # via streamlit 35 | idna==3.10 36 | # via requests 37 | jinja2==3.1.6 38 | # via 39 | # altair 40 | # pydeck 41 | jsonschema==4.23.0 42 | # via altair 43 | jsonschema-specifications==2024.10.1 44 | # via jsonschema 45 | kiwisolver==1.4.8 46 | # via matplotlib 47 | markupsafe==3.0.2 48 | # via jinja2 49 | matplotlib==3.10.1 50 | # via pyopenms 51 | narwhals==1.32.0 52 | # via altair 53 | numpy==1.26.4 54 | # via 55 | # contourpy 56 | # matplotlib 57 | # pandas 58 | # pydeck 59 | # pyopenms 60 | # src (pyproject.toml) 61 | # streamlit 62 | packaging==24.2 63 | # via 64 | # altair 65 | # matplotlib 66 | # plotly 67 | # streamlit 68 | pandas==2.2.3 69 | # via 70 | # pyopenms 71 | # pyopenms-viz 72 | # streamlit 73 | pillow==11.1.0 74 | # via 75 | # captcha 76 | # matplotlib 77 | # streamlit 78 | plotly==5.22.0 79 | # via src (pyproject.toml) 80 | protobuf==5.29.4 81 | # via streamlit 82 | psutil==7.0.0 83 | # via src (pyproject.toml) 84 | pyarrow==19.0.1 85 | # via streamlit 86 | pydeck==0.9.1 87 | # via streamlit 88 | pyopenms==3.3.0 89 | # via src (pyproject.toml) 90 | pyopenms-viz==1.0.0 91 | # via src (pyproject.toml) 92 | pyparsing==3.2.3 93 | # via matplotlib 94 | python-dateutil==2.9.0.post0 95 | # via 96 | # matplotlib 97 | # pandas 98 | pytz==2025.2 99 | # via pandas 100 | referencing==0.36.2 101 | # 
via 102 | # jsonschema 103 | # jsonschema-specifications 104 | requests==2.32.3 105 | # via streamlit 106 | rpds-py==0.24.0 107 | # via 108 | # jsonschema 109 | # referencing 110 | six==1.17.0 111 | # via python-dateutil 112 | smmap==5.0.2 113 | # via gitdb 114 | streamlit==1.43.0 115 | # via 116 | # src (pyproject.toml) 117 | # streamlit-js-eval 118 | streamlit-js-eval==0.1.7 119 | # via src (pyproject.toml) 120 | tenacity==9.0.0 121 | # via 122 | # plotly 123 | # streamlit 124 | toml==0.10.2 125 | # via streamlit 126 | tornado==6.4.2 127 | # via streamlit 128 | typing-extensions==4.13.0 129 | # via 130 | # altair 131 | # referencing 132 | # streamlit 133 | tzdata==2025.2 134 | # via pandas 135 | urllib3==2.3.0 136 | # via requests 137 | watchdog==6.0.0 138 | # via streamlit 139 | -------------------------------------------------------------------------------- /run_app.py: -------------------------------------------------------------------------------- 1 | from streamlit.web import cli 2 | 3 | 4 | if __name__ == "__main__": 5 | cli._main_run_clExplicit( 6 | file="app.py", 7 | command_line="streamlit run" 8 | ) 9 | -------------------------------------------------------------------------------- /run_app_temp.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | 3 | 4 | block_cipher = None 5 | 6 | 7 | a = Analysis( 8 | ['run_app.py'], 9 | pathex=[], 10 | binaries=[], 11 | datas=[ 12 | ("./myenv/Lib/site-packages/altair/vegalite/v5/schema/vega-lite-schema.json","./altair/vegalite/v5/schema/"), 13 | ("./myenv/Lib/site-packages/streamlit/static", "./streamlit/static"), 14 | ("./myenv/Lib/site-packages/streamlit/runtime", "./streamlit/runtime"), 15 | ("./myenv/Lib/site-packages/pyopenms", "./pyopenms/"), 16 | ("./myenv/Lib/site-packages/captcha", "./captcha/"), 17 | ("./myenv/Lib/site-packages/pyarrow", "./pyarrow/"), 18 | ], 19 | hiddenimports=[], 20 | hookspath=['./hooks'], 21 | 
hooksconfig={}, 22 | runtime_hooks=[], 23 | excludes=[], 24 | win_no_prefer_redirects=False, 25 | win_private_assemblies=False, 26 | cipher=block_cipher, 27 | noarchive=False, 28 | ) 29 | pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher) 30 | 31 | exe = EXE( 32 | pyz, 33 | a.scripts, 34 | a.binaries, 35 | a.zipfiles, 36 | a.datas, 37 | [], 38 | name='run_app', 39 | debug=False, 40 | bootloader_ignore_signals=False, 41 | strip=False, 42 | upx=True, 43 | upx_exclude=[], 44 | runtime_tmpdir=None, 45 | console=True, 46 | disable_windowed_traceback=False, 47 | argv_emulation=False, 48 | target_arch=None, 49 | codesign_identity=None, 50 | entitlements_file=None, 51 | ) 52 | -------------------------------------------------------------------------------- /settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "app-name": "OpenMS WebApp Template", 3 | "github-user": "OpenMS", 4 | "version": "1.0.2", 5 | "repository-name": "streamlit-template", 6 | "analytics": { 7 | "google-analytics": { 8 | "enabled": false, 9 | "tag": "" 10 | }, 11 | "piwik-pro": { 12 | "enabled": true, 13 | "tag": "57690c44-d635-43b0-ab43-f8bd3064ca06" 14 | } 15 | }, 16 | "online_deployment": false, 17 | "enable_workspaces": true, 18 | "test": true, 19 | "workspaces_dir": ".." 20 | } -------------------------------------------------------------------------------- /src/Workflow.py: -------------------------------------------------------------------------------- 1 | import streamlit as st 2 | from src.workflow.WorkflowManager import WorkflowManager 3 | 4 | # for result section: 5 | from pathlib import Path 6 | import pandas as pd 7 | import plotly.express as px 8 | from src.common.common import show_fig 9 | 10 | 11 | class Workflow(WorkflowManager): 12 | # Setup pages for upload, parameter, execution and results. 13 | # For layout use any streamlit components such as tabs (as shown in example), columns, or even expanders. 
14 | def __init__(self) -> None: 15 | # Initialize the parent class with the workflow name. 16 | super().__init__("TOPP Workflow", st.session_state["workspace"]) 17 | 18 | def upload(self) -> None: 19 | t = st.tabs(["MS data"]) 20 | with t[0]: 21 | # Use the upload method from StreamlitUI to handle mzML file uploads. 22 | self.ui.upload_widget( 23 | key="mzML-files", 24 | name="MS data", 25 | file_types="mzML", 26 | fallback=[str(f) for f in Path("example-data", "mzML").glob("*.mzML")], 27 | ) 28 | 29 | @st.fragment 30 | def configure(self) -> None: 31 | # Allow users to select mzML files for the analysis. 32 | self.ui.select_input_file("mzML-files", multiple=True) 33 | 34 | # Create tabs for different analysis steps. 35 | t = st.tabs( 36 | ["**Feature Detection**", "**Feature Linking**", "**Python Custom Tool**"] 37 | ) 38 | with t[0]: 39 | # Parameters for FeatureFinderMetabo TOPP tool. 40 | self.ui.input_TOPP( 41 | "FeatureFinderMetabo", 42 | custom_defaults={"algorithm:common:noise_threshold_int": 1000.0}, 43 | ) 44 | with t[1]: 45 | # Parameters for MetaboliteAdductDecharger TOPP tool. 46 | self.ui.input_TOPP("FeatureLinkerUnlabeledKD") 47 | with t[2]: 48 | # A single checkbox widget for workflow logic. 49 | self.ui.input_widget("run-python-script", False, "Run custom Python script") 50 | # Generate input widgets for a custom Python tool, located at src/python-tools. 51 | # Parameters are specified within the file in the DEFAULTS dictionary. 52 | self.ui.input_python("example") 53 | 54 | def execution(self) -> None: 55 | # Any parameter checks, here simply checking if mzML files are selected 56 | if not self.params["mzML-files"]: 57 | self.logger.log("ERROR: No mzML files selected.") 58 | return 59 | 60 | # Get mzML files with FileManager 61 | in_mzML = self.file_manager.get_files(self.params["mzML-files"]) 62 | 63 | # Log any messages. 64 | self.logger.log(f"Number of input mzML files: {len(in_mzML)}") 65 | 66 | # Prepare output files for feature detection. 
67 | out_ffm = self.file_manager.get_files( 68 | in_mzML, "featureXML", "feature-detection" 69 | ) 70 | 71 | # Run FeatureFinderMetabo tool with input and output files. 72 | self.logger.log("Detecting features...") 73 | self.executor.run_topp( 74 | "FeatureFinderMetabo", input_output={"in": in_mzML, "out": out_ffm} 75 | ) 76 | 77 | # Prepare input and output files for feature linking 78 | in_fl = self.file_manager.get_files(out_ffm, collect=True) 79 | out_fl = self.file_manager.get_files( 80 | "feature_matrix.consensusXML", set_results_dir="feature-linking" 81 | ) 82 | 83 | # Run FeatureLinkerUnlabaeledKD with all feature maps passed at once 84 | self.logger.log("Linking features...") 85 | self.executor.run_topp( 86 | "FeatureLinkerUnlabeledKD", input_output={"in": in_fl, "out": out_fl} 87 | ) 88 | self.logger.log("Exporting consensus features to pandas DataFrame...") 89 | self.executor.run_python( 90 | "export_consensus_feature_df", input_output={"in": out_fl[0]} 91 | ) 92 | # Check if adduct detection should be run. 93 | if self.params["run-python-script"]: 94 | # Example for a custom Python tool, which is located in src/python-tools. 95 | self.executor.run_python("example", {"in": in_mzML}) 96 | 97 | @st.fragment 98 | def results(self) -> None: 99 | @st.fragment 100 | def show_consensus_features(): 101 | df = pd.read_csv(file, sep="\t", index_col=0) 102 | st.metric("number of consensus features", df.shape[0]) 103 | c1, c2 = st.columns(2) 104 | rows = c1.dataframe(df, selection_mode="multi-row", on_select="rerun")[ 105 | "selection" 106 | ]["rows"] 107 | if rows: 108 | df = df.iloc[rows, 4:] 109 | fig = px.bar(df, barmode="group", labels={"value": "intensity"}) 110 | with c2: 111 | show_fig(fig, "consensus-feature-intensities") 112 | else: 113 | st.info( 114 | "💡 Select one ore more rows in the table to show a barplot with intensities." 
115 | ) 116 | 117 | file = Path( 118 | self.workflow_dir, "results", "feature-linking", "feature_matrix.tsv" 119 | ) 120 | if file.exists(): 121 | show_consensus_features() 122 | else: 123 | st.warning("No consensus feature file found. Please run workflow first.") 124 | -------------------------------------------------------------------------------- /src/common/captcha_.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import streamlit as st 3 | import streamlit.components.v1 as st_components 4 | from streamlit.source_util import page_icon_and_name, calc_md5, get_pages, _on_pages_changed 5 | 6 | from captcha.image import ImageCaptcha 7 | 8 | import random 9 | import string 10 | import os 11 | 12 | 13 | def delete_all_pages(main_script_path_str: str) -> None: 14 | """ 15 | Delete all pages except the main page from an app's configuration. 16 | 17 | Args: 18 | main_script_path_str (str): The name of the main page, typically the app's name. 19 | 20 | Returns: 21 | None 22 | 23 | """ 24 | # Get all pages from the app's configuration 25 | current_pages = get_pages(main_script_path_str) 26 | 27 | # Create a list to store keys pages to delete 28 | keys_to_delete = [] 29 | 30 | # Iterate over all pages and add keys to delete list if the desired page is found 31 | for key, value in current_pages.items(): 32 | if value["page_name"] != main_script_path_str: 33 | keys_to_delete.append(key) 34 | 35 | # Delete the keys from current pages 36 | for key in keys_to_delete: 37 | del current_pages[key] 38 | 39 | # Refresh the pages configuration 40 | _on_pages_changed.send() 41 | 42 | 43 | def delete_page(main_script_path_str: str, page_name: str) -> None: 44 | """ 45 | Delete a specific page from an app's configuration. 46 | 47 | Args: 48 | main_script_path_str (str): The name of the main page, typically the app's name. 49 | page_name (str): The name of the page to be deleted. 
50 | 51 | Returns: 52 | None 53 | """ 54 | # Get all pages 55 | current_pages = get_pages(main_script_path_str) 56 | 57 | # Iterate over all pages and delete the desired page if found 58 | for key, value in current_pages.items(): 59 | if value["page_name"] == page_name: 60 | del current_pages[key] 61 | 62 | # Refresh the pages configuration 63 | _on_pages_changed.send() 64 | 65 | 66 | def restore_all_pages(main_script_path_str: str) -> None: 67 | """ 68 | restore all pages found in the "content" directory to an app's configuration. 69 | 70 | Args: 71 | main_script_path_str (str): The name of the main page, typically the app's name. 72 | 73 | Returns: 74 | None 75 | """ 76 | # Get all pages 77 | pages = get_pages(main_script_path_str) 78 | 79 | # Obtain the path to the main script 80 | main_script_path = Path(main_script_path_str) 81 | 82 | # Define the directory where pages are stored 83 | pages_dir = main_script_path.parent / "content" 84 | 85 | # To store the pages for later, to add in ascending order 86 | pages_temp = [] 87 | 88 | # Iterate over all .py files in the "content" directory 89 | for script_path in pages_dir.glob("*.py"): 90 | # append path with file name 91 | script_path_str = str(script_path.resolve()) 92 | 93 | # Calculate the MD5 hash of the script path 94 | psh = calc_md5(script_path_str) 95 | 96 | # Obtain the page icon and name 97 | pi, pn = page_icon_and_name(script_path) 98 | 99 | # Extract the index from the page name 100 | index = int(os.path.basename(script_path.stem).split("_")[0]) 101 | 102 | # Add the page data to the temporary list 103 | pages_temp.append( 104 | ( 105 | index, 106 | { 107 | "page_script_hash": psh, 108 | "page_name": pn, 109 | "icon": pi, 110 | "script_path": script_path_str, 111 | }, 112 | ) 113 | ) 114 | 115 | # Sort the pages_temp list by index in ascending order as defined in pages folder e-g 0_, 1_ etc 116 | pages_temp.sort(key=lambda x: x[0]) 117 | 118 | # Add pages 119 | for index, page_data in pages_temp: 120 | 
# Add the new page configuration 121 | pages[page_data["page_script_hash"]] = { 122 | "page_script_hash": page_data["page_script_hash"], 123 | "page_name": page_data["page_name"], 124 | "icon": page_data["icon"], 125 | "script_path": page_data["script_path"], 126 | } 127 | 128 | # Refresh the page configuration 129 | _on_pages_changed.send() 130 | 131 | 132 | def add_page(main_script_path_str: str, page_name: str) -> None: 133 | """ 134 | Add a new page to an app's configuration. 135 | 136 | Args: 137 | main_script_path_str (str): The name of the main page, typically the app's name. 138 | page_name (str): The name of the page to be added. 139 | 140 | Returns: 141 | None 142 | """ 143 | # Get all pages 144 | pages = get_pages(main_script_path_str) 145 | 146 | # Obtain the path to the main script 147 | main_script_path = Path(main_script_path_str) 148 | 149 | # Define the directory where pages are stored 150 | pages_dir = main_script_path.parent / "content" 151 | 152 | # Find the script path corresponding to the new page 153 | script_path = [f for f in pages_dir.glob("*.py") if f.name.find(page_name) != -1][0] 154 | script_path_str = str(script_path.resolve()) 155 | 156 | # Calculate the MD5 hash of the script path 157 | psh = calc_md5(script_path_str) 158 | 159 | # Obtain the page icon and name 160 | pi, pn = page_icon_and_name(script_path) 161 | 162 | # Add the new page configuration 163 | pages[psh] = { 164 | "page_script_hash": psh, 165 | "page_name": pn, 166 | "icon": pi, 167 | "script_path": script_path_str, 168 | } 169 | 170 | # Refresh the page configuration 171 | _on_pages_changed.send() 172 | 173 | 174 | length_captcha = 5 175 | width = 400 176 | height = 180 177 | 178 | 179 | # define the function for the captcha control 180 | def captcha_control(): 181 | """ 182 | Control and verification of a CAPTCHA to ensure the user is not a robot. 183 | 184 | This function implements CAPTCHA control to verify that the user is not a robot. 
185 | It displays a CAPTCHA image and prompts the user to enter the corresponding text. 186 | If the entered text matches the CAPTCHA, the control is set to True; otherwise, it remains False. 187 | 188 | If the CAPTCHA is incorrect, it is regenerated and the control state is set to False. 189 | This function also handles user interactions and reruns the Streamlit app accordingly. 190 | 191 | The CAPTCHA text is generated as a session state and should not change during refreshes. 192 | 193 | Returns: 194 | None 195 | """ 196 | # control if the captcha is correct 197 | if "controllo" not in st.session_state or st.session_state["controllo"] == False: 198 | 199 | # Check if consent for tracking was given 200 | ga = st.session_state.settings['analytics']['google-analytics']['enabled'] 201 | pp = st.session_state.settings['analytics']['piwik-pro']['enabled'] 202 | if (ga or pp) and (st.session_state.tracking_consent is None): 203 | consent_component = st_components.declare_component("gdpr_consent", path=Path("gdpr_consent")) 204 | with st.spinner(): 205 | # Ask for consent 206 | st.session_state.tracking_consent = consent_component( 207 | google_analytics=ga, piwik_pro=pp 208 | ) 209 | if st.session_state.tracking_consent is None: 210 | # No response by user yet 211 | st.stop() 212 | else: 213 | # Consent choice was made 214 | st.rerun() 215 | 216 | st.title("Make sure you are not a robot🤖") 217 | 218 | # define the session state for control if the captcha is correct 219 | st.session_state["controllo"] = False 220 | 221 | # define the session state for the captcha text because it doesn't change during refreshes 222 | if "Captcha" not in st.session_state: 223 | st.session_state["Captcha"] = "".join( 224 | random.choices(string.ascii_uppercase + string.digits, k=length_captcha) 225 | ).replace("0", "A").replace("O", "B") 226 | 227 | col1, _ = st.columns(2) 228 | with col1.form("captcha-form"): 229 | # setup the captcha widget 230 | st.info( 231 | "Please enter the captcha 
as text. Note: If your captcha is not accepted, you might need to disable your ad blocker." 232 | ) 233 | image = ImageCaptcha(width=width, height=height) 234 | data = image.generate(st.session_state["Captcha"]) 235 | st.image(data) 236 | c1, c2 = st.columns([70, 30]) 237 | capta2_text = st.empty() 238 | capta2_text = c1.text_input("Enter captcha text", max_chars=5) 239 | c2.markdown("##") 240 | if c2.form_submit_button("Verify the code", type="primary"): 241 | capta2_text = capta2_text.replace(" ", "") 242 | # if the captcha is correct, the controllo session state is set to True 243 | if st.session_state["Captcha"].lower() == capta2_text.lower().strip(): 244 | del st.session_state["Captcha"] 245 | col1.empty() 246 | st.session_state["controllo"] = True 247 | st.rerun() 248 | else: 249 | # if the captcha is wrong, the controllo session state is set to False and the captcha is regenerated 250 | st.error("🚨 Captch is wrong") 251 | del st.session_state["Captcha"] 252 | del st.session_state["controllo"] 253 | st.rerun() 254 | else: 255 | # wait for the button click 256 | st.stop() 257 | -------------------------------------------------------------------------------- /src/fileupload.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from pathlib import Path 3 | 4 | import streamlit as st 5 | 6 | from src.common.common import reset_directory 7 | 8 | 9 | @st.cache_data 10 | def save_uploaded_mzML(uploaded_files: list[bytes]) -> None: 11 | """ 12 | Saves uploaded mzML files to the mzML directory. 13 | 14 | Args: 15 | uploaded_files (List[bytes]): List of uploaded mzML files. 
16 | 17 | Returns: 18 | None 19 | """ 20 | mzML_dir = Path(st.session_state.workspace, "mzML-files") 21 | # A list of files is required, since online allows only single upload, create a list 22 | if st.session_state.location == "online": 23 | uploaded_files = [uploaded_files] 24 | # If no files are uploaded, exit early 25 | if not uploaded_files: 26 | st.warning("Upload some files first.") 27 | return 28 | # Write files from buffer to workspace mzML directory, add to selected files 29 | for f in uploaded_files: 30 | if f.name not in [f.name for f in mzML_dir.iterdir()] and f.name.endswith( 31 | "mzML" 32 | ): 33 | with open(Path(mzML_dir, f.name), "wb") as fh: 34 | fh.write(f.getbuffer()) 35 | st.success("Successfully added uploaded files!") 36 | 37 | 38 | def copy_local_mzML_files_from_directory(local_mzML_directory: str, make_copy: bool=True) -> None: 39 | """ 40 | Copies local mzML files from a specified directory to the mzML directory. 41 | 42 | Args: 43 | local_mzML_directory (str): Path to the directory containing the mzML files. 44 | make_copy (bool): Whether to make a copy of the files in the workspace. Default is True. If False, local file paths will be written to an external_files.txt file. 
45 | 46 | Returns: 47 | None 48 | """ 49 | mzML_dir = Path(st.session_state.workspace, "mzML-files") 50 | # Check if local directory contains mzML files, if not exit early 51 | if not any(Path(local_mzML_directory).glob("*.mzML")): 52 | st.warning("No mzML files found in specified folder.") 53 | return 54 | # Copy all mzML files to workspace mzML directory, add to selected files 55 | files = Path(local_mzML_directory).glob("*.mzML") 56 | for f in files: 57 | if make_copy: 58 | shutil.copy(f, Path(mzML_dir, f.name)) 59 | else: 60 | # Create a temporary file to store the path to the local directories 61 | external_files = Path(mzML_dir, "external_files.txt") 62 | # Check if the file exists, if not create it 63 | if not external_files.exists(): 64 | external_files.touch() 65 | # Write the path to the local directories to the file 66 | with open(external_files, "a") as f_handle: 67 | f_handle.write(f"{f}\n") 68 | 69 | st.success("Successfully added local files!") 70 | 71 | 72 | def load_example_mzML_files() -> None: 73 | """ 74 | Copies example mzML files to the mzML directory. 75 | 76 | Args: 77 | None 78 | 79 | Returns: 80 | None 81 | """ 82 | mzML_dir = Path(st.session_state.workspace, "mzML-files") 83 | # Copy files from example-data/mzML to workspace mzML directory, add to selected files 84 | for f in Path("example-data", "mzML").glob("*.mzML"): 85 | shutil.copy(f, mzML_dir) 86 | st.success("Example mzML files loaded!") 87 | 88 | 89 | def remove_selected_mzML_files(to_remove: list[str], params: dict) -> dict: 90 | """ 91 | Removes selected mzML files from the mzML directory. 92 | 93 | Args: 94 | to_remove (List[str]): List of mzML files to remove. 95 | params (dict): Parameters. 
def remove_all_mzML_files(params: dict) -> dict:
    """
    Removes all mzML files from the workspace mzML directory.

    Args:
        params (dict): Parameters.

    Returns:
        dict: Parameters with all mzML-related list entries emptied.
    """
    mzML_dir = Path(st.session_state.workspace, "mzML-files")
    # reset (delete and re-create) mzML directory in workspace
    reset_directory(mzML_dir)
    # reset all parameter items which have mzML in key and are list
    for k, v in params.items():
        if "mzML" in k and isinstance(v, list):
            params[k] = []
    st.success("All mzML files removed!")
    return params


# ---------------------------------------------------------------------------
# src/mzmlfileworkflow.py
# ---------------------------------------------------------------------------
import streamlit as st
from pathlib import Path
import pyopenms as poms
import pandas as pd
import time
from datetime import datetime
from src.common.common import reset_directory, show_fig, show_table
import plotly.express as px


def mzML_file_get_num_spectra(filepath):
    """
    Load an mzML file and return the number of spectra it contains.

    Loads the mzML file specified by `filepath` with pyOpenMS and then pauses
    for 2 seconds to simulate a heavy task before returning the spectrum count.

    Args:
        filepath (str): The path to the mzML file to be loaded and analyzed.

    Returns:
        int: The number of spectra present in the mzML file.
    """
    exp = poms.MSExperiment()
    poms.MzMLFile().load(filepath, exp)
    time.sleep(2)  # simulate a longer-running task
    return exp.size()


def run_workflow(params, result_dir):
    """Load each mzML file into a pyOpenMS experiment and record the number of spectra.

    Results are written to a time-stamped subdirectory of `result_dir` as
    'result.tsv' with columns 'filenames' and 'number of spectra'.
    """
    # each run gets its own time-stamped result directory
    result_dir = Path(result_dir, datetime.now().strftime("%Y-%m-%d %H_%M_%S"))
    # delete old workflow results and set new directory
    reset_directory(result_dir)

    # collect spectra numbers
    num_spectra = []

    # use st.status to print info while running the workflow
    with st.status(
        "Loading mzML files and getting number of spectra...", expanded=True
    ) as status:
        # get selected mzML files from parameters
        for file in params["example-workflow-selected-mzML-files"]:
            # logging file name in status
            st.write(f"Reading mzML file: {file} ...")

            # reading mzML file, getting num spectra and adding some extra time
            num_spectra.append(
                mzML_file_get_num_spectra(
                    str(
                        Path(
                            st.session_state["workspace"], "mzML-files", file + ".mzML"
                        )
                    )
                )
            )

        # set status as complete and collapse box
        status.update(label="Complete!", expanded=False)

    # create and save result dataframe
    df = pd.DataFrame(
        {
            "filenames": params["example-workflow-selected-mzML-files"],
            "number of spectra": num_spectra,
        }
    )
    df.to_csv(Path(result_dir, "result.tsv"), sep="\t", index=False)


@st.fragment
def result_section(result_dir):
    """Display results (table and bar plot) of a selected previous workflow run."""
    if not Path(result_dir).exists():
        st.error("No results to show yet. Please run a workflow first!")
        return

    date_strings = [f.name for f in Path(result_dir).iterdir() if f.is_dir()]

    # newest run first; directory names encode the run start time
    result_dirs = sorted(
        date_strings,
        key=lambda date: datetime.strptime(date, "%Y-%m-%d %H_%M_%S"),
        reverse=True,
    )

    run_dir = st.selectbox("select result from run", result_dirs)

    if run_dir is None:
        st.error("Please select a result from a run!")
        return

    result_dir = Path(result_dir, run_dir)
    # visualize workflow results if there are any
    result_file_path = Path(result_dir, "result.tsv")

    if result_file_path.exists():
        df = pd.read_csv(result_file_path, sep="\t", index_col="filenames")

        if not df.empty:
            tabs = st.tabs(["📁 data", "📊 plot"])

            with tabs[0]:
                show_table(df, "mzML-workflow-result")

            with tabs[1]:
                fig = px.bar(df)
                st.info(
                    "💡 Download figure with camera icon in top right corner. File format can be specified in settings."
                )
                show_fig(fig, "mzML-workflow-results")
# ---------------------------------------------------------------------------
# src/python-tools/example.py
# ---------------------------------------------------------------------------
import json
import sys

############################
# default parameter values #
############################
#
# Mandatory keys for each parameter
# key: a unique identifier
# value: the default value
#
# Optional keys for each parameter
# name: the name of the parameter
# hide: don't show the parameter in the parameter section (e.g. for input/output files)
# options: a list of valid options for the parameter
# min: the minimum value for the parameter (int and float)
# max: the maximum value for the parameter (int and float)
# step_size: the step size for the parameter (int and float)
# help: a description of the parameter
# widget_type: the type of widget to use for the parameter (default: auto)
# advanced: whether or not the parameter is advanced (default: False)

DEFAULTS = [
    {"key": "in", "value": [], "help": "Input files for Python Script.", "hide": True},
    {"key": "out", "value": [], "help": "Output files for Python Script.", "hide": True},
    {
        "key": "number-slider",
        "name": "number of features",
        "value": 6,
        "min": 2,
        "max": 10,
        "help": "How many features to consider.",
        "widget_type": "slider",
        "step_size": 2,
    },
    {
        "key": "selectbox-example",
        "name": "select something",
        "value": "a",
        "options": ["a", "b", "c"],
    },
    {
        # NOTE(review): key spelling "adavanced-input" is kept as-is; renaming it
        # would break previously saved parameter files.
        "key": "adavanced-input",
        "name": "advanced parameter",
        "value": 5,
        "step_size": 5,
        "help": "An advanced example parameter.",
        "advanced": True,
    },
    {"key": "checkbox", "value": True, "name": "boolean"},
]


def get_params() -> dict:
    """
    Load tool parameters from the JSON file given as first command line argument.

    Returns:
        dict: Parameters parsed from the JSON file, or an empty dict if no file
        path was passed on the command line.
    """
    if len(sys.argv) > 1:
        with open(sys.argv[1], "r") as f:
            return json.load(f)
    return {}


if __name__ == "__main__":
    params = get_params()
    # Add code here:
    print("Writing stdout which will get logged...")
    print("Parameters for this example Python tool:")
    print(json.dumps(params, indent=4))
# ---------------------------------------------------------------------------
# src/python-tools/export_consensus_feature_df.py
# ---------------------------------------------------------------------------
import json
import sys
from pyopenms import ConsensusXMLFile, ConsensusMap
from pathlib import Path

############################
# default parameter values #
############################
#
# Mandatory keys for each parameter
# key: a unique identifier
# value: the default value
#
# Optional keys for each parameter
# name: the name of the parameter
# hide: don't show the parameter in the parameter section (e.g. for input/output files)
# options: a list of valid options for the parameter
# min: the minimum value for the parameter (int and float)
# max: the maximum value for the parameter (int and float)
# step_size: the step size for the parameter (int and float)
# help: a description of the parameter
# widget_type: the type of widget to use for the parameter (default: auto)
# advanced: whether or not the parameter is advanced (default: False)

DEFAULTS = [
    {"key": "in", "value": "", "help": "Input consensusXML file.", "hide": True},
]


def get_params() -> dict:
    """Load tool parameters from the JSON file given as first command line argument."""
    if len(sys.argv) > 1:
        with open(sys.argv[1], "r") as f:
            return json.load(f)
    return {}


if __name__ == "__main__":
    params = get_params()
    # Load the consensus map and export a tidy feature table next to the input file.
    cm = ConsensusMap()
    ConsensusXMLFile().load(params["in"], cm)
    df = cm.get_df()
    # column names are full mzML paths; keep only the file names
    df = df.rename(columns={col: Path(col).name for col in df.columns})
    df = df.reset_index()
    df = df.drop(columns=["id", "sequence"])
    # human-readable metabolite identifier in the form "mz@RT"
    df.insert(
        0,
        "metabolite",
        df.apply(lambda x: f"{round(x['mz'], 4)}@{round(x['RT'], 2)}", axis=1),
    )
    df.to_csv(Path(params["in"]).with_suffix(".tsv"), sep="\t", index=False)


# ---------------------------------------------------------------------------
# src/run_subprocess.py
# ---------------------------------------------------------------------------
import streamlit as st
import subprocess


def run_subprocess(args: list[str], result_dict: dict) -> None:
    """
    Run a subprocess, stream its standard output to the Streamlit page and
    capture a log of the run.

    Args:
        args (list[str]): The command and its arguments as a list of strings.
        result_dict (dict): A dictionary to store the success status (bool, key
            "success") and the captured log (str, key "log").

    Returns:
        None
    """
    import threading

    # Run the subprocess and capture its output
    process = subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True
    )

    # Lists to store the captured standard output and standard error
    stdout_ = []
    stderr_ = []

    # Drain stderr on a background thread while stdout is streamed below.
    # Reading the two pipes strictly one after another can deadlock when the
    # child fills the stderr pipe buffer while we are still blocked on stdout.
    def _collect_stderr():
        for line in process.stderr:
            line = line.strip()
            if line:
                stderr_.append(line)

    stderr_thread = threading.Thread(target=_collect_stderr)
    stderr_thread.start()

    # Capture the standard output of the subprocess
    while True:
        output = process.stdout.readline()
        if output == "" and process.poll() is not None:
            break
        if output:
            # Print every line of standard output on the Streamlit page
            st.text(output.strip())
            # Append the line to store in the log
            stdout_.append(output.strip())

    stderr_thread.join()
    process.wait()

    # Print captured standard error on the Streamlit page, marking it as error
    for line in stderr_:
        st.error(line)

    # Check if the subprocess ran successfully (return code 0)
    if process.returncode == 0:
        result_dict["success"] = True
        # Save all lines from standard output to the log
        result_dict["log"] = " ".join(stdout_)
    else:
        result_dict["success"] = False
        # Save all lines from standard error to the log
        result_dict["log"] = " ".join(stderr_)
# ---------------------------------------------------------------------------
# src/simpleworkflow.py
# ---------------------------------------------------------------------------
import time

import numpy as np
import pandas as pd
import streamlit as st


@st.cache_data
def generate_random_table(x, y):
    """Example for a cached table: random (x, y) DataFrame with a simulated delay."""
    df = pd.DataFrame(np.random.randn(x, y))
    time.sleep(2)  # simulate an expensive computation (result is cached)
    return df


# ---------------------------------------------------------------------------
# src/view.py
# ---------------------------------------------------------------------------
import numpy as np
import pandas as pd
from pathlib import Path
import plotly.express as px
import plotly.graph_objects as go
import streamlit as st
import pyopenms as poms
from src.common.common import show_fig, display_large_dataframe
from typing import Union


def get_df(file: Union[str, Path]) -> None:
    """
    Load a Mass Spectrometry (MS) experiment from a given mzML file and store
    dataframe representations of it in Streamlit session state.

    Note: this function does not return the data. It populates session state:
    "view_spectra" (per-spectrum DataFrame with MS level, precursor m/z, peak
    arrays and max intensity m/z), and "view_ms1"/"view_ms2" (long-format peak
    tables with columns RT, mz, inty for MS1 and MS2 spectra, respectively).

    Args:
        file (Union[str, Path]): The path to the mzML file to load.

    Returns:
        None
    """
    exp = poms.MSExperiment()
    poms.MzMLFile().load(str(file), exp)
    df_spectra = exp.get_df()
    df_spectra["MS level"] = [spec.getMSLevel() for spec in exp]
    # precursor m/z of the first precursor, NaN for spectra without precursors
    precs = []
    for spec in exp:
        p = spec.getPrecursors()
        if p:
            precs.append(p[0].getMZ())
        else:
            precs.append(np.nan)
    df_spectra["precursor m/z"] = precs

    # Drop spectra without peaks
    df_spectra = df_spectra[df_spectra["mzarray"].apply(lambda x: len(x) > 0)]

    df_spectra["max intensity m/z"] = df_spectra.apply(
        lambda x: x["mzarray"][x["intarray"].argmax()], axis=1
    )
    if not df_spectra.empty:
        st.session_state["view_spectra"] = df_spectra
    else:
        st.session_state["view_spectra"] = pd.DataFrame()
    # split the experiment into MS1 and MS2 spectra for separate views
    exp_ms2 = poms.MSExperiment()
    exp_ms1 = poms.MSExperiment()
    for spec in exp:
        if spec.getMSLevel() == 1:
            exp_ms1.addSpectrum(spec)
        elif spec.getMSLevel() == 2:
            exp_ms2.addSpectrum(spec)
    if not exp_ms1.empty():
        st.session_state["view_ms1"] = exp_ms1.get_df(long=True)
    else:
        st.session_state["view_ms1"] = pd.DataFrame(columns=["RT", "mz", "inty"])
    if not exp_ms2.empty():
        st.session_state["view_ms2"] = exp_ms2.get_df(long=True)
    else:
        st.session_state["view_ms2"] = pd.DataFrame(columns=["RT", "mz", "inty"])
def _plot_chromatogram(df, color: str):
    """Plot a single chromatogram trace (TIC/BPC/XIC) with the ms_plotly backend."""
    return df.plot(
        backend="ms_plotly",
        kind="chromatogram",
        x="RT",
        y="inty",
        by="type",
        color=color,
        show_plot=False,
        grid=False,
        aggregate_duplicates=True,
    )


def plot_bpc_tic() -> go.Figure:
    """Plot the base peak chromatogram (BPC), total ion chromatogram (TIC) and
    optionally an extracted ion chromatogram (EIC/XIC), depending on the
    corresponding checkboxes in session state.

    Returns:
        A plotly Figure object containing the chromatogram plot.
    """
    # NOTE(review): each enabled branch below replaces `fig` entirely; if several
    # boxes are checked only the figure from the last drawn chromatogram is kept.
    # Confirm this is the intended behavior of the ms_plotly backend.
    fig = go.Figure()
    max_int = 0
    if st.session_state.view_tic:
        # TIC: sum of all intensities per retention time
        df = st.session_state.view_ms1.groupby("RT").sum().reset_index()
        df["type"] = "TIC"
        if df["inty"].max() > max_int:
            max_int = df["inty"].max()
        fig = _plot_chromatogram(df, "#f24c5c")
    if st.session_state.view_bpc:
        # BPC: maximum intensity per retention time
        df = st.session_state.view_ms1.groupby("RT").max().reset_index()
        df["type"] = "BPC"
        if df["inty"].max() > max_int:
            max_int = df["inty"].max()
        fig = _plot_chromatogram(df, "#2d3a9d")
    if st.session_state.view_eic:
        df = st.session_state.view_ms1
        # accept both comma and dot as decimal separator
        target_value = st.session_state.view_eic_mz.strip().replace(",", ".")
        try:
            target_value = float(target_value)
            ppm_tolerance = st.session_state.view_eic_ppm
            tolerance = (target_value * ppm_tolerance) / 1e6

            # Filter the DataFrame for peaks within the m/z tolerance window
            df_eic = df[
                (df["mz"] >= target_value - tolerance)
                & (df["mz"] <= target_value + tolerance)
            ].copy()
            if not df_eic.empty:
                df_eic.loc[:, "type"] = "XIC"
                if df_eic["inty"].max() > max_int:
                    max_int = df_eic["inty"].max()
                fig = _plot_chromatogram(df_eic, "#f6bf26")
        except ValueError:
            st.error("Invalid m/z value for XIC provided. Please enter a valid number.")

    fig.update_yaxes(range=[0, max_int])
    fig.update_layout(
        title=f"{st.session_state.view_selected_file}",
        xaxis_title="retention time (s)",
        yaxis_title="intensity",
        plot_bgcolor="rgb(255,255,255)",
        height=500,
    )
    fig.layout.template = "plotly_white"
    return fig


@st.cache_resource
def plot_ms_spectrum(df, title, bin_peaks, num_x_bins):
    """Plot a single MS spectrum (m/z vs. intensity), optionally with binned peaks."""
    fig = df.plot(
        kind="spectrum",
        backend="ms_plotly",
        x="mz",
        y="intensity",
        color="#2d3a9d",
        title=title,
        show_plot=False,
        grid=False,
        bin_peaks=bin_peaks,
        num_x_bins=num_x_bins,
        aggregate_duplicates=True,
    )
    fig.update_layout(
        template="plotly_white", dragmode="select", plot_bgcolor="rgb(255,255,255)"
    )
    return fig


@st.fragment
def view_peak_map():
    """Show an interactive 2D peak map of MS1 data; a rectangular selection
    zooms in and, for small enough selections, adds a 3D peak map."""
    df = st.session_state.view_ms1
    if "view_peak_map_selection" in st.session_state:
        box = st.session_state.view_peak_map_selection.selection.box
        if box:
            # restrict view to the previously selected RT / m/z window
            df = st.session_state.view_ms1.copy()
            df = df[df["RT"] > box[0]["x"][0]]
            df = df[df["mz"] > box[0]["y"][1]]
            df = df[df["mz"] < box[0]["y"][0]]
            df = df[df["RT"] < box[0]["x"][1]]
    if len(df) == 0:
        return
    peak_map = df.plot(
        kind="peakmap",
        x="RT",
        y="mz",
        z="inty",
        title=st.session_state.view_selected_file,
        grid=False,
        show_plot=False,
        bin_peaks=True,
        backend="ms_plotly",
        aggregate_duplicates=True,
    )
    peak_map.update_layout(template="simple_white", dragmode="select")
    c1, c2 = st.columns(2)
    with c1:
        st.info(
            "💡 Zoom in via rectangular selection for more details and 3D plot. Double click plot to zoom back out."
        )
        show_fig(
            peak_map,
            f"peak_map_{st.session_state.view_selected_file}",
            selection_session_state_key="view_peak_map_selection",
        )
    with c2:
        # 3D rendering is only feasible for small selections
        if df.shape[0] < 2500:
            peak_map_3D = df.plot(
                kind="peakmap",
                plot_3d=True,
                backend="ms_plotly",
                x="RT",
                y="mz",
                z="inty",
                zlabel="Intensity",
                title="",
                show_plot=False,
                grid=False,
                bin_peaks=st.session_state.spectrum_bin_peaks,
                num_x_bins=st.session_state.spectrum_num_bins,
                height=650,
                width=900,
                aggregate_duplicates=True,
            )
            st.plotly_chart(peak_map_3D, use_container_width=True)
@st.fragment
def view_spectrum():
    """Show the spectrum table next to a plot of the selected spectrum.

    A rectangular selection in the plot restricts the displayed peaks to the
    selected m/z window; an empty selection clears the zoom and reruns.
    """
    cols = st.columns([0.34, 0.66])
    with cols[0]:
        df = st.session_state.view_spectra.copy()
        # NOTE(review): spectrum IDs are derived from the (possibly
        # non-contiguous) DataFrame index — confirm this matches the positional
        # index used for iloc below.
        df["spectrum ID"] = df.index + 1
        index = display_large_dataframe(
            df,
            column_order=[
                "spectrum ID",
                "RT",
                "MS level",
                "max intensity m/z",
                "precursor m/z",
            ],
            selection_mode="single-row",
            on_select="rerun",
            use_container_width=True,
            hide_index=True,
        )
    with cols[1]:
        if (index is not None) and (len(df) != 0):
            df = st.session_state.view_spectra.iloc[index]
            if "view_spectrum_selection" in st.session_state:
                box = st.session_state.view_spectrum_selection.selection.box
                if box:
                    # zoom to the selected m/z window
                    mz_min, mz_max = sorted(box[0]["x"])
                    mask = (df["mzarray"] > mz_min) & (df["mzarray"] < mz_max)
                    df["intarray"] = df["intarray"][mask]
                    df["mzarray"] = df["mzarray"][mask]

            if df["mzarray"].size > 0:
                title = f"{st.session_state.view_selected_file} spec={index+1} mslevel={df['MS level']}"
                if df["precursor m/z"] > 0:
                    title += f" precursor m/z: {round(df['precursor m/z'], 4)}"

                df_selected = pd.DataFrame(
                    {
                        "mz": df["mzarray"],
                        "intensity": df["intarray"],
                    }
                )
                df_selected["RT"] = df["RT"]
                df_selected["MS level"] = df["MS level"]
                df_selected["precursor m/z"] = df["precursor m/z"]
                df_selected["max intensity m/z"] = df["max intensity m/z"]

                fig = plot_ms_spectrum(
                    df_selected,
                    title,
                    st.session_state.spectrum_bin_peaks,
                    st.session_state.spectrum_num_bins,
                )

                show_fig(fig, title.replace(" ", "_"), True, "view_spectrum_selection")
            else:
                # empty selection: clear the zoom state and redraw
                st.session_state.pop("view_spectrum_selection")
                st.rerun()
        else:
            st.info("💡 Select rows in the spectrum table to display plot.")


@st.fragment
def view_bpc_tic():
    """Render the TIC/BPC/XIC controls and the resulting chromatogram plot."""
    cols = st.columns(5)
    cols[0].checkbox(
        "Total Ion Chromatogram (TIC)", True, key="view_tic", help="Plot TIC."
    )
    cols[1].checkbox(
        "Base Peak Chromatogram (BPC)", True, key="view_bpc", help="Plot BPC."
    )
    cols[2].checkbox(
        "Extracted Ion Chromatogram (EIC/XIC)",
        True,
        key="view_eic",
        help="Plot extracted ion chromatogram with specified m/z.",
    )
    cols[3].text_input(
        "XIC m/z",
        "235.1189",
        help="m/z for XIC calculation.",
        key="view_eic_mz",
    )
    cols[4].number_input(
        "XIC ppm tolerance",
        0.1,
        50.0,
        10.0,
        1.0,
        help="Tolerance for XIC calculation (ppm).",
        key="view_eic_ppm",
    )
    fig = plot_bpc_tic()
    show_fig(fig, f"BPC-TIC-{st.session_state.view_selected_file}")
from pathlib import Path
import string
import random
import shutil
from typing import Union, List


class FileManager:
    """
    Manages file paths for operations such as changing file extensions, organizing files
    into result directories, and handling file collections for processing tools. Designed
    to be flexible for handling both individual files and lists of files, with integration
    into a Streamlit workflow.

    Methods:
        get_files: Returns a list of file paths as strings for the specified files,
            optionally with new file type and results subdirectory.
    """

    def __init__(
        self,
        workflow_dir: Path,
    ):
        """
        Initializes the FileManager object with the current workflow directory.

        Args:
            workflow_dir (Path): Directory of the current workflow; result
                subdirectories are created under its "results" folder.
        """
        self.workflow_dir = workflow_dir

    def get_files(
        self,
        files: Union[List[Union[str, Path]], Path, str, List[List[str]]],
        set_file_type: str = None,
        set_results_dir: str = None,
        collect: bool = False,
    ) -> Union[List[str], List[List[str]]]:
        """
        Returns a list of file paths as strings for the specified files.
        Optionally sets or changes the file extension for all files to the
        specified file type and changes the directory to a new subdirectory
        in the workflow results directory.

        Args:
            files (Union[List[Union[str, Path]], Path, str, List[List[str]]]): The
                file paths to process: a single path, a list of paths, a directory
                (expanded to its contents), or an already collected list of lists.
            set_file_type (str): The file extension to set for all files.
            set_results_dir (str): The name of a subdirectory in the workflow
                results directory to change to. If "auto" or "" a random name will
                be generated.
            collect (bool): Whether to collect all files into a single list. Will
                return a list with a single entry, which is a list of all files.
                Useful to pass to tools which can handle multiple input files at once.

        Returns:
            Union[List[str], List[List[str]]]: The (modified) files list.

        Raises:
            ValueError: If no files remain after normalizing the input.
        """
        # Handle input single string
        if isinstance(files, str):
            files = [files]
        # Handle input single Path object, can be directory or file
        elif isinstance(files, Path):
            if files.is_dir():
                files = [str(f) for f in files.iterdir()]
            else:
                files = [str(files)]
        # Handle input list
        elif isinstance(files, list) and files:
            # Can have one entry of strings (e.g. if has been collected before by FileManager)
            if isinstance(files[0], list):
                files = files[0]
        # Make sure every file path is a string
        files = [str(f) for f in files if isinstance(f, (str, Path))]
        # Raise error if no files have been detected
        if not files:
            raise ValueError(
                f"No files found, can not set file type **{set_file_type}**, results_dir **{set_results_dir}** and collect **{collect}**."
            )
        # Set new file type if required
        if set_file_type is not None:
            files = self._set_type(files, set_file_type)
        # Set new results subdirectory if required
        if set_results_dir is not None:
            if set_results_dir == "auto":
                set_results_dir = ""
            files = self._set_dir(files, set_results_dir)
        # Collect files into a single list if required
        if collect:
            files = [files]
        return files

    def _set_type(self, files: List[str], set_file_type: str) -> List[str]:
        """
        Sets or changes the file extension for all files in the collection to the
        specified file type.

        Args:
            files (List[str]): The list of file paths to change the type for.
            set_file_type (str): The file extension to set for all files.

        Returns:
            List[str]: The files list with new type.
        """

        def change_extension(file_path, new_ext):
            return Path(file_path).with_suffix("." + new_ext)

        for i in range(len(files)):
            if isinstance(files[i], list):  # If the item is a list
                files[i] = [
                    str(change_extension(file, set_file_type)) for file in files[i]
                ]
            elif isinstance(files[i], str):  # If the item is a string
                files[i] = str(change_extension(files[i], set_file_type))
        return files

    def _set_dir(self, files: List[str], subdir_name: str) -> List[str]:
        """
        Sets the subdirectory within the results directory to store files. If the
        subdirectory name is empty, a random subdirectory name is generated.

        Args:
            files (List[str]): The list of file paths to change the directory for.
            subdir_name (str): The name of the subdirectory within the results directory.

        Returns:
            List[str]: The files list with new directory.
        """
        # Empty and explicit names go through the same helper (which
        # auto-generates a name for empty input); the original duplicated this
        # call in an if/else with identical branches.
        subdir_name = self._create_results_sub_dir(subdir_name)

        def change_subdir(file_path, subdir):
            return Path(subdir, Path(file_path).name)

        for i in range(len(files)):
            if isinstance(files[i], list):  # If the item is a list
                files[i] = [str(change_subdir(file, subdir_name)) for file in files[i]]
            elif isinstance(files[i], str):  # If the item is a string
                files[i] = str(change_subdir(files[i], subdir_name))
        return files

    def _generate_random_code(self, length: int) -> str:
        """Generate a random alphanumeric code of the specified length.

        Args:
            length (int): Length of the random code.

        Returns:
            str: Random code of the specified length.
        """
        # Letters and digits are both allowed in generated directory names
        characters = string.ascii_letters + string.digits
        return "".join(random.choice(characters) for _ in range(length))

    def _create_results_sub_dir(self, name: str = "") -> str:
        """
        Creates a subdirectory within the results directory for storing files. If the
        name is not specified or empty, generates a random unused name.

        Args:
            name (str, optional): The desired name for the subdirectory.

        Returns:
            str: The path to the created subdirectory as a string.
        """
        # create a directory (e.g. for results of a TOPP tool) within the results directory
        # if name is empty string, auto generate a name that does not exist yet
        if not name:
            name = self._generate_random_code(4)
            while Path(self.workflow_dir, "results", name).exists():
                name = self._generate_random_code(4)
        path = Path(self.workflow_dir, "results", name)
        # parents=True also creates the "results" directory on first use
        path.mkdir(parents=True, exist_ok=True)
        return str(path)
# ---------------------------------------------------------------------------
# src/workflow/Logger.py
# ---------------------------------------------------------------------------
from pathlib import Path


class Logger:
    """
    A simple logging class for writing messages to log files. This class is designed
    to append messages to log files in the current workflow directory, facilitating
    easy tracking of events, errors, or other significant occurrences in processes
    called during workflow execution.

    Attributes:
        workflow_dir (Path): The workflow directory; log files are written to its
            "logs" subdirectory.
    """

    def __init__(self, workflow_dir: Path) -> None:
        self.workflow_dir = workflow_dir

    def log(self, message: str, level: int = 0) -> None:
        """
        Appends a given message to the log file(s), followed by two newline characters
        for readability.

        Messages cascade by importance: level 0 goes to "minimal.log",
        "commands-and-run-times.log" and "all.log"; level 1 to the latter two;
        level 2 only to "all.log".

        Args:
            message (str): The message to be logged to the file.
            level (int, optional): The level of importance of the message. Defaults to 0.
        """
        log_dir = Path(self.workflow_dir, "logs")
        # parents/exist_ok make this safe even on the very first log call
        log_dir.mkdir(parents=True, exist_ok=True)
        # Write the message to each log file matching the level.
        if level == 0:
            with open(Path(log_dir, "minimal.log"), "a", encoding="utf-8") as f:
                f.write(f"{message}\n\n")
        if level <= 1:
            with open(Path(log_dir, "commands-and-run-times.log"), "a", encoding="utf-8") as f:
                f.write(f"{message}\n\n")
        if level <= 2:
            with open(Path(log_dir, "all.log"), "a", encoding="utf-8") as f:
                f.write(f"{message}\n\n")


# ---------------------------------------------------------------------------
# src/workflow/ParameterManager.py
# ---------------------------------------------------------------------------
import pyopenms as poms
import json
import shutil
import streamlit as st
from pathlib import Path


class ParameterManager:
    """
    Manages the parameters for a workflow, including saving parameters to a JSON file,
    loading parameters from the file, and resetting parameters to defaults. This class
    specifically handles parameters related to TOPP tools in a pyOpenMS context and
    general parameters stored in Streamlit's session state.

    Attributes:
        ini_dir (Path): Directory path where .ini files for TOPP tools are stored.
        params_file (Path): Path to the JSON file where parameters are saved.
        param_prefix (str): Prefix for general parameter keys in Streamlit's session state.
        topp_param_prefix (str): Prefix for TOPP tool parameter keys in Streamlit's session state.
    """

    # Methods related to parameter handling
    def __init__(self, workflow_dir: Path):
        self.ini_dir = Path(workflow_dir, "ini")
        self.ini_dir.mkdir(parents=True, exist_ok=True)
        self.params_file = Path(workflow_dir, "params.json")
        self.param_prefix = f"{workflow_dir.stem}-param-"
        self.topp_param_prefix = f"{workflow_dir.stem}-TOPP-"

    def save_parameters(self) -> None:
        """
        Saves the current parameters from Streamlit's session state to a JSON file.
        It handles both general parameters and parameters specific to TOPP tools,
        ensuring that only non-default values are stored.
        """
        # Everything in session state which begins with self.param_prefix is saved to a json file
        json_params = {
            k.replace(self.param_prefix, ""): v
            for k, v in st.session_state.items()
            if k.startswith(self.param_prefix)
        }

        # Merge with parameters from json
        # Advanced parameters are only in session state if the view is active
        json_params = self.get_parameters_from_json() | json_params

        # get a list of TOPP tools which are in session state
        current_topp_tools = list(
            set(
                [
                    k.replace(self.topp_param_prefix, "").split(":1:")[0]
                    for k in st.session_state.keys()
                    if k.startswith(f"{self.topp_param_prefix}")
                ]
            )
        )
        # for each TOPP tool, open the ini file
        for tool in current_topp_tools:
            if tool not in json_params:
                json_params[tool] = {}
            # load the param object
            param = poms.Param()
            poms.ParamXMLFile().load(str(Path(self.ini_dir, f"{tool}.ini")), param)
            # get all session state param keys and values for this tool
            for key, value in st.session_state.items():
                if key.startswith(f"{self.topp_param_prefix}{tool}:1:"):
                    # get ini_key
                    ini_key = key.replace(self.topp_param_prefix, "").encode()
                    # get ini (default) value by ini_key
                    ini_value = param.getValue(ini_key)
                    # store the value only if it differs from the ini default,
                    # or if a non-default value was already saved previously
                    if (ini_value != value) or (
                        key.split(":1:")[1] in json_params[tool]
                    ):
                        json_params[tool][key.split(":1:")[1]] = value
        # Save to json file
        with open(self.params_file, "w", encoding="utf-8") as f:
            json.dump(json_params, f, indent=4)

    def get_parameters_from_json(self) -> dict:
        """
        Loads parameters from the JSON file if it exists and returns them as a dictionary.
        If the file does not exist or is invalid, returns an empty dictionary.

        Returns:
            dict: A dictionary containing the loaded parameters. Keys are parameter
            names, and values are parameter values.
        """
        # Check if parameter file exists
        if not Path(self.params_file).exists():
            return {}
        # Load parameters from json file; a broad Exception (instead of a bare
        # except) avoids swallowing KeyboardInterrupt/SystemExit while still
        # recovering from corrupt or unreadable parameter files.
        try:
            with open(self.params_file, "r", encoding="utf-8") as f:
                return json.load(f)
        except Exception:
            st.error("**ERROR**: Attempting to load an invalid JSON parameter file. Reset to defaults.")
            return {}

    def reset_to_default_parameters(self) -> None:
        """
        Resets the parameters to their default values by deleting the custom parameters
        JSON file.
        """
        # Delete custom params json file
        self.params_file.unlink(missing_ok=True)
Keys are parameter names, 87 | and values are parameter values. 88 | """ 89 | # Check if parameter file exists 90 | if not Path(self.params_file).exists(): 91 | return {} 92 | else: 93 | # Load parameters from json file 94 | try: 95 | with open(self.params_file, "r", encoding="utf-8") as f: 96 | return json.load(f) 97 | except: 98 | st.error("**ERROR**: Attempting to load an invalid JSON parameter file. Reset to defaults.") 99 | return {} 100 | 101 | def reset_to_default_parameters(self) -> None: 102 | """ 103 | Resets the parameters to their default values by deleting the custom parameters 104 | JSON file. 105 | """ 106 | # Delete custom params json file 107 | self.params_file.unlink(missing_ok=True) -------------------------------------------------------------------------------- /src/workflow/WorkflowManager.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from .Logger import Logger 3 | from .ParameterManager import ParameterManager 4 | from .CommandExecutor import CommandExecutor 5 | from .StreamlitUI import StreamlitUI 6 | from .FileManager import FileManager 7 | import multiprocessing 8 | import streamlit as st 9 | import shutil 10 | import time 11 | 12 | class WorkflowManager: 13 | # Core workflow logic using the above classes 14 | def __init__(self, name: str, workspace: str): 15 | self.name = name 16 | self.workflow_dir = Path(workspace, name.replace(" ", "-").lower()) 17 | self.file_manager = FileManager(self.workflow_dir) 18 | self.logger = Logger(self.workflow_dir) 19 | self.parameter_manager = ParameterManager(self.workflow_dir) 20 | self.executor = CommandExecutor(self.workflow_dir, self.logger, self.parameter_manager) 21 | self.ui = StreamlitUI(self.workflow_dir, self.logger, self.executor, self.parameter_manager) 22 | self.params = self.parameter_manager.get_parameters_from_json() 23 | 24 | def start_workflow(self) -> None: 25 | """ 26 | Starts the workflow process and adds its 
        process id to the pid directory.
        The workflow itself needs to be a process, otherwise streamlit will wait for everything to finish before updating the UI again.
        """
        # Delete the log file if it already exists
        shutil.rmtree(Path(self.workflow_dir, "logs"), ignore_errors=True)
        # Start workflow process
        workflow_process = multiprocessing.Process(target=self.workflow_process)
        workflow_process.start()
        # Add workflow process id to pid dir
        # NOTE(review): mkdir() raises FileExistsError if a previous pid dir was
        # left behind — confirm whether exist_ok=True is wanted here.
        self.executor.pid_dir.mkdir()
        Path(self.executor.pid_dir, str(workflow_process.pid)).touch()
        time.sleep(3)
        st.rerun()

    def workflow_process(self) -> None:
        """
        Workflow process. Logs start and end of the workflow and calls the execution method where all steps are defined.
        """
        try:
            self.logger.log("STARTING WORKFLOW")
            results_dir = Path(self.workflow_dir, "results")
            if results_dir.exists():
                shutil.rmtree(results_dir)
            results_dir.mkdir(parents=True)
            self.execution()
            self.logger.log("WORKFLOW FINISHED")
        except Exception as e:
            self.logger.log(f"ERROR: {e}")
        # Delete pid dir path to indicate workflow is done
        # NOTE(review): indentation reconstructed — placed after try/except so the
        # pid dir is removed on both success and failure; verify against upstream.
        shutil.rmtree(self.executor.pid_dir, ignore_errors=True)

    def show_file_upload_section(self) -> None:
        """
        Shows the file upload section of the UI with content defined in self.upload().
        """
        self.ui.file_upload_section(self.upload)

    def show_parameter_section(self) -> None:
        """
        Shows the parameter section of the UI with content defined in self.configure().
        """
        self.ui.parameter_section(self.configure)

    def show_execution_section(self) -> None:
        """
        Shows the execution section of the UI with content defined in self.execution().
        """
        self.ui.execution_section(self.start_workflow)

    def show_results_section(self) -> None:
        """
        Shows the results section of the UI with content defined in self.results().
        """
        self.ui.results_section(self.results)

    def upload(self) -> None:
        """
        Add your file upload widgets here
        """
        ###################################
        # Add your file upload widgets here
        ###################################
        pass

    def configure(self) -> None:
        """
        Add your input widgets here
        """
        ###################################
        # Add your input widgets here
        ###################################
        pass

    def execution(self) -> None:
        """
        Add your workflow steps here
        """
        ###################################
        # Add your workflow steps here
        ###################################
        pass

    def results(self) -> None:
        """
        Display results here
        """
        ###################################
        # Display results here
        ###################################
        pass
--------------------------------------------------------------------------------
/src/workflow/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenMS/streamlit-template/57fccde390ef0ce95a625c7a427840a8d69860c3/src/workflow/__init__.py
--------------------------------------------------------------------------------
/test.py:
--------------------------------------------------------------------------------
# test_my_math.py
import unittest
from urllib.request import urlretrieve

from src.simpleworkflow import generate_random_table
from src.mzmlfileworkflow import mzML_file_get_num_spectra

from pathlib import Path

class TestSimpleWorkflow(unittest.TestCase):
    def test_workflow(self):
        # A (2, 3) request must yield a DataFrame of exactly that shape.
        result = generate_random_table(2, 3).shape
        self.assertEqual(result, (2,3), "Expected dataframe shape.")

class TestComplexWorkflow(unittest.TestCase):
    def test_workflow(self):
        # load data from url
        urlretrieve("https://raw.githubusercontent.com/OpenMS/streamlit-template/main/example-data/mzML/Treatment.mzML", "testfile.mzML")
        result = mzML_file_get_num_spectra("testfile.mzML")
        Path("testfile.mzML").unlink()
        # NOTE(review): the assertion message is copy-pasted from the test above —
        # this actually checks the spectrum count of Treatment.mzML, not a shape.
        self.assertEqual(result, 786, "Expected dataframe shape.")

if __name__ == '__main__':
    unittest.main()
--------------------------------------------------------------------------------
/test_gui.py:
--------------------------------------------------------------------------------
from streamlit.testing.v1 import AppTest
import pytest
from src import fileupload
import json
from pathlib import Path
import shutil


@pytest.fixture
def launch(request):
    # request.param carries the page path supplied via indirect parametrization.
    test = AppTest.from_file(request.param)

    ## Initialize session state ##
    with open("settings.json", "r") as f:
        test.session_state.settings = json.load(f)
    test.session_state.settings["test"] = True
    test.secrets["workspace"] = "test"
    return test


# Test launching of all pages
@pytest.mark.parametrize(
    "launch",
    (
        # "content/quickstart.py", # NOTE: this page does not work due to streamlit.errors.StreamlitPageNotFoundError error
        "content/documentation.py",
        "content/topp_workflow_file_upload.py",
        "content/topp_workflow_parameter.py",
        "content/topp_workflow_execution.py",
        "content/topp_workflow_results.py",
        "content/file_upload.py",
        "content/raw_data_viewer.py",
        "content/run_example_workflow.py",
        "content/download_section.py",
        "content/simple_workflow.py",
        "content/run_subprocess.py",
    ),
    indirect=True,
)
def test_launch(launch):
    """Test if all pages can be launched without errors."""
    launch.run(timeout=30)  # Increased timeout from 10 to 30 seconds
    assert not launch.exception


########### PAGE SPECIFIC TESTS ############
@pytest.mark.parametrize(
    "launch,selection",
    [
        ("content/documentation.py", "User Guide"),
        ("content/documentation.py", "Installation"),
        (
            "content/documentation.py",
            "Developers Guide: How to build app based on this template",
        ),
        ("content/documentation.py", "Developers Guide: TOPP Workflow Framework"),
        ("content/documentation.py", "Developer Guide: Windows Executables"),
        ("content/documentation.py", "Developers Guide: Deployment"),
    ],
    indirect=["launch"],
)
def test_documentation(launch, selection):
    # Each documentation page is reachable through the first selectbox.
    launch.run()
    launch.selectbox[0].select(selection).run()
    assert not launch.exception


@pytest.mark.parametrize("launch", ["content/file_upload.py"], indirect=True)
def test_file_upload_load_example(launch):
    # Clicking the button on the "Example Data" tab must not raise.
    launch.run()
    for i in launch.tabs:
        if i.label == "Example Data":
            i.button[0].click().run()
    assert not launch.exception


# NOTE: All tabs are automatically checked
@pytest.mark.parametrize(
    "launch,example",
    [
        ("content/raw_data_viewer.py", "Blank.mzML"),
        ("content/raw_data_viewer.py", "Treatment.mzML"),
        ("content/raw_data_viewer.py", "Pool.mzML"),
        ("content/raw_data_viewer.py", "Control.mzML"),
    ],
    indirect=["launch"],
)
def test_view_raw_ms_data(launch, example):
    launch.run(timeout=30)  # Increased timeout from 10 to 30 seconds

    ## Load Example file, based on implementation of fileupload.load_example_mzML_files() ###
    mzML_dir = Path(launch.session_state.workspace, "mzML-files")

    # Copy files from example-data/mzML to workspace mzML directory, add to selected files
    for f in Path("example-data", "mzML").glob("*.mzML"):
        shutil.copy(f, mzML_dir)
    launch.run()

    ## TODO: Figure out a way to select a spectrum to be displayed
    launch.selectbox[0].select(example).run()
    assert not launch.exception


@pytest.mark.parametrize(
    "launch,example",
    [
        ("content/run_example_workflow.py", ["Blank"]),
        ("content/run_example_workflow.py", ["Treatment"]),
        ("content/run_example_workflow.py", ["Pool"]),
        ("content/run_example_workflow.py", ["Control"]),
        ("content/run_example_workflow.py", ["Control", "Blank"]),
    ],
    indirect=["launch"],
)
def test_run_workflow(launch, example):
    launch.run()
    ## Load Example file, based on implementation of fileupload.load_example_mzML_files() ###
    mzML_dir = Path(launch.session_state.workspace, "mzML-files")

    # Copy files from example-data/mzML to workspace mzML directory, add to selected files
    for f in Path("example-data", "mzML").glob("*.mzML"):
        shutil.copy(f, mzML_dir)
    launch.run()

    ## Select experiments to process
    for e in example:
        launch.multiselect[0].select(e)

    launch.run()
    assert not launch.exception

    # Press the "Run Workflow" button
    launch.button[1].click().run(timeout=60)
    assert not launch.exception
--------------------------------------------------------------------------------
/tests/test_run_subprocess.py:
--------------------------------------------------------------------------------
import pytest
import time
from streamlit.testing.v1 import AppTest

@pytest.fixture
def launch():
    """Launch the Run Subprocess Streamlit page for testing."""

    app = AppTest.from_file("content/run_subprocess.py")
    app.run(timeout=10)
    return app

def test_file_selection(launch):
    """Ensure a file can be selected from the dropdown."""
    launch.run()

    assert len(launch.selectbox) > 0, "No file selection dropdown found!"

    # Only attempt a selection when the dropdown actually has options.
    if len(launch.selectbox[0].options) > 0:
        launch.selectbox[0].select(launch.selectbox[0].options[0])
        launch.run()


def test_extract_ids_button(launch):
    """Ensure clicking 'Extract IDs' triggers process and UI updates accordingly."""
    launch.run(timeout=10)
    time.sleep(3)

    # Ensure 'Extract ids' button exists
    extract_button = next((btn for btn in launch.button if "Extract ids" in btn.label), None)
    assert extract_button is not None, "Extract ids button not found!"

    # Click the 'Extract ids' button
    extract_button.click()
    launch.run(timeout=10)

    print("Extract ids button was clicked successfully!")
--------------------------------------------------------------------------------
/tests/test_simple_workflow.py:
--------------------------------------------------------------------------------
import pytest
import time
from streamlit.testing.v1 import AppTest

"""
Tests for the Simple Workflow page functionality.

These tests verify:
- Number input widgets function correctly
- Session state updates properly
- Table generation with correct dimensions
- Download button presence
"""

@pytest.fixture
def launch():
    """Launch the Simple Workflow page for testing."""
    app = AppTest.from_file("content/simple_workflow.py")
    app.run(timeout=15)
    return app

def test_number_inputs(launch):
    """Ensure x and y dimension inputs exist and update correctly."""

    assert len(launch.number_input) >= 2, f"Expected at least 2 number inputs, found {len(launch.number_input)}"

    # Set x and y dimensions (widgets are located by their session-state keys)
    x_input = next((ni for ni in launch.number_input if ni.key == "example-x-dimension"), None)
    y_input = next((ni for ni in launch.number_input if ni.key == "example-y-dimension"), None)

    assert x_input is not None, "X-dimension input not found!"
    assert y_input is not None, "Y-dimension input not found!"

    x_input.set_value(5)
    y_input.set_value(4)
    launch.run(timeout=10)

    # Validate session state updates
    assert "example-x-dimension" in launch.session_state, "X-dimension key missing in session state!"
    assert "example-y-dimension" in launch.session_state, "Y-dimension key missing in session state!"
    assert launch.session_state["example-x-dimension"] == 5, "X-dimension not updated!"
    assert launch.session_state["example-y-dimension"] == 4, "Y-dimension not updated!"

    assert len(launch.dataframe) > 0, "Table not generated!"

    df = launch.dataframe[0].value
    assert df.shape == (5, 4), f"Expected table size (5,4) but got {df.shape}"

def test_download_button(launch):
    """Ensure 'Download Table' button appears after table generation."""

    # Locate number inputs by key
    x_input = next((ni for ni in launch.number_input if ni.key == "example-x-dimension"), None)
    y_input = next((ni for ni in launch.number_input if ni.key == "example-y-dimension"), None)

    assert x_input is not None, "X-dimension input not found!"
    assert y_input is not None, "Y-dimension input not found!"

    # Set values and trigger app update
    x_input.set_value(3)
    y_input.set_value(2)
    launch.run(timeout=15)
    time.sleep(5)

    assert len(launch.dataframe) > 0, "Table not generated!"

    # Find the "Download Table" button correctly
    download_elements = [comp for comp in launch.main if hasattr(comp, "label") and "Download" in comp.label]
    assert len(download_elements) > 0, "Download Table button is missing!"
--------------------------------------------------------------------------------
/tests/test_topp_workflow_parameter.py:
--------------------------------------------------------------------------------
"""
Tests for the TOPP workflow parameter page.

This module verifies that the TOPP workflow parameter page correctly
displays parameter values, handles different parameter types,
organizes parameters into sections, and properly toggles advanced parameters.
"""
import os
import sys
import pytest
from unittest.mock import patch, MagicMock

# Add project root to path for imports using a named constant
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(PROJECT_ROOT)

# Create mock for pyopenms to avoid dependency on actual OpenMS installation
mock_pyopenms = MagicMock()
mock_pyopenms.__version__ = "2.9.1"  # Mock version for testing
sys.modules['pyopenms'] = mock_pyopenms

@pytest.fixture
def mock_streamlit():
    """Mock essential Streamlit components for testing parameter display."""
    with patch('streamlit.tabs') as mock_tabs, \
         patch('streamlit.columns') as mock_columns, \
         patch('streamlit.session_state', create=True, new={}) as mock_session_state, \
         patch('streamlit.selectbox') as mock_selectbox, \
         patch('streamlit.number_input') as mock_number_input, \
         patch('streamlit.checkbox') as mock_checkbox, \
         patch('streamlit.text_input') as mock_text_input, \
         patch('streamlit.markdown') as mock_markdown:

        # Configure session state
        mock_session_state["workspace"] = "test_workspace"
        mock_session_state["advanced"] = False

        yield {
            'tabs': mock_tabs,
            'columns': mock_columns,
            'session_state': mock_session_state,
            'selectbox': mock_selectbox,
            'number_input': mock_number_input,
            'checkbox': mock_checkbox,
            'text_input': mock_text_input,
            'markdown': mock_markdown
        }


def test_mock_pyopenms():
    """Verify that pyopenms mock is working correctly."""
    import pyopenms
    assert hasattr(pyopenms, '__version__')


def test_topp_parameter_correctness():
    """Test that TOPP parameters are displayed with correct values."""
    # Define expected parameters with values
    expected_params = {
        "noise_threshold": 1000.0,
        "mz_tolerance": 10.0,
        "rt_window": 60.0,
        "use_smoothing": True
    }

    # Create a mock parameter object
    param = MagicMock()

    # Configure parameter behavior
    # Accepts both bytes and str keys, mirroring pyOpenMS Param.getValue.
    def mock_get_value(param_name):
        decoded_name = param_name.decode() if isinstance(param_name, bytes) else param_name
        param_key = decoded_name.split(':')[-1]
        return expected_params.get(param_key, 0)

    # Setup parameter functions
    param.getValue = mock_get_value
    param.getNames = MagicMock(return_value=[f"FeatureFinderMetabo:{name}".encode()
                                             for name in expected_params])

    # Mock display function to capture values
    displayed_values = {}

    # Function to simulate parameter display logic
    def display_parameters(param_obj):
        """Simulate the display of parameters."""
        for encoded_name in param_obj.getNames():
            name = encoded_name.decode()
            simple_name = name.split(':')[-1]
            value = param_obj.getValue(encoded_name)
            displayed_values[simple_name] = value

    # Call simulated display
    display_parameters(param)

    # Verify displayed values match expected values
    for param_name, expected_value in expected_params.items():
        assert param_name in displayed_values, f"Parameter {param_name} was not displayed"
        assert displayed_values[param_name] == expected_value, \
            f"Parameter {param_name} showed value {displayed_values[param_name]} instead of {expected_value}"


def test_parameter_types():
    """Test that parameters of different types are handled correctly."""
    # Test parameter objects with different types
    param = MagicMock()


    type_params = {
        "float_param": 10.5,
        "int_param": 42,
        "bool_param": True,
        "string_param": "test",
        "list_param": ["item1", "item2"],
        "dict_param": {"key1": "value1", "key2": 123},
        "nested_param": [{"name": "nested1"}, {"name": "nested2"}]
    }

    # Configure mock
    param.getNames = MagicMock(return_value=[f"Tool:{name}".encode() for name in type_params])

    def mock_get_value(param_name):
        param_key = param_name.decode().split(':')[-1]
        return type_params.get(param_key, 0)

    param.getValue = mock_get_value

    # Capture displayed values
    displayed_values = {}
    displayed_types = {}

    # Display parameters
    def display_parameters(param_obj):
        for encoded_name in param_obj.getNames():
            name = encoded_name.decode()
            simple_name = name.split(':')[-1]
            value = param_obj.getValue(encoded_name)
            displayed_values[simple_name] = value
            displayed_types[simple_name] = type(value)

    display_parameters(param)

    # Verify both values and types are preserved
    for param_name, expected_value in type_params.items():
        assert displayed_values[param_name] == expected_value
        # Use 'is' for more precise type comparison
        assert type(displayed_values[param_name]) is type(expected_value)

        # For complex structures, verify deep equality
        if isinstance(expected_value, (dict, list)):
            # Check that nested structures match exactly
            if isinstance(expected_value, dict):
                for key, val in expected_value.items():
                    assert displayed_values[param_name][key] == val
            elif isinstance(expected_value, list) and expected_value and isinstance(expected_value[0], dict):
                # For lists of dictionaries, check each item
                for i, item in enumerate(expected_value):
                    assert displayed_values[param_name][i] == item


def test_parameter_sections():
    """Test that parameters are properly organized into sections."""
    param = MagicMock()

    # Create parameters in different sections
    section_params = {
        "algorithm:common:param1": 1.0,
        "algorithm:common:param2": 2.0,
        "algorithm:centroided:param3": 3.0,
        "preprocessing:param4": 4.0
    }

    # Configure mock
    param.getNames = MagicMock(return_value=[k.encode() for k in section_params])

    def get_section_description(section):
        if "algorithm:common" in section:
            return "Common algorithm parameters"
        elif "algorithm:centroided" in section:
            return "Parameters for centroided data"
        elif "preprocessing" in section:
            return "Data preprocessing parameters"
        return ""

    param.getSectionDescription = get_section_description

    # Capture sections
    sections = set()
    section_params_map = {}

    # The section is everything before the last ':'-separated component.
    def organize_parameters(param_obj):
        for name in param_obj.getNames():
            decoded = name.decode()
            section = ":".join(decoded.split(":")[:-1])
            sections.add(section)
            if section not in section_params_map:
                section_params_map[section] = []
            section_params_map[section].append(decoded.split(":")[-1])

    organize_parameters(param)

    # Verify sections were correctly identified
    assert "algorithm:common" in sections
    assert "algorithm:centroided" in sections
    assert "preprocessing" in sections

    # Verify parameters were organized correctly
    assert "param1" in section_params_map["algorithm:common"]
    assert "param2" in section_params_map["algorithm:common"]
    assert "param3" in section_params_map["algorithm:centroided"]
    assert "param4" in section_params_map["preprocessing"]


def test_advanced_parameter_toggle(mock_streamlit):
    """Test that advanced parameters are only shown when advanced toggle is enabled."""
    param = MagicMock()

    # Define both basic and advanced parameters
    params = [
        {"name": "basic_param", "value": 1.0, "advanced": False},
        {"name": "advanced_param", "value": 42.0, "advanced": True}
    ]

    # Setup param mock
    param.getNames = MagicMock(return_value=[f"Tool:{p['name']}".encode() for p in params])
    param.isAdvanced = lambda key: any(p["advanced"] for p in params if p["name"] in key.decode())

    # Function to simulate parameter filtering based on advanced setting
    def filter_and_display_params(advanced_enabled=False):
        displayed_params = []
        for name in param.getNames():
            if not param.isAdvanced(name) or advanced_enabled:
                displayed_params.append(name.decode().split(":")[-1])
        return displayed_params

    # Test with advanced OFF
    mock_streamlit['session_state']["advanced"] = False
    basic_display = filter_and_display_params(mock_streamlit['session_state']["advanced"])

    # Test with advanced ON
    mock_streamlit['session_state']["advanced"] = True
    advanced_display = filter_and_display_params(mock_streamlit['session_state']["advanced"])

    # Verify only basic parameters are displayed when advanced is OFF
    assert "basic_param" in basic_display
    assert "advanced_param" not in basic_display

    # Verify all parameters are displayed when advanced is ON
    assert "basic_param" in advanced_display
    assert "advanced_param" in advanced_display
--------------------------------------------------------------------------------