├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── workflows
│       ├── codeql-analysis.yml
│       ├── docker.dispatch.yml
│       ├── docker.latest.yml
│       ├── docker.nightly.yml
│       └── docker.tag.yml
├── .gitignore
├── .gitmodules
├── .zenodo.json
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── DEPLOYING.md
├── Dockerfile
├── Flaski.Readme.png
├── LICENSE.md
├── README.md
├── STATUS.md
├── _models.py
├── docker-compose.yml
├── egdata
│   ├── DAVID.xlsx
│   ├── DAVID_DiffExp.xlsx
│   ├── DAVID_input.xlsx
│   ├── KEGG_input.xlsx
│   ├── MDS_with_groups.xlsx
│   ├── cellplot_david.xlsx
│   ├── dendogram.xlsx
│   ├── heatmap.xlsx
│   ├── histogram.xlsx
│   ├── iGSEA.xlsx
│   ├── scatterplot.xlsx
│   ├── threeDscatterplot.xlsx
│   ├── venn.xlsx
│   └── violinplot_labels.xlsx
├── email
│   ├── app_exception.html
│   ├── app_exception.txt
│   ├── app_help.html
│   ├── app_help.txt
│   ├── submissions.age.html
│   ├── submissions.age.txt
│   ├── submissions.ftp.data.html
│   ├── submissions.ftp.data.txt
│   ├── submissions.ftp.html
│   ├── submissions.ftp.txt
│   ├── validate_email.html
│   └── validate_email.txt
├── ftp_info
│   ├── 1.cyberduck.png
│   ├── 2.cyberduck.png
│   ├── 3.cyberduck.png
│   ├── 4.cyberduck.png
│   ├── 5.cyberduck.png
│   ├── 6.cyberduck.png
│   └── README.md
├── hooks
│   └── build
├── kubernetes
│   ├── README.md
│   ├── init-pod.yaml
│   └── server-deployment.yaml
├── production-compose.yml
├── requirements.txt
├── routes
│   ├── _about.py
│   ├── _impressum.py
│   ├── _privacy.py
│   ├── _routes.py
│   ├── _vars.py
│   ├── apps
│   │   ├── _aadatalake.py
│   │   ├── _cbioportal.py
│   │   ├── _chatbot.py
│   │   ├── _gtex.py
│   │   ├── _kegg.py
│   │   ├── _neanderthalage.py
│   │   ├── _utils.py
│   │   ├── aadatalake.py
│   │   ├── agebot.py
│   │   ├── alphafold.py
│   │   ├── asplicing.py
│   │   ├── atacseq.py
│   │   ├── cbioportal.py
│   │   ├── cellplot.py
│   │   ├── chatbot.py
│   │   ├── chipseq.py
│   │   ├── circrna.py
│   │   ├── circularbarplots.py
│   │   ├── convert.py
│   │   ├── crispr.py
│   │   ├── david.py
│   │   ├── dendrogram.py
│   │   ├── gsea.py
│   │   ├── gseaplot.py
│   │   ├── gtex.py
│   │   ├── heatmap.py
│   │   ├── histogram.py
│   │   ├── intronret.py
│   │   ├── ip.py
│   │   ├── irfinder.py
│   │   ├── kegg.py
│   │   ├── kegg_old.py
│   │   ├── lifespan.py
│   │   ├── lineplot.py
│   │   ├── mds.py
│   │   ├── methylclock.py
│   │   ├── mirna.py
│   │   ├── neanderthalage.py
│   │   ├── pca.py
│   │   ├── riboseq.py
│   │   ├── rnaseq.py
│   │   ├── scatterplot.py
│   │   ├── sixteens.py
│   │   ├── storage.py
│   │   ├── threeDscatterplot.py
│   │   ├── transfer.py
│   │   ├── tsne.py
│   │   ├── varcal.py
│   │   ├── vcheck.py
│   │   ├── venndiagram.py
│   │   └── violinplot.py
│   ├── home.py
│   └── index.py
├── static
│   ├── dog-solid.png
│   └── dog-solid.svg
└── utils
    └── stats.py
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: "[BUG]"
5 | labels: bug
6 | assignees: jorgeboucas
7 |
8 | ---
9 |
10 | **App**
11 | In which app did you find the bug?
12 |
13 | **Describe the bug**
14 | A clear and concise description of what the bug is.
15 |
16 | **To Reproduce**
17 | Steps to reproduce the behavior:
18 | 1. Go to '...'
19 | 2. Click on '....'
20 | 3. Scroll down to '....'
21 | 4. See error
22 |
23 | **Expected behavior**
24 | A clear and concise description of what you expected to happen.
25 |
26 | **Screenshots**
27 | If applicable, add screenshots to help explain your problem.
28 |
29 | **Desktop (please complete the following information):**
30 | - OS: [e.g. iOS]
31 | - Browser [e.g. chrome, safari]
32 | - Version [e.g. 22]
33 |
34 | **Smartphone (please complete the following information):**
35 | - Device: [e.g. iPhone6]
36 | - OS: [e.g. iOS8.1]
37 | - Browser [e.g. stock browser, safari]
38 | - Version [e.g. 22]
39 |
40 | **Additional context**
41 | Add any other context about the problem here.
42 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: "[FEATURE REQ.]"
5 | labels: enhancement
6 | assignees: jorgeboucas
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [master]
6 | pull_request:
7 | # The branches below must be a subset of the branches above
8 | branches: [master]
9 | schedule:
10 | - cron: '0 11 * * 4'
11 |
12 | jobs:
13 | analyze:
14 | name: Analyze
15 | runs-on: ubuntu-latest
16 |
17 | strategy:
18 | fail-fast: false
19 | matrix:
20 | # Override automatic language detection by changing the below list
21 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
22 | language: ['python', 'javascript']
23 | # Learn more...
24 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
25 |
26 | steps:
27 | - name: Checkout repository
28 | uses: actions/checkout@v2
29 | with:
30 | # We must fetch at least the immediate parents so that if this is
31 | # a pull request then we can checkout the head.
32 | fetch-depth: 2
33 |
34 | # If this run was triggered by a pull request event, then checkout
35 | # the head of the pull request instead of the merge commit.
36 | - run: git checkout HEAD^2
37 | if: ${{ github.event_name == 'pull_request' }}
38 |
39 | # Initializes the CodeQL tools for scanning.
40 | - name: Initialize CodeQL
41 | uses: github/codeql-action/init@v1
42 | with:
43 | languages: ${{ matrix.language }}
44 |
45 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
46 | # If this step fails, then you should remove it and run the build manually (see below)
47 | - name: Autobuild
48 | uses: github/codeql-action/autobuild@v1
49 |
50 | # ℹ️ Command-line programs to run using the OS shell.
51 | # 📚 https://git.io/JvXDl
52 |
53 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
54 | # and modify them (or add more) to build your code if your project
55 | # uses a compiled language
56 |
57 | #- run: |
58 | # make bootstrap
59 | # make release
60 |
61 | - name: Perform CodeQL Analysis
62 | uses: github/codeql-action/analyze@v1
63 |
--------------------------------------------------------------------------------
/.github/workflows/docker.dispatch.yml:
--------------------------------------------------------------------------------
1 | name: dispatch
2 |
3 | on: repository_dispatch
4 |
5 | jobs:
6 | build:
7 | runs-on: ubuntu-22.04
8 |
9 | steps:
10 | - name: Remove unnecessary files
11 | run: |
12 | sudo rm -rf /usr/share/dotnet
13 | sudo rm -rf "$AGENT_TOOLSDIRECTORY"
14 | - uses: actions/checkout@v2
15 | - name: Upgrade QEMU
16 | run: |
17 | sudo apt-get update
18 | sudo apt-get install -y qemu binfmt-support qemu-user-static
19 | - name: Fix QEMU binfmt
20 | run: |
21 | docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
22 | - name: Set up QEMU
23 | uses: docker/setup-qemu-action@v3
24 | - name: Set up Docker Buildx
25 | uses: docker/setup-buildx-action@v3
26 | - name: Build the Docker image
27 | run: |
28 | echo "${{ secrets.DOCKER_HUB_TOKEN }}" | docker login -u "${{ secrets.DOCKER_HUB_USERNAME }}" --password-stdin docker.io
29 | git submodule update --init --recursive
30 | APP_VERSION=$(git rev-parse --short HEAD)
31 | cd pyflaski && PYFLASKI_VERSION=$(git rev-parse --short HEAD) && cd ../
32 | for i in {1..3}; do
33 | docker buildx build --platform linux/amd64,linux/arm64 --build-arg APP_VERSION=${APP_VERSION} --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t docker.io/mpgagebioinformatics/flaski:latest --push . && break
34 | echo "Build failed (attempt $i). Retrying in 30s..."
35 | sleep 30
36 | done
37 | - name: Set Success env
38 | run: echo "GITHUB_SHA_SHORT=$(echo $GITHUB_SHA | cut -c 1-8)" >> $GITHUB_ENV
39 | - name: Slack Success Notification
40 | run: |
41 | generate_post_data()
42 | {
43 | cat << EOF
44 | {
45 | "deployment":"server","namespace":"flaski-dev", "container" :"server","image":"mpgagebioinformatics/flaski", "tag":"latest" ,"SHA":"${GITHUB_SHA_SHORT}/myapp"
46 | }
47 | EOF
48 | }
49 | curl --insecure -H "Content-Type: application/json" -X POST -d "$(generate_post_data)" ${{ secrets.ROLLOUT_WEBHOOK }}
50 | - name: Set Failure env
51 | if: failure()
52 | run: echo "GITHUB_SHA_SHORT=$(echo $GITHUB_SHA | cut -c 1-8)" >> $GITHUB_ENV
53 | - name: Slack Failure Notification
54 | if: failure()
55 | run: |
56 | generate_post_data()
57 | {
58 | cat << EOF
59 | {
60 | "text": "flaski $GITHUB_SHA_SHORT build and push FAILED"
61 | }
62 | EOF
63 | }
64 | curl -H "Content-Type: application/json" -X POST -d "$(generate_post_data)" ${{ secrets.SLACK_WEBHOOK }}
65 |
66 |
--------------------------------------------------------------------------------
/.github/workflows/docker.latest.yml:
--------------------------------------------------------------------------------
1 | name: latest
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | tags-ignore:
8 | - '**'
9 |
10 | jobs:
11 | build:
12 | runs-on: ubuntu-22.04
13 |
14 | steps:
15 | - name: Remove unnecessary files
16 | run: |
17 | sudo rm -rf /usr/share/dotnet
18 | sudo rm -rf "$AGENT_TOOLSDIRECTORY"
19 | - uses: actions/checkout@v2
20 | - name: Upgrade QEMU
21 | run: |
22 | sudo apt-get update
23 | sudo apt-get install -y qemu binfmt-support qemu-user-static
24 | - name: Fix QEMU binfmt
25 | run: |
26 | docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
27 | - name: Set up QEMU
28 | uses: docker/setup-qemu-action@v3
29 | - name: Set up Docker Buildx
30 | uses: docker/setup-buildx-action@v3
31 | - name: Build the Docker image
32 | run: |
33 | echo "${{ secrets.DOCKER_HUB_TOKEN }}" | docker login -u "${{ secrets.DOCKER_HUB_USERNAME }}" --password-stdin docker.io
34 | git submodule update --init --recursive
35 | APP_VERSION=$(git rev-parse --short HEAD)
36 | cd pyflaski && PYFLASKI_VERSION=$(git rev-parse --short HEAD) && cd ../
37 | for i in {1..3}; do
38 | docker buildx build --platform linux/amd64,linux/arm64 --build-arg APP_VERSION=${APP_VERSION} --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t docker.io/mpgagebioinformatics/flaski:latest --push . && break
39 | echo "Build failed (attempt $i). Retrying in 30s..."
40 | sleep 30
41 | done
42 | - name: Set Success env
43 | run: echo "GITHUB_SHA_SHORT=$(echo $GITHUB_SHA | cut -c 1-8)" >> $GITHUB_ENV
44 | - name: Slack Success Notification
45 | run: |
46 | generate_post_data()
47 | {
48 | cat << EOF
49 | {
50 | "deployment":"server","namespace":"flaski-dev", "container" :"server","image":"mpgagebioinformatics/flaski", "tag":"latest" ,"SHA":"$GITHUB_SHA_SHORT"
51 | }
52 | EOF
53 | }
54 | curl --insecure -H "Content-Type: application/json" -X POST -d "$(generate_post_data)" ${{ secrets.ROLLOUT_WEBHOOK }}
55 | - name: Set Failure env
56 | if: failure()
57 | run: echo "GITHUB_SHA_SHORT=$(echo $GITHUB_SHA | cut -c 1-8)" >> $GITHUB_ENV
58 | - name: Slack Failure Notification
59 | if: failure()
60 | run: |
61 | generate_post_data()
62 | {
63 | cat << EOF
64 | {
65 | "text": "flaski $GITHUB_SHA_SHORT build and push FAILED"
66 | }
67 | EOF
68 | }
69 | curl -H "Content-Type: application/json" -X POST -d "$(generate_post_data)" ${{ secrets.SLACK_WEBHOOK }}
70 |
--------------------------------------------------------------------------------
/.github/workflows/docker.nightly.yml:
--------------------------------------------------------------------------------
1 | name: nightly
2 |
3 | on:
4 | schedule:
5 | - cron: "0 1 * * *"
6 |
7 | jobs:
8 | build:
9 | runs-on: ubuntu-22.04
10 |
11 | steps:
12 | - name: Remove unnecessary files
13 | run: |
14 | sudo rm -rf /usr/share/dotnet
15 | sudo rm -rf "$AGENT_TOOLSDIRECTORY"
16 | - uses: actions/checkout@v2
17 | - name: Upgrade QEMU
18 | run: |
19 | sudo apt-get update
20 | sudo apt-get install -y qemu binfmt-support qemu-user-static
21 | - name: Fix QEMU binfmt
22 | run: |
23 | docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
24 | - name: Set up QEMU
25 | uses: docker/setup-qemu-action@v3
26 | - name: Set up Docker Buildx
27 | uses: docker/setup-buildx-action@v3
28 | - name: Build the Docker image
29 | run: |
30 | echo "${{ secrets.DOCKER_HUB_TOKEN }}" | docker login -u "${{ secrets.DOCKER_HUB_USERNAME }}" --password-stdin docker.io
31 | git submodule update --init --recursive
32 | APP_VERSION=$(git rev-parse --short HEAD)
33 | cd pyflaski && PYFLASKI_VERSION=$(git rev-parse --short HEAD) && cd ../
34 | for i in {1..3}; do
35 | docker buildx build --platform linux/amd64,linux/arm64 --build-arg APP_VERSION=${APP_VERSION} --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t docker.io/mpgagebioinformatics/flaski:nightly --push . && break
36 | echo "Build failed (attempt $i). Retrying in 30s..."
37 | sleep 30
38 | done
39 | - name: Set Success env
40 | run: echo "GITHUB_SHA_SHORT=$(echo $GITHUB_SHA | cut -c 1-8)" >> $GITHUB_ENV
41 | - name: Slack Success Notification
42 | run: |
43 | generate_post_data()
44 | {
45 | cat << EOF
46 | {
47 | "deployment":"server","namespace":"flaski-dev", "container" :"server","image":"mpgagebioinformatics/flaski", "tag":"latest" ,"SHA":"$GITHUB_SHA_SHORT"
48 | }
49 | EOF
50 | }
51 | curl --insecure -H "Content-Type: application/json" -X POST -d "$(generate_post_data)" ${{ secrets.ROLLOUT_WEBHOOK }}
52 | - name: Set Failure env
53 | if: failure()
54 | run: echo "GITHUB_SHA_SHORT=$(echo $GITHUB_SHA | cut -c 1-8)" >> $GITHUB_ENV
55 | - name: Slack Failure Notification
56 | if: failure()
57 | run: |
58 | generate_post_data()
59 | {
60 | cat << EOF
61 | {
62 | "text": "flaski $GITHUB_SHA_SHORT nightly build and push FAILED"
63 | }
64 | EOF
65 | }
66 | curl -H "Content-Type: application/json" -X POST -d "$(generate_post_data)" ${{ secrets.SLACK_WEBHOOK }}
67 |
68 |
--------------------------------------------------------------------------------
/.github/workflows/docker.tag.yml:
--------------------------------------------------------------------------------
1 | name: production
2 |
3 | on:
4 | push:
5 | tags: [ '*.*.*' ]
6 |
7 | jobs:
8 | build:
9 | runs-on: ubuntu-22.04
10 |
11 | steps:
12 | - name: Remove unnecessary files
13 | run: |
14 | sudo rm -rf /usr/share/dotnet
15 | sudo rm -rf "$AGENT_TOOLSDIRECTORY"
16 | - uses: actions/checkout@v2
17 | - name: Upgrade QEMU
18 | run: |
19 | sudo apt-get update
20 | sudo apt-get install -y qemu binfmt-support qemu-user-static
21 | - name: Fix QEMU binfmt
22 | run: |
23 | docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
24 | - name: Set up QEMU
25 | uses: docker/setup-qemu-action@v3
26 | - name: Set up Docker Buildx
27 | uses: docker/setup-buildx-action@v3
28 | - name: Build the Docker image
29 | run: |
30 | echo "${{ secrets.DOCKER_HUB_TOKEN }}" | docker login -u "${{ secrets.DOCKER_HUB_USERNAME }}" --password-stdin docker.io
31 | APP_VERSION=${GITHUB_REF#refs/*/}
32 | git clone https://github.com/mpg-age-bioinformatics/flaski.git ${APP_VERSION}
33 | cd ${APP_VERSION}
34 | git checkout ${APP_VERSION}
35 | git submodule update --init --recursive
36 | cd pyflaski && PYFLASKI_VERSION=$(git rev-parse --short HEAD) && cd ../
37 | for i in {1..3}; do
38 | docker buildx build --platform linux/amd64,linux/arm64 --build-arg APP_VERSION=${APP_VERSION} --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t docker.io/mpgagebioinformatics/flaski:stable -t docker.io/mpgagebioinformatics/flaski:${APP_VERSION} -t docker.io/mpgagebioinformatics/flaski:latest --push . && break
39 | echo "Build failed (attempt $i). Retrying in 30s..."
40 | sleep 30
41 | done
42 | - name: Set Success env
43 | run: echo "GITHUB_TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV && echo "GITHUB_SHA_SHORT=$(echo $GITHUB_SHA | cut -c 1-8)"
44 | - name: Slack Success Notification
45 | run: |
46 | generate_post_data()
47 | {
48 | cat << EOF
49 | {
50 | "deployment":"server","namespace":"flaski-prod", "container" :"server","image":"mpgagebioinformatics/flaski", "tag":"$GITHUB_TAG" ,"SHA":"$GITHUB_SHA_SHORT"
51 | }
52 | EOF
53 | }
54 | curl --insecure -H "Content-Type: application/json" -X POST -d "$(generate_post_data)" ${{ secrets.ROLLOUT_WEBHOOK }}
55 | - name: Set Failure env
56 | if: failure()
57 | run: echo "GITHUB_TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV && echo "GITHUB_SHA_SHORT=$(echo $GITHUB_SHA | cut -c 1-8)"
58 | - name: Slack Failure Notification
59 | if: failure()
60 | run: |
61 | generate_post_data()
62 | {
63 | cat << EOF
64 | {
65 | "text": "flaski $GITHUB_TAG build and push FAILED"
66 | }
67 | EOF
68 | }
69 | curl -H "Content-Type: application/json" -X POST -d "$(generate_post_data)" ${{ secrets.SLACK_WEBHOOK }}
70 |
71 |
--------------------------------------------------------------------------------
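
All four Docker workflows above follow the same pattern: log in to Docker Hub, initialise the pyflaski submodule, derive version build arguments from the two checkouts, and retry a multi-arch `docker buildx build` up to three times. The sketch below reproduces that retry loop for a local run; it assumes an active buildx builder and uses a hypothetical `myuser/flaski` target repository in place of the real one.

```bash
#!/usr/bin/env bash
# Minimal local sketch of the workflows' retrying buildx pattern.
# Assumptions: a buildx builder is configured, and "myuser/flaski"
# is a hypothetical repository you are allowed to push to.
set -euo pipefail

git submodule update --init --recursive
APP_VERSION=$(git rev-parse --short HEAD)
cd pyflaski && PYFLASKI_VERSION=$(git rev-parse --short HEAD) && cd ..

for i in {1..3}; do
  docker buildx build \
    --platform linux/amd64,linux/arm64 \
    --build-arg APP_VERSION="${APP_VERSION}" \
    --build-arg PYFLASKI_VERSION="${PYFLASKI_VERSION}" \
    -t myuser/flaski:latest --push . && break
  echo "Build failed (attempt $i). Retrying in 30s..."
  sleep 30
done
```
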
/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled python modules.
2 | *.pyc
3 | notes.md
4 | pureftpd
5 | pyflaski/build/
6 |
7 | # Setuptools distribution folder.
8 | /dist/
9 | /build/
10 | /venv/
11 | /src
12 | site
13 | tmp
14 | migrations
15 | __pycache__
16 | /services/nginx/certs/
17 | pyflaski/dist/
18 | pyflaski/pyflaski.egg-info/
19 | *.ipynb_checkpoints*
20 |
21 | # environment
22 | .env
23 |
24 | # Python egg metadata, regenerated from source files by setuptools.
25 | /*.egg-info
26 |
27 | # Other
28 | *.swp
29 | .DS_Store
30 | *~$*.xlsx
31 |
32 | build.log
33 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "pyflaski"]
2 | path = pyflaski
3 | url = https://github.com/mpg-age-bioinformatics/pyflaski.git
4 |
--------------------------------------------------------------------------------
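
The `pyflaski` submodule declared above must be present before the image can be built. A minimal sketch of a fresh checkout that pulls it in one step, equivalent to the clone-then-`git submodule update --init --recursive` sequence in CONTRIBUTING.md:

```bash
# Clone flaski together with its pyflaski submodule in one step.
git clone --recurse-submodules https://github.com/mpg-age-bioinformatics/flaski.git
cd flaski
git submodule status   # should show the pinned pyflaski commit
```
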
/.zenodo.json:
--------------------------------------------------------------------------------
1 | {
2 | "contributors": [
3 | {
4 | "affiliation": "Bioinformatics, Max Planck Institute for Biology of Ageing",
5 | "type": "Producer",
6 | "name": "Iqbal, Ayesha"
7 | },
8 | {
9 | "affiliation": "Bioinformatics, Max Planck Institute for Biology of Ageing",
10 | "type": "Producer",
11 | "name": "Duitama, Camila"
12 | },
13 | {
14 | "affiliation": "Bioinformatics, Max Planck Institute for Biology of Ageing",
15 | "type": "Producer",
16 | "name": "Metge, Franziska"
17 | },
18 | {
19 | "affiliation": "Bioinformatics, Max Planck Institute for Biology of Ageing",
20 | "type": "Producer",
21 | "name": "Rosskopp, Daniel"
22 | },
23 | {
24 | "affiliation": "Bioinformatics, Max Planck Institute for Biology of Ageing",
25 | "type": "ContactPerson",
26 | "name": "Boucas, Jorge"
27 | }
28 | ],
29 | "license": "MIT",
30 | "title": "Flaski",
31 | "upload_type": "software",
32 | "keywords": [
33 | "biology",
34 | "research",
35 | "science",
36 | "interactive",
37 | "bioinformartics",
38 | "plots",
39 | "circular bar plots",
40 | "DAVID",
41 | "histogram",
42 | "dendogram",
43 | "heatmap",
44 | "scatter plot",
45 | "violing plot",
46 | "kegg",
47 | "metabolomics",
48 | "survival analysis",
49 | "life span",
50 | "MDS",
51 | "PCA",
52 | "TSNE",
53 | "venn diagram"
54 | ],
55 | "creators": [
56 | {
57 | "affiliation": "Max Planck Institute for Biology of Ageing",
58 | "name": "Iqbal, Ayesha"
59 | },
60 | {
61 | "affiliation": "Max Planck Institute for Biology of Ageing",
62 | "name": "Duitama, Camila"
63 | },
64 | {
65 | "affiliation": "Max Planck Institute for Biology of Ageing",
66 | "name": "Metge, Franziska"
67 | },
68 | {
69 | "affiliation": "Max Planck Institute for Biology of Ageing",
70 | "name": "Rosskopp, Daniel"
71 | },
72 | {
73 | "affiliation": "Max Planck Institute for Biology of Ageing",
74 | "name": "Boucas, Jorge"
75 | }
76 | ],
77 | "access_right": "open",
78 | "description": "
A collection of Flask based apps for biological research.
"
79 | }
--------------------------------------------------------------------------------
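
Since `.zenodo.json` is parsed by Zenodo on every release, it is worth sanity-checking the file after editing it. A quick check using only the Python standard library:

```bash
# Exits non-zero and prints the offending position if the JSON is invalid.
python3 -m json.tool .zenodo.json > /dev/null && echo "valid JSON"
```
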
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at bioinformatics@age.mpg.de. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 | For answers to common questions about this code of conduct, see
76 | https://www.contributor-covenant.org/faq
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # CONTRIBUTING
2 |
3 | For additional info please check the `myapp` repository.
4 |
5 | Clone flaski and the base myapp repo:
6 |
7 | ```
8 | cd ~
9 | git clone git@github.com:mpg-age-bioinformatics/myapp.git
10 | git clone git@github.com:mpg-age-bioinformatics/flaski.git
11 | ```
12 |
13 | Export secret variables:
14 | ```
15 | cd ~/flaski
16 | cat << EOF > .env
17 | MYSQL_PASSWORD=$(openssl rand -base64 20 | tr -d '/')
18 | MYSQL_ROOT_PASSWORD=$(openssl rand -base64 20 | tr -d '/')
19 | REDIS_PASSWORD=$(openssl rand -base64 20 | tr -d '/')
20 | SECRET_KEY=$(openssl rand -base64 20 | tr -d '/')
21 | EOF
22 | ```
23 |
24 | Create local folders:
25 | ```
26 | mkdir -p ~/flaski23/backup/stats ~/flaski23/backup/users_data2 ~/flaski23/backup/users_data3 ~/flaski23/backup/mariadb ~/flaski23/private ~/flaski23/mpcdf ~/flaski23/submissions
27 | ```
28 |
29 | To deploy flaski edit the docker-compose.yml accordingly and then pull pyflaski and build:
30 | ```
31 | cd ~/flaski
32 | git submodule update --init --recursive
33 | docker-compose up -d --build
34 | ```
35 |
36 | If running myapp in development mode you will have to start Flask from inside the server container:
37 | ```
38 | docker-compose exec server3 /bin/bash
39 | flask run --host 0.0.0.0 --port 8000
40 | ```
41 | Add the following line to your `/etc/hosts`
42 | ```
43 | 127.0.0.1 flaski.localhost
44 | ```
45 |
46 | You can now access flaski over https://flaski.localhost/v3.
47 |
48 | Adding an administrator user:
49 | ```
50 | docker-compose run --entrypoint="python3 /myapp/myapp.py admin --add myemail@gmail.com" init
51 | ```
52 |
53 | ### Email logging
54 |
55 | To use the SMTP debugging server from Python, comment out all email-related `env` entries in `docker-compose.yml`.
56 | You can then use Python's fake email server, which accepts emails but, instead of sending them, prints them to the console.
57 | To run this server, open a second terminal session and run the following command on it:
58 | ```bash
59 | docker-compose exec server3 python3 -m smtpd -n -c DebuggingServer localhost:8025
60 | ```
61 |
62 | ### pyflaski
63 |
64 | pyflaski was added as a submodule of flaski:
65 | ```
66 | cd ~/flaski
67 | git submodule add git@github.com:mpg-age-bioinformatics/pyflaski.git pyflaski
68 | git submodule init pyflaski
69 | ```
70 |
71 | If making a fresh clone you will need to:
72 | ```
73 | cd ~/flaski
74 | git submodule update --init --recursive
75 | ```
76 |
77 | To update from the remote:
78 | ```
79 | git submodule update --recursive --remote
80 | ```
81 |
82 | Committing changes:
83 | ```
84 | cd ~/flaski/pyflaski
85 | git add -A .
86 | git commit -m ""
87 | git push origin HEAD:master
88 | ```
89 |
90 | then tell the main project to start tracking the updated version:
91 | ```
92 | cd ~/flaski
93 | git add pyflaski
94 | git commit -m pyflaski
95 | git push
96 | ```
97 |
--------------------------------------------------------------------------------
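
The development bring-up described above can be chained into a single script. A sketch, assuming fresh clones of `myapp` and `flaski` already sit in your home directory as in the instructions:

```bash
#!/usr/bin/env bash
# Sketch of the CONTRIBUTING.md bring-up, assuming ~/myapp and
# ~/flaski already exist as fresh clones.
set -euo pipefail
cd ~/flaski

# Secret variables.
cat << EOF > .env
MYSQL_PASSWORD=$(openssl rand -base64 20 | tr -d '/')
MYSQL_ROOT_PASSWORD=$(openssl rand -base64 20 | tr -d '/')
REDIS_PASSWORD=$(openssl rand -base64 20 | tr -d '/')
SECRET_KEY=$(openssl rand -base64 20 | tr -d '/')
EOF

# Local folders.
mkdir -p ~/flaski23/backup/{stats,users_data2,users_data3,mariadb} \
         ~/flaski23/{private,mpcdf,submissions}

# Pull pyflaski, build, and start.
git submodule update --init --recursive
docker-compose up -d --build

# Add an administrator user.
docker-compose run --entrypoint="python3 /myapp/myapp.py admin --add myemail@gmail.com" init
```
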
/DEPLOYING.md:
--------------------------------------------------------------------------------
1 | # flaski
2 |
3 | ## Deploying flaski
4 |
5 | Create local folders:
6 | ```
7 | mkdir -p ~/flaski23/backup/stats ~/flaski23/backup/users_data2 ~/flaski23/backup/users_data3 ~/flaski23/backup/mariadb ~/flaski23/private ~/flaski23/mpcdf ~/flaski23/submissions
8 | ```
9 |
10 | For production, export secret variables into `.env.prod`:
11 | ```bash
12 | cat << EOF > .env.prod
13 | MAIL_PASSWORD=""
14 | MYSQL_PASSWORD=$(openssl rand -base64 20)
15 | MYSQL_ROOT_PASSWORD=$(openssl rand -base64 20)
16 | REDIS_PASSWORD=$(openssl rand -base64 20)
17 | SECRET_KEY=$(openssl rand -base64 20)
18 | EOF
19 | ```
20 |
21 | For local development, comment out all mail-related entries in `docker-compose.yml`.
22 |
23 | To deploy flaski edit the `production-compose.yml` accordingly and then:
24 | ```bash
25 | docker-compose -f production-compose.yml up
26 | ```
27 | Check the `stdout` with:
28 | ```bash
29 | docker-compose logs
30 | ```
31 | or for example:
32 | ```bash
33 | docker-compose logs -f server
34 | ```
35 | If running myapp in development mode you will have to start Flask from inside the server container:
36 | ```
37 | docker-compose exec server3 /bin/bash
38 | flask run --host 0.0.0.0 --port 8000
39 | ```
40 | Adding an administrator user:
41 | ```
42 | docker-compose run --entrypoint="python3 /myapp/myapp.py admin --add myemail@gmail.com" init
43 | ```
44 | You can connect to any of the running containers, e.g.
45 | ```bash
46 | docker-compose exec mariadb /bin/bash
47 | ```
48 | For stopping and removing a container,
49 | ```bash
50 | docker-compose stop mariadb && docker-compose rm mariadb
51 | ```
52 | Stopping and removing all containers:
53 | ```bash
54 | docker-compose down
55 | ```
56 | Stopping and removing all containers as well as all volumes (this will destroy the volumes and contained data):
57 | ```bash
58 | docker-compose down -v
59 | ```
60 | To remove a volume, e.g.
61 | ```bash
62 | docker volume rm db
63 | ```
64 |
65 | ## Backups
66 |
67 | ```bash
68 | docker-compose exec backup /backup.sh
69 | docker-compose exec backup rsync -rtvh --delete /myapp_data/users/ /backup/users_data/
70 | ```
71 |
72 | ## Email logging
73 |
74 | To use the SMTP debugging server from Python, comment out all email-related `env` entries in `docker-compose.yml`.
75 | You can then use Python's fake email server, which accepts emails but, instead of sending them, prints them to the console.
76 | To run this server, open a second terminal session and run the following command on it:
77 | ```bash
78 | docker-compose exec server python3 -m smtpd -n -c DebuggingServer localhost:8025
79 | ```
80 |
81 | ## Databases
82 |
83 | For handling database entries you can start the `flask shell` by:
84 | ```bash
85 | docker-compose exec server flask shell
86 | ```
87 | make the required imports:
88 | ```python
89 | from myapp import app, db
90 | from myapp.models import User, UserLogging
91 | ```
92 | and then for removing a user from the db:
93 | ```python
94 | u = User.query.filter_by(email="myemail@gmail.com").first()
95 | db.session.delete(u)
96 | db.session.commit()
97 | ```
98 | for editing entries eg.:
99 | ```python
100 | user = User.query.filter_by(email="myemail@gmail.com").first()
101 | user.active = False
102 | db.session.add(user)
103 | db.session.commit()
104 | ```
105 |
106 | Collecting usage entries:
107 | ```bash
108 | docker-compose run --entrypoint="python3 /myapp/myapp.py stats /backup/stats" init
109 | ```
110 |
111 | If you need to re-initialize your database:
112 | ```bash
113 | rm -rf migrations && flask db init && flask db migrate -m "users table" && flask db upgrade
114 | ```
115 |
116 | Upgrading:
117 | ```bash
118 | flask db migrate -m "new fields in user model"
119 | flask db upgrade
120 | ```
121 |
122 | Manually backup a database:
123 | ```bash
124 | docker-compose exec mariadb /usr/bin/mysqldump -u root --password=mypass ${APP_NAME} > dump.sql
125 | ```
126 |
127 | Manually restore a database from backup:
128 | ```bash
129 | cat dump.sql | docker-compose exec mariadb mysql --user=root --password=mypass ${APP_NAME}
130 | ```
131 |
132 | ## Multiplatform builds
133 |
134 | Builds are currently working for `linux/amd64` and `linux/arm64` but not for `linux/arm/v7`.
135 |
136 | ```
137 | docker buildx create --name mybuilder
138 | docker buildx use mybuilder
139 | docker buildx inspect --bootstrap
140 | docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 --build-arg MYAPP_VERSION=local --no-cache --force-rm -t myapp/myapp:latest -f services/server/Dockerfile . --load
141 | ```
142 |
143 | To push the resulting image to a registry use `--push`; to load it into Docker use `--load`.
144 |
--------------------------------------------------------------------------------
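
The backup and database-dump commands above can be combined into a single dated snapshot. A sketch, assuming the stack from this file is running; `~/backups` is a hypothetical destination and `mypass`/`${APP_NAME}` come from your own configuration:

```bash
#!/usr/bin/env bash
# Sketch of a dated backup run combining the "Backups" and "Databases"
# commands above. ~/backups is a hypothetical destination; -T disables
# pseudo-TTY allocation so the dump redirects cleanly to a file.
set -euo pipefail
STAMP=$(date +%Y%m%d-%H%M%S)
mkdir -p ~/backups

docker-compose exec backup /backup.sh
docker-compose exec backup rsync -rtvh --delete /myapp_data/users/ /backup/users_data/
docker-compose exec -T mariadb /usr/bin/mysqldump -u root --password=mypass "${APP_NAME}" \
  > ~/backups/"${APP_NAME}-${STAMP}.sql"
```
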
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright (c) Bioinformatics Core Facility of the Max Planck Institute for Biology of Ageing.
2 | # Distributed under the terms of the Modified BSD License.
3 | ARG MYAPP_IMAGE=mpgagebioinformatics/myapp:latest
4 | FROM $MYAPP_IMAGE
5 |
6 | LABEL maintainer="bioinformatics@age.mpg.de"
7 |
8 | ARG APP_VERSION
9 | ENV APP_VERSION ${APP_VERSION}
10 |
11 | ARG PYFLASKI_VERSION
12 | ENV PYFLASKI_VERSION ${PYFLASKI_VERSION}
13 |
14 | USER root
15 |
16 | ENV DEBIAN_FRONTEND=noninteractive
17 | ENV PYTHONDONTWRITEBYTECODE=1
18 | ENV DEB_PYTHON_INSTALL_LAYOUT=deb_system
19 |
20 | # -o Acquire::Check-Valid-Until=false
21 | RUN apt-get update && apt-get install -yq --no-install-recommends libgirepository1.0-dev libcairo2-dev python3-dev pkg-config ninja-build build-essential gobject-introspection gcc libglib2.0-dev && \
22 | apt-get clean && rm -rf /var/lib/apt/lists/*
23 |
24 | COPY ./pyflaski/requirements.txt /pyflaski.requirements.txt
25 | RUN pip3 install --no-cache-dir --prefer-binary -r /pyflaski.requirements.txt
26 |
27 | COPY requirements.txt /requirements.txt
28 | RUN pip3 install --no-cache-dir -r /requirements.txt
29 |
30 | RUN mkdir -p /myapp/data/kegg /myapp/data/david /mpcdf /submissions_ftp /flaski_private /backup/oc_data /oc_data
31 | RUN chown -R ${BUILD_NAME}:${BUILD_NAME} /submissions /flaski_private /mpcdf /backup/oc_data /oc_data /submissions_ftp
32 |
33 |
34 | COPY ./static/dog-solid.png /myapp/myapp/static/favicon.ico
35 | COPY ./static/dog-solid.png /myapp/myapp/static/logo.png
36 | COPY ./pyflaski/pyflaski /myapp/pyflaski
37 | COPY ./pyflaski/data/david /myapp/pyflaski/data/david
38 | COPY ./pyflaski/data/kegg /myapp/pyflaski/data/kegg
39 | COPY ./routes/home.py /myapp/myapp/routes/home.py
40 | COPY ./routes/index.py /myapp/myapp/routes/index.py
41 | COPY ./routes/apps /myapp/myapp/routes/apps
42 | COPY ./routes/_routes.py /myapp/myapp/routes/_routes.py
43 | COPY ./routes/_impressum.py /myapp/myapp/routes/_impressum.py
44 | COPY ./routes/_vars.py /myapp/myapp/routes/_vars.py
45 | COPY ./routes/_privacy.py /myapp/myapp/routes/_privacy.py
46 | COPY ./routes/_about.py /myapp/myapp/routes/_about.py
47 | COPY ./email/app_exception.html /myapp/myapp/templates/email/app_exception.html
48 | COPY ./email/app_exception.txt /myapp/myapp/templates/email/app_exception.txt
49 | COPY ./email/app_help.html /myapp/myapp/templates/email/app_help.html
50 | COPY ./email/app_help.txt /myapp/myapp/templates/email/app_help.txt
51 | COPY ./email/submissions.age.html /myapp/myapp/templates/email/submissions.age.html
52 | COPY ./email/submissions.age.txt /myapp/myapp/templates/email/submissions.age.txt
53 | COPY ./email/submissions.ftp.html /myapp/myapp/templates/email/submissions.ftp.html
54 | COPY ./email/submissions.ftp.txt /myapp/myapp/templates/email/submissions.ftp.txt
55 | COPY ./email/submissions.ftp.data.html /myapp/myapp/templates/email/submissions.ftp.data.html
56 | COPY ./email/submissions.ftp.data.txt /myapp/myapp/templates/email/submissions.ftp.data.txt
57 | COPY ./email/validate_email.html /myapp/myapp/templates/email/validate_email.html
58 | COPY ./email/validate_email.txt /myapp/myapp/templates/email/validate_email.txt
59 | COPY ./_models.py /myapp/myapp/_models.py
60 |
61 | RUN chown -R ${BUILD_NAME}:${BUILD_NAME} /${BUILD_NAME}
62 |
63 | USER myapp
64 |
--------------------------------------------------------------------------------
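
The Dockerfile above takes two build arguments. A sketch of a stand-alone local build, using the `dev` values that docker-compose.yml passes and a hypothetical `:local` tag:

```bash
# pyflaski must be checked out first, or the COPY ./pyflaski/... steps fail.
git submodule update --init --recursive
docker build \
  --build-arg APP_VERSION=dev \
  --build-arg PYFLASKI_VERSION=dev \
  -t mpgagebioinformatics/flaski:local .
```
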
/Flaski.Readme.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/Flaski.Readme.png
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 Bioinformatics Core Facility of the Max Planck Institute for Biology of Ageing
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [![DOI](https://zenodo.org/badge/227070034.svg)](https://zenodo.org/badge/latestdoi/227070034) [![nightly](https://github.com/mpg-age-bioinformatics/flaski/actions/workflows/docker.nightly.yml/badge.svg)](https://github.com/mpg-age-bioinformatics/flaski/actions/workflows/docker.nightly.yml) [![latest](https://github.com/mpg-age-bioinformatics/flaski/actions/workflows/docker.latest.yml/badge.svg)](https://github.com/mpg-age-bioinformatics/flaski/actions/workflows/docker.latest.yml) [![production](https://github.com/mpg-age-bioinformatics/flaski/actions/workflows/docker.tag.yml/badge.svg)](https://github.com/mpg-age-bioinformatics/flaski/actions/workflows/docker.tag.yml)
2 |
3 |
4 |
5 | # Flaski
6 |
7 | Flaski is a [myapp](https://github.com/mpg-age-bioinformatics/myapp)-based collection of web apps for data analysis and visualization in life sciences.
8 |
9 | ![Flaski](Flaski.Readme.png)
10 |
11 | Flaski provides:
12 |
13 | - interactive data analysis
14 | - user level authentication
15 | - Apps as plugins
16 | - session management
17 | - server storage
18 | - Graphic User Interface to Programmatic Interface
19 | - App2App communication
20 | - server based
21 | - background jobs
22 | - access to databases
23 | - usage statistics
24 | - on-the-fly error reporting
25 | - scalable
26 | - continuous delivery
27 | - full stack ready
28 | - multiplatform: *amd64*, *arm64*, and *aarch64*
29 |
30 | Flaski can be used for free on [https://flaski.age.mpg.de](https://flaski.age.mpg.de).
31 |
32 |
33 |
34 | Check our how-to videos on [YouTube](https://www.youtube.com/channel/UCQCHNHJ23FGyXo9usEC_TbA).
35 |
36 | Information on how to deploy Flaski on your own servers can be found in [DEPLOYING.md](DEPLOYING.md) and on Google Kubernetes Engine in [kubernetes](kubernetes).
37 |
38 | For Graphical User Interface to Programmatic Interface exchanges please install the [pyflaski](https://github.com/mpg-age-bioinformatics/pyflaski) companion package.
39 |
40 | Flaski sessions are versioned and you can check the respective version of any saved session [here](https://flaski.age.mpg.de/vcheck). For reproducing plots made with previous Flaski versions please use the [pyflaski](https://github.com/mpg-age-bioinformatics/pyflaski) companion package.
41 |
42 | If you are looking to contribute to Flaski please check [CONTRIBUTING.md](CONTRIBUTING.md).
43 |
44 | Issues: [https://github.com/mpg-age-bioinformatics/flaski/issues](https://github.com/mpg-age-bioinformatics/flaski/issues).
45 |
46 |
47 |
48 | Please check our [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md) before making a contribution or opening an issue.
49 |
50 | ___
51 |
52 | ## Local installation
53 |
54 | Feel free to contact us if you would like to deploy Flaski at your institution or if you would like to contribute to Flaski.
55 |
56 | ```bash
57 | mkdir -p ~/flaski_data/backup/stats ~/flaski_data/backup/users_data ~/flaski_data/backup/mariadb
58 | git clone git@github.com:mpg-age-bioinformatics/flaski.git
59 | cd flaski
60 | cat << EOF > .env
61 | MYSQL_PASSWORD=$(openssl rand -base64 20)
62 | MYSQL_ROOT_PASSWORD=$(openssl rand -base64 20)
63 | REDIS_PASSWORD=$(openssl rand -base64 20)
64 | SECRET_KEY=$(openssl rand -base64 20)
65 | EOF
66 | docker-compose -f production-compose.yml up -d
67 | ```
68 |
69 | Email logging:
70 | ```bash
71 | docker-compose -f production-compose.yml exec server3 python3 -m smtpd -n -c DebuggingServer localhost:8025
72 | ```
73 |
74 | Flaski is now accessible under [https://flaski.localhost](https://flaski.localhost). Depending on your local machine, it might take a few seconds until the server is up and running. You might need to edit your `/etc/hosts` file to include:
75 | ```
76 | 127.0.0.1 flaski.localhost
77 | ```
78 | ___
79 |
80 | ## Citing
81 |
82 | Iqbal, A., Duitama, C., Metge, F., Rosskopp, D., Boucas, J. Flaski. (2021). doi:10.5281/zenodo.4849515
83 |
84 |
85 | ## Versioning
86 |
87 | We recommend that you always export your session along with your results so that you can reproduce them in the future.
88 |
89 | The current version is shown at the end of this page and the version of older sessions can be checked under [https://flaski.age.mpg.de/vcheck/](https://flaski.age.mpg.de/vcheck/).
90 |
91 | If you wish to open an older session under the same package version please use the [pyflaski](https://github.com/mpg-age-bioinformatics/pyflaski) companion package.
92 |
93 | ___
94 |
95 | ## Credits
96 |
97 | Flaski was built using the [Font-Awesome](https://github.com/FortAwesome/Font-Awesome) toolkit. Please consult the respective project for license information.
98 |
99 | The Bioinformatics Core Facility of the Max Planck Institute for Biology of Ageing, Cologne, Germany.
100 |
--------------------------------------------------------------------------------
/STATUS.md:
--------------------------------------------------------------------------------
1 | # Status
2 |
3 | Information on flaski.age.mpg.de is displayed here.
--------------------------------------------------------------------------------
/_models.py:
--------------------------------------------------------------------------------
1 | from myapp import app, db
2 | from datetime import datetime
3 | import jwt
4 | from time import time
5 |
6 | # print("\n\n\n--------------_MODELS\n\n\n")
7 | class FTPSubmissions(db.Model):
8 | id = db.Column(db.Integer, primary_key=True)
9 | file_name=db.Column(db.String(128), index=True, unique=True)
10 | user_id=db.Column(db.Integer)
11 | date_time = db.Column(db.DateTime, default=datetime.utcnow )
12 | ftp_user = db.Column(db.String(128), index=True) #, unique=True)
13 |
14 | def __init__(self, **kwargs):
15 | super(FTPSubmissions, self).__init__(**kwargs)
16 |
17 | # 14 days * 24 hours * 60 minutes * 60 seconds = 1209600 seconds
18 | def get_submission_validation_token(self, expires_in=1209600):
19 | return jwt.encode(
20 | {'file': self.id, 'exp': time() + expires_in},
21 | app.config['SECRET_KEY'], algorithm='HS256') #.decode('utf-8')
22 |
23 | @staticmethod
24 | def verify_submission_token(token):
25 | try:
26 | id = jwt.decode(token, app.config['SECRET_KEY'],
27 | algorithms=['HS256'])['file']
28 | except:
29 | return None
30 | # file_name=FTPSubmissions.query.get(id)
31 | return id
32 |
--------------------------------------------------------------------------------
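
The token round trip in `_models.py` can be exercised from a running server container. A sketch, assuming the model is importable as `myapp._models` (the path the Dockerfile copies it to) and that at least one `FTPSubmissions` row exists:

```bash
# Feed a short session into flask shell inside the server3 container;
# -T disables pseudo-TTY allocation so the heredoc pipes cleanly.
docker-compose exec -T server3 flask shell <<'PY'
from myapp._models import FTPSubmissions
s = FTPSubmissions.query.first()
if s:
    token = s.get_submission_validation_token(expires_in=60)
    print(FTPSubmissions.verify_submission_token(token) == s.id)  # expect: True
PY
```
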
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 |
5 | # this init container only recovers users data for flaski3
6 | init:
7 | container_name: init
8 | image: mpgagebioinformatics/flaski:latest
9 | build:
10 | context: ./
11 | dockerfile: Dockerfile
12 | args:
13 | APP_VERSION: dev
14 | PYFLASKI_VERSION: dev
15 | entrypoint: /myapp/services/init/entrypoint.sh
16 | user: root
17 | volumes:
18 | - data3:/myapp_data/users
19 | - ~/myapp:/myapp
20 | - ~/flaski23/backup/stats:/backup/stats
21 | - ~/flaski23/backup/users_data3:/backup/users_data
22 | # - ~/flaski23/backup/users_data2:/backup/users_data
23 | - ~/flaski23/backup/mariadb:/backup/mariadb:ro
24 | - ./_models.py:/myapp/myapp/_models.py
25 | environment:
26 | APP_NAME: myapp
27 | FLASK_ENV: init
28 | ADMINS: flaski@age.mpg.de
29 | RESTORE_DB: 1
30 | RESTORE_USERS_DATA: 1
31 | UPGRADE_DB: 1
32 | MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
33 | SECRET_KEY: ${SECRET_KEY}
34 | REDIS_ADDRESS: redis:6379/0
35 | REDIS_PASSWORD: ${REDIS_PASSWORD}
36 | MYSQL_USER: flaski
37 | MYSQL_PASSWORD: ${MYSQL_PASSWORD}
38 | MYSQL_HOST: mariadb
39 | MYSQL_PORT: 3306
40 | DB_NAME: flaski
41 | # MAIL_PORT: 587
42 | # MAIL_USE_TLS: 1
43 | # MAIL_PASSWORD: ${MAIL_PASSWORD}
44 | # MAIL_SERVER: mail.age.mpg.de
45 | MAIL_USERNAME: flaski@age.mpg.de
46 | MAIL_USERNAME_ADDRESS: flaski@age.mpg.de
47 | links:
48 | - mariadb
49 | depends_on:
50 | - mariadb
51 |
52 | # server:
53 | # container_name: server
54 | # image: flaski/flaski:latest
55 | # build:
56 | # context: ~/flaski
57 | # dockerfile: services/server/dockerfiles/Dockerfile
58 | # restart: always
59 | # volumes:
60 | # - ~/flaski/utils/dev.py:/flaski/flaski/apps/external.py # this needs to be the first volume as otherwise it will be overwritten by `- ./:/flaski`
61 | # - data:/flaski_data/users
62 | # - ~/flaski:/flaski
63 | # - ~/flaski23/private:/flaski_private:ro
64 | # - ~/flaski23/mpcdf:/mpcdf
65 | # - ~/flaski23/submissions:/submissions
66 | # environment:
67 | # N_WORKERS: 4
68 | # ADMINS: flaski@age.mpg.de
69 | # APP_NAME: flaski
70 | # INSTANCE: backup
71 | # SECRET_KEY: ${SECRET_KEY}
72 | # PRIVATE_APPS: /flaski_private/private.apps.tsv
73 | # APP_URL: https://flaski.localhost
74 | # FLASK_ENV: development #development #production
75 | # LOGS: /var/log/flaski/
76 | # # MAIL_PASSWORD: ${MAIL_PASSWORD}
77 | # # MAIL_PORT: 587
78 | # # MAIL_SERVER: mail.age.mpg.de
79 | # # MAIL_USE_TLS: '1'
80 | # MAIL_USERNAME: flaski@age.mpg.de
81 | # ADMINS: flaski@age.mpg.de
82 | # MYSQL_HOST: mariadb
83 | # MYSQL_PORT: 3306
84 | # MYSQL_PASSWORD: ${MYSQL_PASSWORD}
85 | # MYSQL_USER: flaski
86 | # DB_NAME: flaski
87 | # REDIS_ADDRESS: redis:6379/0
88 | # REDIS_PASSWORD: ${REDIS_PASSWORD}
89 | # labels:
90 | # - traefik.http.services.server.loadbalancer.server.port=8000
91 | # - traefik.http.middlewares.server_https.redirectscheme.scheme=https
92 | # - traefik.http.routers.server.entrypoints=web
93 | # - traefik.http.routers.server.rule=Host(`flaski.localhost`)
94 | # - traefik.http.routers.server.middlewares=server_https@docker
95 | # - traefik.http.routers.server_https.rule=Host(`flaski.localhost`)
96 | # - traefik.http.routers.server_https.tls=true
97 | # - traefik.http.routers.server_https.entrypoints=websecure
98 | # links:
99 | # - mariadb
100 | # - redis
101 | # depends_on:
102 | # - init
103 | # - mariadb
104 | # - redis
105 |
106 | server3:
107 | container_name: server3
108 | image: mpgagebioinformatics/flaski:latest
109 | build:
110 | context: ./
111 | dockerfile: Dockerfile
112 | args:
113 | APP_VERSION: dev
114 | PYFLASKI_VERSION: dev
115 | restart: always
116 | volumes:
117 | - ~/flaski23/submissions/:/submissions
118 | - data3:/flaski_data/users
119 | # - ~/tmp_sessions:/flaski_data/users/1
120 | - ~/flaski23/private/:/flaski_private:rw
121 | - ./static/dog-solid.png:/myapp/myapp/static/favicon.ico
122 | - ./static/dog-solid.png:/myapp/myapp/static/logo.png
123 | - ./pyflaski/pyflaski:/myapp/pyflaski
124 | - ./routes/apps:/myapp/myapp/routes/apps
125 | # - ~/myapp/myapp/routes/login.py:/myapp/myapp/routes/login.py
126 | # - ~/myapp/myapp/routes/register.py:/myapp/myapp/routes/register.py
127 | - ~/myapp/myapp/models.py:/myapp/myapp/models.py
128 | - ~/myapp/myapp/routes/_utils.py:/myapp/myapp/routes/_utils.py
129 | - ~/myapp/myapp/routes/admin.py:/myapp/myapp/routes/admin.py
130 | - ./routes/_routes.py:/myapp/myapp/routes/_routes.py
131 | - ./routes/home.py:/myapp/myapp/routes/home.py
132 | - ./routes/index.py:/myapp/myapp/routes/index.py
133 | - ./routes/_impressum.py:/myapp/myapp/routes/_impressum.py
134 | - ./routes/_vars.py:/myapp/myapp/routes/_vars.py
135 | - ./routes/_privacy.py:/myapp/myapp/routes/_privacy.py
136 | - ./routes/_about.py:/myapp/myapp/routes/_about.py
137 | - ~/myapp/myapp/routes/about.py:/myapp/myapp/routes/about.py
138 | - ./email/app_exception.html:/myapp/myapp/templates/email/app_exception.html
139 | - ./email/app_exception.txt:/myapp/myapp/templates/email/app_exception.txt
140 | - ./email/app_help.html:/myapp/myapp/templates/email/app_help.html
141 | - ./email/app_help.txt:/myapp/myapp/templates/email/app_help.txt
142 | - ./email/submissions.age.html:/myapp/myapp/templates/email/submissions.age.html
143 | - ./email/submissions.age.txt:/myapp/myapp/templates/email/submissions.age.txt
144 | - ./email/submissions.mpcdf.html:/myapp/myapp/templates/email/submissions.mpcdf.html
145 | - ./email/submissions.mpcdf.txt:/myapp/myapp/templates/email/submissions.mpcdf.txt
146 | - ./_models.py:/myapp/myapp/_models.py
147 | - ./email/submissions.mpcdf.data.html:/myapp/myapp/templates/email/submissions.mpcdf.data.html
148 | - ./email/submissions.mpcdf.data.txt:/myapp/myapp/templates/email/submissions.mpcdf.data.txt
149 | - ./email/validate_email.html:/myapp/myapp/templates/email/validate_email.html
150 | - ./email/validate_email.txt:/myapp/myapp/templates/email/validate_email.txt
151 | environment:
152 | N_WORKERS: 4
153 | APP_NAME: myapp
154 | APP_TITLE: flaski(local)
155 | INSTANCE: local
156 | SECRET_KEY: ${SECRET_KEY}
157 | APP_URL: https://flaski.localhost/v3
158 | PAGE_PREFIX: /v3
159 | FLASK_ENV: development #development #production
160 | FLASK_DEBUG: 1
161 | # MAIL_PASSWORD: ${MAIL_PASSWORD}
162 | # MAIL_PORT: 587
163 | # MAIL_SERVER: mail.age.mpg.de
164 | MAIL_USERNAME: flaski@age.mpg.de
165 | MAIL_USERNAME_ADDRESS: flaski@age.mpg.de
166 | ADMINS: flaski@age.mpg.de
167 | # MAIL_USE_TLS: '1'
168 | MYSQL_HOST: mariadb
169 | MYSQL_PORT: 3306
170 | MYSQL_PASSWORD: ${MYSQL_PASSWORD}
171 | MYSQL_USER: flaski
172 | DB_NAME: flaski
173 | REDIS_ADDRESS: redis:6379/0
174 | REDIS_PASSWORD: ${REDIS_PASSWORD}
175 | PUREFTPD_AUTH_SALT: ${PUREFTPD_AUTH_SALT}
176 | PUREFTPD_MYSQL_SERVER: ${PUREFTPD_MYSQL_SERVER}
177 | PUREFTPD_MYSQL_PORT: 3306
178 | PUREFTPD_MYSQL_USER: pureftpd
179 | PUREFTPD_MYSQL_PASS: ${PUREFTPD_MYSQL_PASS}
180 | PUREFTPD_MYSQL_DB: pureftpd
181 | labels:
182 | - traefik.http.services.server3.loadbalancer.server.port=8000
183 | - traefik.http.middlewares.server3_https.redirectscheme.scheme=https
184 | - traefik.http.routers.server3.entrypoints=web
185 | - traefik.http.routers.server3.rule=Host(`flaski.localhost`) && PathPrefix(`/v3`)
186 | - traefik.http.routers.server3.middlewares=server3_https@docker
187 | - traefik.http.routers.server3_https.rule=Host(`flaski.localhost`) && PathPrefix(`/v3`)
188 | - traefik.http.routers.server3_https.tls=true
189 | - traefik.http.routers.server3_https.entrypoints=websecure
190 | links:
191 | - mariadb
192 | - redis
193 | depends_on:
194 | - init
195 | - mariadb
196 | - redis
197 |
198 | # this backup is currently only backing up the data from v2
199 | backup:
200 | container_name: backup
201 | image: mpgagebioinformatics/myapp:latest
202 | build:
203 | context: ~/myapp
204 | dockerfile: services/server/Dockerfile
205 | args:
206 | BUILD_NAME: myapp
207 | MYAPP_VERSION: dev
208 | UPGRADE_REQS: "no"
209 | entrypoint: /flaski/services/backup/entrypoint.sh
210 | user: root
211 | depends_on:
212 | - mariadb
213 | - init
214 | volumes:
215 | - ~/myapp:/flaski
216 | - ~/flaski23/backup/stats:/backup/stats
217 | - ~/flaski23/backup/users_data:/backup/users_data
218 | - ~/flaski23/backup/mariadb:/backup/mariadb:ro
219 | - data3:/myapp_data/users:ro
220 | environment:
221 | APP_NAME: flaski
222 | INSTANCE: backup
223 | FLASK_ENV: backup
224 | LOGS: /var/log/flaski/
225 | MYSQL_HOST: mariadb
226 | MYSQL_PORT: 3306
227 | MYSQL_PASSWORD: ${MYSQL_PASSWORD}
228 | MYSQL_USER: flaski
229 | DB_NAME: flaski
230 | restart: unless-stopped
231 | links:
232 | - mariadb
233 |
234 | # this backup is currently only backing up the data from v2
235 | # backup:
236 | # container_name: backup
237 | # image: mpgagebioinformatics/myapp-flaski2:latest
238 | # build:
239 | # context: ~/myapp
240 | # dockerfile: services/server/Dockerfile
241 | # args:
242 | # BUILD_NAME: flaski
243 | # MYAPP_VERSION: dev
244 | # UPGRADE_REQS: "no"
245 | # entrypoint: /flaski/services/backup/entrypoint.sh
246 | # user: root
247 | # depends_on:
248 | # - mariadb
249 | # - init
250 | # volumes:
251 | # - ~/myapp:/flaski
252 | # - ~/flaski23/backup/stats:/backup/stats
253 | # - ~/flaski23/backup/users_data:/backup/users_data
254 | # - ~/flaski23/backup/mariadb:/backup/mariadb:ro
255 | # - data:/flaski_data/users:ro
256 | # environment:
257 | # APP_NAME: flaski
258 | # INSTANCE: backup
259 | # FLASK_ENV: backup
260 | # LOGS: /var/log/flaski/
261 | # MYSQL_HOST: mariadb
262 | # MYSQL_PORT: 3306
263 | # MYSQL_PASSWORD: ${MYSQL_PASSWORD}
264 | # MYSQL_USER: flaski
265 | # DB_NAME: flaski
266 | # restart: unless-stopped
267 | # links:
268 | # - mariadb
269 |
270 | # uploads:
271 | # container_name: uploads
272 | # # image: mpgagebioinformatics/myapp:latest # -flaski2:latest
273 | # build:
274 | # context: ~/myapp
275 | # dockerfile: services/server/Dockerfile
276 | # args:
277 | # BUILD_NAME: myapp
278 | # MYAPP_VERSION: dev
279 | # UPGRADE_REQS: "no"
280 | # entrypoint: /myapp/services/rsync/entrypoint.sh
281 | # volumes:
282 | # - ~/myapp:/myapp
283 | # - ~/flaski23/backup/mpcdf/:/mpcdf
284 | # - ~/flaski23/backup/submissions/:/submissions
285 | # - ./.owncloud.flaski:/flaski/.owncloud.flaski
286 | # # - /srv/submissions/:/submissions
287 | # # - /srv/backup/mariadb:/backup/mariadb
288 | # # - /srv/backup/data/users:/backup/users_data
289 | # environment:
290 | # FLASK_ENV: rsync
291 | # OWNCLOUD_ADDRESS: https://datashare.mpcdf.mpg.de
292 | # OWNCLOUD_USER: g-flaski
293 | # OWNCLOUD_PASS: ${OWNCLOUD_PASS}
294 | # restart: unless-stopped
295 |
296 | mariadb:
297 | container_name: mariadb
298 | image: mariadb:10.5
299 | restart: always
300 | volumes:
301 | - db:/var/lib/mysql
302 | environment:
303 | MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
304 |
305 | redis:
306 | container_name: redis
307 | image: redis:7
308 | restart: always
309 | command: redis-server --requirepass ${REDIS_PASSWORD}
310 |
311 | reverse-proxy:
312 | image: traefik:v2.6
313 | command:
314 | - --api.insecure=true
315 | - --providers.docker=true
316 | - --entrypoints.websecure.address=:443
317 | - --entrypoints.web.address=:80
318 | # - --providers.file.filename=/etc/traefik/dynamic_conf/conf.yml
319 | # - --providers.file.watch=true
320 | ports:
321 | - "80:80"
322 | - "443:443"
323 | - "8080:8080"
324 | volumes:
325 | # - ~/flaski23/certificates/:/tools/certs
326 | # - ./services/traefik/config.yml:/etc/traefik/dynamic_conf/conf.yml:ro
327 | - /var/run/docker.sock:/var/run/docker.sock
328 | labels:
329 | - traefik.enable=false
330 | depends_on:
331 | # - server
332 | - server3
333 |
334 | # nginx:
335 | # container_name: nginx
336 | # image: nginx:alpine
337 | # restart: always
338 | # ports:
339 | # - 80:80
340 | # - 443:443
341 | # volumes:
342 | # # prod
343 | # # - ./services/nginx/flaski23.conf:/etc/nginx/conf.d/default.conf:rw
344 | # # - ~/flaski23/certificates/cert.pem:/certs/cert.pem:ro
345 | # # - ~/flaski23/certificates/key.pem:/certs/key.pem:ro
346 | # # - ~/flaski23/certificates/dhparam.pem:/certs/dhparam.pem:ro
347 | # # dev
348 | # - ./services/nginx/dev.conf:/etc/nginx/conf.d/default.conf:rw
349 | # - ~/flaski23/certificates/selfsigned/cert.pem:/certs/cert.pem:ro
350 | # - ~/flaski23/certificates/selfsigned/key.pem:/certs/key.pem:ro
351 | # - ~/flaski23/certificates/dhparam.pem:/certs/dhparam.pem:ro
352 | # links:
353 | # - server
354 | # - server3
355 | # depends_on:
356 | # - server
357 | # - server3
358 |
359 | volumes:
360 | # data:
361 | # external: false
362 | data3:
363 | external: false
364 | db:
365 | external: false
366 | oc:
367 | external: false
368 |
--------------------------------------------------------------------------------
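
Note that `server3` above references several `PUREFTPD_*` variables that the `.env` recipes in CONTRIBUTING.md and DEPLOYING.md do not create. A sketch of the missing entries; the values below are placeholders and assumptions, not documented defaults:

```bash
# Append the PUREFTPD_* variables referenced by server3 to .env.
# All values below are assumptions, not documented defaults.
cat << EOF >> .env
PUREFTPD_AUTH_SALT=$(openssl rand -base64 20 | tr -d '/')
PUREFTPD_MYSQL_SERVER=mariadb
PUREFTPD_MYSQL_PASS=$(openssl rand -base64 20 | tr -d '/')
EOF
```
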
/egdata/DAVID.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/DAVID.xlsx
--------------------------------------------------------------------------------
/egdata/DAVID_DiffExp.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/DAVID_DiffExp.xlsx
--------------------------------------------------------------------------------
/egdata/DAVID_input.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/DAVID_input.xlsx
--------------------------------------------------------------------------------
/egdata/KEGG_input.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/KEGG_input.xlsx
--------------------------------------------------------------------------------
/egdata/MDS_with_groups.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/MDS_with_groups.xlsx
--------------------------------------------------------------------------------
/egdata/cellplot_david.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/cellplot_david.xlsx
--------------------------------------------------------------------------------
/egdata/dendogram.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/dendogram.xlsx
--------------------------------------------------------------------------------
/egdata/heatmap.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/heatmap.xlsx
--------------------------------------------------------------------------------
/egdata/histogram.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/histogram.xlsx
--------------------------------------------------------------------------------
/egdata/iGSEA.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/iGSEA.xlsx
--------------------------------------------------------------------------------
/egdata/scatterplot.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/scatterplot.xlsx
--------------------------------------------------------------------------------
/egdata/threeDscatterplot.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/threeDscatterplot.xlsx
--------------------------------------------------------------------------------
/egdata/venn.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/venn.xlsx
--------------------------------------------------------------------------------
/egdata/violinplot_labels.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/egdata/violinplot_labels.xlsx
--------------------------------------------------------------------------------
/email/app_exception.html:
--------------------------------------------------------------------------------
1 | {{eapp}}
2 |
3 | {% for line in emsg %}
4 | {{line}}
5 | {% endfor %}
6 |
7 | {{user.firstname}} {{user.lastname}}: {{user.email}}
8 |
9 | {{etime}}
10 |
11 | Flaski
--------------------------------------------------------------------------------
/email/app_exception.txt:
--------------------------------------------------------------------------------
1 | {{eapp}}
2 |
3 | {{emsg}}
4 |
5 | {{user.firstname}} {{user.lastname}}: {{user.email}}
6 |
7 | {{etime}}
8 |
9 | Flaski
--------------------------------------------------------------------------------
/email/app_help.html:
--------------------------------------------------------------------------------
1 | {{eapp}}
2 | *******************************************
3 | {% for line in emsg %}
4 | {{line}}
5 | {% endfor %}
6 | *******************************************
7 | {{user.firstname}} {{user.lastname}}: {{user.email}}
8 |
9 | {{etime}}
10 |
11 | {{session_file}}
12 |
13 | Flaski
14 |
15 | (html)
--------------------------------------------------------------------------------
/email/app_help.txt:
--------------------------------------------------------------------------------
1 | {{eapp}}
2 |
3 | {{emsg}}
4 |
5 | {{user.firstname}} {{user.lastname}}: {{user.email}}
6 |
7 | {{etime}}
8 |
9 | {{session_file}}
10 |
11 | Flaski
12 |
13 | (txt)
--------------------------------------------------------------------------------
/email/submissions.age.html:
--------------------------------------------------------------------------------
1 | Files have been submitted by {{ user.firstname }} for {{ submission_type }} analysis.
2 |
3 | Cheers!
4 | Flaski
5 |
6 | Submission: {{ submission_tag }}
--------------------------------------------------------------------------------
/email/submissions.age.txt:
--------------------------------------------------------------------------------
1 | Files have been submitted by {{ user.firstname }} for {{ submission_type }} analysis.
2 |
3 | Cheers!
4 |
5 | Flaski
6 |
7 | Submission: {{ submission_tag }}
--------------------------------------------------------------------------------
/email/submissions.ftp.data.html:
--------------------------------------------------------------------------------
1 | Hi {{ user.firstname }},
2 | 
3 | Your request "{{ filename }}" for {{ submission_type }} analysis has been released.
4 |
5 | Cheers!
6 |
7 | Flaski
8 |
9 | Submission: {{ submission_tag }}
--------------------------------------------------------------------------------
/email/submissions.ftp.data.txt:
--------------------------------------------------------------------------------
1 | Hi {{ user.firstname }},
2 | 
3 | Your request "{{ filename }}" for {{ submission_type }} analysis has been released.
4 |
5 | Cheers!
6 |
7 | Flaski
8 |
9 | Submission: {{ submission_tag }}
10 |
11 |
--------------------------------------------------------------------------------
/email/submissions.ftp.html:
--------------------------------------------------------------------------------
1 | Hi {{ user.firstname }},
2 | 
3 | Your request "{{ filename }}" for {{ submission_type }} analysis has been received.
4 |
5 | Please transfer your data over FTP to
6 |
7 | ftp: {{ PUREFTPD_MYSQL_SERVER }}
8 | user: {{ ftp_user }}
9 | pass: {{ ftp_pass }}
10 |
11 | Drop all your files in the main folder; do not create subdirectories.
12 |
13 | Example with lftp:
14 | ```
15 | # navigate to your raw data folder
16 | cd ~/myrawdata
17 | # login with lftp
18 | lftp -u {{ ftp_user }},{{ ftp_pass }} -e "set ftp:ssl-allow true; set ssl:verify-certificate no; set ftp:ssl-force true; set ftp:ssl-protect-data true" {{ PUREFTPD_MYSQL_SERVER }}
19 | # upload your raw files
20 | mput *.fastq.gz
21 | # upload your md5sums file
22 | put md5sums.txt
23 | ```
24 |
25 | Instructions on how to use Cyberduck for transferring your data can be found here: https://github.com/mpg-age-bioinformatics/flaski/blob/main/ftp_info/README.md
26 | 
27 | Please note that this FTP account will be deactivated in 2 weeks.
28 | 
29 | Once you've transferred all your files, click on the following link to start the analysis:
30 |
31 | {{ token_link }}
32 |
33 | If you only have SRA data you do not need to download/upload it yourself, just go ahead and click the link above.
34 |
35 | Cheers!
36 |
37 | Flaski
38 |
39 | Submission: {{ submission_tag }}
40 |
--------------------------------------------------------------------------------
/email/submissions.ftp.txt:
--------------------------------------------------------------------------------
1 | Hi {{ user.firstname }},
2 | 
3 | Your request "{{ filename }}" for {{ submission_type }} analysis has been received.
4 |
5 | Please transfer your data over FTP to
6 |
7 | ftp: {{ PUREFTPD_MYSQL_SERVER }}
8 | user: {{ ftp_user }}
9 | pass: {{ ftp_pass }}
10 |
11 | Drop all your files in the main folder; do not create subdirectories.
12 |
13 | Example with ncftp:
14 | ```
15 | # navigate to your raw data folder
16 | cd ~/myrawdata
17 | # login with ncftp
18 | ncftp -u {{ ftp_user }} -p {{ ftp_pass }} {{ PUREFTPD_MYSQL_SERVER }}
19 | # upload your raw files
20 | mput *.fastq.gz
21 | # upload your md5sums file
22 | put md5sums.txt
23 | ```
24 |
25 | Instructions on how to use Cyberduck for transferring your data can be found here: https://github.com/mpg-age-bioinformatics/flaski/blob/main/ftp_info/README.md
26 | 
27 | Please note that this FTP account will be deactivated in 2 weeks.
28 | 
29 | Once you've transferred all your files, click on the following link to start the analysis:
30 |
31 | {{ token_link }}
32 |
33 | If you only have SRA data you do not need to download/upload it yourself, just go ahead and click the link above.
34 |
35 | Cheers!
36 |
37 | Flaski
38 |
39 | Submission: {{ submission_tag }}
40 |
--------------------------------------------------------------------------------
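
The lftp/ncftp examples in the templates above can also be done from Python. Here is a minimal sketch using the standard library's ftplib; the host, credentials, and local paths are placeholders standing in for the values sent in the submission e-mail:

```python
# Minimal FTP-over-TLS upload sketch; host and credentials are placeholders
# that would come from the submission e-mail.
from ftplib import FTP_TLS
from pathlib import Path

ftps = FTP_TLS("ftp.flaski.app")      # server address from the e-mail
ftps.login("ftp_user", "ftp_pass")    # credentials from the e-mail
ftps.prot_p()                         # encrypt the data channel, as the lftp flags do

raw_dir = Path("~/myrawdata").expanduser()
# upload raw files and the md5sums file into the main folder (no subdirectories)
for path in sorted(raw_dir.glob("*.fastq.gz")) + [raw_dir / "md5sums.txt"]:
    with open(path, "rb") as fh:
        ftps.storbinary(f"STOR {path.name}", fh)

ftps.quit()
```
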
/email/validate_email.html:
--------------------------------------------------------------------------------
1 | Hi {{ user.firstname }},
2 | Welcome!
3 |
4 | Thanks for signing up. Please follow this link to activate your account: click here.
5 |
6 | {{app_name}} works best on Google Chrome, whether on your laptop, your iPad, or your phone.
7 |
8 | You can find several how-to videos on {{app_name}}'s YouTube channel.
9 |
10 | For documentation please visit {{app_name}}'s GitHub page.
11 |
12 | Citing: Iqbal, A., Duitama, C., Metge, F., Rosskopp, D., Boucas, J. Flaski. (2021). doi:10.5281/zenodo.4849515
13 |
14 | Cheers!
15 |
16 | Sincerely,
17 | {{app_name}}
--------------------------------------------------------------------------------
/email/validate_email.txt:
--------------------------------------------------------------------------------
1 | Hi {{ user.firstname }},
2 |
3 | Welcome! Thanks for signing up. Please follow this link to activate your account:
4 |
5 | {{app_url}}/login/{{token}}
6 |
7 | {{app_name}} works best on Google Chrome, whether on your laptop, your iPad, or your phone.
8 |
9 | You can find several how-to videos on {{app_name}}'s YouTube channel - https://www.youtube.com/channel/UCQCHNHJ23FGyXo9usEC_TbA.
10 |
11 | For documentation please visit {{app_name}}'s GitHub page - https://bioinformatics.age.mpg.de/flaski.
12 |
13 | Citing: Iqbal, A., Duitama, C., Metge, F., Rosskopp, D., Boucas, J. Flaski. (2021). doi:10.5281/zenodo.4849515 - https://github.com/mpg-age-bioinformatics/flaski#citing.
14 |
15 | Cheers!
16 |
17 | Sincerely,
18 |
19 | {{app_name}}
--------------------------------------------------------------------------------
/ftp_info/1.cyberduck.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/ftp_info/1.cyberduck.png
--------------------------------------------------------------------------------
/ftp_info/2.cyberduck.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/ftp_info/2.cyberduck.png
--------------------------------------------------------------------------------
/ftp_info/3.cyberduck.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/ftp_info/3.cyberduck.png
--------------------------------------------------------------------------------
/ftp_info/4.cyberduck.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/ftp_info/4.cyberduck.png
--------------------------------------------------------------------------------
/ftp_info/5.cyberduck.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/ftp_info/5.cyberduck.png
--------------------------------------------------------------------------------
/ftp_info/6.cyberduck.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/ftp_info/6.cyberduck.png
--------------------------------------------------------------------------------
/ftp_info/README.md:
--------------------------------------------------------------------------------
1 | # CYBERDUCK FTP TRANSFER
2 |
3 | You can obtain cyberduck at [https://cyberduck.io](https://cyberduck.io).
4 |
5 | 1. Open Cyberduck and select "Open Connection".
6 |
7 | 
8 |
9 | 2. Enter the server address, username, and password as given in the email instructions. Do not forget to check "Add to Keychain". Afterwards, select "Connect".
10 |
11 | 
12 |
13 | 3. When asked if you would like to change to a secure connection, select "Change".
14 |
15 | 
16 |
17 | 4. Select 'Always trust "pureftpd" when connecting to "ftp.flaski.app"' and select "Continue".
18 |
19 | 
20 |
21 | 5. Enter your laptop credentials to authorize the trust and select "Update Settings".
22 |
23 | 
24 |
25 | 6. Select "Action" > "Upload" and choose the files you want to upload.
26 |
27 | 
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/hooks/build:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | if [ "${1}" == "nightly" ] ;
6 | then
7 | repoName="mpgagebioinformatics/flaski"
8 | IMAGE_NAME="flaski"
9 | DOCKER_TAG="nightly"
10 | else
11 |
12 | # # currently using github actions to build images
13 | # exit
14 |
15 | # Parse image name for repo name
16 | tagStart=$(expr index "$IMAGE_NAME" :)
17 | repoName=${IMAGE_NAME:0:tagStart-1}
18 | fi
19 |
20 | echo "- Repo: ${repoName}"
21 | echo "- Date: $(date '+%d/%m/%Y %H:%M:%S')"
22 |
23 | if [ "${DOCKER_TAG}" == "latest" ] ;
24 | then
25 | echo ":: Tag: ${DOCKER_TAG}"
26 | APP_VERSION=$(git rev-parse --short HEAD)
27 | git submodule update --init --recursive && \
28 | git submodule update --recursive --remote && \
29 | cd pyflaski && PYFLASKI_VERSION=$(git rev-parse --short HEAD) && cd ../ && \
30 | docker buildx create --name mybuilder --driver docker-container --bootstrap --use
31 | docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
32 | docker buildx build --platform linux/amd64,linux/arm64 --build-arg APP_VERSION=${APP_VERSION} --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t docker.io/mpgagebioinformatics/flaski:latest .
33 |
34 | # docker build --build-arg APP_VERSION=${APP_VERSION} --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t ${repoName}:latest . #&& \
35 | # docker push ${repoName}:latest && \
36 | # echo ":: build & push tag: latest"
37 |
38 | elif [ "${1}" == "nightly" ] ;
39 | then
40 |
41 | APP_VERSION=$(git rev-parse --short HEAD)
42 | git submodule update --init --recursive && \
43 | git submodule update --recursive --remote && \
44 | cd pyflaski && PYFLASKI_VERSION=$(git rev-parse --short HEAD) && cd ../
45 | echo "- git #${APP_VERSION}"
46 | text=":: !FAILED! "
47 | docker build --build-arg MYAPP_IMAGE="mpgagebioinformatics/myapp:nightly-${2}" --build-arg APP_VERSION="nightly-${APP_VERSION}" --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t ${repoName}:nightly-${2} . && \
48 | text="- "
49 | echo "${text}build flaski:nightly-${2}"
50 | docker system prune -f
51 |
52 | else
53 | echo ":: Tag: ${DOCKER_TAG}"
54 | git submodule update --init --recursive && \
55 | git submodule update --recursive --remote && \
56 | cd pyflaski && PYFLASKI_VERSION=$(git rev-parse --short HEAD) && cd ../
57 | docker buildx create --name mybuilder --driver docker-container --bootstrap --use
58 | docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
59 | docker buildx build --platform linux/amd64,linux/arm64 --build-arg APP_VERSION=${DOCKER_TAG} --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t docker.io/mpgagebioinformatics/flaski:stable -t docker.io/mpgagebioinformatics/flaski:${DOCKER_TAG} -t docker.io/mpgagebioinformatics/flaski:latest .
60 |
61 | #docker build --build-arg APP_VERSION=${DOCKER_TAG} --build-arg PYFLASKI_VERSION=${PYFLASKI_VERSION} --no-cache -t ${repoName}:latest .
62 | # docker tag ${repoName}:latest ${repoName}:${DOCKER_TAG}
63 | # docker tag ${repoName}:latest ${repoName}:stable
64 | # docker push ${repoName}:stable
65 | # echo ":: push tag: stable"
66 | # docker push ${repoName}:${DOCKER_TAG}
67 | # echo ":: push tag: ${DOCKER_TAG}"
68 | # docker push ${repoName}:latest
69 | # echo ":: push tag: latest"
70 | fi
71 |
72 | echo "- Finished"
--------------------------------------------------------------------------------
/kubernetes/README.md:
--------------------------------------------------------------------------------
1 | # Kubernetes
2 | 
3 | Here you can find the YAML files for the init-pod and the server-deployment.
4 | 
5 | You will need to create persistent volume claims, MySQL (Galera) and Redis (Sentinel) servers, and edit the YAML files accordingly; a sketch of a matching volume claim follows below.
--------------------------------------------------------------------------------
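
The manifests below mount a persistentVolumeClaim named `users3-pvc`. A minimal sketch of creating such a claim with the official `kubernetes` Python client; the storage size, access mode, and storage class are assumptions to adapt to your cluster:

```python
# Minimal PVC creation sketch; size and access mode are placeholders.
from kubernetes import client, config

config.load_kube_config()  # or config.load_incluster_config() inside a pod

pvc_manifest = {
    "apiVersion": "v1",
    "kind": "PersistentVolumeClaim",
    "metadata": {"name": "users3-pvc", "namespace": "flaski"},
    "spec": {
        "accessModes": ["ReadWriteMany"],  # assumption: shared by the server replicas
        "resources": {"requests": {"storage": "50Gi"}},  # placeholder size
    },
}

client.CoreV1Api().create_namespaced_persistent_volume_claim(
    namespace="flaski", body=pvc_manifest
)
```
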
/kubernetes/init-pod.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Pod
3 | metadata:
4 | name: init3
5 | namespace: flaski
6 | labels:
7 | app: init3
8 | spec:
9 | containers:
10 | - image: mpgagebioinformatics/flaski:stable
11 | imagePullPolicy: Always
12 | name: init3
13 | command: ["/myapp/services/init/entrypoint.sh"]
14 | securityContext:
15 | allowPrivilegeEscalation: false
16 | runAsUser: 0
17 | env:
18 | - name: APP_NAME
19 | value: myapp
20 | - name: APP_TITLE
21 | value: myapp
22 | - name: SECRET_KEY
23 | valueFrom:
24 | secretKeyRef:
25 | name: flaski-secrets
26 | key: SECRET_KEY
27 | - name: FLASK_ENV
28 | value: init
29 | - name: RESTORE_DB
30 | value: "0"
31 | - name: RESTORE_USERS_DATA
32 | value: "0"
33 | - name: MAIL_PASSWORD
34 | valueFrom:
35 | secretKeyRef:
36 | name: flaskimailpass
37 | key: pass
38 | - name: MYSQL_HOST
39 | value: galeraprod-mariadb-galera.galeraprod.svc.cluster.local
40 | - name: MYSQL_PORT
41 | value: "3306"
42 | - name: MYSQL_PASSWORD
43 | valueFrom:
44 | secretKeyRef:
45 | name: flaski-secrets
46 | key: GALERA_PASS_PROD
47 | - name: MYSQL_USER
48 | value: flaski
49 | - name: DB_NAME
50 | value: flaski
51 | - name: REDIS_ADDRESS
52 | value: redis:6379/0
53 | - name: REDIS_PASSWORD
54 | valueFrom:
55 | secretKeyRef:
56 | name: flaski-secrets
57 | key: REDIS_PASSWORD
58 | - name: ADMINS
59 | value: flaski@age.mpg.de
60 | resources: {}
61 | volumeMounts:
62 | - name: users-volume-mount
63 | mountPath: /myapp_data/users/
64 | restartPolicy: OnFailure
65 | imagePullSecrets:
66 | - name: dockerlogin
67 | volumes:
68 | - name: users-volume-mount
69 | persistentVolumeClaim:
70 | claimName: users3-pvc
71 |
72 | status: {}
73 |
--------------------------------------------------------------------------------
/kubernetes/server-deployment.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | name: server3
5 | namespace: flaski
6 | labels:
7 | app: server3
8 | spec:
9 | selector:
10 | app: server3
11 | ports:
12 | - name: server3
13 | protocol: TCP
14 | port: 80
15 | targetPort: 8000
16 | type: ClusterIP
17 | ---
18 | apiVersion: apps/v1
19 | kind: Deployment
20 | metadata:
21 | name: server3
22 | namespace: flaski
23 | labels:
24 | app: server3
25 | spec:
26 | replicas: 4
27 | selector:
28 | matchLabels:
29 | app: server3
30 | strategy:
31 | rollingUpdate:
32 | maxSurge: 25%
33 | maxUnavailable: 25%
34 | type: RollingUpdate
35 | template:
36 | metadata:
37 | labels:
38 | app: server3
39 | spec:
40 | containers:
41 | - name: server
42 | image: mpgagebioinformatics/flaski:stable
43 | imagePullPolicy: Always
44 | env:
45 | - name: N_WORKERS
46 | value: "4"
47 | - name: ADMINS
48 | value: flaski@age.mpg.de
49 | - name: APP_NAME
50 | value: myapp
51 | - name: APP_TITLE
52 | value: flaski
53 | - name: SECRET_KEY
54 | valueFrom:
55 | secretKeyRef:
56 | name: flaski-secrets
57 | key: SECRET_KEY
58 | - name: APP_URL
59 | value: https://flaski.age.mpg.de
60 | - name: PAGE_PREFIX
61 | value: ""
62 | - name: FLASK_ENV
63 | value: production
64 | - name: LOGS
65 | value: /var/log/myapp/
66 | - name: MAIL_PASSWORD
67 | valueFrom:
68 | secretKeyRef:
69 | name: flaskimailpass
70 | key: pass
71 | - name: MAIL_PORT
72 | value: "587"
73 | - name: MAIL_SERVER
74 | value: mail.ox.gwdg.de
75 | - name: MAIL_USERNAME
76 | value: flaski@age.mpg.de
77 | - name: MAIL_USERNAME_ADDRESS
78 | value: flaski@age.mpg.de
79 | - name: MAIL_USE_TLS
80 | value: "1"
81 | - name: MYSQL_HOST
82 | value: galeraprod-mariadb-galera.galeraprod.svc.cluster.local
83 | - name: MYSQL_PORT
84 | value: "3306"
85 | - name: MYSQL_PASSWORD
86 | valueFrom:
87 | secretKeyRef:
88 | name: flaski-secrets
89 | key: GALERA_PASS_PROD
90 | - name: DB_NAME
91 | value: flaski
92 | - name: MYSQL_USER
93 | value: flaski
94 | - name: CACHE_TYPE
95 | value: RedisSentinelCache
96 | - name: CACHE_REDIS_SENTINELS_address
97 | value: redisprod.redisprod.svc.cluster.local
98 | - name: CACHE_REDIS_SENTINEL_MASTER
99 | value: mymaster
100 | - name: CACHE_REDIS_SENTINELS_port
101 | value: "26379"
102 | - name: REDIS_PASSWORD
103 | valueFrom:
104 | secretKeyRef:
105 | name: flaski-secrets
106 | key: REDIS_REPLICAS_PROD
107 | - name: PUREFTPD_MYSQL_SERVER
108 | value: flaski-ftp.hpccloud.mpg.de
109 | - name: PUREFTPD_MYSQL_PORT
110 | value: "3306"
111 | - name: PUREFTPD_MYSQL_USER
112 | value: pureftpd
113 | - name: PUREFTPD_MYSQL_DB
114 | value: pureftpd
115 | - name: PUREFTPD_AUTH_SALT
116 | valueFrom:
117 | secretKeyRef:
118 | name: flaski-secrets
119 | key: PUREFTPD_AUTH_SALT
120 | - name: PUREFTPD_MYSQL_PASS
121 | valueFrom:
122 | secretKeyRef:
123 | name: flaski-secrets
124 | key: PUREFTPD_MYSQL_PASS
125 | ports:
126 | - name: http
127 | containerPort: 8000
128 | resources: {}
129 | volumeMounts:
130 | - name: users-volume-mount
131 | mountPath: /myapp_data/users/
132 | restartPolicy: Always
133 | imagePullSecrets:
134 | - name: dockerlogin
135 | volumes:
136 | - name: users-volume-mount
137 | persistentVolumeClaim:
138 | claimName: users3-pvc
139 | status: {}
--------------------------------------------------------------------------------
/production-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 |
5 | init:
6 | container_name: init
7 | image: mpgagebioinformatics/flaski:latest
8 | # build:
9 | # context: ~/myapp
10 | # dockerfile: services/server/Dockerfile
11 | # args:
12 | # BUILD_NAME: myapp
13 | # MYAPP_VERSION: dev
14 | # UPGRADE_REQS: "no"
15 | entrypoint: /myapp/services/init/entrypoint.sh
16 | user: root
17 | volumes:
18 | - data3:/myapp_data/users
19 | - private:/flaski_private
20 | - mysql-certs:/etc/mysql/certs
21 | - ~/backups:/backup
22 | environment:
23 | APP_NAME: myapp
24 | FLASK_ENV: init
25 | APP_URL: https://flaski.age.mpg.de
26 | ADMINS: flaski@age.mpg.de
27 | RESTORE_DB: 0
28 | RESTORE_USERS_DATA: 0
29 | UPGRADE_DB: 0
30 | RESTORE_CERTS: 0
31 | RESTORE_DATALAKE: 0
32 | MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
33 | SECRET_KEY: ${SECRET_KEY}
34 | REDIS_ADDRESS: redis:6379/0
35 | REDIS_PASSWORD: ${REDIS_PASSWORD}
36 | MYSQL_USER: flaski
37 | MYSQL_PASSWORD: ${MYSQL_PASSWORD}
38 | MYSQL_HOST: mariadb
39 | MYSQL_PORT: 3306
40 | DB_NAME: flaski
41 | links:
42 | - mariadb
43 | depends_on:
44 | - mariadb
45 |
46 | server3:
47 | container_name: server3
48 | image: mpgagebioinformatics/flaski:stable
49 | # build:
50 | # context: ./
51 | # dockerfile: Dockerfile
52 | # args:
53 | # APP_VERSION: dev
54 | # PYFLASKI_VERSION: dev
55 | restart: always
56 | volumes:
57 | - data3:/flaski_data/users
58 | - private:/flaski_private
59 | - mysql-certs:/etc/mysql/certs
60 | - ~/mpcdf:/mpcdf
61 | environment:
62 | N_WORKERS: 4
63 | APP_NAME: myapp
64 | APP_TITLE: flaski
65 | # INSTANCE: local
66 | SECRET_KEY: ${SECRET_KEY}
67 | APP_URL: https://flaski.age.mpg.de
68 | PAGE_PREFIX: ""
69 | FLASK_ENV: production
70 | MAIL_PASSWORD: ${MAIL_PASSWORD}
71 | MAIL_PORT: 587
72 | MAIL_SERVER: mail.ox.gwdg.de
73 | MAIL_USERNAME: flaski@age.mpg.de
74 | MAIL_USERNAME_ADDRESS: flaski@age.mpg.de
75 | ADMINS: flaski@age.mpg.de
76 | MAIL_USE_TLS: '1'
77 | MYSQL_HOST: mariadb
78 | MYSQL_PORT: 3306
79 | MYSQL_PASSWORD: ${MYSQL_PASSWORD}
80 | MYSQL_USER: flaski
81 | DB_NAME: flaski
82 | REDIS_ADDRESS: redis:6379/0
83 | REDIS_PASSWORD: ${REDIS_PASSWORD}
84 | PUREFTPD_AUTH_SALT: ${PUREFTPD_AUTH_SALT}
85 | PUREFTPD_MYSQL_SERVER: ${PUREFTPD_MYSQL_SERVER}
86 | PUREFTPD_MYSQL_PORT: 3306
87 | PUREFTPD_MYSQL_USER: pureftpd
88 | PUREFTPD_MYSQL_PASS: ${PUREFTPD_MYSQL_PASS}
89 | PUREFTPD_MYSQL_DB: pureftpd
90 | labels:
91 | - traefik.enable=true
92 | - traefik.http.services.server3.loadbalancer.server.port=8000
93 | - traefik.http.routers.server3.entrypoints=web
94 | - traefik.http.routers.server3.rule=Host(`flaski.age.mpg.de`)
95 | - traefik.http.middlewares.server3.redirectscheme.scheme=https
96 | - traefik.http.routers.server3.middlewares=server3@docker
97 | - traefik.http.routers.server3_https.entrypoints=websecure
98 | - traefik.http.routers.server3_https.rule=Host(`flaski.age.mpg.de`)
99 | - traefik.http.routers.server3_https.tls=true
100 | - traefik.http.routers.server3_https.tls.certresolver=myresolver
101 | links:
102 | - mariadb
103 | - redis
104 | depends_on:
105 | - init
106 | - mariadb
107 | - redis
108 |
109 | backup:
110 | container_name: backup
111 | image: mpgagebioinformatics/myapp:latest
112 | # build:
113 | # context: ~/myapp
114 | # dockerfile: services/server/Dockerfile
115 | # args:
116 | # BUILD_NAME: myapp
117 | # MYAPP_VERSION: dev
118 | # UPGRADE_REQS: "no"
119 | entrypoint: /myapp/services/backup/entrypoint.sh
120 | user: root
121 | depends_on:
122 | - mariadb
123 | - init
124 | volumes:
125 | - data3:/myapp_data/users:ro
126 | - ~/backups:/backup
127 | environment:
128 | APP_NAME: flaski
129 | INSTANCE: backup
130 | FLASK_ENV: backup
131 | LOGS: /var/log/flaski/
132 | MYSQL_HOST: mariadb
133 | MYSQL_PORT: 3306
134 | MYSQL_PASSWORD: ${MYSQL_PASSWORD}
135 | MYSQL_USER: flaski
136 | DB_NAME: flaski
137 | restart: unless-stopped
138 | links:
139 | - mariadb
140 |
141 | mariadb:
142 | container_name: mariadb
143 | image: mariadb:10.5
144 | restart: always
145 | volumes:
146 | - db:/var/lib/mysql
147 | environment:
148 | MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
149 |
150 | redis:
151 | container_name: redis
152 | image: redis:7
153 | restart: always
154 | command: redis-server --requirepass ${REDIS_PASSWORD}
155 |
156 | reverse-proxy:
157 | image: "traefik:v2.9"
158 | restart: always
159 | container_name: "traefik"
160 | command:
161 | - --providers.docker=true
162 | - --providers.docker.useBindPortIP=true
163 | - --entrypoints.web.address=:80
164 | - --entrypoints.websecure.address=:443
165 | - "--entrypoints.web.http.redirections.entryPoint.to=websecure"
166 | - "--entrypoints.web.http.redirections.entryPoint.scheme=https"
167 | - --certificatesresolvers.myresolver.acme.httpchallenge=true
168 | - --certificatesresolvers.myresolver.acme.httpchallenge.entrypoint=web
169 | - --certificatesresolvers.myresolver.acme.tlschallenge=true
170 | - --certificatesresolvers.myresolver.acme.email=jboucas@age.mpg.de
171 | #- --certificatesresolvers.myresolver.acme.caserver=https://acme-staging-v02.api.letsencrypt.org/directory
172 | #- --certificatesresolvers.myresolver.acme.storage=/letsencrypt/acme-dev-4.json
173 | - --certificatesresolvers.myresolver.acme.caserver=https://acme-v02.api.letsencrypt.org/directory
174 | - --certificatesresolvers.myresolver.acme.storage=/letsencrypt/acme-prod-3.json
175 | - --log.level=DEBUG
176 | ports:
177 | - 80:80
178 | - 443:443
179 | volumes:
180 | - ~/letsencrypt:/letsencrypt
181 | - /var/run/docker.sock:/var/run/docker.sock:ro
182 | labels:
183 | - traefik.enable=true
184 | # Global redir to https
185 | - traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)
186 | - traefik.http.routers.http-catchall.entrypoints=web
187 | - traefik.http.routers.http-catchall.middlewares=redirect-to-https
188 | - traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https
189 |
190 | depends_on:
191 | - server3
192 |
193 | volumes:
194 | data3:
195 | external: false
196 | db:
197 | external: false
198 | private:
199 | external: false
200 | mysql-certs:
201 | external: false
202 |
--------------------------------------------------------------------------------
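
Both compose files read their secrets from the environment (e.g. via a `.env` file next to the compose file). A minimal sketch of the variables `production-compose.yml` expects; all values below are placeholders:

```
MYSQL_ROOT_PASSWORD=change-me
MYSQL_PASSWORD=change-me
SECRET_KEY=change-me
REDIS_PASSWORD=change-me
MAIL_PASSWORD=change-me
PUREFTPD_AUTH_SALT=change-me
PUREFTPD_MYSQL_SERVER=ftp.example.org
PUREFTPD_MYSQL_PASS=change-me
```
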
/requirements.txt:
--------------------------------------------------------------------------------
1 | humanize==3.13.1
2 | biopython==1.84
3 | reportlab==4.2.5
4 | ### packages for LLMs
5 | sentence-transformers==3.4.1
6 | openai==1.61.1
7 | # pymupdf==1.25.3
8 | faiss-cpu==1.10.0
--------------------------------------------------------------------------------
/routes/_about.py:
--------------------------------------------------------------------------------
1 | from myapp import app
2 | import os
3 |
4 | PYFLASKI_VERSION=os.environ['PYFLASKI_VERSION']
5 | PYFLASKI_VERSION=str(PYFLASKI_VERSION)
6 |
7 | EXT_URL=os.environ["APP_URL"]
8 | EXT_URL=f"{EXT_URL}/ext/"
9 |
10 | v=app.config["APP_VERSION"]
11 | v=str(v)
12 |
13 | _about=f'''
14 |
15 | Flaski is a [myapp]({EXT_URL}github.com/mpg-age-bioinformatics/myapp)-based collection of web apps for data analysis and visualization in life sciences.
16 |
17 | Flaski provides:
18 |
19 | - interactive data analysis
20 | - user level authentication
21 | - Apps as plugins
22 | - session management
23 | - server storage
24 | - Graphical User Interface to Programmatic Interface
25 | - App2App communication
26 | - server based
27 | - background jobs
28 | - access to databases
29 | - usage statistics
30 | - on-the-fly error reporting
31 | - scalable
32 | - continuous delivery
33 | - full stack ready
34 | - multiplatform: *amd64*, *arm64*, and *aarch64*
35 |
36 | Flaski can be used for free on [https://flaski.age.mpg.de](https://flaski.age.mpg.de).
37 |
38 | Check our how-to videos on [YouTube]({EXT_URL}www.youtube.com/channel/UCQCHNHJ23FGyXo9usEC_TbA).
39 |
40 | For Graphical User Interface to Programmatic Interface exchanges please install the [pyflaski]({EXT_URL}github.com/mpg-age-bioinformatics/pyflaski) companion package.
41 |
42 | Issues: [https://github.com/mpg-age-bioinformatics/flaski/issues]({EXT_URL}github.com/mpg-age-bioinformatics/flaski/issues).
43 |
44 | Source: [https://github.com/mpg-age-bioinformatics/flaski]({EXT_URL}github.com/mpg-age-bioinformatics/flaski).
45 |
46 | Please check our [CODE_OF_CONDUCT.md]({EXT_URL}github.com/mpg-age-bioinformatics/flaski/blob/main/CODE_OF_CONDUCT.md) before making any contribution or opening an issue.
47 |
48 | ##### Citing
49 |
50 | Iqbal, A., Duitama, C., Metge, F., Rosskopp, D., Boucas, J. Flaski. (2021). doi:10.5281/zenodo.4849515
51 |
52 | ##### Versioning
53 |
54 | We recommend that you always export your session along with your results so that you can reproduce them in the future.
55 |
56 | The current version is shown at the end of this page, and the version of older sessions can be checked under [https://flaski.age.mpg.de/vcheck/](https://flaski.age.mpg.de/vcheck/).
57 |
58 | If you wish to open an older session under the same package version please use the [pyflaski]({EXT_URL}github.com/mpg-age-bioinformatics/pyflaski) companion package.
59 |
60 | ##### Credits
61 |
62 | Flaski was built using the [Font-Awesome]({EXT_URL}github.com/FortAwesome/Font-Awesome) toolkit. Please consult the respective project for license information.
63 |
64 | The Bioinformatics Core Facility of the Max Planck Institute for Biology of Ageing, Cologne, Germany.
65 |
66 | ##### Version
67 |
68 | flaski: {v}
69 |
70 | pyflaski: #{PYFLASKI_VERSION}
71 | '''
--------------------------------------------------------------------------------
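
As a sketch of the Graphical User Interface to Programmatic Interface exchange mentioned above, the lifespan app can be driven directly from Python, following the same pyflaski.lifespan pattern used in routes/apps/_cbioportal.py further below; the toy survival data and colors here are illustrative only:

```python
# Drive the lifespan app programmatically via pyflaski; toy data only.
import pandas as pd
from pyflaski.lifespan import figure_defaults, make_figure

clinical = pd.DataFrame({
    "time": [2.0, 4.0, 6.0, 8.0, 10.0, 12.0],   # months
    "dead": [1, 0, 1, 1, 0, 1],                  # event indicator
    "group": ["low", "high", "low", "high", "low", "high"],
})

pa = figure_defaults()                  # same defaults a web session starts from
pa["xvals"], pa["yvals"] = "time", "dead"
pa["groups_value"] = "group"
pa["list_of_groups"] = sorted(set(clinical["group"]))

# per-group styling, assembled as in routes/apps/_cbioportal.py
pa["groups_settings"] = [
    {
        "name": g, "linewidth_write": "1.5", "linestyle_value": "solid",
        "line_color_value": c, "linecolor_write": "",
        "model_settings": ["Conf_Interval", "ci_legend"],
        "ci_linewidth_write": "1.0", "ci_linestyle_value": "solid",
        "ci_line_color_value": c, "ci_linecolor_write": "", "ci_alpha": "0.2",
        "censor_marker_value": "x", "censor_marker_size_val": "4",
        "markerc": "black", "markerc_write": "", "edge_linewidth": "1",
        "edgecolor": "black", "edgecolor_write": "", "marker_alpha": "1",
    }
    for g, c in zip(pa["list_of_groups"], ["blue", "green"])
]

df, fig, cph_coeff, cph_stats, input_df = make_figure(clinical, pa)
fig.show()  # plotly figure, as rendered by the web app
```
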
/routes/_impressum.py:
--------------------------------------------------------------------------------
1 | _impressum=f'''
2 | The following provides mandatory data concerning the provider of this website,
3 | obligations with regard to data protection, as well as other important legal references
4 | involving the Flaski, the Bioinformatics Core Facility Internet web site of the Max Planck
5 | Institute for Biology of Ageing (mpg-age-bioinformatics.github.io) as required by German law.
6 |
7 | **Provider**
8 |
9 | The provider of this Internet site within the legal meaning of the term is the registered
10 | association Max Planck Society for the Advancement of Science e.V.
11 |
12 | **Address**
13 | ```
14 | Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V.
15 | Hofgartenstrasse 8
16 | D-80539 Munich
17 | Phone: +49 89 2108-0
18 | Internet: www.mpg.de
19 | ```
20 |
21 | **Register of Societies and Associations**
22 |
23 | The Max Planck Society is registered in the Official Register of Societies and Associations
24 | at Berlin-Charlottenburg Local Court under the register number VR 13378 B.
25 |
26 | **Representatives**
27 |
28 | The Max Planck Society is legally represented by its Board of Directors which, in turn,
29 | is represented by the President of the Society, Prof. Dr. Martin Stratmann and by
30 | Secretary General Dr. Ludwig Kronthaler.
31 |
32 | **Value Added Tax Identification Number**
33 |
34 | The value added tax identification number of the Max Planck Society is DE 129517720.
35 |
36 | **Editors**
37 |
38 | Responsible editor for the contents of the website flaski.age.mpg.de with regard to media law:
39 |
40 | ```
41 | Dr. Jorge Boucas
42 | Max Planck Institute for Biology of Ageing
43 | Joseph-Stelzmann-Str. 9b
44 | 50931 Cologne, Germany
45 | jorge.boucas(at)age.mpg.de
46 | ```
47 |
48 | **Technically responsible (Webmaster)**
49 |
50 | Technically responsible for the Flaski website of the Bioinformatics Core Facility
51 | Max Planck Institute for Biology of Ageing:
52 | ```
53 | Jorge Boucas
54 | jorge.boucas(at)age.mpg.de
55 | ```
56 | **Legal Structure**
57 |
58 | The Max Planck Society is a non-profit research facility which is organized as a
59 | registered association. All of the institutes and facilities of the Max Planck Society
60 | are largely autonomous in terms of organization and research, but as a rule have no legal capacity of their own.
61 |
62 | **Foreign Language Pages**
63 |
64 | To the extent that parts of this Internet site are offered in languages other than German,
65 | this represents a service exclusively for staff and guests of the Max Planck Society
66 | who are not proficient in German.
67 |
68 | **Liability for Contents of Online Information**
69 |
70 | As the provider of contents in accordance with Section 7 Paragraph 1 of the Tele-Media Law,
71 | the Max Planck Society shall be responsible for any contents which it makes available for
72 | use in accordance with general legal provisions. The Max Planck Society makes every effort
73 | to provide timely and accurate information on this Web site. Nevertheless, errors and
74 | inaccuracies cannot be completely ruled out. Therefore, the Max Planck Society does not
75 | assume any liability for the relevance, accuracy, completeness or quality of the information provided.
76 | The Max Planck Society shall not be liable for damage of a tangible or intangible nature
77 | caused directly or indirectly through the use or failure to use the information offered
78 | and/or through the use of faulty or incomplete information unless it is verifiably
79 | culpable of intent or gross negligence. The same shall apply to any downloadable
80 | software available free of charge. The Max Planck Society reserves the right to modify,
81 | supplement, or delete any or all of the information offered on its Internet site, or to
82 | temporarily or permanently cease publication thereof without prior and separate notification.
83 |
84 | **Links to Internet Sites of Third Parties**
85 |
86 | This Internet site includes links to external pages. These external links are designated
87 | appropriately. The respective provider shall be responsible for the contents of any
88 | linked external pages. In establishing the initial link, the Max Planck Society has
89 | reviewed the respective external content in order to determine whether such link entailed
90 | possible civil or criminal responsibility. However, a constant review of linked external
91 | pages is unreasonable without concrete reason to believe that a violation of the law may
92 | be involved. If the Max Planck Society determines such or it is pointed out by others that
93 | an external offer to which it is connected via a link entails civil or criminal responsibility,
94 | then the Max Planck Society will immediately eliminate any link to this offer. The Max Planck
95 | Society expressly dissociates itself from such contents.
96 | '''
--------------------------------------------------------------------------------
/routes/_privacy.py:
--------------------------------------------------------------------------------
1 | _privacy=f'''
2 |
3 | Thanks for entrusting Flaski with your data. Holding on to your private information is a serious responsibility, and we want you to know how we're handling it.
4 |
5 | The entity responsible for the processing of your personal information in connection with the services is Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG); Hofgartenstrasse 8, 80539 Munich; Telephone: +49 (89) 2108-0.
6 |
7 | The Data Protection Officer at the entity responsible is Heidi Schuster, Hofgartenstrasse 8, 80539 Munich, Telephone: +49 (89) 2108-1554, E-mail: datenschutz@mpg.de
8 |
9 | **What personal data Flaski collects**
10 |
11 | Flaski collects your first name, last name, organization name, and email address. Additionally, Flaski collects email and time stamps on performed actions like figure download or login to an app.
12 |
13 | **What personal data Flaski does not collect**
14 |
15 | Flaski does not collect your input data. You can store your data in our servers but we generally will not look into it other than stated below.
16 |
17 | **How Flaski collects information**
18 |
19 | Every time you visit an App (or, e.g., download a figure or an error occurs), a timestamp, the App, and the user's email contact are recorded in a MySQL/MariaDB database which is accessible to Flaski's administrators only.
20 |
21 | **Why Flaski collects your information**
22 |
23 | Flaski is an open source project supported by the private and work time of its developers. To keep improving Flaski, we might need to apply for funding. It is therefore important that we are able to demonstrate concrete usage of Flaski and from which institutions that usage originates, so that we can choose the appropriate funding organizations.
24 |
25 | **How Flaski uses your information**
26 |
27 | We collect usage statistics to better understand our Apps and develop them further for you.
28 |
29 | **How we share the information we collect**
30 |
31 | When reporting usage statistics we remove users' first names, last names, and emails from the report. We might include organization names in such reports. We do not share any user's account with any other public or private organization.
32 |
33 | **How you can access the information we collect**
34 |
35 | If you want to permanently remove all traces of your account from our database, please email us at flaski@age.mpg.de from your account's email address.
36 |
37 | **Our use of cookies and tracking**
38 |
39 | Flaski only uses minimal functionality cookies so that your input data does not vanish every time you press “Submit” or refresh the page. Flaski's minimal functionality cookies only collect the values that you input into an App (eg. range of x-axis) so that the instructions you give to Flaski can be executed. Session cookies are permanently cleared every time you logout. Flaski does not perform tracking. Cookies are text files stored in the Internet browser or by the Internet browser on the user's computer system.
40 |
41 | **How Flaski secures your information**
42 |
43 | We take all measures reasonably necessary to protect the confidentiality, integrity, and availability of your personal information on Flaski and to protect the resilience of our servers.
44 |
45 | Flaski takes all measures reasonably necessary to protect User Personal Information from unauthorized access, alteration, or destruction; maintain data accuracy; and help ensure the appropriate use of User Personal Information.
46 |
47 | Flaski enforces a written security information program. Our program:
48 |
49 | - aligns with industry recognized frameworks;
50 | - includes security safeguards reasonably designed to protect the confidentiality, integrity, availability, and resilience of our Users' data;
51 | - is appropriate to the nature, size, and complexity of Flaski's business operations;
52 | - includes incident response and data breach notification processes.
53 |
54 | In the event of a data breach that affects your User Personal Information, we will act promptly to mitigate the impact of a breach and notify any affected Users without undue delay.
55 |
56 | Transmission of data on Flaski is encrypted using SSH and HTTPS (TLS). We manage our own cages and racks at our own data centers with a high level of physical and network security.
57 |
58 | No method of transmission, or method of electronic storage, is 100% secure. Therefore, we cannot guarantee its absolute security.
59 |
60 | Flaski, the Max Planck Institute for Biology of Ageing, and its personnel cannot be held responsible for the misuse of Flaski servers by a third party, as would be the case, for example, in a hacking event.
61 |
62 | **Other important information**
63 |
64 | *Data storage*
65 |
66 | Flaski personnel do not access private repositories unless required to for security purposes, to assist the repository owner with a support matter, to maintain the integrity of the service, or to comply with our legal obligations. However, while we do not generally search for content in your repositories, we may scan our servers and content to detect certain tokens or security signatures, known active malware, or other content such as violent extremist or terrorist content or child exploitation imagery, based on algorithmic fingerprinting techniques.
67 |
68 | Flaski regularly scans its servers for old, untouched data and marks it for deletion. Users are informed of this mark and are given time to properly back up their data.
69 |
70 | Personal data will also be deleted if you ask us to delete it or once the above purposes have been achieved. Data will be deleted once it reaches an age of 10 years.
71 |
72 | *Data analysis*
73 |
74 | Flaski's purpose is to support researchers in the analysis of their data. For this we constantly enhance the number and quality of our apps, promptly responding to users' requests. Flaski, the Max Planck Institute for Biology of Ageing, and its personnel cannot be held responsible for any data analysis performed with Flaski, nor for any misinterpretation that might come from an undesired or misunderstood function. Flaski's code is public and open source, and we encourage all users to follow its code as a trace of their analysis.
75 |
76 | *What are my rights as a data subject?*
77 |
78 | As an individual whose personal data is gathered as part of the aforementioned services, you have, in principle, the following rights, to the extent that no legal exceptions are applicable in individual cases:
79 |
80 | - Information (Article 15 GDPR)
81 | - Correction (Article 16 GDPR)
82 | - Deletion (Article 17 (1) GDPR)
83 | - Restriction of processing (Article 18 GDPR)
84 | - Data transmission (Article 20 GDPR)
85 | - Revocation of processing (Article 21 GDPR)
86 | - Revocation of consent (Article 7 (3) GDPR)
87 | - Right to complain to the regulator (Article 77 GDPR). For the MPG, this is the Bavarian Data Protection Authority (BayLDA), Postbox 606, 91511 Ansbach
88 |
89 | **Contacting Flaski**
90 |
91 | Please feel free to contact us if you have questions about our Privacy and Data Statement. Contact details:
92 | ```
93 | Max Planck Institute for Biology of Ageing
94 | Joseph-Stelzmann-Str. 9b
95 | 50931 Köln
96 | Phone: +49 221 37970-0
97 | Email: bioinformatics@age.mpg.de
98 | ```
99 | '''
--------------------------------------------------------------------------------
/routes/_routes.py:
--------------------------------------------------------------------------------
1 | from myapp.routes.apps import storage, scatterplot, heatmap, violinplot, cellplot, gseaplot, david, lineplot, dendrogram, threeDscatterplot, mds, pca, tsne, histogram, circularbarplots, venndiagram, lifespan, aadatalake
2 | from myapp.routes.apps import ip, rnaseq, transfer, atacseq, chipseq, asplicing, intronret, irfinder, circrna, mirna, sixteens, varcal, riboseq, methylclock, alphafold, gsea, crispr, vcheck, convert, cbioportal, neanderthalage
3 | from myapp.routes.apps import gtex, kegg, chatbot
4 |
5 | # kegg
6 |
7 | # import os
8 | # if os.environ['FLASK_ENV'] == "development" :
9 | # from myapp.routes.apps import cbioportal
10 | #from myapp.routes.apps import agebot
11 |
--------------------------------------------------------------------------------
/routes/_vars.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | _PRIVATE_ROUTES=['alphafold', 'rnaseq', "atacseq", "chipseq", "asplicing", "intronret", "irfinder", "circrna", "mirna", "sixteens", "varcal", "riboseq","gsea",'aadatalake', "methylclock", "crispr", "neanderthalage"]
4 | _PUBLIC_VIEWS = ['alphafold', 'rnaseq', "atacseq", "chipseq", "asplicing", "intronret", "irfinder", "circrna", "mirna", "sixteens", "varcal", "riboseq","gsea"] #, "cbioportal"]
5 |
6 | if os.environ['FLASK_ENV'] != "development" :
7 | _DEV_ROUTES=[ "agebot" ] #"cbioportal",
8 | _PRIVATE_ROUTES = _PRIVATE_ROUTES + _DEV_ROUTES
9 |
10 | _META_TAGS=[{'name':'title', 'property':'og:title', 'content':'flaski' },\
11 | {'name':'image','property':'og:image', 'content':'https://i.ibb.co/pRL0sM1/Flaski.jpg' },\
12 | {'name':'description','property':'og:description', 'content':'Flaski is a myapp based collection of web apps for data analysis and visualization in life \
13 | sciences with session management and versioning. Flaski is built thinking of interactions between code experienced \
14 | and non-experienced users for which sessions created over the web interface can be opened in python as standard \
15 | plotly objects and vice-versa. Among other things, error reporting includes a session sharing option for optimal 1st level \
16 | support. Flaski is open-source under the MIT License. ' },\
17 | {'property':'og:url', 'content':os.getenv("APP_URL") },\
18 | {'property':'og:image:width', 'content':'1200' },\
19 | {'property':'og:image:height', 'content':'675' },\
20 | {'property':'og:type', 'content':'website' }]
21 |
22 |
23 | user_navbar_links={
24 | "Home":"/home/",\
25 | "Storage":"/storage/",\
26 | "separator_1":"-",\
27 | "General":"__title__",\
28 | "About":"/about/",\
29 | "Tutorials":"/ext/www.youtube.com/channel/UCQCHNHJ23FGyXo9usEC_TbA",\
30 | "Impressum":"/impressum/",\
31 | "Privacy":"/privacy/",\
32 | "Issues":"/ext/github.com/mpg-age-bioinformatics/flaski/issues",\
33 | "fixed_separator":"-",\
34 | "Configuration":"__title__", \
35 | "Settings":"/settings/",\
36 | "fixed_separator_2":"-",\
37 | "Logout":"/logout/"
38 | }
39 |
40 | # "KEGG":"/kegg/",\
41 |
42 | other_nav_dropdowns =[
43 | { \
44 | "Apps": \
45 | {
46 | "Scatter plot":"/scatterplot/",\
47 | "3D Scatter plot":"/threeDscatterplot/",\
48 | "Line plot":"/lineplot/",\
49 | "Histogram":"/histogram/",\
50 | "Heatmap":"/heatmap/",\
51 | "Violin plot":"/violinplot/",\
52 | "Circular bar plot":"/circularbarplots/",\
53 | "Dendrogram":"/dendrogram/",\
54 | "Venn diagram":"/venndiagram/",\
55 | "GSEA plot":"/gseaplot/",\
56 | "DAVID":"/david/",\
57 | "Cell plot":"/cellplot/",\
58 | "PCA":"/pca/",\
59 | "MDS":"/mds/",\
60 | "t-SNE":"/tsne/",\
61 | "Lifespan":"/lifespan/",\
62 | "Datalake":"/aadatalake/",\
63 | "cBioPortal":"/cbioportal/",\
64 | "GTEx":"/gtex/",\
65 | # "AGE bot":"/agebot/",\
66 | "Neanderthal age":"/neanderthalage/",\
67 | "KEGG":"/kegg/",\
68 | "Chatbot AGE":"/chatbot/",\
69 | "Version check":"/vcheck/",\
70 | }, \
71 |
72 | }, \
73 | { "Forms": \
74 | {
75 | "RNAseq":"/rnaseq/",\
76 | "ATACseq":"/atacseq/",\
77 | "ChIPseq":"/chipseq/",\
78 | "Alternative Splicing":"/asplicing/",\
79 | "Intron Retention":"/intronret/",\
80 | "IRfinder":"/irfinder/",\
81 | "Circular RNA":"/circrna/",\
82 | "miRNA":"/mirna/",\
83 | "16S":"/sixteens/",\
84 | "Variant Calling":"/varcal/",\
85 | "Ribo-Seq":"/riboseq/",\
86 | "AlphaFold":"/alphafold/",\
87 | "Methylation Clock":"/methylclock/",\
88 | "GSEA":"/gsea/",\
89 | "CRISPR" : "/crispr/"
90 | } \
91 | }
92 | ]
93 | ###################################
94 | # _PRIVATE_ROUTES=['home'] ## only users added to this route on the admin board / User model will have access
95 | # _PUBLIC_VIEWS=[] ## can be used to set specific rights within the app, e.g. deactivate Submit buttons.
96 | # other_nav_dropdowns =[
97 | # { \
98 | # "Eg. DropDown": \
99 | # {
100 | # "Home":"/home/",\
101 | # "separator_1":"-",\
102 | # "General":"__title__",\
103 | # "About":"/about/",\
104 | # "Impressum":"/impressum/",\
105 | # "Privacy":"/privacy/",\
106 | # "fixed_separator":"-",\
107 | # "Configuration":"__title__", \
108 | # "Settings":"/settings/",\
109 | # "Logout":"/logout/"
110 | # } \
111 | # }, \
112 | # { \
113 | # "Eg. DropDown 2": \
114 | # {
115 | # "Home":"/home/",\
116 | # "separator_1":"-",\
117 | # "General":"__title__",\
118 | # "About":"/about/",\
119 | # "Impressum":"/impressum/",\
120 | # "Privacy":"/privacy/",\
121 | # "fixed_separator":"-",\
122 | # "Configuration":"__title__", \
123 | # "Settings":"/settings/",\
124 | # "Logout":"/logout/"
125 | # } \
126 | # }
127 | # ]
128 | ###################################
--------------------------------------------------------------------------------
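
A hypothetical sketch of how the `_PRIVATE_ROUTES` / `_PUBLIC_VIEWS` lists above could gate access; the real checks live in the myapp package, and `user_routes` here stands in for whatever the User model stores:

```python
# Hypothetical access-gating helpers built on the lists in routes/_vars.py;
# not the actual myapp implementation.
from myapp.routes._vars import _PRIVATE_ROUTES, _PUBLIC_VIEWS

def can_view(route_name: str, user_routes: list) -> bool:
    """Public views are visible to everyone; other private routes only to
    users granted them on the admin board."""
    if route_name in _PUBLIC_VIEWS:
        return True
    return route_name not in _PRIVATE_ROUTES or route_name in user_routes

def can_submit(route_name: str, user_routes: list) -> bool:
    """Even on public views, submitting still requires access to the private
    route (e.g. Submit buttons get deactivated otherwise)."""
    return route_name not in _PRIVATE_ROUTES or route_name in user_routes
```
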
/routes/apps/_cbioportal.py:
--------------------------------------------------------------------------------
1 | from matplotlib.pyplot import plot
2 | import pandas as pd
3 | # from flaski.routines import fuzzy_search
4 | from pyflaski.lifespan import make_figure as survival_ls
5 | from pyflaski.lifespan import figure_defaults as defaults_lifespan
6 |
7 | import plotly.express as px
8 | import plotly.graph_objects as go
9 | from plotly.graph_objs import *
10 | import plotly.graph_objs as go
11 |
12 | import numpy as np
13 | import re
14 | import os
15 |
16 | from functools import reduce
17 |
18 |
19 | path_to_files="/flaski_private/cbioportal/"
20 |
21 | def fix_id(x):
22 | try:
23 | x=int(x)
24 | except:
25 | x=str(x)
26 | x=str(x)
27 | return x
28 |
29 | def grouping(g, mrna, low_percentile, hi_percentile):
30 | mrna_=mrna[[g]].dropna()
31 | mrna_=mrna_[mrna_[g]>0]
32 | if len(mrna_.columns.tolist()) > 1 :
33 | return np.nan
34 |
35 | values=mrna_[g].tolist()
36 | if not values:
37 | return np.nan
38 |
39 | l=np.percentile(values,float(low_percentile))
40 | h=np.percentile(values,float(hi_percentile))
41 |
42 | l=mrna_[mrna_[g]<=l].index.tolist()
43 | h=mrna_[mrna_[g]>=h].index.tolist()
44 |
45 | if ( len(l) < 2 ) or ( len(h) < 2 ) :
46 | return np.nan
47 |
48 | l=",".join(l)
49 | h=",".join(h)
50 |
51 | return f"{l} | {h}"
52 |
53 |
54 | def get_groups(genes, mrna, lowp, highp):
55 | """
56 |     define high and low expression groups
57 |     based on the given low and high percentiles
58 | """
59 |
60 | mrna=mrna.transpose()
61 |
62 | groups=pd.DataFrame( {"Hugo_Symbol":genes }, index=genes )
63 | groups["group"]=groups["Hugo_Symbol"].apply(lambda x: grouping(x,mrna, lowp, highp) )
64 |
65 | groups=groups.dropna()
66 |
67 | return groups
68 |
69 |
70 | def read_study_meta(path_to_files=path_to_files, dataset=None):
71 | if dataset:
72 | with open (path_to_files+dataset+"/meta_study.txt", "r") as md:
73 | meta_data=md.read()
74 | meta_data=meta_data.split("\n")
75 | meta_data=[s for s in meta_data if s != ""]
76 |
77 | return meta_data
78 |
79 |
80 | def plot_gene(gene_list, dataset, lp, hp):
81 | """
82 |     plot a Kaplan-Meier survival curve for a gene from a specific dataset;
83 |     reads gene expression and clinical data as above
84 | """
85 | gene=gene_list[0]
86 |
87 | mrnafile=[s for s in os.listdir(path_to_files+dataset) if "mrna" in s][0]
88 |
89 |
90 | mrna=pd.read_csv(path_to_files+dataset+"/"+mrnafile ,sep="\t")
91 | cols=mrna.columns.tolist()
92 |
93 | if ( "Hugo_Symbol" in cols ) and ( "Entrez_Gene_Id" in cols ) :
94 | mrna.index=mrna["Hugo_Symbol"]
95 | mrna=mrna.drop(["Hugo_Symbol","Entrez_Gene_Id"], axis=1)
96 | # mrna["Entrez_Gene_Id"]=mrna["Entrez_Gene_Id"].apply(lambda x: fix_id(x) )
97 | # mrna.index=mrna["Hugo_Symbol"]+"_"+mrna["Entrez_Gene_Id"]
98 | # mrna=mrna.drop(["Hugo_Symbol","Entrez_Gene_Id"], axis=1)
99 | elif ( "Hugo_Symbol" in cols ) :
100 | mrna.index=mrna["Hugo_Symbol"]
101 | mrna=mrna.drop(["Hugo_Symbol"], axis=1)
102 | elif ( "Entrez_Gene_Id" in cols ) :
103 | mrna["Entrez_Gene_Id"]=mrna["Entrez_Gene_Id"].apply(lambda x: fix_id(x) )
104 | mrna.index=mrna["Entrez_Gene_Id"]
105 | mrna=mrna.drop(["Entrez_Gene_Id"], axis=1)
106 |
107 | mrna=mrna.astype(float)
108 |
109 | sample=pd.read_csv(path_to_files+dataset+"/data_clinical_sample.txt" ,sep="\t" )
110 | sample.columns=sample.loc[3,]
111 | sample=sample[4:]
112 |
113 | clinical=pd.read_csv(path_to_files+dataset+"/data_clinical_patient.txt" ,sep="\t" )
114 | clinical.columns=clinical.loc[3,]
115 | clinical=clinical[4:]
116 |
117 | clinical.loc[ clinical["OS_STATUS"] == "1:DECEASED", "dead" ] = 1
118 | clinical.loc[ clinical["OS_STATUS"] == "0:LIVING" , "dead"] = 0
119 |
120 | clinical["time"]=clinical["OS_MONTHS"].tolist()
121 |
122 | def fix_float(x):
123 | try:
124 | x=float(x)
125 | return x
126 | except:
127 | return np.nan
128 |
129 | clinical["time"]=clinical["time"].apply(lambda x: fix_float(x) )
130 |
131 | ### Subset to patients that have a clinical record
132 | clinical_=clinical[["PATIENT_ID", "time", "dead" ]].dropna()
133 | clinical_=clinical_["PATIENT_ID"].tolist()
134 |
135 | clinical_=sample[sample["PATIENT_ID"].isin(clinical_)]["SAMPLE_ID"].tolist()
136 |
137 | # subset mrna samples to samples with clinical data
138 | mrna_=mrna.columns.tolist()
139 | mrna_=[ s for s in mrna_ if s in clinical_ ]
140 | mrna=mrna[mrna_]
141 |
142 | # define the high and low expression groups for the each gene
143 | groups=get_groups(gene_list, mrna, lp, hp)
144 |
145 | grps=groups[groups.index==gene]["group"].tolist()[0].split(" | ")
146 | h=grps[1].split(",")
147 | l=grps[0].split(",")
148 |
149 | h=sample[sample["SAMPLE_ID"].isin(h)]["PATIENT_ID"].tolist()
150 | l=sample[sample["SAMPLE_ID"].isin(l)]["PATIENT_ID"].tolist()
151 |
152 | clinical.loc[ clinical["PATIENT_ID"].isin(h), "group" ] = "high"
153 | clinical.loc[ clinical["PATIENT_ID"].isin(l) , "group"] = "low"
154 |
155 | clinical=clinical[["group","time", "dead" ]].dropna()
156 | clinical=clinical.reset_index(drop=True)
157 |
158 | title=read_study_meta(dataset=dataset)
159 | title=[s.split("short_name:")[1] for s in title if "short_name" in s][0]
160 | # print(title)
161 |
162 | ### Sending data to the lifespan app
163 | pa=defaults_lifespan()
164 |
165 | pa['xvals'] = "time"
166 | pa['yvals'] = "dead"
167 | pa['title'] = title+" - "+gene
168 | pa['xlabel'] = "Months"
169 | pa['ylabel'] = "Survival"
170 | pa['groups_value'] = "group"
171 | pa["list_of_groups"] = list(set(clinical["group"].tolist()))
172 |
173 | COLORS=["blue","green","red","black"]
174 | groups=pa["list_of_groups"]
175 |
176 | colors_dict=dict(zip(pa["list_of_groups"], COLORS[:len(groups)]))
177 | # print(colors_dict)
178 |
179 | groups_settings=[]
180 |
181 | #["Conf_Interval", "ci_legend", "show_censors"]
182 |
183 | for g in pa["list_of_groups"]:
184 | group_dic={"name":g,\
185 | "linewidth_write" : "1.5",\
186 | "linestyle_value" : "solid",\
187 | "line_color_value" : colors_dict[g],\
188 | "linecolor_write" : "",\
189 | "model_settings" : ["Conf_Interval", "ci_legend"] ,\
190 | "ci_linewidth_write" : "1.0",\
191 | "ci_linestyle_value" : "solid",\
192 | "ci_line_color_value" : colors_dict[g],\
193 | "ci_linecolor_write" : "",\
194 | "ci_alpha" : "0.2",\
195 | "censor_marker_value" : "x",\
196 | "censor_marker_size_val" : "4",\
197 | "markerc" : "black",\
198 | "markerc_write" : "",\
199 | "edge_linewidth" : "1",\
200 | "edgecolor" : "black",\
201 | "edgecolor_write" : "",\
202 | "marker_alpha" : "1"}
203 |
204 | groups_settings.append(group_dic)
205 |
206 | pa["groups_settings"]=groups_settings
207 |
208 |
209 | df, fig, cph_coeff, cph_stats, input_df=survival_ls(clinical,pa)
210 |
211 |
212 | return df, fig, cph_coeff, cph_stats,pa, input_df
213 |
214 |
215 | def read_results_files(cache, path_to_files=path_to_files): #cache
216 | @cache.memoize(60*60*2) # 2 hours
217 | def _read_results_files(path_to_files=path_to_files):
218 | df=pd.read_csv(path_to_files+"all.datasets.gene.names.cleaned.csv",sep="\t", dtype=str)
219 | return df.to_json(orient='records', default_handler=str )
220 | return pd.read_json(_read_results_files(), dtype=str)
221 |
222 |
223 | def read_meta_files(cache,path_to_files=path_to_files):
224 | @cache.memoize(60*60*2) # 2 hours
225 | def _read_meta_files(path_to_files=path_to_files):
226 | df=pd.read_csv(path_to_files+"all.metaData.formatted.csv",sep="\t")
227 | return df.to_json()
228 | return pd.read_json(_read_meta_files())
229 |
230 |
231 | def nFormat(x):
232 | if float(x) == 0:
233 | return str(x)
234 |     elif ( float(x) < 0.01 ) and ( float(x) > -0.01 ) :
235 | return str('{:.3e}'.format(float(x)))
236 | else:
237 | return str('{:.3f}'.format(float(x)))
238 |
239 |
240 | def filter_data(cache, datasets=None, genes=None):
241 | @cache.memoize(60*60*2) # 2 hours
242 | def _filter_data(cache, datasets=None, genes=None):
243 | results_files=read_results_files(cache)
244 |
245 | if datasets:
246 | results_files=results_files[ results_files['Dataset'].isin( datasets ) ]
247 |
248 | if genes:
249 | results_files=results_files[ results_files['Hugo Symbol'].isin( genes ) ]
250 |
251 |         # when both datasets and genes are given, the two filters above have
252 |         # already been applied sequentially, so no combined step is needed
253 |
254 | return results_files
255 | return _filter_data(cache, datasets, genes)
256 |
257 |
258 |
259 | def convert_html_links_to_markdown(html_text):
260 |     start_idx = 0
261 |     markdown_text = ""
262 | 
263 |     while True:
264 |         # Find the start and end positions of the <a> tag
265 |         start_tag_idx = html_text.lower().find("<a", start_idx)
266 | 
267 |         if start_tag_idx == -1:
268 |             # If no more <a> tags found, append the remaining text and break
269 |             markdown_text += html_text[start_idx:]
270 |             break
271 | 
272 |         end_tag_idx = html_text.lower().find("</a>", start_tag_idx)
273 | 
274 |         if end_tag_idx == -1:
275 |             # If there's no matching </a> tag, append the remaining text and break
276 |             markdown_text += html_text[start_idx:]
277 |             break
278 | 
279 |         # Extract the link text and href attribute
280 |         link_start_idx = html_text.find(">", start_tag_idx) + 1
281 |         link_text = html_text[link_start_idx:end_tag_idx]
282 |         href_start_idx = html_text.lower().find('href="', start_tag_idx) + 6
283 |         href_end_idx = html_text.find('"', href_start_idx)
284 |         href = html_text[href_start_idx:href_end_idx]
285 | 
286 |         # Convert the link to Dash Markdown format and append it
287 |         markdown_text += f"{html_text[start_idx:start_tag_idx]}[{link_text}]({href})"
288 | 
289 |         # Update the start index for the next iteration
290 |         start_idx = end_tag_idx + 4  # skip past "</a>"
291 | 
292 |     return markdown_text
293 |
294 |
295 | # def make_annotated_col(x,annotate_genes):
296 | # if x in annotate_genes:
297 | # return x
298 | # else:
299 | # return ""
300 |
301 | # def plot_height(sets):
302 | # if len(sets) <= 14:
303 | # minheight = 700
304 | # else:
305 | # minheight=len(sets)
306 | # minheight=minheight * 45
307 |
308 | # if minheight > 945:
309 | # minheight=945
310 |
311 | # return minheight
312 |
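313 | # Example (illustrative) of convert_html_links_to_markdown:
314 | # convert_html_links_to_markdown('See <a href="https://www.cbioportal.org">cBioPortal</a> for details.')
315 | # returns 'See [cBioPortal](https://www.cbioportal.org) for details.'
316 | # nFormat renders near-zero values in scientific notation and others with 3 decimals:
317 | # nFormat(0.0001234) -> '1.234e-04' ; nFormat(3.14159) -> '3.142'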
--------------------------------------------------------------------------------
/routes/apps/_chatbot.py:
--------------------------------------------------------------------------------
1 | from myapp import app
2 | import os
3 | import json
4 | import openai
5 | from openai import OpenAI
6 | import faiss
7 | import pickle
8 | import re
9 | import random
10 | import numpy as np
11 | from sentence_transformers import SentenceTransformer
12 | from flask_login import current_user
13 | from myapp.routes.apps._utils import make_except_toast
14 |
15 |
16 | PYFLASKI_VERSION=os.environ['PYFLASKI_VERSION']
17 | PYFLASKI_VERSION=str(PYFLASKI_VERSION)
18 |
19 | path_to_files="/flaski_private/chatbot/"
20 | # Global file paths
21 | INDEX_FILE = f"{path_to_files}mpnet_web.index"
22 | CHUNK_FILE = f"{path_to_files}chunks_web.pkl"
23 |
24 | # Preload the FAISS index and text chunks once
25 | faiss_index = faiss.read_index(INDEX_FILE)
26 | with open(CHUNK_FILE, "rb") as f:
27 | text_chunks_with_metadata = pickle.load(f)
28 |
29 | # Preload the embedding model once
30 | embedding_model = SentenceTransformer("multi-qa-mpnet-base-cos-v1")
31 |
32 | # api_key = app.config["GWDG_CHAT_API"]
33 | api_key = app.config.get("GWDG_CHAT_API", "")
34 |
35 | # API configuration
36 | base_url = "https://chat-ai.academiccloud.de/v1"
37 |
38 | # Start OpenAI client
39 | client = OpenAI(
40 | api_key = api_key,
41 | base_url = base_url
42 | )
43 |
44 | AUTHOR_VARIANTS = {
45 | ("adam antebi", "antebi adam", "adam a", "adam antabi"): "Antebi A",
46 | ("joris deelen", "joris d", "joris", "deelen joris", "deelen"): "Deelen J",
47 | ("constantinos demetriades", "constantinos", "demetriades", "c demetriades"): "Demetriades C",
48 | ("martin s", "s denzel", "martin denzel", "denzel martin"): "Denzel MS",
49 | ("zak frentz", "frentz zak", "zak", "f zak", "frentz"): "Frentz Z",
50 | ("martin graef", "martin g", "graef martin"): "Graef M",
51 | ("ina huppertz", "ina h", "huppertz", "huppertz ina"): "Huppertz I",
52 | ("ron jachimowicz", "ron j", "jachimowicz ron"): "Jachimowicz RD",
53 | ("thomas langer", "thomas l", "langer thomas"): "Langer T",
54 | ("ivan matic", "ivan m", "matic ivan"): "Matic I",
55 | ("stephanie panier", "panier stephanie", "stephanie p"): "Panier S",
56 | ("linda partridge", "linda p", "partridge linda"): "Partridge L",
57 | ("lena pernas", "lena p", "pernas lena"): "Pernas L",
58 | ("anne schaefer", "schaefer anne", "s anne"): "Schaefer A",
59 | ("james b", "b stewart", "james stewart"): "Stewart JB",
60 | ("peter tessarz", "peter t", "tessarz peter"): "Tessarz P",
61 | ("dario riccardo", "riccardo valenzano"): "Valenzano DR",
62 | ("sara a wickström", "a wickström", "sara wickstrom", "wickström"): "Wickstrom SA"
63 | }
64 |
65 | # Flatten dictionary for fast lookups
66 | FLATTENED_AUTHOR_MAP = {variant: proper_name for variants, proper_name in AUTHOR_VARIANTS.items() for variant in variants}
67 |
68 | def normalize_author_name(name):
69 | return FLATTENED_AUTHOR_MAP.get(name, name)
70 |
71 | def extract_author_from_query(query):
72 | """
73 | Extracts a likely author name from the user's query using regex.
74 | Returns a cleaned author name or None if no match is found.
75 | """
76 | query = query.lower()
77 |
78 | # Expanded list of author-related patterns
79 | author_patterns = [
80 | r"author is ([A-Za-z]+(?:\s[A-Za-z]+)?)",
81 | r"papers by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
82 | r"paper by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
83 | r"written by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
84 | r"papers of ([A-Za-z]+(?:\s[A-Za-z]+)?)",
85 | r"paper of ([A-Za-z]+(?:\s[A-Za-z]+)?)",
86 | r"studies by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
87 | r"research by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
88 | r"articles by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
89 | r"article by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
90 | r"authored by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
91 | r"work by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
92 | r"papers from ([A-Za-z]+(?:\s[A-Za-z]+)?)",
93 | r"paper from ([A-Za-z]+(?:\s[A-Za-z]+)?)",
94 | r"publications by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
95 | r"published by ([A-Za-z]+(?:\s[A-Za-z]+)?)",
96 | r"research of ([A-Za-z]+(?:\s[A-Za-z]+)?)",
97 | r"studies from ([A-Za-z]+(?:\s[A-Za-z]+)?)",
98 | ]
99 |
100 | for pattern in author_patterns:
101 | match = re.search(pattern, query)
102 | if match:
103 | return normalize_author_name(match.group(1).strip())
104 |
105 | return None
106 |
107 |
108 | def chat_age_high(query, conversation_history=None, model="meta-llama-3.1-8b-instruct", top_k=8):
109 | """
110 | Retrieve relevant paper chunks and generate a chat response based on research papers.
111 | Uses metadata directly from the embedding process.
112 |
113 | Args:
114 | query (str): User's query.
115 | conversation_history (list, optional): List of past conversation messages.
116 | model (str): Model to use for generation.
117 | top_k (int): Number of relevant chunks to retrieve.
118 |
119 | Returns:
120 | str: Generated response from the language model.
121 | """
122 | if not query:
123 | return "You may have forgotten to input your query 🤔", conversation_history
124 | if conversation_history is None or not conversation_history or conversation_history[0]["role"] != "system":
125 | conversation_history = [
126 | {
127 | "role": "system",
128 | "content": (
129 | "You are an AI assistant for the Max Planck Institute for Biology of Ageing (MPI-AGE), specializing in ageing biology. "
130 | "Your responses should be grounded primarily in the institute's published research, emphasizing biological insights and robust experimental evidence. "
131 |                 "If external information enhances your answer, you may incorporate it, but always maintain a rigorous scientific perspective. "
132 | "Your responses should always take the full conversation history into account when answering. "
133 | "Ensure continuity and avoid repeating previous answers unless necessary. "
134 | "Refer back to previous exchanges if they are relevant to the user’s QUERY."
135 | )
136 | }
137 | ]
138 |
139 |     try:
140 |         # Try to extract an author name from the query
141 |         extracted_author = extract_author_from_query(query)
142 | 
143 |         # Unpack in the original order; the FAISS ids map to positions in this list
144 |         _, text_chunks, metadata_list = zip(*text_chunks_with_metadata)
145 | 
146 |         context_parts = []
147 |         author_matches = []
148 |         unique_titles = set()
149 | 
150 |         if extracted_author:
151 |             # iterate over a shuffled copy so the author sample varies between calls
152 |             for _, chunk, metadata in random.sample(text_chunks_with_metadata, len(text_chunks_with_metadata)):
153 | authors = metadata.get("Authors", "").lower()
154 | title = metadata.get("Title", "Unknown Title")
155 |
156 | if extracted_author.lower() in authors and title not in unique_titles:
157 | unique_titles.add(title) # Add title to the set
158 | author_matches.append((chunk, metadata))
159 |
160 | # Stop collecting results once we reach top_k unique papers
161 | if len(author_matches) >= top_k:
162 | break
163 |
164 | # If metadata search finds results, return those
165 | if author_matches:
166 | context_parts = [
167 | f"Title: {m.get('Title', 'N/A')}\nAuthors: {m.get('Authors', 'N/A')}\nJournal: {m.get('Journal Name', 'N/A')}\n"
168 | f"Published Date: {m.get('Published Date', 'N/A')}\nWeb Link: {m.get('Web Link', 'N/A')}\nExcerpt: {c}\n---"
169 | for c, m in author_matches[:top_k]
170 | ]
171 | context_with_metadata = "\n\n".join(context_parts)
172 | top_k = top_k // 2
173 |
174 |         # Use the embedding model preloaded at module import; re-instantiating
175 |         # SentenceTransformer on every request would be redundant
176 |
177 | # Embed query and search
178 | query_embedding = np.array([embedding_model.encode(query)], dtype=np.float32)
179 | distances, indices = faiss_index.search(query_embedding, top_k)
180 |
181 | relevant_chunks = [text_chunks[idx] for idx in indices[0]]
182 | relevant_metadata = [metadata_list[idx] for idx in indices[0]]
183 |
184 | # Build the final context with metadata
185 | if relevant_chunks:
186 | for metadata, chunk in zip(relevant_metadata, relevant_chunks):
187 | context_parts.append(
188 | f"Title: {metadata.get('Title', 'N/A')}\n"
189 | f"Authors: {metadata.get('Authors', 'N/A')}\n"
190 | f"Journal: {metadata.get('Journal Name', 'N/A')}\n"
191 | f"Published Date: {metadata.get('Published Date', 'N/A')}\n"
192 | f"Web Link: {metadata.get('Web Link', 'N/A')}\n"
193 | f"Excerpt: {chunk}\n"
194 | "---"
195 | )
196 |
197 | context_with_metadata = "\n\n".join(context_parts)
198 |
199 | # Append past conversation history
200 | messages = conversation_history[-5:].copy()
201 |
202 | # Add current user query with research context
203 | messages.append({
204 | "role": "user",
205 | "content": (
206 | f"Below is a set of research paper contexts along with their metadata:\n\n{context_with_metadata}\n\n"
207 | "Given the provided context and any available conversation history, generate a response to the following QUERY at the end. "
208 | "Please note that these contexts are for your reference only and are not visible to me. "
209 |             # "While giving your response please consider that these contexts are only for you and not known by the user. "
210 |             # "ensuring that your response focuses on biology-related aspects, preferably ageing. "
211 |             # "You can also use external knowledge only if that strengthens the response and you should ignore contexts that are irrelevant to the QUERY. "
212 |             "You can also use external knowledge only if that strengthens the response. "
213 |             "For a generic QUERY like greetings, respond generally without considering the provided contexts. \n\n"
214 | f"QUERY: {query}"
215 | )
216 | })
217 |
218 | # Call LLM API with the full conversation
219 | chat_completion = client.chat.completions.create(
220 | messages=messages,
221 | model=model
222 | )
223 |
224 | response = chat_completion.choices[0].message.content
225 |
226 | # Update conversation history
227 | conversation_history.append({"role": "user", "content": query})
228 | conversation_history.append({"role": "assistant", "content": response})
229 |
230 | return response, conversation_history
231 |
232 | except Exception as e:
233 | make_except_toast("There was a problem with the chatbot:","chatbot_issue", e, current_user,"chatbot")
234 | return f"**Error Occurred:**\n\n```\n{str(e)}\n```", conversation_history
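235 | 
236 | # Example multi-turn usage (illustrative sketch; requires a valid GWDG_CHAT_API
237 | # key in the app config and the preloaded FAISS index/chunk files above):
238 | #
239 | # history = None
240 | # reply, history = chat_age_high("What is known about mTOR and ageing?", history)
241 | # reply, history = chat_age_high("Any papers by Demetriades on this topic?", history)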
--------------------------------------------------------------------------------
/routes/apps/_gtex.py:
--------------------------------------------------------------------------------
1 | from myapp import app
2 | import pandas as pd
3 | from myapp.routes.apps._utils import make_table
4 | from pyflaski.violinplot import figure_defaults
5 | import json
6 | from datetime import datetime
7 | import os
8 |
9 | PYFLASKI_VERSION=os.environ['PYFLASKI_VERSION']
10 | PYFLASKI_VERSION=str(PYFLASKI_VERSION)
11 |
12 | path_to_files="/flaski_private/gtex/"
13 |
14 |
15 | def read_menus(cache,path_to_files=path_to_files):
16 | @cache.memoize(60*60*2) # 2 hours
17 | def _read_menus(path_to_files=path_to_files):
18 | with open(f"{path_to_files}/menus.json", 'r') as json1_file:
19 | json1_str = json1_file.read()
20 | json1_data = json.loads(json1_str)
21 | return json1_data
22 | return _read_menus()
23 |
24 | def read_data(cache, path_to_files=path_to_files):
25 | @cache.memoize(60*60*2)
26 | def _read_data(path_to_files=path_to_files):
27 | df=pd.read_csv(f"{path_to_files}/data.tsv",sep="\t")
28 | return df.to_json()
29 | return pd.read_json(_read_data())
30 |
31 | def read_significant(cache, path_to_files=path_to_files):
32 | @cache.memoize(60*60*2)
33 | def _read_significant(path_to_files=path_to_files):
34 | df=pd.read_csv(f"{path_to_files}/sig.genes.tsv",sep="\t",dtype=str)
35 | df=df.rename(columns={"log2FoldChange":"log2(group_2/group_1)"})
36 | return df.to_json()
37 | return pd.read_json(_read_significant())
38 |
39 | def gene_report(cache,gender,tissue,geneid,path_to_files=path_to_files):
40 | @cache.memoize(60*60*2)
41 | def _gene_report(gender,tissue,geneid,path_to_files=path_to_files):
42 |
43 | metadata=pd.read_csv(f"{path_to_files}metadata.samples.tsv",sep="\t")
44 |
45 | if gender[0] == "male" :
46 | g=1
47 | elif gender[0] == "female":
48 | g=2
49 |
50 | samples=metadata[ (metadata["SMTSD_"].isin(tissue) ) & (metadata["SEX"]==g) ]
51 | samples_list=samples["SAMPID"].tolist()
52 | samples_dic=dict(zip(samples_list, samples["friendly_name"].tolist() ) )
53 |
54 | tissue_=tissue[0].replace("_._","_-_")
55 | gender_=gender[0]
56 | norm_counts_file=f"{path_to_files}{gender_}_{tissue_}.tissue.counts.tsv.deseq2.normcounts.tsv"
57 |
58 | ## Normcounts approach
59 | genes=pd.read_csv(norm_counts_file, sep="\t", usecols=[0])
60 |
61 |         gene_index=genes.iloc[:,0].tolist() # gene ids are in the first column, not the index
62 | if geneid not in gene_index:
63 | df=pd.DataFrame()
64 | return df.to_json()
65 |
66 | gene_index=gene_index.index(geneid)
67 |
68 | fileheader_size=0
69 | table_header=1
70 | skiprows=fileheader_size+table_header+gene_index
71 |
72 | df_head=pd.read_csv( norm_counts_file, nrows=1, sep="\t", header=None)
73 | df_head=pd.DataFrame(df_head.loc[0,])
74 | df_head_samples=df_head[0].tolist()
75 |         # not all sample ids from the counts seem to be in the TPM file (??) so we need the intersection
76 | samples_list_=[ s.replace("-",".") for s in samples_list ]
77 |
78 | header=[ s for s in samples_list_ if s in df_head_samples ]
79 | df_head=df_head[ df_head[0].isin(header) ]
80 | header=df_head[0].tolist()
81 | header=[ s.replace(".","-") for s in header ]
82 | samples_index=df_head.index.tolist()
83 |
84 | df=pd.read_csv( norm_counts_file, skiprows=skiprows, nrows=1, usecols=[0]+samples_index , names=["Name"]+header , sep="\t", header=None)
85 | df=df.transpose()
86 | df.reset_index(inplace=True, drop=False)
87 | df=pd.merge(df, samples, left_on=["index"], right_on=["SAMPID"], how="left")
88 |
89 |
90 |
91 | # print(df.head())
92 | # sys.stdout.flush()
93 | ## TPM approach big tissue approach
94 |
95 | # genes=read_genes(cache)
96 | # gene_index=genes[genes["gene_id"]==geneid].index.tolist()[0]
97 |
98 | # fileheader_size=2
99 | # table_header=1
100 | # skiprows=fileheader_size+table_header+gene_index
101 | # df_head=pd.read_csv( f"{path_to_files}GTEx_Analysis_2017-06-05_v8_RNASeQCv1.1.9_gene_tpm.gct", skiprows=2, nrows=1, sep="\t", header=None)
102 | # df_head=pd.DataFrame(df_head.loc[0,])
103 |         # # not all sample ids from the counts seem to be in the TPM file (??) so we need the intersection
104 | # header=[ s for s in ["Name","Description"]+samples_list if s in df_head[0].tolist() ]
105 | # df_head=df_head[ df_head[0].isin(header) ]
106 | # header=df_head[0].tolist()
107 | # samples_index=df_head.index.tolist()
108 |
109 | # df=pd.read_csv( f"{path_to_files}GTEx_Analysis_2017-06-05_v8_RNASeQCv1.1.9_gene_tpm.gct", skiprows=skiprows, nrows=1, usecols=samples_index , names=header , sep="\t", header=None)
110 | # df=df.transpose()
111 | # df.reset_index(inplace=True, drop=False)
112 | # df=pd.merge(df, samples, left_on=["index"], right_on=["SAMPID"], how="left")
113 |
114 | return df.to_json()
115 | return pd.read_json(_gene_report(gender,tissue,geneid))
116 |
117 | def read_genes(cache,path_to_files=path_to_files):
118 | @cache.memoize(60*60*2)
119 | def _read_genes(path_to_files=path_to_files):
120 | df=pd.read_csv(path_to_files+"genes.tsv",sep="\t")
121 | return df.to_json()
122 | return pd.read_json(_read_genes())
123 |
124 | def read_metadata(cache,path_to_files=path_to_files):
125 | @cache.memoize(60*60*2) # 2 hours
126 | def _read_metadata(path_to_files=path_to_files):
127 | df=pd.read_csv(path_to_files+"metadata.tsv",sep="\t")
128 | return df.to_json()
129 | return pd.read_json(_read_metadata())
130 |
131 |
132 | def change_table_minWidth(tb,minwidth):
133 | st=tb.style_table
134 | st["minWidth"]=minwidth
135 | tb.style_table=st
136 | return tb
137 |
138 | def change_fig_minWidth(fig,minwidth):
139 | st=fig.style
140 | st["minWidth"]=minwidth
141 | fig.style=st
142 | return fig
143 |
144 | def get_tables(cache,genders,tissues,groups,genenames,geneids):
145 | genes=read_genes(cache)
146 | data=read_data(cache)
147 | sigdf_=read_significant(cache)
148 | sigdf=sigdf_.drop(["file"],axis=1)
149 |
150 | if genders:
151 | data=data[data["gender"].isin(genders)]
152 | sigdf=sigdf[sigdf["gender"].isin(genders)]
153 | lgenders=len(genders)
154 | else:
155 | lgenders=0
156 |
157 | if tissues:
158 | data=data[data["tissue"].isin(tissues)]
159 | sigdf=sigdf[sigdf["tissue"].isin(tissues)]
160 | ltissues=len(tissues)
161 | else:
162 | ltissues=0
163 |
164 | if groups:
165 | data=data[ ( data["group_1"].isin(groups) ) | ( data["group_2"].isin(groups) ) ]
166 | sigdf=sigdf[ ( sigdf["group_1"].isin(groups) ) | ( sigdf["group_2"].isin(groups) ) ]
167 |
168 | if genenames or geneids :
169 |
170 | if genenames :
171 | lgenenames=len(genenames)
172 | sigdf_=sigdf[ ( sigdf["gene_name"].isin(genenames) ) ]
173 | genes_=genes[ ( genes["gene_name"].isin(genenames) ) ]
174 | else:
175 | lgenenames=0
176 | sigdf_=pd.DataFrame()
177 | genes_=pd.DataFrame()
178 |
179 | if geneids :
180 | lgeneids=len(geneids)
181 | sigdf__=sigdf[ ( sigdf["gene_id"].isin(geneids) ) ]
182 | genes__=genes[ ( genes["gene_id"].isin(geneids) ) ]
183 |
184 | else:
185 | lgeneids=0
186 | sigdf__=pd.DataFrame()
187 | genes__=pd.DataFrame()
188 |
189 | sigdf=pd.concat( [sigdf_, sigdf__ ] )
190 | sigdf=sigdf.drop_duplicates()
191 |
192 | genes=pd.concat( [genes_, genes__ ] )
193 | genes=genes.drop_duplicates()
194 |
195 | else:
196 | lgenenames=0
197 | lgeneids=0
198 |
199 | data=make_table(data,"data")
200 | sigdf=make_table(sigdf,"sigdf")
201 |
202 | if ( lgenders == 1 ) and ( ltissues == 1 ) and ( (lgenenames ==1 ) or (lgeneids == 1) ) :
203 |
204 | geneid=genes["gene_id"].tolist()[0]
205 | df=gene_report(cache, genders,tissues,geneid)
206 | if len(df) > 0:
207 |
208 | df=df[["SAMPID","AGE","0","DTHHRDY", "SEX", "SMTS","SMTSD"]]
209 | df=df[2:]
210 | df["0"]=df["0"].astype(float)
211 | df=df.rename(columns={"0":"Normalized counts"})
212 | df=df.sort_values(by=["AGE","SMTSD"],ascending=True)
213 |
214 | pa=figure_defaults()
215 |
216 | gene_name=genes["gene_name"].tolist()[0]
217 | gender=genders[0]
218 | tissue=tissues[0].replace("_._"," - ")
219 |
220 | pa["style"]="Violinplot and Swarmplot"
221 | pa['title']=f'{gene_name}, {tissue}, {gender}'
222 | pa["x_val"]="AGE"
223 | pa["y_val"]="Normalized counts"
224 | pa["vals"]=[None]+df.columns.tolist()
225 | pa["xlabel"]="AGE"
226 | pa["ylabel"]="Normalized counts"
227 |
228 | session_data={ "session_data": {"app": { "violinplot": {"filename":"" ,'last_modified':datetime.timestamp( datetime.now()),"df":df.to_json(),"pa":pa} } } }
229 | session_data["APP_VERSION"]=app.config['APP_VERSION']
230 | session_data["PYFLASKI_VERSION"]=PYFLASKI_VERSION
231 |
232 | else:
233 |
234 | df=None
235 | pa=None
236 | session_data=None
237 |
238 | return data, sigdf, df, pa, session_data
239 |
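240 | # The selective read in gene_report() avoids loading the whole counts matrix into
241 | # memory. A minimal sketch of the same trick (illustrative file name and gene id):
242 | #
243 | # gene_ids = pd.read_csv("counts.tsv", sep="\t", usecols=[0]).iloc[:,0].tolist()
244 | # row = gene_ids.index("ENSG00000141510")  # 0-based position of the gene
245 | # df = pd.read_csv("counts.tsv", sep="\t", skiprows=1+row, nrows=1, header=None)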
--------------------------------------------------------------------------------
/routes/apps/_kegg.py:
--------------------------------------------------------------------------------
1 | from myapp import app
2 | from myapp.routes.apps._utils import make_table
3 | from io import StringIO
4 | from io import BytesIO
5 | import pandas as pd
6 | import os
7 | import Bio
8 | from Bio.KEGG.REST import *
9 | from Bio.KEGG.KGML import KGML_parser
10 | from Bio.Graphics.KGML_vis import KGMLCanvas
11 |
12 |
13 | PYFLASKI_VERSION=os.environ['PYFLASKI_VERSION']
14 | PYFLASKI_VERSION=str(PYFLASKI_VERSION)
15 |
16 | path_to_files="/flaski_private/kegg/"
17 |
18 | def read_compound_pathway(cache, path_to_files=path_to_files):
19 | @cache.memoize(60*60*2)
20 | def _read_compound_pathway(path_to_files=path_to_files):
21 | df=pd.read_csv(f"{path_to_files}/compound_pathway.tsv", sep="\t", names=['compound_id', 'compound_name', 'pathways'])
22 | return df.to_json()
23 | return pd.read_json(StringIO(_read_compound_pathway()))
24 |
25 | def read_pathway_organism(cache, path_to_files=path_to_files):
26 | @cache.memoize(60*60*2)
27 | def _read_pathway_organism(path_to_files=path_to_files):
28 | df=pd.read_csv(f"{path_to_files}/pathway_organisms.tsv", sep="\t", names=['pathway_id', 'pathway_name', 'organisms'])
29 | return df.to_json()
30 | return pd.read_json(StringIO(_read_pathway_organism()))
31 |
32 | def read_organisms(cache, path_to_files=path_to_files):
33 | @cache.memoize(60*60*2)
34 | def _read_organisms(path_to_files=path_to_files):
35 | df=pd.read_csv(f"{path_to_files}/organisms.tsv", sep="\t", names=['organism_id', 'organism_name'])
36 | return df.to_json()
37 | return pd.read_json(StringIO(_read_organisms()))
38 |
39 | def compound_options(cache):
40 | compound_pathway_data=read_compound_pathway(cache)
41 | return [{'label': f"{cid}: {cname}", 'value': cid} for cid, cname in zip(compound_pathway_data['compound_id'], compound_pathway_data['compound_name'])]
42 |
43 | def pathway_options(cache, compound_list):
44 | compound_pathway_data=read_compound_pathway(cache)
45 | pathway_organism_data=read_pathway_organism(cache)
46 |
47 | cp_row=compound_pathway_data[compound_pathway_data['compound_id'].isin(compound_list)]
48 | pathways_values = cp_row['pathways'].tolist()
49 | if not pathways_values or pathways_values==[None]:
50 | return None
51 | # pathways_list = list(set([path.strip() for sublist in pathways_values for path in sublist.split(',')]))
52 | pathways_list = list(set(
53 | [path.strip() for sublist in pathways_values if sublist is not None for path in sublist.split(',')]
54 | ))
55 |
56 | pw_rows = pathway_organism_data[pathway_organism_data['pathway_id'].isin(pathways_list)]
57 |
58 | return [{'label': f"{pid}: {pname}", 'value': pid} for pid, pname in zip(pw_rows['pathway_id'], pw_rows['pathway_name'])]
59 |
60 | def organism_options(cache, pathway_id):
61 | pod=read_pathway_organism(cache)
62 | org_df=read_organisms(cache)
63 | org_value=pod.loc[pod['pathway_id'] == pathway_id, 'organisms'].values[0] if not pod.loc[pod['pathway_id'] == pathway_id, 'organisms'].empty else None
64 | if org_value is None:
65 | return None
66 |
67 | # return [{'label': org, 'value': org} for org in org_value.split(',')]
68 | return [{'label': f"{org}: {org_df.loc[org_df['organism_id'] == org, 'organism_name'].values[0]}" if not org_df.loc[org_df['organism_id'] == org, 'organism_name'].empty else org, 'value': org} for org in org_value.split(',')]
69 |
70 |
71 | def additional_compound_options(cache, pathway_id, organism_id):
72 | compound_pathway_data=read_compound_pathway(cache)
73 | try:
74 | pathname = pathway_id.replace("map", organism_id)
75 | pathway=KGML_parser.read(kegg_get(pathname, "kgml"))
76 | compound_list=[]
77 | for compound in pathway.compounds :
78 | c=compound.name.split(":")[-1]
79 | compound_list.append(c)
80 |
81 | # return [{'label': id, 'value': id} for id in compound_list] if compound_list else []
82 | return [{'label': f"{id}: {compound_pathway_data.loc[compound_pathway_data['compound_id'] == id, 'compound_name'].values[0]}"
83 | if not compound_pathway_data.loc[compound_pathway_data['compound_id'] == id, 'compound_name'].empty else id, 'value': id} for id in compound_list] if compound_list else []
84 |     except Exception:
85 | return []
86 |
87 | def kegg_operations(cache, selected_compound, pathway_id, organism_id, additional_compound):
88 | compound_pathway_data=read_compound_pathway(cache)
89 | overview=None
90 | compound_list=[]
91 | compound_dfl=[]
92 |
93 | try:
94 | pathname = pathway_id.replace("map", organism_id)
95 | pathway=KGML_parser.read(kegg_get(pathname, "kgml"))
96 | buffer = BytesIO()
97 | canvas = KGMLCanvas(pathway, import_imagemap=True)
98 |
99 | overview=(str(pathway)).split("Entry types:")[0].strip()
100 |
101 | for compound in pathway.compounds :
102 | c=compound.name.split(":")[-1]
103 | compound_list.append(c)
104 | if additional_compound:
105 | if c in additional_compound:
106 | compound.graphics[0].bgcolor="#00FFFF"
107 | if c in selected_compound:
108 | compound.graphics[0].bgcolor="#FF0000"
109 |
110 | canvas.draw(buffer)
111 | buffer.seek(0)
112 |
113 | for compound_id in compound_list:
114 | if not compound_pathway_data.loc[compound_pathway_data['compound_id']==compound_id, 'compound_name'].empty:
115 | compound_name=compound_pathway_data.loc[compound_pathway_data['compound_id']==compound_id, 'compound_name'].values[0]
116 | else:
117 | compound_name="NA"
118 | compound_dfl.append({'compound_id': compound_id, 'compound_name': compound_name})
119 |
120 | compound_df=pd.DataFrame(compound_dfl)
121 | compound_table=make_table(compound_df,"compound_df")
122 |
123 | return buffer, overview, compound_table
124 |     except Exception as e:
125 |         return None, None, None
126 |
127 |
128 | ####### Generate/organize kegg data for faster use #######
129 | ### Generate pathway_organisms.tsv with pathway_id, pathway_name, available_organisms
130 |
131 | # from Bio.KEGG import REST
132 | # import csv
133 |
134 | # # Fetch all pathways
135 | # pathway_list = REST.kegg_list('pathway').read()
136 |
137 | # # Write pathways to a TSV file without a header
138 | # with open('pathway_organisms.tsv', 'w', newline='') as outfile:
139 | # tsv_writer = csv.writer(outfile, delimiter='\t')
140 | # for line in pathway_list.splitlines():
141 | # pathway_id, pathway_name = line.split('\t')
142 | # tsv_writer.writerow([pathway_id, pathway_name])
143 |
144 | # # Fetch the list of all available organisms
145 | # organism_list = REST.kegg_list('organism').read()
146 |
147 | # # Extract the organism codes (e.g., 'hsa', 'ptr', 'pps', etc.)
148 | # organism_codes = [line.split('\t')[1] for line in organism_list.splitlines()]
149 |
150 |
151 | # # Function to get pathways for a given organism
152 | # def get_pathways_from_org(organism_code):
153 | # pathway_list = REST.kegg_list('pathway', organism_code).read()
154 | # return [line.split('\t')[0][3:] for line in pathway_list.splitlines()]
155 |
156 | # # Read TSV file, check pathways, and append organism code
157 | # def update_tsv_with_organism(tsv_file, organism_code):
158 | # # Get the list of pathways for the organism
159 | # organism_pathways = get_pathways_from_org(organism_code)
160 |
161 | # # Read the contents of the TSV file into memory
162 | # updated_rows = []
163 | # with open(tsv_file, 'r') as infile:
164 | # tsv_reader = csv.reader(infile, delimiter='\t')
165 |
166 | # # Process each line of the TSV
167 | # for row in tsv_reader:
168 | # # Extract the pathway ID (e.g., 'map01100')
169 | # pathway_id = row[0][3:] # Remove 'map' to get the numeric part
170 |
171 | # # Check if this pathway exists in the organism's pathways
172 | # if pathway_id in organism_pathways:
173 | # # Append the organism code to the row
174 | # if len(row) < 3:
175 | # row.append(organism_code)
176 | # else:
177 | # row[2] += f",{organism_code}" # If third column exists, append to it
178 |
179 | # # Add the updated row to the list
180 | # updated_rows.append(row)
181 |
182 | # # Overwrite the original file with the updated data
183 | # with open(tsv_file, 'w', newline='') as outfile:
184 | # tsv_writer = csv.writer(outfile, delimiter='\t')
185 | # tsv_writer.writerows(updated_rows)
186 |
187 | # # Update pathway_organisms.tsv file with organisms
188 | # tsv_file = 'pathway_organisms.tsv'
189 | # for org in organism_codes:
190 | # update_tsv_with_organism(tsv_file, org)
191 |
192 | ### Generate compound_pathways.tsv with compound_id, compound_name, available_pathways
193 | # import requests  # needed by get_pathways_for_compound() below
194 | # # Function to get pathways associated with a given compound using KEGG REST API
195 | # def get_pathways_for_compound(compound_id):
196 | # # Fetch pathways linked to the compound using the KEGG REST API
197 | # link_url = f"https://rest.kegg.jp/link/pathway/{compound_id}"
198 | # response = requests.get(link_url)
199 |
200 | # # Check if the response is empty (i.e., no linked pathways found)
201 | # if response.status_code != 200 or not response.text.strip():
202 | # return None
203 |
204 | # # Parse the linked pathways and extract pathway IDs (e.g., map00190)
205 | # pathway_ids = [line.split("\t")[1].split(":")[1] for line in response.text.strip().splitlines()]
206 |
207 | # # Return the comma-separated list of pathway IDs (e.g., map00190, map00195, etc.)
208 | # return ",".join(pathway_ids)
209 |
210 | # # Function to append pathway list to the compounds and save the result to a TSV file
211 | # def append_pathways_to_compounds(output_file):
212 | # # Fetch the list of all compounds from KEGG
213 | # request = REST.kegg_list("compound")
214 | # compound_data = request.read()
215 |
216 | # # Get the compound lines
217 | # compound_lines = compound_data.splitlines()[18800:]
218 |
219 | # # Process each compound and append data to the file one by one
220 | # for index, line in enumerate(compound_lines):
221 | # compound_id, compound_name = line.split("\t")
222 |
223 | # # Get the associated pathways for this compound
224 | # pathways = get_pathways_for_compound(compound_id)
225 | # if not pathways:
226 | # pathways = "NA"
227 |
228 | # mode = "w" if index == 0 else "a"
229 |
230 | # # Open the output file in the appropriate mode
231 | # with open(output_file, mode) as f:
232 | # # Write the compound ID, compound name, and pathway list to the file
233 | # f.write(f"{compound_id}\t{compound_name}\t{pathways}\n")
234 |
235 | # # Generate the TSV file and append data one by one
236 | # output_file = "compound_pathways.tsv"
237 | # append_pathways_to_compounds(output_file)
238 |
239 | # print(f"TSV file '{output_file}' generated successfully.")
240 |
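241 | ####### Example use of the helpers above (illustrative sketch; "C00031" = D-glucose,
242 | ####### "map00010" = glycolysis, "hsa" = human; `cache` is the app's Flask-Caching instance) #######
243 | # buffer, overview, compound_table = kegg_operations(cache, ["C00031"], "map00010", "hsa", [])
244 | # if buffer:
245 | #     with open("glycolysis_hsa.pdf", "wb") as f:
246 | #         f.write(buffer.read())  # KGMLCanvas.draw() renders the pathway to PDF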
--------------------------------------------------------------------------------
/routes/apps/_neanderthalage.py:
--------------------------------------------------------------------------------
1 | from io import StringIO
2 | import pandas as pd
3 | import matplotlib.pyplot as plt
4 |
5 | path_to_files="/flaski_private/neanderthalage/"
6 |
7 | def read_agegene(cache, path_to_files=path_to_files): #cache
8 | #@cache.memoize(60*60*2) # 2 hours
9 | def _read_agegene(path_to_files=path_to_files):
10 | df=pd.read_csv(path_to_files+"age.gene.altai_20240625.tsv",sep="\t", dtype=str)
11 | return df.to_json(orient='records', default_handler=str )
12 |     return pd.read_json(StringIO(_read_agegene()), dtype=str)
13 |
14 |
15 | def read_drug(cache,path_to_files=path_to_files):
16 | #@cache.memoize(60*60*2) # 2 hours
17 | def _read_drug(path_to_files=path_to_files):
18 | df=pd.read_csv(path_to_files+"drug.masterTable_20240702.tsv",sep="\t", dtype=str)
19 | return df.to_json(orient='records', default_handler=str )
20 |     return pd.read_json(StringIO(_read_drug()), dtype=str)
21 |
22 |
23 | def read_agedist(cache,path_to_files=path_to_files):
24 | #@cache.memoize(60*60*2) # 2 hours
25 | def _read_agedist(path_to_files=path_to_files):
26 | df=pd.read_csv(path_to_files+"mutation_age_distribution_20240625.tsv",sep="\t", dtype=str)
27 | return df.to_json(orient='records', default_handler=str )
28 |     return pd.read_json(StringIO(_read_agedist()), dtype=str)
29 |
30 |
31 |
32 |
33 |
34 |
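35 | # Example (illustrative; `cache` is the calling app's Flask-Caching instance and is
36 | # unused while the memoize decorators above remain commented out):
37 | #
38 | # age_gene = read_agegene(cache)  # per-gene mutation ages (age.gene.altai_20240625.tsv)
39 | # drugs = read_drug(cache)  # drug master table (drug.masterTable_20240702.tsv)
40 | # age_dist = read_agedist(cache)  # mutation age distributions (mutation_age_distribution_20240625.tsv)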
--------------------------------------------------------------------------------
/routes/apps/agebot.py:
--------------------------------------------------------------------------------
1 | from myapp import app, PAGE_PREFIX
2 | from flask_login import current_user
3 | from flask_caching import Cache
4 | from flask import session
5 | import dash
6 | from dash import dcc, html
7 | from dash.dependencies import Input, Output, State, MATCH, ALL
8 | from myapp.routes._utils import META_TAGS, navbar_A, protect_dashviews, make_navbar_logged
9 | import dash_bootstrap_components as dbc
10 | from myapp.routes.apps._utils import check_access, make_options, GROUPS, GROUPS_INITALS, make_table, make_submission_file, validate_metadata, send_submission_email, send_submission_ftp_email
11 | import os
12 | import uuid
13 | import io
14 | import json
15 | import base64
16 | import pandas as pd
17 | import zipfile
18 |
19 | from myapp import db
20 | from myapp.models import UserLogging, PrivateRoutes
21 | from werkzeug.utils import secure_filename
22 |
23 | from llama_index import VectorStoreIndex, SimpleDirectoryReader, StorageContext, load_index_from_storage
24 |
25 | FONT_AWESOME = "https://use.fontawesome.com/releases/v5.7.2/css/all.css"
26 |
27 | dashapp = dash.Dash("agebot",url_base_pathname=f'{PAGE_PREFIX}/agebot/', meta_tags=META_TAGS, server=app, external_stylesheets=[dbc.themes.BOOTSTRAP, FONT_AWESOME], title="AGE bot", assets_folder=app.config["APP_ASSETS"])# , assets_folder="/flaski/flaski/static/dash/") update_title='Load...',
28 |
29 | protect_dashviews(dashapp)
30 |
31 | if app.config["SESSION_TYPE"] == "sqlalchemy":
32 | import sqlalchemy
33 | engine = sqlalchemy.create_engine(app.config["SQLALCHEMY_DATABASE_URI"] , echo=True)
34 | app.config["SESSION_SQLALCHEMY"] = engine
35 | elif app.config["CACHE_TYPE"] == "RedisCache" :
36 | cache = Cache(dashapp.server, config={
37 | 'CACHE_TYPE': 'RedisCache',
38 | 'CACHE_REDIS_URL': 'redis://:%s@%s' %( os.environ.get('REDIS_PASSWORD'), app.config['REDIS_ADDRESS'] ) #'redis://localhost:6379'),
39 | })
40 | elif app.config["CACHE_TYPE"] == "RedisSentinelCache" :
41 | cache = Cache(dashapp.server, config={
42 | 'CACHE_TYPE': 'RedisSentinelCache',
43 | 'CACHE_REDIS_SENTINELS': [
44 | [ os.environ.get('CACHE_REDIS_SENTINELS_address'), os.environ.get('CACHE_REDIS_SENTINELS_port') ]
45 | ],
46 | 'CACHE_REDIS_SENTINEL_MASTER': os.environ.get('CACHE_REDIS_SENTINEL_MASTER')
47 | })
48 |
49 | # improve tables styling
50 | style_cell={
51 | 'height': '100%',
52 | # all three widths are needed
53 | 'minWidth': '130px', 'width': '130px', 'maxWidth': '180px',
54 | 'whiteSpace': 'normal'
55 | }
56 |
57 | dashapp.layout=html.Div(
58 | [
59 | dcc.Store( data=str(uuid.uuid4()), id='session-id' ),
60 | dcc.Location( id='url', refresh=True ),
61 | html.Div( id="protected-content" ),
62 | ]
63 | )
64 |
65 | @dashapp.callback(
66 | Output('protected-content', 'children'),
67 | Input('url', 'pathname'))
68 | def make_layout(pathname):
69 | eventlog = UserLogging(email=current_user.email, action="visit agebot")
70 | db.session.add(eventlog)
71 | db.session.commit()
72 |
73 | protected_content=html.Div(
74 | [
75 | make_navbar_logged("AGE bot",current_user),
76 | dbc.Row( html.Div(id="input-field", style={"height":"100%","overflow":"scroll"}) ),
77 | dbc.Row( dcc.Loading(id="loading-stored-file", children=dcc.Store( id='session-data' )), style={"margin-top":25, "margin-bottom":10} ),
78 | dbc.Row( html.Div(id="output-field", style={"height":"100%","overflow":"scroll"}), style={"height":"100%"}),
79 | # dcc.Store( id='session-data' ),
80 | # dbc.Row(
81 | # dcc.Loading(id="loading-stored-file", children=html.Div(id="output-field", style={"height":"100%","overflow":"scroll"}), style={"height":"100%"}),
82 | # )
83 | navbar_A,
84 | ],
85 | style={"height":"100vh","verticalAlign":"center"}
86 | )
87 | return protected_content
88 |
89 |
90 |
91 | @dashapp.callback(
92 | Output('input-field', component_property='children'),
93 | Input('session-id', 'data')
94 | )
95 | def input_field(session_id):
96 | # header_access, msg_access = check_access( 'agebot' )
97 | # header_access, msg_access = None, None # for local debugging
98 | content=[
99 | dbc.Row(
100 | [
101 | dbc.Col( dcc.Textarea(id='input', placeholder="type here", value="", style={ "width":"100%",'height': 30} ), md=5 ), #, style={ "width":"100%"} ) ), #,'height': 100,
102 | dbc.Col( html.Button(id='submit-button-state', n_clicks=0, children='Submit', style={"width": "100%",'height': 30}), md=1),
103 | ],
104 | style={"margin-top":10},
105 | justify="center",
106 | )
107 | ]
108 | return content
109 |
110 | @dashapp.callback(
111 | Output('output-field', component_property='children'),
112 | Output( "session-data", "data"),
113 | Input( "submit-button-state", "n_clicks"),
114 | State('input', 'value'),
115 | State( "session-data", "data")
116 | )
117 | def output_field(n_clicks, input, current ):
118 | # header_access, msg_access = check_access( 'agebot' )
119 | # header_access, msg_access = None, None # for local debugging
120 |
121 | if ( not input ) and ( not current ) :
122 | return None, None
123 |
124 | # storage_context = StorageContext.from_defaults(persist_dir="/flaski_private/agebot/")
125 | # index = load_index_from_storage(storage_context)
126 | # query_engine = index.as_query_engine()
127 | # response = query_engine.query(input)
128 |
129 | # input="**question: **" + input
130 | # answer="**answer: **" + response.response
131 | # output=answer + '\n\n' + input
132 |
133 | # if current:
134 | # output=output + "\n\n" + current
135 |
136 | output="""
137 | **AGE bot:** Based on the provided context information, there is no conclusive evidence to support the theory that mitochondria play a direct role in ageing. The mitochondrial free radical theory of aging postulates that reactive oxygen species (ROS) generated by mitochondrial function cause damage and contribute to the aging process. However, recent studies have shown that ROS can also serve as signaling molecules and facilitate adaptation to stress in various physiological situations. The impact of mtDNA mutations on mitochondrial function and aging is still unclear, and there is evidence that apparently neutral polymorphisms may segregate in a non-random fashion and be subject to selection that is tissue-specific.\n
138 | \n
139 | **Jorge:** What is the role of mitochondria in ageing?\n
140 | \n
141 | **AGE bot:** Hello! Based on the provided context information, I can provide an answer to your query.\n
142 | \n
143 | A mitochondrion is a type of organelle found in the cells of most eukaryotes, including animals, plants, and fungi. It is often referred to as a mitochondria. Mitochondria are organelles that generate energy for the cell through a process called cellular respiration, which involves the breakdown of glucose and other organic molecules to produce ATP (adenosine triphosphate), the energy currency of the cell.\n
144 | \n
145 | Mitochondria have two main parts: the outer membrane and the inner membrane. The outer membrane is permeable, allowing certain substances to pass through, while the inner membrane is impermeable and folded into cristae, which increase the surface area for energy production. The mitochondria also contain a matrix, which is the space between the inner and outer membranes, where the energy-producing reactions take place.\n
146 | \n
147 | In addition to generating energy, mitochondria also play a role in other cellular processes, such as signaling, cell division, and the regulation of program\n
148 | \n
149 | **Jorge:** What is a mitochondria?
150 | """
151 |
152 | content=[
153 | dbc.Row(
154 | [
155 | dbc.Col( dcc.Markdown(output, style={"width":"100%", "margin":"2px"}, id="output-text"), md=6 )
156 | ],
157 | style={"margin-top":10},
158 | justify="center",
159 | )
160 | ]
161 |
162 | return content, output
163 |
164 | @dashapp.callback(
165 | Output("modal", "is_open"),
166 | [Input("submit-button-state", "n_clicks"), Input("close", "n_clicks")],
167 | [State("modal", "is_open")],
168 | )
169 | def toggle_modal(n1, n2, is_open):
170 | if n1 or n2:
171 | return not is_open
172 | return is_open
173 |
174 | # navbar toggle for collapsed status
175 | @dashapp.callback(
176 | Output("navbar-collapse", "is_open"),
177 | [Input("navbar-toggler", "n_clicks")],
178 | [State("navbar-collapse", "is_open")])
179 | def toggle_navbar_collapse(n, is_open):
180 | if n:
181 | return not is_open
182 | return is_open
183 |
184 |
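185 | # To (re)build the index persisted under /flaski_private/agebot/ (illustrative
186 | # sketch using the llama_index imports above; the source directory is an assumption):
187 | #
188 | # documents = SimpleDirectoryReader("/path/to/publications").load_data()
189 | # index = VectorStoreIndex.from_documents(documents)
190 | # index.storage_context.persist(persist_dir="/flaski_private/agebot/")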
--------------------------------------------------------------------------------
/routes/apps/alphafold.py:
--------------------------------------------------------------------------------
1 | from myapp import app, PAGE_PREFIX
2 | from flask_login import current_user
3 | from flask_caching import Cache
4 | from flask import session
5 | import dash
6 | from dash import dcc, html
7 | from dash.dependencies import Input, Output, State, MATCH, ALL
8 | from myapp.routes._utils import META_TAGS, navbar_A, protect_dashviews, make_navbar_logged
9 | import dash_bootstrap_components as dbc
10 | from myapp.routes.apps._utils import check_access, make_options, GROUPS, make_submission_file, send_submission_email, GROUPS_INITALS
11 | import os
12 | import uuid
13 | import io
14 | import base64
15 | import pandas as pd
16 | from myapp import db
17 | from myapp.models import UserLogging, PrivateRoutes
18 | from werkzeug.utils import secure_filename
19 |
20 |
21 |
22 | FONT_AWESOME = "https://use.fontawesome.com/releases/v5.7.2/css/all.css"
23 |
24 | dashapp = dash.Dash("alphafold",url_base_pathname=f'{PAGE_PREFIX}/alphafold/', meta_tags=META_TAGS, server=app, external_stylesheets=[dbc.themes.BOOTSTRAP, FONT_AWESOME], title="AlphaFold" , assets_folder=app.config["APP_ASSETS"])# , assets_folder="/flaski/flaski/static/dash/")
25 |
26 | protect_dashviews(dashapp)
27 |
28 | if app.config["SESSION_TYPE"] == "sqlalchemy":
29 | import sqlalchemy
30 | engine = sqlalchemy.create_engine(app.config["SQLALCHEMY_DATABASE_URI"] , echo=True)
31 | app.config["SESSION_SQLALCHEMY"] = engine
32 | elif app.config["CACHE_TYPE"] == "RedisCache" :
33 | cache = Cache(dashapp.server, config={
34 | 'CACHE_TYPE': 'RedisCache',
35 | 'CACHE_REDIS_URL': 'redis://:%s@%s' %( os.environ.get('REDIS_PASSWORD'), app.config['REDIS_ADDRESS'] ) #'redis://localhost:6379'),
36 | })
37 | elif app.config["CACHE_TYPE"] == "RedisSentinelCache" :
38 | cache = Cache(dashapp.server, config={
39 | 'CACHE_TYPE': 'RedisSentinelCache',
40 | 'CACHE_REDIS_SENTINELS': [
41 | [ os.environ.get('CACHE_REDIS_SENTINELS_address'), os.environ.get('CACHE_REDIS_SENTINELS_port') ]
42 | ],
43 | 'CACHE_REDIS_SENTINEL_MASTER': os.environ.get('CACHE_REDIS_SENTINEL_MASTER')
44 | })
45 |
46 | # improve tables styling
47 | style_cell={
48 | 'height': '100%',
49 | # all three widths are needed
50 | 'minWidth': '130px', 'width': '130px', 'maxWidth': '180px',
51 | 'whiteSpace': 'normal'
52 | }
53 |
54 | dashapp.layout=html.Div(
55 | [
56 | dcc.Store( data=str(uuid.uuid4()), id='session-id' ),
57 | dcc.Location( id='url', refresh=True ),
58 | html.Div( id="protected-content" ),
59 | ]
60 | )
61 |
62 | @dashapp.callback(
63 | Output('protected-content', 'children'),
64 | Input('url', 'pathname'))
65 | def make_layout(pathname):
66 | eventlog = UserLogging(email=current_user.email, action="visit alphafold")
67 | db.session.add(eventlog)
68 | db.session.commit()
69 | protected_content=html.Div(
70 | [
71 | make_navbar_logged("AlphaFold",current_user),
72 | html.Div(id="app-content",style={"height":"100%","overflow":"scroll"}),
73 | navbar_A,
74 | ],
75 | style={"height":"100vh","verticalAlign":"center"}
76 | )
77 | return protected_content
78 |
79 | def make_submission_json(email,group, name, sequence):
80 | @cache.memoize(7200) # 2 hours
81 | def _make_submission_json(email,group, name, sequence):
82 | def clean_seqs(sequence):
83 | sequence=sequence.replace(" ", "")
84 | sequence=secure_filename(sequence)
85 | sequence=sequence.upper()
86 | return sequence
87 |
88 | def clean_header(name):
89 | name=secure_filename(name)
90 | name=name.replace(" ","_")
91 | return name
92 |
93 | filename=make_submission_file(".alphafold.json", folder="mpcdf")
94 | name=clean_header(name)
95 | email=email.replace(" ", ",")
96 | email=email.split(",")
97 | email=[ e for e in email if e ]
98 | email=",".join(email)
99 |
100 | if ">" in sequence :
101 | sequence=sequence.split(">")
102 | sequence=[ s.split("\n") for s in sequence ]
103 |         sequence=[ [ ">"+clean_header(s[0]), clean_seqs("".join(s[1:])) ] for s in sequence if len(s) > 1 ]  # join all sequence lines, not just the first
104 | sequence=[ ";".join( s ) for s in sequence ]
105 | sequence=";".join(sequence)
106 | else:
107 | sequence=clean_seqs(sequence)
108 | return {"filename":filename,"email": email, "group_name":group, "group_initials":GROUPS_INITALS[group],"name_fasta_header":name, "sequence_fasta":sequence}
109 | return _make_submission_json(email,group, name, sequence)
110 |
111 |
112 |
113 | @dashapp.callback(
114 | Output('app-content', component_property='children'),
115 | Input('session-id', 'data'))
116 | def make_app_content(session_id):
117 | # header_access, msg_access = check_access( 'alphafold' )
118 | # header_access, msg_access = None, None # for local debugging
119 |
120 | # generate dropdown options
121 | groups_=make_options(GROUPS)
122 | external_=make_options(["External"])
123 |
124 | example_fasta="MEEPQSDPSVEPPLSQETFSDLWKLLPENNVLSPLPSQAMDDLMLSPDDIEQWFTEDPGP\
125 | DEAPRMPEAAPPVAPAPAAPTPAAPAPAPSWPLSSSVPSQKTYQGSYGFRLGFLHSGTAK\
126 | SVTCTYSPALNKMFCQLAKTCPVQLWVDSTPPPGTRVRAMAIYKQSQHMTEVVRRCPHHE\
127 | RCSDSDGLAPPQHLIRVEGNLRVEYLDDRNTFRHSVVVPYEPPEVGSDCTTIHYNYMCNS\n\n\
128 | ..or multifasta, for multimers:\n\n\
129 | >sequence_1_name\n\
130 | MEEPQSDPSVEPPLSQETFSDLWKLLPENNVLSPLPSQAMDDLMLSPDDIEQWFTEDPGP\
131 | DEAPRMPEAAPPVAPAPAAPTPAAPAPAPSWPLSSSVPSQKTYQGSYGFRLGFLHSGTAK\
132 | SVTCTYSPALNKMFCQLAKTCPVQLWVDSTPPPGTRVRAMAIYKQSQHMTEVVRRCPH\n\
133 | >sequence_2_name\n\
134 | RCSDSDGLAPPQHLIRVEGNLRVEYLDDRNTFRHSVVVPYEPPEVGSDCTTIHYNYMCNS\
135 | SCMGGMNRRPILTIITLEDSSGNLLGRNSFEVRVCACPGRDRRTEEENLRKKGEPHHELP\
136 | PGSTKRALPNNTSSSPQPKKKPLDGEYFTLQIRGRERFEMFRELNEALELKDAQAGKEPG\
137 | GSRAHSSHLKSKKGQSTSRH\n\
138 | >sequence_n_nam...."
139 |
140 | content=[
141 | dbc.Card(
142 | [
143 | dbc.Row(
144 | [
145 | dbc.Col( html.Label('email') ,md=2, style={"textAlign":"right" }),
146 | dbc.Col( dcc.Input(id='email', placeholder="your.email@age.mpg.de", value=current_user.email, type='text', style={ "width":"100%"} ) ,md=5 ),
147 | dbc.Col( html.Label('your email address'),md=4 ),
148 | ],
149 | style={"margin-top":10}),
150 | dbc.Row(
151 | [
152 | dbc.Col( html.Label('Group') ,md=2 , style={"textAlign":"right" }),
153 | dbc.Col( dcc.Dropdown( id='opt-group', options=groups_, style={ "width":"100%"}),md=5 ),
154 | dbc.Col( html.Label('Select from dropdown menu'),md=4 ),
155 | ],
156 | style={"margin-top":10}),
157 | dbc.Row(
158 | [
159 | dbc.Col( html.Label('Sequence name') ,md=2 , style={"textAlign":"right" }),
160 | dbc.Col( dcc.Input(id='name', placeholder="my sequence name", value="", type='text', style={ "width":"100%"} ) ,md=5 ),
161 | dbc.Col( html.Label('Fasta header'),md=4 ),
162 | ],
163 | style={"margin-top":10}),
164 | dbc.Row(
165 | [
166 | dbc.Col( html.Label('Sequence') ,md=2 , style={"textAlign":"right" }),
167 | dbc.Col( dcc.Textarea(id='sequence', placeholder=example_fasta, value="", style={ "width":"100%",'height': 400} ) ,md=5 ),
168 | dbc.Col( html.Label('Protein sequence'),md=4 ),
169 | ],
170 | style={"margin-top":10})
171 | ],
172 | body=False
173 | ),
174 | html.Button(id='submit-button-state', n_clicks=0, children='Submit', style={"width": "200px","margin-top":4, "margin-bottom":"50px"}),
175 | dbc.Modal(
176 | dcc.Loading(
177 | id=f"modal-load",
178 | type="default",
179 | children=
180 | [
181 | dbc.ModalHeader(dbc.ModalTitle("Whoopss..",id="modal_header") ),
182 | dbc.ModalBody("something went wrong!", id="modal_body"),
183 | dbc.ModalFooter(
184 | dbc.Button(
185 | "Close", id="close", className="ms-auto", n_clicks=0
186 | )
187 | ),
188 | ],
189 | ),
190 | id="modal",
191 | is_open=False,
192 | ),
193 | dcc.Download( id="download-file" )
194 | ]
195 |
196 | return content
197 |
198 | # main submission call
199 | @dashapp.callback(
200 | Output("modal_header", "children"),
201 | Output("modal_body", "children"),
202 | Output("download-file","data"),
203 | Input('submit-button-state', 'n_clicks'),
204 | State('email', 'value'),
205 | State('opt-group', 'value'),
206 | State('name', 'value'),
207 | State('sequence', 'value'),
208 | prevent_initial_call=True )
209 | def update_output(n_clicks, email,group,name,sequence):
210 | header, msg = check_access( 'alphafold' )
211 | # header, msg = None, None
212 | if msg :
213 | return header, msg, dash.no_update
214 |
215 | subdic=make_submission_json( email,group, name, sequence)
216 |
217 | if os.path.isfile(subdic["filename"].replace("json","tsv")):
218 | header="Attention"
219 | msg='''You have already submitted this data. Re-submission will not take place.'''
220 | else:
221 | df=pd.DataFrame(subdic, index=[0] )
222 | df=df.transpose()
223 | df.reset_index(inplace=True, drop=False)
224 | df.to_csv(subdic["filename"].replace("json","tsv"), sep="\t", index=None, header=False)
225 | header="Success!"
226 | msg='''Please allow a summary file of your submission to download and check your email for confirmation.'''
227 | send_submission_email(user=current_user, submission_type="AlphaFold", submission_tag=subdic["filename"].replace("json","tsv"), submission_file=None, attachment_path=None)
228 |
229 | return header, msg, dcc.send_file(subdic["filename"].replace("json","tsv"))
230 |
231 | @dashapp.callback(
232 | Output("modal", "is_open"),
233 | [Input("submit-button-state", "n_clicks"), Input("close", "n_clicks")],
234 | [State("modal", "is_open")],
235 | )
236 | def toggle_modal(n1, n2, is_open):
237 | if n1 or n2:
238 | return not is_open
239 | return is_open
240 |
241 | # navbar toggle for collapsed status
242 | @dashapp.callback(
243 | Output("navbar-collapse", "is_open"),
244 | [Input("navbar-toggler", "n_clicks")],
245 | [State("navbar-collapse", "is_open")])
246 | def toggle_navbar_collapse(n, is_open):
247 | if n:
248 | return not is_open
249 | return is_open
250 |
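251 | # Example of the sequence encoding produced by make_submission_json (illustrative):
252 | # a multifasta input such as
253 | #     >seq_1
254 | #     MEEPQSDP...
255 | #     >seq_2
256 | #     RCSDSDGL...
257 | # is flattened into a single ';'-separated string:
258 | #     ">seq_1;MEEPQSDP...;>seq_2;RCSDSDGL..."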
--------------------------------------------------------------------------------
/routes/apps/chatbot.py:
--------------------------------------------------------------------------------
1 | from myapp import app, PAGE_PREFIX, PRIVATE_ROUTES
2 | from myapp import db
3 | from myapp.models import UserLogging, PrivateRoutes
4 | from flask_login import current_user
5 | from flask_caching import Cache
6 | import dash
7 | import os
8 | import uuid
9 | from dash import dcc, html, callback_context, no_update
10 | from dash.dependencies import Input, Output, State
11 | from myapp.routes._utils import META_TAGS, navbar_A, protect_dashviews, make_navbar_logged
12 | import dash_bootstrap_components as dbc
13 | from ._chatbot import chat_age_high
14 |
15 | FONT_AWESOME = "https://use.fontawesome.com/releases/v5.7.2/css/all.css"
16 |
17 | dashapp = dash.Dash(
18 | "chatbot",
19 | url_base_pathname=f'{PAGE_PREFIX}/chatbot/',
20 | meta_tags=META_TAGS,
21 | server=app,
22 | external_stylesheets=[dbc.themes.BOOTSTRAP, FONT_AWESOME],
23 | title="Chatbot AGE",
24 | assets_folder=app.config["APP_ASSETS"]
25 | )
26 |
27 | protect_dashviews(dashapp)
28 |
29 | if app.config["SESSION_TYPE"] == "sqlalchemy":
30 | import sqlalchemy
31 | engine = sqlalchemy.create_engine(app.config["SQLALCHEMY_DATABASE_URI"], echo=True)
32 | app.config["SESSION_SQLALCHEMY"] = engine
33 | elif app.config["CACHE_TYPE"] == "RedisCache":
34 | cache = Cache(dashapp.server, config={
35 | 'CACHE_TYPE': 'RedisCache',
36 | 'CACHE_REDIS_URL': 'redis://:%s@%s' % (os.environ.get('REDIS_PASSWORD'), app.config['REDIS_ADDRESS']),
37 | })
38 | elif app.config["CACHE_TYPE"] == "RedisSentinelCache":
39 | cache = Cache(dashapp.server, config={
40 | 'CACHE_TYPE': 'RedisSentinelCache',
41 | 'CACHE_REDIS_SENTINELS': [
42 | [os.environ.get('CACHE_REDIS_SENTINELS_address'), int(os.environ.get('CACHE_REDIS_SENTINELS_port'))]
43 | ],
44 | 'CACHE_REDIS_SENTINEL_MASTER': os.environ.get('CACHE_REDIS_SENTINEL_MASTER')
45 | })
46 |
47 | dashapp.layout = html.Div(
48 | [
49 | dcc.Store(data=str(uuid.uuid4()), id='session-id'),
50 | dcc.Location(id='url', refresh=False),
51 | html.Div(id="protected-content"),
52 | # Dummy divs for clientside callbacks
53 | html.Div(id="dummy-scroll", style={"display": "none"}),
54 | html.Div(id="btn-state-dummy", style={"display": "none"})
55 | ]
56 | )
57 |
58 | @dashapp.callback(
59 | Output('protected-content', 'children'),
60 | Input('session-id', 'data')
61 | )
62 | def make_layout(session_id):
63 |     ## log the visit (access control is handled by protect_dashviews)
64 | eventlog = UserLogging(email=current_user.email, action="visit chatbot")
65 | db.session.add(eventlog)
66 | db.session.commit()
67 |
68 | protected_content = html.Div(
69 | [
70 | make_navbar_logged(
71 | html.Span(
72 | [
73 | "Chatbot AGE",
74 | html.I(
75 | className="fas fa-info-circle ms-2",
76 | id="chatbot-info-icon",
77 | style={
78 | "cursor": "pointer", "fontSize": "0.7em", "verticalAlign": "super"
79 | }
80 | )
81 | ],
82 | style={"display": "inline-flex", "alignItems": "flex-start"}
83 | ),
84 | current_user
85 | ),
86 | dbc.Tooltip(
87 | "This AI assistant uses the Meta LLaMA 3.1-8B Instruct model, powered by GWDG hardware, and is fed with the Open Access publications from the institute. Be aware that chatbots can produce hallucinated content; their responses can be unreliable and should be interpreted with caution.",
88 | target="chatbot-info-icon",
89 | placement="bottom"
90 | ),
91 | html.Div(style={"marginTop": "10px"}), # Added space between navbar and chat container
92 | html.Div(
93 | [
94 | dcc.Store(id="chat-history", data=[]),
95 | dcc.Store(id="conversation-history", data=[]),
96 | html.Div(
97 | id="chat-container",
98 | style={
99 | "height": "calc(100vh - 200px)",
100 | "overflowY": "auto",
101 | "padding": "10px",
102 | "paddingBottom": "100px",
103 | "display": "flex",
104 | "flexDirection": "column"
105 | }
106 | ),
107 | html.Div(
108 | [
109 | dbc.Textarea(
110 | id="user-input",
111 | placeholder="Ask the MPI-AGE chatbot...",
112 | style={"width": "100%", "resize": "none", "minHeight": "100px", "border": "3px solid #ddd"}
113 | ),
114 | # Wrap the ASK button in a container so we can update it
115 | html.Div(
116 | dbc.Button(
117 | "ASK",
118 | id="send-btn",
119 | color="secondary",
120 | className="mt-2",
121 | style={"width": "100%"}
122 | ),
123 | id="send-btn-container"
124 | )
125 | ],
126 | style={
127 | "width": "90%",
128 | "maxWidth": "1200px",
129 | # "paddingTop": "10px",
130 | # "paddingBottom": "10px",
131 | # "borderTop": "1px solid #ddd",
132 | # "backgroundColor": "#f8f9fa",
133 | "position": "fixed",
134 | "bottom": "50px",
135 | "left": "50%",
136 | "transform": "translateX(-50%)",
137 | "zIndex": "10000",
138 | },
139 | ),
140 | ],
141 | style={
142 | "width": "90%",
143 | "maxWidth": "1200px",
144 | "margin": "auto",
145 | "padding": "20px",
146 | "border": "1px solid #ddd",
147 | "borderRadius": "10px",
148 | "backgroundColor": "#f8f9fa",
149 | },
150 | ),
151 | navbar_A,
152 | ]
153 | )
154 | return protected_content
155 |
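156 | # Main chat callback: send the user's question to the model via chat_age_high and append both sides of the exchange to the chat window.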
156 | @dashapp.callback(
157 | [Output("chat-container", "children"),
158 | Output("user-input", "value"),
159 | Output("conversation-history", "data")], # Store conversation history
160 | [Input("send-btn", "n_clicks")],
161 | [State("user-input", "value"),
162 | State("chat-container", "children"),
163 | State("conversation-history", "data")], # Retrieve stored history
164 | prevent_initial_call=True
165 | )
166 | def update_chat(n_clicks, user_message, chat_history, conversation_history):
167 | if not chat_history:
168 | chat_history = []
169 |
170 | if conversation_history is None:
171 | conversation_history = []
172 |
173 | # Get bot response and updated conversation history
174 | bot_response_text, conversation_history = chat_age_high(user_message, conversation_history)
175 |
176 | # Wrap the bot response in a Loading component so it shows a spinner
177 | bot_response = dcc.Loading(
178 | type="default",
179 | children=[dcc.Markdown(bot_response_text)]
180 | # children=[(str(conversation_history))]
181 | )
182 |
183 | # Append messages to UI chat history
184 | user_div = html.Div(
185 | [html.B("🤓 "), html.B(user_message)],
186 | className="last-message",
187 | style={"marginTop": "20px", "marginBottom": "20px", "textAlign": "right"}
188 | )
189 | bot_div = html.Div(
190 | [html.B("📚 "), bot_response],
191 | style={"marginTop": "20px", "marginBottom": "20px", "textAlign": "left"}
192 | )
193 |
194 | chat_history.extend([user_div, bot_div])
195 |
196 | return chat_history, "", conversation_history # Return updated history
197 |
198 |
199 | # Clientside callback to scroll to the last message and re-enable the ASK button after chat updates
200 | dashapp.clientside_callback(
201 | """
202 | function(children) {
203 | setTimeout(function(){
204 | var messages = document.getElementsByClassName('last-message');
205 | if (messages.length > 0) {
206 | messages[messages.length - 1].scrollIntoView({behavior: 'smooth'});
207 | }
208 | // Re-enable the button and reset its content
209 | var btn = document.getElementById('send-btn');
210 | if (btn) {
211 | btn.disabled = false;
212 | btn.innerHTML = "ASK";
213 | }
214 | }, 100);
215 | return '';
216 | }
217 | """,
218 | Output("dummy-scroll", "children"),
219 | Input("chat-container", "children")
220 | )
221 |
222 | # Clientside callback to disable the ASK button and show a spinner immediately when clicked
223 | dashapp.clientside_callback(
224 | """
225 | function(n_clicks) {
226 | // When the button is clicked, disable it and replace its content with a spinner
227 | var btn = document.getElementById('send-btn');
228 | if (btn) {
229 | btn.disabled = true;
230 | btn.innerHTML = '<span class="spinner-border spinner-border-sm" role="status" aria-hidden="true"></span> Loading...';
231 | }
232 | return '';
233 | }
234 | """,
235 | Output("btn-state-dummy", "children"),
236 | Input("send-btn", "n_clicks")
237 | )
238 |
--------------------------------------------------------------------------------
/routes/apps/convert.py:
--------------------------------------------------------------------------------
1 | from myapp import app, PAGE_PREFIX
2 | from flask_login import current_user
3 | from flask_caching import Cache
4 | from flask import session
5 | import dash
6 | from dash import dcc, html
7 | from dash.dependencies import Input, Output, State, MATCH, ALL
8 | from dash.exceptions import PreventUpdate
9 | from myapp.routes._utils import META_TAGS, navbar_A, protect_dashviews, make_navbar_logged
10 | import dash_bootstrap_components as dbc
11 | from myapp.routes.apps._utils import parse_import_json, parse_table, make_options, make_except_toast, ask_for_help, save_session, load_session, check_app, scatterplot_import, david_import, cellplot_import
12 | from pyflaski.scatterplot import make_figure, figure_defaults
13 | import os
14 | import uuid
15 | import traceback
16 | import json
17 | import base64
18 | import pandas as pd
19 | import time
20 | import plotly.express as px
21 | # from plotly.io import write_image
22 | import plotly.graph_objects as go
23 | from werkzeug.utils import secure_filename
24 | from myapp import db
25 | from myapp.models import UserLogging
26 | from time import sleep
27 |
28 | # import pyflaski as flaski
29 | import sys
31 |
32 | PYFLASKI_VERSION=os.environ['PYFLASKI_VERSION']
33 | PYFLASKI_VERSION=str(PYFLASKI_VERSION)
34 | FONT_AWESOME = "https://use.fontawesome.com/releases/v5.7.2/css/all.css"
35 |
36 | dashapp = dash.Dash("convert",url_base_pathname=f'{PAGE_PREFIX}/convert/', meta_tags=META_TAGS, server=app, external_stylesheets=[dbc.themes.BOOTSTRAP, FONT_AWESOME], title=app.config["APP_TITLE"], assets_folder=app.config["APP_ASSETS"])# , assets_folder="/flaski/flaski/static/dash/")
37 |
38 | protect_dashviews(dashapp)
39 |
40 | if app.config["SESSION_TYPE"] == "sqlalchemy":
41 | import sqlalchemy
42 | engine = sqlalchemy.create_engine(app.config["SQLALCHEMY_DATABASE_URI"] , echo=True)
43 | app.config["SESSION_SQLALCHEMY"] = engine
44 | elif app.config["CACHE_TYPE"] == "RedisCache" :
45 | cache = Cache(dashapp.server, config={
46 | 'CACHE_TYPE': 'RedisCache',
47 | 'CACHE_REDIS_URL': 'redis://:%s@%s' %( os.environ.get('REDIS_PASSWORD'), app.config['REDIS_ADDRESS'] ) #'redis://localhost:6379'),
48 | })
49 | elif app.config["CACHE_TYPE"] == "RedisSentinelCache" :
50 | cache = Cache(dashapp.server, config={
51 | 'CACHE_TYPE': 'RedisSentinelCache',
52 | 'CACHE_REDIS_SENTINELS': [
53 | [ os.environ.get('CACHE_REDIS_SENTINELS_address'), os.environ.get('CACHE_REDIS_SENTINELS_port') ]
54 | ],
55 | 'CACHE_REDIS_SENTINEL_MASTER': os.environ.get('CACHE_REDIS_SENTINEL_MASTER')
56 | })
57 |
58 |
59 | dashapp.layout=html.Div(
60 | [
61 | dcc.Store( data=str(uuid.uuid4()), id='session-id' ), dcc.Store( data={}, id='session-data' ),  # session-data is read by the help_email callback
62 | dcc.Location( id='url', refresh=True ),
63 | html.Div( id="protected-content" ),
64 | ]
65 | )
66 |
67 | card_label_style={"margin-right":"2px"}
68 | card_label_style_={"margin-left":"5px","margin-right":"2px"}
69 |
70 | card_input_style={"height":"35px","width":"100%"}
71 | # card_input_style_={"height":"35px","width":"100%","margin-right":"10px"}
72 | card_body_style={ "padding":"2px", "padding-top":"2px"}#,"margin":"0px"}
73 | # card_body_style={ "padding":"2px", "padding-top":"4px","padding-left":"18px"}
74 |
75 |
76 | @dashapp.callback(
77 | Output('protected-content', 'children'),
78 | Input('url', 'pathname'))
79 | def make_layout(pathname):
80 | eventlog = UserLogging(email=current_user.email, action="visit convert")
81 | db.session.add(eventlog)
82 | db.session.commit()
83 | protected_content=html.Div(
84 | [
85 | make_navbar_logged("v2 to v3 converter",current_user),
86 | html.Div(id="app-content"),
87 | navbar_A,
88 | ],
89 | style={"height":"100vh","verticalAlign":"center"}
90 | )
91 | return protected_content
92 |
93 | @dashapp.callback(
94 | Output('app-content', 'children'),
95 | Input('url', 'pathname'))
96 | def make_app_content(pathname):
97 |
98 | app_content=dbc.Row(
99 | [
100 | dbc.Col(
101 | [
102 | dbc.Card(
103 | dbc.Form(
104 | [
105 | dcc.Upload(
106 | id='upload-data',
107 | children=html.Div(
108 | [ html.A('drop a file here',id='upload-data-text') ],
109 | style={ 'textAlign': 'center', "padding-top": 35, "margin-bottom": 4, }
110 | ),
111 | style={
112 | 'width': '100%',
113 | 'borderWidth': '1px',
114 | 'borderStyle': 'dashed',
115 | 'borderRadius': '0px',
116 | "margin-bottom": "0px",
117 | 'max-width':"375px",
118 | 'min-height':"100px",
119 | # "verticalAlign":"center"
120 | },
121 | multiple=False,
122 | ),
123 | html.Div( id="app-version"),
124 | ]
125 | ),
126 | body=True,
127 | outline=False,
128 | color="white",
129 | ),
130 | ],
131 | sm=9,md=6, lg=5, xl=5,
132 | align="center",
133 | style={ "margin-left":0, "margin-right":0 ,'margin-bottom':"50px",'max-width':"375px"}
134 | ),
135 | dcc.Download( id="download-file1" ),
136 | dcc.Download( id="download-file2" ),
137 | ],
138 | align="center",
139 | justify="center",
140 | style={"min-height": "86vh", 'verticalAlign': 'center'},
141 | )
142 |
143 | return app_content
144 |
145 | @dashapp.callback(
146 | Output('upload-data', 'children'),
147 | Output("download-file1","data"),
148 | Output("download-file2","data"),
149 | Input('upload-data', 'contents'),
150 | State('upload-data', 'filename'),
151 | State('upload-data', 'last_modified'),
152 | State('session-id', 'data'),
153 | prevent_initial_call=True)
154 | def read_input_file(contents,filename,last_modified,session_id):
155 | if not filename :
156 | raise dash.exceptions.PreventUpdate
157 | filename=secure_filename( filename )
158 | message=dcc.Markdown("Not a valid session file.")
159 | children=html.Div(
160 | [ html.A(message,id='upload-data-text') ],
161 | style={ 'textAlign': 'center', "margin-top": 35, "margin-bottom": 4}
162 | )
163 | if filename.split(".")[-1] not in [ "ses"]:#, "arg" ]:
164 | return children, None, None
165 |
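166 | # Rename the legacy v2 extensions (.ses/.arg) to .json for the converted v3 session file.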
166 | filename=filename.replace(".ses", ".json").replace(".arg",".json")
167 |
168 | content_type, content_string = contents.split(',')
169 | decoded=base64.b64decode(content_string)
170 | decoded=decoded.decode('utf-8')
171 | session_data=json.loads(decoded)
172 |
173 | # print(session_data.keys())
174 |
175 | session_app=session_data["app"]
176 |
177 | # print(session_app)
178 |
179 | if session_app == "iscatterplot" :
180 | session_data=scatterplot_import(session_data, last_modified=last_modified)
181 |
182 | def write_json(bytes_io, session_data=session_data):  # dcc.send_bytes passes a BytesIO sink, not a file name
183 | bytes_io.write(json.dumps(session_data).encode())
184 |
185 | return dash.no_update, dcc.send_bytes(write_json, filename), dash.no_update
186 | elif session_app == "david" :
187 | session_data, david_df, report_stats = david_import(session_data, last_modified=last_modified)
188 |
189 | david_df=pd.read_json(david_df)
190 | report_stats=pd.read_json(report_stats)
191 |
192 | import io
193 | output = io.BytesIO()
194 | writer= pd.ExcelWriter(output)
195 | david_df.to_excel(writer, sheet_name = 'david', index = False)
196 | report_stats.to_excel(writer, sheet_name = 'stats', index = False)
197 | writer.close()
198 | data=output.getvalue()
199 | excel_filename=filename.replace(".json", ".xlsx")
200 |
201 | def write_json(bytes_io, session_data=session_data):
202 | bytes_io.write(json.dumps(session_data).encode())
203 |
204 | return dash.no_update, dcc.send_bytes(write_json, filename), dcc.send_bytes(data, excel_filename)
205 |
206 | elif session_app == "icellplot" :
207 | session_data = cellplot_import(session_data, last_modified=last_modified)
208 |
209 | def write_json(bytes_io, session_data=session_data):
210 | bytes_io.write(json.dumps(session_data).encode())
211 |
212 | return dash.no_update, dcc.send_bytes(write_json, filename), dash.no_update
213 |
214 | else:
215 | return children, dash.no_update, dash.no_update
216 |
217 | @dashapp.callback(
218 | Output( { 'type': 'collapse-toast-traceback', 'index': MATCH }, "is_open"),
219 | Output( { 'type': 'toggler-toast-traceback', 'index': MATCH }, "children"),
220 | Input( { 'type': 'toggler-toast-traceback', 'index': MATCH }, "n_clicks"),
221 | State( { 'type': 'collapse-toast-traceback', 'index': MATCH }, "is_open"),
222 | prevent_initial_call=True
223 | )
224 | def toggle_toast_traceback(n,is_open):
225 | if not is_open:
226 | return not is_open , "collapse"
227 | else:
228 | return not is_open , "expand"
229 |
230 | @dashapp.callback(
231 | Output( { 'type': 'toast-error', 'index': ALL }, "is_open" ),
232 | Output( 'toast-email' , "children" ),
233 | Output( { 'type': 'toast-error', 'index': ALL }, "n_clicks" ),
234 | Input( { 'type': 'help-toast-traceback', 'index': ALL }, "n_clicks" ),
235 | State({ "type":"traceback", "index":ALL }, "data"),
236 | State( "session-data", "data"),
237 | prevent_initial_call=True
238 | )
239 | def help_email(n,tb_str, session_data):
240 | closed=[ False for s in n ]
241 | n=[ s for s in n if s ]
242 | clicks=[ 0 for s in n ]
243 | n=[ s for s in n if s > 0 ]
244 | if n :
245 |
246 | toast=dbc.Toast(
247 | [
248 | "We have received your request for help and will get back to you as soon as possible.",
249 | ],
250 | id={'type':'success-email','index':"email"},
251 | header="Help",
252 | is_open=True,
253 | dismissable=True,
254 | icon="success",
255 | )
256 |
257 | if tb_str :
258 | tb_str= [ s for s in tb_str if s ]
259 | tb_str="\n\n***********************************\n\n".join(tb_str)
260 | else:
261 | tb_str="! traceback could not be found"
262 |
263 | ask_for_help(tb_str,current_user, "convert", session_data)
264 |
265 | return closed, toast, clicks
266 | else:
267 |
268 | raise PreventUpdate
269 |
270 | @dashapp.callback(
271 | Output("navbar-collapse", "is_open"),
272 | [Input("navbar-toggler", "n_clicks")],
273 | [State("navbar-collapse", "is_open")],
274 | )
275 | def toggle_navbar_collapse(n, is_open):
276 | if n:
277 | return not is_open
278 | return is_open
--------------------------------------------------------------------------------
/routes/apps/ip.py:
--------------------------------------------------------------------------------
1 | from myapp import app, PAGE_PREFIX
2 | from flask_login import current_user
3 | from flask_caching import Cache
4 | from flask import session
5 | import dash
6 | from dash import dcc, html
7 | from dash.dependencies import Input, Output, State, MATCH, ALL
8 | from myapp.routes._utils import META_TAGS, navbar_A, protect_dashviews, make_navbar_logged
9 | import dash_bootstrap_components as dbc
10 | from myapp.routes.apps._utils import check_access, make_options, GROUPS, make_table, make_submission_file, validate_metadata, send_submission_email, send_submission_ftp_email
11 | import os
12 | import uuid
13 | import io
14 | import base64
15 | import pandas as pd
16 | from myapp import db
17 | from myapp.models import UserLogging, PrivateRoutes
18 | from werkzeug.utils import secure_filename
19 | from flask import jsonify, request
20 | import requests
21 |
22 | # @app.route('/v3/ip', methods=['GET'])
23 | # def get_tasks():
24 | # if request.environ.get('HTTP_X_FORWARDED_FOR') is None:
25 | # return jsonify({'REMOTE_ADDR': request.environ['REMOTE_ADDR']}), 200
26 | # else:
27 | # return jsonify({'REMOTE_ADDR': request.environ['REMOTE_ADDR'],'HTTP_X_FORWARDED_FOR': request.environ['HTTP_X_FORWARDED_FOR']}), 200
28 |
29 |
30 | FONT_AWESOME = "https://use.fontawesome.com/releases/v5.7.2/css/all.css"
31 |
32 | dashapp = dash.Dash("ip",url_base_pathname=f'{PAGE_PREFIX}/ip/', meta_tags=META_TAGS, server=app, external_stylesheets=[dbc.themes.BOOTSTRAP, FONT_AWESOME], title=app.config["APP_TITLE"], assets_folder=app.config["APP_ASSETS"])# , assets_folder="/flaski/flaski/static/dash/") update_title='Load...',
33 |
34 | protect_dashviews(dashapp)
35 |
36 | if app.config["SESSION_TYPE"] == "sqlalchemy":
37 | import sqlalchemy
38 | engine = sqlalchemy.create_engine(app.config["SQLALCHEMY_DATABASE_URI"] , echo=True)
39 | app.config["SESSION_SQLALCHEMY"] = engine
40 | elif app.config["CACHE_TYPE"] == "RedisCache" :
41 | cache = Cache(dashapp.server, config={
42 | 'CACHE_TYPE': 'RedisCache',
43 | 'CACHE_REDIS_URL': 'redis://:%s@%s' %( os.environ.get('REDIS_PASSWORD'), app.config['REDIS_ADDRESS'] ) #'redis://localhost:6379'),
44 | })
45 | elif app.config["CACHE_TYPE"] == "RedisSentinelCache" :
46 | cache = Cache(dashapp.server, config={
47 | 'CACHE_TYPE': 'RedisSentinelCache',
48 | 'CACHE_REDIS_SENTINELS': [
49 | [ os.environ.get('CACHE_REDIS_SENTINELS_address'), os.environ.get('CACHE_REDIS_SENTINELS_port') ]
50 | ],
51 | 'CACHE_REDIS_SENTINEL_MASTER': os.environ.get('CACHE_REDIS_SENTINEL_MASTER')
52 | })
53 |
54 | dashapp.layout=html.Div(
55 | [
56 | dcc.Store( data=str(uuid.uuid4()), id='session-id' ),
57 | dcc.Location( id='url', refresh=True ),
58 | html.Div( id="protected-content" ),
59 | ]
60 | )
61 |
62 | @dashapp.callback(
63 | Output('protected-content', 'children'),
64 | Input('url', 'pathname'))
65 | def make_layout(pathname):
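66 | # Debug helper: report the caller's public IP as seen by three external echo services and by the proxy headers.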
66 | ip_site1 = requests.get('https://checkip.amazonaws.com').text.strip()
67 | ip_site2 = requests.get('https://ifconfig.me').text.strip()
68 | ip_site3 = requests.get('https://ident.me').text.strip()
69 | headers_list = request.headers.getlist("X-Forwarded-For")
70 | ip = headers_list[0] if headers_list else request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
71 | l=request.headers.getlist("X-Forwarded-For")
72 | l="--".join(l)
73 | r=request.access_route
74 | r="--".join(r)
75 | return ["r::", r, "//", "l::", l, "//", "X-Real-IP", request.headers.get('X-Real-IP', ''), "//", "ip::", ip, "//", 'REMOTE_ADDR', '\n', request.environ.get('REMOTE_ADDR'), '\n', 'HTTP_X_FORWARDED_FOR', '\n', request.environ.get('HTTP_X_FORWARDED_FOR', ''), '\n', 'x_real_ip::', request.headers.get('X-Real-IP'), '\n', 'remote_addr::', request.remote_addr, '\n', 'ip1::', ip_site1, '\n', 'ip2::', ip_site2, '\n', 'ip3::', ip_site3 ]
76 | # if request.environ.get('HTTP_X_FORWARDED_FOR') is None:
77 | # return [ 'REMOTE_ADDR','\n', request.environ['REMOTE_ADDR'] ]
78 | # else:
79 | # return [ 'REMOTE_ADDR', '\n', request.environ['REMOTE_ADDR'],'\n', 'HTTP_X_FORWARDED_FOR', '\n', request.environ['HTTP_X_FORWARDED_FOR'] ]
--------------------------------------------------------------------------------
/routes/apps/transfer.py:
--------------------------------------------------------------------------------
1 | from myapp import app, db, PAGE_PREFIX
2 | from flask_login import current_user
3 | import dash
4 | from dash.dependencies import Input, Output, State
5 | from dash import dcc, html
6 | import dash_bootstrap_components as dbc
7 | from myapp.email import send_email
8 | import os
9 | from datetime import datetime, date
10 | from myapp.routes._utils import META_TAGS, navbar_A
11 | from flask import render_template
12 | from myapp.models import User, FTPSubmissions
13 | import pymysql.cursors
14 | import shutil
15 |
16 | dashapp = dash.Dash("transfer",url_base_pathname=f'{PAGE_PREFIX}/transfer/', meta_tags=META_TAGS, server=app, external_stylesheets=[dbc.themes.BOOTSTRAP], title=app.config["APP_TITLE"], assets_folder=app.config["APP_ASSETS"])# , assets_folder="/flaski/flaski/static/dash/")
17 |
18 | home_page=app.config['APP_URL']
22 |
23 | dashapp.layout=dbc.Row(
24 | [
25 | dbc.Col(
26 | [
27 | dcc.Location(id='url', refresh=False),
28 | dbc.Card(
29 | # dbc.Form(),
30 | id="release-form",
31 | body=True
32 | ),
33 | ],
34 | md=8, lg=6, xl=4,
35 | align="center",
36 | style={ "margin-left":2, "margin-right":2 ,'margin-bottom':"50px"}
37 | ),
38 | navbar_A
39 | ],
40 | align="center",
41 | justify="center",
42 | style={"min-height": "95vh", 'verticalAlign': 'center'}
43 | )
44 |
45 | @dashapp.callback(
46 | Output('release-form', 'children'),
47 | Input('url', 'pathname'))
48 | def request_change( pathname):
49 | token=pathname.split("/transfer/")[-1]
50 | s_id=FTPSubmissions.verify_submission_token(token)
51 | if not s_id :
52 | request_form=dbc.Form(
53 | [
54 | html.H2("Error", style={'textAlign': 'center'} ),
55 | html.Div("Token could not be found.", style={'textAlign': 'center'})
56 | ]
57 | )
58 | return request_form
59 |
60 | s=FTPSubmissions.query.get(s_id)
61 | submission_file=s.file_name
62 |
63 | if not os.path.isfile(submission_file) :
64 | request_form=dbc.Form(
65 | [
66 | html.H2("Error", style={'textAlign': 'center'} ),
67 | html.Div("Submission could not be found.")
68 | ]
69 | )
70 | return request_form
71 |
72 | filename=os.path.basename(submission_file)
73 |
74 | request_form=dbc.Form(
75 | [
76 | html.H2("Submit request", style={'textAlign': 'center'} ),
77 | html.Div(f"Please make sure you have transferred all your files for your '{filename}' request before pressing 'Submit'."),
78 | html.Div(
79 | dbc.Button("Submit", id='reset-button',color="secondary", n_clicks=0, className="me-1",style={"width":"auto","margin-top":10, "margin-bottom":4}),
80 | className="d-grid gap-2 d-md-flex justify-content-md-end",
81 | ),
82 | # html.Div(
83 | # html.Button(id='reset-button', n_clicks=0, children='Submit', style={"width":"auto","margin-top":4, "margin-bottom":4}),
84 | # style = { "margin-top":"10px"}
85 | # ),
86 | html.Div(id="submit-feedback")
87 | ]
88 | )
89 | return request_form
90 |
91 | @dashapp.callback(
92 | Output('submit-feedback', 'children'),
93 | Input('reset-button', 'n_clicks'),
94 | State('url', 'pathname'),
95 | prevent_initial_call=True )
96 | def release_request(n_clicks, pathname):
97 | token=pathname.split("/transfer/")[-1]
98 | s_id=FTPSubmissions.verify_submission_token(token)
99 | s=FTPSubmissions.query.get(s_id)
100 | submission_file=s.file_name
101 | filename=os.path.basename(submission_file)
102 | dest=os.path.join("/mpcdf",filename)
103 |
104 | if not os.path.isfile(submission_file) :
105 | modal=dbc.Modal(
106 | [
107 | dbc.ModalHeader(dbc.ModalTitle("Error",id="modal_header") ),
108 | dbc.ModalBody("Submission could not be found.", id="modal_body"),
109 | dbc.ModalFooter(
110 | dbc.Button(
111 | "Close", id="close", className="ms-auto", n_clicks=0, href=home_page
112 | )
113 | ),
114 | ],
115 | id="modal",
116 | is_open=True,
117 | )
118 | return modal
119 |
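120 | # Record the upload date for this FTP user in the pure-ftpd MySQL backend.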
120 | today=str(date.today())
121 | PUREFTPD_AUTH_SALT=os.getenv('PUREFTPD_AUTH_SALT')
122 | PUREFTPD_MYSQL_SERVER=os.getenv('PUREFTPD_MYSQL_SERVER')
123 | PUREFTPD_MYSQL_PORT=os.getenv('PUREFTPD_MYSQL_PORT')
124 | PUREFTPD_MYSQL_USER=os.getenv('PUREFTPD_MYSQL_USER')
125 | PUREFTPD_MYSQL_PASS=os.getenv('PUREFTPD_MYSQL_PASS')
126 | PUREFTPD_MYSQL_DB=os.getenv('PUREFTPD_MYSQL_DB')
127 |
128 | ftp_user=s.ftp_user
129 |
130 | try:
131 | connection = pymysql.connect(host=PUREFTPD_MYSQL_SERVER,
132 | port=int(PUREFTPD_MYSQL_PORT),
133 | user=PUREFTPD_MYSQL_USER,
134 | password=PUREFTPD_MYSQL_PASS,
135 | database=PUREFTPD_MYSQL_DB,
136 | ssl_ca='/etc/mysql/certs/ca-cert.pem',
137 | ssl_key='/etc/mysql/certs/client-key.pem',
138 | ssl_cert='/etc/mysql/certs/client-cert.pem',
139 | cursorclass=pymysql.cursors.DictCursor)
140 |
141 | with connection:
142 | with connection.cursor() as cursor:
143 | sql="UPDATE users SET uploaded = %s WHERE user = %s"  # parameterized: avoids SQL injection and keeps the date from being parsed as arithmetic
144 | response=cursor.execute(sql, (today, ftp_user))
145 | connection.commit()
146 |
147 | except Exception:
148 | modal=dbc.Modal(
149 | [
150 | dbc.ModalHeader(dbc.ModalTitle("Error",id="modal_header") ),
151 | dbc.ModalBody("ftp server could not be reached. Please try again later.", id="modal_body"),
152 | dbc.ModalFooter(
153 | dbc.Button(
154 | "Close", id="close", className="ms-auto", n_clicks=0, href=home_page
155 | )
156 | ),
157 | ],
158 | id="modal",
159 | is_open=True,
160 | )
161 | return modal
162 |
163 | modal=dbc.Modal(
164 | [
165 | dbc.ModalHeader(dbc.ModalTitle("Success",id="modal_header") ),
166 | dbc.ModalBody("Your request has been released.", id="modal_body"),
167 | dbc.ModalFooter(
168 | dbc.Button(
169 | "Close", id="close", className="ms-auto", n_clicks=0, href=home_page
170 | )
171 | ),
172 | ],
173 | id="modal",
174 | is_open=True,
175 | backdrop='static'
176 | )
177 |
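178 | # Move the submission (and its companion .xlsx, if present) to the /mpcdf transfer folder and notify the user and the automation address.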
178 | submission_type=filename.split(".")[-2]
179 |
180 | user=User.query.get(s.user_id)
181 |
182 | shutil.move(submission_file, dest)
183 |
184 | xlsx_file=submission_file.replace(".json", ".xlsx")
185 | if os.path.isfile(xlsx_file) :
186 | shutil.move(xlsx_file, dest.replace(".json", ".xlsx"))
187 |
188 | submission_tag=os.path.basename(dest)
189 |
190 | send_email(f'[Flaski][Automation][{submission_type}] Files have been transferred.',
191 | sender=app.config['MAIL_USERNAME_ADDRESS'],
192 | recipients=[user.email, 'automation@age.mpg.de' ],
193 | text_body=render_template('email/submissions.ftp.data.txt',
194 | user=user, filename=os.path.basename(submission_tag), submission_tag=submission_tag, submission_type=submission_type),
195 | html_body=render_template('email/submissions.ftp.data.html',
196 | user=user, filename=os.path.basename(submission_tag), submission_tag=submission_tag, submission_type=submission_type),\
197 | reply_to='bioinformatics@age.mpg.de',\
198 | attachment=None ,
199 | attachment_path=None,
200 | open_type="rb",\
201 | attachment_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
202 |
203 | return modal
--------------------------------------------------------------------------------
/routes/apps/vcheck.py:
--------------------------------------------------------------------------------
1 | from myapp import app, PAGE_PREFIX
2 | from flask_login import current_user
3 | from flask_caching import Cache
4 | from flask import session
5 | import dash
6 | from dash import dcc, html
7 | from dash.dependencies import Input, Output, State, MATCH, ALL
8 | from dash.exceptions import PreventUpdate
9 | from myapp.routes._utils import META_TAGS, navbar_A, protect_dashviews, make_navbar_logged
10 | import dash_bootstrap_components as dbc
11 | from myapp.routes.apps._utils import parse_import_json, parse_table, make_options, make_except_toast, ask_for_help, save_session, load_session
12 | from pyflaski.scatterplot import make_figure, figure_defaults
13 | import os
14 | import uuid
15 | import traceback
16 | import json
17 | import base64
18 | import pandas as pd
19 | import time
20 | import plotly.express as px
21 | # from plotly.io import write_image
22 | import plotly.graph_objects as go
23 | from werkzeug.utils import secure_filename
24 | from myapp import db
25 | from myapp.models import UserLogging
26 | from time import sleep
27 |
28 | PYFLASKI_VERSION=os.environ['PYFLASKI_VERSION']
29 | PYFLASKI_VERSION=str(PYFLASKI_VERSION)
30 | FONT_AWESOME = "https://use.fontawesome.com/releases/v5.7.2/css/all.css"
31 |
32 | dashapp = dash.Dash("vcheck",url_base_pathname=f'{PAGE_PREFIX}/vcheck/', meta_tags=META_TAGS, server=app, external_stylesheets=[dbc.themes.BOOTSTRAP, FONT_AWESOME], title=app.config["APP_TITLE"], assets_folder=app.config["APP_ASSETS"])# , assets_folder="/flaski/flaski/static/dash/")
33 |
34 | protect_dashviews(dashapp)
35 |
36 | if app.config["SESSION_TYPE"] == "sqlalchemy":
37 | import sqlalchemy
38 | engine = sqlalchemy.create_engine(app.config["SQLALCHEMY_DATABASE_URI"] , echo=True)
39 | app.config["SESSION_SQLALCHEMY"] = engine
40 | elif app.config["CACHE_TYPE"] == "RedisCache" :
41 | cache = Cache(dashapp.server, config={
42 | 'CACHE_TYPE': 'RedisCache',
43 | 'CACHE_REDIS_URL': 'redis://:%s@%s' %( os.environ.get('REDIS_PASSWORD'), app.config['REDIS_ADDRESS'] ) #'redis://localhost:6379'),
44 | })
45 | elif app.config["CACHE_TYPE"] == "RedisSentinelCache" :
46 | cache = Cache(dashapp.server, config={
47 | 'CACHE_TYPE': 'RedisSentinelCache',
48 | 'CACHE_REDIS_SENTINELS': [
49 | [ os.environ.get('CACHE_REDIS_SENTINELS_address'), os.environ.get('CACHE_REDIS_SENTINELS_port') ]
50 | ],
51 | 'CACHE_REDIS_SENTINEL_MASTER': os.environ.get('CACHE_REDIS_SENTINEL_MASTER')
52 | })
53 |
54 | dashapp.layout=html.Div(
55 | [
56 | dcc.Store( data=str(uuid.uuid4()), id='session-id' ), dcc.Store( data={}, id='session-data' ),  # session-data is read by the help_email callback
57 | dcc.Location( id='url', refresh=True ),
58 | html.Div( id="protected-content" ),
59 | ]
60 | )
61 |
62 | card_label_style={"margin-right":"2px"}
63 | card_label_style_={"margin-left":"5px","margin-right":"2px"}
64 |
65 | card_input_style={"height":"35px","width":"100%"}
66 | # card_input_style_={"height":"35px","width":"100%","margin-right":"10px"}
67 | card_body_style={ "padding":"2px", "padding-top":"2px"}#,"margin":"0px"}
68 | # card_body_style={ "padding":"2px", "padding-top":"4px","padding-left":"18px"}
69 |
70 |
71 | @dashapp.callback(
72 | Output('protected-content', 'children'),
73 | Input('url', 'pathname'))
74 | def make_layout(pathname):
75 | eventlog = UserLogging(email=current_user.email, action="visit vcheck")
76 | db.session.add(eventlog)
77 | db.session.commit()
78 | protected_content=html.Div(
79 | [
80 | make_navbar_logged("Version check",current_user),
81 | html.Div(id="app-content"),
82 | navbar_A,
83 | ],
84 | style={"height":"100vh","verticalAlign":"center"}
85 | )
86 | return protected_content
87 |
88 | @dashapp.callback(
89 | Output('app-content', 'children'),
90 | Input('url', 'pathname'))
91 | def make_app_content(pathname):
92 |
93 | app_content=dbc.Row(
94 | [
95 | dbc.Col(
96 | [
97 | dbc.Card(
98 | dbc.Form(
99 | [
100 | dcc.Upload(
101 | id='upload-data',
102 | children=html.Div(
103 | [ html.A('drop a file here',id='upload-data-text') ],
104 | style={ 'textAlign': 'center', "padding-top": 35, "margin-bottom": 4, }
105 | ),
106 | style={
107 | 'width': '100%',
108 | 'borderWidth': '1px',
109 | 'borderStyle': 'dashed',
110 | 'borderRadius': '0px',
111 | "margin-bottom": "0px",
112 | 'max-width':"375px",
113 | 'min-height':"100px",
114 | # "verticalAlign":"center"
115 | },
116 | multiple=False,
117 | ),
118 | html.Div( id="app-version"),
119 | ]
120 | ),
121 | body=True,
122 | outline=False,
123 | color="white",
124 | ),
125 | ],
126 | sm=9,md=6, lg=5, xl=5,
127 | align="center",
128 | style={ "margin-left":0, "margin-right":0 ,'margin-bottom':"50px",'max-width':"375px"}
129 | ),
130 | ],
131 | align="center",
132 | justify="center",
133 | style={"min-height": "86vh", 'verticalAlign': 'center'}
134 | )
135 |
136 | return app_content
137 |
138 | @dashapp.callback(
139 | Output('upload-data', 'children'),
140 | Input('upload-data', 'contents'),
141 | State('upload-data', 'filename'),
142 | State('upload-data', 'last_modified'),
143 | State('session-id', 'data'),
144 | prevent_initial_call=True)
145 | def read_input_file(contents,filename,last_modified,session_id):
146 | if not filename :
147 | raise dash.exceptions.PreventUpdate
148 | # try:
149 | # app_data=parse_import_json(contents,filename,last_modified,current_user.id,cache, "scatterplot")
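150 | # Decode the uploaded session file and report which Flaski/pyflaski versions and app produced it.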
150 | content_type, content_string = contents.split(',')
151 | decoded=base64.b64decode(content_string)
152 | decoded=decoded.decode('utf-8')
153 | session_data=json.loads(decoded)  # avoid shadowing flask's "session" import
154 |
155 | FLASKI_version=session_data["APP_VERSION"]
156 | PYFLASKI_version=session_data["PYFLASKI_VERSION"]
157 | APP=list(session_data["session_data"]["app"].keys())[0]
158 |
159 | message=dcc.Markdown(f"**Session info**\
160 | \n\n\
161 | Flaski: {FLASKI_version}\
162 | \n\
163 | pyflaski: {PYFLASKI_version}\
164 | \n\
165 | App: {APP}")
166 |
167 | children=html.Div(
168 | [ html.A(message,id='upload-data-text') ],
169 | style={ 'textAlign': 'center', "margin-top": 4, "margin-bottom": 4}
170 | )
171 | return children
172 |
173 | # except:
174 | # children=html.Div(
175 | # [ html.A("! file could not be read !",id='upload-data-text') ],
176 | # style={ 'textAlign': 'center', "margin-top": 4, "margin-bottom": 4}
177 | # )
178 | # return children
179 |
180 |
181 | @dashapp.callback(
182 | Output( { 'type': 'collapse-toast-traceback', 'index': MATCH }, "is_open"),
183 | Output( { 'type': 'toggler-toast-traceback', 'index': MATCH }, "children"),
184 | Input( { 'type': 'toggler-toast-traceback', 'index': MATCH }, "n_clicks"),
185 | State( { 'type': 'collapse-toast-traceback', 'index': MATCH }, "is_open"),
186 | prevent_initial_call=True
187 | )
188 | def toggle_toast_traceback(n,is_open):
189 | if not is_open:
190 | return not is_open , "collapse"
191 | else:
192 | return not is_open , "expand"
193 |
194 | @dashapp.callback(
195 | Output( { 'type': 'toast-error', 'index': ALL }, "is_open" ),
196 | Output( 'toast-email' , "children" ),
197 | Output( { 'type': 'toast-error', 'index': ALL }, "n_clicks" ),
198 | Input( { 'type': 'help-toast-traceback', 'index': ALL }, "n_clicks" ),
199 | State({ "type":"traceback", "index":ALL }, "data"),
200 | State( "session-data", "data"),
201 | prevent_initial_call=True
202 | )
203 | def help_email(n,tb_str, session_data):
204 | closed=[ False for s in n ]
205 | n=[ s for s in n if s ]
206 | clicks=[ 0 for s in n ]
207 | n=[ s for s in n if s > 0 ]
208 | if n :
209 |
210 | toast=dbc.Toast(
211 | [
212 | "We have received your request for help and will get back to you as soon as possible.",
213 | ],
214 | id={'type':'success-email','index':"email"},
215 | header="Help",
216 | is_open=True,
217 | dismissable=True,
218 | icon="success",
219 | )
220 |
221 | if tb_str :
222 | tb_str= [ s for s in tb_str if s ]
223 | tb_str="\n\n***********************************\n\n".join(tb_str)
224 | else:
225 | tb_str="! traceback could not be found"
226 |
227 | ask_for_help(tb_str,current_user, "vcheck", session_data)
228 |
229 | return closed, toast, clicks
230 | else:
231 |
232 | raise PreventUpdate
233 |
234 | @dashapp.callback(
235 | Output("navbar-collapse", "is_open"),
236 | [Input("navbar-toggler", "n_clicks")],
237 | [State("navbar-collapse", "is_open")],
238 | )
239 | def toggle_navbar_collapse(n, is_open):
240 | if n:
241 | return not is_open
242 | return is_open
--------------------------------------------------------------------------------
/routes/home.py:
--------------------------------------------------------------------------------
1 | from myapp import app, db, PAGE_PREFIX
2 | import dash
3 | from dash.dependencies import Input, Output, State
4 | from dash import dcc, html
5 | import dash_bootstrap_components as dbc
6 | from myapp.models import User, PrivateRoutes
7 | from myapp.email import send_validate_email
8 | from datetime import datetime
9 | from ._utils import META_TAGS, check_email, password_check, navbar_A, protect_dashviews, make_navbar_logged
10 | from flask_login import current_user
11 | from ._vars import other_nav_dropdowns, _PRIVATE_ROUTES, _PUBLIC_VIEWS
12 |
13 | _PR = [ s for s in _PRIVATE_ROUTES if s not in _PUBLIC_VIEWS ]
14 |
15 | FONT_AWESOME = "https://use.fontawesome.com/releases/v5.7.2/css/all.css"
16 |
17 | dashapp = dash.Dash("home",url_base_pathname=f'{PAGE_PREFIX}/home/', meta_tags=META_TAGS, server=app, external_stylesheets=[dbc.themes.BOOTSTRAP, FONT_AWESOME], title=app.config["APP_TITLE"], assets_folder=app.config["APP_ASSETS"])# , assets_folder="/flaski/flaski/static/dash/")
18 |
19 | # protect_dashviews(dashapp)
20 |
21 | dashapp.layout=html.Div( [
22 | dcc.Location(id='url', refresh=False),
23 | html.Div(id="protected-content"),
24 | ]
25 | )
26 |
27 | @dashapp.callback(
28 | Output('protected-content', 'children'),
29 | Input('url', 'pathname'))
30 | def make_layout(pathname):
31 | container_children=[]
32 | # print(1)
33 | for o in other_nav_dropdowns :
34 | label= list(o.keys())[0]
35 |
36 | label_title=dbc.Row(
37 | dbc.Col(
38 | [
39 | html.H1(label, style={"font-size":"60px","width":"100%"} )
40 | ],
41 | align="center"#,
42 | #style={'textAlign':'center',"width":"100%"}
43 | ),
44 | align="center",
45 | justify="center",
46 | style={'textAlign':'center',"margin-top":"5%", "margin-bottom":"5%", "background-color":"#4d4d4d","color":"white","height":"150px","min-width":"375px"}
47 | )
48 |
49 | container_children.append(label_title)
50 |
51 | links_dic=o[label]
52 | links_keys=list(links_dic.keys())
53 | i = 0
54 | row=[]
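55 | # Build the grid of app icons, skipping private routes the current user may not access (checked by user id, then e-mail domain).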
55 | for l in list( links_keys ) :
56 | app_route=links_dic[l].split("/")[1]
57 | if app_route in _PR :
58 | if not current_user :
59 | continue
60 | if not current_user.is_authenticated :
61 | continue
62 | if not current_user.active :
63 | continue
64 |
65 | route_obj=PrivateRoutes.query.filter_by(route=app_route).first()
66 | if not route_obj :
67 | continue
68 |
69 | users=route_obj.users
70 | if not users :
71 | continue
72 |
73 | uid=current_user.id
74 | if uid not in users :
75 | udomains=route_obj.users_domains
76 | if not udomains:
77 | continue
78 | if current_user.domain not in udomains :
79 | continue
80 | l_=links_dic[l]
81 | link_icon=dbc.Col(
82 | [
83 | dcc.Link(
84 | [
85 | html.I(className="fas fa-3x fa-flask", ),
86 | html.H4(l, style={"textAlign":"center"} ),
87 | ],
88 | href=f'{PAGE_PREFIX}{l_}',
89 | refresh=True,
90 | style={"color":"#4d4d4d","text-decoration": "none"}
91 | )
92 | ],
93 | align="center",
94 | xs=6, sm=3, md=3, lg=3,
95 | style={"margin-top":"30px", "margin-bottom":"30px"}
96 | )
97 |
98 | row.append(link_icon)
99 | i=i+1
100 | if i == 4:
101 | i=0
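102 | # Pad the last row with empty columns so the icon grid keeps four slots per row.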
102 | if ( i != 0 ) and ( len(row) > 1 ):
103 | while i < 4:
104 | link_icon=dbc.Col(
105 | [ ],
106 | align="center",
107 | xs=6, sm=3, md=3, lg=3,
108 | style={"margin-top":"30px", "margin-bottom":"30px"}
109 | )
110 | row.append(link_icon)
111 | i=i+1
112 |
113 | row=dbc.Row(
114 | children=row,
115 | align="center",
116 | justify="evenly",
117 | style={'textAlign':'center',"width":"100%"}
118 | )
119 | container_children.append(row)
120 |
121 | if not getattr(current_user, 'email', None) or not current_user.email.endswith('.mpg.de'):
122 | container_children.append(html.P("* Log in with Max Planck Institute email address to access more Apps/Forms *", style={'textAlign': 'center'}))
123 |
124 | links_style={"color":"#35443f", "margin-left":"12px", "margin-right":"12px", "font-weight": "bold","text-decoration": "none"}
125 |
126 | open_content=html.Div(
127 | [
128 | dbc.Row(
129 | html.Footer(
130 | [
131 | html.A("Login", style=links_style, target='_blank', href=f"{PAGE_PREFIX}/login/"),
132 | html.A("About", style=links_style, target='_blank', href=f"{PAGE_PREFIX}/about/"),
133 | html.A("Privacy", style=links_style, target='_blank', href=f"{PAGE_PREFIX}/privacy/"),
134 | html.A("Contact", style=links_style, target='_blank', href=f"{PAGE_PREFIX}/contact/"),
135 | ] ,
136 | style={"margin-top": 50, "margin-bottom": 100,},
137 | ),
138 | align="center",
139 | justify="evenly",
140 | style={'textAlign':'center',"width":"100%"}
141 | )
142 | ],
143 | style={"height":"10px"}
144 | )
145 |
146 |
147 | if current_user :
148 | if current_user.is_authenticated :
149 | if current_user.active :
150 | open_content=html.Div(
151 | [
152 | dbc.Row(
153 | html.Footer(
154 | [
155 | html.A("About", target='_blank', style=links_style, href=f"{PAGE_PREFIX}/about/"),
156 | html.A("Settings", target='_blank', style=links_style, href=f"{PAGE_PREFIX}/settings/"),
157 | html.A("Storage", target='_blank', style=links_style, href=f"{PAGE_PREFIX}/storage/"),
158 | html.A("Logout", target='_blank', style=links_style, href=f"{PAGE_PREFIX}/logout/")
159 | ] ,
160 | style={"margin-top": 50, "margin-bottom": 100,},
161 | ),
162 | align="center",
163 | justify="evenly",
164 | style={'textAlign':'center',"width":"100%"}
165 | )
166 | ],
167 | style={"height":"10px"}
168 | )
169 |
170 | container_children.append(open_content)
171 |
172 | protected_content=html.Div(
173 | [
174 | dbc.Container( container_children,
175 | style={"min-height": "80vh","width":"100%"}
176 | ),
177 | navbar_A
178 | ],
179 | style={"height":"100vh","verticalAlign":"center","width":"100%","padding":"0px","margin":"0px"}
180 | )
181 | return protected_content
182 |
183 |
184 | @dashapp.callback(
185 | Output("navbar-collapse", "is_open"),
186 | [Input("navbar-toggler", "n_clicks")],
187 | [State("navbar-collapse", "is_open")],
188 | )
189 | def toggle_navbar_collapse(n, is_open):
190 | if n:
191 | return not is_open
192 | return is_open
--------------------------------------------------------------------------------
/routes/index.py:
--------------------------------------------------------------------------------
1 | import re
2 | from myapp import app, PAGE_PREFIX
3 | from flask_login import current_user
4 | from flask_caching import Cache
5 | import dash
6 | from dash.dependencies import Input, Output, State
7 | from dash import dcc, html
8 | import dash_bootstrap_components as dbc
9 | import uuid
10 | from werkzeug.utils import secure_filename
11 | import json
12 | from flask import session
13 | import base64
14 | from ._utils import META_TAGS
15 |
16 | import pandas as pd
17 | import os
18 |
19 |
20 | dashapp = dash.Dash("index", url_base_pathname=f"{PAGE_PREFIX}/", meta_tags=META_TAGS, server=app, external_stylesheets=[dbc.themes.BOOTSTRAP], title=app.config["APP_TITLE"], assets_folder=app.config["APP_ASSETS"])# , assets_folder="/flaski/flaski/static/dash/")
21 |
22 | # protect_dashviews(dashapp)
23 |
24 | # cache = Cache(dashapp.server, config={
25 | # 'CACHE_TYPE': 'redis',
26 | # 'CACHE_REDIS_URL': 'redis://:%s@%s' %( os.environ.get('REDIS_PASSWORD'), os.environ.get('REDIS_ADDRESS') ) #'redis://localhost:6379'),
27 | # })
28 |
29 | image_filename = f'{app.config["APP_ASSETS"]}logo.png' # replace with your own image
30 | with open(image_filename, 'rb') as image_file:
31 | encoded_image = base64.b64encode(image_file.read())
31 |
32 | dashapp.layout=html.Div( [ dcc.Location(id='url', refresh=False), html.Div(id="page-content") ] )
33 |
34 | logged_children=[]
35 | nonlogged_children=[]
36 |
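37 | # The index page renders only the logo and title as a link to the home page.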
37 | @dashapp.callback(
38 | Output('page-content', 'children'),
39 | Input('url', 'pathname'))
40 | def make_layout(pathname):
41 |
42 | links_style={"color":"#35443f", "margin-left":"12px", "margin-right":"12px", "font-weight": "bold","text-decoration": "none"}
43 |
44 | target=f"{PAGE_PREFIX}/home/"
45 | refresh=True
46 |
47 | # if current_user :
48 | # if current_user.is_authenticated :
49 | # if current_user.active :
50 | # target=f"{PAGE_PREFIX}/home/"
51 | # open_content=html.Div(id="page-footer",style={"height":"10px"})
52 | # refresh=True
53 |
54 | # if not target:
55 | # if pathname not in [f'{PAGE_PREFIX}/index/open/', f'{PAGE_PREFIX}/open/'] :
56 | # target=f"{PAGE_PREFIX}/index/open/"
57 | # open_content=html.Div(id="page-footer", style={"height":"10px"})
58 | # refresh=False
59 |
60 | # else :
61 | # target=f"{PAGE_PREFIX}/index/"
62 | # open_content=html.Div([
63 | # dbc.Row(
64 | # html.Footer( [
65 | # html.A("About", style=links_style, href=f"{PAGE_PREFIX}/about/"),
66 | # html.A("Login", style=links_style, href=f"{PAGE_PREFIX}/login/"),
67 | # html.A("Register", style=links_style, href=f"{PAGE_PREFIX}/register/"),
68 | # html.A("Contact", style=links_style, href=f"{PAGE_PREFIX}/contact/")
69 | # ] ,
70 | # style={"margin-top": 25, "margin-bottom": 5,},
71 | # ),
72 | # style={"justify-content":"center"}
73 | # )
74 | # ],style={"height":"10px"})
75 | # refresh=False
76 |
77 |
78 | page_content=html.Div(
79 | [
80 | dbc.Row(
81 | dbc.Col(
82 | [
83 | dcc.Link(
84 | [
85 | html.Img( alt=app.config["APP_TITLE"], lang="en", src='data:image/png;base64,{}'.format(encoded_image.decode() ) , height="300px", style={ "margin-bottom":5}),
86 | html.H1(app.config["APP_TITLE"], style={"textAlign":"center"}),
87 | ],
88 | href=target,
89 | refresh=refresh,
90 | style={"color":"black","text-decoration": "none"}
91 | ),
92 | # open_content
93 | ],
94 | align="center",
95 | style={"textAlign":"center"}
96 | ),
97 | justify="center",
98 | style={"min-height": "100vh"}
99 | )
100 | ]
101 | )
102 |
103 | return page_content
104 |
105 |
106 | @dashapp.callback(
107 | Output("navbar-collapse", "is_open"),
108 | [Input("navbar-toggler", "n_clicks")],
109 | [State("navbar-collapse", "is_open")])
110 | def toggle_navbar_collapse(n, is_open):
111 | if n:
112 | return not is_open
113 | return is_open
114 |
115 |
--------------------------------------------------------------------------------
/static/dog-solid.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mpg-age-bioinformatics/flaski/4fb70e3e664bfbcd536989830f83fb61e925e5f1/static/dog-solid.png
--------------------------------------------------------------------------------
/static/dog-solid.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/utils/stats.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from myapp import app, db
4 | from myapp.models import User, UserLogging
5 | import sys, os, datetime
6 | import pandas as pd
7 |
8 | @app.shell_context_processor
9 | def make_shell_context():
10 | return {'db': db, 'User': User}
11 |
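12 | # Export every UserLogging entry (id, email, action, timestamp) to a timestamped TSV file in outfolder.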
12 | def stats(outfolder="/flaski_private"):
13 | entries=UserLogging.query.all()
14 | df=[ [ e.id, e.email, e.action, e.date_time ] for e in entries ]
15 | df=pd.DataFrame(df, columns=["id","email","action", "date_time"])
16 | outname=str(datetime.datetime.now()).split(".")[0].replace(" ","_").replace(":",".")
17 | if not os.path.isdir(outfolder):
18 | os.makedirs(outfolder)
19 | outname=outfolder+"/"+outname+".stats.tsv"
20 | df.to_csv(outname, sep="\t", index=False)
21 | print("Done collecting usage stats.\n%s" %outname)
22 | sys.stdout.flush()
23 |
24 |
25 | if __name__ == "__main__":
26 | with app.app_context():
27 | stats()
28 |
29 | # pod=$(kubectl --kubeconfig ~/admin.conf -n flaski-prod get pods | grep server | head -n 1 | awk '{print $1}')
30 | # kubectl --kubeconfig ~/admin.conf -n flaski-prod cp stats.py ${pod}:/myapp/stats.py
31 | # kubectl --kubeconfig ~/admin.conf -n flaski-prod exec -it ${pod} -- /bin/bash
32 | # ./stats.py
33 | # kubectl --kubeconfig ~/admin.conf -n flaski-prod cp ${pod}:/flaski_private/2023-08-31_09.17.51.stats.tsv ~/2023-08-31_09.17.51.stats.tsv
34 |
--------------------------------------------------------------------------------