├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   ├── PULL_REQUEST_TEMPLATE
│   │   └── pull_request_template.m
│   └── workflows
│       ├── black.yml
│       ├── codeql.yml
│       ├── greetings.yml
│       └── pylint.yml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── SECURITY.md
├── api
│   ├── __init__.py
│   └── v1
│       ├── __init__.py
│       ├── alert.py
│       ├── analytics.py
│       ├── api_keys.py
│       ├── breaches.py
│       ├── domain_breaches.py
│       ├── domain_phishing.py
│       ├── domain_verification.py
│       ├── feeds.py
│       ├── metrics.py
│       ├── slack.py
│       ├── teams.py
│       └── webhooks.py
├── config
│   ├── limiter.py
│   ├── middleware.py
│   └── settings.py
├── docker-compose.yml
├── main.py
├── models
│   ├── base.py
│   ├── requests.py
│   └── responses.py
├── requirements.txt
├── services
│   ├── analytics.py
│   ├── breach.py
│   ├── cloudflare.py
│   ├── globe.py
│   ├── messaging.py
│   ├── send_email.py
│   ├── slack.py
│   ├── teams.py
│   └── webhook.py
├── static
│   ├── robots.txt
│   └── static
│       ├── dictionary.txt
│       ├── robots.txt
│       └── tld.txt
├── templates
│   ├── domain_dashboard_error.html
│   ├── domain_dashboard_success.html
│   ├── domain_email_error.html
│   ├── domain_email_verify.html
│   ├── email_error.html
│   ├── email_shield_error.html
│   ├── email_shield_verify.html
│   ├── email_success.html
│   ├── email_template.html
│   ├── email_unsub_error.html
│   ├── email_unsub_verify.html
│   ├── email_verify.html
│   ├── index.html
│   ├── styles.css
│   └── swagger
│       └── custom_swagger.html
└── utils
    ├── __init__.py
    ├── helpers.py
    ├── request.py
    ├── security.py
    ├── token.py
    └── validation.py
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE/pull_request_template.m:
--------------------------------------------------------------------------------
1 | # Pull Request Template
2 |
3 | ## Description
4 | Please include a summary of the change and which issue is fixed. Also include relevant motivation and context. List any dependencies that are required for this change.
5 |
6 | ## Type of change
7 | - [ ] Bug fix (non-breaking change which fixes an issue)
8 | - [ ] New feature (non-breaking change which adds functionality)
9 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
10 | - [ ] This change requires a documentation update
11 |
12 | ## How Has This Been Tested?
13 | Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration.
14 |
15 | - [ ] Test A
16 | - [ ] Test B
17 |
18 | ## Checklist:
19 | - [ ] My code follows the style guidelines of this project
20 | - [ ] I have performed a self-review of my own code
21 | - [ ] I have commented my code, particularly in hard-to-understand areas
22 | - [ ] I have made corresponding changes to the documentation
23 | - [ ] My changes generate no new warnings
24 | - [ ] I have added tests that prove my fix is effective or that my feature works
25 | - [ ] New and existing unit tests pass locally with my changes
26 | - [ ] Any dependent changes have been merged and published in downstream modules
27 |
28 | ## Screenshots (if appropriate):
29 |
30 | ## Additional Notes:
31 | Include any additional information that you think is important for reviewers to know.
32 |
33 |
--------------------------------------------------------------------------------
/.github/workflows/black.yml:
--------------------------------------------------------------------------------
1 | name: Lint
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | lint:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v3
10 | - uses: psf/black@stable
11 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ "master" ]
17 | pull_request:
18 | branches: [ "master" ]
19 | schedule:
20 | - cron: '39 17 * * 3'
21 |
22 | jobs:
23 | analyze:
24 | name: Analyze
25 | # Runner size impacts CodeQL analysis time. To learn more, please see:
26 | # - https://gh.io/recommended-hardware-resources-for-running-codeql
27 | # - https://gh.io/supported-runners-and-hardware-resources
28 | # - https://gh.io/using-larger-runners
29 | # Consider using larger runners for possible analysis time improvements.
30 | runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
31 | timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
32 | permissions:
33 | actions: read
34 | contents: read
35 | security-events: write
36 |
37 | strategy:
38 | fail-fast: false
39 | matrix:
40 | language: [ 'python' ]
41 | # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
42 | # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
43 | # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
44 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
45 |
46 | steps:
47 | - name: Checkout repository
48 | uses: actions/checkout@v4
49 |
50 | # Initializes the CodeQL tools for scanning.
51 | - name: Initialize CodeQL
52 | uses: github/codeql-action/init@v3
53 | with:
54 | languages: ${{ matrix.language }}
55 | # If you wish to specify custom queries, you can do so here or in a config file.
56 | # By default, queries listed here will override any specified in a config file.
57 | # Prefix the list here with "+" to use these queries and those in the config file.
58 |
59 | # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
60 | # queries: security-extended,security-and-quality
61 |
62 |
63 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
64 | # If this step fails, then you should remove it and run the build manually (see below)
65 | - name: Autobuild
66 | uses: github/codeql-action/autobuild@v3
67 |
68 | # ℹ️ Command-line programs to run using the OS shell.
69 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
70 |
71 | # If the Autobuild fails above, remove it and uncomment the following three lines.
72 | # modify them (or add more) to build your code if your project requires it; please refer to the EXAMPLE below for guidance.
73 |
74 | # - run: |
75 | # echo "Run, Build Application using script"
76 | # ./location_of_script_within_repo/buildscript.sh
77 |
78 | - name: Perform CodeQL Analysis
79 | uses: github/codeql-action/analyze@v3
80 | with:
81 | category: "/language:${{matrix.language}}"
82 |
--------------------------------------------------------------------------------
/.github/workflows/greetings.yml:
--------------------------------------------------------------------------------
1 | name: Friendly Greetings
2 |
3 | on: [pull_request_target, issues]
4 |
5 | jobs:
6 | warm-welcome:
7 | runs-on: ubuntu-latest
8 | permissions:
9 | issues: write
10 | pull-requests: write
11 | steps:
12 | - uses: actions/first-interaction@v1
13 | with:
14 | repo-token: ${{ secrets.GITHUB_TOKEN }}
15 | issue-message: >
16 | 🌟 Hi there! We're thrilled to see you contributing for the first time! Thank you for raising this issue. Our community values your input, and we look forward to addressing your concerns. Feel free to ask questions or seek assistance if needed. Welcome aboard! 🚀
17 | pr-message: >
18 | 🎉 Welcome to our project! Your first pull request marks an exciting moment for us. We're eager to review your contribution and collaborate to merge it into our codebase. Thanks for investing your time in improving our project. If you have any questions or need help, don't hesitate to ask!
19 |
--------------------------------------------------------------------------------
/.github/workflows/pylint.yml:
--------------------------------------------------------------------------------
1 | name: Python Lint
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | build:
7 | runs-on: ubuntu-latest
8 | strategy:
9 | matrix:
10 | python-version: ["3.8", "3.9", "3.10"]
11 |
12 | steps:
13 | - uses: actions/checkout@v3
14 | - name: Set up Python ${{ matrix.python-version }}
15 | uses: actions/setup-python@v3
16 | with:
17 | python-version: ${{ matrix.python-version }}
18 |
19 | - name: Install dependencies
20 | run: |
21 | python -m pip install --upgrade pip
22 | pip install -r requirements.txt
23 |
24 | - name: Install Pylint
25 | run: |
26 | pip install pylint
27 |
28 | - name: Run Pylint
29 | run: |
30 | pylint --fail-under=9 $(git ls-files '*.py')
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | *.py[cod]
3 | .pytest_cache
4 | MANIFEST
5 | *.manifest
6 | *.spec
7 | *.log
8 | pip-log.txt
9 | pip-delete-this-directory.txt
10 | instance/
11 | .webassets-cache
12 | .env
13 | .venv
14 | .vscode
15 | env/
16 | venv/
17 | ENV/
18 | env.bak/
19 | venv.bak/
20 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
6 |
7 | ## Our Standards
8 |
9 | Examples of behavior that contributes to creating a positive environment include:
10 |
11 | - Using welcoming and inclusive language
12 | - Being respectful of differing viewpoints and experiences
13 | - Gracefully accepting constructive criticism
14 | - Focusing on what is best for the community
15 | - Showing empathy towards other community members
16 |
17 | Examples of unacceptable behavior by participants include:
18 |
19 | - Trolling, insulting/derogatory comments, and personal or political attacks
20 | - Public or private harassment
21 | - Publishing others' private information, such as a physical or electronic address, without explicit permission
22 | - The use of sexualized language or imagery and unwelcome sexual attention or advances
23 | - Other conduct which could reasonably be considered inappropriate in a professional setting
24 |
25 | ## Our Responsibilities
26 |
27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
28 |
29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
30 |
31 | ## Enforcement
32 |
33 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at **deva[AT]xposedornot.com**. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident.
34 |
35 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
36 |
37 | ### Attribution
38 |
39 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at [https://www.contributor-covenant.org/version/2/0/code_of_conduct/][version]
40 |
41 | [homepage]: https://contributor-covenant.org
42 | [version]: https://www.contributor-covenant.org/version/2/0/code_of_conduct/
43 |
44 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to This Repository
2 |
3 | ## Scope
4 |
5 | This document provides guidelines for contributing to this repository.
6 |
7 | ## Issues
8 |
9 | File an issue if you think you've found a bug. Please describe:
10 |
11 | 1. How it can be reproduced
12 | 2. What was expected
13 | 3. What actually occurred
14 | 4. What version of the involved component was used
15 |
16 | ## Patches
17 |
18 | All contributions are welcome and most will be accepted. Patches for fixes, features, and improvements are accepted via pull requests.
19 |
20 | Pull requests should be based on the master branch, unless you want to contribute to an active branch for a specific topic.
21 |
22 | When contributing to this repository, please first discuss the change you wish to make via issue, email, or any other method with the owners of this repository before making a change.
23 |
24 | ## Pull Request Process
25 |
26 | 1. Update the README.md with details of changes to the interface, including new environment variables, exposed ports, useful file locations, and container parameters.
27 | 2. To contribute, just open a pull request. Please include tests. If you add any new files, make sure you add the source header to the top of each file.
28 | 3. Commit messages should explain why code is changing, configuration is added, or new types or packages are introduced.
29 |
30 | ## Style Guide
31 |
32 | - Functions should take as few parameters as possible. If many parameters are required, consider introducing a new type that logically groups the data.
33 | - Large blocks of commented out code should not be checked in.
34 | - Avoid the use of global variables. Prefer a dependency injection style that uses a mix of interfaces and concrete types.
35 | - Follow Python styling guidelines including PEP-8 for code.
36 |
37 | ## Git Commit
38 |
39 | Use these prefixes when committing:
40 |
41 | - 🐛 fix: Corrected typo in README.md
42 | - ✨ feat: Added user login functionality
43 | - 📝 docs: Updated API documentation
44 | - 💄 style: Improved button UI in the header
45 | - ♻️ refactor: Refactored alertme processing code
46 | - ✅ test: Added unit tests for new utility functions
47 | - 🧹 chore: Removed unused dependencies from package.json
48 | - ⚡ perf: Optimized database query performance
49 |
50 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Use the smaller 'slim' version of Python to reduce image size
2 | FROM python:3.12-slim
3 |
4 | # Set environment variables for better Cloud Run compatibility
5 | ENV PYTHONUNBUFFERED=True
6 | ENV APP_HOME=/app
7 | ENV PORT=8080
8 |
9 | WORKDIR ${APP_HOME}
10 |
11 | # Install system dependencies for ssdeep and tlsh
12 | RUN apt-get update && \
13 | apt-get install -y --no-install-recommends \
14 | libfuzzy-dev \
15 | ssdeep \
16 | build-essential \
17 | && rm -rf /var/lib/apt/lists/*
18 |
19 | # Copy only essential files first to leverage Docker's caching
20 | COPY requirements.txt ./
21 |
22 | # Install dependencies efficiently
23 | RUN pip install --no-cache-dir --upgrade pip && \
24 | pip install --no-cache-dir -r requirements.txt
25 |
26 | # Copy the rest of the app source code
27 | COPY . ./
28 |
29 | # Expose the port for Cloud Run
30 | EXPOSE ${PORT}
31 |
32 | # Run uvicorn with multiple workers for better performance
33 | CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080", "--workers", "8", "--limit-concurrency", "60", "--timeout-keep-alive", "120"]
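# Illustrative local usage (a sketch only; the image tag below is an example, not
# something defined by this repository):
#   docker build -t xposedornot-api .
#   docker run -p 8080:8080 xposedornot-api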
34 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 XposedOrNot
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # XposedOrNot API
2 |
3 |
4 | 🎉 Your free API for real-time data breach monitoring and analytics.
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 | [XposedOrNot API Playground](https://xposedornot.docs.apiary.io/) ·
15 | [XposedOrNot.com](https://xposedornot.com)
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 | ## What is XposedOrNot API?
24 |
25 | XposedOrNot is like your personal guard against data breaches. It's a platform that warns you when your email account might be at risk because of a public data breach. Knowing about these breaches can help you reduce the chances of your data getting exposed. Plus, it's totally open-source so you can see exactly how it works.
26 |
27 | The XposedOrNot API is the heart of this system. It's what runs the checks for data breaches and sends you the alerts.
28 |
29 | And guess what? It's FREE.
30 |
31 | It gives you all the details about any data breaches that it finds, plus some useful stats about an email.
32 |
33 | The API was built and is maintained by Devanand Premkumar.
34 | [Twitter/X](https://twitter.com/devaonbreaches)
35 | [Mastodon](https://infosec.exchange/@DevaOnBreaches)
36 |
37 |
38 |
39 | ## Show Your Support!
40 |
41 | 🌟 Give us a star if you like what we're doing!
42 |
43 | 🍴 Fork it and make it your own!
44 |
45 | 🤝 And hey, why not contribute? We love seeing what you can add to the mix!
46 |
47 | ## How to Use XposedOrNot API (documentation)
48 |
49 |
50 | If you want to get more details, you can check out our full [documentation](https://XposedOrNot.com/api_doc) and [API playground](https://xposedornot.docs.apiary.io/).
51 |
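Want a quick feel for the API first? Here is a minimal sketch of a call against the hosted service; the base URL `https://api.xposedornot.com` and the `/v1` prefix are assumptions based on this repository's router layout, so adjust them for your own deployment:

```shell
# Fetch overall breach metrics (this endpoint does not require an API key)
curl -s https://api.xposedornot.com/v1/metrics
```
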
52 | ## Why use XposedOrNot API?
53 |
54 | XposedOrNot API is the power behind XposedOrNot, and it's the first open-source tool that monitors and alerts you about data breaches.
55 |
56 | This API is your go-to for all information related to data breaches that XposedOrNot has collected and keeps up-to-date. Here are some things you can do with it:
57 | - Look up whether an email address has been caught in a data breach and get some stats about it
58 | - See if an email address has been exposed in public pastes
59 | - Do a combined search to check both data breach and pastes exposure for an email address
60 | - Check for exposed passwords without having to reveal who you are
61 |
62 | If you'd rather skip the API and check data breach info directly, you can do that on our website at: https://XposedOrNot.com.
63 |
64 |
65 | ## How secure is XposedOrNot API?
66 |
67 | Is XposedOrNot API safe to use? Absolutely.
68 |
69 | You see, the whole issue of data breaches has come from places that aren't secure. So, we've made sure everything is open-source, including the API and all related files on Github. We trust in the power of open source tools to make our digital world safer.
70 |
71 | Everything we run, from the app to the website, is built on open source - from the operating system (Linux) to the API script (Python), and even the web files (HTML). We believe in improving services through collaboration, and open source makes that possible.
72 |
73 | We've designed the XposedOrNot API with safety at its core because we're dealing with sensitive data breach information. Tools like Black, Pylint, and SonarQube Community Edition support the security of our code and design elements.
74 |
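If you want to run the same checks locally before opening a pull request, a rough equivalent of the CI workflows in `.github/workflows` looks like this:

```shell
pip install black pylint
black --check .
pylint --fail-under=9 $(git ls-files '*.py')
```
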
75 | If you spot any problems or have suggestions for improvements, please raise an issue on GitHub.
76 |
77 | And if you want to contribute, we welcome your pull requests. We'll gladly consider any changes or fixes you suggest.
78 |
79 | ## Quick Start for Local Development
80 |
81 | ### Using Docker-Compose
82 |
83 | 1. **Clone the Repository:**
84 |
85 | ```shell
86 | git clone https://github.com/XposedOrNot/XposedOrNot-API
87 | ```
88 |
89 | 2. **Update the necessary environment variables in the docker-compose.yml file if needed, then run:**
90 |
91 |
92 | ```shell
93 | docker-compose up
94 | ```
95 |
96 | This command builds the API and Datastore Docker images. Note that the project source directory is mounted into the Docker container, so changes to the source code won't require rebuilding the image.
97 |
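If you change `requirements.txt` or the Dockerfile itself, force an image rebuild:

```shell
docker-compose up --build
```
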
98 | ### Local Installation
99 |
100 | 1. **Clone the Repository:**
101 |
102 | ```shell
103 | git clone https://github.com/XposedOrNot/XposedOrNot-API
104 | ```
105 |
106 | 2. **Install Required Packages**
107 |
108 | ```shell
109 | sudo apt-get install -y google-cloud-sdk google-cloud-sdk-app-engine-python python3-pip build-essential libffi-dev python3-dev
110 | ```
111 |
112 | 3. **Install Python Libraries**
113 |
114 |
115 | ```shell
116 | pip3 install -r requirements.txt
117 | ```
118 |
119 | 4. **Setup Google Cloud Datastore**
120 |
121 | Before running XposedOrNot-API, choose one of the following options:
122 |
123 | - [Run local Google DataStore emulator](https://cloud.google.com/datastore/docs/tools/datastore-emulator)
124 | and debug using the local emulator rather than directly connect to Google DataStore.
125 |
126 | ```shell
127 | # For posix platforms, e.g. linux, mac:
128 | gcloud beta emulators datastore start
129 | ```
130 |
131 | - [Authenticate to Google DataStore](https://cloud.google.com/sdk/gcloud/reference/beta/auth/application-default) and directly debug using Google DataStore.
132 |
133 | 5. **Run the application**
134 |
135 | ```shell
136 | python3 main.py
137 | ```
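
If you chose the local emulator in step 4, point the client libraries at it in the same shell before starting the app; a sketch using the standard gcloud helper:

```shell
# Export DATASTORE_EMULATOR_HOST and related variables printed by the emulator helper
$(gcloud beta emulators datastore env-init)
python3 main.py
```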
138 |
139 | ## Contributing
140 |
141 | Please read [CONTRIBUTING.md](https://github.com/XposedOrNot/XposedOrNot-API/blob/master/CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests to us.
142 |
143 |
144 | ## Authors
145 |
146 | * **Devanand Premkumar** - *Initial work* - [XposedOrNot-API](https://github.com/XposedOrNot/XposedOrNot-API)
147 |
148 | ## License
149 |
150 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details
151 |
152 | ## Security Vulnerability Reporting
153 |
154 | Please do not report security vulnerabilities through public GitHub issues. Instead, refer to our [Responsible Disclosure Guidelines](https://beta.xposedornot.com/responsible-disclosure) for reporting these issues in a secure manner.
155 |
156 |
157 | ## Acknowledgments
158 |
159 | * Big shout-out to Python and all the people looking after the modules we've used. You guys rock!
160 |
161 | * And a round of applause for everyone who's reviewed our code. Your eyes make all the difference.
162 |
163 | ## Support! :star:
164 |
165 | :star2: Star it
166 | :fork_and_knife: Fork it
167 | :handshake: Contribute to it!
168 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 |
4 | ## Reporting a Vulnerability
5 |
6 | If you happen to discover 🔍 a bug or security vulnerability, I would love 😍 to hear from you! I encourage you to disclose it using the **[responsible disclosure](https://beta.xposedornot.com/responsible-disclosure)** guidelines to support XposedOrNot.
7 |
8 | You can report it via email at **deva @ xposedornot.com**.
9 |
10 | I want to make it clear that this is not a bug bounty program and we do not offer a monetary reward for submissions. However, I would be happy to feature your valid submissions on our **[Hall of Fame](https://beta.xposedornot.com/hof)** page, based on your preference. I believe in recognizing the positive contributions of reporters who have demonstrated a high level of dedication to our program.
11 |
--------------------------------------------------------------------------------
/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/XposedOrNot/XposedOrNot-API/ffcc06a0adcdbb0493e438b32b0b72e70a5cbf54/api/__init__.py
--------------------------------------------------------------------------------
/api/v1/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/XposedOrNot/XposedOrNot-API/ffcc06a0adcdbb0493e438b32b0b72e70a5cbf54/api/v1/__init__.py
--------------------------------------------------------------------------------
/api/v1/api_keys.py:
--------------------------------------------------------------------------------
1 | """API key management endpoints."""
2 |
3 | import secrets
4 | import datetime
5 | import logging
6 | from typing import Optional
7 | from fastapi import APIRouter, Request
8 | from google.cloud import datastore
9 |
10 | from config.limiter import limiter
11 | from models.base import BaseResponse
12 | from utils.validation import validate_variables, validate_url
13 |
14 | router = APIRouter()
15 |
16 |
17 | class APIKeyResponse(BaseResponse):
18 | """Response model for API key operations with status and optional API key."""
19 |
20 | api_key: Optional[str] = None
21 |
22 |
23 | @router.get("/create-api-key/{token}", response_model=APIKeyResponse)
24 | @limiter.limit("2 per second;10 per hour;50 per day")
25 | async def create_api_key(token: str, request: Request):
26 | """Generates or renews an API key for a user identified by a provided token."""
27 | try:
28 | if not token or not validate_variables([token]) or not validate_url(request):
29 | return APIKeyResponse(
30 | status="error", message="Invalid token or URL", status_code=400
31 | )
32 |
33 | client = datastore.Client()
34 | query = client.query(kind="xon_domains_session")
35 | query.add_filter("domain_magic", "=", token)
36 | user = list(query.fetch())
37 | if not user:
38 | return APIKeyResponse(
39 | status="error", message="Invalid token", status_code=400
40 | )
41 |
42 | email = user[0].key.name
43 | api_key = secrets.token_hex(16)
44 | timestamp = datetime.datetime.utcnow()
45 | api_key_key = client.key("xon_api_key", email)
46 | api_key_entity = client.get(api_key_key)
47 |
48 | if api_key_entity:
49 | api_key_entity.update({"api_key": api_key, "updated_timestamp": timestamp})
50 | else:
51 | api_key_entity = datastore.Entity(key=api_key_key)
52 | api_key_entity.update(
53 | {
54 | "api_key": api_key,
55 | "insert_timestamp": timestamp,
56 | "updated_timestamp": timestamp,
57 | }
58 | )
59 |
60 | client.put(api_key_entity)
61 | return APIKeyResponse(status="success", api_key=api_key, status_code=200)
62 |
63 | except Exception as exc:
64 | logging.error("Error creating API key: %s", str(exc))
65 | return APIKeyResponse(
66 | status="error",
67 | message="Unfortunately an error occurred while creating/renewing the API key",
68 | status_code=500,
69 | )
70 |
71 |
72 | @router.get("/get-api-key/{token}", response_model=APIKeyResponse)
73 | @limiter.limit("2 per second;50 per hour;100 per day")
74 | async def get_api_key(token: str, request: Request):
75 | """Retrieves the existing API key for a user identified by a provided token."""
76 | try:
77 | if not token or not validate_variables([token]) or not validate_url(request):
78 | return APIKeyResponse(
79 | status="error", message="Invalid token or URL", status_code=400
80 | )
81 |
82 | client = datastore.Client()
83 | query = client.query(kind="xon_domains_session")
84 | query.add_filter("domain_magic", "=", token)
85 | user = list(query.fetch())
86 | if not user:
87 | return APIKeyResponse(
88 | status="error", message="Invalid token", status_code=400
89 | )
90 |
91 | email = user[0].key.name
92 | api_key_key = client.key("xon_api_key", email)
93 | api_key_entity = client.get(api_key_key)
94 |
95 | if api_key_entity:
96 | api_key = api_key_entity.get("api_key")
97 | return APIKeyResponse(status="success", api_key=api_key, status_code=200)
98 |
99 | return APIKeyResponse(
100 | status="error", message="API key not found", status_code=404
101 | )
102 |
103 | except Exception as exc:
104 | logging.error("Error retrieving API key: %s", str(exc))
105 | return APIKeyResponse(
106 | status="error", message="API key not found", status_code=404
107 | )
108 |
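# Illustrative usage (not part of this module): assuming these routes are mounted
# under a /v1 prefix on the deployed API, an API key can be created or retrieved
# with the dashboard session token:
#   curl https://api.xposedornot.com/v1/create-api-key/<session-token>
#   curl https://api.xposedornot.com/v1/get-api-key/<session-token>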
--------------------------------------------------------------------------------
/api/v1/domain_breaches.py:
--------------------------------------------------------------------------------
1 | """Domain breaches endpoint for retrieving breach data and metrics."""
2 |
3 | from datetime import datetime
4 | from collections import defaultdict
5 | from typing import Dict, List
6 | from operator import itemgetter
7 |
8 | from fastapi import APIRouter, Request, HTTPException, Header, Depends
9 | from google.cloud import datastore
10 | from pydantic import BaseModel, Field
11 |
12 | from config.limiter import limiter
13 | from models.base import BaseResponse
14 | from utils.validation import validate_url
15 |
16 | router = APIRouter()
17 |
18 |
19 | # CSRF exemption dependency
20 | async def csrf_exempt():
21 | """Dependency to exempt from CSRF protection."""
22 | return True
23 |
24 |
25 | class BreachDetail(BaseModel):
26 | """Model for detailed breach information."""
27 |
28 | breached_date: datetime
29 | logo: str
30 | password_risk: str
31 | searchable: bool
32 | xposed_data: List[str] = Field(default_factory=list)
33 | xposed_records: int
34 | xposure_desc: str
35 |
36 | @classmethod
37 | def from_datastore(cls, data: dict) -> "BreachDetail":
38 | """Create a BreachDetail instance from datastore data."""
39 | # Convert string xposed_data to list if needed
40 | if isinstance(data.get("xposed_data"), str):
41 | data["xposed_data"] = [
42 | item.strip() for item in data["xposed_data"].split(";")
43 | ]
44 | return cls(**data)
45 |
46 |
47 | class BreachSummary(BaseModel):
48 | """Model for breach summary information."""
49 |
50 | email: str
51 | domain: str
52 | breach: str
53 |
54 |
55 | class DomainBreachesResponse(BaseResponse):
56 | """Response model for domain breaches endpoint."""
57 |
58 | metrics: Dict = {
59 | "Yearly_Metrics": Dict[str, int],
60 | "Domain_Summary": Dict[str, int],
61 | "Breach_Summary": Dict[str, int],
62 | "Breaches_Details": List[BreachSummary],
63 | "Top10_Breaches": Dict[str, int],
64 | "Detailed_Breach_Info": Dict[str, Dict],
65 | }
66 |
67 |
68 | @router.post(
69 | "/domain-breaches",
70 | response_model=DomainBreachesResponse,
71 | dependencies=[Depends(csrf_exempt)],
72 | )
73 | @router.post(
74 | "/domain-breaches/",
75 | response_model=DomainBreachesResponse,
76 | dependencies=[Depends(csrf_exempt)],
77 | )
78 | @limiter.limit("500 per day;100 per hour;2 per second")
79 | async def protected(
80 | request: Request,
81 | x_api_key: str = Header(..., description="API key for authentication"),
82 | ):
83 | """Retrieves the data breaches and related metrics for an API-key"""
84 | try:
85 | if not x_api_key or x_api_key.strip() == "" or not validate_url(request):
86 | raise HTTPException(status_code=401, detail="Invalid or missing API key")
87 |
88 | # Instantiate a datastore client
89 | datastore_client = datastore.Client()
90 |
91 | # Create a query against the kind 'xon_api_key'
92 | query = datastore_client.query(kind="xon_api_key")
93 | query.add_filter("api_key", "=", x_api_key)
94 | results = list(query.fetch())
95 |
96 | if not results:
97 | raise HTTPException(status_code=401, detail="Invalid or missing API key")
98 |
99 | # If the key is valid, return the associated email
100 | email = results[0].key.name
101 |
102 | # Additional operations
103 | query = datastore_client.query(kind="xon_domains")
104 | query.add_filter("email", "=", email)
105 | verified_domains = [entity["domain"] for entity in query.fetch()]
106 |
107 | current_year = datetime.utcnow().year
108 | yearly_summary = defaultdict(int)
109 | yearly_summary = {str(year): 0 for year in range(2007, current_year + 1)}
110 | yearly_breach_summary = {
111 | str(year): defaultdict(int) for year in range(2007, current_year + 1)
112 | }
113 | breach_summary = defaultdict(int)
114 | domain_summary = defaultdict(int)
115 | detailed_breach_info = {}
116 | breach_details = []
117 |
118 | for domain in verified_domains:
119 | query = datastore_client.query(kind="xon_domains_summary")
120 | query.add_filter("domain", "=", domain)
121 | domain_summary[domain] = 0
122 |
123 | for entity in query.fetch():
124 | if entity["breach"] == "No_Breaches":
125 | continue
126 |
127 | breach_key = datastore_client.key("xon_breaches", entity["breach"])
128 | breach = datastore_client.get(breach_key)
129 |
130 | if breach:
131 | breach_year = breach["breached_date"].strftime("%Y")
132 | yearly_summary[breach_year] += entity["email_count"]
133 | yearly_breach_summary[breach_year][entity["breach"]] += entity[
134 | "email_count"
135 | ]
136 | breach_summary[entity["breach"]] += entity["email_count"]
137 | domain_summary[domain] += entity["email_count"]
138 |
139 | detailed_breach_info[entity["breach"]] = {
140 | "breached_date": breach["breached_date"],
141 | "logo": breach["logo"],
142 | "password_risk": breach["password_risk"],
143 | "searchable": breach["searchable"],
144 | "xposed_data": breach["xposed_data"],
145 | "xposed_records": breach["xposed_records"],
146 | "xposure_desc": breach["xposure_desc"],
147 | }
148 |
149 | query = datastore_client.query(kind="xon_domains_details")
150 | query.add_filter("domain", "=", domain)
151 | for entity in query.fetch():
152 | breach_details.append(
153 | BreachSummary(
154 | email=entity["email"],
155 | domain=entity["domain"],
156 | breach=entity["breach"],
157 | )
158 | )
159 |
160 | top10_breaches = dict(
161 | sorted(breach_summary.items(), key=itemgetter(1), reverse=True)[:10]
162 | )
163 |
164 | metrics = {
165 | "Yearly_Metrics": dict(yearly_summary),
166 | "Domain_Summary": dict(domain_summary),
167 | "Breach_Summary": dict(breach_summary),
168 | "Breaches_Details": breach_details,
169 | "Top10_Breaches": dict(top10_breaches),
170 | "Detailed_Breach_Info": detailed_breach_info,
171 | }
172 |
173 | return DomainBreachesResponse(status="success", metrics=metrics)
174 |
175 | except HTTPException:
176 | raise
177 | except Exception as exception_details:
178 | raise HTTPException(
179 | status_code=500, detail="An error occurred during processing"
180 | ) from exception_details
181 |
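# Illustrative usage (not part of this module): assuming a /v1 prefix on the
# deployed API, the endpoint is invoked with the API key in the x-api-key header:
#   curl -X POST -H "x-api-key: <your-api-key>" https://api.xposedornot.com/v1/domain-breaches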
--------------------------------------------------------------------------------
/api/v1/domain_phishing.py:
--------------------------------------------------------------------------------
1 | """Domain phishing check router module."""
2 |
3 | import socket
4 | import json
5 | import os
6 | from datetime import datetime, timedelta
7 | from typing import Dict, List, Optional, Any, Union
8 | from pathlib import Path
9 |
10 | import dnstwist
11 | from fastapi import APIRouter, Depends, HTTPException, Request, Query
12 | from pydantic import BaseModel, Field, validator, EmailStr
13 | from redis import Redis
14 |
15 | from config.limiter import limiter, RATE_LIMIT_HELP
16 | from config.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
17 | from models.responses import BaseResponse
18 | from utils.validation import validate_email_with_tld, validate_variables
19 | from utils.token import confirm_token
20 | from google.cloud import datastore
21 |
22 | router = APIRouter()
23 |
24 | redis_client = Redis(
25 | host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB, decode_responses=True
26 | )
27 |
28 | BASE_DIR = Path(__file__).resolve().parent.parent.parent
29 | STATIC_DIR = BASE_DIR / "static" / "static"
30 | DICTIONARY_FILE = STATIC_DIR / "dictionary.txt"
31 | TLD_FILE = STATIC_DIR / "tld.txt"
32 |
33 | if not DICTIONARY_FILE.exists():
34 | raise FileNotFoundError(f"Dictionary file not found at {DICTIONARY_FILE}")
35 | if not TLD_FILE.exists():
36 | raise FileNotFoundError(f"TLD file not found at {TLD_FILE}")
37 |
38 |
39 | def validate_file_content(file_path: Path, min_lines: int = 1) -> None:
40 | if not file_path.exists():
41 | raise FileNotFoundError(f"File not found: {file_path}")
42 | with open(file_path, "r") as f:
43 | lines = [line.strip() for line in f if line.strip()]
44 | if len(lines) < min_lines:
45 | raise ValueError(
46 | f"File {file_path} has insufficient content (minimum {min_lines} lines required)"
47 | )
48 |
49 |
50 | try:
51 | validate_file_content(DICTIONARY_FILE, min_lines=10)
52 | validate_file_content(TLD_FILE, min_lines=5)
53 | except Exception as e:
54 | raise
55 |
56 |
57 | class DomainPhishingRequest(BaseModel):
58 | domain: str = Field(..., description="Domain to check for phishing variants")
59 |
60 | @validator("domain")
61 | def validate_domain(cls, v: str) -> str:
62 | if not v or not isinstance(v, str):
63 | raise ValueError("Domain must be a non-empty string")
64 | if len(v) > 255:
65 | raise ValueError("Domain length exceeds maximum allowed length")
66 | if "." not in v:
67 | raise ValueError("Invalid domain format")
68 | return v.lower().strip()
69 |
70 |
71 | class DomainPhishingSummaryResponse(BaseResponse):
72 | total_scanned: int
73 | total_live: int = 0
74 | unique_fuzzers: int = 0
75 | last_checked: Optional[str] = None
76 |
77 |
78 | class DomainPhishingResponse(BaseResponse):
79 | total_scanned: int
80 | total_live: int = 0
81 | unique_fuzzers: int = 0
82 | live_domains: List[str] = []
83 | raw_results: List[Dict[str, Any]]
84 | last_checked: Optional[str] = None
85 |
86 |
87 | def is_domain_live(domain: str) -> bool:
88 | try:
89 | socket.gethostbyname(domain)
90 | return True
91 | except socket.error:
92 | return False
93 |
94 |
95 | def get_cached_result(domain: str) -> Optional[Dict]:
96 | cache_key = f"phishing_check:{domain}"
97 | cached_data = redis_client.get(cache_key)
98 | if cached_data:
99 | try:
100 | return json.loads(cached_data)
101 | except json.JSONDecodeError:
102 | return None
103 | return None
104 |
105 |
106 | def cache_result(domain: str, result: Dict, expiry_hours: int = 24) -> None:
107 | cache_key = f"phishing_check:{domain}"
108 | if isinstance(result.get("last_checked"), datetime):
109 | result["last_checked"] = result["last_checked"].isoformat()
110 | redis_client.setex(cache_key, timedelta(hours=expiry_hours), json.dumps(result))
111 |
112 |
113 | async def verify_user_access(email: str, token: str) -> bool:
114 | if not email or not token:
115 | return False
116 | try:
117 | verified_email = await confirm_token(token)
118 | if not verified_email or verified_email.lower() != email.lower():
119 | return False
120 | datastore_client = datastore.Client()
121 | alert_key = datastore_client.key("xon_alert", email.lower())
122 | alert_record = datastore_client.get(alert_key)
123 | is_verified = bool(alert_record and alert_record.get("verified", False))
124 | return is_verified
125 | except Exception:
126 | return False
127 |
128 |
129 | async def is_domain_verified_for_user(email: str, domain: str) -> bool:
130 | """Check if a domain is verified for a specific user."""
131 | try:
132 | datastore_client = datastore.Client()
133 | # Query for the domain record
134 | query = datastore_client.query(kind="xon_domains")
135 | query.add_filter("email", "=", email.lower())
136 | query.add_filter("domain", "=", domain.lower())
137 | query.add_filter("verified", "=", True)
138 |
139 | # Get the first matching result
140 | results = list(query.fetch(limit=1))
141 | return len(results) > 0
142 | except Exception:
143 | return False
144 |
145 |
146 | @router.get(
147 | "/domain-phishing/{domain}",
148 | response_model=None,
149 | responses={
150 | 200: {
151 | "description": "Domain phishing check results",
152 | "content": {
153 | "application/json": {
154 | "example": {
155 | "status": "success",
156 | "total_scanned": 10,
157 | "total_live": 2,
158 | "live_domains": ["example.com"],
159 | "raw_results": [],
160 | "last_checked": "2024-03-07T12:00:00",
161 | }
162 | }
163 | },
164 | }
165 | },
166 | include_in_schema=True,
167 | )
168 | @limiter.limit("10/minute")
169 | async def check_domain_phishing(
170 | domain: str,
171 | request: Request,
172 | email: Optional[EmailStr] = Query(None, description="Email for authentication"),
173 | token: Optional[str] = Query(None, description="Verification token"),
174 | ) -> Union[DomainPhishingSummaryResponse, DomainPhishingResponse]:
175 | try:
176 | domain_request = DomainPhishingRequest(domain=domain)
177 | domain = domain_request.domain
178 | is_authenticated = False
179 | if email and token:
180 | is_authenticated = await verify_user_access(email, token)
181 | if is_authenticated:
182 | # For authenticated requests, verify domain ownership
183 | is_domain_verified = await is_domain_verified_for_user(email, domain)
184 | if not is_domain_verified:
185 | raise HTTPException(
186 | status_code=401,
187 | detail="Domain not verified for this user. Please verify domain ownership first.",
188 | )
189 |
190 | cached_result = get_cached_result(domain)
191 | if cached_result:
192 |
193 | if (not cached_result.get("unique_fuzzers")) and cached_result.get(
194 | "raw_results"
195 | ):
196 | cached_result["unique_fuzzers"] = len(
197 | set(
198 | r.get("fuzzer", "")
199 | for r in cached_result["raw_results"]
200 | if "fuzzer" in r and r.get("fuzzer")
201 | )
202 | )
203 | if is_authenticated:
204 | response = DomainPhishingResponse(**cached_result)
205 | return response
206 | response = DomainPhishingSummaryResponse(
207 | status="success",
208 | total_scanned=cached_result["total_scanned"],
209 | total_live=cached_result["total_live"],
210 | unique_fuzzers=cached_result.get("unique_fuzzers", 0),
211 | last_checked=cached_result["last_checked"],
212 | )
213 | return response
214 | try:
215 | options = {
216 | "registered": True,
217 | "format": "json",
218 | "threads": 8,
219 | "all": True,
220 | "mxcheck": True,
221 | "whois": True,
222 | "dictionary": str(DICTIONARY_FILE),
223 | "tld": str(TLD_FILE),
224 | }
225 | twist_results = dnstwist.run(domain=domain, **options)
226 | twist_results.sort(
227 | key=lambda x: (
228 | x.get("fuzzer", "") != "*original",
229 | x.get("domain-name", "") or x.get("domain", ""),
230 | )
231 | )
232 | live_domains = []
233 | for result in twist_results:
234 | domain_to_check = result.get("domain-name") or result.get("domain")
235 | if domain_to_check and is_domain_live(domain_to_check):
236 | live_domains.append(domain_to_check)
237 | unique_fuzzers = len(
238 | set(
239 | r.get("fuzzer", "")
240 | for r in twist_results
241 | if "fuzzer" in r and r.get("fuzzer")
242 | )
243 | )
244 | response_data = {
245 | "status": "success",
246 | "total_scanned": len(twist_results),
247 | "total_live": len(live_domains),
248 | "unique_fuzzers": unique_fuzzers,
249 | "live_domains": live_domains,
250 | "raw_results": twist_results,
251 | "last_checked": datetime.utcnow().isoformat(),
252 | }
253 | cache_result(domain, response_data)
254 | if is_authenticated:
255 | response = DomainPhishingResponse(**response_data)
256 | return response
257 | response = DomainPhishingSummaryResponse(
258 | status="success",
259 | total_scanned=response_data["total_scanned"],
260 | total_live=response_data["total_live"],
261 | unique_fuzzers=response_data["unique_fuzzers"],
262 | last_checked=response_data["last_checked"],
263 | )
264 | return response
265 | except Exception as e:
266 | raise HTTPException(
267 | status_code=500, detail=f"Error running domain check: {str(e)}"
268 | )
269 | except Exception as e:
270 | raise HTTPException(
271 | status_code=500, detail=f"Error processing domain: {str(e)}"
272 | )
273 |
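# Illustrative usage (not part of this module): unauthenticated calls return the
# summary model, while a verified email plus token returns the full result set.
# Assuming a /v1 prefix on the deployed API:
#   curl "https://api.xposedornot.com/v1/domain-phishing/example.com"
#   curl "https://api.xposedornot.com/v1/domain-phishing/example.com?email=<you>&token=<token>"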
--------------------------------------------------------------------------------
/api/v1/feeds.py:
--------------------------------------------------------------------------------
1 | """Feed endpoints for RSS and XON Pulse."""
2 |
3 | import logging
4 | from typing import List
5 | from fastapi import APIRouter, Request, Response
6 | from google.cloud import datastore
7 | from pydantic import BaseModel
8 | from feedgen.feed import FeedGenerator
9 | from models.base import BaseResponse
10 | from config.limiter import limiter
11 |
12 | router = APIRouter()
13 |
14 |
15 | class PulseNewsItem(BaseModel):
16 | """Model for individual news item."""
17 |
18 | title: str
19 | date: str
20 | summary: str
21 | url: str
22 |
23 |
24 | class PulseNewsResponse(BaseResponse):
25 | """Response model for news feed."""
26 |
27 | data: List[PulseNewsItem]
28 |
29 |
30 | @router.get("/xon-pulse", response_model=PulseNewsResponse)
31 | @limiter.limit("2 per second;100 per hour;1000 per day")
32 | async def get_pulse_data(request: Request):
33 | """Generate news feed for presenting all data breaches news."""
34 | try:
35 | client = datastore.Client()
36 | query = client.query(kind="xon-pulse")
37 | results = list(query.fetch())
38 |
39 | data = []
40 | for entity in results:
41 | item = PulseNewsItem(
42 | title=entity.get("title"),
43 | date=entity.get("date").strftime("%Y-%b-%d"),
44 | summary=entity.get("description"),
45 | url=entity.get("url"),
46 | )
47 | data.append(item)
48 |
49 | return PulseNewsResponse(status="success", data=data, status_code=200)
50 | except Exception as exc:
51 | logging.error("Failed to fetch news feed: %s", str(exc))
52 | return PulseNewsResponse(
53 | status="error", message="Failed to fetch news feed", status_code=404
54 | )
55 |
56 |
57 | @router.get("/rss")
58 | @limiter.limit("2 per second;50 per hour;100 per day")
59 | async def rss_feed(request: Request):
60 | """Generate RSS feed for presenting all data breaches in XoN."""
61 | try:
62 | feed_generator = FeedGenerator()
63 | feed_generator.title("XposedOrNot Data Breaches")
64 | feed_generator.description("Live updates of uploaded data breaches")
65 | feed_generator.link(href="https://xposedornot.com/xposed")
66 | feed_generator.contributor(
67 | name="Devanand Premkumar", email="deva@xposedornot.com"
68 | )
69 |
70 | datastore_client = datastore.Client()
71 | query = datastore_client.query(kind="xon_breaches")
72 | query_iter = query.fetch()
73 |
74 | for entity in query_iter:
75 | feed_entry = feed_generator.add_entry()
76 | entity_key = entity.key
77 | parts = str(entity_key).split(",")
78 | entity_key = parts[1][:-2][2:]
79 |
80 | feed_entry.id(entity_key)
81 | feed_entry.title(entity_key)
82 | feed_entry.link(href="https://xposedornot.com/xposed#" + entity_key)
83 |
84 | description = (
85 | str(entity["xposure_desc"])
86 | + ". Exposed data: "
87 | + str(entity["xposed_data"])
88 | )
89 | feed_entry.description(description=description)
90 | feed_entry.pubDate(entity["timestamp"])
91 | feed_entry.guid(guid=entity_key, permalink=True)
92 |
93 | rss_content = feed_generator.rss_str()
94 | return Response(content=rss_content, media_type="application/rss+xml")
95 |
96 | except Exception as exc:
97 | logging.error("Feed generation failed: %s", str(exc))
98 | return Response(content="Feed generation failed", status_code=404)
99 |
--------------------------------------------------------------------------------
/api/v1/metrics.py:
--------------------------------------------------------------------------------
1 | """Metrics-related API endpoints."""
2 |
3 | from datetime import datetime
4 |
5 | from fastapi import APIRouter, HTTPException, Request
6 | from fastapi.responses import JSONResponse
7 |
8 | from config.limiter import limiter
9 | from models.responses import MetricsResponse, DetailedMetricsResponse
10 | from services.analytics import get_detailed_metrics
11 | from utils.helpers import validate_url
12 |
13 | router = APIRouter()
14 |
15 |
16 | @router.get("/metrics", response_model=MetricsResponse)
17 | @limiter.limit("5 per minute;50 per hour;100 per day")
18 | async def get_metrics_endpoint(request: Request) -> MetricsResponse:
19 | """Returns basic metrics about breaches."""
20 | try:
21 | if not validate_url(request):
22 | raise HTTPException(status_code=400, detail="Invalid request URL")
23 |
24 | metrics = await get_detailed_metrics()
25 | return MetricsResponse(
26 | Breaches_Count=metrics["breaches_count"],
27 | Breaches_Records=metrics["breaches_total_records"],
28 | Pastes_Count=str(metrics["pastes_count"]),
29 | Pastes_Records=metrics["pastes_total_records"],
30 | )
31 |
32 | except Exception as e:
33 | raise HTTPException(status_code=500, detail=str(e)) from e
34 |
35 |
36 | @router.get("/metrics/detailed", response_model=DetailedMetricsResponse)
37 | @limiter.limit("500 per day;100 per hour")
38 | async def get_detailed_metrics_endpoint(request: Request) -> DetailedMetricsResponse:
39 | """Returns detailed summary of data breaches including yearly count, top breaches, and recent breaches."""
40 | try:
41 | if not validate_url(request):
42 | raise HTTPException(status_code=400, detail="Invalid request URL")
43 |
44 | metrics = await get_detailed_metrics()
45 |
46 | # Process top breaches
47 | top_breaches = []
48 | for breach in metrics["top_breaches"]:
49 | top_breaches.append(
50 | {
51 | "breachid": breach.key.id_or_name,
52 | "logo": breach.get("logo"),
53 | "description": breach.get("xposure_desc"),
54 | "count": breach.get("xposed_records"),
55 | }
56 | )
57 |
58 | # Process recent breaches
59 | recent_breaches = []
60 | for breach in metrics["recent_breaches"]:
61 | timestamp = breach.get("timestamp")
62 | if isinstance(timestamp, datetime):
63 | formatted_timestamp = timestamp.strftime("%a, %d %b %Y %H:%M:%S GMT")
64 | else:
65 | formatted_timestamp = datetime.utcnow().strftime(
66 | "%a, %d %b %Y %H:%M:%S GMT"
67 | )
68 |
69 | recent_breaches.append(
70 | {
71 | "breachid": breach.key.id_or_name,
72 | "timestamp": formatted_timestamp,
73 | "logo": breach.get("logo"),
74 | "description": breach.get("xposure_desc"),
75 | "count": breach.get("xposed_records"),
76 | }
77 | )
78 |
79 | return DetailedMetricsResponse(
80 | Breaches_Count=metrics["breaches_count"],
81 | Breaches_Records=metrics["breaches_total_records"],
82 | Pastes_Count=str(metrics["pastes_count"]),
83 | Pastes_Records=metrics["pastes_total_records"],
84 | Yearly_Breaches_Count=metrics["yearly_count"],
85 | Industry_Breaches_Count=metrics["industry_breaches_count"],
86 | Top_Breaches=top_breaches,
87 | Recent_Breaches=recent_breaches,
88 | )
89 |
90 | except Exception as e:
91 | raise HTTPException(status_code=500, detail=str(e)) from e
92 |
93 |
94 | @router.get("/metrics/domain/{domain}", include_in_schema=False)
95 | @limiter.limit("5 per minute;50 per hour;100 per day")
96 | async def get_domain_metrics(request: Request, domain: str) -> JSONResponse:
97 | """Returns metrics for a specific domain."""
98 | try:
99 | if not validate_url(request):
100 | raise HTTPException(status_code=400, detail="Invalid request URL")
101 |
102 | domain_metrics = {
103 | "status": "success",
104 | "message": "Domain metrics retrieved successfully",
105 | "data": {
106 | "domain": domain,
107 | "metrics": {
108 | "total_breaches": 0,
109 | "total_records": 0,
110 | "last_breach": None,
111 | "risk_score": 0,
112 | "industry_breaches_count": {},
113 | },
114 | },
115 | }
116 |
117 | return JSONResponse(content=domain_metrics)
118 |
119 | except Exception as e:
120 | raise HTTPException(status_code=500, detail=str(e)) from e
121 |
--------------------------------------------------------------------------------
/api/v1/slack.py:
--------------------------------------------------------------------------------
1 | """Slack channel-related API endpoints."""
2 |
3 | from fastapi import APIRouter, HTTPException, Request
4 |
5 | from config.limiter import limiter
6 | from models.requests import ChannelSetupRequest
7 | from models.responses import ChannelConfigResponse, ChannelSetupResponse
8 | from services.slack import (
9 | setup_slack_channel,
10 | verify_slack_channel,
11 | delete_slack_channel,
12 | get_slack_channel_config,
13 | )
14 | from utils.helpers import validate_url, validate_variables
15 |
16 | router = APIRouter()
17 |
18 |
19 | @router.post("/slack/setup", response_model=ChannelSetupResponse)
20 | @limiter.limit("5 per minute;50 per hour;100 per day")
21 | async def setup_slack_channel_endpoint(
22 | request: Request, channel_data: ChannelSetupRequest
23 | ) -> ChannelSetupResponse:
24 | """Setup Slack channel for domain notifications."""
25 | try:
26 | if not validate_variables(
27 | [channel_data.token, channel_data.domain, channel_data.webhook]
28 | ):
29 | raise HTTPException(status_code=400, detail="Invalid input parameters")
30 |
31 | if not validate_url(request):
32 | raise HTTPException(status_code=400, detail="Invalid request URL")
33 |
34 | if channel_data.action == "setup":
35 | success = await setup_slack_channel(channel_data)
36 | if not success:
37 | raise HTTPException(
38 | status_code=400, detail="Slack channel setup failed"
39 | )
40 | return ChannelSetupResponse(
41 | status="success", message="Slack channel setup successful"
42 | )
43 |
44 | if channel_data.action == "verify":
45 | if not channel_data.verify_token:
46 | raise HTTPException(
47 | status_code=400, detail="Verification token required"
48 | )
49 |
50 | success = await verify_slack_channel(channel_data)
51 | if not success:
52 | raise HTTPException(
53 | status_code=400, detail="Slack channel verification failed"
54 | )
55 | return ChannelSetupResponse(
56 | status="success", message="Slack channel verified successfully"
57 | )
58 |
59 | if channel_data.action == "delete":
60 | success = await delete_slack_channel(channel_data)
61 | if not success:
62 | raise HTTPException(
63 | status_code=400, detail="Slack channel deletion failed"
64 | )
65 | return ChannelSetupResponse(
66 | status="success", message="Slack channel deleted successfully"
67 | )
68 |
69 | raise HTTPException(status_code=400, detail="Invalid action")
70 |
71 | except Exception as e:
72 | raise HTTPException(status_code=500, detail=str(e)) from e
73 |
74 |
75 | @router.get("/slack/config/{domain}", response_model=ChannelConfigResponse)
76 | @limiter.limit("5 per minute;50 per hour;100 per day")
77 | async def get_slack_channel_config_endpoint(
78 | request: Request, domain: str, token: str
79 | ) -> ChannelConfigResponse:
80 | """Get Slack channel configuration for a domain."""
81 | try:
82 | if not validate_variables([domain, token]):
83 | raise HTTPException(status_code=400, detail="Invalid input parameters")
84 |
85 | if not validate_url(request):
86 | raise HTTPException(status_code=400, detail="Invalid request URL")
87 |
88 | config = await get_slack_channel_config(domain, token)
89 | if not config:
90 | raise HTTPException(
91 | status_code=404, detail="Slack channel configuration not found"
92 | )
93 |
94 | return ChannelConfigResponse(
95 | status="success",
96 | message="Slack channel configuration retrieved successfully",
97 | data=config,
98 | )
99 |
100 | except Exception as e:
101 | raise HTTPException(status_code=500, detail=str(e)) from e
102 |
--------------------------------------------------------------------------------
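A minimal client-side sketch (not part of the repository) for exercising the Slack setup endpoint above. It assumes the API is running locally on port 8080 as in docker-compose.yml and that the router is mounted under the "/v1" prefix from config/settings.py; the token and webhook values are placeholders:

import httpx

payload = {
    "token": "your_token",  # domain dashboard token (placeholder)
    "domain": "example.com",
    "webhook": "https://hooks.slack.com/services/T000/B000/XXXX",  # placeholder
    "action": "setup",  # one of: setup, verify, delete
}

# POST to the /slack/setup route defined in api/v1/slack.py
response = httpx.post("http://localhost:8080/v1/slack/setup", json=payload, timeout=20)
print(response.status_code, response.json())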
/api/v1/teams.py:
--------------------------------------------------------------------------------
1 | """Teams channel-related API endpoints."""
2 |
3 | from fastapi import APIRouter, HTTPException, Request
4 |
5 | from config.limiter import limiter
6 | from models.requests import ChannelSetupRequest
7 | from models.responses import ChannelConfigResponse, ChannelSetupResponse
8 | from services.teams import (
9 | setup_teams_channel,
10 | verify_teams_channel,
11 | delete_teams_channel,
12 | get_teams_channel_config,
13 | )
14 | from utils.helpers import validate_url, validate_variables
15 |
16 | router = APIRouter()
17 |
18 |
19 | @router.post("/teams/setup", response_model=ChannelSetupResponse)
20 | @limiter.limit("5 per minute;50 per hour;100 per day")
21 | async def setup_teams_channel_endpoint(
22 | request: Request, channel_data: ChannelSetupRequest
23 | ) -> ChannelSetupResponse:
24 | """Setup Teams channel for domain notifications."""
25 | try:
26 | if not validate_variables(
27 | [channel_data.token, channel_data.domain, channel_data.webhook]
28 | ):
29 | raise HTTPException(status_code=400, detail="Invalid input parameters")
30 |
31 | if not validate_url(request):
32 | raise HTTPException(status_code=400, detail="Invalid request URL")
33 |
34 | if channel_data.action == "setup":
35 | success = await setup_teams_channel(channel_data)
36 | if success:
37 | return ChannelSetupResponse(
38 | status="success", message="Teams channel setup successful"
39 | )
40 | raise HTTPException(status_code=400, detail="Teams channel setup failed")
41 | elif channel_data.action == "verify":
42 | if not channel_data.verify_token:
43 | raise HTTPException(
44 | status_code=400, detail="Verification token required"
45 | )
46 |
47 | success = await verify_teams_channel(channel_data)
48 | if success:
49 | return ChannelSetupResponse(
50 | status="success", message="Teams channel verified successfully"
51 | )
52 | raise HTTPException(
53 | status_code=400, detail="Teams channel verification failed"
54 | )
55 | elif channel_data.action == "delete":
56 | success = await delete_teams_channel(channel_data)
57 | if success:
58 | return ChannelSetupResponse(
59 | status="success", message="Teams channel deleted successfully"
60 | )
61 | raise HTTPException(status_code=400, detail="Teams channel deletion failed")
62 | else:
63 | raise HTTPException(status_code=400, detail="Invalid action")
64 |
65 | except Exception as e:
66 | raise HTTPException(status_code=500, detail=str(e)) from e
67 |
68 |
69 | @router.get("/teams/config/{domain}", response_model=ChannelConfigResponse)
70 | @limiter.limit("5 per minute;50 per hour;100 per day")
71 | async def get_teams_channel_config_endpoint(
72 | request: Request, domain: str, token: str
73 | ) -> ChannelConfigResponse:
74 | """Get Teams channel configuration for a domain."""
75 | try:
76 | if not validate_variables([domain, token]):
77 | raise HTTPException(status_code=400, detail="Invalid input parameters")
78 |
79 | if not validate_url(request):
80 | raise HTTPException(status_code=400, detail="Invalid request URL")
81 |
82 | config = await get_teams_channel_config(domain, token)
83 | if config:
84 | return ChannelConfigResponse(
85 | status="success",
86 | message="Teams channel configuration retrieved successfully",
87 | data=config,
88 | )
89 | raise HTTPException(
90 | status_code=404, detail="Teams channel configuration not found"
91 | )
92 |
93 | except Exception as e:
94 | raise HTTPException(status_code=500, detail=str(e)) from e
95 |
--------------------------------------------------------------------------------
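A similarly hedged sketch for the Teams configuration endpoint above; the domain, token, local base URL, and "/v1" prefix are assumptions:

import httpx

# GET /teams/config/{domain} from api/v1/teams.py; the token is passed as a query parameter
resp = httpx.get(
    "http://localhost:8080/v1/teams/config/example.com",
    params={"token": "your_token"},
    timeout=20,
)
print(resp.status_code, resp.json())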
/api/v1/webhooks.py:
--------------------------------------------------------------------------------
1 | """Webhook-related API endpoints."""
2 |
3 | from fastapi import APIRouter, HTTPException, Request
4 |
5 | from config.limiter import limiter
6 | from models.requests import WebhookSetupRequest
7 | from models.responses import WebhookConfigResponse, WebhookSetupResponse
8 | from services.webhook import (
9 | setup_webhook,
10 | verify_webhook,
11 | delete_webhook,
12 | get_webhook_config,
13 | )
14 | from utils.helpers import validate_url, validate_variables
15 |
16 | router = APIRouter()
17 |
18 |
19 | @router.post("/webhook/setup", response_model=WebhookSetupResponse)
20 | @limiter.limit("5 per minute;50 per hour;100 per day")
21 | async def setup_webhook_endpoint(
22 | request: Request, webhook_data: WebhookSetupRequest
23 | ) -> WebhookSetupResponse:
24 | """Setup webhook for domain notifications."""
25 | try:
26 | if not validate_variables(
27 | [webhook_data.token, webhook_data.domain, webhook_data.webhook]
28 | ):
29 | raise HTTPException(status_code=400, detail="Invalid input parameters")
30 |
31 | if not validate_url(request):
32 | raise HTTPException(status_code=400, detail="Invalid request URL")
33 |
34 | if webhook_data.action == "setup":
35 | verify_token = await setup_webhook(webhook_data)
36 | return WebhookSetupResponse(
37 | status="success",
38 | message="Webhook setup successful",
39 | verify_token=verify_token,
40 | )
41 |
42 | if webhook_data.action == "verify":
43 | if not webhook_data.verify_token:
44 | raise HTTPException(
45 | status_code=400, detail="Verification token required"
46 | )
47 |
48 | success = await verify_webhook(webhook_data)
49 | if not success:
50 | raise HTTPException(
51 | status_code=400, detail="Webhook verification failed"
52 | )
53 | return WebhookSetupResponse(
54 | status="success", message="Webhook verified successfully"
55 | )
56 |
57 | if webhook_data.action == "delete":
58 | success = await delete_webhook(webhook_data)
59 | if not success:
60 | raise HTTPException(status_code=400, detail="Webhook deletion failed")
61 | return WebhookSetupResponse(
62 | status="success", message="Webhook deleted successfully"
63 | )
64 |
65 | raise HTTPException(status_code=400, detail="Invalid action")
66 |
67 | except Exception as e:
68 | raise HTTPException(status_code=500, detail=str(e)) from e
69 |
70 |
71 | @router.get("/webhook/config/{domain}", response_model=WebhookConfigResponse)
72 | @limiter.limit("5 per minute;50 per hour;100 per day")
73 | async def get_webhook_config_endpoint(
74 | request: Request, domain: str, token: str
75 | ) -> WebhookConfigResponse:
76 | """Get webhook configuration for a domain."""
77 | try:
78 | if not validate_variables([domain, token]):
79 | raise HTTPException(status_code=400, detail="Invalid input parameters")
80 |
81 | if not validate_url(request):
82 | raise HTTPException(status_code=400, detail="Invalid request URL")
83 |
84 | config = await get_webhook_config(domain, token)
85 | if not config:
86 | raise HTTPException(
87 | status_code=404, detail="Webhook configuration not found"
88 | )
89 |
90 | return WebhookConfigResponse(
91 | status="success",
92 | message="Webhook configuration retrieved successfully",
93 | data=config,
94 | )
95 |
96 | except Exception as e:
97 | raise HTTPException(status_code=500, detail=str(e)) from e
98 |
--------------------------------------------------------------------------------
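The webhook endpoints above imply a two-step flow: the "setup" action returns a verify_token, which is then sent back with the "verify" action. A rough sketch under the same local-server assumptions, with placeholder values:

import httpx

URL = "http://localhost:8080/v1/webhook/setup"
common = {
    "token": "your_token",
    "domain": "example.com",
    "webhook_url": "https://example.com/webhook",  # alias for the "webhook" field
    "secret": "your_secret",
}

# Step 1: register the webhook and capture the returned verify_token
setup = httpx.post(URL, json={**common, "action": "setup"}, timeout=20).json()

# Step 2: confirm ownership of the webhook endpoint with that verify_token
verify = httpx.post(
    URL,
    json={**common, "action": "verify", "verify_token": setup.get("verify_token")},
    timeout=20,
)
print(verify.json())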
/config/limiter.py:
--------------------------------------------------------------------------------
1 | """Centralized rate limiter configuration."""
2 |
3 | from datetime import datetime, timedelta
4 | from typing import Tuple, Optional, Dict
5 |
6 | from slowapi import Limiter
7 | from slowapi.middleware import SlowAPIMiddleware
8 | from slowapi.errors import RateLimitExceeded
9 | from fastapi import FastAPI, Request
10 | from fastapi.responses import JSONResponse
11 | from slowapi.util import get_remote_address
12 |
13 | # Local imports
14 | from utils.helpers import get_client_ip
15 | from config.settings import REDIS_URL
16 |
17 |
18 | def get_key_func(request: Request) -> str:
19 | """
20 | Enhanced key function that combines IP and endpoint for more granular rate limiting.
21 | """
22 | client_ip = get_client_ip(request)
23 | endpoint = request.url.path
24 | return f"{client_ip}:{endpoint}"
25 |
26 |
27 | def get_rate_limit_key(request: Request) -> str:
28 | """
29 | Get a unique key for rate limiting based on the request.
30 |
31 | Args:
32 | request: FastAPI request object
33 |
34 | Returns:
35 | String key for rate limiting
36 | """
37 | client_ip = get_remote_address(request) # Use slowapi's built-in IP detection
38 | endpoint = request.url.path
39 | return f"{client_ip}:{endpoint}"
40 |
41 |
42 | def _parse_rate_limit(limit_str: str) -> Tuple[int, str]:
43 | """
44 | Parse a rate limit string into a tuple of (limit, period).
45 |
46 | Args:
47 | limit_str: String in format "X per Y" (e.g., "2 per second")
48 |
49 | Returns:
50 | Tuple of (limit, period)
51 | """
52 | try:
53 | limit, _, period = limit_str.strip().split()
54 | return int(limit), period
55 | except ValueError:
56 | return 2, "second" # Default fallback
57 |
58 |
59 | # Initialize the rate limiter with enhanced key function and Redis storage
60 | limiter = Limiter(
61 | key_func=get_key_func, # Use our enhanced key function
62 | default_limits=["2 per second;5 per hour;100 per day"],
63 | storage_uri=REDIS_URL, # Use Redis URL from settings
64 | strategy="fixed-window", # Use fixed window strategy for more predictable rate limiting
65 | )
66 |
67 | # Define specific rate limits for different types of routes
68 | RATE_LIMIT_HELP = "50 per day;10 per hour" # For help/documentation routes
69 | RATE_LIMIT_UNBLOCK = "24 per day;2 per hour;2 per second" # For unblock operations
70 | RATE_LIMIT_BREACHES = "2 per second;5 per hour;100 per day" # For breach listing
71 | RATE_LIMIT_CHECK_EMAIL = "2 per second;5 per hour;100 per day" # For email checks
72 | RATE_LIMIT_ANALYTICS = (
73 | "5 per minute;100 per hour;500 per day" # For analytics endpoints
74 | )
75 | RATE_LIMIT_DOMAIN = (
76 | "2 per second;10 per hour;50 per day" # For domain-related endpoints
77 | )
78 |
79 |
80 | def setup_limiter(app: FastAPI) -> None:
81 | """
82 | Set up rate limiting for the FastAPI application.
83 |
84 | Args:
85 | app: FastAPI application instance
86 | """
87 | # Add the limiter to the app state
88 | app.state.limiter = limiter
89 |
90 | # Add the SlowAPI middleware
91 | app.add_middleware(SlowAPIMiddleware)
92 |
93 | # Add custom rate limit exceeded handler
94 | app.add_exception_handler(RateLimitExceeded, rate_limit_exceeded_handler)
95 |
96 |
97 | def rate_limit_exceeded_handler(
98 | request: Request, exc: RateLimitExceeded
99 | ) -> JSONResponse:
100 | """
101 | Custom handler for rate limit exceeded exceptions.
102 | Includes retry-after header and detailed response.
103 | """
104 | # Calculate retry after time based on the rate limit that was exceeded
105 | retry_after = 1 # Default to 1 second
106 | if hasattr(exc, "retry_after"):
107 | retry_after = exc.retry_after
108 | reset_time = datetime.now() + timedelta(seconds=retry_after)
109 |
110 | return JSONResponse(
111 | status_code=429,
112 | content={
113 | "error": "Rate limit exceeded",
114 | "detail": str(exc),
115 | "retry_after": retry_after,
116 | "reset_time": reset_time.isoformat(),
117 | },
118 | headers={"Retry-After": str(retry_after)},
119 | )
120 |
--------------------------------------------------------------------------------
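A short sketch of how the limiter and the RATE_LIMIT_* constants above are intended to be applied to a route, assuming the environment variables required by config/settings.py are present; the example path and handler are illustrative only:

from fastapi import FastAPI, Request

from config.limiter import limiter, setup_limiter, RATE_LIMIT_ANALYTICS

app = FastAPI()
setup_limiter(app)  # attaches app.state.limiter, SlowAPIMiddleware, and the 429 handler


@app.get("/v1/example")
@limiter.limit(RATE_LIMIT_ANALYTICS)  # "5 per minute;100 per hour;500 per day"
async def example(request: Request) -> dict:
    # slowapi needs the Request argument so get_key_func can build the "ip:endpoint" key
    return {"status": "ok"}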
/config/middleware.py:
--------------------------------------------------------------------------------
1 | """Middleware configurations for the application."""
2 |
3 | # Standard library imports
4 | import asyncio
5 |
6 | # Third-party imports
7 | from fastapi import FastAPI, Request
8 | from fastapi.middleware.cors import CORSMiddleware
9 | from slowapi.middleware import SlowAPIMiddleware
10 |
11 | # Local imports
12 | from services.globe import process_request_for_globe
13 | from utils.request import get_client_ip
14 | from config.limiter import limiter
15 |
16 |
17 | def setup_middleware(app: FastAPI) -> None:
18 | """Configure middleware for the FastAPI application."""
19 |
20 | # Add CORS middleware first
21 | app.add_middleware(
22 | CORSMiddleware,
23 | allow_origins=["*"],
24 | allow_credentials=True,
25 | allow_methods=["GET", "POST"],
26 | allow_headers=["*"],
27 | expose_headers=["*"],
28 | max_age=600,
29 | )
30 |
31 | # Add rate limiting middleware after CORS
32 | app.state.limiter = limiter
33 | app.add_middleware(SlowAPIMiddleware)
34 |
35 |
36 | def setup_security_headers(app: FastAPI) -> None:
37 | """Configure security headers middleware."""
38 |
39 | @app.middleware("http")
40 | async def add_security_headers(request: Request, call_next):
41 | response = await call_next(request)
42 | csp_value = (
43 | "default-src 'self';"
44 | "script-src 'self' 'unsafe-inline' 'unsafe-eval' "
45 | "https://cdnjs.cloudflare.com https://maxcdn.bootstrapcdn.com;"
46 | "style-src 'self' 'unsafe-inline' "
47 | "https://cdnjs.cloudflare.com https://fonts.googleapis.com "
48 | "https://xposedornot.com https://maxcdn.bootstrapcdn.com;"
49 | "img-src 'self' https://xposedornot.com https://fastapi.tiangolo.com data:;"
50 | "font-src 'self' https://fonts.gstatic.com https://cdnjs.cloudflare.com;"
51 | "object-src 'none';"
52 | "base-uri 'self';"
53 | "connect-src 'self' https://xposedornot.com https://api.xposedornot.com https://cdnjs.cloudflare.com;"
54 | "worker-src 'self' blob:;"
55 | )
56 |
57 | response.headers["Content-Security-Policy"] = csp_value
58 | response.headers["X-Frame-Options"] = "DENY"
59 | response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin"
60 | response.headers["Permissions-Policy"] = (
61 | "accelerometer=(), camera=(), geolocation=(), "
62 | "microphone=(), midi=(), payment=(), usb=()"
63 | )
64 | response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
65 | response.headers["X-Content-Type-Options"] = "nosniff"
66 |
67 | return response
68 |
69 |
70 | async def process_globe_request_background(client_ip: str) -> None:
71 | """Process the globe request in the background."""
72 | try:
73 | await process_request_for_globe(client_ip)
74 |     except (ValueError, KeyError):
75 |         pass
76 |     except Exception:  # pylint: disable=broad-except
77 |         pass
78 |
79 |
80 | def setup_globe_middleware(app: FastAPI) -> None:
81 | """Configure globe visualization middleware."""
82 |
83 | @app.middleware("http")
84 | async def globe_request_middleware(request: Request, call_next):
85 | """Process request for globe visualization before processing the request."""
86 | try:
87 | client_ip = get_client_ip(request)
88 |
89 | asyncio.create_task(process_globe_request_background(client_ip))
90 |         except (ValueError, KeyError):
91 |             pass
92 |         except Exception:  # pylint: disable=broad-except
93 |             pass
94 |
95 | response = await call_next(request)
96 | return response
97 |
--------------------------------------------------------------------------------
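A hedged sketch of how these setup functions are presumably wired together in main.py (main.py is outside this excerpt, so the exact call order is an assumption, and the environment variables from config/settings.py must be set for the imports to succeed):

from fastapi import FastAPI

from config.middleware import (
    setup_middleware,
    setup_security_headers,
    setup_globe_middleware,
)

app = FastAPI()
setup_middleware(app)        # CORS + SlowAPI rate limiting
setup_security_headers(app)  # CSP, X-Frame-Options, and related headers
setup_globe_middleware(app)  # fire-and-forget geolocation task per request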
/config/settings.py:
--------------------------------------------------------------------------------
1 | """Configuration settings for the application."""
2 |
3 | import os
4 | from typing import Optional
5 |
6 | # Environment Variables
7 | AUTH_EMAIL: str = os.environ["AUTH_EMAIL"]
8 | AUTHKEY: str = os.environ["AUTHKEY"]
9 | CF_MAGIC: str = os.environ["CF_MAGIC"]
10 | CF_UNBLOCK_MAGIC: str = os.environ["CF_UNBLOCK_MAGIC"]
11 | FERNET_KEY: Optional[str] = os.environ.get("ENCRYPTION_KEY")
12 | PROJECT_ID: Optional[str] = os.environ.get("PROJECT_ID")
13 | SECRET_APIKEY: str = os.environ["SECRET_APIKEY"]
14 | SECURITY_SALT: str = os.environ["SECURITY_SALT"]
15 | TOPIC_ID: Optional[str] = os.environ.get("TOPIC_ID")
16 | WTF_CSRF_SECRET_KEY: str = os.environ["WTF_CSRF_SECRET_KEY"]
17 | XMLAPI_KEY: str = os.environ["XMLAPI_KEY"]
18 | BASE_URL: str = os.environ.get("BASE_URL", "https://api.xposedornot.com")
19 |
20 | # Redis Configuration
21 | REDIS_HOST: str = os.environ.get("REDIS_HOST", "localhost")
22 | REDIS_PORT: int = int(os.environ.get("REDIS_PORT", "6379"))
23 | REDIS_DB: int = int(os.environ.get("REDIS_DB", "0"))
24 | REDIS_PASSWORD: Optional[str] = os.environ.get("REDIS_PASSWORD")
25 |
26 | # Redis URL for rate limiter
27 | REDIS_URL: str = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
28 | if REDIS_PASSWORD:
29 | REDIS_URL = f"redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
30 |
31 | # Constants
32 | MAX_EMAIL_LENGTH: int = 254
33 | DEFAULT_TIMEOUT: int = 20
34 |
35 | """API configuration settings."""
36 |
37 | # API Configuration
38 | API_TITLE = "XON API"
39 | API_VERSION = "2.0.0"
40 | API_DESCRIPTION = """
41 | The XON API provides comprehensive data breach detection and monitoring services. Use our API to:
42 | * Check if email addresses have been exposed in data breaches
43 | * Monitor domain-level breaches
44 | * Access detailed breach analytics
45 | * Get real-time breach metrics
46 |
47 | ## Documentation Access
48 |
49 | | Interface | Features | Rate Limits |
50 | |-----------|----------|-------------|
51 | | **Swagger UI** - Interactive documentation at `/docs` | • Try out API endpoints directly • View request/response examples • Interactive testing | • 2 requests/second • 50-100/hour • 100-1000/day |
52 | | **OpenAPI JSON** - Raw spec at `/openapi.json` | • Import into other tools • Use for automated testing • Generate client code | |
53 |
54 | > Most endpoints are publicly accessible. Domain-specific endpoints may require authentication.
55 | """
56 |
57 | # Rate Limiting
58 | RATE_LIMIT_DEFAULT = "2 per second;50 per hour;100 per day"
59 | RATE_LIMIT_ANALYTICS = "5 per minute;100 per hour;500 per day"
60 | RATE_LIMIT_DOMAIN = "2 per second;10 per hour;50 per day"
61 |
62 | # Security
63 | MAX_EMAIL_LENGTH = 254
64 | MAX_DOMAIN_LENGTH = 253
65 | MAX_TOKEN_LENGTH = 100
66 |
67 | # Cache Settings
68 | CACHE_TTL = 3600 # 1 hour
69 | CACHE_MAX_SIZE = 1000
70 |
71 | # API Endpoints
72 | API_PREFIX = "/v1"
73 | API_DOCS_URL = "/docs"
74 | API_OPENAPI_URL = "/openapi.json"
75 |
--------------------------------------------------------------------------------
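A standalone illustration of how REDIS_URL above is assembled, using hard-coded placeholders (the real module reads everything from the environment at import time and raises KeyError when a required variable is missing):

# Mirrors the REDIS_URL logic in config/settings.py with placeholder values.
REDIS_HOST, REDIS_PORT, REDIS_DB = "localhost", 6379, 0
REDIS_PASSWORD = "s3cret"  # placeholder; leave empty/None for no auth

redis_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
if REDIS_PASSWORD:
    redis_url = f"redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"

print(redis_url)  # redis://:s3cret@localhost:6379/0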
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | datastore:
4 | image: google/cloud-sdk
5 | command: gcloud beta emulators datastore start --project=xposedornot --host-port datastore:8000 --no-store-on-disk
6 | ports:
7 | - 8000:8000
8 | healthcheck:
9 | test: [ "CMD", "curl", "-f", "http://datastore:8000" ]
10 | interval: 10s
11 | timeout: 5s
12 | retries: 5
13 |
14 | app:
15 | build: .
16 | volumes:
17 | - .:/app
18 | depends_on:
19 | datastore:
20 | condition: service_healthy
21 | links:
22 | - datastore
23 | ports:
24 | - "8080:8080"
25 | environment:
26 | - PORT=8080
27 | # environment variables for datastore emulator, not needed for production
28 | - DATASTORE_DATASET=xposedornot
29 | - DATASTORE_EMULATOR_HOST=datastore:8000
30 | - DATASTORE_EMULATOR_HOST_PATH=datastore:8000/datastore
31 | - DATASTORE_HOST=http://datastore:8000
32 | - DATASTORE_PROJECT_ID=xposedornot
33 |       # environment variables for security, can be left unmodified for local development
34 | - SECRET_APIKEY=your_secret_api_key
35 | - SECURITY_SALT=your_security_salt
36 | - WTF_CSRF_SECRET_KEY=your_wtf_csrf_secret_key
37 | - ENCRYPTION_KEY=7ba9LmVLqozrFTey5E1P9cRv4rtOKHu80JxOODXzYME= # generated in python with print(__import__('cryptography.fernet', fromlist=['Fernet']).Fernet.generate_key().decode('utf-8'))
38 | # environment variables for cloudflare setup, can be left unmodified for local development
39 | - AUTH_EMAIL=your_auth_email
40 | - AUTHKEY=your_auth_key
41 | - CF_MAGIC=your_cf_magic
42 |       - CF_UNBLOCK_MAGIC=your_cf_unblock_magic
43 | # environment variables for external services
44 |       - XMLAPI_KEY=your_xmlapi_key # can be acquired from https://www.whoisxmlapi.com/
45 |       - MJ_API_KEY=your_mailjet_api_key # can be acquired from https://app.mailjet.com/
46 | - MJ_API_SECRET=your_mailjet_api_secret
47 |
--------------------------------------------------------------------------------
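The ENCRYPTION_KEY comment above references a compressed one-liner; a cleaner equivalent for generating a local Fernet key (requires the cryptography package already listed in requirements.txt):

from cryptography.fernet import Fernet

# Prints a URL-safe base64 key suitable for the ENCRYPTION_KEY environment variable.
print(Fernet.generate_key().decode("utf-8"))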
/models/base.py:
--------------------------------------------------------------------------------
1 | """Base models and utilities for the application."""
2 |
3 | from typing import Optional
4 | from pydantic import BaseModel
5 |
6 |
7 | class BaseResponse(BaseModel):
8 | """Base response model with common fields."""
9 |
10 | status: str
11 | message: Optional[str] = None
12 |
13 |
14 | class ErrorResponse(BaseModel):
15 | """Standard error response model."""
16 |
17 | status: str = "error"
18 | message: str
19 |
20 |
21 | class BaseBreachInfo(BaseModel):
22 | """Base model for breach information."""
23 |
24 | breachid: str
25 | logo: Optional[str] = None
26 | description: Optional[str] = None
27 | count: int
28 |
29 |
30 | class BaseMetrics(BaseModel):
31 | """Base model for metrics information."""
32 |
33 | Breaches_Count: int
34 | Breaches_Records: int
35 | Pastes_Count: str
36 | Pastes_Records: int
37 |
38 |
39 | class BaseAnalytics(BaseModel):
40 | """Base model for analytics information."""
41 |
42 | total_breaches: int
43 | total_records: int
44 | first_breach: str
45 | last_breach: str
46 |
--------------------------------------------------------------------------------
/models/requests.py:
--------------------------------------------------------------------------------
1 | """Request models for the application."""
2 |
3 | from typing import Optional, Dict, Any
4 | from pydantic import BaseModel, Field, EmailStr
5 |
6 |
7 | class WebhookSetupRequest(BaseModel):
8 | """Request model for webhook setup.
9 |
10 | This model represents the data required to set up, verify, or delete a webhook
11 | for domain notifications. It includes fields for authentication, domain information,
12 | and webhook configuration.
13 | """
14 |
15 | token: str
16 | domain: str
17 | webhook: str = Field(alias="webhook_url")
18 | secret: str
19 | action: str
20 | verify_token: Optional[str] = None
21 |
22 | class Config: # pylint: disable=too-few-public-methods
23 | """Configuration class for WebhookSetupRequest.
24 |
25 | Defines validation settings and provides example data for API documentation.
26 | """
27 |
28 | validate_by_name = True
29 | json_schema_extra = {
30 | "example": {
31 | "token": "your_token",
32 | "domain": "example.com",
33 | "webhook_url": "https://example.com/webhook",
34 | "secret": "your_secret",
35 | "action": "setup",
36 | "verify_token": "optional_verify_token",
37 | }
38 | }
39 |
40 | def is_setup_action(self) -> bool:
41 | """Check if the request is for setting up a webhook."""
42 | return self.action == "setup"
43 |
44 | def is_verify_action(self) -> bool:
45 | """Check if the request is for verifying a webhook."""
46 | return self.action == "verify"
47 |
48 | def get_action_type(self) -> str:
49 | """Return the action type."""
50 | return self.action
51 |
52 |
53 | class ChannelSetupRequest(BaseModel):
54 | """Request model for Slack/Teams channel setup.
55 |
56 | This model represents the data required to set up, verify, or delete a Slack/Teams
57 | channel for domain notifications. It includes fields for authentication, domain
58 | information, and channel configuration.
59 | """
60 |
61 | token: str
62 | domain: str
63 | webhook: str
64 | action: str
65 | verify_token: Optional[str] = None
66 | tokens: Optional[Dict[str, Any]] = None
67 |
68 | class Config: # pylint: disable=too-few-public-methods
69 | """Configuration class for ChannelSetupRequest.
70 |
71 | Defines validation settings and provides example data for API documentation.
72 | """
73 |
74 | validate_by_name = True
75 | json_schema_extra = {
76 | "example": {
77 | "token": "your_token",
78 | "domain": "example.com",
79 | "webhook": "https://example.com/webhook",
80 | "action": "setup",
81 | "verify_token": "optional_verify_token",
82 | "tokens": {"key": "value"},
83 | }
84 | }
85 |
86 | def is_setup_action(self) -> bool:
87 | """Check if the request is for setting up a channel."""
88 | return self.action == "setup"
89 |
90 | def is_verify_action(self) -> bool:
91 | """Check if the request is for verifying a channel."""
92 | return self.action == "verify"
93 |
94 | def get_action_type(self) -> str:
95 | """Return the action type."""
96 | return self.action
97 |
98 |
99 | class EmailVerificationRequest(BaseModel):
100 | """Request model for email verification.
101 |
102 | This model represents the data required to verify an email address for a domain.
103 | It includes fields for the email address, domain, and verification token.
104 | """
105 |
106 | email: EmailStr
107 | domain: str
108 | token: str
109 |
110 | class Config: # pylint: disable=too-few-public-methods
111 | """Configuration class for EmailVerificationRequest.
112 |
113 | Defines validation settings and provides example data for API documentation.
114 | """
115 |
116 | validate_by_name = True
117 | json_schema_extra = {
118 | "example": {
119 | "email": "user@example.com",
120 | "domain": "example.com",
121 | "token": "verification_token",
122 | }
123 | }
124 |
125 | def get_domain_from_email(self) -> str:
126 | """Extract domain from email address."""
127 | return self.email.split("@")[1]
128 |
129 | def is_domain_match(self) -> bool:
130 | """Check if email domain matches the specified domain."""
131 | return self.get_domain_from_email() == self.domain
132 |
133 |
134 | class DomainVerificationRequest(BaseModel):
135 | """Request model for domain verification.
136 |
137 | This model represents the data required to verify domain ownership through various
138 | methods (DNS, HTML, or email). It includes fields for the domain, verification type,
139 | and verification token.
140 | """
141 |
142 | domain: str
143 | verification_type: str = Field(
144 | ..., description="Type of verification: dns, html, or email"
145 | )
146 | token: str
147 |
148 | class Config: # pylint: disable=too-few-public-methods
149 | """Configuration class for DomainVerificationRequest.
150 |
151 | Defines validation settings and provides example data for API documentation.
152 | """
153 |
154 | validate_by_name = True
155 | json_schema_extra = {
156 | "example": {
157 | "domain": "example.com",
158 | "verification_type": "dns",
159 | "token": "verification_token",
160 | }
161 | }
162 |
163 | def is_valid_verification_type(self) -> bool:
164 | """Check if the verification type is valid."""
165 | return self.verification_type in ["dns", "html", "email"]
166 |
167 | def is_dns_verification(self) -> bool:
168 | """Check if the verification type is DNS."""
169 | return self.verification_type == "dns"
170 |
171 |
172 | class ApiKeyRequest(BaseModel):
173 | """Request model for API key operations.
174 |
175 | This model represents the data required for API key operations, including
176 | authentication token and associated email address.
177 | """
178 |
179 | token: str
180 | email: EmailStr
181 |
182 | class Config:
183 | """Configuration class for ApiKeyRequest.
184 |
185 | Defines validation settings and provides example data for API documentation.
186 | """
187 |
188 | validate_by_name = True
189 | json_schema_extra = {
190 | "example": {"token": "your_token", "email": "user@example.com"}
191 | }
192 |
193 | def get_domain_from_email(self) -> str:
194 | """Extract domain from email address."""
195 | return self.email.split("@")[1]
196 |
197 | def is_valid_email(self) -> bool:
198 | """Check if the email address is valid."""
199 | return "@" in self.email and "." in self.email.split("@")[1]
200 |
--------------------------------------------------------------------------------
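A small sketch showing how WebhookSetupRequest accepts data through the "webhook_url" alias defined above; all values are placeholders:

from models.requests import WebhookSetupRequest

req = WebhookSetupRequest(
    token="your_token",
    domain="example.com",
    webhook_url="https://example.com/webhook",  # populates the aliased "webhook" field
    secret="your_secret",
    action="setup",
)
print(req.webhook, req.is_setup_action())  # https://example.com/webhook True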
/models/responses.py:
--------------------------------------------------------------------------------
1 | """Response models for the application."""
2 |
3 | # Standard library imports
4 | from datetime import datetime
5 | from typing import Optional, List, Dict, Any, Union
6 |
7 | # Third-party imports
8 | from pydantic import BaseModel, Field
9 |
10 | # Local imports
11 | from .base import BaseResponse, BaseBreachInfo, BaseMetrics
12 |
13 |
14 | class AlertMeResponse(BaseResponse):
15 | """Response model for alert-me endpoint."""
16 |
17 | pass
18 |
19 |
20 | class VerificationResponse(BaseModel):
21 | """Response model for verification endpoints."""
22 |
23 | html_content: str
24 |
25 |
26 | class EmailCheckResponse(BaseModel):
27 | """Response model for email check endpoint."""
28 |
29 | breaches: List[List[str]] = []
30 | email: str
31 |
32 |
33 | class MetricsResponse(BaseMetrics):
34 | """Response model for metrics endpoint."""
35 |
36 | pass
37 |
38 |
39 | class DomainVerificationResponse(BaseModel):
40 | """Response model for domain verification."""
41 |
42 | domainVerification: Optional[str | List[str]]
43 |
44 |
45 | class DomainExposureResponse(BaseModel):
46 | """Response model for domain exposure."""
47 |
48 | sendDomains: Dict
49 | SearchStatus: str
50 |
51 |
52 | class BreachSummary(BaseModel):
53 | """Model for breach summary."""
54 |
55 | site: str = ""
56 |
57 |
58 | class PasteSummary(BaseModel):
59 | """Model for paste summary."""
60 |
61 | cnt: int = 0
62 | domain: str = ""
63 | tmpstmp: str = ""
64 |
65 |
66 | class BreachEntity(BaseModel):
67 | """Model for breach entity details."""
68 |
69 | breachID: str
70 | breachedDate: str
71 | domain: str
72 | industry: str
73 | logo: str
74 | passwordRisk: str
75 | searchable: Optional[bool]
76 | sensitive: Optional[bool]
77 | verified: Optional[bool]
78 | exposedData: List[str]
79 | exposedRecords: int
80 | exposureDescription: str
81 | referenceURL: str = ""
82 |
83 |
84 | class BreachesResponse(BaseResponse):
85 | """Response model for breaches endpoint."""
86 |
87 | exposedBreaches: Optional[List[BreachEntity]] = None
88 |
89 |
90 | class BreachAnalyticsResponse(BaseModel):
91 | """Response model for breach analytics endpoint."""
92 |
93 | BreachMetrics: Optional[Dict] = None
94 | BreachesSummary: BreachSummary
95 | ExposedBreaches: Optional[Dict] = None
96 | ExposedPastes: Optional[Dict] = None
97 | PasteMetrics: Optional[Dict] = None
98 | PastesSummary: PasteSummary
99 |
100 | class Config:
101 | """Pydantic model configuration."""
102 |
103 | validate_by_name = True
104 | validate_assignment = True
105 |
106 |
107 | class BreachAnalyticsV2Response(BaseModel):
108 | """Response model for breach analytics v2 endpoint."""
109 |
110 | AI_Summary: str
111 |
112 |
113 | class EmptyBreachResponse(BaseModel):
114 | """Response model for empty breach results."""
115 |
116 | BreachesSummary: BreachSummary
117 | PastesSummary: PasteSummary
118 |
119 |
120 | class DomainBreachDetail(BaseModel):
121 | """Model for individual domain breach details."""
122 |
123 | domain: str = Field(..., description="Domain that was breached")
124 | breach_pastes: int = Field(..., description="Number of paste records found")
125 | breach_emails: int = Field(..., description="Number of unique emails found")
126 | breach_total: int = Field(..., description="Total number of breach records")
127 | breach_count: int = Field(..., description="Number of unique breaches")
128 | breach_last_seen: Optional[str] = Field(
129 | None, description="Date when the breach was last seen"
130 | )
131 |
132 |
133 | class DomainBreachSummaryResponse(BaseModel):
134 | """Response model for domain breach summary endpoint."""
135 |
136 | sendDomains: Dict[str, List[DomainBreachDetail]] = Field(
137 | ..., description="Dictionary containing list of breach details"
138 | )
139 | SearchStatus: str = Field(..., description="Status of the search (Success/Error)")
140 |
141 |
142 | class DomainBreachDetails(BaseModel):
143 | """Model for domain breach details."""
144 |
145 | domain: str
146 | breach_pastes: int
147 | breach_emails: int
148 | breach_total: int
149 | breach_count: int
150 | breach_last_seen: Optional[str] = None
151 |
152 |
153 | class WebhookConfigResponse(BaseResponse):
154 | """Response model for webhook configuration."""
155 |
156 | data: Optional[Dict[str, str]] = None
157 |
158 |
159 | class ChannelConfigResponse(BaseResponse):
160 | """Response model for channel configuration."""
161 |
162 | data: Optional[Dict[str, str]] = None
163 |
164 |
165 | class WebhookSetupResponse(BaseResponse):
166 | """Response model for webhook setup."""
167 |
168 | verify_token: Optional[str] = None
169 |
170 |
171 | class ChannelSetupResponse(BaseResponse):
172 | """Response model for channel setup."""
173 |
174 | pass
175 |
176 |
177 | class DetailedMetricsResponse(BaseMetrics):
178 | """Response model for detailed metrics."""
179 |
180 | Yearly_Breaches_Count: Dict[int, int]
181 | Industry_Breaches_Count: Dict[str, int]
182 | Top_Breaches: List[BaseBreachInfo]
183 | Recent_Breaches: List[BaseBreachInfo]
184 |
185 |
186 | class PulseNewsItem(BaseModel):
187 | """Model for individual news item."""
188 |
189 | title: str
190 | date: str
191 | summary: str
192 | url: str
193 |
194 |
195 | class PulseNewsResponse(BaseResponse):
196 | """Response model for news feed."""
197 |
198 | data: List[PulseNewsItem]
199 |
200 |
201 | class ApiKeyResponse(BaseResponse):
202 | """Response model for API key operations."""
203 |
204 | api_key: Optional[str] = None
205 |
206 |
207 | class BreachDetailResponse(BaseModel):
208 | """Model for detailed breach information."""
209 |
210 | breachID: str
211 | breachedDate: str
212 | domain: str
213 | industry: str
214 | logo: str
215 | passwordRisk: str
216 | searchable: bool
217 | sensitive: bool
218 | verified: bool
219 | exposedData: List[str]
220 | exposedRecords: int
221 | exposureDescription: str
222 | referenceURL: str = ""
223 |
224 |
225 | class BreachListResponse(BaseModel):
226 | """Response model for /v1/breaches endpoint."""
227 |
228 | status: str
229 | message: Optional[str] = None
230 | exposedBreaches: Optional[List[BreachDetailResponse]] = None
231 |
232 |
233 | class EmailBreachResponse(BaseModel):
234 | """Response model for email breach check endpoint."""
235 |
236 | breaches: List[List[str]] = []
237 | email: str
238 |
239 |
240 | class EmailBreachErrorResponse(BaseModel):
241 | """Response model for email breach check error."""
242 |
243 | Error: str
244 | email: Optional[str] = None
245 |
246 |
247 | class DomainAlertResponse(BaseModel):
248 | """Response model for domain alert endpoint."""
249 |
250 | Success: str = "Domain Alert Successful"
251 |
252 |
253 | class DomainAlertErrorResponse(BaseModel):
254 | """Error response model for domain alert endpoint."""
255 |
256 | Error: str
257 | email: Optional[str] = None
258 |
259 |
260 | class DomainVerifyResponse(BaseModel):
261 | """Response model for domain verification."""
262 |
263 | status: str = "success"
264 | dashboard_link: str
265 |
266 |
267 | class DomainVerifyErrorResponse(BaseModel):
268 | """Error response model for domain verification."""
269 |
270 | status: str = "error"
271 | message: str = "Invalid or expired verification token"
272 |
273 |
274 | class BreachDetails(BaseModel):
275 | """Model for breach details."""
276 |
277 | email: str
278 | domain: str
279 | breach: str
280 |
281 |
282 | class DetailedBreachInfo(BaseModel):
283 | """Model for detailed breach information."""
284 |
285 | breached_date: Optional[str] = None
286 | logo: str = ""
287 | password_risk: str = ""
288 | searchable: str = "No"
289 | xposed_data: str = ""
290 | xposed_records: Union[int, str] = 0
291 | xposure_desc: str = ""
292 |
293 | class Config:
294 | """Pydantic model configuration."""
295 |
296 | json_encoders = {
297 | datetime: lambda v: v.strftime("%a, %d %b %Y %H:%M:%S GMT") if v else None
298 | }
299 |
300 |
301 | class DomainBreachesResponse(BaseModel):
302 | """Response model for domain breaches endpoint."""
303 |
304 | Yearly_Metrics: Dict[str, int]
305 | Domain_Summary: Dict[str, int]
306 | Breach_Summary: Dict[str, int]
307 | Breaches_Details: List[BreachDetails]
308 | Top10_Breaches: Dict[str, int]
309 | Detailed_Breach_Info: Dict[str, DetailedBreachInfo]
310 | Verified_Domains: List[str]
311 | Seniority_Summary: Dict[str, int]
312 | Yearly_Breach_Hierarchy: Dict[str, Any]
313 |
314 |
315 | class DomainBreachesErrorResponse(BaseModel):
316 | """Error response model for domain breaches endpoint."""
317 |
318 | Error: str
319 |
320 |
321 | class ShieldActivationResponse(BaseModel):
322 | """Response model for shield activation endpoint."""
323 |
324 | Success: str
325 |
326 |
327 | class ShieldActivationErrorResponse(BaseModel):
328 | """Error response model for shield activation endpoint."""
329 |
330 | Error: str
331 |
332 |
333 | class ShieldVerificationResponse(BaseModel):
334 | """Response model for shield verification endpoint."""
335 |
336 | html_content: str = ""
337 | status: str = "success"
338 |
339 |
340 | class ShieldVerificationErrorResponse(BaseModel):
341 | """Error response model for shield verification endpoint."""
342 |
343 | html_content: str = ""
344 | status: str = "error"
345 |
346 |
347 | class AlertResponse(BaseModel):
348 | """Response model for alert operations."""
349 |
350 | status: str
351 | message: str
352 |
353 |
354 | class VerificationResponse(BaseModel):
355 | """Response model for verification operations."""
356 |
357 | status: str
358 | sensitive_breach_details: Optional[str] = None
359 | BreachMetrics: Optional[Dict] = None
360 |
361 |
362 | class BreachHierarchyChild(BaseModel):
363 | """Model for breach hierarchy child items."""
364 |
365 | description: str
366 | children: List["BreachHierarchyChild"] = []
367 | tooltip: Optional[str] = None
368 |
369 | def dict(self, *args, **kwargs):
370 | """Convert the model to a dictionary with custom handling for recursive structures."""
371 | d = super().dict(*args, **kwargs)
372 |         # Only include non-null values (this also drops a null tooltip)
373 |         return {
374 |             k: v
375 |             for k, v in d.items()
376 |             if v is not None
377 |         }
378 |
379 |
380 | class BreachHierarchyResponse(BaseModel):
381 | """Model for breach hierarchy response."""
382 |
383 | description: str
384 | children: List[BreachHierarchyChild] = []
385 |
386 | def dict(self, *args, **kwargs):
387 | """Convert the model to a dictionary with custom handling for recursive structures."""
388 | d = super().dict(*args, **kwargs)
389 | return {k: v for k, v in d.items() if v is not None}
390 |
391 |
392 | # Update forward references for nested models
393 | BreachHierarchyChild.update_forward_refs()
394 |
395 |
396 | class UnsubscribeResponse(BaseModel):
397 | """Response model for unsubscribe endpoint."""
398 |
399 | status: str
400 | message: str
401 |
402 |
403 | class UnsubscribeVerifyResponse(BaseModel):
404 | """Response model for unsubscribe verification endpoint."""
405 |
406 | html_content: str
407 | status: str = "success"
408 |
409 |
410 | class UnsubscribeVerifyErrorResponse(BaseModel):
411 | """Error response model for unsubscribe verification endpoint."""
412 |
413 | html_content: str
414 | status: str = "error"
415 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp
2 | blinker
3 | bson
4 | cryptography
5 | dnspython>=1.16.0
6 | dnstwist
7 | domcheck
8 | email-validator
9 | email_split
10 | fastapi>=0.68.0
11 | feedgen
12 | feedgenerator
13 | gcloud
14 | geoip2
15 | google-cloud-datastore
16 | google-cloud-pubsub
17 | gunicorn
18 | httpx
19 | idna>=2.8
20 | ipaddress
21 | itsdangerous
22 | jinja2
23 | lxml
24 | mailjet_rest
25 | openai
26 | ppdeep>=20200505
27 | pydantic>=1.8.0
28 | python-dateutil
29 | python-jose
30 | python-multipart>=0.0.5
31 | python-whois
32 | pyyaml
33 | PySocks
34 | redis>=4.5.0
35 | requests
36 | simplejson
37 | slowapi
38 | starlette>=0.14.0
39 | tld>=0.9.1
40 | typing-extensions>=4.0.0
41 | ua-parser
42 | user-agents
43 | urllib3
44 | uvicorn>=0.15.0
45 | validate-email
46 | validators
47 | whois
48 | # py-tlsh
49 | # py-tlsh>=4.5.0
50 | # ssdeep
51 |
52 |
--------------------------------------------------------------------------------
/services/breach.py:
--------------------------------------------------------------------------------
1 | """Breach-related service functions."""
2 |
3 | # Standard library imports
4 | from typing import Dict, List, Any
5 |
6 | # Third-party imports
7 | from google.cloud import datastore
8 | from google.api_core import exceptions as api_exceptions
9 | from fastapi import HTTPException
10 |
11 | # Initialize datastore client
12 | ds_client = datastore.Client()
13 |
14 |
15 | async def get_combined_breach_data(email: str) -> Dict[str, Any]:
16 | """Get combined breach data for a given email."""
17 | try:
18 | client = datastore.Client()
19 | key = client.key("xon", email)
20 | entity = client.get(key)
21 |
22 | if not entity:
23 | return {}
24 |
25 | # Get breach data
26 | breach_data = {
27 | "site": entity.get("site", ""),
28 | "breach_date": entity.get("breach_date", ""),
29 | "xposed_data": entity.get("xposed_data", ""),
30 | "xposed_records": entity.get("xposed_records", 0),
31 | "xposure_desc": entity.get("xposure_desc", ""),
32 | "password_risk": entity.get("password_risk", ""),
33 | "searchable": entity.get("searchable", ""),
34 | "sensitive": entity.get("sensitive", ""),
35 | "verified": entity.get("verified", ""),
36 | "references": entity.get("references", ""),
37 | "domain": entity.get("domain", ""),
38 | "industry": entity.get("industry", ""),
39 | "logo": entity.get("logo", ""),
40 | }
41 |
42 | # Get sensitive data from xon_sensitive table
43 | sensitive_key = client.key("xon_sensitive", email)
44 | sensitive_entity = client.get(sensitive_key)
45 |
46 | if sensitive_entity:
47 | sensitive_data = {
48 | "site": sensitive_entity.get("site", ""),
49 | "breach_date": sensitive_entity.get("breach_date", ""),
50 | "xposed_data": sensitive_entity.get("xposed_data", ""),
51 | "xposed_records": sensitive_entity.get("xposed_records", 0),
52 | "xposure_desc": sensitive_entity.get("xposure_desc", ""),
53 | "password_risk": sensitive_entity.get("password_risk", ""),
54 | "searchable": sensitive_entity.get("searchable", ""),
55 | "sensitive": sensitive_entity.get("sensitive", ""),
56 | "verified": sensitive_entity.get("verified", ""),
57 | "references": sensitive_entity.get("references", ""),
58 | "domain": sensitive_entity.get("domain", ""),
59 | "industry": sensitive_entity.get("industry", ""),
60 | "logo": sensitive_entity.get("logo", ""),
61 | }
62 | breach_data.update(sensitive_data)
63 |
64 | return breach_data
65 |
66 | except api_exceptions.NotFound as e:
67 | raise HTTPException(status_code=404, detail="Entity not found") from e
68 | except api_exceptions.GoogleAPIError as e:
69 | raise HTTPException(status_code=500, detail="Datastore error") from e
70 | except (ValueError, TypeError) as e:
71 | raise HTTPException(status_code=400, detail="Invalid input") from e
72 | except Exception as e:
73 | raise HTTPException(status_code=500, detail="Internal server error") from e
74 |
75 |
76 | async def get_exposure(user_email: str) -> Dict[str, Any]:
77 | """Returns breach data for a given email."""
78 |
79 | try:
80 | datastore_client = datastore.Client()
81 | search_key = datastore_client.key("xon", user_email)
82 |
83 | user_data = datastore_client.get(search_key)
84 | if user_data is not None:
85 | return dict(user_data)
86 |
87 | return {}
88 | except api_exceptions.GoogleAPIError:
89 | return {}
90 | except (ValueError, TypeError):
91 | return {}
92 | except Exception as e:
93 | raise
94 |
95 |
96 | async def get_sensitive_exposure(user_email: str) -> Dict[str, Any]:
97 | """Get sensitive exposure data for a user."""
98 | try:
99 | datastore_client = datastore.Client()
100 | search_key = datastore_client.key("xon_sensitive", user_email)
101 | user_data = datastore_client.get(search_key)
102 | if user_data is not None:
103 | return dict(user_data)
104 |
105 | return {}
106 | except api_exceptions.GoogleAPIError:
107 | return {}
108 | except (ValueError, TypeError):
109 | return {}
110 | except Exception as e:
111 | raise
112 |
113 |
114 | def get_breaches(breaches: str) -> Dict[str, List[Dict[str, Any]]]:
115 | """Returns the exposed breaches with details including records, domain, industry, and other metadata."""
116 | breaches_output = {"breaches_details": []}
117 | breach_list = breaches.split(";")
118 |
119 | for breach in breach_list:
120 | try:
121 | key = ds_client.key("xon_breaches", breach)
122 | query_result = ds_client.get(key)
123 |
124 | if query_result is not None:
125 | xposed_records = query_result.get("xposed_records", 0)
126 | breach_details = {
127 | "breach": breach,
128 | "xposed_records": xposed_records,
129 | "details": query_result.get("xposure_desc", ""),
130 | "domain": query_result.get("domain", ""),
131 | "industry": query_result.get("industry", ""),
132 | "logo": query_result.get("logo", ""),
133 | "password_risk": query_result.get("password_risk", ""),
134 | "xposed_data": query_result.get("xposed_data", ""),
135 | "searchable": query_result.get("searchable", ""),
136 | "verified": query_result.get("verified", ""),
137 | }
138 | breaches_output["breaches_details"].append(breach_details)
139 | else:
140 | raise HTTPException(status_code=404, detail="Breach not found")
141 |
142 | except api_exceptions.NotFound as e:
143 | raise HTTPException(status_code=404, detail="Breach not found") from e
144 | except api_exceptions.GoogleAPIError as e:
145 | raise HTTPException(status_code=500, detail="Datastore error") from e
146 |
147 | return breaches_output
148 |
--------------------------------------------------------------------------------
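An illustrative call showing the ";"-separated breach-ID string expected by get_breaches() above; the breach IDs are placeholders and a reachable Google Cloud Datastore (or the emulator from docker-compose.yml) is assumed:

from services.breach import get_breaches

# Each ID is looked up in the "xon_breaches" kind; unknown IDs raise HTTPException(404).
details = get_breaches("Breach-A;Breach-B")
for item in details["breaches_details"]:
    print(item["breach"], item["xposed_records"], item["domain"])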
/services/cloudflare.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 |
4 | """XposedOrNot Cloudflare API module."""
5 |
6 | import json
7 | import datetime
8 | import time
9 | import os
10 | import hashlib
11 | from typing import Optional, Dict, Any
12 | import requests
13 | import httpx
14 | import dateutil.parser as dp
15 | from google.cloud import datastore
16 | from fastapi import HTTPException
17 | from pydantic import BaseModel, Field
18 |
19 | # Environment variables
20 | AUTH_EMAIL = os.environ["AUTH_EMAIL"]
21 | AUTH_KEY = os.environ["AUTHKEY"]
22 |
23 |
24 | # Response Models
25 | class CloudflareResponse(BaseModel):
26 | """Base model for Cloudflare responses."""
27 |
28 | status: str
29 | message: str
30 | details: Optional[Dict[str, Any]] = None
31 |
32 | class Config:
33 | """Configuration for CloudflareResponse model."""
34 |
35 | json_schema_extra = {
36 | "example": {
37 | "status": "success",
38 | "message": "Operation completed successfully",
39 | "details": {"rule_id": "123456"},
40 | }
41 | }
42 |
43 |
44 | class CloudflareError(BaseModel):
45 | """Model for Cloudflare error responses."""
46 |
47 | status: str = "error"
48 | message: str
49 | error_code: Optional[str] = None
50 |
51 | class Config:
52 | """Configuration for CloudflareError model."""
53 |
54 | json_schema_extra = {
55 | "example": {
56 | "status": "error",
57 | "message": "Failed to block IP",
58 | "error_code": "CF_BLOCK_FAILED",
59 | }
60 | }
61 |
62 |
63 | async def update_cf_trans(cf_response: str) -> None:
64 |     """Store the Cloudflare block response in Datastore for later unblocking."""
65 | try:
66 |         key = hashlib.sha256(cf_response.encode()).hexdigest()
67 | datastore_client = datastore.Client()
68 | task_cnt = datastore.Entity(
69 | datastore_client.key("xon_cf", key),
70 |             exclude_from_indexes=["insert_timestamp", "cf_data"],
71 | )
72 | task_cnt.update(
73 | {
74 | "insert_timestamp": datetime.datetime.now(),
75 | "release_timestamp": "",
76 | "cf_data": ip_address,
77 | }
78 | )
79 | datastore_client.put(task_cnt)
80 | except Exception as e:
81 | raise HTTPException(
82 | status_code=500,
83 | detail={
84 | "status": "error",
85 | "message": f"Failed to update transaction: {str(e)}",
86 | },
87 | ) from e
88 |
89 |
90 | async def get_isp_from_ip(ip_address: str) -> Optional[str]:
91 | """Fetch the ISP for a given IP address using the ipinfo.io API."""
92 | try:
93 | url = f"https://ipinfo.io/{ip_address}/org"
94 | async with httpx.AsyncClient() as client:
95 | response = await client.get(url, timeout=10)
96 | if response.status_code == 200:
97 | return response.text.strip()
98 | raise HTTPException(
99 | status_code=response.status_code,
100 | detail={
101 | "status": "error",
102 | "message": f"Failed to fetch ISP info: {response.text}",
103 | },
104 | )
105 | except httpx.HTTPError as e:
106 | return None
107 | except Exception as e:
108 | return None
109 |
110 |
111 | async def block_hour(ip_address: str) -> CloudflareResponse:
112 | """Block an IP address for one hour using the Cloudflare API."""
113 | try:
114 | isp_info = await get_isp_from_ip(ip_address)
115 | if isp_info and "Cloudflare" in isp_info:
116 | return CloudflareResponse(
117 | status="skipped",
118 | message="IP belongs to Cloudflare, skipping block",
119 | details={"ip": ip_address},
120 | )
121 |
122 | url = "https://api.cloudflare.com/client/v4/user/firewall/access_rules/rules"
123 | headers = {
124 | "X-Auth-Email": AUTH_EMAIL,
125 | "X-Auth-Key": AUTH_KEY,
126 | "Content-Type": "application/json",
127 | }
128 | payload = {
129 | "mode": "challenge",
130 | "configuration": {"target": "ip", "value": ip_address},
131 | "notes": "Hour block enforced",
132 | }
133 |
134 | async with httpx.AsyncClient() as client:
135 | response = await client.post(url, headers=headers, json=payload, timeout=20)
136 |
137 | if response.status_code in [200, 201]:
138 |                 await update_cf_trans(response.text)
139 | return CloudflareResponse(
140 | status="success",
141 | message=f"Successfully blocked IP {ip_address} for one hour",
142 | details=response.json(),
143 | )
144 |
145 | raise HTTPException(
146 | status_code=response.status_code,
147 | detail={
148 | "status": "error",
149 | "message": f"Failed to block IP: {response.text}",
150 | },
151 | )
152 |
153 | except httpx.HTTPError as e:
154 | raise HTTPException(
155 | status_code=500,
156 | detail={"status": "error", "message": f"HTTP error blocking IP: {str(e)}"},
157 | ) from e
158 | except Exception as e:
159 | raise HTTPException(
160 | status_code=500,
161 | detail={"status": "error", "message": f"Error blocking IP: {str(e)}"},
162 | ) from e
163 |
164 |
165 | async def block_day(ip_address: str) -> CloudflareResponse:
166 | """Block an IP address for one day using the Cloudflare API."""
167 | try:
168 | isp_info = await get_isp_from_ip(ip_address)
169 | if isp_info and "Cloudflare" in isp_info:
170 | return CloudflareResponse(
171 | status="skipped",
172 | message="IP belongs to Cloudflare, skipping block",
173 | details={"ip": ip_address},
174 | )
175 |
176 | url = "https://api.cloudflare.com/client/v4/user/firewall/access_rules/rules"
177 | headers = {
178 | "X-Auth-Email": AUTH_EMAIL,
179 | "X-Auth-Key": AUTH_KEY,
180 | "Content-Type": "application/json",
181 | }
182 | payload = {
183 | "mode": "block",
184 | "configuration": {"target": "ip", "value": ip_address},
185 | "notes": "Day block enforced",
186 | }
187 |
188 | async with httpx.AsyncClient() as client:
189 | response = await client.post(url, headers=headers, json=payload, timeout=20)
190 |
191 | if response.status_code in [200, 201]:
192 |                 await update_cf_trans(response.text)
193 | return CloudflareResponse(
194 | status="success",
195 | message=f"Successfully blocked IP {ip_address} for one day",
196 | details=response.json(),
197 | )
198 |
199 | raise HTTPException(
200 | status_code=response.status_code,
201 | detail={
202 | "status": "error",
203 | "message": f"Failed to block IP: {response.text}",
204 | },
205 | )
206 |
207 | except httpx.HTTPError as e:
208 | raise HTTPException(
209 | status_code=500,
210 | detail={"status": "error", "message": f"HTTP error blocking IP: {str(e)}"},
211 | ) from e
212 | except Exception as e:
213 | raise HTTPException(
214 | status_code=500,
215 | detail={"status": "error", "message": f"Error blocking IP: {str(e)}"},
216 | ) from e
217 |
218 |
219 | async def unblock() -> CloudflareResponse:
220 | """Unblocks IP addresses that have been blocked for over an hour."""
221 | try:
222 | base_url = (
223 | "https://api.cloudflare.com/client/v4/user/firewall/access_rules/rules/"
224 | )
225 | headers = {
226 | "X-Auth-Email": AUTH_EMAIL,
227 | "X-Auth-Key": AUTH_KEY,
228 | "Content-Type": "application/json",
229 | }
230 |
231 | datastore_client = datastore.Client()
232 | query = datastore_client.query(kind="xon_cf")
233 | query.add_filter("release_timestamp", "=", "")
234 |
235 | unblocked_count = 0
236 | async with httpx.AsyncClient() as client:
237 | for entity in query.fetch():
238 | config = json.loads(entity["cf_data"])
239 | firewall_rule_id = config["result"]["id"]
240 | created = config["result"]["created_on"]
241 | parsed_created = dp.parse(created)
242 |                 created_time_in_seconds = parsed_created.timestamp()
243 | 
244 |                 if time.time() - created_time_in_seconds > 3600:
245 | url = base_url + firewall_rule_id
246 | response = await client.delete(url, headers=headers, timeout=20)
247 |
248 | if response.status_code != 200:
249 | continue
250 |
251 | entity.update(
252 | {"release_timestamp": datetime.datetime.utcnow().isoformat()}
253 | )
254 | datastore_client.put(entity)
255 | unblocked_count += 1
256 |
257 | return CloudflareResponse(
258 | status="success",
259 | message="Unblock operation completed",
260 | details={"unblocked_count": unblocked_count},
261 | )
262 |
263 | except httpx.HTTPError as e:
264 | raise HTTPException(
265 | status_code=500,
266 | detail={
267 | "status": "error",
268 | "message": f"HTTP error during unblock operation: {str(e)}",
269 | },
270 | ) from e
271 | except Exception as e:
272 | raise HTTPException(
273 | status_code=500,
274 | detail={
275 | "status": "error",
276 | "message": f"Error during unblock operation: {str(e)}",
277 | },
278 | ) from e
279 |
--------------------------------------------------------------------------------
/services/globe.py:
--------------------------------------------------------------------------------
1 | """Globe visualization service.
2 |
3 | This module handles the collection and publishing of IP-based location data
4 | for the globe visualization feature. It captures client IP addresses,
5 | retrieves geolocation information, and publishes the data to a PubSub topic.
6 | """
7 |
8 | import json
9 | import time
10 | import os
11 | import hashlib
12 | from typing import Dict, Any, Optional
13 | import httpx
14 | from google.cloud import pubsub_v1
15 |
16 | # Initialize PubSub constants
17 | TOPIC_ID = os.environ.get("TOPIC_ID")
18 | PROJECT_ID = os.environ.get("PROJECT_ID")
19 |
20 | # Storage for recent requests to prevent duplicates
21 | recent_requests = {}
22 |
23 | # Initialize Google Cloud Pub/Sub client
24 | publisher = pubsub_v1.PublisherClient()
25 | topic_path = publisher.topic_path(PROJECT_ID, TOPIC_ID)
26 |
27 |
28 | async def get_geolocation(ip: str) -> Optional[Dict[str, Any]]:
29 | """Fetch city, latitude, and longitude for the given IP."""
30 | geolocation_api_url = f"http://ip-api.com/json/{ip}"
31 | try:
32 | async with httpx.AsyncClient() as client:
33 | response = await client.get(geolocation_api_url, timeout=10.0)
34 | data = response.json()
35 | if data["status"] == "success":
36 | return {
37 | "city": data.get("city", "Unknown"),
38 | "lat": data.get("lat", 0.0),
39 | "lon": data.get("lon", 0.0),
40 | }
41 | return None
42 | except httpx.HTTPError:
43 | return None
44 | except Exception:
45 | return None
46 |
47 |
48 | def generate_request_hash(data: Dict[str, Any]) -> str:
49 | """Generate a hash for deduplication of requests."""
50 | data_str = json.dumps(data, sort_keys=True)
51 | return hashlib.md5(data_str.encode()).hexdigest()
52 |
53 |
54 | async def publish_to_pubsub(data: Dict[str, Any]) -> None:
55 | """Publish to Google Cloud Pub/Sub with hash-based deduplication."""
56 | try:
57 | # Check if PubSub is configured
58 | if not TOPIC_ID or not PROJECT_ID:
59 | return
60 |
61 | request_hash = generate_request_hash(data)
62 | current_time = time.time()
63 |
64 | # Clean up old entries from recent_requests
65 | for key in list(recent_requests.keys()):
66 | if current_time - recent_requests[key] > 60: # 1 minute expiry
67 | del recent_requests[key]
68 |
69 | # Check if this request hash was already published
70 | if request_hash in recent_requests:
71 | return
72 |
73 | # Publish the message; publisher.publish() is non-blocking and returns a future
74 | message = json.dumps(data).encode("utf-8")
75 | publisher.publish(topic_path, message)
76 |
77 | # Store request hash with timestamp
78 | recent_requests[request_hash] = current_time
79 |
80 | except Exception:
81 | pass
82 |
83 |
84 | async def process_request_for_globe(client_ip: str) -> None:
85 | """Process a request for the globe visualization feature."""
86 | try:
87 | if not client_ip:
88 | return
89 |
90 | geo_data = await get_geolocation(client_ip)
91 | if not geo_data:
92 | return
93 |
94 | pubsub_data = {
95 | "ip": client_ip,
96 | "city": geo_data["city"],
97 | "lat": geo_data["lat"],
98 | "lon": geo_data["lon"],
99 | }
100 |
101 | await publish_to_pubsub(pubsub_data)
102 |
103 | except Exception:
104 | pass
105 |
--------------------------------------------------------------------------------
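globe.py exposes process_request_for_globe() as a fire-and-forget helper that swallows its own errors. A sketch of wiring it into a FastAPI route via BackgroundTasks; the route path and app object here are assumptions, not part of this repository:

    from fastapi import BackgroundTasks, FastAPI, Request

    from services.globe import process_request_for_globe
    from utils.helpers import get_client_ip

    app = FastAPI()


    @app.get("/v1/globe-demo")  # hypothetical route
    async def globe_demo(request: Request, background_tasks: BackgroundTasks):
        # Geolocation lookup and Pub/Sub publish happen after the response is sent.
        background_tasks.add_task(process_request_for_globe, get_client_ip(request))
        return {"status": "ok"}
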
/services/messaging.py:
--------------------------------------------------------------------------------
1 | """Common messaging service functionality."""
2 |
3 | from typing import Dict, Optional
4 | from fastapi import HTTPException
5 | from google.cloud import datastore
6 | from models.requests import ChannelSetupRequest
7 |
8 |
9 | async def setup_messaging_channel(
10 | channel_data: ChannelSetupRequest, platform: str
11 | ) -> bool:
12 | """
13 | Set up a messaging channel for a given platform (Slack or Teams).
14 |
15 | Args:
16 | channel_data: Channel setup data including domain, webhook, and tokens
17 | platform: The messaging platform ('slack' or 'teams')
18 |
19 | Returns:
20 | bool: True if setup was successful
21 | """
22 | try:
23 | datastore_client = datastore.Client()
24 | channel_key = datastore_client.key(
25 | f"xon_{platform}_channel", f"{channel_data.domain}"
26 | )
27 | channel_entity = datastore.Entity(key=channel_key)
28 | channel_entity.update(
29 | {
30 | "domain": channel_data.domain,
31 | "webhook_url": channel_data.webhook,
32 | "tokens": channel_data.tokens or {},
33 | "verified": False,
34 | "active": False,
35 | }
36 | )
37 |
38 | datastore_client.put(channel_entity)
39 | return True
40 |
41 | except Exception as e:
42 | raise HTTPException(
43 | status_code=500, detail=f"Error setting up {platform} channel: {str(e)}"
44 | ) from e
45 |
46 |
47 | async def verify_messaging_channel(
48 | channel_data: ChannelSetupRequest, platform: str
49 | ) -> bool:
50 | """
51 | Verify a messaging channel for a given platform.
52 |
53 | Args:
54 | channel_data: Channel verification data
55 | platform: The messaging platform ('slack' or 'teams')
56 |
57 | Returns:
58 | bool: True if verification was successful
59 | """
60 | try:
61 | datastore_client = datastore.Client()
62 | channel_key = datastore_client.key(
63 | f"xon_{platform}_channel", f"{channel_data.domain}"
64 | )
65 | channel_entity = datastore_client.get(channel_key)
66 |
67 | if not channel_entity:
68 | detail = f"{platform.capitalize()} channel not found for domain {channel_data.domain}"
69 | raise HTTPException(status_code=404, detail=detail)
70 |
71 | channel_entity.update(
72 | {
73 | "verified": True,
74 | "active": True,
75 | }
76 | )
77 |
78 | datastore_client.put(channel_entity)
79 | return True
80 |
81 | except HTTPException:
82 | raise
83 | except Exception as e:
84 | raise HTTPException(
85 | status_code=500, detail=f"Error verifying {platform} channel: {str(e)}"
86 | ) from e
87 |
88 |
89 | async def get_channel_config(domain: str, token: str, platform: str) -> Optional[Dict]:
90 | """
91 | Get channel configuration for a given platform.
92 |
93 | Args:
94 | domain: The domain to get configuration for
95 | token: Authentication token
96 | platform: The messaging platform ('slack' or 'teams')
97 |
98 | Returns:
99 | Optional[Dict]: Channel configuration if found
100 | """
101 | try:
102 | datastore_client = datastore.Client()
103 | channel_key = datastore_client.key(f"xon_{platform}_channel", domain)
104 | channel_entity = datastore_client.get(channel_key)
105 |
106 | if not channel_entity:
107 | return None
108 |
109 | if not channel_entity.get("tokens", {}).get(token):
110 | return None
111 |
112 | return dict(channel_entity)
113 |
114 | except Exception as e:
115 | raise HTTPException(
116 | status_code=500,
117 | detail=f"Error fetching {platform} channel config: {str(e)}",
118 | ) from e
119 |
120 |
121 | async def delete_messaging_channel(
122 | channel_data: ChannelSetupRequest, platform: str
123 | ) -> bool:
124 | """
125 | Delete a messaging channel for a given platform.
126 |
127 | Args:
128 | channel_data: Channel data for deletion
129 | platform: The messaging platform ('slack' or 'teams')
130 |
131 | Returns:
132 | bool: True if deletion was successful
133 | """
134 | try:
135 | datastore_client = datastore.Client()
136 | channel_key = datastore_client.key(
137 | f"xon_{platform}_channel", f"{channel_data.domain}"
138 | )
139 | datastore_client.delete(channel_key)
140 | return True
141 |
142 | except Exception as e:
143 | raise HTTPException(
144 | status_code=500, detail=f"Error deleting {platform} channel: {str(e)}"
145 | ) from e
146 |
--------------------------------------------------------------------------------
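The Slack and Teams services below are thin wrappers over these helpers. A sketch of the setup-then-verify flow; the keyword construction of ChannelSetupRequest is an assumption about models/requests.py, but the field names (domain, webhook, tokens) are the ones the helpers read:

    from models.requests import ChannelSetupRequest
    from services.messaging import setup_messaging_channel, verify_messaging_channel


    async def onboard(domain: str, webhook_url: str) -> None:
        req = ChannelSetupRequest(domain=domain, webhook=webhook_url, tokens={})
        await setup_messaging_channel(req, "slack")   # stored with verified=False, active=False
        await verify_messaging_channel(req, "slack")  # flips verified and active to True
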
/services/slack.py:
--------------------------------------------------------------------------------
1 | """Slack-specific service functions."""
2 |
3 | from typing import Dict, Optional
4 | import httpx
5 | from fastapi import HTTPException
6 | from models.requests import ChannelSetupRequest
7 | from services.messaging import (
8 | setup_messaging_channel,
9 | verify_messaging_channel,
10 | get_channel_config,
11 | delete_messaging_channel,
12 | )
13 |
14 |
15 | async def setup_slack_channel(channel_data: ChannelSetupRequest) -> bool:
16 | """Set up a Slack channel for notifications."""
17 | return await setup_messaging_channel(channel_data, "slack")
18 |
19 |
20 | async def verify_slack_channel(channel_data: ChannelSetupRequest) -> bool:
21 | """Verify a Slack channel setup."""
22 | return await verify_messaging_channel(channel_data, "slack")
23 |
24 |
25 | async def delete_slack_channel(channel_data: ChannelSetupRequest) -> bool:
26 | """Delete a Slack channel configuration."""
27 | return await delete_messaging_channel(channel_data, "slack")
28 |
29 |
30 | async def get_slack_channel_config(domain: str, token: str) -> Optional[Dict]:
31 | """Get Slack channel configuration."""
32 | return await get_channel_config(domain, token, "slack")
33 |
34 |
35 | async def send_slack_notification(domain: str, data: Dict) -> bool:
36 | """
37 | Send a notification to a Slack channel.
38 |
39 | Args:
40 | domain: The domain to send notification for
41 | data: The notification data to send
42 |
43 | Returns:
44 | bool: True if notification was sent successfully
45 | """
46 | try:
47 | config = await get_slack_channel_config(domain, data.get("token", ""))
48 | if not config:
49 | raise HTTPException(
50 | status_code=404, detail=f"Slack channel not found for domain {domain}"
51 | )
52 |
53 | webhook_url = config.get("webhook_url")
54 | if not webhook_url:
55 | raise HTTPException(status_code=400, detail="Webhook URL not configured")
56 |
57 | async with httpx.AsyncClient() as client:
58 | response = await client.post(webhook_url, json=data)
59 | response.raise_for_status()
60 |
61 | return True
62 |
63 | except httpx.HTTPError as e:
64 | raise HTTPException(
65 | status_code=500, detail=f"Error sending Slack notification: {str(e)}"
66 | ) from e
67 |
--------------------------------------------------------------------------------
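send_slack_notification() looks up the channel config with the payload's token field and then posts the data dict to the stored webhook as-is, so for a plain Slack incoming webhook a "text" field is enough. A sketch with placeholder values:

    from services.slack import send_slack_notification


    async def notify_example() -> None:
        await send_slack_notification(
            "example.com",  # placeholder domain
            {
                "token": "channel-token",  # placeholder; must be a key in the stored tokens map
                "text": "New breach detected for example.com",
            },
        )
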
/services/teams.py:
--------------------------------------------------------------------------------
1 | """Teams-specific service functions."""
2 |
3 | # Standard library imports
4 | from typing import Dict, Optional
5 |
6 | # Third-party imports
7 | import httpx
8 | from fastapi import HTTPException
9 |
10 | # Local imports
11 | from models.requests import ChannelSetupRequest
12 | from services.messaging import (
13 | setup_messaging_channel,
14 | verify_messaging_channel,
15 | get_channel_config,
16 | delete_messaging_channel,
17 | )
18 |
19 | # Constants
20 | MAX_CARDS = 10 # Teams' maximum number of cards per message
21 | MAX_CARD_SIZE = 28000 # Teams' maximum card size in bytes
22 |
23 |
24 | async def setup_teams_channel(channel_data: ChannelSetupRequest) -> bool:
25 | """Set up a Teams channel for notifications."""
26 | return await setup_messaging_channel(channel_data, "teams")
27 |
28 |
29 | async def verify_teams_channel(channel_data: ChannelSetupRequest) -> bool:
30 | """Verify a Teams channel setup."""
31 | return await verify_messaging_channel(channel_data, "teams")
32 |
33 |
34 | async def delete_teams_channel(channel_data: ChannelSetupRequest) -> bool:
35 | """Delete a Teams channel configuration."""
36 | return await delete_messaging_channel(channel_data, "teams")
37 |
38 |
39 | async def get_teams_channel_config(domain: str, token: str) -> Optional[Dict]:
40 | """Get Teams channel configuration."""
41 | return await get_channel_config(domain, token, "teams")
42 |
43 |
44 | async def send_teams_notification(domain: str, data: Dict) -> bool:
45 | """
46 | Send a notification to a Teams channel.
47 |
48 | Args:
49 | domain: The domain to send notification for
50 | data: The notification data to send
51 |
52 | Returns:
53 | bool: True if notification was sent successfully
54 | """
55 | try:
56 | config = await get_teams_channel_config(domain, data.get("token", ""))
57 | if not config:
58 | raise HTTPException(
59 | status_code=404, detail=f"Teams channel not found for domain {domain}"
60 | )
61 |
62 | webhook_url = config.get("webhook_url")
63 | if not webhook_url:
64 | raise HTTPException(status_code=400, detail="Webhook URL not configured")
65 |
66 | # Convert data to Teams-specific format if needed
67 | teams_data = {
68 | "type": "message",
69 | "attachments": [
70 | {
71 | "contentType": "application/vnd.microsoft.card.adaptive",
72 | "content": data,
73 | }
74 | ],
75 | }
76 |
77 | async with httpx.AsyncClient() as client:
78 | response = await client.post(webhook_url, json=teams_data)
79 | response.raise_for_status()
80 |
81 | return True
82 |
83 | except httpx.HTTPError as e:
84 | raise HTTPException(
85 | status_code=500, detail=f"Error sending Teams notification: {str(e)}"
86 | ) from e
87 |
--------------------------------------------------------------------------------
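MAX_CARDS and MAX_CARD_SIZE are declared but not enforced in the code shown above; an illustrative guard that could be applied before calling send_teams_notification():

    import json

    from services.teams import MAX_CARD_SIZE


    def card_fits(card: dict) -> bool:
        """Return True if the serialized adaptive card stays under Teams' size limit."""
        return len(json.dumps(card).encode("utf-8")) <= MAX_CARD_SIZE
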
/services/webhook.py:
--------------------------------------------------------------------------------
1 | """Webhook-related service functions."""
2 |
3 | import secrets
4 | from typing import Dict, Optional
5 | from google.cloud import datastore
6 | from fastapi import HTTPException
7 | from models.requests import WebhookSetupRequest
8 |
9 |
10 | async def setup_webhook(webhook_data: WebhookSetupRequest) -> str:
11 | """Setup webhook for domain notifications."""
12 | try:
13 | datastore_client = datastore.Client()
14 | verify_token = secrets.token_urlsafe(32)
15 |
16 | # Create webhook entity
17 | webhook_key = datastore_client.key("xon_webhooks", webhook_data.domain)
18 | webhook_entity = datastore.Entity(key=webhook_key)
19 | webhook_entity.update(
20 | {
21 | "domain": webhook_data.domain,
22 | "webhook_url": webhook_data.webhook,
23 | "secret": webhook_data.secret,
24 | "verify_token": verify_token,
25 | "verified": False,
26 | "active": False,
27 | }
28 | )
29 |
30 | datastore_client.put(webhook_entity)
31 | return verify_token
32 |
33 | except Exception as e:
34 | raise HTTPException(
35 | status_code=500,
36 | detail=f"Failed to setup webhook: {str(e)}",
37 | ) from e
38 |
39 |
40 | async def verify_webhook(webhook_data: WebhookSetupRequest) -> bool:
41 | """Verify webhook for domain notifications."""
42 | try:
43 | datastore_client = datastore.Client()
44 | webhook_key = datastore_client.key("xon_webhooks", webhook_data.domain)
45 | webhook_entity = datastore_client.get(webhook_key)
46 |
47 | if not webhook_entity:
48 | return False
49 |
50 | if webhook_entity["verify_token"] != webhook_data.verify_token:
51 | return False
52 |
53 | webhook_entity["verified"] = True
54 | webhook_entity["active"] = True
55 | datastore_client.put(webhook_entity)
56 | return True
57 |
58 | except Exception as e:
59 | raise HTTPException(
60 | status_code=500,
61 | detail=f"Failed to verify webhook: {str(e)}",
62 | ) from e
63 |
64 |
65 | async def delete_webhook(webhook_data: WebhookSetupRequest) -> bool:
66 | """Delete webhook for domain notifications."""
67 | try:
68 | datastore_client = datastore.Client()
69 | webhook_key = datastore_client.key("xon_webhooks", webhook_data.domain)
70 | webhook_entity = datastore_client.get(webhook_key)
71 |
72 | if not webhook_entity:
73 | return False
74 |
75 | datastore_client.delete(webhook_key)
76 | return True
77 |
78 | except Exception as e:
79 | raise HTTPException(
80 | status_code=500,
81 | detail=f"Failed to delete webhook: {str(e)}",
82 | ) from e
83 |
84 |
85 | async def get_webhook_config(domain: str) -> Optional[Dict]:
86 | """Get webhook configuration for a domain."""
87 | try:
88 | datastore_client = datastore.Client()
89 | webhook_key = datastore_client.key("xon_webhooks", domain)
90 | webhook_entity = datastore_client.get(webhook_key)
91 |
92 | if not webhook_entity:
93 | return None
94 |
95 | return {
96 | "domain": webhook_entity["domain"],
97 | "webhook_url": webhook_entity["webhook_url"],
98 | "verified": webhook_entity["verified"],
99 | "active": webhook_entity["active"],
100 | }
101 |
102 | except Exception as e:
103 | raise HTTPException(
104 | status_code=500,
105 | detail=f"Failed to get webhook config: {str(e)}",
106 | ) from e
107 |
108 |
109 | async def send_webhook_notification(domain: str) -> bool:
110 | """Send notification to configured webhook."""
111 | try:
112 | datastore_client = datastore.Client()
113 | webhook_key = datastore_client.key("xon_webhooks", domain)
114 | webhook_entity = datastore_client.get(webhook_key)
115 |
116 | if (
117 | not webhook_entity
118 | or not webhook_entity["verified"]
119 | or not webhook_entity["active"]
120 | ):
121 | return False
122 |
123 | # Implement webhook notification logic here
124 | # This is a placeholder for the actual implementation
125 | return True
126 |
127 | except Exception as e:
128 | raise HTTPException(
129 | status_code=500,
130 | detail=f"Failed to send webhook notification: {str(e)}",
131 | ) from e
132 |
--------------------------------------------------------------------------------
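send_webhook_notification() is explicitly a placeholder. One common delivery pattern, sketched here under the assumption that the stored per-domain secret is used for HMAC signing; the header name and payload shape are not defined by this repository:

    import hashlib
    import hmac
    import json

    import httpx


    async def deliver(webhook_url: str, secret: str, payload: dict) -> bool:
        """Post a JSON payload signed with the per-domain secret (hypothetical scheme)."""
        body = json.dumps(payload).encode("utf-8")
        signature = hmac.new(secret.encode("utf-8"), body, hashlib.sha256).hexdigest()
        async with httpx.AsyncClient() as client:
            response = await client.post(
                webhook_url,
                content=body,
                headers={"Content-Type": "application/json", "X-Signature": signature},
                timeout=10.0,
            )
        return response.status_code == 200
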
/static/robots.txt:
--------------------------------------------------------------------------------
1 | User-agent: *
2 | Disallow: /
3 |
--------------------------------------------------------------------------------
/static/static/dictionary.txt:
--------------------------------------------------------------------------------
1 | admin
2 | login
3 | secure
4 | support
5 | account
6 | billing
7 | update
8 | verify
9 | payment
10 | checkout
11 | customer
12 | portal
13 | signin
14 | service
15 | reset
16 | unlock
17 | confirm
18 | recovery
19 | authentication
20 | auth
21 | email
22 | mail
23 | webmail
24 | bank
25 | invoice
26 | access
27 | wallet
28 | help
29 | security
30 | alert
31 | notification
32 | password
33 | info
34 | online
35 | dashboard
36 | my
37 | safe
38 | pay
39 | trust
40 | user
41 | id
42 | client
43 | member
44 | cloud
45 | mobile
46 | verify
47 | connect
48 | app
49 | system
50 |
51 |
--------------------------------------------------------------------------------
/static/static/robots.txt:
--------------------------------------------------------------------------------
1 | User-agent: *
2 | Disallow: /
3 |
--------------------------------------------------------------------------------
/static/static/tld.txt:
--------------------------------------------------------------------------------
1 | com
2 | net
3 | org
4 | co
5 | io
6 | ai
7 | app
8 | biz
9 | info
10 | online
11 | site
12 | store
13 | website
14 | tech
15 | cloud
16 | host
17 | live
18 | support
19 | space
20 | click
21 | xyz
22 | pro
23 | club
24 | link
25 | today
26 | top
27 | vip
28 | shop
29 | news
30 | email
31 | systems
32 | solutions
33 | network
34 | tk
35 | ml
36 | ga
37 | cf
38 | gq
39 | ru
40 | cn
41 | su
42 | cc
43 | tv
44 | ws
45 | to
46 | me
47 | in
48 | uk
49 | us
50 | ca
51 | de
52 | fr
53 | nl
54 | pl
55 | br
56 | au
57 | ph
58 | ke
59 | ng
60 | za
61 |
--------------------------------------------------------------------------------
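The keyword dictionary and TLD list above presumably feed the domain-phishing checks (api/v1/domain_phishing.py); the real logic is not shown here, but a rough sketch of how such lists can be combined into candidate lookalike domains:

    def candidate_domains(brand: str, keywords: list[str], tlds: list[str]) -> list[str]:
        """Generate keyword/brand/TLD permutations; illustrative only."""
        out = []
        for word in keywords:
            for tld in tlds:
                out.append(f"{brand}-{word}.{tld}")
                out.append(f"{word}{brand}.{tld}")
        return out


    print(candidate_domains("example", ["login", "secure"], ["com", "net"])[:2])
    # ['example-login.com', 'loginexample.com']
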
/templates/domain_dashboard_error.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: XposedOrNot: Dashboard Email Validation Failed
Header: XposedOrNot
Heading: Email Verification Failed!
Body: Sorry, but we couldn't validate this link. This might be because the link expired or because it is an invalid link. Please try the dashboard access using the passwordless link again when you are ready.
Button: Dashboard
--------------------------------------------------------------------------------
/templates/domain_dashboard_success.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: XposedOrNot: Email Authentication Successful
Header: XposedOrNot
Heading: You have been successfully authenticated for this email
Body: This link will be active for the next 24 hours. Click the button below to access your XON Dashboard:
Button: Access Dashboard
--------------------------------------------------------------------------------
/templates/domain_email_error.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: XposedOrNot: Alert Me Activation Failed
Header: XposedOrNot
Heading: Email Verification Failed!
Body: Sorry, but we couldn't validate your email for this domain. This might be because the link expired after an hour or because you're already signed up for the domain Alert Me system. Please try signing up again and click on the verification link within an hour.
--------------------------------------------------------------------------------
/templates/domain_email_verify.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: Success...
Header: XposedOrNot
Heading: Success!
Body: Your email has been added to the XposedOrNot Alert Me Service for this domain. You will be notified if your email is found in any of the new data breaches loaded in XposedOrNot for this domain.
--------------------------------------------------------------------------------
/templates/email_error.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: XposedOrNot: Alert Me Activation Failed
Header: XposedOrNot
Heading: Email Verification Failed!
Body: Sorry, but we couldn't activate your "Alert Me". This might be because the link expired after an hour or because you're already signed up for our "Alert Me" system. Please try signing up again and click on the verification link within an hour.
--------------------------------------------------------------------------------
/templates/email_shield_error.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: XposedOrNot: Alert Me Activation Failed
Header: XposedOrNot
Heading: Privacy Shield Activation Failed!
Body: Sorry, but we couldn't activate your Privacy Shield. This might be because the link expired after an hour or because you're already signed up for our Privacy Shield. Please try signing up again and click on the verification link within an hour.
--------------------------------------------------------------------------------
/templates/email_shield_verify.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: Success...
Header: XposedOrNot
Heading: Success!
Body: Your email has been successfully authenticated to avoid public searches in XposedOrNot.
--------------------------------------------------------------------------------
/templates/email_success.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: XposedOrNot: Alert Me Enabled Successfully
Header: XposedOrNot
Heading: You have been successfully added to the XposedOrNot Alert Me Service
Body: Going forward, you will be notified if your email is found in any of the new data breaches loaded in XposedOrNot.
--------------------------------------------------------------------------------
/templates/email_template.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: Xposed or Not? AlertMe Activation
Body: Welcome! Thank you for signing up for the AlertMe service. Please follow this link to activate your account: {{ confirm_url }}
Sign-off: Cheers!
--------------------------------------------------------------------------------
/templates/email_unsub_error.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: XposedOrNot: Oops
--------------------------------------------------------------------------------
/templates/email_unsub_verify.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: Success...
Header: XposedOrNot
Heading: Alright. As you wish!
Body: You will no longer receive any notification alerts from new data breaches loaded in XposedOrNot.
--------------------------------------------------------------------------------
/templates/email_verify.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: Success...
Header: XposedOrNot
Body: You have been added to the XposedOrNot Alert Me Service. You will be notified if your email is found in any of the new data breaches loaded in XposedOrNot. Good news 🎉 You currently do not have any data breaches linked to this email address.
--------------------------------------------------------------------------------
/templates/index.html:
--------------------------------------------------------------------------------
[HTML markup stripped in this dump; only the template's visible text is preserved]
Title: XposedOrNot API Reference
Header: XposedOrNot
Heading: Welcome to XposedOrNot API Portal
Tagline: Your comprehensive solution for querying exposed data breaches via our REST API.
Body: I appreciate your interest in integrating the XposedOrNot API into your applications. I am thrilled to have you on board! 🎉 Explore our API Playground to get hands-on experience with the API's capabilities. For more in-depth information, please refer to our API-documentation guide and FAQ; these resources are designed to provide you with all the details you need to use our API effectively.
--------------------------------------------------------------------------------
/templates/styles.css:
--------------------------------------------------------------------------------
1 | /* -------------------------------------
2 | * GLOBAL
3 | * A very basic CSS reset
4 | * ------------------------------------- */
5 | * {
6 | margin: 0;
7 | font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
8 | box-sizing: border-box;
9 | font-size: 14px;
10 | }
11 |
12 | img {
13 | max-width: 100%;
14 | }
15 |
16 | body {
17 | -webkit-font-smoothing: antialiased;
18 | -webkit-text-size-adjust: none;
19 | width: 100% !important;
20 | height: 100%;
21 | line-height: 1.6em;
22 | /* 1.6em * 14px = 22.4px, use px to get airier line-height also in Thunderbird, and Yahoo!, Outlook.com, AOL webmail clients */
23 | /*line-height: 22px;*/
24 | }
25 |
26 | /* Let's make sure all tables have defaults */
27 | table td {
28 | vertical-align: top;
29 | }
30 |
31 | /* -------------------------------------
32 | * BODY & CONTAINER
33 | * ------------------------------------- */
34 | body {
35 | background-color: #f6f6f6;
36 | }
37 |
38 | .body-wrap {
39 | background-color: #f6f6f6;
40 | width: 100%;
41 | }
42 |
43 | .container {
44 | display: block !important;
45 | max-width: 600px !important;
46 | margin: 0 auto !important;
47 | /* makes it centered */
48 | clear: both !important;
49 | }
50 |
51 | .content {
52 | max-width: 600px;
53 | margin: 0 auto;
54 | display: block;
55 | padding: 20px;
56 | }
57 |
58 | /* -------------------------------------
59 | * HEADER, FOOTER, MAIN
60 | * ------------------------------------- */
61 | .main {
62 | background-color: #fff;
63 | border: 1px solid #e9e9e9;
64 | border-radius: 3px;
65 | }
66 |
67 | .content-wrap {
68 | padding: 20px;
69 | }
70 |
71 | .content-block {
72 | padding: 0 0 20px;
73 | }
74 |
75 | .header {
76 | width: 100%;
77 | margin-bottom: 20px;
78 | }
79 |
80 | .footer {
81 | width: 100%;
82 | clear: both;
83 | color: #999;
84 | padding: 20px;
85 | }
86 | .footer p, .footer a, .footer td {
87 | color: #999;
88 | font-size: 12px;
89 | }
90 |
91 | /* -------------------------------------
92 | * TYPOGRAPHY
93 | * ------------------------------------- */
94 | h1, h2, h3 {
95 | font-family: "Helvetica Neue", Helvetica, Arial, "Lucida Grande", sans-serif;
96 | color: #000;
97 | margin: 40px 0 0;
98 | line-height: 1.2em;
99 | font-weight: 400;
100 | }
101 |
102 | h1 {
103 | font-size: 32px;
104 | font-weight: 500;
105 | /* 1.2em * 32px = 38.4px, use px to get airier line-height also in Thunderbird, and Yahoo!, Outlook.com, AOL webmail clients */
106 | /*line-height: 38px;*/
107 | }
108 |
109 | h2 {
110 | font-size: 24px;
111 | /* 1.2em * 24px = 28.8px, use px to get airier line-height also in Thunderbird, and Yahoo!, Outlook.com, AOL webmail clients */
112 | /*line-height: 29px;*/
113 | }
114 |
115 | h3 {
116 | font-size: 18px;
117 | /* 1.2em * 18px = 21.6px, use px to get airier line-height also in Thunderbird, and Yahoo!, Outlook.com, AOL webmail clients */
118 | /*line-height: 22px;*/
119 | }
120 |
121 | h4 {
122 | font-size: 14px;
123 | font-weight: 600;
124 | }
125 |
126 | p, ul, ol {
127 | margin-bottom: 10px;
128 | font-weight: normal;
129 | }
130 | p li, ul li, ol li {
131 | margin-left: 5px;
132 | list-style-position: inside;
133 | }
134 |
135 | /* -------------------------------------
136 | * LINKS & BUTTONS
137 | * ------------------------------------- */
138 | a {
139 | color: #348eda;
140 | text-decoration: underline;
141 | }
142 |
143 | .btn-primary {
144 | text-decoration: none;
145 | color: #FFF;
146 | background-color: #348eda;
147 | border: solid #348eda;
148 | border-width: 10px 20px;
149 | line-height: 2em;
150 | /* 2em * 14px = 28px, use px to get airier line-height also in Thunderbird, and Yahoo!, Outlook.com, AOL webmail clients */
151 | /*line-height: 28px;*/
152 | font-weight: bold;
153 | text-align: center;
154 | cursor: pointer;
155 | display: inline-block;
156 | border-radius: 5px;
157 | text-transform: capitalize;
158 | }
159 |
160 | /* -------------------------------------
161 | * OTHER STYLES THAT MIGHT BE USEFUL
162 | * ------------------------------------- */
163 | .last {
164 | margin-bottom: 0;
165 | }
166 |
167 | .first {
168 | margin-top: 0;
169 | }
170 |
171 | .aligncenter {
172 | text-align: center;
173 | }
174 |
175 | .alignright {
176 | text-align: right;
177 | }
178 |
179 | .alignleft {
180 | text-align: left;
181 | }
182 |
183 | .clear {
184 | clear: both;
185 | }
186 |
187 | /* -------------------------------------
188 | * ALERTS
189 | * Change the class depending on warning email, good email or bad email
190 | * ------------------------------------- */
191 | .alert {
192 | font-size: 16px;
193 | color: #fff;
194 | font-weight: 500;
195 | padding: 20px;
196 | text-align: center;
197 | border-radius: 3px 3px 0 0;
198 | }
199 | .alert a {
200 | color: #fff;
201 | text-decoration: none;
202 | font-weight: 500;
203 | font-size: 16px;
204 | }
205 | .alert.alert-warning {
206 | background-color: #FF9F00;
207 | }
208 | .alert.alert-bad {
209 | background-color: #D0021B;
210 | }
211 | .alert.alert-good {
212 | background-color: #68B90F;
213 | }
214 |
215 | /* -------------------------------------
216 | * INVOICE
217 | * Styles for the billing table
218 | * ------------------------------------- */
219 | .invoice {
220 | margin: 40px auto;
221 | text-align: left;
222 | width: 80%;
223 | }
224 | .invoice td {
225 | padding: 5px 0;
226 | }
227 | .invoice .invoice-items {
228 | width: 100%;
229 | }
230 | .invoice .invoice-items td {
231 | border-top: #eee 1px solid;
232 | }
233 | .invoice .invoice-items .total td {
234 | border-top: 2px solid #333;
235 | border-bottom: 2px solid #333;
236 | font-weight: 700;
237 | }
238 |
239 | /* -------------------------------------
240 | * RESPONSIVE AND MOBILE FRIENDLY STYLES
241 | * ------------------------------------- */
242 | @media only screen and (max-width: 640px) {
243 | body {
244 | padding: 0 !important;
245 | }
246 |
247 | h1, h2, h3, h4 {
248 | font-weight: 800 !important;
249 | margin: 20px 0 5px !important;
250 | }
251 |
252 | h1 {
253 | font-size: 22px !important;
254 | }
255 |
256 | h2 {
257 | font-size: 18px !important;
258 | }
259 |
260 | h3 {
261 | font-size: 16px !important;
262 | }
263 |
264 | .container {
265 | padding: 0 !important;
266 | width: 100% !important;
267 | }
268 |
269 | .content {
270 | padding: 0 !important;
271 | }
272 |
273 | .content-wrap {
274 | padding: 10px !important;
275 | }
276 |
277 | .invoice {
278 | width: 100% !important;
279 | }
280 | }
281 |
282 | /*# sourceMappingURL=styles.css.map */
283 |
--------------------------------------------------------------------------------
/templates/swagger/custom_swagger.html:
--------------------------------------------------------------------------------
[HTML markup, inline styles, and scripts stripped in this dump; only the template's visible text is preserved]
Title: {{ title }}
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/XposedOrNot/XposedOrNot-API/ffcc06a0adcdbb0493e438b32b0b72e70a5cbf54/utils/__init__.py
--------------------------------------------------------------------------------
/utils/helpers.py:
--------------------------------------------------------------------------------
1 | """General utility functions for the application."""
2 |
3 | # Standard library imports
4 | import hashlib
5 | import ipaddress
6 | import re
7 | import logging
8 | from typing import Optional, Dict, Any
9 |
10 | # Third-party imports
11 | import requests
12 | from fastapi import Request
13 | from user_agents import parse
14 |
15 | # Local imports
16 | from utils.validation import validate_url, validate_variables, validate_email_with_tld
17 |
18 | # Configure logging
19 | logging.basicConfig(level=logging.INFO)
20 | logger = logging.getLogger(__name__)
21 |
22 |
23 | def get_client_ip(request: Request) -> str:
24 | """
25 | Extract the real client IP address from request headers.
26 | Handles various proxy and load balancer scenarios.
27 | """
28 | headers = request.headers
29 |
30 | # Log specific IP-related headers
31 | ip_headers = {
32 | "CF-Connecting-IP": headers.get("CF-Connecting-IP"),
33 | "X-Forwarded-For": headers.get("X-Forwarded-For"),
34 | "X-Real-IP": headers.get("X-Real-IP"),
35 | "True-Client-IP": headers.get("True-Client-IP"),
36 | "Remote-Addr": getattr(request.client, "host", None),
37 | "X-Original-Forwarded-For": headers.get("X-Original-Forwarded-For"),
38 | }
39 |
40 | # Try to get IP from various headers in order of reliability
41 | client_ip = None
42 |
43 | # 1. Try Cloudflare headers first
44 | if headers.get("CF-Connecting-IP"):
45 | client_ip = headers["CF-Connecting-IP"].strip()
46 | return client_ip
47 |
48 | # 2. Try True-Client-IP
49 | if headers.get("True-Client-IP"):
50 | client_ip = headers["True-Client-IP"].strip()
51 | return client_ip
52 |
53 | # 3. Try X-Forwarded-For
54 | if headers.get("X-Forwarded-For"):
55 | # Get the leftmost IP which is typically the client
56 | ips = [ip.strip() for ip in headers["X-Forwarded-For"].split(",")]
57 | # Filter out malformed, private, and reserved IPs
58 | public_ips = [ip for ip in ips if is_valid_ip(ip) and not ipaddress.ip_address(ip).is_private]
59 | if public_ips:
60 | client_ip = public_ips[0]
61 | return client_ip
62 | client_ip = ips[0]
63 | return client_ip
64 |
65 | # 4. Try X-Real-IP
66 | if headers.get("X-Real-IP"):
67 | client_ip = headers["X-Real-IP"].strip()
68 | return client_ip
69 |
70 | # 5. Fallback to direct client address
71 | client_ip = getattr(request.client, "host", "unknown")
72 |
73 | if client_ip == "unknown" or client_ip.startswith("169.254"):
74 | logger.warning(f"Potentially invalid IP address detected: {client_ip}")
75 | # Try to get any other available IP information
76 | logger.warning(f"All available headers: {dict(headers)}")
77 |
78 | return client_ip
79 |
80 |
81 | def validate_domain(domain: str) -> bool:
82 | """Returns True if the domain is valid, False otherwise."""
83 | if not domain:
84 | return False
85 | domain_pattern = (
86 | r"^(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+"
87 | r"[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9]$"
88 | )
89 |
90 | parts = domain.split(".")
91 | if len(parts) < 2 or len(parts[-1]) < 2:
92 | return False
93 |
94 | return bool(re.match(domain_pattern, domain))
95 |
96 |
97 | def is_valid_domain_name(domain: str) -> bool:
98 | """Check if a domain name is valid."""
99 | return validate_domain(domain)
100 |
101 |
102 | def is_valid_ip(ip_address: str) -> bool:
103 | """Check if an IP address is valid."""
104 | try:
105 | ipaddress.ip_address(ip_address)
106 | return True
107 | except ValueError:
108 | return False
109 |
110 |
111 | def get_preferred_ip_address(x_forwarded_for: str) -> Optional[str]:
112 | """Get the preferred IP address from X-Forwarded-For header."""
113 | if not x_forwarded_for:
114 | return None
115 |
116 | # Split the string into individual IP addresses
117 | ip_addresses = x_forwarded_for.split(",")
118 |
119 | # Return the first IP address that is valid
120 | for ip in ip_addresses:
121 | ip = ip.strip()
122 | if is_valid_ip(ip):
123 | return ip
124 |
125 | return None
126 |
127 |
128 | def fetch_location_by_ip(ip_address: str) -> str:
129 | """Fetch location information for an IP address."""
130 | try:
131 | response = requests.get(f"http://ip-api.com/json/{ip_address}", timeout=10)
132 | if response.status_code == 200:
133 | data = response.json()
134 | if data.get("status") == "success":
135 | return data.get("isp", "Unknown ISP")
136 | except requests.RequestException:
137 | # Log the error if needed
138 | pass
139 | return "Unknown ISP"
140 |
141 |
142 | def generate_request_hash(data: Dict[str, Any]) -> str:
143 | """Generate a hash for the request data."""
144 | data_str = str(sorted(data.items()))
145 | return hashlib.sha256(data_str.encode()).hexdigest()
146 |
147 |
148 | def get_client_info(request: Request) -> Dict[str, str]:
149 | """Get client information from the request."""
150 | user_agent_string = request.headers.get("user-agent", "")
151 | user_agent = parse(user_agent_string)
152 |
153 | return {
154 | "ip_address": request.client.host if request.client else "unknown",
155 | "browser_type": f"{user_agent.browser.family} {user_agent.browser.version_string}",
156 | "client_platform": f"{user_agent.os.family} {user_agent.os.version_string}",
157 | }
158 |
159 |
160 | def string_to_boolean(value: str) -> bool:
161 | """Convert a string to a boolean value."""
162 | return value.lower() in ("true", "t", "yes", "y", "1")
163 |
--------------------------------------------------------------------------------
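A quick usage sketch for the standalone helpers (input values are examples):

    from utils.helpers import get_preferred_ip_address, is_valid_ip, string_to_boolean

    print(is_valid_ip("203.0.113.7"))                          # True
    print(get_preferred_ip_address("203.0.113.7, 10.0.0.1"))   # 203.0.113.7
    print(string_to_boolean("Yes"))                            # True
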
/utils/request.py:
--------------------------------------------------------------------------------
1 | """Request-related utility functions."""
2 |
3 | import ipaddress
4 | from fastapi import Request
5 | from user_agents import parse
6 |
7 |
8 | def get_client_ip(request: Request) -> str:
9 | """
10 | Get the client IP address from the request headers.
11 | Prioritizes Cloudflare headers, then falls back to standard headers.
12 | """
13 | headers = request.headers
14 | ip_headers = {
15 | "CF-Connecting-IP": headers.get("CF-Connecting-IP"),
16 | "X-Forwarded-For": headers.get("X-Forwarded-For"),
17 | "X-Real-IP": headers.get("X-Real-IP"),
18 | "True-Client-IP": headers.get("True-Client-IP"),
19 | "Remote-Addr": getattr(request.client, "host", None),
20 | "X-Original-Forwarded-For": headers.get("X-Original-Forwarded-For"),
21 | }
22 | # Try to get IP from various headers in order of reliability
23 | client_ip = None
24 |
25 | # 1. Try Cloudflare headers first
26 | if headers.get("CF-Connecting-IP"):
27 | client_ip = headers["CF-Connecting-IP"].strip()
28 |
29 | return client_ip
30 |
31 | # 2. Try True-Client-IP
32 | if headers.get("True-Client-IP"):
33 | client_ip = headers["True-Client-IP"].strip()
34 |
35 | return client_ip
36 |
37 | # 3. Try X-Forwarded-For
38 | if headers.get("X-Forwarded-For"):
39 | # Get the leftmost IP which is typically the client
40 | ips = [ip.strip() for ip in headers["X-Forwarded-For"].split(",")]
41 | # Filter out private and reserved IPs
42 | public_ips = [ip for ip in ips if not ipaddress.ip_address(ip).is_private]
43 | if public_ips:
44 | client_ip = public_ips[0]
45 |
46 | return client_ip
47 | client_ip = ips[0]
48 |
49 | return client_ip
50 |
51 | # 4. Try X-Real-IP
52 | if headers.get("X-Real-IP"):
53 | client_ip = headers["X-Real-IP"].strip()
54 |
55 | return client_ip
56 |
57 | # Fallback to remote address
58 | client_ip = request.client.host if request.client else "0.0.0.0"
59 |
60 | # Basic IP validation
61 | if not client_ip or client_ip == "0.0.0.0":
62 | return "0.0.0.0"
63 |
64 | return client_ip
65 |
66 |
67 | def get_user_agent_info(request: Request) -> tuple[str, str]:
68 | """
69 | Extract browser and platform information from the request.
70 | """
71 | user_agent_string = request.headers.get("User-Agent", "")
72 | user_agent = parse(user_agent_string)
73 | browser_type = f"{user_agent.browser.family} {user_agent.browser.version_string}"
74 | client_platform = user_agent.os.family
75 | return browser_type, client_platform
76 |
--------------------------------------------------------------------------------
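get_user_agent_info() leans on user_agents.parse; with a hypothetical Chrome-on-Windows User-Agent string the values come back roughly as shown:

    from user_agents import parse

    ua = parse(
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
        "(KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
    )
    browser_type = f"{ua.browser.family} {ua.browser.version_string}"  # e.g. "Chrome 120.0.0"
    client_platform = ua.os.family                                     # e.g. "Windows"
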
/utils/security.py:
--------------------------------------------------------------------------------
1 | """Security utilities for the application."""
2 |
3 | from cryptography.fernet import Fernet
4 | from itsdangerous import URLSafeTimedSerializer, SignatureExpired, BadSignature
5 | from config.settings import FERNET_KEY, SECRET_APIKEY, SECURITY_SALT
6 |
7 | # Initialize Fernet cipher suite
8 | CIPHER_SUITE = Fernet(FERNET_KEY)
9 |
10 |
11 | def encrypt_data(data: str) -> bytes:
12 | """Encrypts the given data using a predefined cipher suite."""
13 | return CIPHER_SUITE.encrypt(data.encode())
14 |
15 |
16 | def decrypt_data(data: bytes) -> str:
17 | """Decrypts the given data using a predefined cipher suite."""
18 | return CIPHER_SUITE.decrypt(data).decode()
19 |
20 |
21 | def generate_confirmation_token(email: str) -> str:
22 | """Returns confirmation token generated for validation."""
23 | serializer = URLSafeTimedSerializer(SECRET_APIKEY)
24 | return serializer.dumps(email, salt=SECURITY_SALT)
25 |
26 |
27 | def confirm_token(token: str, expiration: int = 1296000) -> "str | bool":
28 | """Return the decoded email address if the token is valid, or False otherwise."""
29 | try:
30 | serializer = URLSafeTimedSerializer(SECRET_APIKEY)
31 | return serializer.loads(token, salt=SECURITY_SALT, max_age=expiration)
32 | except (SignatureExpired, BadSignature, ValueError):
33 | return False
34 |
--------------------------------------------------------------------------------
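A round-trip sketch for the helpers above; it assumes valid FERNET_KEY, SECRET_APIKEY, and SECURITY_SALT values are available via config.settings:

    from utils.security import (
        confirm_token,
        decrypt_data,
        encrypt_data,
        generate_confirmation_token,
    )

    ciphertext = encrypt_data("user@example.com")
    assert decrypt_data(ciphertext) == "user@example.com"

    token = generate_confirmation_token("user@example.com")
    assert confirm_token(token) == "user@example.com"  # False if expired or tampered
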
/utils/token.py:
--------------------------------------------------------------------------------
1 | """Token generation and verification utilities."""
2 |
3 | from typing import Optional
4 | import logging
5 | from fastapi import HTTPException
6 | from itsdangerous import URLSafeTimedSerializer, SignatureExpired, BadSignature
7 |
8 | from config.settings import SECRET_APIKEY, SECURITY_SALT
9 |
10 | # Configure logging
11 | logger = logging.getLogger(__name__)
12 |
13 |
14 | async def generate_confirmation_token(email: str) -> str:
15 | """
16 | Generate a secure confirmation token for email verification.
17 | """
18 | try:
19 | logger.debug("[TOKEN] Generating confirmation token for email: %s", email)
20 | serializer = URLSafeTimedSerializer(SECRET_APIKEY)
21 | token = serializer.dumps(email, salt=SECURITY_SALT)
22 | logger.debug("[TOKEN] Successfully generated token for email: %s", email)
23 | return token
24 | except Exception as e:
25 | logger.error(
26 | "[TOKEN] Error generating confirmation token: %s", str(e), exc_info=True
27 | )
28 | raise HTTPException(
29 | status_code=500, detail="Error generating confirmation token"
30 | ) from e
31 |
32 |
33 | async def confirm_token(token: str, expiration: int = 1296000) -> Optional[str]:
34 | """
35 | Verify and decode a confirmation token.
36 | """
37 | try:
38 | logger.debug("[TOKEN] Verifying token with expiration: %s", expiration)
39 | serializer = URLSafeTimedSerializer(SECRET_APIKEY)
40 | email = serializer.loads(token, salt=SECURITY_SALT, max_age=expiration)
41 | logger.debug("[TOKEN] Successfully verified token for email: %s", email)
42 | return email
43 | except SignatureExpired:
44 | logger.error("[TOKEN] Token expired", exc_info=True)
45 | return None
46 | except BadSignature:
47 | logger.error("[TOKEN] Invalid token signature", exc_info=True)
48 | return None
49 | except Exception as e:
50 | logger.error("[TOKEN] Error verifying token: %s", str(e), exc_info=True)
51 | return None
52 |
--------------------------------------------------------------------------------
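The async variants mirror utils/security.py but return None rather than False on failure; a minimal round trip, assuming the same settings are configured:

    from utils.token import confirm_token, generate_confirmation_token


    async def is_still_valid(email: str) -> bool:
        token = await generate_confirmation_token(email)
        return await confirm_token(token, expiration=3600) == email
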
/utils/validation.py:
--------------------------------------------------------------------------------
1 | """Validation utilities for the application."""
2 |
3 | import re
4 | from urllib.parse import urlparse
5 | from fastapi import Request
6 |
7 |
8 | def validate_variables(variables_to_validate: list) -> bool:
9 | """Validate input variables to ensure they contain only valid characters."""
10 | pattern = r"^[a-zA-Z0-9@._:/-]*$"
11 | return all(
12 | value and not value.isspace() and re.match(pattern, value)
13 | for value in variables_to_validate
14 | )
15 |
16 |
17 | def validate_url(request: Request) -> bool:
18 | """Returns True if the url is a valid url, False otherwise."""
19 | try:
20 | url = str(request.url)
21 | result = urlparse(url)
22 | return all([result.scheme, result.netloc])
23 | except (ValueError, AttributeError):
24 | return False
25 |
26 |
27 | def validate_email_with_tld(email: str) -> bool:
28 | """Validate email with a basic format check."""
29 | pattern = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$"
30 | return bool(re.match(pattern, email))
31 |
32 |
33 | def validate_token(token: str) -> bool:
34 | """
35 | Validates a token format based on allowed characters.
36 | Allowed characters: alphanumeric, dots, underscores, hyphens, and base64 characters
37 |
38 | Args:
39 | token: The token to validate
40 |
41 | Returns:
42 | bool: True if token contains only allowed characters, False otherwise
43 | """
44 | try:
45 | # Pattern allows alphanumeric, dots, underscores, hyphens, and base64 characters
46 | pattern = r"^[a-zA-Z0-9._\-=]+$"
47 | return bool(re.match(pattern, token))
48 | except Exception:
49 | return False
50 |
--------------------------------------------------------------------------------
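Example inputs for the validators (values are illustrative):

    from utils.validation import validate_email_with_tld, validate_token, validate_variables

    print(validate_email_with_tld("user@example.com"))              # True
    print(validate_email_with_tld("user@localhost"))                # False, no TLD
    print(validate_token("abc123._-="))                             # True
    print(validate_variables(["example.com", "user@example.com"]))  # True
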