├── .dockerignore ├── .gitignore ├── requirements.txt ├── assets └── screenshot.jpg ├── Dockerfile ├── docker-compose.yaml ├── netmon.spec ├── .github ├── dependabot.yml └── workflows │ ├── lint.yml │ ├── dependabot_updates.yml │ └── publish.yml ├── LICENSE ├── README.md └── app.py /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | 3 | !app.py 4 | !requirements.txt 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .venv 2 | /__pycache__ 3 | build/ 4 | dist/ 5 | 6 | results.json 7 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | apprise==1.9.5 2 | python3_nmap==1.9.1 3 | schedule==1.2.2 4 | -------------------------------------------------------------------------------- /assets/screenshot.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RafhaanShah/Net-Mon/HEAD/assets/screenshot.jpg -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3-alpine 2 | 3 | LABEL org.opencontainers.image.source="https://github.com/RafhaanShah/Net-Mon" 4 | 5 | # https://pkgs.alpinelinux.org/package/edge/main/x86_64/nmap 6 | ENV NMAP_VERSION="7.97-r0" 7 | 8 | RUN apk update && apk add \ 9 | nmap=${NMAP_VERSION} \ 10 | && rm -rf /var/cache/apk/* 11 | 12 | ENV PYTHONUNBUFFERED=1 13 | 14 | WORKDIR /app 15 | 16 | COPY requirements.txt . 17 | RUN pip install -r requirements.txt 18 | 19 | COPY app.py . 
20 | 21 | # needs to run as root for nmap to get mac addresses 22 | ENTRYPOINT ["python", "app.py"] 23 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | net-mon: 3 | container_name: net-mon 4 | image: ghcr.io/rafhaanshah/net-mon:latest 5 | restart: unless-stopped 6 | network_mode: host # needed for nmap to get mac addresses 7 | volumes: 8 | - ./results.json:/app/results.json # optional, if you want to keep found hosts persistent. 9 | # create an empty results.json first 10 | environment: 11 | - NETMON_NOTIFICATION=tgram://bottoken/ChatID 12 | - NETMON_SUBNET=192.168.1.0/24 13 | - NETMON_MINUTES=60 14 | -------------------------------------------------------------------------------- /netmon.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | 3 | 4 | a = Analysis( 5 | ['app.py'], 6 | pathex=[], 7 | binaries=[], 8 | datas=[], 9 | hiddenimports=[], 10 | hookspath=[], 11 | hooksconfig={}, 12 | runtime_hooks=[], 13 | excludes=[], 14 | noarchive=False, 15 | optimize=0, 16 | ) 17 | pyz = PYZ(a.pure) 18 | 19 | exe = EXE( 20 | pyz, 21 | a.scripts, 22 | a.binaries, 23 | a.datas, 24 | [], 25 | name='netmon', 26 | debug=False, 27 | bootloader_ignore_signals=False, 28 | strip=False, 29 | upx=True, 30 | upx_exclude=[], 31 | runtime_tmpdir=None, 32 | console=True, 33 | disable_windowed_traceback=False, 34 | argv_emulation=False, 35 | target_arch=None, 36 | codesign_identity=None, 37 | entitlements_file=None, 38 | ) 39 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package 
manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" 9 | directory: "/" 10 | schedule: 11 | interval: "monthly" 12 | groups: 13 | all: 14 | patterns: 15 | - "*" 16 | - package-ecosystem: "github-actions" 17 | directory: "/" 18 | schedule: 19 | interval: "monthly" 20 | groups: 21 | all: 22 | patterns: 23 | - "*" 24 | - package-ecosystem: "docker" 25 | directory: "/" 26 | schedule: 27 | interval: "monthly" 28 | - package-ecosystem: "docker-compose" 29 | directory: "/" 30 | schedule: 31 | interval: "monthly" 32 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | # This workflow lint checks a Python application 2 | 3 | name: Lint 4 | 5 | on: 6 | push: 7 | branches: [ master ] 8 | pull_request: 9 | branches: [ master ] 10 | 11 | jobs: 12 | lint: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout repository 17 | uses: actions/checkout@v6 18 | 19 | - name: Set up Python 20 | uses: actions/setup-python@v6 21 | 22 | - name: Cache dependencies 23 | uses: actions/cache@v4 24 | with: 25 | path: ~/.cache/pip 26 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} 27 | restore-keys: | 28 | ${{ runner.os }}-pip- 29 | 30 | - name: Install dependencies 31 | run: | 32 | python -m pip install --upgrade pip 33 | pip install -r requirements.txt 34 | pip install pylint 35 | 36 | - name: Run Pylint 37 | run: | 38 | pylint app.py 39 | -------------------------------------------------------------------------------- /.github/workflows/dependabot_updates.yml: -------------------------------------------------------------------------------- 1 | # Merge dependabot updates automatically 2 | # 
https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions 3 | 4 | name: Dependabot Auto-Merge 5 | on: pull_request 6 | 7 | permissions: 8 | contents: write 9 | pull-requests: write 10 | 11 | jobs: 12 | dependabot_updates: 13 | name: Dependabot Auto-Merge 14 | runs-on: ubuntu-latest 15 | if: ${{ github.actor == 'dependabot[bot]' }} 16 | 17 | steps: 18 | - name: Dependabot metadata 19 | id: metadata 20 | uses: dependabot/fetch-metadata@v2 21 | with: 22 | github-token: "${{ secrets.GITHUB_TOKEN }}" 23 | 24 | - name: Enable auto-merge for Dependabot PRs 25 | # if: ${{contains(steps.metadata.outputs.dependency-names, 'my-dependency') && steps.metadata.outputs.update-type == 'version-update:semver-patch'}} 26 | run: gh pr merge --auto --squash "$PR_URL" 27 | env: 28 | PR_URL: ${{github.event.pull_request.html_url}} 29 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) [2021] [Rafhaan Shah] 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Net-Mon 2 | 3 | Get notified for new devices on your network. This app runs [nmap](https://nmap.org/) periodically and saves found hosts, and sends you a notification whenever a new device (mac-address) is found. 4 | 5 | ![](/assets/screenshot.jpg) 6 | 7 | ## Prerequisites 8 | - A notification service supported by [Apprise](https://github.com/caronc/apprise#popular-notification-services) and the required API keys or other configuration for your chosen services 9 | - Have `nmap` already installed on your system 10 | 11 | ## Building 12 | Install Requirements: 13 | ``` 14 | python3 -m venv .venv 15 | source .venv/bin/activate 16 | pip install -r requirements.txt 17 | ``` 18 | 19 | Upgrade Dependencies: 20 | ``` 21 | pip install pipreqs 22 | pip install --upgrade -r requirements.txt 23 | pipreqs --force --ignore .venv 24 | ``` 25 | 26 | ## Installation 27 | - If you have Python installed, you can clone the repository and directly run the Python file 28 | - You can download the latest release artifact from [GitHub Releases](https://github.com/RafhaanShah/Net-Mon/releases) 29 | - If you have Docker installed, you can run the Docker image 30 | 31 | ## Configuration 32 | You can configure Net-Mon using **environment variables** or **command-line arguments**. 33 | 34 | ### Environment Variables: 35 | 1. Apprise configuration URL, for your chosen providers: 36 | - `NETMON_NOTIFICATION=tgram://bottoken/ChatID` 37 | 2. 
Subnet for scanning in CIDR form or range form: 38 | - `NETMON_SUBNET=192.168.1.0/24` or `NETMON_SUBNET=192.168.1.1-100` 39 | 3. Interval for scanning, in minutes: 40 | - `NETMON_MINUTES=15` 41 | 4. Results file path (optional, default is `results.json`): 42 | - `NETMON_RESULTS=results.json` 43 | 44 | ### Command-Line Arguments: 45 | You can also pass these options directly when running the app: 46 | - `--notification` Notification URL (e.g. `--notification tgram://bottoken/ChatID`) 47 | - `--subnet` Subnet to scan (e.g. `--subnet 192.168.1.0/24`) 48 | - `--minutes` Scan interval in minutes (e.g. `--minutes 15`) 49 | - `--results` Results file path (default: `results.json`) 50 | 51 | ## Usage 52 | - Python: 53 | `sudo python app.py --notification tgram://bottoken/ChatID` 54 | - Executable: 55 | `sudo ./netmon --notification tgram://bottoken/ChatID` 56 | - Docker: 57 | ```bash 58 | docker run -e \ 59 | NETMON_NOTIFICATION=tgram://bottoken/ChatID \ 60 | NETMON_SUBNET=192.168.1.0/24 \ 61 | NETMON_MINUTES=15 \ 62 | --net=host \ 63 | ghcr.io/rafhaanshah/net-mon:latest 64 | ``` 65 | - Docker-Compose: 66 | ```yaml 67 | services: 68 | net-mon: 69 | container_name: net-mon 70 | image: ghcr.io/rafhaanshah/net-mon:latest 71 | restart: unless-stopped 72 | network_mode: host # needed for nmap to get mac addresses 73 | volumes: 74 | - ./results.json:/app/results.json # optional, if you want to keep found hosts persistent 75 | # create an empty results.json first 76 | environment: 77 | - NETMON_NOTIFICATION=tgram://bottoken/ChatID 78 | - NETMON_SUBNET=192.168.1.0/24 79 | - NETMON_MINUTES=60 80 | ``` 81 | 82 | ## License 83 | [MIT](https://choosealicense.com/licenses/mit/) 84 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build and publish a Docker Image to GitHub Container Registry 2 | # And also build 
executables and upload it to the release 3 | # See https://github.com/docker/build-push-action 4 | 5 | name: Publish 6 | 7 | on: 8 | release: 9 | types: [published] 10 | 11 | env: 12 | REGISTRY: ghcr.io 13 | IMAGE_NAME: rafhaanshah/net-mon 14 | 15 | jobs: 16 | push_to_registry: 17 | name: Push Docker Image to GitHub Container Registry 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | # checkout is NOT needed for docker/build-push-action 22 | 23 | - name: Set version variables 24 | id: vars 25 | run: | 26 | TAG=${GITHUB_REF#refs/*/} 27 | MAJOR=$(echo $TAG | grep -oP '^v\d+') 28 | MINOR=$(echo $TAG | grep -oP '^v\d+\.\d+') 29 | PATCH=$TAG 30 | echo "major=$MAJOR" >> $GITHUB_OUTPUT 31 | echo "minor=$MINOR" >> $GITHUB_OUTPUT 32 | echo "patch=$PATCH" >> $GITHUB_OUTPUT 33 | 34 | - name: Set up QEMU 35 | uses: docker/setup-qemu-action@v3 36 | 37 | - name: Set up Docker Buildx 38 | uses: docker/setup-buildx-action@v3 39 | 40 | - name: Login to GitHub Container Registry 41 | uses: docker/login-action@v3 42 | with: 43 | registry: ${{ env.REGISTRY }} 44 | username: ${{ github.actor }} 45 | password: ${{ secrets.GITHUB_TOKEN }} 46 | 47 | - name: Build and push 48 | id: docker_build 49 | uses: docker/build-push-action@v6 50 | with: 51 | platforms: linux/amd64,linux/arm64,linux/arm/v7 52 | push: true 53 | cache-from: type=gha 54 | cache-to: type=gha,mode=max 55 | tags: | 56 | ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.vars.outputs.major }} 57 | ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.vars.outputs.minor }} 58 | ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.vars.outputs.patch }} 59 | ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest 60 | 61 | - name: Print image digest 62 | run: echo ${{ steps.docker_build.outputs.digest }} 63 | 64 | upload_executable: 65 | name: Build and upload executable to release 66 | runs-on: ${{ matrix.os }} 67 | strategy: 68 | fail-fast: false 69 | matrix: 70 | include: 71 | - os: ubuntu-24.04 72 | path: ~/.cache/pip 73 | - os: 
ubuntu-24.04-arm 74 | path: ~/.cache/pip 75 | - os: macos-latest 76 | path: ~/Library/Caches/pip 77 | 78 | steps: 79 | - name: Checkout repository 80 | uses: actions/checkout@v6 81 | 82 | - name: Set up Python 83 | uses: actions/setup-python@v6 84 | 85 | - name: Cache dependencies 86 | uses: actions/cache@v4 87 | with: 88 | path: ${{ matrix.path }} 89 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} 90 | restore-keys: | 91 | ${{ runner.os }}-pip- 92 | 93 | - name: Install dependencies 94 | run: | 95 | python -m pip install --upgrade pip 96 | pip install -r requirements.txt 97 | pip install pyinstaller 98 | 99 | - name: Build executable 100 | run: pyinstaller netmon.spec 101 | 102 | - name: Set artifact variables 103 | id: artifact_vars 104 | run: | 105 | echo "os_lower=$(echo ${{ runner.os }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_OUTPUT 106 | echo "arch_lower=$(echo ${{ runner.arch }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_OUTPUT 107 | 108 | - name: Upload artifacts 109 | uses: shogo82148/actions-upload-release-asset@v1 110 | with: 111 | upload_url: ${{ github.event.release.upload_url }} 112 | asset_path: dist/netmon 113 | asset_name: netmon-${{ steps.artifact_vars.outputs.os_lower }}-${{ steps.artifact_vars.outputs.arch_lower }} 114 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | """Run nmap periodically to monitor for and notify when new devices are detected.""" 2 | 3 | import argparse 4 | import json 5 | import os 6 | import sys 7 | import time 8 | 9 | import apprise 10 | import nmap3 11 | import schedule 12 | 13 | 14 | def parse_args(): 15 | """Parse command-line arguments and fallback to environment variables.""" 16 | parser = argparse.ArgumentParser( 17 | description="Run nmap periodically to monitor for new devices." 
18 | ) 19 | parser.add_argument( 20 | "--notification", 21 | default=os.getenv("NETMON_NOTIFICATION", ""), 22 | help="Notification URL", 23 | ) 24 | parser.add_argument( 25 | "--subnet", 26 | default=os.getenv("NETMON_SUBNET", "192.168.1.0/241"), 27 | help="Subnet to scan", 28 | ) 29 | parser.add_argument( 30 | "--minutes", 31 | default=os.getenv("NETMON_MINUTES", "15"), 32 | type=int, 33 | help="Scan interval in minutes", 34 | ) 35 | parser.add_argument( 36 | "--results", 37 | default=os.getenv("NETMON_RESULTS", "resultsz.json"), 38 | help="Results file path", 39 | ) 40 | return parser.parse_args() 41 | 42 | 43 | def main(): 44 | """Run application.""" 45 | args = parse_args() 46 | print("Starting Net-Mon") 47 | service = args.notification.split("://")[0] if "://" in args.notification else "" 48 | print(f"Notification service: {service}") 49 | print(f"Subnet: {args.subnet}") 50 | print(f"Scan interval (minutes): {args.minutes}") 51 | 52 | apprise_client = get_apprise_client(args.notification) 53 | scan_and_process(apprise_client, args.results, args.subnet) # first run 54 | 55 | # then every x minutes 56 | schedule.every(int(args.minutes)).minutes.do( 57 | scan_and_process, 58 | apprise_client=apprise_client, 59 | results=args.results, 60 | subnet=args.subnet, 61 | ) 62 | 63 | while True: 64 | try: 65 | schedule.run_pending() 66 | time.sleep(1) 67 | 68 | except KeyboardInterrupt: 69 | sys.exit("\tStopping application, bye bye") 70 | 71 | 72 | def scan_and_process(apprise_client, results, subnet): 73 | """Scans for new hosts and checks against existing hosts.""" 74 | new_scan = scan(subnet) 75 | 76 | if is_first_run(results): 77 | print("First run, found " + str(len(new_scan)) + " hosts") 78 | write_json(new_scan, results) 79 | return 80 | 81 | old_scan = read_json(results) 82 | process_results(apprise_client, old_scan, new_scan) 83 | merged = merge_lists(old_scan, new_scan) 84 | write_json(merged, results) 85 | 86 | 87 | def scan(subnet): 88 | """Do nmap scan and 
parse mac addresses.""" 89 | nmap = nmap3.NmapScanTechniques() 90 | scan_result = nmap.nmap_ping_scan(subnet) 91 | scan_result.pop("stats", None) 92 | scan_result.pop("runtime", None) 93 | 94 | result = {} 95 | 96 | for ip_address, host in scan_result.items(): 97 | if "macaddress" in host: 98 | mcadr = host["macaddress"] 99 | if mcadr and "addr" in mcadr: 100 | mac = mcadr["addr"] 101 | result[mac] = ip_address 102 | 103 | return result 104 | 105 | 106 | def process_results(apprise_client, old_list, new_list): 107 | """Check for new hosts and notify.""" 108 | for mac in new_list: 109 | if mac not in old_list: 110 | ip_address = new_list[mac] 111 | notify(apprise_client, mac, ip_address) 112 | 113 | 114 | def read_json(results): 115 | """Read json to memory.""" 116 | with open(results, encoding="utf-8") as file: 117 | result = json.load(file) 118 | return result 119 | 120 | 121 | def write_json(result, results): 122 | """Write result to json.""" 123 | with open(results, "w", encoding="utf-8") as file: 124 | json.dump(result, file) 125 | 126 | 127 | def merge_lists(old_list, new_list): 128 | """Merge two dictionaries.""" 129 | for mac in new_list: 130 | old_list[mac] = new_list[mac] 131 | 132 | return old_list 133 | 134 | 135 | def is_first_run(file): 136 | """Check if results file exists.""" 137 | return not os.path.exists(file) or os.stat(file).st_size == 0 138 | 139 | 140 | def notify(apprise_client, mac, ip_address): 141 | """Send apprise notification.""" 142 | message = "New device " + mac + " on IP: " + ip_address 143 | print(message) 144 | apprise_client.notify( 145 | title="Net-Mon", 146 | body=message, 147 | ) 148 | 149 | 150 | def get_apprise_client(url): 151 | """Return Apprise instance.""" 152 | apprise_client = apprise.Apprise() 153 | apprise_client.add(url) 154 | 155 | return apprise_client 156 | 157 | 158 | if __name__ == "__main__": 159 | main() 160 | --------------------------------------------------------------------------------