├── .vscode
└── settings.json
├── settings.gradle
├── source asset
├── icon 2.png
└── HumanDetection4Tasker.zip
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── app
├── src
│ ├── main
│ │ ├── res
│ │ │ ├── play_store_512.png
│ │ │ ├── playstore-icon.png
│ │ │ ├── ic_launcher-web.png
│ │ │ ├── drawable-xhdpi
│ │ │ │ └── bg_splash.png
│ │ │ ├── mipmap-hdpi
│ │ │ │ ├── human_detection.png
│ │ │ │ ├── opencv4tasker.png
│ │ │ │ ├── ai_image_analyzer.png
│ │ │ │ ├── ai_image_analyzer_round.png
│ │ │ │ ├── opencv4tasker_background.png
│ │ │ │ ├── opencv4tasker_foreground.png
│ │ │ │ ├── opencv4tasker_monochrome.png
│ │ │ │ ├── human_detection_background.png
│ │ │ │ ├── human_detection_foreground.png
│ │ │ │ ├── human_detection_monochrome.png
│ │ │ │ └── ai_image_analyzer_foreground.png
│ │ │ ├── mipmap-mdpi
│ │ │ │ ├── human_detection.png
│ │ │ │ ├── opencv4tasker.png
│ │ │ │ ├── ai_image_analyzer.png
│ │ │ │ ├── ai_image_analyzer_round.png
│ │ │ │ ├── opencv4tasker_background.png
│ │ │ │ ├── opencv4tasker_foreground.png
│ │ │ │ ├── opencv4tasker_monochrome.png
│ │ │ │ ├── human_detection_background.png
│ │ │ │ ├── human_detection_foreground.png
│ │ │ │ ├── human_detection_monochrome.png
│ │ │ │ └── ai_image_analyzer_foreground.png
│ │ │ ├── mipmap-xhdpi
│ │ │ │ ├── opencv4tasker.png
│ │ │ │ ├── human_detection.png
│ │ │ │ ├── ai_image_analyzer.png
│ │ │ │ ├── ai_image_analyzer_round.png
│ │ │ │ ├── opencv4tasker_background.png
│ │ │ │ ├── opencv4tasker_foreground.png
│ │ │ │ ├── opencv4tasker_monochrome.png
│ │ │ │ ├── human_detection_background.png
│ │ │ │ ├── human_detection_foreground.png
│ │ │ │ ├── human_detection_monochrome.png
│ │ │ │ └── ai_image_analyzer_foreground.png
│ │ │ ├── mipmap-xxhdpi
│ │ │ │ ├── opencv4tasker.png
│ │ │ │ ├── human_detection.png
│ │ │ │ ├── ai_image_analyzer.png
│ │ │ │ ├── ai_image_analyzer_round.png
│ │ │ │ ├── opencv4tasker_background.png
│ │ │ │ ├── opencv4tasker_foreground.png
│ │ │ │ ├── opencv4tasker_monochrome.png
│ │ │ │ ├── ai_image_analyzer_foreground.png
│ │ │ │ ├── human_detection_background.png
│ │ │ │ ├── human_detection_foreground.png
│ │ │ │ └── human_detection_monochrome.png
│ │ │ ├── image_download_play_store_512.png
│ │ │ ├── mipmap-xxxhdpi
│ │ │ │ ├── opencv4tasker.png
│ │ │ │ ├── ai_image_analyzer.png
│ │ │ │ ├── human_detection.png
│ │ │ │ ├── ai_image_analyzer_round.png
│ │ │ │ ├── opencv4tasker_background.png
│ │ │ │ ├── opencv4tasker_foreground.png
│ │ │ │ ├── opencv4tasker_monochrome.png
│ │ │ │ ├── human_detection_background.png
│ │ │ │ ├── human_detection_foreground.png
│ │ │ │ ├── human_detection_monochrome.png
│ │ │ │ └── ai_image_analyzer_foreground.png
│ │ │ ├── values
│ │ │ │ ├── camera_play_background.xml
│ │ │ │ ├── ic_launcher_background.xml
│ │ │ │ ├── camera_pause_background.xml
│ │ │ │ ├── colors.xml
│ │ │ │ ├── styles.xml
│ │ │ │ └── strings.xml
│ │ │ ├── mipmap-anydpi-v26
│ │ │ │ ├── ai_image_analyzer.xml
│ │ │ │ ├── ai_image_analyzer_round.xml
│ │ │ │ ├── opencv4tasker.xml
│ │ │ │ └── human_detection.xml
│ │ │ ├── menu
│ │ │ │ └── main_menu.xml
│ │ │ ├── layout
│ │ │ │ ├── activity_splash.xml
│ │ │ │ ├── activity_config_cancel_notification.xml
│ │ │ │ ├── activity_config_detect_humans.xml
│ │ │ │ ├── activity_main.xml
│ │ │ │ ├── activity_config_analyze_image.xml
│ │ │ │ ├── activity_config.xml
│ │ │ │ └── activity_config_notification_intercepted_event.xml
│ │ │ ├── drawable-v24
│ │ │ │ └── ic_launcher_foreground.xml
│ │ │ └── drawable
│ │ │ │ └── ic_launcher_background.xml
│ │ ├── assets
│ │ │ └── lite-model_efficientdet_lite0_detection_metadata_1.tflite
│ │ ├── java
│ │ │ └── online
│ │ │ │ └── avogadro
│ │ │ │ └── opencv4tasker
│ │ │ │ ├── tasker
│ │ │ │ ├── NotificationRaiser.kt
│ │ │ │ ├── AnalyzeImageOutput.kt
│ │ │ │ ├── CancelNotificationInput.kt
│ │ │ │ ├── DetectHumansInput.kt
│ │ │ │ ├── DetectHumansOutput.kt
│ │ │ │ ├── AnalyzeImageInput.kt
│ │ │ │ ├── CancelNotificationOutput.kt
│ │ │ │ ├── CancelNotificationActionHelper.kt
│ │ │ │ ├── ActivityConfigNotificationInterceptedEvent.kt
│ │ │ │ ├── NotificationInterceptedEvent.kt
│ │ │ │ ├── DetectHumansActionHelper.kt
│ │ │ │ └── AnalyzeImageActionHelper.kt
│ │ │ │ ├── app
│ │ │ │ ├── OpenCV4TaskerApplication.java
│ │ │ │ ├── Util.java
│ │ │ │ └── SharedPreferencesHelper.java
│ │ │ │ ├── ai
│ │ │ │ └── AIImageAnalyzer.java
│ │ │ │ ├── opencv
│ │ │ │ └── HumansDetector.java
│ │ │ │ ├── tensorflowlite
│ │ │ │ └── HumansDetectorTensorFlow.java
│ │ │ │ ├── googleml
│ │ │ │ └── HumansDetectorGoogleML.kt
│ │ │ │ ├── ConfigActivity.java
│ │ │ │ ├── notification
│ │ │ │ ├── NotificationFileManager.kt
│ │ │ │ ├── NotificationInterceptorService.kt
│ │ │ │ └── NotificationImageExtractor.kt
│ │ │ │ ├── SplashActivity.java
│ │ │ │ ├── claudeai
│ │ │ │ └── HumansDetectorClaudeAI.java
│ │ │ │ ├── openrouter
│ │ │ │ └── HumansDetectorOpenRouter.java
│ │ │ │ └── gemini
│ │ │ │ └── HumansDetectorGemini.java
│ │ └── AndroidManifest.xml
│ └── androidTest
│ │ └── java
│ │ └── online
│ │ └── avogadro
│ │ └── aws4tasker
│ │ └── ExampleInstrumentedTest.java
├── .gitignore
├── proguard-rules.pro
├── REMOVED-google-services.json
└── build.gradle
├── .claude
└── settings.local.json
├── How to create API keys for Google and Claude.md
├── .gitignore
├── gradle.properties
├── gradlew.bat
├── CLAUDE.md
├── README.md
└── gradlew
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "java.configuration.updateBuildConfiguration": "interactive"
3 | }
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 | rootProject.name='AIImageAnalysis4Tasker'
3 | //include ':opencv'
4 |
--------------------------------------------------------------------------------
/source asset/icon 2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/source asset/icon 2.png
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/app/src/main/res/play_store_512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/play_store_512.png
--------------------------------------------------------------------------------
/app/src/main/res/playstore-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/playstore-icon.png
--------------------------------------------------------------------------------
/app/src/main/res/ic_launcher-web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/ic_launcher-web.png
--------------------------------------------------------------------------------
/source asset/HumanDetection4Tasker.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/source asset/HumanDetection4Tasker.zip
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/bg_splash.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/drawable-xhdpi/bg_splash.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/human_detection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/human_detection.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/opencv4tasker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/opencv4tasker.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/human_detection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/human_detection.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/opencv4tasker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/opencv4tasker.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/opencv4tasker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/opencv4tasker.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/opencv4tasker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/opencv4tasker.png
--------------------------------------------------------------------------------
/app/src/main/res/image_download_play_store_512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/image_download_play_store_512.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ai_image_analyzer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/ai_image_analyzer.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ai_image_analyzer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/ai_image_analyzer.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/human_detection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/human_detection.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/human_detection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/human_detection.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/opencv4tasker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/opencv4tasker.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ai_image_analyzer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/ai_image_analyzer.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ai_image_analyzer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/ai_image_analyzer.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ai_image_analyzer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/ai_image_analyzer.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/human_detection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/human_detection.png
--------------------------------------------------------------------------------
/app/src/main/res/values/camera_play_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #c37559
4 |
--------------------------------------------------------------------------------
/app/src/main/res/values/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #ddd8c7
4 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ai_image_analyzer_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/ai_image_analyzer_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/opencv4tasker_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/opencv4tasker_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/opencv4tasker_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/opencv4tasker_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/opencv4tasker_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/opencv4tasker_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ai_image_analyzer_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/ai_image_analyzer_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/opencv4tasker_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/opencv4tasker_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/opencv4tasker_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/opencv4tasker_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/opencv4tasker_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/opencv4tasker_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ai_image_analyzer_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/ai_image_analyzer_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/opencv4tasker_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/opencv4tasker_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/opencv4tasker_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/opencv4tasker_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/opencv4tasker_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/opencv4tasker_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ai_image_analyzer_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/ai_image_analyzer_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values/camera_pause_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #c37559
4 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/human_detection_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/human_detection_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/human_detection_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/human_detection_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/human_detection_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/human_detection_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/human_detection_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/human_detection_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/human_detection_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/human_detection_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/human_detection_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/human_detection_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/human_detection_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/human_detection_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/human_detection_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/human_detection_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/human_detection_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/human_detection_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/opencv4tasker_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/opencv4tasker_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/opencv4tasker_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/opencv4tasker_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/opencv4tasker_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/opencv4tasker_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ai_image_analyzer_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/ai_image_analyzer_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/opencv4tasker_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/opencv4tasker_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/opencv4tasker_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/opencv4tasker_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/opencv4tasker_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/opencv4tasker_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ai_image_analyzer_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-hdpi/ai_image_analyzer_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ai_image_analyzer_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-mdpi/ai_image_analyzer_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ai_image_analyzer_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xhdpi/ai_image_analyzer_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ai_image_analyzer_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/ai_image_analyzer_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/human_detection_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/human_detection_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/human_detection_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/human_detection_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/human_detection_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxhdpi/human_detection_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/human_detection_background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/human_detection_background.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/human_detection_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/human_detection_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/human_detection_monochrome.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/human_detection_monochrome.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ai_image_analyzer_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/res/mipmap-xxxhdpi/ai_image_analyzer_foreground.png
--------------------------------------------------------------------------------
/app/src/main/assets/lite-model_efficientdet_lite0_detection_metadata_1.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimoneAvogadro/HumanDetection4Tasker/HEAD/app/src/main/assets/lite-model_efficientdet_lite0_detection_metadata_1.tflite
--------------------------------------------------------------------------------
/.claude/settings.local.json:
--------------------------------------------------------------------------------
1 | {
2 | "permissions": {
3 | "allow": [
4 | "WebSearch",
5 | "Bash(mkdir:*)",
6 | "WebFetch(domain:stackoverflow.com)"
7 | ],
8 | "deny": [],
9 | "ask": []
10 | }
11 | }
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | .idea
4 | /local.properties
5 | /.idea/workspace.xml
6 | /.idea/libraries
7 | .DS_Store
8 | /build
9 | /release
10 | /captures
11 | .externalNativeBuild
12 |
13 | .gradle/
14 | .idea/
15 | build/
16 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ai_image_analyzer.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Thu Jun 04 14:32:15 CST 2020
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-all.zip
7 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ai_image_analyzer_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/NotificationRaiser.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 | import android.content.Context
3 |
4 | object NotificationRaiser {
5 | public fun raiseAlarmEvent(c: Context?, b: Any) {
6 | c?.triggerTaskerEventNotificationIntercepted(b)
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #008577
4 | #00574B
5 | #D81B60
6 |
7 |
8 |
9 | #000000
10 |
11 |
12 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/opencv4tasker.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/human_detection.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/AnalyzeImageOutput.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import com.joaomgcd.taskerpluginlibrary.output.TaskerOutputObject
4 | import com.joaomgcd.taskerpluginlibrary.output.TaskerOutputVariable
5 |
/**
 * Tasker output container for the "analyze image" action.
 * The value is surfaced to Tasker as the %response variable.
 */
@TaskerOutputObject
class AnalyzeImageOutput(
    // Full text answer produced by the selected AI engine.
    @get:TaskerOutputVariable("response") var response: String? = ""
)
10 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/CancelNotificationInput.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputField
4 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputRoot
5 |
/**
 * Tasker input for the "cancel notification" action.
 */
@TaskerInputRoot
class CancelNotificationInput @JvmOverloads constructor(
    // Key of the status-bar notification to cancel — presumably the key
    // delivered by the notification interception event; TODO confirm.
    @field:TaskerInputField("notificationKey") var notificationKey: String? = null
)
--------------------------------------------------------------------------------
/app/src/main/res/menu/main_menu.xml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/DetectHumansInput.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputField
4 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputRoot
5 |
/**
 * Tasker input for the "detect humans" action.
 */
@TaskerInputRoot
class DetectHumansInput @JvmOverloads constructor(
    // Path or URI of the image to analyze (file:// or content:// are both
    // handled downstream — see Util.contentToFile).
    @field:TaskerInputField("imagePath") var imagePath: String? = null,
    // Engine selector string; null presumably falls back to a default
    // engine — TODO confirm against the action helper.
    @field:TaskerInputField("engine") var engine: String? = null
)
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/DetectHumansOutput.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import com.joaomgcd.taskerpluginlibrary.output.TaskerOutputObject
4 | import com.joaomgcd.taskerpluginlibrary.output.TaskerOutputVariable
5 |
/**
 * Tasker-visible result of the "detect humans" action, surfaced as the
 * %detectionscore and %detectionresponse variables.
 */
@TaskerOutputObject
class DetectHumansOutput(
    // Best detection score; scale is engine dependent.
    @get:TaskerOutputVariable("detectionScore") var detectionScore: Int? = 0,
    // Raw textual response returned by the selected engine.
    @get:TaskerOutputVariable("detectionResponse") var detectionResponse: String? = ""
)
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/app/OpenCV4TaskerApplication.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.app;
2 |
3 | import android.app.Application;
4 |
5 |
public class OpenCV4TaskerApplication extends Application {

    // Partner identifier kept in both int and String form so callers can use
    // whichever they need; exact meaning not evident here — TODO confirm.
    public static int partnerId = 8;
    public static String partnerIdS = "8";

    // Process-wide singleton, assigned in onCreate(); the Application instance
    // is created before any other app component, so reads after startup are safe.
    private static OpenCV4TaskerApplication instance;

    /** Returns the singleton Application instance (null before onCreate() runs). */
    public static OpenCV4TaskerApplication getInstance() {
        return instance;
    }
    @Override
    public void onCreate() {
        super.onCreate();
        instance = this;

    }
}
23 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/AnalyzeImageInput.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputField
4 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputRoot
5 |
/**
 * Tasker input for the general-purpose "analyze image" action.
 */
@TaskerInputRoot
class AnalyzeImageInput @JvmOverloads constructor(
    // Path or URI of the image to analyze.
    @field:TaskerInputField("imagePath") var imagePath: String? = null,
    // Engine selector string; null presumably selects a default — TODO confirm.
    @field:TaskerInputField("engine") var engine: String? = null,
    // Instructions for the AI (system role).
    @field:TaskerInputField("systemPrompt") var systemPrompt: String? = null,
    // The user's specific question about the image.
    @field:TaskerInputField("userPrompt") var userPrompt: String? = null
)
13 |
--------------------------------------------------------------------------------
/How to create API keys for Google and Claude.md:
--------------------------------------------------------------------------------
1 |
2 | How to get a Google Gemini API key:
3 | * go to https://aistudio.google.com/apikey
4 | * register/login if needed
5 | * press the "+ Create API Key" button in the top right corner of the page
6 |
7 | How to get an Anthropic Claude API key:
8 | * go to https://console.anthropic.com/settings/keys
9 | * register/login if needed
10 | * press the "+ Create Key" button in the top right corner of the page
11 |
How to get an OpenRouter API key:
13 | * go to https://openrouter.ai/settings/keys
14 | * register/login if needed
15 | * press the blue "Create API Key" button in the top right corner of the page
16 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_splash.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
14 |
15 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/CancelNotificationOutput.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputField
4 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputRoot
5 | import com.joaomgcd.taskerpluginlibrary.output.TaskerOutputObject
6 | import com.joaomgcd.taskerpluginlibrary.output.TaskerOutputVariable
7 |
/**
 * Combined Tasker input/output holder for the "cancel notification" action:
 * annotated both as an input root and an output object, so the same fields
 * are parsed from configuration and surfaced back to Tasker as the
 * %success and %message variables.
 */
@TaskerInputRoot
@TaskerOutputObject
class CancelNotificationOutput @JvmOverloads constructor(
    // True when the notification was cancelled.
    @get:TaskerOutputVariable("success")
    @TaskerInputField(key="success")
    var success: Boolean = false,

    // Human-readable status or error description.
    @get:TaskerOutputVariable("message")
    @TaskerInputField(key="message")
    var message: String = ""
)
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 | -keep class org.opencv.** { *; }
23 | -keep class org.opencv.engine.OpenCVEngineInterface
--------------------------------------------------------------------------------
/app/src/androidTest/java/online/avogadro/aws4tasker/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package com.meari.test;
2 |
3 | import android.content.Context;
4 |
5 | import androidx.test.platform.app.InstrumentationRegistry;
6 | import androidx.test.ext.junit.runners.AndroidJUnit4;
7 |
8 | import org.junit.Test;
9 | import org.junit.runner.RunWith;
10 |
11 | import static org.junit.Assert.*;
12 |
13 | /**
14 | * Instrumented test, which will execute on an Android device.
15 | *
16 | * @see Testing documentation
17 | */
18 | @RunWith(AndroidJUnit4.class)
19 | public class ExampleInstrumentedTest {
20 | @Test
21 | public void useAppContext() {
22 | // Context of the app under test.
23 | Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
24 |
25 | assertEquals("online.avogadro.aws4tasker", appContext.getPackageName());
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | .idea
4 | /local.properties
5 | /.idea/workspace.xml
6 | /.idea/libraries
7 | .DS_Store
8 | /build
9 | /release
10 | /captures
11 | .externalNativeBuild
12 |
13 | /app/debug
14 |
15 | .gradle/
16 | .idea/
17 | build/
18 | /opencv/.cxx/Debug/
19 | /opencv/.cxx/tools/debug/
20 | /opencv/native/bin/armeabi-v7a/opencv_test_calib3d
21 | /opencv/native/bin/armeabi-v7a/opencv_test_core
22 | /opencv/native/bin/armeabi-v7a/opencv_test_dnn
23 | /opencv/native/bin/armeabi-v7a/opencv_test_features2d
24 | /opencv/native/bin/armeabi-v7a/opencv_test_flann
25 | /opencv/native/bin/armeabi-v7a/opencv_test_gapi
26 | /opencv/native/bin/armeabi-v7a/opencv_test_highgui
27 | /opencv/native/bin/armeabi-v7a/opencv_test_imgcodecs
28 | /opencv/native/bin/armeabi-v7a/opencv_test_imgproc
29 | /opencv/native/bin/armeabi-v7a/opencv_test_ml
30 | /opencv/native/bin/armeabi-v7a/opencv_test_objdetect
31 | /opencv/native/bin/armeabi-v7a/opencv_test_photo
32 | /opencv/native/bin/armeabi-v7a/opencv_test_stitching
33 | /opencv/native/bin/armeabi-v7a/opencv_test_video
34 | /opencv/native/bin/armeabi-v7a/opencv_test_videoio
35 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/ai/AIImageAnalyzer.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.ai;
2 |
3 | import android.content.Context;
4 |
5 | import org.json.JSONException;
6 |
7 | import java.io.IOException;
8 |
9 | /**
10 | * Common interface for AI-based image analysis
11 | */
public interface AIImageAnalyzer {
    /**
     * Set up the analyzer with necessary API keys and configurations.
     *
     * @param ctx Android context
     * @throws IOException if setup fails (e.g. configuration cannot be read)
     */
    void setup(Context ctx) throws IOException;

    /**
     * Analyze an image with AI using provided prompts.
     *
     * @param systemPrompt Instructions for the AI
     * @param userPrompt User's specific question about the image
     * @param imagePath Path to the image file
     * @return The AI's complete response as a string
     * @throws IOException if the image cannot be read or the request fails
     * @throws JSONException if the engine response cannot be parsed
     */
    String analyzeImage(String systemPrompt, String userPrompt, String imagePath) throws IOException, JSONException;

    /**
     * Get the last response from the AI.
     *
     * @return the most recent response; value before any call is
     *         implementation-specific — presumably null or empty, TODO confirm
     */
    String getLastResponse();

    /**
     * Get error information if the last call failed.
     *
     * @return implementation-specific error details for the most recent failure
     */
    String getLastError();
}
38 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx4096m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 | android.defaults.buildfeatures.buildconfig=true
21 | android.nonTransitiveRClass=false
22 | android.nonFinalResIds=false
23 |
--------------------------------------------------------------------------------
/app/REMOVED-google-services.json:
--------------------------------------------------------------------------------
1 | {
2 | "project_info": {
3 | "project_id": "HumanDetection4Tasker-4f843",
4 | "project_number": "973182941123",
5 | "name": "HumanDetection Tasker Plugin"
6 | },
7 | "client": [
8 | {
9 | "client_info": {
10 | "mobilesdk_app_id": "1:629182941492:android:765f46cdf2b4779e",
11 | "client_id": "android:online.avogadro.opencv4tasker",
12 | "client_type": 1,
13 | "android_client_info": {
14 | "package_name": "online.avogadro.opencv4tasker"
15 | }
16 | },
17 | "oauth_client": [],
18 | "api_key": [
19 | {
20 | "current_key": "BHzaSyASz7Fo1191jy1gfdEiga6PXGF2qMpvPpQ"
21 | }
22 | ],
23 | "services": {
24 | "analytics_service": {
25 | "status": 0
26 | },
27 | "cloud_messaging_service": {
28 | "status": 0,
29 | "apns_config": []
30 | },
31 | "appinvite_service": {
32 | "status": 0,
33 | "other_platform_oauth_client": []
34 | },
35 | "google_signin_service": {
36 | "status": 0
37 | },
38 | "ads_service": {
39 | "status": 0
40 | }
41 | }
42 | }
43 | ],
44 | "client_info": [],
45 | "ARTIFACT_VERSION": "1"
46 | }
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | AI Image Analysis Tasker Plugin
3 | account
4 | Success
5 | Failed
6 |
7 |
8 | Notification Title
9 | The title of the intercepted notification
10 | Notification Text
11 | The text content of the intercepted notification
12 | Image Path
13 | Path to the extracted notification image
14 | App Package
15 | Package name of the app that sent the notification
16 | App Name
17 | Human-readable name of the app that sent the notification
18 |
19 |
20 | Claude Sonnet 4.5 (online)
21 | Gemini 2.5 Flash (online)
22 | OpenRouter (online)
23 | Tensorflow Lite (local device)
24 |
25 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_config_cancel_notification.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
17 |
18 |
22 |
23 |
31 |
32 |
39 |
40 |
45 |
46 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
12 |
13 |
19 |
22 |
25 |
26 |
27 |
28 |
34 |
35 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_config_detect_humans.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
13 |
14 |
21 |
22 |
27 |
28 |
31 |
32 |
37 |
38 |
43 |
44 |
49 |
50 |
56 |
57 |
58 |
63 |
64 |
65 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem NOTE(review): generated Gradle wrapper script; do not edit by hand —
@rem regenerate via the `gradle wrapper` task if it needs updating.

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

@rem %* carries every argument through to the wrapper main class below.
set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/app/Util.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.app;
2 |
3 | import android.content.ContentResolver;
4 | import android.content.Context;
5 | import android.content.pm.ApplicationInfo;
6 | import android.content.pm.PackageManager;
7 | import android.net.Uri;
8 | import android.os.Bundle;
9 | import android.util.Log;
10 |
11 | import java.io.File;
12 | import java.io.FileOutputStream;
13 | import java.io.IOException;
14 | import java.io.InputStream;
15 |
16 | import online.avogadro.opencv4tasker.opencv.HumansDetector;
17 |
18 | public class Util {
19 |
20 | private static final String TAG = "Util";
21 |
22 | public static String getMetadata(Context c, String key) {
23 | try {
24 | ApplicationInfo ai = c.getPackageManager().getApplicationInfo(c.getPackageName(),
25 | PackageManager.GET_META_DATA);
26 |
27 | Bundle metaData = ai.metaData;
28 |
29 | return metaData.getString(key, "8");
30 | } catch (PackageManager.NameNotFoundException e) {
31 | throw new RuntimeException(e);
32 | }
33 | }
34 |
35 | public static int getMetadataInt(Context c, String key) {
36 | try {
37 | ApplicationInfo ai = c.getPackageManager().getApplicationInfo(c.getPackageName(),
38 | PackageManager.GET_META_DATA);
39 | Bundle metaData = ai.metaData;
40 |
41 | return metaData.getInt(key,8);
42 | } catch (PackageManager.NameNotFoundException e) {
43 | throw new RuntimeException(e);
44 | }
45 | }
46 |
47 | public static String getPathFromUri(Context context, Uri uri) throws IOException {
48 | ContentResolver resolver = context.getContentResolver();
49 | InputStream inputStream = resolver.openInputStream(uri);
50 |
51 | // Create temporary file
52 | File tempFile = File.createTempFile("temp_image", ".jpg", context.getCacheDir());
53 | tempFile.deleteOnExit();
54 |
55 | // Copy input stream to temporary file
56 | FileOutputStream out = new FileOutputStream(tempFile);
57 | byte[] buffer = new byte[64*1024];
58 | int bytesRead;
59 | while ((bytesRead = inputStream.read(buffer)) != -1) {
60 | out.write(buffer, 0, bytesRead);
61 | }
62 | out.flush();
63 | out.close();
64 | inputStream.close();
65 |
66 | return tempFile.getAbsolutePath();
67 | }
68 |
69 | /**
70 | * OpenCV and other libs are unable to handle content:// URIs
71 | * This method handls this for them by copying to a temporary file
72 | *
73 | * @param path
74 | * @return
75 | */
76 | public static String contentToFile(Context context, String path) throws IOException {
77 | if (path.startsWith("file:")) {
78 | return path;
79 | } else if (path.startsWith("content:")) {
80 | return getPathFromUri(context, Uri.parse(path));
81 | } else {
82 | Log.w(TAG,"formato path sconosciuto");
83 | return path;
84 | }
85 |
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/app/SharedPreferencesHelper.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.app;
2 | import android.content.Context;
3 | import android.content.SharedPreferences;
4 | import android.util.Base64;
5 |
6 | import javax.crypto.Cipher;
7 | import javax.crypto.spec.SecretKeySpec;
8 |
public class SharedPreferencesHelper {
    // Name of the backing SharedPreferences file.
    private static final String PREFS_NAME = "MyAppPreferences";
    // Keys under which the (encrypted) settings are stored.
    public static final String CLAUDE_API_KEY = "CLAUDE_API_KEY";
    public static final String GEMINI_API_KEY = "GEMINI_API_KEY";
    public static final String OPENROUTER_API_KEY = "OPENROUTER_API_KEY";
    public static final String OPENROUTER_MODEL = "OPENROUTER_MODEL";
    public static final String LAST_IMAGE_PATH = "LAST_IMAGE_PATH";
    public static final String NOTIFICATION_EVENT_ENABLED = "NOTIFICATION_EVENT_ENABLED";
    // SECURITY NOTE(review): hard-coded symmetric key shipped inside the APK —
    // anyone with the APK can decrypt stored values, so this is obfuscation,
    // not protection. Consider the Android Keystore. Changing the key or the
    // cipher now would break values already stored on users' devices.
    private static final String PASSWORD = "u2fg393ujk.%!kspa5fg393ujk.%!kra"; // Not recommended

    /** Encrypts {@code value} and stores it under {@code key} (async via apply()). */
    public static void save(Context context, String key, String value) {
        SharedPreferences prefs = context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE);
        String encryptedValue = encrypt(value, PASSWORD);
        prefs.edit().putString(key, encryptedValue).apply();
    }

    /**
     * Returns the decrypted value stored under {@code key},
     * or the empty string when the key has never been saved.
     */
    public static String get(Context context, String key) {
        SharedPreferences prefs = context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE);
        String val = prefs.getString(key, "");
        // Empty means "not set" — nothing to decrypt.
        if ("".equals(val))
            return val;
        return decrypt(val, PASSWORD);
    }

    /** Stores a plain (unencrypted) boolean flag. */
    public static void saveBoolean(Context context, String key, boolean value) {
        SharedPreferences prefs = context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE);
        prefs.edit().putBoolean(key, value).apply();
    }

    /** Reads a plain boolean flag, falling back to {@code defaultValue}. */
    public static boolean getBoolean(Context context, String key, boolean defaultValue) {
        SharedPreferences prefs = context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE);
        return prefs.getBoolean(key, defaultValue);
    }

    // AES-encrypts value and Base64-encodes the result.
    // NOTE(review): Cipher.getInstance("AES") resolves to AES/ECB/PKCS5Padding
    // on Android — ECB leaks plaintext patterns. Also, getBytes() uses the
    // platform default charset. Kept as-is for compatibility with stored data.
    private static String encrypt(String value, String password) {
        try {
            SecretKeySpec key = new SecretKeySpec(password.getBytes(), "AES");
            Cipher cipher = Cipher.getInstance("AES");
            cipher.init(Cipher.ENCRYPT_MODE, key);
            byte[] encryptedValueBytes = cipher.doFinal(value.getBytes());
            return Base64.encodeToString(encryptedValueBytes, Base64.DEFAULT);
        } catch (Exception e) {
            // Crypto failure here indicates a programming/platform error.
            throw new RuntimeException(e);
        }
    }

    // Inverse of encrypt(): Base64-decodes then AES-decrypts.
    private static String decrypt(String value, String password) {
        try {
            SecretKeySpec key = new SecretKeySpec(password.getBytes(), "AES");
            Cipher cipher = Cipher.getInstance("AES");
            cipher.init(Cipher.DECRYPT_MODE, key);
            byte[] originalValueBytes = cipher.doFinal(Base64.decode(value, Base64.DEFAULT));
            return new String(originalValueBytes);
        } catch (Exception e) {
            // Thrown e.g. for corrupted stored data; surfaced to the caller.
            throw new RuntimeException(e);
        }
    }
}
67 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | apply plugin: 'org.jetbrains.kotlin.android'
3 | // apply plugin: 'com.google.gms.google-services'
4 |
android {
    compileSdkVersion 34
    // NOTE(review): buildToolsVersion 30.0.3 predates compileSdk 34; recent
    // Android Gradle Plugin versions ignore this setting and use their own
    // default — confirm and consider removing the line.
    buildToolsVersion "30.0.3"
    // viewBinding true
    buildFeatures {
        viewBinding true
        dataBinding true
    }
    defaultConfig {
        applicationId "online.avogadro.opencv4tasker"
        minSdkVersion 30
        targetSdkVersion 33
        versionCode 432
        versionName "4.3.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
        multiDexEnabled = true

        ndk {
            // Ship native libraries for all common ABIs.
            abiFilters 'armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'
        }
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    sourceSets {
        main {
            // Pick up prebuilt .so files from app/libs.
            jniLibs.srcDirs = ['libs']
        }
    }
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_11
        targetCompatibility JavaVersion.VERSION_11
    }

    kotlinOptions {
        jvmTarget = '11'
    }

    namespace 'online.avogadro.opencv4tasker'
}
49 |
50 | repositories {
51 | flatDir {
52 | dirs 'libs'
53 | }
54 | }
55 |
dependencies {
    // implementation fileTree(dir: '../libs', include: ['*.jar'])
    implementation 'androidx.appcompat:appcompat:1.7.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
    implementation 'androidx.core:core-ktx:1.13.1'
    testImplementation 'junit:junit:4.13.2'
    androidTestImplementation 'androidx.test.ext:junit:1.1.5'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1'

    // ML Kit Object Detection
    // implementation 'com.google.mlkit:object-detection:17.0.1'

    // Tensorflow based detection
    implementation 'org.tensorflow:tensorflow-lite:2.5.0'
    // tensorflow-lite-support was declared twice (0.3.0 and 0.4.0); only the
    // newer 0.4.0 is kept — Gradle's conflict resolution selected it anyway.
    implementation 'org.tensorflow:tensorflow-lite-support:0.4.0'
    implementation 'org.tensorflow:tensorflow-lite-metadata:0.1.0'
    implementation 'org.tensorflow:tensorflow-lite-task-vision:0.4.0'

    // implementation project(':opencv')

    // implementation 'com.squareup.okhttp3:okhttp:3.12.0'
    // implementation 'com.alibaba:fastjson:1.1.67.android'
    // implementation 'com.google.code.gson:gson:2.10'
    // implementation 'com.google.zxing:core:3.3.3'
    implementation 'androidx.localbroadcastmanager:localbroadcastmanager:1.1.0'

    // implementation 'io.reactivex.rxjava2:rxjava:2.2.6'
    // implementation 'io.reactivex.rxjava2:rxandroid:2.1.1'

    // others
    implementation 'androidx.multidex:multidex:2.0.1'
    implementation 'com.google.android.material:material:1.12.0'
    // implementation 'com.github.bumptech.glide:glide:4.11.0'


    // Tasker plugin
    implementation 'com.joaomgcd:taskerpluginlibrary:0.4.10'


}
97 |
--------------------------------------------------------------------------------
/CLAUDE.md:
--------------------------------------------------------------------------------
1 | # CLAUDE.md
2 |
3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
4 |
5 | ## Project Overview
6 |
7 | AWS4Tasker (formerly OpenCV4Tasker) is an Android plugin for Tasker and MacroDroid that provides AI-powered image analysis capabilities. The app can detect humans in images and perform general-purpose image analysis using multiple AI engines including Claude AI, Google Gemini, and TensorFlow Lite.
8 |
9 | ## Build Commands
10 |
11 | ```bash
12 | # Build the project
13 | ./gradlew build
14 |
15 | # Build debug APK
16 | ./gradlew assembleDebug
17 |
18 | # Build release APK
19 | ./gradlew assembleRelease
20 |
21 | # Clean build
22 | ./gradlew clean
23 |
24 | # Run tests
25 | ./gradlew test
26 | ./gradlew connectedAndroidTest
27 | ```
28 |
29 | ## Architecture Overview
30 |
31 | ### Core Components
32 |
33 | - **Application Class**: `OpenCV4TaskerApplication` - Main application entry point with singleton pattern
34 | - **Activities**:
35 | - `SplashActivity` - Launcher activity
36 | - `MainActivity` - Main UI for testing image analysis
37 | - `ConfigActivity` - General configuration
38 | - **AI Engines**: Multiple implementations of image analysis:
39 | - `HumansDetectorClaudeAI` - Claude AI integration for human detection
40 | - `HumansDetectorGemini` - Google Gemini integration
41 | - `HumansDetectorTensorFlow` - Local TensorFlow Lite processing
42 | - `AIImageAnalyzer` - Common interface for AI-based image analysis
43 |
44 | ### Tasker Plugin System
45 |
46 | The app integrates with Tasker/MacroDroid through:
47 | - **Actions**:
48 | - `DetectHumansActionHelper` - Human detection in images
49 | - `AnalyzeImageActionHelper` - General AI image analysis
50 | - **Events**:
51 | - `NotificationInterceptedEvent` - Intercepts notifications with images
52 | - **Configuration Activities**:
53 | - `ActivityConfigDetectHumansAction`
54 | - `ActivityConfigAnalyzeImageAction`
55 | - `ActivityConfigNotificationInterceptedEvent`
56 |
57 | ### Notification Interception
58 |
59 | New notification interception system includes:
60 | - `NotificationInterceptorService` - Core notification listener service
61 | - `NotificationImageExtractor` - Extracts images from notifications
62 | - `NotificationFileManager` - Manages temporary image files
63 |
64 | ### Key Dependencies
65 |
66 | - Tasker Plugin Library: `com.joaomgcd:taskerpluginlibrary:0.4.10`
67 | - TensorFlow Lite: `org.tensorflow:tensorflow-lite:2.5.0` with related libraries
68 | - AndroidX libraries for modern Android development
69 | - Kotlin support with Java interop
70 |
71 | ## Development Notes
72 |
73 | - **Target SDK**: 33, **Min SDK**: 30 (Android 11+)
74 | - **Language**: Mixed Java/Kotlin codebase
75 | - **Permissions**: Requires storage, internet, notification access, and battery optimization bypass
- **Build Tools**: Gradle 8.2 via the wrapper (see `gradle/wrapper/gradle-wrapper.properties`)
77 | - The project uses view binding and data binding
78 | - TensorFlow models are stored in `app/src/main/assets/`
79 | - Package name: `online.avogadro.opencv4tasker`
80 |
81 | ## Engine Configuration
82 |
The app supports four AI engines selected via radio buttons:
- **CLAUDE**: Cloud-based Claude AI analysis
- **GEMINI**: Google Gemini integration
- **OPENROUTER**: Cloud models accessed through OpenRouter
- **TENSORFLOW**: Local TensorFlow Lite processing (default for backward compatibility)
87 |
88 | Engine selection is persisted using `SharedPreferencesHelper` and each engine implements the `AIImageAnalyzer` interface for consistency.
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 |
15 |
16 |
24 |
25 |
31 |
32 |
33 |
34 |
37 |
38 |
43 |
44 |
49 |
50 |
55 |
56 |
62 |
63 |
64 |
69 |
70 |
77 |
78 |
90 |
91 |
92 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_config_analyze_image.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
13 |
14 |
19 |
20 |
27 |
28 |
33 |
34 |
37 |
38 |
43 |
44 |
49 |
50 |
55 |
56 |
57 |
62 |
63 |
71 |
72 |
77 |
78 |
86 |
87 |
93 |
94 |
95 |
96 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/opencv/HumansDetector.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.opencv;
2 |
3 | import android.content.Context;
4 | import android.util.Log;
5 |
6 | //import org.opencv.android.OpenCVLoader;
7 | //import org.opencv.core.Mat;
8 | //import org.opencv.core.MatOfDouble;
9 | //import org.opencv.core.MatOfRect;
10 | //import org.opencv.core.Rect;
11 | //import org.opencv.imgcodecs.Imgcodecs;
12 | //import org.opencv.objdetect.HOGDescriptor;
13 |
14 | import java.io.File;
15 | import java.io.IOException;
16 |
17 | import online.avogadro.opencv4tasker.app.Util;
18 |
19 | /**
19 |  * Legacy OpenCV-based human detector (HOG + default people detector SVM).
19 |  *
19 |  * The whole implementation is commented out: OpenCV support was removed in
19 |  * v1.1.0 in favor of TensorFlow Lite and cloud engines (see README).
19 |  * NOTE(review): presumably kept for reference in case OpenCV is ever
19 |  * reintroduced — confirm before deleting this shell class.
19 |  */
19 | public class HumansDetector {
20 | //
21 | // private static final String TAG = "HumansDetector";
22 | //
23 | // /**
24 | // * Detect humans and return the highest score
25 | // * @param path in the form of file:///{something} or content:///{something}
26 | // * @return 0-100+, lower values are lower scores. '-1' is a failure
27 | // */
28 | // public static int detectHumans(Context context, String path) {
29 | // if (!OpenCVLoader.initLocal()) {
30 | // Log.e(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
31 | // // Toast.makeText(this,"FAILED TO INIT OpenCV",Toast.LENGTH_SHORT).show();
32 | // return -1;
33 | // } else {
34 | // Log.d(TAG, "OpenCV library found inside package. Using it!");
35 | // }
36 | //
37 | // String newPath = null;
38 | // try {
39 | // newPath = Util.contentToFile(context,path);
40 | // return detectHumansFromFile(newPath);
41 | // } catch (IOException e) {
42 | // Log.e(TAG, "Failed to parse file name "+path,e);
43 | // return -1;
44 | // } finally {
45 | // if (newPath!=null && !path.equals(newPath))
46 | // new File(newPath).delete();
47 | // }
48 | // }
49 | //
50 | // /**
51 | // * Detect humans and return the highest score
52 | // * @param path in the form of file:///{something}
53 | // * @return 0-100+, lower values are lower scores. '-1' is a failure
54 | // */
55 | // private static int detectHumansFromFile(String path) {
56 | // try {
57 | // // Load the image from Uri
58 | // Mat image = Imgcodecs.imread(path);
59 | //
60 | // if (image.empty()) {
61 | // if (!new File(path).canRead())
62 | // Log.e(TAG,"File does not exist or missing access rights");
63 | // else if (!Imgcodecs.haveImageReader(path))
64 | // Log.e(TAG,"File exists but image format is unknown");
65 | // else
66 | // Log.e(TAG,"Failed to parse the image in the existing file");
67 | //
68 | // return -1;
69 | // }
70 | //
71 | // // Create HOG descriptor and set SVM detector
72 | // HOGDescriptor hog = new HOGDescriptor();
73 | // hog.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());
74 | //
75 | // // Detect humans in the image
76 | // MatOfRect detections = new MatOfRect();
77 | // MatOfDouble foundWeights = new MatOfDouble();
78 | // hog.detectMultiScale(image, detections, foundWeights);
79 | //
80 | // Rect[] rects = detections.toArray();
81 | // if (rects.length==0) {
82 | // Log.d(TAG,"nothing detected");
83 | // return 0;
84 | // }
85 | // double[] weights = foundWeights.toArray();
86 | //
87 | // double max=0;
88 | // for (int i = 0; i < rects.length; i++) {
89 | // max = Math.max(max,weights[i]);
90 | // }
91 | //
92 | // return (int)(max*100);
93 | //
94 | // } catch (Exception e) {
95 | // Log.e(TAG,"error processing image: "+e.getMessage(),e);
96 | // return -1;
97 | // }
98 | // }
99 | }
100 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_config.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
14 |
15 |
21 |
22 |
30 |
31 |
32 |
33 |
37 |
38 |
44 |
45 |
53 |
54 |
55 |
56 |
60 |
61 |
67 |
68 |
76 |
77 |
78 |
79 |
83 |
84 |
90 |
91 |
99 |
100 |
101 |
102 |
107 |
108 |
114 |
115 |
116 |
117 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_config_notification_intercepted_event.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
16 |
17 |
22 |
23 |
29 |
30 |
35 |
36 |
42 |
43 |
48 |
49 |
50 |
51 |
57 |
58 |
65 |
66 |
73 |
74 |
81 |
82 |
88 |
89 |
95 |
96 |
101 |
102 |
103 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tensorflowlite/HumansDetectorTensorFlow.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tensorflowlite;
2 |
3 | import android.content.Context;
4 | import android.content.res.AssetFileDescriptor;
5 | import android.graphics.Bitmap;
6 | import android.graphics.BitmapFactory;
7 | import android.os.Bundle;
8 | import android.util.Log;
9 |
10 | import java.io.File;
11 | import java.io.FileInputStream;
12 | import java.io.IOException;
13 |
14 | import org.tensorflow.lite.support.image.TensorImage;
15 | import org.tensorflow.lite.task.vision.detector.Detection;
16 | import org.tensorflow.lite.task.vision.detector.ObjectDetector;
17 |
18 |
19 | import java.io.IOException;
20 | import java.nio.MappedByteBuffer;
21 | import java.nio.channels.FileChannel;
22 | import java.util.List;
23 |
24 | import online.avogadro.opencv4tasker.app.Util;
25 |
26 | public class HumansDetectorTensorFlow {
27 | private ObjectDetector objectDetector;
28 | static final String TAG = "HumansDetectorTensorFlow";
29 |
30 | /**
31 | * Detect humans and return the highest score
32 | * @param path in the form of file:///{something} or content:///{something}
33 | * @return 0-100+, lower values are lower scores. '-1' is a failure
34 | */
35 | public static int detectHumans(Context context, String path) throws IOException {
36 | HumansDetectorTensorFlow htc = new HumansDetectorTensorFlow();
37 | htc.setup(context);
38 | return htc.detectPerson(context,path);
39 | }
40 |
41 | public void setup(Context ctx) throws IOException {
42 | objectDetector = ObjectDetector.createFromBufferAndOptions(
43 | loadModelFile(ctx),
44 | ObjectDetector.ObjectDetectorOptions.builder()
45 | .setMaxResults(5)
46 | .setScoreThreshold(0.5f)
47 | .build());
48 | }
49 |
50 | private MappedByteBuffer loadModelFile(Context ctx) throws IOException {
51 | try (
52 | // Model source: https://www.kaggle.com/models/kaggle/yolo-v5
53 | // AssetFileDescriptor fileDescriptor = ctx.getAssets().openFd("yolo-v5-tflite-tflite-tflite-model-v1.tflite");
54 | // Model source: https://www.kaggle.com/models/tensorflow/efficientdet/tfLite/lite0-detection-metadata/1?tfhub-redirect=true
55 | AssetFileDescriptor fileDescriptor = ctx.getAssets().openFd("lite-model_efficientdet_lite0_detection_metadata_1.tflite");
56 | FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor())
57 | ) {
58 | FileChannel fileChannel = inputStream.getChannel();
59 | long startOffset = fileDescriptor.getStartOffset();
60 | long declaredLength = fileDescriptor.getDeclaredLength();
61 | return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
62 | }
63 | }
64 |
65 | public int detectPerson(Context ctx, String imagePath) {
66 | String newPath = null;
67 | try {
68 | newPath = Util.contentToFile(ctx,imagePath);
69 | // Load image from disk
70 | Bitmap bitmap = BitmapFactory.decodeFile(newPath);
71 |
72 | // Convert bitmap to TensorImage
73 | TensorImage image = TensorImage.fromBitmap(bitmap);
74 |
75 | // Run inference
76 | List results = objectDetector.detect(image);
77 |
78 | // Process results
79 | float highestScore = 0f;
80 | for (Detection detection : results) {
81 | if (detection.getCategories().get(0).getLabel().equals("person")) {
82 | float score = detection.getCategories().get(0).getScore();
83 | if (score > highestScore) {
84 | highestScore = score;
85 | }
86 | }
87 | }
88 |
89 | // Convert the highest score to an integer in the range 0-100
90 | return Math.round(highestScore * 100);
91 | } catch (IOException e) {
92 | Log.e(TAG, "Failed to parse file name "+newPath,e);
93 | return -1;
94 | } finally {
95 | if (newPath!=null && !newPath.equals(imagePath))
96 | new File(newPath).delete();
97 | }
98 |
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/googleml/HumansDetectorGoogleML.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.googleml
2 |
3 | //import android.content.Context
4 | //import android.graphics.Rect
5 | //import android.net.Uri
6 | //import android.util.Log
7 | //import com.google.mlkit.vision.common.InputImage
8 | //import com.google.mlkit.vision.objects.ObjectDetection
9 | //import com.google.mlkit.vision.objects.defaults.ObjectDetectorOptions
10 | //import online.avogadro.opencv4tasker.app.Util
11 | //import java.io.File
12 | //import java.io.IOException
13 | //import java.util.concurrent.CountDownLatch
14 | //import java.util.concurrent.TimeUnit
15 |
16 | /**
16 |  * Google ML Kit based person detector — currently disabled.
16 |  *
16 |  * The entire implementation (and its imports at the top of the file) is
16 |  * commented out; the empty object is kept so the package still compiles.
16 |  * NOTE(review): presumably retained for possible reactivation — confirm
16 |  * before removing.
16 |  */
16 | object HumansDetectorGoogleML {
17 | // private val objectDetector by lazy {
18 | // val options = ObjectDetectorOptions.Builder()
19 | // .setDetectorMode(ObjectDetectorOptions.SINGLE_IMAGE_MODE)
20 | // .enableMultipleObjects()
21 | // .enableClassification()
22 | // .build()
23 | // ObjectDetection.getClient(options)
24 | // }
25 | //
26 | //
27 | // fun detectPersonBoundingBoxes(
28 | // image: InputImage,
29 | // onSuccess: (List) -> Unit,
30 | // onError: (Exception) -> Unit
31 | // ) {
32 | // objectDetector.process(image)
33 | // .addOnSuccessListener { detectedObjects ->
34 | // val personBounds = detectedObjects
35 | // .filter { it.labels.any { label -> label.text == "Person" } }
36 | // .map { it.boundingBox }
37 | // onSuccess(personBounds)
38 | // }
39 | // .addOnFailureListener { e ->
40 | // onError(e)
41 | // }
42 | // }
43 | //
44 | // fun detectPersonConfidence(
45 | // image: InputImage,
46 | // onSuccess: (Float) -> Unit,
47 | // onError: (Exception) -> Unit
48 | // ) {
49 | // objectDetector.process(image)
50 | // .addOnSuccessListener { detectedObjects ->
51 | // val highestConfidence = detectedObjects
52 | // .flatMap { it.labels }
53 | // .filter { it.text == "Person" }
54 | // .maxOf { it.confidence }
55 | // onSuccess(highestConfidence)
56 | // }
57 | // .addOnFailureListener { e ->
58 | // onError(e)
59 | // }
60 | // }
61 | //
62 | // fun detectPersonConfidence(
63 | // context: Context,
64 | // imagePath: String,
65 | // onSuccess: (Float) -> Unit,
66 | // onError: (Exception) -> Unit
67 | // ) {
68 | // detectPersonConfidence(
69 | // InputImage.fromFilePath(context, Uri.fromFile(File(imagePath))),
70 | // onSuccess,
71 | // onError);
72 | // }
73 | //
74 | // fun detectPersonConfidence(
75 | // context: Context,
76 | // imagePath: String
77 | // ): Int {
78 | // var newPath: String? = null
79 | // return try {
80 | // newPath = Util.contentToFile(context, imagePath)
81 | // detectPersonConfidenceInFile(context, newPath)
82 | // } catch (e: IOException) {
83 | // Log.e("HumansDetectorGoogleML", "Failed to parse file name $imagePath", e)
84 | // -1
85 | // } finally {
86 | // if (newPath != null && imagePath != newPath) File(newPath).delete()
87 | // }
88 | // }
89 | //
90 | // fun detectPersonConfidenceInFile(
91 | // context: Context,
92 | // imagePath: String
93 | // ): Int {
94 | // var confidence=-1f;
95 | // var exception:Exception?=null;
96 | // val latch = CountDownLatch(1)
97 | // val u = Uri.fromFile(File(imagePath))
98 | // val img = InputImage.fromFilePath(context, u)
99 | // detectPersonConfidence(
100 | // img,
101 | // { c ->
102 | // confidence=c
103 | // latch.countDown()
104 | // },
105 | // { e ->
106 | // exception = e
107 | // latch.countDown()
108 | // });
109 | //
110 | // latch.await(10, TimeUnit.SECONDS)
111 | //
112 | // if (exception!=null)
113 | // throw exception as Exception
114 | //
115 | // if (confidence==-1f)
116 | // return -1;
117 | // else
118 | // return (confidence*100).toInt()
119 | // }
120 |
121 | }
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/CancelNotificationActionHelper.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import android.app.Activity
4 | import android.content.Context
5 | import android.os.Bundle
6 | import android.service.notification.NotificationListenerService
7 | import com.joaomgcd.taskerpluginlibrary.action.TaskerPluginRunnerAction
8 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfig
9 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfigHelper
10 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInput
11 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResult
12 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultErrorWithOutput
13 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultSucess
14 | import online.avogadro.opencv4tasker.databinding.ActivityConfigCancelNotificationBinding
15 | import online.avogadro.opencv4tasker.notification.NotificationInterceptorService
16 |
17 | /**
17 |  * Tasker plugin glue for the "Cancel Notification" action: binds the input,
17 |  * output and runner types together and builds the blurb shown in Tasker's UI.
17 |  */
17 | class CancelNotificationActionHelper(config: TaskerPluginConfig<CancelNotificationInput>) :
18 |     TaskerPluginConfigHelper<CancelNotificationInput, CancelNotificationOutput, CancelNotificationActionRunner>(config) {
19 |
20 |     override val runnerClass: Class<CancelNotificationActionRunner>
21 |         get() = CancelNotificationActionRunner::class.java
22 |
23 |     override val inputClass = CancelNotificationInput::class.java
24 |
25 |     override val outputClass = CancelNotificationOutput::class.java
26 |
27 |     override fun addToStringBlurb(input: TaskerInput<CancelNotificationInput>, blurbBuilder: StringBuilder) {
28 |         blurbBuilder.append(" cancel notification")
29 |         // Show only the first 20 chars so long keys don't flood Tasker's action list.
29 |         input.regular.notificationKey?.let {
30 |             blurbBuilder.append(" (Key: ${it.take(20)}...)")
31 |         }
32 |     }
33 | }
34 |
35 | /**
35 |  * Tasker configuration screen for the "Cancel Notification" action.
35 |  * Lets the user enter the key of the notification to cancel (typically a
35 |  * Tasker variable) and returns it to Tasker when OK is pressed.
35 |  */
35 | class ActivityConfigCancelNotificationAction : Activity(), TaskerPluginConfig<CancelNotificationInput> {
36 |
37 |     private lateinit var binding: ActivityConfigCancelNotificationBinding
38 |
39 |     override fun assignFromInput(input: TaskerInput<CancelNotificationInput>) {
40 |         // Guard: a safe call does not protect a lateinit property, so check
40 |         // initialization explicitly in case Tasker invokes this before onCreate().
40 |         if (::binding.isInitialized) {
40 |             binding.editNotificationKey.setText(input.regular.notificationKey ?: "")
41 |         }
41 |     }
42 |
43 |     override val inputForTasker: TaskerInput<CancelNotificationInput>
44 |         get() {
45 |             // Blank input is treated as "no key supplied" (null).
45 |             val notificationKey = binding.editNotificationKey.text?.toString()?.takeIf { it.isNotBlank() }
46 |             return TaskerInput(CancelNotificationInput(notificationKey))
47 |         }
48 |
49 |     override val context get() = applicationContext
50 |
51 |     private val taskerHelper by lazy { CancelNotificationActionHelper(this) }
52 |
53 |     override fun onCreate(savedInstanceState: Bundle?) {
54 |         super.onCreate(savedInstanceState)
55 |         binding = ActivityConfigCancelNotificationBinding.inflate(layoutInflater)
56 |
57 |         binding.buttonOK.setOnClickListener {
58 |             taskerHelper.finishForTasker()
59 |         }
60 |
61 |         setContentView(binding.root)
62 |         taskerHelper.onCreate()
63 |     }
64 | }
65 |
66 | /**
66 |  * Executes the "Cancel Notification" action: asks the notification listener
66 |  * service to dismiss the notification identified by the supplied key.
66 |  */
66 | class CancelNotificationActionRunner : TaskerPluginRunnerAction<CancelNotificationInput, CancelNotificationOutput>() {
67 |
68 |     override fun run(context: Context, input: TaskerInput<CancelNotificationInput>): TaskerPluginResult<CancelNotificationOutput> {
69 |         try {
70 |             val notificationKey = input.regular.notificationKey
71 |
72 |             // Check if we have the required parameter. Deliberately returned as a
72 |             // "success" result carrying success=false so the Tasker task keeps
72 |             // running and can inspect the output fields.
73 |             if (notificationKey.isNullOrBlank()) {
74 |                 return TaskerPluginResultSucess(CancelNotificationOutput(
75 |                     false,
76 |                     "Missing required parameter: notification key is required"
77 |                 ))
78 |             }
79 |
80 |             // Try to cancel the notification using the key
81 |             val success = cancelNotificationByKey(notificationKey)
82 |
83 |             val message = if (success) {
84 |                 "Notification canceled successfully"
85 |             } else {
86 |                 "Failed to cancel notification. Make sure the notification service is enabled and has proper permissions."
87 |             }
88 |
89 |             return TaskerPluginResultSucess(CancelNotificationOutput(success, message))
90 |
91 |         } catch (e: Exception) {
92 |             return TaskerPluginResultErrorWithOutput(-1, "Error canceling notification: ${e.message}")
93 |         }
94 |     }
95 |
96 |     /** Delegates to the notification listener service, which owns the active notifications. */
96 |     private fun cancelNotificationByKey(notificationKey: String): Boolean {
97 |         return NotificationInterceptorService.cancelNotificationByKey(notificationKey)
98 |     }
99 | }
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/ConfigActivity.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker;
2 |
3 | import android.content.Intent;
4 | import android.content.SharedPreferences;
5 | import android.net.Uri;
6 | import android.os.Bundle;
7 | import android.util.Log;
8 | import android.view.MenuItem;
9 | import android.view.View;
10 | import android.widget.EditText;
11 | import android.widget.RadioButton;
12 | import android.widget.TextView;
13 |
14 | import java.io.IOException;
15 |
16 | import androidx.appcompat.app.AppCompatActivity;
17 | import online.avogadro.opencv4tasker.app.SharedPreferencesHelper;
18 | import online.avogadro.opencv4tasker.tensorflowlite.HumansDetectorTensorFlow;
19 |
20 | /**
20 |  * Settings screen where the user stores API keys for the cloud engines
20 |  * (Claude, Gemini, OpenRouter) and the OpenRouter model name.
20 |  * Values are persisted via {@link SharedPreferencesHelper}.
20 |  */
20 | public class ConfigActivity extends AppCompatActivity {
21 |
22 |     private static final String TAG = "ConfigActivity";
22 |
22 |     /** Model preselected when no OpenRouter model has been saved yet. */
22 |     private static final String DEFAULT_OPENROUTER_MODEL = "qwen/qwen2.5-vl-32b-instruct:free";
23 |
24 |     EditText claudeApiKey;
25 |     EditText geminiApiKey;
26 |     EditText openRouterApiKey;
27 |     EditText openRouterModel;
28 |
29 |     @Override
30 |     protected void onCreate(Bundle savedInstanceState) {
31 |         super.onCreate(savedInstanceState);
32 |         setContentView(R.layout.activity_config);
33 |
34 |         // Enable back button in action bar
35 |         if (getSupportActionBar() != null) {
36 |             getSupportActionBar().setDisplayHomeAsUpEnabled(true);
37 |             getSupportActionBar().setDisplayShowHomeEnabled(true);
38 |         }
39 |
40 |         // Bind each field to its stored preference; only the model has a default.
41 |         claudeApiKey = bindField(R.id.claudeApiKey, SharedPreferencesHelper.CLAUDE_API_KEY, null);
47 |         geminiApiKey = bindField(R.id.geminiApiKey, SharedPreferencesHelper.GEMINI_API_KEY, null);
53 |         openRouterApiKey = bindField(R.id.openRouterApiKey, SharedPreferencesHelper.OPENROUTER_API_KEY, null);
59 |         openRouterModel = bindField(R.id.openRouterModel, SharedPreferencesHelper.OPENROUTER_MODEL, DEFAULT_OPENROUTER_MODEL);
65 |
66 |         findViewById(R.id.buttonSave).setOnClickListener(v -> {
69 |             saveField(SharedPreferencesHelper.CLAUDE_API_KEY, claudeApiKey);
75 |             saveField(SharedPreferencesHelper.GEMINI_API_KEY, geminiApiKey);
81 |             saveField(SharedPreferencesHelper.OPENROUTER_API_KEY, openRouterApiKey);
87 |             saveField(SharedPreferencesHelper.OPENROUTER_MODEL, openRouterModel);
93 |             finish(); // return to main activity
94 |         });
95 |     }
96 |
96 |     /**
96 |      * Look up the EditText and pre-fill it from the stored preference.
96 |      * When {@code defaultValue} is non-null, it is shown whenever the stored
96 |      * value is missing or empty (original behavior for the model field).
96 |      */
96 |     private EditText bindField(int viewId, String prefKey, String defaultValue) {
96 |         EditText field = findViewById(viewId);
96 |         String stored = SharedPreferencesHelper.get(this, prefKey);
96 |         if (defaultValue == null) {
96 |             if (stored != null)
96 |                 field.setText(stored);
96 |         } else {
96 |             if (stored != null && !stored.isEmpty())
96 |                 field.setText(stored);
96 |             else
96 |                 field.setText(defaultValue);
96 |         }
96 |         return field;
96 |     }
96 |
96 |     /** Persist the current content of {@code field} under {@code prefKey}. */
96 |     private void saveField(String prefKey, EditText field) {
96 |         SharedPreferencesHelper.save(this, prefKey, field.getText().toString());
96 |     }
97 |
98 |     @Override
99 |     public boolean onOptionsItemSelected(MenuItem item) {
100 |         // Handle the action-bar back arrow enabled in onCreate()
100 |         if (item.getItemId() == android.R.id.home) {
101 |             finish();
102 |             return true;
103 |         }
104 |         return super.onOptionsItemSelected(item);
105 |     }
106 | }
107 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
31 |
32 |
33 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
50 |
51 |
52 |
53 |
54 |
55 |
60 |
61 |
62 |
63 |
64 |
65 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Plugin app for Tasker and MacroDroid to provide Humans Detection
2 |
3 | Licensed under GPL v3
4 |
5 | V 1.6.1:
6 | * FIXED: OpenRouter option not appearing in the Tasker/MacroDroid config screens
7 |
8 | V 1.6.0:
9 | * ADDED: Notification listener can now optionally intercept notifications without images. Useful when the app sends a notification without images but the image can be fetched via API.
10 | * ADDED: "Cancel Notification" action: useful to remove false positives' notifications.
11 | * CHANGED: updated to using Gemini Flash 2.5 instead of 2.0
12 | * CHANGED: updated to using Claude Sonnet 4.5 since 3.5 is being retired
13 |
14 | V 1.5.0:
15 | * ADDED: support OpenRouter.AI for cloud queries: allowing completely free online image detection!
16 | This is a very powerful addition since it allows to:
17 | - use free models
18 | - use almost any model which is vision-capable (future proof)
19 | - get higher uptime (Openrouter will use fallback logic on different providers for some models)
20 | * FIXED: issue where NotificationInterceptor will only apply appName filter for the last edited filter!
21 |
22 | V 1.4.0:
23 | * FEATURE: Generate a Tasker Event when a notification with images is generated
24 |
25 | V 1.3.0
26 | * ADDED: new event allowing to perform a general purpose image analysis using Gemini or Claude
27 |
28 | V 1.2.0
29 | * ADDED: Google Gemini support
30 | * IMPROVED: test file selector will remember the last image (also for file picker)
31 |
32 | V 1.1.0
33 | * remove OpenCV support
34 | * added Tensorflow Lite support
35 | * added Claude.AI (cloud) support: much better reliability !
36 |
37 | V 1.0.2
38 | * added file picker
39 |
40 | Features:
41 | * Provides a tasker Event that will trigger when an app (configurable) creates a notification with an image: useful to start a person detection macro that will add advanced AI-features to non-AI enabled cameras
42 | * Provides a tasker action capable to open an image and return a score 0-100 in term of how likely the image contains a person
43 | * Provides a tasker action that you can use to ask generic questions to Claude/Gemini regarding an image/text (e.g. "is the garage door closed?" or "is there a dog in the image?")
44 | * simple home screen to test it against local images
45 | * can parse file names in the form of content://media/external/images/something or in form file:///sdcard/somewhere/file.jpg
46 | * can parse PNG and JPG (tensorflow) or JPG (Anthropic Claude, Google Gemini and OpenRouter)
47 |
48 | Supported person detection/image analysis engines:
49 | * TensorFlow: locally to the phone, limited accuracy but very privacy-savvy
50 | * Claude Sonnet 4.0 (online): will send the data to Anthropic's cloud LLM, which can perform many complex tasks. Very accurate
51 | * Gemini Flash 2.5 (online): will send the data to Google's cloud LLM, which can perform many complex tasks. Accurate and cheap
52 | * OpenRouter (online): will send the data to OpenRouter's cloud, which in turn will forward to the LLM you have selected. This way you can choose the cost (from free to very expensive) and accuracy of the model
53 |
54 | Limitations:
55 | * uses old APIs so no Play store version: you can download pre-built APK from github
56 | * permission and battery management is still rudimentary
57 |
58 | Ideas for future improvements:
59 | * Support for PNG with Claude/Gemini
60 | * [DONE] ~~Support for Openrouter.ai~~
61 | * support for running locally Google Gemma 3n
62 | * [DISCARDED] ~~Support for ChatGPT-Vision~~ (superseded by OpenRouter allowing the use of ChatGPT Vision!)
63 | * [DONE] ~~Support for generic Claude/ChatGPT actions~~
64 |
65 | IMPORTANT Caveats:
66 | * will use more battery than you want, until I understand why, the default plugin mechanism is not working as expected (problems with Foreground service)
67 |
68 | HOW-TO use it:
69 | * install the APK (you can download it from the GitHub releases)
70 | * start it: so that it's registered and available to Tasker/Macrodroid
71 | * (optional): go to settings and add your Claude/Gemini/OpenRouter API Key (if you want to be able to use these online models), see
72 | * within Macrodroid/Tasker
73 | * go to the task you want to use
74 | * add action > external app > HumanDetection4Tasker > Human Recognition
75 | * you get a window where to enter the name of the image, usually you'll want to use a variable instead of a hard-coded value (e.g. %my_image )
76 | * then you get a window where you say where to save the result, usually another variable (e.g. detection_score)
77 | * then do whatever you want with the information :-)
78 |
79 | HOW-TO test it:
80 | * start the app
81 | * grant the permission it requires
82 | * use the file picker to choose an image
83 | * press the "test recognition" button
84 | * see what's the score
85 |
86 | Example usage for the generic question:
87 | * image: the image you want to analyze
88 | * system prompt:
89 | * user prompt:
90 |
91 |
92 | E.g. my use case is simple: I want to reduce to almost zero the false positive alarms of some security cameras
93 | * listen for alerts from (cheap?) security cameras
94 | * download the alert image locally
95 | * pass the image to this plugin
96 | * if the detection_score>=50 then start the siren/lights
97 | * otherwise just ignore the false positive
98 |
99 |
100 | E.g. the AI Image Analysis opens the door to more sophisticated workflows:
101 | * Task: check if I remembered to park the car in the backyard or left it parked in the road in front of the house. I use the backyard security cam to check that
102 | * image: the image from your security camera (I use my plugin here to do that: https://github.com/SimoneAvogadro/CloudEdge4Tasker )
103 | * system prompt: Respond with a single word (CAR or NO_CAR) because your response will be fed into an automation workflow
104 | * user prompt: Please analyze the image and respond with a single word: CAR or NO_CAR. If there's a car then return CAR, otherwise return NO_CAR
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/ActivityConfigNotificationInterceptedEvent.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import android.app.Activity
4 | import android.content.ComponentName
5 | import android.content.Intent
6 | import android.os.Bundle
7 | import android.provider.Settings
8 | import android.util.Log
9 | import android.widget.Toast
10 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfig
11 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInput
12 | import online.avogadro.opencv4tasker.databinding.ActivityConfigNotificationInterceptedEventBinding
13 | import online.avogadro.opencv4tasker.notification.NotificationInterceptorService
14 | import online.avogadro.opencv4tasker.app.SharedPreferencesHelper
15 |
16 | /**
16 |  * Tasker configuration screen for the "Notification Intercepted" event.
16 |  * Lets the user choose an app-name filter and whether only notifications
16 |  * carrying images should trigger, and ensures notification-listener access
16 |  * is granted before the configuration is accepted.
16 |  */
16 | class ActivityConfigNotificationInterceptedEvent : Activity(), TaskerPluginConfig<NotificationInterceptedEventInput> {
17 |
18 |     companion object {
19 |         private const val TAG = "NotificationEventConfig"
20 |         private const val REQUEST_NOTIFICATION_ACCESS = 1001
21 |     }
22 |
23 |     private lateinit var binding: ActivityConfigNotificationInterceptedEventBinding
24 |
25 |     override fun assignFromInput(input: TaskerInput<NotificationInterceptedEventInput>) {
26 |         // Set the text field state based on the input
27 |         binding.editTextAppFilter.setText(input.regular.appNameFilter)
28 |
29 |         // Set the radio button state based on the input
30 |         if (input.regular.requireImages) {
31 |             binding.radioOnlyWithImages.isChecked = true
32 |         } else {
33 |             binding.radioAlsoWithoutImages.isChecked = true
34 |         }
35 |     }
36 |
37 |     override val inputForTasker: TaskerInput<NotificationInterceptedEventInput>
38 |         get() = TaskerInput(NotificationInterceptedEventInput(
39 |             appNameFilter = binding.editTextAppFilter.text.toString().trim(),
40 |             requireImages = binding.radioOnlyWithImages.isChecked
41 |         ))
42 |
43 |     override val context get() = applicationContext
44 |     private val taskerHelper by lazy { NotificationInterceptedEventHelper(this) }
45 |
46 |     override fun onCreate(savedInstanceState: Bundle?) {
47 |         super.onCreate(savedInstanceState)
48 |
49 |         binding = ActivityConfigNotificationInterceptedEventBinding.inflate(layoutInflater)
50 |         setContentView(binding.root)
51 |
52 |         binding.buttonOK.setOnClickListener {
53 |             // Check if notification listener permission is granted
54 |             if (!isNotificationListenerEnabled()) {
55 |                 // Show dialog to grant permission
56 |                 showNotificationAccessDialog()
57 |             } else {
58 |                 // Permission is granted, proceed
59 |                 finishConfiguration()
60 |             }
61 |         }
62 |
63 |         taskerHelper.onCreate()
64 |     }
65 |
66 |     /**
66 |      * True when this package appears in the system's enabled notification
66 |      * listeners setting (colon-separated flattened component names).
66 |      */
66 |     private fun isNotificationListenerEnabled(): Boolean {
67 |         val packageName = packageName
68 |         val flat = Settings.Secure.getString(
69 |             contentResolver,
70 |             "enabled_notification_listeners"
71 |         )
72 |
73 |         if (flat.isNullOrEmpty()) {
74 |             return false
75 |         }
76 |
77 |         val names = flat.split(":".toRegex()).dropLastWhile { it.isEmpty() }
78 |         for (name in names) {
79 |             val componentName = ComponentName.unflattenFromString(name)
80 |             if (componentName != null) {
81 |                 if (packageName == componentName.packageName) {
82 |                     Log.d(TAG, "Notification listener permission is granted")
83 |                     return true
84 |                 }
85 |             }
86 |         }
87 |
88 |         Log.d(TAG, "Notification listener permission is NOT granted")
89 |         return false
90 |     }
91 |
92 |     /**
92 |      * Send the user to the system notification-access settings screen; falls
92 |      * back to manual instructions (and finishes anyway) if that fails.
92 |      */
92 |     private fun showNotificationAccessDialog() {
93 |         try {
94 |             Toast.makeText(
95 |                 this,
96 |                 "Please grant notification access to this app in the next screen",
97 |                 Toast.LENGTH_LONG
98 |             ).show()
99 |
100 |             // Open notification access settings
101 |             val intent = Intent(Settings.ACTION_NOTIFICATION_LISTENER_SETTINGS)
102 |             startActivityForResult(intent, REQUEST_NOTIFICATION_ACCESS)
103 |
104 |         } catch (e: Exception) {
105 |             Log.e(TAG, "Error opening notification access settings", e)
106 |             Toast.makeText(
107 |                 this,
108 |                 "Please manually grant notification access in Android Settings > Apps > Special Access > Notification Access",
109 |                 Toast.LENGTH_LONG
110 |             ).show()
111 |             finishConfiguration()
112 |         }
113 |     }
114 |
115 |     override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
116 |         super.onActivityResult(requestCode, resultCode, data)
117 |
118 |         when (requestCode) {
119 |             REQUEST_NOTIFICATION_ACCESS -> {
120 |                 // Check if permission was granted
121 |                 if (isNotificationListenerEnabled()) {
122 |                     Toast.makeText(this, "Notification access granted!", Toast.LENGTH_SHORT).show()
123 |                     finishConfiguration()
124 |                 } else {
125 |                     Toast.makeText(
126 |                         this,
127 |                         "Notification access is required for this event to work",
128 |                         Toast.LENGTH_LONG
129 |                     ).show()
130 |                     // Still finish configuration, user can enable it later
131 |                     finishConfiguration()
132 |                 }
133 |             }
134 |         }
135 |     }
136 |
137 |     /** Hand the current configuration back to Tasker and close this screen. */
137 |     private fun finishConfiguration() {
138 |         Log.d(TAG, "Finishing configuration with filter='${binding.editTextAppFilter.text.toString().trim()}'")
139 |
140 |         taskerHelper.finishForTasker()
141 |     }
142 | }
143 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a message to stdout (non-fatal issues).
warn () {
    echo "$*"
}

# Print a message and abort the script with exit code 1.
die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
  cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/NotificationInterceptedEvent.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import android.content.Context
4 | import com.joaomgcd.taskerpluginlibrary.action.TaskerPluginRunnerAction
5 | import com.joaomgcd.taskerpluginlibrary.condition.TaskerPluginRunnerCondition
6 | import com.joaomgcd.taskerpluginlibrary.condition.TaskerPluginRunnerConditionNoInput
7 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfig
8 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfigHelper
9 | import com.joaomgcd.taskerpluginlibrary.extensions.requestQuery
10 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInput
11 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputField
12 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInputRoot
13 | import com.joaomgcd.taskerpluginlibrary.output.TaskerOutputObject
14 | import com.joaomgcd.taskerpluginlibrary.output.TaskerOutputVariable
15 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResult
16 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultCondition
17 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultConditionSatisfied
18 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultConditionUnsatisfied
19 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultSucess
20 |
/**
 * Input configuration for the Notification Intercepted event.
 *
 * appNameFilter: case-insensitive substring matched against both the posting
 * app's name and its package name (see the condition runner); empty string
 * means "match all apps".
 * requireImages: when true, only notifications that produced an extracted
 * image (non-empty imagePath) satisfy the event.
 */
@TaskerInputRoot
class NotificationInterceptedEventInput @JvmOverloads constructor(
    @field:TaskerInputField("appNameFilter") var appNameFilter: String = "",
    @field:TaskerInputField("requireImages") var requireImages: Boolean = true
)
29 |
/**
 * Output data for the Notification Intercepted event.
 *
 * Serves as the event "update" payload: each field is exposed to Tasker as a
 * %variable named by its [TaskerOutputVariable] annotation. The class is also
 * annotated as Tasker input so the payload can round-trip through the plugin
 * library's query mechanism.
 */
@TaskerInputRoot
@TaskerOutputObject
class NotificationInterceptedEvent(
    // Notification title line (%notification_title)
    @get:TaskerOutputVariable("notification_title")
    @TaskerInputField(key="notification_title")
    var notificationTitle: String = "",

    // Notification body text (%notification_text)
    @get:TaskerOutputVariable("notification_text")
    @TaskerInputField(key="notification_text")
    var notificationText: String = "",

    // Path of the extracted image file; empty when no image was extracted (%image_path)
    @get:TaskerOutputVariable("image_path")
    @TaskerInputField(key="image_path")
    var imagePath: String = "",

    // Package name of the posting app (%app_package)
    @get:TaskerOutputVariable("app_package")
    @TaskerInputField(key="app_package")
    var appPackage: String = "",

    // Human-readable name of the posting app (%app_name)
    @get:TaskerOutputVariable("app_name")
    @TaskerInputField(key="app_name")
    var appName: String = "",

    // System notification key, usable to cancel the notification later (%notification_key)
    @get:TaskerOutputVariable("notification_key")
    @TaskerInputField(key="notification_key")
    var notificationKey: String = ""
)
60 |
/**
 * Tasker Plugin Event Helper for Notification Intercepted.
 *
 * Wires the input/output classes to the condition runner and builds the
 * blurb shown in Tasker's profile summary.
 */
// NOTE(review): generic type arguments (on TaskerPluginConfigHelper, Class
// and TaskerInput) appear to have been stripped from this dump — confirm
// against the original file before touching these signatures.
class NotificationInterceptedEventHelper(config: TaskerPluginConfig) :
    TaskerPluginConfigHelper(config) {

    override val runnerClass: Class
        get() = NotificationInterceptedRunnerConditionEvent::class.java

    override val inputClass = NotificationInterceptedEventInput::class.java

    override val outputClass = NotificationInterceptedEvent::class.java

    // Appends a human-readable summary of the configured filters to the blurb.
    override fun addToStringBlurb(input: TaskerInput, blurbBuilder: StringBuilder) {
        val typeText = if (input.regular.requireImages) "with images" else "with or without images"

        if (input.regular.appNameFilter.isNotEmpty()) {
            blurbBuilder.append(" monitoring notifications $typeText from apps containing '${input.regular.appNameFilter}'")
        } else {
            blurbBuilder.append(" monitoring notifications $typeText from all apps")
        }
    }
}
84 |
85 | ///**
86 | // * Event Runner - dummy implementation since events are triggered externally
87 | // */
88 | //class NotificationInterceptedEventRunner : TaskerPluginRunnerAction() {
89 | //
90 | // override fun run(context: Context, input: TaskerInput): TaskerPluginResult {
91 | // // This is just a placeholder - the actual event triggering is handled by the NotificationInterceptorService
92 | // // Return success to indicate the event configuration is valid
93 | // return TaskerPluginResultSucess()
94 | // }
95 | //}
96 |
/**
 * Event condition runner: decides whether an intercepted notification (the
 * [update] payload delivered via requestQuery) satisfies the profile's
 * configured filters.
 */
// NOTE(review): generic type arguments (on TaskerPluginRunnerCondition,
// TaskerInput, TaskerPluginResultCondition) appear stripped in this dump —
// verify against the original file.
class NotificationInterceptedRunnerConditionEvent() : TaskerPluginRunnerCondition() {
    // This condition is a Tasker *event* (fires per occurrence, not a state).
    override val isEvent: Boolean get() = true

    override fun getSatisfiedCondition(context: Context, input: TaskerInput, update: NotificationInterceptedEvent?): TaskerPluginResultCondition {
        // No payload: the query was not triggered by an actual interception.
        if (update == null) {
            return TaskerPluginResultConditionUnsatisfied()
        }

        // Check if images are required and if the notification has images
        if (input.regular.requireImages && update.imagePath.isEmpty()) {
            return TaskerPluginResultConditionUnsatisfied()
        }

        // Case-insensitive substring match against app name OR package name.
        val appNameFilter = input.regular.appNameFilter
        if (appNameFilter.isNotEmpty()) {
            val matchesAppName = update.appName.contains(appNameFilter, ignoreCase = true)
            val matchesPackage = update.appPackage.contains(appNameFilter, ignoreCase = true)

            if (!matchesAppName && !matchesPackage) {
                return TaskerPluginResultConditionUnsatisfied()
            }
        }

        return TaskerPluginResultConditionSatisfied(context, update)
    }
}
123 |
124 | fun Context.triggerTaskerEventNotificationIntercepted(bundle: Any?) = ActivityConfigNotificationInterceptedEvent::class.java.requestQuery(this, bundle)
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
10 |
15 |
20 |
25 |
30 |
35 |
40 |
45 |
50 |
55 |
60 |
65 |
70 |
75 |
80 |
85 |
90 |
95 |
100 |
105 |
110 |
115 |
120 |
125 |
130 |
135 |
140 |
145 |
150 |
155 |
160 |
165 |
170 |
171 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/notification/NotificationFileManager.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.notification
2 |
3 | import android.content.Context
4 | import android.graphics.Bitmap
5 | import android.util.Log
6 | import java.io.File
7 | import java.io.FileOutputStream
8 | import java.io.IOException
9 | import java.text.SimpleDateFormat
10 | import java.util.*
11 |
/**
 * Stores notification images as JPEG files in a private cache subdirectory
 * and provides cleanup utilities for them.
 *
 * Fixes vs. previous revision:
 *  - freed-byte totals are now only counted for files that were actually
 *    deleted (previously the size was added before the delete was attempted);
 *  - the two cleanup methods share one helper instead of duplicating the
 *    directory scan/delete loop;
 *  - a stale empty file is removed when Bitmap.compress() fails.
 */
class NotificationFileManager(private val context: Context) {

    companion object {
        private const val TAG = "NotificationFileManager"
        private const val TEMP_DIR = "notification_images"
        private const val FILE_PREFIX = "notification_"
        private const val FILE_EXTENSION = ".jpg"
        private const val CLEANUP_INTERVAL_HOURS = 24
        private const val JPEG_QUALITY = 85
    }

    // Lazily-created cache subdirectory holding the extracted images.
    private val tempDirectory: File by lazy {
        File(context.cacheDir, TEMP_DIR).also { dir ->
            if (!dir.exists()) {
                dir.mkdirs()
                Log.d(TAG, "Created temp directory: ${dir.absolutePath}")
            }
        }
    }

    /**
     * Saves [bitmap] as a JPEG (quality [JPEG_QUALITY]) in the temp directory.
     *
     * The filename embeds a millisecond timestamp and the sanitized
     * [packageName] so concurrent saves from different apps cannot collide.
     *
     * @return the written file, or null on any failure (errors are logged).
     */
    fun saveImageToTemp(bitmap: Bitmap, packageName: String): File? {
        return try {
            // Unique filename: prefix + package (dots escaped) + timestamp.
            val timestamp = SimpleDateFormat("yyyyMMdd_HHmmss_SSS", Locale.getDefault()).format(Date())
            val sanitizedPackageName = packageName.replace(".", "_")
            val filename = "${FILE_PREFIX}${sanitizedPackageName}_${timestamp}${FILE_EXTENSION}"

            val file = File(tempDirectory, filename)

            // use{} closes the stream on all paths; its lambda result is returned.
            FileOutputStream(file).use { outputStream ->
                val success = bitmap.compress(Bitmap.CompressFormat.JPEG, JPEG_QUALITY, outputStream)
                if (success) {
                    Log.d(TAG, "Image saved successfully to: ${file.absolutePath} (${file.length()} bytes)")
                    file
                } else {
                    Log.e(TAG, "Failed to compress bitmap to file")
                    // Don't leave a zero-byte/partial file behind.
                    file.delete()
                    null
                }
            }
        } catch (e: IOException) {
            Log.e(TAG, "Error saving image to temp file", e)
            null
        } catch (e: Exception) {
            Log.e(TAG, "Unexpected error saving image", e)
            null
        }
    }

    /**
     * Deletes temp files older than [CLEANUP_INTERVAL_HOURS] hours.
     * Never throws; all errors are logged.
     */
    fun cleanupOldFiles() {
        try {
            val cutoffTime = System.currentTimeMillis() - CLEANUP_INTERVAL_HOURS * 60 * 60 * 1000L
            val (deletedCount, freedBytes) = deleteMatchingFiles { it.lastModified() < cutoffTime } ?: return

            if (deletedCount > 0) {
                Log.i(TAG, "Cleanup completed: deleted $deletedCount files, freed ${formatFileSize(freedBytes)}")
            } else {
                Log.d(TAG, "No old files to clean up")
            }
        } catch (e: Exception) {
            Log.e(TAG, "Error during cleanup", e)
        }
    }

    /**
     * Forces cleanup of ALL temporary files (manual cleanup / testing).
     * Never throws; all errors are logged.
     */
    fun cleanupAllFiles() {
        try {
            val (deletedCount, freedBytes) = deleteMatchingFiles { true } ?: return
            Log.i(TAG, "Force cleanup completed: deleted $deletedCount files, freed ${formatFileSize(freedBytes)}")
        } catch (e: Exception) {
            Log.e(TAG, "Error during force cleanup", e)
        }
    }

    /**
     * Deletes every regular file in the temp directory matching [shouldDelete].
     *
     * @return (deletedCount, freedBytes) — bytes counted only for files whose
     *         deletion succeeded — or null when the directory is missing or
     *         empty (already logged).
     */
    private fun deleteMatchingFiles(shouldDelete: (File) -> Boolean): Pair<Int, Long>? {
        if (!tempDirectory.exists()) {
            Log.d(TAG, "Temp directory doesn't exist, nothing to clean up")
            return null
        }

        val files = tempDirectory.listFiles()
        if (files == null || files.isEmpty()) {
            Log.d(TAG, "No files to clean up")
            return null
        }

        var deletedCount = 0
        var freedBytes = 0L

        for (file in files) {
            if (file.isFile && shouldDelete(file)) {
                val size = file.length() // capture before delete invalidates it
                if (file.delete()) {
                    deletedCount++
                    freedBytes += size // only count what was actually freed
                    Log.d(TAG, "Deleted file: ${file.name}")
                } else {
                    Log.w(TAG, "Failed to delete file: ${file.name}")
                }
            }
        }

        return deletedCount to freedBytes
    }

    /**
     * Returns a short human-readable summary of the temp directory contents.
     */
    fun getTempDirectoryInfo(): String {
        return try {
            if (!tempDirectory.exists()) {
                "Temp directory doesn't exist"
            } else {
                val files = tempDirectory.listFiles()
                if (files == null || files.isEmpty()) {
                    "Temp directory is empty"
                } else {
                    val totalSize = files.filter { it.isFile }.sumOf { it.length() }
                    "Temp directory: ${files.size} files, ${formatFileSize(totalSize)}"
                }
            }
        } catch (e: Exception) {
            "Error getting temp directory info: ${e.message}"
        }
    }

    /**
     * Basic free-space check on the temp directory's filesystem.
     *
     * @param requiredBytes minimum usable space required (default 10 MB).
     * @return false on error (fail-safe: callers should not write).
     */
    fun hasEnoughSpace(requiredBytes: Long = 10 * 1024 * 1024): Boolean {
        return try {
            tempDirectory.usableSpace > requiredBytes
        } catch (e: Exception) {
            Log.e(TAG, "Error checking available space", e)
            false
        }
    }

    // Formats a byte count using binary units, integer-truncated.
    private fun formatFileSize(bytes: Long): String {
        return when {
            bytes < 1024 -> "$bytes B"
            bytes < 1024 * 1024 -> "${bytes / 1024} KB"
            bytes < 1024 * 1024 * 1024 -> "${bytes / (1024 * 1024)} MB"
            else -> "${bytes / (1024 * 1024 * 1024)} GB"
        }
    }
}
189 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/SplashActivity.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker;
2 |
3 | import androidx.annotation.NonNull;
4 | import androidx.appcompat.app.AppCompatActivity;
5 | import androidx.core.app.ActivityCompat;
6 |
7 | import android.Manifest;
8 | import android.app.Activity;
9 | import android.content.Intent;
10 | import android.content.pm.PackageManager;
11 | import android.net.Uri;
12 | import android.os.Bundle;
13 | import android.os.Environment;
14 | import android.os.PowerManager;
15 | import android.provider.Settings;
16 | import android.util.Log;
17 | import android.widget.Toast;
18 |
19 | import online.avogadro.opencv4tasker.R;
20 |
21 | public class SplashActivity extends AppCompatActivity {
22 |
23 | @Override
24 | protected void onCreate(Bundle savedInstanceState) {
25 | super.onCreate(savedInstanceState);
26 | setContentView(R.layout.activity_splash);
27 | verifyBatteryPermission();
28 | verifyStoragePermissions(this);
29 | }
30 |
31 | // TODO: does not seem to work
32 | // 1 - if opening permissions intent must not switch to home page
33 | // 2 - even when not switching to homepage does not seem to open the system dialog to provide the permission
34 | private void verifyBatteryPermission() {
35 | PowerManager powerManager = (PowerManager) getSystemService(POWER_SERVICE);
36 | String packageName = getPackageName();
37 | boolean isIgnoringBatteryOptimizations = powerManager.isIgnoringBatteryOptimizations(packageName);
38 |
39 | if (!isIgnoringBatteryOptimizations) {
40 | Intent intent = new Intent();
41 | intent.setAction(Settings.ACTION_REQUEST_IGNORE_BATTERY_OPTIMIZATIONS);
42 | intent.setData(Uri.parse("package:" + packageName));
43 | startActivity(intent);
44 | }
45 | }
46 |
47 | private static final int REQUEST_EXTERNAL_STORAGE = 1;
48 |
49 | private static final String[] PERMISSIONS_ALL =
50 | {
51 | Manifest.permission.READ_EXTERNAL_STORAGE
52 | ,Manifest.permission.WRITE_EXTERNAL_STORAGE
53 | ,Manifest.permission.READ_MEDIA_IMAGES
54 | ,Manifest.permission.FOREGROUND_SERVICE
55 | ,Manifest.permission.MANAGE_EXTERNAL_STORAGE
56 | };
57 |
58 | private void verifyStoragePermissions(Activity activity) {
59 | try {
60 | int READ_EXTERNAL_STORAGE = ActivityCompat.checkSelfPermission(activity, Manifest.permission.READ_EXTERNAL_STORAGE);
61 | int MANAGE_EXTERNAL_STORAGE = ActivityCompat.checkSelfPermission(activity, Manifest.permission.MANAGE_EXTERNAL_STORAGE);
62 | int READ_MEDIA_IMAGES = ActivityCompat.checkSelfPermission(activity, Manifest.permission.READ_MEDIA_IMAGES);
63 | //int START_FOREGROUND_SERVICES_FROM_BACKGROUND = ActivityCompat.checkSelfPermission(activity, Manifest.permission.START_FOREGROUND_SERVICES_FROM_BACKGROUND);
64 | int FOREGROUND_SERVICE = ActivityCompat.checkSelfPermission(activity, Manifest.permission.FOREGROUND_SERVICE);
65 | if ( READ_EXTERNAL_STORAGE != PackageManager.PERMISSION_GRANTED
66 | || MANAGE_EXTERNAL_STORAGE != PackageManager.PERMISSION_GRANTED
67 | || READ_MEDIA_IMAGES != PackageManager.PERMISSION_GRANTED
68 | // || READ_MEDIA_IMAGES != PackageManager.PERMISSION_GRANTED
69 | || FOREGROUND_SERVICE != PackageManager.PERMISSION_GRANTED
70 | ) { // || RECORD_AUDIO != PackageManager.PERMISSION_GRANTED
71 | ActivityCompat.requestPermissions(activity, PERMISSIONS_ALL,REQUEST_EXTERNAL_STORAGE);
72 | } else {
73 | goHome();
74 | }
75 | } catch (Exception e) {
76 | Log.e("SplashActivity","error verifying permissions: "+e.getMessage(),e);
77 | Toast.makeText(this, "Failed to check permissions", Toast.LENGTH_LONG).show();
78 | // e.printStackTrace();
79 | goHome();
80 | }
81 | }
82 |
83 | @Override
84 | public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
85 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
86 | if (requestCode == 1) {
87 | if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
88 | goHome();
89 | } else {
90 | Log.e("SplashActivity","missing persmissions");
91 | goHome();
92 | }
93 | } else {
94 | Log.e("SplashActivity","wrong request code");
95 | goHome();
96 | }
97 | }
98 |
99 | private void goHome() {
100 | // new Handler().postDelayed(new Runnable() {
101 | // @Override
102 | // public void run() {
103 | // Intent intent;
104 | // loginWithStoredCredentials(SplashActivity.this, new ILoginCallback() {
105 | // @Override
106 | // public void onSuccess(UserInfo userInfo) {
107 | // Toast.makeText(SplashActivity.this,"Autologin ok", Toast.LENGTH_LONG).show();
108 | // goToActivity(MainActivity.class);
109 | // }
110 | //
111 | // @Override
112 | // public void onError(int i, String s) {
113 | // Toast.makeText(SplashActivity.this, R.string.toast_fail+" autologin: "+i+" s:"+s, Toast.LENGTH_LONG).show();
114 | // goToActivity(LoginActivity.class);
115 | // }
116 | // });
117 | // }
118 | // },100);
119 | goToActivity(MainActivity.class);
120 | }
121 |
122 | private void goToActivity(Class dest) {
123 | Intent intent = new Intent(this, dest);
124 | // no back-to-splachscreen!
125 | intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
126 | startActivity(intent);
127 | finish();
128 | }
129 |
130 | // public static void loginWithStoredCredentials(Context ctx, ILoginCallback then) {
131 | // String username = SharedPreferencesHelper.get(ctx,"username");
132 | // String password = SharedPreferencesHelper.get(ctx,"password");
133 | //
134 | // if ("".equals(username) || "".equals(password)) {
135 | // then.onError(-1,"No stored login data");
136 | // return; // no stored credentials, go on with standard login
137 | // }
138 | //
139 | // MeariSmartSdk.partnerId= AWS4TaskerApplication.partnerIdS;
140 | // MeariUser.getInstance().loginWithAccount("country", "code", username, password, new ILoginCallback() {
141 | // @Override
142 | // public void onSuccess(UserInfo userInfo) {
143 | // // MyFirebaseMessagingService.startListening(SplashActivity.this);
144 | // if (then!=null)
145 | // then.onSuccess(userInfo);
146 | // }
147 | //
148 | // @Override
149 | // public void onError(int i, String s) {
150 | // SharedPreferencesHelper.save(ctx, "username", "" );
151 | // SharedPreferencesHelper.save(ctx, "password", "" );
152 | // if (then!=null)
153 | // then.onError(i,s);
154 | // }
155 | // } );
156 | // }
157 | }
158 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/DetectHumansActionHelper.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import android.app.Activity
4 | import android.content.Context
5 | import android.os.Bundle
6 | import com.joaomgcd.taskerpluginlibrary.action.TaskerPluginRunnerAction
7 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfig
8 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfigHelper
9 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInput
10 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResult
11 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultErrorWithOutput
12 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultSucess
13 | import online.avogadro.opencv4tasker.app.SharedPreferencesHelper
14 | import online.avogadro.opencv4tasker.claudeai.HumansDetectorClaudeAI
15 | import online.avogadro.opencv4tasker.databinding.ActivityConfigDetectHumansBinding;
16 | import online.avogadro.opencv4tasker.gemini.HumansDetectorGemini
17 | import online.avogadro.opencv4tasker.openrouter.HumansDetectorOpenRouter
18 | import online.avogadro.opencv4tasker.tensorflowlite.HumansDetectorTensorFlow
19 |
// Engine identifiers stored in the action's Tasker configuration. The string
// values are persisted in saved profiles (TensorFlow is the fallback default
// for backward compatibility — see assignFromInput), so do not rename them.
const val ENGINE_CLAUDEAI = "CLAUDE"
const val ENGINE_TENSORFLOW = "TENSORFLOW"
const val ENGINE_GEMINI = "GEMINI"
const val ENGINE_OPENROUTER = "OPENROUTER"
24 |
/**
 * Tasker plugin config helper for the "Detect Humans" action: binds the
 * input/output classes to the runner and contributes the blurb text shown
 * in the Tasker task editor.
 */
// NOTE(review): generic type arguments (on TaskerPluginConfigHelper, Class,
// TaskerInput) appear to have been stripped from this dump — verify against
// the original file before editing these signatures.
class DetectHumansActionHelper(config: TaskerPluginConfig) : TaskerPluginConfigHelper(config) {
    override val runnerClass: Class get() = DetectHumansActionRunner::class.java
    override val inputClass = DetectHumansInput::class.java
    override val outputClass = DetectHumansOutput::class.java
    override fun addToStringBlurb(input: TaskerInput, blurbBuilder: StringBuilder) {
        blurbBuilder.append(" detect humans in image")
    }
}
33 |
/**
 * Configuration screen for the "Detect Humans" Tasker action.
 *
 * Loads the previously saved input (image path + engine radio selection),
 * disables any cloud engine whose API key is not configured (falling back to
 * the local TensorFlow engine), and hands the chosen configuration back to
 * Tasker when OK is pressed.
 */
// NOTE(review): generic type arguments (on TaskerPluginConfig, TaskerInput)
// appear stripped in this dump; also, `binding` is lateinit (non-null), so
// the `binding?.` safe calls below are redundant — left untouched here.
class ActivityConfigDetectHumansAction : Activity(), TaskerPluginConfig {

    private lateinit var binding: ActivityConfigDetectHumansBinding

    // Populates the UI from a previously saved Tasker configuration.
    override fun assignFromInput(input: TaskerInput) {
        binding?.editFileName?.setText(input.regular.imagePath);

        // Reset all radio buttons
        binding?.radioEngineClaudeAI?.isChecked = false
        binding?.radioEngineGemini?.isChecked = false
        binding?.radioEngineOpenRouter?.isChecked = false
        binding?.radioEngineTensorflowLite?.isChecked = false

        // Set the appropriate radio button based on the engine
        if (ENGINE_CLAUDEAI.equals(input.regular.engine)) {
            binding?.radioEngineClaudeAI?.isChecked = true
        } else if (ENGINE_GEMINI.equals(input.regular.engine)) {
            binding?.radioEngineGemini?.isChecked = true
        } else if (ENGINE_OPENROUTER.equals(input.regular.engine)) {
            binding?.radioEngineOpenRouter?.isChecked = true
        } else { // null or anything else
            // Local Tensorflow == default (backward compatibility!)
            binding?.radioEngineTensorflowLite?.isChecked = true
        }

        // disable Claude options if there's no API KEY
        var claudeApiKey = SharedPreferencesHelper.get(this, SharedPreferencesHelper.CLAUDE_API_KEY)
        if ("".equals(claudeApiKey)) {
            binding?.radioEngineClaudeAI?.isEnabled = false
            binding?.radioEngineClaudeAI?.isChecked = false

            // Default to TensorFlow if Claude was selected but now disabled
            if (ENGINE_CLAUDEAI.equals(input.regular.engine)) {
                binding?.radioEngineTensorflowLite?.isChecked = true
            }
        }

        // disable Gemini options if there's no API KEY
        var geminiApiKey = SharedPreferencesHelper.get(this, SharedPreferencesHelper.GEMINI_API_KEY)
        if ("".equals(geminiApiKey)) {
            binding?.radioEngineGemini?.isEnabled = false
            binding?.radioEngineGemini?.isChecked = false

            // Default to TensorFlow if Gemini was selected but now disabled
            if (ENGINE_GEMINI.equals(input.regular.engine)) {
                binding?.radioEngineTensorflowLite?.isChecked = true
            }
        }

        // disable OpenRouter options if there's no API KEY
        var openRouterApiKey = SharedPreferencesHelper.get(this, SharedPreferencesHelper.OPENROUTER_API_KEY)
        if ("".equals(openRouterApiKey)) {
            binding?.radioEngineOpenRouter?.isEnabled = false
            binding?.radioEngineOpenRouter?.isChecked = false

            // Default to TensorFlow if OpenRouter was selected but now disabled
            if (ENGINE_OPENROUTER.equals(input.regular.engine)) {
                binding?.radioEngineTensorflowLite?.isChecked = true
            }
        }
    }

    // Builds the configuration Tasker will persist from the current UI state.
    override val inputForTasker: TaskerInput get() {
        var engine = ENGINE_TENSORFLOW // fail-safe local default
        if (binding?.radioEngineClaudeAI?.isChecked == true) {
            engine = ENGINE_CLAUDEAI
        } else if (binding?.radioEngineGemini?.isChecked == true) {
            engine = ENGINE_GEMINI
        } else if (binding?.radioEngineOpenRouter?.isChecked == true) {
            engine = ENGINE_OPENROUTER
        }
        return TaskerInput(DetectHumansInput(binding?.editFileName?.text?.toString(), engine))
    }

    override val context get() = applicationContext
    private val taskerHelper by lazy { DetectHumansActionHelper(this) }
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        binding = ActivityConfigDetectHumansBinding.inflate(layoutInflater)

        binding.buttonOK.setOnClickListener {
            // OK pressed: validate and return the configuration to Tasker.
            taskerHelper.finishForTasker()
        }
        setContentView(binding.root)
        taskerHelper.onCreate()
    }
}
122 |
/**
 * Executes the "Detect Humans" action: dispatches to the engine selected in
 * the saved configuration and maps the detector's result to a Tasker
 * success/error outcome. A detector result of -1 means failure; any other
 * value is passed through as the detection result.
 */
// NOTE(review): generic type arguments (on TaskerPluginRunnerAction,
// TaskerInput, TaskerPluginResult) appear stripped in this dump — verify
// against the original file.
class DetectHumansActionRunner : TaskerPluginRunnerAction() {
    override fun run(context: Context, input: TaskerInput): TaskerPluginResult {
        var result: Int = 0
        var resultReason = "";
        var resultError = "";

        // Here the plugin EXECUTES
        if (ENGINE_CLAUDEAI.equals(input.regular.engine)) {
            // result = HumansDetectorClaudeAI.detectHumans(context, input.regular.imagePath);
            val htc = HumansDetectorClaudeAI()
            htc.setup(context)
            result = htc.detectPerson(context, input.regular.imagePath)
            resultReason = htc.getLastResponse()
            if (result==-1)
                resultError = htc.getLastError()
        } else if (ENGINE_GEMINI.equals(input.regular.engine)) {
            val htg = HumansDetectorGemini()
            htg.setup(context)
            result = htg.detectPerson(context, input.regular.imagePath)
            resultReason = htg.getLastResponse()
            if (result==-1)
                resultError = htg.getLastError()
        } else if (ENGINE_OPENROUTER.equals(input.regular.engine)) {
            val hto = HumansDetectorOpenRouter()
            hto.setup(context)
            result = hto.detectPerson(context, input.regular.imagePath)
            resultReason = hto.getLastResponse()
            if (result==-1)
                resultError = hto.getLastError()
        } else {
            // default = TENSORFLOW (also covers null/unknown engine values,
            // preserving backward compatibility with older saved profiles)
            var path = input.regular.imagePath;
            if (path==null)
                path="FAIL"
            result = HumansDetectorTensorFlow.detectHumans(context, path);
        }

        // Map the detector outcome to a Tasker result, attaching the engine's
        // error text when one was reported.
        if (result == -1) {
            if (resultError.equals(""))
                return TaskerPluginResultErrorWithOutput(-1,"Failed to perform detection on "+input.regular.imagePath)
            else
                return TaskerPluginResultErrorWithOutput(-1,"Failed to perform detection on "+input.regular.imagePath+" "+resultError)
        } else {
            return TaskerPluginResultSucess(DetectHumansOutput(result, resultReason))
        }
    }
}
170 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/notification/NotificationInterceptorService.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.notification
2 |
3 | import android.app.Notification
4 | import android.content.Intent
5 | import android.content.pm.PackageManager
6 | import android.service.notification.NotificationListenerService
7 | import android.service.notification.StatusBarNotification
8 | import android.util.Log
9 | import online.avogadro.opencv4tasker.app.OpenCV4TaskerApplication
10 | import online.avogadro.opencv4tasker.app.SharedPreferencesHelper
11 | import online.avogadro.opencv4tasker.tasker.NotificationInterceptedEvent
12 | import online.avogadro.opencv4tasker.tasker.NotificationRaiser
13 |
class NotificationInterceptorService : NotificationListenerService() {

    // When true, every intercepted notification raises a debug event that dumps
    // all of the notification's extras instead of the normal payload.
    private val DEBUG = false

    private lateinit var imageExtractor: NotificationImageExtractor
    private lateinit var fileManager: NotificationFileManager

    override fun onCreate() {
        super.onCreate()
        Log.d(TAG, "NotificationInterceptorService created")
        imageExtractor = NotificationImageExtractor(this)
        fileManager = NotificationFileManager(this)
        // Remember the live bound instance so companion helpers can reach it
        instance = this
    }

    /**
     * Entry point for every notification posted on the device.
     *
     * When the Tasker notification event is enabled, extracts the title/text
     * (and an image, if present), then raises the Tasker event. Never throws:
     * all failures are logged and swallowed so the listener stays alive.
     */
    override fun onNotificationPosted(sbn: StatusBarNotification) {
        try {
            Log.d(TAG, "Notification posted from ${sbn.packageName}")

            // Check if event is enabled
            val isEventEnabled = SharedPreferencesHelper.getBoolean(
                this,
                SharedPreferencesHelper.NOTIFICATION_EVENT_ENABLED,
                false
            )

            if (!isEventEnabled) {
                Log.d(TAG, "Notification event is disabled, ignoring")
                return
            }

            // Extract basic notification info
            val notification = sbn.notification
            val packageName = sbn.packageName
            val notificationKey = sbn.key

            // Get app name
            val appName = getApplicationName(packageName)

            // Fix: EXTRA_TITLE / EXTRA_TEXT may be stored as any CharSequence
            // (e.g. a SpannableString); extras.getString() returns null for
            // those, silently dropping the text. Read as CharSequence instead.
            val title = notification.extras.getCharSequence(Notification.EXTRA_TITLE)?.toString() ?: ""
            val text = notification.extras.getCharSequence(Notification.EXTRA_TEXT)?.toString() ?: ""

            if (DEBUG) {
                // Trigger debug event with all notification.extras values
                val debugText = buildDebugText(sbn.notification)
                triggerTaskerEvent(title, debugText, "", packageName, appName, notificationKey)
                return
            }

            Log.d(TAG, "Title: $title, Text: $text")

            val imagePath = extractImagePath(notification, packageName)

            // Trigger Tasker event for all notifications (with or without images)
            // The filtering based on image requirement will happen in the event condition logic
            triggerTaskerEvent(title, text, imagePath, packageName, appName, notificationKey)

        } catch (e: Exception) {
            Log.e(TAG, "Error processing notification", e)
        }
    }

    // Extracts the notification's image into a temp file and returns its
    // absolute path, or "" when there is no image or extraction/saving fails.
    private fun extractImagePath(notification: Notification, packageName: String): String {
        if (!imageExtractor.hasImage(notification)) {
            Log.d(TAG, "No image found in notification")
            return ""
        }
        Log.d(TAG, "Image found in notification, extracting it")

        val imageBitmap = imageExtractor.extractImage(notification)
        if (imageBitmap == null) {
            Log.d(TAG, "Failed to extract image from notification")
            return ""
        }
        Log.d(TAG, "Image extracted, saving to temp file")

        val imageFile = fileManager.saveImageToTemp(imageBitmap, packageName)
        if (imageFile == null) {
            Log.e(TAG, "Failed to save image to temporary file")
            return ""
        }
        Log.d(TAG, "Image saved to: ${imageFile.absolutePath}")
        return imageFile.absolutePath
    }

    override fun onNotificationRemoved(sbn: StatusBarNotification) {
        Log.d(TAG, "Notification removed from ${sbn.packageName}")
    }

    // Resolves a human-readable application label; falls back to the raw
    // package name when the package is not installed or not visible to us.
    private fun getApplicationName(packageName: String): String {
        return try {
            val applicationInfo = packageManager.getApplicationInfo(packageName, 0)
            packageManager.getApplicationLabel(applicationInfo).toString()
        } catch (e: PackageManager.NameNotFoundException) {
            Log.w(TAG, "Could not get app name for package: $packageName")
            packageName // Fallback to package name
        }
    }

    // Builds a "key: value" dump of every extra in the notification, one per line.
    private fun buildDebugText(notification: Notification): String {
        val extras = notification.extras
        return extras.keySet().joinToString("\n") { key -> "$key: ${extras.get(key)}" }
    }

    // Raises the Tasker event carrying the intercepted notification's data.
    // Never throws: failures are logged so the listener service keeps running.
    // Note: the legacy sendBroadcast() path that used to follow the raise was
    // unreachable (guarded by `if (1==1) return`) and has been removed.
    private fun triggerTaskerEvent(
        title: String,
        notificationText: String,
        imagePath: String,
        packageName: String,
        appName: String,
        notificationKey: String
    ) {
        try {
            Log.d(TAG, "Triggering Tasker event with data: title=$title, text=$notificationText, imagePath=$imagePath, packageName=$packageName, appName=$appName, key=$notificationKey")

            val notificationData = NotificationInterceptedEvent(title, notificationText, imagePath, appName, packageName, notificationKey)

            // Kept from the original: set each field explicitly in addition to the
            // constructor, in case constructor argument mapping differs.
            notificationData.notificationTitle = title
            notificationData.notificationText = notificationText
            notificationData.appPackage = packageName
            notificationData.appName = appName
            notificationData.imagePath = imagePath
            notificationData.notificationKey = notificationKey

            NotificationRaiser.raiseAlarmEvent(OpenCV4TaskerApplication.getInstance(), notificationData)
        } catch (e: Exception) {
            Log.e(TAG, "Error triggering Tasker event", e)
        }
    }

    override fun onDestroy() {
        super.onDestroy()
        Log.d(TAG, "NotificationInterceptorService destroyed")

        // Clean up old temporary files
        fileManager.cleanupOldFiles()

        // Clear the instance reference so companion helpers stop using a dead service
        instance = null
    }

    companion object {
        private const val TAG = "NotificationInterceptor"
        private const val TEMP_DIR = "notification_images"

        // Kept for backward compatibility: the broadcast path that used this
        // action has been removed, but external configs may still reference it.
        const val ACTION_NOTIFICATION_INTERCEPTED = "online.avogadro.opencv4tasker.NOTIFICATION_INTERCEPTED"

        private var instance: NotificationInterceptorService? = null

        fun getInstance(): NotificationInterceptorService? = instance

        /**
         * Cancels (dismisses) the notification identified by [notificationKey].
         *
         * @return true when the cancel call was issued; false when the listener
         *         service is not running or the call failed.
         */
        fun cancelNotificationByKey(notificationKey: String): Boolean {
            // Fix: previously this logged success and returned true even when
            // the service instance was null and nothing was canceled.
            val service = instance
            if (service == null) {
                Log.w(TAG, "Cannot cancel notification, listener service not running; key: $notificationKey")
                return false
            }
            return try {
                service.cancelNotification(notificationKey)
                Log.d(TAG, "Successfully canceled notification with key: $notificationKey")
                true
            } catch (e: Exception) {
                Log.e(TAG, "Error canceling notification with key: $notificationKey", e)
                false
            }
        }
    }
}
210 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/claudeai/HumansDetectorClaudeAI.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.claudeai;
2 |
3 | import android.content.Context;
4 | import android.util.Base64;
5 | import android.util.Log;
6 |
7 | import org.json.JSONArray;
8 | import org.json.JSONException;
9 | import org.json.JSONObject;
10 |
11 | import java.io.BufferedReader;
12 | import java.io.File;
13 | import java.io.FileInputStream;
14 | import java.io.IOException;
15 | import java.io.InputStreamReader;
16 | import java.io.OutputStream;
17 | import java.net.HttpURLConnection;
18 | import java.net.URL;
19 |
20 | import online.avogadro.opencv4tasker.ai.AIImageAnalyzer;
21 | import online.avogadro.opencv4tasker.app.SharedPreferencesHelper;
22 | import online.avogadro.opencv4tasker.app.Util;
23 |
24 | public class HumansDetectorClaudeAI implements AIImageAnalyzer {
25 |
26 | private String API_KEY = "YOUR_API_KEY_HERE";
27 | private static final String API_URL = "https://api.anthropic.com/v1/messages";
28 |
29 | // Default system prompt for human detection
30 | private static final String PROMPT_SYSTEM=
31 | "The user will be providing images taken from cheap security cameras, these images might be taken during the day or the night and the angle may vary. Images are usually taken top-down, during the night images may be blurry due to person's movements. Please reply him with a single keyword in the first line and a brief explanation of your choice in the second line, chosen among these:\n" +
32 | "* HUMAN: an human or a part of an human (usually on the border of the image) is visible in the frame. The human may be seen from above since the camera is usually mounted on an high position\n" +
33 | "* SPIDER: no humans are visible but a spider is near the camera\n" +
34 | "* CAT: if it's an animal or a cat, it may be a cat walking away from the camera or walking toward the camera\n" +
35 | "* NONE: neither an human nor a spider are in frame\n" +
36 | "* UNCERTAIN: you were unable to tell in which of the above categories the image might fit. Use this response if you are not totally sure that the answer is one of the above\n" +
37 | "Ignore any shadows";
38 |
39 | static final String TAG = "HumansDetectorClaudeAI";
40 | public static final String CLAUDE_MODEL = "claude-sonnet-4-5-20250929"; // was: ""claude-3-5-sonnet-latest"; "claude-sonnet-4-20250514"
41 |
42 | private static final String CONTENT_TYPE_JPG="image/jpeg";
43 | private static final String CONTENT_TYPE_PNG="image/png";
44 |
45 | public String lastResponse = null;
46 | public Exception lastException = null;
47 | public String lastHttpResponse = null;
48 |
49 | /**
50 | * Detect humans and return the highest score
51 | * @param path in the form of file:///{something} or content:///{something}
52 | * @return 0-100+, lower values are lower scores. '-1' is a failure
53 | */
54 | public static int detectHumans(Context context, String path) throws IOException {
55 | HumansDetectorClaudeAI htc = new HumansDetectorClaudeAI();
56 | htc.setup(context);
57 | return htc.detectPerson(context,path);
58 | }
59 |
    /**
     * Loads the Claude API key from the app's shared preferences. Must be
     * called before detectPerson()/analyzeImage().
     *
     * @param ctx context used to access shared preferences
     * @throws IOException declared by the AIImageAnalyzer interface; not thrown here
     */
    @Override
    public void setup(Context ctx) throws IOException {
        // NOTE(review): result may be null if the user never configured a key — TODO confirm upstream validation
        API_KEY = SharedPreferencesHelper.get(ctx, SharedPreferencesHelper.CLAUDE_API_KEY);
    }
64 |
65 | public int detectPerson(Context ctx, String imagePath) {
66 | lastResponse = null;
67 | lastException = null;
68 | String newPath = null;
69 | try {
70 | newPath = Util.contentToFile(ctx,imagePath);
71 | String claudeResponse = analyzeImage(PROMPT_SYSTEM, null, newPath);
72 | lastResponse = claudeResponse;
73 | String[] res=claudeResponse.split("\\r?\\n");
74 | if (res[0].trim().equals("HUMAN"))
75 | return 100;
76 | else if (res[0].trim().equals("NONE"))
77 | return 0;
78 | else if (res[0].trim().equals("SPIDER"))
79 | return 0;
80 | else if (res[0].trim().equals("CAT"))
81 | return 0;
82 | else if (res[0].trim().equals("UNCERTAIN"))
83 | return 30;
84 | else
85 | return -1; // issues
86 |
87 | } catch (IOException | JSONException e) {
88 | Log.e(TAG, "Failed to examine file "+newPath,e);
89 | lastException = e;
90 | return -1;
91 | } finally {
92 | if (newPath!=null && !newPath.equals(imagePath))
93 | new File(newPath).delete();
94 | }
95 | }
96 |
    /**
     * @return the raw text of the model's last reply, or null if no call has
     *         completed successfully since the last detectPerson() started
     */
    @Override
    public String getLastResponse() {
        return lastResponse;
    }
101 |
102 | @Override
103 | public String getLastError() {
104 | String res = lastHttpResponse;
105 | if (lastException!=null)
106 | res+="\n"+lastException;
107 | return res;
108 | }
109 |
110 | @Override
111 | public String analyzeImage(String systemPrompt, String userPrompt, String imagePath) throws IOException, JSONException {
112 | lastHttpResponse = null;
113 | URL url = new URL(API_URL);
114 | HttpURLConnection connection = (HttpURLConnection) url.openConnection();
115 | connection.setRequestMethod("POST");
116 | connection.setRequestProperty("Content-Type", "application/json");
117 | connection.setRequestProperty("X-API-Key", API_KEY);
118 | connection.setRequestProperty("anthropic-version","2023-06-01");
119 | connection.setDoOutput(true);
120 |
121 | String imageBase64 = encodeImageToBase64(imagePath);
122 |
123 | JSONObject jsonBody = new JSONObject();
124 | jsonBody.put("model", CLAUDE_MODEL);
125 | jsonBody.put("max_tokens", 1000);
126 | jsonBody.put("temperature", 0.0f);
127 | jsonBody.put("system", systemPrompt);
128 |
129 | JSONArray messages = new JSONArray();
130 |
131 | JSONObject userMessage = new JSONObject();
132 | userMessage.put("role", "user");
133 |
134 | String imageContentType = CONTENT_TYPE_JPG;
135 | if (imagePath.toLowerCase().endsWith(".png"))
136 | imageContentType = CONTENT_TYPE_PNG;
137 |
138 | JSONArray contentArray = new JSONArray();
139 | if (userPrompt!=null)
140 | contentArray.put(new JSONObject().put("type", "text").put("text", userPrompt));
141 |
142 | contentArray.put(new JSONObject().put("type", "image").put("source", new JSONObject().put("type", "base64").put("media_type", imageContentType).put("data", imageBase64)));
143 |
144 | userMessage.put("content", contentArray);
145 | messages.put(userMessage);
146 |
147 | jsonBody.put("messages", messages);
148 |
149 | OutputStream os = connection.getOutputStream();
150 | os.write(jsonBody.toString().getBytes());
151 | os.flush();
152 | os.close();
153 |
154 | int responseCode = connection.getResponseCode();
155 | if (responseCode == HttpURLConnection.HTTP_OK) {
156 | BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
157 | String inputLine;
158 | StringBuilder response = new StringBuilder();
159 | while ((inputLine = in.readLine()) != null) {
160 | response.append(inputLine);
161 | }
162 | in.close();
163 |
164 | lastHttpResponse = response.toString();
165 |
166 | JSONObject jsonResponse = new JSONObject(response.toString());
167 | JSONArray ja = jsonResponse.getJSONArray("content");
168 | for (int i=0; i= 0) {
195 | offset += bytesRead;
196 | }
197 | if (offset != bytes.length) {
198 | throw new IOException("Could not completely read file " + file.getName());
199 | }
200 | }
201 | return Base64.encodeToString(bytes, Base64.NO_WRAP);
202 | }
203 | }
204 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/openrouter/HumansDetectorOpenRouter.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.openrouter;
2 |
3 | import android.content.Context;
4 | import android.util.Base64;
5 | import android.util.Log;
6 |
7 | import org.json.JSONArray;
8 | import org.json.JSONException;
9 | import org.json.JSONObject;
10 |
11 | import java.io.BufferedReader;
12 | import java.io.File;
13 | import java.io.FileInputStream;
14 | import java.io.IOException;
15 | import java.io.InputStreamReader;
16 | import java.io.OutputStream;
17 | import java.net.HttpURLConnection;
18 | import java.net.URL;
19 |
20 | import online.avogadro.opencv4tasker.ai.AIImageAnalyzer;
21 | import online.avogadro.opencv4tasker.app.SharedPreferencesHelper;
22 | import online.avogadro.opencv4tasker.app.Util;
23 |
24 | public class HumansDetectorOpenRouter implements AIImageAnalyzer {
25 |
26 | private String API_KEY = "YOUR_API_KEY_HERE";
27 | private String MODEL_NAME = "qwen/qwen2.5-vl-32b-instruct:free";
28 | private static final String API_URL = "https://openrouter.ai/api/v1/chat/completions";
29 |
30 | private static final String PROMPT_SYSTEM =
31 | "The user will be providing images taken from cheap security cameras, these images might be taken during the day or the night and the angle may vary. Images are usually taken top-down, during the night images may be blurry due to person's movements. Please reply him with a single keyword in the first line and a brief explanation of your choice in the second line, chosen among these:\n" +
32 | "* HUMAN: an human or a part of an human (usually on the border of the image) is visible in the frame. The human may be seen from above since the camera is usually mounted on an high position\n" +
33 | "* SPIDER: no humans are visible but a spider is near the camera\n" +
34 | "* CAT: if it's an animal or a cat, it may be a cat walking away from the camera or walking toward the camera\n" +
35 | "* NONE: neither an human nor a spider are in frame\n" +
36 | "* UNCERTAIN: you were unable to tell in which of the above categories the image might fit. Use this response if you are not totally sure that the answer is one of the above\n" +
37 | "Ignore any shadows";
38 |
39 | static final String TAG = "HumansDetectorOpenRouter";
40 |
41 | public String lastResponse = null;
42 | public Exception lastException = null;
43 | public String lastHttpResponse = null;
44 |
45 | public static int detectHumans(Context context, String path) throws IOException {
46 | HumansDetectorOpenRouter detector = new HumansDetectorOpenRouter();
47 | detector.setup(context);
48 | return detector.detectPerson(context, path);
49 | }
50 |
51 | @Override
52 | public void setup(Context ctx) throws IOException {
53 | API_KEY = SharedPreferencesHelper.get(ctx, SharedPreferencesHelper.OPENROUTER_API_KEY);
54 | MODEL_NAME = SharedPreferencesHelper.get(ctx, SharedPreferencesHelper.OPENROUTER_MODEL);
55 | if (MODEL_NAME == null || MODEL_NAME.isEmpty()) {
56 | MODEL_NAME = "qwen/qwen2.5-vl-32b-instruct:free";
57 | }
58 | }
59 |
60 | public int detectPerson(Context ctx, String imagePath) {
61 | lastResponse = null;
62 | lastException = null;
63 | String newPath = null;
64 | try {
65 | newPath = Util.contentToFile(ctx, imagePath);
66 | String openRouterResponse = analyzeImage(PROMPT_SYSTEM, null, newPath);
67 | lastResponse = openRouterResponse;
68 | String[] res = openRouterResponse.split("\\r?\\n");
69 | if (res[0].trim().startsWith("HUMAN"))
70 | return 100;
71 | else if (res[0].trim().startsWith("NONE"))
72 | return 0;
73 | else if (res[0].trim().startsWith("SPIDER"))
74 | return 0;
75 | else if (res[0].trim().startsWith("CAT"))
76 | return 0;
77 | else if (res[0].trim().startsWith("UNCERTAIN"))
78 | return 30;
79 | else
80 | return -1;
81 |
82 | } catch (IOException | JSONException e) {
83 | Log.e(TAG, "Failed to examine file " + newPath, e);
84 | lastException = e;
85 | return -1;
86 | } finally {
87 | if (newPath != null && !newPath.equals(imagePath))
88 | new File(newPath).delete();
89 | }
90 | }
91 |
92 | @Override
93 | public String getLastResponse() {
94 | return lastResponse;
95 | }
96 |
97 | @Override
98 | public String getLastError() {
99 | String res = lastHttpResponse;
100 | if (lastException != null)
101 | res += "\n" + lastException;
102 | return res;
103 | }
104 |
105 | @Override
106 | public String analyzeImage(String systemPrompt, String userPrompt, String imagePath) throws IOException, JSONException {
107 | lastHttpResponse = null;
108 | URL url = new URL(API_URL);
109 | HttpURLConnection connection = (HttpURLConnection) url.openConnection();
110 | connection.setRequestMethod("POST");
111 | connection.setRequestProperty("Content-Type", "application/json");
112 | connection.setRequestProperty("Authorization", "Bearer " + API_KEY);
113 | connection.setDoOutput(true);
114 |
115 | String imageBase64 = encodeImageToBase64(imagePath);
116 | String imageType = imagePath.toLowerCase().endsWith(".png") ? "image/png" : "image/jpeg";
117 | String imageDataUrl = "data:" + imageType + ";base64," + imageBase64;
118 |
119 | JSONObject jsonBody = new JSONObject();
120 | jsonBody.put("model", MODEL_NAME);
121 | jsonBody.put("max_tokens", 1000);
122 | jsonBody.put("temperature", 0.0);
123 |
124 | JSONArray messages = new JSONArray();
125 |
126 | if (systemPrompt != null && !systemPrompt.isEmpty()) {
127 | JSONObject systemMessage = new JSONObject();
128 | systemMessage.put("role", "system");
129 | systemMessage.put("content", systemPrompt);
130 | messages.put(systemMessage);
131 | }
132 |
133 | JSONObject userMessage = new JSONObject();
134 | userMessage.put("role", "user");
135 |
136 | JSONArray contentArray = new JSONArray();
137 |
138 | if (userPrompt != null && !userPrompt.isEmpty()) {
139 | JSONObject textContent = new JSONObject();
140 | textContent.put("type", "text");
141 | textContent.put("text", userPrompt);
142 | contentArray.put(textContent);
143 | }
144 |
145 | JSONObject imageContent = new JSONObject();
146 | imageContent.put("type", "image_url");
147 | JSONObject imageUrl = new JSONObject();
148 | imageUrl.put("url", imageDataUrl);
149 | imageContent.put("image_url", imageUrl);
150 | contentArray.put(imageContent);
151 |
152 | userMessage.put("content", contentArray);
153 | messages.put(userMessage);
154 |
155 | jsonBody.put("messages", messages);
156 |
157 | OutputStream os = connection.getOutputStream();
158 | os.write(jsonBody.toString().getBytes());
159 | os.flush();
160 | os.close();
161 |
162 | int responseCode = connection.getResponseCode();
163 | if (responseCode == HttpURLConnection.HTTP_OK) {
164 | BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
165 | String inputLine;
166 | StringBuilder response = new StringBuilder();
167 | while ((inputLine = in.readLine()) != null) {
168 | response.append(inputLine);
169 | }
170 | in.close();
171 |
172 | lastHttpResponse = response.toString();
173 |
174 | JSONObject jsonResponse = new JSONObject(response.toString());
175 | JSONArray choices = jsonResponse.getJSONArray("choices");
176 | if (choices.length() > 0) {
177 | JSONObject choice = choices.getJSONObject(0);
178 | JSONObject message = choice.getJSONObject("message");
179 | return message.getString("content");
180 | }
181 | throw new IOException("No choices found in response: " + response.toString());
182 | } else {
183 | BufferedReader errorReader = new BufferedReader(new InputStreamReader(connection.getErrorStream()));
184 | String inputLine;
185 | StringBuilder response = new StringBuilder();
186 | while ((inputLine = errorReader.readLine()) != null) {
187 | response.append(inputLine);
188 | }
189 | errorReader.close();
190 | String r = response.toString();
191 | lastHttpResponse = r;
192 | throw new IOException("Error " + responseCode + " " + r);
193 | }
194 | }
195 |
196 | private String encodeImageToBase64(String imagePath) throws IOException {
197 | File file = new File(imagePath);
198 | byte[] bytes = new byte[(int) file.length()];
199 | try (FileInputStream fis = new FileInputStream(file)) {
200 | int bytesRead = 0;
201 | int offset = 0;
202 | while (offset < bytes.length && (bytesRead = fis.read(bytes, offset, bytes.length - offset)) >= 0) {
203 | offset += bytesRead;
204 | }
205 | if (offset != bytes.length) {
206 | throw new IOException("Could not completely read file " + file.getName());
207 | }
208 | }
209 | return Base64.encodeToString(bytes, Base64.NO_WRAP);
210 | }
211 | }
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/gemini/HumansDetectorGemini.java:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.gemini;
2 |
3 | import android.content.Context;
4 | import android.util.Base64;
5 | import android.util.Log;
6 |
7 | import org.json.JSONArray;
8 | import org.json.JSONException;
9 | import org.json.JSONObject;
10 |
11 | import java.io.BufferedReader;
12 | import java.io.File;
13 | import java.io.FileInputStream;
14 | import java.io.IOException;
15 | import java.io.InputStreamReader;
16 | import java.io.OutputStream;
17 | import java.net.HttpURLConnection;
18 | import java.net.URL;
19 |
20 | import online.avogadro.opencv4tasker.ai.AIImageAnalyzer;
21 | import online.avogadro.opencv4tasker.app.SharedPreferencesHelper;
22 | import online.avogadro.opencv4tasker.app.Util;
23 |
24 | public class HumansDetectorGemini implements AIImageAnalyzer {
25 |
26 | private String API_KEY = "YOUR_API_KEY_HERE";
27 |
28 | private static final String MODEL = "gemini-2.5-flash";
29 | private static final String API_URL = "https://generativelanguage.googleapis.com/v1beta/models/"+MODEL+":generateContent";
30 |
31 | private static final String PROMPT_SYSTEM =
32 | "The user will be providing images taken from cheap security cameras, these images might be taken during the day or the night and the angle may vary. Images are usually taken top-down, during the night images may be blurry due to person's movements. Please reply him with a single keyword in the first line and a brief explanation of your choice in the second line, chosen among these:\n" +
33 | "* HUMAN: an human or a part of an human (usually on the border of the image) is visible in the frame. The human may be seen from above since the camera is usually mounted on an high position\n" +
34 | "* SPIDER: no humans are visible but a spider is near the camera\n" +
35 | "* CAT: if it's an animal or a cat, it may be a cat walking away from the camera or walking toward the camera\n" +
36 | "* NONE: neither an human nor a spider are in frame\n" +
37 | "* UNCERTAIN: you were unable to tell in which of the above categories the image might fit. Use this response if you are not totally sure that the answer is one of the above\n" +
38 | "Ignore any shadows";
39 |
40 | static final String TAG = "HumansDetectorGemini";
41 | public static final String GEMINI_MODEL = "gemini-pro-vision";
42 |
43 | private static final String CONTENT_TYPE_JPG = "image/jpeg";
44 | private static final String CONTENT_TYPE_PNG = "image/png";
45 |
46 | public String lastResponse = null;
47 | public Exception lastException = null;
48 | public String lastHttpResponse = null;
49 |
50 | /**
51 | * Detect humans and return the highest score
52 | * @param path in the form of file:///{something} or content:///{something}
53 | * @return 0-100+, lower values are lower scores. '-1' is a failure
54 | */
55 | public static int detectHumans(Context context, String path) throws IOException {
56 | HumansDetectorGemini htg = new HumansDetectorGemini();
57 | htg.setup(context);
58 | return htg.detectPerson(context, path);
59 | }
60 |
61 | @Override
62 | public void setup(Context ctx) throws IOException {
63 | API_KEY = SharedPreferencesHelper.get(ctx, SharedPreferencesHelper.GEMINI_API_KEY);
64 | }
65 |
66 | public int detectPerson(Context ctx, String imagePath) {
67 | lastResponse = null;
68 | lastException = null;
69 | String newPath = null;
70 | try {
71 | newPath = Util.contentToFile(ctx, imagePath);
72 | String geminiResponse = analyzeImage(PROMPT_SYSTEM, null, newPath);
73 | lastResponse = geminiResponse;
74 | String[] res = geminiResponse.split("\\r?\\n");
75 | if (res[0].trim().equals("HUMAN"))
76 | return 100;
77 | else if (res[0].trim().equals("NONE"))
78 | return 0;
79 | else if (res[0].trim().equals("SPIDER"))
80 | return 0;
81 | else if (res[0].trim().equals("CAT"))
82 | return 0;
83 | else if (res[0].trim().equals("UNCERTAIN"))
84 | return 30;
85 | else
86 | return -1; // issues
87 |
88 | } catch (IOException | JSONException e) {
89 | Log.e(TAG, "Failed to examine file " + newPath, e);
90 | lastException = e;
91 | return -1;
92 | } finally {
93 | if (newPath != null && !newPath.equals(imagePath))
94 | new File(newPath).delete();
95 | }
96 | }
97 |
98 | @Override
99 | public String getLastResponse() {
100 | return lastResponse;
101 | }
102 |
103 | @Override
104 | public String getLastError() {
105 | String res = lastHttpResponse;
106 | if (lastException != null)
107 | res += "\n" + lastException;
108 | return res;
109 | }
110 |
111 | @Override
112 | public String analyzeImage(String systemPrompt, String userPrompt, String imagePath) throws IOException, JSONException {
113 | lastHttpResponse = null;
114 | // API Key is included in the URL for Gemini API
115 | URL url = new URL(API_URL + "?key=" + API_KEY);
116 | HttpURLConnection connection = (HttpURLConnection) url.openConnection();
117 | connection.setRequestMethod("POST");
118 | connection.setRequestProperty("Content-Type", "application/json");
119 | connection.setDoOutput(true);
120 |
121 | String imageBase64 = encodeImageToBase64(imagePath);
122 | String imageContentType = imagePath.toLowerCase().endsWith(".png") ? CONTENT_TYPE_PNG : CONTENT_TYPE_JPG;
123 |
124 | // Create JSON body for Gemini API
125 | JSONObject jsonBody = new JSONObject();
126 |
127 | // Create contents array with parts
128 | JSONArray contents = new JSONArray();
129 | JSONObject content = new JSONObject();
130 |
131 | // Add parts array with text prompt and image
132 | JSONArray parts = new JSONArray();
133 |
134 | // Add text prompt
135 | parts.put(new JSONObject().put("text", systemPrompt + (userPrompt != null ? "\n" + userPrompt : "")));
136 |
137 | // Add image
138 | JSONObject imagePart = new JSONObject();
139 | JSONObject inlineData = new JSONObject();
140 | inlineData.put("mimeType", imageContentType);
141 | inlineData.put("data", imageBase64);
142 | imagePart.put("inlineData", inlineData);
143 | parts.put(imagePart);
144 |
145 | content.put("parts", parts);
146 | contents.put(content);
147 | jsonBody.put("contents", contents);
148 |
149 | // Gemini API specific parameters
150 | jsonBody.put("generationConfig", new JSONObject()
151 | .put("temperature", 0.0)
152 | .put("maxOutputTokens", 1000));
153 |
154 | OutputStream os = connection.getOutputStream();
155 | os.write(jsonBody.toString().getBytes());
156 | os.flush();
157 | os.close();
158 |
159 | int responseCode = connection.getResponseCode();
160 | if (responseCode == HttpURLConnection.HTTP_OK) {
161 | BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
162 | String inputLine;
163 | StringBuilder response = new StringBuilder();
164 | while ((inputLine = in.readLine()) != null) {
165 | response.append(inputLine);
166 | }
167 | in.close();
168 |
169 | lastHttpResponse = response.toString();
170 |
171 | // Parse Gemini response
172 | JSONObject jsonResponse = new JSONObject(response.toString());
173 | if (!jsonResponse.has("candidates") || jsonResponse.getJSONArray("candidates").length() == 0) {
174 | throw new IOException("No candidates in response: " + response.toString());
175 | }
176 |
177 | JSONObject candidate = jsonResponse.getJSONArray("candidates").getJSONObject(0);
178 | if (!candidate.has("content") || !candidate.getJSONObject("content").has("parts")) {
179 | throw new IOException("Unexpected response format: " + response.toString());
180 | }
181 |
182 | JSONArray responseParts = candidate.getJSONObject("content").getJSONArray("parts");
183 | for (int i = 0; i < responseParts.length(); i++) {
184 | JSONObject part = responseParts.getJSONObject(i);
185 | if (part.has("text")) {
186 | return part.getString("text").trim();
187 | }
188 | }
189 |
190 | throw new IOException("No text found in response: " + response.toString());
191 | } else {
192 | BufferedReader in;
193 | try {
194 | in = new BufferedReader(new InputStreamReader(connection.getErrorStream()));
195 | } catch (Exception e) {
196 | throw new IOException("Error " + responseCode);
197 | }
198 | String inputLine;
199 | StringBuilder response = new StringBuilder();
200 | while ((inputLine = in.readLine()) != null) {
201 | response.append(inputLine);
202 | }
203 | in.close();
204 | String errorResponse = response.toString();
205 | lastHttpResponse = errorResponse;
206 | throw new IOException("Error " + responseCode + " " + errorResponse);
207 | }
208 | }
209 |
210 | private String encodeImageToBase64(String imagePath) throws IOException {
211 | File file = new File(imagePath);
212 | byte[] bytes = new byte[(int) file.length()];
213 | try (FileInputStream fis = new FileInputStream(file)) {
214 | int bytesRead = 0;
215 | int offset = 0;
216 | while (offset < bytes.length && (bytesRead = fis.read(bytes, offset, bytes.length - offset)) >= 0) {
217 | offset += bytesRead;
218 | }
219 | if (offset != bytes.length) {
220 | throw new IOException("Could not completely read file " + file.getName());
221 | }
222 | }
223 | return Base64.encodeToString(bytes, Base64.NO_WRAP);
224 | }
225 | }
226 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/tasker/AnalyzeImageActionHelper.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.tasker
2 |
3 | import android.app.Activity
4 | import android.content.Context
5 | import android.os.Bundle
6 | import com.joaomgcd.taskerpluginlibrary.action.TaskerPluginRunnerAction
7 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfig
8 | import com.joaomgcd.taskerpluginlibrary.config.TaskerPluginConfigHelper
9 | import com.joaomgcd.taskerpluginlibrary.input.TaskerInput
10 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResult
11 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultErrorWithOutput
12 | import com.joaomgcd.taskerpluginlibrary.runner.TaskerPluginResultSucess
13 | import online.avogadro.opencv4tasker.app.SharedPreferencesHelper
14 | import online.avogadro.opencv4tasker.app.Util
15 | import online.avogadro.opencv4tasker.claudeai.HumansDetectorClaudeAI
16 | import online.avogadro.opencv4tasker.databinding.ActivityConfigAnalyzeImageBinding
17 | import online.avogadro.opencv4tasker.gemini.HumansDetectorGemini
18 | import online.avogadro.opencv4tasker.openrouter.HumansDetectorOpenRouter
19 | import java.io.File
20 |
21 | const val ENGINE_ANALYZE_CLAUDEAI = "CLAUDE"
22 | const val ENGINE_ANALYZE_GEMINI = "GEMINI"
23 | const val ENGINE_ANALYZE_OPENROUTER = "OPENROUTER"
24 |
25 | class AnalyzeImageActionHelper(config: TaskerPluginConfig) : TaskerPluginConfigHelper(config) {
26 | override val runnerClass: Class get() = AnalyzeImageActionRunner::class.java
27 | override val inputClass = AnalyzeImageInput::class.java
28 | override val outputClass = AnalyzeImageOutput::class.java
29 | override fun addToStringBlurb(input: TaskerInput, blurbBuilder: StringBuilder) {
30 | blurbBuilder.append(" analyze image with AI")
31 | }
32 | }
33 |
34 | class ActivityConfigAnalyzeImageAction : Activity(), TaskerPluginConfig {
35 |
36 | private lateinit var binding: ActivityConfigAnalyzeImageBinding
37 |
38 | override fun assignFromInput(input: TaskerInput) {
39 | binding.editImagePath.setText(input.regular.imagePath ?: "")
40 | binding.editSystemPrompt.setText(input.regular.systemPrompt ?: "")
41 | binding.editUserPrompt.setText(input.regular.userPrompt ?: "")
42 |
43 | // Reset all radio buttons
44 | binding.radioEngineClaudeAI.isChecked = false
45 | binding.radioEngineGemini.isChecked = false
46 | binding.radioEngineOpenRouter.isChecked = false
47 |
48 | // Set the appropriate radio button based on the engine
49 | when (input.regular.engine) {
50 | ENGINE_ANALYZE_CLAUDEAI -> binding.radioEngineClaudeAI.isChecked = true
51 | ENGINE_ANALYZE_GEMINI -> binding.radioEngineGemini.isChecked = true
52 | ENGINE_ANALYZE_OPENROUTER -> binding.radioEngineOpenRouter.isChecked = true
53 | else -> {
54 | // Default to Claude if available
55 | if (isClaudeAvailable()) {
56 | binding.radioEngineClaudeAI.isChecked = true
57 | } else if (isOpenRouterAvailable()) {
58 | binding.radioEngineOpenRouter.isChecked = true
59 | } else {
60 | binding.radioEngineGemini.isChecked = true
61 | }
62 | }
63 | }
64 |
65 | // Disable Claude option if no API KEY is available
66 | if (!isClaudeAvailable()) {
67 | binding.radioEngineClaudeAI.isEnabled = false
68 | binding.radioEngineClaudeAI.isChecked = false
69 |
70 | // Default to Gemini if Claude was selected but now disabled
71 | if (ENGINE_ANALYZE_CLAUDEAI == input.regular.engine) {
72 | binding.radioEngineGemini.isChecked = true
73 | }
74 | }
75 |
76 | // Disable Gemini option if no API KEY is available
77 | if (!isGeminiAvailable()) {
78 | binding.radioEngineGemini.isEnabled = false
79 | binding.radioEngineGemini.isChecked = false
80 |
81 | // Default to Claude if Gemini was selected but now disabled
82 | if (ENGINE_ANALYZE_GEMINI == input.regular.engine) {
83 | binding.radioEngineClaudeAI.isChecked = true
84 | }
85 | }
86 |
87 | // Disable OpenRouter option if no API KEY is available
88 | if (!isOpenRouterAvailable()) {
89 | binding.radioEngineOpenRouter.isEnabled = false
90 | binding.radioEngineOpenRouter.isChecked = false
91 |
92 | // Default to Claude if OpenRouter was selected but now disabled
93 | if (ENGINE_ANALYZE_OPENROUTER == input.regular.engine) {
94 | if (isClaudeAvailable()) {
95 | binding.radioEngineClaudeAI.isChecked = true
96 | } else {
97 | binding.radioEngineGemini.isChecked = true
98 | }
99 | }
100 | }
101 | }
102 |
103 | private fun isClaudeAvailable(): Boolean {
104 | val claudeApiKey = SharedPreferencesHelper.get(this, SharedPreferencesHelper.CLAUDE_API_KEY)
105 | return claudeApiKey.isNotEmpty()
106 | }
107 |
108 | private fun isGeminiAvailable(): Boolean {
109 | val geminiApiKey = SharedPreferencesHelper.get(this, SharedPreferencesHelper.GEMINI_API_KEY)
110 | return geminiApiKey.isNotEmpty()
111 | }
112 |
113 | private fun isOpenRouterAvailable(): Boolean {
114 | val openRouterApiKey = SharedPreferencesHelper.get(this, SharedPreferencesHelper.OPENROUTER_API_KEY)
115 | return openRouterApiKey.isNotEmpty()
116 | }
117 |
118 | override val inputForTasker: TaskerInput get() {
119 | val engine = when {
120 | binding.radioEngineClaudeAI.isChecked -> ENGINE_ANALYZE_CLAUDEAI
121 | binding.radioEngineGemini.isChecked -> ENGINE_ANALYZE_GEMINI
122 | binding.radioEngineOpenRouter.isChecked -> ENGINE_ANALYZE_OPENROUTER
123 | else -> ENGINE_ANALYZE_CLAUDEAI // Default to Claude
124 | }
125 |
126 | return TaskerInput(AnalyzeImageInput(
127 | binding.editImagePath.text?.toString(),
128 | engine,
129 | binding.editSystemPrompt.text?.toString(),
130 | binding.editUserPrompt.text?.toString()
131 | ))
132 | }
133 |
134 | override val context get() = applicationContext
135 | private val taskerHelper by lazy { AnalyzeImageActionHelper(this) }
136 | override fun onCreate(savedInstanceState: Bundle?) {
137 | super.onCreate(savedInstanceState)
138 | binding = ActivityConfigAnalyzeImageBinding.inflate(layoutInflater)
139 |
140 | binding.buttonOK.setOnClickListener {
141 | taskerHelper.finishForTasker()
142 | }
143 |
144 | setContentView(binding.root)
145 | taskerHelper.onCreate()
146 | }
147 | }
148 |
149 | class AnalyzeImageActionRunner : TaskerPluginRunnerAction() {
150 | override fun run(context: Context, input: TaskerInput): TaskerPluginResult {
151 | var response = ""
152 | var error = ""
153 | var newPath: String? = null
154 |
155 | try {
156 | // Get the image path
157 | val imagePath = input.regular.imagePath ?: return TaskerPluginResultErrorWithOutput(
158 | -1, "No image path provided"
159 | )
160 |
161 | // Convert content:// URIs to file paths if needed
162 | newPath = Util.contentToFile(context, imagePath)
163 |
164 | // Select the appropriate AI engine
165 | when (input.regular.engine) {
166 | ENGINE_ANALYZE_CLAUDEAI -> {
167 | val claude = HumansDetectorClaudeAI()
168 | claude.setup(context)
169 | response = claude.analyzeImage(
170 | input.regular.systemPrompt ?: "",
171 | input.regular.userPrompt,
172 | newPath
173 | )
174 | if (response.isEmpty()) {
175 | error = claude.getLastError()
176 | }
177 | }
178 | ENGINE_ANALYZE_GEMINI -> {
179 | val gemini = HumansDetectorGemini()
180 | gemini.setup(context)
181 | response = gemini.analyzeImage(
182 | input.regular.systemPrompt ?: "",
183 | input.regular.userPrompt,
184 | newPath
185 | )
186 | if (response.isEmpty()) {
187 | error = gemini.getLastError()
188 | }
189 | }
190 | ENGINE_ANALYZE_OPENROUTER -> {
191 | val openRouter = HumansDetectorOpenRouter()
192 | openRouter.setup(context)
193 | response = openRouter.analyzeImage(
194 | input.regular.systemPrompt ?: "",
195 | input.regular.userPrompt,
196 | newPath
197 | )
198 | if (response.isEmpty()) {
199 | error = openRouter.getLastError()
200 | }
201 | }
202 | else -> {
203 | return TaskerPluginResultErrorWithOutput(
204 | -1, "Invalid engine selected: ${input.regular.engine}"
205 | )
206 | }
207 | }
208 |
209 | // Check if we got a valid response
210 | if (response.isEmpty()) {
211 | return TaskerPluginResultErrorWithOutput(
212 | -1, "Failed to analyze image: $error"
213 | )
214 | }
215 |
216 | return TaskerPluginResultSucess(AnalyzeImageOutput(response))
217 |
218 | } catch (e: Exception) {
219 | return TaskerPluginResultErrorWithOutput(
220 | -1, "Error analyzing image: ${e.message}"
221 | )
222 | } finally {
223 | // Clean up temporary file if created
224 | if (newPath != null && newPath != input.regular.imagePath) {
225 | File(newPath).delete()
226 | }
227 | }
228 | }
229 | }
230 |
--------------------------------------------------------------------------------
/app/src/main/java/online/avogadro/opencv4tasker/notification/NotificationImageExtractor.kt:
--------------------------------------------------------------------------------
1 | package online.avogadro.opencv4tasker.notification
2 |
3 | import android.app.Notification
4 | import android.content.Context
5 | import android.graphics.Bitmap
6 | import android.graphics.BitmapFactory
7 | import android.graphics.drawable.BitmapDrawable
8 | import android.graphics.drawable.Icon
9 | import android.net.Uri
10 | import android.os.Build
11 | import android.util.Log
12 | import java.io.InputStream
13 | import java.net.URL
14 |
15 | class NotificationImageExtractor(private val context: Context) {
16 |
17 | companion object {
18 | private const val TAG = "NotificationImageExtractor"
19 | }
20 |
21 | /**
22 | * Extracts image from notification. Returns null if no image is found.
23 | * Prioritizes larger images when multiple versions are available.
24 | */
25 | fun extractImage(notification: Notification): Bitmap? {
26 | try {
27 | // Method 1: Check for BigPicture style images (largest)
28 | val pictureFromExtras = extractPictureFromExtras(notification)
29 | if (pictureFromExtras != null) {
30 | Log.d(TAG, "Found image in BigPicture extras")
31 | return pictureFromExtras
32 | }
33 |
34 | // Method 2: Check for EXTRA_LARGE_ICON_BIG (expanded large icon)
35 | val largeBigIcon = extractLargeBigIcon(notification)
36 | if (largeBigIcon != null) {
37 | Log.d(TAG, "Found image in EXTRA_LARGE_ICON_BIG")
38 | return largeBigIcon
39 | }
40 |
41 | // Method 3: Check for standard large icon
42 | val largeIcon = extractLargeIcon(notification)
43 | if (largeIcon != null) {
44 | Log.d(TAG, "Found image in large icon")
45 | return largeIcon
46 | }
47 |
48 | // Method 4: Check for other image sources in extras
49 | val otherImage = extractOtherImageFromExtras(notification)
50 | if (otherImage != null) {
51 | Log.d(TAG, "Found image in other extras")
52 | return otherImage
53 | }
54 |
55 | // Method 5: Check for URI-based images
56 | val uriImage = extractImageFromUri(notification)
57 | if (uriImage != null) {
58 | Log.d(TAG, "Found image from URI")
59 | return uriImage
60 | }
61 |
62 | Log.d(TAG, "No image found in notification")
63 | return null
64 |
65 | } catch (e: Exception) {
66 | Log.e(TAG, "Error extracting image from notification", e)
67 | return null
68 | }
69 | }
70 |
71 | private fun extractLargeIcon(notification: Notification): Bitmap? {
72 | return try {
73 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
74 | // For API 23+, use Icon
75 | val largeIcon = notification.getLargeIcon()
76 | if (largeIcon != null) {
77 | iconToBitmap(largeIcon)
78 | } else {
79 | null
80 | }
81 | } else {
82 | // For older versions, use deprecated largeIcon
83 | @Suppress("DEPRECATION")
84 | notification.largeIcon
85 | }
86 | } catch (e: Exception) {
87 | Log.e(TAG, "Error extracting large icon", e)
88 | null
89 | }
90 | }
91 |
92 | private fun extractLargeBigIcon(notification: Notification): Bitmap? {
93 | return try {
94 | val extras = notification.extras
95 |
96 | // Check for EXTRA_LARGE_ICON_BIG (android.largeIcon.big)
97 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
98 | val largeBigIcon = extras.getParcelable("android.largeIcon.big")
99 | if (largeBigIcon != null) {
100 | return iconToBitmap(largeBigIcon)
101 | }
102 | }
103 |
104 | // Also check as bitmap fallback
105 | val largeBigBitmap = extras.getParcelable("android.largeIcon.big")
106 | if (largeBigBitmap != null) {
107 | return largeBigBitmap
108 | }
109 |
110 | null
111 | } catch (e: Exception) {
112 | Log.e(TAG, "Error extracting EXTRA_LARGE_ICON_BIG", e)
113 | null
114 | }
115 | }
116 |
117 | private fun extractPictureFromExtras(notification: Notification): Bitmap? {
118 | return try {
119 | val extras = notification.extras
120 |
121 | // Check for BigPictureStyle picture
122 | val picture = extras.getParcelable(Notification.EXTRA_PICTURE)
123 | if (picture != null) {
124 | return picture
125 | }
126 |
127 | // Check for picture icon
128 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
129 | val pictureIcon = extras.getParcelable(Notification.EXTRA_PICTURE_ICON)
130 | if (pictureIcon != null) {
131 | return iconToBitmap(pictureIcon)
132 | }
133 | }
134 |
135 | null
136 | } catch (e: Exception) {
137 | Log.e(TAG, "Error extracting picture from extras", e)
138 | null
139 | }
140 | }
141 |
142 | private fun extractOtherImageFromExtras(notification: Notification): Bitmap? {
143 | return try {
144 | val extras = notification.extras
145 |
146 | // Skip background images as they are primarily aesthetic
147 | // Skip EXTRA_LARGE_ICON_BIG as it's handled in extractLargeBigIcon()
148 |
149 | // Check for other possible bitmap keys
150 | val possibleBitmapKeys = arrayOf(
151 | "android.rebuild.largeIcon",
152 | "android.icon",
153 | "android.picture",
154 | "android.bigLargeIcon",
155 | "android.media.metadata.ART", // MediaMetadata artwork
156 | "fcm_image", // FCM custom image field
157 | "image_url", // Common custom field
158 | "large_icon_url" // Vendor-specific field
159 | )
160 |
161 | for (key in possibleBitmapKeys) {
162 | val bitmap = extras.getParcelable(key)
163 | if (bitmap != null) {
164 | Log.d(TAG, "Found bitmap in key: $key")
165 | return bitmap
166 | }
167 | }
168 |
169 | null
170 | } catch (e: Exception) {
171 | Log.e(TAG, "Error extracting other images from extras", e)
172 | null
173 | }
174 | }
175 |
176 | private fun extractImageFromUri(notification: Notification): Bitmap? {
177 | return try {
178 | val extras = notification.extras
179 |
180 | // Check for URI-based image fields (excluding background images)
181 | val possibleUriKeys = arrayOf(
182 | "android.media.metadata.ALBUM_ART_URI",
183 | "image_url",
184 | "large_icon_url",
185 | "fcm_image_url"
186 | )
187 |
188 | for (key in possibleUriKeys) {
189 | val uriString = extras.getString(key)
190 | if (!uriString.isNullOrEmpty()) {
191 | Log.d(TAG, "Found URI in key: $key = $uriString")
192 | val bitmap = loadImageFromUri(uriString)
193 | if (bitmap != null) {
194 | return bitmap
195 | }
196 | }
197 | }
198 |
199 | null
200 | } catch (e: Exception) {
201 | Log.e(TAG, "Error extracting image from URI", e)
202 | null
203 | }
204 | }
205 |
206 | private fun loadImageFromUri(uriString: String): Bitmap? {
207 | return try {
208 | val uri = Uri.parse(uriString)
209 | when (uri.scheme?.lowercase()) {
210 | "content" -> {
211 | // Content URI - use ContentResolver
212 | val inputStream = context.contentResolver.openInputStream(uri)
213 | inputStream?.use { BitmapFactory.decodeStream(it) }
214 | }
215 | "file" -> {
216 | // File URI
217 | BitmapFactory.decodeFile(uri.path)
218 | }
219 | "http", "https" -> {
220 | // HTTP/HTTPS URL - load from web (should be done in background thread)
221 | Log.w(TAG, "HTTP URI found but not loading synchronously: $uriString")
222 | null // Don't load HTTP images synchronously to avoid blocking
223 | }
224 | else -> {
225 | Log.w(TAG, "Unsupported URI scheme: ${uri.scheme}")
226 | null
227 | }
228 | }
229 | } catch (e: Exception) {
230 | Log.e(TAG, "Error loading image from URI: $uriString", e)
231 | null
232 | }
233 | }
234 |
235 | private fun iconToBitmap(icon: Icon): Bitmap? {
236 | return try {
237 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
238 | val drawable = icon.loadDrawable(context)
239 | if (drawable is BitmapDrawable) {
240 | drawable.bitmap
241 | } else if (drawable != null) {
242 | // Convert drawable to bitmap
243 | val width = if (drawable.intrinsicWidth > 0) drawable.intrinsicWidth else 1
244 | val height = if (drawable.intrinsicHeight > 0) drawable.intrinsicHeight else 1
245 |
246 | val bitmap = Bitmap.createBitmap(
247 | width,
248 | height,
249 | Bitmap.Config.ARGB_8888
250 | )
251 | val canvas = android.graphics.Canvas(bitmap)
252 | drawable.setBounds(0, 0, canvas.width, canvas.height)
253 | drawable.draw(canvas)
254 | bitmap
255 | } else {
256 | null
257 | }
258 | } else {
259 | null
260 | }
261 | } catch (e: Exception) {
262 | Log.e(TAG, "Error converting icon to bitmap", e)
263 | null
264 | }
265 | }
266 |
267 | /**
268 | * Checks if the notification likely contains an image based on style and extras
269 | */
270 | fun hasImage(notification: Notification): Boolean {
271 | try {
272 | // Quick check without actually extracting the image
273 |
274 | // Check for large icon
275 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
276 | if (notification.getLargeIcon() != null) {
277 | return true
278 | }
279 | } else {
280 | @Suppress("DEPRECATION")
281 | if (notification.largeIcon != null) {
282 | return true
283 | }
284 | }
285 |
286 | // Check for BigPicture style
287 | val extras = notification.extras
288 | if (extras.containsKey(Notification.EXTRA_PICTURE) ||
289 | extras.containsKey(Notification.EXTRA_PICTURE_ICON)) {
290 | return true
291 | }
292 |
293 | // Check for EXTRA_LARGE_ICON_BIG
294 | if (extras.containsKey("android.largeIcon.big")) {
295 | return true
296 | }
297 |
298 | // Check for URI-based images (excluding background images)
299 | val uriKeys = arrayOf(
300 | "android.media.metadata.ALBUM_ART_URI",
301 | "image_url",
302 | "large_icon_url",
303 | "fcm_image_url"
304 | )
305 | for (key in uriKeys) {
306 | if (extras.containsKey(key) && !extras.getString(key).isNullOrEmpty()) {
307 | return true
308 | }
309 | }
310 |
311 | // Check for other bitmap fields
312 | val bitmapKeys = arrayOf(
313 | "android.media.metadata.ART",
314 | "fcm_image",
315 | "image_url",
316 | "large_icon_url"
317 | )
318 | for (key in bitmapKeys) {
319 | if (extras.containsKey(key)) {
320 | return true
321 | }
322 | }
323 |
324 | // Check notification template/style
325 | val template = extras.getString(Notification.EXTRA_TEMPLATE)
326 | if (template != null && template.contains("BigPicture", ignoreCase = true)) {
327 | return true
328 | }
329 |
330 | return false
331 | } catch (e: Exception) {
332 | Log.e(TAG, "Error checking if notification has image", e)
333 | return false
334 | }
335 | }
336 | }
337 |
--------------------------------------------------------------------------------