├── .dockerignore ├── .gitattributes ├── .github ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── bug_report.yaml │ ├── config.yaml │ └── feature_request.yaml ├── PULL_REQUEST_TEMPLATE.md ├── labeler.yaml ├── labels.yaml ├── renovate.json5 └── workflows │ ├── docker-build.yml │ ├── meta-labeler.yaml │ ├── meta-sync-labels.yaml │ ├── renovate.yaml │ ├── rigging_pr_description.yaml │ └── test.yml ├── .gitignore ├── .hooks ├── .gitkeep ├── check_pinned_hash_dependencies.py ├── generate_pr_description.py ├── linters │ ├── mdstyle.rb │ └── yamllint.yaml ├── post_merge.sh └── prettier.sh ├── .pre-commit-config.yaml ├── CODEOWNERS ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── LICENSE ├── README.md ├── examples ├── groq_example.py ├── langchain_example.py ├── nerve.md ├── ollama_example.py ├── openai_example.py └── rigging_example.py ├── release.py └── src ├── book ├── flavors │ ├── mod.rs │ ├── nerve.rs │ ├── openai.rs │ └── rigging.rs ├── mod.rs ├── runtime.rs ├── templates.rs └── templates │ ├── basic.yml │ ├── docker_build.yml │ ├── docker_image.yml │ └── nmap.Dockerfile ├── cli ├── create.rs ├── install.rs ├── mod.rs ├── run.rs ├── serve.rs ├── validate.rs └── view.rs ├── main.rs └── runtime ├── cmd.rs ├── docker.rs ├── mod.rs ├── prompt.rs └── ssh.rs /.dockerignore: -------------------------------------------------------------------------------- 1 | /.github 2 | /.vscode 3 | /examples 4 | /target 5 | /assets 6 | /docs 7 | .mypy_cache 8 | __pycache__ 9 | .DS_Store -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | /examples/** linguist-vendored 2 | /Dockerfile linguist-vendored 3 | /release.py linguist-vendored -------------------------------------------------------------------------------- /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of 
Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to make participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | - Using welcoming and inclusive language 18 | - Being respectful of differing viewpoints and experiences 19 | - Gracefully accepting constructive criticism 20 | - Focusing on what is best for the community 21 | - Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | - The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | - Trolling, insulting/derogatory comments, and personal or political attacks 28 | - Public or private harassment 29 | - Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | - Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 
39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | This Code of Conduct also applies outside the project spaces when there is a 56 | reasonable belief that an individual's behavior may have a negative impact on 57 | the project or its community. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported by contacting the project team at . All 63 | complaints will be reviewed and investigated and will result in a response that 64 | is deemed necessary and appropriate to the circumstances. The project team is 65 | obligated to maintain confidentiality with regard to the reporter of an incident. 66 | Further details of specific enforcement policies may be posted separately. 67 | 68 | Project maintainers who do not follow or enforce the Code of Conduct in good 69 | faith may face temporary or permanent repercussions as determined by other 70 | members of the project's leadership. 
71 | 72 | ## Attribution 73 | 74 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 75 | version 1.4, available at 76 | 77 | [homepage]: https://www.contributor-covenant.org 78 | 79 | For answers to common questions about this code of conduct, see 80 | 81 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to this project 2 | 3 | We want to make contributing to this project as easy and transparent as 4 | possible. 5 | 6 | ## Pull Request Guidelines 7 | 8 | We actively welcome your pull requests. 9 | 10 | 1. Fork the repo and create your branch from `main`. 11 | 2. If you've added code that should be tested, add tests. 12 | 3. If you've changed APIs, update the documentation. 13 | 4. Ensure the test suite passes. 14 | 5. Make sure your code lints. 15 | 6. If you haven't already, complete the Contributor License Agreement ("CLA"). 16 | 17 | ### PR Description Format 18 | 19 | We use a standardized format for pull request descriptions to ensure 20 | consistency and clarity: 21 | 22 | 1. **Title**: Use a clear, concise title that summarizes the changes 23 | 2. **Key Changes**: List the most important updates 24 | 3. **Added**: Document new features or files 25 | 4. **Changed**: Highlight modifications to existing code 26 | 5. 
**Removed**: Note any deletions or removals 27 | 28 | Example: 29 | 30 | ```markdown 31 | ### Add device configuration automation 32 | 33 | **Key Changes:** 34 | 35 | - Implement dynamic device configuration 36 | - Add automated setup scripts 37 | - Update documentation 38 | 39 | **Added:** 40 | 41 | - New device setup module 42 | - Configuration templates 43 | - Setup guide 44 | 45 | **Changed:** 46 | 47 | - Refactored device initialization 48 | - Updated configuration format 49 | - Modified setup process 50 | 51 | **Removed:** 52 | 53 | - Legacy device configs 54 | - Deprecated setup scripts 55 | ``` 56 | 57 | ## Contributor License Agreement ("CLA") 58 | 59 | In order to accept your pull request, we need you to submit a CLA. You only need 60 | to do this once to work on any of Facebook's open source projects. 61 | 62 | Complete your CLA here: 63 | 64 | ## Issues 65 | 66 | We use GitHub issues to track public bugs. Please ensure your description is 67 | clear and has sufficient instructions to be able to reproduce the issue. 68 | 69 | ## License 70 | 71 | By contributing to this project, you agree that your contributions will be licensed 72 | under the LICENSE file in the root directory of this source tree. 73 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "🚨 Bug Report" 3 | description: File a bug report 4 | title: "🚨 [BUG] - " 5 | labels: ["bug", "triage"] 6 | assignees: 7 | - octocat 8 | body: 9 | - type: markdown 10 | attributes: 11 | value: | 12 | Thanks for taking the time to fill out this bug report! 13 | 14 | - type: textarea 15 | id: what-happened 16 | attributes: 17 | label: What happened? 18 | description: Also tell us, what did you expect to happen? 19 | placeholder: | 20 | Steps to reproduce the behavior: 21 | 1. 22 | 2. 23 | 3. 24 | 25 | Expected behavior: 26 | ... 
27 | 28 | Actual behavior: 29 | ... 30 | validations: 31 | required: true 32 | 33 | - type: textarea 34 | id: possible-fix 35 | attributes: 36 | label: Any suggestions for fixing this bug? 37 | description: If you have an idea to fix this bug, we'd love to hear it! 38 | validations: 39 | required: false 40 | 41 | - type: textarea 42 | id: logs 43 | attributes: 44 | label: Relevant log output 45 | description: Please copy and paste any relevant log output. 46 | render: shell 47 | 48 | - type: textarea 49 | id: environment 50 | attributes: 51 | label: Details about your environment 52 | description: Please provide the following information about your environment. 53 | placeholder: | 54 | ## Your Environment 55 | - Go Version: 56 | - Operating System: 57 | - Browser (if applicable): 58 | - Relevant env vars 59 | 60 | Tell us what you see! 61 | validations: 62 | required: false 63 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | blank_issues_enabled: false 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "💡 Feature Request" 3 | description: Create a new ticket for a new feature request 4 | title: "💡 [REQUEST] - <title>" 5 | labels: ["question"] 6 | body: 7 | - type: textarea 8 | id: implementation_pr 9 | attributes: 10 | label: "Implementation PR" 11 | description: Associated pull request 12 | placeholder: "# Pull Request ID" 13 | validations: 14 | required: false 15 | - type: textarea 16 | id: reference_issues 17 | attributes: 18 | label: "Reference Issues" 19 | description: Related issues 20 | placeholder: "# Issue ID(s)" 21 | validations: 22 | required: false 23 | - type: textarea 24 | id: summary 25 | attributes: 26 | label: 
"Summary" 27 | description: Provide a brief explanation of the feature 28 | placeholder: Describe your feature request 29 | validations: 30 | required: true 31 | - type: textarea 32 | id: basic_example 33 | attributes: 34 | label: "Basic Example" 35 | description: Provide some basic examples of your feature 36 | placeholder: A few specific details about your feature request 37 | validations: 38 | required: true 39 | - type: textarea 40 | id: drawbacks 41 | attributes: 42 | label: "Drawbacks" 43 | description: What are the drawbacks/impacts of your feature request? 44 | placeholder: Identify the drawbacks and impacts while remaining neutral on your feature request 45 | validations: 46 | required: true 47 | - type: textarea 48 | id: unresolved_question 49 | attributes: 50 | label: "Unresolved questions" 51 | description: What questions remain unresolved? 52 | placeholder: Identify any unresolved issues 53 | validations: 54 | required: false 55 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | # [Title of Your PR] 2 | 3 | **Key Changes:** 4 | 5 | - [ ] List major changes and core updates 6 | - [ ] Keep each line under 80 characters 7 | - [ ] Focus on the "what" and "why" 8 | 9 | **Added:** 10 | 11 | - [ ] New features/functionality 12 | - [ ] New files/configurations 13 | - [ ] New dependencies 14 | 15 | **Changed:** 16 | 17 | - [ ] Updates to existing code 18 | - [ ] Configuration changes 19 | - [ ] Dependency updates 20 | 21 | **Removed:** 22 | 23 | - [ ] Deleted files/code 24 | - [ ] Removed dependencies 25 | - [ ] Cleaned up configurations 26 | 27 | --- 28 | 29 | <!-- Delete any sections that are not applicable --> 30 | <!-- Add screenshots or code examples if relevant --> 31 | -------------------------------------------------------------------------------- /.github/labeler.yaml: 
-------------------------------------------------------------------------------- 1 | --- 2 | # Area Labels 3 | area/docs: 4 | - changed-files: 5 | - any-glob-to-any-file: "docs/**/*" 6 | 7 | area/examples: 8 | - changed-files: 9 | - any-glob-to-any-file: "examples/**/*" 10 | 11 | area/github: 12 | - changed-files: 13 | - any-glob-to-any-file: ".github/**/*" 14 | 15 | area/pre-commit: 16 | - changed-files: 17 | - any-glob-to-any-file: ".pre-commit-config.yaml" 18 | - any-glob-to-any-file: ".hooks/**/*" 19 | 20 | area/python: 21 | - changed-files: 22 | - any-glob-to-any-file: "pyproject.toml" 23 | - any-glob-to-any-file: "requirements.txt" 24 | - any-glob-to-any-file: "*.py" 25 | 26 | area/security: 27 | - changed-files: 28 | - any-glob-to-any-file: "SECURITY.md" 29 | - any-glob-to-any-file: "secrets.baseline" 30 | 31 | area/taskfiles: 32 | - changed-files: 33 | - any-glob-to-any-file: "Taskfile.yaml" 34 | 35 | area/tests: 36 | - changed-files: 37 | - any-glob-to-any-file: "tests/**/*" 38 | 39 | area/workspace: 40 | - changed-files: 41 | - any-glob-to-any-file: "python.code-workspace" 42 | 43 | # Development Labels 44 | area/dev: 45 | - changed-files: 46 | - any-glob-to-any-file: "dev/**/*" 47 | 48 | # Semantic Type Labels 49 | type/digest: 50 | - head-branch: ["^renovate/"] 51 | - head-branch: ["^deps/"] 52 | 53 | type/patch: 54 | - any: ["title:/^(?:Fix|Patch|Update)/"] 55 | 56 | type/minor: 57 | - any: ["title:/^(?:Add|Feature|Improve)/"] 58 | 59 | type/major: 60 | - any: ["title:/^(?:BREAKING)/"] 61 | 62 | type/break: 63 | - any: ["body:/BREAKING CHANGE:/"] 64 | 65 | # Documentation Labels 66 | type/docs: 67 | - changed-files: 68 | - any-glob-to-any-file: "docs/**/*" 69 | - any-glob-to-any-file: "*.md" 70 | 71 | # Core Files Labels 72 | type/core: 73 | - changed-files: 74 | - any-glob-to-any-file: "CODEOWNERS" 75 | - any-glob-to-any-file: "LICENSE" 76 | - any-glob-to-any-file: "README.md" 77 | 
-------------------------------------------------------------------------------- /.github/labels.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # Area Labels 3 | - name: area/docs 4 | color: "72CCF3" # Light Blue 5 | description: >- 6 | Changes to documentation and guides 7 | 8 | - name: area/examples 9 | color: "BC9BE3" # Lavender 10 | description: >- 11 | Changes to example code and demonstrations 12 | 13 | - name: area/github 14 | color: "F4D1B7" # Peach 15 | description: >- 16 | Changes made to GitHub Actions 17 | 18 | - name: area/pre-commit 19 | color: "84B6EB" # Steel Blue 20 | description: >- 21 | Changes made to pre-commit hooks 22 | 23 | - name: area/python 24 | color: "7BD7E0" # Turquoise 25 | description: >- 26 | Changes to Python package configuration and dependencies 27 | 28 | - name: area/security 29 | color: "FF6600" # Orange 30 | description: >- 31 | Changes to security policies and configurations 32 | 33 | - name: area/taskfiles 34 | color: "66CCFF" # Sky Blue 35 | description: >- 36 | Changes made to Taskfiles 37 | 38 | - name: area/tests 39 | color: "99CC00" # Lime Green 40 | description: >- 41 | Changes to test files and testing infrastructure 42 | 43 | - name: area/workspace 44 | color: "FF99CC" # Pink 45 | description: >- 46 | Changes to VSCode workspace configuration 47 | 48 | - name: area/assets 49 | color: "FFA07A" # Light Salmon 50 | description: >- 51 | Changes to asset files 52 | 53 | - name: area/templates 54 | color: "DA70D6" # Orchid 55 | description: >- 56 | Changes to templates 57 | 58 | - name: area/scripts 59 | color: "40E0D0" # Turquoise 60 | description: >- 61 | Changes to script files 62 | 63 | - name: area/src 64 | color: "4682B4" # Steel Blue 65 | description: >- 66 | Changes to source code 67 | 68 | - name: area/ci 69 | color: "FF4500" # Orange Red 70 | description: >- 71 | Changes related to CI/CD configurations 72 | 73 | - name: area/shell 74 | color: "556B2F" # Dark 
Olive Green 75 | description: >- 76 | Changes to shell scripts 77 | 78 | - name: area/dev 79 | color: "CC6699" # Dusty Rose 80 | description: >- 81 | Changes to development tools and assets 82 | 83 | # Renovate Labels 84 | - name: renovate/container 85 | color: "9933CC" # Purple 86 | description: >- 87 | Docker container updates via Renovate 88 | 89 | - name: renovate/github-action 90 | color: "FF3366" # Hot Pink 91 | description: >- 92 | GitHub Action updates via Renovate 93 | 94 | - name: renovate/github-release 95 | color: "3399FF" # Bright Blue 96 | description: >- 97 | GitHub Release updates via Renovate 98 | 99 | # Semantic Type Labels 100 | - name: type/digest 101 | color: "FF66CC" # Bright Pink 102 | description: >- 103 | Dependency digest updates 104 | 105 | - name: type/patch 106 | color: "FFC300" # Golden Yellow 107 | description: >- 108 | Patch changes (fixes, updates) 109 | 110 | - name: type/minor 111 | color: "FFD700" # Gold 112 | description: >- 113 | Minor changes (features, improvements) 114 | 115 | - name: type/major 116 | color: "F6412D" # Red Orange 117 | description: >- 118 | Major changes 119 | 120 | - name: type/break 121 | color: "FF0000" # Bright Red 122 | description: >- 123 | Breaking changes 124 | 125 | # Documentation Labels 126 | - name: type/docs 127 | color: "0075CA" # Documentation Blue 128 | description: >- 129 | Documentation updates and improvements 130 | 131 | - name: type/core 132 | color: "A2EEEF" # Light Blue 133 | description: >- 134 | Changes to core repository files and configurations 135 | -------------------------------------------------------------------------------- /.github/renovate.json5: -------------------------------------------------------------------------------- 1 | { 2 | $schema: "https://docs.renovatebot.com/renovate-schema.json", 3 | extends: [ 4 | "config:base", 5 | ":disableRateLimiting", 6 | ":dependencyDashboard", 7 | ":semanticCommits", 8 | ":automergeDigest", 9 | ":automergeBranch", 10 | ], 11 | 
dependencyDashboardTitle: "Renovate Dashboard 🤖", 12 | suppressNotifications: ["prIgnoreNotification"], 13 | rebaseWhen: "conflicted", 14 | commitBodyTable: true, 15 | cargo: { 16 | commitMessageTopic: "Rust crate {{depName}}", 17 | }, 18 | packageRules: [ 19 | { 20 | description: "Auto merge non-major updates", 21 | matchUpdateTypes: ["minor", "patch"], 22 | automerge: true, 23 | automergeType: "pr", 24 | }, 25 | { 26 | description: "Group Rust dev dependencies", 27 | matchManagers: ["cargo"], 28 | matchDepTypes: ["dev-dependencies"], 29 | groupName: "Rust dev dependencies", 30 | groupSlug: "rust-dev", 31 | }, 32 | ], 33 | ignorePaths: [], 34 | } -------------------------------------------------------------------------------- /.github/workflows/docker-build.yml: -------------------------------------------------------------------------------- 1 | name: Build and Push Docker Images 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | paths-ignore: 7 | - 'README.md' 8 | 9 | jobs: 10 | docker: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout 14 | uses: actions/checkout@v5 15 | - name: Set up QEMU 16 | uses: docker/setup-qemu-action@v3 17 | - name: Set up Docker Buildx 18 | uses: docker/setup-buildx-action@v3 19 | - name: Login to Docker Hub 20 | uses: docker/login-action@v3 21 | with: 22 | username: ${{ secrets.DOCKERHUB_USERNAME }} 23 | password: ${{ secrets.DOCKERHUB_TOKEN }} 24 | - name: Build and push 25 | uses: docker/build-push-action@v6 26 | with: 27 | platforms: linux/amd64,linux/arm64 28 | push: true 29 | tags: dreadnode/robopages:latest -------------------------------------------------------------------------------- /.github/workflows/meta-labeler.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "Labeler" 3 | on: 4 | pull_request_target: 5 | branches: ["main"] 6 | types: ["opened", "synchronize"] 7 | 8 | permissions: 9 | actions: read 10 | contents: read 11 | issues: write 12 | pull-requests: 
write 13 | 14 | jobs: 15 | labeler: 16 | name: Labeler 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Generate Token 20 | uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1 21 | id: app-token 22 | with: 23 | app-id: "${{ secrets.BOT_APP_ID }}" 24 | private-key: "${{ secrets.BOT_APP_PRIVATE_KEY }}" 25 | 26 | - name: Labeler 27 | uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1 28 | with: 29 | configuration-path: .github/labeler.yaml 30 | repo-token: "${{ steps.app-token.outputs.token }}" 31 | -------------------------------------------------------------------------------- /.github/workflows/meta-sync-labels.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "Meta Sync labels" 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: ["main"] 7 | paths: [".github/labels.yaml"] 8 | 9 | permissions: 10 | actions: read 11 | contents: read 12 | issues: write 13 | pull-requests: write 14 | 15 | jobs: 16 | labels: 17 | name: Sync Labels 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Generate Token 21 | uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1 22 | id: app-token 23 | with: 24 | app-id: "${{ secrets.BOT_APP_ID }}" 25 | private-key: "${{ secrets.BOT_APP_PRIVATE_KEY }}" 26 | 27 | - name: Set up git repository 28 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 29 | with: 30 | token: "${{ steps.app-token.outputs.token }}" 31 | 32 | - name: Sync Labels 33 | uses: EndBug/label-sync@52074158190acb45f3077f9099fea818aa43f97a # v2.3.3 34 | with: 35 | config-file: .github/labels.yaml 36 | token: "${{ steps.app-token.outputs.token }}" 37 | delete-other-labels: true 38 | -------------------------------------------------------------------------------- /.github/workflows/renovate.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Renovate 3 | 
on: 4 | # checkov:skip=CKV_GHA_7: "Workflow dispatch inputs are required for manual debugging and configuration" 5 | workflow_dispatch: 6 | inputs: 7 | dryRun: 8 | description: Dry Run 9 | default: "false" 10 | required: false 11 | logLevel: 12 | description: Log Level 13 | default: "debug" 14 | required: false 15 | version: 16 | description: Renovate version 17 | default: latest 18 | required: false 19 | schedule: 20 | # Run every week on sunday and wednesday at 00:00 UTC 21 | - cron: "0 0 * * 0,3" 22 | push: 23 | branches: ["main"] 24 | paths: 25 | - .github/renovate.json5 26 | - .github/renovate/**.json5 27 | 28 | permissions: 29 | contents: read 30 | pull-requests: write 31 | issues: write 32 | 33 | concurrency: 34 | group: ${{ github.workflow }}-${{ github.run_number || github.ref }} 35 | cancel-in-progress: true 36 | 37 | # Retrieve BOT_USER_ID via `curl -s "https://api.github.com/users/${BOT_USERNAME}%5Bbot%5D" | jq .id` 38 | env: 39 | WORKFLOW_DRY_RUN: false 40 | WORKFLOW_LOG_LEVEL: debug 41 | WORKFLOW_VERSION: latest # 37.59.8 42 | RENOVATE_PLATFORM: github 43 | RENOVATE_PLATFORM_COMMIT: true 44 | RENOVATE_ONBOARDING_CONFIG_FILE_NAME: .github/renovate.json5 45 | RENOVATE_AUTODISCOVER: true 46 | RENOVATE_AUTODISCOVER_FILTER: "${{ github.repository }}" 47 | RENOVATE_GIT_AUTHOR: "${{ secrets.BOT_USERNAME }} <${{ secrets.BOT_USER_ID }}+${{ secrets.BOT_USERNAME }}[bot]@users.noreply.github.com>" 48 | 49 | jobs: 50 | renovate: 51 | name: Renovate 52 | runs-on: ubuntu-latest 53 | steps: 54 | - name: Generate Token 55 | uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1 56 | id: app-token 57 | with: 58 | app-id: "${{ secrets.BOT_APP_ID }}" 59 | private-key: "${{ secrets.BOT_APP_PRIVATE_KEY }}" 60 | 61 | - name: Checkout 62 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 63 | with: 64 | token: "${{ steps.app-token.outputs.token }}" 65 | 66 | - name: Override default config from dispatch variables 67 | 
run: | 68 | echo "RENOVATE_DRY_RUN=${{ github.event.inputs.dryRun || env.WORKFLOW_DRY_RUN }}" >> "${GITHUB_ENV}" 69 | echo "LOG_LEVEL=${{ github.event.inputs.logLevel || env.WORKFLOW_LOG_LEVEL }}" >> "${GITHUB_ENV}" 70 | 71 | - name: Delete old dashboard 72 | run: | 73 | ISSUE_NUMBER=$(gh issue list -S 'Renovate Dashboard 🤖' --json number -q '.[0].number') 74 | if [ "$ISSUE_NUMBER" != "null" ] && [ -n "$ISSUE_NUMBER" ]; then 75 | gh issue close "$ISSUE_NUMBER" 76 | else 77 | echo "No issue found to close." 78 | fi 79 | env: 80 | GITHUB_TOKEN: "${{ steps.app-token.outputs.token }}" 81 | 82 | - name: Renovate 83 | uses: renovatebot/github-action@6927a58a017ee9ac468a34a5b0d2a9a9bd45cac3 # v43.0.11 84 | with: 85 | configurationFile: "${{ env.RENOVATE_ONBOARDING_CONFIG_FILE_NAME }}" 86 | token: "${{ steps.app-token.outputs.token }}" 87 | renovate-version: "${{ github.event.inputs.version || env.WORKFLOW_VERSION }}" -------------------------------------------------------------------------------- /.github/workflows/rigging_pr_description.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Update PR Description with Rigging 3 | on: 4 | pull_request: 5 | types: [opened] 6 | 7 | jobs: 8 | update-description: 9 | name: Update PR Description with Rigging 10 | runs-on: ubuntu-latest 11 | permissions: 12 | pull-requests: write 13 | contents: read 14 | 15 | steps: 16 | - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 17 | with: 18 | fetch-depth: 0 # full history for proper diffing 19 | 20 | - name: Set up Python 21 | uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 22 | with: 23 | python-version: "3.14" 24 | 25 | - name: Install uv 26 | run: | 27 | python -m pip install --upgrade pip 28 | pip install uv 29 | 30 | - name: Generate PR Description 31 | id: description 32 | env: 33 | OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} 34 | run: | 35 | DESCRIPTION="$(uv run 
--no-project .hooks/generate_pr_description.py --base-ref "origin/${{ github.base_ref }}" --exclude "./*.lock")" 36 | { 37 | echo "description<<EOF" 38 | echo "${DESCRIPTION}" 39 | echo "EOF" 40 | } >> "$GITHUB_OUTPUT" 41 | 42 | - name: Update PR Description 43 | uses: nefrob/pr-description@4dcc9f3ad5ec06b2a197c5f8f93db5e69d2fdca7 # v1.2.0 44 | with: 45 | token: ${{ secrets.GITHUB_TOKEN }} 46 | content: | 47 | 48 | --- 49 | 50 | ## Generated Summary: 51 | 52 | ${{ steps.description.outputs.description }} 53 | 54 | This summary was generated with ❤️ by [rigging](https://rigging.dreadnode.io/) 55 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Build and Test 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | paths-ignore: 7 | - 'README.md' 8 | - 'examples' 9 | pull_request: 10 | branches: [ "main" ] 11 | paths-ignore: 12 | - 'README.md' 13 | - 'examples' 14 | 15 | env: 16 | CARGO_TERM_COLOR: always 17 | 18 | jobs: 19 | build: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v5 25 | - name: Build 26 | run: cargo build --verbose 27 | - name: Run tests 28 | run: cargo test --verbose 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Rust ### 2 | /target/ 3 | **/*.rs.bk 4 | *.pdb 5 | Cargo.lock 6 | # Keep Cargo.lock for binary projects, uncomment this line if it's a library 7 | 8 | ### IDE and Editors ### 9 | # VSCode 10 | .vscode/* 11 | !.vscode/settings.json 12 | !.vscode/tasks.json 13 | !.vscode/launch.json 14 | !.vscode/extensions.json 15 | *.code-workspace 16 | 17 | # JetBrains IDEs (IntelliJ, CLion, etc.) 
#!/usr/bin/env python
"""Pre-commit hook: verify GitHub Actions `uses:` entries are pinned to full commit SHAs."""
import re
import sys
from pathlib import Path
from typing import List, Tuple


class GitHubActionChecker:
    """Scans workflow files for `uses:` statements and classifies how each is pinned."""

    def __init__(self):
        # Pattern for actions pinned to a full 40-char SHA-1 hash (the secure form)
        self.pinned_pattern = re.compile(r"uses:\s+([^@\s]+)@([a-f0-9]{40})")

        # Pattern for actions referencing a semver-style tag (mutable, therefore unpinned)
        self.unpinned_pattern = re.compile(
            r"uses:\s+([^@\s]+)@(v\d+(?:\.\d+)*(?:-[a-zA-Z0-9]+(?:\.\d+)*)?)"
        )

        # Pattern for all uses statements, regardless of reference style
        self.all_uses_pattern = re.compile(r"uses:\s+([^@\s]+)@([^\s\n]+)")

    def format_terminal_link(self, file_path: str, line_number: int) -> str:
        """Format a terminal link to a file and line number.

        Args:
            file_path: Path to the file
            line_number: Line number in the file

        Returns:
            str: Formatted string with file path and line number
        """
        return f"{file_path}:{line_number}"

    def get_line_numbers(self, content: str, pattern: re.Pattern) -> List[Tuple[str, int]]:
        """Return (matched text, 1-based line number) pairs for every match of `pattern`."""
        matches = []
        for i, line in enumerate(content.splitlines(), 1):
            for match in pattern.finditer(line):
                matches.append((match.group(0), i))
        return matches

    def check_file(self, file_path: str) -> bool:
        """Check a single file for unpinned dependencies.

        Prints a classification report and returns True only when every
        action found is pinned to a SHA (or no actions are present).
        """
        try:
            content = Path(file_path).read_text()
        except Exception as e:
            print(f"\033[91mError reading file {file_path}: {e}\033[0m")
            return False

        # Get matches with line numbers
        pinned_matches = self.get_line_numbers(content, self.pinned_pattern)
        unpinned_matches = self.get_line_numbers(content, self.unpinned_pattern)
        all_matches = self.get_line_numbers(content, self.all_uses_pattern)

        print(f"\n\033[1m[=] Checking file: {file_path}\033[0m")

        # Print pinned dependencies
        if pinned_matches:
            print("\033[92m[+] Pinned:\033[0m")
            for match, line_num in pinned_matches:
                print(
                    f"  |- {match} \033[90m({self.format_terminal_link(file_path, line_num)})\033[0m"
                )

        has_errors = False

        # Check for unpinned dependencies
        if unpinned_matches:
            has_errors = True
            print("\033[93m[!] Unpinned (using version tags):\033[0m")
            for match, line_num in unpinned_matches:
                print(
                    f"  |- {match} \033[90m({self.format_terminal_link(file_path, line_num)})\033[0m"
                )

        # Anything matched by the generic pattern but by neither specific
        # pattern has no SHA and no version tag at all (e.g. a branch name).
        unpinned_without_hash = [
            (match, line_num)
            for match, line_num in all_matches
            if not any(match in pinned[0] for pinned in pinned_matches)
            and not any(match in unpinned[0] for unpinned in unpinned_matches)
        ]

        if unpinned_without_hash:
            has_errors = True
            print("\033[91m[!] Completely unpinned (no SHA or version):\033[0m")
            for match, line_num in unpinned_without_hash:
                print(
                    f"  |- {match} \033[90m({self.format_terminal_link(file_path, line_num)})\033[0m"
                )

        # Print summary; the three categories are disjoint by construction.
        total_actions = len(pinned_matches) + len(unpinned_matches) + len(unpinned_without_hash)
        if total_actions == 0:
            print("\033[93m[!] No GitHub Actions found in this file\033[0m")
        else:
            print("\n\033[1mSummary:\033[0m")
            print(f"Total actions: {total_actions}")
            print(f"Pinned: {len(pinned_matches)}")
            print(f"Unpinned with version: {len(unpinned_matches)}")
            print(f"Completely unpinned: {len(unpinned_without_hash)}")

        return not has_errors


def main():
    """Check every file passed on the command line; exit 1 if any check fails."""
    checker = GitHubActionChecker()
    files_to_check = sys.argv[1:]

    if not files_to_check:
        print("\033[91mError: No files provided to check\033[0m")
        print("Usage: python script.py <file1> <file2> ...")
        sys.exit(1)

    results = {file: checker.check_file(file) for file in files_to_check}

    # Print final summary
    print("\n\033[1mFinal Results:\033[0m")
    for file, passed in results.items():
        status = "\033[92m✓ Passed\033[0m" if passed else "\033[91m✗ Failed\033[0m"
        print(f"{status} {file}")

    if not all(results.values()):
        sys.exit(1)


if __name__ == "__main__":
    main()
27 | - Use bullet points to structure important statements. 28 | - Focus on key modifications and potential impact - if any. 29 | - Do not add in general advice or best-practice information. 30 | - Write like a developer who authored the changes. 31 | - Prefer flat bullet lists over nested. 32 | - Do not include any title structure. 33 | - If there are no changes, just provide "No relevant changes." 34 | - Order your bullet points by importance. 35 | </guidance> 36 | """ 37 | 38 | 39 | async def _run_git_command(args: list[str]) -> str: 40 | """ 41 | Safely run a git command with validated input. 42 | """ 43 | # Validate git exists in PATH 44 | git_path = "git" # Could use shutil.which("git") for more security 45 | if not any( 46 | os.path.isfile(os.path.join(path, "git")) for path in os.environ["PATH"].split(os.pathsep) 47 | ): 48 | raise ValueError("Git executable not found in PATH") 49 | 50 | # Validate input parameters 51 | if not all(isinstance(arg, str) for arg in args): 52 | raise ValueError("All command arguments must be strings") 53 | 54 | # Use os.execv for more secure command execution 55 | try: 56 | # nosec B603 - Input is validated 57 | proc = await asyncio.create_subprocess_exec( 58 | git_path, 59 | *args, 60 | stdout=asyncio.subprocess.PIPE, 61 | stderr=asyncio.subprocess.PIPE, 62 | ) 63 | stdout, stderr = await proc.communicate() 64 | 65 | if proc.returncode != 0: 66 | raise RuntimeError(f"Git command failed: {stderr.decode()}") 67 | 68 | return stdout.decode().strip() 69 | except Exception as e: 70 | raise RuntimeError(f"Failed to execute git command: {e}") 71 | 72 | 73 | async def get_diff(base_ref: str, source_ref: str, *, exclude: list[str] | None = None) -> str: 74 | """ 75 | Get the git diff between two branches. 
76 | """ 77 | # Validate refs 78 | for ref in (base_ref, source_ref): 79 | if not isinstance(ref, str) or not ref.strip(): 80 | raise ValueError("Invalid git reference") 81 | 82 | # Get merge base 83 | merge_base = await _run_git_command(["merge-base", source_ref, base_ref]) 84 | 85 | # Prepare diff command 86 | diff_command = ["diff", "--no-color", merge_base, source_ref] 87 | if exclude: 88 | validated_excludes = [] 89 | for path in exclude: 90 | # Validate path 91 | if not isinstance(path, str) or ".." in path: 92 | raise ValueError(f"Invalid exclude path: {path}") 93 | validated_excludes.append(f":(exclude){path}") 94 | diff_command.extend(["--", ".", *validated_excludes]) 95 | 96 | # Get diff 97 | return await _run_git_command(diff_command) 98 | 99 | 100 | def main( 101 | base_ref: str = "origin/main", 102 | source_ref: str = "HEAD", 103 | generator_id: str = "openai/gpt-4o-mini", 104 | max_diff_lines: int = 1000, 105 | exclude: list[str] | None = None, 106 | ) -> None: 107 | """ 108 | Use rigging to generate a PR description from a git diff. 109 | """ 110 | diff = asyncio.run(get_diff(base_ref, source_ref, exclude=exclude)) 111 | diff_lines = diff.split("\n") 112 | if len(diff_lines) > max_diff_lines: 113 | diff = "\n".join(diff_lines[:max_diff_lines]) + TRUNCATION_WARNING 114 | description = asyncio.run(generate_pr_description.bind(generator_id)(diff)) 115 | print(description) 116 | 117 | 118 | if __name__ == "__main__": 119 | typer.run(main) 120 | -------------------------------------------------------------------------------- /.hooks/linters/mdstyle.rb: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # Style file for markdownlint. 3 | # 4 | # https://github.com/markdownlint/markdownlint/blob/master/docs/configuration.md 5 | # 6 | # This file is referenced by the project `.mdlrc`. 
7 | ################################################################################ 8 | 9 | #=============================================================================== 10 | # Start with all built-in rules. 11 | # https://github.com/markdownlint/markdownlint/blob/master/docs/RULES.md 12 | all 13 | 14 | #=============================================================================== 15 | # Override default parameters for some built-in rules. 16 | # https://github.com/markdownlint/markdownlint/blob/master/docs/creating_styles.md#parameters 17 | 18 | # Ignore line length for specific files 19 | rule 'MD013', 20 | ignore_code_blocks: true, 21 | files: ['CHANGELOG.md', 'RENOVATE_TESTING.md'], 22 | line_length: 99999 # Very high number to effectively disable for specified files 23 | 24 | # Allow duplicate headers in changelog files 25 | rule 'MD024', 26 | allow_different_nesting: true, 27 | files: ['CHANGELOG.md'] 28 | 29 | #=============================================================================== 30 | # Exclude the rules I disagree with. 31 | 32 | # IMHO it's easier to read lists like: 33 | # * outmost indent 34 | # - one indent 35 | # - second indent 36 | # * Another major bullet 37 | exclude_rule 'MD004' 38 | 39 | # Inconsistent indentation for list items is not a problem. 40 | exclude_rule 'MD005' 41 | 42 | # Ordered lists are fine. 43 | exclude_rule 'MD029' 44 | 45 | # The first line doesn't always need to be a top level header. 46 | exclude_rule 'MD041' 47 | 48 | # I find it necessary to use '<br/>' to force line breaks. 49 | exclude_rule 'MD033' # Inline HTML 50 | 51 | # Using bare URLs is fine. 
#!/bin/bash
#
# post-merge hook: re-run `poetry install` wherever a poetry.lock file
# changed between the pre-merge state (ORIG_HEAD) and the working tree.

# lock_changed <lockfile>: succeeds (exit 0) when the lockfile content differs
# between ORIG_HEAD and the working tree.
# Only the checksum field is compared. The original compared full `md5sum`
# output, whose trailing filename field differs ("-" for the piped pre-merge
# content vs the actual path), so the hashes never matched and the install
# step always re-ran.
lock_changed() {
    local old new
    old=$(git show "ORIG_HEAD:$1" 2> /dev/null | md5sum | cut -d' ' -f1)
    new=$(md5sum "$1" 2> /dev/null | cut -d' ' -f1)
    [ "$old" != "$new" ]
}

# Root project dependencies
if lock_changed poetry.lock; then
    echo "📦 Root dependencies changed. Running poetry install..."
    poetry install || {
        echo "❌ Failed to update dependencies"
        exit 1
    }
    echo "✅ Root dependencies updated!"
else
    echo "📦 No root dependency changes"
fi

# API component dependencies
if lock_changed components/api/poetry.lock; then
    echo "📦 API dependencies changed. Running poetry install..."
    cd components/api || exit
    if ! poetry install --with dev; then
        echo "❌ Failed to update dependencies"
        exit 1
    fi
    echo "✅ API dependencies updated!"
else
    echo "📦 No API dependency changes"
fi
37 | exit 0 38 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v5.0.0 5 | hooks: 6 | - id: check-added-large-files 7 | args: [--maxkb=10240] 8 | - id: check-case-conflict 9 | - id: check-merge-conflict 10 | - id: check-executables-have-shebangs 11 | - id: check-json 12 | - id: check-shebang-scripts-are-executable 13 | - id: check-symlinks 14 | - id: check-yaml 15 | - id: detect-private-key 16 | - id: end-of-file-fixer 17 | - id: trailing-whitespace 18 | 19 | - repo: https://github.com/rhysd/actionlint 20 | rev: v1.7.7 21 | hooks: 22 | - id: actionlint 23 | 24 | - repo: https://github.com/adrienverge/yamllint.git 25 | rev: v1.35.1 26 | hooks: 27 | - id: yamllint 28 | entry: yamllint --strict -c .hooks/linters/yamllint.yaml 29 | 30 | - repo: https://github.com/codespell-project/codespell 31 | rev: v2.4.1 32 | hooks: 33 | - id: codespell 34 | entry: codespell -q 3 -f --skip=".git,.github,README.md" --ignore-words-list="astroid" 35 | 36 | - repo: https://github.com/jumanjihouse/pre-commit-hooks 37 | rev: 3.0.0 38 | hooks: 39 | - id: script-must-have-extension 40 | name: Ensure shell scripts end with .sh 41 | types: [shell] 42 | - id: shellcheck 43 | - id: shfmt 44 | # Configuration in .mdlrc and .hooks/linters/mdstyle.rb 45 | - id: markdownlint 46 | exclude: README.md 47 | 48 | - repo: https://github.com/Yelp/detect-secrets 49 | rev: v1.5.0 50 | hooks: 51 | - id: detect-secrets 52 | args: ["--baseline", ".secrets.baseline"] 53 | exclude: .secrets.baseline 54 | 55 | - repo: local 56 | hooks: 57 | # Ensure our GH actions are pinned to a specific hash 58 | - id: check-github-actions 59 | name: Check GitHub Actions for Pinned Dependencies 60 | entry: .hooks/check_pinned_hash_dependencies.py 61 | language: python 62 | files: \.github/.*\.yml$ 63 | 
64 | - id: prettier 65 | name: Run prettier 66 | entry: .hooks/prettier.sh 67 | language: script 68 | types: [json, yaml] 69 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @dreadnode/team 2 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "robopages" 3 | description = "CLI and API server for Robopages, a YAML based files for describing tools to large language models (LLMs)" 4 | authors = ["Simone Margaritelli (simone@dreadnode.io)"] 5 | version = "0.4.0" 6 | edition = "2021" 7 | readme = "README.md" 8 | repository = "https://github.com/dreadnode/robopages-cli" 9 | homepage = "https://github.com/dreadnode/robopages" 10 | license = "MIT" 11 | 12 | [dependencies] 13 | actix-cors = "0.7.0" 14 | actix-web = "4.9.0" 15 | actix-web-lab = "0.23.0" 16 | anyhow = "1.0.90" 17 | async-ssh2-tokio = "0.9.0" 18 | camino = { version = "1.1.9", features = ["serde"] } 19 | clap = { version = "4.5.20", features = ["derive"] } 20 | env_logger = "0.11.5" 21 | futures = "0.3.31" 22 | glob = "0.3.1" 23 | include_dir = "0.7.4" 24 | lazy-regex = "3.3.0" 25 | log = "0.4.22" 26 | regex = "1.11.0" 27 | reqwest = "0.12.8" 28 | serde = { version = "1.0.211", features = ["derive"] } 29 | serde_yaml = "0.9.34" 30 | shell-escape = "0.1.5" 31 | shellexpand = { version = "3.1.0", features = ["full"] } 32 | tempfile = "3.13.0" 33 | tokio = { version = "1.43.1", features = ["full"] } 34 | which = "8.0.0" 35 | zip = "4.0.0" 36 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:bullseye AS builder 2 | 3 | RUN apt-get update && apt-get install -y libssl-dev ca-certificates cmake git 
4 | 5 | WORKDIR /app 6 | ADD . /app 7 | RUN cargo build --release 8 | 9 | FROM debian:bullseye 10 | 11 | RUN apt-get update && apt-get install -y libssl-dev curl ca-certificates 12 | RUN curl -fsSL https://get.docker.com | sh 13 | 14 | COPY --from=builder /app/target/release/robopages /usr/bin/robopages 15 | 16 | ENTRYPOINT ["/usr/bin/robopages"] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 dreadnode 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Robopages Server 2 | 3 | <div align="center"> 4 | 5 | <img 6 | src="https://d1lppblt9t2x15.cloudfront.net/logos/5714928f3cdc09503751580cffbe8d02.png" 7 | alt="Logo" 8 | align="center" 9 | width="144px" 10 | height="144px" 11 | /> 12 | 13 | 14 | 15 | <p align="center"> 16 | <a href="https://github.com/dreadnode/robopages-cli/releases/latest"><img alt="Release" src="https://img.shields.io/github/release/dreadnode/robopages-cli.svg?style=fl_pathat-square"></a> 17 | <a href="https://crates.io/crates/robopages"><img alt="Crate" src="https://img.shields.io/crates/v/robopages.svg"></a> 18 | <a href="https://hub.docker.com/r/dreadnode/robopages"><img alt="Docker Hub" src="https://img.shields.io/docker/v/dreadnode/robopages?logo=docker"></a> 19 | <a href="https://rust-reportcard.xuri.me/report/github.com/dreadnode/robopages-cli"><img alt="Rust Report" src="https://rust-reportcard.xuri.me/badge/github.com/dreadnode/robopages-cli"></a> 20 | <a href="#"><img alt="GitHub Actions Workflow Status" src="https://img.shields.io/github/actions/workflow/status/dreadnode/robopages-cli/test.yml"></a> 21 | <a href="https://github.com/dreadnode/robopages-cli/blob/master/LICENSE.md"><img alt="Software License" src="https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat-square"></a> 22 | </p> 23 | 24 | **CLI and API server for [robopages](https://github.com/dreadnode/robopages)** 25 | 26 | </div> 27 | 28 | # Table of Contents 29 | 30 | - [Robopages Server](#robopages-server) 31 | - [CLI and API server for robopages](#cli-and-api-server-for-robopages) 32 | - [Table of Contents](#table-of-contents) 33 | - [Install with Cargo](#install-with-cargo) 34 | - [Pull from Docker Hub](#pull-from-docker-hub) 35 | - [Build Docker image](#build-docker-image) 36 | - [Note about Docker](#note-about-docker) 37 | 
- [Build from source](#build-from-source) 38 | - [Usage](#usage) 39 | - [CLI](#cli) 40 | - [SSH](#ssh) 41 | - [Using with LLMs](#using-with-llms) 42 | - [Docker Container Failures](#docker-container-failures) 43 | 44 | 45 | [Robopages are YAML based files](https://github.com/dreadnode/robopages) for describing tools to large language models (LLMs). They simplify the process of defining and using external tools in LLM-powered applications. By leveraging the `robopages-cli` function calling server, developers can avoid the tedious task of manually writing JSON declarations for each tool. This approach streamlines tool integration, improves maintainability, and allows for more dynamic and flexible interactions between LLMs and external utilities. 46 | 47 | Pages are loaded by default from the `~/.robopages/` directory (or any folder set in the `ROBOPAGES_PATH` environment variable), see the `https://github.com/dreadnode/robopages` repository for examples. 48 | 49 | ## Install with Cargo 50 | 51 | This is the recommended way to install and use the tool: 52 | 53 | ```bash 54 | cargo install robopages 55 | ``` 56 | 57 | ## Pull from Docker Hub 58 | 59 | ```bash 60 | docker pull dreadnode/robopages:latest 61 | ``` 62 | 63 | ## Build Docker image 64 | 65 | To build your own Docker image for the tool, run: 66 | 67 | ```bash 68 | docker build . 
-t robopages 69 | ``` 70 | 71 | Optionally, you can create a bash alias like so: 72 | 73 | `alias robopages='docker run -v /var/run/docker.sock:/var/run/docker.sock -v ~/.robopages:/root/.robopages -p 8000:8000 robopages'` 74 | 75 | ## Note about Docker 76 | 77 | If you are using `robopages` inside a container, make sure to share the docker socket from the host machine with the container: 78 | 79 | ```bash 80 | docker run -it \ 81 | # allow the container itself to instrument docker on the host \ 82 | -v/var/run/docker.sock:/var/run/docker.sock 83 | # share your robopages 84 | -v$HOME/.robopages:/root/.robopages \ 85 | # the rest of the command line 86 | robopages view 87 | ``` 88 | 89 | ## Build from source 90 | 91 | Alternatively you can build the project from source, in which case you'll need to have Rust and Cargo [installed on your system](https://rustup.rs/) and clone this repository. 92 | 93 | To build the project: 94 | 95 | ```bash 96 | cargo build --release 97 | ``` 98 | 99 | The compiled binary will be available in the `target/release` directory. You can run it directly or add it to your system's PATH: 100 | 101 | ```bash 102 | # Run directly 103 | ./target/release/robopages 104 | 105 | # Or, copy to a directory in your PATH (e.g., /usr/local/bin) 106 | sudo cp target/release/robopages /usr/local/bin/ 107 | ``` 108 | 109 | ## Usage 110 | 111 | This project consists of a CLI for creating, viewing and serving robopages as a REST API. 
112 | 113 | ### CLI 114 | 115 | Install robopages: 116 | 117 | ```bash 118 | # install https://github.com/dreadnode/robopages to ~/.robopages/ 119 | robopages install 120 | 121 | # install a custom repository 122 | robopages install --source user/repo 123 | 124 | # install from a local archive 125 | robopages install --source /path/to/archive.zip 126 | ``` 127 | 128 | View installed robopages: 129 | 130 | ```bash 131 | robopages view 132 | ``` 133 | 134 | Create a robopage with the preferred template: 135 | 136 | ```bash 137 | # create with the basic template, will run the command in the current shell 138 | robopages create --name my_first_page.yml --template basic 139 | 140 | # create with the docker-image template, will use a docker image to run the command 141 | robopages create --name my_first_page.yml --template docker-image 142 | 143 | # create with the docker-build template, will build a docker image to run the command 144 | robopages create --name my_first_page.yml --template docker-build 145 | ``` 146 | 147 | Validate one or more files: 148 | 149 | ```bash 150 | # validate all pages in ~/.robopages 151 | robopages validate 152 | 153 | # validate a specific page 154 | robopages validate --path my_first_page.yml 155 | 156 | # do not attempt to pull or build containers 157 | robopages validate --skip-docker 158 | ``` 159 | 160 | Start the REST API: 161 | 162 | > [!IMPORTANT] 163 | > While strict CORS rules are enforced by default, no authentication layer is provided. It is highly recommended to never bind this API to addresses other than localhost (as per default configuration). 
164 | 165 | ```bash 166 | # this will pre build and pull all containers 167 | robopages serve 168 | 169 | # this will build or pull containers on demand 170 | robopages serve --lazy 171 | ``` 172 | 173 | Execute a function manually without user interaction: 174 | 175 | ```bash 176 | robopages run --function nikto_scan --auto 177 | ``` 178 | 179 | You can also define variables to be used in the function call: 180 | 181 | ```bash 182 | robopages run -F httpx_tech_detect -A --define target=www.example.com 183 | ``` 184 | 185 | Repeat for multiple variables: 186 | 187 | ```bash 188 | robopages run -F function_name -A -D target=www.example.com -D foo=bar 189 | ``` 190 | 191 | #### SSH 192 | 193 | The `run` and `serve` commands support an optional SSH connection string. If provided, commands will be executed over SSH on the given host. 194 | 195 | ```bash 196 | robopages serve --ssh user@host:port --ssh-key ~/.ssh/id_ed25519 197 | ``` 198 | 199 | > [!IMPORTANT] 200 | > * Setting a SSH connection string will override any container configuration. 201 | > * If the function requires sudo, the remote host is expected to have passwordless sudo access. 202 | 203 | ### Using with LLMs 204 | 205 | The examples folder contains integration examples for [Rigging](/examples/rigging_example.py), [OpenAI](/examples/openai_example.py), [Groq](/examples/groq_example.py), [OLLAMA](/examples/ollama_example.py) and [Nerve](/examples/nerve.md). 206 | 207 | ## Docker Container Failures 208 | 209 | If a function's required Docker container fails to pull (e.g., due to missing permissions or non-existent image), the function will fail to execute. To resolve this: 210 | 211 | 1. Either gain access to the required container, or 212 | 2. Remove the robopage file that references the inaccessible container 213 | 214 | This behavior is intentional to prevent functions from executing without their required runtime dependencies. 
class RoboPagesTool(BaseTool):
    """A LangChain tool that proxies execution to a Robopages server function."""

    name: str
    description: str
    parameters: List[Dict]
    # FIX: the original annotated this as `Optional[ArgsSchema]`, but
    # `ArgsSchema` is never imported or defined anywhere in this file, so the
    # class definition raised NameError. BaseTool's args_schema is a pydantic
    # model type, which `create_model(...)` below produces.
    args_schema: Optional[Type[BaseModel]]

    __baseURL: str = _SERVER_URL

    def _run(self, *args, **kwargs):
        """POST the tool call to the server's /process endpoint and return its result."""
        process_url = f"{self.__baseURL}/process"
        headers = {"Content-type": "application/json"}

        payload = [
            {
                "type": "function",
                "function": {
                    "name": self.name,
                    "arguments": kwargs
                }
            }
        ]

        response = requests.post(
            url=process_url,
            headers=headers,
            json=payload
        )
        # Surface HTTP errors explicitly instead of failing inside .json()
        response.raise_for_status()
        return response.json()[0]

class RoboPagesOutput(BaseModel):
    """Structured description of a tool invocation."""

    tool: str = Field(description="The tool that was used")
    parameters: Dict = Field(description="The parameters for the requested tool")

class RoboPages:
    """Fetches function definitions from a Robopages server and wraps them as LangChain tools."""

    def __init__(self, server_url: str = None):
        """Initialize RoboPages with an optional base URL override."""
        self.__server_url: str = server_url if server_url else _SERVER_URL
        # Request the rigging-flavored schema from the server root endpoint.
        self.__server_url += "/?flavor=rigging"
        self.tools: List[RoboPagesTool] = []
        self.RoboPagesOutput = RoboPagesOutput

    def __get_tools(self):
        """Fetch the raw function definitions from the server."""
        response = requests.get(self.__server_url)
        response.raise_for_status()
        return response.json()

    def __create_tools(self) -> List[RoboPagesTool]:
        """Create LangChain Tools based on the functions from the root endpoint."""
        functions = self.__get_tools()
        self.tools = []

        for item in functions:
            for func in item["functions"]:
                name = func["name"]
                description = func["description"]
                parameters = func["parameters"]

                # Build the pydantic field specs for the args schema
                args = {}
                for param in parameters:
                    args[param["name"]] = (param["type"], Field(description=param["description"]))

                tool = RoboPagesTool(
                    name=name,
                    description=description,
                    parameters=parameters,
                    args_schema=create_model(f"{name}_schema", **args),
                )
                self.tools.append(tool)

        return self.tools

    def get_tools(self) -> List[RoboPagesTool]:
        """Return the list of created tools, fetching and creating tools if needed."""
        if not self.tools:
            self.__create_tools()
        return self.tools

    def get_tool(self, name: str) -> RoboPagesTool | None:
        """Retrieve a specific tool by its name, fetching and creating tools if needed."""
        if not self.tools:
            self.__create_tools()
        for tool in self.tools:
            if tool.name == name:
                return tool
        return None

    def filter_tools(self, filter_string: str) -> List[RoboPagesTool] | None:
        """Retrieve tools whose name contains filter_string (case-insensitive)."""
        output: List[RoboPagesTool] = []
        if not self.tools:
            self.__create_tools()
        for tool in self.tools:
            # FIX: lower both sides; the original lowered only the filter
            # string, so mixed-case tool names could never match.
            if filter_string.lower() in tool.name.lower():
                output.append(tool)
        if output:
            return output
        else:
            return None
116 | RoboPagesTool_test_parameters =[ 117 | { 118 | "description": "The URL to perform the GET request on.", 119 | "examples": "", 120 | "name": "url", 121 | "type": "str" 122 | }, 123 | { 124 | "description": "An optional, NON EMPTY User-Agent string to use for the request.", 125 | "examples": "", 126 | "name": "user_agent", 127 | "type": "str" 128 | } 129 | ] 130 | http_get = RoboPagesTool( 131 | name=RoboPagesTool_test_name, 132 | description=RoboPagesTool_test_description, 133 | parameters=RoboPagesTool_test_parameters, 134 | args_schema=create_model( 135 | "http_get", 136 | url=(str, Field(description="The URL to perform the GET request on.")), 137 | user_agent=(str, Field(description="An optional, NON EMPTY User-Agent string to use for the request.")) 138 | ) 139 | ) 140 | http_get_tool_call = http_get.invoke({ 141 | "url": "http://example.com", 142 | "user_agent": "RoboPages" 143 | }) 144 | if http_get_tool_call[0:15] == "<!doctype html>": 145 | print(f"\033[32mPassed!\033[0m") 146 | else: 147 | print(f"\033[91mFailed!\033[0m") 148 | 149 | print("++ Test Class RoboPages:") 150 | print("--- Create Tools Function") 151 | rb = RoboPages() 152 | robo_tools = [] 153 | robo_tools = rb.get_tools() 154 | if robo_tools: 155 | print(f"\033[32mPassed!\033[0m") 156 | else: 157 | print(f"\033[91mFailed!\033[0m") 158 | 159 | print("--- Get Tool Function") 160 | rb = RoboPages() 161 | robo_tool = [] 162 | robo_tool = rb.get_tool(name= "http_get") 163 | if robo_tool: 164 | print(f"\033[32mPassed!\033[0m") 165 | else: 166 | print(f"\033[91mFailed!\033[0m") 167 | print("Done!") 168 | 169 | print("--- Filter Tool Function") 170 | rb = RoboPages() 171 | robo_tool = [] 172 | robo_tool = rb.filter_tools(filter_string="http_get") 173 | if robo_tool: 174 | print(f"\033[32mPassed!\033[0m") 175 | else: 176 | print(f"\033[91mFailed!\033[0m") 177 | print("Done!") 178 | -------------------------------------------------------------------------------- /examples/nerve.md: 
-------------------------------------------------------------------------------- 1 | Robopages can be used with any [Nerve tasklet](https://github.com/evilsocket/nerve): 2 | 3 | ```bash 4 | nerve -G "..." -T /path/to/tasklet -R "localhost:8000/cybersecurity/reverse-engineering" 5 | ``` -------------------------------------------------------------------------------- /examples/ollama_example.py: -------------------------------------------------------------------------------- 1 | import ollama 2 | import asyncio 3 | import requests 4 | 5 | from rich import print 6 | 7 | 8 | async def run(model: str): 9 | client = ollama.AsyncClient() 10 | 11 | messages = [ 12 | { 13 | "role": "user", 14 | "content": "Find open ports on 127.0.0.1", 15 | } 16 | ] 17 | 18 | response = await client.chat( 19 | model=model, 20 | messages=messages, 21 | # get the tools from the Robopages server 22 | tools=requests.get("http://localhost:8000/").json(), 23 | ) 24 | 25 | print(response) 26 | 27 | # if the response contains tool calls 28 | if response["message"]["tool_calls"]: 29 | # execute them via the API 30 | results = requests.post( 31 | "http://localhost:8000/process", json=response["message"]["tool_calls"] 32 | ) 33 | results.raise_for_status() 34 | # do whatever you want with the results 35 | print(results.json()) 36 | 37 | 38 | asyncio.run(run("llama3.1")) 39 | -------------------------------------------------------------------------------- /examples/openai_example.py: -------------------------------------------------------------------------------- 1 | import json 2 | import openai 3 | import asyncio 4 | import requests 5 | 6 | from rich import print 7 | 8 | 9 | async def run(model: str): 10 | client = openai.AsyncOpenAI() 11 | 12 | messages = [ 13 | { 14 | "role": "user", 15 | "content": "Find open ports on 127.0.0.1", 16 | } 17 | ] 18 | 19 | response = await client.chat.completions.create( 20 | model=model, 21 | messages=messages, 22 | # get the tools from the Robopages server 23 | 
tools=requests.get("http://localhost:8000/").json(), 24 | ) 25 | 26 | print(response) 27 | 28 | # if the response contains tool calls 29 | if response.choices[0].message.tool_calls: 30 | # execute them via the API 31 | results = requests.post( 32 | "http://localhost:8000/process", 33 | json=[ 34 | { 35 | "id": tool_call.id, 36 | "type": tool_call.type, 37 | "function": { 38 | "name": tool_call.function.name, 39 | # for some reason the arguments are returned as a string 40 | "arguments": json.loads(tool_call.function.arguments), 41 | }, 42 | } 43 | for tool_call in response.choices[0].message.tool_calls 44 | ], 45 | ) 46 | results.raise_for_status() 47 | # do whatever you want with the results 48 | print(results.json()) 49 | 50 | 51 | asyncio.run(run("gpt-3.5-turbo")) 52 | -------------------------------------------------------------------------------- /examples/rigging_example.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | from loguru import logger 4 | import rigging as rg 5 | from rigging import logging 6 | from rich import print 7 | 8 | os.environ["LOGFIRE_IGNORE_NO_CONFIG"] = "1" 9 | logging.configure_logging("DEBUG", None, "DEBUG") 10 | 11 | 12 | async def run(): 13 | """Main function that runs the chat with RoboPages tools""" 14 | 15 | try: 16 | logger.info("Fetching tools from RoboPages server") 17 | # Use the built-in robopages integration 18 | tools = rg.integrations.robopages("http://localhost:8000") 19 | 20 | logger.info(f"Fetched {len(tools)} tools from RoboPages server") 21 | 22 | prompt = """ 23 | I need you to find all open ports on the local machine (127.0.0.1). 24 | Please use the available tools to scan the ports and provide a summary of the results. 25 | 26 | Be thorough but concise in your analysis. Present the information in a clear format. 27 | 28 | After scanning, list all the open ports you found and what services might be running on them. 
29 | """ 30 | 31 | logger.info("Starting chat with model") 32 | generator = rg.get_generator("gpt-4o") 33 | 34 | chat = await generator.chat(prompt).using(*tools, force=True).run() 35 | 36 | logger.info("Chat completed. Full conversation:") 37 | for i, message in enumerate(chat.messages): 38 | logger.info(f"Message {i + 1} ({message.role}):") 39 | logger.info( 40 | message.content[:200] + ("..." if len(message.content) > 200 else "") 41 | ) 42 | 43 | print("\n--- RESULT ---\n") 44 | print(chat.last.content) 45 | 46 | return chat 47 | 48 | except Exception as e: 49 | logger.error(f"Error: {e}") 50 | import traceback 51 | 52 | traceback.print_exc() 53 | return None 54 | 55 | 56 | if __name__ == "__main__": 57 | chat = asyncio.run(run()) 58 | if chat: 59 | print(chat.conversation) 60 | -------------------------------------------------------------------------------- /release.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import subprocess 3 | import re 4 | 5 | # print changelog 6 | current_tag = subprocess.run( 7 | ["git", "describe", "--tags", "--abbrev=0"], capture_output=True, text=True 8 | ).stdout.strip() 9 | if current_tag == "": 10 | # os.system("git log HEAD --oneline") 11 | interval = "HEAD" 12 | else: 13 | print("current tag: %s" % current_tag) 14 | interval = "%s..HEAD" % current_tag 15 | 16 | print( 17 | "CHANGELOG:\n\n%s\n" 18 | % subprocess.run( 19 | ["git", "log", interval, "--oneline"], capture_output=True, text=True 20 | ).stdout.strip() 21 | ) 22 | 23 | version_match_re = r'^version\s*=\s*"([^"]+)"$' 24 | 25 | with open("Cargo.toml", "rt") as fp: 26 | manifest = fp.read() 27 | 28 | # parse current version and get next from user 29 | m = re.findall(version_match_re, manifest, re.MULTILINE) 30 | if len(m) != 1: 31 | print("could not parse current version from Cargo.toml") 32 | quit() 33 | 34 | current_ver = m[0] 35 | next_ver = input("current version is %s, enter next: " % current_ver) 
36 | 37 | # generate new manifest 38 | result = re.sub( 39 | version_match_re, 'version = "%s"' % next_ver, manifest, 0, re.MULTILINE 40 | ) 41 | with open("Cargo.toml", "w+t") as fp: 42 | fp.write(result) 43 | 44 | # commit, push and create new tag 45 | print("git add Cargo.*") 46 | print("git commit -m 'releasing version %s'" % next_ver) 47 | print("git push") 48 | print("git tag -a v%s -m 'releasing v%s'" % (next_ver, next_ver)) 49 | print("git push origin v%s" % next_ver) 50 | 51 | # print() 52 | # publish on crates.io 53 | # print("cargo publish") 54 | -------------------------------------------------------------------------------- /src/book/flavors/mod.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | pub(crate) mod nerve; 4 | pub(crate) mod openai; 5 | pub(crate) mod rigging; 6 | 7 | #[derive(Default, Debug)] 8 | pub(crate) enum Flavor { 9 | #[default] 10 | OpenAI, 11 | Nerve, 12 | Rigging, 13 | } 14 | 15 | #[allow(dead_code)] 16 | impl Flavor { 17 | pub fn from_string(s: &str) -> anyhow::Result<Self> { 18 | match s.to_lowercase().as_str() { 19 | "openai" => Ok(Flavor::OpenAI), 20 | "nerve" => Ok(Flavor::Nerve), 21 | "rigging" => Ok(Flavor::Rigging), 22 | _ => Err(anyhow!("unknown flavor: {}", s)), 23 | } 24 | } 25 | 26 | pub fn from_map_or_default(query: &HashMap<String, String>) -> anyhow::Result<Self> { 27 | query 28 | .get("flavor") 29 | .map_or(Ok(Flavor::default()), |s| Self::from_string(s)) 30 | } 31 | 32 | pub fn is_openai(&self) -> bool { 33 | matches!(self, Flavor::OpenAI) 34 | } 35 | 36 | pub fn is_nerve(&self) -> bool { 37 | matches!(self, Flavor::Nerve) 38 | } 39 | 40 | pub fn is_rigging(&self) -> bool { 41 | matches!(self, Flavor::Rigging) 42 | } 43 | } 44 | 45 | #[cfg(test)] 46 | mod tests { 47 | use super::*; 48 | 49 | #[test] 50 | fn test_flavor_from_string() { 51 | assert!(matches!(Flavor::from_string("openai"), Ok(Flavor::OpenAI))); 52 | 
assert!(matches!(Flavor::from_string("OpenAI"), Ok(Flavor::OpenAI))); 53 | assert!(matches!(Flavor::from_string("OPENAI"), Ok(Flavor::OpenAI))); 54 | 55 | assert!(matches!(Flavor::from_string("nerve"), Ok(Flavor::Nerve))); 56 | assert!(matches!(Flavor::from_string("Nerve"), Ok(Flavor::Nerve))); 57 | assert!(matches!(Flavor::from_string("NERVE"), Ok(Flavor::Nerve))); 58 | 59 | assert!(matches!( 60 | Flavor::from_string("rigging"), 61 | Ok(Flavor::Rigging) 62 | )); 63 | assert!(matches!( 64 | Flavor::from_string("Rigging"), 65 | Ok(Flavor::Rigging) 66 | )); 67 | assert!(matches!( 68 | Flavor::from_string("RIGGING"), 69 | Ok(Flavor::Rigging) 70 | )); 71 | 72 | assert!(Flavor::from_string("unknown").is_err()); 73 | assert!(Flavor::from_string("").is_err()); 74 | } 75 | 76 | #[test] 77 | fn test_flavor_from_map_or_default() { 78 | let mut map = HashMap::new(); 79 | 80 | // Test default case 81 | assert!(matches!( 82 | Flavor::from_map_or_default(&map), 83 | Ok(Flavor::OpenAI) 84 | )); 85 | 86 | // Test valid flavor 87 | map.insert("flavor".to_string(), "openai".to_string()); 88 | assert!(matches!( 89 | Flavor::from_map_or_default(&map), 90 | Ok(Flavor::OpenAI) 91 | )); 92 | 93 | // Test invalid flavor 94 | map.insert("flavor".to_string(), "unknown".to_string()); 95 | assert!(Flavor::from_map_or_default(&map).is_err()); 96 | 97 | // Test empty string 98 | map.insert("flavor".to_string(), "".to_string()); 99 | assert!(Flavor::from_map_or_default(&map).is_err()); 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /src/book/flavors/nerve.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use crate::book::Page; 6 | 7 | // https://github.com/evilsocket/nerve/blob/main/nerve-core/src/agent/task/tasklet.rs#L205 8 | 9 | #[derive(Default, Serialize, Deserialize, Debug, Clone)] 10 | pub(crate) struct Action { 
11 | name: String, 12 | description: String, 13 | args: Option<HashMap<String, String>>, 14 | example_payload: Option<String>, 15 | tool: String, 16 | } 17 | 18 | #[derive(Default, Serialize, Deserialize, Debug, Clone)] 19 | pub(crate) struct FunctionGroup { 20 | pub name: String, 21 | pub description: Option<String>, 22 | pub actions: Vec<Action>, 23 | } 24 | 25 | impl From<&Page> for Vec<FunctionGroup> { 26 | fn from(page: &Page) -> Self { 27 | let mut group = FunctionGroup { 28 | name: page.name.clone(), 29 | description: page.description.clone(), 30 | actions: vec![], 31 | }; 32 | 33 | for (func_name, func) in &page.functions { 34 | let mut args = HashMap::new(); 35 | for (param_name, param) in &func.parameters { 36 | args.insert(param_name.clone(), param.description.clone()); 37 | } 38 | 39 | group.actions.push(Action { 40 | name: func_name.clone(), 41 | description: func.description.clone(), 42 | args: Some(args), 43 | example_payload: None, 44 | tool: format!("{}.{}@robopages", page.name, func_name), 45 | }); 46 | } 47 | 48 | vec![group] 49 | } 50 | } 51 | 52 | #[cfg(test)] 53 | mod tests { 54 | use super::*; 55 | use crate::book::{Function, Page, Parameter}; 56 | use std::collections::BTreeMap; 57 | 58 | fn create_test_page() -> Page { 59 | let mut functions = BTreeMap::new(); 60 | let mut parameters = BTreeMap::new(); 61 | parameters.insert( 62 | "param1".to_string(), 63 | Parameter { 64 | param_type: "string".to_string(), 65 | description: "Test parameter".to_string(), 66 | required: true, 67 | examples: None, 68 | }, 69 | ); 70 | 71 | functions.insert( 72 | "test_function".to_string(), 73 | Function { 74 | description: "A test function".to_string(), 75 | parameters, 76 | execution: crate::book::runtime::ExecutionContext::CommandLine(vec![ 77 | "echo".to_string(), 78 | "test".to_string(), 79 | ]), 80 | container: None, 81 | }, 82 | ); 83 | 84 | Page { 85 | name: "TestPage".to_string(), 86 | description: Some("A test page".to_string()), 87 | categories: 
vec!["test".to_string()], 88 | functions, 89 | } 90 | } 91 | 92 | #[test] 93 | fn test_page_to_function_group() { 94 | let page = create_test_page(); 95 | let function_groups: Vec<FunctionGroup> = (&page).into(); 96 | 97 | assert_eq!(function_groups.len(), 1); 98 | let group = &function_groups[0]; 99 | 100 | assert_eq!(group.name, "TestPage"); 101 | assert_eq!(group.description, Some("A test page".to_string())); 102 | assert_eq!(group.actions.len(), 1); 103 | 104 | let action = &group.actions[0]; 105 | assert_eq!(action.name, "test_function"); 106 | assert_eq!(action.description, "A test function"); 107 | assert_eq!(action.tool, "TestPage.test_function@robopages"); 108 | 109 | let args = action.args.as_ref().unwrap(); 110 | assert_eq!(args.len(), 1); 111 | assert_eq!(args.get("param1"), Some(&"Test parameter".to_string())); 112 | } 113 | 114 | #[test] 115 | fn test_empty_page() { 116 | let page = Page { 117 | name: "EmptyPage".to_string(), 118 | description: None, 119 | categories: vec![], 120 | functions: BTreeMap::new(), 121 | }; 122 | 123 | let function_groups: Vec<FunctionGroup> = (&page).into(); 124 | 125 | assert_eq!(function_groups.len(), 1); 126 | let group = &function_groups[0]; 127 | 128 | assert_eq!(group.name, "EmptyPage"); 129 | assert_eq!(group.description, None); 130 | assert_eq!(group.actions.len(), 0); 131 | } 132 | 133 | #[test] 134 | fn test_multiple_functions() { 135 | let mut page = create_test_page(); 136 | page.functions.insert( 137 | "another_function".to_string(), 138 | Function { 139 | description: "Another test function".to_string(), 140 | parameters: BTreeMap::new(), 141 | execution: crate::book::runtime::ExecutionContext::CommandLine(vec![ 142 | "echo".to_string(), 143 | "another".to_string(), 144 | ]), 145 | container: None, 146 | }, 147 | ); 148 | 149 | let function_groups: Vec<FunctionGroup> = (&page).into(); 150 | 151 | assert_eq!(function_groups.len(), 1); 152 | let group = &function_groups[0]; 153 | 154 | 
assert_eq!(group.actions.len(), 2); 155 | assert!(group.actions.iter().any(|a| a.name == "test_function")); 156 | assert!(group.actions.iter().any(|a| a.name == "another_function")); 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /src/book/flavors/openai.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use crate::book::Page; 6 | 7 | // https://platform.openai.com/docs/guides/function-calling 8 | 9 | #[derive(Debug, Serialize)] 10 | pub(crate) struct Tool { 11 | #[serde(rename = "type")] 12 | #[serde(default = "function")] 13 | pub tool_type: String, 14 | pub function: Function, 15 | } 16 | 17 | #[derive(Debug, Serialize)] 18 | pub(crate) struct Function { 19 | pub name: String, 20 | pub description: String, 21 | pub parameters: Parameters, 22 | } 23 | 24 | #[derive(Debug, Serialize)] 25 | pub(crate) struct Parameters { 26 | #[serde(rename = "type")] 27 | #[serde(default = "object")] 28 | pub params_type: String, 29 | pub properties: BTreeMap<String, Parameter>, 30 | pub required: Vec<String>, 31 | } 32 | 33 | #[derive(Debug, Serialize)] 34 | pub(crate) struct Parameter { 35 | #[serde(rename = "type")] 36 | pub param_type: String, 37 | pub description: String, 38 | } 39 | 40 | impl From<&Page> for Vec<Tool> { 41 | fn from(page: &Page) -> Self { 42 | page.functions 43 | .iter() 44 | .map(|(func_name, func)| { 45 | let mut properties = BTreeMap::new(); 46 | let mut required = Vec::new(); 47 | 48 | for (param_name, param) in &func.parameters { 49 | properties.insert( 50 | param_name.clone(), 51 | Parameter { 52 | param_type: param.param_type.clone(), 53 | description: param.description.clone(), 54 | }, 55 | ); 56 | 57 | if param.required { 58 | required.push(param_name.clone()); 59 | } 60 | } 61 | 62 | // NOTE: it'd be nice if we could add examples 63 | 64 | Tool { 65 | tool_type: 
"function".to_string(), 66 | function: Function { 67 | name: func_name.clone(), 68 | description: func.description.clone(), 69 | parameters: Parameters { 70 | params_type: "object".to_string(), 71 | properties, 72 | required, 73 | }, 74 | }, 75 | } 76 | }) 77 | .collect() 78 | } 79 | } 80 | 81 | #[derive(Debug, Serialize, Deserialize)] 82 | pub(crate) struct FunctionCall { 83 | pub name: String, 84 | pub arguments: BTreeMap<String, String>, 85 | } 86 | 87 | type CallId = String; 88 | 89 | #[derive(Debug, Serialize, Deserialize)] 90 | pub(crate) struct Call { 91 | pub id: Option<CallId>, 92 | #[serde(rename = "type")] 93 | #[serde(default = "default_call_type")] 94 | pub call_type: String, 95 | pub function: FunctionCall, 96 | } 97 | 98 | fn default_call_type() -> String { 99 | "function".to_string() 100 | } 101 | 102 | #[derive(Debug, Serialize, Deserialize)] 103 | pub(crate) struct CallResultMessage { 104 | #[serde(default = "default_result_message_role")] 105 | pub role: String, 106 | pub call_id: Option<CallId>, 107 | pub content: String, 108 | } 109 | 110 | fn default_result_message_role() -> String { 111 | "tool".to_string() 112 | } 113 | 114 | #[cfg(test)] 115 | mod tests { 116 | use super::*; 117 | 118 | #[test] 119 | fn test_function_call() { 120 | let mut arguments = BTreeMap::new(); 121 | arguments.insert("arg1".to_string(), "value1".to_string()); 122 | arguments.insert("arg2".to_string(), "value2".to_string()); 123 | 124 | let function_call = FunctionCall { 125 | name: "test_function".to_string(), 126 | arguments, 127 | }; 128 | 129 | assert_eq!(function_call.name, "test_function"); 130 | assert_eq!(function_call.arguments.len(), 2); 131 | assert_eq!( 132 | function_call.arguments.get("arg1"), 133 | Some(&"value1".to_string()) 134 | ); 135 | assert_eq!( 136 | function_call.arguments.get("arg2"), 137 | Some(&"value2".to_string()) 138 | ); 139 | } 140 | 141 | #[test] 142 | fn test_call() { 143 | let function_call = FunctionCall { 144 | name: 
"test_function".to_string(), 145 | arguments: BTreeMap::new(), 146 | }; 147 | 148 | let call = Call { 149 | id: Some("test_id".to_string()), 150 | call_type: "function".to_string(), 151 | function: function_call, 152 | }; 153 | 154 | assert_eq!(call.id, Some("test_id".to_string())); 155 | assert_eq!(call.call_type, "function"); 156 | assert_eq!(call.function.name, "test_function"); 157 | } 158 | 159 | #[test] 160 | fn test_call_default_type() { 161 | let function_call = FunctionCall { 162 | name: "test_function".to_string(), 163 | arguments: BTreeMap::new(), 164 | }; 165 | 166 | let call = Call { 167 | id: None, 168 | call_type: default_call_type(), 169 | function: function_call, 170 | }; 171 | 172 | assert_eq!(call.call_type, "function"); 173 | } 174 | 175 | #[test] 176 | fn test_call_result_message() { 177 | let message = CallResultMessage { 178 | role: "custom_role".to_string(), 179 | call_id: Some("test_id".to_string()), 180 | content: "Test content".to_string(), 181 | }; 182 | 183 | assert_eq!(message.role, "custom_role"); 184 | assert_eq!(message.call_id, Some("test_id".to_string())); 185 | assert_eq!(message.content, "Test content"); 186 | } 187 | 188 | #[test] 189 | fn test_call_result_message_default_role() { 190 | let message = CallResultMessage { 191 | role: default_result_message_role(), 192 | call_id: None, 193 | content: "Test content".to_string(), 194 | }; 195 | 196 | assert_eq!(message.role, "tool"); 197 | } 198 | } 199 | -------------------------------------------------------------------------------- /src/book/flavors/rigging.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use crate::book::Page; 4 | 5 | // https://rigging.dreadnode.io/topics/tools/ 6 | 7 | #[derive(Debug, Deserialize, Serialize)] 8 | pub(crate) struct Parameter { 9 | pub name: String, 10 | #[serde(rename = "type")] 11 | pub param_type: String, 12 | pub description: String, 13 | pub examples: 
Vec<String>,
}

// rigging uses python types
/// Map a robopages parameter type name to the Python type name rigging
/// expects ("string" becomes "str"); any other type string is passed
/// through unchanged.
fn rigging_param_type(s: &str) -> String {
    if s == "string" {
        return "str".to_string();
    }

    s.to_string()
}

/// A single callable function in the rigging tool schema.
#[derive(Default, Serialize, Deserialize, Debug)]
pub(crate) struct Function {
    name: String,
    description: String,
    parameters: Vec<Parameter>,
}

/// Top-level rigging tool: a named group of functions with an optional
/// description.
#[derive(Default, Serialize, Deserialize, Debug)]
pub(crate) struct Tool {
    pub name: String,
    pub description: Option<String>,
    pub functions: Vec<Function>,
}

// Convert a robopage into the rigging schema: one Tool carrying an entry for
// every function on the page, with parameter types mapped to Python names and
// missing example lists defaulted to empty vectors.
impl From<&Page> for Vec<Tool> {
    fn from(page: &Page) -> Self {
        let mut tool = Tool {
            name: page.name.clone(),
            description: page.description.clone(),
            functions: vec![],
        };

        for (func_name, func) in &page.functions {
            tool.functions.push(Function {
                name: func_name.clone(),
                description: func.description.clone(),
                parameters: func
                    .parameters
                    .iter()
                    // p is (name, Parameter) from the page's BTreeMap.
                    .map(|p| Parameter {
                        name: p.0.clone(),
                        param_type: rigging_param_type(&p.1.param_type),
                        description: p.1.description.clone(),
                        examples: p.1.examples.clone().unwrap_or_default(),
                    })
                    .collect(),
            });
        }

        vec![tool]
    }
}
--------------------------------------------------------------------------------
/src/book/mod.rs:
--------------------------------------------------------------------------------
use std::collections::{BTreeMap, HashMap};

use camino::Utf8PathBuf;
use glob::glob;
use serde::{Deserialize, Serialize};

use crate::runtime::{CommandLine, ContainerSource};

pub(crate) mod flavors;
pub(crate) mod runtime;
pub(crate) mod templates;

macro_rules!
eval_if_in_filter {
    // Run `$action` only when `$path` passes the optional substring
    // `$filter`; a `None` filter matches every path.
    ($path:expr, $filter:expr, $action:expr) => {
        // include by default
        let mut include = true;
        // if filter is set
        if let Some(filter) = &$filter {
            // if it does not match, do not include
            if !$path.as_str().contains(filter) {
                include = false;
            }
        }
        if include {
            $action
        }
    };
}

/// A single function parameter as declared in a robopage YAML file.
#[derive(Debug, Serialize, Deserialize)]
pub struct Parameter {
    /// Parameter type name (serialized under the key "type").
    #[serde(rename = "type")]
    pub param_type: String,
    pub description: String,
    /// Whether the parameter must be provided; defaults to true when
    /// omitted from the YAML.
    #[serde(default = "default_required")]
    pub required: bool,
    /// Optional example values for the parameter.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub examples: Option<Vec<String>>,
}

/// Serde default for `Parameter::required`.
fn default_required() -> bool {
    true
}

/// Container settings used to wrap a function's command line in `docker run`.
#[derive(Debug, Serialize, Deserialize)]
pub struct Container {
    /// Where the image comes from; its fields are flattened into this
    /// struct in the YAML representation.
    #[serde(flatten)]
    pub source: ContainerSource,
    /// Extra arguments appended to the `docker run` invocation.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub args: Option<Vec<String>>,
    /// Volume mappings, each passed to docker as a `-v` flag.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub volumes: Option<Vec<String>>,
    // NOTE(review): `force` is not read in this block — presumably it forces
    // containerized execution even when the tool exists on the host; confirm
    // against the runtime module. Defaults to false.
    #[serde(default = "default_force")]
    #[serde(skip_serializing_if = "is_false")]
    pub force: bool,
    /// Keep the original executable name as the first argument inside the
    /// container (for images whose entrypoint is not the tool itself).
    #[serde(default = "default_preserve_app")]
    #[serde(skip_serializing_if = "is_false")]
    pub preserve_app: bool,
    /// Optional target platform passed through to image resolution.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub platform: Option<String>,
}

/// Serde helper: skip serializing boolean fields that are false.
fn is_false(b: &bool) -> bool {
    !(*b)
}

/// Serde default for `Container::force`.
fn default_force() -> bool {
    false
}

/// Serde default for `Container::preserve_app`.
fn default_preserve_app() -> bool {
    false
}

impl Container {
    /// Wrap `cmdline` into an equivalent `docker run --rm` invocation:
    /// environment variables are forwarded through a temporary `--env-file`,
    /// volumes and extra args are appended, then the image, optionally the
    /// original app name, and finally the original arguments.
    ///
    /// Errors when the `docker` executable cannot be located or the
    /// temporary env file cannot be created or written.
    pub fn wrap(&self, cmdline: CommandLine) -> anyhow::Result<CommandLine> {
        let mut dockerized = CommandLine {
            sudo: false,
            app: which::which("docker")
                .map_err(|e| anyhow::anyhow!("docker executable not found: {}", e))?
                .to_string_lossy()
                .to_string(),
            app_in_path: true,
            args: vec!["run".to_string(), "--rm".to_string()],
            env: BTreeMap::new(),
            temp_env_file: None,
        };

        // handle environment variables if present
        if !cmdline.env.is_empty() {
            let mut env_contents = String::new();
            for (key, value) in &cmdline.env {
                env_contents.push_str(&format!("{}={}\n", key, value));
            }

            // create temp file
            let temp_file = tempfile::NamedTempFile::new()
                .map_err(|e| anyhow::anyhow!("failed to create temp env file: {}", e))?;

            // write env vars
            std::fs::write(temp_file.path(), env_contents)
                .map_err(|e| anyhow::anyhow!("failed to write env file: {}", e))?;

            // add env-file arg
            dockerized
                .args
                .push(format!("--env-file={}", temp_file.path().display()));

            // keep temp file alive until docker run completes
            // (NamedTempFile deletes the file when dropped, so ownership
            // moves into the returned CommandLine)
            dockerized.temp_env_file = Some(temp_file);
        }

        // add volumes if any
        if let Some(volumes) = &self.volumes {
            for volume in volumes {
                dockerized.args.push(format!("-v{}", volume));
            }
        }

        // add any additional args
        if let Some(args) = &self.args {
            dockerized.args.extend(args.clone());
        }

        // add image
        dockerized.args.push(self.source.image().to_string());

        if self.preserve_app {
            // add the original app to the args
            dockerized.args.push(cmdline.app.clone());
        }

        // add the original arguments
        dockerized.args.extend(cmdline.args);

        Ok(dockerized)
    }

    /// Resolve (pull or build) the container image for the configured
    /// platform, delegating to the container source.
    pub async fn resolve(&self) -> anyhow::Result<()> {
        self.source.resolve(self.platform.clone()).await
    }
}

// TODO: add optional parsers to reduce output tokens

/// A callable function exposed by a robopage.
#[derive(Debug, Serialize, Deserialize)]
pub struct Function {
    pub description: String,
    pub parameters: BTreeMap<String, Parameter>,
150 | #[serde(skip_serializing_if = "Option::is_none")] 151 | pub container: Option<Container>, 152 | #[serde(flatten)] 153 | pub execution: runtime::ExecutionContext, 154 | } 155 | 156 | #[derive(Debug, Serialize, Deserialize)] 157 | pub struct Page { 158 | #[serde(skip_serializing_if = "String::is_empty")] 159 | #[serde(default = "String::new")] 160 | pub name: String, 161 | #[serde(skip_serializing_if = "Option::is_none")] 162 | pub description: Option<String>, 163 | pub functions: BTreeMap<String, Function>, 164 | #[serde(skip_serializing_if = "Vec::is_empty")] 165 | #[serde(default = "Vec::new")] 166 | pub categories: Vec<String>, 167 | } 168 | 169 | impl Page { 170 | fn preprocess(path: &Utf8PathBuf, text: String) -> anyhow::Result<String> { 171 | let path = path.canonicalize_utf8()?; 172 | let base_path = path.parent().unwrap(); 173 | 174 | Ok(text.replace("${cwd}", base_path.as_ref())) 175 | } 176 | 177 | pub fn from_path(path: &Utf8PathBuf) -> anyhow::Result<Self> { 178 | let text = std::fs::read_to_string(path) 179 | .map_err(|e| anyhow::anyhow!("error while reading {:?}: {}", path, e))?; 180 | let text = Self::preprocess(path, text) 181 | .map_err(|e| anyhow::anyhow!("error while preprocessing {:?}: {}", path, e))?; 182 | let page = serde_yaml::from_str(&text) 183 | .map_err(|e| anyhow::anyhow!("error while parsing {:?}: {}", path, e))?; 184 | Ok(page) 185 | } 186 | } 187 | 188 | #[derive(Debug)] 189 | pub struct Book { 190 | pub pages: BTreeMap<Utf8PathBuf, Page>, 191 | } 192 | 193 | impl Book { 194 | pub fn from_path(path: Utf8PathBuf, filter: Option<String>) -> anyhow::Result<Self> { 195 | log::debug!("Searching for pages in {:?}", path); 196 | let mut page_paths = Vec::new(); 197 | 198 | let path = Utf8PathBuf::from( 199 | shellexpand::full(path.as_str()) 200 | .map_err(|e| anyhow::anyhow!("failed to expand path: {}", e))? 
201 | .into_owned(), 202 | ) 203 | .canonicalize_utf8() 204 | .map_err(|e| anyhow::anyhow!("failed to canonicalize path: {}", e))?; 205 | 206 | log::debug!("canonicalized path: {:?}", path); 207 | 208 | if path.is_file() { 209 | log::debug!("path is a file"); 210 | eval_if_in_filter!(path, filter, page_paths.push(path.to_path_buf())); 211 | } else if path.is_dir() { 212 | log::debug!("path is a directory, searching for .yml files"); 213 | let glob_pattern = path.join("**/*.yml").as_str().to_string(); 214 | log::debug!("using glob pattern: {}", glob_pattern); 215 | 216 | for entry in glob(&glob_pattern)? { 217 | match entry { 218 | Ok(entry_path) => { 219 | log::debug!("found file: {:?}", entry_path); 220 | // skip files in hidden directories (starting with .) 221 | // but allow the root .robopages directory 222 | if let Ok(relative_path) = entry_path.strip_prefix(&path) { 223 | if relative_path.components().any(|component| { 224 | let comp_str = component.as_os_str().to_string_lossy(); 225 | comp_str.starts_with(".") && comp_str != "." && comp_str != ".." 
226 | }) { 227 | log::debug!("skipping hidden file/directory"); 228 | continue; 229 | } 230 | } 231 | 232 | if let Ok(utf8_path) = Utf8PathBuf::from_path_buf(entry_path) { 233 | eval_if_in_filter!(utf8_path, filter, { 234 | log::debug!("Adding path: {:?}", utf8_path); 235 | page_paths.push(utf8_path); 236 | }); 237 | } else { 238 | log::error!("failed to convert path to Utf8PathBuf"); 239 | } 240 | } 241 | Err(e) => { 242 | log::error!("error in glob: {}", e); 243 | } 244 | } 245 | } 246 | } 247 | 248 | log::debug!("found {} page paths", page_paths.len()); 249 | 250 | if page_paths.is_empty() { 251 | return Err(anyhow::anyhow!("no pages found in {:?}", path)); 252 | } 253 | 254 | log::debug!("loading {} pages from {:?}", page_paths.len(), path); 255 | 256 | let mut pages = BTreeMap::new(); 257 | let mut function_names = HashMap::new(); 258 | 259 | for page_path in page_paths { 260 | let page_path = page_path.canonicalize_utf8()?; 261 | let mut page = Page::from_path(&page_path)?; 262 | 263 | // if name is not set, use the file name 264 | if page.name.is_empty() { 265 | page.name = page_path.file_stem().unwrap().to_string(); 266 | } 267 | 268 | // if categories are not set, use the path components 269 | if page.categories.is_empty() { 270 | let path_buf = page_path.strip_prefix(&path)?; 271 | let parent = path_buf.parent(); 272 | 273 | if let Some(parent_path) = parent { 274 | page.categories = parent_path 275 | .components() 276 | .map(|c| c.as_str().to_string()) 277 | .collect(); 278 | 279 | // Skip empty categories 280 | page.categories.retain(|c| !c.is_empty()); 281 | } 282 | } 283 | 284 | // make sure function names are unique 285 | let mut renames = HashMap::new(); 286 | for func_name in page.functions.keys() { 287 | if function_names.contains_key(func_name) { 288 | let new_func_name = format!("{}_{}", &page.name, func_name); 289 | if !function_names.contains_key(&new_func_name) { 290 | log::warn!( 291 | "function name {} in {:?} is not unique, renaming to 
{}", 292 | func_name, 293 | page_path, 294 | new_func_name 295 | ); 296 | renames.insert(func_name.clone(), new_func_name.clone()); 297 | } else { 298 | return Err(anyhow::anyhow!( 299 | "function name {} in {:?} is not unique", 300 | func_name, 301 | page_path 302 | )); 303 | } 304 | } 305 | function_names.insert(func_name.clone(), 1); 306 | } 307 | 308 | for (old_name, new_name) in renames { 309 | let function = page.functions.remove(&old_name).unwrap(); 310 | page.functions.insert(new_name, function); 311 | } 312 | 313 | pages.insert(page_path, page); 314 | } 315 | 316 | Ok(Self { pages }) 317 | } 318 | 319 | pub fn size(&self) -> usize { 320 | self.pages.len() 321 | } 322 | 323 | pub fn get_function<'a>(&'a self, name: &str) -> anyhow::Result<runtime::FunctionRef<'a>> { 324 | for (page_path, page) in &self.pages { 325 | if let Some(function) = page.functions.get(name) { 326 | return Ok(runtime::FunctionRef { 327 | name: name.to_owned(), 328 | path: page_path, 329 | page, 330 | function, 331 | }); 332 | } 333 | } 334 | 335 | Err(anyhow::anyhow!("function {} not found", name)) 336 | } 337 | 338 | pub fn as_tools<'a, T>(&'a self, filter: Option<String>) -> Vec<T> 339 | where 340 | Vec<T>: std::convert::From<&'a Page>, 341 | { 342 | let mut tools = Vec::new(); 343 | 344 | for (page_path, page) in &self.pages { 345 | eval_if_in_filter!( 346 | page_path, 347 | filter, 348 | tools.extend(<&Page as Into<Vec<T>>>::into(page)) 349 | ); 350 | } 351 | 352 | tools 353 | } 354 | } 355 | 356 | #[cfg(test)] 357 | mod tests { 358 | use super::*; 359 | use camino::Utf8PathBuf; 360 | use flavors::openai; 361 | use std::collections::BTreeMap; 362 | 363 | fn create_test_book() -> Book { 364 | let mut pages = BTreeMap::new(); 365 | let mut page = Page { 366 | name: "Test Page".to_string(), 367 | description: Some("A test page".to_string()), 368 | categories: vec!["test".to_string()], 369 | functions: BTreeMap::new(), 370 | }; 371 | page.functions.insert( 372 | 
"test_function".to_string(), 373 | Function { 374 | description: "A test function".to_string(), 375 | parameters: BTreeMap::new(), 376 | execution: runtime::ExecutionContext::CommandLine(vec![ 377 | "echo".to_string(), 378 | "test".to_string(), 379 | ]), 380 | container: None, 381 | }, 382 | ); 383 | pages.insert(Utf8PathBuf::from("test_page"), page); 384 | Book { pages } 385 | } 386 | 387 | #[test] 388 | fn test_book_size() { 389 | let book = create_test_book(); 390 | assert_eq!(book.size(), 1); 391 | } 392 | 393 | #[test] 394 | fn test_get_existing_function() { 395 | let book = create_test_book(); 396 | let result = book.get_function("test_function"); 397 | assert!(result.is_ok()); 398 | let function_ref = result.unwrap(); 399 | assert_eq!(function_ref.name, "test_function"); 400 | assert_eq!(function_ref.path, &Utf8PathBuf::from("test_page")); 401 | } 402 | 403 | #[test] 404 | fn test_get_non_existing_function() { 405 | let book = create_test_book(); 406 | let result = book.get_function("non_existing_function"); 407 | assert!(result.is_err()); 408 | } 409 | 410 | #[test] 411 | fn test_as_tools_without_filter() { 412 | let book = create_test_book(); 413 | let tools = book.as_tools::<openai::Tool>(None); 414 | assert_eq!(tools.len(), 1); 415 | } 416 | 417 | #[test] 418 | fn test_as_tools_with_matching_filter() { 419 | let book = create_test_book(); 420 | let tools = book.as_tools::<openai::Tool>(Some("test_page".to_string())); 421 | assert_eq!(tools.len(), 1); 422 | } 423 | 424 | #[test] 425 | fn test_as_tools_with_non_matching_filter() { 426 | let book = create_test_book(); 427 | let tools = book.as_tools::<openai::Tool>(Some("non_existing_page".to_string())); 428 | assert_eq!(tools.len(), 0); 429 | } 430 | 431 | #[test] 432 | fn test_container_preserve_app() { 433 | let container = Container { 434 | source: ContainerSource::Image("test_image".to_string()), 435 | args: None, 436 | volumes: None, 437 | force: false, 438 | preserve_app: true, 439 | platform: None, 
440 | }; 441 | 442 | let original_cmdline = CommandLine { 443 | sudo: false, 444 | app: "original_app".to_string(), 445 | app_in_path: true, 446 | args: vec!["arg1".to_string(), "arg2".to_string()], 447 | env: BTreeMap::new(), 448 | temp_env_file: None, 449 | }; 450 | 451 | let wrapped_cmdline = container.wrap(original_cmdline).unwrap(); 452 | 453 | assert!(wrapped_cmdline.args.contains(&"original_app".to_string())); 454 | assert!(wrapped_cmdline.args.contains(&"arg1".to_string())); 455 | assert!(wrapped_cmdline.args.contains(&"arg2".to_string())); 456 | 457 | // check that the original app is inserted before its arguments 458 | let app_index = wrapped_cmdline 459 | .args 460 | .iter() 461 | .position(|arg| arg == "original_app") 462 | .unwrap(); 463 | let arg1_index = wrapped_cmdline 464 | .args 465 | .iter() 466 | .position(|arg| arg == "arg1") 467 | .unwrap(); 468 | let arg2_index = wrapped_cmdline 469 | .args 470 | .iter() 471 | .position(|arg| arg == "arg2") 472 | .unwrap(); 473 | assert!(app_index < arg1_index); 474 | assert!(app_index < arg2_index); 475 | } 476 | 477 | #[test] 478 | fn test_book_creation_with_duplicate_function_names() { 479 | use std::fs; 480 | use tempfile::TempDir; 481 | 482 | let temp_dir = TempDir::with_prefix("robopage-test-").unwrap(); 483 | let base_path = temp_dir.path(); 484 | 485 | fs::write( 486 | base_path.join("page1.yml"), 487 | r#" 488 | description: First page 489 | categories: [test] 490 | functions: 491 | duplicate_function: 492 | description: A function 493 | parameters: {} 494 | cmdline: [echo, test] 495 | "#, 496 | ) 497 | .unwrap(); 498 | 499 | fs::write( 500 | base_path.join("page2.yml"), 501 | r#" 502 | description: Second page 503 | categories: [test] 504 | functions: 505 | duplicate_function: 506 | description: Another function 507 | parameters: {} 508 | cmdline: [echo, test] 509 | "#, 510 | ) 511 | .unwrap(); 512 | 513 | let result = Book::from_path(Utf8PathBuf::from(base_path.to_str().unwrap()), None).unwrap(); 
514 | 515 | assert_eq!(result.size(), 2); 516 | assert!(result.get_function("duplicate_function").is_ok()); 517 | assert!(result.get_function("page2_duplicate_function").is_ok()); 518 | } 519 | 520 | #[test] 521 | fn test_book_skips_hidden_directories() { 522 | use std::fs; 523 | use tempfile::TempDir; 524 | 525 | let temp_dir = TempDir::with_prefix("robopage-test-").unwrap(); 526 | let base_path = temp_dir.path(); 527 | 528 | // Create a visible directory with a valid page 529 | fs::create_dir(base_path.join("visible")).unwrap(); 530 | fs::write( 531 | base_path.join("visible/page1.yml"), 532 | r#" 533 | description: Visible page 534 | categories: [test] 535 | functions: 536 | function1: 537 | description: A function 538 | parameters: {} 539 | cmdline: [echo, test] 540 | "#, 541 | ) 542 | .unwrap(); 543 | 544 | // Create a hidden directory with a page that should be skipped 545 | fs::create_dir(base_path.join(".hidden")).unwrap(); 546 | fs::write( 547 | base_path.join(".hidden/page2.yml"), 548 | r#" 549 | description: Hidden page 550 | categories: [test] 551 | functions: 552 | function2: 553 | description: Another function 554 | parameters: {} 555 | cmdline: [echo, test] 556 | "#, 557 | ) 558 | .unwrap(); 559 | 560 | let result = Book::from_path(Utf8PathBuf::from(base_path.to_str().unwrap()), None).unwrap(); 561 | 562 | // Should only find the page from the visible directory 563 | assert_eq!(result.size(), 1); 564 | assert!(result.get_function("function1").is_ok()); 565 | assert!(result.get_function("function2").is_err()); 566 | } 567 | 568 | #[test] 569 | fn test_wrap_with_env() { 570 | let env: BTreeMap<String, String> = { 571 | let mut env = BTreeMap::new(); 572 | env.insert("TEST_VAR".to_string(), "test_value".to_string()); 573 | env 574 | }; 575 | 576 | let command_line = 577 | CommandLine::from_vec_with_env(&vec!["echo".to_string(), "test".to_string()], env) 578 | .unwrap(); 579 | 580 | let container = Container { 581 | source: 
ContainerSource::Image("test_image".to_string()), 582 | args: None, 583 | volumes: None, 584 | force: false, 585 | preserve_app: true, 586 | platform: None, 587 | }; 588 | 589 | let wrapped = container.wrap(command_line).unwrap(); 590 | 591 | // Find the env-file argument 592 | let env_file_arg = wrapped 593 | .args 594 | .iter() 595 | .find(|arg| arg.starts_with("--env-file=")) 596 | .expect("--env-file argument not found") 597 | .clone(); 598 | 599 | // Extract the file path 600 | let env_file_path = env_file_arg 601 | .strip_prefix("--env-file=") 602 | .expect("Failed to strip --env-file= prefix"); 603 | 604 | let env_file = std::path::Path::new(env_file_path); 605 | assert!(env_file.exists()); 606 | 607 | // Read the env file contents 608 | let env_file_contents = std::fs::read_to_string(env_file).expect("Failed to read env file"); 609 | 610 | // Verify it contains the expected environment variable 611 | assert!(env_file_contents.contains("TEST_VAR=test_value")); 612 | 613 | // Clean up the env file 614 | drop(wrapped); 615 | assert!(!env_file.exists(), "env file was not deleted"); 616 | } 617 | } 618 | -------------------------------------------------------------------------------- /src/book/runtime.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use camino::Utf8PathBuf; 4 | use lazy_regex::{lazy_regex, Lazy}; 5 | use regex::Regex; 6 | use serde::{Deserialize, Serialize}; 7 | 8 | use crate::runtime::CommandLine; 9 | 10 | use super::{Function, Page}; 11 | 12 | static ARG_VALUE_PARSER: Lazy<Regex> = lazy_regex!(r"(?m)\$\{\s*([\w\.]+)(\s+or\s+([^}]+))?\}"); 13 | 14 | const ARG_EXPRESSION_ERROR: &str = 15 | "argument expression must be in the form of ${name} or ${name or default_value}"; 16 | 17 | #[allow(dead_code)] 18 | pub enum ExecutionFlavor { 19 | Shell(String), 20 | Sudo, 21 | Docker(String), 22 | Error(String), 23 | } 24 | 25 | impl ExecutionFlavor { 26 | pub fn shell(shell: 
String) -> Self { 27 | ExecutionFlavor::Shell(shell) 28 | } 29 | 30 | pub fn sudo() -> Self { 31 | ExecutionFlavor::Sudo 32 | } 33 | 34 | pub fn docker(image: String) -> Self { 35 | ExecutionFlavor::Docker(image) 36 | } 37 | 38 | pub fn error(message: String) -> Self { 39 | ExecutionFlavor::Error(message) 40 | } 41 | 42 | fn get_current_shell() -> String { 43 | let shell_name = std::env::var("SHELL") 44 | .map(|s| s.split('/').last().unwrap_or("unknown").to_string()) 45 | .unwrap_or_else(|_| "unknown".to_string()); 46 | 47 | if let Ok(shell_path) = which::which(shell_name.clone()) { 48 | shell_path.to_string_lossy().to_string() 49 | } else { 50 | shell_name 51 | } 52 | } 53 | 54 | pub fn for_function(function: &Function) -> anyhow::Result<ExecutionFlavor> { 55 | let mut has_container = false; 56 | if let Some(container) = function.container.as_ref() { 57 | has_container = true; 58 | if container.force { 59 | return Ok(ExecutionFlavor::docker( 60 | container.source.image().to_string(), 61 | )); 62 | } 63 | } 64 | 65 | match function.execution.get_command_line() { 66 | Ok(raw_parts) => { 67 | let cmdline = CommandLine::from_vec(&raw_parts)?; 68 | if cmdline.sudo { 69 | return Ok(if has_container { 70 | ExecutionFlavor::docker( 71 | function 72 | .container 73 | .as_ref() 74 | .unwrap() 75 | .source 76 | .image() 77 | .to_string(), 78 | ) 79 | } else { 80 | ExecutionFlavor::sudo() 81 | }); 82 | } else if !cmdline.app_in_path { 83 | return Ok(if has_container { 84 | ExecutionFlavor::docker( 85 | function 86 | .container 87 | .as_ref() 88 | .unwrap() 89 | .source 90 | .image() 91 | .to_string(), 92 | ) 93 | } else { 94 | ExecutionFlavor::error("app not in $PATH".to_string()) 95 | }); 96 | } else { 97 | return Ok(ExecutionFlavor::shell(Self::get_current_shell())); 98 | } 99 | } 100 | Err(e) => Err(e), 101 | } 102 | } 103 | } 104 | 105 | impl std::fmt::Display for ExecutionFlavor { 106 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 107 | let s = 
match self { 108 | Self::Shell(shell) => shell.to_string(), 109 | Self::Sudo => "sudo".to_string(), 110 | Self::Docker(image) => format!("docker {}", image), 111 | Self::Error(message) => message.to_string(), 112 | }; 113 | write!(f, "{}", s) 114 | } 115 | } 116 | 117 | #[derive(Debug, Serialize, Deserialize)] 118 | pub enum ExecutionContext { 119 | #[serde(rename = "cmdline")] 120 | CommandLine(Vec<String>), 121 | #[serde(rename = "platforms")] 122 | PlatformSpecific(BTreeMap<String, Vec<String>>), 123 | } 124 | 125 | impl ExecutionContext { 126 | pub fn get_command_line(&self) -> anyhow::Result<Vec<String>> { 127 | match self { 128 | Self::CommandLine(cmdline) => Ok(cmdline.clone()), 129 | Self::PlatformSpecific(platforms) => { 130 | if let Some(cmdline) = platforms.get(std::env::consts::OS) { 131 | Ok(cmdline.clone()) 132 | } else { 133 | Err(anyhow::anyhow!( 134 | "no command line for platform {}", 135 | std::env::consts::OS 136 | )) 137 | } 138 | } 139 | } 140 | } 141 | } 142 | 143 | #[allow(dead_code)] // we might need path and page in the future 144 | #[derive(Debug)] 145 | pub struct FunctionRef<'a> { 146 | pub name: String, 147 | pub path: &'a Utf8PathBuf, 148 | pub page: &'a Page, 149 | pub function: &'a Function, 150 | } 151 | 152 | impl<'a> FunctionRef<'a> { 153 | pub fn validate_arguments( 154 | &self, 155 | provided_arguments: &BTreeMap<String, String>, 156 | ) -> anyhow::Result<()> { 157 | // check for missing required arguments 158 | for (arg_name, param) in &self.function.parameters { 159 | if param.required && !provided_arguments.contains_key(arg_name) { 160 | return Err(anyhow::anyhow!( 161 | "missing required argument {} for function {}", 162 | arg_name, 163 | &self.name 164 | )); 165 | } 166 | } 167 | 168 | // check for extra arguments 169 | for arg_name in provided_arguments.keys() { 170 | if !self.function.parameters.contains_key(arg_name) { 171 | return Err(anyhow::anyhow!( 172 | "unknown argument {} for function {}", 173 | arg_name, 174 | 
&self.name 175 | )); 176 | } 177 | } 178 | 179 | Ok(()) 180 | } 181 | 182 | pub fn resolve_command_line( 183 | &self, 184 | arguments: &BTreeMap<String, String>, 185 | ) -> anyhow::Result<CommandLine> { 186 | // determine the command line to execute 187 | let command_line = self.function.execution.get_command_line()?; 188 | let mut env = BTreeMap::new(); 189 | 190 | // interpolate the arguments 191 | let command_line = { 192 | let mut interpolated = Vec::new(); 193 | for arg in command_line { 194 | if ARG_VALUE_PARSER.is_match(&arg) { 195 | // Process args with placeholders by replacing only the matched patterns 196 | let mut processed_arg = arg.clone(); 197 | 198 | // Find all matches and collect the replacements 199 | let mut replacements = Vec::new(); 200 | for caps in ARG_VALUE_PARSER.captures_iter(&arg) { 201 | let full_match = caps.get(0).unwrap().as_str(); 202 | let var_name = caps.get(1).ok_or(ARG_EXPRESSION_ERROR).map_err(| e| anyhow!(e))?.as_str(); 203 | let var_default = caps.get(3).map(|m| m.as_str()); 204 | 205 | let replacement = if var_name.starts_with("env.") || var_name. 
starts_with("ENV.") { 206 | let env_var_name = var_name.replace("env.", "").replace ("ENV.", ""); 207 | let env_var = std::env::var(&env_var_name); 208 | let env_var_value = if let Ok(value) = env_var { 209 | value 210 | } else if let Some(def) = var_default { 211 | def.to_string() 212 | } else { 213 | return Err(anyhow::anyhow!( 214 | "environment variable {} not set", 215 | env_var_name 216 | )); 217 | }; 218 | 219 | // add the environment variable to the command line for later use 220 | env.insert(env_var_name, env_var_value.to_owned()); 221 | 222 | env_var_value 223 | } else if let Some(value) = arguments.get(var_name) { 224 | if value.is_empty() { 225 | if let Some(def) = var_default { 226 | def.to_string() 227 | } else { 228 | value.to_string() 229 | } 230 | } else { 231 | value.to_string() 232 | } 233 | } else if let Some(default_value) = var_default { 234 | default_value.to_string() 235 | } else { 236 | return Err(anyhow::anyhow!("argument {} not provided", var_name)); 237 | }; 238 | 239 | replacements.push((full_match, replacement)); 240 | } 241 | 242 | // Apply all replacements to the arg string 243 | for (pattern, replacement) in replacements { 244 | processed_arg = processed_arg.replace(pattern, &replacement); 245 | } 246 | 247 | interpolated.push(processed_arg); 248 | } else { 249 | // For args without placeholders, use as-is 250 | interpolated.push(arg); 251 | } 252 | } 253 | interpolated 254 | }; 255 | // final parsing 256 | CommandLine::from_vec_with_env(&command_line, env) 257 | } 258 | } 259 | 260 | #[cfg(test)] 261 | mod tests { 262 | use super::*; 263 | use std::collections::BTreeMap; 264 | 265 | #[test] 266 | fn test_resolve_command_line_with_valid_arguments() { 267 | let function = Function { 268 | execution: ExecutionContext::CommandLine(vec![ 269 | "echo".to_string(), 270 | "${message}".to_string(), 271 | ]), 272 | description: "".to_string(), 273 | parameters: BTreeMap::new(), 274 | container: None, 275 | }; 276 | let resolver = FunctionRef 
{ 277 | function: &function, 278 | name: "test_function".to_string(), 279 | path: &Utf8PathBuf::from("test/path"), 280 | page: &Page { 281 | name: "test_page".to_string(), 282 | description: None, 283 | categories: Vec::new(), 284 | functions: BTreeMap::new(), 285 | }, 286 | }; 287 | let mut arguments = BTreeMap::new(); 288 | arguments.insert("message".to_string(), "Hello, World!".to_string()); 289 | 290 | let result = resolver.resolve_command_line(&arguments); 291 | assert!(result.is_ok()); 292 | let command_line = result.unwrap(); 293 | assert!(command_line.app.ends_with("/echo")); 294 | assert_eq!(command_line.args, vec!["Hello, World!"]); 295 | } 296 | 297 | #[test] 298 | fn test_resolve_command_line_with_default_value() { 299 | let function = Function { 300 | execution: ExecutionContext::CommandLine(vec![ 301 | "echo".to_string(), 302 | "${message or Default message}".to_string(), 303 | ]), 304 | description: "".to_string(), 305 | parameters: BTreeMap::new(), 306 | container: None, 307 | }; 308 | let resolver = FunctionRef { 309 | function: &function, 310 | name: "test_function".to_string(), 311 | path: &Utf8PathBuf::from("test/path"), 312 | page: &Page { 313 | name: "test_page".to_string(), 314 | description: None, 315 | categories: Vec::new(), 316 | functions: BTreeMap::new(), 317 | }, 318 | }; 319 | let arguments = BTreeMap::new(); 320 | 321 | let result = resolver.resolve_command_line(&arguments); 322 | assert!(result.is_ok()); 323 | let command_line = result.unwrap(); 324 | assert!(command_line.app.ends_with("/echo")); 325 | assert_eq!(command_line.args, vec!["Default message"]); 326 | } 327 | 328 | #[test] 329 | fn test_resolve_command_line_with_empty_value_and_default() { 330 | let function = Function { 331 | execution: ExecutionContext::CommandLine(vec![ 332 | "echo".to_string(), 333 | "${message or Default message}".to_string(), 334 | ]), 335 | description: "".to_string(), 336 | parameters: BTreeMap::new(), 337 | container: None, 338 | }; 339 | let 
resolver = FunctionRef { 340 | function: &function, 341 | name: "test_function".to_string(), 342 | path: &Utf8PathBuf::from("test/path"), 343 | page: &Page { 344 | name: "test_page".to_string(), 345 | description: None, 346 | categories: Vec::new(), 347 | functions: BTreeMap::new(), 348 | }, 349 | }; 350 | let mut arguments = BTreeMap::new(); 351 | arguments.insert("message".to_string(), "".to_string()); 352 | 353 | let result = resolver.resolve_command_line(&arguments); 354 | assert!(result.is_ok()); 355 | let command_line = result.unwrap(); 356 | assert!(command_line.app.ends_with("/echo")); 357 | assert_eq!(command_line.args, vec!["Default message"]); 358 | } 359 | 360 | #[test] 361 | fn test_resolve_command_line_with_missing_required_argument() { 362 | let function = Function { 363 | execution: ExecutionContext::CommandLine(vec![ 364 | "echo".to_string(), 365 | "${required_arg}".to_string(), 366 | ]), 367 | description: "".to_string(), 368 | parameters: BTreeMap::new(), 369 | container: None, 370 | }; 371 | let resolver = FunctionRef { 372 | function: &function, 373 | name: "test_function".to_string(), 374 | path: &Utf8PathBuf::from("test/path"), 375 | page: &Page { 376 | name: "test_page".to_string(), 377 | description: None, 378 | categories: Vec::new(), 379 | functions: BTreeMap::new(), 380 | }, 381 | }; 382 | let arguments = BTreeMap::new(); 383 | 384 | let result = resolver.resolve_command_line(&arguments); 385 | assert!(result.is_err()); 386 | assert_eq!( 387 | result.unwrap_err().to_string(), 388 | "argument required_arg not provided" 389 | ); 390 | } 391 | 392 | #[test] 393 | fn test_resolve_command_line_with_multiple_arguments() { 394 | let function = Function { 395 | execution: ExecutionContext::CommandLine(vec![ 396 | "echo".to_string(), 397 | "${arg1}".to_string(), 398 | "${arg2 or default}".to_string(), 399 | "literal".to_string(), 400 | ]), 401 | description: "".to_string(), 402 | parameters: BTreeMap::new(), 403 | container: None, 404 | }; 405 | 
let resolver = FunctionRef { 406 | function: &function, 407 | name: "test_function".to_string(), 408 | path: &Utf8PathBuf::from("test/path"), 409 | page: &Page { 410 | name: "test_page".to_string(), 411 | description: None, 412 | categories: Vec::new(), 413 | functions: BTreeMap::new(), 414 | }, 415 | }; 416 | let mut arguments = BTreeMap::new(); 417 | arguments.insert("arg1".to_string(), "value1".to_string()); 418 | 419 | let result = resolver.resolve_command_line(&arguments); 420 | assert!(result.is_ok()); 421 | let command_line = result.unwrap(); 422 | assert!(command_line.app.ends_with("/echo")); 423 | assert_eq!(command_line.args, vec!["value1", "default", "literal"]); 424 | } 425 | 426 | #[test] 427 | fn test_resolve_command_line_with_env_variables() { 428 | std::env::set_var("TEST_VAR", "test_value"); 429 | 430 | let function = Function { 431 | execution: ExecutionContext::CommandLine(vec![ 432 | "echo".to_string(), 433 | "${env.TEST_VAR}".to_string(), 434 | "${ENV.TEST_VAR}".to_string(), 435 | ]), 436 | description: "".to_string(), 437 | parameters: BTreeMap::new(), 438 | container: None, 439 | }; 440 | let resolver = FunctionRef { 441 | function: &function, 442 | name: "test_function".to_string(), 443 | path: &Utf8PathBuf::from("test/path"), 444 | page: &Page { 445 | name: "test_page".to_string(), 446 | description: None, 447 | categories: Vec::new(), 448 | functions: BTreeMap::new(), 449 | }, 450 | }; 451 | let arguments = BTreeMap::new(); 452 | 453 | let result = resolver.resolve_command_line(&arguments); 454 | assert!(result.is_ok()); 455 | let command_line = result.unwrap(); 456 | assert!(command_line.app.ends_with("/echo")); 457 | assert_eq!(command_line.args, vec!["test_value", "test_value"]); 458 | 459 | std::env::remove_var("TEST_VAR"); 460 | } 461 | 462 | #[test] 463 | fn test_resolve_command_line_with_undefined_env_variable() { 464 | let function = Function { 465 | execution: ExecutionContext::CommandLine(vec![ 466 | "echo".to_string(), 467 | 
"${env.UNDEFINED_VAR}".to_string(), 468 | ]), 469 | description: "".to_string(), 470 | parameters: BTreeMap::new(), 471 | container: None, 472 | }; 473 | let resolver = FunctionRef { 474 | function: &function, 475 | name: "test_function".to_string(), 476 | path: &Utf8PathBuf::from("test/path"), 477 | page: &Page { 478 | name: "test_page".to_string(), 479 | description: None, 480 | categories: Vec::new(), 481 | functions: BTreeMap::new(), 482 | }, 483 | }; 484 | let arguments = BTreeMap::new(); 485 | 486 | let result = resolver.resolve_command_line(&arguments); 487 | assert!(result.is_err()); 488 | assert_eq!( 489 | result.unwrap_err().to_string(), 490 | "environment variable UNDEFINED_VAR not set" 491 | ); 492 | } 493 | 494 | #[test] 495 | fn test_resolve_command_line_with_undefined_env_variable_with_default() { 496 | let function = Function { 497 | execution: ExecutionContext::CommandLine(vec![ 498 | "echo".to_string(), 499 | "${env.UNDEFINED_VAR or default_value}".to_string(), 500 | ]), 501 | description: "".to_string(), 502 | parameters: BTreeMap::new(), 503 | container: None, 504 | }; 505 | let resolver = FunctionRef { 506 | function: &function, 507 | name: "test_function".to_string(), 508 | path: &Utf8PathBuf::from("test/path"), 509 | page: &Page { 510 | name: "test_page".to_string(), 511 | description: None, 512 | categories: Vec::new(), 513 | functions: BTreeMap::new(), 514 | }, 515 | }; 516 | let arguments = BTreeMap::new(); 517 | 518 | let result = resolver.resolve_command_line(&arguments); 519 | assert!(result.is_ok()); 520 | let command_line = result.unwrap(); 521 | assert!(command_line.app.ends_with("/echo")); 522 | assert_eq!(command_line.args, vec!["default_value"]); 523 | } 524 | } 525 | -------------------------------------------------------------------------------- /src/book/templates.rs: -------------------------------------------------------------------------------- 1 | use clap::ValueEnum; 2 | use include_dir::{include_dir, Dir}; 3 | use 
lazy_regex::{lazy_regex, Lazy}; 4 | use regex::Regex; 5 | 6 | static TEMPLATES: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/src/book/templates"); 7 | 8 | static ASSETS_REF_PARSER: Lazy<Regex> = lazy_regex!(r"(?m)\$\{cwd\}/(.+)"); 9 | 10 | pub(crate) struct TemplateData { 11 | pub(crate) name: Option<String>, 12 | pub(crate) data: &'static str, 13 | } 14 | 15 | #[derive(Clone, Debug, ValueEnum)] 16 | pub(crate) enum Template { 17 | Basic, 18 | DockerImage, 19 | DockerBuild, 20 | } 21 | 22 | impl Template { 23 | pub fn get_data(&self) -> anyhow::Result<Vec<TemplateData>> { 24 | let base_name = self.to_string().to_lowercase(); 25 | let template_name = format!("{}.yml", &base_name); 26 | let template_data = TEMPLATES 27 | .get_file(&template_name) 28 | .ok_or_else(|| anyhow::anyhow!("template not found: {}", template_name))? 29 | .contents_utf8() 30 | .ok_or_else(|| { 31 | anyhow::anyhow!("failed to read template file as utf8: {}", template_name) 32 | })?; 33 | 34 | let mut parts = vec![TemplateData { 35 | name: None, 36 | data: template_data, 37 | }]; 38 | 39 | // check if the template references any assets in ${cwd} 40 | let caps = ASSETS_REF_PARSER.captures(template_data); 41 | if let Some(caps) = caps { 42 | let asset_name = caps.get(1).unwrap().as_str(); 43 | let asset = TEMPLATES.get_file(asset_name).unwrap(); 44 | parts.push(TemplateData { 45 | name: Some(asset_name.to_string()), 46 | data: asset.contents_utf8().unwrap(), 47 | }); 48 | } 49 | 50 | Ok(parts) 51 | } 52 | } 53 | 54 | impl std::fmt::Display for Template { 55 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 56 | let s = match self { 57 | Template::Basic => "basic".to_string(), 58 | Template::DockerImage => "docker_image".to_string(), 59 | Template::DockerBuild => "docker_build".to_string(), 60 | }; 61 | write!(f, "{}", s) 62 | } 63 | } 64 | 65 | #[cfg(test)] 66 | mod tests { 67 | use super::*; 68 | use crate::book::Page; 69 | 70 | #[test] 71 | fn test_templates_deserialize() { 
72 | for entry in TEMPLATES.files() { 73 | let template_name = entry.path().file_stem().unwrap().to_str().unwrap(); 74 | if template_name.ends_with(".yml") { 75 | let yaml_content = entry.contents_utf8().unwrap(); 76 | let page: Page = serde_yaml::from_str(yaml_content).unwrap_or_else(|e| { 77 | panic!( 78 | "failed to deserialize template '{}': {:?}", 79 | template_name, e 80 | ) 81 | }); 82 | 83 | assert!( 84 | !page.functions.is_empty(), 85 | "template '{}' has no functions", 86 | template_name 87 | ); 88 | } 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /src/book/templates/basic.yml: -------------------------------------------------------------------------------- 1 | description: You can use this for a description. 2 | 3 | # declare one or more functions per page 4 | functions: 5 | # the function name 6 | example_function_name: 7 | description: This is an example function describing a command line. 8 | # function parameters 9 | parameters: 10 | # the parameter name 11 | foo: 12 | # the parameter type 13 | type: string 14 | description: An example paramter named foo. 15 | # whether the parameter is required, default to true 16 | # required: false 17 | # optional examples of valid values 18 | examples: 19 | - bar 20 | - baz 21 | 22 | # the command line to execute 23 | cmdline: 24 | - echo 25 | # valid syntax for parameters interpolation: 26 | # ${parameter_name} 27 | # ${parameter_name or default_value} 28 | - ${foo} 29 | -------------------------------------------------------------------------------- /src/book/templates/docker_build.yml: -------------------------------------------------------------------------------- 1 | description: An example using a docker container built locally. 2 | 3 | # declare one or more functions per page 4 | functions: 5 | # the function name 6 | nmap_tcp_ports_syn_scan: 7 | description: Scan one or more targets for the list of common TCP ports using a TCP SYN scan. 
8 | # function parameters 9 | parameters: 10 | # the parameter name 11 | target: 12 | # the parameter type 13 | type: string 14 | description: The IP address, CIDR, range or hostname to scan. 15 | # optional examples of valid values 16 | examples: 17 | - 192.168.1.1 18 | - 192.168.1.0/24 19 | - scanme.nmap.org 20 | 21 | # the container to use 22 | container: 23 | # normally, if the binary specificed in cmdline is found in $PATH, 24 | # it will be used instead of the container binary 25 | # by setting force to true, the container image will be used instead 26 | force: true 27 | # specify how to build the container image 28 | build: 29 | # path to the Dockerfile, ${cwd} is the directory of the robopage.yml file 30 | path: ${cwd}/nmap.Dockerfile 31 | # how to tag the image 32 | name: nmap_local 33 | 34 | # optional volumes to mount 35 | # volumes: 36 | # - /var/run/docker.sock:/var/run/docker.sock 37 | # optional container arguments 38 | args: 39 | # share the same network as the host 40 | - --net=host 41 | 42 | # the command line to execute 43 | cmdline: 44 | # sudo is automatically removed if running as container 45 | - sudo 46 | - nmap 47 | - -sS 48 | - -Pn 49 | - -A 50 | - ${target} 51 | -------------------------------------------------------------------------------- /src/book/templates/docker_image.yml: -------------------------------------------------------------------------------- 1 | description: An example using a docker image. 2 | 3 | # declare one or more functions per page 4 | functions: 5 | # the function name 6 | http_get: 7 | description: Fetch a page from the web. 8 | # function parameters 9 | parameters: 10 | # the parameter name 11 | url: 12 | # the parameter type 13 | type: string 14 | description: The URL of the page to fetch. 
15 | examples: 16 | - https://example.com 17 | 18 | # the container to use 19 | container: 20 | # normally, if the binary specificed in cmdline is found in $PATH, 21 | # it will be used instead of the container binary 22 | # by setting force to true, the container image will be used instead 23 | force: true 24 | # the container image to use 25 | image: alpine/curl 26 | # optional volumes to mount 27 | # volumes: 28 | # - /var/run/docker.sock:/var/run/docker.sock 29 | # optional container arguments 30 | args: 31 | # share the same network as the host 32 | - --net=host 33 | 34 | # the command line to execute 35 | cmdline: 36 | - curl 37 | - -s 38 | - -L 39 | - ${url} 40 | -------------------------------------------------------------------------------- /src/book/templates/nmap.Dockerfile: -------------------------------------------------------------------------------- 1 | # credits: https://github.com/instrumentisto/nmap-docker-image/blob/main/Dockerfile 2 | 3 | # https://hub.docker.com/_/alpine 4 | FROM alpine:3.22 5 | 6 | ARG nmap_ver=7.95 7 | ARG build_rev=4 8 | 9 | 10 | # Install dependencies 11 | RUN apk add --update --no-cache \ 12 | ca-certificates \ 13 | libpcap \ 14 | libgcc libstdc++ \ 15 | libssl3 \ 16 | && update-ca-certificates \ 17 | && rm -rf /var/cache/apk/* 18 | 19 | 20 | # Compile and install Nmap from sources 21 | RUN apk add --update --no-cache --virtual .build-deps \ 22 | libpcap-dev lua-dev linux-headers openssl-dev \ 23 | autoconf automake g++ libtool make \ 24 | curl \ 25 | \ 26 | && curl -fL -o /tmp/nmap.tar.bz2 \ 27 | https://nmap.org/dist/nmap-${nmap_ver}.tar.bz2 \ 28 | && tar -xjf /tmp/nmap.tar.bz2 -C /tmp \ 29 | && cd /tmp/nmap* \ 30 | && ./configure \ 31 | --prefix=/usr \ 32 | --sysconfdir=/etc \ 33 | --mandir=/usr/share/man \ 34 | --infodir=/usr/share/info \ 35 | --without-zenmap \ 36 | --without-nmap-update \ 37 | --with-openssl=/usr/lib \ 38 | --with-liblua=/usr/include \ 39 | && make \ 40 | && make install \ 41 | \ 42 | && apk del 
.build-deps \ 43 | && rm -rf /var/cache/apk/* \ 44 | /tmp/nmap* 45 | 46 | 47 | ENTRYPOINT ["/usr/bin/nmap"] -------------------------------------------------------------------------------- /src/cli/create.rs: -------------------------------------------------------------------------------- 1 | use super::CreateArgs; 2 | 3 | pub(crate) async fn create(args: CreateArgs) -> anyhow::Result<()> { 4 | if args.name.exists() { 5 | return Err(anyhow::anyhow!("{:?} already exists", args.name)); 6 | } 7 | 8 | for parts in args.template.get_data()? { 9 | if let Some(part_name) = parts.name { 10 | let asset = args.name.parent().unwrap().join(part_name); 11 | log::info!("creating asset {:?}", asset.to_string()); 12 | 13 | std::fs::write(asset, parts.data)?; 14 | } else { 15 | log::info!( 16 | "creating {:?} from template {}", 17 | &args.name, 18 | args.template.to_string() 19 | ); 20 | 21 | std::fs::write(&args.name, parts.data)?; 22 | } 23 | } 24 | 25 | Ok(()) 26 | } 27 | -------------------------------------------------------------------------------- /src/cli/install.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fs::File, 3 | io::{self, Write}, 4 | path::{Path, PathBuf}, 5 | }; 6 | 7 | use camino::Utf8PathBuf; 8 | 9 | use super::InstallArgs; 10 | 11 | fn extract_archive_without_intermediate_folder( 12 | mut archive: zip::ZipArchive<File>, 13 | target_path: &Path, 14 | ) -> io::Result<()> { 15 | // Iterate through each entry in the ZIP archive 16 | for i in 0..archive.len() { 17 | let mut file_in_zip = archive.by_index(i)?; 18 | let file_path = file_in_zip.mangled_name(); 19 | 20 | // Skip directories by default 21 | if file_in_zip.is_dir() { 22 | continue; 23 | } 24 | 25 | // Strip the first component of the file path (e.g., intermediate-folder-name) 26 | let stripped_path = file_path.iter().skip(1).collect::<PathBuf>(); 27 | let target_file_path = target_path.join(stripped_path); 28 | 29 | // Create parent directories 
as needed 30 | if let Some(parent) = target_file_path.parent() { 31 | std::fs::create_dir_all(parent)?; 32 | } 33 | 34 | // Write the file to the target path 35 | let mut outfile = File::create(&target_file_path)?; 36 | io::copy(&mut file_in_zip, &mut outfile)?; 37 | } 38 | 39 | Ok(()) 40 | } 41 | 42 | fn extract_archive(archive_path: &Path, target_path: &Path) -> io::Result<()> { 43 | log::info!("extracting to {:?}", target_path); 44 | 45 | let file = File::open(archive_path)?; 46 | let mut archive = zip::ZipArchive::new(file)?; 47 | 48 | // check if all files share the same prefix 49 | let file_names: Vec<_> = archive.file_names().collect(); 50 | let mut single_root_folder = false; 51 | 52 | if !file_names.is_empty() { 53 | if let Some(first_name) = file_names.first() { 54 | if let Some(first_prefix) = first_name.split('/').next() { 55 | single_root_folder = file_names 56 | .iter() 57 | .all(|name| name.split('/').next() == Some(first_prefix)) 58 | } 59 | } 60 | } 61 | 62 | if single_root_folder { 63 | // if the archive comes from a github repository, it will have a single root folder 64 | // so we can extract it without the intermediate folder 65 | extract_archive_without_intermediate_folder(archive, target_path)?; 66 | } else { 67 | // otherwise, we extract the archive as it is 68 | archive.extract(target_path)?; 69 | } 70 | 71 | Ok(()) 72 | } 73 | 74 | pub(crate) async fn install(args: InstallArgs) -> anyhow::Result<()> { 75 | let path = Utf8PathBuf::from( 76 | shellexpand::full(args.path.as_str()) 77 | .map_err(|e| anyhow::anyhow!("failed to expand path: {}", e))? 
78 | .into_owned(), 79 | ); 80 | if path.exists() { 81 | return Err(anyhow::anyhow!("{:?} already exists", path)); 82 | } 83 | 84 | if args.source.ends_with(".zip") { 85 | // install from zip archive 86 | log::info!("extracting archive {} to {:?}", &args.source, &path); 87 | let mut zip = zip::ZipArchive::new(std::fs::File::open(&args.source)?)?; 88 | zip.extract(path)?; 89 | } else { 90 | // install from github repository 91 | let source = if !args.source.contains("://") { 92 | format!( 93 | "https://github.com/{}/archive/refs/heads/main.zip", 94 | &args.source 95 | ) 96 | } else { 97 | format!("{}/archive/refs/heads/main.zip", &args.source) 98 | }; 99 | 100 | log::info!("downloading robopages from {} ...", source); 101 | 102 | let temp_file = tempfile::NamedTempFile::new()?; 103 | let mut response = reqwest::get(&source).await?; 104 | let mut file = std::fs::File::create(temp_file.path())?; 105 | 106 | while let Some(chunk) = response.chunk().await? { 107 | file.write_all(&chunk)?; 108 | } 109 | 110 | extract_archive(temp_file.path(), path.as_std_path())?; 111 | } 112 | 113 | Ok(()) 114 | } 115 | -------------------------------------------------------------------------------- /src/cli/mod.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | use camino::Utf8PathBuf; 4 | use clap::{Args, Parser, Subcommand}; 5 | 6 | mod create; 7 | mod install; 8 | mod run; 9 | mod serve; 10 | mod validate; 11 | mod view; 12 | 13 | pub(crate) use create::*; 14 | pub(crate) use install::*; 15 | pub(crate) use run::*; 16 | pub(crate) use serve::*; 17 | pub(crate) use validate::*; 18 | pub(crate) use view::*; 19 | 20 | use crate::book::templates::Template; 21 | 22 | const DEFAULT_REPO: &str = "dreadnode/robopages"; 23 | const DEFAULT_PATH: &str = "~/.robopages/"; 24 | 25 | #[derive(Debug, Parser)] 26 | #[clap(name = "robopages", about, version)] 27 | pub(crate) struct Arguments { 28 | #[clap(subcommand)] 29 | pub 
command: Command, 30 | } 31 | 32 | #[derive(Debug, Subcommand)] 33 | pub(crate) enum Command { 34 | /// Install robopages from a given repository or ZIP archive. 35 | Install(InstallArgs), 36 | /// Create a new robopage file. 37 | Create(CreateArgs), 38 | /// View currently installed robopages. 39 | View(ViewArgs), 40 | /// Serve the robopages as a local API. 41 | Serve(ServeArgs), 42 | /// Execute a function from the robopages. 43 | Run(RunArgs), 44 | /// Validate a robopage YML file. 45 | Validate(ValidateArgs), 46 | } 47 | 48 | #[derive(Debug, Args)] 49 | pub(crate) struct InstallArgs { 50 | /// Repository user/name, URL or ZIP archive path. 51 | #[clap(long, short = 'S', default_value = DEFAULT_REPO)] 52 | source: String, 53 | /// Destination path. 54 | #[clap(long, short = 'P', default_value = DEFAULT_PATH)] 55 | path: Utf8PathBuf, 56 | } 57 | 58 | #[derive(Debug, Args)] 59 | pub(crate) struct CreateArgs { 60 | /// Template name. 61 | #[clap(long, short = 'T', value_enum, default_value = "basic")] 62 | template: Template, 63 | /// File name. 64 | #[clap(long, short = 'N', default_value = "robopage.yml")] 65 | name: Utf8PathBuf, 66 | } 67 | 68 | #[derive(Debug, Args)] 69 | pub(crate) struct ViewArgs { 70 | /// Base path to search for robopages. 71 | #[clap(long, short = 'P', default_value = DEFAULT_PATH)] 72 | path: Utf8PathBuf, 73 | /// Filter results by this string. 74 | #[clap(long, short = 'F')] 75 | filter: Option<String>, 76 | } 77 | 78 | #[derive(Debug, Args)] 79 | pub(crate) struct ServeArgs { 80 | /// Base path to search for robopages. 81 | #[clap(long, short = 'P', default_value = DEFAULT_PATH)] 82 | path: Utf8PathBuf, 83 | /// Filter results by this string. 84 | #[clap(long, short = 'F')] 85 | filter: Option<String>, 86 | /// Address to bind to. 87 | #[clap(long, short = 'A', default_value = "127.0.0.1:8000")] 88 | address: String, 89 | /// If set, the tool will not attempt to pre build and pull all containers. 
90 | #[clap(long)] 91 | lazy: bool, 92 | /// Maximum number of parallel calls to execute. Leave to 0 to use all available cores. 93 | #[clap(long, default_value = "0")] 94 | workers: usize, 95 | /// Optional SSH connection string, if set commands will be executed over SSH on the given host. 96 | #[clap(long)] 97 | ssh: Option<String>, 98 | /// SSH key to use for authentication if --ssh is set. 99 | #[clap(long, default_value = "~/.ssh/id_ed25519")] 100 | ssh_key: String, 101 | /// SSH passphrase to unlock the key. 102 | #[clap(long)] 103 | ssh_key_passphrase: Option<String>, 104 | } 105 | 106 | #[derive(Debug, Args)] 107 | pub(crate) struct RunArgs { 108 | /// Base path to search for robopages. 109 | #[clap(long, short = 'P', default_value = DEFAULT_PATH)] 110 | path: Utf8PathBuf, 111 | /// Function name. 112 | #[clap(long, short = 'F')] 113 | function: String, 114 | /// Define one or more variables as key=value pairs. 115 | #[clap(long = "define", short = 'D', value_parser = parse_key_val::<String, String>, number_of_values = 1)] 116 | defines: Vec<(String, String)>, 117 | /// Execute the function without user interaction. 118 | #[clap(long, short = 'A')] 119 | auto: bool, 120 | /// Optional SSH connection string, if set commands will be executed over SSH on the given host. 121 | #[clap(long)] 122 | ssh: Option<String>, 123 | /// SSH key to use for authentication if --ssh is set. 124 | #[clap(long, default_value = "~/.ssh/id_ed25519")] 125 | ssh_key: String, 126 | /// SSH passphrase to unlock the key. 127 | #[clap(long)] 128 | ssh_key_passphrase: Option<String>, 129 | } 130 | 131 | #[derive(Debug, Args)] 132 | pub(crate) struct ValidateArgs { 133 | /// Path to the robopage YML file or files to validate. 134 | #[clap(long, short = 'P', default_value = DEFAULT_PATH)] 135 | path: Utf8PathBuf, 136 | /// Do not attempt to pull or build containers. 
137 | #[clap(long)] 138 | skip_docker: bool, 139 | } 140 | 141 | /// Parse a single key-value pair 142 | fn parse_key_val<T, U>(s: &str) -> Result<(T, U), Box<dyn Error + Send + Sync + 'static>> 143 | where 144 | T: std::str::FromStr, 145 | T::Err: Error + Send + Sync + 'static, 146 | U: std::str::FromStr, 147 | U::Err: Error + Send + Sync + 'static, 148 | { 149 | let pos = s 150 | .find('=') 151 | .ok_or_else(|| format!("invalid KEY=value: no `=` found in `{s}`"))?; 152 | Ok((s[..pos].parse()?, s[pos + 1..].parse()?)) 153 | } 154 | -------------------------------------------------------------------------------- /src/cli/run.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::BTreeMap, sync::Arc}; 2 | 3 | use crate::{ 4 | book::{flavors::openai, Book}, 5 | runtime::{self, prompt, ssh::SSHConnection}, 6 | }; 7 | 8 | use super::RunArgs; 9 | 10 | pub(crate) async fn run(args: RunArgs) -> anyhow::Result<()> { 11 | // parse and validate SSH connection string if provided 12 | let ssh = if let Some(ssh_str) = args.ssh { 13 | // parse 14 | let conn = SSHConnection::from_str(&ssh_str, &args.ssh_key, args.ssh_key_passphrase)?; 15 | // make sure we can connect 16 | conn.test_connection().await?; 17 | 18 | Some(conn) 19 | } else { 20 | None 21 | }; 22 | 23 | let book = Arc::new(Book::from_path(args.path, None)?); 24 | let function = book.get_function(&args.function)?; 25 | 26 | let mut arguments = BTreeMap::new(); 27 | 28 | // convert defines to BTreeMap 29 | let defines: BTreeMap<String, String> = args.defines.into_iter().collect(); 30 | 31 | for arg_name in function.function.parameters.keys() { 32 | if let Some(value) = defines.get(arg_name) { 33 | arguments.insert(arg_name.to_string(), value.to_string()); 34 | } else { 35 | arguments.insert( 36 | arg_name.to_string(), 37 | prompt::ask( 38 | &format!(">> enter value for argument '{}': ", arg_name), 39 | &[], 40 | )?, 41 | ); 42 | } 43 | } 44 | 45 | let call = 
openai::Call { 46 | id: None, 47 | function: openai::FunctionCall { 48 | name: args.function, 49 | arguments, 50 | }, 51 | call_type: "function".to_string(), 52 | }; 53 | 54 | let result = runtime::execute_call(ssh, !args.auto, 10, book, call).await?; 55 | 56 | println!("\n{}", result.content); 57 | 58 | Ok(()) 59 | } 60 | -------------------------------------------------------------------------------- /src/cli/serve.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::sync::Arc; 3 | 4 | use actix_cors::Cors; 5 | use actix_web::web; 6 | use actix_web::App; 7 | use actix_web::HttpResponse; 8 | use actix_web::HttpServer; 9 | 10 | use crate::book::flavors::rigging; 11 | use crate::book::flavors::Flavor; 12 | use crate::book::{ 13 | flavors::{nerve, openai}, 14 | Book, 15 | }; 16 | use crate::runtime; 17 | use crate::runtime::ssh::SSHConnection; 18 | 19 | use super::ServeArgs; 20 | 21 | struct AppState { 22 | max_running_tasks: usize, 23 | book: Arc<Book>, 24 | ssh: Option<SSHConnection>, 25 | } 26 | 27 | async fn not_found() -> actix_web::Result<HttpResponse> { 28 | Ok(HttpResponse::NotFound().body("nope")) 29 | } 30 | 31 | async fn serve_pages_impl( 32 | state: web::Data<Arc<AppState>>, 33 | query: web::Query<HashMap<String, String>>, 34 | filter: Option<String>, 35 | ) -> actix_web::Result<HttpResponse> { 36 | let flavor = Flavor::from_map_or_default(&query) 37 | .map_err(|e| actix_web::error::ErrorBadRequest(e.to_string()))?; 38 | 39 | match flavor { 40 | Flavor::Nerve => { 41 | Ok(HttpResponse::Ok().json(state.book.as_tools::<nerve::FunctionGroup>(filter))) 42 | } 43 | Flavor::Rigging => { 44 | Ok(HttpResponse::Ok().json(state.book.as_tools::<rigging::Tool>(filter))) 45 | } 46 | // default to openai 47 | _ => Ok(HttpResponse::Ok().json(state.book.as_tools::<openai::Tool>(filter))), 48 | } 49 | } 50 | 51 | async fn serve_pages_with_filter( 52 | state: web::Data<Arc<AppState>>, 53 | 
query: web::Query<HashMap<String, String>>, 54 | actix_web_lab::extract::Path((filter,)): actix_web_lab::extract::Path<(String,)>, 55 | ) -> actix_web::Result<HttpResponse> { 56 | serve_pages_impl(state, query, Some(filter)).await 57 | } 58 | 59 | async fn serve_pages( 60 | state: web::Data<Arc<AppState>>, 61 | query: web::Query<HashMap<String, String>>, 62 | ) -> actix_web::Result<HttpResponse> { 63 | serve_pages_impl(state, query, None).await 64 | } 65 | 66 | async fn process_calls( 67 | state: web::Data<Arc<AppState>>, 68 | calls: web::Json<Vec<openai::Call>>, 69 | ) -> actix_web::Result<HttpResponse> { 70 | match runtime::execute( 71 | state.ssh.clone(), 72 | false, 73 | state.book.clone(), 74 | calls.0, 75 | state.max_running_tasks, 76 | ) 77 | .await 78 | { 79 | Ok(resp) => Ok(HttpResponse::Ok().json(resp)), 80 | Err(e) => Err(actix_web::error::ErrorBadRequest(e)), 81 | } 82 | } 83 | 84 | pub(crate) async fn serve(args: ServeArgs) -> anyhow::Result<()> { 85 | if !args.address.contains("127.0.0.1:") && !args.address.contains("localhost:") { 86 | log::warn!("external address specified, this is an unsafe configuration as no authentication is provided"); 87 | } 88 | 89 | // parse and validate SSH connection string if provided 90 | let ssh = if let Some(ssh_str) = args.ssh { 91 | // parse 92 | let conn = SSHConnection::from_str(&ssh_str, &args.ssh_key, args.ssh_key_passphrase)?; 93 | // make sure we can connect 94 | conn.test_connection().await?; 95 | 96 | Some(conn) 97 | } else { 98 | None 99 | }; 100 | 101 | let book = Arc::new(Book::from_path(args.path, args.filter)?); 102 | if !args.lazy { 103 | for page in book.pages.values() { 104 | for (func_name, func) in page.functions.iter() { 105 | if let Some(container) = &func.container { 106 | log::info!("pre building container for function {} ...", func_name); 107 | container.resolve().await?; 108 | } 109 | } 110 | } 111 | } 112 | 113 | let max_running_tasks = if args.workers == 0 { 114 | 
std::thread::available_parallelism()?.into() 115 | } else { 116 | args.workers 117 | }; 118 | 119 | log::info!( 120 | "serving {} pages on http://{} with {max_running_tasks} max running tasks", 121 | book.size(), 122 | &args.address, 123 | ); 124 | 125 | let app_state = Arc::new(AppState { 126 | max_running_tasks, 127 | book, 128 | ssh, 129 | }); 130 | 131 | HttpServer::new(move || { 132 | let cors = Cors::default().max_age(3600); 133 | 134 | App::new() 135 | .wrap(cors) 136 | .app_data(web::Data::new(app_state.clone())) 137 | .route("/process", web::post().to(process_calls)) 138 | // TODO: is this the best way to do this? can't find a clean way to have an optional path parameter 139 | .service(web::resource("/{filter}").route(web::get().to(serve_pages_with_filter))) 140 | .service(web::resource("/").route(web::get().to(serve_pages))) 141 | .default_service(web::route().to(not_found)) 142 | .wrap(actix_web::middleware::Logger::default()) 143 | }) 144 | .bind(&args.address) 145 | .map_err(|e| anyhow!(e))? 146 | .run() 147 | .await 148 | .map_err(|e| anyhow!(e)) 149 | } 150 | -------------------------------------------------------------------------------- /src/cli/validate.rs: -------------------------------------------------------------------------------- 1 | use crate::{book::Book, runtime::CommandLine}; 2 | 3 | use super::ValidateArgs; 4 | 5 | pub(crate) async fn validate(args: ValidateArgs) -> anyhow::Result<()> { 6 | let book = Book::from_path(args.path.clone(), None)?; 7 | 8 | // we need at least one page 9 | if book.pages.is_empty() { 10 | return Err(anyhow::anyhow!("no pages found in {:?}", &args.path)); 11 | } 12 | 13 | for (page_path, page) in book.pages { 14 | log::info!("validating {:?} ...", page_path); 15 | 16 | // and at least one function per page, otherwise what's the point of the page? 
17 | if page.functions.is_empty() { 18 | return Err(anyhow::anyhow!("no functions found in {:?}", page_path)); 19 | } else if page.name.is_empty() { 20 | // set by Book::from_path if not specified 21 | return Err(anyhow::anyhow!("page name is empty in {:?}", page_path)); 22 | } else if page.categories.is_empty() { 23 | // set by Book::from_path if not specified 24 | return Err(anyhow::anyhow!( 25 | "page categories are empty in {:?}", 26 | page_path 27 | )); 28 | } 29 | 30 | for (func_name, func) in page.functions { 31 | // the model needs at least a name and a description 32 | if func_name.is_empty() { 33 | return Err(anyhow::anyhow!("function name is empty in {:?}", page_path)); 34 | } else if func.description.is_empty() { 35 | return Err(anyhow::anyhow!( 36 | "function description is empty in {:?}", 37 | page_path 38 | )); 39 | } 40 | 41 | if func.parameters.is_empty() { 42 | return Err(anyhow::anyhow!( 43 | "function {} parameters are empty in {:?}", 44 | func_name, 45 | page_path 46 | )); 47 | } 48 | 49 | // make sure the function resolves to a valid command line 50 | let cmdline = func.execution.get_command_line().map_err(|e| { 51 | anyhow::anyhow!( 52 | "error while getting command line for function {}: {}", 53 | func_name, 54 | e 55 | ) 56 | })?; 57 | 58 | if cmdline.is_empty() { 59 | return Err(anyhow::anyhow!( 60 | "command line is empty for function {} in {:?}", 61 | func_name, 62 | page_path 63 | )); 64 | } 65 | 66 | let cmdline = CommandLine::from_vec(&cmdline).map_err(|e| { 67 | anyhow::anyhow!( 68 | "error while parsing command line for function {}: {}", 69 | func_name, 70 | e 71 | ) 72 | })?; 73 | 74 | // validate container requirements - a container is required if: 75 | let container = if !cmdline.app_in_path { 76 | // the binary is not in $PATH 77 | if let Some(container) = &func.container { 78 | Some(container) 79 | } else { 80 | return Err(anyhow::anyhow!( 81 | "binary for function {} in {:?} not in $PATH and container not specified", 82 | 
func_name, 83 | page_path 84 | )); 85 | } 86 | } else if func.container.is_some() && func.container.as_ref().unwrap().force { 87 | // it's set and forced 88 | Some(func.container.as_ref().unwrap()) 89 | } else { 90 | None 91 | }; 92 | 93 | // validate the container if any 94 | if let Some(container) = container { 95 | if args.skip_docker { 96 | // or not :P 97 | log::warn!("skipping container resolution for function {}", func_name); 98 | } else { 99 | // this will pull or build the image 100 | container.resolve().await.map_err(|e| { 101 | anyhow::anyhow!( 102 | "error while resolving container for function {} in {}: {}", 103 | func_name, 104 | page_path, 105 | e 106 | ) 107 | })?; 108 | 109 | // if volumes are defined make sure they exist 110 | if let Some(volumes) = &container.volumes { 111 | for volume in volumes { 112 | let (on_host, on_guest) = 113 | volume.split_once(':').unwrap_or((volume, volume)); 114 | 115 | let on_host = shellexpand::full(on_host) 116 | .map_err(|e| { 117 | anyhow::anyhow!( 118 | "error while expanding volume path for function {}: {}", 119 | func_name, 120 | e 121 | ) 122 | })? 
123 | .to_string(); 124 | 125 | if !std::path::Path::new(&on_host).exists() { 126 | return Err(anyhow::anyhow!( 127 | "page {}, function {}, path {} for volume '{}' does not exist", 128 | page_path, 129 | func_name, 130 | on_host, 131 | on_guest 132 | )); 133 | } 134 | } 135 | } 136 | } 137 | } 138 | 139 | log::info!(" {} - ok", func_name); 140 | log::debug!(" cmdline = {:?}", cmdline); 141 | if let Some(container) = container { 142 | log::debug!(" container = {:?}", container); 143 | } 144 | } 145 | } 146 | 147 | Ok(()) 148 | } 149 | -------------------------------------------------------------------------------- /src/cli/view.rs: -------------------------------------------------------------------------------- 1 | use crate::book::{runtime::ExecutionFlavor, Book}; 2 | 3 | use super::ViewArgs; 4 | 5 | pub(crate) async fn view(args: ViewArgs) -> anyhow::Result<()> { 6 | let book = Book::from_path(args.path, args.filter)?; 7 | 8 | for (_, page) in book.pages { 9 | println!("{} > [{}]", page.categories.join(" > "), page.name); 10 | 11 | for (function_name, function) in page.functions { 12 | println!(" * {} : {}", function_name, function.description); 13 | println!( 14 | " running with: {}", 15 | ExecutionFlavor::for_function(&function)? 
16 | ); 17 | println!(" parameters:"); 18 | for (parameter_name, parameter) in &function.parameters { 19 | println!(" {} : {}", parameter_name, parameter.description); 20 | } 21 | 22 | println!(); 23 | } 24 | } 25 | 26 | Ok(()) 27 | } 28 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate anyhow; 3 | 4 | mod book; 5 | mod cli; 6 | mod runtime; 7 | 8 | use clap::Parser; 9 | use cli::Arguments; 10 | 11 | #[tokio::main] 12 | async fn main() -> anyhow::Result<()> { 13 | let args = Arguments::parse(); 14 | 15 | if std::env::var_os("RUST_LOG").is_none() { 16 | // set `RUST_LOG=debug` to see debug logs 17 | // NOTE: actix_server is waaaay too verbose at the info level -.- 18 | std::env::set_var("RUST_LOG", "info,actix_server=warn"); 19 | } 20 | 21 | env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")) 22 | .format_module_path(false) 23 | .format_target(false) 24 | .init(); 25 | 26 | let result = match args.command { 27 | cli::Command::Install(args) => cli::install(args).await, 28 | cli::Command::Create(args) => cli::create(args).await, 29 | cli::Command::View(args) => cli::view(args).await, 30 | cli::Command::Serve(args) => cli::serve(args).await, 31 | cli::Command::Run(args) => cli::run(args).await, 32 | cli::Command::Validate(args) => cli::validate(args).await, 33 | }; 34 | 35 | if let Err(e) = result { 36 | log::error!("{:?}", e); 37 | std::process::exit(1); 38 | } 39 | 40 | Ok(()) 41 | } 42 | -------------------------------------------------------------------------------- /src/runtime/cmd.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::BTreeMap, fmt}; 2 | 3 | #[derive(Debug)] 4 | pub struct CommandLine { 5 | pub sudo: bool, 6 | pub app: String, 7 | pub app_in_path: bool, 8 | pub args: Vec<String>, 9 | pub env: 
BTreeMap<String, String>, 10 | 11 | // used to keep a valid reference to this while the command is running 12 | pub temp_env_file: Option<tempfile::NamedTempFile>, 13 | } 14 | 15 | impl CommandLine { 16 | pub fn from_vec(vec: &Vec<String>) -> anyhow::Result<Self> { 17 | log::debug!("Creating CommandLine from vector: {:?}", vec); 18 | 19 | if vec.is_empty() { 20 | log::error!("Empty command line vector provided"); 21 | return Err(anyhow::anyhow!("empty command line")); 22 | } 23 | 24 | let mut sudo = false; 25 | let mut app = String::new(); 26 | let mut args = Vec::new(); 27 | 28 | for arg in vec { 29 | log::trace!("Processing argument: {}", arg); 30 | if arg == "sudo" { 31 | log::debug!("Sudo flag detected"); 32 | sudo = true; 33 | } else if app.is_empty() { 34 | log::debug!("Setting application name: {}", arg); 35 | app = arg.to_string(); 36 | } else { 37 | log::trace!("Adding argument: {}", arg); 38 | args.push(arg.to_string()); 39 | } 40 | } 41 | 42 | if app.is_empty() { 43 | log::error!("Could not determine application name from: {:?}", vec); 44 | return Err(anyhow::anyhow!( 45 | "could not determine application name from command line: {:?}", 46 | vec 47 | )); 48 | } 49 | 50 | let app_in_path = if let Ok(path) = which::which(&app) { 51 | log::debug!("Found application in path: {}", path.display()); 52 | app = path.to_string_lossy().to_string(); 53 | true 54 | } else { 55 | log::debug!("Application '{}' not found in PATH", app); 56 | false 57 | }; 58 | 59 | log::debug!( 60 | "Created CommandLine: sudo={}, app={}, app_in_path={}, args={:?}", 61 | sudo, 62 | app, 63 | app_in_path, 64 | args 65 | ); 66 | 67 | Ok(Self { 68 | sudo, 69 | app, 70 | args, 71 | app_in_path, 72 | env: BTreeMap::new(), 73 | temp_env_file: None, 74 | }) 75 | } 76 | 77 | pub fn from_vec_with_env( 78 | vec: &Vec<String>, 79 | env: BTreeMap<String, String>, 80 | ) -> anyhow::Result<Self> { 81 | log::debug!("creating CommandLine with environment variables"); 82 | log::trace!("environment 
variables: {:?}", env); 83 | let mut cmd = Self::from_vec(vec)?; 84 | cmd.env = env; 85 | Ok(cmd) 86 | } 87 | 88 | fn get_env_interpolated_args(&self) -> Vec<String> { 89 | log::debug!("interpolating variables from environment: {:?}", self.env); 90 | 91 | let args = self 92 | .args 93 | .iter() 94 | .map(|arg| { 95 | let mut result = arg.clone(); 96 | for (key, value) in &self.env { 97 | let pattern = format!("${{{}}}", key); 98 | if result.contains(&pattern) { 99 | log::debug!("replacing {} with {}", pattern, value); 100 | result = result.replace(&pattern, value); 101 | } 102 | } 103 | result 104 | }) 105 | .collect(); 106 | 107 | log::debug!("after interpolation: {:?}", &args); 108 | 109 | args 110 | } 111 | 112 | pub async fn execute(&self) -> anyhow::Result<String> { 113 | log::debug!("executing command: {}", self); 114 | log::debug!("full command details: {:?}", self); 115 | 116 | let args = self.get_env_interpolated_args(); 117 | 118 | let mut command = tokio::process::Command::new(&self.app); 119 | command.args(&args); 120 | 121 | // log environment variables if present 122 | if !self.env.is_empty() { 123 | log::debug!("setting environment variables: {:?}", self.env); 124 | command.envs(&self.env); 125 | } 126 | 127 | let output = command.output().await?; 128 | log::debug!("command completed with status: {:?}", output.status); 129 | 130 | let mut parts = vec![]; 131 | 132 | let stdout = String::from_utf8_lossy(&output.stdout); 133 | let stderr = String::from_utf8_lossy(&output.stderr); 134 | 135 | if !output.status.success() { 136 | log::warn!("command failed with exit code: {}", output.status); 137 | parts.push(format!("EXIT CODE: {}", &output.status)); 138 | } 139 | 140 | if !stdout.is_empty() { 141 | log::trace!("command stdout: {}", stdout); 142 | parts.push(stdout.to_string()); 143 | } 144 | 145 | if !stderr.is_empty() { 146 | if output.status.success() { 147 | log::debug!("command stderr (success): {}", stderr); 148 | parts.push(stderr.to_string()); 
149 | } else { 150 | log::error!("command stderr (failure): {}", stderr); 151 | parts.push(format!("ERROR: {}", stderr)); 152 | } 153 | } 154 | 155 | let result = parts.join("\n"); 156 | log::debug!( 157 | "command execution completed, output length: {}", 158 | result.len() 159 | ); 160 | log::trace!("command output: {}", result); 161 | 162 | Ok(result) 163 | } 164 | } 165 | 166 | impl fmt::Display for CommandLine { 167 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 168 | let mut command = String::new(); 169 | 170 | if self.sudo { 171 | command.push_str("sudo "); 172 | } 173 | 174 | command.push_str(&self.app); 175 | 176 | for arg in &self.args { 177 | command.push(' '); 178 | command.push_str(arg); 179 | } 180 | 181 | write!(f, "{}", command) 182 | } 183 | } 184 | 185 | #[cfg(test)] 186 | mod tests { 187 | use super::*; 188 | 189 | #[test] 190 | fn test_command_line_display() { 191 | let cmd = CommandLine { 192 | sudo: false, 193 | app: "ls".to_string(), 194 | args: vec!["-l".to_string(), "-a".to_string()], 195 | app_in_path: true, 196 | env: BTreeMap::new(), 197 | temp_env_file: None, 198 | }; 199 | assert_eq!(format!("{}", cmd), "ls -l -a"); 200 | 201 | let cmd_with_sudo = CommandLine { 202 | sudo: true, 203 | app: "apt".to_string(), 204 | args: vec!["install".to_string(), "package".to_string()], 205 | app_in_path: true, 206 | env: BTreeMap::new(), 207 | temp_env_file: None, 208 | }; 209 | assert_eq!(format!("{}", cmd_with_sudo), "sudo apt install package"); 210 | } 211 | 212 | #[tokio::test] 213 | async fn test_command_line_execute_success() { 214 | let cmd = CommandLine { 215 | sudo: false, 216 | app: "echo".to_string(), 217 | args: vec!["-n".to_string(), "Hello, World!".to_string()], 218 | app_in_path: true, 219 | env: BTreeMap::new(), 220 | temp_env_file: None, 221 | }; 222 | let result = cmd.execute().await.unwrap(); 223 | assert_eq!(result, "Hello, World!"); 224 | } 225 | 226 | #[tokio::test] 227 | async fn test_command_line_execute_failure() 
{ 228 | let cmd = CommandLine { 229 | sudo: false, 230 | app: "ls".to_string(), 231 | args: vec!["nonexistent_file".to_string()], 232 | app_in_path: true, 233 | env: BTreeMap::new(), 234 | temp_env_file: None, 235 | }; 236 | let result = cmd.execute().await.unwrap(); 237 | assert!(result.contains("EXIT CODE:")); 238 | assert!(result.contains("ERROR:")); 239 | } 240 | 241 | #[tokio::test] 242 | async fn test_command_line_execute_with_stderr() { 243 | let cmd = CommandLine { 244 | sudo: false, 245 | app: "sh".to_string(), 246 | args: vec![ 247 | "-c".to_string(), 248 | "echo 'Hello' && echo 'Error' >&2".to_string(), 249 | ], 250 | app_in_path: true, 251 | env: BTreeMap::new(), 252 | temp_env_file: None, 253 | }; 254 | let result = cmd.execute().await.unwrap(); 255 | assert!(result.contains("Hello")); 256 | assert!(result.contains("Error")); 257 | } 258 | 259 | #[tokio::test] 260 | async fn test_command_line_empty_app() { 261 | let cmd = CommandLine { 262 | sudo: false, 263 | app: "".to_string(), 264 | args: vec!["arg1".to_string(), "arg2".to_string()], 265 | app_in_path: true, 266 | env: BTreeMap::new(), 267 | temp_env_file: None, 268 | }; 269 | let result = cmd.execute().await; 270 | assert!(result.is_err()); 271 | } 272 | 273 | #[test] 274 | fn test_get_env_interpolated_args_with_env_vars() { 275 | let mut env = BTreeMap::new(); 276 | env.insert("TEST_VAR".to_string(), "test_value".to_string()); 277 | env.insert("OTHER_VAR".to_string(), "other_value".to_string()); 278 | 279 | let cmd = CommandLine { 280 | sudo: false, 281 | app: "echo".to_string(), 282 | args: vec!["${TEST_VAR}".to_string(), "${OTHER_VAR}".to_string()], 283 | app_in_path: true, 284 | env, 285 | temp_env_file: None, 286 | }; 287 | 288 | let result = cmd.get_env_interpolated_args(); 289 | assert_eq!(result, vec!["test_value", "other_value"]); 290 | } 291 | 292 | #[test] 293 | fn test_get_env_interpolated_args_with_missing_vars() { 294 | let env = BTreeMap::new(); 295 | let cmd = CommandLine { 296 | 
sudo: false, 297 | app: "echo".to_string(), 298 | args: vec!["${MISSING_VAR}".to_string()], 299 | app_in_path: true, 300 | env, 301 | temp_env_file: None, 302 | }; 303 | 304 | let result = cmd.get_env_interpolated_args(); 305 | assert_eq!(result, vec!["${MISSING_VAR}"]); 306 | } 307 | 308 | #[test] 309 | fn test_get_env_interpolated_args_with_mixed_content() { 310 | let mut env = BTreeMap::new(); 311 | env.insert("VAR".to_string(), "value".to_string()); 312 | 313 | let cmd = CommandLine { 314 | sudo: false, 315 | app: "echo".to_string(), 316 | args: vec![ 317 | "prefix_${VAR}".to_string(), 318 | "normal_arg".to_string(), 319 | "${VAR}_suffix".to_string(), 320 | ], 321 | app_in_path: true, 322 | env, 323 | temp_env_file: None, 324 | }; 325 | 326 | let result = cmd.get_env_interpolated_args(); 327 | assert_eq!(result, vec!["prefix_value", "normal_arg", "value_suffix"]); 328 | } 329 | } 330 | -------------------------------------------------------------------------------- /src/runtime/docker.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | path::{Path, PathBuf}, 3 | process::Stdio, 4 | }; 5 | 6 | use serde::{Deserialize, Serialize}; 7 | use tokio::{ 8 | io::{AsyncBufReadExt, BufReader}, 9 | process::Command, 10 | task, 11 | }; 12 | 13 | #[derive(Debug, Serialize, Deserialize)] 14 | pub enum ContainerSource { 15 | #[serde(rename = "image")] 16 | Image(String), 17 | #[serde(rename = "build")] 18 | Build { name: String, path: String }, 19 | } 20 | 21 | impl ContainerSource { 22 | pub async fn resolve(&self, platform: Option<String>) -> anyhow::Result<()> { 23 | match self { 24 | Self::Image(image) => pull_image(image, platform).await, 25 | Self::Build { name, path } => build_image(name, path).await, 26 | } 27 | } 28 | 29 | pub fn image(&self) -> &str { 30 | match self { 31 | Self::Image(image) => image, 32 | Self::Build { name, .. 
} => name, 33 | } 34 | } 35 | } 36 | 37 | // Spawns `command` with `args`, streams every stdout/stderr line to the info log as it arrives, and returns an error if the process exits with a non-zero status. async fn run_command(command: &str, args: &[&str]) -> anyhow::Result<()> { 38 | let mut child = Command::new(command) 39 | .args(args) 40 | .stdout(Stdio::piped()) 41 | .stderr(Stdio::piped()) 42 | .spawn()?; 43 | 44 | let stdout = child.stdout.take().expect("failed to capture stdout"); 45 | let stderr = child.stderr.take().expect("failed to capture stderr"); 46 | 47 | let mut stdout_reader = BufReader::new(stdout).lines(); 48 | let mut stderr_reader = BufReader::new(stderr).lines(); 49 | 50 | let stdout_task = task::spawn(async move { 51 | while let Some(line) = stdout_reader.next_line().await.unwrap_or(None) { 52 | log::info!("{}", line); 53 | } 54 | }); 55 | 56 | let stderr_task = task::spawn(async move { 57 | while let Some(line) = stderr_reader.next_line().await.unwrap_or(None) { 58 | // docker logs to stderr ... -.- 59 | log::info!("{}", line); 60 | } 61 | }); 62 | 63 | let status = child.wait().await?; 64 | 65 | stdout_task.await?; 66 | stderr_task.await?; 67 | 68 | if status.success() { 69 | Ok(()) 70 | } else { 71 | Err(anyhow::anyhow!("command failed with status: {:?}", status)) 72 | } 73 | } 74 | 75 | // Pulls `image` via the docker CLI, skipping the pull when the image is already present locally; `platform` optionally pins the target platform. pub(crate) async fn pull_image(image: &str, platform: Option<String>) -> anyhow::Result<()> { 76 | run_command( 77 | "sh", 78 | &[ 79 | "-c", 80 | &format!( 81 | "docker images -q '{image}' | grep -q . 
|| docker pull {}'{image}'", 82 | if let Some(platform) = platform { 83 | format!("--platform '{}' ", platform) 84 | } else { 85 | "".to_string() 86 | } 87 | ), 88 | ], 89 | ) 90 | .await 91 | } 92 | 93 | pub(crate) async fn build_image(name: &str, path: &str) -> anyhow::Result<()> { 94 | let dockerfile = PathBuf::from(path); 95 | if !dockerfile.exists() { 96 | return Err(anyhow::anyhow!("dockerfile '{}' does not exist", path)); 97 | } else if !dockerfile.is_file() { 98 | return Err(anyhow::anyhow!("path '{}' is not a dockerfile", path)); 99 | } 100 | 101 | log::info!("building image '{}' from '{}'", name, dockerfile.display()); 102 | 103 | run_command( 104 | "sh", 105 | &[ 106 | "-c", 107 | &format!( 108 | "docker build -f '{}' -t '{name}' --quiet '{}'", 109 | dockerfile.display(), 110 | dockerfile.parent().unwrap_or(Path::new(".")).display(), 111 | ), 112 | ], 113 | ) 114 | .await 115 | } 116 | -------------------------------------------------------------------------------- /src/runtime/mod.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{atomic::AtomicUsize, Arc}; 2 | 3 | use crate::book::{flavors::openai, Book}; 4 | 5 | mod cmd; 6 | mod docker; 7 | 8 | pub(crate) mod prompt; 9 | pub(crate) mod ssh; 10 | 11 | pub(crate) use cmd::CommandLine; 12 | pub(crate) use docker::ContainerSource; 13 | use ssh::SSHConnection; 14 | 15 | static ACTIVE_TASKS: AtomicUsize = AtomicUsize::new(0); 16 | 17 | // https://stackoverflow.com/questions/29963449/golang-like-defer-in-rust 18 | struct ScopeCall<F: FnOnce()> { 19 | c: Option<F>, 20 | } 21 | impl<F: FnOnce()> Drop for ScopeCall<F> { 22 | fn drop(&mut self) { 23 | self.c.take().unwrap()() 24 | } 25 | } 26 | 27 | macro_rules! expr { 28 | ($e: expr) => { 29 | $e 30 | }; 31 | } // tt hack 32 | macro_rules! 
defer { 33 | ($($data: tt)*) => ( 34 | let _scope_call = ScopeCall { 35 | c: Some(|| -> () { expr!({ $($data)* }) }) 36 | }; 37 | ) 38 | } 39 | 40 | async fn wait_for_available_tasks(max_running_tasks: usize) { 41 | let wait = std::time::Duration::from_secs(1); 42 | loop { 43 | let active_tasks = ACTIVE_TASKS.load(std::sync::atomic::Ordering::Relaxed); 44 | if active_tasks < max_running_tasks { 45 | break; 46 | } 47 | 48 | log::debug!("waiting for our turn, {} active tasks", active_tasks); 49 | tokio::time::sleep(wait).await; 50 | } 51 | } 52 | 53 | pub(crate) async fn execute_call( 54 | ssh: Option<SSHConnection>, 55 | interactive: bool, 56 | max_running_tasks: usize, 57 | book: Arc<Book>, 58 | call: openai::Call, 59 | ) -> anyhow::Result<openai::CallResultMessage> { 60 | wait_for_available_tasks(max_running_tasks).await; 61 | 62 | // increment the active tasks counter 63 | ACTIVE_TASKS.fetch_add(1, std::sync::atomic::Ordering::Relaxed); 64 | defer! { 65 | log::debug!("decrementing active tasks counter"); 66 | ACTIVE_TASKS.fetch_sub(1, std::sync::atomic::Ordering::Relaxed); 67 | } 68 | 69 | log::debug!("processing call: {:?}", call); 70 | 71 | let function = book.get_function(&call.function.name)?; 72 | 73 | log::debug!("{} resolved to: {:?}", &call.function.name, &function); 74 | 75 | // fail early if the arguments are invalid 76 | function.validate_arguments(&call.function.arguments)?; 77 | 78 | log::debug!("arguments validated"); 79 | 80 | let command_line = function.resolve_command_line(&call.function.arguments)?; 81 | 82 | log::debug!("command line: {:?}", command_line); 83 | 84 | // validate runtime requirements 85 | let container = function.function.container.as_ref(); 86 | let mut needs_container = false; 87 | let mut can_ssh = false; 88 | 89 | // if --ssh was provided 90 | if let Some(ssh) = ssh.as_ref() { 91 | // check if the app is in $PATH on the ssh host 92 | can_ssh = ssh.app_in_path(&command_line.app).await?; 93 | if !can_ssh { 94 | log::warn!( 95 | 
"{} not found in $PATH on {}", 96 | command_line.app, 97 | ssh.to_string() 98 | ); 99 | } 100 | } 101 | 102 | // we are not going to use ssh, so we need to check if we need a container 103 | if !can_ssh { 104 | if command_line.sudo && !interactive { 105 | // we're running in non-interactive mode, can't sudo 106 | needs_container = true; 107 | } else if !command_line.app_in_path { 108 | // app not in $PATH, we need a container 109 | needs_container = true; 110 | } else if container.is_some() && container.unwrap().force { 111 | // forced container use 112 | needs_container = true; 113 | } 114 | } 115 | 116 | // wrap the command line in a container if needed 117 | let command_line = if needs_container { 118 | let container = match container { 119 | Some(c) => c, 120 | None => { 121 | return Err(anyhow::anyhow!( 122 | "container required for function {}", 123 | call.function.name 124 | )) 125 | } 126 | }; 127 | 128 | log::debug!("using container: {:?}", container); 129 | 130 | // build or pull the image if needed 131 | container.resolve().await?; 132 | 133 | // wrap the command line 134 | container.wrap(command_line)? 135 | } else { 136 | // keep it as it is 137 | command_line 138 | }; 139 | 140 | if can_ssh { 141 | log::warn!( 142 | "executing (as {}): {}", 143 | ssh.as_ref().unwrap().to_string(), 144 | &command_line 145 | ); 146 | } else { 147 | log::warn!("executing: {}", &command_line); 148 | } 149 | 150 | if interactive 151 | && prompt::ask( 152 | ">> enter 'y' to proceed or any other key to cancel: ", 153 | &["y", "n"], 154 | )? != "y" 155 | { 156 | return Ok(openai::CallResultMessage { 157 | role: "tool".to_string(), 158 | call_id: call.id.clone(), 159 | content: "<command execution cancelled by user>".to_string(), 160 | }); 161 | } 162 | 163 | // finally execute the command line 164 | let content = if can_ssh { 165 | // execute via ssh 166 | ssh.as_ref() 167 | .unwrap() 168 | .execute(command_line.sudo, &command_line.app, &command_line.args) 169 | .await? 
170 | } else { 171 | // execute locally 172 | command_line.execute().await? 173 | }; 174 | 175 | Ok(openai::CallResultMessage { 176 | role: "tool".to_string(), 177 | call_id: call.id.clone(), 178 | content, 179 | }) 180 | } 181 | 182 | pub(crate) async fn execute( 183 | ssh: Option<SSHConnection>, 184 | interactive: bool, 185 | book: Arc<Book>, 186 | calls: Vec<openai::Call>, 187 | max_running_tasks: usize, 188 | ) -> anyhow::Result<Vec<openai::CallResultMessage>> { 189 | let mut futures = Vec::new(); 190 | for call in calls { 191 | futures.push(tokio::spawn(execute_call( 192 | ssh.clone(), 193 | interactive, 194 | max_running_tasks, 195 | book.clone(), 196 | call, 197 | ))); 198 | } 199 | 200 | let mut results = Vec::new(); 201 | for future_result in futures::future::join_all(futures).await { 202 | match future_result { 203 | Ok(result) => match result { 204 | Ok(result) => results.push(result), 205 | Err(e) => return Err(anyhow!("error executing call: {:?}", e)), 206 | }, 207 | Err(e) => log::error!("error joining task: {:?}", e), 208 | } 209 | } 210 | 211 | Ok(results) 212 | } 213 | 214 | #[cfg(test)] 215 | mod tests { 216 | use crate::book::{runtime::ExecutionContext, Function, Page}; 217 | 218 | use super::*; 219 | use std::collections::BTreeMap; 220 | 221 | #[tokio::test] 222 | async fn test_execute_call() { 223 | let call = openai::Call { 224 | id: Some("test_call".to_string()), 225 | call_type: "function".to_string(), 226 | function: openai::FunctionCall { 227 | name: "test_function".to_string(), 228 | arguments: BTreeMap::new(), 229 | }, 230 | }; 231 | 232 | let mock_page = Page { 233 | name: "test_page".to_string(), 234 | description: Some("Test page".to_string()), 235 | categories: Vec::new(), 236 | functions: { 237 | let mut map = BTreeMap::new(); 238 | map.insert( 239 | "test_function".to_string(), 240 | Function { 241 | description: "Test function".to_string(), 242 | parameters: BTreeMap::new(), 243 | execution: ExecutionContext::CommandLine(vec![ 244 
| "echo".to_string(), 245 | "test".to_string(), 246 | ]), 247 | container: None, 248 | }, 249 | ); 250 | map 251 | }, 252 | }; 253 | 254 | let book = Arc::new(Book { 255 | pages: { 256 | let mut map = BTreeMap::new(); 257 | map.insert(camino::Utf8PathBuf::from("test_page"), mock_page); 258 | map 259 | }, 260 | }); 261 | 262 | let result = execute_call(None, false, 10, book, call).await.unwrap(); 263 | 264 | assert_eq!(result.role, "tool"); 265 | assert_eq!(result.call_id, Some("test_call".to_string())); 266 | assert_eq!(result.content, "test\n"); 267 | } 268 | 269 | #[tokio::test] 270 | async fn test_execute() { 271 | let calls = vec![ 272 | openai::Call { 273 | id: Some("call1".to_string()), 274 | call_type: "function".to_string(), 275 | function: openai::FunctionCall { 276 | name: "echo1".to_string(), 277 | arguments: BTreeMap::new(), 278 | }, 279 | }, 280 | openai::Call { 281 | id: Some("call2".to_string()), 282 | call_type: "function".to_string(), 283 | function: openai::FunctionCall { 284 | name: "echo2".to_string(), 285 | arguments: BTreeMap::new(), 286 | }, 287 | }, 288 | ]; 289 | 290 | let mock_page = Page { 291 | name: "test_page".to_string(), 292 | description: Some("Test page".to_string()), 293 | categories: Vec::new(), 294 | functions: { 295 | let mut map = BTreeMap::new(); 296 | map.insert( 297 | "echo1".to_string(), 298 | Function { 299 | description: "Echo 1".to_string(), 300 | parameters: BTreeMap::new(), 301 | execution: ExecutionContext::CommandLine(vec![ 302 | "echo".to_string(), 303 | "test1".to_string(), 304 | ]), 305 | container: None, 306 | }, 307 | ); 308 | map.insert( 309 | "echo2".to_string(), 310 | Function { 311 | description: "Echo 2".to_string(), 312 | parameters: BTreeMap::new(), 313 | execution: ExecutionContext::CommandLine(vec![ 314 | "echo".to_string(), 315 | "test2".to_string(), 316 | ]), 317 | container: None, 318 | }, 319 | ); 320 | map 321 | }, 322 | }; 323 | 324 | let book = Arc::new(Book { 325 | pages: { 326 | let mut map = 
BTreeMap::new(); 327 | map.insert(camino::Utf8PathBuf::from("test_page"), mock_page); 328 | map 329 | }, 330 | }); 331 | 332 | let results = execute(None, false, book, calls, 10).await.unwrap(); 333 | 334 | assert_eq!(results.len(), 2); 335 | assert_eq!(results[0].content, "test1\n"); 336 | assert_eq!(results[1].content, "test2\n"); 337 | } 338 | 339 | #[tokio::test] 340 | async fn test_execute_with_non_existent_function() { 341 | let book = Arc::new(Book { 342 | pages: { 343 | let mut map = BTreeMap::new(); 344 | map.insert( 345 | camino::Utf8PathBuf::from("test_page"), 346 | Page { 347 | name: "test_page".to_string(), 348 | description: Some("Test page".to_string()), 349 | categories: Vec::new(), 350 | functions: BTreeMap::new(), 351 | }, 352 | ); 353 | map 354 | }, 355 | }); 356 | 357 | let calls = vec![openai::Call { 358 | id: Some("call1".to_string()), 359 | call_type: "function".to_string(), 360 | function: openai::FunctionCall { 361 | name: "non_existent_function".to_string(), 362 | arguments: BTreeMap::new(), 363 | }, 364 | }]; 365 | 366 | let result = execute(None, false, Arc::clone(&book), calls, 10).await; 367 | assert!(result.is_err()); 368 | } 369 | 370 | #[tokio::test] 371 | async fn test_execute_with_non_existent_command() { 372 | let book = Arc::new(Book { 373 | pages: { 374 | let mut map = BTreeMap::new(); 375 | map.insert( 376 | camino::Utf8PathBuf::from("test_page"), 377 | Page { 378 | name: "test_page".to_string(), 379 | description: Some("Test page".to_string()), 380 | categories: Vec::new(), 381 | functions: { 382 | let mut map = BTreeMap::new(); 383 | map.insert( 384 | "non_existent".to_string(), 385 | Function { 386 | description: "Non-existent command".to_string(), 387 | parameters: BTreeMap::new(), 388 | execution: ExecutionContext::CommandLine(vec![ 389 | "non_existent_command".to_string(), 390 | ]), 391 | container: None, 392 | }, 393 | ); 394 | map 395 | }, 396 | }, 397 | ); 398 | map 399 | }, 400 | }); 401 | 402 | let calls = 
vec![openai::Call { 403 | id: Some("call1".to_string()), 404 | call_type: "function".to_string(), 405 | function: openai::FunctionCall { 406 | name: "non_existent".to_string(), 407 | arguments: BTreeMap::new(), 408 | }, 409 | }]; 410 | 411 | let result = execute(None, false, Arc::clone(&book), calls, 10).await; 412 | assert!(result.is_err()); 413 | } 414 | } 415 | -------------------------------------------------------------------------------- /src/runtime/prompt.rs: -------------------------------------------------------------------------------- 1 | use std::io::Write; 2 | 3 | pub(crate) fn ask<'a>(prompt: &str, choices: &'a [&'a str]) -> anyhow::Result<String> { 4 | loop { 5 | print!("{}", prompt); 6 | std::io::stdout().flush()?; 7 | 8 | let mut user_input = String::new(); 9 | std::io::stdin().read_line(&mut user_input)?; 10 | println!(); 11 | 12 | let choice = user_input.trim().to_lowercase(); 13 | if choices.is_empty() || choices.contains(&choice.as_str()) { 14 | return Ok(choice); 15 | } else { 16 | log::error!("valid choices are: {}", choices.join(", ")); 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/runtime/ssh.rs: -------------------------------------------------------------------------------- 1 | use std::{borrow::Cow, path::PathBuf}; 2 | 3 | use async_ssh2_tokio::{AuthMethod, Client, ServerCheckMethod}; 4 | 5 | #[derive(Debug, Clone)] 6 | pub struct SSHConnection { 7 | host: String, 8 | port: u16, 9 | user: String, 10 | auth_method: AuthMethod, 11 | } 12 | 13 | impl SSHConnection { 14 | pub fn from_str(s: &str, public_key: &str, passphrase: Option<String>) -> anyhow::Result<Self> { 15 | let mut user = std::env::var("USER").unwrap_or_else(|_| "root".to_string()); 16 | let host; 17 | let mut port = 22; 18 | 19 | if s.is_empty() { 20 | return Err(anyhow::anyhow!("SSH connection string cannot be empty")); 21 | } 22 | 23 | // split on @ first to separate user if present 24 | let parts: Vec<&str> 
= s.split('@').collect(); 25 | match parts.len() { 26 | // only.host or only.host:port 27 | 1 => { 28 | let host_parts: Vec<&str> = parts[0].split(':').collect(); 29 | match host_parts.len() { 30 | 1 => host = host_parts[0].to_string(), 31 | 2 => { 32 | host = host_parts[0].to_string(); 33 | port = host_parts[1].parse()?; 34 | } 35 | _ => return Err(anyhow::anyhow!("invalid host format")), 36 | } 37 | } 38 | // user@host or user@host:port 39 | 2 => { 40 | user = parts[0].to_string(); 41 | let host_parts: Vec<&str> = parts[1].split(':').collect(); 42 | match host_parts.len() { 43 | 1 => host = host_parts[0].to_string(), 44 | 2 => { 45 | host = host_parts[0].to_string(); 46 | port = host_parts[1].parse()?; 47 | } 48 | _ => return Err(anyhow::anyhow!("invalid host format")), 49 | } 50 | } 51 | _ => return Err(anyhow::anyhow!("invalid SSH connection string format")), 52 | } 53 | 54 | let public_key = shellexpand::full(public_key)?.to_string(); 55 | let public_key = PathBuf::from(public_key); 56 | if !public_key.exists() { 57 | return Err(anyhow::anyhow!( 58 | "public key file {} does not exist", 59 | public_key.display() 60 | )); 61 | } 62 | let public_key = public_key.canonicalize()?.to_string_lossy().to_string(); 63 | 64 | let auth_method = AuthMethod::with_key_file(&public_key, passphrase.as_deref()); 65 | 66 | Ok(Self { 67 | host, 68 | port, 69 | user, 70 | auth_method, 71 | }) 72 | } 73 | 74 | async fn client(&self) -> anyhow::Result<Client> { 75 | Client::connect( 76 | (self.host.as_str(), self.port), 77 | self.user.as_str(), 78 | self.auth_method.clone(), 79 | ServerCheckMethod::NoCheck, 80 | ) 81 | .await 82 | .map_err(|e| anyhow::anyhow!("failed to connect to SSH server: {:?}", e)) 83 | } 84 | 85 | fn create_command_line(with_sudo: bool, app: &str, args: &Vec<String>) -> String { 86 | let mut command = String::new(); 87 | if with_sudo { 88 | command.push_str("sudo "); 89 | } 90 | 91 | command.push_str(&shell_escape::escape(Cow::Borrowed(app))); 92 | 93 | for 
arg in args { 94 | command.push(' '); 95 | command.push_str(&shell_escape::escape(Cow::Borrowed(arg))); 96 | } 97 | 98 | command 99 | } 100 | 101 | pub(crate) async fn execute( 102 | &self, 103 | with_sudo: bool, 104 | app: &str, 105 | args: &Vec<String>, 106 | ) -> anyhow::Result<String> { 107 | let command_line = Self::create_command_line(with_sudo, app, args); 108 | let result = self.client().await?.execute(&command_line).await?; 109 | 110 | let mut parts = vec![]; 111 | 112 | if result.exit_status != 0 { 113 | parts.push(format!("EXIT CODE: {}", &result.exit_status)); 114 | } 115 | 116 | if !result.stdout.is_empty() { 117 | parts.push(result.stdout.to_string()); 118 | } 119 | 120 | if !result.stderr.is_empty() { 121 | if result.exit_status == 0 { 122 | parts.push(result.stderr.to_string()); 123 | } else { 124 | parts.push(format!("ERROR: {}", result.stderr)); 125 | } 126 | } 127 | 128 | Ok(parts.join("\n")) 129 | } 130 | 131 | pub(crate) async fn test_connection(&self) -> anyhow::Result<()> { 132 | log::info!("testing ssh connection to {}:{} ...", self.host, self.port); 133 | let result = self.client().await?.execute("echo robopages").await?; 134 | if result.exit_status != 0 { 135 | return Err(anyhow::anyhow!("failed to execute command: {:?}", result)); 136 | } else if result.stdout != "robopages\n" { 137 | return Err(anyhow::anyhow!("unexpected output: {:?}", result)); 138 | } 139 | 140 | Ok(()) 141 | } 142 | 143 | pub(crate) async fn app_in_path(&self, app: &str) -> anyhow::Result<bool> { 144 | let result = self 145 | .client() 146 | .await? 
147 | .execute(&format!("which {}", app)) 148 | .await?; 149 | 150 | Ok(result.exit_status == 0) 151 | } 152 | } 153 | 154 | impl std::fmt::Display for SSHConnection { 155 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 156 | write!(f, "{}@{}:{}", self.user, self.host, self.port) 157 | } 158 | } 159 | 160 | #[cfg(test)] 161 | mod tests { 162 | use super::*; 163 | 164 | #[test] 165 | fn test_from_str_host_only() { 166 | let conn = SSHConnection::from_str("example.com", "/dev/null", None).unwrap(); 167 | assert_eq!(conn.host, "example.com"); 168 | assert_eq!(conn.port, 22); 169 | assert_eq!( 170 | conn.user, 171 | std::env::var("USER").unwrap_or_else(|_| "root".to_string()) 172 | ); 173 | } 174 | 175 | #[test] 176 | fn test_from_str_host_and_port() { 177 | let conn = SSHConnection::from_str("example.com:2222", "/dev/null", None).unwrap(); 178 | assert_eq!(conn.host, "example.com"); 179 | assert_eq!(conn.port, 2222); 180 | assert_eq!( 181 | conn.user, 182 | std::env::var("USER").unwrap_or_else(|_| "root".to_string()) 183 | ); 184 | } 185 | 186 | #[test] 187 | fn test_from_str_user_and_host() { 188 | let conn = SSHConnection::from_str("testuser@example.com", "/dev/null", None).unwrap(); 189 | assert_eq!(conn.host, "example.com"); 190 | assert_eq!(conn.port, 22); 191 | assert_eq!(conn.user, "testuser"); 192 | } 193 | 194 | #[test] 195 | fn test_from_str_full() { 196 | let conn = SSHConnection::from_str("testuser@example.com:2222", "/dev/null", None).unwrap(); 197 | assert_eq!(conn.host, "example.com"); 198 | assert_eq!(conn.port, 2222); 199 | assert_eq!(conn.user, "testuser"); 200 | } 201 | 202 | #[test] 203 | fn test_from_str_empty() { 204 | assert!(SSHConnection::from_str("", "/dev/null", None).is_err()); 205 | } 206 | 207 | #[test] 208 | fn test_from_str_invalid_port() { 209 | assert!(SSHConnection::from_str("example.com:invalid", "/dev/null", None).is_err()); 210 | } 211 | 212 | #[test] 213 | fn test_from_str_invalid_format() { 214 | 
assert!(SSHConnection::from_str("user@host@extra", "/dev/null", None).is_err()); 215 | assert!(SSHConnection::from_str("host:port:extra", "/dev/null", None).is_err()); 216 | } 217 | 218 | #[test] 219 | fn test_from_str_nonexistent_key() { 220 | assert!(SSHConnection::from_str("example.com", "/nonexistent/key/path", None).is_err()); 221 | } 222 | } 223 | --------------------------------------------------------------------------------