├── .circleci
│   └── config.yml
├── .gitpod.Dockerfile
├── .gitpod.yml
├── .idea
│   ├── .gitignore
│   ├── autogpt-package.iml
│   ├── modules.xml
│   └── vcs.xml
├── .vscode
│   └── settings.json
├── LICENCE
├── README.md
├── gitpod.png
├── kurtosis.yml
├── main.star
├── plugins.star
├── run.gif
└── src
    └── common.star
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2.1
2 |
3 | orbs:
4 | slack: circleci/slack@4.10.1
5 | kurtosis-docs-checker: kurtosis-tech/docs-checker@0.2.3
6 |
7 | executors:
8 | ubuntu_vm:
9 | machine:
10 | image: ubuntu-2004:202201-02
11 |
12 | parameters:
13 | # To enable/disable the check_latest_version workflow execution, which is triggered by this scheduled pipeline: https://app.circleci.com/settings/project/github/kurtosis-tech/autogpt-package/triggers
14 | should-enable-nightly-plugin-check:
15 | type: boolean
16 | default: false
17 | should-enable-run-starlark-workflow:
18 | type: boolean
19 | default: true
20 |
21 | jobs:
22 | run_starlark:
23 | executor: ubuntu_vm
24 | resource_class: xlarge
25 | steps:
26 |
27 | # Set up Kurtosis
28 | - run: |
29 | echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list
30 | sudo apt update
31 | sudo apt install kurtosis-cli
32 | - checkout
33 |
34 | - run: kurtosis analytics disable
35 |
36 | - run: kurtosis engine restart
37 |
38 | - run: |
39 | kurtosis run . '{"OPENAI_API_KEY": "test", "MEMORY_BACKEND": "local", "ALLOWLISTED_PLUGINS": "AutoGPTTwitter", "__skip_env_vars_validation": "True", "__skip_env_vars_default_values_set": "True"}'
40 | kurtosis run . '{"OPENAI_API_KEY": "test"}'
41 |
42 |
43 | # this emulates https://github.com/Significant-Gravitas/Auto-GPT-Plugins/blob/master/.github/workflows/test-plugin-installation.yml
44 | test_plugins_ci_run:
45 | executor: ubuntu_vm
46 | resource_class: xlarge
47 | steps:
48 |
49 | # Set up Kurtosis
50 | - run: |
51 | echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list
52 | sudo apt update
53 | sudo apt install kurtosis-cli
54 | - checkout
55 |
56 | - run: kurtosis analytics disable
57 |
58 | - run: kurtosis engine restart
59 |
60 | - run: |
61 | kurtosis run . '{"OPENAI_API_KEY": "test", "ALLOWLISTED_PLUGINS": "AutoGPTTwitter", "__plugin_branch_to_use": "master", "__plugin_repo_to_use": "significant-gravitas/Auto-GPT-Plugins"}'
62 |
63 | - run: |
64 | kurtosis run . '{"OPENAI_API_KEY": "test", "ALLOWLISTED_PLUGINS": ["AutoGPTTwitter"], "__plugin_branch_to_use": "master", "__plugin_repo_to_use": "significant-gravitas/Auto-GPT-Plugins"}'
65 |
66 | run_package_with_plugins_nightly:
67 | executor: ubuntu_vm
68 | resource_class: xlarge
69 | steps:
70 |
71 | # Set up Kurtosis
72 | - run: |
73 | echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list
74 | sudo apt update
75 | sudo apt install kurtosis-cli
76 | - checkout
77 |
78 | - run: kurtosis analytics disable
79 |
80 | - run: kurtosis engine restart
81 |
82 | - run: |
83 | kurtosis run . '{"OPENAI_API_KEY": "test", "ALLOWLISTED_PLUGINS": "AutoGPTTwitter", "__plugin_branch_to_use": "master", "__plugin_repo_to_use": "significant-gravitas/Auto-GPT-Plugins"}'
84 |
85 | # this step always runs, even if a previous step fails; steps can be configured this way. More info here: https://circleci.com/docs/configuration-reference#steps
86 | - slack/notify:
87 | channel: engineering
88 | event: fail
89 | # You can preview this template and learn more about templates here: https://github.com/CircleCI-Public/slack-orb/wiki#templates
90 | template: basic_fail_1
91 |
92 | workflows:
93 | build:
94 | when: << pipeline.parameters.should-enable-run-starlark-workflow >>
95 | jobs:
96 | # -- PR check jobs ------------------------------------------
97 | - run_starlark:
98 | filters:
99 | branches:
100 | ignore:
101 | - test_plugins_ci_run:
102 | filters:
103 | branches:
104 | ignore:
105 | run_package_with_plugins_nightly:
106 | when: << pipeline.parameters.should-enable-nightly-plugin-check >>
107 | jobs:
108 | - run_package_with_plugins_nightly:
109 | context:
110 | - slack-secrets
111 |
--------------------------------------------------------------------------------
/.gitpod.Dockerfile:
--------------------------------------------------------------------------------
1 | # this project only needs to run Starlark, so the lightweight
2 | # workspace-base image is enough; switch to gitpod/workspace-full
3 | # if you also need Go and TS toolchains
4 | FROM gitpod/workspace-base
5 |
--------------------------------------------------------------------------------
/.gitpod.yml:
--------------------------------------------------------------------------------
1 | image:
2 | file: .gitpod.Dockerfile
3 |
4 | tasks:
5 | - name: Setup Kurtosis
6 | command: |
7 | echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list
8 | sudo apt update
9 | sudo apt install kurtosis-cli
10 | kurtosis run github.com/fake-package/fake-package-purely-for-analytics || kurtosis clean -a
11 | code README.md
12 | source <(kurtosis completion bash)
13 | echo "source <(kurtosis completion bash)" >> ~/.bashrc
14 | clear
15 |
16 | ports:
17 | - name: Kurtosis Engine Grpc Listen
18 | port: 9710
19 | onOpen: ignore
20 | - name: Kurtosis Engine Grpc Proxy Listen
21 | port: 9711
22 | onOpen: ignore
23 |
24 |
25 | vscode:
26 | extensions:
27 | - Kurtosis.kurtosis-extension
28 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | # Editor-based HTTP Client requests
5 | /httpRequests/
6 | # Datasource local storage ignored files
7 | /dataSources/
8 | /dataSources.local.xml
9 |
--------------------------------------------------------------------------------
/.idea/autogpt-package.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "files.associations": {
3 | "*.star": "starlark"
4 | },
5 | "workbench.editorAssociations": {
6 | "*.md": "vscode.markdown.preview.editor"
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/LICENCE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Kurtosis Tech
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Auto-GPT Package
2 |
3 | 
4 |
5 | "It's like AutoGPT got a `brew install`", made possible by [Kurtosis](https://www.kurtosis.com/).
6 |
7 | **NOTE**: This package now runs AutoGPT 0.4.0, which drops support for Milvus, Weaviate, and Pinecone. You can still run against 0.3.1 with `kurtosis run github.com/kurtosis-tech/autogpt-package@0.3.1` and the desired arguments.
8 |
9 | ## Run AutoGPT in the browser (no installation needed)
10 |
11 | 1. If you don't have an OpenAI API key, get one [here](https://platform.openai.com/account/api-keys)
12 | 1. Click [this link](https://gitpod.io/?editor=code#https://github.com/kurtosis-tech/autogpt-package) to open a Gitpod, selecting "Continue" to use the default resources
13 | 1. Wait for the Gitpod to boot up and the terminal to finish installing Kurtosis (should take ~30 seconds)
14 | 1. Run the following in the terminal (replacing `YOUR_API_KEY_HERE` with your OpenAI API key)
15 | ```bash
16 | kurtosis run github.com/kurtosis-tech/autogpt-package --enclave autogpt '{"OPENAI_API_KEY": "YOUR_API_KEY_HERE"}'
17 | ```
18 | 1. Once AutoGPT has finished installing and starting, run the following in the terminal to open the AutoGPT prompt:
19 | ```bash
20 | kurtosis service shell autogpt autogpt --exec "python -m autogpt"
21 | ```
22 | 1. Use AutoGPT as you please!
23 |
24 | 
25 |
26 | ## Run AutoGPT on your machine
27 |
28 | 1. If you don't have an OpenAI API key, get one [here](https://platform.openai.com/account/api-keys)
29 | 1. Install Kurtosis using [these instructions](https://docs.kurtosis.com/install)
30 | 1. Run the following in your terminal (replacing `YOUR_API_KEY_HERE` with your OpenAI API key)
31 | ```bash
32 | kurtosis run github.com/kurtosis-tech/autogpt-package --enclave autogpt '{"OPENAI_API_KEY": "YOUR_API_KEY_HERE"}'
33 | ```
34 | 1. Once AutoGPT has finished installing and starting, run the following in your terminal to open the AutoGPT prompt:
35 | ```bash
36 | kurtosis service shell autogpt autogpt
37 | ```
38 | and then within the prompt:
39 | ```bash
40 | > python -m autogpt
41 | ```
42 | 1. Use AutoGPT as you please! To destroy the AutoGPT instance, run:
43 | ```bash
44 | kurtosis enclave rm -f autogpt
45 | ```
46 |
47 | ## Configuring AutoGPT (including memory backend)
48 |
49 | To pass any of the AutoGPT configuration values listed [here](https://github.com/Significant-Gravitas/Auto-GPT/blob/master/.env.template), pass the argument as a property of the JSON object you're passing to Kurtosis just like you passed in `OPENAI_API_KEY`.
50 |
51 | For example, this is how you'd pass the `RESTRICT_TO_WORKSPACE` flag:
52 |
53 | ```bash
54 | kurtosis run github.com/kurtosis-tech/autogpt-package --enclave autogpt '{"OPENAI_API_KEY": "YOUR_API_KEY_HERE", "RESTRICT_TO_WORKSPACE": "False"}'
55 | ```
56 |
57 | **NOTE**: This package spins up AutoGPT using the `local` backend by default. Other backends are available by setting the `MEMORY_BACKEND` parameter in the JSON object you pass in when you run the `kurtosis run` command above.
58 |
59 | For example, to set the `redis` memory backend:
60 |
61 | ```bash
62 | kurtosis run github.com/kurtosis-tech/autogpt-package --enclave autogpt '{"OPENAI_API_KEY": "YOUR_API_KEY_HERE", "MEMORY_BACKEND": "redis"}'
63 | ```
64 |
65 | **NOTE**: Redis isn't currently working with 0.4.0.
66 |
67 | To run with an image other than the one hardcoded in `main.star`, use:
68 | ```bash
69 | kurtosis run github.com/kurtosis-tech/autogpt-package --enclave autogpt '{"OPENAI_API_KEY": "YOUR_API_KEY_HERE", "AUTOGPT_IMAGE": "significantgravitas/auto-gpt:v0.4.0"}'
70 | ```
71 |
72 | ## Using AutoGPT plugins
73 |
74 | Kurtosis supports the `ALLOWLISTED_PLUGINS` configuration flag that AutoGPT ships with. For example, to run the `AutoGPTTwitter` plugin, do the following:
75 |
76 | ```bash
77 | kurtosis run github.com/kurtosis-tech/autogpt-package --enclave autogpt '{"OPENAI_API_KEY": "YOUR_API_KEY_HERE", "ALLOWLISTED_PLUGINS": "AutoGPTTwitter"}'
78 | ```
79 |
80 | To get multiple plugins running at the same time, separate them with commas (no spaces), like so:
81 |
82 | ```bash
83 | kurtosis run github.com/kurtosis-tech/autogpt-package --enclave autogpt '{"OPENAI_API_KEY": "YOUR_API_KEY_HERE", "ALLOWLISTED_PLUGINS": "AutoGPTTwitter,AutoGPTEmailPlugin"}'
84 | ```
85 |
86 | Under the hood, Kurtosis will download and install the plugin packages for you, as sketched below.
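
Concretely, for each allow-listed plugin the package downloads the plugin repository's zip archive into the plugins directory of the `autogpt` service and then runs AutoGPT's plugin dependency installer. The following is a simplified Starlark sketch of what `download_plugins` and `install_plugins` in `main.star` do (the URL shown is the one used for `AutoGPTTwitter`; other plugins follow the same pattern):

```python
# download the plugin archive into the plugins directory inside the container
download_url = "https://github.com/Significant-Gravitas/Auto-GPT-Plugins/archive/refs/heads/master.zip"
plan.exec(
    service_name = "autogpt",
    recipe = ExecRecipe(
        command = ["/bin/sh", "-c", "wget -O ./plugins/Auto-GPT-Plugins.zip " + download_url],
    ),
)

# let AutoGPT install the plugin's Python dependencies
plan.exec(
    service_name = "autogpt",
    recipe = ExecRecipe(
        command = ["/bin/sh", "-c", "python scripts/install_plugin_deps.py"],
    ),
)
```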
87 |
88 | As of now the following plugins are supported:
89 |
90 | ### First Party
91 | - [AutoGPTTwitter](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
92 | - [AutoGPTEmailPlugin](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
93 | - [AutoGPTSceneXPlugin](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
94 | - [AutoGPTBingSearch](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
95 | - [AutoGPTNewsSearch](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
96 | - [AutoGPTWikipediaSearch](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
97 | - [AutoGPTApiTools](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
98 | - [AutoGPTRandomValues](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
99 | - [AutoGPTSpacePlugin](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
100 | - [AutoGPTBaiduSearch](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
101 | - [AutoGPTBluesky](https://github.com/Significant-Gravitas/Auto-GPT-Plugins)
102 | 
103 | ### Third Party
104 | - [AutoGPTAlpacaTraderPlugin](https://github.com/danikhan632/Auto-GPT-AlpacaTrader-Plugin)
105 | - [AutoGPTUserInput](https://github.com/HFrovinJensen/Auto-GPT-User-Input-Plugin)
106 | - [BingAI](https://github.com/gravelBridge/AutoGPT-BingAI)
107 | - [AutoGPTCryptoPlugin](https://github.com/isaiahbjork/Auto-GPT-Crypto-Plugin)
108 | - [AutoGPTDiscord](https://github.com/gravelBridge/AutoGPT-Discord)
109 | - [AutoGPTDollyPlugin](https://github.com/pr-0f3t/Auto-GPT-Dolly-Plugin)
110 | - [AutoGPTGoogleAnalyticsPlugin](https://github.com/isaiahbjork/Auto-GPT-Google-Analytics-Plugin)
111 | - [AutoGPT_IFTTT](https://github.com/AntonioCiolino/AutoGPT-IFTTT)
112 | - [AutoGPT_Zapier](https://github.com/AntonioCiolino/AutoGPT-Zapier)
113 | - [AutoGPT_YouTube](https://github.com/jpetzke/AutoGPT-YouTube)
114 | - [AutoGPTPMPlugin](https://github.com/minfenglu/AutoGPT-PM-Plugin)
115 | - [AutoGPTWolframAlpha](https://github.com/gravelBridge/AutoGPT-WolframAlpha)
116 | - [AutoGPTTodoistPlugin](https://github.com/danikhan632/Auto-GPT-Todoist-Plugin)
117 | - [AutoGPTMessagesPlugin](https://github.com/danikhan632/Auto-GPT-Messages-Plugin)
118 | - [AutoGPTWebInteraction](https://github.com/gravelBridge/AutoGPT-Web-Interaction)
119 | - [AutoGPTNotion](https://github.com/doutv/Auto-GPT-Notion)
120 | - [SystemInformationPlugin](https://github.com/hdkiller/Auto-GPT-SystemInfo)
121 |
122 | To add support for more plugins, simply create an issue or create a PR adding an entry to [`plugins.star`](https://github.com/kurtosis-tech/autogpt-package/blob/main/plugins.star).
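
For reference, each supported plugin is an entry in the `plugins_map` dictionary in `plugins.star`, describing where the plugin lives and which environment variables it needs. Below is a minimal sketch of such an entry for a hypothetical `AutoGPTExamplePlugin` (the repository and env var names are placeholders, not a real plugin):

```python
plugins_map = {
    # ... existing entries ...
    # hypothetical entry, shown only to illustrate the expected shape
    "AutoGPTExamplePlugin": {
        "repository": "example-author/Auto-GPT-Example-Plugin",  # GitHub "owner/repo" hosting the plugin
        "branch": MAIN_BRANCH,                                    # branch whose zip archive gets downloaded
        REQUIRED_ENV_VARS: ["EXAMPLE_API_KEY"],                   # validated before AutoGPT starts
        ENV_VARS_DEFAULT_VALUES: {"EXAMPLE_MODE": "basic"},       # optional defaults merged into the env
    },
}
```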
123 |
124 | ## Run without OpenAI
125 |
126 | We understand OpenAI can be expensive, and some people may want to run this against their own models. AutoGPT-Package supports running AutoGPT against a `GPT4All` model served via `LocalAI`. To use a local model, run:
127 |
128 | ```bash
129 | kurtosis run github.com/kurtosis-tech/autogpt-package '{"GPT_4ALL": true}'
130 | ```
131 |
132 | This uses the `https://gpt4all.io/models/ggml-gpt4all-j.bin` model by default.
133 |
134 | To use a different model, pass the `GPT_4ALL_CUSTOM_MODEL_URL` parameter, like so:
135 |
136 |
137 | ```bash
138 | kurtosis run github.com/kurtosis-tech/autogpt-package '{"GPT_4ALL": true, "MODEL_URL": "https://gpt4all.io/models/ggml-gpt4all-l13b-snoozy.bin"}'
139 | ```
140 |
141 | ## Development
142 |
143 | To develop on this package, clone this repo and run the following:
144 |
145 | ```bash
146 | kurtosis run . --enclave autogpt '{"OPENAI_API_KEY": "YOUR_API_KEY_HERE"}'
147 | ```
148 |
149 | Note the `.` - this tells Kurtosis to use the version of the package on your local machine (rather than the version on GitHub).
150 |
151 | Kurtosis also has [an extension available on the VSCode marketplace](https://marketplace.visualstudio.com/items?itemName=Kurtosis.kurtosis-extension) that provides syntax highlighting and autocompletion for the Starlark that this package is composed of.
152 |
153 | ## Feedback or Questions?
154 |
155 | Let us know in our [Discord](https://discord.gg/eBWFjGtm) or on [Twitter @KurtosisTech](https://twitter.com/KurtosisTech)!
156 |
157 | Feel free to create an issue on GitHub if you have any bugs or feature requests.
158 |
--------------------------------------------------------------------------------
/gitpod.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kurtosis-tech/autogpt-package/8ab3617f0a006b0e8a4527bf6edd97d28e9d15c2/gitpod.png
--------------------------------------------------------------------------------
/kurtosis.yml:
--------------------------------------------------------------------------------
1 | name: "github.com/kurtosis-tech/autogpt-package"
--------------------------------------------------------------------------------
/main.star:
--------------------------------------------------------------------------------
1 | redis_module = import_module("github.com/kurtosis-tech/redis-package/main.star")
2 | plugins = import_module("./plugins.star")
3 | common = import_module("./src/common.star")
4 |
5 | AUTOGPT_IMAGE = "significantgravitas/auto-gpt:v0.4.2"
6 | AUTOGPT_IMAGE_ARG = "AUTOGPT_IMAGE"
7 | REDIS_IMAGE = "redis/redis-stack-server:latest"
8 |
9 | AUTOGPT_SERVICE_NAME = "autogpt"
10 |
11 | OPENAI_API_KEY_ARG = "OPENAI_API_KEY"
12 |
13 | WEAVIATE_PORT = 8080
14 | WEAVIATE_PORT_ID = "http"
15 | WEAVIATE_PORT_PROTOCOL = WEAVIATE_PORT_ID
16 |
17 | SKIP_ENV_VARS_VALIDATION = "__skip_env_vars_validation"
18 | SKIP_ENV_VARS_DEFAULT_VALUES_SET = "__skip_env_vars_default_values_set"
19 | ARGS_TO_SKIP_FOR_ENV_VARS = ["__plugin_branch_to_use", "__plugin_repo_to_use", SKIP_ENV_VARS_VALIDATION, SKIP_ENV_VARS_DEFAULT_VALUES_SET]
20 |
21 | DEFAULT_PLUGINS_DIRNAME = "plugins"
22 | # Chrome seems to be having some issues starting up in Docker
23 | # We set USE_WEB_BROWSER=DEFAULT_WEB_BROWSER unless the user specifies something else
24 | # TODO fix this after https://github.com/Significant-Gravitas/Auto-GPT/issues/3779 is fixed
25 | DEFAULT_WEB_BROWSER = "firefox"
26 |
27 | ALLOW_LISTED_PLUGINS_ENV_VAR_KEY = 'ALLOWLISTED_PLUGINS'
28 |
29 | # Replace OpenAI with GPT4All
30 | GPT4_ALL_ARG = "GPT_4ALL"
31 | MODEL_ARG = "GPT_4ALL_CUSTOM_MODEL_URL"
32 | LOCAL_AI_IMAGE = "quay.io/go-skynet/local-ai:latest"
33 | LOCAL_AI_SERVICE = "local-ai"
34 | DEFAULT_MODEL_URL = "https://gpt4all.io/models/ggml-gpt4all-j.bin"
35 |
36 | def run(plan, args):
37 |
38 | is_gpt4all = args.get(GPT4_ALL_ARG, False)
39 | if is_gpt4all:
40 | local_ai_service = plan.add_service(
41 | name = LOCAL_AI_SERVICE,
42 | config = ServiceConfig(
43 | image = LOCAL_AI_IMAGE,
44 | ports = {
45 | "http": PortSpec(number = 8080, transport_protocol="TCP", wait=None)
46 | },
47 | )
48 | )
49 | plan.print("Downloading the model; this will take a while")
50 | model_url = args.get(MODEL_ARG, DEFAULT_MODEL_URL)
51 | model_name = model_url.split("/")[-1]
52 | # AutoGPT expects the model to be named "gpt-3.5-turbo", so override the name derived from the URL
53 | model_name = "gpt-3.5-turbo"
54 | wget_str = " ".join(["wget", model_url, "-O", "models/{0}".format(model_name)])
55 | plan.exec(
56 | service_name=LOCAL_AI_SERVICE,
57 | recipe = ExecRecipe(
58 | command = ["/bin/sh", "-c", "mkdir models/ && " + wget_str + " > /dev/null 2>&1"]
59 | )
60 | )
61 | plan.wait(
62 | service_name=LOCAL_AI_SERVICE,
63 | recipe = GetHttpRequestRecipe(
64 | port_id="http",
65 | endpoint="/v1/models",
66 | extract={
67 | "model-id": ".data[0].id",
68 | }
69 | ),
70 | field = "extract.model-id",
71 | assertion = "==",
72 | target_value=model_name,
73 | timeout="5m"
74 | )
75 | if OPENAI_API_KEY_ARG not in args:
76 | args[OPENAI_API_KEY_ARG] = "sk---anystringhere"
77 | args["OPENAI_API_BASE_URL"] = "http://{}:8080/v1".format(local_ai_service.ip_address)
78 | args["SMART_LLM_MODEL"] = model_name
79 |
80 |
81 | if OPENAI_API_KEY_ARG not in args:
82 | fail("{0} is a required argument that needs to be passed to this script".format(OPENAI_API_KEY_ARG))
83 |
84 | env_vars = {}
85 |
86 | # these args exist purely for CI testing of plugins:
87 | # they replace the default repository and branch in every plugin download URL
88 | # with the repository and branch under test
89 | plugin_branch_to_use = None
90 | plugin_repo_to_use = None
91 | if "__plugin_branch_to_use" in args:
92 | plugin_branch_to_use = args["__plugin_branch_to_use"]
93 | if "__plugin_repo_to_use" in args:
94 | plugin_repo_to_use = args["__plugin_repo_to_use"]
95 |
96 | for env_var_key, env_var_value in args.items():
97 | if env_var_key in ARGS_TO_SKIP_FOR_ENV_VARS:
98 | continue
99 | if env_var_key == ALLOW_LISTED_PLUGINS_ENV_VAR_KEY and type(env_var_value) == "list":
100 | # env var values must be strings, so if the user passed a list,
101 | # convert it into a comma-separated string which can
102 | # be parsed later
103 | env_vars[env_var_key] = (",").join(env_var_value)
104 | else:
105 | env_vars[env_var_key] = str(env_var_value)
106 |
107 | plugins_dir = env_vars.get("PLUGINS_DIR", DEFAULT_PLUGINS_DIRNAME)
108 |
109 |
110 | if ALLOW_LISTED_PLUGINS_ENV_VAR_KEY in env_vars:
111 | plugins_names = env_vars[ALLOW_LISTED_PLUGINS_ENV_VAR_KEY].split(',')
112 |
113 | # validate plugins names
114 | plugins.validatePluginNames(plugins_names)
115 |
116 | # if it's running with an old CI configuration (the AutoGPT CI config from before validations were added) we need to know so we don't introduce a breaking change
117 | isRunningInOldCIConfig = plugin_branch_to_use != None and plugin_repo_to_use != None
118 |
119 | # validate plugins
120 | # skip validation if it's explicitly requested in the arguments or if it's running with an old CI config
121 | skip_env_vars_validation = SKIP_ENV_VARS_VALIDATION in args
122 |
123 | if not isRunningInOldCIConfig and not skip_env_vars_validation:
124 | are_all_required_env_vars_set, missing_required_env_vars = plugins.areAllRequiredEnvVarsSet(env_vars, plugins_names)
125 | if not are_all_required_env_vars_set:
126 | fail("Error while validating the required env var for plugins. The missing required env vars are '{0}'".format(missing_required_env_vars))
127 |
128 | # set plugins default env vars values
129 | # skip setting plugin default env vars if it's explicitly requested in the arguments or if it's running with an old CI config
130 | skip_env_vars_default_values_set = SKIP_ENV_VARS_DEFAULT_VALUES_SET in args
131 |
132 | if not isRunningInOldCIConfig and not skip_env_vars_default_values_set:
133 | default_plugin_env_vars_values = plugins.getPluginsEnvVarsDefaultValues(plugins_names, env_vars)
134 | env_vars.update(default_plugin_env_vars_values)
135 |
136 | if "USE_WEB_BROWSER" not in env_vars:
137 | env_vars["USE_WEB_BROWSER"] = DEFAULT_WEB_BROWSER
138 |
139 | if "MEMORY_BACKEND" in env_vars and env_vars["MEMORY_BACKEND"] == "redis":
140 | env_vars["MEMORY_BACKEND"] = "redis"
141 | plan.print("Using the '{0}' memory backend".format(env_vars["MEMORY_BACKEND"]))
142 | if "REDIS_HOST" in env_vars and "REDIS_PORT" in env_vars:
143 | plan.print("As REDIS_HOST & REDIS_PORT are provided we will just use the remote Redis instance")
144 | else:
145 | plan.print("Setting up Redis")
146 | redis_server = redis_module.run(plan, {'redis-image': REDIS_IMAGE})
147 | # redis has to run inside the enclave so we set it up for them and change the vars
148 | env_vars["REDIS_HOST"] = redis_server["hostname"]
149 | env_vars["REDIS_PORT"] = str(redis_server["client-port"])
150 | env_vars["REDIS_PASSWORD"] = ""
151 | elif env_vars.get("MEMORY_BACKEND", "local") == "local":
152 | plan.print("Using the local memory backend")
153 | else:
154 | plan.print("Memory backend needs to be one of redis, local. We default to local if nothing is specified. Got '{0}' which isn't a valid value".format(env_vars["MEMORY_BACKEND"]))
155 |
156 | plan.print("Starting AutoGpt with environment variables set to\n{0}".format(env_vars))
157 |
158 | autogpt_image = args.get(AUTOGPT_IMAGE_ARG, AUTOGPT_IMAGE)
159 |
160 | plan.add_service(
161 | name = AUTOGPT_SERVICE_NAME,
162 | config = ServiceConfig(
163 | image = autogpt_image,
164 | entrypoint = ["sleep", "9999999"],
165 | env_vars = env_vars,
166 | )
167 | )
168 |
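# flatten env_vars into KEY=VALUE lines and write them to /app/.env, which AutoGPT reads on startup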
169 | init_env_file_command = "echo '{0}' > /app/.env".format("\n".join(["{0}={1}".format(k, v) for (k, v) in env_vars.items()]))
170 | plan.exec(
171 | service_name = "autogpt",
172 | recipe = ExecRecipe(
173 | command = ["/bin/sh", "-c", init_env_file_command]
174 | )
175 | )
176 |
177 | if ALLOW_LISTED_PLUGINS_ENV_VAR_KEY in env_vars:
178 | plan.exec(
179 | service_name = AUTOGPT_SERVICE_NAME,
180 | recipe = ExecRecipe(
181 | command = ["mkdir", "-p", "/app/plugins"]
182 | )
183 | )
184 |
185 | plugins_to_download = list()
186 | plugins_already_in_download_list = list()
187 |
188 | for plugin_name in plugins_names:
189 | if plugin_name in plugins.plugins_map:
190 | plugin = plugins.plugins_map[plugin_name]
191 | if plugin_name in plugins_already_in_download_list:
192 | continue
193 | plugins_to_download.append(plugin)
194 | plugins_already_in_download_list.append(plugin_name)
195 | else:
196 | fail("Invalid plugin name {0}. The supported plugins are: {1}. You can add support for a new plugin by creating an issue or PR at {2}".format(plugin_name, ", ".join(plugins.plugins_map.keys()), common.KURTOSIS_AUTOGPT_PACKAGE_URL))
197 |
198 | if plugins_to_download:
199 | download_plugins(plan, plugins_dir, plugins_to_download, plugin_branch_to_use, plugin_repo_to_use)
200 | install_plugins(plan)
201 |
202 |
203 | def download_plugins(plan, plugins_dir, plugins_to_download, plugin_branch_to_use=None, plugin_repo_to_use = None):
204 | for plugin in plugins_to_download:
205 | url = plugins.get_plugin_url(plugin, plugin_branch_to_use, plugin_repo_to_use)
206 | plugin_filename = plugins.get_filename(plugin)
207 | download_and_run_command = "wget -O ./{0}/{1} {2}".format(plugins_dir, plugin_filename, url)
208 | plan.exec(
209 | service_name = AUTOGPT_SERVICE_NAME,
210 | recipe = ExecRecipe(
211 | command = ["/bin/sh", "-c", download_and_run_command],
212 | )
213 | )
214 |
215 | def install_plugins(plan):
216 | plan.exec(
217 | service_name = AUTOGPT_SERVICE_NAME,
218 | recipe = ExecRecipe(
219 | command = ["/bin/sh", "-c", "python scripts/install_plugin_deps.py"],
220 | )
221 | )
--------------------------------------------------------------------------------
/plugins.star:
--------------------------------------------------------------------------------
1 | common = import_module("./src/common.star")
2 |
3 | MAIN_BRANCH = "main"
4 | MASTER_BRANCH = "master"
5 | STDLIB_PLUGIN_REPO = "Significant-Gravitas/Auto-GPT-Plugins"
6 | ZIP_EXTENSION = ".zip"
7 | REQUIRED_ENV_VARS = "required_env_vars"
8 | ENV_VARS_DEFAULT_VALUES = "env_vars_default_values"
9 |
10 |
11 | plugins_map = {
12 | # begin standard plugins
13 | "AutoGPTTwitter": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["TW_CONSUMER_KEY", "TW_CONSUMER_SECRET", "TW_ACCESS_TOKEN", "TW_ACCESS_TOKEN_SECRET", "TW_CLIENT_ID", "TW_CLIENT_ID_SECRET"]},
14 | "AutoGPTEmailPlugin": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["EMAIL_ADDRESS", "EMAIL_PASSWORD"]},
15 | "AutoGPTSceneXPlugin": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["SCENEX_API_KEY"]},
16 | "AutoGPTBingSearch": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["BING_API_KEY"], ENV_VARS_DEFAULT_VALUES: {"SEARCH_ENGINE": "bing"}},
17 | "AutoGPTNewsSearch": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["NEWSAPI_API_KEY"]},
18 | "PlannerPlugin": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH},
19 | "AutoGPTWikipediaSearch": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH},
20 | "AutoGPTApiTools": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH},
21 | "AutoGPTRandomValues": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH},
22 | "AutoGPTSpacePlugin": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH},
23 | "AutoGPTBaiduSearch": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["BAIDU_COOKIE"], ENV_VARS_DEFAULT_VALUES: {"SEARCH_ENGINE": "baidu"}},
24 | "AutoGPTBluesky": {"repository": STDLIB_PLUGIN_REPO, "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["BLUESKY_USERNAME", "BLUESKY_APP_PASSWORD"]},
25 | # end of standard plugins
26 | "AutoGPTAlpacaTraderPlugin": {"repository": "danikhan632/Auto-GPT-AlpacaTrader-Plugin", "branch": MASTER_BRANCH, REQUIRED_ENV_VARS:["APCA_API_KEY_ID", "APCA_API_SECRET_KEY"], ENV_VARS_DEFAULT_VALUES: {"APCA_IS_PAPER": "True"}},
27 | "AutoGPTUserInput": {"repository": "HFrovinJensen/Auto-GPT-User-Input-Plugin", "branch": MASTER_BRANCH},
28 | "BingAI": {"repository": "gravelBridge/AutoGPT-BingAI", "branch": MAIN_BRANCH, REQUIRED_ENV_VARS: ["BINGAI_COOKIES_PATH", "BINGAI_MODE"]},
29 | "AutoGPTCryptoPlugin": {"repository": "isaiahbjork/Auto-GPT-Crypto-Plugin", "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["ETHERSCAN_API_KEY", "POLYSCAN_API_KEY", "ETH_WALLET_ADDRESS", "ETH_WALLET_PRIVATE_KEY", "LUNAR_CRUSH_API_KEY", "TELEGRAM_API_ID", "TELEGRAM_API_HASH", "FCS_API_KEY", "CMC_API_KEY", "EXCHANGES", "EXCHANGE_NAME_SECRET", "EXCHANGE_NAME_API_KEY"]},
30 | "AutoGPTDiscord": {"repository": "gravelBridge/AutoGPT-Discord", "branch": MAIN_BRANCH, REQUIRED_ENV_VARS: ["DISCORD_BOT_TOKEN", "AUTHORIZED_USER_IDS", "BOT_PREFIX", "CHANNEL_ID"], ENV_VARS_DEFAULT_VALUES: {"ASK_FOR_INPUT": "True"}},
31 | "AutoGPTDollyPlugin": {"repository": "pr-0f3t/Auto-GPT-Dolly-Plugin", "branch": MASTER_BRANCH},
32 | "AutoGPTGoogleAnalyticsPlugin": {"repository": "isaiahbjork/Auto-GPT-Google-Analytics-Plugin", "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["GOOGLE_ANALYTICS_VIEW_ID"], ENV_VARS_DEFAULT_VALUES: {"GOOGLE_APPLICATION_CREDENTIALS": "firebase.json"}},
33 | "AutoGPT_IFTTT": {"repository": "AntonioCiolino/AutoGPT-IFTTT", "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["IFTTT_WEBHOOK_TRIGGER_NAME", "IFTTT_KEY"]},
34 | "AutoGPT_YouTube": {"repository": "jpetzke/AutoGPT-YouTube", "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["YOUTUBE_API_KEY"]},
35 | "AutoGPTPMPlugin": {"repository": "minfenglu/AutoGPT-PM-Plugin", "branch": MAIN_BRANCH, REQUIRED_ENV_VARS: ["TRELLO_API_KEY", "TRELLO_API_TOKEN", "TRELLO_CONFIG_FILE"]},
36 | "AutoGPTWolframAlpha": {"repository":"gravelBridge/AutoGPT-WolframAlpha", "branch": MAIN_BRANCH, REQUIRED_ENV_VARS: ["WOLFRAM_ALPHA_APP_ID"]},
37 | "AutoGPTTodoistPlugin": {"repository": "danikhan632/Auto-GPT-Todoist-Plugin", "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["TODOIST_TOKEN"]},
38 | "AutoGPTMessagesPlugin": {"repository": "danikhan632/Auto-GPT-Messages-Plugin", "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["IMESSAGE_PASSWORD_KEY", "IMESSAGE_BASE_URL"]},
39 | "AutoGPTWebInteraction": {"repository": "gravelBridge/AutoGPT-Web-Interaction", "branch": MAIN_BRANCH},
40 | "AutoGPTNotion": {"repository": "doutv/Auto-GPT-Notion", "branch": MASTER_BRANCH, REQUIRED_ENV_VARS: ["NOTION_TOKEN", "NOTION_DATABASE_ID"]},
41 | "SystemInformationPlugin": {"repository": "hdkiller/Auto-GPT-SystemInfo", "branch": MASTER_BRANCH},
42 | "AutoGPT_Zapier": {"repository": "AntonioCiolino/AutoGPT-Zapier", "branch": MAIN_BRANCH, REQUIRED_ENV_VARS: ["ZAPIER_WEBHOOK_ENDPOINT"]},
43 | }
44 |
45 |
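# builds the GitHub archive URL for a plugin's branch, optionally overriding the repo/branch (used by CI), e.g. https://github.com/Significant-Gravitas/Auto-GPT-Plugins/archive/refs/heads/master.zip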
46 | def get_plugin_url(plugin_data, plugin_branch_to_use, plugin_repo_to_use):
47 | repo = plugin_data["repository"]
48 | if plugin_repo_to_use:
49 | repo = plugin_repo_to_use
50 | branch = plugin_data["branch"]
51 | if plugin_branch_to_use:
52 | branch = plugin_branch_to_use
53 | return "https://github.com/{0}/archive/refs/heads/{1}.zip".format(repo, branch)
54 |
55 |
56 | def get_filename(plugin):
57 | author, actual_repo = plugin["repository"].split("/")
58 | return actual_repo + ZIP_EXTENSION
59 |
60 |
61 | def areAllRequiredEnvVarsSet(env_vars, plugins_names):
62 | areAllRequiredEnvVarsSet = True
63 | missing_env_vars = {}
64 | all_required_env_vars = getAllRequiredEnvVarsFor(plugins_names)
65 | for plugin_name, plugin_env_vars in all_required_env_vars.items():
66 | plugin_missing_env_vars = []
67 | for required_env_var in plugin_env_vars:
68 | if required_env_var not in env_vars:
69 | areAllRequiredEnvVarsSet = False
70 | plugin_missing_env_vars.append(required_env_var)
71 | if len(plugin_missing_env_vars) > 0:
72 | missing_env_vars[plugin_name] = plugin_missing_env_vars
73 |
74 | return areAllRequiredEnvVarsSet, missing_env_vars
75 |
76 |
77 | def validatePluginNames(plugins_names):
78 | for plugin_name in plugins_names:
79 | if plugin_name not in plugins_map:
80 | fail("Invalid plugin name {0}. The supported plugins are: {1}. You can add support for a new plugin by creating an issue or PR at {2}".format(plugin_name, ", ".join(plugins_map.keys()), common.KURTOSIS_AUTOGPT_PACKAGE_URL))
81 |
82 |
83 | def getAllRequiredEnvVarsFor(plugins_names):
84 | required_env_vars = {}
85 | for plugin_name in plugins_names:
86 | plugin_data = plugins_map.get(plugin_name)
87 | plugin_env_vars = plugin_data.get(REQUIRED_ENV_VARS)
88 | if plugin_env_vars == None:
89 | continue
90 | required_env_vars[plugin_name] = plugin_env_vars
91 |
92 | return required_env_vars
93 |
94 |
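# returns default env var values declared by the selected plugins, skipping any the user already set and failing if two plugins declare different defaults for the same env var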
95 | def getPluginsEnvVarsDefaultValues(plugins_names, user_env_vars):
96 | env_vars_set = []
97 | env_vars_set_by_plugin = {}
98 | env_vars_default_values = {}
99 | for plugin_name in plugins_names:
100 | plugin_data = plugins_map.get(plugin_name)
101 | plugin_env_vars = plugin_data.get(ENV_VARS_DEFAULT_VALUES)
102 | if plugin_env_vars == None:
103 | continue
104 | plugin_env_vars_names = plugin_env_vars.keys()
105 | for env_var_name in plugin_env_vars_names:
106 | if env_var_name in user_env_vars:
107 | continue
108 | env_var_value = plugin_env_vars.get(env_var_name)
109 | if env_var_name in env_vars_set:
110 | plugin_name_already_set_env_var = env_vars_set_by_plugin[env_var_name]
111 | already_set_env_var_value = env_vars_default_values[env_var_name]
112 | if env_var_value != already_set_env_var_value:
113 | fail("You are trying to use '{0}', '{1}' both of which use the same environment variable '{2}' with different values. We recommend you use just one of '{0}' or '{1}'".format(plugin_name, plugin_name_already_set_env_var, env_var_name))
114 | env_vars_set.append(env_var_name)
115 | env_vars_set_by_plugin[env_var_name] = plugin_name
116 | env_vars_default_values[env_var_name] = env_var_value
117 |
118 | return env_vars_default_values
119 |
--------------------------------------------------------------------------------
/run.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kurtosis-tech/autogpt-package/8ab3617f0a006b0e8a4527bf6edd97d28e9d15c2/run.gif
--------------------------------------------------------------------------------
/src/common.star:
--------------------------------------------------------------------------------
1 | KURTOSIS_AUTOGPT_PACKAGE_URL = "https://github.com/kurtosis-tech/autogpt-package"
2 |
--------------------------------------------------------------------------------