├── stackstorm ├── __init__.py ├── st2.conf ├── console.conf ├── config.py └── handler.py ├── requirements-test.txt ├── lint-configs ├── python │ ├── .flake8 │ └── .pylintrc └── README.md ├── tests └── e2e │ ├── service │ ├── v1 │ │ ├── packs │ │ │ └── test │ │ │ │ ├── tests │ │ │ │ └── test_action_parse_xml.py.json │ │ │ │ ├── pack.yaml │ │ │ │ └── actions │ │ │ │ ├── parse.json │ │ │ │ └── list_vms.json │ │ └── index.json │ ├── container │ │ └── Dockerfile │ └── serverless.yml │ └── commands.js ├── .eslintrc.yml ├── package.json ├── serverless.example.yml ├── .circleci └── config.yml ├── .gitignore ├── lib └── docker.js ├── README.md ├── LICENSE ├── index.js └── index.test.js /stackstorm/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | flake8 2 | pylint 3 | -------------------------------------------------------------------------------- /lint-configs/python/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 100 3 | ignore = E128,E402 4 | exclude=*.egg/* 5 | -------------------------------------------------------------------------------- /tests/e2e/service/v1/packs/test/tests/test_action_parse_xml.py.json: -------------------------------------------------------------------------------- 1 | { 2 | "filename": "test_action_parse_xml.py" 3 | } 4 | -------------------------------------------------------------------------------- /tests/e2e/service/v1/packs/test/pack.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | description: st2 pack to test package management pipeline 3 | keywords: 4 | - some 5 | - search 6 | - terms 7 | version: 0.4.0 8 | author: st2-dev 9 | email: info@stackstorm.com 10 | content: 11 | actions: 12 | count: 2 13 | resources: 14 | - list_vms 15 | - parse 16 | tests: 17 | count: 1 18 | resources: 19 | - test_action_parse_xml.py 20 | -------------------------------------------------------------------------------- /stackstorm/st2.conf: -------------------------------------------------------------------------------- 1 | [system] 2 | debug = True 3 | base_path = ~st2 4 | 5 | [system_user] 6 | user = stanley 7 | ssh_key_file = /home/vagrant/.ssh/stanley_rsa 8 | 9 | [ssh_runner] 10 | remote_dir = /tmp 11 | 12 | [content] 13 | system_packs_base_path = ~st2/packs 14 | system_runners_base_path = ~st2/runners 15 | 16 | [auth] 17 | api_url = None 18 | 19 | [actionrunner] 20 | logging = ~st2/console.conf 21 | stream_output = False 22 | -------------------------------------------------------------------------------- /tests/e2e/service/v1/packs/test/actions/parse.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Parse XML string and return JSON object.", 3 | "enabled": true, 4 | "entry_point": "parse_xml.py", 5 | "name": "parse", 6 | "parameters": { 7 | "data": { 8 | "description": "XML string to parse.", 9 | "required": true, 10 | "type": "string" 11 | } 12 | }, 13 | "runner_type": "python-script" 14 | } 15 | -------------------------------------------------------------------------------- /.eslintrc.yml: -------------------------------------------------------------------------------- 1 | env: 2 | es6: true 3 | node: true 4 | extends: 'eslint:recommended' 5 | 
rules: 6 | indent: 7 | - error 8 | - 2 9 | linebreak-style: 10 | - error 11 | - unix 12 | quotes: 13 | - error 14 | - single 15 | semi: 16 | - error 17 | - always 18 | no-console: 19 | - off 20 | plugins: 21 | - async-await 22 | parserOptions: 23 | ecmaVersion: 8 24 | ecmaFeatures: 25 | experimentalObjectRestSpread: true 26 | -------------------------------------------------------------------------------- /tests/e2e/service/v1/packs/test/actions/list_vms.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "List available VMs.", 3 | "enabled": true, 4 | "entry_point": "list_vms.py", 5 | "name": "list_vms", 6 | "parameters": { 7 | "credentials": { 8 | "description": "Name of the credentials set (as defined in the config) to use.", 9 | "required": true, 10 | "type": "string" 11 | } 12 | }, 13 | "runner_type": "python-script" 14 | } 15 | -------------------------------------------------------------------------------- /stackstorm/console.conf: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root 3 | 4 | [handlers] 5 | keys=consoleHandler 6 | 7 | [formatters] 8 | keys=simpleConsoleFormatter 9 | 10 | [logger_root] 11 | level=INFO 12 | handlers=consoleHandler 13 | 14 | [handler_consoleHandler] 15 | class=StreamHandler 16 | level=DEBUG 17 | formatter=simpleConsoleFormatter 18 | args=(sys.stdout,) 19 | 20 | [formatter_simpleConsoleFormatter] 21 | class=st2common.logging.formatters.ConsoleLogFormatter 22 | format=%(asctime)s %(levelname)s [-] %(message)s 23 | datefmt= -------------------------------------------------------------------------------- /tests/e2e/service/container/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM lambci/lambda:build-python2.7 2 | 3 | ENV XDG_CACHE_HOME /cache 4 | RUN git clone https://github.com/stackstorm/st2 /dist/st2 5 | RUN git clone https://github.com/stackstorm-exchange/stackstorm-test /dist/stackstorm-test 6 | RUN mkdir -p /tmp/pkgs \ 7 | && pip download --dest /tmp/pkgs -r /dist/st2/st2common/requirements.txt \ 8 | -r /dist/st2/contrib/runners/python_runner/requirements.txt \ 9 | -r /dist/stackstorm-test/requirements.txt \ 10 | && rm -R /tmp/pkgs 11 | -------------------------------------------------------------------------------- /tests/e2e/service/serverless.yml: -------------------------------------------------------------------------------- 1 | service: my-service 2 | 3 | provider: 4 | name: aws 5 | runtime: python2.7 # StackStorm runners are based on Python 2 6 | 7 | functions: 8 | get_issue: 9 | stackstorm: # `stackstorm` object replaces `handler`. The rest is the same. 
10 | action: github.get_issue 11 | input: 12 | user: "{{ input.pathParameters.user }}" 13 | repo: "{{ input.pathParameters.repo }}" 14 | issue_id: "{{ input.pathParameters.issue_id }}" 15 | output: 16 | statusCode: 200 17 | body: "{{ output }}" 18 | events: 19 | - http: 20 | method: GET 21 | path: issues/{user}/{repo}/{issue_id} 22 | 23 | custom: 24 | stackstorm: 25 | indexRoot: http://localhost:45032/v1/ 26 | st2common_pkg: /dist/st2/st2common 27 | python_runner_pkg: /dist/st2/contrib/runners/python_runner 28 | buildImage: slstest 29 | runImage: slstest 30 | 31 | plugins: 32 | - ../../../../index.js 33 | -------------------------------------------------------------------------------- /lint-configs/README.md: -------------------------------------------------------------------------------- 1 | # StackStorm Lint Configs 2 | 3 | This repository contains lint configs for different programming languages and 4 | tools (flake8, pylint, etc.) used by different StackStorm repositories. 5 | 6 | Configs are grouped in sub-directories by programming language. 7 | 8 | ## Usage 9 | 10 | To use those configs, add this repository as a git subtree to the repository 11 | where you want to utilize those configs. After that is done, update make 12 | targets (or similar) to correctly pass path to the configs to the tools 13 | in question. 14 | 15 | ```bash 16 | git subtree add --prefix lint-configs https://github.com/StackStorm/lint-configs.git master --squash 17 | ``` 18 | 19 | To use it (example with pylint) 20 | 21 | ```bash 22 | pylint -E --rcfile=./lint-configs/python/.pylintrc 23 | ... 24 | ``` 25 | 26 | And once you want to pull changes / updates from the lint-configs repository: 27 | 28 | ```bash 29 | git subtree pull --prefix lint-configs https://github.com/StackStorm/lint-configs.git master --squash 30 | ``` 31 | -------------------------------------------------------------------------------- /tests/e2e/service/v1/index.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "generated_ts": 1512633599, 4 | "hash": "72c7655b059b387f074ea0a868b54a2f" 5 | }, 6 | "packs": { 7 | "test": { 8 | "author": "st2-dev", 9 | "content": { 10 | "actions": { 11 | "count": 2, 12 | "resources": [ 13 | "list_vms", 14 | "parse" 15 | ] 16 | }, 17 | "tests": { 18 | "count": 1, 19 | "resources": [ 20 | "test_action_parse_xml.py" 21 | ] 22 | } 23 | }, 24 | "description": "st2 pack to test package management pipeline", 25 | "email": "info@stackstorm.com", 26 | "keywords": [ 27 | "some", 28 | "search", 29 | "terms" 30 | ], 31 | "name": "test", 32 | "repo_url": "https://github.com/StackStorm-Exchange/stackstorm-test", 33 | "version": "0.4.0" 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "serverless-plugin-stackstorm", 3 | "version": "1.2.2-0", 4 | "description": "Running StackStorm actions serverlessly", 5 | "main": "index.js", 6 | "scripts": { 7 | "lint": "eslint .", 8 | "test": "mocha \"./{,!(node_modules)/**/}*.test.js\"", 9 | "integration": "mocha \"tests/e2e/*.js\"", 10 | "cover": "nyc --reporter=lcovonly npm test" 11 | }, 12 | "engines": { 13 | "node": "^8.4.0" 14 | }, 15 | "repository": { 16 | "type" : "git", 17 | "url" : "https://github.com/StackStorm/serverless-plugin-stackstorm.git" 18 | }, 19 | "author": "Kirill Enykeev ", 20 | "license": "Apache-2.0", 21 | "dependencies": { 22 | 
"axios": "0.16.2", 23 | "chalk": "2.3.0", 24 | "fs-extra": "4.0.2", 25 | "get-stdin": "5.0.1", 26 | "js-yaml": "3.10.0", 27 | "lodash": "4.17.11", 28 | "nopy": "0.2.3", 29 | "simple-git": "^1.84.0", 30 | "url-join": "2.0.2" 31 | }, 32 | "devDependencies": { 33 | "chai": "4.1.2", 34 | "chai-as-promised": "7.1.1", 35 | "eslint": "4.9.0", 36 | "eslint-plugin-async-await": "0.0.0", 37 | "mocha": "4.0.1", 38 | "nyc": "11.3.0", 39 | "promise-retry": "1.1.1", 40 | "proxyquire": "1.8.0", 41 | "serve": "6.4.1", 42 | "serverless": "1.24.1", 43 | "sinon": "4.0.2", 44 | "sinon-chai": "2.14.0", 45 | "tmp": "0.0.33" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /serverless.example.yml: -------------------------------------------------------------------------------- 1 | service: my_service 2 | 3 | provider: 4 | name: aws 5 | runtime: python2.7 # StackStorm runners are based on Python 2 6 | 7 | functions: 8 | get_issue: 9 | stackstorm: # `stackstorm` object replaces `handler`. The rest is the same. 10 | action: github.get_issue 11 | input: 12 | user: "{{ input.pathParameters.user }}" 13 | repo: "{{ input.pathParameters.repo }}" 14 | issue_id: "{{ input.pathParameters.issue_id }}" 15 | output: 16 | statusCode: 200 17 | body: "{{ output.result.body }}" 18 | config: 19 | # Optional: use token for private repos or actions requiring authorization 20 | # token: ${env:GITHUB_TOKEN} 21 | 22 | environment: 23 | ${file(env.yml):github} 24 | # Uncomment the following line to enable debug mode (log level will be set 25 | # to debug and more debug related information will be logged) 26 | # ST2_DEBUG: "true" 27 | 28 | events: 29 | - http: 30 | method: GET 31 | path: issues/{user}/{repo}/{issue_id} 32 | 33 | 34 | custom: 35 | # Optional settings, to adjust defaults to your liking 36 | stackstorm: 37 | runImage: 'lambci/lambda:python2.7' 38 | buildImage: 'lambci/lambda:build-python2.7' 39 | indexRoot: 'https://index.stackstorm.org/v1/' 40 | st2common_pkg: 'git+https://github.com/stackstorm/st2.git#egg=st2common&subdirectory=st2common' 41 | python_runner_pkg: 'git+https://github.com/StackStorm/st2.git#egg=python_runner&subdirectory=contrib/runners/python_runner' 42 | 43 | plugins: 44 | - serverless-plugin-stackstorm 45 | -------------------------------------------------------------------------------- /lint-configs/python/.pylintrc: -------------------------------------------------------------------------------- 1 | [MESSAGES CONTROL] 2 | # C0111 Missing docstring 3 | # I0011 Warning locally suppressed using disable-msg 4 | # I0012 Warning locally suppressed using disable-msg 5 | # W0704 Except doesn't do anything Used when an except clause does nothing but "pass" and there is no "else" clause 6 | # W0142 Used * or * magic* Used when a function or method is called using *args or **kwargs to dispatch arguments. 7 | # W0212 Access to a protected member %s of a client class 8 | # W0232 Class has no __init__ method Used when a class has no __init__ method, neither its parent classes. 9 | # W0613 Unused argument %r Used when a function or method argument is not used. 10 | # W0702 No exception's type specified Used when an except clause doesn't specify exceptions type to catch. 
11 | # R0201 Method could be a function 12 | # W0614 Unused import XYZ from wildcard import 13 | # R0914 Too many local variables 14 | # R0912 Too many branches 15 | # R0915 Too many statements 16 | # R0913 Too many arguments 17 | # R0904 Too many public methods 18 | # E0211: Method has no argument 19 | # E1128: Assigning to function call which only returns None Used when an assignment is done on a function call but the inferred function returns nothing but None. 20 | # E1129: Context manager ‘%s’ doesn’t implement __enter__ and __exit__. Used when an instance in a with statement doesn’t implement the context manager protocol(__enter__/__exit__). 21 | disable=C0103,C0111,I0011,I0012,W0704,W0142,W0212,W0232,W0613,W0702,R0201,W0614,R0914,R0912,R0915,R0913,R0904,R0801,not-context-manager,assignment-from-none 22 | 23 | [TYPECHECK] 24 | # Note: This modules are manipulated during the runtime so we can't detect all the properties during 25 | # static analysis 26 | ignored-modules=distutils,eventlet.green.subprocess,six,six.moves 27 | 28 | [FORMAT] 29 | max-line-length=100 30 | max-module-lines=1000 31 | indent-string=' ' 32 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.0 2 | 3 | workflows: 4 | version: 2 5 | build: 6 | jobs: 7 | - "javascript" 8 | - "python" 9 | 10 | jobs: 11 | javascript: 12 | machine: true 13 | working_directory: ~/serverless-plugin-stackstorm 14 | steps: 15 | - checkout 16 | - run: 17 | name: Switch to supported version of Node 18 | command: | 19 | export NVM_DIR="/opt/circleci/.nvm" 20 | [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh" 21 | nvm install 8.4.0 && nvm alias default 8.4.0 22 | 23 | # Each step uses the same `$BASH_ENV`, so need to modify it 24 | echo 'export NVM_DIR="/opt/circleci/.nvm"' >> $BASH_ENV 25 | echo "[ -s \"$NVM_DIR/nvm.sh\" ] && . \"$NVM_DIR/nvm.sh\"" >> $BASH_ENV 26 | - restore_cache: 27 | key: dependency-cache-{{ checksum "package.json" }} 28 | - run: 29 | name: Install the package 30 | command: npm install 31 | - save_cache: 32 | key: dependency-cache-{{ checksum "package.json" }} 33 | paths: 34 | - ./node_modules 35 | - run: 36 | name: Lint 37 | command: npm run lint 38 | - run: 39 | name: Run unit tests 40 | command: npm test 41 | - run: 42 | name: Build test container 43 | command: docker build -t slstest tests/e2e/service/container 44 | - run: 45 | name: Run integration tests 46 | command: npm run integration 47 | 48 | python: 49 | working_directory: ~/serverless-plugin-stackstorm 50 | docker: 51 | - image: circleci/python:2.7 52 | steps: 53 | - checkout 54 | - run: 55 | name: Install Python test dependencies 56 | command: | 57 | virtualenv venv 58 | . venv/bin/activate 59 | pip install -r requirements-test.txt 60 | - run: 61 | name: Python Lint (flake8) 62 | command: venv/bin/flake8 --config ./lint-configs/python/.flake8 stackstorm/handler.py 63 | - run: 64 | name: Python Lint (pylint) 65 | command: venv/bin/pylint -E --rcfile=./lint-configs/python/.pylintrc stackstorm/handler.py 66 | -------------------------------------------------------------------------------- /stackstorm/config.py: -------------------------------------------------------------------------------- 1 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 
4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | """ 17 | Configuration options registration and useful routines. 18 | """ 19 | import os 20 | 21 | from oslo_config import cfg 22 | 23 | import st2common.config as common_config 24 | from st2common.constants.system import VERSION_STRING 25 | 26 | CONF = cfg.CONF 27 | 28 | 29 | def parse_args(args=None): 30 | CONF(args=args, version=VERSION_STRING) 31 | 32 | 33 | def register_opts(): 34 | _register_common_opts() 35 | _register_action_runner_opts() 36 | 37 | 38 | def _register_common_opts(): 39 | common_config.register_opts() 40 | 41 | 42 | def _register_action_runner_opts(): 43 | logging_opts = [ 44 | cfg.StrOpt('logging', default='conf/logging.conf', 45 | help='location of the logging.conf file'), 46 | ] 47 | CONF.register_opts(logging_opts, group='actionrunner') 48 | 49 | dispatcher_pool_opts = [ 50 | cfg.IntOpt('workflows_pool_size', default=40, 51 | help='Internal pool size for dispatcher used by workflow actions.'), 52 | cfg.IntOpt('actions_pool_size', default=60, 53 | help='Internal pool size for dispatcher used by regular actions.') 54 | ] 55 | CONF.register_opts(dispatcher_pool_opts, group='actionrunner') 56 | 57 | db_opts = [ 58 | cfg.StrOpt('host', default='127.0.0.1', help='host of db server'), 59 | cfg.IntOpt('port', default=27017, help='port of db server'), 60 | cfg.StrOpt('db_name', default='st2', help='name of database') 61 | ] 62 | CONF.register_opts(db_opts, group='database') 63 | 64 | ssh_runner_opts = [ 65 | cfg.StrOpt('remote_dir', 66 | default='/tmp', 67 | help='Location of the script on the remote filesystem.'), 68 | cfg.BoolOpt('allow_partial_failure', 69 | default=False, 70 | help='How partial success of actions run on multiple nodes ' + 71 | 'should be treated.'), 72 | cfg.BoolOpt('use_ssh_config', default=False, 73 | help='Use the .ssh/config file. 
Useful to override ports etc.'), 74 | cfg.StrOpt('ssh_config_file_path', 75 | default='~/.ssh/config', 76 | help='Path to the ssh config file.') 77 | 78 | ] 79 | CONF.register_opts(ssh_runner_opts, group='ssh_runner') 80 | 81 | cloudslang_opts = [ 82 | cfg.StrOpt('home_dir', default='/opt/cslang', 83 | help='CloudSlang home directory.'), 84 | ] 85 | CONF.register_opts(cloudslang_opts, group='cloudslang') 86 | 87 | 88 | def get_logging_config_path(): 89 | return CONF.actionrunner.logging 90 | 91 | 92 | register_opts() 93 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/node,python,serverless,osx 3 | 4 | ### Node ### 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | 24 | # nyc test coverage 25 | .nyc_output 26 | 27 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 28 | .grunt 29 | 30 | # Bower dependency directory (https://bower.io/) 31 | bower_components 32 | 33 | # node-waf configuration 34 | .lock-wscript 35 | 36 | # Compiled binary addons (http://nodejs.org/api/addons.html) 37 | build/Release 38 | 39 | # Dependency directories 40 | node_modules/ 41 | jspm_packages/ 42 | 43 | # Typescript v1 declaration files 44 | typings/ 45 | 46 | # Optional npm cache directory 47 | .npm 48 | 49 | # Optional eslint cache 50 | .eslintcache 51 | 52 | # Optional REPL history 53 | .node_repl_history 54 | 55 | # Output of 'npm pack' 56 | *.tgz 57 | 58 | # Yarn Integrity file 59 | .yarn-integrity 60 | 61 | # dotenv environment variables file 62 | .env 63 | 64 | 65 | ### OSX ### 66 | *.DS_Store 67 | .AppleDouble 68 | .LSOverride 69 | 70 | # Icon must end with two \r 71 | Icon 72 | 73 | # Thumbnails 74 | ._* 75 | 76 | # Files that might appear in the root of a volume 77 | .DocumentRevisions-V100 78 | .fseventsd 79 | .Spotlight-V100 80 | .TemporaryItems 81 | .Trashes 82 | .VolumeIcon.icns 83 | .com.apple.timemachine.donotpresent 84 | 85 | # Directories potentially created on remote AFP share 86 | .AppleDB 87 | .AppleDesktop 88 | Network Trash Folder 89 | Temporary Items 90 | .apdisk 91 | 92 | ### Python ### 93 | # Byte-compiled / optimized / DLL files 94 | __pycache__/ 95 | *.py[cod] 96 | *$py.class 97 | 98 | # C extensions 99 | *.so 100 | 101 | # Distribution / packaging 102 | .Python 103 | build/ 104 | develop-eggs/ 105 | dist/ 106 | downloads/ 107 | eggs/ 108 | .eggs/ 109 | lib/ 110 | lib64/ 111 | parts/ 112 | sdist/ 113 | var/ 114 | wheels/ 115 | *.egg-info/ 116 | .installed.cfg 117 | *.egg 118 | 119 | # PyInstaller 120 | # Usually these files are written by a python script from a template 121 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
122 | *.manifest 123 | *.spec 124 | 125 | # Installer logs 126 | pip-log.txt 127 | pip-delete-this-directory.txt 128 | 129 | # Unit test / coverage reports 130 | htmlcov/ 131 | .tox/ 132 | .coverage 133 | .coverage.* 134 | .cache 135 | nosetests.xml 136 | coverage.xml 137 | *.cover 138 | .hypothesis/ 139 | 140 | # Translations 141 | *.mo 142 | *.pot 143 | 144 | # Django stuff: 145 | local_settings.py 146 | 147 | # Flask stuff: 148 | instance/ 149 | .webassets-cache 150 | 151 | # Scrapy stuff: 152 | .scrapy 153 | 154 | # Sphinx documentation 155 | docs/_build/ 156 | 157 | # PyBuilder 158 | target/ 159 | 160 | # Jupyter Notebook 161 | .ipynb_checkpoints 162 | 163 | # pyenv 164 | .python-version 165 | 166 | # celery beat schedule file 167 | celerybeat-schedule 168 | 169 | # SageMath parsed files 170 | *.sage.py 171 | 172 | # Environments 173 | .venv 174 | env/ 175 | venv/ 176 | ENV/ 177 | env.bak/ 178 | venv.bak/ 179 | 180 | # Spyder project settings 181 | .spyderproject 182 | .spyproject 183 | 184 | # Rope project settings 185 | .ropeproject 186 | 187 | # mkdocs documentation 188 | /site 189 | 190 | # mypy 191 | .mypy_cache/ 192 | 193 | ### Serverless ### 194 | # Ignore build directory 195 | .serverless 196 | 197 | # End of https://www.gitignore.io/api/node,python,serverless,osx 198 | 199 | !lib/ 200 | -------------------------------------------------------------------------------- /lib/docker.js: -------------------------------------------------------------------------------- 1 | const child_process = require('child_process'); 2 | const EventEmitter = require('events'); 3 | 4 | function factory(args) { 5 | const spawnOptions = { 6 | encoding: 'utf8' 7 | }; 8 | 9 | const emitter = new EventEmitter(); 10 | 11 | const run = child_process.spawn('docker', args, spawnOptions); 12 | 13 | run.stdout.on('data', (data) => { 14 | emitter.emit('stdout', data.toString().replace(/\n$/, '')); 15 | }); 16 | 17 | run.stderr.on('data', (data) => { 18 | emitter.emit('stderr', data.toString().replace(/\n$/, '')); 19 | }); 20 | 21 | return { run, emitter }; 22 | } 23 | 24 | function bufferStreams(run) { 25 | const o = { 26 | stdout: '', 27 | stderr: '' 28 | }; 29 | 30 | run.stdout.on('data', (data) => { 31 | o.stdout += data.toString(); 32 | }); 33 | 34 | run.stderr.on('data', (data) => { 35 | o.stderr += data.toString(); 36 | }); 37 | 38 | return o; 39 | } 40 | 41 | function mixin(promise, emitter) { 42 | promise.on = (...args) => { 43 | emitter.on(...args); 44 | 45 | return promise; 46 | }; 47 | 48 | return promise; 49 | } 50 | 51 | module.exports.pullDockerImage = (dockerImage) => { 52 | const args = ['pull'] 53 | .concat([dockerImage]) 54 | ; 55 | 56 | const { run, emitter } = factory(args); 57 | 58 | const buf = bufferStreams(run); 59 | 60 | const promise = new Promise((resolve, reject) => { 61 | run.on('close', (code) => { 62 | if (code === 0) { 63 | resolve({ ...buf, code }); 64 | } else { 65 | reject(new Error(buf.stderr)); 66 | } 67 | }); 68 | }); 69 | 70 | return mixin(promise, emitter); 71 | }; 72 | 73 | module.exports.startDocker = (dockerImage, volume) => { 74 | const args = ['run'] 75 | .concat(['-d', '--rm', '-v', volume, dockerImage]) 76 | .concat(['tail', '-f', '/dev/null']) 77 | ; 78 | 79 | const { run, emitter } = factory(args); 80 | 81 | const promise = Promise.all([ 82 | new Promise((resolve, reject) => { 83 | run.stdout.on('data', (data) => { 84 | resolve(data.toString().replace(/\n$/, '')); 85 | }); 86 | 87 | run.stderr.on('data', (data) => { 88 | reject(data.toString().replace(/\n$/, 
'')); 89 | }); 90 | }), 91 | new Promise(resolve => run.on('close', resolve)) 92 | ]).then(res => res[0]); 93 | 94 | return mixin(promise, emitter); 95 | }; 96 | 97 | module.exports.runDocker = (dockerImage, volumes, envs, cmd) => { 98 | let args = ['run'] 99 | .concat(['--rm']) 100 | ; 101 | 102 | for (const volume of volumes) { 103 | args = args.concat(['-v', volume]); 104 | } 105 | 106 | for (const env of envs) { 107 | args = args.concat(['-e', env]); 108 | } 109 | 110 | args = args 111 | .concat([dockerImage]) 112 | .concat(cmd); 113 | 114 | const { run, emitter } = factory(args); 115 | 116 | const buf = bufferStreams(run); 117 | 118 | const promise = new Promise((resolve, reject) => { 119 | run.on('close', (code) => { 120 | const res = { ...buf, code }; 121 | 122 | const lines = buf.stdout.split('\n'); 123 | if (lines.length > 1 && lines[lines.length - 2]) { 124 | try { 125 | lines.pop(); // Remove empty element 126 | res.result = JSON.parse(lines.pop()); 127 | lines.push(''); // Put empty element back 128 | res.stdout = lines.join('\n'); 129 | } catch (e) { 130 | // If it didn't work, well, ¯\_(ツ)_/¯ 131 | } 132 | } 133 | 134 | if (code === 0) { 135 | resolve(res); 136 | } else { 137 | reject(res); 138 | } 139 | }); 140 | }); 141 | 142 | return mixin(promise, emitter); 143 | }; 144 | 145 | module.exports.execDocker = (dockerId, command) => { 146 | const args = ['exec', dockerId] 147 | .concat(command) 148 | ; 149 | 150 | const { run, emitter } = factory(args); 151 | 152 | const buf = bufferStreams(run); 153 | 154 | const promise = new Promise((resolve, reject) => { 155 | run.on('close', (code) => { 156 | if (code === 0) { 157 | resolve({ ...buf, code}); 158 | } else { 159 | reject(new Error(buf.stderr)); 160 | } 161 | }); 162 | }); 163 | 164 | return mixin(promise, emitter); 165 | }; 166 | 167 | module.exports.stopDocker = (dockerId) => { 168 | const args = ['stop', dockerId]; 169 | 170 | const { run, emitter } = factory(args); 171 | 172 | const promise = new Promise((resolve, reject) => { 173 | run.stdout.on('data', (data) => { 174 | resolve(data.toString().replace(/\n$/, '')); 175 | }); 176 | 177 | run.stderr.on('data', (data) => { 178 | reject(data.toString().replace(/\n$/, '')); 179 | }); 180 | }); 181 | 182 | return mixin(promise, emitter); 183 | }; 184 | -------------------------------------------------------------------------------- /tests/e2e/commands.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | const path = require('path'); 3 | const chai = require('chai'); 4 | const chaiAsPromised = require('chai-as-promised'); 5 | const sinon = require('sinon'); 6 | const sinonChai = require('sinon-chai'); 7 | const serve = require('serve'); 8 | const tmp = require('tmp'); 9 | const fs = require('fs-extra'); 10 | const axios = require('axios'); 11 | const promiseRetry = require('promise-retry'); 12 | 13 | chai.use(chaiAsPromised); 14 | chai.use(sinonChai); 15 | const expect = chai.expect; 16 | 17 | const Serverless = require('serverless'); 18 | 19 | const containers = new Set(); 20 | 21 | function SLS(command, { servicePath } = {}) { 22 | const opts = { 23 | interactive: false 24 | }; 25 | 26 | if (servicePath) { 27 | opts.servicePath = path.join(__dirname, servicePath); 28 | } 29 | 30 | const sls = new Serverless(opts); 31 | 32 | class CLI extends sls.classes.CLI { 33 | constructor(serverless) { 34 | super(serverless, command); 35 | 36 | this.consoleLog = sinon.stub().callsFake((str) => { 37 | const container_regex = 
/(\u001b\[2m)([0-9a-f]{64})(\u001b\[22m)/; 38 | const match = str.match(container_regex); 39 | if (match) { 40 | containers.add(match[2]); 41 | } 42 | }); 43 | } 44 | } 45 | 46 | sls.classes.CLI = CLI; 47 | 48 | return sls; 49 | } 50 | 51 | function enterWorkspace() { 52 | const tmpdir = tmp.dirSync({ unsafeCleanup: true }); 53 | fs.copySync(path.join(__dirname, './service/serverless.yml'), path.join(tmpdir.name, 'serverless.yml')); 54 | process.chdir(tmpdir.name); 55 | 56 | return tmpdir; 57 | } 58 | 59 | describe('StackStorm Serverless Plugin E2E', () => { 60 | let server, tmpdir, workdir; 61 | 62 | before(() => { 63 | const port = 45032; 64 | workdir = process.cwd(); 65 | server = serve(path.join(__dirname, './service'), { port, clipless: true, silent: true }); 66 | 67 | const request = axios.create({ baseURL: `http://localhost:${port}/` }); 68 | return promiseRetry(retry => request.get('/').catch(retry)); 69 | }); 70 | 71 | beforeEach(() => { 72 | tmpdir = enterWorkspace(); 73 | }); 74 | 75 | describe('sls stackstorm info', () => { 76 | it('should return action info', async () => { 77 | const sls = SLS(['stackstorm', 'info', '--action', 'test.list_vms'], { servicePath: './service' }); 78 | 79 | await sls.init(); 80 | await sls.run(); 81 | 82 | expect(sls.cli.consoleLog).to.be.calledWith([ 83 | '\u001b[33mtest.list_vms\u001b[39m \u001b[2m.................\u001b[22m List available VMs.', 84 | '\u001b[33m\u001b[4mParameters\u001b[24m\u001b[39m', 85 | ' \u001b[33mcredentials [string] (required)\u001b[39m Name of the credentials set (as defined in the config) to use.', 86 | '\u001b[2mThe action does not require config parameters\u001b[22m' 87 | ].join('\n')); 88 | }).timeout(0); 89 | 90 | it('should return pack info', async () => { 91 | const sls = SLS(['stackstorm', 'info', '--pack', 'test'], { servicePath: './service' }); 92 | 93 | await sls.init(); 94 | await sls.run(); 95 | 96 | expect(sls.cli.consoleLog).to.be.calledWith([ 97 | '\u001b[33mtest\u001b[39m \u001b[2m..........................\u001b[22m st2 pack to test package management pipeline', 98 | '\u001b[33m\u001b[4mActions\u001b[24m\u001b[39m', 99 | ' list_vms', 100 | ' parse' 101 | ].join('\n')); 102 | }).timeout(0); 103 | }); 104 | 105 | describe('sls stackstorm install adapter', () => { 106 | it('should copy stackstorm files to working directory', async () => { 107 | const sls = SLS(['stackstorm', 'install', 'adapter']); 108 | 109 | await sls.init(); 110 | await sls.run(); 111 | 112 | expect(fs.readdirSync('.')).to.have.members(['serverless.yml', '~st2']); 113 | expect(fs.readdirSync('~st2')).to.have.members([ 114 | '__init__.py', 115 | 'config.py', 116 | 'console.conf', 117 | 'handler.py', 118 | 'st2.conf' 119 | ]); 120 | }); 121 | }); 122 | 123 | describe('sls stackstorm install deps', () => { 124 | it('should copy stackstorm files to working directory', async () => { 125 | const sls = SLS(['stackstorm', 'install', 'deps', '--noPull']); 126 | 127 | await sls.init(); 128 | await sls.run(); 129 | 130 | expect(fs.readdirSync('~st2/deps')).to.have.members([ 131 | 'bin', 132 | 'include', 133 | 'lib', 134 | 'lib64', 135 | 'share' 136 | ]); 137 | }).timeout(0); 138 | }); 139 | 140 | describe('sls stackstorm install pack', () => { 141 | it('should copy stackstorm files to working directory', async () => { 142 | const sls = SLS(['stackstorm', 'install', 'packs', '--pack', 'test']); 143 | 144 | await sls.init(); 145 | await sls.run(); 146 | 147 | expect(fs.readdirSync('~st2/packs')).to.have.members([ 148 | 'test' 149 | ]); 150 | 
}).timeout(0); 151 | }); 152 | 153 | describe('sls stackstorm install packDeps', () => { 154 | it('should copy stackstorm files to working directory', async () => { 155 | const pack = SLS(['stackstorm', 'install', 'packs', '--pack', 'test']); 156 | 157 | await pack.init(); 158 | await pack.run(); 159 | 160 | const sls = SLS(['stackstorm', 'install', 'packDeps', '--pack', 'test', '--noPull']); 161 | 162 | await sls.init(); 163 | await sls.run(); 164 | 165 | expect(fs.readdirSync('~st2/virtualenvs')).to.have.members([ 166 | 'test' 167 | ]); 168 | }).timeout(0); 169 | }); 170 | 171 | afterEach(() => { 172 | try { 173 | tmpdir.removeCallback(); 174 | } catch (e) { 175 | // YOLO 176 | } 177 | }); 178 | 179 | after(async function () { 180 | this.timeout(0); 181 | 182 | console.log('Cleaning up containers...'); 183 | 184 | tmpdir = enterWorkspace(); 185 | for (let id of containers) { 186 | try { 187 | const pack = SLS(['stackstorm', 'docker', 'stop', '--dockerId', id]); 188 | 189 | await pack.init(); 190 | await pack.run(); 191 | 192 | containers.delete(id); 193 | } catch (e) { 194 | // Do nothing 195 | } 196 | } 197 | tmpdir.removeCallback(); 198 | 199 | if (containers.size) { 200 | console.log('Some containers have not been garbage collected:'); 201 | for (let id of containers) { 202 | console.log(id); 203 | } 204 | } 205 | 206 | process.chdir(workdir); 207 | server.stop(); 208 | }); 209 | }); 210 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Serverless StackStorm Plugin 2 | 3 | [![serverless](http://public.serverless.com/badges/v3.svg)](http://www.serverless.com) 4 | [![npm version](https://badge.fury.io/js/serverless-plugin-stackstorm.svg)](https://badge.fury.io/js/serverless-plugin-stackstorm) 5 | 6 | Run ready to use actions from [StackStorm Exchange](https://exchange.stackstorm.com/) 7 | as AWS Lambda with [serverless framework](http://serverless.com/). Serverless and Stackstormless. 8 | 9 | ## Prerequisite 10 | 11 | - [Serverless framework](https://serverless.com/framework/docs/getting-started/) 12 | - [NodeJS](https://nodejs.org/en/download/) >= 8.4.0 13 | - [Docker](https://docs.docker.com/engine/installation/) - used to build and local-run Lambda on any OS 14 | - Build tools (``build-essentials`` package on Ubuntu) 15 | - SSL dev files (``libssl-dev`` package on Ubuntu) 16 | 17 | ## Getting Started 18 | 19 | Install serverless dependency globally 20 | 21 | ```bash 22 | npm install -g serverless 23 | ``` 24 | 25 | Init with `package.json`: 26 | 27 | ```bash 28 | npm init 29 | ``` 30 | 31 | Install the plugin: 32 | 33 | ```bash 34 | npm i --save-dev serverless-plugin-stackstorm 35 | ``` 36 | 37 | Browse [StackStorm Exchange](https://exchange.stackstorm.com/) 38 | to find the integration pack and an action you'd like to use. 39 | In the example below we use `github.get_issue` from [GitHub integration pack](https://github.com/StackStorm-Exchange/stackstorm-github). 40 | 41 | Configure your service to use the plugin by creating `serverless.yml` file. 42 | 43 | ```yaml 44 | service: my-service 45 | 46 | provider: 47 | name: aws 48 | runtime: python2.7 # StackStorm runners are based on Python 2 49 | 50 | functions: 51 | get_issue: 52 | stackstorm: # `stackstorm` object replaces `handler`. The rest is the same. 
53 |       action: github.get_issue
54 |       config:
55 |         token: ${env:GITHUB_TOKEN}
56 |       input:
57 |         user: "{{ input.pathParameters.user }}"
58 |         repo: "{{ input.pathParameters.repo }}"
59 |         issue_id: "{{ input.pathParameters.issue_id }}"
60 |       output:
61 |         statusCode: 200
62 |         body: "{{ output }}"
63 |     events:
64 |       - http:
65 |           method: GET
66 |           path: issues/{user}/{repo}/{issue_id}
67 | 
68 | plugins:
69 |   - serverless-plugin-stackstorm
70 | ```
71 | 
72 | There are a few new options inside the function definition
73 | (see [serverless.example.yml](./serverless.example.yml) for more options):
74 | - `stackstorm.action` lets you pick the StackStorm action you want to turn into a lambda
75 | - `stackstorm.config` sets config parameters for the action. Config parameters are pack-wide in StackStorm and are commonly used for authentication tokens and the like.
76 | - `stackstorm.input` defines how incoming event parameters should be transformed to match the parameter list the StackStorm action expects
77 | - `stackstorm.output` defines the transformation applied to the action output to form the result of the lambda execution
78 | 
79 | If you are in doubt about the parameters a given StackStorm action expects, check the action info:
80 | 
81 | ```
82 | $ sls stackstorm info --action github.get_issue
83 | github.get_issue .............. Retrieve information about a particular Github issue.
84 | Parameters
85 |   issue_id [string] (required) Issue id
86 |   repo [string] (required) .... Repository name.
87 |   user [string] (required) .... User / organization name.
88 | Config
89 |   base_url [string] (required) The GitHub URL, for GitHub Enterprise please set enterprise_url.
90 |   deployment_environment [string] (required) The environment for this StackStorm server.
91 |   enterprise_url [string] .... GitHub API url (including /api/v3) of your GitHub Enterprise hostname.
92 |   github_type [string] (required) Default to either github or enterprise.
93 |   password [string] .......... GitHub Password
94 |   repository_sensor [object] . Sensor specific settings.
95 |   token [string] (required) ... GitHub oAuth Token
96 |   user [string] .............. GitHub Username
97 | ```
98 | 
99 | Then deploy your function to the cloud and invoke it:
100 | 
101 | ```
102 | sls deploy
103 | 
104 | sls invoke --function get_issue --log \
105 |   --data '{"pathParameters": {"user": "StackStorm", "repo": "st2", "issue_id": "3785"}}'
106 | ```
107 | 
108 | You can also invoke a function locally for testing. It runs in a Docker container to ensure
109 | compatibility with the AWS Lambda environment.
110 | ```
111 | sls stackstorm docker run -f get_issue --verbose --passthrough -d '{"pathParameters": {"user": "StackStorm", "repo": "st2", "issue_id": "3785"}}'
112 | ```
113 | 
114 | Note the options:
115 | 
116 | * `--passthrough`: skips the actual invocation - comes in handy to check that the input maps to the action parameters correctly, without invoking the body of the lambda.
117 | * `--verbose`: shows the transformation routine applied to a particular input and output.
118 | 
119 | Here is an example of verbose output:
120 | ```
121 | Incoming event ->
122 | {
123 |   "issue_id": "222"
124 | }
125 | -> Parameter transformer ->
126 | {
127 |   "repo": "st2",
128 |   "issue_id": "222",
129 |   "user": "StackStorm"
130 | }
131 | -> Action call ->
132 | {
133 |   "result": {
134 |     "url": "https://github.com/StackStorm/st2/pull/222",
135 |     "created_at": "2014-07-14T19:25:46.000000+00:00",
136 |     ...
137 |   },
138 |   "exit_code": 0,
139 |   "stderr": "",
140 |   "stdout": ""
141 | }
142 | -> Output transformer ->
143 | {
144 |   "result": "2014-07-14T19:25:46.000000+00:00"
145 | }
146 | ```
147 | 
148 | ## Commands
149 | 
150 | The plugin also provides a few optional commands. You don't have to use them, as they are all included in `sls package`, but they might still be handy in some situations.
151 | 
152 | - `sls stackstorm` - Build λ with StackStorm
153 | - `sls stackstorm clean` - Clean StackStorm code
154 | - `sls stackstorm docker pull` - Pull λ docker image
155 | - `sls stackstorm docker start` - Start λ docker container
156 | - `sls stackstorm docker stop` - Stop λ docker container
157 | - `sls stackstorm docker exec` - Execute a command in λ docker container
158 | - `sls stackstorm docker run` - Execute a function in λ docker container
159 | - `sls stackstorm install adapter` - Install StackStorm adapter
160 | - `sls stackstorm install deps` - Install StackStorm dependencies
161 | - `sls stackstorm install packs` - Install a pack
162 | - `sls stackstorm install packDeps` - Install dependencies for packs
163 | - `sls stackstorm info` - Print information on the action
164 | 
165 | ## Exchange
166 | 
167 | The available packs can be discovered in StackStorm Exchange (https://exchange.stackstorm.com/). At the moment, the collection consists of 6500+ actions spread across 130 packs. We have yet to try them all, but the ones we did try are marked with the [`serverless`](https://exchange.stackstorm.org/#serverless) tag.
168 | 
169 | ## Contributing to Exchange
170 | 
171 | The StackStorm packs this plugin allows you to run on serverless infrastructure are all part of [StackStorm Exchange](https://github.com/StackStorm-Exchange). We encourage community members to contribute to these packs to enrich the entire ecosystem. The simplest way to help is to try different packs, mark the ones that work with the `serverless` keyword, and report the ones that don't. For now, the plugin only supports StackStorm's Python runner, but Python-runner actions represent more than 90% of Exchange actions.
172 | 
173 | 
174 | 
--------------------------------------------------------------------------------
/stackstorm/handler.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
3 | # contributor license agreements. See the NOTICE file distributed with
4 | # this work for additional information regarding copyright ownership.
5 | # The ASF licenses this file to You under the Apache License, Version 2.0
6 | # (the "License"); you may not use this file except in compliance with
7 | # the License. You may obtain a copy of the License at
8 | #
9 | #     http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
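# Module overview: AWS Lambda adapter for running StackStorm actions.
# At import time this module registers oslo.config options (see config.py),
# loads action metadata and pack config schemas from the bundled `~st2`
# content, and exposes the `stackstorm`, `basic` and `passthrough` handlers
# built on top of `base()`. `base()` parses the incoming event (including
# API Gateway proxy bodies), renders live parameters from `ST2_PARAMETERS`,
# validates `ST2_CONFIG` against the pack's config schema, invokes the
# action via its runner and renders the final result from `ST2_OUTPUT`.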
16 | 17 | # pylint: disable=import-error 18 | import os 19 | import sys 20 | import json 21 | import uuid 22 | import logging 23 | 24 | import six 25 | from oslo_config import cfg 26 | from stevedore.driver import DriverManager 27 | from six.moves.urllib.parse import parse_qsl 28 | 29 | from st2common.bootstrap.actionsregistrar import ActionsRegistrar 30 | from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED 31 | from st2common.constants.pack import CONFIG_SCHEMA_FILE_NAME 32 | from st2common.constants.system import VERSION_STRING 33 | from st2common.content.loader import ContentPackLoader, MetaLoader 34 | from st2common.content import utils as content_utils 35 | from st2common.exceptions import actionrunner 36 | from st2common.exceptions.param import ParamException 37 | from st2common.models.api.action import ActionAPI, RunnerTypeAPI 38 | from st2common.models.api.pack import ConfigSchemaAPI 39 | from st2common.runners.base import ActionRunner 40 | from st2common.util.pack import validate_config_against_schema 41 | from st2common.util import param as param_utils 42 | 43 | import config # noqa 44 | 45 | del sys.argv[1:] 46 | 47 | cfg.CONF(args=('--config-file', '~st2/st2.conf'), version=VERSION_STRING) 48 | 49 | LOG = logging.getLogger(__name__) 50 | 51 | 52 | class PassthroughRunner(ActionRunner): 53 | def __init__(self): 54 | super(PassthroughRunner, self).__init__(runner_id=str(uuid.uuid4())) 55 | 56 | def run(self, action_parameters): 57 | return (LIVEACTION_STATUS_SUCCEEDED, action_parameters, None) 58 | 59 | 60 | def _load_actions(): 61 | actions = {} 62 | action_dirs = ContentPackLoader().get_content(content_utils.get_packs_base_paths(), 'actions') 63 | 64 | for pack in action_dirs: 65 | for action_path in ActionsRegistrar().get_resources_from_pack(action_dirs[pack]): 66 | content = MetaLoader().load(action_path) 67 | ref = pack + "." + content['name'] 68 | 69 | action_api = ActionAPI(pack=pack, **content) 70 | action_api.validate() 71 | # action_validator.validate_action(action_api) 72 | actions[ref] = ActionAPI.to_model(action_api) 73 | 74 | return actions 75 | 76 | 77 | def _load_config_schemas(): 78 | config_schemas = {} 79 | 80 | packs = ContentPackLoader().get_packs(content_utils.get_packs_base_paths()) 81 | 82 | for pack_name, pack_dir in six.iteritems(packs): 83 | config_schema_path = os.path.join(pack_dir, CONFIG_SCHEMA_FILE_NAME) 84 | 85 | if not os.path.isfile(config_schema_path): 86 | # Note: Config schema is optional 87 | continue 88 | 89 | values = MetaLoader().load(config_schema_path) 90 | 91 | if not values: 92 | raise ValueError('Config schema "%s" is empty and invalid.' 
% (config_schema_path))
93 | 
94 |         content = {}
95 |         content['pack'] = pack_name
96 |         content['attributes'] = values
97 | 
98 |         config_schema_api = ConfigSchemaAPI(**content)
99 |         config_schema_api = config_schema_api.validate()
100 |         config_schemas[pack_name] = values
101 | 
102 |     return config_schemas
103 | 
104 | 
105 | ACTIONS = _load_actions()
106 | CONFIG_SCHEMAS = _load_config_schemas()
107 | 
108 | 
109 | def base(event, context, passthrough=False):
110 |     # Set up logging
111 |     logger = logging.getLogger()
112 | 
113 |     # Read DEBUG value from the environment variable
114 |     debug = os.environ.get('ST2_DEBUG', False)
115 |     if str(debug).lower() in ['true', '1']:
116 |         debug = True
117 | 
118 |     if debug:
119 |         logger.setLevel(logging.DEBUG)
120 |     else:
121 |         logger.setLevel(logging.INFO)
122 | 
123 |     if isinstance(event, basestring):
124 |         try:
125 |             event = json.loads(event)
126 |         except ValueError as e:
127 |             LOG.error("ERROR: Can not parse `event`: '{}'\n{}".format(str(event), str(e)))
128 |             raise e
129 | 
130 |     LOG.info("Received event: " + json.dumps(event, indent=2))
131 | 
132 |     # Special case for Lambda function being called over HTTP via API gateway
133 |     # See
134 |     # https://serverless.com/framework/docs/providers/aws/events/apigateway
135 |     # #example-lambda-proxy-event-default
136 |     # for details
137 |     is_event_body_string = (isinstance(event.get('body'), basestring) is True)
138 |     content_type = event.get('headers', {}).get('content-type', '').lower()
139 | 
140 |     if is_event_body_string:
141 |         if content_type == 'application/json':
142 |             try:
143 |                 event['body'] = json.loads(event['body'])
144 |             except Exception as e:
145 |                 LOG.warn('`event` has `body` which is not JSON: %s', str(e.message))
146 |         elif content_type == 'application/x-www-form-urlencoded':
147 |             try:
148 |                 event['body'] = dict(parse_qsl(event['body'], keep_blank_values=True))
149 |             except Exception as e:
150 |                 LOG.warn('`event` has `body` which is not `%s`: %s', content_type, str(e.message))
151 |         else:
152 |             LOG.warn('Unsupported event content type: %s' % (content_type))
153 | 
154 |     action_name = os.environ['ST2_ACTION']
155 |     try:
156 |         action_db = ACTIONS[action_name]
157 |     except KeyError:
158 |         raise ValueError('No action named "%s" has been installed.' % (action_name))
159 | 
160 |     manager = DriverManager(namespace='st2common.runners.runner', invoke_on_load=False,
161 |                             name=action_db.runner_type['name'])
162 |     runnertype_db = RunnerTypeAPI.to_model(RunnerTypeAPI(**manager.driver.get_metadata()[0]))
163 | 
164 |     if passthrough:
165 |         runner = PassthroughRunner()
166 |     else:
167 |         runner = manager.driver.get_runner()
168 | 
169 |     runner._sandbox = False
170 |     runner.runner_type_db = runnertype_db
171 |     runner.action = action_db
172 |     runner.action_name = action_db.name
173 |     # runner.liveaction = liveaction_db
174 |     # runner.liveaction_id = str(liveaction_db.id)
175 |     # runner.execution = ActionExecution.get(liveaction__id=runner.liveaction_id)
176 |     # runner.execution_id = str(runner.execution.id)
177 |     runner.entry_point = content_utils.get_entry_point_abs_path(pack=action_db.pack,
178 |                                                                 entry_point=action_db.entry_point)
179 |     runner.context = {}  # getattr(liveaction_db, 'context', dict())
180 |     # runner.callback = getattr(liveaction_db, 'callback', dict())
181 |     runner.libs_dir_path = content_utils.get_action_libs_abs_path(pack=action_db.pack,
182 |                                                                   entry_point=action_db.entry_point)
183 | 
184 |     # For re-run, get the ActionExecutionDB on which the re-run is based.
185 | # rerun_ref_id = runner.context.get('re-run', {}).get('ref') 186 | # runner.rerun_ex_ref = ActionExecution.get(id=rerun_ref_id) if rerun_ref_id else None 187 | 188 | config_schema = CONFIG_SCHEMAS.get(action_db.pack, None) 189 | config_values = os.environ.get('ST2_CONFIG', None) 190 | if config_schema and config_values: 191 | runner._config = validate_config_against_schema(config_schema=config_schema, 192 | config_object=json.loads(config_values), 193 | config_path=None, 194 | pack_name=action_db.pack) 195 | 196 | param_values = os.environ.get('ST2_PARAMETERS', None) 197 | try: 198 | if param_values: 199 | live_params = param_utils.render_live_params( 200 | runner_parameters=runnertype_db.runner_parameters, 201 | action_parameters=action_db.parameters, 202 | params=json.loads(param_values), 203 | action_context={}, 204 | additional_contexts={ 205 | 'input': event 206 | }) 207 | else: 208 | live_params = event 209 | 210 | if debug and 'log_level' not in live_params: 211 | # Set log_level runner parameter 212 | live_params['log_level'] = 'DEBUG' 213 | 214 | runner_params, action_params = param_utils.render_final_params( 215 | runner_parameters=runnertype_db.runner_parameters, 216 | action_parameters=action_db.parameters, 217 | params=live_params, 218 | action_context={}) 219 | except ParamException as e: 220 | raise actionrunner.ActionRunnerException(str(e)) 221 | 222 | runner.runner_parameters = runner_params 223 | 224 | LOG.debug('Performing pre-run for runner: %s', runner.runner_id) 225 | runner.pre_run() 226 | 227 | (status, output, context) = runner.run(action_params) 228 | 229 | output_values = os.environ.get('ST2_OUTPUT', None) 230 | if output_values: 231 | try: 232 | result = param_utils.render_live_params( 233 | runner_parameters=runnertype_db.runner_parameters, 234 | action_parameters=action_db.parameters, 235 | params=json.loads(output_values), 236 | action_context={}, 237 | additional_contexts={ 238 | 'input': event, 239 | 'output': output 240 | }) 241 | except ParamException as e: 242 | raise actionrunner.ActionRunnerException(str(e)) 243 | else: 244 | result = output 245 | 246 | # Log the logs generated by the action. We do that so the actual action logs 247 | # (action stderr) end up in CloudWatch 248 | output = output or {} 249 | 250 | if output.get('stdout', None): 251 | LOG.info('Action stdout: %s' % (output['stdout'])) 252 | 253 | if output.get('stderr', None): 254 | LOG.info('Action stderr and logs: %s' % (output['stderr'])) 255 | 256 | return { 257 | 'event': event, 258 | 'live_params': live_params, 259 | 'output': output, 260 | 'result': result 261 | } 262 | 263 | 264 | # for backwards compatibility 265 | def stackstorm(*args, **kwargs): 266 | res = base(*args, **kwargs) 267 | return res['result'] 268 | 269 | 270 | def basic(*args, **kwargs): 271 | res = base(*args, **kwargs) 272 | return res 273 | 274 | 275 | def passthrough(*args, **kwargs): 276 | res = base(*args, passthrough=True, **kwargs) 277 | return res 278 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 
12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 
135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 
194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | const fs = require('fs-extra'); 3 | const path = require('path'); 4 | const git = require('simple-git/promise'); 5 | const yaml = require('js-yaml'); 6 | const nopy = require('nopy'); 7 | const request = require('axios'); 8 | const stdin = require('get-stdin'); 9 | const urljoin = require('url-join'); 10 | const chalk = require('chalk'); 11 | 12 | const { pullDockerImage, startDocker, runDocker, execDocker, stopDocker } = require('./lib/docker'); 13 | 14 | 15 | const MAGIC_FOLDER = '~st2'; 16 | const INTERNAL_MAGIC_FOLDER = `/var/task/${MAGIC_FOLDER}`; 17 | const DEFAULT_PYTHON_PATH = [ 18 | `${INTERNAL_MAGIC_FOLDER}`, 19 | `${INTERNAL_MAGIC_FOLDER}/deps/lib/python2.7/site-packages`, 20 | `${INTERNAL_MAGIC_FOLDER}/deps/lib64/python2.7/site-packages` 21 | ]; 22 | 23 | class StackstormPlugin { 24 | constructor(serverless, options) { 25 | this.serverless = serverless; 26 | this.options = options; 27 | 28 | this.hooks = { 29 | 'stackstorm:package': () => this.serverless.pluginManager.spawn('package'), 30 | 'stackstorm:clean:clean': () => this.clean(), 31 | 'stackstorm:docker:pull:pull': () => this.pullDockerImage(), 32 | 'stackstorm:docker:start:start': () => { 33 | const { noPull } = this.options; 34 | return this.startDocker({ noPull }); 35 | }, 36 | 'stackstorm:docker:stop:stop': () => this.stopDocker(this.options.dockerId), 37 | 'stackstorm:docker:exec:exec': () => { 38 | const { noPull } = this.options; 39 | return this.execDocker(this.options.cmd.split(' '), { noPull }); 40 | }, 41 | 'stackstorm:docker:run:run': () => { 42 | const { 'function': func, data, ...rest } = this.options; 43 | return this.runDocker(func, data, rest); 44 | }, 45 | 'stackstorm:install:adapter:copyAdapter': () => this.copyAdapter(), 46 | 'stackstorm:install:deps:copyDeps': () => { 47 | const { noPull } = this.options; 48 | return this.copyDeps({ noPull }); 49 | }, 50 | 'stackstorm:install:packs:clonePacks': () => { 51 | if (this.options.pack) { 52 | return this.clonePack(this.options.pack); 53 | } 54 | 55 | return this.clonePacks(); 56 | }, 57 | 'stackstorm:install:packDeps:copyPackDeps': () => { 58 | const { pack, noPull } = this.options; 59 | 60 | if (pack) { 61 | return this.copyPackDeps(pack, { noPull }); 62 | } 63 | 64 | return this.copyAllPacksDeps({ force: true, noPull }); 65 | }, 66 | 'stackstorm:info:info': () => this.showInfo(this.options), 67 | 'before:package:createDeploymentArtifacts': () => this.beforeCreateDeploymentArtifacts(), 68 | 'before:simulate:apigateway:initialize': () => this.beforeCreateDeploymentArtifacts(), 69 | 'before:invoke:local:invoke': () => this.beforeCreateDeploymentArtifacts(true) 70 | }; 71 | 72 | this.commands = { 73 | stackstorm: { 74 | usage: 'Build λ with StackStorm', 75 | lifecycleEvents: [ 76 | 'package', 77 | ], 78 | commands: { 79 | clean: { 80 | usage: 'Clean StackStorm code', 81 | lifecycleEvents: [ 82 | 'clean', 83 | ] 84 | }, 
85 | docker: { 86 | commands: { 87 | pull: { 88 | usage: 'Pull λ docker image', 89 | lifecycleEvents: [ 90 | 'pull' 91 | ] 92 | }, 93 | start: { 94 | usage: 'Start λ docker container', 95 | lifecycleEvents: [ 96 | 'start' 97 | ] 98 | }, 99 | stop: { 100 | usage: 'Stop λ docker container', 101 | lifecycleEvents: [ 102 | 'stop' 103 | ], 104 | options: { 105 | dockerId: { 106 | usage: 'λ docker container ID', 107 | required: true 108 | } 109 | } 110 | }, 111 | exec: { 112 | usage: 'Execute a command in λ docker container', 113 | lifecycleEvents: [ 114 | 'exec' 115 | ], 116 | options: { 117 | dockerId: { 118 | usage: 'λ docker container ID', 119 | required: true 120 | }, 121 | cmd: { 122 | usage: 'command to execute', 123 | shortcut: 'c', 124 | required: true 125 | } 126 | } 127 | }, 128 | run: { 129 | usage: 'Execute a function in λ docker container', 130 | lifecycleEvents: [ 131 | 'run' 132 | ], 133 | options: { 134 | function: { 135 | usage: 'Name of the function', 136 | shortcut: 'f', 137 | required: true 138 | }, 139 | path: { 140 | usage: 'Path to JSON or YAML file holding input data', 141 | shortcut: 'p', 142 | }, 143 | data: { 144 | usage: 'Input data', 145 | shortcut: 'd', 146 | required: true 147 | }, 148 | passthrough: { 149 | usage: 'Return incoming event as a result instead of running StackStorm action' 150 | }, 151 | verbose: { 152 | usage: 'Print all the transformation steps', 153 | shortcut: 'v' 154 | } 155 | } 156 | } 157 | } 158 | }, 159 | install: { 160 | commands: { 161 | adapter: { 162 | usage: 'Install StackStorm adapter', 163 | lifecycleEvents: [ 164 | 'copyAdapter' 165 | ] 166 | }, 167 | deps: { 168 | usage: 'Install StackStorm dependencies', 169 | lifecycleEvents: [ 170 | 'copyDeps' 171 | ], 172 | options: { 173 | dockerId: { 174 | usage: 'λ docker container ID' 175 | }, 176 | noPull: { 177 | usage: 'Do not pull the docker image' 178 | } 179 | } 180 | }, 181 | packs: { 182 | usage: 'Install a pack', 183 | lifecycleEvents: [ 184 | 'clonePacks' 185 | ], 186 | options: { 187 | pack: { 188 | usage: 'Install specific StackStorm pack', 189 | shortcut: 'p' 190 | } 191 | } 192 | }, 193 | packDeps: { 194 | usage: 'Install dependencies for packs', 195 | lifecycleEvents: [ 196 | 'copyPackDeps' 197 | ], 198 | options: { 199 | dockerId: { 200 | usage: 'λ docker container ID' 201 | }, 202 | noPull: { 203 | usage: 'Do not pull the docker image' 204 | }, 205 | pack: { 206 | usage: 'Install dependencies for specific pack.', 207 | shortcut: 'p' 208 | } 209 | } 210 | } 211 | } 212 | }, 213 | info: { 214 | usage: 'Print information on the action', 215 | lifecycleEvents: [ 216 | 'info', 217 | ], 218 | options: { 219 | action: { 220 | usage: 'Action name' 221 | }, 222 | pack: { 223 | usage: 'Pack name' 224 | } 225 | } 226 | } 227 | } 228 | } 229 | }; 230 | 231 | const { custom = {} } = this.serverless.service; 232 | const { stackstorm = {} } = custom; 233 | 234 | this.dockerId = null; 235 | this.dockerRunImage = stackstorm && stackstorm.runImage || 'lambci/lambda:python2.7'; 236 | this.dockerBuildImage = stackstorm && stackstorm.buildImage 237 | || stackstorm.image 238 | || 'lambci/lambda:build-python2.7'; 239 | 240 | this.index_root = stackstorm && stackstorm.indexRoot || 'https://index.stackstorm.org/v1/'; 241 | this.index_url = stackstorm && stackstorm.index || urljoin(this.index_root, 'index.json'); 242 | 243 | this.st2common_pkg = stackstorm && stackstorm.st2common_pkg 244 | || 'git+https://github.com/stackstorm/st2.git@v2.8.1#egg=st2common&subdirectory=st2common'; 245 | 
this.python_runner_pkg = stackstorm && stackstorm.python_runner_pkg 246 | || 'git+https://github.com/StackStorm/st2.git@v2.8.1#egg=stackstorm-runner-python&subdirectory=contrib/runners/python_runner'; 247 | } 248 | 249 | async getIndex() { 250 | if (!this._index) { 251 | this._index = await request.get(this.index_url).then(res => res.data); 252 | } 253 | 254 | return this._index; 255 | } 256 | 257 | async clean() { 258 | await fs.remove(MAGIC_FOLDER); 259 | } 260 | 261 | async copyAdapter() { 262 | this.serverless.cli.log('Copying StackStorm adapter code...'); 263 | await fs.copy(__dirname + '/stackstorm', MAGIC_FOLDER); 264 | } 265 | 266 | async copyDeps({ noPull } = {}) { 267 | this.serverless.cli.log('Installing StackStorm adapter dependencies...'); 268 | const prefix = `${INTERNAL_MAGIC_FOLDER}/deps`; 269 | await this.execDocker(['mkdir', '-p', prefix], { noPull }); 270 | await this.execDocker(['pip', 'install', '-I', this.st2common_pkg, this.python_runner_pkg, '--prefix', prefix], { noPull }); 271 | } 272 | 273 | async copyPackDeps(pack, { noPull } = {}) { 274 | const prefix = `${INTERNAL_MAGIC_FOLDER}/virtualenvs/${pack}`; 275 | const pythonpath = `${prefix}/lib/python2.7/site-packages`; 276 | const requirements = `${INTERNAL_MAGIC_FOLDER}/packs/${pack}/requirements.txt`; 277 | await this.execDocker(['mkdir', '-p', pythonpath], { noPull }); 278 | await this.execDocker([ 279 | '/bin/bash', '-c', 280 | `PYTHONPATH=$PYTHONPATH:${pythonpath} ` + 281 | `pip --isolated install --ignore-installed -r ${requirements} --prefix ${prefix} --src ${prefix}/src` 282 | ], { noPull }); 283 | } 284 | 285 | async copyAllPacksDeps({ force, noPull } = {}) { 286 | this.serverless.cli.log('Ensuring virtual environments for packs...'); 287 | const packs = fs.readdirSync(`${MAGIC_FOLDER}/packs`); 288 | 289 | for (let pack of packs) { 290 | const depsExists = await fs.pathExists(`${MAGIC_FOLDER}/virtualenvs/${pack}`); 291 | if (force || !depsExists) { 292 | await this.copyPackDeps(pack, { noPull }); 293 | } 294 | } 295 | } 296 | 297 | async clonePack(packName) { 298 | const index = await this.getIndex(); 299 | const debug = (process.env['DEBUG'] !== undefined); 300 | const packMeta = index.packs[packName]; 301 | if (!packMeta) { 302 | throw new this.serverless.classes.Error(`Pack "${packName}" not found.`); 303 | } 304 | 305 | const localPath = `${MAGIC_FOLDER}/packs/${packMeta.ref || packMeta.name}`; 306 | try { 307 | const silent = !debug; 308 | 309 | this.serverless.cli.log(`Cloning pack "${packMeta.ref || packMeta.name}"...`); 310 | await git().silent(silent).clone(packMeta.repo_url, localPath); 311 | } catch (e) { 312 | await git(localPath).fetch(); 313 | await git(localPath).pull('origin', 'master'); 314 | } 315 | 316 | return localPath; 317 | } 318 | 319 | async clonePacks() { 320 | return Promise.all(_(this.getFunctions()) 321 | .map(func => func.split('.')[0]) 322 | .uniq() 323 | .map(packName => this.clonePack(packName)) 324 | ); 325 | } 326 | 327 | getFunctions() { 328 | return _.map(this.serverless.service.functions, func => { 329 | if (func.stackstorm) { 330 | if (func.handler) { 331 | throw new this.serverless.classes.Error('properties stackstorm and handler are mutually exclusive'); 332 | } 333 | 334 | return func.stackstorm.action; 335 | } 336 | }).filter(Boolean); 337 | } 338 | 339 | async getAction(packName, actionName) { 340 | const actionContent = fs.readFileSync(`${MAGIC_FOLDER}/packs/${packName}/actions/${actionName}.yaml`); 341 | 342 | return yaml.safeLoad(actionContent); 343 | } 344 | 
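  // The Docker helpers below back the `sls stackstorm docker ...` commands declared in
  // `this.commands` above. A rough, illustrative usage sketch (flags mirror the `options`
  // blocks defined there; `my_function`, the JSON payload and `<id>` are placeholders,
  // not documented CLI output):
  //
  //   sls stackstorm docker pull
  //   sls stackstorm docker start
  //   sls stackstorm docker exec --dockerId <id> -c 'pip --version'
  //   sls stackstorm docker run -f my_function -d '{"key": "value"}' --verbose
  //   sls stackstorm docker stop --dockerId <id>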
345 | async pullDockerImage() { 346 | const promise = pullDockerImage(this.dockerBuildImage); 347 | 348 | promise.on('stdout', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 349 | promise.on('stderr', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 350 | 351 | return await promise; 352 | } 353 | 354 | async startDocker({ noPull } = {}) { 355 | if (!this.dockerId) { 356 | if (!noPull) { 357 | await this.pullDockerImage(); 358 | } 359 | 360 | this.serverless.cli.log('Spinning Docker container to build python dependencies...'); 361 | const volume = `${path.resolve('./')}/${MAGIC_FOLDER}:${INTERNAL_MAGIC_FOLDER}`; 362 | const promise = startDocker(this.dockerBuildImage, volume); 363 | 364 | promise.on('stdout', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 365 | promise.on('stderr', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 366 | 367 | this.dockerId = await promise; 368 | return this.dockerId; 369 | } 370 | 371 | throw new this.serverless.classes.Error('Docker container for this session is already set. Stop it before creating a new one.'); 372 | } 373 | 374 | async stopDocker(dockerId = this.dockerId) { 375 | if (dockerId) { 376 | const promise = stopDocker(dockerId); 377 | this.serverless.cli.log('Stopping Docker container...'); 378 | 379 | promise.on('stdout', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 380 | promise.on('stderr', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 381 | 382 | return await promise; 383 | } 384 | 385 | throw new this.serverless.classes.Error('No Docker container is set for this session. You need to start one first.'); 386 | } 387 | 388 | async execDocker(cmd, { noPull } = {}) { 389 | let dockerId = this.dockerId || this.options.dockerId; 390 | if (!dockerId) { 391 | this.dockerId = dockerId = await this.startDocker({ noPull }); 392 | } 393 | 394 | const promise = execDocker(dockerId, cmd); 395 | 396 | promise.on('stdout', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 397 | promise.on('stderr', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 398 | 399 | return await promise; 400 | } 401 | 402 | async runDocker(funcName, data, opts={}) { 403 | if (!data) { 404 | if (opts.path) { 405 | const absolutePath = path.isAbsolute(opts.path) ? 406 | opts.path : 407 | path.join(this.serverless.config.servicePath, opts.path); 408 | 409 | if (!this.serverless.utils.fileExistsSync(absolutePath)) { 410 | throw new this.serverless.classes.Error('The file you provided does not exist.'); 411 | } 412 | 413 | data = this.serverless.utils.readFileSync(absolutePath); 414 | } else { 415 | try { 416 | data = await stdin(); 417 | } catch (exception) { 418 | // resolve if no stdin was provided 419 | } 420 | } 421 | } 422 | 423 | await this.beforeCreateDeploymentArtifacts(); 424 | 425 | const func = this.serverless.service.functions[funcName]; 426 | 427 | const volumes = [`${path.resolve('./')}/${MAGIC_FOLDER}:${INTERNAL_MAGIC_FOLDER}`]; 428 | const envs = _.map(func.environment, (value, key) => `${key}=${value}`); 429 | 430 | const cmd = [`${MAGIC_FOLDER}/handler.${opts.passthrough ? 
'passthrough' : 'basic'}`, data]; 431 | 432 | this.serverless.cli.log('Spinning Docker container to run a function locally...'); 433 | 434 | const promise = runDocker(this.dockerRunImage, volumes, envs, cmd); 435 | 436 | promise.on('stdout', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 437 | promise.on('stderr', (str) => this.serverless.cli.consoleLog(chalk.dim(str))); 438 | 439 | const { result } = await promise 440 | .catch(e => { 441 | if (e.result && e.result.errorMessage) { 442 | throw new Error(`Function error: ${e.result.errorMessage}`); 443 | } 444 | throw e; 445 | }); 446 | 447 | const msg = []; 448 | 449 | if (opts.verbose) { 450 | msg.push(`${chalk.yellow.underline('Incoming event ->')}`); 451 | msg.push(`${JSON.stringify(result.event, null, 2)}`); 452 | msg.push(`${chalk.yellow.underline('-> Parameter transformer ->')}`); 453 | msg.push(`${JSON.stringify(result.live_params, null, 2)}`); 454 | msg.push(`${chalk.yellow.underline( 455 | `-> Action call ${opts.passthrough ? '(passthrough) ' : ''}->` 456 | )}`); 457 | msg.push(`${JSON.stringify(result.output, null, 2)}`); 458 | msg.push(`${chalk.yellow.underline('-> Output transformer ->')}`); 459 | } 460 | 461 | msg.push(`${JSON.stringify(result.result, null, 2)}`); 462 | 463 | this.serverless.cli.consoleLog(msg.join('\n')); 464 | 465 | return result.result; 466 | } 467 | 468 | showInfo({ action, pack }) { 469 | if (action) { 470 | return this.showActionInfo(action); 471 | } else if (pack) { 472 | return this.showPackInfo(pack); 473 | } else { 474 | throw new Error('Either action or pack should be provided'); 475 | } 476 | } 477 | 478 | async showActionInfo(action) { 479 | const [ packName, ...actionNameRest ] = action.split('.'); 480 | const actionName = actionNameRest.join('.'); 481 | 482 | const metaUrl = urljoin(this.index_root, 'packs', packName, 'actions', `${actionName}.json`); 483 | const packRequest = () => request.get(metaUrl).then(res => res.data); 484 | 485 | const configUrl = urljoin(this.index_root, 'packs', packName, 'config.schema.json'); 486 | const configRequest = () => request.get(configUrl).then(res => res.data); 487 | 488 | const dots = 30; 489 | const indent = ' '; 490 | 491 | const msg = []; 492 | 493 | try { 494 | const packMeta = await packRequest(); 495 | const usage = packMeta.description || chalk.dim('action description is missing'); 496 | 497 | msg.push(`${chalk.yellow(action)} ${chalk.dim(_.repeat('.', dots - action.length))} ${usage}`); 498 | msg.push(`${chalk.yellow.underline('Parameters')}`); 499 | for (let name in packMeta.parameters) { 500 | const param = packMeta.parameters[name]; 501 | const title = `${name} [${param.type}] ${param.required ? '(required)' : ''}`; 502 | const dotsLength = dots - indent.length - title.length; 503 | const usage = param.description || chalk.dim('description is missing'); 504 | msg.push(`${indent}${chalk.yellow(title)} ${chalk.dim(_.repeat('.', dotsLength))} ${usage}`); 505 | } 506 | } catch (e) { 507 | throw new Error(`No such action in the index: ${action}`); 508 | } 509 | 510 | try { 511 | const configMeta = await configRequest(); 512 | msg.push(`${chalk.yellow.underline('Config')}`); 513 | for (let name in configMeta) { 514 | const param = configMeta[name]; 515 | const title = `${name} [${param.type}] ${param.required ? 
'(required)' : ''}`; 516 | const dotsLength = dots - indent.length - title.length; 517 | const usage = param.description || chalk.dim('description is missing'); 518 | msg.push(`${indent}${chalk.yellow(title)} ${chalk.dim(_.repeat('.', dotsLength))} ${usage}`); 519 | } 520 | } catch (e) { 521 | msg.push(chalk.dim('The action does not require config parameters')); 522 | } 523 | 524 | this.serverless.cli.consoleLog(msg.join('\n')); 525 | } 526 | 527 | async showPackInfo(packName) { 528 | const indexUrl = urljoin(this.index_root, 'index.json'); 529 | const index = await request.get(indexUrl).then(res => res.data); 530 | 531 | const dots = 30; 532 | const indent = ' '; 533 | 534 | const msg = []; 535 | 536 | const pack = index.packs[packName]; 537 | if (!pack) { 538 | throw new Error(`No such pack in the index: ${packName}`); 539 | } 540 | const usage = pack.description || chalk.dim('pack description is missing'); 541 | const { actions={} } = pack.content; 542 | 543 | msg.push(`${chalk.yellow(packName)} ${chalk.dim(_.repeat('.', dots - packName.length))} ${usage}`); 544 | msg.push(`${chalk.yellow.underline('Actions')}`); 545 | for (let name of actions.resources) { 546 | msg.push(`${indent}${name}`); 547 | } 548 | 549 | this.serverless.cli.consoleLog(msg.join('\n')); 550 | } 551 | 552 | async beforeCreateDeploymentArtifacts(local) { 553 | let needCommons = false; 554 | 555 | this.serverless.service.package.exclude = (this.serverless.service.package.exclude || []) 556 | .concat([`${MAGIC_FOLDER}/**/.git/**`]); 557 | 558 | for (let key of Object.keys(this.serverless.service.functions)) { 559 | const func = this.serverless.service.functions[key]; 560 | 561 | if (func.stackstorm) { 562 | if (func.handler) { 563 | throw new this.serverless.classes.Error('properties stackstorm and handler are mutually exclusive'); 564 | } 565 | 566 | const [ packName, ...actionNameRest ] = func.stackstorm.action.split('.'); 567 | const actionName = actionNameRest.join('.'); 568 | await this.clonePack(packName); 569 | await this.getAction(packName, actionName); 570 | 571 | func.handler = `${MAGIC_FOLDER}/handler.stackstorm`; 572 | func.environment = func.environment || {}; 573 | func.environment.ST2_ACTION = func.stackstorm.action; 574 | if (func.stackstorm.config) { 575 | func.environment.ST2_CONFIG = JSON.stringify(func.stackstorm.config); 576 | } 577 | if (func.stackstorm.input) { 578 | func.environment.ST2_PARAMETERS = JSON.stringify(func.stackstorm.input); 579 | } 580 | if (func.stackstorm.output) { 581 | func.environment.ST2_OUTPUT = JSON.stringify(func.stackstorm.output); 582 | } 583 | func.environment.PYTHONPATH = DEFAULT_PYTHON_PATH 584 | .concat([ 585 | `${INTERNAL_MAGIC_FOLDER}/virtualenvs/${packName}/lib/python2.7/site-packages`, 586 | `${INTERNAL_MAGIC_FOLDER}/virtualenvs/${packName}/lib64/python2.7/site-packages` 587 | ]) 588 | .join(':'); 589 | needCommons = true; 590 | 591 | this.serverless.service.functions[key] = func; 592 | } 593 | } 594 | 595 | if (needCommons) { 596 | await this.copyAdapter(); 597 | 598 | if (local) { 599 | await this.installCommonsLocally(); 600 | } else { 601 | await this.installCommonsDockerized(); 602 | } 603 | } 604 | } 605 | 606 | async installCommonsLocally() { 607 | const depsExists = await fs.pathExists(`${MAGIC_FOLDER}/deps`); 608 | if (!depsExists) { 609 | this.serverless.cli.log('Checking if pip is installed...'); 610 | await nopy.spawnPython([ 611 | path.join(__dirname, 'node_modules/nopy/src/get-pip.py'), '--user', '--quiet' 612 | ], { 613 | interop: 'status', 614 | 
spawn: { 615 | stdio: 'inherit', 616 | } 617 | }); 618 | 619 | this.serverless.cli.log('Installing StackStorm adapter dependencies...'); 620 | await nopy.spawnPython([ 621 | '-m', 'pip', 'install', 622 | 'git+https://github.com/stackstorm/st2.git#egg=st2common&subdirectory=st2common', 623 | '-I', 624 | '--prefix', `${MAGIC_FOLDER}/deps` 625 | ], { 626 | interop: 'buffer' 627 | }); 628 | } 629 | } 630 | 631 | async installCommonsDockerized() { 632 | const depsExists = await fs.pathExists(`${MAGIC_FOLDER}/deps`); 633 | if (!depsExists) { 634 | await this.copyDeps(); 635 | } 636 | 637 | await this.copyAllPacksDeps(); 638 | 639 | try { 640 | await this.stopDocker(); 641 | } catch (e) { 642 | // Do nothing 643 | } 644 | } 645 | } 646 | 647 | module.exports = StackstormPlugin; 648 | -------------------------------------------------------------------------------- /index.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | const path = require('path'); 3 | const chai = require('chai'); 4 | const chaiAsPromised = require('chai-as-promised'); 5 | const mock = require('proxyquire'); 6 | const sinon = require('sinon'); 7 | const sinonChai = require('sinon-chai'); 8 | 9 | chai.use(chaiAsPromised); 10 | chai.use(sinonChai); 11 | const expect = chai.expect; 12 | 13 | const StackStorm = require('./index.js'); 14 | 15 | function makePromiseStub(value) { 16 | const promise = Promise.resolve(value); 17 | promise.on = () => {}; 18 | return sinon.stub().returns(promise); 19 | } 20 | 21 | class CustomError extends Error {} 22 | 23 | describe('index', () => { 24 | const sls = { 25 | cli: { 26 | log: sinon.stub(), 27 | consoleLog: sinon.stub() 28 | }, 29 | classes: { 30 | Error: CustomError 31 | }, 32 | service: {} 33 | }; 34 | const opts = {}; 35 | 36 | it('should have falsy initial value of dockerId', () => { 37 | const instance = new StackStorm(sls, opts); 38 | 39 | expect(instance).to.have.property('dockerId').that.is.not.ok; 40 | }); 41 | 42 | it('should have default initial value of dockerBuildImage', () => { 43 | const instance = new StackStorm(sls, opts); 44 | 45 | expect(instance).to.have.property('dockerBuildImage').that.equal('lambci/lambda:build-python2.7'); 46 | }); 47 | 48 | it('should have default initial value of dockerRunImage', () => { 49 | const instance = new StackStorm(sls, opts); 50 | 51 | expect(instance).to.have.property('dockerRunImage').that.equal('lambci/lambda:python2.7'); 52 | }); 53 | 54 | it('should allow redefining dockerBuildImage and dockerRunImage with custom fields', () => { 55 | const serverless = { 56 | service: { 57 | custom: { 58 | stackstorm: { 59 | buildImage: 'custom/image', 60 | runImage: 'custom/otherimage' 61 | } 62 | } 63 | } 64 | }; 65 | 66 | const instance = new StackStorm(serverless, opts); 67 | 68 | expect(instance).to.have.property('dockerBuildImage').that.equal('custom/image'); 69 | expect(instance).to.have.property('dockerRunImage').that.equal('custom/otherimage'); 70 | }); 71 | 72 | it('should have default initial value of index_url', () => { 73 | const instance = new StackStorm(sls, opts); 74 | 75 | expect(instance).to.have.property('index_url').that.equal('https://index.stackstorm.org/v1/index.json'); 76 | }); 77 | 78 | it('should allow redefining index_url with a custom field', () => { 79 | const serverless = { 80 | service: { 81 | custom: { 82 | stackstorm: { 83 | index: 'http://custom/url' 84 | } 85 | } 86 | } 87 | }; 88 | 89 | const instance = new StackStorm(serverless, 
opts); 90 | 91 | expect(instance).to.have.property('index_url').that.equal('http://custom/url'); 92 | }); 93 | 94 | describe('#getIndex', () => { 95 | it('should retrieve and return StackStorm index', async () => { 96 | const getStub = sinon.stub().resolves({ data: 'some' }); 97 | const StackStorm = mock('./index.js', { 98 | axios: { 99 | get: getStub 100 | } 101 | }); 102 | 103 | const instance = new StackStorm(sls, opts); 104 | 105 | await expect(instance.getIndex()).to.eventually.equal('some'); 106 | expect(getStub).to.be.calledOnce; 107 | expect(getStub).to.be.calledWith(instance.index_url); 108 | }); 109 | }); 110 | 111 | describe('#clean', () => { 112 | it('should remove MAGIC_FOLDER', async () => { 113 | const removeStub = sinon.stub().resolves(); 114 | const StackStorm = mock('./index.js', { 115 | 'fs-extra': { 116 | remove: removeStub 117 | } 118 | }); 119 | 120 | const instance = new StackStorm(sls, opts); 121 | 122 | await expect(instance.clean()).to.eventually.be.fulfilled; 123 | expect(removeStub).to.be.calledOnce; 124 | expect(removeStub).to.be.calledWith('~st2'); 125 | }); 126 | }); 127 | 128 | describe('#copyDeps', () => { 129 | it('should install StackStorm deps', async () => { 130 | const execStub = makePromiseStub(); 131 | const StackStorm = mock('./index.js', { 132 | './lib/docker': { 133 | execDocker: execStub 134 | } 135 | }); 136 | 137 | const instance = new StackStorm(sls, opts); 138 | instance.dockerId = 'some'; 139 | 140 | await expect(instance.copyDeps()).to.eventually.be.fulfilled; 141 | expect(execStub).to.be.calledTwice; 142 | expect(execStub).to.be.calledWith(instance.dockerId, ['mkdir', '-p', '/var/task/~st2/deps']); 143 | expect(execStub).to.be.calledWith(instance.dockerId, [ 144 | 'pip', 'install', '-I', 145 | 'git+https://github.com/stackstorm/st2.git@v2.8.1#egg=st2common&subdirectory=st2common', 146 | 'git+https://github.com/StackStorm/st2.git@v2.8.1#egg=stackstorm-runner-python&subdirectory=contrib/runners/python_runner', 147 | '--prefix', '/var/task/~st2/deps' 148 | ]); 149 | }); 150 | 151 | it('should spin a docker container if one had not been started yet', async () => { 152 | const execStub = makePromiseStub(); 153 | const StackStorm = mock('./index.js', { 154 | './lib/docker': { 155 | execDocker: execStub 156 | } 157 | }); 158 | 159 | const instance = new StackStorm(sls, opts); 160 | instance.startDocker = sinon.stub().resolves('some'); 161 | 162 | await expect(instance.copyDeps()).to.eventually.be.fulfilled; 163 | expect(instance.startDocker).to.be.calledOnce; 164 | expect(instance.startDocker).to.be.calledWith(); 165 | }); 166 | }); 167 | 168 | describe('#copyPackDeps', () => { 169 | it('should install pack deps', async () => { 170 | const execStub = makePromiseStub(); 171 | const StackStorm = mock('./index.js', { 172 | './lib/docker': { 173 | execDocker: execStub 174 | } 175 | }); 176 | 177 | const instance = new StackStorm(sls, opts); 178 | instance.dockerId = 'some'; 179 | 180 | await expect(instance.copyPackDeps('dummypack')).to.eventually.be.fulfilled; 181 | expect(execStub).to.be.calledTwice; 182 | expect(execStub).to.be.calledWith(instance.dockerId, [ 183 | 'mkdir', '-p', '/var/task/~st2/virtualenvs/dummypack/lib/python2.7/site-packages' 184 | ]); 185 | expect(execStub).to.be.calledWith(instance.dockerId, [ 186 | '/bin/bash', '-c', 187 | 'PYTHONPATH=$PYTHONPATH:/var/task/~st2/virtualenvs/dummypack/lib/python2.7/site-packages ' + 188 | 'pip --isolated install --ignore-installed -r /var/task/~st2/packs/dummypack/requirements.txt ' + 189 | 
'--prefix /var/task/~st2/virtualenvs/dummypack --src /var/task/~st2/virtualenvs/dummypack/src' 190 | ]); 191 | }); 192 | 193 | it('should spin a docker container if one had not been started yet', async () => { 194 | const execStub = makePromiseStub(); 195 | const StackStorm = mock('./index.js', { 196 | './lib/docker': { 197 | execDocker: execStub 198 | } 199 | }); 200 | 201 | const instance = new StackStorm(sls, opts); 202 | instance.startDocker = sinon.stub().resolves('some'); 203 | 204 | await expect(instance.copyPackDeps('dummypack')).to.eventually.be.fulfilled; 205 | expect(instance.startDocker).to.be.calledOnce; 206 | expect(instance.startDocker).to.be.calledWith(); 207 | }); 208 | }); 209 | 210 | describe('#copyAllPacksDeps', () => { 211 | it('should install deps for every pack', async () => { 212 | const StackStorm = mock('./index.js', { 213 | 'fs-extra': { 214 | readdirSync: () => [1, 2, 3] 215 | } 216 | }); 217 | 218 | const instance = new StackStorm(sls, opts); 219 | instance.copyPackDeps = sinon.stub().resolves(); 220 | 221 | await expect(instance.copyAllPacksDeps()).to.eventually.be.fulfilled; 222 | expect(instance.copyPackDeps).to.be.calledThrice; 223 | expect(instance.copyPackDeps).to.be.calledWith(1); 224 | expect(instance.copyPackDeps).to.be.calledWith(2); 225 | expect(instance.copyPackDeps).to.be.calledWith(3); 226 | }); 227 | }); 228 | 229 | describe('#clonePack', () => { 230 | it('should clone StackStorm pack if it doesn\'t exist yet', async () => { 231 | const cloneStub = sinon.stub().resolves(); 232 | const StackStorm = mock('./index.js', { 233 | 'simple-git/promise': () => { 234 | const self = { 235 | silent: () => self, 236 | clone: cloneStub 237 | }; 238 | 239 | return self; 240 | } 241 | }); 242 | 243 | const instance = new StackStorm(sls, opts); 244 | instance.getIndex = () => ({ 245 | packs: { 246 | some: { 247 | ref: 'some', 248 | repo_url: 'http://thing/' 249 | } 250 | } 251 | }); 252 | 253 | await expect(instance.clonePack('some')).to.eventually.be.fulfilled; 254 | expect(cloneStub).to.be.calledOnce; 255 | expect(cloneStub).to.be.calledWith('http://thing/', '~st2/packs/some'); 256 | }); 257 | 258 | it('should pull the latest master for StackStorm pack if it exists already', async () => { 259 | const fetchStub = sinon.stub().resolves(); 260 | const pullStub = sinon.stub().resolves(); 261 | const StackStorm = mock('./index.js', { 262 | 'simple-git/promise': () => { 263 | const self = { 264 | fetch: fetchStub, 265 | pull: pullStub 266 | }; 267 | 268 | return self; 269 | } 270 | }); 271 | 272 | const instance = new StackStorm(sls, opts); 273 | instance.getIndex = () => ({ 274 | packs: { 275 | some: { 276 | ref: 'some', 277 | repo_url: 'http://thing/' 278 | } 279 | } 280 | }); 281 | 282 | await expect(instance.clonePack('some')).to.eventually.be.fulfilled; 283 | expect(fetchStub).to.be.calledOnce; 284 | expect(fetchStub).to.be.calledWith(); 285 | expect(pullStub).to.be.calledOnce; 286 | expect(pullStub).to.be.calledWith('origin', 'master'); 287 | }); 288 | }); 289 | 290 | describe('#clonePacks', () => { 291 | it('should clone all stackstorm packs mentioned in the serverless.yml', async () => { 292 | const serverless = { 293 | service: { 294 | functions: { 295 | one: { 296 | stackstorm: { 297 | action: 'some.one' 298 | } 299 | }, 300 | two: { 301 | stackstorm: { 302 | action: 'some.two' 303 | } 304 | }, 305 | three: { 306 | stackstorm: { 307 | action: 'someother.three' 308 | } 309 | }, 310 | four: { 311 | handler: 'some' 312 | } 313 | } 314 | } 315 | }; 316 | 317 | const instance = 
new StackStorm(serverless, opts); 318 | instance.clonePack = sinon.stub().resolves(); 319 | 320 | await expect(instance.clonePacks()).to.eventually.be.fulfilled; 321 | expect(instance.clonePack).to.be.calledTwice; 322 | expect(instance.clonePack).to.be.calledWith('some'); 323 | expect(instance.clonePack).to.be.calledWith('someother'); 324 | }); 325 | }); 326 | 327 | describe('#getAction', () => { 328 | it('should return action\'s metadata', async () => { 329 | const actionMetaYaml = 'some: thing'; 330 | const readStub = sinon.stub().returns(actionMetaYaml); 331 | const StackStorm = mock('./index.js', { 332 | 'fs-extra': { 333 | readFileSync: readStub 334 | } 335 | }); 336 | 337 | const instance = new StackStorm(sls, opts); 338 | 339 | await expect(instance.getAction('some', 'thing')).to.eventually.be.deep.equal({some: 'thing'}); 340 | expect(readStub).to.be.calledOnce; 341 | expect(readStub).to.be.calledWith('~st2/packs/some/actions/thing.yaml'); 342 | }); 343 | }); 344 | 345 | describe('#pullDockerImage', () => { 346 | it('should pull the image', async () => { 347 | const pullStub = makePromiseStub(); 348 | const StackStorm = mock('./index.js', { 349 | './lib/docker': { 350 | pullDockerImage: pullStub 351 | } 352 | }); 353 | 354 | const instance = new StackStorm(sls, opts); 355 | 356 | await expect(instance.pullDockerImage()).to.eventually.be.fulfilled; 357 | expect(pullStub).to.be.calledOnce; 358 | expect(pullStub).to.be.calledWith(); 359 | }); 360 | }); 361 | 362 | describe('#startDocker', () => { 363 | it('should start the container', async () => { 364 | const startStub = makePromiseStub(Promise.resolve('deadbeef')); 365 | const StackStorm = mock('./index.js', { 366 | './lib/docker': { 367 | startDocker: startStub 368 | } 369 | }); 370 | 371 | const instance = new StackStorm(sls, opts); 372 | instance.pullDockerImage = sinon.stub().resolves(); 373 | 374 | await expect(instance.startDocker()).to.eventually.be.equal('deadbeef'); 375 | expect(startStub).to.be.calledOnce; 376 | expect(startStub).to.be.calledWith(); 377 | }); 378 | 379 | it('should fail if docker container is already spinning', async () => { 380 | const startStub = makePromiseStub(Promise.resolve('deadbeef')); 381 | const StackStorm = mock('./index.js', { 382 | './lib/docker': { 383 | startDocker: startStub 384 | } 385 | }); 386 | 387 | const instance = new StackStorm(sls, opts); 388 | instance.pullDockerImage = sinon.stub().resolves(); 389 | 390 | await instance.startDocker(); 391 | 392 | await expect(instance.startDocker()).to.eventually.be.rejected; 393 | }); 394 | }); 395 | 396 | describe('#stopDocker', () => { 397 | it('should stop the container', async () => { 398 | const stopStub = makePromiseStub(); 399 | const StackStorm = mock('./index.js', { 400 | './lib/docker': { 401 | stopDocker: stopStub 402 | } 403 | }); 404 | 405 | const instance = new StackStorm(sls, opts); 406 | 407 | await expect(instance.stopDocker('someId')).to.eventually.be.fulfilled; 408 | expect(stopStub).to.be.calledOnce; 409 | expect(stopStub).to.be.calledWith('someId'); 410 | }); 411 | 412 | it('should fail if no docker container is set', async () => { 413 | const startStub = makePromiseStub(Promise.resolve('deadbeef')); 414 | const StackStorm = mock('./index.js', { 415 | './lib/docker': { 416 | startDocker: startStub 417 | } 418 | }); 419 | 420 | const instance = new StackStorm(sls, opts); 421 | 422 | await expect(instance.stopDocker()).to.eventually.be.rejected; 423 | }); 424 | }); 425 | 426 | describe('#execDocker', () => { 427 | it('should 
execute a command in the container', async () => { 428 | const execStub = makePromiseStub(); 429 | const StackStorm = mock('./index.js', { 430 | './lib/docker': { 431 | execDocker: execStub 432 | } 433 | }); 434 | 435 | const instance = new StackStorm(sls, opts); 436 | instance.dockerId = 'someId'; 437 | 438 | await expect(instance.execDocker('some command')).to.eventually.be.fulfilled; 439 | expect(execStub).to.be.calledOnce; 440 | expect(execStub).to.be.calledWith('someId', 'some command'); 441 | }); 442 | }); 443 | 444 | describe('#runDocker', () => { 445 | it('should execute a function in the container', async () => { 446 | const runStub = makePromiseStub({ result: 'some' }); 447 | const StackStorm = mock('./index.js', { 448 | './lib/docker': { 449 | runDocker: runStub 450 | } 451 | }); 452 | 453 | const serverless = { 454 | ...sls, 455 | service: { 456 | package: {}, 457 | functions: { 458 | somefunc: { 459 | stackstorm: { 460 | action: 'some.function' 461 | } 462 | } 463 | } 464 | } 465 | }; 466 | 467 | const instance = new StackStorm(serverless, opts); 468 | instance.clonePack = sinon.stub().resolves(); 469 | instance.getAction = sinon.stub().resolves(); 470 | instance.copyAdapter = sinon.stub().resolves(); 471 | instance.installCommonsDockerized = sinon.stub().resolves(); 472 | 473 | await expect(instance.runDocker('somefunc', '{"inputData": true}', { 474 | verbose: true 475 | })).to.eventually.be.fulfilled; 476 | expect(runStub).to.be.calledOnce; 477 | expect(runStub).to.be.calledWith( 478 | 'lambci/lambda:python2.7', 479 | [`${path.resolve('./~st2')}:/var/task/~st2`], 480 | [ 481 | 'ST2_ACTION=some.function', 482 | [ 483 | 'PYTHONPATH=/var/task/~st2', 484 | '/var/task/~st2/deps/lib/python2.7/site-packages', 485 | '/var/task/~st2/deps/lib64/python2.7/site-packages', 486 | '/var/task/~st2/virtualenvs/some/lib/python2.7/site-packages', 487 | '/var/task/~st2/virtualenvs/some/lib64/python2.7/site-packages' 488 | ].join(':') 489 | ], 490 | ['~st2/handler.basic', '{"inputData": true}'] 491 | ); 492 | }); 493 | 494 | it('should read input data from stdio', async () => { 495 | const runStub = makePromiseStub({ result: 'some' }); 496 | const StackStorm = mock('./index.js', { 497 | './lib/docker': { 498 | runDocker: runStub 499 | }, 500 | 'get-stdin': sinon.stub().resolves('{"inputStream": false}') 501 | }); 502 | 503 | const serverless = { 504 | ...sls, 505 | service: { 506 | package: {}, 507 | functions: { 508 | somefunc: { 509 | stackstorm: { 510 | action: 'some.function' 511 | } 512 | } 513 | } 514 | } 515 | }; 516 | 517 | const instance = new StackStorm(serverless, opts); 518 | instance.clonePack = sinon.stub().resolves(); 519 | instance.getAction = sinon.stub().resolves(); 520 | instance.copyAdapter = sinon.stub().resolves(); 521 | instance.installCommonsDockerized = sinon.stub().resolves(); 522 | 523 | await expect(instance.runDocker('somefunc')).to.eventually.be.fulfilled; 524 | expect(runStub).to.be.calledOnce; 525 | expect(runStub).to.be.calledWith( 526 | 'lambci/lambda:python2.7', 527 | [`${path.resolve('./~st2')}:/var/task/~st2`], 528 | [ 529 | 'ST2_ACTION=some.function', 530 | [ 531 | 'PYTHONPATH=/var/task/~st2', 532 | '/var/task/~st2/deps/lib/python2.7/site-packages', 533 | '/var/task/~st2/deps/lib64/python2.7/site-packages', 534 | '/var/task/~st2/virtualenvs/some/lib/python2.7/site-packages', 535 | '/var/task/~st2/virtualenvs/some/lib64/python2.7/site-packages' 536 | ].join(':') 537 | ], 538 | ['~st2/handler.basic', '{"inputStream": false}'] 539 | ); 540 | }); 541 | 542 | 
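    // The next two cases cover the `--path` branch of runDocker(): when no inline data is
    // passed, input is read from a JSON/YAML file resolved against
    // serverless.config.servicePath, and the run is rejected if that file does not exist.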
it('should read input data from file', async () => { 543 | const runStub = makePromiseStub({ result: 'some' }); 544 | const StackStorm = mock('./index.js', { 545 | './lib/docker': { 546 | runDocker: runStub 547 | } 548 | }); 549 | 550 | const serverless = { 551 | ...sls, 552 | config: { 553 | servicePath: '~' 554 | }, 555 | service: { 556 | package: {}, 557 | functions: { 558 | somefunc: { 559 | stackstorm: { 560 | action: 'some.function' 561 | } 562 | } 563 | } 564 | }, 565 | utils: { 566 | fileExistsSync: sinon.stub().returns(true), 567 | readFileSync: sinon.stub().returns('{"inputFile": "some"}') 568 | } 569 | }; 570 | 571 | const instance = new StackStorm(serverless, opts); 572 | instance.clonePack = sinon.stub().resolves(); 573 | instance.getAction = sinon.stub().resolves(); 574 | instance.copyAdapter = sinon.stub().resolves(); 575 | instance.installCommonsDockerized = sinon.stub().resolves(); 576 | 577 | await expect(instance.runDocker('somefunc', null, { 578 | path: 'some' 579 | })).to.eventually.be.fulfilled; 580 | expect(runStub).to.be.calledOnce; 581 | expect(runStub).to.be.calledWith( 582 | 'lambci/lambda:python2.7', 583 | [`${path.resolve('./~st2')}:/var/task/~st2`], 584 | [ 585 | 'ST2_ACTION=some.function', 586 | [ 587 | 'PYTHONPATH=/var/task/~st2', 588 | '/var/task/~st2/deps/lib/python2.7/site-packages', 589 | '/var/task/~st2/deps/lib64/python2.7/site-packages', 590 | '/var/task/~st2/virtualenvs/some/lib/python2.7/site-packages', 591 | '/var/task/~st2/virtualenvs/some/lib64/python2.7/site-packages' 592 | ].join(':') 593 | ], 594 | ['~st2/handler.basic', '{"inputFile": "some"}'] 595 | ); 596 | }); 597 | 598 | it('should reject if file does not exist', async () => { 599 | const runStub = makePromiseStub({ result: 'some' }); 600 | const StackStorm = mock('./index.js', { 601 | './lib/docker': { 602 | runDocker: runStub 603 | } 604 | }); 605 | 606 | const serverless = { 607 | ...sls, 608 | config: { 609 | servicePath: '~' 610 | }, 611 | service: { 612 | package: {}, 613 | functions: { 614 | somefunc: { 615 | stackstorm: { 616 | action: 'some.function' 617 | } 618 | } 619 | } 620 | }, 621 | utils: { 622 | fileExistsSync: sinon.stub().returns(false), 623 | readFileSync: sinon.stub().returns('{"inputFile": "some"}') 624 | } 625 | }; 626 | 627 | const instance = new StackStorm(serverless, opts); 628 | instance.clonePack = sinon.stub().resolves(); 629 | instance.getAction = sinon.stub().resolves(); 630 | instance.copyAdapter = sinon.stub().resolves(); 631 | instance.installCommonsDockerized = sinon.stub().resolves(); 632 | 633 | await expect(instance.runDocker('somefunc', null, { 634 | path: 'some' 635 | })).to.eventually.be.rejected; 636 | 637 | }); 638 | }); 639 | 640 | describe('#showActionInfo', () => { 641 | it('should display action help', async () => { 642 | const getStub = sinon.stub(); 643 | 644 | getStub 645 | .withArgs('https://index.stackstorm.org/v1/packs/some/actions/action.with.dots.json') 646 | .resolves({ 647 | data: { 648 | 'description': 'register a server to the SLB', 649 | 'enabled': true, 650 | 'entry_point': 'ax_action_runner.py', 651 | 'name': 'add_slb_server', 652 | 'parameters': { 653 | 'action': { 654 | 'default': 'create', 655 | 'immutable': true, 656 | 'type': 'string' 657 | }, 658 | 'appliance': { 659 | 'description': 'The appliance information to connect, which is specified at the "appliance" parameter in the configuration.', 660 | 'required': true, 661 | 'type': 'string' 662 | } 663 | }, 664 | 'runner_type': 'python-script' 665 | } 666 | }); 667 | 668 | 
getStub 669 | .withArgs('https://index.stackstorm.org/v1/packs/some/config.schema.json') 670 | .resolves({ 671 | data: { 672 | 'appliance': { 673 | 'description': 'Appliance parameters to connect', 674 | 'type': 'array' 675 | } 676 | } 677 | }); 678 | 679 | const StackStorm = mock('./index.js', { 680 | 'axios': { 681 | get: getStub 682 | } 683 | }); 684 | 685 | const serverless = { 686 | ...sls, 687 | cli: { 688 | consoleLog: sinon.spy() 689 | } 690 | }; 691 | 692 | const instance = new StackStorm(serverless, opts); 693 | 694 | await expect(instance.showActionInfo('some.action.with.dots')).to.eventually.be.fulfilled; 695 | expect(serverless.cli.consoleLog).to.be.calledWith([ 696 | '\u001b[33msome.action.with.dots\u001b[39m \u001b[2m.........\u001b[22m register a server to the SLB', 697 | '\u001b[33m\u001b[4mParameters\u001b[24m\u001b[39m', 698 | ' \u001b[33maction [string] \u001b[39m \u001b[2m............\u001b[22m \u001b[2mdescription is missing\u001b[22m', 699 | ' \u001b[33mappliance [string] (required)\u001b[39m The appliance information to connect, which is specified at the "appliance" parameter in the configuration.', 700 | '\u001b[33m\u001b[4mConfig\u001b[24m\u001b[39m', 701 | ' \u001b[33mappliance [array] \u001b[39m \u001b[2m..........\u001b[22m Appliance parameters to connect' 702 | ].join('\n')); 703 | }); 704 | }); 705 | 706 | describe('#showPackInfo', () => { 707 | it('should display pack help', async () => { 708 | const getStub = sinon.stub(); 709 | 710 | getStub 711 | .withArgs('https://index.stackstorm.org/v1/index.json') 712 | .resolves({ 713 | 'data': { 714 | 'metadata': { 715 | 'generated_ts': 1512633599, 716 | 'hash': '72c7655b059b387f074ea0a868b54a2f' 717 | }, 718 | 'packs': { 719 | 'test': { 720 | 'author': 'st2-dev', 721 | 'content': { 722 | 'actions': { 723 | 'count': 2, 724 | 'resources': [ 725 | 'list_vms', 726 | 'parse' 727 | ] 728 | }, 729 | 'tests': { 730 | 'count': 1, 731 | 'resources': [ 732 | 'test_action_parse_xml.py' 733 | ] 734 | } 735 | }, 736 | 'description': 'st2 pack to test package management pipeline', 737 | 'email': 'info@stackstorm.com', 738 | 'keywords': [ 739 | 'some', 740 | 'search', 741 | 'terms' 742 | ], 743 | 'name': 'test', 744 | 'repo_url': 'https://github.com/StackStorm-Exchange/stackstorm-test', 745 | 'version': '0.4.0' 746 | } 747 | } 748 | } 749 | }); 750 | 751 | const StackStorm = mock('./index.js', { 752 | 'axios': { 753 | get: getStub 754 | } 755 | }); 756 | 757 | const serverless = { 758 | ...sls, 759 | cli: { 760 | consoleLog: sinon.spy() 761 | } 762 | }; 763 | 764 | const instance = new StackStorm(serverless, opts); 765 | 766 | await expect(instance.showPackInfo('test')).to.eventually.be.fulfilled; 767 | expect(serverless.cli.consoleLog).to.be.calledWith([ 768 | '\u001b[33mtest\u001b[39m \u001b[2m..........................\u001b[22m st2 pack to test package management pipeline', 769 | '\u001b[33m\u001b[4mActions\u001b[24m\u001b[39m', 770 | ' list_vms', 771 | ' parse' 772 | ].join('\n')); 773 | }); 774 | }); 775 | }); 776 | --------------------------------------------------------------------------------