├── .gitignore ├── README.md ├── ansible_playbooks └── test.yml ├── app ├── __init__.py ├── main │ ├── __init__.py │ ├── errors.py │ ├── utils.py │ └── views.py └── tasks │ ├── __init__.py │ ├── ansible_api.py │ ├── exec_ansible.py │ └── mycallbacks.py ├── celery_worker.py ├── config.py ├── logconfig ├── __init__.py └── logconfig.py ├── logs └── .gitkeep ├── manage.py ├── requirements.txt ├── supervisord.conf ├── tests └── __init__.py └── uwsgi.yml /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Ansible API 2 | An Ansible API built on Flask and Celery (ansible==1.6.6) 3 | Modified from [yumaojun03/ansible_async_api](https://github.com/yumaojun03/ansible_async_api) 4 | --- 5 | 6 | ## Based on 7 | * flask 8 | * celery 9 | * redis 10 | 11 | ## Debug 12 | 1. Run the development server: 13 | ``` 14 | python manage.py runserver 15 | ``` 16 | 2.
Run the Celery worker: 17 | ``` 18 | # run ansible as root (not recommended) 19 | export C_FORCE_ROOT=True 20 | # PYTHONOPTIMIZE=1 lets ansible fork its worker processes inside the celery worker 21 | export PYTHONOPTIMIZE=1 22 | celery -A celery_worker.celery worker --loglevel=debug 23 | ``` 24 | 25 | ## API Usage: 26 | 27 | * POST /ad_hoc 28 | - param: < json > 29 | ``` 30 | { 31 | "module_name": "shell", 32 | "module_args": "hostname -s", 33 | "pattern": "all", 34 | "resource": { 35 | "hosts": { 36 | "testdb": { 37 | "ip": "xxx.xxx.xxx.x", 38 | "port": "22", 39 | "username": "root", 40 | "password": "yyyyyyyyy" 41 | } 42 | } 43 | }, 44 | "timestamp": "1469712000", "sign": "5135ec5c6526d01b5a57ea221390d9dc" 45 | } 46 | ``` 47 | - return: < json > 48 | ``` 49 | { 50 | "task_id": "suijizifuchuan", 51 | "task_url": "/taskstats/ad_hoc/suijizifuchuan" 52 | } 53 | ``` 54 | * POST /playbook 55 | - param: < json > 56 | ``` 57 | { 58 | "playbook": "test.yml", 59 | "timestamp": "1469712000", "sign": "9c25246e3bf6af494ebfcf304c23e2b1" 60 | } 61 | ``` 62 | - return: < json > 63 | ``` 64 | { 65 | "task_id": "suijizifuchuan", 66 | "task_url": "/taskstats/playbook/suijizifuchuan" 67 | } 68 | ``` 69 | * GET /taskstats/< task_type >/< task_id > 70 | - return: < json > 71 | ``` 72 | { 73 | "state": "task_state", 74 | "status": "task_info" 75 | } 76 | ``` 77 | 78 | ## Deploy 79 | * supervisord startup config: supervisord.conf 80 | * uwsgi config: uwsgi.yml 81 | 82 | `supervisord -c supervisord.conf` 83 | -------------------------------------------------------------------------------- /ansible_playbooks/test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - hosts: cache1 3 | tasks: 4 | - name: test 5 | shell: time ls 6 | 7 | - name: sleep 2 seconds 8 | shell: sleep 2 -------------------------------------------------------------------------------- /app/__init__.py: -------------------------------------------------------------------------------- 1 | from flask import Flask 2 | from celery import Celery 3 | # from flask_mail import Mail 4 | from config import config, Config 5 | from werkzeug.contrib.fixers import ProxyFix 6 | from logconfig.logconfig import init_logging 7 | 8 | 9 | # mail = Mail() 10 | celery = Celery(__name__, broker=Config.CELERY_BROKER_URL) 11 | 12 | 13 | def create_app(config_name): 14 | app = Flask(__name__) 15 | app.config.from_object(config[config_name]) 16 | config[config_name].init_app(app) 17 | 18 | # mail.init_app(app) 19 | init_logging(app.config['APP_LOG_DIR']) 20 | app.wsgi_app = ProxyFix(app.wsgi_app) 21 | celery.conf.update(app.config) 22 | 23 | # routes and errorhandlers 24 | from .main import main as main_blueprint 25 | app.register_blueprint(main_blueprint) 26 | 27 | return app 28 | -------------------------------------------------------------------------------- /app/main/__init__.py: -------------------------------------------------------------------------------- 1 | from flask import Blueprint 2 | 3 | main = Blueprint('main', __name__) 4 | 5 | from . import views, errors 6 | -------------------------------------------------------------------------------- /app/main/errors.py: -------------------------------------------------------------------------------- 1 | from flask import make_response, jsonify 2 | from .
import main 3 | 4 | 5 | @main.app_errorhandler(404) 6 | def page_not_found(e): 7 | return make_response(jsonify({'error': 'Page Not Found', }), 404) 8 | 9 | 10 | @main.app_errorhandler(500) 11 | def internal_server_error(e): 12 | return make_response(jsonify({'error': 'Internal Server Error', }), 500) 13 | 14 | 15 | @main.app_errorhandler(400) 16 | def page_not_found(e): 17 | return make_response(jsonify({'error': e.description['message']}), 400) 18 | 19 | 20 | @main.app_errorhandler(401) 21 | def page_not_found(e): 22 | return make_response(jsonify({'error': 'NOT AUTHORIZED'}), 401) 23 | -------------------------------------------------------------------------------- /app/main/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | from flask import current_app 4 | import hashlib 5 | import socket 6 | import json 7 | 8 | # sign_key='secret_sign_key' 9 | 10 | 11 | def getmd5(str_to_md5): 12 | """ 13 | md5sum string 14 | :param str_to_md5: 15 | :return: 16 | """ 17 | hash_handler = hashlib.md5() 18 | hash_handler.update(str_to_md5) 19 | return hash_handler.hexdigest() 20 | 21 | 22 | def check_sign(dict_to_check, sign): 23 | """ 24 | simple method to check sign 25 | :param dict_to_check: 26 | :param sign: 27 | :return: 28 | """ 29 | # global sign_key 30 | sorted_list = sorted(dict_to_check.iteritems(), key=lambda d: d[0], reverse=True) 31 | s = '' 32 | for key, value in sorted_list: 33 | try: 34 | s += value 35 | except TypeError: 36 | s += json.dumps(value) 37 | if getmd5(s+current_app.config['API_SIGN_KEY']) == sign: 38 | return True 39 | else: 40 | return False 41 | 42 | 43 | def is_safe_ip(ip): 44 | """ 45 | check ip in whitelist or not 46 | :param ip: ip address 47 | :return: 48 | """ 49 | # should define global 50 | # white_ip_list = ('127.0.0.1', '192.168.1.1') 51 | try: 52 | socket.inet_pton(socket.AF_INET, ip) 53 | except AttributeError: 54 | try: 55 | socket.inet_aton(ip) 56 | except socket.error: 57 | return False 58 | except socket.error: 59 | return False 60 | 61 | if ip.count('.') == 3 and ip in current_app.config['API_WHITE_IP_LIST']: 62 | return True 63 | else: 64 | return False 65 | -------------------------------------------------------------------------------- /app/main/views.py: -------------------------------------------------------------------------------- 1 | from flask import make_response, jsonify, url_for, request, abort 2 | from . 
import main 3 | from .utils import is_safe_ip, check_sign 4 | from ..tasks.exec_ansible import exec_ad_hoc, exec_playbook 5 | 6 | 7 | @main.route('/ad_hoc', methods=['POST']) 8 | def ad_hoc(): 9 | # check client ip 10 | ip = request.remote_addr 11 | if not is_safe_ip(ip): 12 | abort(401) 13 | data = { 14 | 'module_name': None, # ansible module name 15 | 'module_args': None, # ansible module arguments 16 | 'pattern': None, # host pattern 17 | 'resource': None, # self-defined assets, may be dynamic 18 | 'su_user': None, # su - username 19 | 'timeout': None, # task timeout (seconds) 20 | 'forks': None, # number of forks 21 | 'timestamp': None, # timestamp, for API Sign 22 | 'sign': None, # sign, for API Sign 23 | } 24 | try: 25 | data.update(request.get_json()) 26 | except Exception: 27 | abort(400, {'message': 'Invalid Parameters!'}) 28 | if not (data['timestamp'] and data['sign']): 29 | abort(400, {'message': 'sign and timestamp are required!'}) 30 | if not((data['pattern'] or data['resource']) and data['module_name'] and data['module_args']): 31 | abort(400, {'message': 'resource/pattern, module_name and module_args are required!'}) 32 | # clear None variables 33 | for k in data.keys(): 34 | if not data[k]: 35 | data.pop(k) 36 | # get sign 37 | sign = data.pop('sign') 38 | if not check_sign(data, sign): 39 | abort(400, {'message': 'Sign Error!'}) 40 | # pop timestamp 41 | data.pop('timestamp') 42 | # create the ad_hoc task 43 | task = exec_ad_hoc.delay(data) 44 | return jsonify({'task_id': task.id, 'task_url': url_for('.task_stats', task_type='ad_hoc', task_id=task.id)}), 201 45 | 46 | 47 | @main.route('/playbook', methods=['POST']) 48 | def playbook(): 49 | # check client ip 50 | ip = request.remote_addr 51 | if not is_safe_ip(ip): 52 | abort(401) 53 | data = { 54 | 'resource': None, # self-defined assets, may be dynamic 55 | 'playbook': None, # playbook filename 56 | 'sign': None, # sign, for API sign 57 | 'timestamp': None, # timestamp, for API sign 58 | 'extra_vars': None, # playbook extra_vars, dictionary 59 | } 60 | try: 61 | data.update(request.get_json()) 62 | except Exception: 63 | abort(400, {'message': 'Invalid Parameters!'}) 64 | if not (data['timestamp'] and data['sign']): 65 | abort(400, {'message': 'sign and timestamp are required!'}) 66 | if not data['playbook']: 67 | abort(400, {'message': 'playbook is required!'}) 68 | # clear None variables 69 | for k in data.keys(): 70 | if not data[k]: 71 | data.pop(k) 72 | # get sign 73 | sign = data.pop('sign') 74 | if not check_sign(data, sign): 75 | abort(400, {'message': 'Sign Error!'}) 76 | # pop timestamp 77 | data.pop('timestamp') 78 | # create the playbook task 79 | task = exec_playbook.delay(data) 80 | return jsonify({'task_id': task.id, 'task_url': url_for('.task_stats', task_type='playbook', task_id=task.id)}), 201 81 | 82 | 83 | @main.route('/taskstats/<task_type>/<task_id>', methods=['GET']) 84 | def task_stats(task_type, task_id): 85 | # check client ip 86 | ip = request.remote_addr 87 | if not is_safe_ip(ip): 88 | abort(401) 89 | 90 | if task_type == "ad_hoc": 91 | task = exec_ad_hoc.AsyncResult(task_id) 92 | elif task_type == "playbook": 93 | task = exec_playbook.AsyncResult(task_id) 94 | else: 95 | abort(400, {'message': 'Unknown Task Type!'}) 96 | 97 | if task.state == 'PENDING': 98 | response = { 99 | 'state': task.state, 100 | 'status': 'Pending...'
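# NOTE: Celery reports PENDING both for tasks that are still queued and for task ids it has never seen, so this branch also covers unknown or expired task ids.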
101 | } 102 | elif task.state != 'FAILURE': 103 | response = { 104 | 'state': task.state, 105 | 'status': task.info 106 | } 107 | if 'result' in task.info: 108 | response['result'] = task.info['result'] 109 | else: 110 | # something went wrong in the background job 111 | response = { 112 | 'state': task.state, 113 | 'status': task.info, # this is the exception raised 114 | } 115 | 116 | return jsonify(response) 117 | -------------------------------------------------------------------------------- /app/tasks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Erazx/ansible_api/09a13f0232a6dc3dcec08aa72d40b430ec4b368b/app/tasks/__init__.py -------------------------------------------------------------------------------- /app/tasks/ansible_api.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | from __future__ import print_function, unicode_literals, absolute_import 4 | from ansible.inventory import Inventory 5 | from ansible.inventory.group import Group 6 | from ansible.inventory.host import Host 7 | from ansible.runner import Runner 8 | from ansible.playbook import PlayBook 9 | from ansible import callbacks, utils 10 | from flask import current_app 11 | from .mycallbacks import MyAggregateStats, log_redis 12 | import os 13 | import ansible.constants as ANS_CONS 14 | 15 | 16 | # ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 17 | # PLAYBOOK_DIR = os.path.join(ROOT_DIR, 'ansible_playbooks') 18 | 19 | 20 | class ResourceBase(object): 21 | """ 22 | generate inventory 23 | 24 | :param resource: inventory resource, format: 25 | { 26 | "hosts" : { 27 | "host1": {"port": "22", "username": "test", "password": "xxxx"}, 28 | "host2": {"port": "22", "username": "test", "password": "xxxx"}, 29 | }, 30 | "groups": { 31 | "group1": {"hosts": ["host1", "host2",...], "vars": {'var1':'xxxx', 'var2':'yyy',...} }, 32 | "group2": {"hosts": ["host1", "host2",...], "child": ["group1"], "vars": {'var1':'xxxx', 'var2':'yyy',...} }, 33 | } 34 | } 35 | """ 36 | def __init__(self, resource=None): 37 | host_list = not resource and ANS_CONS.DEFAULT_HOST_LIST or [] 38 | self.inventory = Inventory(host_list=host_list) 39 | self.resource = resource 40 | resource and self.gen_inventory() 41 | 42 | @staticmethod 43 | def gen_host(host_name=None, host_vars={}): 44 | """ 45 | Generate ansible Host object 46 | :param host_name: ansible inventory hostname 47 | :param host_vars: host variables 48 | :return: Host object 49 | """ 50 | ssh_host = host_vars.get('ip', host_name) 51 | ssh_port = host_vars.get('port', ANS_CONS.DEFAULT_REMOTE_PORT) 52 | ssh_user = host_vars.get('username') 53 | ssh_pass = host_vars.get('password') 54 | ssh_fkey = host_vars.get('ssh_key') 55 | # init Host 56 | host = Host(name=host_name, port=ssh_port) 57 | host.set_variable('ansible_ssh_host', ssh_host) 58 | # shortcut variables 59 | ssh_user and host.set_variable('ansible_ssh_user', ssh_user) 60 | ssh_pass and host.set_variable('ansible_ssh_pass', ssh_pass) 61 | ssh_fkey and host.set_variable('ansible_private_key_file', ssh_fkey) 62 | # extra variables 63 | for key, value in host_vars.iteritems(): 64 | if key not in ['ip', 'port', 'username', 'password', 'ssh_key']: 65 | host.set_variable(key, value) 66 | # return Host object 67 | return host 68 | 69 | @staticmethod 70 | def gen_group(group_name=None, group_vars={}): 71 | """ 72 | Generate ansible Group object 73 | 
:param group_name: Group Name 74 | :param group_vars: Group Variables 75 | :return: ansible Group object 76 | """ 77 | group = Group(name=group_name) 78 | for key, value in group_vars.iteritems(): 79 | group.set_variable(key, value) 80 | return group 81 | 82 | def gen_inventory(self): 83 | """ 84 | :return: None 85 | """ 86 | # set hosts 87 | if 'hosts' in self.resource.keys(): 88 | for host, info in self.resource['hosts'].iteritems(): 89 | obj_host = self.gen_host(host, info) 90 | self.inventory.get_group('all').add_host(obj_host) 91 | # add group 92 | if 'groups' in self.resource.keys(): 93 | for group, detail in self.resource['groups'].iteritems(): 94 | obj_group = self.gen_group(group, detail.get('vars', {})) 95 | for host in detail.get('hosts', []): 96 | obj_group.add_host(self.inventory.get_host(host)) 97 | for child in detail.get('child', []): 98 | obj_group.add_child_group(self.inventory.get_group(child)) 99 | self.inventory.add_group(obj_group) 100 | 101 | def get_lists(self): 102 | print("Host: ") 103 | print("=================") 104 | for host in self.inventory.list_hosts(): 105 | print(host) 106 | print("Group: ") 107 | print("=================") 108 | for group in self.inventory.list_groups(): 109 | print(group) 110 | 111 | 112 | class AdHoc(ResourceBase): 113 | """ 114 | execute ansible ad-hoc mode 115 | """ 116 | def __init__(self, resource=None): 117 | super(AdHoc, self).__init__(resource) 118 | self.result_raw = {} 119 | 120 | def run(self, task, module_args, module_name="shell", timeout=10, forks=10, pattern='*', su_user=None): 121 | 122 | runner = Runner( 123 | module_name=module_name, 124 | module_args=module_args, 125 | inventory=self.inventory, 126 | pattern=pattern, 127 | forks=forks, 128 | timeout=timeout, 129 | su=su_user and True or False, 130 | su_user=su_user, 131 | ) 132 | self.result_raw['celery_task_id'] = task 133 | tmp = runner.run() 134 | 135 | for (host, value) in tmp.get('contacted', {}).iteritems(): 136 | if value.get('invocation', {}).get('module_name', '') != 'setup': 137 | if not self.result_raw.get(host): 138 | self.result_raw[host] = {} 139 | self.result_raw[host]['result'] = value 140 | for (host, value) in tmp.get('dark', {}).iteritems(): 141 | if not self.result_raw.get(host): 142 | self.result_raw[host] = {} 143 | value['outcome'] = 'dark' 144 | self.result_raw[host]['result'] = value 145 | 146 | log_redis(self.result_raw) 147 | return self.result_raw 148 | 149 | 150 | class MyPlayBook(ResourceBase): 151 | """ 152 | execute ansible playbook 153 | """ 154 | def __init__(self, resource=None): 155 | super(MyPlayBook, self).__init__(resource) 156 | self.result_raw = None 157 | 158 | def run(self, task, playbook, extra_vars=None, check=False): 159 | stats = MyAggregateStats() 160 | playbook_callback = callbacks.PlaybookCallbacks(verbose=utils.VERBOSITY) 161 | runner_callback = callbacks.PlaybookRunnerCallbacks(stats, verbose=utils.VERBOSITY) 162 | abs_playbook_path = os.path.join(current_app.config['ANSIBLE_PLAYBOOKS_DIR'], playbook) 163 | 164 | pb = PlayBook( 165 | playbook=abs_playbook_path, 166 | stats=stats, 167 | callbacks=playbook_callback, 168 | runner_callbacks=runner_callback, 169 | inventory=self.inventory, 170 | extra_vars=extra_vars, 171 | check=check, 172 | ) 173 | self.result_raw = pb.run() 174 | self.result_raw['celery_task_id'] = task 175 | log_redis(self.result_raw) 176 | return self.result_raw 177 | 178 | if __name__ == "__main__": 179 | res = { 180 | "hosts" : { 181 | "192.168.1.1": {"port": "22", "username": "root", "password": 
"xxxx"}, 182 | "192.168.1.2": {"port": "22", "username": "root", "password": "yyyy"}, 183 | "192.168.1.3": {"port": "22", "username": "root", "password": "zzz"}, 184 | }, 185 | "groups": { 186 | "group1": { "hosts": ["192.168.1.1", "192.168.1.2"], vars: {'var1':'xxxx', 'var2':'yyy'} }, 187 | "group2": { "hosts": ["192.168.1.3"], "child": ["group1"], vars: {'var3':'z', 'var4':'o'} }, 188 | } 189 | } 190 | inv = ResourceBase(res) 191 | inv.get_lists() -------------------------------------------------------------------------------- /app/tasks/exec_ansible.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # from flask import current_app 4 | from app import celery 5 | # from celery.util.log import get_task_logger 6 | from .ansible_api import AdHoc, MyPlayBook 7 | 8 | 9 | # @celery.task(name='app.exec_ad_hoc', bind=True) 10 | @celery.task(bind=True) 11 | def exec_ad_hoc(self, data): 12 | try: 13 | res = data.pop('resource') 14 | except KeyError: 15 | res = None 16 | r = AdHoc(res) 17 | return r.run(self.request.id, **data) 18 | 19 | 20 | # @celery.task(name='app.exec_playbook', bind=True) 21 | @celery.task(bind=True) 22 | def exec_playbook(self, data): 23 | try: 24 | res = data.pop('resource') 25 | except KeyError: 26 | res = None 27 | pb = MyPlayBook(res) 28 | return pb.run(self.request.id, **data) 29 | -------------------------------------------------------------------------------- /app/tasks/mycallbacks.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | from __future__ import print_function, absolute_import, division 4 | from ansible import callbacks 5 | from datetime import datetime 6 | from redis import Redis 7 | import logging 8 | from copy import deepcopy 9 | from flask import current_app 10 | __metaclass__ = type 11 | 12 | try: 13 | import simplejson as json 14 | except ImportError: 15 | import json 16 | 17 | logger = logging.getLogger('myapp') 18 | 19 | 20 | def log_redis(content): 21 | r = Redis(host=current_app.config['ELK_REDIS_BROKER_HOST'], port=current_app.config['ELK_REDIS_BROKER_PORT']) 22 | msg = { 23 | "timestamp": datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S"), 24 | "server_addr": current_app.config['ANSIBLE_CONTROL_HOST'], 25 | "type": current_app.config['ELK_MESSAGE_TYPE'], 26 | "level": 4, 27 | "celery_task_id": content.pop('celery_task_id'), 28 | } 29 | for host, value in content.iteritems(): 30 | tmp = deepcopy(msg) 31 | tmp['server_name'] = host 32 | tmp.update(value) 33 | try: 34 | r.rpush(current_app.config['ELK_LOGSTASH_KEY'], json.dumps(tmp)) 35 | except Exception as e: 36 | logger.error('Log Redis Failed: %s' % e.message) 37 | logger.error('Data: %s' % json.dumps(tmp)) 38 | finally: 39 | tmp = {} 40 | 41 | 42 | class MyAggregateStats(callbacks.AggregateStats): 43 | """ 44 | Holds stats about per-host activity during playbook runs. 
45 | """ 46 | def __init__(self): 47 | super(MyAggregateStats, self).__init__() 48 | self.result = {} 49 | 50 | def _increment(self, what, host, value=None): 51 | self.processed[host] = 1 52 | if what == 'result': 53 | prev = (getattr(self, what)).get(host, []) 54 | prev.append(value) 55 | getattr(self, what)[host] = prev 56 | else: 57 | prev = (getattr(self, what)).get(host, 0) 58 | getattr(self, what)[host] = prev+1 59 | 60 | def compute(self, runner_results, setup=False, poll=False, ignore_errors=False): 61 | """ 62 | Walk through all results and increment stats. 63 | """ 64 | super(MyAggregateStats, self).compute(runner_results, setup, poll, ignore_errors) 65 | 66 | for (host, value) in runner_results.get('contacted', {}).iteritems(): 67 | if value.get('invocation', {}).get('module_name', '') != 'setup': 68 | self._increment('result', host, value) 69 | for (host, value) in runner_results.get('dark', {}).iteritems(): 70 | value['outcome'] = 'dark' 71 | self._increment('result', host, value) 72 | 73 | def summarize(self, host): 74 | """ 75 | Return information about a particular host 76 | """ 77 | summarized_info = super(MyAggregateStats, self).summarize(host) 78 | 79 | # Adding the info I need 80 | summarized_info['result'] = self.result.get(host, {}) 81 | 82 | return summarized_info 83 | -------------------------------------------------------------------------------- /celery_worker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import os 4 | from app import celery, create_app 5 | 6 | app = create_app(os.getenv('FLASK_CONFIG') or 'default') 7 | app.app_context().push() 8 | -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | import os 2 | basedir = os.path.abspath(os.path.dirname(__file__)) 3 | 4 | 5 | class Config: 6 | SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string' 7 | SQLALCHEMY_COMMIT_ON_TEARDOWN = True 8 | SQLALCHEMY_TRACK_MODIFICATIONS = True 9 | FLASKY_MAIL_SUBJECT_PREFIX = '[Flasky]' 10 | FLASKY_MAIL_SENDER = 'Flasky Admin ' 11 | FLASKY_ADMIN = os.environ.get('FLASKY_ADMIN') 12 | CELERY_BROKER_URL = 'redis://localhost:6379' 13 | CELERY_RESULT_BACKEND = 'redis://localhost:6379' 14 | API_SIGN_KEY = "secret_sign_key" 15 | API_WHITE_IP_LIST = ('127.0.0.1', '192.168.1.1') 16 | ANSIBLE_PLAYBOOKS_DIR = os.path.join(basedir, 'ansible_playbooks') 17 | APP_LOG_DIR = os.path.join(basedir, 'logs') 18 | ANSIBLE_CONTROL_HOST = "192.168.1.101" 19 | ELK_REDIS_BROKER_HOST = "192.168.1.120" 20 | ELK_REDIS_BROKER_PORT = 6379 21 | ELK_MESSAGE_TYPE = 'ansible' 22 | ELK_LOGSTASH_KEY = 'ansible' 23 | 24 | @staticmethod 25 | def init_app(app): 26 | pass 27 | 28 | 29 | class DevelopmentConfig(Config): 30 | DEBUG = True 31 | MAIL_SERVER = 'smtp.example.com' 32 | MAIL_PORT = 25 33 | MAIL_USERNAME = os.environ.get('MAIL_USERNAME') or 'example@example.com' 34 | MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD') or 'your-password-for-email' 35 | 36 | 37 | class TestingConfig(Config): 38 | TESTING = True 39 | SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \ 40 | 'sqlite:///' + os.path.join(basedir, 'data-test.sqlite') 41 | 42 | 43 | class ProductionConfig(Config): 44 | SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \ 45 | 'sqlite:///' + os.path.join(basedir, 'data.sqlite') 46 | 47 | 48 | config = { 49 | 'development': DevelopmentConfig, 50 |
'testing': TestingConfig, 51 | 'production': ProductionConfig, 52 | 53 | 'default': DevelopmentConfig, 54 | } 55 | -------------------------------------------------------------------------------- /logconfig/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /logconfig/logconfig.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | from __future__ import absolute_import 4 | import logging 5 | try: 6 | # for python 2.6 7 | from logutils.dictconfig import dictConfig 8 | except ImportError: 9 | from logging.config import dictConfig 10 | 11 | # logging.NullHandler New in python 2.7 12 | try: 13 | # Python 2.7+ 14 | from logging import NullHandler 15 | except ImportError: 16 | class NullHandler(logging.Handler): 17 | def emit(self, record): 18 | pass 19 | # logging.getLogger(__name__).addHandler(NullHandler()) 20 | # use namespace 21 | logging.NullHandler = NullHandler 22 | 23 | import os.path 24 | 25 | 26 | def init_logging(log_dir): 27 | """ 28 | initial logging 29 | """ 30 | LOGGING = { 31 | 'version': 1, 32 | 'disable_existing_loggers': True, 33 | 'formatters': { 34 | 'standard': { 35 | 'format': '%(asctime)s - %(filename)s:%(lineno)d(%(module)s:%(funcName)s) - %(levelname)s - %(message)s', 36 | 'datefmt': '%Y-%m-%d %H:%M:%S' 37 | }, 38 | 'simple': { 39 | 'format': '%(asctime)s - %(levelname)s - %(message)s', 40 | 'datefmt': '%Y-%m-%d %H:%M:%S' 41 | }, 42 | }, 43 | 'filters': { 44 | }, 45 | 'handlers': { 46 | 'null': { 47 | 'level': 'DEBUG', 48 | 'class': 'logging.NullHandler', 49 | }, 50 | 'console': { 51 | 'class': 'logging.StreamHandler', 52 | 'level': 'DEBUG', 53 | 'formatter': 'standard', 54 | 'stream': 'ext://sys.stderr', 55 | }, 56 | 'syslog': { 57 | 'level': 'DEBUG', 58 | 'class': 'logging.handlers.SysLogHandler', 59 | 'facility': 'logging.handlers.SysLogHandler.LOG_LOCAL7', 60 | 'formatter': 'standard', 61 | }, 62 | 'syslog2': { 63 | 'level': 'DEBUG', 64 | 'class': 'logging.handlers.SysLogHandler', 65 | 'facility': 'logging.handlers.SysLogHandler.LOG_LOCAL7', 66 | 'formatter': 'standard', 67 | }, 68 | 'access': { 69 | 'level': 'DEBUG', 70 | 'class': 'logging.handlers.RotatingFileHandler', 71 | 'filename': os.path.join(log_dir, 'access.log'), 72 | 'maxBytes': 1024*1024*2, 73 | 'backupCount': 5, 74 | 'formatter': 'standard', 75 | }, 76 | 'application': { 77 | 'level': 'DEBUG', 78 | 'class': 'logging.handlers.RotatingFileHandler', 79 | 'filename': os.path.join(log_dir, 'app.log'), 80 | 'maxBytes': 1024*1024*2, 81 | 'backupCount': 5, 82 | 'formatter': 'standard', 83 | }, 84 | }, 85 | 'loggers': { 86 | 'werkzeug': { 87 | 'handlers': ['access', 'console'], 88 | 'level': 'DEBUG', 89 | 'propagate': False, 90 | }, 91 | 'myapp': { 92 | 'handlers': ['application'], 93 | 'level': 'DEBUG', 94 | 'propagate': True, 95 | }, 96 | }, 97 | } 98 | 99 | dictConfig(LOGGING) 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | -------------------------------------------------------------------------------- /logs/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Erazx/ansible_api/09a13f0232a6dc3dcec08aa72d40b430ec4b368b/logs/.gitkeep -------------------------------------------------------------------------------- /manage.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import os 4 | from app import create_app 5 | from flask_script import Manager, Shell 6 | 7 | 8 | app = create_app(os.getenv('FLASK_CONFIG') or 'default') 9 | manager = Manager(app) 10 | 11 | 12 | if __name__ == '__main__': 13 | manager.run() 14 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ansible==1.6.6 2 | anyjson==0.3.3 3 | celery==3.1.23 4 | click==6.6 5 | Flask==0.11.1 6 | Flask-LogConfig==0.4.2 7 | Flask-Script==2.0.5 8 | futures==2.1.6 9 | httplib2==0.7.7 10 | importlib==1.0.3 11 | iniparse==0.3.1 12 | itsdangerous==0.24 13 | Jinja2==2.8 14 | kombu==3.0.35 15 | logconfig==0.4.0 16 | logutils==0.3.2 17 | MarkupSafe==0.23 18 | ordereddict==1.1 19 | paramiko==1.7.5 20 | pyasn1==0.0.12a0 21 | pycrypto==2.0.1 22 | pycurl==7.19.0 23 | pygpgme==0.1 24 | python-keyczar==0.71rc0 25 | pytz==2016.6.1 26 | PyYAML==3.11 27 | redis==2.10.5 28 | simplejson==2.0.9 29 | singledispatch==3.4.0.3 30 | six==1.10.0 31 | urlgrabber==3.9.1 32 | Werkzeug==0.11.10 33 | yum-metadata-parser==1.1.2 34 | -------------------------------------------------------------------------------- /supervisord.conf: -------------------------------------------------------------------------------- 1 | [program:uwsgi] 2 | command=/usr/bin/uwsgi --yml /home/ansible/ansible_api/uwsgi.yml 3 | directory=/home/ansible/ansible_api 4 | autostart=true 5 | autorestart=true 6 | stdout_logfile=/tmp/uwsgi.log 7 | redirect_stderr=true 8 | stopsignal=QUIT 9 | 10 | [program:celery] 11 | command=/usr/bin/celery -A celery_worker.celery worker 12 | directory=/home/ansible/ansible_api 13 | stdout_logfile=/tmp/celery.log 14 | redirect_stderr=true 15 | autostart=true 16 | autorestart=true 17 | stopsignal=QUIT 18 | environment=C_FORCE_ROOT=True,PYTHONOPTIMIZE=1 19 | #environment=PYTHONOPTIMIZE=1 20 | #user=ansible 21 | 22 | [supervisord] 23 | logfile=/tmp/supervisord.log 24 | pidfile=/tmp/supervisord.pid 25 | 26 | [unix_http_server] 27 | file=/tmp/supervisord.sock 28 | chmod=0777 29 | chown= nobody:nobody 30 | 31 | [supervisorctl] 32 | serverurl=unix:///tmp/supervisord.sock 33 | 34 | [rpcinterface:supervisor] 35 | supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Erazx/ansible_api/09a13f0232a6dc3dcec08aa72d40b430ec4b368b/tests/__init__.py -------------------------------------------------------------------------------- /uwsgi.yml: -------------------------------------------------------------------------------- 1 | uwsgi: 2 | socket: 127.0.0.1:5658 3 | chdir: /home/ansible/ansible_api 4 | wsgi-file: manage.py 5 | callable: app 6 | processes: 1 7 | threads: 2 8 | stats: 127.0.0.1:9191 9 | uid: ansible --------------------------------------------------------------------------------
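
For reference, below is a minimal client-side sketch of how a signed request can be built for this API. It mirrors the logic in app/main/utils.check_sign (values concatenated in reverse key order, the shared API_SIGN_KEY appended, the result md5-hashed). The base URL, the example key value, and the use of the requests library are assumptions for illustration only, not part of this repo; the repo itself targets Python 2, so the hashing call is written for Python 2 strings.

```
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# sign_and_submit.py -- illustrative client sketch, not part of this repo.
# Mirrors app/main/utils.check_sign: sort fields by key in reverse order,
# concatenate the values (json.dumps for non-strings), append the sign key,
# and md5 the result.
import hashlib
import json
import time

import requests  # assumed HTTP client; not in requirements.txt

API_SIGN_KEY = 'secret_sign_key'    # must match config.Config.API_SIGN_KEY on the server
BASE_URL = 'http://127.0.0.1:5000'  # assumed dev-server address (python manage.py runserver)


def make_sign(payload, sign_key):
    s = ''
    for key, value in sorted(payload.items(), key=lambda d: d[0], reverse=True):
        # check_sign() concatenates strings as-is and falls back to json.dumps for other types
        s += value if isinstance(value, str) else json.dumps(value)
    return hashlib.md5(s + sign_key).hexdigest()  # Python 2 str; .encode() would be needed on Python 3


payload = {
    'module_name': 'shell',
    'module_args': 'hostname -s',
    'pattern': 'all',
    'timestamp': str(int(time.time())),
}
payload['sign'] = make_sign(payload, API_SIGN_KEY)

# submit the ad-hoc task ...
resp = requests.post(BASE_URL + '/ad_hoc', json=payload)
task = resp.json()
print('%s -> %s' % (task['task_id'], task['task_url']))

# ... and poll its state via the returned task_url
stats = requests.get(BASE_URL + task['task_url']).json()
print('%s: %s' % (stats['state'], stats['status']))
```

The same signing helper works for POST /playbook by sending playbook (and optionally resource / extra_vars) instead of the module fields.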