├── .dockerignore ├── .gitignore ├── .gitmodules ├── Makefile ├── README.md ├── app ├── __init__.py ├── controllers │ ├── __init__.py │ ├── api │ │ ├── __init__.py │ │ ├── doc.html │ │ ├── doc.py │ │ └── v1 │ │ │ ├── __init__.py │ │ │ ├── account.py │ │ │ ├── actions.py │ │ │ ├── datacenters.py │ │ │ ├── groups.py │ │ │ ├── hosts.py │ │ │ ├── network_groups.py │ │ │ ├── open.py │ │ │ ├── users.py │ │ │ └── work_groups.py │ ├── auth_controller.py │ └── main.py ├── models │ ├── __init__.py │ ├── api_action.py │ ├── datacenter.py │ ├── group.py │ ├── host.py │ ├── network_group.py │ ├── storable_model.py │ ├── token.py │ ├── user.py │ └── work_group.py └── tests │ ├── __init__.py │ ├── httpapi │ ├── __init__.py │ ├── httpapi_testcase.py │ ├── test_account_ctrl.py │ ├── test_datacenter_ctrl.py │ ├── test_group_ctrl.py │ ├── test_host_ctrl.py │ ├── test_network_group_ctrl.py │ └── test_user_ctrl.py │ ├── models │ ├── __init__.py │ ├── test_datacenter_model.py │ ├── test_group_model.py │ ├── test_host_model.py │ ├── test_network_group_model.py │ ├── test_storable_model.py │ ├── test_user_model.py │ └── test_work_group_model.py │ └── utils │ ├── __init__.py │ ├── test_diff.py │ ├── test_merge.py │ ├── test_ownership.py │ ├── test_pbkdf2.py │ └── test_permutation.py ├── commands ├── __init__.py ├── actions.py ├── check.py ├── convert.py ├── index.py ├── run.py ├── sessions.py ├── shell.py ├── test.py └── work_groups.py ├── config ├── development │ ├── app.py │ ├── cache.py │ ├── db.py │ └── log.py ├── production │ ├── app.py │ ├── cache.py │ ├── db.py │ └── log.py └── testing │ ├── app.py │ ├── cache.py │ ├── db.py │ └── log.py ├── extconf ├── logrotate.conf ├── nginx │ └── nginx.conf └── uwsgi │ └── inventoree.ini ├── library ├── __init__.py ├── db.py ├── engine │ ├── __init__.py │ ├── action_log.py │ ├── baseapp.py │ ├── cache.py │ ├── errors.py │ ├── graph.py │ ├── json_encoder.py │ ├── ownership.py │ ├── pbkdf2.py │ ├── permissions.py │ ├── permutation.py │ └── 
utils.py └── mongo_session.py ├── micro.py ├── plugins ├── __init__.py ├── authorizers │ ├── .gitignore │ ├── __init__.py │ ├── local_authorizer.py │ └── vk_authorizer.py ├── commands │ ├── .gitignore │ ├── .gitkeep │ ├── __init__.py │ └── example.py └── extend │ ├── __init__.py │ └── csrf.py ├── postinst.sh ├── requirements.txt ├── tests_coverage.sh └── wsgi.py /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .idea 3 | 4 | .coverage 5 | 6 | .vscode 7 | .venv 8 | webui 9 | 10 | /build.sh 11 | /Dockerfile 12 | /*.log 13 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/.gitmodules -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | CONTAINER_NAME = inventoree-builder 2 | 3 | build: 4 | docker build -t $(CONTAINER_NAME) . && docker run -it $(CONTAINER_NAME) 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Inventoree 2 | 3 | Inventoree leads you through the chaos of your infrastructure 4 | 5 | **Disclaimer:** inventoree (previously known as conductor) is nothing more than a servers inventory. Originally such a project was created at Yandex to help system engineers to store and classify their servers combining them into groups which are combined into projects. 
Inventoree is able to watch for your data structure consistency and check users' permissions to modify it. With a fast and well structured REST API it can be used by various CMS systems like Saltstack, Chef, Puppet and Ansible as an extra filter for running states and commands. With built-in inventoree **tags** and **custom fields** one can store roles of groups or individual servers which is convenient for use as extra data for state formulas. 6 | 7 | The original version of conductor has the ability to track deployment and package movement among repositories, which won't be implemented in Inventoree. 8 | 9 | **Important:** this version is not based on the original one but created from scratch using absolutely separate technologies. 10 | 11 | ## Development bootstrap 12 | 13 | Being in deep development and moving from one laptop to another, from MacOSX to Windows and Linux, the current inventoree version has become very easy to set up. 14 | 15 | * Clone the repo and `cd` into its directory. 16 | * Run `pip install -r requirements.txt` to install the python requirements. Using virtualenv is highly recommended 17 | * This version requires a mongodb 2.6+ server to be installed on localhost with inventoree itself. You can change this behaviour in the `config/db.py` file. Although it's tracked by git and no mechanism has been created for custom configuration yet. So just put mongodb right alongside your inventoree installation, it's just the most appropriate solution at the moment. 18 | * Run `./micro.py index` to create indexes. Be sure your mongodb server is up and running. 19 | * Run `./micro.py shell` to start the project shell. If you have IPython installed in your virtualenv, it will be used automatically. 
20 | * Create a supervisor user as described below: 21 | ``` 22 | from app.models import User 23 | user = User(username="", password_raw="", supervisor=True) 24 | user.save() 25 | ``` 26 | * Exit the shell using `Ctrl-D` and run `./micro.py run` to start the python development server. By default it binds on port 5000, you can check it with a browser or something like curl using the url `http://localhost:5000/api/v1/account/me` - it will give you an Unauthorized error. 27 | * `cd` to `reactapp` and run `npm install` to install React.js and all the react application dependencies. You must have node.js 6+ installed 28 | * Run `npm start` to build the react application, don't be scared when it starts your browser automatically. 29 | * Log in using your supervisor username and password 30 | 31 | ## Experimental Web UI 32 | 33 | Inventoree has a new nice looking redesigned web ui written using vue.js. To replace the original one follow these steps: 34 | * Update the `webui` submodule using `git submodule init; git submodule update`. This will pull the new ui sources from the [inventoree-ui](https://github.com/viert/inventoree-ui) repository into the `webui` directory 35 | * Instead of the "cd-ing into `reactapp`" step, `cd` into `webui` and run `npm install` to install Vue.js and all its dependencies. You must have node.js 6+ installed. 36 | * Run `npm run dev` to build the application and serve it using the built-in nodejs http server. To create optimized production-ready static files use `npm run build` instead. 37 | 38 | ## Note on external authentication 39 | 40 | An example authorizer (via vk.com) is located in the `plugins` folder. The only thing that is mandatory in an authorizer is the `get_authentication_url` method. If this method returns an actual url like 41 | `https://oauth.vk.com/authorize?client_id=...` the button *EXTERNAL AUTH* appears on the Login page automatically (and leads to that url). 
In the case of the example vk authorizer there's a *NAME* class property assigned which makes the button text change to *VK LOGIN*. 42 | 43 | The second thing you have to do is create a special handler in flask (that's why authorizers get the flask app in the constructor) which is supposed to handle callbacks from external auth services (all modern authentication systems like OAuth, SAML SSO and OpenID act like this). This handler is aimed to find an actual local user related to the external user data you receive. If the user doesn't exist, your task is to create it first and connect it to the external data (the `ext_id` field in the `User` model serves for this purpose and is indexed by default). The next time the user logs in, your handler should find it by `ext_id`. 44 | 45 | The last task you have to do in the callback handler is to put the local `_id` field of the found user instance into session["user_id"] - that's how Inventoree gets the user authenticated. 46 | 47 | Don't forget to set `AUTHORIZER=""` in the app config to actually set up your authorizer. All plugins in the `plugins` directory are loaded automatically but no 
49 | 50 | ## Roadmap 51 | 52 | ### v6.12 53 | * Tag search UI 54 | * Applications Registry API 55 | -------------------------------------------------------------------------------- /app/__init__.py: -------------------------------------------------------------------------------- 1 | from library.engine.baseapp import BaseApp 2 | 3 | 4 | class App(BaseApp): 5 | 6 | def configure_routes(self): 7 | self.logger.info("Configuring conductor endpoints") 8 | 9 | self.logger.debug("main_ctrl at /") 10 | from app.controllers.main import main_ctrl 11 | self.flask.register_blueprint(main_ctrl, url_prefix="/") 12 | 13 | self.logger.debug("doc_ctrl at /api") 14 | from app.controllers.api.doc import doc_ctrl 15 | self.flask.register_blueprint(doc_ctrl, url_prefix="/api") 16 | 17 | self.logger.debug("work_groups_ctrl at /api/v1/work_groups") 18 | from app.controllers.api.v1.work_groups import work_groups_ctrl 19 | self.flask.register_blueprint(work_groups_ctrl, url_prefix="/api/v1/work_groups") 20 | 21 | self.logger.debug("account_ctrl at /api/v1/account") 22 | from app.controllers.api.v1.account import account_ctrl 23 | self.flask.register_blueprint(account_ctrl, url_prefix="/api/v1/account") 24 | 25 | self.logger.debug("datacenters_ctrl at /api/v1/datacenters") 26 | from app.controllers.api.v1.datacenters import datacenters_ctrl 27 | self.flask.register_blueprint(datacenters_ctrl, url_prefix="/api/v1/datacenters") 28 | 29 | self.logger.debug("groups_ctrl at /api/v1/groups") 30 | from app.controllers.api.v1.groups import groups_ctrl 31 | self.flask.register_blueprint(groups_ctrl, url_prefix="/api/v1/groups") 32 | 33 | self.logger.debug("groups_ctrl at /api/v1/network_groups") 34 | from app.controllers.api.v1.network_groups import network_groups_ctrl 35 | self.flask.register_blueprint(network_groups_ctrl, url_prefix="/api/v1/network_groups") 36 | 37 | self.logger.debug("hosts_ctrl at /api/v1/hosts") 38 | from app.controllers.api.v1.hosts import hosts_ctrl 39 | 
self.flask.register_blueprint(hosts_ctrl, url_prefix="/api/v1/hosts") 40 | 41 | self.logger.debug("users_ctrl at /api/v1/users") 42 | from app.controllers.api.v1.users import users_ctrl 43 | self.flask.register_blueprint(users_ctrl, url_prefix="/api/v1/users") 44 | 45 | self.logger.debug("actions_ctrl at /api/v1/actions") 46 | from app.controllers.api.v1.actions import actions_ctrl 47 | self.flask.register_blueprint(actions_ctrl, url_prefix="/api/v1/actions") 48 | 49 | self.logger.debug("open_ctrl at /api/v1/open") 50 | from app.controllers.api.v1.open import open_ctrl 51 | self.flask.register_blueprint(open_ctrl, url_prefix="/api/v1/open") 52 | 53 | if self.envtype == 'development': 54 | from app.models import ApiAction 55 | self.logger.info("checking action handlers") 56 | failed_types = ApiAction.check_compute_handlers() 57 | for atype in failed_types: 58 | self.logger.error("action type %s doesn't have a compute handler" % atype) 59 | 60 | 61 | app = App() 62 | -------------------------------------------------------------------------------- /app/controllers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/app/controllers/__init__.py -------------------------------------------------------------------------------- /app/controllers/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/app/controllers/api/__init__.py -------------------------------------------------------------------------------- /app/controllers/api/doc.py: -------------------------------------------------------------------------------- 1 | from app.controllers.auth_controller import AuthController 2 | from os.path import dirname, join 3 | from flask import make_response 4 | 5 | doc_ctrl = AuthController("doc", __name__, 
require_auth=False) 6 | 7 | 8 | @doc_ctrl.route("/") 9 | def doc(): 10 | dochtml = join(dirname(__file__), 'doc.html') 11 | with open(dochtml) as f: 12 | r = make_response(f.read()) 13 | r.headers["Content-Type"] = "text/html" 14 | return r 15 | -------------------------------------------------------------------------------- /app/controllers/api/v1/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/app/controllers/api/v1/__init__.py -------------------------------------------------------------------------------- /app/controllers/api/v1/account.py: -------------------------------------------------------------------------------- 1 | from app.controllers.auth_controller import AuthController 2 | from flask import session, g 3 | from library.engine.utils import json_response 4 | from plugins.authorizers.local_authorizer import LocalAuthorizer 5 | 6 | account_ctrl = AuthController("auth", __name__, require_auth=False) 7 | 8 | 9 | @account_ctrl.route("/me", methods=["GET"]) 10 | def me(): 11 | from app.models import User 12 | if g.user is None: 13 | return AuthController.error_response() 14 | else: 15 | user_data = g.user.to_dict(fields=list(User.FIELDS) + ["auth_token", "avatar"]) 16 | return json_response({ "data": user_data }) 17 | 18 | 19 | @account_ctrl.route("/authenticate", methods=["POST"]) 20 | def authenticate(): 21 | user_data = LocalAuthorizer.get_user_data() 22 | session["user_id"] = user_data["_id"] 23 | session.modified = True 24 | return json_response({ "status": "authenticated", "data": user_data }) 25 | 26 | 27 | @account_ctrl.route("/logout", methods=["POST"]) 28 | def logout(): 29 | del(session["user_id"]) 30 | session.modified = True 31 | g.user = None 32 | return json_response({ "status": "logged out" }) 33 | -------------------------------------------------------------------------------- 
/app/controllers/api/v1/actions.py: -------------------------------------------------------------------------------- 1 | from app.controllers.auth_controller import AuthController 2 | from library.engine.utils import paginated_data, json_response, resolve_id 3 | from flask import request 4 | 5 | actions_ctrl = AuthController("actions", __name__, require_auth=True) 6 | 7 | 8 | @actions_ctrl.route("/", methods=["GET"]) 9 | @actions_ctrl.route("/", methods=["GET"]) 10 | def index(id=None): 11 | from app.models import ApiAction 12 | 13 | if id is None: 14 | query = {} 15 | if "_users" in request.values: 16 | users = request.values["_users"] 17 | users = users.split(",") 18 | query["username"] = {"$in": users} 19 | if "_action_types" in request.values: 20 | action_types = request.values["_action_types"] 21 | action_types = action_types.split(",") 22 | query["action_type"] = {"$in": action_types} 23 | actions = ApiAction.find(query).sort([('created_at', -1)]) 24 | else: 25 | action_id = resolve_id(id) 26 | actions = ApiAction.find({"_id": action_id}) 27 | return json_response(paginated_data(actions)) 28 | 29 | 30 | @actions_ctrl.route("/action_types", methods=["GET"]) 31 | def action_types(): 32 | from library.engine.action_log import action_types 33 | from app.models import ApiAction 34 | data = [(x, hasattr(ApiAction, '_compute_' + x)) for x in action_types] 35 | return json_response({"action_types": dict(data)}) -------------------------------------------------------------------------------- /app/controllers/api/v1/datacenters.py: -------------------------------------------------------------------------------- 1 | from app.controllers.auth_controller import AuthController 2 | from library.engine.utils import resolve_id, json_response, paginated_data, \ 3 | get_request_fields, json_body_required, filter_query 4 | from library.engine.errors import DatacenterNotFound 5 | from library.engine.action_log import logged_action 6 | from flask import request 7 | 8 | 9 | 
datacenters_ctrl = AuthController("datacenters", __name__, require_auth=True) 10 | 11 | 12 | @datacenters_ctrl.route("/", methods=["GET"]) 13 | @datacenters_ctrl.route("/", methods=["GET"]) 14 | def show(datacenter_id=None): 15 | from app.models import Datacenter 16 | if datacenter_id is None: 17 | query = {} 18 | if "_filter" in request.values: 19 | name_filter = request.values["_filter"] 20 | if len(name_filter) > 0: 21 | query["name"] = filter_query(name_filter) 22 | datacenters = Datacenter.find(query) 23 | else: 24 | datacenter_id = resolve_id(datacenter_id) 25 | datacenters = Datacenter.find({ "$or": [ 26 | { "_id": datacenter_id }, 27 | { "name": datacenter_id } 28 | ]}) 29 | if datacenters.count() == 0: 30 | raise DatacenterNotFound("datacenter not found") 31 | data = paginated_data(datacenters.sort("name")) 32 | return json_response(data) 33 | 34 | 35 | @datacenters_ctrl.route("/", methods=["POST"]) 36 | @logged_action("datacenter_create") 37 | @json_body_required 38 | def create(): 39 | from app.models import Datacenter 40 | 41 | dc_attrs = dict([x for x in request.json.items() if x[0] in Datacenter.FIELDS]) 42 | dc = Datacenter(**dc_attrs) 43 | # TODO: check permissions! 44 | dc.save() 45 | if "parent_id" in dc_attrs and dc_attrs["parent_id"] is not None: 46 | return set_parent(dc_id=dc._id) 47 | return json_response({ "data": dc.to_dict(get_request_fields()) }, 201) 48 | 49 | 50 | @datacenters_ctrl.route("/", methods=["PUT"]) 51 | @logged_action("datacenter_update") 52 | @json_body_required 53 | def update(dc_id): 54 | from app.models import Datacenter 55 | dc = Datacenter.get(dc_id, DatacenterNotFound("datacenter not found")) 56 | 57 | # TODO: check permissions! 
58 | dc.update(request.json) 59 | if "parent_id" in request.json: 60 | parent_id = resolve_id(request.json["parent_id"]) 61 | if parent_id != dc.parent_id: 62 | return set_parent(dc_id=dc._id) 63 | return json_response({ "data": dc.to_dict(get_request_fields()) }) 64 | 65 | 66 | @datacenters_ctrl.route("/", methods=["DELETE"]) 67 | @logged_action("datacenter_delete") 68 | def delete(dc_id): 69 | from app.models import Datacenter 70 | dc = Datacenter.get(dc_id, DatacenterNotFound("datacenter not found")) 71 | # TODO: check permissions! 72 | dc.destroy() 73 | return json_response({ "data": dc.to_dict(get_request_fields()) }) 74 | 75 | 76 | @datacenters_ctrl.route("//set_parent", methods=["PUT"]) 77 | @logged_action("datacenter_set_parent") 78 | @json_body_required 79 | def set_parent(dc_id): 80 | from app.models import Datacenter 81 | dc = Datacenter.get(dc_id, DatacenterNotFound("datacenter not found")) 82 | 83 | # TODO: check permissions! 84 | parent_id = request.json.get("parent_id") 85 | if dc.parent: 86 | dc.unset_parent() 87 | if parent_id is not None: 88 | parent = Datacenter.get(parent_id, DatacenterNotFound("parent datacenter not found")) 89 | dc.set_parent(parent._id) 90 | return json_response({ "data": dc.to_dict(get_request_fields()) }) -------------------------------------------------------------------------------- /app/controllers/api/v1/network_groups.py: -------------------------------------------------------------------------------- 1 | from library.engine.utils import resolve_id, paginated_data, json_response, \ 2 | get_request_fields, json_body_required, filter_query 3 | from library.engine.permissions import get_user_from_app_context 4 | from library.engine.errors import NetworkGroupNotFound, WorkGroupNotFound, IntegrityError, Forbidden, InputDataError 5 | from app.controllers.auth_controller import AuthController 6 | from library.engine.action_log import logged_action 7 | from flask import request 8 | 9 | network_groups_ctrl = 
AuthController('network_groups', __name__, require_auth=True) 10 | 11 | 12 | @network_groups_ctrl.route("/", methods=["GET"]) 13 | @network_groups_ctrl.route("/", methods=["GET"]) 14 | def show(network_group_id=None): 15 | from app.models import NetworkGroup 16 | if network_group_id is None: 17 | query = {} 18 | if "_filter" in request.values: 19 | name_filter = request.values["_filter"] 20 | if len(name_filter) > 0: 21 | query["name"] = filter_query(name_filter) 22 | if "work_group_id" in request.values: 23 | work_group_id = resolve_id(request.values["work_group_id"]) 24 | query["work_group_id"] = work_group_id 25 | elif "work_group_name" in request.values: 26 | from app.models import WorkGroup 27 | wg = WorkGroup.get(request.values["work_group_name"]) 28 | if wg is None: 29 | query["work_group_id"] = None 30 | else: 31 | query["work_group_id"] = wg._id 32 | network_groups = NetworkGroup.find(query) 33 | else: 34 | network_group_id = resolve_id(network_group_id) 35 | network_groups = NetworkGroup.find({"$or": [ 36 | { "_id": network_group_id }, 37 | { "name": network_group_id } 38 | ]}) 39 | if network_groups.count() == 0: 40 | raise NetworkGroupNotFound("server group not found") 41 | data = paginated_data(network_groups.sort("name")) 42 | return json_response(data) 43 | 44 | 45 | @network_groups_ctrl.route("/", methods=["POST"]) 46 | @logged_action("network_group_create") 47 | @json_body_required 48 | def create(): 49 | from app.models import NetworkGroup, WorkGroup 50 | 51 | user = get_user_from_app_context() 52 | if user is None or not user.system: 53 | raise Forbidden("only system users are allowed to create server groups") 54 | 55 | sgroup_attrs = request.json.copy() 56 | if "work_group_id" not in sgroup_attrs: 57 | if "work_group_name" in sgroup_attrs: 58 | work_group = WorkGroup.find_one({"name": sgroup_attrs["work_group_name"]}) 59 | if work_group is not None: 60 | sgroup_attrs["work_group_id"] = work_group._id 61 | del(sgroup_attrs["work_group_name"]) 62 
| else: 63 | raise WorkGroupNotFound("work_group provided has not been found") 64 | else: 65 | raise IntegrityError("network_group has to be in a work_group") 66 | else: 67 | if sgroup_attrs["work_group_id"] is None: 68 | raise IntegrityError("group has to be in a work_group") 69 | work_group = WorkGroup.get(sgroup_attrs["work_group_id"], WorkGroupNotFound("work_group provided has not been found")) 70 | sgroup_attrs["work_group_id"] = work_group._id 71 | 72 | sgroup_attrs = dict([x for x in sgroup_attrs.items() if x[0] in NetworkGroup.FIELDS]) 73 | network_group = NetworkGroup(**sgroup_attrs) 74 | network_group.save() 75 | return json_response({ "data": network_group.to_dict(get_request_fields()) }, 201) 76 | 77 | 78 | @network_groups_ctrl.route("/", methods=["DELETE"]) 79 | @logged_action("network_group_delete") 80 | def destroy(network_group_id): 81 | from app.models import NetworkGroup 82 | user = get_user_from_app_context() 83 | if user is None or not user.system: 84 | raise Forbidden("only system users are allowed to create server groups") 85 | 86 | network_group_id = resolve_id(network_group_id) 87 | network_group = NetworkGroup.find_one({"$or": [ 88 | { "_id": network_group_id }, 89 | { "name": network_group_id } 90 | ]}) 91 | 92 | if network_group is None: 93 | raise NetworkGroupNotFound("server group not found") 94 | 95 | network_group.clear_hosts() 96 | network_group.destroy() 97 | return json_response({ "data": network_group.to_dict(get_request_fields()) }) 98 | -------------------------------------------------------------------------------- /app/controllers/api/v1/open.py: -------------------------------------------------------------------------------- 1 | from flask import request, make_response 2 | from datetime import datetime 3 | from app.controllers.auth_controller import AuthController 4 | from library.engine.utils import json_response, cursor_to_list, get_app_version, get_boolean_request_param 5 | from library.engine.errors import ApiError, 
WorkGroupNotFound 6 | 7 | open_ctrl = AuthController("open", __name__, require_auth=False) 8 | 9 | 10 | def get_executer_data(query, recursive=False, include_unattached=False): 11 | from app.models import WorkGroup, Datacenter, Group, Host 12 | 13 | host_fields = list(Host.FIELDS) 14 | group_fields = list(Group.FIELDS) 15 | 16 | if recursive: 17 | host_fields += ["all_tags", "all_custom_fields"] 18 | group_fields += ["all_tags", "all_custom_fields"] 19 | 20 | work_groups = WorkGroup.find(query) 21 | work_groups = cursor_to_list(work_groups) 22 | work_group_ids = [x["_id"] for x in work_groups] 23 | 24 | groups = Group.find({ "work_group_id": { "$in": work_group_ids }}) 25 | groups = cursor_to_list(groups, fields=group_fields) 26 | group_ids = [x["_id"] for x in groups] 27 | 28 | if include_unattached: 29 | hosts = Host.find({}) 30 | else: 31 | hosts = Host.find({ "group_id": { "$in": group_ids }}) 32 | hosts = cursor_to_list(hosts, fields=host_fields) 33 | 34 | datacenters = Datacenter.find({}) 35 | datacenters = cursor_to_list(datacenters) 36 | return { 37 | "datacenters": datacenters, 38 | "work_groups": work_groups, 39 | "groups": groups, 40 | "hosts": hosts 41 | } 42 | 43 | 44 | @open_ctrl.route("/executer_data") 45 | def executer_data(): 46 | query = {} 47 | if "work_groups" in request.values: 48 | work_group_names = [x for x in request.values["work_groups"].split(",") if x != ""] 49 | if len(work_group_names) > 0: 50 | query["name"] = {"$in": work_group_names} 51 | 52 | recursive = get_boolean_request_param("recursive") 53 | include_unattached = get_boolean_request_param("include_unattached") 54 | 55 | results = get_executer_data(query, recursive, include_unattached) 56 | return json_response({"data": results}) 57 | 58 | 59 | @open_ctrl.route("/ansible") 60 | def ansible(): 61 | from app.models import WorkGroup 62 | from library.engine.utils import full_group_structure, ansible_group_structure 63 | 64 | if "work_groups" not in request.values: 65 | raise 
ApiError("work_groups query param is mandatory") 66 | 67 | work_group_names = [x for x in request.values["work_groups"].split(",") if x != ""] 68 | query = {"name": {"$in": work_group_names}} 69 | work_group_ids = [x._id for x in WorkGroup.find(query)] 70 | if not work_group_ids: 71 | raise WorkGroupNotFound("can't find any workgroup on your request") 72 | 73 | include_vars = get_boolean_request_param("vars") 74 | if include_vars: 75 | host_fields = ["fqdn", "group_id", "ansible_vars"] 76 | else: 77 | host_fields = ["fqdn", "group_id"] 78 | 79 | fmt = request.values.get("format", "plain") 80 | if fmt == "plain": 81 | structure = full_group_structure(work_group_ids, host_fields=host_fields) 82 | render = "# This ansible inventory file was rendered from inventoree database, %s\n# For more info on inventoree please refer to https://github.com/viert/inventoree\n\n" % datetime.now().isoformat() 83 | for group_id, group in structure.items(): 84 | if len(group["all_hosts"]) > 0: 85 | render += "[%s]\n" % group["name"] 86 | if include_vars: 87 | hosts = group["all_hosts"].values() 88 | hosts.sort(key=lambda x: x["fqdn"]) 89 | for host in hosts: 90 | render += host["fqdn"] 91 | if host["ansible_vars"] is not None: 92 | for key, value in host["ansible_vars"].iteritems(): 93 | render += " %s=%s" % (key, value) 94 | render += "\n" 95 | else: 96 | host_names = [x["fqdn"] for x in group["all_hosts"].values()] 97 | host_names.sort() 98 | for fqdn in host_names: 99 | render += fqdn + "\n" 100 | render += "\n\n" 101 | 102 | response = make_response(render) 103 | response.headers["Content-Type"] = "text/plain" 104 | return response 105 | elif fmt == "json": 106 | return json_response(ansible_group_structure(work_group_ids, include_vars)) 107 | else: 108 | raise ApiError("Invalid format. Valid formats are either \"plain\" or \"json\". 
(Defaults to \"plain\"") 109 | 110 | 111 | @open_ctrl.route("/app", methods=["GET", "POST"]) 112 | def info(): 113 | from app import app 114 | 115 | results = { 116 | "app": { 117 | "name": "inventoree" 118 | } 119 | } 120 | 121 | results["app"]["version"] = get_app_version() 122 | results["app"]["action_logging_enabled"] = app.action_logging 123 | 124 | from library.db import db 125 | results["mongodb"] = db.conn.client.server_info() 126 | 127 | import flask 128 | results["flask_version"] = flask.__version__ 129 | 130 | from library.engine.cache import check_cache 131 | results["cache"] = { 132 | "type": app.cache.__class__.__name__, 133 | "active": check_cache() 134 | } 135 | 136 | return json_response({ "app_info": results }) 137 | 138 | 139 | def _get_hosts(group_names=None, tags=None): 140 | from app.models import Group 141 | if group_names is None: 142 | tag_query = [{ "tags": x } for x in tags] 143 | groups = Group.find({ "$or": tag_query }) 144 | else: 145 | groups = Group.find({"name": {"$in": group_names}}) 146 | 147 | all_hosts = set() 148 | for group in groups: 149 | all_hosts = all_hosts.union(group.all_hosts.all()) 150 | 151 | if tags is not None: 152 | hosts = [] 153 | for host in all_hosts: 154 | for tag in tags: 155 | if tag in host.all_tags: 156 | hosts.append(host) 157 | else: 158 | hosts = list(all_hosts) 159 | 160 | return hosts 161 | 162 | 163 | @open_ctrl.route("/resolve_hosts") 164 | def resolve(): 165 | from app.models import Host 166 | if "groups" not in request.values and "tags" not in request.values: 167 | return json_response({ "errors": ["You must provide groups and/or tags to search with"] }) 168 | 169 | if "groups" in request.values: 170 | group_names = request.values["groups"].split(",") 171 | else: 172 | group_names = None 173 | 174 | if "tags" in request.values: 175 | tags = request.values["tags"].split(",") 176 | else: 177 | tags = None 178 | 179 | if "fields" in request.values: 180 | fields = request.values["fields"].split(",") 
181 | else: 182 | fields = list(Host.FIELDS) + ["all_tags"] 183 | 184 | hosts = _get_hosts(group_names, tags) 185 | return json_response({ "data": cursor_to_list(hosts,fields) }) 186 | -------------------------------------------------------------------------------- /app/controllers/api/v1/users.py: -------------------------------------------------------------------------------- 1 | from app.controllers.auth_controller import AuthController 2 | from flask import request, g 3 | from library.engine.utils import json_response, resolve_id, paginated_data, \ 4 | get_request_fields, json_body_required, filter_query 5 | from library.engine.errors import UserNotFound, Forbidden, ApiError, UserAlreadyExists 6 | from library.engine.action_log import logged_action 7 | 8 | users_ctrl = AuthController("users", __name__, require_auth=True) 9 | 10 | 11 | @users_ctrl.route("/") 12 | @users_ctrl.route("/") 13 | def show(user_id=None): 14 | from app.models import User 15 | if user_id is None: 16 | query = {} 17 | if "_filter" in request.values: 18 | name_filter = request.values["_filter"] 19 | if len(name_filter) > 0: 20 | query["username"] = filter_query(name_filter) 21 | users = User.find(query) 22 | else: 23 | user_id = resolve_id(user_id) 24 | users = User.find({ 25 | "$or": [ 26 | { "_id": user_id }, 27 | { "username": user_id } 28 | ] 29 | }) 30 | if users.count() == 0: 31 | raise UserNotFound("user not found") 32 | 33 | return json_response(paginated_data(users.sort("username"))) 34 | 35 | 36 | @users_ctrl.route("/", methods=["POST"]) 37 | @logged_action("user_create") 38 | @json_body_required 39 | def create(): 40 | if not g.user.supervisor: 41 | raise Forbidden("you don't have permission to create new users") 42 | 43 | from app.models import User 44 | user_attrs = dict([(k, v) for k, v in request.json.items() if k in User.FIELDS]) 45 | if "password_hash" in user_attrs: 46 | del(user_attrs["password_hash"]) 47 | 48 | try: 49 | passwd = request.json["password_raw"] 50 | 
passwd_confirm = request.json["password_raw_confirm"] 51 | if passwd != passwd_confirm: 52 | raise ApiError("passwords don't match") 53 | user_attrs["password_raw"] = passwd 54 | except KeyError: 55 | pass 56 | 57 | existing = User.get(user_attrs["username"]) 58 | if existing is not None: 59 | raise UserAlreadyExists("user with such username already exists") 60 | 61 | new_user = User(**user_attrs) 62 | new_user.save() 63 | 64 | return json_response({"data": new_user.to_dict(get_request_fields())}) 65 | 66 | 67 | @users_ctrl.route("/", methods=["PUT"]) 68 | @logged_action("user_update") 69 | @json_body_required 70 | def update(user_id): 71 | from app.models import User 72 | user = User.get(user_id, UserNotFound("user not found")) 73 | if not user.modification_allowed: 74 | raise Forbidden("you don't have permissions to modify this user") 75 | user_attrs = dict([(k, v) for k, v in request.json.items() if k in User.FIELDS]) 76 | user.update(user_attrs) 77 | return json_response({"data":user.to_dict(get_request_fields())}) 78 | 79 | 80 | @users_ctrl.route("//set_password", methods=["PUT"]) 81 | @logged_action("user_set_password") 82 | @json_body_required 83 | def set_password(user_id): 84 | from app.models import User 85 | user = User.get(user_id, UserNotFound("user not found")) 86 | if not user.modification_allowed: 87 | raise Forbidden("you don't have permissions to modify this user") 88 | try: 89 | passwd = request.json["password_raw"] 90 | passwd_confirm = request.json["password_raw_confirm"] 91 | if passwd != passwd_confirm: 92 | raise ApiError("passwords don't match") 93 | except KeyError: 94 | raise ApiError("you should provide password_raw and password_raw_confirm fields") 95 | 96 | user.set_password(passwd) 97 | user.save() 98 | return json_response({"data":user.to_dict(get_request_fields())}) 99 | 100 | 101 | @users_ctrl.route("//set_supervisor", methods=["PUT"]) 102 | @logged_action("user_set_supervisor") 103 | @json_body_required 104 | def 
set_supervisor(user_id): 105 | from app.models import User 106 | user = User.get(user_id, UserNotFound("user not found")) 107 | if not user.supervisor_set_allowed: 108 | raise Forbidden("you don't have permissions to set supervisor property for this user") 109 | 110 | try: 111 | supervisor = request.json["supervisor"] 112 | except KeyError: 113 | raise ApiError("no supervisor field provided") 114 | 115 | if type(supervisor) != bool: 116 | raise ApiError("invalid superuser field type") 117 | 118 | user.supervisor = supervisor 119 | user.save() 120 | return json_response({"data":user.to_dict(get_request_fields())}) 121 | 122 | 123 | @users_ctrl.route("//set_system", methods=["PUT"]) 124 | @logged_action("user_set_system") 125 | @json_body_required 126 | def set_system(user_id): 127 | from app.models import User 128 | user = User.get(user_id, UserNotFound("user not found")) 129 | if not user.system_set_allowed: 130 | raise Forbidden("you don't have permissions to set system property for this user") 131 | try: 132 | system = request.json["system"] 133 | except KeyError: 134 | raise ApiError("no system field provided") 135 | 136 | if type(system) != bool: 137 | raise ApiError("invalid system field type") 138 | 139 | user.system = system 140 | user.save() 141 | return json_response({"data":user.to_dict(get_request_fields())}) 142 | 143 | 144 | @users_ctrl.route("/", methods=["DELETE"]) 145 | @logged_action("user_delete") 146 | def delete(user_id): 147 | from app.models import User 148 | user = User.get(user_id, UserNotFound("user not found")) 149 | if not g.user.supervisor: 150 | raise Forbidden("you don't have permissions to delete this user") 151 | 152 | user.destroy() 153 | 154 | return json_response({"data":user.to_dict(get_request_fields())}) 155 | -------------------------------------------------------------------------------- /app/controllers/api/v1/work_groups.py: -------------------------------------------------------------------------------- 1 | from flask 
import request, g 2 | from app.controllers.auth_controller import AuthController 3 | from library.engine.utils import resolve_id, paginated_data, \ 4 | json_response, clear_aux_fields, get_request_fields, json_body_required, filter_query, get_boolean_request_param 5 | from library.engine.errors import WorkGroupNotFound, Forbidden, ApiError, UserNotFound, Conflict 6 | from library.engine.action_log import logged_action 7 | work_groups_ctrl = AuthController("work_groups", __name__, require_auth=True) 8 | 9 | 10 | @work_groups_ctrl.route("/", methods=["GET"]) 11 | @work_groups_ctrl.route("/", methods=["GET"]) 12 | def show(work_group_id=None): 13 | from app.models import WorkGroup 14 | if work_group_id is None: 15 | query = {} 16 | if "_filter" in request.values: 17 | name_filter = request.values["_filter"] 18 | if len(name_filter) > 0: 19 | query["name"] = filter_query(name_filter) 20 | if get_boolean_request_param("_mine"): 21 | user_id = g.user._id 22 | query["$or"] = [ 23 | {"member_ids": user_id}, 24 | {"owner_id": user_id} 25 | ] 26 | work_groups = WorkGroup.find(query) 27 | else: 28 | work_group_id = resolve_id(work_group_id) 29 | work_groups = WorkGroup.find({"$or": [ 30 | { "_id": work_group_id }, 31 | { "name": work_group_id } 32 | ] }) 33 | if work_groups.count() == 0: 34 | raise WorkGroupNotFound("work_group not found") 35 | return json_response(paginated_data(work_groups.sort("name"))) 36 | 37 | 38 | @work_groups_ctrl.route("/", methods=["POST"]) 39 | @logged_action("work_group_create") 40 | @json_body_required 41 | def create(): 42 | from app.models import WorkGroup 43 | data = clear_aux_fields(request.json) 44 | data["owner_id"] = g.user._id 45 | work_group = WorkGroup(**data) 46 | work_group.save() 47 | return json_response({ "data": work_group.to_dict(get_request_fields()) }) 48 | 49 | 50 | @work_groups_ctrl.route("/", methods=["PUT"]) 51 | @logged_action("work_group_update") 52 | @json_body_required 53 | def update(work_group_id): 54 | from 
app.models import WorkGroup 55 | data = clear_aux_fields(request.json) 56 | work_group = WorkGroup.get(work_group_id, WorkGroupNotFound("work_group not found")) 57 | if not work_group.modification_allowed: 58 | raise Forbidden("you don't have permission to modify this work_group") 59 | work_group.update(data) 60 | return json_response({"data": work_group.to_dict(get_request_fields()), "status":"updated"}) 61 | 62 | 63 | @work_groups_ctrl.route("/", methods=["DELETE"]) 64 | @logged_action("work_group_delete") 65 | def delete(work_group_id): 66 | from app.models import WorkGroup 67 | work_group = WorkGroup.get(work_group_id, WorkGroupNotFound("work_group not found")) 68 | if not work_group.member_list_modification_allowed: 69 | raise Forbidden("you don't have permission to modify this work_group") 70 | work_group.destroy() 71 | return json_response({ "data": work_group.to_dict(get_request_fields()), "status": "deleted" }) 72 | 73 | 74 | @work_groups_ctrl.route("//add_member", methods=["POST"]) 75 | @logged_action("work_group_add_member") 76 | @json_body_required 77 | def add_member(work_group_id): 78 | from app.models import WorkGroup, User 79 | 80 | work_group = WorkGroup.get(work_group_id, WorkGroupNotFound("work_group not found")) 81 | if not work_group.member_list_modification_allowed: 82 | raise Forbidden("you don't have permission to modify this work_group") 83 | if not "user_id" in request.json: 84 | raise ApiError("no user_id given in request payload") 85 | 86 | member = User.get(request.json["user_id"], UserNotFound("user not found")) 87 | work_group.add_member(member) 88 | return json_response({"data": work_group.to_dict(get_request_fields()), "status":"updated"}) 89 | 90 | 91 | @work_groups_ctrl.route("//remove_member", methods=["POST"]) 92 | @logged_action("work_group_remove_member") 93 | @json_body_required 94 | def remove_member(work_group_id): 95 | from app.models import WorkGroup, User 96 | 97 | work_group = WorkGroup.get(work_group_id, 
WorkGroupNotFound("work_group not found")) 98 | if not work_group.member_list_modification_allowed: 99 | raise Forbidden("you don't have permission to modify this work_group") 100 | if not "user_id" in request.json: 101 | raise ApiError("no user_id given in request payload") 102 | 103 | member = User.get(request.json["user_id"], UserNotFound("user not found")) 104 | work_group.remove_member(member) 105 | return json_response({"data": work_group.to_dict(get_request_fields()), "status":"updated"}) 106 | 107 | 108 | @work_groups_ctrl.route("//switch_owner", methods=["POST"]) 109 | @logged_action("work_group_switch_owner") 110 | @json_body_required 111 | def switch_owner(work_group_id): 112 | from app.models import WorkGroup, User 113 | 114 | work_group = WorkGroup.get(work_group_id, WorkGroupNotFound("work_group not found")) 115 | if not work_group.member_list_modification_allowed: 116 | raise Forbidden("you don't have permission to modify the work_group's owner") 117 | if not "owner_id" in request.json: 118 | raise ApiError("you should provide owner_id field") 119 | 120 | user = User.get(request.json["owner_id"], UserNotFound("new owner not found")) 121 | if user._id == work_group.owner_id: 122 | raise Conflict("old and new owners match") 123 | 124 | work_group.owner_id = user._id 125 | work_group.save() 126 | 127 | return json_response({"data": work_group.to_dict(get_request_fields()), "status":"updated"}) 128 | 129 | @work_groups_ctrl.route("//set_members", methods=["POST"]) 130 | @logged_action("work_group_set_members") 131 | @json_body_required 132 | def set_members(work_group_id): 133 | from app.models import WorkGroup, User 134 | from bson.objectid import ObjectId 135 | 136 | work_group = WorkGroup.get(work_group_id, WorkGroupNotFound("work_group not found")) 137 | if not work_group.member_list_modification_allowed: 138 | raise Forbidden("you don't have permission to modify the work_group's members") 139 | if not "member_ids" in request.json: 140 | raise 
ApiError("you should provide member_ids field") 141 | if type(request.json["member_ids"]) != list: 142 | raise ApiError("member_ids field must be an array type") 143 | 144 | member_ids = [ObjectId(x) for x in request.json["member_ids"]] 145 | failed_ids = [] 146 | for member_id in member_ids: 147 | member = User.get(member_id) 148 | if member is None: 149 | failed_ids.append(member_id) 150 | 151 | if len(failed_ids) > 0: 152 | raise UserNotFound("users with the following ids haven't been found: %s" % ", ".join([str(x) for x in failed_ids])) 153 | 154 | if work_group.owner_id in member_ids: 155 | member_ids.remove(work_group.owner_id) 156 | 157 | work_group.member_ids = member_ids 158 | work_group.save() 159 | 160 | return json_response({"data": work_group.to_dict(get_request_fields()), "status":"updated"}) 161 | -------------------------------------------------------------------------------- /app/controllers/auth_controller.py: -------------------------------------------------------------------------------- 1 | from flask import Blueprint, g, session, request 2 | from library.engine.utils import json_response 3 | 4 | 5 | class AuthController(Blueprint): 6 | def __init__(self, *args, **kwargs): 7 | self.require_auth = kwargs.get("require_auth") or False 8 | if "require_auth" in kwargs: 9 | del(kwargs["require_auth"]) 10 | Blueprint.__init__(self, *args, **kwargs) 11 | self.before_request(self.set_current_user) 12 | 13 | @staticmethod 14 | def _get_user_from_authorization_header(): 15 | if "Authorization" in request.headers: 16 | auth = request.headers["Authorization"].split() 17 | if len(auth) == 2 and auth[0] == "Token": 18 | from app.models import Token 19 | token = Token.find_one({"token": auth[1]}) 20 | if token is not None and not token.expired(): 21 | return token.user 22 | return None 23 | 24 | @staticmethod 25 | def _get_user_from_session(): 26 | from app.models import User 27 | user_id = session.get("user_id") 28 | if user_id: 29 | user = 
User.find_one({"_id": user_id}) 30 | return user 31 | 32 | @staticmethod 33 | def _get_user_from_x_api_auth_token(): 34 | if "X-Api-Auth-Token" in request.headers: 35 | from app.models import Token 36 | token = Token.find_one({"token": request.headers["X-Api-Auth-Token"]}) 37 | if token is not None and not token.expired(): 38 | return token.user 39 | else: 40 | return None 41 | 42 | def set_current_user(self): 43 | g.user = self._get_user_from_x_api_auth_token() or \ 44 | self._get_user_from_authorization_header() or \ 45 | self._get_user_from_session() 46 | if g.user is None and self.require_auth: 47 | return self.error_response() 48 | 49 | @staticmethod 50 | def error_response(): 51 | from app import app 52 | response = { 53 | "errors": ["You must be authenticated first"], 54 | "state": "logged out", 55 | "auth_url": app.authorizer.get_authentication_url() 56 | } 57 | if hasattr(app.authorizer, "NAME"): 58 | response["auth_text"] = app.authorizer.NAME 59 | else: 60 | response["auth_text"] = "external auth" 61 | return json_response(response, 403) 62 | -------------------------------------------------------------------------------- /app/controllers/main.py: -------------------------------------------------------------------------------- 1 | from flask import Blueprint 2 | 3 | main_ctrl = Blueprint("main", __name__) 4 | 5 | @main_ctrl.route("/") 6 | def index(): 7 | return "Hello world, you shouldn't ever see this string" 8 | -------------------------------------------------------------------------------- /app/models/__init__.py: -------------------------------------------------------------------------------- 1 | from work_group import WorkGroup 2 | from group import Group 3 | from datacenter import Datacenter 4 | from user import User 5 | from host import Host 6 | from token import Token 7 | from api_action import ApiAction 8 | from network_group import NetworkGroup -------------------------------------------------------------------------------- 
/app/models/datacenter.py: -------------------------------------------------------------------------------- 1 | from app.models.storable_model import StorableModel, save_required, now 2 | from bson.objectid import ObjectId 3 | from library.engine.errors import ParentAlreadyExists, ParentDoesNotExist, ParentCycle, DatacenterNotEmpty 4 | from library.engine.errors import ChildAlreadyExists, ChildDoesNotExist, ObjectSaveRequired, DatacenterNotFound 5 | 6 | 7 | class Datacenter(StorableModel): 8 | 9 | _host_class = None 10 | 11 | FIELDS = ( 12 | "_id", 13 | "name", 14 | "description", 15 | "parent_id", 16 | "root_id", 17 | "child_ids", 18 | "created_at", 19 | "updated_at", 20 | ) 21 | 22 | KEY_FIELD = "name" 23 | 24 | REQUIRED_FIELDS = ( 25 | "name", 26 | "created_at", 27 | "updated_at", 28 | ) 29 | 30 | DEFAULTS = { 31 | "child_ids": [], 32 | "created_at": now, 33 | "updated_at": now 34 | } 35 | 36 | INDEXES = ( 37 | ["name", { "unique": True }], 38 | "parent_id", 39 | "root_id", 40 | ) 41 | 42 | REJECTED_FIELDS = ( 43 | "parent_id", 44 | "root_id", 45 | "child_ids", 46 | "created_at", 47 | "updated_at" 48 | ) 49 | 50 | __slots__ = list(FIELDS) 51 | 52 | @classmethod 53 | def _resolve_dc(cls, dc): 54 | # gets datacenter or datacenter_id or str with datacenter_id 55 | # and return a tuple of actual (datacenter, datacenter_id) 56 | if type(dc) == cls: 57 | dc_id = dc._id 58 | if dc_id is None: 59 | raise ObjectSaveRequired("%s must be saved first" % dc) 60 | elif type(dc) == ObjectId: 61 | dc_id = dc 62 | dc = cls.find_one({ "_id": dc_id }) 63 | else: 64 | dc_id = ObjectId(dc) 65 | dc = cls.find_one({ "_id": dc_id }) 66 | if dc is None: 67 | raise DatacenterNotFound("Datacenter %s not found" % dc_id) 68 | return dc, dc_id 69 | 70 | @save_required 71 | def set_parent(self, parent): 72 | if self.parent_id is not None: 73 | raise ParentAlreadyExists("You should unset current parent first") 74 | parent, parent_id = self._resolve_dc(parent) 75 | if parent is None: 76 | raise 
ParentDoesNotExist("Parent with id %s doesn't exist" % parent_id) 77 | if parent._id == self._id: 78 | raise ParentCycle("Can't set parent to myself") 79 | if parent in self.get_all_children(): 80 | raise ParentCycle("Can't set one of (grand)children parent") 81 | parent.child_ids.append(self._id) 82 | self.parent_id = parent_id 83 | parent.save() 84 | self.save() 85 | 86 | @save_required 87 | def unset_parent(self): 88 | if self.parent_id is None: 89 | raise ParentDoesNotExist("This object doesn't have a parent") 90 | parent = self.parent 91 | parent.child_ids.remove(self._id) 92 | self.parent_id = None 93 | self.save() 94 | parent.save() 95 | 96 | @save_required 97 | def add_child(self, child): 98 | child, child_id = self._resolve_dc(child) 99 | if child is None: 100 | raise ChildDoesNotExist("Child with id %s doesn't exist" % child_id) 101 | if child.parent_id is not None: 102 | raise ParentAlreadyExists("This child already have a parent") 103 | if child_id == self._id: 104 | raise ParentCycle("Can't make datacenter child of itself") 105 | if self in child.get_all_children(): 106 | raise ParentCycle("Can't add on of (grand)parents as child") 107 | if child_id in self.child_ids: 108 | raise ChildAlreadyExists("%s is already a child of %s" % (child.name, self.name)) 109 | self.child_ids.append(child_id) 110 | child.parent_id = self._id 111 | child.save() 112 | self.save() 113 | 114 | @save_required 115 | def remove_child(self, child): 116 | child, child_id = self._resolve_dc(child) 117 | if child is None: 118 | raise ChildDoesNotExist("Child with id %s doesn't exist" % child_id) 119 | self.child_ids.remove(child_id) 120 | child.parent_id = None 121 | child.save() 122 | self.save() 123 | 124 | def get_all_children(self): 125 | children = self.children[:] 126 | for child in self.children: 127 | children += child.get_all_children() 128 | return children 129 | 130 | def touch(self): 131 | self.updated_at = now() 132 | 133 | @property 134 | def parent(self): 135 | 
return self.__class__.find_one({ "_id": self.parent_id }) 136 | 137 | @property 138 | def root(self): 139 | return self.__class__.find_one({ "_id": self.root_id }) 140 | 141 | @property 142 | def all_children(self): 143 | return self.get_all_children() 144 | 145 | @save_required 146 | def _detect_root_id(self): 147 | if self.parent_id is None: 148 | return self._id 149 | else: 150 | return self.parent._detect_root_id() 151 | 152 | def _before_save(self): 153 | if not self.is_new: 154 | root_id = self._detect_root_id() 155 | if root_id == self._id: 156 | root_id = None 157 | self.root_id = root_id 158 | else: 159 | # set parent_id only via set_parent 160 | self.parent_id = None 161 | self.touch() 162 | 163 | def _before_delete(self): 164 | if len(self.child_ids) > 0: 165 | raise DatacenterNotEmpty("Can not delete datacenter because it's not empty") 166 | if self.parent is not None: 167 | self.unset_parent() 168 | self.host_class.unset_datacenter(self._id) 169 | 170 | @property 171 | def children(self): 172 | return self.__class__.find({ "_id": { "$in": self.child_ids } }).all() 173 | 174 | @property 175 | @save_required 176 | def is_root(self): 177 | return self.root_id is None 178 | 179 | @property 180 | def host_class(self): 181 | if self._host_class is None: 182 | from app.models import Host 183 | self.__class__._host_class = Host 184 | return self._host_class 185 | 186 | @property 187 | def hosts(self): 188 | return self.host_class.find({ "datacenter_id": self._id }) 189 | 190 | @property 191 | def all_hosts(self): 192 | all_ids = [self._id] + [x._id for x in self.get_all_children()] 193 | return self.host_class.find({ "datacenter_id": { "$in": all_ids } }) 194 | -------------------------------------------------------------------------------- /app/models/network_group.py: -------------------------------------------------------------------------------- 1 | from storable_model import StorableModel 2 | from library.engine.permissions import 
get_user_from_app_context 3 | from library.engine.errors import InvalidWorkGroupId, ServerGroupNotEmpty 4 | 5 | 6 | class NetworkGroup(StorableModel): 7 | 8 | # This model relates to mail.ru-specific terms 9 | 10 | _collection = 'network_groups' 11 | 12 | FIELDS = ( 13 | "_id", 14 | "name", 15 | "work_group_id", 16 | "is_master", 17 | ) 18 | 19 | REQUIRED_FIELDS = ( 20 | "work_group_id", 21 | "name" 22 | ) 23 | 24 | INDEXES = ( 25 | ["name", {"unique": True}], 26 | "work_group_id", 27 | "is_master" 28 | ) 29 | 30 | DEFAULTS = { 31 | "is_master": False 32 | } 33 | 34 | KEY_FIELD = "name" 35 | 36 | __slots__ = FIELDS 37 | 38 | @property 39 | def work_group(self): 40 | from app.models import WorkGroup 41 | return WorkGroup.find_one({"_id": self.work_group_id}) 42 | 43 | @property 44 | def work_group_name(self): 45 | wg = self.work_group 46 | if wg is None: 47 | return None 48 | return wg.name 49 | 50 | @property 51 | def modification_allowed(self): 52 | user = get_user_from_app_context() 53 | return self._modification_allowed_by(user) 54 | 55 | @property 56 | def assigning_allowed(self): 57 | return self.work_group.modification_allowed 58 | 59 | @property 60 | def hosts(self): 61 | from app.models import Host 62 | return Host.find({"network_group_id": self._id}) 63 | 64 | @property 65 | def hosts_count(self): 66 | return self.hosts.count() 67 | 68 | def _check_work_group(self): 69 | if self.work_group is None and self.work_group_id is not None: 70 | raise InvalidWorkGroupId("WorkGroup with id %s doesn't exist" % self.work_group_id) 71 | 72 | def _before_save(self): 73 | self._check_work_group() 74 | 75 | def _before_delete(self): 76 | if self.hosts_count > 0: 77 | raise ServerGroupNotEmpty("server group has hosts attached") 78 | 79 | def clear_hosts(self): 80 | for host in self.hosts: 81 | host.network_group_id = None 82 | host.save() 83 | 84 | @staticmethod 85 | def _modification_allowed_by(user): 86 | if user is None: 87 | return False 88 | return user.system 89 | 
-------------------------------------------------------------------------------- /app/models/storable_model.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from functools import wraps 3 | from library.engine.errors import FieldRequired, ObjectSaveRequired 4 | from library.engine.cache import request_time_cache 5 | from library.engine.permissions import current_user_is_system 6 | from copy import deepcopy 7 | 8 | 9 | def hungarian(name): 10 | result = "" 11 | for i, l in enumerate(name): 12 | if 65 <= ord(l) <= 90: 13 | if i != 0: 14 | result += "_" 15 | result += l.lower() 16 | else: 17 | result += l 18 | return result 19 | 20 | 21 | # For some reason Mongo stores datetime rounded to milliseconds 22 | # Following is useful to avoid inconsistencies in unit tests 23 | # - Roman Andriadi 24 | def now(): 25 | dt = datetime.utcnow() 26 | dt = dt.replace(microsecond=dt.microsecond//1000*1000) 27 | return dt 28 | 29 | 30 | class ModelMeta(type): 31 | _collection = None 32 | 33 | @property 34 | def collection(cls): 35 | if cls._collection is None: 36 | cls._collection = hungarian(cls.__name__) 37 | return cls._collection 38 | 39 | 40 | class StorableModel(object): 41 | 42 | __metaclass__ = ModelMeta 43 | 44 | FIELDS = [] 45 | REJECTED_FIELDS = [] 46 | REQUIRED_FIELDS = set() 47 | RESTRICTED_FIELDS = [] 48 | SYSTEM_FIELDS = [] 49 | KEY_FIELD = None 50 | DEFAULTS = {} 51 | INDEXES = [] 52 | 53 | AUXILIARY_SLOTS = ( 54 | "AUXILIARY_SLOTS", 55 | "FIELDS", 56 | "REJECTED_FIELDS", 57 | "REQUIRED_FIELDS", 58 | "RESTRICTED_FIELDS", 59 | "SYSTEM_FIELDS", 60 | "KEY_FIELD", 61 | "DEFAULTS", 62 | "INDEXES", 63 | ) 64 | 65 | __hash__ = None 66 | 67 | def __init__(self, **kwargs): 68 | if "_id" not in kwargs: 69 | self._id = None 70 | for field, value in kwargs.iteritems(): 71 | if field in self.FIELDS: 72 | setattr(self, field, value) 73 | self.__slots__ = [] 74 | for field in self.FIELDS: 75 | if field not in 
kwargs: 76 | value = self.DEFAULTS.get(field) 77 | if callable(value): 78 | value = value() 79 | elif hasattr(value, "copy"): 80 | value = value.copy() 81 | elif hasattr(value, "__getitem__"): 82 | value = value[:] 83 | setattr(self, field, value) 84 | setattr(self, '_initial_state', deepcopy(self.to_dict(self.FIELDS))) 85 | 86 | def save(self, skip_callback=False): 87 | from library.db import db 88 | for field in self.missing_fields: 89 | raise FieldRequired(field) 90 | if not skip_callback: 91 | self._before_save() 92 | db.save_obj(self) 93 | setattr(self, '_initial_state', deepcopy(self.to_dict(self.FIELDS))) 94 | return self 95 | 96 | def update(self, data, skip_callback=False): 97 | for field in self.FIELDS: 98 | if field in data and field not in self.REJECTED_FIELDS and field != "_id": 99 | # system fields are silently excluded if the current user is not a system user 100 | if field in self.SYSTEM_FIELDS and not current_user_is_system(): 101 | continue 102 | self.__setattr__(field, data[field]) 103 | self.save(skip_callback=skip_callback) 104 | return self 105 | 106 | def destroy(self, skip_callback=False): 107 | from library.db import db 108 | if self.is_new: 109 | return 110 | if not skip_callback: 111 | self._before_delete() 112 | db.delete_obj(self) 113 | self._id = None 114 | return self 115 | 116 | def _before_save(self): 117 | pass 118 | 119 | def _before_delete(self): 120 | pass 121 | 122 | def __repr__(self): 123 | attributes = ["%s=%r" % (a, getattr(self, a)) 124 | for a in list(self.FIELDS)] 125 | return '%s(\n %s\n)' % (self.__class__.__name__, ',\n '.join(attributes)) 126 | 127 | def __eq__(self, other): 128 | if self.__class__ != other.__class__: 129 | return False 130 | for field in self.FIELDS: 131 | if hasattr(self, field): 132 | if not hasattr(other, field): 133 | return False 134 | if getattr(self, field) != getattr(other, field): 135 | return False 136 | elif hasattr(other, field): 137 | return False 138 | return True 139 | 140 | def 
__ne__(self, other): 141 | return not self.__eq__(other) 142 | 143 | def to_dict(self, fields=None, include_restricted=False): 144 | if fields is None: 145 | fields = self.FIELDS 146 | 147 | # This was refactored from a list comprehension to a for-loop 148 | # as hasattr() catches all the exceptions in python2 149 | # and raising Exceptions in calculated properties don't have any effect 150 | # 151 | # For addtitional info refer to: 152 | # https://stackoverflow.com/questions/903130/hasattr-vs-try-except-block-to-deal-with-non-existent-attributes/16186050#16186050 153 | 154 | dict_data = [] 155 | for f in fields: 156 | try: 157 | value = getattr(self, f) 158 | except AttributeError: 159 | continue 160 | if not (f != "_id" and f.startswith("_")) \ 161 | and not (f in self.AUXILIARY_SLOTS) \ 162 | and (include_restricted or f not in self.RESTRICTED_FIELDS) \ 163 | and not callable(value): 164 | dict_data.append((f, value)) 165 | result = dict(dict_data) 166 | return result 167 | 168 | def reload(self): 169 | tmp = self.__class__.find_one({ "_id": self._id }) 170 | for field in self.FIELDS: 171 | if field == "_id": 172 | continue 173 | value = getattr(tmp, field) 174 | setattr(self, field, value) 175 | return self 176 | 177 | @property 178 | def collection(self): 179 | return self.__class__.collection 180 | 181 | @property 182 | def is_complete(self): 183 | return len(self.missing_fields) == 0 184 | 185 | @property 186 | def is_new(self): 187 | from bson.objectid import ObjectId 188 | return not (hasattr(self, "_id") and type(self._id) == ObjectId) 189 | 190 | @property 191 | def missing_fields(self): 192 | mfields = [] 193 | for field in self.REQUIRED_FIELDS: 194 | if not hasattr(self, field) or getattr(self, field) in ["", None]: 195 | mfields.append(field) 196 | return mfields 197 | 198 | @classmethod 199 | @request_time_cache() 200 | def find(cls, query={}, **kwargs): 201 | from library.db import db 202 | return db.get_objs(cls, cls.collection, query, **kwargs) 
203 | 204 | @classmethod 205 | @request_time_cache() 206 | def find_one(cls, query, **kwargs): 207 | from library.db import db 208 | return db.get_obj(cls, cls.collection, query, **kwargs) 209 | 210 | @classmethod 211 | def get(cls, expression, raise_if_none=None): 212 | from bson.objectid import ObjectId 213 | from library.engine.utils import resolve_id 214 | expression = resolve_id(expression) 215 | if expression is None: 216 | res = None 217 | else: 218 | if type(expression) == ObjectId: 219 | query = {"_id": expression} 220 | else: 221 | expression = str(expression) 222 | query = {cls.KEY_FIELD: expression} 223 | res = cls.find_one(query) 224 | if res is None and raise_if_none is not None: 225 | if isinstance(raise_if_none, Exception): 226 | raise raise_if_none 227 | else: 228 | from library.engine.errors import NotFound 229 | raise NotFound(cls.__name__ + " not found") 230 | else: 231 | return res 232 | 233 | @classmethod 234 | def destroy_all(cls): 235 | cls.destroy_many({}) 236 | 237 | @classmethod 238 | def destroy_many(cls, query): 239 | from library.db import db 240 | db.delete_query(cls.collection, query) 241 | 242 | @classmethod 243 | def update_many(cls, query, attrs): 244 | # warning: being a faster method than traditional model manipulation, 245 | # this method doesn't provide any lifecycle callback for independent 246 | # objects 247 | from library.db import db 248 | db.update_query(cls.collection, query, attrs) 249 | 250 | @classmethod 251 | def ensure_indexes(cls, loud=False, overwrite=False): 252 | 253 | if type(cls.INDEXES) != list and type(cls.INDEXES) != tuple: 254 | raise TypeError("INDEXES field must be of type list or tuple") 255 | 256 | from pymongo import ASCENDING, DESCENDING, HASHED 257 | from pymongo.errors import OperationFailure 258 | from library.db import db 259 | from app import app 260 | 261 | def parse(key): 262 | if key.startswith("-"): 263 | key = key[1:] 264 | order = DESCENDING 265 | elif key.startswith("#"): 266 | key = 
key[1:] 267 | order = HASHED 268 | else: 269 | order = ASCENDING 270 | if key.startswith("+"): 271 | key = key[1:] 272 | return (key, order) 273 | 274 | for index in cls.INDEXES: 275 | if type(index) == str: 276 | index = [index] 277 | keys = [] 278 | options = { "sparse": False } 279 | 280 | for subindex in index: 281 | if type(subindex) == str: 282 | keys.append(parse(subindex)) 283 | else: 284 | for key, value in subindex.items(): 285 | options[key] = value 286 | if loud: 287 | app.logger.debug("Creating index with options: %s, %s" % (keys, options)) 288 | 289 | try: 290 | db.conn[cls.collection].create_index(keys, **options) 291 | except OperationFailure as e: 292 | if e.details.get("codeName") == "IndexOptionsConflict" or e.details.get("code") == 85: 293 | if overwrite: 294 | if loud: 295 | app.logger.debug("Dropping index %s as conflicting" % keys) 296 | db.conn[cls.collection].drop_index(keys) 297 | if loud: 298 | app.logger.debug("Creating index with options: %s, %s" % (keys, options)) 299 | db.conn[cls.collection].create_index(keys, **options) 300 | else: 301 | app.logger.error("Index %s conflicts with exising one, use overwrite param to fix it" % keys) 302 | 303 | @property 304 | def __dict__(self): 305 | return dict([x for x in self.to_dict(self.FIELDS).iteritems() if x[1] is not None]) 306 | 307 | 308 | def save_required(func): 309 | @wraps(func) 310 | def wrapper(*args, **kwargs): 311 | this = args[0] 312 | if this.is_new: 313 | raise ObjectSaveRequired("This object must be saved first") 314 | return func(*args, **kwargs) 315 | return wrapper 316 | -------------------------------------------------------------------------------- /app/models/token.py: -------------------------------------------------------------------------------- 1 | from storable_model import StorableModel, now 2 | from library.engine.utils import uuid4_string 3 | from datetime import datetime, timedelta 4 | 5 | 6 | class Token(StorableModel): 7 | 8 | _user_class = None 9 | 10 | FIELDS 
= ( 11 | "_id", 12 | "type", 13 | "token", 14 | "user_id", 15 | "created_at" 16 | ) 17 | 18 | KEY_FIELD = "token" 19 | 20 | REQUIRED_FIELDS = ( 21 | "type", 22 | "token", 23 | "user_id", 24 | "created_at" 25 | ) 26 | 27 | DEFAULTS = { 28 | "type": "auth", 29 | "token": uuid4_string, 30 | "created_at": now 31 | } 32 | 33 | INDEXES = ( 34 | ["token", {"unique": True}], 35 | ["user_id", "type"] 36 | ) 37 | 38 | @property 39 | def user(self): 40 | return self.user_class.find_one({"_id": self.user_id}) 41 | 42 | @property 43 | def user_class(self): 44 | if self._user_class is None: 45 | from app.models import User 46 | self.__class__._user_class = User 47 | return self._user_class 48 | 49 | def expired(self): 50 | from app import app 51 | if app.auth_token_ttl is None: 52 | # No token expiration 53 | return False 54 | expires_at = self.created_at + app.auth_token_ttl 55 | return expires_at < now() 56 | 57 | def close_to_expiration(self): 58 | from app import app 59 | if app.auth_token_ttr is None: 60 | # No time to renew 61 | return False 62 | renew_at = self.created_at + app.auth_token_ttr 63 | return renew_at < now() 64 | -------------------------------------------------------------------------------- /app/models/user.py: -------------------------------------------------------------------------------- 1 | from storable_model import StorableModel, now 2 | from library.engine.pbkdf2 import pbkdf2_hex 3 | from library.engine.permissions import get_user_from_app_context 4 | from library.engine.cache import request_time_cache 5 | from library.engine.errors import InvalidPassword, InvalidDocumentsPerPage 6 | from time import mktime 7 | from flask import g, has_request_context 8 | import bcrypt 9 | 10 | 11 | class UserIsInWorkGroups(Exception): 12 | pass 13 | 14 | 15 | class User(StorableModel): 16 | 17 | _token_class = None 18 | 19 | FIELDS = ( 20 | "_id", 21 | "ext_id", 22 | "username", 23 | "first_name", 24 | "last_name", 25 | "email", 26 | "avatar_url", 27 | 
"password_hash", 28 | "created_at", 29 | "updated_at", 30 | "supervisor", 31 | "system", 32 | "custom_data", 33 | "documents_per_page", 34 | ) 35 | 36 | KEY_FIELD = "username" 37 | 38 | DEFAULTS = { 39 | "first_name": "", 40 | "last_name": "", 41 | "avatar_url": "", 42 | "password_hash": "-", 43 | "email": "", 44 | "custom_data": {}, 45 | "ext_id": None, 46 | "supervisor": False, 47 | "system": False, 48 | "documents_per_page": 20 49 | } 50 | 51 | RESTRICTED_FIELDS = [ 52 | "password_hash", 53 | "salt" 54 | ] 55 | 56 | REQUIRED_FIELDS = ( 57 | "username", 58 | "password_hash" 59 | ) 60 | 61 | REJECTED_FIELDS = ( 62 | "password_hash", 63 | "supervisor", 64 | "created_at", 65 | "updated_at", 66 | "system", 67 | ) 68 | 69 | INDEXES = ( 70 | ["username",{"unique": True}], 71 | "ext_id", 72 | "custom_data", 73 | "supervisor", 74 | "system", 75 | ) 76 | 77 | __slots__ = list(FIELDS) + ["_salt"] 78 | 79 | @property 80 | def salt(self): 81 | if self._salt is None: 82 | from app import app 83 | secret_key = app.config.app.get("SECRET_KEY") 84 | if secret_key is None: 85 | raise RuntimeError("No SECRET_KEY in app section of config") 86 | self._salt = "%s.%d" % (secret_key, int(mktime(self.created_at.utctimetuple()))) 87 | return self._salt 88 | 89 | def __set_legacy_password_hash(self, password_raw): 90 | if not self.created_at: 91 | ts = now() 92 | self.created_at = ts 93 | self.password_hash = pbkdf2_hex(str(password_raw), self.salt) 94 | 95 | def __check_legacy_password_hash(self, password_raw): 96 | return pbkdf2_hex(str(password_raw), self.salt) == self.password_hash 97 | 98 | def __set_password_hash(self, password_raw): 99 | if type(password_raw) != unicode: 100 | password_raw = unicode(password_raw) 101 | self.password_hash = bcrypt.hashpw(password_raw.encode('utf-8'), bcrypt.gensalt()) 102 | 103 | def __check_password_hash(self, password_raw): 104 | if type(password_raw) != unicode: 105 | password_raw = unicode(password_raw) 106 | if type(self.password_hash) != 
    def __init__(self, **kwargs):
        """Create a user.

        An optional ``password_raw`` kwarg is consumed here (it is not a
        stored field) and converted into ``password_hash`` using either
        the legacy pbkdf2 scheme or bcrypt, depending on the
        LEGACY_PASSWORDS app config flag.
        """
        # Pop password_raw before delegating: StorableModel would reject
        # it as an unknown field.
        if "password_raw" in kwargs:
            password_raw = kwargs["password_raw"]
            del(kwargs["password_raw"])
        else:
            password_raw = None
        StorableModel.__init__(self, **kwargs)

        self._salt = None  # lazily computed by the `salt` property
        if password_raw is not None:
            from app import app
            if app.config.app.get("LEGACY_PASSWORDS", False):
                self.__set_legacy_password_hash(password_raw)
            else:
                self.__set_password_hash(password_raw)
has_request_context(): 166 | return self.token_class.find({"user_id": self._id}) 167 | else: 168 | current_user = g.user 169 | if current_user and (current_user.supervisor or current_user._id == self._id): 170 | return self.token_class.find({"user_id": self._id}) 171 | return [] 172 | 173 | @property 174 | def avatar(self): 175 | if self.avatar_url: 176 | return self.avatar_url 177 | if not self.email: 178 | return "" 179 | 180 | from hashlib import md5 181 | from app import app 182 | gravatar_path = app.config.app.get("GRAVATAR_PATH") 183 | if not gravatar_path: 184 | return "" 185 | gravatar_hash = md5(self.email.strip()).hexdigest() 186 | return "%s/%s.jpg" % (gravatar_path, gravatar_hash) 187 | 188 | def get_auth_token(self): 189 | tokens = self.token_class.find({"type": "auth", "user_id": self._id}) 190 | suitable_token = None 191 | for token in tokens: 192 | if token.expired(): 193 | token.destroy() 194 | elif token.close_to_expiration(): 195 | continue 196 | else: 197 | suitable_token = token 198 | if suitable_token is None: 199 | suitable_token = self.token_class(type="auth", user_id=self._id) 200 | suitable_token.save() 201 | return suitable_token 202 | 203 | @property 204 | def auth_token(self): 205 | if has_request_context(): 206 | current_user = g.user 207 | if not current_user: 208 | return None 209 | if not current_user.supervisor and not current_user._id == self._id: 210 | return None 211 | return self.get_auth_token().token 212 | 213 | @property 214 | @request_time_cache() 215 | def work_groups_owned(self): 216 | from app.models import WorkGroup 217 | return WorkGroup.find({"owner_id": self._id}) 218 | 219 | @property 220 | @request_time_cache() 221 | def work_groups_included_into(self): 222 | from app.models import WorkGroup 223 | return WorkGroup.find({"member_ids": self._id}) 224 | 225 | @property 226 | def member_of(self): 227 | from app.models import WorkGroup 228 | return WorkGroup.find({"$or":[ 229 | {"member_ids": self._id}, 230 | 
{"owner_id": self._id} 231 | ]}) 232 | 233 | @property 234 | def modification_allowed(self): 235 | user = get_user_from_app_context() 236 | if user is None: return False 237 | if user.supervisor or self._id == user._id: return True 238 | return False 239 | 240 | @property 241 | def system_set_allowed(self): 242 | return self.supervisor_set_allowed 243 | 244 | @property 245 | def supervisor_set_allowed(self): 246 | # user can't revoke his supervisor privileges himself 247 | # just in case of misclick 248 | user = get_user_from_app_context() 249 | return user.supervisor and user._id != self._id 250 | -------------------------------------------------------------------------------- /app/models/work_group.py: -------------------------------------------------------------------------------- 1 | from app.models.storable_model import StorableModel, now 2 | from library.engine.permissions import get_user_from_app_context 3 | from library.engine.utils import check_lists_are_equal 4 | 5 | 6 | class WorkGroupNotEmpty(Exception): 7 | pass 8 | 9 | 10 | class InvalidOwner(Exception): 11 | pass 12 | 13 | 14 | class WorkGroup(StorableModel): 15 | 16 | _owner_class = None 17 | _group_class = None 18 | 19 | FIELDS = ( 20 | "_id", 21 | "name", 22 | "description", 23 | "email", 24 | "owner_id", 25 | "member_ids", 26 | "updated_at", 27 | "created_at", 28 | ) 29 | 30 | KEY_FIELD = "name" 31 | 32 | REQUIRED_FIELDS = ( 33 | "name", 34 | "created_at", 35 | "updated_at", 36 | "owner_id", 37 | "member_ids" 38 | ) 39 | 40 | DEFAULTS = { 41 | "created_at": now, 42 | "updated_at": now, 43 | "member_ids": [], 44 | "description": "", 45 | "email": "", 46 | } 47 | 48 | REJECTED_FIELDS = ( 49 | "created_at", 50 | "updated_at", 51 | "owner_id", 52 | "member_ids" 53 | ) 54 | 55 | INDEXES = [ 56 | [ "name", { "unique": True } ], 57 | "member_ids", 58 | "owner_id" 59 | ] 60 | 61 | __slots__ = FIELDS 62 | 63 | @property 64 | def owner_class(self): 65 | if self._owner_class is None: 66 | from app.models 
import User 67 | self.__class__._owner_class = User 68 | return self._owner_class 69 | 70 | @property 71 | def owner(self): 72 | return self.owner_class.find_one({"_id": self.owner_id}) 73 | 74 | @property 75 | def owner_name(self): 76 | return self.owner.username 77 | 78 | @property 79 | def members(self): 80 | return self.owner_class.find({"_id":{"$in":self.member_ids}}) 81 | 82 | @property 83 | def member_usernames(self): 84 | return [x.username for x in self.members] 85 | 86 | @property 87 | def participants(self): 88 | return self.owner_class.find({ 89 | "_id": {"$in": self.member_ids + [self.owner_id]} 90 | }) 91 | 92 | @property 93 | def participant_usernames(self): 94 | return [x.username for x in self.participants] 95 | 96 | @property 97 | def modification_allowed(self): 98 | user = get_user_from_app_context() 99 | if user is None: return False 100 | if user.supervisor or self.owner._id == user._id: return True 101 | if user._id in self.member_ids: return True 102 | return False 103 | 104 | @property 105 | def member_list_modification_allowed(self): 106 | user = get_user_from_app_context() 107 | if user is None: return False 108 | if user.supervisor or self.owner._id == user._id: return True 109 | return False 110 | 111 | def is_member(self, user): 112 | return user._id in self.member_ids 113 | 114 | def add_member(self, user): 115 | if user._id not in self.member_ids: 116 | self.member_ids.append(user._id) 117 | self.save() 118 | 119 | def remove_member(self, user): 120 | if user._id in self.member_ids: 121 | self.member_ids.remove(user._id) 122 | self.save() 123 | 124 | @property 125 | def group_class(self): 126 | if self._group_class is None: 127 | from app.models import Group 128 | self.__class__._group_class = Group 129 | return self._group_class 130 | 131 | def _before_save(self): 132 | if self.owner is None: 133 | raise InvalidOwner("can not save workgroup without an owner") 134 | if not self.is_new: 135 | self.touch() 136 | if self.owner_id != 
self._initial_state.get("owner_id") or \ 137 | not check_lists_are_equal(self.member_ids, self._initial_state.get("member_ids")): 138 | self.reset_responsibles_cache() 139 | 140 | def touch(self): 141 | self.updated_at = now() 142 | 143 | def reset_responsibles_cache(self): 144 | for group in self.groups: 145 | group.reset_responsibles_cache(self.participant_usernames) 146 | group.save(skip_callback=True) 147 | 148 | def _before_delete(self): 149 | if self.groups_count > 0: 150 | raise WorkGroupNotEmpty("can not delete workgroup having groups") 151 | if self.network_groups_count > 0: 152 | raise WorkGroupNotEmpty("can not delete workgroup having server groups") 153 | 154 | @property 155 | def groups_count(self): 156 | return self.groups.count() 157 | 158 | @property 159 | def groups(self): 160 | return self.group_class.find({"work_group_id": self._id}) 161 | 162 | @property 163 | def network_groups(self): 164 | from app.models import NetworkGroup 165 | return NetworkGroup.find({"work_group_id": self._id}) 166 | 167 | @property 168 | def network_groups_count(self): 169 | return self.network_groups.count() 170 | 171 | @property 172 | def hosts(self): 173 | from app.models import Host 174 | group_ids = [x._id for x in self.groups] 175 | return Host.find({"group_id": {"$in": group_ids}}) 176 | -------------------------------------------------------------------------------- /app/tests/__init__.py: -------------------------------------------------------------------------------- 1 | from app.tests.models.test_datacenter_model import TestDatacenterModel 2 | from app.tests.models.test_group_model import TestGroupModel 3 | from app.tests.models.test_host_model import TestHostModel 4 | from app.tests.models.test_work_group_model import TestWorkGroupModel 5 | from app.tests.models.test_storable_model import TestStorableModel 6 | from app.tests.models.test_network_group_model import TestNetworkGroupModel 7 | from app.tests.models.test_user_model import TestUserModel 8 | 9 | 
from app.tests.httpapi.test_account_ctrl import TestAccountCtrl 10 | from app.tests.httpapi.test_group_ctrl import TestGroupCtrl 11 | from app.tests.httpapi.test_host_ctrl import TestHostCtrl 12 | from app.tests.httpapi.test_user_ctrl import TestUserCtrl 13 | from app.tests.httpapi.test_datacenter_ctrl import TestDatacenterCtrl 14 | from app.tests.httpapi.test_network_group_ctrl import TestNetworkGroupCtrl 15 | 16 | from app.tests.utils.test_pbkdf2 import TestPBKDF2 17 | from app.tests.utils.test_diff import TestDiff 18 | from app.tests.utils.test_permutation import TestPermutation 19 | from app.tests.utils.test_ownership import TestOwnership 20 | from app.tests.utils.test_merge import TestMerge 21 | -------------------------------------------------------------------------------- /app/tests/httpapi/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/app/tests/httpapi/__init__.py -------------------------------------------------------------------------------- /app/tests/httpapi/httpapi_testcase.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from app.models import User, WorkGroup, Group, Host 3 | from app import app 4 | from flask import json 5 | 6 | 7 | class HttpApiTestCase(TestCase): 8 | SUPERVISOR = { 9 | "username": "super", 10 | "password": "superpasswd" 11 | } 12 | 13 | GENERAL_USER = { 14 | "username": "user", 15 | "password": "userpassword" 16 | } 17 | 18 | SYSTEM_USER = { 19 | "username": "sys", 20 | "password": "syspassword" 21 | } 22 | 23 | SYSTEM_SUPERVISOR = { 24 | "username": "supersys", 25 | "password": "supersyspassword" 26 | } 27 | 28 | @classmethod 29 | def setUpClass(cls): 30 | cls.session = None 31 | User.destroy_all() 32 | supervisor = User(username=HttpApiTestCase.SUPERVISOR["username"], 33 | supervisor=True, 34 | 
password_raw=HttpApiTestCase.SUPERVISOR["password"]) 35 | supervisor.save() 36 | 37 | user = User(username=HttpApiTestCase.GENERAL_USER["username"], 38 | supervisor=False, 39 | password_raw=HttpApiTestCase.GENERAL_USER["password"]) 40 | user.save() 41 | 42 | system = User(username=HttpApiTestCase.SYSTEM_USER["username"], 43 | supervisor=False, 44 | system=True, 45 | password_raw=HttpApiTestCase.SYSTEM_USER["password"]) 46 | system.save() 47 | 48 | supersystem = User(username=HttpApiTestCase.SYSTEM_SUPERVISOR["username"], 49 | supervisor=True, 50 | system=True, 51 | password_raw=HttpApiTestCase.SYSTEM_SUPERVISOR["password"]) 52 | supersystem.save() 53 | 54 | supertoken = supervisor.get_auth_token() 55 | usertoken = user.get_auth_token() 56 | systemtoken = system.get_auth_token() 57 | systemsupertoken = supersystem.get_auth_token() 58 | 59 | cls.supervisor = supervisor 60 | cls.supertoken = supertoken.token 61 | 62 | cls.user = user 63 | cls.usertoken = usertoken.token 64 | 65 | cls.systemuser = system 66 | cls.systemtoken = systemtoken.token 67 | 68 | cls.systemsuperuser = supersystem 69 | cls.systemsupertoken = systemsupertoken.token 70 | 71 | cls.work_group1 = WorkGroup(name="Test WorkGroup 1", owner_id=supervisor._id) 72 | cls.work_group1.save() 73 | cls.work_group2 = WorkGroup(name="Test WorkGroup 2", owner_id=user._id) 74 | cls.work_group2.save() 75 | cls.work_group2.add_member(system) 76 | 77 | @classmethod 78 | def tearDownClass(cls): 79 | User.destroy_all() 80 | WorkGroup.destroy_all() 81 | Group.destroy_all() 82 | Host.destroy_all() 83 | 84 | @property 85 | def fake_client(self): 86 | return app.flask.test_client() 87 | 88 | def get_proper_token(self, supervisor, system): 89 | if supervisor: 90 | if system: 91 | return self.systemsupertoken 92 | else: 93 | return self.supertoken 94 | elif system: 95 | return self.systemtoken 96 | return self.usertoken 97 | 98 | def get(self, url, supervisor=True, system=False): 99 | token = self.get_proper_token(supervisor, 
system) 100 | return self.fake_client.get(url, headers={ "X-Api-Auth-Token": token }) 101 | 102 | def delete(self, url, supervisor=True, system=False): 103 | token = self.get_proper_token(supervisor, system) 104 | return self.fake_client.delete(url, headers={ "X-Api-Auth-Token": token }) 105 | 106 | def post_json(self, url, data, supervisor=True, system=False): 107 | token = self.get_proper_token(supervisor, system) 108 | data = json.dumps(data, default=app.flask.json_encoder().default) 109 | return self.fake_client.post(url, data=data, headers={ "Content-Type": "application/json", "X-Api-Auth-Token": token }) 110 | 111 | def put_json(self, url, data, supervisor=True, system=False): 112 | token = self.get_proper_token(supervisor, system) 113 | data = json.dumps(data, default=app.flask.json_encoder().default) 114 | return self.fake_client.put(url, data=data, headers={ "Content-Type": "application/json", "X-Api-Auth-Token": token }) 115 | 116 | def get_json_data_should_be_successful(self, url, **kwargs): 117 | r = self.get(url, **kwargs) 118 | self.assertEqual(200, r.status_code) 119 | return json.loads(r.data) -------------------------------------------------------------------------------- /app/tests/httpapi/test_account_ctrl.py: -------------------------------------------------------------------------------- 1 | import flask.json as json 2 | from httpapi_testcase import HttpApiTestCase 3 | 4 | 5 | class TestAccountCtrl(HttpApiTestCase): 6 | def test_not_authenticated(self): 7 | r = self.fake_client.get("/api/v1/account/me") 8 | self.assertEqual(r.status_code, 403) 9 | body = json.loads(r.data) 10 | self.assertIn("state", body) 11 | self.assertEqual("logged out", body["state"]) 12 | 13 | def test_authenticate(self): 14 | r = self.fake_client.post("/api/v1/account/authenticate", data=json.dumps(self.SUPERVISOR), headers={ "Content-Type": "application/json" }) 15 | self.assertEqual(r.status_code, 200) 16 | self.assertIn("Set-Cookie", r.headers) 17 | 18 | def 
    def create_datacenter_tree(self):
        # Fixture: dc1 with two children (dc1.1, dc1.2) plus a
        # standalone dc2 — four datacenters total.
        self.dc1 = Datacenter(name="dc1", description="Datacenter 1")
        self.dc1.save()
        self.dc11 = Datacenter(name="dc1.1", description="Datacenter 1 Row 1")
        self.dc11.save()
        self.dc11.set_parent(self.dc1)
        self.dc12 = Datacenter(name="dc1.2", description="Datacenter 1 Row 2")
        self.dc12.save()
        self.dc12.set_parent(self.dc1)
        self.dc2 = Datacenter(name="dc2", description="Datacenter 2")
        self.dc2.save()
self.assertEqual(data["description"], self.dc12.description) 44 | self.assertEqual(data["parent_id"], str(self.dc1._id)) 45 | 46 | def test_show_datacenter_by_id(self): 47 | r = self.get("/api/v1/datacenters/%s" % str(self.dc12._id)) 48 | self.assertEqual(r.status_code, 200) 49 | data = json.loads(r.data) 50 | self.assertIn("data", data) 51 | data = data["data"] 52 | self.assertIs(type(data), list) 53 | self.assertEqual(len(data), 1) 54 | data = data[0] 55 | self.assertEqual(data["name"], self.dc12.name) 56 | self.assertEqual(data["description"], self.dc12.description) 57 | self.assertEqual(data["parent_id"], str(self.dc1._id)) 58 | 59 | def test_dc_not_found(self): 60 | r = self.get("/api/v1/datacenters/non_existent") 61 | self.assertEqual(r.status_code, 404) 62 | 63 | def test_create_datacenter(self): 64 | payload = { 65 | "name": "dc2.1", 66 | "description": "Datacenter 2 Row 1", 67 | "parent_id": str(self.dc2._id) 68 | } 69 | r = self.post_json("/api/v1/datacenters/", payload) 70 | self.assertEqual(r.status_code, 200) 71 | dc21 = Datacenter.find_one({"name": payload["name"]}) 72 | self.assertIsNotNone(dc21) 73 | self.assertEqual(dc21.description, payload["description"]) 74 | self.assertEqual(dc21.parent_id, self.dc2._id) 75 | 76 | def test_update_datacenter(self): 77 | payload = { 78 | "name": "dc3", 79 | "description": "Datacenter 3", 80 | "parent_id": None 81 | } 82 | r = self.put_json("/api/v1/datacenters/%s" % self.dc12.name, payload) 83 | self.assertEqual(r.status_code, 200) 84 | self.dc12.reload() 85 | self.assertEqual(self.dc12.name, payload["name"]) 86 | self.assertEqual(self.dc12.description, payload["description"]) 87 | self.assertIsNone(self.dc12.parent_id) 88 | 89 | def test_delete_datacenter(self): 90 | r = self.delete("/api/v1/datacenters/%s" % self.dc12.name) 91 | self.assertEqual(r.status_code, 200) 92 | self.assertIsNone(Datacenter.find_one({"_id": self.dc12._id})) 93 | 94 | def test_set_parent(self): 95 | r = 
self.put_json("/api/v1/datacenters/%s/set_parent" % self.dc12.name, {"parent_id": str(self.dc2._id)}) 96 | self.assertEqual(r.status_code, 200) 97 | self.dc12.reload() 98 | self.assertEqual(self.dc12.parent_id, self.dc2._id) 99 | 100 | def test_unset_parent(self): 101 | r = self.put_json("/api/v1/datacenters/%s/set_parent" % self.dc12._id, {"parent_id": None}) 102 | self.assertEqual(r.status_code, 200) 103 | self.dc12.reload() 104 | self.assertIsNone(self.dc12.parent_id) 105 | -------------------------------------------------------------------------------- /app/tests/httpapi/test_network_group_ctrl.py: -------------------------------------------------------------------------------- 1 | from httpapi_testcase import HttpApiTestCase 2 | from app.models import NetworkGroup, Host 3 | 4 | 5 | class TestNetworkGroupCtrl(HttpApiTestCase): 6 | 7 | @classmethod 8 | def setUpClass(cls): 9 | HttpApiTestCase.setUpClass() 10 | NetworkGroup.ensure_indexes() 11 | 12 | def setUp(self): 13 | NetworkGroup.destroy_all() 14 | self.sg1 = NetworkGroup(name="sg1", work_group_id=self.work_group1._id) 15 | self.sg1.save() 16 | self.sg2 = NetworkGroup(name="sg2", work_group_id=self.work_group2._id) 17 | self.sg2.save() 18 | self.sg3 = NetworkGroup(name="sg3", work_group_id=self.work_group1._id) 19 | self.sg3.save() 20 | 21 | def tearDown(self): 22 | NetworkGroup.destroy_all() 23 | 24 | def test_list_network_groups(self): 25 | data = self.get_json_data_should_be_successful("/api/v1/network_groups/") 26 | self.assertEqual(3, data["count"]) 27 | 28 | data = self.get_json_data_should_be_successful("/api/v1/network_groups/?work_group_id=%s" % self.work_group1._id) 29 | self.assertEqual(2, data["count"]) 30 | 31 | data = self.get_json_data_should_be_successful("/api/v1/network_groups/?work_group_name=%s" % self.work_group2.name) 32 | self.assertEqual(1, data["count"]) 33 | 34 | data = self.get_json_data_should_be_successful("/api/v1/network_groups/%s" % self.sg1.name) 35 | self.assertEqual(1, 
data["count"]) 36 | sg = data["data"][0] 37 | self.assertEqual(str(self.sg1._id), sg["_id"]) 38 | self.assertEqual(str(self.sg1.work_group_id), sg["work_group_id"]) 39 | self.assertEqual(self.sg1.is_master, sg["is_master"]) 40 | self.assertEqual(self.sg1.name, sg["name"]) 41 | 42 | def test_create_network_group_insuff_perms(self): 43 | payload = { 44 | "name": "sg4", 45 | "work_group_name": self.work_group1.name 46 | } 47 | r = self.post_json("/api/v1/network_groups/", payload, supervisor=True, system=False) 48 | self.assertEqual(403, r.status_code, "non system user should have no permissions to create SGs") 49 | 50 | def test_create_network_group(self): 51 | payload = { 52 | "name": "sg4", 53 | "work_group_name": self.work_group1.name 54 | } 55 | r = self.post_json("/api/v1/network_groups/", payload, supervisor=False, system=True) 56 | self.assertEqual(201, r.status_code) 57 | sg = NetworkGroup.get("sg4") 58 | self.assertIsNotNone(sg) 59 | 60 | def test_create_network_group_invalid_wg(self): 61 | payload = { 62 | "name": "sg4", 63 | "work_group_name": "non-existing" 64 | } 65 | r = self.post_json("/api/v1/network_groups/", payload, supervisor=False, system=True) 66 | self.assertEqual(404, r.status_code) 67 | 68 | def test_delete_network_group_insuff_perms(self): 69 | r = self.delete("/api/v1/network_groups/%s" % self.sg1._id, supervisor=True, system=False) 70 | self.assertEqual(403, r.status_code) 71 | 72 | def test_delete_network_group_with_hosts(self): 73 | h = Host(fqdn="host.example.com", network_group_id=self.sg1._id) 74 | h.save() 75 | r = self.delete("/api/v1/network_groups/%s" % self.sg1._id, supervisor=False, system=True) 76 | self.assertEqual(200, r.status_code) 77 | h.reload() 78 | self.assertIsNone(h.network_group_id) 79 | -------------------------------------------------------------------------------- /app/tests/httpapi/test_user_ctrl.py: -------------------------------------------------------------------------------- 1 | from httpapi_testcase import 
HttpApiTestCase 2 | from flask import json 3 | from app.models import User 4 | from copy import copy 5 | 6 | class TestUserCtrl(HttpApiTestCase): 7 | 8 | NEW_USER = { 9 | "username": "new_user_for_testing", 10 | "first_name": "Test", 11 | "last_name": "Case" 12 | } 13 | 14 | def tearDown(self): 15 | nu = User.find_one({"username": self.NEW_USER["username"]}) 16 | if nu is not None: 17 | nu.destroy() 18 | 19 | nu = User.find_one({"username": "changed_username"}) 20 | if nu is not None: 21 | nu.destroy() 22 | 23 | def test_list_users(self): 24 | r = self.get("/api/v1/users/") 25 | self.assertEqual(r.status_code, 200) 26 | body = json.loads(r.data) 27 | self.assertIn("data", body) 28 | self.assertIsInstance(body["data"], list) 29 | self.assertIs(4, len(body["data"])) 30 | 31 | def test_get_user(self): 32 | r = self.get("/api/v1/users/%s?_fields=username,auth_token" % self.supervisor._id, supervisor=False) 33 | self.assertEqual(r.status_code, 200) 34 | body = json.loads(r.data) 35 | self.assertIn("data", body) 36 | self.assertIsInstance(body["data"], list) 37 | self.assertIs(1, len(body["data"])) 38 | user_data = body["data"][0] 39 | self.assertEqual(user_data["username"], self.supervisor.username) 40 | self.assertIn("auth_token", user_data) 41 | self.assertIsNone(user_data["auth_token"]) 42 | 43 | def test_get_user_supervisor(self): 44 | r = self.get("/api/v1/users/%s?_fields=username,auth_token" % self.supervisor._id, supervisor=True) 45 | self.assertEqual(r.status_code, 200) 46 | body = json.loads(r.data) 47 | self.assertIn("data", body) 48 | self.assertIsInstance(body["data"], list) 49 | self.assertIs(1, len(body["data"])) 50 | user_data = body["data"][0] 51 | self.assertEqual(user_data["username"], self.supervisor.username) 52 | self.assertIn("auth_token", user_data) 53 | 54 | def test_user_create(self): 55 | r = self.post_json("/api/v1/users/", self.NEW_USER) 56 | self.assertEqual(r.status_code, 200) 57 | body = json.loads(r.data) 58 | self.assertIn("data", body) 
59 | self.assertIsInstance(body["data"], dict) 60 | user_data = body["data"] 61 | self.assertEqual(user_data["username"], self.NEW_USER["username"]) 62 | user_id = user_data["_id"] 63 | user = User.get(user_id) 64 | self.assertEqual(user.username, self.NEW_USER["username"]) 65 | 66 | def test_user_create_insufficient(self): 67 | r = self.post_json("/api/v1/users/", self.NEW_USER, supervisor=False) 68 | self.assertEqual(r.status_code, 403) 69 | 70 | def test_user_create_existing(self): 71 | r = self.post_json("/api/v1/users/", {"username": self.supervisor.username}) 72 | self.assertEqual(r.status_code, 409) 73 | 74 | def test_user_create_passwords_mismatch(self): 75 | new_user = copy(self.NEW_USER) 76 | new_user["password_raw"] = "123" 77 | new_user["password_raw_confirm"] = "456" 78 | r = self.post_json("/api/v1/users/", new_user) 79 | self.assertEqual(r.status_code, 400) 80 | 81 | def test_passwd(self): 82 | new_user = copy(self.NEW_USER) 83 | password = "super$ecr3t" 84 | new_user["password_raw"] = password 85 | new_user["password_raw_confirm"] = password 86 | r = self.post_json("/api/v1/users/", new_user) 87 | self.assertEqual(r.status_code, 200) 88 | 89 | # trying to log in with the new credentials 90 | r = self.fake_client.post("/api/v1/account/authenticate", data=json.dumps({"username":new_user["username"], "password": password}), headers={ "Content-Type": "application/json" }) 91 | self.assertEqual(r.status_code, 200) 92 | 93 | def test_update(self): 94 | new_user = copy(self.NEW_USER) 95 | password = "super$ecr3t" 96 | new_user["password_raw"] = password 97 | new_user["password_raw_confirm"] = password 98 | r = self.post_json("/api/v1/users/", new_user) 99 | self.assertEqual(r.status_code, 200) 100 | user_id = json.loads(r.data)["data"]["_id"] 101 | user = User.get(user_id) 102 | 103 | r = self.put_json("/api/v1/users/%s" % user_id, {"username": "changed_username"}) 104 | self.assertEqual(r.status_code, 200) 105 | user = User.get(user_id) 106 | 
self.assertEqual(user.username, "changed_username") 107 | 108 | def test_set_password(self): 109 | r = self.put_json( 110 | "/api/v1/users/%s/set_password" % self.user.username, 111 | { 112 | "password_raw": "123", 113 | "password_raw_confirm": "456" 114 | } 115 | ) 116 | self.assertEqual(r.status_code, 400) 117 | r = self.put_json( 118 | "/api/v1/users/%s/set_password" % self.user.username, 119 | { 120 | "password_raw": "newpwd", 121 | "password_raw_confirm": "newpwd" 122 | } 123 | ) 124 | self.assertEqual(r.status_code, 200) 125 | # trying to log in with the new credentials 126 | r = self.fake_client.post( 127 | "/api/v1/account/authenticate", 128 | data=json.dumps( 129 | { 130 | "username": self.user.username, 131 | "password": "newpwd" 132 | } 133 | ), 134 | headers={ 135 | "Content-Type": "application/json" 136 | } 137 | ) 138 | self.assertEqual(r.status_code, 200) 139 | 140 | def test_set_supervisor(self): 141 | r = self.put_json( 142 | "/api/v1/users/%s/set_supervisor" % self.user.username, 143 | {"supervisor": True} 144 | ) 145 | self.assertEqual(r.status_code, 200) 146 | user = User.get(self.user.username) 147 | self.assertTrue(user.supervisor) 148 | 149 | r = self.put_json( 150 | "/api/v1/users/%s/set_supervisor" % self.user.username, 151 | {"supervisor": False} 152 | ) 153 | self.assertEqual(r.status_code, 200) 154 | user = User.get(self.user.username) 155 | self.assertFalse(user.supervisor) 156 | 157 | def test_cant_unset_supervisor_on_self(self): 158 | r = self.put_json( 159 | "/api/v1/users/%s/set_supervisor" % self.supervisor.username, 160 | {"supervisor": False}, supervisor=True 161 | ) 162 | self.assertEqual(r.status_code, 403) 163 | 164 | def test_delete_user(self): 165 | u = User(**self.NEW_USER) 166 | u.save() 167 | r = self.delete("/api/v1/users/%s" % u.username) 168 | self.assertEqual(r.status_code, 200) 169 | -------------------------------------------------------------------------------- /app/tests/models/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/app/tests/models/__init__.py -------------------------------------------------------------------------------- /app/tests/models/test_datacenter_model.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from app.models.datacenter import Datacenter, DatacenterNotEmpty 3 | from library.engine.errors import FieldRequired, ParentCycle 4 | 5 | 6 | class TestDatacenterModel(TestCase): 7 | 8 | @classmethod 9 | def setUpClass(cls): 10 | Datacenter.destroy_all() 11 | Datacenter.ensure_indexes() 12 | 13 | def setUp(self): 14 | Datacenter.destroy_all() 15 | 16 | def tearDown(self): 17 | Datacenter.destroy_all() 18 | 19 | @classmethod 20 | def tearDownClass(cls): 21 | Datacenter.destroy_all() 22 | 23 | def test_incomlete(self): 24 | dc = Datacenter() 25 | self.assertRaises(FieldRequired, dc.save) 26 | 27 | def test_children_before_save(self): 28 | from app.models.storable_model import ObjectSaveRequired 29 | dc1 = Datacenter(name="dc1") 30 | dc12 = Datacenter(name="dc1.2") 31 | self.assertRaises(ObjectSaveRequired, dc1.add_child, dc12) 32 | self.assertRaises(ObjectSaveRequired, dc12.set_parent, dc1) 33 | 34 | def test_self_parent(self): 35 | dc1 = Datacenter(name="dc1") 36 | dc1.save() 37 | self.assertRaises(ParentCycle, dc1.set_parent, dc1._id) 38 | self.assertRaises(ParentCycle, dc1.set_parent, dc1) 39 | dc1.destroy() 40 | 41 | def test_destroy_non_empty(self): 42 | dc1 = Datacenter(name="dc1") 43 | dc1.save() 44 | dc12 = Datacenter(name="dc1.2") 45 | dc12.save() 46 | dc1.add_child(dc12) 47 | self.assertRaises(DatacenterNotEmpty, dc1.destroy) 48 | 49 | def test_add_child_by_object(self): 50 | dc1 = Datacenter(name="dc1") 51 | dc1.save() 52 | dc12 = Datacenter(name="dc1.2") 53 | dc12.save() 54 | dc1.add_child(dc12) 55 | dc1 = 
Datacenter.find_one({ "name": "dc1" }) 56 | dc12 = Datacenter.find_one({ "name": "dc1.2" }) 57 | self.assertIn(dc12._id, dc1.child_ids) 58 | self.assertEqual(dc12.parent_id, dc1._id) 59 | 60 | def test_add_child_by_id(self): 61 | dc1 = Datacenter(name="dc1") 62 | dc1.save() 63 | dc12 = Datacenter(name="dc1.2") 64 | dc12.save() 65 | dc1.add_child(dc12._id) 66 | dc1 = Datacenter.find_one({ "name": "dc1" }) 67 | dc12 = Datacenter.find_one({ "name": "dc1.2" }) 68 | self.assertIn(dc12._id, dc1.child_ids) 69 | self.assertEqual(dc12.parent_id, dc1._id) 70 | 71 | def test_set_parent_by_object(self): 72 | dc1 = Datacenter(name="dc1") 73 | dc1.save() 74 | dc12 = Datacenter(name="dc1.2") 75 | dc12.save() 76 | dc12.set_parent(dc1) 77 | dc1 = Datacenter.find_one({ "name": "dc1" }) 78 | dc12 = Datacenter.find_one({ "name": "dc1.2" }) 79 | self.assertIn(dc12._id, dc1.child_ids) 80 | self.assertEqual(dc12.parent_id, dc1._id) 81 | 82 | def test_set_parent_by_id(self): 83 | dc1 = Datacenter(name="dc1") 84 | dc1.save() 85 | dc12 = Datacenter(name="dc1.2") 86 | dc12.save() 87 | dc12.set_parent(dc1._id) 88 | dc1 = Datacenter.find_one({ "name": "dc1" }) 89 | dc12 = Datacenter.find_one({ "name": "dc1.2" }) 90 | self.assertIn(dc12._id, dc1.child_ids) 91 | self.assertEqual(dc12.parent_id, dc1._id) 92 | 93 | def test_remove_from_parent_before_destroy(self): 94 | dc1 = Datacenter(name="dc1") 95 | dc1.save() 96 | dc12 = Datacenter(name="dc1.2") 97 | dc12.save() 98 | dc12.set_parent(dc1._id) 99 | dc12 = Datacenter.find_one({ "name": "dc1.2" }) 100 | child_id = dc12._id 101 | dc12.destroy() 102 | dc1 = Datacenter.find_one({ "name": "dc1" }) 103 | self.assertNotIn(child_id, dc1.child_ids) 104 | 105 | def test_all_children(self): 106 | dc1 = Datacenter(name="dc1") 107 | dc1.save() 108 | dc12 = Datacenter(name="dc1.2") 109 | dc12.save() 110 | dc12.set_parent(dc1) 111 | dc121 = Datacenter(name="dc1.2.1") 112 | dc121.save() 113 | dc121.set_parent(dc12) 114 | dc122 = Datacenter(name="dc1.2.2") 115 | 
dc122.save() 116 | dc122.set_parent(dc12) 117 | self.assertItemsEqual([dc12, dc121, dc122], dc1.get_all_children()) 118 | 119 | def test_cycle(self): 120 | # DC1 -> DC1.2 -> DC1.2.1 -> DC1.2.1.3 --X--> DC1 121 | # the last connection must raise ParentCycle 122 | dc1 = Datacenter(name="dc1") 123 | dc1.save() 124 | dc12 = Datacenter(name="dc1.2") 125 | dc12.save() 126 | dc12.set_parent(dc1) 127 | dc121 = Datacenter(name="dc1.2.1") 128 | dc121.save() 129 | dc121.set_parent(dc12) 130 | dc1213 = Datacenter(name="dc1.2.1.3") 131 | dc1213.save() 132 | dc1213.set_parent(dc121) 133 | self.assertRaises(ParentCycle, dc1.set_parent, dc1213) 134 | self.assertRaises(ParentCycle, dc1213.add_child, dc1) -------------------------------------------------------------------------------- /app/tests/models/test_host_model.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from app.models import WorkGroup, Group, Host, Datacenter, User, NetworkGroup 3 | from app.models.storable_model import ObjectSaveRequired, now 4 | from library.engine.errors import GroupNotFound, DatacenterNotFound, InvalidTags, InvalidAliases, NetworkGroupNotFound 5 | from pymongo.errors import DuplicateKeyError 6 | from datetime import timedelta 7 | 8 | ANSIBLE_DATA1 = { 9 | "ansible_vars": { 10 | "port": 5335, 11 | "proto": "udp" 12 | } 13 | } 14 | 15 | ANSIBLE_DATA2 = { 16 | "ansible_vars": { 17 | "port": 8080, 18 | "description": "this is a test host" 19 | } 20 | } 21 | 22 | ANSIBLE_RESULT = { 23 | "port": 8080, 24 | "proto": "udp", 25 | "description": "this is a test host" 26 | } 27 | 28 | 29 | class TestHostModel(TestCase): 30 | 31 | @classmethod 32 | def setUpClass(cls): 33 | Host.destroy_all() 34 | Host.ensure_indexes() 35 | Group.destroy_all() 36 | Group.ensure_indexes() 37 | NetworkGroup.destroy_all() 38 | NetworkGroup.ensure_indexes() 39 | WorkGroup.destroy_all() 40 | WorkGroup.ensure_indexes() 41 | User.destroy_all() 42 | 
User.ensure_indexes() 43 | cls.twork_group_owner = User(username='viert', password_hash='hash') 44 | cls.twork_group_owner.save() 45 | cls.twork_group_member = User(username='member', password_hash='hash2') 46 | cls.twork_group_member.save() 47 | cls.twork_group = WorkGroup(name="test_work_group", owner_id=cls.twork_group_owner._id) 48 | cls.twork_group.save() 49 | cls.twork_group.add_member(cls.twork_group_member) 50 | 51 | @classmethod 52 | def tearDownClass(cls): 53 | Host.destroy_all() 54 | NetworkGroup.destroy_all() 55 | Group.destroy_all() 56 | WorkGroup.destroy_all() 57 | User.destroy_all() 58 | 59 | def setUp(self): 60 | Host.destroy_all() 61 | Group.destroy_all() 62 | 63 | def tearDown(self): 64 | Host.destroy_all() 65 | Group.destroy_all() 66 | 67 | def test_invalid_group(self): 68 | g = Group(name="test_group", work_group_id=self.twork_group._id) 69 | g.save() 70 | group_id = g._id 71 | g.destroy() 72 | h = Host(fqdn="host.example.com", group_id=group_id) 73 | self.assertRaises(GroupNotFound, h.save) 74 | 75 | def test_invalid_datacenter(self): 76 | g = Group(name="test_group", work_group_id=self.twork_group._id) 77 | g.save() 78 | d = Datacenter(name="test_datacenter") 79 | d.save() 80 | dc_id = d._id 81 | d.destroy() 82 | h = Host(fqdn="host.example.com", group_id=g._id, datacenter_id=dc_id) 83 | self.assertRaises(DatacenterNotFound, h.save) 84 | 85 | def test_invalid_tags(self): 86 | g = Group(name="test_group", work_group_id=self.twork_group._id) 87 | g.save() 88 | h = Host(fqdn="host.example.com", group_id=g._id, tags="invalid_tags") 89 | self.assertRaises(InvalidTags, h.save) 90 | 91 | def test_duplicate_fqdn(self): 92 | g = Group(name="test_group", work_group_id=self.twork_group._id) 93 | g.save() 94 | h = Host(fqdn="host.example.com", group_id=g._id) 95 | h.save() 96 | h = Host(fqdn="host.example.com", group_id=g._id) 97 | self.assertRaises(DuplicateKeyError, h.save) 98 | 99 | def test_root_datacenter(self): 100 | g = Group(name="test_group", 
work_group_id=self.twork_group._id) 101 | g.save() 102 | dc1 = Datacenter(name="dc1") 103 | dc1.save() 104 | dc11 = Datacenter(name="dc1.1") 105 | dc11.save() 106 | dc11.set_parent(dc1) 107 | h = Host(fqdn="host.example.com", group_id=g._id, datacenter_id=dc11._id) 108 | h.save() 109 | self.assertEqual(h.root_datacenter, dc1) 110 | 111 | def test_tags(self): 112 | g1 = Group(name="test_group", work_group_id=self.twork_group._id, tags=["tag1", "tag2"]) 113 | g1.save() 114 | g2 = Group(name="test_group2", work_group_id=self.twork_group._id, tags=["tag2", "tag3"]) 115 | g2.save() 116 | h = Host(fqdn="host.example.com", group_id=g2._id, tags=["tag4"]) 117 | h.save() 118 | self.assertItemsEqual(["tag2", "tag3", "tag4"], h.all_tags) 119 | g1.add_child(g2) 120 | self.assertItemsEqual(["tag1", "tag2", "tag3", "tag4"], h.all_tags) 121 | 122 | def test_default_aliases(self): 123 | h1 = Host(fqdn="host.example.com") 124 | h1.save() 125 | h1 = Host.get("host.example.com") 126 | self.assertIs(type(h1.aliases), list) 127 | self.assertEqual(len(h1.aliases), 0) 128 | 129 | def test_invalid_aliases(self): 130 | h1 = Host(fqdn="host.example.com", aliases="host.i.example.com") 131 | self.assertRaises(InvalidAliases, h1.save) 132 | 133 | def test_aliases(self): 134 | h1 = Host(fqdn="host.example.com", aliases=["host.i.example.com"]) 135 | h1.save() 136 | h1 = Host.get("host.example.com") 137 | self.assertItemsEqual(["host.i.example.com"], h1.aliases) 138 | 139 | def test_aliases_search(self): 140 | h1 = Host(fqdn="host.example.com", aliases=["host.i.example.com"]) 141 | h1.save() 142 | h1 = Host.find_one({"aliases": "host.i.example.com"}) 143 | self.assertIsNotNone(h1) 144 | 145 | def test_ansible_vars(self): 146 | g1 = Group(name="g1", work_group_id=self.twork_group._id, local_custom_data=ANSIBLE_DATA1) 147 | g1.save() 148 | h = Host(fqdn="host.example.com", group_id=g1._id, local_custom_data=ANSIBLE_DATA2) 149 | h.save() 150 | self.assertDictEqual(h.ansible_vars, ANSIBLE_RESULT) 151 
| 152 | def test_responsibles(self): 153 | g1 = Group(name="g1", work_group_id=self.twork_group._id) 154 | g1.save() 155 | h = Host(fqdn="host.example.com", group_id=g1._id) 156 | h.save() 157 | self.assertItemsEqual(h.responsibles, [self.twork_group_member, self.twork_group_owner]) 158 | 159 | def test_invalid_server_groups(self): 160 | g1 = Group(name="g1", work_group_id=self.twork_group._id) 161 | g1.save() 162 | h = Host(fqdn="host.example.com", group_id=g1._id, network_group_id="doesntmakeanysense") 163 | self.assertRaises(NetworkGroupNotFound, h.save) 164 | 165 | def test_custom_data(self): 166 | g1 = Group(name="g1", work_group_id=self.twork_group._id, 167 | local_custom_data={ 168 | "group1data": "group1", 169 | "common": { 170 | "group1": 1, 171 | "all": 2, 172 | } 173 | }) 174 | g1.save() 175 | g2 = Group(name="g2", work_group_id=self.twork_group._id, 176 | local_custom_data={ 177 | "group2data": "group2", 178 | "common": { 179 | "group2": 2, 180 | "all": 3 181 | } 182 | }) 183 | g2.save() 184 | g1.add_child(g2) 185 | 186 | h = Host(fqdn="host.example.com", group_id=g2._id, local_custom_data={ 187 | "hostdata": "host1", 188 | "common": { 189 | "domain": "example.com" 190 | } 191 | }) 192 | h.save() 193 | 194 | self.assertDictEqual( 195 | h.custom_data, 196 | { 197 | "group1data": "group1", 198 | "group2data": "group2", 199 | "hostdata": "host1", 200 | "common": { 201 | "group1": 1, 202 | "group2": 2, 203 | "all": 3, 204 | "domain": "example.com" 205 | } 206 | } 207 | ) 208 | 209 | del(g1.local_custom_data["group1data"]) 210 | del(g1.local_custom_data["common"]["all"]) 211 | g1.save() 212 | self.assertDictEqual( 213 | h.custom_data, 214 | { 215 | "group2data": "group2", 216 | "hostdata": "host1", 217 | "common": { 218 | "group1": 1, 219 | "group2": 2, 220 | "all": 3, 221 | "domain": "example.com" 222 | } 223 | } 224 | ) 225 | 226 | def test_responsibles_cache(self): 227 | h = Host(fqdn="host.example.com") 228 | h.save() 229 | 
self.assertItemsEqual(h.responsibles_usernames_cache, []) 230 | 231 | g = Group(name="g1", work_group_id=self.twork_group._id) 232 | g.save() 233 | 234 | h.group_id = g._id 235 | h.save() 236 | self.assertItemsEqual(h.responsibles_usernames_cache, ['viert', 'member']) 237 | 238 | def test_add_remove_custom_data(self): 239 | h = Host(fqdn="host.example.com", local_custom_data={"key1": {"key1_1": True}, "key2": "value2"}) 240 | h.save() 241 | 242 | h.add_local_custom_data({"key1.key1_2": False, "key2": "override"}) 243 | self.assertDictEqual(h.local_custom_data, { 244 | "key1": { 245 | "key1_1": True, 246 | "key1_2": False 247 | }, 248 | "key2": "override" 249 | }) 250 | 251 | h.remove_local_custom_data("key1.key1_1") 252 | self.assertDictEqual(h.local_custom_data, { 253 | "key1": { 254 | "key1_2": False 255 | }, 256 | "key2": "override" 257 | }) 258 | 259 | def test_security_key(self): 260 | h = Host(fqdn="host.example.com", local_custom_data={"key1": {"key1_1": True}, "key2": "value2"}) 261 | self.assertRaises(ObjectSaveRequired, h.generate_security_key) 262 | self.assertTrue(h.security_key_expired()) 263 | 264 | h.save() 265 | key = h.generate_security_key() 266 | self.assertIsNotNone(Host.get(key)) 267 | 268 | h.security_key_expires_at = now() - timedelta(seconds=10) 269 | h.save() 270 | 271 | self.assertIsNone(Host.get(key)) 272 | 273 | key2 = h.generate_security_key() 274 | self.assertNotEqual(key, key2) 275 | 276 | self.assertIsNotNone(Host.get(key2)) 277 | -------------------------------------------------------------------------------- /app/tests/models/test_network_group_model.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from app.models import WorkGroup, User, NetworkGroup, Host 3 | from library.engine.errors import FieldRequired, InvalidWorkGroupId, ServerGroupNotEmpty 4 | 5 | 6 | class TestNetworkGroupModel(TestCase): 7 | 8 | @classmethod 9 | def setUpClass(cls): 10 | 
WorkGroup.destroy_all() 11 | WorkGroup.ensure_indexes() 12 | cls.twork_group_owner = User(username='test_user', password_hash='test_hash', supervisor=False, system=False) 13 | cls.twork_group_owner.save() 14 | cls.system_user = User(username='test_system_user', password_hash='test_hash', supervisor=False, system=True) 15 | cls.system_user.save() 16 | cls.twork_group = WorkGroup(name="test_work_group", owner_id=cls.twork_group_owner._id) 17 | cls.twork_group.save() 18 | cls.twork_group2 = WorkGroup(name="test_work_group2", owner_id=cls.twork_group_owner._id) 19 | cls.twork_group2.save() 20 | 21 | def setUp(self): 22 | NetworkGroup.destroy_all() 23 | 24 | def tearDown(self): 25 | NetworkGroup.destroy_all() 26 | 27 | @classmethod 28 | def tearDownClass(cls): 29 | NetworkGroup.destroy_all() 30 | WorkGroup.destroy_all() 31 | User.destroy_all() 32 | 33 | def test_incomplete(self): 34 | sg = NetworkGroup() 35 | self.assertRaises(FieldRequired, sg.save) 36 | sg.name = "my test group" 37 | sg.work_group_id = "some invalid id" 38 | self.assertRaises(InvalidWorkGroupId, sg.save) 39 | 40 | def test_permissions(self): 41 | sg = NetworkGroup(name='ng1', work_group_id=self.twork_group._id) 42 | sg.save() 43 | self.assertFalse(sg._modification_allowed_by(self.twork_group_owner)) 44 | self.assertTrue(sg._modification_allowed_by(self.system_user)) 45 | 46 | def test_get(self): 47 | sg = NetworkGroup(name='ng1', work_group_id=self.twork_group._id) 48 | sg.save() 49 | sg = NetworkGroup.get("ng1") 50 | self.assertIsNotNone(sg) 51 | 52 | def test_destroy_not_empty(self): 53 | sg = NetworkGroup(name='ng1', work_group_id=self.twork_group._id) 54 | sg.save() 55 | host = Host(fqdn='host.example.com', network_group_id=sg._id) 56 | host.save() 57 | self.assertRaises(ServerGroupNotEmpty, sg.destroy) 58 | host.destroy() 59 | sg.destroy() 60 | -------------------------------------------------------------------------------- /app/tests/models/test_storable_model.py: 
# ---- app/tests/models/test_storable_model.py ----
from unittest import TestCase
from app.models.storable_model import StorableModel, FieldRequired

# Value produced by the callable default below; asserted against in tests.
CALLABLE_DEFAULT_VALUE = 4


def callable_default():
    # DEFAULTS entries may be callables that the model evaluates lazily.
    return CALLABLE_DEFAULT_VALUE


class TestModel(StorableModel):
    """Minimal StorableModel subclass used as a fixture by the tests below."""

    FIELDS = (
        '_id',
        'field1',
        'field2',
        'field3',
        'callable_default_field'
    )

    DEFAULTS = {
        'field1': 'default_value',
        'field3': 'required_default_value',
        'callable_default_field': callable_default
    }

    REQUIRED_FIELDS = (
        'field2',
        'field3',
    )

    REJECTED_FIELDS = (
        'field1',
    )

    # Deliberately incorrect: a bare string in parentheses, not a tuple.
    # ensure_indexes() is expected to raise TypeError for this shape
    # (exercised by test_incorrect_index below).
    INDEXES = (
        "field1"
    )


class TestStorableModel(TestCase):

    def setUp(self):
        TestModel.destroy_all()

    def tearDown(self):
        TestModel.destroy_all()

    def test_init(self):
        model = TestModel(field1='value')
        self.assertEqual(model.field1, 'value')
        # Lifecycle hooks must be callable even on an unsaved instance.
        model._before_delete()
        model._before_save()

    def test_incomplete(self):
        # field2/field3 are REQUIRED_FIELDS; saving without them must fail.
        model = TestModel(field1='value')
        self.assertRaises(FieldRequired, model.save)

    def test_incorrect_index(self):
        model = TestModel()
        self.assertRaises(TypeError, model.ensure_indexes)

    def test_eq(self):
        model = TestModel(field2="mymodel")
        model.save()
        fetched = TestModel.find_one({"field2": "mymodel"})
        self.assertEqual(model, fetched)

    def test_reject_on_update(self):
        # field1 is in REJECTED_FIELDS, so update() must keep the old value.
        model = TestModel(field1="original_value", field2="mymodel_reject_test")
        model.save()
        saved_id = model._id
        model.update({"field1": "new_value"})
        model = TestModel.find_one({"_id": saved_id})
        self.assertEqual(model.field1, "original_value")

    def test_update(self):
        model = TestModel(field1="original_value", field2="mymodel_update_test")
        model.save()
81 | id = model._id 82 | model.update({"field2": "mymodel_updated"}) 83 | model = TestModel.find_one({ "_id": id }) 84 | self.assertEqual(model.field2, "mymodel_updated") 85 | 86 | def test_update_many(self): 87 | model1 = TestModel(field1="original_value", field2="mymodel_update_test") 88 | model1.save() 89 | model2 = TestModel(field1="original_value", field2="mymodel_update_test") 90 | model2.save() 91 | model3 = TestModel(field1="do_not_modify", field2="mymodel_update_test") 92 | model3.save() 93 | 94 | TestModel.update_many({"field1": "original_value"}, {"$set": {"field2": "mymodel_updated"}}) 95 | model1.reload() 96 | model2.reload() 97 | model3.reload() 98 | 99 | self.assertEqual(model1.field2, "mymodel_updated") 100 | self.assertEqual(model2.field2, "mymodel_updated") 101 | self.assertEqual(model3.field2, "mymodel_update_test") 102 | 103 | -------------------------------------------------------------------------------- /app/tests/models/test_user_model.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from app.models import User, Token 3 | from app.models.storable_model import now 4 | from datetime import datetime, timedelta 5 | 6 | 7 | class TestUserModel(TestCase): 8 | 9 | @classmethod 10 | def setUpClass(cls): 11 | from app import app 12 | app.auth_token_ttl = timedelta(seconds=100) 13 | app.auth_token_ttr = timedelta(seconds=10) 14 | 15 | def setUp(self): 16 | Token.destroy_all() 17 | User.destroy_all() 18 | 19 | def test_token_expiration(self): 20 | u = User(username="test_user") 21 | u.save() 22 | 23 | self.assertEqual(u.tokens.count(), 0) 24 | 25 | _ = u.auth_token 26 | self.assertEqual(u.tokens.count(), 1) 27 | 28 | t1 = u.tokens[0] 29 | self.assertFalse(t1.expired()) 30 | 31 | t1.created_at = now() - timedelta(seconds=20) # need renewing 32 | t1.save() 33 | self.assertFalse(t1.expired()) 34 | 35 | at = u.auth_token 36 | self.assertNotEqual(at, t1.token, "another token should be 
issued, time to renew") 37 | tokens = Token.find({"user_id": u._id}) 38 | self.assertEqual(tokens.count(), 2) 39 | 40 | t1.created_at = now() - timedelta(seconds=120) 41 | t1.save() 42 | new_at = u.auth_token 43 | self.assertEqual(at, new_at, "token should not have changed") 44 | self.assertEqual(u.tokens.count(), 1, "the first token should be destroyed due to expiration") 45 | -------------------------------------------------------------------------------- /app/tests/models/test_work_group_model.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from app.models import WorkGroup, User, Group, NetworkGroup 3 | from app.models.work_group import WorkGroupNotEmpty, InvalidOwner 4 | from time import sleep 5 | from pymongo.errors import DuplicateKeyError 6 | 7 | TEST_WG_NAME = "testCase.my_unique_test_work_group" 8 | TEST_GROUP_NAME = "testCase.my_unique_test_group" 9 | 10 | 11 | class TestWorkGroupModel(TestCase): 12 | 13 | @classmethod 14 | def setUpClass(cls): 15 | WorkGroup.destroy_all() 16 | WorkGroup.ensure_indexes() 17 | Group.destroy_all() 18 | Group.ensure_indexes() 19 | User.destroy_all() 20 | User.ensure_indexes() 21 | cls.owner = User(username="viert", password_hash="hash") 22 | cls.owner.save() 23 | cls.user = User(username="someuser") 24 | cls.user.save() 25 | WorkGroup.ensure_indexes() 26 | 27 | @classmethod 28 | def tearDownClass(cls): 29 | WorkGroup.destroy_all() 30 | cls.owner.destroy() 31 | cls.user.destroy() 32 | 33 | def setUp(self): 34 | Group.destroy_all() 35 | p = WorkGroup.find_one({"name": TEST_WG_NAME}) 36 | if p is not None: 37 | p.destroy() 38 | 39 | def tearDown(self): 40 | Group.destroy_all() 41 | p = WorkGroup.find_one({"name": TEST_WG_NAME}) 42 | if p is not None: 43 | p.destroy() 44 | 45 | def test_unique_index(self): 46 | p = WorkGroup(name=TEST_WG_NAME, owner_id=self.owner._id) 47 | p.save() 48 | p = WorkGroup(name=TEST_WG_NAME, owner_id=self.owner._id) 49 | 
self.assertRaises(DuplicateKeyError, p.save) 50 | 51 | def test_touch_on_save(self): 52 | p = WorkGroup(name=TEST_WG_NAME, owner_id=self.owner._id) 53 | p.save() 54 | dt1 = p.updated_at 55 | sleep(1) 56 | p.save() 57 | dt2 = p.updated_at 58 | self.assertNotEqual(dt1, dt2, msg="updated_at not changed while saving WorkGroup") 59 | 60 | def test_delete_non_empty(self): 61 | p = WorkGroup(name=TEST_WG_NAME, owner_id=self.owner._id) 62 | p.save() 63 | g = Group(name=TEST_GROUP_NAME, work_group_id=p._id) 64 | g.save() 65 | self.assertRaises(WorkGroupNotEmpty, p.destroy) 66 | g.destroy() 67 | p.destroy() 68 | 69 | def test_delete_non_empty_sg(self): 70 | p = WorkGroup(name=TEST_WG_NAME, owner_id=self.owner._id) 71 | p.save() 72 | sg = NetworkGroup(name=TEST_GROUP_NAME, work_group_id=p._id) 73 | sg.save() 74 | self.assertRaises(WorkGroupNotEmpty, p.destroy) 75 | sg.destroy() 76 | p.destroy() 77 | 78 | def test_owner(self): 79 | p = WorkGroup(name="TEST_WG_NAME", owner_id="arbitrary") 80 | self.assertRaises(InvalidOwner, p.save) 81 | 82 | def test_responsibles_cache(self): 83 | from app.models import Group, Host 84 | wg = WorkGroup(name="TEST_WG_NAME", owner_id=self.owner._id) 85 | wg.save() 86 | 87 | g = Group(name="group1", work_group_id=wg._id) 88 | g.save() 89 | 90 | h = Host(fqdn="myhost.example.com", group_id=g._id) 91 | h.save() 92 | 93 | self.assertItemsEqual(g.responsibles_usernames_cache, [self.owner.username]) 94 | self.assertItemsEqual(h.responsibles_usernames_cache, [self.owner.username]) 95 | 96 | wg.add_member(self.user) 97 | g.reload() 98 | h.reload() 99 | self.assertItemsEqual(g.responsibles_usernames_cache, [self.owner.username, self.user.username]) 100 | self.assertItemsEqual(h.responsibles_usernames_cache, [self.owner.username, self.user.username]) 101 | -------------------------------------------------------------------------------- /app/tests/utils/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/app/tests/utils/__init__.py -------------------------------------------------------------------------------- /app/tests/utils/test_diff.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from library.engine.utils import diff 3 | 4 | class TestDiff(TestCase): 5 | 6 | def test_diff(self): 7 | original = [1,3,7,9,12] 8 | updated = [3,7,9,14] 9 | d = diff(original, updated) 10 | self.assertItemsEqual([1,12], d.remove) 11 | self.assertItemsEqual([14], d.add) 12 | 13 | def test_equal(self): 14 | original = [1,3,7,9,12] 15 | updated = [1,3,7,9,12] 16 | d = diff(original, updated) 17 | self.assertItemsEqual([], d.add) 18 | self.assertItemsEqual([], d.remove) 19 | -------------------------------------------------------------------------------- /app/tests/utils/test_merge.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from library.engine.utils import merge, convert_keys 3 | 4 | D1 = { 5 | "field1": 3, 6 | "field2": "a", 7 | "field3": [1, 3, 5], 8 | "field4": { 9 | "field5": { 10 | "field6": "hello" 11 | } 12 | } 13 | } 14 | 15 | D2 = { 16 | "field1": { 17 | "override": "yes" 18 | }, 19 | "field3": [9, 7, 4], 20 | "field4": { 21 | "field5": { 22 | "field7": { 23 | "new_data": True 24 | } 25 | } 26 | }, 27 | "field8": "bye" 28 | } 29 | 30 | EXPECTED = { 31 | "field1": { 32 | "override": "yes" 33 | }, 34 | "field2": "a", 35 | "field3": [9, 7, 4], 36 | "field4": { 37 | "field5": { 38 | "field6": "hello", 39 | "field7": { 40 | "new_data": True 41 | } 42 | } 43 | }, 44 | "field8": "bye" 45 | } 46 | 47 | CK_INPUT = { 48 | "key0": "value0", 49 | "key1.key1_1.key1_1_1": "value1_1_1", 50 | "key1.key1_2": "value1_2", 51 | "key2.key1": "value_test" 52 | } 53 | 54 | CK_EXPECTED = { 55 | "key0": "value0", 56 | "key1": { 57 | "key1_1": { 58 | "key1_1_1": 
"value1_1_1", 59 | }, 60 | "key1_2": "value1_2", 61 | }, 62 | "key2": { 63 | "key1": "value_test" 64 | } 65 | } 66 | 67 | 68 | class TestMerge(TestCase): 69 | 70 | def test_merge_dicts(self): 71 | result = merge(D1, D2) 72 | self.assertDictEqual(result, EXPECTED) 73 | 74 | def test_convert_keys(self): 75 | self.assertDictEqual(convert_keys(CK_INPUT), CK_EXPECTED) 76 | -------------------------------------------------------------------------------- /app/tests/utils/test_ownership.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from app.models import User, Group, WorkGroup, Host 3 | from library.engine.ownership import user_groups, user_work_groups, user_hosts 4 | 5 | 6 | class TestOwnership(TestCase): 7 | 8 | user1 = None 9 | user2 = None 10 | wg1 = None 11 | wg2 = None 12 | 13 | groups = {} 14 | hosts = [] 15 | 16 | @classmethod 17 | def setUpClass(cls): 18 | cls.user1 = User(username="user1").save() 19 | cls.user2 = User(username="user2").save() 20 | cls.wg1 = WorkGroup(name="wg1", owner_id=cls.user1._id).save() 21 | cls.wg2 = WorkGroup(name="wg2", owner_id=cls.user2._id).save() 22 | cls.groups["g11"] = Group(name="g11", work_group_id=cls.wg1._id).save() 23 | cls.groups["g12"] = Group(name="g12", work_group_id=cls.wg1._id).save() 24 | cls.groups["g21"] = Group(name="g21", work_group_id=cls.wg2._id).save() 25 | cls.groups["g22"] = Group(name="g22", work_group_id=cls.wg2._id).save() 26 | cls.hosts.append(Host(fqdn="host1", group_id=cls.groups["g11"]._id).save()) 27 | cls.hosts.append(Host(fqdn="host2", group_id=cls.groups["g11"]._id).save()) 28 | cls.hosts.append(Host(fqdn="host3", group_id=cls.groups["g12"]._id).save()) 29 | cls.hosts.append(Host(fqdn="host4", group_id=cls.groups["g12"]._id).save()) 30 | cls.hosts.append(Host(fqdn="host5", group_id=cls.groups["g21"]._id).save()) 31 | cls.hosts.append(Host(fqdn="host6", group_id=cls.groups["g21"]._id).save()) 32 | 
cls.hosts.append(Host(fqdn="host7", group_id=cls.groups["g22"]._id).save()) 33 | cls.hosts.append(Host(fqdn="host8", group_id=cls.groups["g22"]._id).save()) 34 | cls.hosts.append(Host(fqdn="host9").save()) 35 | cls.hosts.append(Host(fqdn="host10").save()) 36 | 37 | def test_user_work_groups(self): 38 | wgs = user_work_groups(self.user1._id).all() 39 | self.assertEqual(1, len(wgs)) 40 | ids = [x._id for x in wgs] 41 | self.assertItemsEqual(ids, [self.wg1._id]) 42 | 43 | wgs = user_work_groups(self.user2._id).all() 44 | self.assertEqual(1, len(wgs)) 45 | ids = [x._id for x in wgs] 46 | self.assertItemsEqual(ids, [self.wg2._id]) 47 | 48 | def test_user_groups(self): 49 | wgs = user_groups(self.user1._id).all() 50 | self.assertEqual(2, len(wgs)) 51 | ids = [x._id for x in wgs] 52 | self.assertItemsEqual(ids, [ 53 | self.groups["g11"]._id, 54 | self.groups["g12"]._id, 55 | ]) 56 | 57 | wgs = user_groups(self.user2._id).all() 58 | self.assertEqual(2, len(wgs)) 59 | ids = [x._id for x in wgs] 60 | self.assertItemsEqual(ids, [ 61 | self.groups["g21"]._id, 62 | self.groups["g22"]._id, 63 | ]) 64 | 65 | def test_user_hosts(self): 66 | hsts = user_hosts(self.user1._id, include_not_assigned=False).all() 67 | self.assertEqual(4, len(hsts)) 68 | ids = list(set([x.group_id for x in hsts])) 69 | self.assertItemsEqual(ids, [self.groups["g11"]._id, self.groups["g12"]._id]) 70 | 71 | hsts = user_hosts(self.user2._id, include_not_assigned=True).all() 72 | self.assertEqual(6, len(hsts)) 73 | ids = list(set([x.group_id for x in hsts])) 74 | self.assertItemsEqual(ids, [self.groups["g21"]._id, self.groups["g22"]._id, None]) 75 | 76 | @classmethod 77 | def tearDownClass(cls): 78 | User.destroy_all() -------------------------------------------------------------------------------- /app/tests/utils/test_pbkdf2.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from library.engine.pbkdf2 import pbkdf2_hex 3 | 4 | 5 | class 
TestPBKDF2(TestCase): 6 | 7 | TEST_DATA = ( 8 | # From RFC 6070 9 | ('password', 'salt', 1, 20, 10 | '0c60c80f961f0e71f3a9b524af6012062fe037a6'), 11 | ('password', 'salt', 2, 20, 12 | 'ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957'), 13 | ('password', 'salt', 4096, 20, 14 | '4b007901b765489abead49d926f721d065a429c1'), 15 | ('passwordPASSWORDpassword', 'saltSALTsaltSALTsaltSALTsaltSALTsalt', 16 | 4096, 25, '3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038'), 17 | ('pass\x00word', 'sa\x00lt', 4096, 16, 18 | '56fa6aa75548099dcc37d7f03425e0c3'), 19 | # From Crypt-PBKDF2 20 | ('password', 'ATHENA.MIT.EDUraeburn', 1, 16, 21 | 'cdedb5281bb2f801565a1122b2563515'), 22 | ('password', 'ATHENA.MIT.EDUraeburn', 1, 32, 23 | 'cdedb5281bb2f801565a1122b25635150ad1f7a04bb9f3a333ecc0e2e1f70837'), 24 | ('password', 'ATHENA.MIT.EDUraeburn', 2, 16, 25 | '01dbee7f4a9e243e988b62c73cda935d'), 26 | ('password', 'ATHENA.MIT.EDUraeburn', 2, 32, 27 | '01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86'), 28 | ('password', 'ATHENA.MIT.EDUraeburn', 1200, 32, 29 | '5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13'), 30 | ('X' * 64, 'pass phrase equals block size', 1200, 32, 31 | '139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1'), 32 | ('X' * 65, 'pass phrase exceeds block size', 1200, 32, 33 | '9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a'), 34 | ) 35 | 36 | def test_pbkdf2(self): 37 | for case in self.TEST_DATA: 38 | data, salt, iterations, keylen, expected = case 39 | self.assertEqual(pbkdf2_hex(data, salt, iterations, keylen), expected) 40 | -------------------------------------------------------------------------------- /app/tests/utils/test_permutation.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from library.engine.permutation import succ, sequence, expand_pattern_with_vars 3 | 4 | class TestPermutation(TestCase): 5 | """Test cases created from ruby 
console""" 6 | SUCC_CASES = { 7 | "0": "1", 8 | "a": "b", 9 | "D": "E", 10 | "9z": "10a", 11 | "D9jzZ": "D9kaA" 12 | } 13 | 14 | SEQUENCE_CASES = ( 15 | ("0", "15", ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15"]), 16 | ("03", "12", ["03", "04", "05", "06", "07", "08", "09", "10", "11", "12"]), 17 | ("0a", "z9", ["0a", "0b", "0c", "0d", "0e", "0f", "0g", "0h", "0i", "0j", "0k", "0l", "0m", "0n", "0o", "0p", "0q", "0r", "0s", "0t", "0u", "0v", "0w", "0x", "0y", "0z", "1a", "1b", "1c", "1d", "1e", "1f", "1g", "1h", "1i", "1j", "1k", "1l", "1m", "1n", "1o", "1p", "1q", "1r", "1s", "1t", "1u", "1v", "1w", "1x", "1y", "1z", "2a", "2b", "2c", "2d", "2e", "2f", "2g", "2h", "2i", "2j", "2k", "2l", "2m", "2n", "2o", "2p", "2q", "2r", "2s", "2t", "2u", "2v", "2w", "2x", "2y", "2z", "3a", "3b", "3c", "3d", "3e", "3f", "3g", "3h", "3i", "3j", "3k", "3l", "3m", "3n", "3o", "3p", "3q", "3r", "3s", "3t", "3u", "3v", "3w", "3x", "3y", "3z", "4a", "4b", "4c", "4d", "4e", "4f", "4g", "4h", "4i", "4j", "4k", "4l", "4m", "4n", "4o", "4p", "4q", "4r", "4s", "4t", "4u", "4v", "4w", "4x", "4y", "4z", "5a", "5b", "5c", "5d", "5e", "5f", "5g", "5h", "5i", "5j", "5k", "5l", "5m", "5n", "5o", "5p", "5q", "5r", "5s", "5t", "5u", "5v", "5w", "5x", "5y", "5z", "6a", "6b", "6c", "6d", "6e", "6f", "6g", "6h", "6i", "6j", "6k", "6l", "6m", "6n", "6o", "6p", "6q", "6r", "6s", "6t", "6u", "6v", "6w", "6x", "6y", "6z", "7a", "7b", "7c", "7d", "7e", "7f", "7g", "7h", "7i", "7j", "7k", "7l", "7m", "7n", "7o", "7p", "7q", "7r", "7s", "7t", "7u", "7v", "7w", "7x", "7y", "7z", "8a", "8b", "8c", "8d", "8e", "8f", "8g", "8h", "8i", "8j", "8k", "8l", "8m", "8n", "8o", "8p", "8q", "8r", "8s", "8t", "8u", "8v", "8w", "8x", "8y", "8z", "9a", "9b", "9c", "9d", "9e", "9f", "9g", "9h", "9i", "9j", "9k", "9l", "9m", "9n", "9o", "9p", "9q", "9r", "9s", "9t", "9u", "9v", "9w", "9x", "9y", "9z"]), 18 | ("j3", "m8", ["j3", "j4", "j5", "j6", "j7", "j8", "j9", "k0", "k1", "k2", 
"k3", "k4", "k5", "k6", "k7", "k8", "k9", "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7", "l8", "l9", "m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8"]) 19 | ) 20 | 21 | VARS_CASES = ( 22 | ("a[01-03][a,b]", [ 23 | ("a01a", ["a01a", "01", "a"]), 24 | ("a01b", ["a01b", "01", "b"]), 25 | ("a02a", ["a02a", "02", "a"]), 26 | ("a02b", ["a02b", "02", "b"]), 27 | ("a03a", ["a03a", "03", "a"]), 28 | ("a03b", ["a03b", "03", "b"]), 29 | ]), 30 | ) 31 | 32 | def test_succ(self): 33 | for i, nx in self.SUCC_CASES.items(): 34 | self.assertEqual(succ(i), nx) 35 | 36 | def test_sequence(self): 37 | for fr, to, res in self.SEQUENCE_CASES: 38 | self.assertItemsEqual(res, sequence(fr, to)) 39 | 40 | def test_expand_with_vars(self): 41 | for pattern, results in self.VARS_CASES: 42 | actual = list(expand_pattern_with_vars(pattern)) 43 | 44 | self.assertEqual(len(results), len(actual)) 45 | for i in xrange(len(results)): 46 | self.assertEqual(results[i][0], actual[i][0]) # result pattern 47 | self.assertItemsEqual(results[i][1], actual[i][1]) # result vars 48 | -------------------------------------------------------------------------------- /commands/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import pkgutil 3 | import inspect 4 | from argparse import ArgumentParser, REMAINDER 5 | 6 | 7 | class Command(object): 8 | """Base class for CLI commands 9 | You should override at least run() method 10 | run() - method is being called to do the main job 11 | init_argument_parser(parser) - override this method if you want to accept arguments 12 | NAME - if this property is not None it is used as command name. 13 | Otherwise command name is generated from class name 14 | DESCRIPTION - description that is used in help messages. Consider setting it to something meaningful. 
15 | """ 16 | NAME = None 17 | DESCRIPTION = None 18 | NO_ARGPARSE = False 19 | 20 | def __init__(self): 21 | if self.NAME is None: 22 | self.NAME = self.__class__.__name__.lower() 23 | if self.DESCRIPTION is None: 24 | self.DESCRIPTION = '"%s" has no DESCRIPTION' % (self.NAME,) 25 | self.args = None 26 | self.raw_args = None 27 | 28 | def init_argument_parser(self, parser): 29 | """ 30 | This method is called to configure argument subparser for command 31 | Override it if you need to accept arguments 32 | - parser: argparse.ArgumentParser to fill with arguments 33 | """ 34 | pass 35 | 36 | def run(self): 37 | """ 38 | This is a main method of command. Override it and do all the job here 39 | Command arguments can be read from self.args 40 | The return value from this method will be used as CLI exit code 41 | """ 42 | raise NotImplementedError() 43 | 44 | 45 | def is_a_command_class(obj): 46 | return inspect.isclass(obj) and Command in inspect.getmro(obj) and obj != Command 47 | 48 | 49 | def load_commands_from_module(module): 50 | return [obj for _, obj in inspect.getmembers(module) if is_a_command_class(obj)] 51 | 52 | 53 | def load_commands_from_package(package): 54 | commands = [] 55 | 56 | for modloader, modname, ispkg in pkgutil.iter_modules(package.__path__): 57 | module = modloader.find_module(modname).load_module(modname) 58 | commands.extend(load_commands_from_module(module)) 59 | 60 | if ispkg: 61 | commands.extend(load_commands_from_package(module)) 62 | return commands 63 | 64 | 65 | def load_commands(): 66 | commands = [] 67 | this_module = sys.modules[__name__] 68 | 69 | if this_module.__package__: 70 | commands = load_commands_from_package(sys.modules[this_module.__package__]) 71 | 72 | try: 73 | import plugins.commands 74 | commands.extend(load_commands_from_package(plugins.commands)) 75 | except ImportError: 76 | pass 77 | 78 | commands.extend(load_commands_from_module(this_module)) 79 | return commands 80 | 81 | 82 | def main(): 83 | parser = 
ArgumentParser() 84 | subparsers = parser.add_subparsers( 85 | title='Commands', 86 | help="One of the following commands", 87 | description='use --help to get help on particular command', 88 | metavar="", 89 | ) 90 | for command_class in load_commands(): 91 | command = command_class() 92 | if command.NO_ARGPARSE: 93 | command_parser = subparsers.add_parser( 94 | command.NAME, 95 | help=command.DESCRIPTION, 96 | add_help=False, 97 | prefix_chars=chr(0), # Ugly hack to prevent arguments from being parsed as options 98 | ) 99 | command_parser.add_argument('raw_args', nargs=REMAINDER) 100 | else: 101 | command_parser = subparsers.add_parser(command.NAME, help=command.DESCRIPTION) 102 | command_parser.set_defaults(command=command) 103 | command.init_argument_parser(command_parser) 104 | 105 | args = parser.parse_args() 106 | args.command.args = args 107 | if 'raw_args' in args: 108 | args.command.raw_args = args.raw_args 109 | return args.command.run() -------------------------------------------------------------------------------- /commands/actions.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | from datetime import datetime, timedelta 3 | 4 | DEFAULT_ACTION_LOG_TTL = 86400 * 31 * 6 # approximately half a year 5 | 6 | 7 | class Actions(Command): 8 | 9 | def init_argument_parser(self, parser): 10 | parser.add_argument('action_name', nargs=1, choices=['count', 'cleanup']) 11 | 12 | def run(self): 13 | from app import app 14 | from app.models import ApiAction 15 | ttl = app.config.app.get("ACTION_LOG_TTL", DEFAULT_ACTION_LOG_TTL) 16 | delta = timedelta(seconds=ttl) 17 | min_date = datetime.utcnow() - delta 18 | 19 | action_name = self.args.action_name[0] 20 | ttl_query = {"created_at": {"$lt": min_date}} 21 | 22 | expired = ApiAction.find(ttl_query).count() 23 | 24 | if action_name == 'count': 25 | total = ApiAction.find().count() 26 | print "ApiActions count, total = %d, expired = %d" % (total, 
expired) 27 | return 28 | 29 | if action_name == 'cleanup': 30 | if expired == 0: 31 | print "There's no expired actions to cleanup" 32 | ApiAction.destroy_many(ttl_query) 33 | print "%d expired ApiActions removed" % expired 34 | -------------------------------------------------------------------------------- /commands/check.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | 3 | 4 | class Check(Command): 5 | 6 | def init_argument_parser(self, parser): 7 | parser.add_argument("-f", "--fix", action="store_true", dest="fix", default=False, help="Fix faulty relations") 8 | 9 | def run(self): 10 | from app.models import Group 11 | from app.models.group import GroupNotFound 12 | from app import app 13 | groups = Group.find({}) 14 | for group in groups: 15 | cids = [] 16 | pids = [] 17 | for child_id in group.child_ids: 18 | try: 19 | Group._resolve_group(child_id) 20 | except GroupNotFound: 21 | cids.append(child_id) 22 | app.logger.error("Group %s has faulty child_id %s" % (group.name, child_id)) 23 | for parent_id in group.parent_ids: 24 | try: 25 | Group._resolve_group(parent_id) 26 | except GroupNotFound: 27 | pids.append(parent_id) 28 | app.logger.error("Group %s has faulty parent_id %s" % (group.name, parent_id)) 29 | 30 | if self.args.fix: 31 | for _id in cids: 32 | group.child_ids.remove(_id) 33 | for _id in pids: 34 | group.parent_ids.remove(_id) 35 | if len(cids) + len(pids) > 0: 36 | group.save() 37 | app.logger.info("Group %s has been fixed. 
Children removed %d, parents removed %d" % 38 | (group.name, len(cids), len(pids))) -------------------------------------------------------------------------------- /commands/convert.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | import re 3 | 4 | DELIMITER = re.compile("[:\.]+") 5 | 6 | 7 | def convert_cf(cf, obj): 8 | # eine segments hack 9 | if cf["key"] == "eine:segments": 10 | cf["value"] = [x.strip() for x in cf["value"].split(",")] 11 | key_tokens = DELIMITER.split(cf["key"]) 12 | node = obj 13 | while len(key_tokens) > 1: 14 | token = key_tokens.pop(0) 15 | if token not in node: 16 | node[token] = {} 17 | node = node[token] 18 | token = key_tokens.pop(0) 19 | node[token] = cf["value"] 20 | 21 | 22 | class Convert(Command): 23 | 24 | NAME = "convert" 25 | 26 | def init_argument_parser(self, parser): 27 | parser.add_argument('action', type=str, choices=['custom', 'responsibles']) 28 | 29 | @staticmethod 30 | def convert_custom(): 31 | from app.models import Group, Host 32 | from app import app 33 | 34 | for group in Group.find(): 35 | cnt = 0 36 | app.logger.info("Converting group %s" % group.name) 37 | for cf in group.custom_fields: 38 | cnt += 1 39 | convert_cf(cf, group.local_custom_data) 40 | group.save() 41 | if cnt > 0: 42 | app.logger.info("%d custom fields converted in group %s" % (cnt, group.name)) 43 | 44 | for host in Host.find(): 45 | cnt = 0 46 | app.logger.info("Converting host %s" % host.fqdn) 47 | for cf in host.custom_fields: 48 | cnt += 1 49 | convert_cf(cf, host.local_custom_data) 50 | host.save() 51 | if cnt > 0: 52 | app.logger.info("%d custom fields converted in host %s" % (cnt, host.fqdn)) 53 | 54 | @staticmethod 55 | def convert_responsibles(): 56 | from app import app 57 | from app.models import Host, Group 58 | 59 | for h in Host.find(): 60 | app.logger.debug("Setting host responsibles for host %s" % h.fqdn) 61 | h.reset_responsibles_cache() 62 | 
h.save(skip_callback=True) 63 | 64 | for g in Group.find(): 65 | app.logger.debug("Setting group responsibles for group %s" % g.name) 66 | g.reset_responsibles_cache() 67 | g.save(skip_callback=True) 68 | 69 | def run(self): 70 | if self.args.action == 'custom': 71 | return self.convert_custom() 72 | elif self.args.action == 'responsibles': 73 | return self.convert_responsibles() 74 | -------------------------------------------------------------------------------- /commands/index.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | from library.engine.utils import get_modules 3 | import importlib 4 | import os.path 5 | 6 | 7 | class Index(Command): 8 | 9 | def init_argument_parser(self, parser): 10 | parser.add_argument("-w", "--overwrite", dest="overwrite", action="store_true", default=False, 11 | help="Overwrite existing indexes in case of conflicts") 12 | 13 | def run(self): 14 | from app import app 15 | app.logger.info("Creating indexes") 16 | models_directory = os.path.join(app.BASE_DIR, "app/models") 17 | modules = [x for x in get_modules(models_directory) if x != "storable_model"] 18 | for mname in modules: 19 | module = importlib.import_module("app.models.%s" % mname) 20 | for attr in dir(module): 21 | if attr.startswith("__") or attr == 'StorableModel': 22 | continue 23 | obj = getattr(module, attr) 24 | if hasattr(obj, "ensure_indexes"): 25 | app.logger.info("Creating indexes for %s, collection %s" % (attr, obj.collection)) 26 | obj.ensure_indexes(True, self.args.overwrite) 27 | from library.db import db 28 | app.logger.info("Creating sessions indexes") 29 | db.conn["sessions"].create_index("sid", unique=True, sparse=False) 30 | db.conn["sessions"].create_index("expiration") 31 | -------------------------------------------------------------------------------- /commands/run.py: -------------------------------------------------------------------------------- 1 | from commands import 
Command 2 | 3 | class Run(Command): 4 | def run(self): 5 | from app import app 6 | app.run(debug=True) -------------------------------------------------------------------------------- /commands/sessions.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | 3 | 4 | class Sessions(Command): 5 | 6 | def init_argument_parser(self, parser): 7 | parser.add_argument("action", type=str, nargs=1, choices=["cleanup", "count"]) 8 | 9 | def run(self): 10 | from library.db import db 11 | from datetime import datetime 12 | action = self.args.action[0] 13 | if action == "count": 14 | total = db.ro_conn["sessions"].find().count() 15 | expired = db.ro_conn["sessions"].find({"expiration": {"$lt": datetime.now()}}).count() 16 | print "Total number of sessions: %d, expired: %d" % (total, expired) 17 | if expired > 0: 18 | print "Use to remove old sessions manually" 19 | 20 | elif action == "cleanup": 21 | print "Starting sessions clean up process..." 22 | count = db.cleanup_sessions() 23 | if count == 0: 24 | print "There's no expired sessions to clean up" 25 | else: 26 | print "%d expired sessions have been cleaned up" % count 27 | -------------------------------------------------------------------------------- /commands/shell.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | 3 | 4 | class Shell(Command): 5 | 6 | DESCRIPTION = 'Run shell (using IPython if available)' 7 | 8 | def run(self): 9 | from app.models import ApiAction, Datacenter, Group, Host, WorkGroup, Token, User, NetworkGroup 10 | try: 11 | # trying IPython if installed... 12 | from IPython import embed 13 | embed() 14 | except ImportError: 15 | # ... 
or python default console if not 16 | try: 17 | # optional readline interface for history if installed 18 | import readline 19 | except ImportError: 20 | pass 21 | import code 22 | variables = globals().copy() 23 | variables.update(locals()) 24 | shell = code.InteractiveConsole(variables) 25 | shell.interact() -------------------------------------------------------------------------------- /commands/test.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | from unittest import main 3 | import logging 4 | 5 | 6 | class Test(Command): 7 | 8 | NO_ARGPARSE = True 9 | 10 | def run(self, *args, **kwargs): 11 | from app.models import WorkGroup, Group, Host, Datacenter, User, ApiAction, Token, NetworkGroup 12 | from app import app 13 | from app import tests 14 | app.logger.level = logging.ERROR 15 | 16 | WorkGroup._collection = 'test_work_groups' 17 | Group._collection = 'test_groups' 18 | Host._collection = 'test_hosts' 19 | Datacenter._collection = 'test_datacenters' 20 | User._collection = 'test_users' 21 | ApiAction._collection = 'test_api_actions' 22 | Token._collection = 'test_tokens' 23 | NetworkGroup._collection = 'test_network_groups' 24 | app.action_logging = False 25 | 26 | argv = ['micro.py test'] + self.raw_args 27 | test_program = main(argv=argv, module=tests, exit=False) 28 | if test_program.result.wasSuccessful(): 29 | return 0 30 | else: 31 | return 1 32 | -------------------------------------------------------------------------------- /commands/work_groups.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | from copy import deepcopy 3 | 4 | 5 | class WorkGroups(Command): 6 | 7 | NAME = "wg" 8 | 9 | def run(self): 10 | from app.models import WorkGroup 11 | from library.db import db 12 | 13 | wgmap = {} 14 | projects = db.conn.project.find({}) 15 | for p in projects: 16 | project_id = p["_id"] 17 | attrs = deepcopy(p) 18 
| del(attrs["_id"]) 19 | wg = WorkGroup.find_one({"name": p["name"]}) 20 | if wg is None: 21 | print "Creating workgroup %s" % p["name"] 22 | wg = WorkGroup(**attrs) 23 | wg.save() 24 | wgmap[str(project_id)] = wg._id 25 | 26 | groups = db.conn.groups.find({}) 27 | for g in groups: 28 | if "project_id" not in g: 29 | print "Group %s doesn't have project_id, thus already converted" % g["name"] 30 | continue 31 | print "Converting group %s" % g["name"] 32 | g_project_id = g["project_id"] 33 | wg_id = wgmap[str(g_project_id)] 34 | db.conn.groups.update( 35 | {"_id": g["_id"]}, 36 | { 37 | "$set": {"work_group_id": wg_id}, 38 | "$unset": {"project_id": ""} 39 | } 40 | ) 41 | 42 | -------------------------------------------------------------------------------- /config/development/app.py: -------------------------------------------------------------------------------- 1 | CSRF_PROTECTION = False 2 | DOCUMENTS_PER_PAGE = 20 3 | SECRET_KEY = "some_secret_key" 4 | FLASK_APP_SETTINGS = { 5 | "SESSION_COOKIE_NAME": "conductor_sid" 6 | } 7 | PORT = 3000 8 | STATIC_FOLDER = "static" 9 | AUTHORIZER = "LocalAuthorizer" 10 | ACTION_LOGGING = True 11 | 12 | SECURITY_KEY_TTL = 600 13 | 14 | SESSIONS_AUTO_CLEANUP = True 15 | SESSIONS_AUTO_CLEANUP_RAND_TRIGGER = 0.05 16 | 17 | GRAVATAR_PATH = "https://sys.mail.ru/avatar/internal" 18 | 19 | DEFAULT_GROUP_POSTFIX = "_unknown" -------------------------------------------------------------------------------- /config/development/cache.py: -------------------------------------------------------------------------------- 1 | # MEMCACHE_BACKENDS = [ 2 | # 'inet6:[cache1.example.com]:11211', 3 | # 'inet6:[cache2.example.com]:11211', 4 | # 'inet6:[cache3.example.com]:11211' 5 | # ] 6 | -------------------------------------------------------------------------------- /config/development/db.py: -------------------------------------------------------------------------------- 1 | MONGO = { 2 | "uri": "mongodb://localhost", 3 | "pymongo_extra": { 4 | 
"connectTimeoutMS": 1100, 5 | "socketKeepAlive": True, 6 | }, 7 | "dbname": "conductor_dev", 8 | } 9 | -------------------------------------------------------------------------------- /config/development/log.py: -------------------------------------------------------------------------------- 1 | LOG_LEVEL = "debug" 2 | LOG_FORMAT = "[%(asctime)s] %(levelname)s\t%(module)s:%(lineno)d %(message)s" 3 | LOG_FILE = "application.log" 4 | ACTION_LOG_FILE = "action.log" 5 | DEBUG = True 6 | 7 | LOG_TIMINGS = True 8 | -------------------------------------------------------------------------------- /config/production/app.py: -------------------------------------------------------------------------------- 1 | CSRF_PROTECTION = False 2 | DOCUMENTS_PER_PAGE = 20 3 | SECRET_KEY = "some_secret_key" 4 | FLASK_APP_SETTINGS = { 5 | "SESSION_COOKIE_NAME": "conductor_sid" 6 | } 7 | PORT = 3000 8 | STATIC_FOLDER = "static" 9 | ACTION_LOGGING = True 10 | 11 | SESSIONS_AUTO_CLEANUP = True 12 | SESSIONS_AUTO_CLEANUP_RAND_TRIGGER = 0.05 13 | 14 | DEFAULT_GROUP_POSTFIX = "_unknown" 15 | 16 | SECURITY_KEY_TTL = 600 17 | 18 | -------------------------------------------------------------------------------- /config/production/cache.py: -------------------------------------------------------------------------------- 1 | # MEMCACHE_BACKENDS = [ 2 | # 'inet6:[cache1.example.com]:11211', 3 | # 'inet6:[cache2.example.com]:11211', 4 | # 'inet6:[cache3.example.com]:11211' 5 | # ] 6 | -------------------------------------------------------------------------------- /config/production/db.py: -------------------------------------------------------------------------------- 1 | MONGO = { 2 | "uri": "mongodb://localhost", 3 | "pymongo_extra": { 4 | "connectTimeoutMS": 1100, 5 | "socketKeepAlive": True, 6 | }, 7 | "dbname": "inventoree", 8 | } 9 | -------------------------------------------------------------------------------- /config/production/log.py: 
-------------------------------------------------------------------------------- 1 | LOG_LEVEL = "info" 2 | LOG_FORMAT = "[%(asctime)s] %(levelname)s\t%(module)s:%(lineno)d %(message)s" 3 | LOG_TIMINGS = True 4 | LOG_FILE = "/var/log/inventoree/app.log" 5 | ACTION_LOG_FILE = "/var/log/inventoree/action.log" 6 | DEBUG = False 7 | -------------------------------------------------------------------------------- /config/testing/app.py: -------------------------------------------------------------------------------- 1 | CSRF_PROTECTION = False 2 | DOCUMENTS_PER_PAGE = 20 3 | SECRET_KEY = "some_secret_key" 4 | FLASK_APP_SETTINGS = {} 5 | PORT = 3000 6 | STATIC_FOLDER = "static" 7 | ACTION_LOGGING = True 8 | DEFAULT_GROUP_POSTFIX = "_unknown" -------------------------------------------------------------------------------- /config/testing/cache.py: -------------------------------------------------------------------------------- 1 | # MEMCACHE_BACKENDS = [ 2 | # 'inet6:[cache1.example.com]:11211', 3 | # 'inet6:[cache2.example.com]:11211', 4 | # 'inet6:[cache3.example.com]:11211' 5 | # ] 6 | -------------------------------------------------------------------------------- /config/testing/db.py: -------------------------------------------------------------------------------- 1 | MONGO = { 2 | "uri": "mongodb://localhost", 3 | "pymongo_extra": { 4 | "connectTimeoutMS": 1100, 5 | "socketKeepAlive": True, 6 | }, 7 | "dbname": "conductor_test", 8 | } 9 | -------------------------------------------------------------------------------- /config/testing/log.py: -------------------------------------------------------------------------------- 1 | LOG_LEVEL = "info" 2 | LOG_FORMAT = "[%(asctime)s] %(levelname)s\t%(module)s:%(lineno)d %(message)s" 3 | DEBUG = False -------------------------------------------------------------------------------- /extconf/logrotate.conf: -------------------------------------------------------------------------------- 1 | "/var/log/inventoree/*.log" { 2 | 
daily 3 | rotate 7 4 | copytruncate 5 | delaycompress 6 | compress 7 | notifempty 8 | missingok 9 | postrotate 10 | systemctl restart eine-tftp 11 | endscript 12 | } -------------------------------------------------------------------------------- /extconf/nginx/nginx.conf: -------------------------------------------------------------------------------- 1 | server { 2 | listen 80; 3 | server_name inventoree.example.com; 4 | 5 | root /var/lib/inventoree/webui/dist; 6 | index index.html; 7 | 8 | location /api { 9 | if ($request_method = OPTIONS) { 10 | add_header Access-Control-Allow-Origin *; 11 | add_header Access-Control-Allow-Headers authorization; 12 | add_header Access-Control-Allow-Headers x-api-auth-token; 13 | return 200; 14 | } 15 | uwsgi_pass unix:///run/uwsgi/inventoree-uwsgi.sock; 16 | include /etc/nginx/uwsgi_params; 17 | add_header Access-Control-Allow-Origin *; 18 | } 19 | 20 | location /oauth_callback { 21 | uwsgi_pass unix:///run/uwsgi/inventoree-uwsgi.sock; 22 | include /etc/nginx/uwsgi_params; 23 | } 24 | 25 | } -------------------------------------------------------------------------------- /extconf/uwsgi/inventoree.ini: -------------------------------------------------------------------------------- 1 | [uwsgi] 2 | plugins = python 3 | virtualenv = /usr/lib/inventoree/.venv 4 | chdir = /usr/lib/inventoree 5 | 6 | uid = uwsgi 7 | gid = uwsgi 8 | master = True 9 | processes = 4 10 | threads = 2 11 | socket = /run/uwsgi/inventoree-uwsgi.sock 12 | chmod-socket = 777 13 | vacuum = True 14 | 15 | env = MICROENG_ENV=production 16 | module = wsgi 17 | callable = app_callable -------------------------------------------------------------------------------- /library/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/library/__init__.py -------------------------------------------------------------------------------- 
/library/db.py: -------------------------------------------------------------------------------- 1 | from app import app 2 | from pymongo import MongoClient 3 | from pymongo.errors import ServerSelectionTimeoutError 4 | from bson.objectid import ObjectId, InvalidId 5 | from time import sleep 6 | from datetime import datetime 7 | from random import random 8 | 9 | MONGO_RETRIES = 6 10 | MONGO_RETRIES_RO = 6 11 | RETRY_SLEEP = 3 # 3 seconds 12 | 13 | __mongo_retries = MONGO_RETRIES 14 | __mongo_retries_ro = MONGO_RETRIES_RO 15 | 16 | 17 | def intercept_mongo_errors_rw(func): 18 | def wrapper(*args, **kwargs): 19 | global __mongo_retries 20 | try: 21 | result = func(*args, **kwargs) 22 | except ServerSelectionTimeoutError: 23 | app.logger.error("ServerSelectionTimeout in db module for read/write operations") 24 | __mongo_retries -= 1 25 | if __mongo_retries == MONGO_RETRIES/2: 26 | app.logger.error("Mongo connection %d retries passed with no result, " 27 | "trying to reinstall connection" % (MONGO_RETRIES/2)) 28 | db_obj = args[0] 29 | db_obj.reset_conn() 30 | if __mongo_retries == 0: 31 | __mongo_retries = MONGO_RETRIES 32 | app.logger.error("Mongo connection %d retries more passed with no result, giving up" % (MONGO_RETRIES/2)) 33 | return None 34 | else: 35 | sleep(RETRY_SLEEP) 36 | return wrapper(*args, **kwargs) 37 | return result 38 | return wrapper 39 | 40 | 41 | def intercept_mongo_errors_ro(func): 42 | def wrapper(*args, **kwargs): 43 | global __mongo_retries_ro 44 | try: 45 | result = func(*args, **kwargs) 46 | except ServerSelectionTimeoutError: 47 | app.logger.error("ServerSelectionTimeout in db module for read-only operations") 48 | __mongo_retries_ro -= 1 49 | if __mongo_retries_ro == MONGO_RETRIES_RO/2: 50 | app.logger.error("Mongo readonly connection %d retries passed, switching " 51 | "readonly operations to read-write socket" % (MONGO_RETRIES_RO/2)) 52 | db_obj = args[0] 53 | db_obj._ro_conn = db_obj.conn 54 | if __mongo_retries_ro == 0: 55 | 
__mongo_retries_ro = MONGO_RETRIES_RO 56 | app.logger.error("Mongo connection %d retries more passed with no result, giving up" % (MONGO_RETRIES/2)) 57 | return None 58 | else: 59 | sleep(RETRY_SLEEP) 60 | return wrapper(*args, **kwargs) 61 | return result 62 | return wrapper 63 | 64 | 65 | class IncompleteObject(Exception): 66 | pass 67 | 68 | class QueryPermissionsUpdateFailed(Exception): 69 | pass 70 | 71 | class ObjectsCursor(object): 72 | 73 | def __init__(self, cursor, obj_class): 74 | self.obj_class = obj_class 75 | self.cursor = cursor 76 | 77 | def all(self): 78 | return list(self) 79 | 80 | def limit(self, *args, **kwargs): 81 | self.cursor.limit(*args, **kwargs) 82 | return self 83 | 84 | def skip(self, *args, **kwargs): 85 | self.cursor.skip(*args, **kwargs) 86 | return self 87 | 88 | def sort(self, *args, **kwargs): 89 | self.cursor.sort(*args, **kwargs) 90 | return self 91 | 92 | def __iter__(self): 93 | for item in self.cursor: 94 | yield self.obj_class(**item) 95 | 96 | def __getitem__(self, item): 97 | return self.obj_class(**self.cursor.__getitem__(item)) 98 | 99 | def __getattr__(self, item): 100 | return getattr(self.cursor, item) 101 | 102 | 103 | class DB(object): 104 | def __init__(self): 105 | self._conn = None 106 | self._ro_conn = None 107 | 108 | def reset_conn(self): 109 | self._conn = None 110 | 111 | def reset_ro_conn(self): 112 | self._ro_conn = None 113 | 114 | def init_ro_conn(self): 115 | app.logger.info("Creating a read-only mongo connection") 116 | client_kwargs = app.config.db.get("pymongo_extra", {}) 117 | database = app.config.db["MONGO"]['dbname'] 118 | if "uri_ro" in app.config.db["MONGO"]: 119 | ro_client = MongoClient(host=app.config.db["MONGO"]["uri_ro"], **client_kwargs) 120 | # AUTHENTICATION 121 | if 'username' in app.config.db["MONGO"] and 'password' in app.config.db["MONGO"]: 122 | username = app.config.db["MONGO"]["username"] 123 | password = app.config.db["MONGO"]['password'] 124 | 
ro_client[database].authenticate(username, password) 125 | self._ro_conn = ro_client[database] 126 | else: 127 | app.logger.info("No uri_ro option found in configuration, falling back to read/write default connection") 128 | self._ro_conn = self.conn 129 | 130 | def init_conn(self): 131 | app.logger.info("Creating a read/write mongo connection") 132 | client_kwargs = app.config.db.get("pymongo_extra", {}) 133 | client = MongoClient(host=app.config.db["MONGO"]["uri"], **client_kwargs) 134 | database = app.config.db["MONGO"]['dbname'] 135 | 136 | # AUTHENTICATION 137 | if 'username' in app.config.db["MONGO"] and 'password' in app.config.db["MONGO"]: 138 | username = app.config.db["MONGO"]["username"] 139 | password = app.config.db["MONGO"]['password'] 140 | client[database].authenticate(username, password) 141 | self._conn = client[database] 142 | 143 | @property 144 | def conn(self): 145 | if self._conn is None: 146 | self.init_conn() 147 | return self._conn 148 | 149 | @property 150 | def ro_conn(self): 151 | if self._ro_conn is None: 152 | self.init_ro_conn() 153 | return self._ro_conn 154 | 155 | @intercept_mongo_errors_ro 156 | def get_obj(self, cls, collection, query): 157 | if type(query) is not dict: 158 | try: 159 | query = { '_id': ObjectId(query) } 160 | except InvalidId: 161 | pass 162 | data = self.ro_conn[collection].find_one(query) 163 | if data: 164 | return cls(**data) 165 | 166 | @intercept_mongo_errors_ro 167 | def get_obj_id(self, collection, query): 168 | return self.ro_conn[collection].find_one(query, projection=())['_id'] 169 | 170 | @intercept_mongo_errors_ro 171 | def get_objs(self, cls, collection, query, **kwargs): 172 | cursor = self.ro_conn[collection].find(query, **kwargs) 173 | return ObjectsCursor(cursor, cls) 174 | 175 | def get_objs_by_field_in(self, cls, collection, field, values, **kwargs): 176 | return self.get_objs( 177 | cls, 178 | collection, 179 | { 180 | field: { 181 | '$in': values, 182 | }, 183 | }, 184 | **kwargs 185 | ) 
186 | 187 | @intercept_mongo_errors_rw 188 | def save_obj(self, obj): 189 | if obj.is_new: 190 | data = obj.to_dict(include_restricted=True) # object to_dict() method should always return all fields 191 | del(data["_id"]) # although with the new object we shouldn't pass _id=null to mongo 192 | inserted_id = self.conn[obj.collection].insert_one(data).inserted_id 193 | obj._id = inserted_id 194 | else: 195 | self.conn[obj.collection].replace_one({'_id': obj._id}, obj.to_dict(include_restricted=True), upsert=True) 196 | 197 | @intercept_mongo_errors_rw 198 | def delete_obj(self, obj): 199 | if obj.is_new: 200 | return 201 | self.conn[obj.collection].delete_one({'_id': obj._id}) 202 | 203 | @intercept_mongo_errors_rw 204 | def delete_query(self, collection, query): 205 | return self.conn[collection].delete_many(query) 206 | 207 | @intercept_mongo_errors_rw 208 | def update_query(self, collection, query, update): 209 | return self.conn[collection].update_many(query, update) 210 | 211 | # SESSIONS 212 | 213 | @intercept_mongo_errors_ro 214 | def get_session(self, sid, collection='sessions'): 215 | return self.ro_conn[collection].find_one({ 'sid': sid }) 216 | 217 | @intercept_mongo_errors_rw 218 | def update_session(self, sid, data, expiration, collection='sessions'): 219 | self.conn[collection].update({ 'sid': sid }, { 'sid': sid, 'data': data, 'expiration': expiration }, True) 220 | if app.config.app.get("SESSIONS_AUTO_CLEANUP", False): 221 | rtrigger = app.config.app.get("SESSIONS_AUTO_CLEANUP_RAND_TRIGGER", 0.05) 222 | if random() < rtrigger: 223 | app.logger.info("Cleaning up sessions") 224 | self.cleanup_sessions() 225 | 226 | @intercept_mongo_errors_rw 227 | def cleanup_sessions(self, collection='sessions'): 228 | return self.conn[collection].remove({'expiration': {'$lt': datetime.now() }})["n"] 229 | 230 | 231 | db = DB() 232 | -------------------------------------------------------------------------------- /library/engine/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/library/engine/__init__.py -------------------------------------------------------------------------------- /library/engine/action_log.py: -------------------------------------------------------------------------------- 1 | from flask import g 2 | import functools 3 | import json 4 | import copy 5 | from library.engine.errors import ApiError, handle_other_errors, handle_api_error 6 | 7 | action_types = [] 8 | 9 | 10 | def logged_action(action_type): 11 | global action_types 12 | action_types.append(action_type) 13 | action_types.sort() 14 | 15 | def log_action_decorator(func): 16 | @functools.wraps(func) 17 | def wrapper(*args, **kwargs): 18 | from app import app 19 | if not app.action_logging: 20 | return func(*args, **kwargs) 21 | 22 | from app.models import ApiAction 23 | from flask import g, request 24 | if g.user is None: 25 | username = "_unauthorized_" 26 | else: 27 | username = g.user.username 28 | if request.json is not None: 29 | action_args = copy.deepcopy(request.json) 30 | else: 31 | action_args = {} 32 | 33 | arg_keys = action_args.keys() 34 | 35 | # removing plain text passwords from action log 36 | for k in arg_keys: 37 | if k.startswith("password"): 38 | del(action_args[k]) 39 | 40 | action = ApiAction( 41 | username=username, 42 | action_type=action_type, 43 | kwargs=kwargs, 44 | params=action_args, 45 | status="requested" 46 | ) 47 | app.logger.debug("action '%s' created in request context" % action.action_type) 48 | 49 | action.status = "error" 50 | try: 51 | response = func(*args, **kwargs) 52 | if 100 <= response.status_code < 300: 53 | action.status = "success" 54 | else: 55 | app.logger.error("Action status set to error, response.data is following") 56 | try: 57 | data = json.loads(response.data) 58 | if "errors" in data: 59 | action.errors = data["errors"] 60 | except: 
61 | pass 62 | except ApiError as ae: 63 | action.status = "error" 64 | response = handle_api_error(ae) 65 | data = json.loads(response.data) 66 | action.errors = data["errors"] 67 | app.logger.debug("action '%s' status updated to %s" % (action.action_type, action.status)) 68 | action.save() 69 | raise 70 | except Exception as e: 71 | action.status = "error" 72 | response = handle_other_errors(e) 73 | data = json.loads(response.data) 74 | action.errors = data["errors"] 75 | app.logger.debug("action '%s' status updated to %s" % (action.action_type, action.status)) 76 | action.save() 77 | raise 78 | app.logger.debug("action '%s' status updated to %s" % (action.action_type, action.status)) 79 | action.save() 80 | return response 81 | return wrapper 82 | return log_action_decorator 83 | -------------------------------------------------------------------------------- /library/engine/baseapp.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path 3 | import sys 4 | import importlib 5 | import logging 6 | import time 7 | from logging.handlers import WatchedFileHandler 8 | from flask import Flask, request, session 9 | from datetime import timedelta 10 | from collections import namedtuple 11 | from library.engine.utils import get_py_files, uuid4_string 12 | from library.engine.errors import ApiError, handle_api_error, handle_other_errors 13 | from library.engine.json_encoder import MongoJSONEncoder 14 | from library.mongo_session import MongoSessionInterface 15 | from werkzeug.contrib.cache import MemcachedCache, SimpleCache 16 | 17 | ENVIRONMENT_TYPES = ( 18 | "development", 19 | "testing", 20 | "production", 21 | ) 22 | 23 | DEFAULT_ENVIRONMENT_TYPE = "development" 24 | DEFAULT_SESSION_EXPIRATION_TIME = 86400 * 7 * 2 # 2 weeks 25 | 26 | 27 | class BaseApp(object): 28 | 29 | DEFAULT_LOG_FORMAT = "[%(asctime)s] %(levelname)s %(filename)s:%(lineno)d %(message)s" 30 | ACTION_LOG_FORMAT = "[%(asctime)s] %(message)s" 
DEFAULT_LOG_LEVEL = "debug"
CTRL_MODULES_PREFIX = "app.controllers"

def __init__(self):
    """Bootstrap the application.

    Reads the config for the detected environment, then wires up
    logging, plugins, the flask app, authorizer, sessions, request
    hooks, cache and auth-token expiration, in that order (later
    steps rely on the logger and config being ready).
    """
    import inspect
    class_file = inspect.getfile(self.__class__)
    self.APP_DIR = os.path.dirname(os.path.abspath(class_file))
    self.BASE_DIR = os.path.abspath(os.path.join(self.APP_DIR, "../"))
    # detecting environment type
    self.envtype = os.environ.get("MICROENG_ENV")
    self.action_types = []
    self.auth_token_ttl = None
    self.auth_token_ttr = None
    if self.envtype not in ENVIRONMENT_TYPES:
        self.envtype = DEFAULT_ENVIRONMENT_TYPE
    self.__read_config()
    self.test_config()
    self.after_configured()
    self.__prepare_logger()
    self.__load_plugins()
    self.__prepare_flask()
    self.__set_authorizer()
    self.__set_session_expiration()
    self.__set_request_id()
    self.__set_request_times()
    self.__set_cache()
    self.__set_token_expiration()
    self.__init_plugins()

def __set_token_expiration(self):
    """Configure auth token TTL and TTR (time-to-refresh) from config.

    Bug fix: the TTR default used to be computed as ``ttl * 0.9``
    outside any try block, so a non-numeric AUTH_TOKEN_TTL crashed the
    app on startup with an uncaught TypeError instead of merely turning
    token expiration off.
    """
    ttl = self.config.app.get("AUTH_TOKEN_TTL")
    if ttl is None:
        # no TTL configured: token expiration stays disabled
        return
    try:
        self.auth_token_ttl = timedelta(seconds=ttl)
    except TypeError:
        self.logger.error("AUTH_TOKEN_TTL configuration is invalid, token expiration is turned off")

    ttr = self.config.app.get("AUTH_TOKEN_TTR")
    if ttr is None:
        try:
            # default TTR: refresh tokens at 90% of their lifetime
            ttr = ttl * 0.9
        except TypeError:
            # TTL was invalid and no explicit TTR given: nothing to derive
            ttr = None
    try:
        self.auth_token_ttr = timedelta(seconds=ttr)
    except TypeError:
        self.logger.error("AUTH_TOKEN_TTR configuration is invalid, token expiration is turned off")

def __set_cache(self):
    """Use memcached when MEMCACHE_BACKENDS is configured, otherwise an
    in-process SimpleCache (suitable for dev/testing only)."""
    self.logger.debug("Setting up the cache")
    if hasattr(self.config, 'cache') and "MEMCACHE_BACKENDS" in self.config.cache:
        self.cache = MemcachedCache(self.config.cache["MEMCACHE_BACKENDS"])
    else:
        self.cache = SimpleCache()
def __init_plugins(self):
    """Run the main() entrypoint of every loaded "extend" plugin.

    Bug fix: the original used a bare ``except:`` around getattr, which
    swallowed *any* exception (including KeyboardInterrupt/SystemExit),
    not just a missing attribute. The three-argument getattr expresses
    the intent directly: skip plugins that do not define main().
    """
    for module_name, module in self.plugins["extend"].iteritems():
        main = getattr(module, "main", None)
        if main is None:
            self.logger.error("no main() function found in plugin %s, skipping" % module_name)
            continue
        main(self)
def __set_session_expiration(self):
    # SESSION_EXPIRATION_TIME is in seconds; defaults to two weeks
    e_time = self.config.app.get("SESSION_EXPIRATION_TIME", DEFAULT_SESSION_EXPIRATION_TIME)

    @self.flask.before_request
    def session_expiration():
        # mark every session permanent so the lifetime below applies
        session.permanent = True
        self.flask.permanent_session_lifetime = timedelta(seconds=e_time)

def __set_request_id(self):
    # attach a unique id to every request, used for log correlation
    @self.flask.before_request
    def add_request_id():
        if not hasattr(request, "id"):
            setattr(request, "id", uuid4_string())

def __set_request_times(self):
    # when LOG_TIMINGS is enabled, log the wall-clock duration of each request
    if self.config.log.get("LOG_TIMINGS"):
        @self.flask.before_request
        def add_request_started_time():
            setattr(request, "started", time.time())

        @self.flask.after_request
        def add_request_time_logging(response):
            dt = time.time() - request.started
            self.logger.info("%s completed in %.3fs" % (request.path, dt))
            return response

def __prepare_flask(self):
    """Create and configure the Flask application instance."""
    self.logger.debug("Creating flask app")
    static_folder = self.config.app.get("STATIC", "static")
    flask_app_settings = self.config.app.get("FLASK_APP_SETTINGS", {})
    # static folder is resolved relative to the project base directory
    static_folder = os.path.abspath(os.path.join(self.BASE_DIR, static_folder))
    self.flask = Flask(__name__, static_folder=static_folder)

    self.logger.debug("Applying Flask application settings")
    for k, v in flask_app_settings.items():
        self.logger.debug(" %s: %s" % (k, v))
        self.flask.config[k] = v

    self.logger.debug("Setting JSON Encoder")
    self.flask.json_encoder = MongoJSONEncoder
    self.logger.debug("Setting sessions interface")
    self.flask.session_interface = MongoSessionInterface(collection_name='sessions')
    # NOTE(review): _register_error_handler is a private Flask API —
    # confirm it still exists before upgrading the pinned Flask version
    self.flask._register_error_handler(None, ApiError, handle_api_error)
    self.flask._register_error_handler(None, Exception, handle_other_errors)
    self.configure_routes()

def configure_routes(self):
    # hook for subclasses to register url rules / blueprints; default no-op
    pass
def __read_config(self):
    """Read and execute config/<envtype>/*.py, exposing each file as a
    named section on self.config (e.g. config/production/db.py becomes
    ``self.config.db``)."""
    # reading and compiling config files
    config_directory = os.path.abspath(os.path.join(self.BASE_DIR, 'config', self.envtype))
    config_files = get_py_files(config_directory)
    # strip the ".py" suffix to get section names
    module_names = [x[:-3] for x in config_files]
    data = {}
    for i, filename in enumerate(config_files):
        full_filename = os.path.join(config_directory, filename)
        module_name = module_names[i]
        data[module_name] = {}
        with open(full_filename) as f:
            text = f.read()
            code = compile(text, filename, 'exec')
            # config files are trusted project code, executed into an
            # isolated per-file namespace dict
            exec(code, data[module_name])
            # drop the builtins that exec() injects into the namespace
            del(data[module_name]["__builtins__"])
    # expose sections as attributes via an ad-hoc namedtuple
    self.config = namedtuple('Configuration', data.keys())(*data.values())
def test_config(self):
    """Hook for subclasses to validate configuration; default no-op."""
    pass

def after_configured(self):
    """Hook called right after configs are loaded; default no-op."""
    pass

# shortcut method
def run(self, **kwargs):
    """Proxy to Flask's built-in development server."""
    self.flask.run(**kwargs)
"%s:%s(%s)" % (pref, funcname, arguments) 22 | return key, cached_call 23 | 24 | 25 | def cached_function(cache_key_prefix=DEFAULT_CACHE_PREFIX, cache_timeout=DEFAULT_CACHE_TIMEOUT, positive_only=False): 26 | def cache_decorator(func): 27 | @functools.wraps(func) 28 | def wrapper(*args, **kwargs): 29 | from app import app 30 | cache_key, cached_call = _get_cache_key(cache_key_prefix, func.__name__, args, kwargs) 31 | t1 = datetime.now() 32 | 33 | if app.cache.has(cache_key): 34 | value = app.cache.get(cache_key) 35 | app.logger.debug("Cache HIT %s (%.3f seconds)" % (cached_call, (datetime.now() - t1).total_seconds())) 36 | else: 37 | value = func(*args, **kwargs) 38 | if value or not positive_only: 39 | app.cache.set(cache_key, value, timeout=cache_timeout) 40 | app.logger.debug("Cache MISS %s (%.3f seconds)" % (cached_call, (datetime.now() - t1).total_seconds())) 41 | return value 42 | return wrapper 43 | return cache_decorator 44 | 45 | 46 | def check_cache(): 47 | from app import app 48 | from hashlib import md5 49 | from random import randint 50 | k = md5(str(randint(0, 1000000))).hexdigest() 51 | v = md5(str(randint(0, 1000000))).hexdigest() 52 | app.cache.set(k, v) 53 | if app.cache.get(k) != v: 54 | return False 55 | else: 56 | app.cache.delete(k) 57 | return True 58 | 59 | 60 | def request_time_cache(cache_key_prefix=DEFAULT_CACHE_PREFIX): 61 | """ 62 | Decorator used for caching data during one api request. 63 | It's useful while some "list something" handlers with a number of cross-references generate 64 | many repeating database requests which are known to generate the same response during the api request. 65 | I.e. list of 20 hosts included in the same group and inheriting the same set of tags/custom fields 66 | may produce 20 additional db requests and 20 requests for each parent group recursively. This may be fixed 67 | by caching db responses in flask "g" store. 
68 | """ 69 | def cache_decorator(func): 70 | @functools.wraps(func) 71 | def wrapper(*args, **kwargs): 72 | from app import app 73 | from library.db import ObjectsCursor 74 | try: 75 | request_id = request.id 76 | except (RuntimeError, AttributeError): 77 | # cache only if request id is available 78 | return func(*args, **kwargs) 79 | cache_key, cached_call = _get_cache_key(cache_key_prefix, func.__name__, args, kwargs) 80 | t1 = datetime.now() 81 | 82 | if not hasattr(g, "_request_local_cache"): 83 | g._request_local_cache = {} 84 | 85 | if cache_key not in g._request_local_cache: 86 | value = func(*args, **kwargs) 87 | g._request_local_cache[cache_key] = value 88 | ts = (datetime.now() - t1).total_seconds() 89 | app.logger.debug("RequestTimeCache %s MISS %s (%.3f seconds)" % (request_id, cache_key, ts)) 90 | else: 91 | value = g._request_local_cache[cache_key] 92 | if type(value) == ObjectsCursor: 93 | value.cursor.rewind() 94 | ts = (datetime.now() - t1).total_seconds() 95 | app.logger.debug("RequestTimeCache %s HIT %s (%.3f seconds)" % (request_id, cache_key, ts)) 96 | return value 97 | return wrapper 98 | return cache_decorator 99 | 100 | 101 | def _get_custom_data_cache_key(obj): 102 | from app import app 103 | try: 104 | model = type(obj).__name__ 105 | model_id = obj._id 106 | return "%s.%s.custom_data" % (model, model_id) 107 | except Exception as e: 108 | app.logger.error("error generating custom_data cache key: %s" % e) 109 | return None 110 | 111 | 112 | def cache_custom_data(func): 113 | """ 114 | cache_custom_data decorator is used to cache merged custom_data _forever_. 
115 | invalidating custom data cache should be done using recursive calls to invalidate_custom_data(obj) 116 | """ 117 | 118 | @functools.wraps(func) 119 | def wrapper(self): 120 | from app import app 121 | cache_key = _get_custom_data_cache_key(self) 122 | if cache_key is None: 123 | return func(self) 124 | 125 | t1 = datetime.now() 126 | if app.cache.has(cache_key): 127 | value = app.cache.get(cache_key) 128 | app.logger.debug("%s HIT (%.3f seconds)" % (cache_key, (datetime.now() - t1).total_seconds())) 129 | else: 130 | value = func(self) 131 | app.cache.set(cache_key, value) 132 | app.logger.debug("%s MISS (%.3f seconds)" % (cache_key, (datetime.now() - t1).total_seconds())) 133 | return value 134 | 135 | return wrapper 136 | 137 | 138 | def invalidate_custom_data(obj): 139 | from app import app 140 | cache_key = _get_custom_data_cache_key(obj) 141 | if app.cache.has(cache_key): 142 | app.cache.delete(cache_key) 143 | app.logger.debug("%s DELETE cache" % cache_key) 144 | -------------------------------------------------------------------------------- /library/engine/errors.py: -------------------------------------------------------------------------------- 1 | from library.engine.utils import json_response 2 | from traceback import format_exc 3 | 4 | 5 | class ApiError(Exception): 6 | status_code = 400 7 | 8 | def __init__(self, errors, status_code=None, payload=None): 9 | Exception.__init__(self) 10 | if type(errors) != list: 11 | self.errors = [errors] 12 | else: 13 | self.errors = errors 14 | 15 | if status_code is not None: 16 | self.status_code = status_code 17 | self.payload = payload 18 | 19 | def to_dict(self): 20 | data = { 21 | "errors": self.errors, 22 | "error_type": self.__class__.__name__ 23 | } 24 | if self.payload: 25 | data["data"] = self.payload 26 | return data 27 | 28 | def __repr__(self): 29 | return "%s: %s, status_code=%s" % (self.__class__.__name__, ", ".join(self.errors), self.status_code) 30 | 31 | def __str__(self): 32 | return "%s, 
status_code=%s" % (", ".join(self.errors), self.status_code) 33 | 34 | class InputDataError(ApiError): 35 | pass 36 | 37 | class NotFound(ApiError): 38 | status_code = 404 39 | 40 | 41 | class Conflict(ApiError): 42 | status_code = 409 43 | 44 | 45 | class IntegrityError(Conflict): 46 | pass 47 | 48 | 49 | class DatacenterNotFound(NotFound): 50 | pass 51 | 52 | 53 | class DatacenterNotEmpty(IntegrityError): 54 | pass 55 | 56 | 57 | class ParentAlreadyExists(Conflict): 58 | pass 59 | 60 | 61 | class ParentCycle(IntegrityError): 62 | pass 63 | 64 | 65 | class ParentDoesNotExist(NotFound): 66 | pass 67 | 68 | 69 | class ChildAlreadyExists(Conflict): 70 | pass 71 | 72 | 73 | class ChildDoesNotExist(NotFound): 74 | pass 75 | 76 | 77 | class ObjectSaveRequired(ApiError): 78 | pass 79 | 80 | 81 | class FieldRequired(ApiError): 82 | pass 83 | 84 | 85 | class InvalidTags(IntegrityError): 86 | pass 87 | 88 | 89 | class InvalidIpAddresses(IntegrityError): 90 | pass 91 | 92 | 93 | class InvalidHardwareAddresses(IntegrityError): 94 | pass 95 | 96 | 97 | class InvalidNetInterfaces(IntegrityError): 98 | pass 99 | 100 | 101 | class InvalidFQDN(IntegrityError): 102 | pass 103 | 104 | 105 | class InvalidCustomFields(IntegrityError): 106 | pass 107 | 108 | 109 | class InvalidCustomData(IntegrityError): 110 | pass 111 | 112 | 113 | class InvalidAliases(IntegrityError): 114 | pass 115 | 116 | 117 | class GroupNotFound(NotFound): 118 | pass 119 | 120 | 121 | class GroupNotEmpty(IntegrityError): 122 | pass 123 | 124 | 125 | class HostNotFound(NotFound): 126 | pass 127 | 128 | 129 | class WorkGroupNotFound(NotFound): 130 | pass 131 | 132 | 133 | class NetworkGroupNotFound(NotFound): 134 | pass 135 | 136 | 137 | class ServerGroupNotEmpty(IntegrityError): 138 | pass 139 | 140 | 141 | class WorkGroupNotEmpty(IntegrityError): 142 | pass 143 | 144 | 145 | class UserNotFound(NotFound): 146 | pass 147 | 148 | 149 | class UserAlreadyExists(Conflict): 150 | pass 151 | 152 | 153 | class 
def handle_api_error(error):
    """Flask error handler for ApiError: serialize it with its status code."""
    return json_response(error.to_dict(), error.status_code)


def handle_other_errors(error):
    """Catch-all error handler: log the active traceback, respond HTTP 400.

    Bug fix: ``format_exc()`` takes no exception argument — its optional
    first parameter is a traceback depth *limit*, so passing the exception
    object was meaningless (it only "worked" via Python 2's arbitrary
    cross-type comparison). We are inside the handler for the active
    exception, so a plain call renders the correct traceback.
    """
    from app import app
    app.logger.error(format_exc())
    return json_response({ "errors": [str(error)] }, 400)


def _group_parents_recursive(group, fields, host_fields):
    """Serialize group plus its hosts, recursing up through its parents."""
    result = group.to_dict(fields)
    result["hosts"] = [x.to_dict(host_fields) for x in group.hosts]
    result["parents"] = [_group_parents_recursive(p, fields, host_fields)
                         for p in group.parents]
    return result


def _group_children_recursive(group, fields, host_fields):
    """Serialize group plus its hosts, recursing down through its children."""
    result = group.to_dict(fields)
    result["hosts"] = [x.to_dict(host_fields) for x in group.hosts]
    result["children"] = [_group_children_recursive(c, fields, host_fields)
                          for c in group.children]
    return result


def group_structure(group, fields, host_fields):
    """Full serialization of a group: its hosts, the whole ancestor tree
    ("parents") and the whole descendant tree ("children")."""
    result = group.to_dict(fields)
    result["hosts"] = [x.to_dict(host_fields) for x in group.hosts]
    result["parents"] = _group_parents_recursive(group, fields, host_fields)["parents"]
    result["children"] = _group_children_recursive(group, fields, host_fields)["children"]
    return result
class MongoJSONEncoder(JSONEncoder):
    """Flask JSON encoder aware of BSON ObjectIds, cursors, sets and models."""

    def default(self, o):
        # imported lazily to avoid a circular import at module load time
        from app.models.storable_model import StorableModel
        from library.db import ObjectsCursor
        if isinstance(o, ObjectId):
            return str(o)
        elif isinstance(o, (ObjectsCursor, set)):
            return list(o)
        elif isinstance(o, StorableModel):
            return o.to_dict()
        else:
            return JSONEncoder.default(self, o)


def user_work_groups(user_id=None):
    """Work groups the given (or current) user owns or is a member of.

    Returns a plain empty list when no user id can be determined.
    """
    if user_id is None:
        if g.user is None:
            return []
        user_id = g.user._id
    return WorkGroup.find({"$or": [
        {"owner_id": user_id},
        {"member_ids": user_id},
    ]})


def user_groups(user_id=None):
    """Groups belonging to the user's work groups.

    Bug fix: user_work_groups() returns a plain list ([]) when there is
    no current user, and the previous ``wgs.count() == 0`` check raised
    TypeError in that case (list.count() requires an argument).
    Collecting the ids first handles both the cursor and the empty-list
    cases with identical results.
    """
    wgs = user_work_groups(user_id)
    work_group_ids = [x._id for x in wgs]
    if not work_group_ids:
        return []
    return Group.find({"work_group_id": {
        "$in": work_group_ids
    }})


def user_hosts(user_id=None, include_not_assigned=True):
    """Hosts in the user's groups, optionally plus group-less hosts."""
    grps = user_groups(user_id)
    query = {"group_id": {"$in": [x._id for x in grps]}}
    if include_not_assigned:
        query = {"$or": [
            {"group_id": None},
            query
        ]}
    return Host.find(query)
# pre-compiled big-endian unsigned-int packer for the PBKDF2 block counter
_pack_int = Struct('>I').pack


def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Like :func:`pbkdf2_bin` but returns a hex encoded string."""
    # NOTE(review): str.encode('hex') is Python 2 only — this module (izip,
    # xrange below) is py2 code throughout
    return pbkdf2_bin(data, salt, iterations, keylen, hashfunc).encode('hex')


def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
    with the given `salt`. It iterates `iterations` time and produces a
    key of `keylen` bytes. By default SHA-1 is used as hash function,
    a different hashlib `hashfunc` can be provided.
    """
    hashfunc = hashfunc or hashlib.sha1
    mac = hmac.new(data, None, hashfunc)
    def _pseudorandom(x, mac=mac):
        # PRF: HMAC(data, x) as a list of byte values (py2 map returns list)
        h = mac.copy()
        h.update(x)
        return map(ord, h.digest())
    buf = []
    # -(-keylen // digest_size) is ceiling division: blocks needed for keylen
    for block in xrange(1, -(-keylen // mac.digest_size) + 1):
        rv = u = _pseudorandom(salt + _pack_int(block))
        for i in xrange(iterations - 1):
            # U_{i+1} = PRF(U_i); rv accumulates the XOR of all U_i lazily
            u = _pseudorandom(''.join(map(chr, u)))
            rv = starmap(xor, izip(rv, u))
        buf.extend(rv)
    # truncate the concatenated blocks to the requested key length
    return ''.join(map(chr, buf))[:keylen]


def current_user_is_system():
    """True if the user bound to the current app context is a system user."""
    user = get_user_from_app_context()
    if user is None:
        return False
    return user.system


def get_user_from_app_context():
    """Return flask.g.user, or None when g has no user attribute."""
    user = None
    try:
        user = g.user
    except AttributeError:
        pass
    return user
import string


class InvalidPattern(Exception):
    """Raised for malformed bracket patterns (e.g. unbalanced braces)."""
    pass

# per-class alphabets used by succ()
lw = list(string.ascii_lowercase)
up = list(string.ascii_uppercase)
nums = list(string.digits)


def succ(s):
    """Ruby String.succ implementation: the lexicographic successor of s.

    Carries right-to-left ("az" -> "ba") and grows the string on overflow
    ("zz" -> "aaa", "9" -> "10"). Raises ValueError for symbols outside
    [0-9a-zA-Z].
    """
    def get_alphabet(sym):
        if sym.isupper(): return up
        if sym.islower(): return lw
        if sym.isdigit(): return nums
        return None

    def symbol_succ(sym):
        """Returns the next symbol in its alphabet, the carry flag and the alphabet"""
        alphabet = get_alphabet(sym)
        if alphabet is None:
            return None, None, None
        i = alphabet.index(sym) + 1
        if i == len(alphabet):
            carry = True
            i = 0
        else:
            carry = False
        return alphabet[i], carry, alphabet

    # work on the reversed symbol list so the least significant symbol is first
    sym_list = list(s)[::-1]
    if len(sym_list) == 0:
        return ""
    i = 0
    while True:
        sym = sym_list[i]

        new_sym, carry, current_alphabet = symbol_succ(sym)
        if new_sym is None:
            raise ValueError("Wrong alphabeth")

        sym_list[i] = new_sym
        if not carry:
            break
        i += 1
        if len(sym_list) == i:
            # overflow past the most significant symbol: grow the string
            # (digit strings grow with "1", letter strings with "a"/"A")
            if current_alphabet is nums:
                sym_list.append(current_alphabet[1])
            else:
                sym_list.append(current_alphabet[0])
            break

    return "".join(sym_list[::-1])


def sequence(fr, to, exclude=False):
    """
    Ruby-like string range generator
    ================================
    Ruby's 'a0'..'e4' equals to sequence('a0', 'e4')
    use @exclude parameter to exclude the last element (@to) from the generated sequence
    """
    while fr is not None and fr != to and len(fr) <= len(to):
        yield fr
        fr = succ(fr)
    if fr == to and not exclude:
        yield fr


def expand_single_pattern(pattern):
    """Yield every token of a comma-separated pattern, expanding a-b ranges."""
    import re
    # raw string fixes the invalid "\-" escape (a DeprecationWarning on
    # py3.6+); '-' needs no escaping outside a character class
    single_expr = re.compile(r'([0-9a-zA-Z]+)-([0-9a-zA-Z]+)')

    if pattern.startswith('[') and pattern.endswith(']'):
        pattern = pattern[1:-1]

    tokens = pattern.split(',')
    for token in tokens:
        corners = single_expr.search(token)
        if corners is None:
            # plain token, no range to expand
            yield token
        else:
            for item in sequence(*corners.groups()):
                yield item


def get_braces_indices(pattern):
    """Return [start, end] index pairs for each [..] group in pattern.

    Raises InvalidPattern on unbalanced braces.
    """
    stack = list()
    indices = []
    current_index = []
    for ind, sym in enumerate(pattern):
        if sym == '[':
            stack.append(True)
            current_index.append(ind)
        elif sym == ']':
            try:
                stack.pop()
            except IndexError:
                raise InvalidPattern('Closing brace without opening one')
            current_index.append(ind)
            if len(current_index) == 2:
                if len(stack) != 0:
                    raise InvalidPattern('Nested patterns are not allowed')
                indices.append(current_index)
                current_index = []
    if len(stack) > 0:
        raise InvalidPattern('Closing brace is absent')
    return indices


def expand_pattern(pattern):
    """Recursively expand every [..] group in pattern, yielding all variants."""
    indices = get_braces_indices(pattern)
    if len(indices) == 0:
        yield pattern
        return
    fr, to = indices[0]
    for token in expand_single_pattern(pattern[fr+1:to]):
        for result in expand_pattern(pattern[:fr] + token + pattern[to+1:]):
            yield result


def expand_pattern_with_vars(pattern, vars=None):
    """Like expand_pattern(), but yields (result, [result] + tokens) pairs
    so callers can refer back to the expanded tokens.

    Fix: the mutable default argument (vars=[]) is replaced with None;
    behavior is unchanged since the list was never mutated in place.
    """
    if vars is None:
        vars = []
    indices = get_braces_indices(pattern)
    if len(indices) == 0:
        yield pattern, [pattern] + vars
        return
    fr, to = indices[0]
    for token in expand_single_pattern(pattern[fr+1:to]):
        for result in expand_pattern_with_vars(pattern[:fr] + token + pattern[to+1:], vars + [token]):
            yield result
class MongoSession(CallbackDict, SessionMixin):
    """Server-side session stored in MongoDB.

    Bug fix: CallbackDict was constructed without an on_update callback,
    so ``modified`` never became True and save_session() never persisted
    any change. The callback now flags the session as modified, matching
    the canonical Flask server-side-session recipe.
    """

    def __init__(self, initial=None, sid=None):
        def on_update(d):
            d.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.sid = sid
        self.modified = False


class MongoSessionInterface(SessionInterface):
    """Flask session interface backed by a MongoDB collection."""

    def __init__(self, collection_name='sessions'):
        self.collection_name = collection_name

    def open_session(self, app, request):
        """Load the session referenced by the cookie, or start a fresh one.

        Bug fix: when the cookie referenced a missing or expired session,
        the method used to fall through and return None, which Flask
        treats as "sessions unavailable" and fails on any session access.
        A fresh session is returned in that case now.
        """
        from library.db import db
        sid = request.cookies.get(app.session_cookie_name)
        if sid:
            stored_session = db.get_session(sid, collection=self.collection_name)
            if stored_session and stored_session.get('expiration') > datetime.utcnow():
                return MongoSession(initial=stored_session['data'], sid=stored_session['sid'])
        # no cookie, unknown sid, or expired session: start a new one
        return MongoSession(sid=str(uuid4()))

    def save_session(self, app, session, response):
        """Persist a modified session to MongoDB and (re)set the cookie."""
        from library.db import db
        domain = self.get_cookie_domain(app)
        if not session:
            # session was emptied: drop the cookie client-side
            response.delete_cookie(app.session_cookie_name, domain=domain)
            return

        if session.modified:
            expiration = self.get_expiration_time(app, session)
            if not expiration:
                # non-permanent sessions get a short server-side lifetime
                expiration = datetime.utcnow() + timedelta(hours=1)
            db.update_session(session.sid, session, expiration, collection=self.collection_name)
        response.set_cookie(app.session_cookie_name, session.sid,
                            expires=self.get_expiration_time(app, session),
                            httponly=True, domain=domain)
from flask import g, request
from library.engine.errors import AuthenticationError

# Local authorization has no external login page to redirect to.
AUTHENTICATION_URL = None


def _public_user_data(user_data):
    """Strip the password hash from a user dict before exposing it.

    Mutates and returns *user_data*; centralizes the sanitization that
    was previously duplicated for both the g.user and the login paths.
    """
    if "password_hash" in user_data:
        del user_data["password_hash"]
    return user_data


class LocalAuthorizer(object):
    """Authorizer backed by the local user database (username/password)."""

    def __init__(self, flask_app=None):
        self.flask = flask_app

    @staticmethod
    def get_authentication_url():
        # No external URL: authentication happens via the local POST handler.
        return AUTHENTICATION_URL

    @staticmethod
    def get_user_data():
        """Authenticate the current request and return sanitized user data.

        If a user is already bound to the request context (g.user), its
        data is returned immediately. Otherwise the request must carry a
        JSON body with "username" and "password" fields; on success an
        "auth_token" field is added to the returned dict.

        Raises:
            AuthenticationError: missing JSON or fields (status 400), or
                invalid credentials (default status).
        """
        if g.user:
            return _public_user_data(g.user.to_dict())

        from app.models import User
        data = request.json
        if data is None:
            raise AuthenticationError("No JSON in POST data", 400)
        if "username" not in data or "password" not in data:
            raise AuthenticationError("Insufficient fields for authenticate handler", 400)

        user = User.find_one({"username": data["username"]})
        if not user or not user.check_password(data["password"]):
            raise AuthenticationError("Invalid username or password")

        user_data = _public_user_data(user.to_dict())
        user_data["auth_token"] = user.get_auth_token().token
        return user_data
| session["user_id"] = user._id 46 | return redirect("http://localhost:3000") 47 | 48 | def get_redirect_url(self): 49 | return "http://%s%s" % ( request.headers.get("Host"), REDIRECT_URI ) 50 | 51 | def get_authentication_url(self): 52 | auth_url = "https://oauth.vk.com/authorize?client_id=%s&scope=%s&redirect_uri=%s" % ( 53 | CLIENT_ID, 54 | OAUTH_SCOPE, 55 | self.get_redirect_url() 56 | ) 57 | return auth_url 58 | 59 | def get_user_data(self, access_token, user_id): 60 | user_data = requests.get("https://api.vk.com/method/users.get?user_ids=%s&access_token=%s&v=5.69&fields=screen_name" % 61 | ( user_id, access_token ) 62 | ) 63 | user_data = json.loads(user_data.content) 64 | return user_data["response"][0] 65 | -------------------------------------------------------------------------------- /plugins/commands/.gitignore: -------------------------------------------------------------------------------- 1 | *.py -------------------------------------------------------------------------------- /plugins/commands/.gitkeep: -------------------------------------------------------------------------------- 1 | __init__.py 2 | example.py -------------------------------------------------------------------------------- /plugins/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/viert/inventoree/61badd840f207598451f0e53d9aa0cf4193b1dbd/plugins/commands/__init__.py -------------------------------------------------------------------------------- /plugins/commands/example.py: -------------------------------------------------------------------------------- 1 | from commands import Command 2 | 3 | 4 | class Example(Command): 5 | 6 | DESCRIPTION = "Plugin-based example command" 7 | 8 | def run(self): 9 | print("I turned myself into an example command! 
from flask import request, session
from library.engine.utils import json_response


def main(app):
    """Plugin entry point: install CSRF protection when enabled in config.

    When app.config.app["CSRF_PROTECTION"] is truthy, every non-GET
    request must carry a "_csrf_token" form field matching the token
    stored in the session; otherwise a 403 JSON error is returned.
    """
    # .get() replaces the duplicated "key in dict and dict[key]" lookup.
    if not app.config.app.get('CSRF_PROTECTION'):
        app.logger.debug("csrf protection plugin is disabled")
        return

    app.logger.debug("csrf protection plugin initializing")

    # CSRF Protection
    @app.flask.before_request
    def csrf_protect():
        # NOTE(review): only GET is exempt, so HEAD/OPTIONS requests are
        # also checked — possibly stricter than intended; confirm.
        if request.method != "GET":
            token = session.get('_csrf_token', None)
            if not token or token != request.form.get('_csrf_token'):
                return json_response({'errors': ['request is not authorized: csrf token is invalid']}, 403)
# WSGI entry point: expose the underlying Flask application object so a
# WSGI server (uwsgi/gunicorn) can serve it as "app_callable".
from app import app

app_callable = app.flask