├── tests ├── __init__.py ├── assets │ ├── mqe-query2.yml │ ├── metricdef1.yml │ ├── mqe-query1.yml │ ├── query1.yml │ ├── mqe-metset1.yml │ ├── metset1.yml │ ├── queryset1.yml │ ├── mqe-metset2.yml │ ├── queryset2.yml │ └── metset-http.yml ├── COVERAGE.sh ├── RUN.py ├── util.py ├── test_mqengine.py ├── test_axobj.py ├── test_servant.py ├── test_axdictutil.py ├── test_erout.py ├── test_mdefl.py ├── test_erout_geckoboard.py ├── test_metricdef.py ├── conftest.py ├── test_query.py └── test_axplugin.py ├── py ├── axonchisel │ ├── metrics │ │ ├── io │ │ │ ├── __init__.py │ │ │ ├── erout │ │ │ │ ├── __init__.py │ │ │ │ ├── plugins │ │ │ │ │ ├── ero_geckoboard │ │ │ │ │ │ ├── __init__.py │ │ │ │ │ │ ├── base.py │ │ │ │ │ │ ├── meter.py │ │ │ │ │ │ └── rag.py │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── ero_strbuf.py │ │ │ │ │ └── ero_csv.py │ │ │ │ ├── interface.py │ │ │ │ └── base.py │ │ │ └── emfetch │ │ │ │ ├── __init__.py │ │ │ │ ├── plugins │ │ │ │ ├── __init__.py │ │ │ │ └── emf_random.py │ │ │ │ ├── interface.py │ │ │ │ ├── tmrange_time_t.py │ │ │ │ └── base.py │ │ ├── run │ │ │ ├── __init__.py │ │ │ ├── mqengine │ │ │ │ ├── __init__.py │ │ │ │ └── mqestate.py │ │ │ └── servant │ │ │ │ ├── __init__.py │ │ │ │ ├── state.py │ │ │ │ ├── request.py │ │ │ │ └── config.py │ │ ├── foundation │ │ │ ├── __init__.py │ │ │ ├── ax │ │ │ │ ├── __init__.py │ │ │ │ └── obj.py │ │ │ ├── chrono │ │ │ │ ├── __init__.py │ │ │ │ ├── ghost.py │ │ │ │ ├── timerange.py │ │ │ │ └── dtmath.py │ │ │ ├── data │ │ │ │ ├── __init__.py │ │ │ │ ├── multi.py │ │ │ │ ├── point.py │ │ │ │ └── series.py │ │ │ ├── query │ │ │ │ ├── __init__.py │ │ │ │ ├── qtimeframe.py │ │ │ │ ├── qghosts.py │ │ │ │ ├── queryset.py │ │ │ │ ├── qformat.py │ │ │ │ └── query.py │ │ │ └── metricdef │ │ │ │ ├── __init__.py │ │ │ │ ├── reduce.py │ │ │ │ ├── metset.py │ │ │ │ └── filters.py │ │ ├── version.py │ │ └── __init__.py │ └── __init__.py └── Ax_Metrics.egg-info │ ├── dependency_links.txt │ ├── top_level.txt │ ├── requires.txt │ └── SOURCES.txt ├── MANIFEST.in ├── .gitignore ├── conftest.py ├── LICENSE.txt ├── setup.py └── docs └── developers.md /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/run/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/Ax_Metrics.egg-info/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/ax/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /py/axonchisel/metrics/io/emfetch/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/run/mqengine/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/run/servant/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/Ax_Metrics.egg-info/top_level.txt: -------------------------------------------------------------------------------- 1 | axonchisel 2 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/chrono/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/data/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/query/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/metricdef/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/version.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.9.2' 2 | -------------------------------------------------------------------------------- /py/Ax_Metrics.egg-info/requires.txt: -------------------------------------------------------------------------------- 1 | PyYAML>=3.11 2 | requests>=2.4.3 -------------------------------------------------------------------------------- /py/axonchisel/metrics/__init__.py: -------------------------------------------------------------------------------- 1 | from .version import __version__ 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE.txt 2 | include README.md 3 | recursive-include py *.py 4 | recursive-include docs *.md 5 | recursive-include tests *.sh *.py *.yml 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | dist/ 3 | _notes/ 4 | htmlcov/ 5 | 6 | MANIFEST 7 | .coverage 8 | .DS_Store 9 | 10 | *.pyc 11 | *.db 12 | *.orig 13 | *.tmproj 14 | *.sublime-* 15 | -------------------------------------------------------------------------------- /py/axonchisel/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python trickery to support multiple separate repos/directories 3 | sharing this same package namespace (axonchisel.*). 
4 | 5 | Explanation: http://www.doughellmann.com/PyMOTW/pkgutil/ 6 | """ 7 | 8 | import pkgutil 9 | 10 | __path__ = pkgutil.extend_path(__path__, __name__) 11 | -------------------------------------------------------------------------------- /tests/assets/mqe-query2.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MQL single Query test file. 5 | # Used by automated tests on MQEngine. 6 | # 7 | 8 | id: rev_new_sales_wtd 9 | 10 | data: 11 | metrics: 12 | - metric: rev_new_sales 13 | goal: 1000 14 | goal_mode: FROMZERO 15 | 16 | timeframe: 17 | mode: CURRENT 18 | range_unit: WEEK 19 | range_val: 1 20 | gran_unit: DAY 21 | 22 | format: 23 | 24 | ghosts: 25 | - PREV_PERIOD1 26 | - PREV_YEAR1 27 | - PREV_YEAR2 28 | 29 | -------------------------------------------------------------------------------- /tests/assets/metricdef1.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MDefL single MetricDef test file. 5 | # Used by automated tests. 6 | # 7 | 8 | id: num_new_sales 9 | emfetch_id: mysql 10 | emfetch_opts: {db: "mydb1"} 11 | table: first_sales 12 | func: COUNT 13 | time_field: timeCreated 14 | time_type: TIME_EPOCH_SECS 15 | data_field: myfield 16 | data_type: NUM_INT 17 | filters: 18 | - {field: "foo", op: "EQ", value: 123} 19 | - {field: "bar", op: "EQ", value: "big bar"} 20 | -------------------------------------------------------------------------------- /tests/assets/mqe-query1.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MQL single Query test file. 5 | # Used by automated tests on MQEngine. 6 | # 7 | 8 | id: rev_new_sales_per_user_avg_wtd 9 | 10 | data: 11 | metrics: 12 | - metric: rev_new_sales 13 | div: new_users 14 | goal: 10 15 | goal_mode: CONSTANT 16 | 17 | timeframe: 18 | mode: CURRENT 19 | range_unit: WEEK 20 | range_val: 1 21 | gran_unit: DAY 22 | 23 | format: 24 | csv: {} 25 | 26 | ghosts: 27 | - PREV_PERIOD1 28 | - PREV_YEAR1 29 | - PREV_YEAR2 30 | 31 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/emfetch/plugins/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Default EMFetcher plugin class aggregation 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | # 13 | # Default Plugins 14 | # 15 | 16 | from .emf_random import EMFetcher_random 17 | from .emf_http import EMFetcher_http 18 | 19 | 20 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Ax_Metrics - Common pytest configuration. 4 | 5 | Main tests and conftest.py are in tests/ directory. 6 | This is a top level conftest to allow more flexible running of tests, 7 | e.g. by executing "tests/RUN.py" from this directory. 8 | 9 | Note: 'conftest.py' is a magic filename used by py.test.
10 | 11 | ------------------------------------------------------------------------------ 12 | Author: Dan Kamins 13 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 14 | """ 15 | 16 | 17 | # ---------------------------------------------------------------------------- 18 | 19 | 20 | collect_ignore = [ 21 | 'dist', # don't descend into built versions in 'dist' 22 | ] 23 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EROut plugins for Geckoboard support. 3 | 4 | See: http://www.geckoboard.com 5 | See: https://developer.geckoboard.com/ 6 | 7 | ------------------------------------------------------------------------------ 8 | Author: Dan Kamins 9 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 10 | """ 11 | 12 | 13 | # ---------------------------------------------------------------------------- 14 | 15 | 16 | from .bullet import EROut_geckoboard_bullet 17 | from .meter import EROut_geckoboard_meter 18 | from .numsec import EROut_geckoboard_numsec_comp 19 | from .numsec import EROut_geckoboard_numsec_trend 20 | from .rag import EROut_geckoboard_rag 21 | from .text import EROut_geckoboard_text 22 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/plugins/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Default EROut plugin class aggregation 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | # 13 | # Default Plugins 14 | # 15 | 16 | from .ero_strbuf import EROut_strbuf 17 | from .ero_csv import EROut_csv 18 | 19 | from .ero_geckoboard import EROut_geckoboard_bullet 20 | from .ero_geckoboard import EROut_geckoboard_meter 21 | from .ero_geckoboard import EROut_geckoboard_numsec_comp 22 | from .ero_geckoboard import EROut_geckoboard_numsec_trend 23 | from .ero_geckoboard import EROut_geckoboard_rag 24 | from .ero_geckoboard import EROut_geckoboard_text 25 | 26 | -------------------------------------------------------------------------------- /tests/assets/query1.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MQL single Query test file. 5 | # Used by autotmated tests. 
6 | # 7 | 8 | id: pct_new_paid_accounts_3mo_r30d 9 | 10 | data: 11 | metrics: 12 | - metric: num_new_paid_accounts 13 | div: num_total_paid_accounts 14 | goal: 0.20 15 | goal_mode: CONSTANT 16 | 17 | timeframe: 18 | mode: CURRENT 19 | range_unit: MONTH 20 | range_val: 3 21 | gran_unit: DAY 22 | 23 | smooth_unit: DAY 24 | smooth_val: 30 25 | 26 | # The following 4 timeframe specs are not actually relevant to 27 | # this metric but are used for testing purposes only: 28 | reframe_dt: 2014-11-01 29 | accumulate: True 30 | allow_overflow_begin: False 31 | allow_overflow_end: True 32 | 33 | format: 34 | geckoboard: 35 | type: GB.MULTILINE 36 | title: "New Paid Accounts %" 37 | subtitle: "(rolling 30d)" 38 | 39 | ghosts: 40 | - PREV_PERIOD1 41 | - PREV_YEAR1 42 | - PREV_YEAR2 43 | 44 | -------------------------------------------------------------------------------- /tests/assets/mqe-metset1.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MDefL MetSet test file. 5 | # Used by autotmated tests on MQEngine. 6 | # 7 | 8 | 9 | # ---------------------------------------------------------------------------- 10 | 11 | table_defaults: 12 | 13 | - table: rand 14 | emfetch_id: random 15 | time_field: not_relevant 16 | time_type: TIME_DATE 17 | emfetch_opts: 18 | random: 19 | min: 1 20 | max: 5000 21 | 22 | 23 | # ---------------------------------------------------------------------------- 24 | 25 | metrics: 26 | 27 | - id: rev_new_sales 28 | table: rand 29 | func: SUM 30 | data_type: MONEY_FLOAT 31 | 32 | - id: new_users 33 | table: rand 34 | func: COUNT 35 | data_type: NUM_INT 36 | emfetch_opts: 37 | random: 38 | round: true 39 | max: 200 40 | 41 | - id: cancels 42 | table: rand 43 | func: COUNT 44 | data_type: NUM_INT 45 | emfetch_opts: 46 | random: 47 | round: true 48 | max: 10 49 | 50 | 51 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /tests/COVERAGE.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Ax_Metrics - Unit testing coverage measurement 5 | # 6 | # Runs all unit tests using py.test wrapped in coverage.py. 7 | # See http://nedbatchelder.com/code/coverage/ 8 | # Outputs: 9 | # - HTML report in ./htmlcov/index.html 10 | # - basic console report printed to stdout 11 | # 12 | # Requires coverage.py. Install with: 13 | # $ pip install coverage 14 | # 15 | # ------------------------------------------------------------------------------ 16 | # Author: Dan Kamins 17 | # Copyright (c) 2014 Dan Kamins, AxonChisel.net 18 | # 19 | 20 | # ---------------------------------------------------------------------------- 21 | 22 | COVERAGE_SOURCE="axonchisel.metrics" 23 | RUN_PY="`dirname $0`/RUN.py" 24 | RUN_OPTS="$*" 25 | 26 | # ---------------------------------------------------------------------------- 27 | 28 | echo "[Ax_Metrics - automated py.test coverage measurement]" 29 | 30 | date 31 | 32 | coverage erase 33 | coverage run --source=$COVERAGE_SOURCE "$RUN_PY" $RUN_OPTS 34 | TEST_RESULT="$?" 35 | 36 | date 37 | 38 | if [ $TEST_RESULT -ne 0 ] 39 | then 40 | exit $TEST_RESULT 41 | fi 42 | 43 | coverage html 44 | coverage report 45 | 46 | date 47 | 48 | echo "See more detailed HTML report in ./htmlcov/index.html" 49 | -------------------------------------------------------------------------------- /tests/assets/metset1.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MDefL MetSet test file. 5 | # Used by automated tests. 6 | # 7 | 8 | # ---------------------------------------------------------------------------- 9 | 10 | table_defaults: 11 | 12 | - table: first_sales 13 | emfetch_id: mysql 14 | emfetch_opts: {db: "mydb1"} 15 | time_field: timeCreated 16 | time_type: TIME_EPOCH_SECS 17 | filters: 18 | - {field: "foo", op: "EQ", value: 123} 19 | - {field: "bar", op: "EQ", value: "big bar"} 20 | 21 | - table: invites 22 | emfetch_id: mysql 23 | emfetch_opts: {db: "mydb1"} 24 | time_type: TIME_EPOCH_SECS 25 | 26 | 27 | # ---------------------------------------------------------------------------- 28 | 29 | metrics: 30 | 31 | - id: num_new_sales 32 | table: first_sales 33 | func: COUNT 34 | 35 | - id: rev_new_sales 36 | table: first_sales 37 | func: SUM 38 | data_field: money_value 39 | data_type: MONEY_FLOAT 40 | 41 | - id: num_invites_sent 42 | table: invites 43 | time_field: timeSent 44 | func: COUNT 45 | 46 | - id: num_active_canaries 47 | emfetch_id: mysql 48 | emfetch_opts: {db: "mydb1"} 49 | table: coalmine_stats 50 | func: LAST 51 | time_field: log_day 52 | time_type: TIME_DATE 53 | data_field: canaries 54 | data_type: NUM_INT 55 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/query/qtimeframe.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Query component for time frame specification 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | from axonchisel.metrics.foundation.chrono.framespec import FrameSpec
17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | class QTimeFrame(AxObj): 22 | """ 23 | Query component for time frame specification. 24 | Wraps complex FrameSpec object which defines query period, granularity, 25 | smoothing, and more. 26 | """ 27 | 28 | def __init__(self): 29 | self._tmfrspec = FrameSpec() 30 | 31 | 32 | # 33 | # Public Properties 34 | # 35 | 36 | @property 37 | def tmfrspec(self): 38 | """Wrapped FrameSpec.""" 39 | return self._tmfrspec 40 | @tmfrspec.setter 41 | def tmfrspec(self, val): 42 | self._assert_type("tmfrspec", val, FrameSpec) 43 | self._tmfrspec = val 44 | 45 | 46 | # 47 | # Public Methods 48 | # 49 | 50 | 51 | # 52 | # Internal Methods 53 | # 54 | 55 | def __unicode__(self): 56 | return u"QTimeFrame({self._tmfrspec})".format(self=self) 57 | 58 | 59 | -------------------------------------------------------------------------------- /tests/assets/queryset1.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MQL QuerySet test file. 5 | # Used by autotmated tests. 6 | # 7 | 8 | # ---------------------------------------------------------------------------- 9 | 10 | queries: 11 | 12 | 13 | - id: pct_new_paid_accounts_rolling_30d 14 | 15 | data: 16 | metrics: 17 | - metric: num_new_paid_accounts 18 | div: num_total_paid_accounts 19 | goal: 10 20 | goal_mode: CONSTANT 21 | 22 | timeframe: 23 | range_val: 3 24 | range_unit: MONTH 25 | gran_unit: DAY 26 | mode: CURRENT 27 | smooth_unit: DAY 28 | smooth_val: 30 29 | 30 | format: 31 | _default: 32 | title: "New Paid Accounts %" 33 | subtitle: "(rolling 30d)" 34 | geckoboard_bullet: 35 | rag: [0, 3, 10, 20] 36 | projected: [1, 5] 37 | axis: ["0", "5", "10", "15", "20"] 38 | geckoboard: 39 | type: GB.MULTILINE 40 | title: "New Paid Accounts %" 41 | subtitle: "(rolling 30d)" 42 | 43 | ghosts: 44 | - PREV_PERIOD1 45 | - PREV_YEAR1 46 | - PREV_YEAR2 47 | 48 | 49 | - id: num_new_paid_accounts_month_to_date 50 | 51 | data: 52 | metrics: 53 | - metric: num_new_paid_accounts 54 | goal: 100 55 | goal_mode: FROMZERO 56 | 57 | timeframe: 58 | range_unit: MONTH 59 | range_val: 1 60 | gran_unit: DAY 61 | mode: CURRENT 62 | accumulate: True 63 | 64 | format: 65 | geckoboard: 66 | type: GB.BULLET 67 | title: "New Paid Accounts" 68 | subtitle: "(current month to date)" 69 | 70 | 71 | -------------------------------------------------------------------------------- /tests/RUN.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Ax_Metrics - Unit testing runner 4 | 5 | Runs all unit tests using py.test, passing through cmdline args as if 6 | py.test had been run directly. 7 | 8 | On failures, ring terminal bell. 9 | 10 | Optionally specify specific test(s) on cmdline via expression to be 11 | passed to py.test -k. 
(See py.test help for more info) 12 | 13 | ------------------------------------------------------------------------------ 14 | Author: Dan Kamins 15 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 16 | """ 17 | 18 | 19 | # ---------------------------------------------------------------------------- 20 | 21 | 22 | import os 23 | import sys 24 | 25 | import pytest 26 | 27 | 28 | # ---------------------------------------------------------------------------- 29 | 30 | 31 | def add_app_python_path(): 32 | """Add our app's python code to system python path.""" 33 | app_py_path = os.path.join( 34 | os.path.dirname(os.path.realpath(__file__)), 35 | '../py' 36 | ) 37 | sys.path.append(app_py_path) 38 | 39 | def run_tests(): 40 | """Run tests, returning py.test numeric exit code (0=success).""" 41 | rescode = pytest.main(sys.argv[1:]) 42 | if rescode != 0: 43 | alert_fail() 44 | return rescode 45 | 46 | def alert_fail(): 47 | """Called when tests fail. Ring bell.""" 48 | print '\x07' # ring terminal bell 49 | 50 | 51 | # ---------------------------------------------------------------------------- 52 | 53 | 54 | if __name__ == "__main__": 55 | add_app_python_path() 56 | rescode = run_tests() 57 | sys.exit(rescode) 58 | 59 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/query/qghosts.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Query component for ghost comparisons specification 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | from axonchisel.metrics.foundation.chrono.ghost import Ghost 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | class QGhosts(AxObj): 22 | """ 23 | Query component for ghosts comparison specification. 24 | Contains list of Ghost references (relative time specs). 25 | """ 26 | 27 | def __init__(self): 28 | self._ghosts = list() 29 | 30 | 31 | # 32 | # Public Methods 33 | # 34 | 35 | def add_ghost(self, ghost): 36 | """Add a Ghost to the list.""" 37 | if not isinstance(ghost, Ghost): 38 | raise TypeError("QData expected Ghost, got: {t}". 39 | format(t=type(ghost))) 40 | self._ghosts.append(ghost) 41 | 42 | def count_ghosts(self): 43 | """Return number of Ghosts included.""" 44 | return len(self._ghosts) 45 | 46 | def get_ghosts(self): 47 | """Get (shallow copy of) list of Ghosts.""" 48 | return list(self._ghosts) 49 | 50 | 51 | # 52 | # Internal Methods 53 | # 54 | 55 | def __getitem__(self, key): 56 | """Allow indexing like a list itself""" 57 | return self._ghosts[key] 58 | 59 | def __unicode__(self): 60 | return (u"QGhosts({ghosts})" 61 | ).format(self=self, 62 | ghosts=u", ".join(map(unicode, self._ghosts)) 63 | ) 64 | 65 | 66 | 67 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/metricdef/reduce.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Metric Function Reduce Functions 3 | 4 | Methods reduce a list of values (including None) to a single value. 5 | Used internally. 
6 | 7 | ------------------------------------------------------------------------------ 8 | Author: Dan Kamins 9 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 10 | """ 11 | 12 | 13 | # ---------------------------------------------------------------------------- 14 | 15 | 16 | class _ReduceFuncs(object): 17 | """ 18 | Internal static wrapper for MetricDef FUNC reduce functions. 19 | Methods reduce a list of values (including None) to a single value. 20 | """ 21 | 22 | @classmethod 23 | def reduce_COUNT(cls, vals): 24 | return len(vals) 25 | 26 | @classmethod 27 | def reduce_FIRST(cls, vals): 28 | try: 29 | return cls._strip_None(vals)[0] 30 | except IndexError: 31 | return None 32 | 33 | @classmethod 34 | def reduce_LAST(cls, vals): 35 | try: 36 | return cls._strip_None(vals)[-1] 37 | except IndexError: 38 | return None 39 | 40 | @classmethod 41 | def reduce_SUM(cls, vals): 42 | return sum(cls._strip_None(vals)) 43 | 44 | @classmethod 45 | def reduce_MIN(cls, vals): 46 | return min(cls._strip_None(vals)) 47 | 48 | @classmethod 49 | def reduce_MAX(cls, vals): 50 | return max(cls._strip_None(vals)) 51 | 52 | @classmethod 53 | def reduce_AVG(cls, vals): 54 | vals = cls._strip_None(vals) 55 | try: 56 | return float(sum(vals)) / len(vals) 57 | except ZeroDivisionError: 58 | return None 59 | 60 | @classmethod 61 | def _strip_None(cls, vals): 62 | """Return list of values from vals that are not None.""" 63 | return [v for v in vals if v is not None] 64 | 65 | 66 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/chrono/ghost.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Relative "ghost" alias specification 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | 16 | # ---------------------------------------------------------------------------- 17 | 18 | 19 | # Ghost allowed types 20 | GHOST_TYPES = { 21 | 'PREV_PERIOD1': {}, # previous period 22 | 'PREV_PERIOD2': {}, # period before previous period 23 | 'PREV_YEAR1': {}, # same period 1 year ago 24 | 'PREV_YEAR2': {}, # same period 2 years ago 25 | } 26 | 27 | 28 | # ---------------------------------------------------------------------------- 29 | 30 | 31 | class Ghost(AxObj): 32 | """ 33 | Specification of relative "ghost" time. 34 | 35 | Usually used in Query ghost specs (QGhosts) to compare data against previous periods. 36 | 37 | """ 38 | 39 | def __init__(self, gtype='PREV_PERIOD1'): 40 | """ 41 | Initialize, optionally overriding any default properties with kwargs.
42 | """ 43 | # Set valid default state: 44 | self.gtype = 'PREV_PERIOD1' 45 | 46 | # Apply initial values from kwargs: 47 | self.gtype = gtype 48 | 49 | 50 | # 51 | # Public Properties 52 | # 53 | 54 | @property 55 | def gtype(self): 56 | """Ghost type.""" 57 | return self._gtype 58 | @gtype.setter 59 | def gtype(self, val): 60 | self._assert_type_string("gtype", val) 61 | self._assert_value("gtype", val, GHOST_TYPES) 62 | self._gtype = val 63 | 64 | 65 | # 66 | # Internal Methods 67 | # 68 | 69 | def __unicode__(self): 70 | return (u"Ghost({self.gtype})").format(self=self) 71 | 72 | 73 | -------------------------------------------------------------------------------- /tests/assets/mqe-metset2.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MDefL MetSet test file. 5 | # Used by autotmated tests on MQEngine. 6 | # 7 | 8 | 9 | # ---------------------------------------------------------------------------- 10 | 11 | table_defaults: 12 | 13 | - table: kpi 14 | emfetch_id: httpd 15 | time_field: kpi_date 16 | time_type: TIME_DATE 17 | data_field: kpi_value 18 | emfetch_opts: 19 | options: 20 | timeout: 30 21 | verify_ssl: False 22 | isolate: False 23 | request: 24 | method: GET 25 | url: "{extinfo.api_url}" 26 | params: 27 | api_key: "{extinfo.api_key}" 28 | table: "{extinfo.table_prefix}{mdef.table}" 29 | time_field: "{mdef.time_field}" 30 | time_type: "date" 31 | function: "{mdef.func}" 32 | time_start: "{tmrange.inc_begin:%Y-%m-%d}" 33 | time_stop: "{tmrange.exc_end:%Y-%m-%d}" 34 | data_field: "{mdef.data_field}" 35 | filter1_field: "{mdef.filters.safe_indexable[0].field}" 36 | filter1_val: "{mdef.filters.safe_indexable[0].value}" 37 | filter2_field: "{mdef.filters.safe_indexable[1].field}" 38 | filter2_val: "{mdef.filters.safe_indexable[1].value}" 39 | response: 40 | format: JSON 41 | path: body.result 42 | 43 | 44 | # ---------------------------------------------------------------------------- 45 | 46 | metrics: 47 | 48 | - id: rev_new_sales 49 | emfetch_id: http 50 | table: kpi 51 | func: SUM 52 | data_type: MONEY_INT100 53 | filters: 54 | - {field: "kpi_stat", op: "EQ", value: "rev_exp_new_24hr_paysub_sum"} 55 | 56 | - id: new_users 57 | emfetch_id: http 58 | table: kpi 59 | func: SUM 60 | data_type: NUM_INT 61 | filters: 62 | - {field: "kpi_stat", op: "EQ", value: "user_new_24hr_count"} 63 | 64 | 65 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/query/queryset.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - QuerySet Query Set (Query collection) 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | from .query import Query 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | class QuerySet(AxObj): 22 | """ 23 | QuerySet Query Set (QueryDef collection). 24 | """ 25 | 26 | def __init__(self): 27 | self._queries = dict() # Queries keyed by their id 28 | 29 | 30 | # 31 | # Public Methods 32 | # 33 | 34 | def add_query(self, query1): 35 | """ 36 | Add valid Query to collection, replacing any with same id. 
37 | """ 38 | self._assert_type("query", query1, Query) 39 | query1.validate() 40 | self._queries[query1.id] = query1 41 | 42 | def count_queries(self): 43 | """ 44 | Returns total number of queries in set. 45 | """ 46 | return len(self._queries) 47 | 48 | def get_query_by_id(self, id): 49 | """ 50 | Returns specified Query, or raise KeyError if not found. 51 | """ 52 | m = self._queries.get(id) 53 | if not m: 54 | raise KeyError("Query #{id} not in {set}".format(id=id, set=self)) 55 | return m 56 | 57 | def validate(self): 58 | """ 59 | Validate self and all contained Querys. 60 | Raise TypeError, ValueError if any problems. 61 | """ 62 | for (id, q) in self._queries.iteritems(): 63 | q.validate() 64 | 65 | 66 | # 67 | # Internal Methods 68 | # 69 | 70 | def __unicode__(self): 71 | return (u"QuerySet({len} Queries)" 72 | .format(len=len(self._queries))) 73 | 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/emfetch/interface.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EMFetch (Extensible Metrics Fetch) Plugin Interface 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.plugin import AxPlugin 14 | 15 | 16 | # ---------------------------------------------------------------------------- 17 | 18 | 19 | class EMFetcher(AxPlugin): 20 | """ 21 | EMFetch (Extensible Metrics Fetch) Plugin Interface. 22 | 23 | MQEngine uses EMFetch plugins to access raw time-indexed metrics data 24 | for each data point. 25 | Implementations provide access to various data sources by overriding the 26 | plugin_fetch() abstract method. 27 | 28 | See AxPlugin and AxPluginBase for architecture details. 29 | 30 | Additional Parameters: 31 | 32 | - mdef : definition of metric to query. 33 | (axonchisel.metrics.foundation.metricdef.metricdef.MetricDef) 34 | """ 35 | 36 | # 37 | # Abstract Methods 38 | # 39 | 40 | # abstract 41 | def __init__(self, mdef, extinfo=None): 42 | """ 43 | Initialize around specific MetricDef and optional extinfo dict. 44 | """ 45 | raise NotImplementedError("EMFetcher abstract superclass") 46 | 47 | # abstract 48 | def plugin_fetch(self, tmrange): 49 | """ 50 | EMFetcher plugins must implement this abstract method. 51 | Invoked by fetch() after parameters are validated. 52 | 53 | Returns a single DataPoint. 54 | (axonchisel.metrics.foundation.data.point.DataPoint) 55 | 56 | Parameters: 57 | 58 | - tmrange : specification of time range to gather data for. 59 | (axonchisel.metrics.foundation.chrono.timerange.TimeRange) 60 | Also available in TimeRange_time_t format as self._tmrange. 
61 | """ 62 | raise NotImplementedError("EMFetcher abstract superclass") 63 | 64 | 65 | 66 | 67 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/metricdef/metset.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - MetSet Metric Set (MetricDef collection) 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | from .metricdef import MetricDef 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | class MetSet(AxObj): 22 | """ 23 | MetSet Metric Set (MetricDef collection). 24 | """ 25 | def __init__(self): 26 | self._metrics = dict() # MetricDefs keyed by their id 27 | 28 | 29 | # 30 | # Public Methods 31 | # 32 | 33 | def add_metric(self, mdef): 34 | """ 35 | Add MetricDef to collection, replacing any with same id. 36 | """ 37 | if not isinstance(mdef, MetricDef): 38 | raise TypeError("{set} can't add non MetricDef: {t}" 39 | .format(set=self, t=type(mdef))) 40 | self._metrics[mdef.id] = mdef 41 | 42 | def count_metrics(self): 43 | """ 44 | Returns total number of metrics in set. 45 | """ 46 | return len(self._metrics) 47 | 48 | def get_metric_by_id(self, id): 49 | """ 50 | Returns specified MetricDef, or raise KeyError if not found. 51 | """ 52 | mdef = self._metrics.get(id) 53 | if not mdef: 54 | raise KeyError("Metric #{id} not in {set}".format(id=id, set=self)) 55 | return mdef 56 | 57 | def validate(self): 58 | """ 59 | Validate self and all contained MetricDefs. 60 | Raise TypeError, ValueError if any problems. 61 | """ 62 | for (id, mdef) in self._metrics.iteritems(): 63 | mdef.validate() 64 | 65 | 66 | # 67 | # Internal Methods 68 | # 69 | 70 | def __unicode__(self): 71 | return (u"MetSet({len} MetricDefs)" 72 | .format(len=len(self._metrics))) 73 | 74 | 75 | 76 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/interface.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EROut (Extensible Report Outputter) Plugin Interface 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.plugin import AxPlugin 14 | 15 | 16 | # ---------------------------------------------------------------------------- 17 | 18 | 19 | class EROut(AxPlugin): 20 | """ 21 | EROut (Extensible Report Outputter) Plugin Interface. 22 | 23 | EROut plugins are used (e.g. by Servant) to process MultiDataSeries 24 | into final or intermediary report formats, for humans or machines. 25 | 26 | Implementations provide various format outputs by overriding the 27 | plugin_output() abstract method. 28 | 29 | See AxPlugin and AxPluginBase for architecture details. 30 | 31 | Additional Parameters: 32 | 33 | - query : Query originally used to create the data, with QFormat, etc. 34 | Optional. Plugins should work without access to Query. 
35 | (axonchisel.metrics.foundation.query.query.Query) 36 | """ 37 | 38 | # 39 | # Abstract Methods 40 | # 41 | 42 | # abstract 43 | def __init__(self, extinfo=None): 44 | """ 45 | Initialize around optional extinfo dict. 46 | """ 47 | raise NotImplementedError("EROut abstract superclass") 48 | 49 | # abstract 50 | def plugin_output(self, mdseries, query=None): 51 | """ 52 | EROut plugins must implement this abstract method. 53 | Invoked to output MultiDataSeries as specified. 54 | 55 | Returns nothing. Output target should be configured separately. 56 | 57 | Parameters: 58 | 59 | - mdseries : MultiDataSeries query result with data to output. 60 | (axonchisel.metrics.foundation.data.multi.MultiDataSeries) 61 | 62 | - query : optional Query source with more formatting details, etc. 63 | Optional. Plugins should work without access to Query. 64 | (axonchisel.metrics.foundation.query.query.Query) 65 | 66 | """ 67 | raise NotImplementedError("EROut abstract superclass") 68 | 69 | 70 | 71 | 72 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/run/servant/state.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Servant Internal State 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | from axonchisel.metrics.foundation.query.query import Query 15 | from axonchisel.metrics.foundation.data.multi import MultiDataSeries 16 | from axonchisel.metrics.io.erout.interface import EROut 17 | from axonchisel.metrics.run.mqengine.mqengine import MQEngine 18 | 19 | from .request import ServantRequest 20 | 21 | 22 | # ---------------------------------------------------------------------------- 23 | 24 | 25 | class ServantState(AxObj): 26 | """ 27 | Servant Internal State. Not for public use. 
28 | """ 29 | 30 | def __init__(self): 31 | # Set valid default state: 32 | self._request = None # (ServantRequest) 33 | self._erouts = list() # (list(EROut)) 34 | self._mqengine = None # (MQEngine) 35 | 36 | 37 | # 38 | # Public Methods 39 | # 40 | 41 | 42 | # 43 | # Public Properties 44 | # 45 | 46 | @property 47 | def request(self): 48 | """ServantRequest being processed.""" 49 | return self._request 50 | @request.setter 51 | def request(self, val): 52 | self._assert_type("request", val, ServantRequest) 53 | self._request = val 54 | 55 | @property 56 | def erouts(self): 57 | """EROut plugin objects to use.""" 58 | return self._erouts 59 | @erouts.setter 60 | def erouts(self, val): 61 | self._assert_type_list("erouts", val, ofsupercls=EROut) 62 | self._erouts = val 63 | 64 | @property 65 | def mqengine(self): 66 | """MQEngine processing query.""" 67 | return self._mqengine 68 | @mqengine.setter 69 | def mqengine(self, val): 70 | self._assert_type("mqengine", val, MQEngine) 71 | self._mqengine = val 72 | 73 | 74 | 75 | # 76 | # Internal Methods 77 | # 78 | 79 | def __unicode__(self): 80 | return (u"ServantState({self.request}, {self.erouts},"+ 81 | "{self.mqengine})" 82 | ).format(self=self) 83 | 84 | 85 | 86 | 87 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/emfetch/plugins/emf_random.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EMFetch plugin 'random' 3 | 4 | Mostly for testing purposes. Provides random data values. 5 | 6 | ------------------------------------------------------------------------------ 7 | Author: Dan Kamins 8 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 9 | """ 10 | 11 | 12 | # ---------------------------------------------------------------------------- 13 | 14 | 15 | import random 16 | 17 | from axonchisel.metrics.foundation.data.point import DataPoint 18 | 19 | from ..base import EMFetcherBase 20 | 21 | 22 | # ---------------------------------------------------------------------------- 23 | 24 | 25 | class EMFetcher_random(EMFetcherBase): 26 | """ 27 | EMFetch (Extensible Metrics Fetch) Plugin 'random'. 28 | Mostly for testing purposes. Provides random data values. 29 | """ 30 | 31 | # 32 | # Abstract Method Implementations 33 | # 34 | 35 | # abstract 36 | def plugin_create(self): 37 | """ 38 | Invoked once by MQEngine to allow plugin to setup what it needs. 39 | Always called before any fetch() invocations. 40 | """ 41 | pass 42 | 43 | # abstract 44 | def plugin_destroy(self): 45 | """ 46 | Invoked once by MQEngine to allow plugin to clean up after itself. 47 | Always called after create() and any fetch() invocations, assuming 48 | no fatal errors occurred. 49 | """ 50 | pass 51 | 52 | # abstract 53 | def plugin_fetch(self, tmrange): 54 | """ 55 | EMFetcher plugins must implement this abstract method. 56 | Invoked by fetch() after parameters are validated. 57 | 58 | Returns a single DataPoint. 59 | (axonchisel.metrics.foundation.data.point.DataPoint) 60 | 61 | Parameters: 62 | 63 | - tmrange : specification of time range to gather data for. 64 | (axonchisel.metrics.foundation.chrono.timerange.TimeRange) 65 | Also available in TimeRange_time_t format as self._tmrange. 
66 | """ 67 | dpoint = DataPoint(tmrange=tmrange) 68 | vmin = self.plugin_option('random.min', 0) 69 | vmax = self.plugin_option('random.max', 100) 70 | val = vmin + (random.random() * (vmax-vmin)) 71 | if self.plugin_option('random.round', False): 72 | val = int(round(val)) 73 | dpoint.value = val 74 | return dpoint 75 | 76 | 77 | # ---------------------------------------------------------------------------- 78 | 79 | 80 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EROut geckoboard base plugin 3 | 4 | Superclass for Geckoboard JSON output for various charts for use with 5 | http://www.geckoboard.com. 6 | 7 | ------------------------------------------------------------------------------ 8 | Author: Dan Kamins 9 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 10 | """ 11 | 12 | 13 | # ---------------------------------------------------------------------------- 14 | 15 | 16 | import time 17 | 18 | from axonchisel.metrics.foundation.ax.dictutil import OrderedDict 19 | from axonchisel.metrics.foundation.chrono.stepper import Stepper 20 | 21 | from axonchisel.metrics.io.erout.base import EROutBase 22 | 23 | import logging 24 | log = logging.getLogger(__name__) 25 | 26 | 27 | # ---------------------------------------------------------------------------- 28 | 29 | 30 | class EROut_geckoboard(EROutBase): 31 | """ 32 | Superclass EROut (Extensible Report Outputter) Plugin for geckoboard. 33 | Adds JSON-serializable output to extinfo['jout'] dict. 34 | 35 | Subclasses should define plugin_output() and immediately set 36 | self._qfdomain to the str name of the key in QFormat under which 37 | its format can be found. 38 | """ 39 | 40 | # 41 | # Abstract Method Implementations 42 | # 43 | 44 | # abstract 45 | def plugin_create(self): 46 | """ 47 | Invoked once to allow plugin to setup what it needs. 48 | """ 49 | self._prep_output() 50 | 51 | # abstract 52 | def plugin_destroy(self): 53 | """ 54 | Invoked once to allow plugin to clean up after itself. 
55 | """ 56 | pass 57 | 58 | 59 | # 60 | # Protected Methods for Subclasses 61 | # 62 | 63 | def _prep_output(self): 64 | """Prepare jout output.""" 65 | self.jout = self.plugin_extinfo('jout') 66 | if self.jout.get('item') is None: 67 | self.jout['item'] = [] 68 | 69 | def _qformat_get(self, key, default): 70 | """Proxy for QFormat.get that allows for missing Query.""" 71 | if not self.query: 72 | return default 73 | return self.query.qformat.get(self._qfdomain, key, default) 74 | 75 | @staticmethod 76 | def _round_sigdigs(val, ndigits): 77 | """Round float value to specified number of significant digits.""" 78 | fmt = "%%.%de" % (ndigits - 1) 79 | return float(fmt % val) 80 | 81 | @staticmethod 82 | def _is_round(val): 83 | """Return bool indicating if float val is a round integer.""" 84 | return (int(val) == val) 85 | 86 | 87 | # ---------------------------------------------------------------------------- 88 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/data/multi.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Container for multiple DataSeries 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | from .series import DataSeries 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | class MultiDataSeries(AxObj): 22 | """ 23 | Data container for multiple DataSeries. 24 | Each DataSeries contains context and multiple 2D DataPoints. 25 | """ 26 | def __init__(self): 27 | self._series = list() # list of DataSeries 28 | 29 | 30 | # 31 | # Public Methods 32 | # 33 | 34 | def add_series(self, series1): 35 | """ 36 | Add DataSeries to list. 37 | """ 38 | self._assert_type("series", series1, DataSeries) 39 | self._series.append(series1) 40 | 41 | def count_series(self): 42 | """ 43 | Returns total number of series in list. 44 | """ 45 | return len(self._series) 46 | 47 | def iter_series(self): 48 | """ 49 | Return an iterator over series. 50 | """ 51 | return iter(self._series) 52 | 53 | def iter_primary_series(self): 54 | """ 55 | Return an iterator over primary (non-ghost) series. 56 | """ 57 | return (ds for ds in self._series if ds.ghost is None) 58 | 59 | def iter_ghost_series(self): 60 | """ 61 | Return an iterator over ghost series. 62 | """ 63 | return (ds for ds in self._series if ds.ghost is not None) 64 | 65 | def get_series(self, idx): 66 | """ 67 | Return specific 0-based indexed DataSeries. 68 | Supports negative indexes from tail (-1 = last). 69 | Raise IndexError if out of range. 70 | """ 71 | return self._series[idx] 72 | 73 | def get_series_by_id(self, id): 74 | """ 75 | Returns specified DataSeries, or raise KeyError if not found.
76 | """ 77 | for s in self._series: 78 | if s.id == id: 79 | return s 80 | raise KeyError("Series #{id} not in {set}".format(id=id, set=self)) 81 | 82 | 83 | # 84 | # Internal Methods 85 | # 86 | 87 | def __unicode__(self): 88 | return (u"MultiDataSeries('{len} DataSeries: [{series}]')" 89 | ).format(len=len(self._series), 90 | series=u", ".join(map(unicode, self._series)) 91 | ) 92 | 93 | 94 | 95 | 96 | 97 | -------------------------------------------------------------------------------- /tests/assets/queryset2.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MQL QuerySet test file. 5 | # Used by autotmated tests. 6 | # 7 | 8 | # ---------------------------------------------------------------------------- 9 | 10 | queries: 11 | 12 | - id: new_users_mtd 13 | data: 14 | metrics: 15 | - metric: new_users 16 | goal: 100 17 | goal_mode: FROMZERO 18 | rag: [60, 90] 19 | timeframe: 20 | mode: CURRENT 21 | range_unit: MONTH 22 | range_val: 1 23 | gran_unit: DAY 24 | ghosts: 25 | - PREV_YEAR1 26 | format: 27 | _default: 28 | title: "New Users" 29 | subtitle: "#users MTD" 30 | geckoboard_bullet: 31 | orientation: vertical 32 | 33 | - id: new_users_r7d 34 | data: 35 | metrics: 36 | - metric: new_users 37 | label: "New Users" 38 | goal: 25 39 | goal_mode: CONSTANT 40 | timeframe: 41 | mode: CURRENT 42 | range_unit: DAY 43 | range_val: 7 44 | gran_unit: DAY 45 | ghosts: 46 | - PREV_PERIOD1 47 | - PREV_PERIOD2 48 | format: 49 | _default: 50 | title: "New Users" 51 | subtitle: "#users r7d" 52 | geckoboard_numsec_trend: 53 | title: "New users, last 7d" 54 | reduce: SUM 55 | geckoboard_meter: 56 | min: 0 57 | 58 | - id: cancels_r7d 59 | data: 60 | metrics: 61 | - metric: cancels 62 | label: "Cancels" 63 | goal: 5 64 | goal_mode: CONSTANT 65 | impact: NEGATIVE 66 | rag: [6, 3] 67 | timeframe: 68 | mode: CURRENT 69 | range_unit: DAY 70 | range_val: 7 71 | gran_unit: DAY 72 | ghosts: 73 | - PREV_PERIOD1 74 | - PREV_PERIOD2 75 | format: 76 | _default: 77 | title: "Cancels" 78 | subtitle: "#users r7d" 79 | geckoboard_numsec_comp: 80 | title: "Cancels last 7d" 81 | absolute: true 82 | geckoboard_text: 83 | red: "DANGER: SENSOR {qmlabel} - {value} OVER LIMIT!" 
84 | amber: "Notice: Sensor {qmlabel} - {value} near limit ({red})" 85 | green: "Sensor {qmlabel} OK" 86 | geckoboard_rag: 87 | red: "Red Cancel" 88 | amber: OFF 89 | green: "Green Cancel" 90 | 91 | - id: rev_new_sales_qtd 92 | data: 93 | metrics: 94 | - metric: rev_new_sales 95 | goal: 1000 96 | goal_mode: FROMZERO 97 | timeframe: 98 | mode: CURRENT 99 | range_unit: QUARTER 100 | range_val: 1 101 | gran_unit: WEEK 102 | ghosts: 103 | - PREV_PERIOD1 104 | - PREV_YEAR1 105 | - PREV_YEAR2 106 | format: 107 | _default: 108 | title: "New Sales" 109 | subtitle: "$rev QTD" 110 | geckoboard_numsec_comp: 111 | prefix: "$" 112 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/query/qformat.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Query component for format specification 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | 16 | # ---------------------------------------------------------------------------- 17 | 18 | 19 | # Special domain for common property defaults. 20 | DOMAIN_DEFAULT = '_default' 21 | 22 | 23 | # ---------------------------------------------------------------------------- 24 | 25 | 26 | class QFormat(AxObj): 27 | """ 28 | Mostly opaque format container, mostly for use by ERout output plugins. 29 | 30 | Contains "domain" options dicts, each domain identified by a short ID 31 | (often the name of the plugin). 32 | """ 33 | def __init__(self): 34 | self._domain_options = dict() 35 | 36 | 37 | # 38 | # Public Methods 39 | # 40 | 41 | def has_domain(self, domain): 42 | """Return T/F indicating if options are set or indicated domain id.""" 43 | return domain in self._domain_options 44 | 45 | def get_domain(self, domain): 46 | """ 47 | Get options dict for specified domain id, creating if new. 48 | Note that this dict does not take the common default properties into 49 | consideration at all. Use get() for that. 50 | """ 51 | if domain not in self._domain_options: 52 | self._domain_options[domain] = dict() 53 | return self._domain_options[domain] 54 | 55 | def get(self, domain, key, default=KeyError): 56 | """ 57 | Get value of key in specified domain id or common default, else default. 58 | Unlike get_domain(), if domain does not exist, it is NOT created. 59 | If the key is not present, the special '_default' domain is checked. 60 | Finally if still not found, default is returned, or if not specified, 61 | KeyError is raised. 
62 | """ 63 | found = False 64 | if domain in self._domain_options: 65 | if key in self._domain_options[domain]: 66 | return self._domain_options[domain][key] 67 | if DOMAIN_DEFAULT in self._domain_options: 68 | if key in self._domain_options[DOMAIN_DEFAULT]: 69 | return self._domain_options[DOMAIN_DEFAULT][key] 70 | if default is KeyError: 71 | raise KeyError(( 72 | u"'{key}' within domain '{domain}' not found in {self}" 73 | ).format(self=self, key=key, domain=domain)) 74 | else: 75 | return default 76 | 77 | 78 | # 79 | # Internal Methods 80 | # 81 | 82 | def __unicode__(self): 83 | return (u"QFormat({options})" 84 | ).format(self=self, options=self._domain_options) 85 | 86 | 87 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/query/query.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Query container and QuerySet collection 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | from .qdata import QData 16 | from .qtimeframe import QTimeFrame 17 | from .qformat import QFormat 18 | from .qghosts import QGhosts 19 | 20 | 21 | # ---------------------------------------------------------------------------- 22 | 23 | 24 | class Query(AxObj): 25 | """ 26 | Representation of an MQL (Metrics Query Language) query. 27 | 28 | Query objects can be processed by MQEngine to obtain data. 29 | """ 30 | 31 | def __init__(self, 32 | id = '' 33 | ): 34 | # Set valid default state: 35 | self.id = '' 36 | self._qdata = QData() 37 | self._qtimeframe = QTimeFrame() 38 | self._qformat = QFormat() 39 | self._qghosts = QGhosts() 40 | 41 | # Apply initial values from kwargs: 42 | self.id = id 43 | 44 | 45 | # 46 | # Public Methods 47 | # 48 | 49 | def is_valid(self): 50 | """ 51 | Check T/F if Query is valid. 52 | """ 53 | try: 54 | self.validate() 55 | return True 56 | except (TypeError, ValueError) as e: 57 | return False 58 | 59 | def validate(self): 60 | """ 61 | Validate self. 62 | Raise TypeError, ValueError if any problems. 63 | """ 64 | if self.qdata.count_qmetrics() == 0: 65 | raise ValueError("Query #{self.id} qdata has no metrics". 
66 | format(self=self)) 67 | 68 | 69 | # 70 | # Public Properties 71 | # 72 | 73 | @property 74 | def qdata(self): 75 | """Wrapped QData.""" 76 | return self._qdata 77 | @qdata.setter 78 | def qdata(self, val): 79 | self._assert_type("qdata", val, QData) 80 | self._qdata = val 81 | 82 | @property 83 | def qtimeframe(self): 84 | """Wrapped QTimeFrame.""" 85 | return self._qtimeframe 86 | @qtimeframe.setter 87 | def qtimeframe(self, val): 88 | self._assert_type("qtimeframe", val, QTimeFrame) 89 | self._qtimeframe = val 90 | 91 | @property 92 | def qformat(self): 93 | """Wrapped QFormat.""" 94 | return self._qformat 95 | @qformat.setter 96 | def qformat(self, val): 97 | self._assert_type("qformat", val, QFormat) 98 | self._qformat = val 99 | 100 | @property 101 | def qghosts(self): 102 | """Wrapped QGhosts.""" 103 | return self._qghosts 104 | @qghosts.setter 105 | def qghosts(self, val): 106 | self._assert_type("qghosts", val, QGhosts) 107 | self._qghosts = val 108 | 109 | 110 | # 111 | # Internal Methods 112 | # 113 | 114 | def __unicode__(self): 115 | return (u"Query(#{self.id} " + 116 | "{self.qdata}, {self.qtimeframe}, "+ 117 | "{self.qformat}, {self.qghosts})" 118 | ).format(self=self) 119 | 120 | 121 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/data/point.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Encapsulation of a single data point with time range and value 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | from axonchisel.metrics.foundation.chrono.timerange import TimeRange 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | class DataPoint(AxObj): 22 | """ 23 | Single 2D data point with a) time range and b) value. 24 | 25 | Value of None is allowed to indicate missing data. 26 | 27 | Many DataPoints may be represented by a DataSeries. 28 | """ 29 | 30 | def __init__(self, **kwargs): 31 | """ 32 | Initialize, optionally overriding any default properties with kwargs. 33 | """ 34 | # Set default state: 35 | self._tmrange = None 36 | self._value = None 37 | 38 | # Apply initial values from kwargs: 39 | self._init_kwargs(kwargs, [ 40 | 'tmrange', 'value', 41 | ]) 42 | 43 | 44 | # 45 | # Public Methods 46 | # 47 | 48 | def is_valid(self): 49 | """ 50 | Check T/F if DataPoint is valid. 51 | """ 52 | try: 53 | self.validate() 54 | return True 55 | except (TypeError, ValueError) as e: 56 | return False 57 | 58 | def validate(self): 59 | """ 60 | Validate self to ensure valid TimeRange. 61 | Raise TypeError, ValueError if any problems. 62 | While much validation happens already via property accessors, 63 | this method does final validation on additional status. 64 | DataPoint missing data (i.e. value=None) can still be valid. 65 | """ 66 | self._assert_type("tmrange", self.tmrange, TimeRange) 67 | self.tmrange.validate() 68 | 69 | def is_missing(self): 70 | """ 71 | Check T/F if DataPoint is missing value (i.e. None). 72 | Note: this is separate from the validity of the DataPoint itself. 
73 | """ 74 | return self._value is None 75 | 76 | 77 | # 78 | # Public Properties 79 | # 80 | 81 | @property 82 | def tmrange(self): 83 | """Wrapped TimeRange.""" 84 | return self._tmrange 85 | @tmrange.setter 86 | def tmrange(self, val): 87 | self._assert_type("tmrange", val, TimeRange) 88 | self._tmrange = val 89 | 90 | @property 91 | def value(self): 92 | """Wrapped numeric value, or None for missing data.""" 93 | return self._value 94 | @value.setter 95 | def value(self, val): 96 | if val is not None: 97 | self._assert_type_numeric("tmrange", val) 98 | self._value = val 99 | 100 | 101 | # 102 | # Internal Methods 103 | # 104 | 105 | def __unicode__(self): 106 | return (u"{cls}('{self.value} at {self.tmrange}')" 107 | ).format(self=self, cls=self.__class__.__name__) 108 | 109 | 110 | # ---------------------------------------------------------------------------- 111 | 112 | 113 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/run/mqengine/mqestate.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - MQEngine State encapsulation 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import copy 14 | from datetime import datetime 15 | 16 | from axonchisel.metrics.foundation.ax.obj import AxObj 17 | 18 | from axonchisel.metrics.foundation.chrono.framespec import FrameSpec 19 | from axonchisel.metrics.foundation.metricdef.metset import MetSet 20 | from axonchisel.metrics.foundation.data.point import DataPoint 21 | from axonchisel.metrics.foundation.data.series import DataSeries 22 | from axonchisel.metrics.foundation.data.multi import MultiDataSeries 23 | from axonchisel.metrics.foundation.query.query import Query 24 | 25 | 26 | # ---------------------------------------------------------------------------- 27 | 28 | 29 | class MQEState(AxObj): 30 | """ 31 | MQEngine state encapsulation. 32 | Lifecycle: Should be reset() before each query. 33 | """ 34 | 35 | def __init__(self, mqe): 36 | """Init with back pointer to parent MQEngine.""" 37 | # Set valid default state: 38 | self._mqe = None 39 | self.reset() 40 | 41 | # Apply initial values from args: 42 | self.mqe = mqe 43 | 44 | 45 | # 46 | # Public Methods 47 | # 48 | 49 | def reset(self, query=None): 50 | """Reset state, to allow new queries.""" 51 | self.mdseries = MultiDataSeries() # current results accumulation 52 | self._query = None 53 | self._tmfrspec = None 54 | if query: 55 | self.query = query # current Query obj 56 | 57 | def pin_tmfrspec(self): 58 | """ 59 | Saves pinned (fixed reframe_dt) copy of Query's FrameSpec. 60 | Ensures all step sequences run over same time frame even if some 61 | take a long time to execute (because "now" doesn't change). 
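        Rough sketch of the effect (internal use; `state` is an MQEState whose
        query has already been set):
            state.pin_tmfrspec()
            state.tmfrspec.reframe_dt    # fixed datetime ("now" at pin time,
                                         # or the Query's own reframe_dt),
                                         # reused for every step of the query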
62 | """ 63 | tmfrspec = copy.deepcopy(self.query.qtimeframe.tmfrspec) 64 | if tmfrspec.reframe_dt is None: 65 | tmfrspec.reframe_dt = datetime.now() 66 | self.tmfrspec = tmfrspec 67 | 68 | 69 | # 70 | # Public Properties 71 | # 72 | 73 | @property 74 | def query(self): 75 | """Query currently working on.""" 76 | return self._query 77 | @query.setter 78 | def query(self, val): 79 | self._assert_type("query", val, Query) 80 | self._query = val 81 | 82 | @property 83 | def tmfrspec(self): 84 | """Pinned FrameSpec (adjusted from Query).""" 85 | return self._tmfrspec 86 | @tmfrspec.setter 87 | def tmfrspec(self, val): 88 | self._assert_type("tmfrspec", val, FrameSpec) 89 | self._tmfrspec = val 90 | 91 | @property 92 | def mdseries(self): 93 | """MultiDataSeries accumulating data into.""" 94 | return self._mdseries 95 | @mdseries.setter 96 | def mdseries(self, val): 97 | self._assert_type("mdseries", val, MultiDataSeries) 98 | self._mdseries = val 99 | 100 | 101 | # 102 | # Internal Methods 103 | # 104 | 105 | def __unicode__(self): 106 | return (u"MQEState()" 107 | ).format(self=self) 108 | 109 | 110 | -------------------------------------------------------------------------------- /tests/util.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Common testing utility functions. 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import os 14 | from datetime import datetime 15 | import logging 16 | 17 | import axonchisel.metrics.foundation.metricdef.mdefl as mdefl 18 | import axonchisel.metrics.foundation.query.mql as mql 19 | 20 | 21 | # ---------------------------------------------------------------------------- 22 | 23 | 24 | # ---------------------------------------------------------------------------- 25 | # Utility Functions 26 | 27 | 28 | def dt(dtstr): # (parse datetime str) 29 | """ 30 | Return datetime parsed from str of various accepted formats, eg: 31 | - '2014-02-14 16:30:45 001234' 32 | - '2014-02-14 16:30:45' 33 | - '2014-02-14 16:30' 34 | - '2014-02-14' 35 | Raise ValueError if not recognizable. 36 | """ 37 | FORMATS = ( 38 | '%Y-%m-%d %H:%M:%S %f', 39 | '%Y-%m-%d %H:%M:%S', 40 | '%Y-%m-%d %H:%M', 41 | '%Y-%m-%d', 42 | ) 43 | for fmt in FORMATS: 44 | try: 45 | dt1 = datetime.strptime(dtstr, fmt) 46 | return dt1 47 | except ValueError as e: 48 | pass 49 | raise ValueError("time data '{dtstr}' does not match any format" 50 | .format(dtstr=dtstr)) 51 | 52 | 53 | def load_test_asset(fname): 54 | """ 55 | Load a test file from assets directory, returning contents as str. 
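    Example (asset names are whatever lives under tests/assets/):
        yaml_text = load_test_asset('metset1.yml')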
56 | """ 57 | path = os.path.dirname(os.path.realpath(__file__)) 58 | fname = os.path.join(path, 'assets', fname) 59 | with open(fname, 'r') as f: 60 | return f.read() 61 | 62 | def load_metset(asset): 63 | """Helper to load test asset and parse as MetSet, returning.""" 64 | yaml_metset1 = load_test_asset(asset) 65 | parser1 = mdefl.MetSetParser() 66 | return parser1.parse_ystr_metset(yaml_metset1) 67 | 68 | def load_queryset(asset): 69 | """Helper to load test asset and parse as QuerySet, returning.""" 70 | yaml_queryset1 = load_test_asset(asset) 71 | parser1 = mql.QuerySetParser() 72 | return parser1.parse_ystr_queryset(yaml_queryset1) 73 | 74 | def load_query(asset): 75 | """Helper to load test asset and parse as Query, returning.""" 76 | yaml_query1 = load_test_asset(asset) 77 | parser1 = mql.QueryParser() 78 | return parser1.parse_ystr_query(yaml_query1) 79 | 80 | def log_config(level=logging.DEBUG): 81 | """ 82 | Configure logging. 83 | Hackily colors log output if 'colorlog' is installed. 84 | See: https://pypi.python.org/pypi/colorlog/ 85 | """ 86 | 87 | format = ("%(asctime)s %(levelname)-8s [%(name)s] %(message)s") 88 | cformat = ("%(bg_black)s%(asctime)s "+ 89 | "%(log_color)s%(levelname)-8s [%(name)-50s] %(reset)s "+ 90 | "%(log_color)s%(bold)s%(message)s") 91 | 92 | def color_if_possible(): 93 | try: 94 | from colorlog import ColoredFormatter 95 | except ImportError: 96 | return # optional lib not available, so don't color 97 | formatter = ColoredFormatter(cformat) 98 | logging.getLogger().handlers[0].formatter = formatter 99 | 100 | logging.basicConfig(level=level, format=format) 101 | color_if_possible() 102 | 103 | 104 | 105 | 106 | 107 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/plugins/ero_strbuf.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EROut plugin 'strbuf' 3 | 4 | Mostly useful for testing and as a superclass for other plugins. 5 | 6 | ------------------------------------------------------------------------------ 7 | Author: Dan Kamins 8 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 9 | """ 10 | 11 | 12 | # ---------------------------------------------------------------------------- 13 | 14 | 15 | try: 16 | from cStringIO import StringIO 17 | except ImportError: 18 | from StringIO import StringIO 19 | 20 | from ..base import EROutBase 21 | 22 | 23 | # ---------------------------------------------------------------------------- 24 | 25 | 26 | 27 | class EROut_strbuf(EROutBase): 28 | """ 29 | EROut (Extensible Report Outputter) Plugin 'strbuf'. 30 | Accumulates output in an internal file-like string (byte) buffer. 31 | Mostly useful as superclass for other plugins. 32 | """ 33 | 34 | # 35 | # Abstract Method Implementations 36 | # 37 | 38 | # abstract 39 | def plugin_create(self): 40 | """ 41 | Invoked once to allow plugin to setup what it needs. 42 | """ 43 | self.buf_reset() 44 | 45 | # abstract 46 | def plugin_destroy(self): 47 | """ 48 | Invoked once to allow plugin to clean up after itself. 49 | """ 50 | self.buf_reset() 51 | 52 | # abstract 53 | def plugin_output(self, mdseries, query=None): 54 | """ 55 | EROut plugins must implement this abstract method. 56 | Invoked to output MultiDataSeries as specified. 57 | 58 | Returns nothing. Output target should be configured separately. 59 | 60 | Parameters: 61 | 62 | - mdseries : MultiDataSeries query result with data to output. 
63 | (axonchisel.metrics.foundation.data.multi.MultiDataSeries) 64 | 65 | - query : optional Query source with more formatting details, etc. 66 | Optional. Plugins should work without access to Query. 67 | (axonchisel.metrics.foundation.query.query.Query) 68 | """ 69 | # Placeholder just adds str version of mdseries. 70 | # Subclasses should override and define this themselves. 71 | self.buf_add_line(str(mdseries)) 72 | 73 | 74 | # 75 | # Public Buffer Methods 76 | # 77 | 78 | def buf_reset(self): 79 | """Reset internal buffer to empty, discarding contents.""" 80 | self._buf = StringIO() 81 | 82 | def buf_get(self): 83 | """Return internal buffer contents as single string.""" 84 | return self._buf.getvalue() 85 | 86 | def buf_get_lines(self): 87 | """ 88 | Break internal buffer into list of lines, returning. 89 | """ 90 | return self.buf_get().splitlines() 91 | 92 | def buf_add(self, text): 93 | self._assert_type_string("text", text) 94 | if isinstance(text, unicode): 95 | text = text.encode('utf-8') 96 | self._buf.write(text) 97 | 98 | def buf_add_line(self, line): 99 | """Add a single line to the internal buffer.""" 100 | self._assert_type_string("line", line) 101 | self.buf_add("%s\n" % line) 102 | 103 | def buf_add_lines(self, lines): 104 | """Add a list of lines to the internal buffer.""" 105 | for line in lines: 106 | self.buf_add_line(line) 107 | 108 | 109 | # ---------------------------------------------------------------------------- 110 | 111 | 112 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/emfetch/tmrange_time_t.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - TimeRange extension with time_t support, for plugin internal use. 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import time 14 | import re 15 | 16 | import axonchisel.metrics.foundation.chrono.timerange as timerange 17 | 18 | 19 | # ---------------------------------------------------------------------------- 20 | 21 | 22 | class TimeRange_time_t(timerange.TimeRange): 23 | """ 24 | Extend TimeRange to provide 4 new properties named with _time_t appended. 25 | Each contains time_t (in int seconds) version of equivalent properties. 26 | Provide patch_format_str support method. 27 | May be used internally by EMFetch plugins for string formatting. 28 | This object is intended as a read-only decorator and not for updating. 29 | New time_t properties are lazily initialized to avoid unnecessary 30 | performance penalties when time_t variants are not referenced. 
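    Illustrative usage (assumes an existing, valid TimeRange `tr`):
        tt = TimeRange_time_t(tr)
        tt.inc_begin_time_t      # int seconds since epoch, computed lazily
        TimeRange_time_t.patch_format_str('{tmrange.inc_begin:%s}', ['tmrange'])
        # -> '{tmrange.inc_begin_time_t}'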
31 | """ 32 | 33 | def __init__(self, tmrange): 34 | """Init based on a TimeRange.""" 35 | timerange.TimeRange.__init__(self, 36 | anchor=tmrange.anchor, 37 | inc_begin=tmrange.inc_begin, 38 | exc_end=tmrange.exc_end, 39 | ) 40 | 41 | 42 | # 43 | # Public Properties 44 | # 45 | 46 | @property 47 | def inc_begin_time_t(self): 48 | if not hasattr(self, '_inc_begin_time_t'): 49 | self._inc_begin_time_t = int(self._dt_to_time_t(self.inc_begin)) 50 | return self._inc_begin_time_t 51 | 52 | @property 53 | def exc_begin_time_t(self): 54 | if not hasattr(self, '_exc_begin_time_t'): 55 | self._exc_begin_time_t = int(self._dt_to_time_t(self.exc_begin)) 56 | return self._exc_begin_time_t 57 | 58 | @property 59 | def inc_end_time_t(self): 60 | if not hasattr(self, '_inc_end_time_t'): 61 | self._inc_end_time_t = int(self._dt_to_time_t(self.inc_end)) 62 | return self._inc_end_time_t 63 | 64 | @property 65 | def exc_end_time_t(self): 66 | if not hasattr(self, '_exc_end_time_t'): 67 | self._exc_end_time_t = int(self._dt_to_time_t(self.exc_end)) 68 | return self._exc_end_time_t 69 | 70 | # 71 | # Public Static Helpers 72 | # 73 | 74 | @staticmethod 75 | def patch_format_str(fmt, varnames): 76 | """ 77 | Given a str.format format str, patch :%s types for _time_t use. 78 | 79 | The datetime:%s format is intended to request time_t but is not 80 | supported natively in Python, and inconsistently on various 81 | platforms. 82 | 83 | For all 'varname' in varnames list, this method finds references in 84 | the format str like: 85 | '{varname.bar:%s}' 86 | and replaces them with the valid: 87 | '{varname.bar_time_t}' 88 | 89 | It is intended for use with TimeRange_time_t objects. 90 | """ 91 | for varname in varnames: 92 | pattern = r'\{' + varname + r'\.(\w+):%s\}' 93 | sub = r'{' + varname + '.\\1_time_t}' 94 | fmt = re.sub(pattern, sub, fmt) 95 | return fmt 96 | 97 | 98 | # 99 | # Internal Methods 100 | # 101 | 102 | def _dt_to_time_t(self, dt): 103 | """Helper: convert datetime to int time_t""" 104 | return int(time.mktime(dt.timetuple())) 105 | 106 | 107 | 108 | 109 | 110 | -------------------------------------------------------------------------------- /tests/test_mqengine.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test MQEngine 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | 15 | import axonchisel.metrics.foundation.chrono.framespec as framespec 16 | import axonchisel.metrics.foundation.metricdef.mdefl as mdefl 17 | import axonchisel.metrics.foundation.query.query as query 18 | import axonchisel.metrics.foundation.query.qdata as qdata 19 | import axonchisel.metrics.foundation.query.qtimeframe as qtimeframe 20 | import axonchisel.metrics.foundation.query.qformat as qformat 21 | import axonchisel.metrics.foundation.query.qghosts as qghosts 22 | import axonchisel.metrics.foundation.query.queryset as queryset 23 | import axonchisel.metrics.foundation.query.mql as mql 24 | import axonchisel.metrics.run.mqengine.mqengine as mqengine 25 | 26 | from .util import dt, log_config, load_metset, load_query 27 | 28 | import logging 29 | 30 | 31 | # ---------------------------------------------------------------------------- 32 | 33 | 34 | # Enable TEST_EXTRA_HTTP to run a more complex HTTP stat through MQEngine. 
35 | # This can add significant time to the test run! 36 | # The HTTP components are already tested separately, so this test is more 37 | # for testing HTTP receivers and whole system throughput. 38 | TEST_EXTRA_HTTP = False 39 | 40 | 41 | # ---------------------------------------------------------------------------- 42 | 43 | 44 | def setup_module(module): 45 | log_config(level=logging.INFO) 46 | # log_config(level=logging.DEBUG) 47 | 48 | 49 | # ---------------------------------------------------------------------------- 50 | 51 | 52 | class TestMQEngine(object): 53 | """ 54 | Test MQEngine running Querys. 55 | """ 56 | 57 | # 58 | # Setup / Teardown 59 | # 60 | 61 | def setup_method(self, method): 62 | self.metset1 = load_metset( 'mqe-metset1.yml' ) 63 | self.query1 = load_query( 'mqe-query1.yml' ) 64 | self.mqe1 = mqengine.MQEngine( self.metset1 ) 65 | 66 | # 67 | # Tests 68 | # 69 | 70 | def test_query_1(self): 71 | mds = self.mqe1.query( self.query1 ) 72 | 73 | def test_extinfo(self): 74 | emfetch_extinfo = { 'foo': 'bar' } 75 | mqe2 = mqengine.MQEngine( self.metset1, emfetch_extinfo ) 76 | mds = mqe2.query( self.query1 ) 77 | 78 | def test_nodiv(self): 79 | assert self.query1.qdata.get_qmetric(0).div_metric_id is not None 80 | self.query1.qdata.get_qmetric(0).div_metric_id = None 81 | mds = self.mqe1.query( self.query1 ) 82 | 83 | def test_reframe_dt(self): 84 | self.query1.qtimeframe.tmfrspec.reframe_dt = dt('2013-08-15') 85 | mds = self.mqe1.query( self.query1 ) 86 | 87 | def test_str(self): 88 | mds = self.mqe1.query( self.query1 ) 89 | str(self.mqe1) 90 | str(self.mqe1._state) 91 | 92 | @pytest.mark.skipif("not TEST_EXTRA_HTTP") 93 | def test_real_http_backend(self): 94 | metset1 = load_metset('mqe-metset2.yml') 95 | query1 = load_query('mqe-query2.yml') 96 | query1.qtimeframe.tmfrspec.reframe_dt = dt('2013-08-15') 97 | emfetch_extinfo = { 98 | 'http': { 99 | 'api_url': 'http://localhost/stats/KPI/reduce/', 100 | 'api_key': 'TestKey', 101 | 'table_prefix': 'tm_', 102 | } 103 | } 104 | mqe = mqengine.MQEngine(metset1, emfetch_extinfo) 105 | mds = mqe.query(query1) 106 | 107 | 108 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/emfetch/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EMFetch (Extensible Metrics Fetch) Plugin Superclass Base 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import collections 14 | 15 | from axonchisel.metrics.foundation.ax.obj import AxObj 16 | from axonchisel.metrics.foundation.ax.plugin import AxPluginBase 17 | 18 | from axonchisel.metrics.foundation.chrono.timerange import TimeRange 19 | from axonchisel.metrics.foundation.metricdef.metricdef import MetricDef 20 | from axonchisel.metrics.foundation.data.point import DataPoint 21 | 22 | from .tmrange_time_t import TimeRange_time_t 23 | from .interface import EMFetcher 24 | 25 | 26 | # ---------------------------------------------------------------------------- 27 | 28 | 29 | class EMFetcherBase(EMFetcher, AxPluginBase): 30 | """ 31 | EMFetch (Extensible Metrics Fetch) Plugin Superclass Base. 32 | 33 | See EMFetcher interface class for detailed docs. 
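    A minimal subclass sketch (hypothetical; a real plugin may also hook the
    AxPluginBase lifecycle methods):
        class EMFetcher_const(EMFetcherBase):
            def plugin_fetch(self, tmrange):
                # Every period reports the same value:
                return DataPoint(tmrange=tmrange, value=1)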
34 | """ 35 | 36 | def __init__(self, mdef, extinfo=None): 37 | """ 38 | Initialize around specific MetricDef and optional extinfo dict. 39 | """ 40 | 41 | # Default state: 42 | self._mdef = None # MetricDef from config 43 | self._tmrange = None # TimeRange transient storage per fetch 44 | 45 | # Superclass init: 46 | AxPluginBase.__init__(self) 47 | 48 | # Validate, store MetricDef in self._mdef: 49 | self._assert_type("mdef", mdef, MetricDef) 50 | mdef.validate() # (raises TypeError, ValueError) 51 | self._mdef = mdef 52 | 53 | # Pass options to superclass: 54 | self.configure(options = self.mdef.emfetch_opts, extinfo = extinfo) 55 | 56 | # 57 | # Public Methods 58 | # 59 | 60 | def fetch(self, tmrange): 61 | """ 62 | Invoked by MQEngine to fetch an individual data point. 63 | May be called multiple times to load multiple data points. 64 | Validates input, calls plugin_fetch(), validates, returns DataPoint. 65 | """ 66 | # Validate and cache input: 67 | self._assert_type("tmrange", tmrange, TimeRange) 68 | tmrange.validate() 69 | self._tmrange = TimeRange_time_t(tmrange) 70 | 71 | # Defer to plugin abstract method to fetch: 72 | dpoint = self.plugin_fetch(tmrange) 73 | 74 | # Validate result DataPoint: 75 | self._assert_type("result", dpoint, DataPoint) 76 | return dpoint 77 | 78 | 79 | # 80 | # Public Properties 81 | # 82 | 83 | @property 84 | def mdef(self): 85 | """MetricDef we operate on (get only).""" 86 | return self._mdef 87 | 88 | 89 | # 90 | # Protected Methods for Subclasses 91 | # 92 | 93 | def _format_str(self, fmt, what='?', od_defaults = Exception): 94 | """ 95 | Override from AxPluginBase - 96 | Format a string using options, extinfo, and extra context (if any). 97 | Protected wrapper for Python str.format. 98 | """ 99 | context = dict() 100 | context['mdef'] = self._mdef 101 | context['tmrange'] = self._tmrange 102 | fmt = TimeRange_time_t.patch_format_str(fmt, ('tmrange',)) 103 | return AxPluginBase._format_str(self, fmt, 104 | context=context, what=what, od_defaults=od_defaults) 105 | 106 | 107 | # 108 | # Internal Methods 109 | # 110 | 111 | def __unicode__(self): 112 | return (u"{cls}({self.mdef})" 113 | ).format(self=self, cls=self.__class__.__name__, 114 | ) 115 | 116 | 117 | # ---------------------------------------------------------------------------- 118 | 119 | -------------------------------------------------------------------------------- /tests/assets/metset-http.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # 4 | # MDefL MetSet test file. 5 | # Used by autotmated tests for EMFetcher_http. 6 | # 7 | 8 | # 9 | # The MetSet represented here supports integration with a hypothetical 10 | # HTTP-based metrics query API which responds to GET requests with JSON data. 11 | # 12 | # This hypothetical API might sit atop a metrics database into which is 13 | # recorded a number of statistics every 24 hours with the following 14 | # SQL structure: 15 | # 16 | # CREATE TABLE `kpi` ( 17 | # `kpi_date` DATE NOT NULL COMMENT 'YYYY-MM-DD (UTC) of this stat', 18 | # `kpi_stat` CHAR(40) NOT NULL COMMENT 'Stat id, eg "cnt_users"', 19 | # `kpi_value` INT NOT NULL COMMENT 'Value of stat on this day', 20 | # UNIQUE KEY `IDX_kpi_stat_date` (`kpi_stat`,`kpi_date`), 21 | # KEY `IDX_kpi_date` (`kpi_date`) 22 | # ); 23 | # 24 | # The hypothetical API provides a "reduce" operation based on Ax_Metrics, 25 | # exposing this underlying KPI data for high level reporting. 
26 | # It responds to queries comprising serialized versions of a 27 | # MetricDef plus a TimeRange to yield a single numeric value for the 28 | # given time range. 29 | # The HTTP EMFetcher will be invoked by MQEngine to hit this API once 30 | # for each data point (TimeRange) of a query. 31 | # 32 | # The hypothetical API request URL here is expected to be constructed like: 33 | # http://(endpoint)/? 34 | # &api_key=(apikey) 35 | # &table=kpi 36 | # &time_field=kpi_date 37 | # &time_type=date 38 | # &function=(function, eg "SUM") 39 | # &time_start=(date, eg "2014-02-14") 40 | # &time_stop=(date, eg "2014-03-14") 41 | # &data_field=kpi_value 42 | # &filter1_field=kpi_stat 43 | # &filter1_val=(stat name, eg "user_new_24hr_count") 44 | # &filter2_field= 45 | # &filter2_val= 46 | # 47 | # The extinfo fed to the EMFetcher is expected to contain: 48 | # { "api_url": "http://...", "api_key": "MyApiKey" } 49 | # 50 | # The API response JSON looks like: 51 | # { "body": { "result": 12345 } } 52 | # 53 | # Notice how the MetSet table default below wraps up all the common settings, 54 | # allowing the individual metrics to be concise and streamlined. 55 | # 56 | 57 | 58 | # ---------------------------------------------------------------------------- 59 | 60 | table_defaults: 61 | 62 | - table: kpi 63 | emfetch_id: httpd 64 | time_field: kpi_date 65 | time_type: TIME_DATE 66 | data_field: kpi_value 67 | emfetch_opts: 68 | options: 69 | timeout: 30 70 | verify_ssl: False 71 | isolate: False 72 | request: 73 | method: GET 74 | url: "{extinfo[api_url]}" 75 | params: 76 | api_key: "{extinfo[api_key]}" 77 | table: "{extinfo[table_prefix]}{mdef.table}" 78 | time_field: "{mdef.time_field}" 79 | time_type: "date" 80 | function: "{mdef.func}" 81 | time_start: "{tmrange.inc_begin:%Y-%m-%d}" 82 | time_stop: "{tmrange.exc_end:%Y-%m-%d}" 83 | data_field: "{mdef.data_field}" 84 | filter1_field: "{mdef.filters.safe_indexable[0].field}" 85 | filter1_val: "{mdef.filters.safe_indexable[0].value}" 86 | filter2_field: "{mdef.filters.safe_indexable[1].field}" 87 | filter2_val: "{mdef.filters.safe_indexable[1].value}" 88 | response: 89 | format: JSON 90 | path: body.result 91 | 92 | 93 | # ---------------------------------------------------------------------------- 94 | 95 | metrics: 96 | 97 | - id: rev_new_sales 98 | emfetch_id: http 99 | table: kpi 100 | func: SUM 101 | data_type: MONEY_INT100 102 | filters: 103 | - {field: "kpi_stat", op: "EQ", value: "rev_exp_new_24hr_paysub_sum"} 104 | 105 | - id: new_users 106 | emfetch_id: http 107 | table: kpi 108 | func: SUM 109 | data_type: NUM_INT 110 | filters: 111 | - {field: "kpi_stat", op: "EQ", value: "user_new_24hr_count"} 112 | 113 | 114 | -------------------------------------------------------------------------------- /tests/test_axobj.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test foundation AxObj 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | from datetime import datetime 15 | 16 | from axonchisel.metrics.foundation.ax.obj import AxObj 17 | 18 | 19 | # ---------------------------------------------------------------------------- 20 | 21 | 22 | class TestAxObj(object): 23 | """ 24 | Test AxObj object. 
25 | """ 26 | 27 | # 28 | # Setup / Teardown 29 | # 30 | 31 | # 32 | # Tests 33 | # 34 | 35 | def test_str(self): 36 | axo = AxObj() 37 | str(axo) 38 | axo.id = 'MyId' 39 | str(axo) 40 | 41 | def test_init_kwargs(self): 42 | axo = AxObj() 43 | kwargs = {'foo': 123, 'bar': 'zig'} 44 | axo._init_kwargs(kwargs, ['foo', 'never']) 45 | assert hasattr(axo, 'foo') 46 | assert not hasattr(axo, 'bar') 47 | assert not hasattr(axo, 'never') 48 | assert axo.foo == 123 49 | 50 | def test_assert_misc(self): 51 | axo = AxObj() 52 | axo._assert_not_none('param', 12345) 53 | with pytest.raises(ValueError): 54 | axo._assert_not_none('param', None) 55 | axo._assert_value("param", 'foo', ['foo', 'bar', 'zig']) 56 | with pytest.raises(ValueError): 57 | axo._assert_value("param", 'NOWAY', ['foo', 'bar', 'zig']) 58 | 59 | def test_assert_types1(self): 60 | axo = AxObj() 61 | axo._assert_type("param", 12345, (int, long)) 62 | with pytest.raises(TypeError): 63 | axo._assert_type("param", "Not int", (int, long)) 64 | axo._assert_type_string("param", "String") 65 | with pytest.raises(TypeError): 66 | axo._assert_type_string("param", 12345) 67 | axo._assert_type_int("param", 12345) 68 | with pytest.raises(TypeError): 69 | axo._assert_type_int("param", "Not int") 70 | axo._assert_type_numeric("param", 12345) 71 | axo._assert_type_numeric("param", 12345L) 72 | axo._assert_type_numeric("param", 12345.678) 73 | with pytest.raises(TypeError): 74 | axo._assert_type_numeric("param", "Not numeric") 75 | axo._assert_type_datetime("param", datetime.now()) 76 | with pytest.raises(TypeError): 77 | axo._assert_type_datetime("param", "Not datetime") 78 | axo._assert_type_bool("param", True) 79 | with pytest.raises(TypeError): 80 | axo._assert_type_bool("param", "Not bool") 81 | 82 | def test_assert_types_mapping(self): 83 | axo = AxObj() 84 | axo._assert_type_mapping("param", {}) 85 | axo._assert_type_mapping("param", {'a':10}) 86 | with pytest.raises(TypeError): 87 | axo._assert_type_mapping("param", "Not mapping") 88 | 89 | def test_assert_types_list(self): 90 | axo = AxObj() 91 | axo._assert_type_list("param", []) 92 | axo._assert_type_list("param", [10, 20]) 93 | axo._assert_type_list_string("param", ["Foo", "Bar"]) 94 | with pytest.raises(TypeError): 95 | axo._assert_type_list_string("param", ["Foo", 20]) 96 | axo._assert_type_list_numeric("param", [10, 20.5]) 97 | with pytest.raises(TypeError): 98 | axo._assert_type_list_numeric("param", [10, "B"]) 99 | with pytest.raises(TypeError): 100 | axo._assert_type_list("param", 12345) 101 | axo._assert_type_list("param", [10, 20], length=2) 102 | with pytest.raises(ValueError): 103 | axo._assert_type_list("param", [10, 20], length=3) 104 | class A(object): 105 | pass 106 | class B(A): 107 | pass 108 | axo._assert_type_list("param", [A(), B()], ofsupercls=A) 109 | with pytest.raises(TypeError): 110 | axo._assert_type_list("param", [A(), 10], ofsupercls=A) 111 | 112 | 113 | # 114 | # Internal Helpers 115 | # 116 | 117 | 118 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EROut (Extensible Report Outputter) Plugin Superclass Base 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import 
collections 14 | import time 15 | from datetime import datetime 16 | 17 | from axonchisel.metrics.foundation.ax.obj import AxObj 18 | from axonchisel.metrics.foundation.ax.plugin import AxPluginBase 19 | 20 | from axonchisel.metrics.foundation.query.query import Query 21 | from axonchisel.metrics.foundation.data.multi import MultiDataSeries 22 | 23 | from .interface import EROut 24 | 25 | 26 | # ---------------------------------------------------------------------------- 27 | 28 | 29 | class EROutBase(EROut, AxPluginBase): 30 | """ 31 | ERout (Extensible Report Outputter) Plugin Superclass Base. 32 | 33 | See EROut interface class for detailed docs. 34 | """ 35 | 36 | def __init__(self, extinfo=None): 37 | """ 38 | Initialize around optional extinfo dict. 39 | """ 40 | 41 | # Default state: 42 | self._query = None # Query transient storage per output 43 | self._mdseries = None # MultiDataSeries transient storage per output 44 | 45 | # Superclass init: 46 | AxPluginBase.__init__(self) 47 | 48 | # Pass options to superclass: 49 | self.configure(extinfo = extinfo) 50 | 51 | 52 | # 53 | # Public Methods 54 | # 55 | 56 | def output(self, mdseries, query=None): 57 | """ 58 | Invoked to output MultiDataSeries as specified, in optional 59 | query context. 60 | May be called multiple times to output multiple MultiDataSeries. 61 | Validates input, calls plugin_output(), validates, returns DataPoint. 62 | """ 63 | # Validate and cache input: 64 | self._assert_type("mdseries", mdseries, MultiDataSeries) 65 | self._mdseries = mdseries 66 | if query is not None: 67 | self._assert_type("query", query, Query) 68 | query.validate() # (raises TypeError, ValueError) 69 | self._query = query 70 | 71 | # Defer to plugin abstract method to output: 72 | self.plugin_output(mdseries, query=query) 73 | 74 | 75 | # 76 | # Public Properties 77 | # 78 | 79 | @property 80 | def query(self): 81 | """Query our data originally came from (get only). May be None.""" 82 | return self._query 83 | 84 | @property 85 | def mdseries(self): 86 | """MultiDataSeries we are outputting (get only).""" 87 | return self._mdseries 88 | 89 | 90 | # 91 | # Protected Methods for Subclasses 92 | # 93 | 94 | def _format_str(self, fmt, what='?', od_defaults = Exception): 95 | """ 96 | Override from AxPluginBase - 97 | Format a string using options, extinfo, and extra context (if any). 98 | Protected wrapper for Python str.format. 99 | """ 100 | context = dict() 101 | context['query'] = self._query 102 | return AxPluginBase._format_str(self, fmt, 103 | context=context, what=what, od_defaults = od_defaults) 104 | 105 | def _format_datetime(self, fmt, dt): 106 | """ 107 | Helper to format a datetime obj using strftime. 108 | If dt is None, returns empty string (""). 109 | Handles "%s" format (time_t) manually since this is not supported 110 | on all platforms. 
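        Illustrative:
            self._format_datetime('%Y-%m-%d', datetime(2014, 2, 14))  # '2014-02-14'
            self._format_datetime('%s', datetime(2014, 2, 14))        # int time_t
            self._format_datetime('%Y-%m-%d', None)                   # ''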
111 | """ 112 | if dt is None: 113 | return "" 114 | self._assert_type("datetime", dt, datetime) 115 | if fmt == "%s": 116 | return int(time.mktime(dt.timetuple())) # datetime to time_t 117 | return dt.strftime(fmt) 118 | 119 | 120 | # 121 | # Internal Methods 122 | # 123 | 124 | def __unicode__(self): 125 | return (u"{cls}({self._mdseries})" 126 | ).format(self=self, cls=self.__class__.__name__, 127 | ) 128 | 129 | 130 | # ---------------------------------------------------------------------------- 131 | 132 | -------------------------------------------------------------------------------- /py/Ax_Metrics.egg-info/SOURCES.txt: -------------------------------------------------------------------------------- 1 | LICENSE.txt 2 | MANIFEST.in 3 | README.md 4 | setup.py 5 | docs/developers.md 6 | py/Ax_Metrics.egg-info/PKG-INFO 7 | py/Ax_Metrics.egg-info/SOURCES.txt 8 | py/Ax_Metrics.egg-info/dependency_links.txt 9 | py/Ax_Metrics.egg-info/requires.txt 10 | py/Ax_Metrics.egg-info/top_level.txt 11 | py/axonchisel/__init__.py 12 | py/axonchisel/metrics/__init__.py 13 | py/axonchisel/metrics/version.py 14 | py/axonchisel/metrics/foundation/__init__.py 15 | py/axonchisel/metrics/foundation/ax/__init__.py 16 | py/axonchisel/metrics/foundation/ax/dictutil.py 17 | py/axonchisel/metrics/foundation/ax/obj.py 18 | py/axonchisel/metrics/foundation/ax/plugin.py 19 | py/axonchisel/metrics/foundation/chrono/__init__.py 20 | py/axonchisel/metrics/foundation/chrono/dtmath.py 21 | py/axonchisel/metrics/foundation/chrono/framespec.py 22 | py/axonchisel/metrics/foundation/chrono/ghost.py 23 | py/axonchisel/metrics/foundation/chrono/stepper.py 24 | py/axonchisel/metrics/foundation/chrono/timerange.py 25 | py/axonchisel/metrics/foundation/data/__init__.py 26 | py/axonchisel/metrics/foundation/data/multi.py 27 | py/axonchisel/metrics/foundation/data/point.py 28 | py/axonchisel/metrics/foundation/data/series.py 29 | py/axonchisel/metrics/foundation/metricdef/__init__.py 30 | py/axonchisel/metrics/foundation/metricdef/filters.py 31 | py/axonchisel/metrics/foundation/metricdef/mdefl.py 32 | py/axonchisel/metrics/foundation/metricdef/metricdef.py 33 | py/axonchisel/metrics/foundation/metricdef/metset.py 34 | py/axonchisel/metrics/foundation/metricdef/reduce.py 35 | py/axonchisel/metrics/foundation/query/__init__.py 36 | py/axonchisel/metrics/foundation/query/mql.py 37 | py/axonchisel/metrics/foundation/query/qdata.py 38 | py/axonchisel/metrics/foundation/query/qformat.py 39 | py/axonchisel/metrics/foundation/query/qghosts.py 40 | py/axonchisel/metrics/foundation/query/qtimeframe.py 41 | py/axonchisel/metrics/foundation/query/query.py 42 | py/axonchisel/metrics/foundation/query/queryset.py 43 | py/axonchisel/metrics/io/__init__.py 44 | py/axonchisel/metrics/io/emfetch/__init__.py 45 | py/axonchisel/metrics/io/emfetch/base.py 46 | py/axonchisel/metrics/io/emfetch/interface.py 47 | py/axonchisel/metrics/io/emfetch/tmrange_time_t.py 48 | py/axonchisel/metrics/io/emfetch/plugins/__init__.py 49 | py/axonchisel/metrics/io/emfetch/plugins/emf_http.py 50 | py/axonchisel/metrics/io/emfetch/plugins/emf_random.py 51 | py/axonchisel/metrics/io/erout/__init__.py 52 | py/axonchisel/metrics/io/erout/base.py 53 | py/axonchisel/metrics/io/erout/interface.py 54 | py/axonchisel/metrics/io/erout/plugins/__init__.py 55 | py/axonchisel/metrics/io/erout/plugins/ero_csv.py 56 | py/axonchisel/metrics/io/erout/plugins/ero_strbuf.py 57 | py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/__init__.py 58 | 
py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/base.py 59 | py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/bullet.py 60 | py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/meter.py 61 | py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/numsec.py 62 | py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/rag.py 63 | py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/text.py 64 | py/axonchisel/metrics/run/__init__.py 65 | py/axonchisel/metrics/run/mqengine/__init__.py 66 | py/axonchisel/metrics/run/mqengine/mqengine.py 67 | py/axonchisel/metrics/run/mqengine/mqestate.py 68 | py/axonchisel/metrics/run/servant/__init__.py 69 | py/axonchisel/metrics/run/servant/config.py 70 | py/axonchisel/metrics/run/servant/request.py 71 | py/axonchisel/metrics/run/servant/servant.py 72 | py/axonchisel/metrics/run/servant/state.py 73 | tests/COVERAGE.sh 74 | tests/RUN.py 75 | tests/__init__.py 76 | tests/conftest.py 77 | tests/test_axdictutil.py 78 | tests/test_axobj.py 79 | tests/test_axplugin.py 80 | tests/test_chrono.py 81 | tests/test_data.py 82 | tests/test_dtmath.py 83 | tests/test_emfetch.py 84 | tests/test_emfetch_http.py 85 | tests/test_erout.py 86 | tests/test_erout_geckoboard.py 87 | tests/test_mdefl.py 88 | tests/test_metricdef.py 89 | tests/test_mqengine.py 90 | tests/test_mql.py 91 | tests/test_query.py 92 | tests/test_servant.py 93 | tests/util.py 94 | tests/assets/metricdef1.yml 95 | tests/assets/metset-http.yml 96 | tests/assets/metset1.yml 97 | tests/assets/mqe-metset1.yml 98 | tests/assets/mqe-metset2.yml 99 | tests/assets/mqe-query1.yml 100 | tests/assets/mqe-query2.yml 101 | tests/assets/query1.yml 102 | tests/assets/queryset1.yml 103 | tests/assets/queryset2.yml -------------------------------------------------------------------------------- /tests/test_servant.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test servant package 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | import logging 15 | from StringIO import StringIO 16 | 17 | import axonchisel.metrics.io.erout.plugins.ero_csv as ero_csv 18 | from axonchisel.metrics.run.servant.config import ServantConfig 19 | from axonchisel.metrics.run.servant.request import ServantRequest 20 | from axonchisel.metrics.run.servant.state import ServantState 21 | from axonchisel.metrics.run.servant.servant import Servant 22 | 23 | from .util import dt, log_config, load_metset, load_queryset 24 | 25 | 26 | 27 | # ---------------------------------------------------------------------------- 28 | 29 | 30 | def setup_module(module): 31 | log_config(level=logging.INFO) 32 | # log_config(level=logging.DEBUG) 33 | 34 | 35 | # ---------------------------------------------------------------------------- 36 | 37 | 38 | class TestServant(object): 39 | """ 40 | Test Servant. 
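    Wiring sketch (mirrors setup_method below): a ServantConfig bundles the
    MetSet, QuerySet, and per-plugin extinfo, then a ServantRequest names the
    queries and EROut plugins to run, e.g.:
        servant = Servant(sconfig)
        servant.process(ServantRequest(query_ids=['new_users_mtd'],
                                       erout_plugin_ids=['csv']))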
41 | """ 42 | 43 | # 44 | # Setup / Teardown 45 | # 46 | 47 | def setup_method(self, method): 48 | self.buf1 = StringIO() 49 | self.emfetch_extinfo = { 50 | } 51 | self.erout_extinfo = { 52 | 'csv': { 53 | 'fout': self.buf1, 54 | } 55 | } 56 | self.metset1 = load_metset( 'mqe-metset1.yml' ) 57 | self.queryset1 = load_queryset( 'queryset2.yml' ) 58 | self.sconfig = ServantConfig( 59 | metset = self.metset1, 60 | queryset = self.queryset1, 61 | emfetch_extinfo = self.emfetch_extinfo, 62 | erout_extinfo = self.erout_extinfo, 63 | ) 64 | self.query_ids = [ 65 | 'new_users_mtd', 66 | 'new_users_r7d', 67 | 'rev_new_sales_qtd', 68 | ] 69 | self.erout_plugin_ids = ['strbuf', 'csv'] 70 | self.sreq = ServantRequest( 71 | query_ids=self.query_ids, 72 | erout_plugin_ids=self.erout_plugin_ids, 73 | ) 74 | 75 | 76 | # 77 | # Tests 78 | # 79 | 80 | def test_basic(self): 81 | servant = Servant(self.sconfig) 82 | servant.process(self.sreq) 83 | 84 | def test_collapse(self): 85 | servant = Servant(self.sconfig) 86 | self.sreq.collapse = True 87 | servant.process(self.sreq) 88 | lines = self.buf1.getvalue().splitlines() 89 | assert len(lines) == 10 # (header + 3 queries + 6 ghosts) 90 | 91 | def test_from_params(self): 92 | servant = Servant(self.sconfig) 93 | params = { 94 | 'query': 'new_users_mtd,new_users_r7d, rev_new_sales_qtd ,,', 95 | 'erout': 'strbuf,csv', 96 | } 97 | sreq = ServantRequest.from_params(params) 98 | assert sreq.collapse == False 99 | assert sreq.noghosts == False 100 | servant.process(sreq) 101 | params['collapse'] = '1' 102 | params['noghosts'] = '1' 103 | sreq = ServantRequest.from_params(params) 104 | assert sreq.collapse == True 105 | assert sreq.noghosts == True 106 | servant.process(sreq) 107 | 108 | def test_state(self): 109 | sstate = ServantState() 110 | sstate.erouts = [ero_csv.EROut_csv()] 111 | with pytest.raises(TypeError): 112 | sstate.erouts = ['not an EROut'] 113 | str(sstate) 114 | 115 | def test_misc(self): 116 | servant = Servant(self.sconfig) 117 | str(servant) 118 | str(servant.config) 119 | 120 | def test_config_bad(self): 121 | sconfig = ServantConfig() 122 | with pytest.raises(ValueError): 123 | sconfig.validate() 124 | sconfig.metset = self.metset1 125 | with pytest.raises(ValueError): 126 | sconfig.validate() 127 | sconfig.queryset = self.queryset1 128 | with pytest.raises(ValueError): 129 | sconfig.validate() 130 | sconfig.emfetch_extinfo = self.emfetch_extinfo 131 | with pytest.raises(ValueError): 132 | sconfig.validate() 133 | sconfig.erout_extinfo = self.erout_extinfo 134 | sconfig.validate() 135 | 136 | 137 | 138 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/plugins/ero_csv.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EROut plugin 'csv' 3 | 4 | Writes CSV output. 
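Output goes to a file-like object supplied as extinfo['fout']; when run via
Servant this is typically wired as, e.g. (illustrative):
    erout_extinfo = {'csv': {'fout': my_buffer}}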
5 | 6 | ------------------------------------------------------------------------------ 7 | Author: Dan Kamins 8 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 9 | """ 10 | 11 | 12 | # ---------------------------------------------------------------------------- 13 | 14 | 15 | import csv 16 | 17 | from ..base import EROutBase 18 | 19 | 20 | # ---------------------------------------------------------------------------- 21 | 22 | 23 | # Ordered list of CSV field names to write: 24 | FIELDNAMES = [ 25 | 'query_id', 26 | 'series_id', 27 | 'ghost', 28 | 'tm_anchor', 29 | 'tm_begin_inc', 30 | 'tm_end_exc', 31 | 'value', 32 | ] 33 | 34 | 35 | # ---------------------------------------------------------------------------- 36 | 37 | 38 | class EROut_csv(EROutBase): 39 | """ 40 | EROut (Extensible Report Outputter) Plugin 'csv'. 41 | Writes CSV output to extinfo['fout'] file-like object. 42 | """ 43 | 44 | # 45 | # Abstract Method Implementations 46 | # 47 | 48 | # abstract 49 | def plugin_create(self): 50 | """ 51 | Invoked once to allow plugin to setup what it needs. 52 | """ 53 | self._wrote_header = False 54 | 55 | # abstract 56 | def plugin_destroy(self): 57 | """ 58 | Invoked once to allow plugin to clean up after itself. 59 | """ 60 | pass 61 | 62 | # abstract 63 | def plugin_output(self, mdseries, query=None): 64 | """ 65 | EROut plugins must implement this abstract method. 66 | Invoked to output MultiDataSeries as specified. 67 | 68 | Returns nothing. Output target should be configured separately. 69 | 70 | Parameters: 71 | 72 | - mdseries : MultiDataSeries query result with data to output. 73 | (axonchisel.metrics.foundation.data.multi.MultiDataSeries) 74 | 75 | - query : optional Query source with more formatting details, etc. 76 | Optional. Plugins should work without access to Query. 
77 | (axonchisel.metrics.foundation.query.query.Query) 78 | """ 79 | # Prep CSV: 80 | fout = self.plugin_extinfo('fout') 81 | self._csvw = csv.DictWriter(fout, FIELDNAMES, dialect='excel') 82 | 83 | # Write header (but only once): 84 | self._write_header_row() 85 | 86 | # Iterate MDS, writing each series: 87 | for dseries in mdseries.iter_series(): 88 | self._write_series(dseries) 89 | 90 | # 91 | # Internal Methods 92 | # 93 | 94 | def _write_header_row(self): 95 | """Write CSV header row, but only once.""" 96 | if self._wrote_header: 97 | return 98 | row = dict(zip(FIELDNAMES, FIELDNAMES)) 99 | self._write_row(row) 100 | self._wrote_header = True 101 | 102 | def _write_series(self, dseries): 103 | """Write the current DataSeries to CSV writer.""" 104 | for dpoint in dseries.iter_points(): 105 | row = self._make_row(dseries, dpoint) 106 | self._write_row(row) 107 | 108 | def _make_row(self, dseries, dpoint): 109 | """Given current DataSeries and DataPoint, return row dict for CSV.""" 110 | datefmt = self.plugin_option('date_format', '%Y-%m-%d %H:%M:%S') 111 | row = { 112 | 'query_id' : self._query.id if self._query else None, 113 | 'series_id' : dseries.id, 114 | 'ghost' : dseries.ghost.gtype if dseries.ghost else None, 115 | 'tm_anchor' : self._format_datetime(datefmt, dpoint.tmrange.anchor), 116 | 'tm_begin_inc': self._format_datetime(datefmt, dpoint.tmrange.inc_begin), 117 | 'tm_end_exc' : self._format_datetime(datefmt, dpoint.tmrange.exc_end), 118 | 'value' : dpoint.value, 119 | } 120 | return row 121 | 122 | def _write_row(self, row): 123 | """Encode and write CSV dict row to open writer.""" 124 | row = self._encode_row(row) 125 | self._csvw.writerow(row) 126 | 127 | def _encode_row(self, row): 128 | """Given row dict, return version with all strings utf8 encoded.""" 129 | row2 = dict() 130 | for k, v in row.iteritems(): 131 | if isinstance(v, unicode): 132 | v = v.encode('utf-8') 133 | row2[k] = v 134 | return row2 135 | 136 | 137 | 138 | 139 | # ---------------------------------------------------------------------------- 140 | 141 | 142 | -------------------------------------------------------------------------------- /tests/test_axdictutil.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test foundation Ax dictutil 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | 15 | from axonchisel.metrics.foundation.ax.dictutil import dict_get_by_path 16 | from axonchisel.metrics.foundation.ax.dictutil import ObjectifiedDict 17 | 18 | 19 | # ---------------------------------------------------------------------------- 20 | 21 | 22 | @pytest.fixture 23 | def dicts(): 24 | return [{ 25 | 'foo': "Big Foo", 26 | 'bar': "Big Bar", 27 | 'zig': [10, 20, 30], 28 | 'zag': { 29 | 'z1': 1000, 30 | 'z2': { 31 | 'a': 65, 32 | 'b': 66, 33 | }, 34 | }, 35 | }] 36 | 37 | 38 | # ---------------------------------------------------------------------------- 39 | 40 | 41 | class TestAxDictUtil(object): 42 | """ 43 | Test dictutil module methods. 
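    The path syntax exercised below is dotted-key traversal, e.g.:
        dict_get_by_path(d, 'zag.z2.a')   # -> 65 for the fixture above
    with an optional default argument used instead of raising KeyError.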
44 | """ 45 | 46 | # 47 | # Setup / Teardown 48 | # 49 | 50 | # 51 | # Tests 52 | # 53 | 54 | def test_good(self, dicts): 55 | d = dicts[0] 56 | assert dict_get_by_path(d, 'foo') == "Big Foo" 57 | assert dict_get_by_path(d, 'zig') == [10, 20, 30] 58 | assert dict_get_by_path(d, 'zag.z1') == 1000 59 | assert dict_get_by_path(d, 'zag.z2.a') == 65 60 | 61 | def test_default(self, dicts): 62 | d = dicts[0] 63 | assert dict_get_by_path(d, 'foo', "DEF") == "Big Foo" 64 | assert dict_get_by_path(d, 'BOGUS', "DEF") == "DEF" 65 | 66 | def test_bad(self, dicts): 67 | d = dicts[0] 68 | with pytest.raises(KeyError): 69 | dict_get_by_path(d, 'BOGUS') 70 | with pytest.raises(KeyError): 71 | dict_get_by_path(d, 'foo.BOGUS') 72 | with pytest.raises(KeyError): 73 | dict_get_by_path(d, 'zag.z1.BOGUS') 74 | with pytest.raises(KeyError): 75 | dict_get_by_path(d, 'zag.z2.BOGUS') 76 | 77 | def test_what(self, dicts): 78 | d = dicts[0] 79 | with pytest.raises(KeyError) as e: 80 | dict_get_by_path(d, 'BOGUS', what="Test Dict") 81 | assert "'Test Dict'" in str(e) 82 | 83 | 84 | # 85 | # Internal Helpers 86 | # 87 | 88 | 89 | # ---------------------------------------------------------------------------- 90 | 91 | 92 | class TestAxObjectifiedDict(object): 93 | """ 94 | Test Ax ObjectifiedDict. 95 | """ 96 | 97 | # 98 | # Setup / Teardown 99 | # 100 | 101 | # 102 | # Tests 103 | # 104 | 105 | def test_good(self, dicts): 106 | d = dicts[0] 107 | od = ObjectifiedDict(d) 108 | assert od.foo == "Big Foo" 109 | assert od.zig == [10, 20, 30] 110 | assert od.zag.z1 == 1000 111 | assert od.zag.z2.a == 65 112 | 113 | def test_as_dict(self, dicts): 114 | d = dicts[0] 115 | od = ObjectifiedDict(d) 116 | od['foo'] = "Bigger Foo" 117 | assert od.foo == "Bigger Foo" 118 | assert 'foo' in od.keys() 119 | assert 'zig' in od.keys() 120 | assert od.has_key('zag') 121 | del od['foo'] 122 | with pytest.raises(AttributeError): 123 | od.foo 124 | 125 | def test_default(self, dicts): 126 | d = dicts[0] 127 | od = ObjectifiedDict(d, default="DEF") 128 | assert od.foo == "Big Foo" 129 | assert od.BOGUS == "DEF" 130 | assert od.zag.BOGUS == "DEF" 131 | 132 | def test_copy(self, dicts): 133 | d = dicts[0] 134 | od = ObjectifiedDict(d) 135 | od2 = od.copy() 136 | assert od2.foo == "Big Foo" 137 | od['foo'] = "Another Foo" 138 | assert od2.foo == "Big Foo" 139 | 140 | def test_bad_key(self, dicts): 141 | d = dicts[0] 142 | od = ObjectifiedDict(d) 143 | with pytest.raises(AttributeError): 144 | od.BOGUS 145 | with pytest.raises(AttributeError): 146 | od.foo.BOGUS 147 | with pytest.raises(AttributeError): 148 | od.zag.z1.BOGUS 149 | with pytest.raises(AttributeError): 150 | od.zag.z2.BOGUS 151 | 152 | def test_bad_misc(self, dicts): 153 | d = dicts[0] 154 | with pytest.raises(TypeError): 155 | od = ObjectifiedDict("Not a Dict") 156 | 157 | def test_what(self, dicts): 158 | d = dicts[0] 159 | od = ObjectifiedDict(d, what="Test Dict") 160 | with pytest.raises(AttributeError) as e: 161 | od.zag.z2.BOGUS 162 | assert "'Test Dict'" in str(e) 163 | 164 | 165 | # 166 | # Internal Helpers 167 | # 168 | 169 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/run/servant/request.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Servant Request encapsulation 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # 
---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | 16 | # ---------------------------------------------------------------------------- 17 | 18 | 19 | class ServantRequest(AxObj): 20 | """ 21 | Servant request encapsulation. 22 | 23 | Specifies a list of EROut plugins (by plugin id, loaded dynamically) 24 | and a list of Queries (by query id, from ServantConfig QuerySet). 25 | 26 | Each of the Queries will be run once and have their output processed 27 | by each EROut plugins. 28 | """ 29 | 30 | def __init__(self, **kwargs): 31 | """ 32 | Initialize, optionally overriding any default properties with kwargs. 33 | """ 34 | # Set valid default state: 35 | self._query_ids = list() # list(str) 36 | self._erout_plugin_ids = list() # list(str) 37 | self._collapse = False # bool 38 | self._noghosts = False # bool 39 | 40 | # Apply initial values from kwargs: 41 | self._init_kwargs(kwargs, [ 42 | 'query_ids', 'erout_plugin_ids', 'collapse', 'noghosts', 43 | ]) 44 | 45 | # Validate: 46 | self._assert_type_list_string( 47 | "query_ids", self.query_ids) 48 | self._assert_type_list_string( 49 | "erout_plugin_ids", self.erout_plugin_ids) 50 | 51 | 52 | # 53 | # Static Factory Methods 54 | # 55 | 56 | @staticmethod 57 | def from_params(params): 58 | """ 59 | Factory: return new ServantRequest parsed from str:str params dict. 60 | Useful for e.g. parsing web request params. 61 | Params supported: 62 | - query : CSL of query ids, e.g. "rev_new_mtd,rev_new_qtd" 63 | - erout : CSL of erout plugin ids, e.g. "csv,json" 64 | - collapse : '1' for collapse mode, or '0' (default) for normal 65 | - noghosts : '1' to disable ghosts, or '0' (default) for normal 66 | """ 67 | def parse_csl_ids(csl): 68 | return [id.strip() for id in filter(len, csl.split(','))] 69 | sreq = ServantRequest() 70 | sreq.query_ids = parse_csl_ids(params.get('query', '')) 71 | sreq.erout_plugin_ids = parse_csl_ids(params.get('erout', '')) 72 | if params.get('collapse') == '1': 73 | sreq.collapse = True 74 | if params.get('noghosts') == '1': 75 | sreq.noghosts = True 76 | return sreq 77 | 78 | 79 | # 80 | # Public Properties 81 | # 82 | 83 | @property 84 | def query_ids(self): 85 | """List of Query ids (strings) to execute.""" 86 | return self._query_ids 87 | @query_ids.setter 88 | def query_ids(self, val): 89 | self._assert_type_list_string("query_ids", val) 90 | self._query_ids = val 91 | 92 | @property 93 | def erout_plugin_ids(self): 94 | """List of EROut plugin ids (strings) to process results with.""" 95 | return self._erout_plugin_ids 96 | @erout_plugin_ids.setter 97 | def erout_plugin_ids(self, val): 98 | self._assert_type_list_string("erout_plugin_ids", val) 99 | self._erout_plugin_ids = val 100 | 101 | @property 102 | def collapse(self): 103 | """ 104 | Collapse all data into single point? (bool) 105 | Useful for capturing summaries (e.g. for bullet charts) of more 106 | complex query without having to duplicate queries in QuerySet. 107 | When collapsed: 108 | - query framespec granularity is set to match range unit 109 | - query framespec accumulate mode is enabled 110 | - only the last data point of each series is preserved 111 | """ 112 | return self._collapse 113 | @collapse.setter 114 | def collapse(self, val): 115 | self._assert_type_bool("collapse", val) 116 | self._collapse = val 117 | 118 | @property 119 | def noghosts(self): 120 | """ 121 | Remove all ghosts? (bool) 122 | Useful for capturing summaries (e.g. 
for bullet charts) of more 123 | complex query without having to duplicate queries in QuerySet. 124 | (Who you gonna call?) 125 | """ 126 | return self._noghosts 127 | @noghosts.setter 128 | def noghosts(self, val): 129 | self._assert_type_bool("noghosts", val) 130 | self._noghosts = val 131 | 132 | 133 | # 134 | # Internal Methods 135 | # 136 | 137 | def __unicode__(self): 138 | return (u"ServantRequest("+ 139 | "queries:{self.query_ids}, "+ 140 | "erouts:{self.erout_plugin_ids}, "+ 141 | "collapse:{self.collapse})" 142 | ).format(self=self) 143 | 144 | 145 | 146 | 147 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/run/servant/config.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Servant Config (construction config) 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | from axonchisel.metrics.foundation.metricdef.metset import MetSet 15 | from axonchisel.metrics.foundation.query.queryset import QuerySet 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | # Special extinfo domain for common property defaults. 22 | EXTINFO_DOMAIN_DEFAULT = '_default' 23 | 24 | 25 | # ---------------------------------------------------------------------------- 26 | 27 | 28 | class ServantConfig(AxObj): 29 | """ 30 | Construction configuration for Servant. 31 | See property docs for details. 32 | """ 33 | 34 | def __init__(self, **kwargs): 35 | """ 36 | Initialize, optionally overriding any default properties with kwargs. 37 | """ 38 | # Set valid default state: 39 | self._metset = None # (MetSet) 40 | self._queryset = None # (QuerySet) 41 | self._emfetch_extinfo = None # (dict) 42 | self._erout_extinfo = None # (dict) 43 | 44 | # Apply initial values from kwargs: 45 | self._init_kwargs(kwargs, [ 46 | 'metset', 'queryset', 47 | 'emfetch_extinfo', 'erout_extinfo', 48 | ]) 49 | 50 | 51 | # 52 | # Public Methods 53 | # 54 | 55 | def validate(self): 56 | """ 57 | Validate params in self against allowed values. 58 | Raise TypeError, ValueError if any problems. 59 | While much validation happens already via property accessors, 60 | this method does final validation on additional status and 61 | ensures default None properties have values. 62 | """ 63 | if self._metset is None: 64 | raise ValueError("ServantConfig invalid: missing MetSet") 65 | if self._queryset is None: 66 | raise ValueError("ServantConfig invalid: missing QuerySet") 67 | if self._emfetch_extinfo is None: 68 | raise ValueError("ServantConfig invalid: missing emfetch_extinfo") 69 | if self._erout_extinfo is None: 70 | raise ValueError("ServantConfig invalid: missing erout_extinfo") 71 | 72 | def erout_extinfo_for(self, plugin_id): 73 | """ 74 | Construct and return dict with EROut extinfo for given plugin_id. 75 | Uses _default dict (if any), extended with plugin_id dict. 
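        Illustrative (keys here are hypothetical):
            erout_extinfo = {'_default': {'site': 'example'},
                             'csv':      {'fout': buf}}
            erout_extinfo_for('csv')
            # -> {'site': 'example', 'fout': buf}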
76 | """ 77 | extinfo = dict(self.erout_extinfo.get(EXTINFO_DOMAIN_DEFAULT, {})) 78 | extinfo.update(self.erout_extinfo.get(plugin_id, {})) 79 | return extinfo 80 | 81 | 82 | # 83 | # Public Properties 84 | # 85 | 86 | @property 87 | def metset(self): 88 | """MetSet containing all metric definitions.""" 89 | return self._metset 90 | @metset.setter 91 | def metset(self, val): 92 | self._assert_type("metset", val, MetSet) 93 | self._metset = val 94 | 95 | @property 96 | def queryset(self): 97 | """QuerySet containing all query definitions.""" 98 | return self._queryset 99 | @queryset.setter 100 | def queryset(self, val): 101 | self._assert_type("queryset", val, QuerySet) 102 | self._queryset = val 103 | 104 | @property 105 | def emfetch_extinfo(self): 106 | """ 107 | Additional custom (site-specific, sensitive) info for EMFetchers. 108 | This must be a dict mapping from EMFetch Id (string) to extinfo dict. 109 | Key '_default' dict (if present) acts as base, extended by 110 | plugin-specific dicts. 111 | """ 112 | return self._emfetch_extinfo 113 | @emfetch_extinfo.setter 114 | def emfetch_extinfo(self, val): 115 | self._assert_type_mapping("emfetch_extinfo", val) 116 | self._emfetch_extinfo = val 117 | 118 | @property 119 | def erout_extinfo(self): 120 | """ 121 | Additional custom (site-specific, sensitive) info for EROuts. 122 | This must be a dict mapping from EROut Id (string) to extinfo dict. 123 | Key '_default' dict (if present) acts as base, extended by 124 | plugin-specific dicts. 125 | See macro method: self.erout_extinfo_for() 126 | """ 127 | return self._erout_extinfo 128 | @erout_extinfo.setter 129 | def erout_extinfo(self, val): 130 | self._assert_type_mapping("erout_extinfo", val) 131 | self._erout_extinfo = val 132 | 133 | 134 | # 135 | # Internal Methods 136 | # 137 | 138 | def __unicode__(self): 139 | # Note: do not reveal extinfo which may contain sensitive data. 140 | return (u"ServantConfig({self.metset}, {self.queryset})" 141 | .format(self=self)) 142 | 143 | 144 | 145 | 146 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/meter.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EROut plugin 'geckoboard_meter' 3 | 4 | Writes Geckoboard JSON output for various charts for use with 5 | http://www.geckoboard.com. 6 | 7 | Contents: 8 | - EROut_geckoboard_meter - speedometer style meter 9 | 10 | See: 11 | - https://developer.geckoboard.com/#geck-o-meter 12 | 13 | ------------------------------------------------------------------------------ 14 | Author: Dan Kamins 15 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 16 | """ 17 | 18 | 19 | # ---------------------------------------------------------------------------- 20 | 21 | 22 | from .base import EROut_geckoboard 23 | 24 | import logging 25 | log = logging.getLogger(__name__) 26 | 27 | 28 | # ---------------------------------------------------------------------------- 29 | 30 | 31 | class EROut_geckoboard_meter(EROut_geckoboard): 32 | """ 33 | EROut (Extensible Report Outputter) Plugin for Geckoboard "Geck-o-Meter". 34 | Adds JSON-serializable output to extinfo['jout'] dict. 35 | 36 | Typical usage is with non-collapsed query, default 'LAST' reduce function. 37 | If min/max is provided by QFormat, it is used as specified. 38 | Otherwise meter min/max is automatically determined by the min/max 39 | found in all data and ghost series.
40 | Min or max can be specified independently. E.g. min may be explicitly 41 | pegged at 0, while max is allowed to be auto-computed from data. 42 | 43 | Note that goal (if any) specified in Geckoboard widget config must be 44 | in min/max range, or the widget renders as an error. Thus if explicit 45 | goal is used in Geckoboard, the min/max should be defined explicitly too 46 | (unless the possible data range is predictable). 47 | 48 | QFormat support (under 'geckoboard_meter' or '_default'): 49 | reduce : (optional) Function from metricdef.FUNCS to reduce 50 | series with. Default 'LAST'. 51 | min : (optional) Meter min value. Default: based on all series. 52 | max : (optional) Meter max value. Default: based on all series. 53 | 54 | More info: 55 | - https://developer.geckoboard.com/#geck-o-meter 56 | 57 | Example JSON: 58 | { 59 | "item": 23, 60 | "min": { 61 | "value": 10 62 | }, 63 | "max": { 64 | "value": 30 65 | } 66 | } 67 | """ 68 | 69 | # 70 | # Abstract Method Implementations 71 | # 72 | 73 | # abstract 74 | def plugin_output(self, mdseries, query=None): 75 | """ 76 | EROut plugins must implement this abstract method. 77 | Invoked to output MultiDataSeries as specified. 78 | Returns nothing. Output target should be configured separately. 79 | """ 80 | log.debug("Outputting %s for query %s", mdseries, query) 81 | self._qfdomain = 'geckoboard_meter' 82 | 83 | # Write item from primary: 84 | self._write_primary() 85 | 86 | # Write min/max: 87 | self._write_min_max() 88 | 89 | 90 | # 91 | # Internal Methods 92 | # 93 | 94 | def _write_primary(self): 95 | """ 96 | Write the primary value to jout. 97 | Reduces first DataSeries. 98 | """ 99 | 100 | # Get primary value from first DataSeries: 101 | reduce_func = self._qformat_get('reduce', 'LAST') 102 | dseries = self.mdseries.get_series(0) 103 | value = dseries.reduce(reduce_func) 104 | 105 | # Add value: 106 | self.jout['item'] = value 107 | 108 | def _write_min_max(self): 109 | """ 110 | Write the meter min/max values. 111 | """ 112 | self._write_min() 113 | self._write_max() 114 | 115 | def _write_min(self): 116 | """ 117 | Compute and write the meter min value. 118 | """ 119 | # Initially check for explicit min: 120 | minval = self._qformat_get('min', None) 121 | 122 | # If no explicit minval, compute it from series: 123 | if minval is None: 124 | for dseries in self.mdseries.iter_series(): 125 | log.debug("MINCHECK %s, %s", minval, dseries) 126 | ds_minval = dseries.reduce('MIN') 127 | if minval is None: 128 | minval = ds_minval 129 | else: 130 | minval = min(minval, ds_minval) 131 | 132 | # Write min val: 133 | self.jout['min'] = { 'value': minval } 134 | 135 | def _write_max(self): 136 | """ 137 | Compute and write the meter max value.
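        (Mirrors _write_min(): an explicit QFormat 'max' takes precedence;
        otherwise the MAX reduce over all data and ghost series is used.)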
138 | """ 139 | # Initially check for explicit max: 140 | maxval = self._qformat_get('max', None) 141 | 142 | # If no explicit maxval, compute it from series: 143 | if maxval is None: 144 | for dseries in self.mdseries.iter_series(): 145 | ds_maxval = dseries.reduce('MAX') 146 | if maxval is None: 147 | maxval = ds_maxval 148 | else: 149 | maxval = max(maxval, ds_maxval) 150 | 151 | # Write max val: 152 | self.jout['max'] = { 'value': maxval } 153 | 154 | 155 | -------------------------------------------------------------------------------- /tests/test_erout.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test io.erout package 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | from StringIO import StringIO 15 | 16 | import axonchisel.metrics.foundation.chrono.timerange as timerange 17 | from axonchisel.metrics.io.erout.interface import EROut 18 | from axonchisel.metrics.io.erout.base import EROutBase 19 | import axonchisel.metrics.io.erout.plugins.ero_strbuf as ero_strbuf 20 | import axonchisel.metrics.io.erout.plugins.ero_csv as ero_csv 21 | 22 | 23 | # ---------------------------------------------------------------------------- 24 | 25 | 26 | class TestEROut(object): 27 | """ 28 | Test general EROut. 29 | """ 30 | 31 | # 32 | # Setup / Teardown 33 | # 34 | 35 | def setup_method(self, method): 36 | self.extinfo = {'a': 65, 'b': "LilB", 'special': { 'q': 34, 'z': 35 } } 37 | 38 | 39 | # 40 | # Tests 41 | # 42 | 43 | def test_base_not_impl(self, queries, mdseries): 44 | with pytest.raises(NotImplementedError): 45 | absbase = EROut(queries[2]) 46 | class FakeBase(EROut): 47 | def __init__(self, extinfo=None): 48 | pass 49 | absbase = FakeBase(queries[2]) 50 | with pytest.raises(NotImplementedError): 51 | absbase.plugin_create() 52 | with pytest.raises(NotImplementedError): 53 | absbase.plugin_destroy() 54 | with pytest.raises(NotImplementedError): 55 | absbase.plugin_output(mdseries[1], query=queries[2]) 56 | 57 | def test_plugin_extinfo(self, queries): 58 | ero = ero_strbuf.EROut_strbuf(extinfo=self.extinfo) 59 | assert ero.plugin_extinfo('a') == 65 60 | assert ero.plugin_extinfo('b') == "LilB" 61 | with pytest.raises(KeyError): 62 | ero.plugin_extinfo('BOGUS') 63 | assert ero.plugin_extinfo('BOGUS', default="D") == "D" 64 | 65 | def test_basic_strbuf(self, queries, mdseries): 66 | ero = ero_strbuf.EROut_strbuf(extinfo=self.extinfo) 67 | str(ero) 68 | str(ero.query) 69 | ero.plugin_create() 70 | for x in range(100): 71 | ero.output(mdseries[1], query=queries[2]) 72 | assert len(ero.buf_get_lines()) == 100 73 | assert ero.buf_get().splitlines() == ero.buf_get_lines() 74 | ero.buf_add_line(u"New line") 75 | assert len(ero.buf_get_lines()) == 101 76 | ero.buf_add_lines([u"New line", u"New line 2", u"New line 3"]) 77 | assert len(ero.buf_get_lines()) == 104 78 | ero.plugin_destroy() 79 | 80 | def test_format_str(self, queries, mdseries): 81 | ero = ero_strbuf.EROut_strbuf(extinfo=self.extinfo) 82 | ero.plugin_create() 83 | ero.output(mdseries[1], query=queries[2]) # (to set _query) 84 | assert ero._format_str("plain") == "plain" 85 | assert ero._format_str("My {extinfo.special.q} here") == "My 34 here" 86 | assert ero._format_str("My {query.id} here") == "My q2 here" 87 | ero.plugin_destroy() 88 
| 89 | 90 | # ---------------------------------------------------------------------------- 91 | 92 | 93 | class TestEROut_csv(object): 94 | """ 95 | Test EROut_csv. 96 | """ 97 | 98 | # 99 | # Setup / Teardown 100 | # 101 | 102 | def setup_method(self, method): 103 | self.buf = StringIO() 104 | self.extinfo = { 105 | 'fout': self.buf, 106 | } 107 | 108 | 109 | # 110 | # Tests 111 | # 112 | 113 | def test_plugin_basic(self, queries, mdseries): 114 | ero = ero_csv.EROut_csv(extinfo=self.extinfo) 115 | ero.plugin_create() 116 | ero.output(mdseries[3], query=queries[4]) 117 | lines = self.buf.getvalue().splitlines() 118 | assert lines[0] == 'query_id,series_id,ghost,tm_anchor,tm_begin_inc,tm_end_exc,value' 119 | assert lines[1] == 'q3,s\xc3\xa9r\xc3\xaees4,,2014-02-01 00:00:00,2014-02-01 00:00:00,2014-03-01 00:00:00,42' 120 | assert lines[6] == 'q3,s3,PREV_PERIOD1,2014-02-14 16:30:45,2014-02-14 16:30:45,2014-04-14 16:42:45,2' 121 | ero.plugin_destroy() 122 | 123 | def test_plugin_date_format1(self, queries, mdseries): 124 | ero = ero_csv.EROut_csv(extinfo=self.extinfo) 125 | ero._format_datetime('%Y-%m-%d', None) 126 | ero.configure(options={'date_format': '%Y-%m-%d'}) 127 | ero.plugin_create() 128 | ero.output(mdseries[3], query=queries[4]) 129 | lines = self.buf.getvalue().splitlines() 130 | assert lines[6] == 'q3,s3,PREV_PERIOD1,2014-02-14,2014-02-14,2014-04-14,2' 131 | ero.plugin_destroy() 132 | 133 | def test_plugin_date_format2(self, queries, mdseries): 134 | ero = ero_csv.EROut_csv(extinfo=self.extinfo) 135 | ero.configure(options={'date_format': '%s'}) 136 | ero.plugin_create() 137 | ero.output(mdseries[3], query=queries[4]) 138 | lines = self.buf.getvalue().splitlines() 139 | assert lines[6] == 'q3,s3,PREV_PERIOD1,1392424245,1392424245,1397518965,2' 140 | ero.plugin_destroy() 141 | 142 | 143 | 144 | 145 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/metricdef/filters.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - MetricDef Filter specification 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from collections import defaultdict 14 | 15 | from axonchisel.metrics.foundation.ax.obj import AxObj 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | # Filter allowed operations 22 | FILTER_OPS = { 23 | 'EQ': {}, 24 | } 25 | 26 | 27 | # ---------------------------------------------------------------------------- 28 | 29 | 30 | class Filters(AxObj): 31 | """ 32 | Part of a MetricDef, contains list of Filters. 
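    A minimal usage sketch (the field/op/value shown are illustrative only):
        filters = Filters()
        filters.add_filter(Filter(field='status', op='EQ', value='active'))
        filters.count_filters()      # -> 1
        filters[0].field             # -> 'status'  (list-style indexing)
        filters.safe_indexable[99]   # -> default empty Filter() for an out-of-range index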
33 | """ 34 | def __init__(self): 35 | self._filters = list() 36 | 37 | 38 | # 39 | # Public Methods 40 | # 41 | 42 | def add_filter(self, filter1): 43 | """Add a Filter to the list.""" 44 | self._assert_type("filter", filter1, Filter) 45 | self._filters.append(filter1) 46 | 47 | def count_filters(self): 48 | """Return number of Filters included.""" 49 | return len(self._filters) 50 | 51 | def get_filters(self): 52 | """Get (shallow copy of) list of filters.""" 53 | return list(self._filters) 54 | 55 | def remove_filter(self, filter1): 56 | """Remove first matching filter, or ValueError if no matches.""" 57 | self._filters.remove(filter1) 58 | 59 | def validate(self): 60 | """ 61 | Validate all contained Filters. 62 | Raise TypeError, ValueError if any problems. 63 | """ 64 | for f in self._filters: 65 | f.validate() 66 | 67 | 68 | # 69 | # Public Properties 70 | # 71 | 72 | @property 73 | def safe_indexable(self): 74 | """ 75 | Return indexable (by 0-based index) dict of filters, 76 | but with default Filters returned for indexes out of range. 77 | (Useful for dynamic string formatting in higher layers.) 78 | """ 79 | d = defaultdict(Filter) 80 | d.update(enumerate(self._filters)) 81 | return d 82 | 83 | 84 | # 85 | # Internal Methods 86 | # 87 | 88 | def __getitem__(self, key): 89 | """Allow indexing like a list itself""" 90 | return self._filters[key] 91 | 92 | def __unicode__(self): 93 | return (u"Filters({filters})" 94 | ).format(self=self, 95 | filters=u", ".join(map(unicode, self._filters)) 96 | ) 97 | 98 | 99 | # ---------------------------------------------------------------------------- 100 | 101 | 102 | class Filter(AxObj): 103 | """ 104 | Field filter representation. 105 | """ 106 | 107 | def __init__(self, **kwargs): 108 | """ 109 | Initialize, optionally overriding any default properties with kwargs. 110 | """ 111 | # Set valid default state: 112 | self.field = '' 113 | self.op = 'EQ' # from FILTER_OPS 114 | self.value = '' 115 | 116 | # Apply initial values from kwargs: 117 | self._init_kwargs(kwargs, [ 118 | 'field', 'op', 'value', 119 | ]) 120 | 121 | 122 | # 123 | # Public Methods 124 | # 125 | 126 | def validate(self): 127 | """ 128 | Validate params in self against allowed values. 129 | Raise TypeError, ValueError if any problems. 130 | While much validation happens already via property accessors, 131 | this method does final validation on additional status. 
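        (Currently this amounts to requiring a non-empty 'field'; 'op' is
        already restricted to FILTER_OPS by its property setter.)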
132 | """ 133 | if not self.field: 134 | raise ValueError("Missing Filter field") 135 | 136 | 137 | # 138 | # Public Properties 139 | # 140 | 141 | @property 142 | def field(self): 143 | """Field to filter on.""" 144 | return self._field 145 | @field.setter 146 | def field(self, val): 147 | self._assert_type_string("field", val) 148 | self._field = val 149 | 150 | @property 151 | def op(self): 152 | """Operator for filter comparison.""" 153 | return self._op 154 | @op.setter 155 | def op(self, val): 156 | self._assert_type_string("op", val) 157 | self._assert_value("op", val, FILTER_OPS) 158 | self._op = val 159 | 160 | @property 161 | def value(self): 162 | """Filter value.""" 163 | return self._value 164 | @value.setter 165 | def value(self, val): 166 | self._value = val 167 | 168 | 169 | # 170 | # Internal Methods 171 | # 172 | 173 | def __cmp__(self, other): 174 | """Compare filters, mainly for equality.""" 175 | if self.field != other.field: 176 | return cmp(self.field, other.field) 177 | if self.op != other.op: 178 | return cmp(self.op, other.op) 179 | if self.value != other.value: 180 | return cmp(self.value, other.value) 181 | return 0 182 | 183 | def __unicode__(self): 184 | return (u"Filter({self.field} {self.op} '{self.value}')" 185 | .format(self=self) 186 | ) 187 | 188 | 189 | 190 | -------------------------------------------------------------------------------- /tests/test_mdefl.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test foundation MDefL parsing 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | import re 15 | import yaml 16 | 17 | import axonchisel.metrics.foundation.metricdef.metricdef as metricdef 18 | import axonchisel.metrics.foundation.metricdef.mdefl as mdefl 19 | 20 | from .util import load_test_asset 21 | 22 | 23 | # ---------------------------------------------------------------------------- 24 | 25 | 26 | class TestMetricDefParse(object): 27 | """ 28 | Test MetricDef parsing. 
29 | """ 30 | 31 | # 32 | # Setup / Teardown 33 | # 34 | 35 | def setup_method(self, method): 36 | self.yaml_metric1 = load_test_asset('metricdef1.yml') 37 | self.parser1 = mdefl.MetricDefParser() 38 | 39 | # 40 | # Tests 41 | # 42 | 43 | def test_parse_simple(self): 44 | mdef2 = self.parser1.parse_ystr_metric(self.yaml_metric1) 45 | mdef2.validate() 46 | mdef3 = self.parser1.get_metricdef() 47 | assert mdef2 == mdef3 48 | mdef3.validate() 49 | 50 | def test_str(self): 51 | str(self.parser1) 52 | 53 | def test_parse_missing_ok(self): 54 | missing_ok = [ 55 | 'data_field', 'data_type', 'emfetch_opts', 56 | ] 57 | for m in missing_ok: 58 | qobj = yaml.load(self.yaml_metric1) 59 | del(qobj[m]) 60 | mdef2 = self.parser1.parse_ystr_metric(yaml.dump(qobj)) 61 | mdef2.validate() 62 | 63 | def test_parse_missing_filters(self): 64 | qobj = yaml.load(self.yaml_metric1) 65 | qobj['filters'] = 12345 # not list 66 | with pytest.raises(mdefl.MDefLParseError): 67 | self.parser1.parse_ystr_metric(yaml.dump(qobj)) 68 | del(qobj['filters']) 69 | mdef2 = self.parser1.parse_ystr_metric(yaml.dump(qobj)) 70 | mdef2.validate() 71 | 72 | def test_parse_verify(self): 73 | mdef2 = self.parser1.parse_ystr_metric(self.yaml_metric1) 74 | mdef2.validate() 75 | assert mdef2.emfetch_id == 'mysql' 76 | assert mdef2.emfetch_opts.get('db') == 'mydb1' 77 | assert mdef2.table == 'first_sales' 78 | assert mdef2.func == 'COUNT' 79 | assert mdef2.time_field == 'timeCreated' 80 | assert mdef2.time_type == 'TIME_EPOCH_SECS' 81 | assert mdef2.data_field == 'myfield' 82 | assert mdef2.data_type == 'NUM_INT' 83 | assert mdef2.filters.count_filters() == 2 84 | filter2 = metricdef.Filter(field='foo', op='EQ', value=123) 85 | assert mdef2.filters.get_filters()[0] == filter2 86 | 87 | def test_reset(self): 88 | mdef2 = self.parser1.parse_ystr_metric(self.yaml_metric1) 89 | self.parser1.reset() 90 | self.parser1.reset(base=mdef2) 91 | with pytest.raises(TypeError): 92 | self.parser1.reset(base='Not MetricDef') 93 | 94 | # 95 | # Internal Helpers 96 | # 97 | 98 | 99 | # ---------------------------------------------------------------------------- 100 | 101 | 102 | class TestMetSetParse(object): 103 | """ 104 | Test MetSet parsing. 
105 | """ 106 | 107 | # 108 | # Setup / Teardown 109 | # 110 | 111 | def setup_method(self, method): 112 | self.yaml_metset1 = load_test_asset('metset1.yml') 113 | self.parser1 = mdefl.MetSetParser() 114 | 115 | # 116 | # Tests 117 | # 118 | 119 | def test_parse_simple(self): 120 | metset2 = self.parser1.parse_ystr_metset(self.yaml_metset1) 121 | metset2.validate() 122 | metset3 = self.parser1.get_metset() 123 | assert metset2 == metset3 124 | metset3.validate() 125 | 126 | def test_str(self): 127 | str(self.parser1) 128 | 129 | def test_parse_table_defaults(self): 130 | qobj = yaml.load(self.yaml_metset1) 131 | del(qobj['table_defaults'][1]['table']) 132 | with pytest.raises(mdefl.MDefLParseError): 133 | self.parser1.parse_ystr_metset(yaml.dump(qobj)) 134 | qobj['table_defaults'] = 12345 # not list 135 | with pytest.raises(mdefl.MDefLParseError): 136 | self.parser1.parse_ystr_metset(yaml.dump(qobj)) 137 | del(qobj['table_defaults']) 138 | self.parser1.parse_ystr_metset(yaml.dump(qobj)) 139 | 140 | def test_parse_metrics(self): 141 | qobj = yaml.load(self.yaml_metset1) 142 | del(qobj['metrics'][1]['id']) 143 | with pytest.raises(mdefl.MDefLParseError): 144 | self.parser1.parse_ystr_metset(yaml.dump(qobj)) 145 | qobj['metrics'] = 12345 # not list 146 | with pytest.raises(mdefl.MDefLParseError): 147 | self.parser1.parse_ystr_metset(yaml.dump(qobj)) 148 | del(qobj['metrics']) 149 | with pytest.raises(mdefl.MDefLParseError): 150 | self.parser1.parse_ystr_metset(yaml.dump(qobj)) 151 | 152 | def test_parse_verify(self): 153 | metset2 = self.parser1.parse_ystr_metset(self.yaml_metset1) 154 | metset2.validate() 155 | assert metset2.count_metrics() == 4 156 | mdef2 = metset2.get_metric_by_id('rev_new_sales') 157 | assert mdef2.table == 'first_sales' # (explicit) 158 | assert mdef2.time_type == 'TIME_EPOCH_SECS' # (from tbldef) 159 | assert mdef2.filters.count_filters() == 2 # (from tbldef) 160 | 161 | # 162 | # Internal Helpers 163 | # 164 | 165 | 166 | 167 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - setup.py Packaging and Installation Support 3 | 4 | Note to developers: 5 | System utility 'pandoc' is required for building PyPi package. 6 | Install on OS X with 'brew install pandoc' (~1 hr unattended). 
7 | More info: http://johnmacfarlane.net/pandoc/ 8 | 9 | ------------------------------------------------------------------------------ 10 | Author: Dan Kamins 11 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 12 | """ 13 | 14 | 15 | # ---------------------------------------------------------------------------- 16 | 17 | 18 | from __future__ import print_function 19 | from sys import stderr 20 | import os 21 | import re 22 | from subprocess import Popen, PIPE 23 | from setuptools import setup, find_packages 24 | 25 | 26 | # ---------------------------------------------------------------------------- 27 | # Setup dependencies and packages 28 | 29 | 30 | install_requires = [ 31 | 'PyYAML>=3.11', # YAML parse support (used by MDefL and MQL parsers) 32 | 'requests>=2.4.3', # HTTP requests for humans (used by EMFetcher_http) 33 | ] 34 | 35 | test_requires = [ 36 | 'pytest>=2.6.4', # py.test test harness 37 | 'coverage>=3.7.1', # code coverage assessment 38 | ] 39 | 40 | 41 | # ---------------------------------------------------------------------------- 42 | # Setup utility functions 43 | 44 | 45 | def read_file(fname): 46 | """Read and return contents of relative file.""" 47 | return open(os.path.join(os.path.dirname(__file__), fname)).read() 48 | 49 | def exec_file(fname): 50 | """Execute contents of relative Python file in global scope.""" 51 | exec(read_file(fname)) in globals() 52 | 53 | def md_to_rst(s, fname='?'): 54 | """ 55 | Return reStructuredText equiv string contents of Markdown string. 56 | If conversion (via 'pandoc' cmdline) fails, returns raw Markdown. 57 | Requires pandoc system utility: http://johnmacfarlane.net/pandoc/ 58 | Optional fname arg used only for logging/error message. 59 | """ 60 | try: 61 | args = ['pandoc', '-r', 'markdown', '-w', 'rst'] 62 | p = Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE) 63 | (pout, perr) = p.communicate(s) 64 | if p.returncode == 0: 65 | return pout 66 | raise ValueError("pandoc exit %d, stderr: %s" % (p.returncode, perr)) 67 | except Exception as e: 68 | print("notice: error converting '%s' MD to RST " 69 | "(probably harmless, likely missing 'pandoc' utility)" 70 | ": " % fname, e, file=stderr) 71 | return s 72 | 73 | def pypi_md_clean(s): 74 | """ 75 | Sanitize Markdown string to remove markup that would break PyPi rendering 76 | when converted to reStructuredText (as of 2014-12). 77 | """ 78 | # Remove relative and anchor links: 79 | re_rel_link = re.compile(r'\[([^\]]*)\]\((\.[^\)]*)\)', re.DOTALL) 80 | re_anchor_link = re.compile(r'\[([^\]]*)\]\((#[^\)]*)\)', re.DOTALL) 81 | s = re.sub(re_rel_link, r'\1', s) 82 | s = re.sub(re_anchor_link, r'\1', s) 83 | return s 84 | 85 | def pypi_longdesc(): 86 | """ 87 | Load and convert README.md to PyPi compatible (2014) RST string. 88 | If errors occur (such as 'pandoc' utility not installed), the raw 89 | Markdown file is returned, which will probably look ugly in PyPi. 
90 | """ 91 | return md_to_rst(pypi_md_clean(read_file('README.md')), 'README.md') 92 | 93 | 94 | # ---------------------------------------------------------------------------- 95 | # Setup main 96 | 97 | 98 | exec_file('py/axonchisel/metrics/version.py') # (sets __version__ global) 99 | 100 | setup( 101 | name = "Ax_Metrics", 102 | version = __version__, 103 | license = "MIT", 104 | author = "Dan Kamins", 105 | author_email = "dos@axonchisel.net", 106 | url = "https://github.com/axonchisel/ax_metrics", 107 | platforms = "Any", 108 | description = "\"BI Glue\" Business Intelligence middleware library for " 109 | "aggregation of metrics/KPI from any source and custom " 110 | "reporting for humans or other APIs", 111 | keywords = [ 112 | "business intelligence", "BI", "data", "data warehouse", "DW", "BIDW", 113 | "metrics", "KPI", "analytics", 114 | "middleware", "library", "api", 115 | "report", "chart", "dashboard", 116 | ], 117 | classifiers = [ 118 | "Development Status :: 4 - Beta", 119 | "Environment :: Web Environment", 120 | "Environment :: Other Environment", 121 | "Intended Audience :: Developers", 122 | "Intended Audience :: Information Technology", 123 | "Intended Audience :: Science/Research", 124 | "Intended Audience :: System Administrators", 125 | "Intended Audience :: Telecommunications Industry", 126 | "License :: OSI Approved :: MIT License", 127 | "Operating System :: OS Independent", 128 | "Programming Language :: Python :: 2", 129 | "Programming Language :: Python :: 2.6", 130 | "Programming Language :: Python :: 2.7", 131 | "Topic :: Database", 132 | "Topic :: Office/Business :: Financial", 133 | "Topic :: Scientific/Engineering :: Visualization", 134 | "Topic :: Software Development :: Libraries :: Python Modules", 135 | "Topic :: System :: Monitoring", 136 | ], 137 | long_description = pypi_longdesc(), 138 | package_dir = {'': 'py'}, 139 | packages = find_packages(where='py'), 140 | 141 | # These extra options trigger harmless distutils warning. ignore. 142 | # setuptools (including pip) likes and uses it. 143 | install_requires = install_requires, 144 | tests_require = test_requires, # (yes, 'tests_require') 145 | ) 146 | 147 | -------------------------------------------------------------------------------- /tests/test_erout_geckoboard.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test EROut_geckoboard (geckoboard.com output) 3 | 4 | STATUS NOTE: 5 | This module currently has insufficient test coverage. 6 | The individual EROut plugins are executed to ensure the code is run, 7 | but the output is not really verified at this point. 
8 | 9 | 10 | ------------------------------------------------------------------------------ 11 | Author: Dan Kamins 12 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 13 | """ 14 | 15 | 16 | # ---------------------------------------------------------------------------- 17 | 18 | 19 | import pytest 20 | import logging 21 | import json 22 | 23 | import axonchisel.metrics.io.erout.plugins.ero_geckoboard as ero_geckoboard 24 | from axonchisel.metrics.run.servant.config import ServantConfig 25 | from axonchisel.metrics.run.servant.request import ServantRequest 26 | from axonchisel.metrics.run.servant.state import ServantState 27 | from axonchisel.metrics.run.servant.servant import Servant 28 | 29 | from .util import log_config, load_metset, load_queryset 30 | 31 | 32 | # ---------------------------------------------------------------------------- 33 | 34 | 35 | def setup_module(module): 36 | log_config(level=logging.INFO) 37 | # log_config(level=logging.DEBUG) 38 | 39 | 40 | # ---------------------------------------------------------------------------- 41 | 42 | 43 | class TestEROut_geckoboard(object): 44 | """ 45 | Test general EROut. 46 | """ 47 | 48 | # 49 | # Setup / Teardown 50 | # 51 | 52 | def setup_method(self, method): 53 | self.jout = dict() 54 | self.emfetch_extinfo = { 55 | } 56 | self.erout_extinfo = { 57 | '_default': { 58 | 'jout': self.jout, 59 | } 60 | } 61 | self.metset1 = load_metset( 'mqe-metset1.yml' ) 62 | self.queryset1 = load_queryset( 'queryset2.yml' ) 63 | self.sconfig = ServantConfig( 64 | metset = self.metset1, 65 | queryset = self.queryset1, 66 | emfetch_extinfo = self.emfetch_extinfo, 67 | erout_extinfo = self.erout_extinfo, 68 | ) 69 | self.query_ids = [ 70 | 'new_users_mtd', 71 | 'new_users_r7d', 72 | 'rev_new_sales_qtd', 73 | 'cancels_r7d', 74 | ] 75 | self.erout_plugin_ids = [] 76 | self.sreq = ServantRequest( 77 | query_ids=self.query_ids, 78 | erout_plugin_ids=self.erout_plugin_ids, 79 | ) 80 | 81 | 82 | 83 | # 84 | # Tests 85 | # 86 | 87 | def test_bullet(self): 88 | self.erout_extinfo.update({ 89 | 'geckoboard_bullet': { 90 | } 91 | }) 92 | servant = Servant(self.sconfig) 93 | self.sreq.collapse = True 94 | self.sreq.noghosts = True 95 | # self.sreq.query_ids = [ 'new_users_r7d' ] # TEMP 96 | self.sreq.erout_plugin_ids = ['geckoboard_bullet'] 97 | servant.process(self.sreq) 98 | jstr = json.dumps(self.jout, indent=4) 99 | # print jstr 100 | jout2 = json.loads(jstr) 101 | assert len(jout2['item']) == 4 102 | assert jout2['orientation'] == 'vertical' 103 | assert jout2['item'][0]['label'] == "New Users" 104 | 105 | def test_numsec_comp(self): 106 | self.erout_extinfo.update({ 107 | 'geckoboard_numsec_comp': { 108 | } 109 | }) 110 | servant = Servant(self.sconfig) 111 | self.sreq.collapse = True 112 | self.sreq.noghosts = False 113 | self.sreq.query_ids = [ 'rev_new_sales_qtd' ] 114 | # self.sreq.query_ids = [ 'cancels_r7d' ] 115 | self.sreq.erout_plugin_ids = [ 'geckoboard_numsec_comp' ] 116 | servant.process(self.sreq) 117 | jstr = json.dumps(self.jout, indent=4) 118 | # print jstr 119 | # TODO 120 | 121 | def test_numsec_trend(self): 122 | self.erout_extinfo.update({ 123 | 'geckoboard_numsec_trend': { 124 | } 125 | }) 126 | servant = Servant(self.sconfig) 127 | self.sreq.collapse = False 128 | self.sreq.noghosts = True 129 | self.sreq.query_ids = [ 'new_users_r7d' ] 130 | self.sreq.erout_plugin_ids = [ 'geckoboard_numsec_trend' ] 131 | servant.process(self.sreq) 132 | jstr = json.dumps(self.jout, indent=4) 133 | # print jstr 134 | # TODO 135 | 136 | def 
test_meter(self): 137 | self.erout_extinfo.update({ 138 | 'geckoboard_meter': { 139 | } 140 | }) 141 | servant = Servant(self.sconfig) 142 | self.sreq.query_ids = [ 'new_users_r7d' ] 143 | self.sreq.erout_plugin_ids = [ 'geckoboard_meter' ] 144 | servant.process(self.sreq) 145 | jstr = json.dumps(self.jout, indent=4) 146 | # print jstr 147 | # TODO 148 | 149 | def test_text(self): 150 | self.erout_extinfo.update({ 151 | 'geckoboard_text': { 152 | } 153 | }) 154 | servant = Servant(self.sconfig) 155 | self.sreq.collapse = True 156 | # self.sreq.noghosts = True 157 | self.sreq.query_ids = [ 'cancels_r7d' ] 158 | self.sreq.erout_plugin_ids = [ 'geckoboard_text' ] 159 | servant.process(self.sreq) 160 | jstr = json.dumps(self.jout, indent=4) 161 | # print jstr 162 | # TODO 163 | 164 | def test_rag(self): 165 | self.erout_extinfo.update({ 166 | 'geckoboard_rag': { 167 | } 168 | }) 169 | servant = Servant(self.sconfig) 170 | self.sreq.collapse = True 171 | self.sreq.noghosts = True 172 | self.sreq.query_ids = [ 'cancels_r7d', 'cancels_r7d' ] 173 | self.sreq.erout_plugin_ids = [ 'geckoboard_rag' ] 174 | servant.process(self.sreq) 175 | jstr = json.dumps(self.jout, indent=4) 176 | # print jstr 177 | # TODO 178 | 179 | 180 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/io/erout/plugins/ero_geckoboard/rag.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - EROut plugin 'geckoboard_rag' 3 | 4 | Writes Geckoboard JSON output for various charts for use with 5 | http://www.geckoboard.com. 6 | 7 | Contents: 8 | - EROut_geckoboard_rag - red/amber/green display of (1-)3 numbers 9 | 10 | See: 11 | - https://developer.geckoboard.com/#rag 12 | 13 | ------------------------------------------------------------------------------ 14 | Author: Dan Kamins 15 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 16 | """ 17 | 18 | 19 | # ---------------------------------------------------------------------------- 20 | 21 | 22 | from .base import EROut_geckoboard 23 | 24 | import logging 25 | log = logging.getLogger(__name__) 26 | 27 | 28 | # ---------------------------------------------------------------------------- 29 | 30 | 31 | class EROut_geckoboard_rag(EROut_geckoboard): 32 | """ 33 | EROut (Extensible Report Outputter) Plugin for Geckoboard RAG. 34 | Adds JSON-serializable output to extinfo['jout'] dict. 35 | 36 | Typical usage is with 1 collapsed query with 3 QMetrics, 37 | or several collapsed queries totalling 3 QMetrics, 38 | default 'LAST' reduce function, and ghosts disabled. 39 | This prevents needless queries from running. 40 | Non-collapsed queries with other reduce functions may be used too. 41 | 42 | The first 3 data series passed in to this EROut are reduced and used 43 | as the red, amber, and green values. 44 | If any of the colors are disabled by QFormat, then fewer than 3 45 | data series are used. I.e. if amber is disabled, the first two series 46 | will be treated as red and green. 47 | 48 | QMetric 'rag' parameter is not used, nor are 'impact', etc. 49 | Just raw reduced numbers and QFormat. 50 | 51 | QFormat support (under 'geckoboard_rag' or '_default'): 52 | reduce : (optional) Function from metricdef.FUNCS to reduce 53 | series with. Default 'LAST'. 54 | prefix : (optional) prefix for value, e.g. "$" 55 | red : label for red value, or "OFF" (False) to skip. 56 | amber : label for amber value, or "OFF" (False) to skip.
57 | green : label for green value, or "OFF" (False) to skip. 58 | 59 | More info: 60 | - https://developer.geckoboard.com/#rag 61 | 62 | Example JSON: 63 | { 64 | "item": [ 65 | { 66 | "value": 16, 67 | "text": "Long past due" 68 | }, 69 | { 70 | "value": 64, 71 | "text": "Overdue" 72 | }, 73 | { 74 | "value": 32, 75 | "text": "Due" 76 | } 77 | ] 78 | } 79 | """ 80 | 81 | # 82 | # Abstract Method Implementations 83 | # 84 | 85 | # abstract 86 | def plugin_output(self, mdseries, query=None): 87 | """ 88 | EROut plugins must implement this abstract method. 89 | Invoked to output MultiDataSeries as specified. 90 | Returns nothing. Output target should be configured separately. 91 | """ 92 | log.debug("Outputting %s for query %s", mdseries, query) 93 | self._qfdomain = 'geckoboard_rag' 94 | 95 | # Write options: 96 | self._write_options() 97 | 98 | # Write as many colors as possible given data we have: 99 | self._write_colors() 100 | 101 | 102 | # 103 | # Internal Methods 104 | # 105 | 106 | def _write_options(self): 107 | """ 108 | Write options to jout. 109 | """ 110 | if self.query: 111 | try: 112 | qformat = self.query.qformat 113 | self.jout['prefix'] = qformat.get(self._qfdomain, 'prefix') 114 | except KeyError: 115 | pass 116 | 117 | def _write_colors(self): 118 | """ 119 | Write as many colors to jout as possible given data we have. 120 | Tries to build up the colors progressively, allowing for multiple 121 | metrics and/or multiple passes from multiple queries. 122 | """ 123 | # Start based on how many we have so far: 124 | QF_RAGKEYS = ['red', 'amber', 'green'] # QFormat rag keys 125 | qfidx = len(self.jout['item']) 126 | 127 | # Keep going until we have all of the colors: 128 | iter_series = self.mdseries.iter_series() 129 | while qfidx < len(QF_RAGKEYS): 130 | 131 | # Identify this color: 132 | qfragkey = QF_RAGKEYS[qfidx] 133 | qfidx += 1 134 | label = self._qformat_get(qfragkey, "") 135 | 136 | # If instructed by QFormat to skip it, write an empty one: 137 | if label == False: 138 | self.jout['item'].append( {} ) 139 | continue 140 | 141 | # Try to pull and write the next color from data series: 142 | try: 143 | dseries = next(iter_series) 144 | except StopIteration: # No series available. 145 | # Hopefully another query will provide the rest. 146 | return 147 | self._write_colors_series(label, dseries) 148 | 149 | def _write_colors_series(self, label, dseries): 150 | """Write the next color from given DataSeries.""" 151 | 152 | self._dseries = dseries 153 | 154 | # Reduce series to single value by reduce func. 155 | # Usually func 'LAST' with collapsed series (Servant option), 156 | # but other operations can be useful too, e.g. AVG, etc. 157 | reduce_func = self._qformat_get('reduce', 'LAST') 158 | self._value = self._dseries.reduce(reduce_func) 159 | 160 | # Prep JSON-serializable template to fill in: 161 | self._jitem = { 162 | "text": label, 163 | "value": self._value, 164 | } 165 | self.jout['item'].append(self._jitem) 166 | 167 | 168 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/chrono/timerange.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Definition of time ranges for queries.
3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from datetime import datetime, timedelta 14 | 15 | from axonchisel.metrics.foundation.ax.obj import AxObj 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | # Smallest unit of time (timedelta) that TimeRange can represent 22 | TIMERANGE_PRECISION = timedelta(microseconds=1) 23 | 24 | 25 | # ---------------------------------------------------------------------------- 26 | 27 | 28 | class TimeRange(AxObj): 29 | """ 30 | Single time range with beginning/end time to microsecond precision 31 | and optional anchor point. 32 | 33 | Anchor point commonly refers to the beginning of the period as would 34 | be labeled on the X-axis of a chart, but may be inside the period 35 | such as when smoothing is in effect. 36 | 37 | Provides get/set access to beginning and end times with both inclusive 38 | and exclusive semantics (on same points), with properties: 39 | - inc_begin 40 | - exc_begin 41 | - inc_end 42 | - exc_end 43 | 44 | Optional anchor (may be None) property: 45 | - anchor 46 | 47 | TimeRange objects are initially invalid and require setting begin and end 48 | points before use. 49 | """ 50 | 51 | def __init__(self, **kwargs): 52 | """ 53 | Initialize, optionally overriding any default properties with kwargs. 54 | By default, the begin and end times are unspecified, resulting 55 | in an invalid TimeRange until those attributes are set. 56 | """ 57 | # Set default state: 58 | self._anchor = None # datetime, internal optional 59 | self._inc_begin = None # datetime, internal 60 | self._exc_end = None # datetime, internal 61 | 62 | # Apply initial values from kwargs: 63 | self._init_kwargs(kwargs, [ 64 | 'anchor', 'inc_begin', 'exc_begin', 'inc_end', 'exc_end' 65 | ]) 66 | 67 | # 68 | # Public Methods 69 | # 70 | 71 | def is_valid(self): 72 | """ 73 | Check T/F if TimeRange is valid (has valid begin/end). 74 | """ 75 | try: 76 | self.validate() 77 | return True 78 | except (TypeError, ValueError) as e: 79 | return False 80 | 81 | def is_anchored(self): 82 | """ 83 | Check T/F if TimeRange is anchored (has an anchor point set). 84 | """ 85 | return self.anchor is not None 86 | 87 | def validate(self): 88 | """ 89 | Validate self. 90 | Raise TypeError, ValueError if any problems, e.g. begin/end unspecified.
91 | """ 92 | if self._inc_begin is None: 93 | raise ValueError(("TimeRange {self} missing begin point") 94 | .format(self=self)) 95 | if self._exc_end is None: 96 | raise ValueError(("TimeRange {self} missing end point") 97 | .format(self=self)) 98 | 99 | 100 | # 101 | # Public Properties 102 | # 103 | 104 | @property 105 | def anchor(self): 106 | """Anchor datetime within range (optional, may be None).""" 107 | return self._anchor 108 | @anchor.setter 109 | def anchor(self, val): 110 | if val is not None: 111 | self._assert_type_datetime("anchor", val) 112 | self._anchor = val 113 | 114 | @property 115 | def inc_begin(self): 116 | """Inclusive datetime beginning of range (first moment in).""" 117 | return self._inc_begin 118 | @inc_begin.setter 119 | def inc_begin(self, val): 120 | self._assert_type_datetime("inc_begin", val) 121 | self._inc_begin = val 122 | 123 | @property 124 | def exc_begin(self): 125 | """Exclusive datetime beginning of range (last moment before).""" 126 | return self._inc_begin - TIMERANGE_PRECISION 127 | @exc_begin.setter 128 | def exc_begin(self, val): 129 | self._assert_type_datetime("exc_begin", val) 130 | self._inc_begin = val + TIMERANGE_PRECISION 131 | 132 | @property 133 | def inc_end(self): 134 | """Inclusive datetime end of range (last moment in).""" 135 | return self._exc_end - TIMERANGE_PRECISION 136 | @inc_end.setter 137 | def inc_end(self, val): 138 | self._assert_type_datetime("inc_end", val) 139 | self._exc_end = val + TIMERANGE_PRECISION 140 | 141 | @property 142 | def exc_end(self): 143 | """Exclusive datetime end of range (first moment after).""" 144 | return self._exc_end 145 | @exc_end.setter 146 | def exc_end(self, val): 147 | self._assert_type_datetime("exc_end", val) 148 | self._exc_end = val 149 | 150 | @property 151 | def duration(self): 152 | """Duration (timedelta) of range, read-only. Return 0 when invalid.""" 153 | if self._exc_end and self._inc_begin: 154 | return self._exc_end - self._inc_begin 155 | return 0 156 | 157 | 158 | # 159 | # Internal Methods 160 | # 161 | 162 | def __unicode__(self): 163 | return (u"TimeRange({dur} = [{begin}..{end}) anchor {anchor})" 164 | ).format( 165 | dur=self.duration, begin=self._inc_begin, end=self._exc_end, 166 | anchor=self._anchor 167 | ) 168 | 169 | 170 | # NOTE: Consider defining comparison operators like __lt__, __ge__, ... 171 | # See https://docs.python.org/2/reference/datamodel.html#object.__lt__ 172 | # Semantics: 173 | # - less/greater than should indicate entire range is older/newer than. 174 | # - equal should indicate exact match including anchor point. 175 | # Thus a TimeRange may be none of equal, less than, or greater than 176 | # another TimeRange, and that's OK -- it means they overlap somewhere. 
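# Illustrative sketch of the inclusive/exclusive accessors (dates are arbitrary):
#   tr = TimeRange(inc_begin=datetime(2014, 2, 1), exc_end=datetime(2014, 3, 1))
#   tr.inc_begin   # 2014-02-01 00:00:00.000000  (first moment in range)
#   tr.exc_begin   # 2014-01-31 23:59:59.999999  (inc_begin - TIMERANGE_PRECISION)
#   tr.inc_end     # 2014-02-28 23:59:59.999999  (exc_end - TIMERANGE_PRECISION)
#   tr.exc_end     # 2014-03-01 00:00:00.000000  (first moment after range)
#   tr.duration    # timedelta(days=28)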
177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/ax/obj.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - AxObj foundation base class 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import collections 14 | from datetime import datetime 15 | 16 | 17 | # ---------------------------------------------------------------------------- 18 | 19 | 20 | class AxObj(object): 21 | """ 22 | Base class for Ax_Metrics classes. 23 | 24 | Assertion Usage Note: 25 | Failures in the _assert_* methods typically include {self} within 26 | the raised exception messages. If your __str__ / __unicode__ methods 27 | reference attributes on self, then you MUST define sane value for those 28 | attributes prior to invoking any assertions, or your assert exceptions 29 | will trigger their own confusing AttributeErrors when attempting to 30 | format the message! 31 | """ 32 | 33 | # 34 | # Internal Methods: Init 35 | # 36 | 37 | def _init_kwargs(self, kwargs, kws): 38 | """ 39 | Set all attributes (named in kws) on self with vals from kwargs. 40 | """ 41 | for k in kws: 42 | if k in kwargs: 43 | setattr(self, k, kwargs[k]) 44 | 45 | 46 | # 47 | # Internal Methods: Debug 48 | # 49 | 50 | def _get_debug_name(self): 51 | """Return object name suitable for error msg, stack trace, etc.""" 52 | name = u"{cls}".format(cls=self.__class__.__name__) 53 | if hasattr(self, 'id'): 54 | name += u" #{id}".format(id=self.id) 55 | return name 56 | 57 | 58 | # 59 | # Internal Methods: Type/Value Checks 60 | # 61 | 62 | def _assert_not_none(self, name, val): 63 | """ 64 | Raise ValueError if val is None. 65 | Not necessary if _assert_type is called, but useful on its own. 66 | """ 67 | if val is None: 68 | raise ValueError(( 69 | "{obj} {name} got unexpected None" 70 | ).format(obj=self._get_debug_name(), 71 | name=name) 72 | ) 73 | 74 | def _assert_type(self, name, val, reqtype): 75 | """ 76 | Raise TypeError if val is not reqtype type (or tuple of types). 77 | Values that are None will fail type assertion, so if you allow None, 78 | check for it before calling this. 79 | """ 80 | if not isinstance(val, reqtype): 81 | raise TypeError(( 82 | "{obj} {name} expected {reqtype}, got: {t}" 83 | ).format(obj=self._get_debug_name(), 84 | name=name, reqtype=reqtype, t=type(val)) 85 | ) 86 | 87 | def _assert_type_string(self, name, val): 88 | """ 89 | Raise TypeError if val is not a string type. 90 | """ 91 | self._assert_type(name, val, basestring) 92 | 93 | def _assert_type_int(self, name, val): 94 | """ 95 | Raise TypeError if val is not a int (or long) numeric type. 96 | """ 97 | self._assert_type(name, val, (int, long)) 98 | 99 | def _assert_type_numeric(self, name, val): 100 | """ 101 | Raise TypeError if val is not a numeric type. 102 | """ 103 | self._assert_type(name, val, (int, long, float)) 104 | 105 | def _assert_type_datetime(self, name, val): 106 | """ 107 | Raise TypeError if val is not a datetime. 108 | """ 109 | self._assert_type(name, val, datetime) 110 | 111 | def _assert_type_bool(self, name, val): 112 | """ 113 | Raise TypeError if val is not a bool. 
114 | """ 115 | self._assert_type(name, val, bool) 116 | 117 | def _assert_type_mapping(self, name, val): 118 | """ 119 | Raise TypeError if val is not a dict-like mapping type. 120 | """ 121 | self._assert_type(name, val, collections.Mapping) 122 | 123 | def _assert_type_list(self, name, val, ofsupercls=None, length=None): 124 | """ 125 | Raise TypeError if val is not an iterable type. 126 | Optionally enforce all members subclass ofsupercls too. 127 | Optionally enforce length. 128 | """ 129 | self._assert_type(name, val, collections.Iterable) 130 | if ofsupercls is not None: 131 | name_item = "item in {name}".format(name=name) 132 | for x in val: 133 | self._assert_type(name_item, x, ofsupercls) 134 | if length is not None: 135 | if len(val) != length: 136 | raise ValueError(( 137 | "{obj} {name} length {lenval} != {lenexp}" 138 | ).format(obj=self._get_debug_name(), 139 | name=name, lenval=len(val), lenexp=length) 140 | ) 141 | 142 | def _assert_type_list_string(self, name, val, length=None): 143 | """ 144 | Raise TypeError if val is not a list of strings. 145 | """ 146 | self._assert_type_list( 147 | name, val, ofsupercls=basestring, length=length) 148 | 149 | def _assert_type_list_numeric(self, name, val, length=None): 150 | """ 151 | Raise TypeError if val is not a list of numeric types. 152 | """ 153 | self._assert_type_list( 154 | name, val, ofsupercls=(int, long, float), length=length) 155 | 156 | def _assert_value(self, name, val, allowed): 157 | """ 158 | Raise ValueError if val is not in iterable allowed. 159 | """ 160 | if val not in allowed: 161 | raise ValueError(( 162 | "{obj} {name} not valid: {v}" 163 | ).format(obj=self._get_debug_name(), 164 | name=name, v=val) 165 | ) 166 | 167 | 168 | # 169 | # Internal Methods: String Conversion 170 | # 171 | 172 | def __str__(self): 173 | """Return UTF-8 encoding of __unicode__.""" 174 | return unicode(self).encode('utf-8') 175 | 176 | def __unicode__(self): 177 | """Override this to provide more detailed display string.""" 178 | return self._get_debug_name() 179 | 180 | 181 | -------------------------------------------------------------------------------- /tests/test_metricdef.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test foundation metricdef package (not including MDefL parsing) 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | import copy 15 | 16 | from axonchisel.metrics.foundation.metricdef.filters import Filter 17 | from axonchisel.metrics.foundation.metricdef.metset import MetSet 18 | from axonchisel.metrics.foundation.metricdef.reduce import _ReduceFuncs 19 | 20 | 21 | # ---------------------------------------------------------------------------- 22 | 23 | 24 | class TestMetricDef(object): 25 | """ 26 | Test MetricDef object on its own. 
27 | """ 28 | 29 | # 30 | # Setup / Teardown 31 | # 32 | 33 | # 34 | # Tests 35 | # 36 | 37 | def test_validate_simple(self, mdefs): 38 | mdefs[1].validate() 39 | assert mdefs[1].is_valid() == True 40 | 41 | def test_str(self, mdefs): 42 | str(mdefs[1]) 43 | 44 | def test_validate_missing_attrs(self, mdefs): 45 | required_attrs = [ 46 | 'id', 'emfetch_id', 'table', 'time_field', 47 | ] 48 | for a in required_attrs: 49 | mdef2 = copy.deepcopy(mdefs[1]) 50 | setattr(mdef2, a, '') 51 | assert mdef2.is_valid() == False 52 | with pytest.raises(ValueError): 53 | mdef2.validate() 54 | 55 | def test_validate_field_types(self, mdefs): 56 | ftype_attrs = [ 57 | 'data_type', 'time_type', 58 | ] 59 | for a in ftype_attrs: 60 | mdef2 = copy.deepcopy(mdefs[1]) 61 | with pytest.raises(ValueError): 62 | setattr(mdef2, a, "BOGUSVAL") 63 | 64 | def test_misc(self, mdefs): 65 | assert mdefs[1].emfetch_opts['foo'] == 123 66 | assert mdefs[1].data_field == 'myval' 67 | assert mdefs[1].data_type == 'NUM_INT' 68 | 69 | def test_filters_misc(self, mdefs, filters): 70 | str(mdefs[1].filters) 71 | filter1 = filters[3] 72 | str(filter1) 73 | sorted(filters) 74 | 75 | def test_filters_list(self, mdefs, filters): 76 | mdefs[1].filters.add_filter(filters[3]) 77 | mdefs[1].filters.add_filter(filters[4]) 78 | assert mdefs[1].filters.count_filters() == 2 79 | assert mdefs[1].filters.get_filters()[1] == filters[4] 80 | assert mdefs[1].filters[1] == filters[4] 81 | d = mdefs[1].filters.safe_indexable 82 | assert d[1] == filters[4] 83 | assert d[999] == Filter() 84 | 85 | def test_filters_cmp(self, filters): 86 | assert filters[0] != filters[1] 87 | assert filters[1] != filters[2] 88 | assert filters[2] != filters[3] 89 | assert filters[3] != filters[4] 90 | assert filters[4] != filters[0] 91 | filters[1]._op = 'BOGUS' 92 | assert filters[1] != filters[3] 93 | 94 | def test_validate_filter_types(self, mdefs, filters): 95 | filter1 = filters[3] 96 | filter1.validate() 97 | mdefs[1].filters.add_filter(filter1) 98 | mdefs[1].validate() 99 | with pytest.raises(TypeError): 100 | mdefs[1].filters.add_filter("not a filter object") 101 | 102 | def test_validate_filters_bad1(self, mdefs, filters): 103 | with pytest.raises(ValueError): 104 | filters[0].validate() 105 | with pytest.raises(ValueError): 106 | filters[2].validate() 107 | 108 | def test_validate_filters_bad2(self, mdefs, filters): 109 | mdefs[1].filters.add_filter(filters[3]) 110 | mdefs[1].filters.add_filter(filters[0]) 111 | with pytest.raises(ValueError): 112 | mdefs[1].validate() 113 | filters[0].field = 'realfield' 114 | mdefs[1].validate() 115 | with pytest.raises(ValueError): 116 | filters[0].op = 'BOGUSOP' 117 | 118 | def test_validate_filters_remove(self, mdefs, filters): 119 | mdefs[1].filters.add_filter(filters[3]) 120 | mdefs[1].filters.add_filter(filters[0]) 121 | mdefs[1].filters.remove_filter(filters[3]) 122 | 123 | def test_validate_emfetch(self, mdefs): 124 | mdefs[1] = copy.deepcopy(mdefs[1]) 125 | with pytest.raises(TypeError): 126 | mdefs[1].emfetch_opts = "not a dict" 127 | 128 | def test_reduce(self): 129 | vals = [42, 90, 2] 130 | assert _ReduceFuncs.reduce_COUNT(vals) == 3 131 | assert _ReduceFuncs.reduce_FIRST([]) == None 132 | assert _ReduceFuncs.reduce_FIRST(vals) == 42 133 | assert _ReduceFuncs.reduce_LAST(vals) == 2 134 | assert _ReduceFuncs.reduce_LAST([]) == None 135 | assert _ReduceFuncs.reduce_SUM(vals) == 134 136 | assert _ReduceFuncs.reduce_MIN(vals) == 2 137 | assert _ReduceFuncs.reduce_MAX(vals) == 90 138 | assert 
_ReduceFuncs.reduce_AVG(vals) == (float(134)/3) 139 | assert _ReduceFuncs.reduce_AVG([]) == None 140 | 141 | 142 | # 143 | # Internal Helpers 144 | # 145 | 146 | 147 | # ---------------------------------------------------------------------------- 148 | 149 | 150 | class TestMetSet(object): 151 | """ 152 | Test MetSet collection of MetricDefs. 153 | """ 154 | 155 | # 156 | # Setup / Teardown 157 | # 158 | 159 | def setup_method(self, method): 160 | self.metset1 = MetSet() 161 | 162 | # 163 | # Tests 164 | # 165 | 166 | def test_simple(self): 167 | self.metset1.validate() 168 | 169 | def test_str(self): 170 | str(self.metset1) 171 | 172 | def test_add_get(self, mdefs): 173 | metset1 = self.metset1 174 | metset1.add_metric(mdefs[1]) 175 | metset1.validate() 176 | with pytest.raises(TypeError): 177 | metset1.add_metric('Not a MetricDef') 178 | metset1.validate() 179 | mdef2 = metset1.get_metric_by_id(mdefs[1].id) 180 | assert mdefs[1] == mdef2 181 | with pytest.raises(KeyError): 182 | metset1.get_metric_by_id('Invalid ID') 183 | assert self.metset1.count_metrics() == 1 184 | 185 | # 186 | # Internal Helpers 187 | # 188 | 189 | 190 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/data/series.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Series of single data points and their context 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from axonchisel.metrics.foundation.ax.obj import AxObj 14 | 15 | from axonchisel.metrics.foundation.chrono.ghost import Ghost 16 | from axonchisel.metrics.foundation.metricdef.metricdef import MetricDef, FUNCS 17 | from axonchisel.metrics.foundation.chrono.framespec import FrameSpec 18 | 19 | from .point import DataPoint 20 | 21 | 22 | # ---------------------------------------------------------------------------- 23 | 24 | 25 | class DataSeries(AxObj): 26 | """ 27 | Series of single DataPoints and context: Metric, FrameSpec, Ghost. 28 | 29 | Multiple DataSeries may be represented by a MultiDataSeries. 30 | """ 31 | 32 | def __init__(self, **kwargs): 33 | """ 34 | Initialize, optionally overriding any default properties with kwargs. 35 | """ 36 | # Set default state: 37 | self.id = '' 38 | self.query_id = '' 39 | self.mdef = MetricDef() 40 | self.tmfrspec = FrameSpec() 41 | self.ghost = None 42 | self.label = "" 43 | self._points = list() 44 | 45 | # Apply initial values from kwargs: 46 | self._init_kwargs(kwargs, [ 47 | 'id', 'query_id', 'mdef', 'tmfrspec', 'ghost', 'label', 48 | ]) 49 | 50 | 51 | # 52 | # Public Methods 53 | # 54 | 55 | def count_points(self): 56 | """Return number of DataPoints.""" 57 | return len(self._points) 58 | 59 | def reset_points(self): 60 | """Reset DataPoints to empty list.""" 61 | self._points = list() 62 | 63 | def add_point(self, dpoint): 64 | """Add a valid DataPoint.""" 65 | self._assert_type("dpoint", dpoint, DataPoint) 66 | dpoint.validate() 67 | self._points.append(dpoint) 68 | 69 | def add_points(self, dpoints): 70 | """Add a list of valid DataPoint.""" 71 | for dpoint in dpoints: 72 | self.add_point(dpoint) 73 | 74 | def get_point(self, idx): 75 | """ 76 | Return specific 0-based indexed DataPoint. 77 | Supports negative indexes from tail (-1 = last). 
78 | Raise IndexError if out of range. 79 | """ 80 | return self._points[idx] 81 | 82 | def iter_points(self): 83 | """Return an iterator over DataPoints.""" 84 | return iter(self._points) 85 | 86 | def count_missing(self): 87 | """Return number of points missing data.""" 88 | return sum(1 if dp.is_missing() else 0 for dp in self._points) 89 | 90 | def div_series(self, dseries2): 91 | """ 92 | Divide each point value by value from same point in other series. 93 | If either point's value is None, the resulting value will be None. 94 | If dseries2 is shorter, all unmatched values will be None. 95 | """ 96 | for i, dp in enumerate(self._points): 97 | try: 98 | dp2 = dseries2.get_point(i) 99 | except IndexError: 100 | dp2 = None 101 | if (dp2 is None) or (dp2.value is None): 102 | dp.value = None 103 | if dp.value is not None: 104 | dp.value /= dp2.value 105 | 106 | def reduce(self, mdef_func): 107 | """ 108 | Reduce the series to a single value by MetricDef func specified. 109 | Returns value. 110 | func is a string from: 111 | axonchisel.metrics.foundation.metricdef.metricdef.FUNCS 112 | """ 113 | self._assert_type_string("reduce mdef_func", mdef_func) 114 | self._assert_value("reduce mdef_func", mdef_func, FUNCS.keys()) 115 | vals = [dp.value for dp in self._points] 116 | func = FUNCS[mdef_func]['reduce'] 117 | return func(vals) 118 | 119 | 120 | # 121 | # Public Properties 122 | # 123 | 124 | @property 125 | def id(self): 126 | """Id of the series.""" 127 | return self._id 128 | @id.setter 129 | def id(self, val): 130 | self._assert_type_string("id", val) 131 | self._id = val 132 | 133 | @property 134 | def query_id(self): 135 | """Optional query_id of the query that produced the series.""" 136 | return self._query_id 137 | @query_id.setter 138 | def query_id(self, val): 139 | self._assert_type_string("query_id", val) 140 | self._query_id = val 141 | 142 | @property 143 | def mdef(self): 144 | """Wrapped MetricDef.""" 145 | return self._mdef 146 | @mdef.setter 147 | def mdef(self, val): 148 | self._assert_type("mdef", val, MetricDef) 149 | self._mdef = val 150 | 151 | @property 152 | def tmfrspec(self): 153 | """Wrapped FrameSpec.""" 154 | return self._tmfrspec 155 | @tmfrspec.setter 156 | def tmfrspec(self, val): 157 | self._assert_type("tmfrspec", val, FrameSpec) 158 | self._tmfrspec = val 159 | 160 | @property 161 | def ghost(self): 162 | """Wrapped Ghost, optional.""" 163 | return self._ghost 164 | @ghost.setter 165 | def ghost(self, val): 166 | if val is not None: 167 | self._assert_type("ghost", val, Ghost) 168 | self._ghost = val 169 | 170 | @property 171 | def label(self): 172 | """Optional human-readable label for the series.""" 173 | return self._label 174 | @label.setter 175 | def label(self, val): 176 | self._assert_type_string("label", val) 177 | self._label = val 178 | 179 | 180 | # 181 | # Internal Methods 182 | # 183 | 184 | def __unicode__(self): 185 | return (u"{cls}('{self.label}' #{self.id} (Q#{self.query_id}) "+ 186 | "of {self._mdef} over {self._tmfrspec} ghost {self.ghost} "+ 187 | "with {cnt} points: [{points}])" 188 | ).format(self=self, cls=self.__class__.__name__, 189 | cnt=self.count_points(), 190 | points=u", ".join("{0}".format(p.value) for p in self._points) 191 | ) 192 | 193 | 194 | 195 | # ---------------------------------------------------------------------------- 196 | 197 | 198 | -------------------------------------------------------------------------------- /tests/conftest.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Ax_Metrics - Common pytest configuration. 4 | 5 | Note: 'conftest.py' is a magic filename used by py.test. 6 | 7 | ------------------------------------------------------------------------------ 8 | Author: Dan Kamins 9 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 10 | """ 11 | 12 | 13 | # ---------------------------------------------------------------------------- 14 | 15 | 16 | import pytest 17 | from datetime import datetime 18 | 19 | from .util import dt 20 | 21 | import axonchisel.metrics.foundation.metricdef.metricdef as metricdef 22 | import axonchisel.metrics.foundation.chrono.timerange as timerange 23 | import axonchisel.metrics.foundation.chrono.framespec as framespec 24 | import axonchisel.metrics.foundation.chrono.ghost as ghost 25 | import axonchisel.metrics.foundation.data.point as point 26 | import axonchisel.metrics.foundation.data.series as series 27 | import axonchisel.metrics.foundation.data.multi as multi 28 | import axonchisel.metrics.foundation.query.query as query 29 | import axonchisel.metrics.foundation.query.qdata as qdata 30 | 31 | 32 | 33 | # ---------------------------------------------------------------------------- 34 | # Basic fixtures 35 | 36 | 37 | @pytest.fixture 38 | def dts(): 39 | """Fixture with common datetime objects.""" 40 | return [ 41 | dt('2014-02-01'), 42 | dt('2014-03-01'), 43 | dt('2014-04-14 16:42:45'), # (monday) 44 | dt('2014-04-15 16:42:45'), # (tuesday) 45 | dt('2014-02-14 16:30:45 001234'), # (friday) 46 | dt('2014-04-14 16:42:45 001234'), # (monday) 47 | ] 48 | 49 | 50 | # ---------------------------------------------------------------------------- 51 | # foundation.chrono fixtures 52 | 53 | 54 | @pytest.fixture 55 | def tmranges(dts): 56 | """Fixture with common TimeRange objects.""" 57 | return [ 58 | timerange.TimeRange(), 59 | timerange.TimeRange(inc_begin=dts[0], exc_end=dts[1], anchor=dts[0]), 60 | timerange.TimeRange(inc_begin=dts[2], exc_end=dts[3], anchor=dts[2]), 61 | timerange.TimeRange(inc_begin=dts[4], exc_end=dts[5], anchor=dts[4]), 62 | ] 63 | 64 | @pytest.fixture 65 | def tmfrspecs(): 66 | """Fixture with common FrameSpec objects.""" 67 | return [ 68 | framespec.FrameSpec(), 69 | framespec.FrameSpec(smooth_val=4, smooth_unit='HOUR') 70 | ] 71 | 72 | 73 | # ---------------------------------------------------------------------------- 74 | # foundation.metricdef fixtures 75 | 76 | 77 | @pytest.fixture 78 | def mdefs(): 79 | """Fixture with common MetricDef objects.""" 80 | return [ 81 | metricdef.MetricDef(), 82 | metricdef.MetricDef( 83 | id = 'mdef1', 84 | emfetch_id = 'emfetchid', 85 | emfetch_opts = {'foo': 123, 'bar': {'zig':"Zoom", 'zag':"Boom"}}, 86 | table = 'tblname', 87 | func = 'COUNT', 88 | time_field = 'when', 89 | time_type = 'TIME_DATE', 90 | data_field = 'myval', 91 | data_type = 'NUM_INT', 92 | # filters, 93 | ), 94 | ] 95 | 96 | @pytest.fixture 97 | def filters(): 98 | """Fixture with common Filter objects.""" 99 | return [ 100 | metricdef.Filter(), 101 | metricdef.Filter(field='ffield'), 102 | metricdef.Filter(op='EQ'), 103 | metricdef.Filter(field='ffield', op='EQ'), 104 | metricdef.Filter(field='ffield', op='EQ', value=123), 105 | ] 106 | 107 | 108 | 109 | # ---------------------------------------------------------------------------- 110 | # foundation.data fixtures 111 | 112 | 113 | @pytest.fixture 114 | def dpoints(tmranges): 115 | """Fixture with common DataPoint objects.""" 116 
| return [ 117 | point.DataPoint(), 118 | point.DataPoint(tmrange=tmranges[1], value=42), 119 | point.DataPoint(tmrange=tmranges[2], value=90), 120 | point.DataPoint(tmrange=tmranges[3], value=2), 121 | ] 122 | 123 | @pytest.fixture 124 | def dseries(mdefs, tmfrspecs, dpoints): 125 | """Fixture with common DataSeries objects.""" 126 | dseries3 = series.DataSeries(id='s3', 127 | mdef=mdefs[1], tmfrspec=tmfrspecs[1], 128 | ghost=ghost.Ghost('PREV_PERIOD1')) 129 | dseries3.add_point(dpoints[1]) 130 | dseries3.add_point(dpoints[2]) 131 | dseries3.add_point(dpoints[3]) 132 | 133 | dseries4 = series.DataSeries(id=u'sérîes4', 134 | mdef=mdefs[1], tmfrspec=tmfrspecs[1]) 135 | dseries4.add_point(dpoints[1]) 136 | dseries4.add_point(dpoints[2]) 137 | dseries4.add_point(dpoints[3]) 138 | 139 | return [ 140 | series.DataSeries(), 141 | series.DataSeries(id='s1', 142 | mdef=mdefs[1], tmfrspec=tmfrspecs[1]), 143 | series.DataSeries(id='s2', 144 | mdef=mdefs[1], tmfrspec=tmfrspecs[1]), 145 | dseries3, 146 | dseries4, 147 | ] 148 | 149 | @pytest.fixture 150 | def mdseries(dseries): 151 | """Fixture with common MultiDataSeries objects.""" 152 | mdseries1 = multi.MultiDataSeries() 153 | mdseries1.add_series(dseries[1]) 154 | mdseries1.add_series(dseries[2]) 155 | mdseries2 = multi.MultiDataSeries() 156 | mdseries2.add_series(dseries[1]) 157 | mdseries2.add_series(dseries[3]) 158 | mdseries3 = multi.MultiDataSeries() 159 | mdseries3.add_series(dseries[4]) 160 | mdseries3.add_series(dseries[3]) 161 | return [ 162 | multi.MultiDataSeries(), 163 | mdseries1, 164 | mdseries2, 165 | mdseries3, 166 | ] 167 | 168 | 169 | # ---------------------------------------------------------------------------- 170 | # foundation.query fixtures 171 | 172 | @pytest.fixture 173 | def qmetrics(): 174 | """Fixture with common QMetric objects.""" 175 | return [ 176 | qdata.QMetric(), 177 | qdata.QMetric(metric_id='metric1'), 178 | qdata.QMetric(metric_id='metric1'), 179 | ] 180 | 181 | @pytest.fixture 182 | def queries(qmetrics): 183 | """Fixture with common Query objects.""" 184 | q2 = query.Query(id='q2') 185 | q2.qdata.add_qmetric(qmetrics[1]) 186 | q3 = query.Query(id='q3') 187 | q3.qdata.add_qmetric(qmetrics[1]) 188 | q3b = query.Query(id='q3') 189 | q3b.qdata.add_qmetric(qmetrics[2]) 190 | q3b.qghosts.add_ghost(ghost.Ghost('PREV_PERIOD1')) 191 | return [ 192 | query.Query(), 193 | query.Query(id='q1'), 194 | q2, 195 | q3, 196 | q3b, 197 | ] 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | -------------------------------------------------------------------------------- /docs/developers.md: -------------------------------------------------------------------------------- 1 | 2 | # Ax_Metrics Developers Overview 3 | 4 | This document is not required reading for *using* Ax_Metrics. For an introduction and general user information, please see the [README](../README.md). 5 | 6 | Ax_Metrics is a library targeted largely at developers, designed to empower BI initiatives. These users may gain helpful or interesting background information here. 7 | 8 | But this document is aimed primarily at developers looking to work with and contribute to the actual Ax_Metrics project itself. 
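For a quick, concrete feel for the foundation layer described in the Technical Overview below, here is a minimal illustrative sketch that assembles a tiny `DataSeries` by hand. This is not the normal flow (the run layer builds these objects while processing queries); it only shows the foundation.data pieces in isolation, using the same constructor arguments as the test fixtures.

```
from datetime import datetime

import axonchisel.metrics.foundation.chrono.timerange as timerange
import axonchisel.metrics.foundation.data.point as point
import axonchisel.metrics.foundation.data.series as series

# One day-long time range (inclusive begin, exclusive end):
tr = timerange.TimeRange(
    inc_begin=datetime(2014, 4, 14), exc_end=datetime(2014, 4, 15),
    anchor=datetime(2014, 4, 14))

# A series holding a single valued point:
ds = series.DataSeries(id='example')
ds.add_point(point.DataPoint(tmrange=tr, value=42))
print(ds.count_points())   # -> 1
```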
9 | 10 | 11 | 12 | ## Source Code & Contributing 13 | 14 | Fork, submit pull requests, report issues, and discuss at: 15 | 16 | - https://github.com/axonchisel/ax_metrics - GitHub official page 17 | 18 | 19 | ## Technical Overview 20 | 21 | 22 | ### Project Structure 23 | 24 | ``` 25 | / axonchisel 26 | | 27 | | 28 | +-- py/ Python code root 29 | | | 30 | | +-- axonchisel/ axonchisel 31 | | | (top level shared package namespace) 32 | | ... (see "Python Package Hierarchy" below) 33 | | 34 | +-- tests/ (full package py.test test coverage) 35 | | | 36 | | +-- assets/ (assets for testing: MQL, MDefL, etc.) 37 | | 38 | +-- docs/ (documentation) 39 | 40 | ``` 41 | 42 | 43 | ### Python Package Hierarchy 44 | 45 | ``` 46 | /py/axonchisel/ axonchisel 47 | | (top level shared package namespace) 48 | | 49 | | 50 | +-- metrics/ axonchisel.metrics 51 | | (Ax_Metrics project) 52 | | 53 | | 54 | +-- run/ axonchisel.metrics.run 55 | | | (processing queries at runtime) 56 | | | 57 | | +-- servant/ axonchisel.metrics.run.servant 58 | | | (wraps fetch, query, engine, report process) 59 | | | 60 | | +-- mqengine/ axonchisel.metrics.run.mqengine 61 | | (process queries to obtain data) 62 | | (MQEngine = Metrics Query Engine) 63 | | 64 | | 65 | +-- io/ axonchisel.metrics.io 66 | | | (connections to the world: stats, output) 67 | | | 68 | | +-- emfetch/ axonchisel.metrics.io.emfetch 69 | | | (plugins to access raw metrics data) 70 | | | (EMFetch = Extensible Metrics Fetcher) 71 | | | 72 | | +-- erout/ axonchisel.metrics.io.erout 73 | | (plugins to output various report formats) 74 | | (ERout = Extensible Report Outputter) 75 | | 76 | | 77 | +-- foundation/ axonchisel.metrics.foundation 78 | | (core data models, logic, parsers) 79 | | 80 | +-- ax/ axonchisel.metrics.foundation.ax 81 | | (common base classes and utilities) 82 | | 83 | | 84 | +-- query/ axonchisel.metrics.foundation.query 85 | | (query models, MQL parser) 86 | | (MQL = Metrics Query Language) 87 | | 88 | +-- data/ axonchisel.metrics.foundation.data 89 | | (time range x value points, series) 90 | | 91 | +-- metricdef/ axonchisel.metrics.foundation.metricdef 92 | | (core metrics models, MDefL parser) 93 | | (MDefL = Metrics Definition Language) 94 | | 95 | +-- chrono/ axonchisel.metrics.foundation.chrono 96 | (Time-related models, math, logic) 97 | 98 | ``` 99 | 100 | 101 | ### Architecture / Dependency Graph 102 | 103 | ``` 104 | 105 | RUN HERE 106 | _____v______ 107 | / | | 108 | | | servant | 109 | |R |____________| 110 | |U | | 111 | |N __________|_ | 112 | | | | | 113 | | | mqengine | | 114 | \ |____________| | 115 | | | | 116 | / _________|__ | _|__________ 117 | |I | | | | | 118 | |O METRICS <-| emfetch | | | erout |-> REPORTS 119 | \ |____________| | |____________| 120 | | | | | | | 121 | / | | | | | | 122 | |F _______| | |_____ | | | 123 | |O | ______|_____ | _|________|_ .....|...... 
124 | |U | | | | | | : : 125 | |N | | data | | | query | : data : 126 | |D | |____________| | |____________| :............: 127 | |A | | | | | 128 | |T _____|____|_ _|___|____|_ 129 | |I | | | | 130 | |O | metricdef | | chrono | 131 | |N |____________| |____________| 132 | \ 133 | 134 | ``` 135 | 136 | 137 | 138 | 139 | 140 | ------------------------------------------------------------------------------ 141 | 142 | *Return to the [README](../README.md)* 143 | 144 | ------------------------------------------------------------------------------ 145 | 146 | Ax_Metrics - Copyright (c) 2014 Dan Kamins, AxonChisel.net 147 | -------------------------------------------------------------------------------- /tests/test_query.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test foundation query package (not including MQL) 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | 15 | import axonchisel.metrics.foundation.chrono.ghost as ghost 16 | import axonchisel.metrics.foundation.chrono.framespec as framespec 17 | import axonchisel.metrics.foundation.query.query as query 18 | import axonchisel.metrics.foundation.query.qdata as qdata 19 | import axonchisel.metrics.foundation.query.qtimeframe as qtimeframe 20 | import axonchisel.metrics.foundation.query.qformat as qformat 21 | import axonchisel.metrics.foundation.query.qghosts as qghosts 22 | import axonchisel.metrics.foundation.query.queryset as queryset 23 | 24 | 25 | # ---------------------------------------------------------------------------- 26 | 27 | 28 | class TestQuery(object): 29 | """ 30 | Test Query object on its own. 
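    (A Query aggregates QData, QTimeFrame, QFormat, and QGhosts parts; the tests below exercise each of these through the Query.)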
31 | """ 32 | 33 | # 34 | # Setup / Teardown 35 | # 36 | 37 | def setup_method(self, method): 38 | self.query1 = query.Query() 39 | 40 | # 41 | # Tests 42 | # 43 | 44 | def test_validate_simple(self): 45 | assert self.query1.is_valid() == False 46 | with pytest.raises(ValueError): 47 | self.query1.validate() 48 | self.query1.qdata.add_qmetric(qdata.QMetric(metric_id='ametric')) 49 | assert self.query1.is_valid() 50 | self.query1.validate() 51 | 52 | def test_str(self): 53 | str(self.query1) 54 | 55 | def test_qdata(self): 56 | qdata1 = qdata.QData() 57 | self.query1.qdata = qdata1 58 | with pytest.raises(TypeError): 59 | qdata1.add_qmetric('Not QMetric') 60 | str(qdata1) 61 | qm1 = qdata.QMetric() 62 | qdata1.add_qmetric(qm1) 63 | str(qdata1) 64 | assert qdata1.count_qmetrics() == 1 65 | qm1.rag = [10, 20] 66 | with pytest.raises(ValueError): 67 | qm1.rag = [10, 20, 30] 68 | with pytest.raises(TypeError): 69 | self.query1.qdata = 'Not QData' 70 | qm2 = qdata.QMetric() 71 | qdata1.add_qmetric(qm2) 72 | assert list(qdata1.iter_qmetrics()) == [qm1, qm2] 73 | assert qdata1.get_qmetric(0) == qm1 74 | assert qdata1.get_qmetric(1) == qm2 75 | with pytest.raises(IndexError): 76 | assert qdata1.get_qmetric(999) 77 | 78 | def test_qmetric(self): 79 | qm1 = qdata.QMetric() 80 | str(qm1) 81 | qm1.div_metric_id = 'ametric' 82 | qm1.goal = 123 83 | 84 | 85 | def test_qtimeframe(self): 86 | qtimeframe1 = qtimeframe.QTimeFrame() 87 | str(qtimeframe1) 88 | qtimeframe1.tmfrspec = framespec.FrameSpec() 89 | assert qtimeframe1.tmfrspec 90 | with pytest.raises(TypeError): 91 | qtimeframe1.tmfrspec = 'Not FrameSpec' 92 | self.query1.qtimeframe = qtimeframe1 93 | with pytest.raises(TypeError): 94 | self.query1.qtimeframe = 'Not QTimeFrame' 95 | 96 | def test_qformat(self): 97 | qformat1 = qformat.QFormat() 98 | self.query1.qformat = qformat1 99 | str(qformat1) 100 | assert qformat1.has_domain('newdomain') == False 101 | dom = qformat1.get_domain('newdomain') 102 | assert qformat1.has_domain('newdomain') == True 103 | dom['k1'] = 'val1' 104 | dom['k2'] = (1,2,3) 105 | str(qformat1) 106 | with pytest.raises(TypeError): 107 | self.query1.qformat = 'Not QFormat' 108 | assert qformat1.get('newdomain', 'k1') == 'val1' 109 | assert qformat1.get('newdomain', 'BOGUS', 'def') == 'def' 110 | with pytest.raises(KeyError): 111 | assert qformat1.get('newdomain', 'BOGUS') == 'def' 112 | qformat1.get_domain('_default')['comkey'] = 'comval1' 113 | assert qformat1.get('newdomain', 'comkey') == 'comval1' 114 | 115 | def test_qghosts(self): 116 | qghosts1 = qghosts.QGhosts() 117 | str(qghosts1) 118 | self.query1.qghosts = qghosts1 119 | with pytest.raises(TypeError): 120 | self.query1.qghosts = 'Not QGhosts' 121 | with pytest.raises(TypeError): 122 | self.query1.qghosts.add_ghost('Not QGhost') 123 | assert self.query1.qghosts.count_ghosts() == 0 124 | g1 = ghost.Ghost('PREV_PERIOD1') 125 | g2 = ghost.Ghost('PREV_YEAR1') 126 | assert g2.gtype == 'PREV_YEAR1' 127 | self.query1.qghosts.add_ghost(g1) 128 | self.query1.qghosts.add_ghost(g2) 129 | assert self.query1.qghosts.count_ghosts() == 2 130 | glist = self.query1.qghosts.get_ghosts() 131 | assert len(glist) == 2 132 | assert glist[1] == g2 133 | assert self.query1.qghosts[1] == g2 134 | str(g1) 135 | with pytest.raises(ValueError): 136 | g1.gtype = 'Not valid gtype' 137 | 138 | # 139 | # Internal Helpers 140 | # 141 | 142 | 143 | # ---------------------------------------------------------------------------- 144 | 145 | 146 | class TestQuerySet(object): 147 | """ 148 | Test QuerySet 
object on its own. 149 | """ 150 | 151 | # 152 | # Setup / Teardown 153 | # 154 | 155 | def setup_method(self, method): 156 | self.queryset1 = queryset.QuerySet() 157 | 158 | # 159 | # Tests 160 | # 161 | 162 | def test_str(self): 163 | str(self.queryset1) 164 | 165 | def test_validate(self, queries): 166 | self.queryset1.validate() 167 | self.queryset1.add_query(queries[2]) 168 | self.queryset1.add_query(queries[3]) 169 | self.queryset1.validate() 170 | 171 | def test_count_add(self, queries): 172 | assert self.queryset1.count_queries() == 0 173 | self.queryset1.add_query(queries[2]) 174 | self.queryset1.add_query(queries[3]) 175 | assert self.queryset1.count_queries() == 2 176 | self.queryset1.add_query(queries[4]) # (same id) 177 | assert self.queryset1.count_queries() == 2 178 | 179 | def test_get_by_id(self, queries): 180 | self.queryset1.add_query(queries[2]) 181 | self.queryset1.add_query(queries[3]) 182 | qx = self.queryset1.get_query_by_id('q2') 183 | assert qx == queries[2] 184 | with pytest.raises(KeyError): 185 | self.queryset1.get_query_by_id('No matching key') 186 | 187 | def test_bad_add(self): 188 | with pytest.raises(TypeError): 189 | self.queryset1.add_query('Not Query') 190 | 191 | # 192 | # Internal Helpers 193 | # 194 | 195 | 196 | -------------------------------------------------------------------------------- /py/axonchisel/metrics/foundation/chrono/dtmath.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - datetime math utilities 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | from datetime import datetime, timedelta 14 | 15 | 16 | # ---------------------------------------------------------------------------- 17 | 18 | 19 | def add(dt, 20 | years=0, quarters=0, months=0, weeks=0, days=0, 21 | hours=0, 22 | minutes5=0, minutes10=0, minutes15=0, minutes30=0, 23 | minutes=0, 24 | seconds=0, milliseconds=0, microseconds=0 25 | ): 26 | """ 27 | Given a datetime.datetime obj, add or subtract specified amounts of time, 28 | returning new datetime obj. 
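    Smaller units are applied before larger ones, e.g. (illustrative): add(dt, months=1, days=-1) steps back one day first, then forward one month.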
29 | """ 30 | if microseconds: 31 | dt = add_microseconds(dt, microseconds) 32 | if milliseconds: 33 | dt = add_microseconds(dt, 1000*milliseconds) 34 | if seconds: 35 | dt = add_seconds(dt, seconds) 36 | if minutes: 37 | dt = add_minutes(dt, minutes) 38 | if minutes5: 39 | dt = add_minutes(dt, 5 * minutes5) 40 | if minutes10: 41 | dt = add_minutes(dt, 10 * minutes10) 42 | if minutes15: 43 | dt = add_minutes(dt, 15 * minutes15) 44 | if minutes30: 45 | dt = add_minutes(dt, 30 * minutes30) 46 | if hours: 47 | dt = add_hours(dt, hours) 48 | if days: 49 | dt = add_days(dt, days) 50 | if weeks: 51 | dt = add_weeks(dt, weeks) 52 | if months: 53 | dt = add_months(dt, months) 54 | if quarters: 55 | dt = add_quarters(dt, quarters) 56 | if years: 57 | dt = add_years(dt, years) 58 | return dt 59 | 60 | 61 | # ---------------------------------------------------------------------------- 62 | 63 | 64 | def begin_year(dt): 65 | """Return datetime marking beginning of year containing dt.""" 66 | return dt.replace( microsecond=0, second=0, minute=0, hour=0, 67 | day=1, month=1) 68 | 69 | def begin_quarter(dt): 70 | """Return datetime marking beginning of quarter containing dt.""" 71 | quarter0 = ((dt.month - 1) / 3) 72 | month = quarter0 * 3 + 1 73 | return dt.replace( microsecond=0, second=0, minute=0, hour=0, 74 | day=1, month=month) 75 | 76 | def begin_month(dt): 77 | """Return datetime marking beginning of month containing dt.""" 78 | return dt.replace( microsecond=0, second=0, minute=0, hour=0, 79 | day=1) 80 | 81 | def begin_week(dt, day0_sunday_ofs=0): 82 | """ 83 | Return datetime marking beginning of week containing dt. 84 | If day0_sunday_ofs specified, it is treated as day offset from Sunday 85 | indicating what first day of week is, e.g. 1=Monday, -1=Saturday. 
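    e.g. (illustrative), with the default Sunday week start: begin_week(datetime(2014, 4, 15, 16, 42)) -> datetime(2014, 4, 13, 0, 0), the preceding Sunday.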
86 | """ 87 | dt = begin_day(dt) 88 | weekday = dt.weekday() # (Mon=0, Sun=6) 89 | weekday = (weekday + 1) % 7 # (Sun=0, Sat=6) 90 | day0 = day0_sunday_ofs % 7 91 | days_off = (weekday - day0) % 7 92 | td = timedelta(days=-days_off) 93 | return dt + td 94 | 95 | def begin_day(dt): 96 | """Return datetime marking beginning of day containing dt.""" 97 | return dt.replace(microsecond=0, second=0, minute=0, hour=0) 98 | 99 | def begin_hour(dt): 100 | """Return datetime marking beginning of hour containing dt.""" 101 | return dt.replace(microsecond=0, second=0, minute=0) 102 | 103 | def begin_minute30(dt): 104 | """Return datetime marking beginning of 30-min period containing dt.""" 105 | minutes = dt.minute / 30 * 30 106 | return dt.replace(microsecond=0, second=0, minute=minutes) 107 | 108 | def begin_minute15(dt): 109 | """Return datetime marking beginning of 15-min period containing dt.""" 110 | minutes = dt.minute / 15 * 15 111 | return dt.replace(microsecond=0, second=0, minute=minutes) 112 | 113 | def begin_minute10(dt): 114 | """Return datetime marking beginning of 10-min containing dt.""" 115 | minutes = dt.minute / 10 * 10 116 | return dt.replace(microsecond=0, second=0, minute=minutes) 117 | 118 | def begin_minute5(dt): 119 | """Return datetime marking beginning of 5-min containing dt.""" 120 | minutes = dt.minute / 5 * 5 121 | return dt.replace(microsecond=0, second=0, minute=minutes) 122 | 123 | def begin_minute(dt): 124 | """Return datetime marking beginning of minute containing dt.""" 125 | return dt.replace(microsecond=0, second=0) 126 | 127 | def begin_second(dt): 128 | """Return datetime marking beginning of second containing dt.""" 129 | return dt.replace(microsecond=0) 130 | 131 | 132 | # ---------------------------------------------------------------------------- 133 | 134 | 135 | def add_years(dt, delta): 136 | """Return datetime offset by +/- delta years.""" 137 | val = dt.year + delta 138 | return dt.replace(year=val) 139 | 140 | def add_quarters(dt, delta): 141 | """Return datetime offset by +/- delta quarters.""" 142 | return add_months(dt, delta * 3) 143 | 144 | def add_months(dt, delta): 145 | """Return datetime offset by +/- delta months.""" 146 | val = dt.month + delta 147 | if val > 12: 148 | carry = (val - 1) / 12 149 | val -= (carry * 12) 150 | dt = add_years(dt, carry) 151 | if val <= 0: 152 | borrow = -((val - 1) / 12) 153 | val += (borrow * 12) 154 | dt = add_years(dt, -borrow) 155 | return dt.replace(month=val) 156 | 157 | def add_weeks(dt, delta): 158 | """Return datetime offset by +/- delta weeks.""" 159 | return dt + timedelta(days=7*delta) 160 | 161 | def add_days(dt, delta): 162 | """Return datetime offset by +/- delta days.""" 163 | return dt + timedelta(days=delta) 164 | 165 | def add_hours(dt, delta): 166 | """Return datetime offset by +/- delta hours.""" 167 | return dt + timedelta(seconds=60*60*delta) 168 | 169 | def add_minutes(dt, delta): 170 | """Return datetime offset by +/- delta minutes.""" 171 | return dt + timedelta(seconds=60*delta) 172 | 173 | def add_minute5s(dt, delta): 174 | """Return datetime offset by +/- delta 5-minute increments.""" 175 | return dt + timedelta(seconds=5*60*delta) 176 | 177 | def add_minute10s(dt, delta): 178 | """Return datetime offset by +/- delta 10-minute increments.""" 179 | return dt + timedelta(seconds=10*60*delta) 180 | 181 | def add_minute15s(dt, delta): 182 | """Return datetime offset by +/- delta 15-minute increments.""" 183 | return dt + timedelta(seconds=15*60*delta) 184 | 185 | def add_minute30s(dt, 
delta): 186 | """Return datetime offset by +/- delta 30-minute increments.""" 187 | return dt + timedelta(seconds=30*60*delta) 188 | 189 | def add_seconds(dt, delta): 190 | """Return datetime offset by +/- delta seconds.""" 191 | return dt + timedelta(seconds=delta) 192 | 193 | def add_microseconds(dt, delta): 194 | """Return datetime offset by +/- delta microseconds.""" 195 | return dt + timedelta(microseconds=delta) 196 | 197 | 198 | # ---------------------------------------------------------------------------- 199 | 200 | 201 | # (See test suite in axonchisel.metrics.tests.test_dtmath) 202 | 203 | -------------------------------------------------------------------------------- /tests/test_axplugin.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ax_Metrics - Test foundation AxPlugin 3 | 4 | ------------------------------------------------------------------------------ 5 | Author: Dan Kamins 6 | Copyright (c) 2014 Dan Kamins, AxonChisel.net 7 | """ 8 | 9 | 10 | # ---------------------------------------------------------------------------- 11 | 12 | 13 | import pytest 14 | 15 | import axonchisel.metrics.foundation.ax.plugin as axplugin 16 | 17 | 18 | # ---------------------------------------------------------------------------- 19 | 20 | 21 | class TestAxPlugin(object): 22 | """ 23 | Test AxPlugin class. 24 | """ 25 | 26 | # 27 | # Setup / Teardown 28 | # 29 | 30 | def setup_method(self, method): 31 | options = {'foo': 10, 'bar': {'a': 100, 'b': 200}} 32 | extinfo = {'zig': 10, 'zag': {'x': 100, 'y': 200}} 33 | self.p = axplugin.AxPluginBase(options=options, extinfo=extinfo) 34 | 35 | # 36 | # Tests 37 | # 38 | 39 | def test_str(self): 40 | str(self.p) 41 | 42 | def test_config_basic(self): 43 | assert self.p.options['foo'] == 10 44 | assert self.p.plugin_option('foo') == 10 45 | assert self.p.extinfo['zig'] == 10 46 | assert self.p.plugin_extinfo('zig') == 10 47 | 48 | def test_config_basic_defaults(self): 49 | with pytest.raises(KeyError): 50 | self.p.options['BOGUS'] 51 | assert self.p.plugin_option('BOGUS', "DEF") == "DEF" 52 | with pytest.raises(KeyError): 53 | self.p.extinfo['BOGUS'] 54 | assert self.p.plugin_extinfo('BOGUS', "DEF") == "DEF" 55 | 56 | def test_config_dotted(self): 57 | assert self.p.plugin_option('bar.a') == 100 58 | assert self.p.plugin_extinfo('zag.x') == 100 59 | 60 | def test_config_dotted_defaults(self): 61 | with pytest.raises(KeyError): 62 | self.p.plugin_option('bar.BOGUS') 63 | assert self.p.plugin_option('bar.BOGUS', "DEF") == "DEF" 64 | with pytest.raises(KeyError): 65 | self.p.plugin_extinfo('zag.BOGUS') 66 | assert self.p.plugin_extinfo('zag.BOGUS', "DEF") == "DEF" 67 | 68 | def test_format_str_basic(self): 69 | tests = [ # [(result, fmtstr)] 70 | ("simple literal", "simple literal"), 71 | ("foo bar.a is 10 100", "foo bar.a is {options.foo} {options.bar.a}"), 72 | ("zig zag.x is 10 100", "zig zag.x is {extinfo.zig} {extinfo.zag.x}"), 73 | ("barb zagy is 200 200", "barb zagy is {options.bar.b} {extinfo.zag.y}"), 74 | ("Jimbo weighs 190", "{name} weighs {more.weight}"), 75 | ("Jimbo weighs 190", "{name} weighs {more[weight]}"), 76 | ] 77 | context = { 78 | 'name': "Jimbo", 79 | 'more': { 'weight': 190, 'height': 75 }, 80 | } 81 | for exp, fmt in tests: 82 | v = self.p._format_str(fmt, context=context) 83 | assert v == exp 84 | 85 | def test_format_str_defaults(self): 86 | tests = [ # [(result, fmtstr, od_defaults)] 87 | ("Jimbo eats garbage.", "{name} eats {more.favoritefood}.", "garbage"), 88 | ] 89 | 
context = { 90 | 'name': "Jimbo", 91 | 'more': { 'weight': 190, 'height': 75 }, 92 | } 93 | for exp, fmt, oddefs in tests: 94 | v = self.p._format_str(fmt, context=context, od_defaults=oddefs) 95 | assert v == exp 96 | 97 | def test_format_str_bad(self): 98 | tests = [ # [(exception, fmtstr)] 99 | (AttributeError, "{name} eats {more.favoritefood}."), 100 | (KeyError, "{name} eats {more[favoritefood]}."), 101 | ] 102 | context = { 103 | 'name': "Jimbo", 104 | 'more': { 'weight': 190, 'height': 75 }, 105 | } 106 | for exc, fmt in tests: 107 | with pytest.raises(exc): 108 | self.p._format_str(fmt, context=context) 109 | 110 | 111 | 112 | # ---------------------------------------------------------------------------- 113 | 114 | 115 | class TestAxPluginLoad(object): 116 | """ 117 | Test AxPlugin loading. 118 | """ 119 | 120 | # 121 | # Setup / Teardown 122 | # 123 | 124 | # 125 | # Tests 126 | # 127 | 128 | def test_good(self): 129 | # Note: We use "AxPluginLoadError" as a "plugin" class here for 130 | # the sole reason that it is a known class that will be available 131 | # to this code. The fact that it is an "Exception" is irrelevant. 132 | cls = axplugin.AxPluginLoadError 133 | tests = [ # [(class expected, {load_plugin_class args})] 134 | (cls, { # (absolute class) 135 | 'plugin_id': 136 | 'axonchisel.metrics.foundation.ax.plugin.AxPluginLoadError', 137 | }), 138 | (cls, { # (simple default class) 139 | 'plugin_id': 'AxPluginLoadError', 140 | 'def_module_name': 'axonchisel.metrics.foundation.ax.plugin', 141 | }), 142 | (cls, { # (prefixed default class) 143 | 'plugin_id': 'LoadError', 144 | 'def_module_name': 'axonchisel.metrics.foundation.ax.plugin', 145 | 'def_cls_name_pfx': 'AxPlugin', 146 | }), 147 | (cls, { # (require base class) 148 | 'plugin_id': 'AxPluginLoadError', 149 | 'def_module_name': 'axonchisel.metrics.foundation.ax.plugin', 150 | 'require_base_cls': axplugin.AxPluginLoadError, 151 | }), 152 | ] 153 | for (testcls, test) in tests: 154 | test['what'] = "Test Plugin" 155 | cls = axplugin.load_plugin_class(**test) 156 | assert cls == testcls 157 | 158 | def test_errors(self): 159 | tests = [ # [(str expected in exception, {load_plugin_class args})] 160 | ('must be absolute', { # (default without def_module_name) 161 | 'plugin_id': 'PluginId', 162 | }), 163 | ('import', { # (invalid default module) 164 | 'plugin_id': 'PluginId', 165 | 'def_module_name': 'axonchisel.metrics.BOGUS' 166 | }), 167 | ('import', { # (invalid class in default module) 168 | 'plugin_id': 'Bogus', 169 | 'def_module_name': 'axonchisel.metrics.foundation.ax.plugin' 170 | }), 171 | ('not subclass', { # (require base class) 172 | 'plugin_id': 'AxPluginLoadError', # (see note in test_good) 173 | 'def_module_name': 'axonchisel.metrics.foundation.ax.plugin', 174 | 'require_base_cls': self.__class__, # (just another class) 175 | }), 176 | ('absolute mode', { # (absolute not allowed) 177 | 'allow_absolute': False, 178 | 'plugin_id': 179 | 'axonchisel.metrics.foundation.ax.plugin.AxPluginLoadError', 180 | }), 181 | ] 182 | for (errstr, test) in tests: 183 | test['what'] = "Test Plugin" 184 | with pytest.raises(axplugin.AxPluginLoadError) as e: 185 | cls = axplugin.load_plugin_class(**test) 186 | assert errstr in str(e) 187 | 188 | 189 | 190 | # 191 | # Internal Helpers 192 | # 193 | 194 | 195 | --------------------------------------------------------------------------------