├── .gitignore
├── .landscape.yaml
├── .travis.yml
├── .vscode
├── settings.json
└── tasks.json
├── AUTHORS.md
├── LICENSE.md
├── MANIFEST.in
├── Makefile
├── README.md
├── moira
├── __init__.py
├── api
│ ├── __init__.py
│ ├── request.py
│ ├── resources
│ │ ├── __init__.py
│ │ ├── contact.py
│ │ ├── event.py
│ │ ├── metric.py
│ │ ├── notification.py
│ │ ├── pattern.py
│ │ ├── redis.py
│ │ ├── subscription.py
│ │ ├── tags.py
│ │ ├── trigger.py
│ │ └── user.py
│ ├── server.py
│ └── site.py
├── cache.py
├── checker
│ ├── __init__.py
│ ├── check.py
│ ├── event.py
│ ├── expression.py
│ ├── master.py
│ ├── server.py
│ ├── state.py
│ ├── timeseries.py
│ ├── trigger.py
│ └── worker.py
├── config.py
├── db.py
├── graphite
│ ├── __init__.py
│ ├── attime.py
│ ├── datalib.py
│ ├── evaluator.py
│ ├── functions.py
│ ├── grammar.py
│ └── util.py
├── logs.py
├── metrics
│ ├── __init__.py
│ ├── graphite.py
│ └── spy.py
├── tools
│ ├── __init__.py
│ └── converter.py
└── trigger.py
├── pkg
├── moira-api.service
├── moira-checker.service
├── postinst
└── worker.yml
├── requirements.txt
├── scripts
├── __init__.py
└── migration-v1.2.8.py
├── setup.py
├── tests
├── __init__.py
├── functional
│ ├── __init__.py
│ ├── test_api.py
│ └── test_data.py
└── unit
│ ├── __init__.py
│ ├── test_cache.py
│ ├── test_expression.py
│ └── test_fetchdata.py
└── version.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | _trial_temp
3 | twistd.pid
4 | .DS_Store
5 | log
6 | *.coverage
7 | *.coverprofile
8 | dist
9 | build
10 | *.egg-info
11 | version.txt
12 | /.idea
13 |
--------------------------------------------------------------------------------
/.landscape.yaml:
--------------------------------------------------------------------------------
1 | strictness: medium
2 | ignore-paths:
3 | - moira/graphite
4 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | - '2.7'
4 | notifications:
5 | webhooks:
6 | urls:
7 | - https://webhooks.gitter.im/e/9992cd0d36a6ba9bb40b
8 | on_success: change
9 | on_failure: always
10 | on_start: never
11 | install:
12 | - make prepare
13 | - make prepare_test
14 | script:
15 | - make test
16 | after_success:
17 | - coveralls
18 | - make pip
19 | - |
20 | body='{"request": {"branch":"master"}}'
21 | curl -s -X POST \
22 | -H "Content-Type: application/json" \
23 | -H "Accept: application/json" \
24 | -H "Travis-API-Version: 3" \
25 | -H "Authorization: token ${TOKEN}" \
26 | -d "$body" \
27 | https://api.travis-ci.org/repo/moira-alert%2Fpython-moira-client/requests
28 |
29 | before_deploy:
30 | - export RELEASE_PIP_TAR=$(ls dist/moira_worker-*.tar.gz)
31 | deploy:
32 | provider: releases
33 | api_key:
34 | secure: TJEJ42jvX4DfwEWLP3M4jl8bMSiv39DE/p0qfiwUR0uxQVcmsjv3/BV/ZmUzcyqVIbycKxnLiZRMprjEgCSknZsh/Bxbrl9jQlXpLd8V+ug5yBKrsoW/3flwDCg9F0W4aeGewbqcztgKCf6TRNinBZNPcvjTwRW5Uh4R/7+wXB3f+wO8Zeg4ABBwtjOQ/BWw5/JyXmJ+pkJcJhoVlaM+rKvKf5MAUvQITDdIrP78di7FLPpYcS5zCvqnUfQCfPtzQHhVVXR/cyhEgJdlrcJnQcKZvIzadQOCuj57ebR5rVbnEP30V9sLW4QlXU9gh1gwdRh5bb95CgSU+lTA5OXl9E431x9jCfmDBtAuGW2+2K6C0C61V14FnQRJ2/C3VoA+2Nof+G8ah1KQzx+wm1WbNfyGSet3SMcN8VUcWPwPNas43leFdzD9D2Mw7hcprYDCdnXLyAdJaovBMSRcJHGubs9H5NY1eBM4qD4TPYM2gfkbEmbZQOVrVz4nPzSc1hzmi9oV77fjNIrtz8OChwVlJShHz/8WydrpipMdniH/FfYS5zXdCsWPhMXtfkZSG8rAyPIQskzdnDXdD/hKq4kIX8Bp5UJUeVeEX9bXnA22jAu1bmvTGHPpmG2N8LaJbpr5aHFu3RncWhwdT2361FNyiXnxId5SL4RNpb76k7vAaec=
35 | file:
36 | - "${RELEASE_PIP_TAR}"
37 | skip_cleanup: true
38 | on:
39 | tags: true
40 | condition: $TRAVIS_PYTHON_VERSION = 2.7
41 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | // Place your settings in this file to override the default and user settings.
2 | {
3 | "editor.insertSpaces": true,
4 | "files.exclude": {
5 | "**/.git": true,
6 | "**/.DS_Store": true,
7 | "build": true,
8 | "dist": true,
9 | "*.egg-info": true,
10 | "**/*.pyc": true
11 | },
12 |     "python.linting.flake8Enabled": true,
13 |     "python.linting.flake8Path": "flake8",
14 |     "python.linting.flake8Args": ["--max-line-length=120", "--exclude=moira/graphite", "--ignore=E402"]
15 | }
--------------------------------------------------------------------------------
/.vscode/tasks.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.1.0",
3 | "command": "flake8",
4 | "isShellCommand": true,
5 | "showOutput": "silent",
6 | "args": [
7 | "--max-line-length=120",
8 | "--exclude=moira/graphite",
9 | "moira"
10 | ],
11 | "problemMatcher": {
12 | "fileLocation": [
13 | "relative",
14 | "${workspaceRoot}"
15 | ],
16 | "pattern": {
17 | "regexp": "^(.*):(\\d+):(\\d+):\\s+([WEF]\\d+)\\s+(.*)$",
18 | "file": 1,
19 | "line": 2,
20 | "column": 3,
21 | "severity": 4,
22 | "message": 5
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/AUTHORS.md:
--------------------------------------------------------------------------------
1 | # Maintainer
2 | Moira was originally developed and is supported by SKB Kontur (https://kontur.ru/eng/about).
3 |
4 | # Graphite authors
5 | This code contains substantial parts of Graphite Carbon code (see the moira/graphite directory).
6 | Graphite code is developed and maintained by its authors here: https://github.com/graphite-project/carbon
7 |
8 | We express gratitude to Graphite authors - without them Moira would have been impossible to make.
9 |
10 | # Original authors
11 | - Alexandr Akulov (akulov@skbkontur.ru)
12 | - Alexey Kirpichnikov (alexkir@skbkontur.ru)
13 | - Alexey Larkov (larkov@skbkontur.ru)
14 |
15 | # Contributors
16 | - Denis Kataev (denis.a.kataev+git@gmail.com)
17 | - Eugene Klimov (bloodjazman@gmail.com)
18 | - Alexey Gavrilov (le9i0nx+github@gmail.com)
19 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | ==========================
3 | Version 3, 29 June 2007
4 | ==========================
5 |
6 | > Copyright (C) 2007 Free Software Foundation, Inc.
7 | Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
8 |
9 | # Preamble
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | # TERMS AND CONDITIONS
72 |
73 | ## 0. Definitions.
74 |
75 | _"This License"_ refers to version 3 of the GNU General Public License.
76 |
77 | _"Copyright"_ also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | _"The Program"_ refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as _"you"_. _"Licensees"_ and
82 | "recipients" may be individuals or organizations.
83 |
84 | To _"modify"_ a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a _"modified version"_ of the
87 | earlier work or a work _"based on"_ the earlier work.
88 |
89 | A _"covered work"_ means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To _"propagate"_ a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To _"convey"_ a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | ## 1. Source Code.
113 |
114 | The _"source code"_ for a work means the preferred form of the work
115 | for making modifications to it. _"Object code"_ means any non-source
116 | form of a work.
117 |
118 | A _"Standard Interface"_ means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The _"System Libraries"_ of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The _"Corresponding Source"_ for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | ## 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | ## 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | ## 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | ## 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | ## 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A _"User Product"_ is either (1) a _"consumer product"_, which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | _"Installation Information"_ for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | ## 7. Additional Terms.
344 |
345 | _"Additional permissions"_ are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | ## 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | ## 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | ## 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An _"entity transaction"_ is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | ## 11. Patents.
472 |
473 | A _"contributor"_ is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's _"essential patent claims"_ are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | ## 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | ## 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | ## 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | ## 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | ## 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | ## 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | # END OF TERMS AND CONDITIONS
622 | --------------------------------------------------------------------------
623 |
624 |
625 | # How to Apply These Terms to Your New Programs
626 |
627 | If you develop a new program, and you want it to be of the greatest
628 | possible use to the public, the best way to achieve this is to make it
629 | free software which everyone can redistribute and change under these terms.
630 |
631 | To do so, attach the following notices to the program. It is safest
632 | to attach them to the start of each source file to most effectively
633 | state the exclusion of warranty; and each file should have at least
634 | the "copyright" line and a pointer to where the full notice is found.
635 |
636 |
637 | Copyright (C) <year> <name of author>
638 |
639 | This program is free software: you can redistribute it and/or modify
640 | it under the terms of the GNU General Public License as published by
641 | the Free Software Foundation, either version 3 of the License, or
642 | (at your option) any later version.
643 |
644 | This program is distributed in the hope that it will be useful,
645 | but WITHOUT ANY WARRANTY; without even the implied warranty of
646 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
647 | GNU General Public License for more details.
648 |
649 | You should have received a copy of the GNU General Public License
650 | along with this program. If not, see <https://www.gnu.org/licenses/>.
651 |
652 | Also add information on how to contact you by electronic and paper mail.
653 |
654 | If the program does terminal interaction, make it output a short
655 | notice like this when it starts in an interactive mode:
656 |
657 | <program> Copyright (C) <year> <name of author>
658 | This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'.
659 | This is free software, and you are welcome to redistribute it
660 | under certain conditions; type 'show c' for details.
661 |
662 | The hypothetical commands _'show w'_ and _'show c'_ should show the appropriate
663 | parts of the General Public License. Of course, your program's commands
664 | might be different; for a GUI interface, you would use an "about box".
665 |
666 | You should also get your employer (if you work as a programmer) or school,
667 | if any, to sign a "copyright disclaimer" for the program, if necessary.
668 | For more information on this, and how to apply and follow the GNU GPL, see
669 | <https://www.gnu.org/licenses/>.
670 |
671 | The GNU General Public License does not permit incorporating your program
672 | into proprietary programs. If your program is a subroutine library, you
673 | may consider it more useful to permit linking proprietary applications with
674 | the library. If this is what you want to do, use the GNU Lesser General
675 | Public License instead of this License. But first, please read
676 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
677 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE.md
2 | include README.md
3 | include MANIFEST.in
4 | include version.txt
5 | include requirements.txt
6 | include pkg/moira-api.service
7 | include pkg/moira-checker.service
8 | include pkg/worker.yml
9 | include pkg/postinst
10 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | VERSION := $(shell git describe --always --tags --abbrev=0 | tail -c +2)
2 | RELEASE := $(shell git describe --always --tags | awk -F- '{ if ($$2) dot="."} END { printf "1%s%s%s%s\n",dot,$$2,dot,$$3}')
3 | PIP_VERSION := $(shell git describe --always --tags | tail -c +2 | awk -F- '{ if ($$2) printf "%s.dev%s-%s\n",$$1,$$2,$$3; else print $$1 }')
4 | TRIAL := $(shell which trial)
5 | PYTHON := $(shell which python)
6 | PIP := $(shell which pip)
7 |
8 | VENDOR := "SKB Kontur"
9 | URL := "https://github.com/moira-alert"
10 | LICENSE := "GPLv3"
11 |
12 | default: clean prepare prepare_test test pip
13 |
14 | version:
15 | echo $(PIP_VERSION) > version.txt
16 |
17 | prepare:
18 | $(PIP) install -r requirements.txt
19 |
20 | prepare_test:
21 | $(PIP) install fakeredis
22 | $(PIP) install coveralls
23 | $(PIP) install flake8
24 |
25 | test:
26 | coverage run --source="moira" --omit="moira/graphite/*,moira/metrics/*" $(TRIAL) tests.unit tests.functional
27 | flake8 --max-line-length=120 --exclude=moira/graphite moira
28 |
29 | pip: version
30 | $(PYTHON) setup.py sdist
31 |
32 | clean:
33 | rm -rf build dist moira_worker.egg-info tests/_trial_temp
34 |
35 | rpm: version
36 | fpm -t rpm \
37 | -s "python" \
38 | --description "Moira Worker" \
39 | --vendor $(VENDOR) \
40 | --url $(URL) \
41 | --license $(LICENSE) \
42 | --name "moira-worker" \
43 | --version "$(VERSION)" \
44 | --iteration "$(RELEASE)" \
45 | --after-install "./pkg/postinst" \
46 | --no-python-dependencies \
47 | -p build \
48 | setup.py
49 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DEPRECATED, SEE https://github.com/moira-alert/moira
2 |
3 | # THIS REPOSITORY MAY BE REMOVED WITHOUT FURTHER NOTICE AFTER 31 AUG 2018
4 |
--------------------------------------------------------------------------------
/moira/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/moira/__init__.py
--------------------------------------------------------------------------------
/moira/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/moira/api/__init__.py
--------------------------------------------------------------------------------
/moira/api/request.py:
--------------------------------------------------------------------------------
1 | from time import time
2 |
3 | import anyjson
4 | from moira.graphite.datalib import createRequestContext
5 | from moira.graphite.evaluator import evaluateTarget
6 | from twisted.internet import defer
7 | from twisted.web import http, server
8 |
9 | from moira.checker.expression import getExpression
10 | from moira.checker import state
11 | from moira.trigger import trigger_reformat
12 | from moira.logs import log
13 |
14 |
def bad_request(request, message):
    """Finish `request` with an HTTP 400 response whose body is `message`.

    The message is returned unchanged so callers can write
    `defer.returnValue(bad_request(request, "..."))` in one line.
    """
    request.setResponseCode(http.BAD_REQUEST)
    request.write(message)
    request.finish()
    return message
20 |
21 |
def check_json(f):
    """Decorator for render_* methods that parses the request body as JSON.

    On success it sets `request.body` (raw string) and `request.body_json`
    (deserialized object) and then yields to the wrapped method. On a parse
    failure the request is finished with HTTP 400 and `f` is never called.
    Assumes the wrapped callable receives the request as its second
    positional argument, i.e. (resource, request, ...).
    """
    @defer.inlineCallbacks
    def decorator(*args, **kwargs):
        request = args[1]
        try:
            request.body = request.content.getvalue()
            request.body_json = anyjson.deserialize(request.body)
        except Exception as e:
            # NOTE(review): if getvalue() itself raised, request.body is not
            # yet set and this log call would fail with AttributeError — confirm.
            log.error("Invalid trigger json [{json}]: {e}", json=request.body, e=e)
            # defer.returnValue raises, so the wrapped function is skipped.
            defer.returnValue(bad_request(request, "Content is not json"))
        yield f(*args, **kwargs)
    return decorator
34 |
35 |
def is_simple_target(requestContext):
    """Return True if the evaluated target resolved to a single, literal
    graphite pattern.

    `requestContext['graphite_patterns']` is the dict of patterns collected
    while evaluating a target. A target is "simple" when there is at most
    one pattern and that pattern contains neither a '*' wildcard nor a
    '{...}' alternation group.
    """
    patterns = requestContext['graphite_patterns']
    if len(patterns) > 1:
        return False
    # Fix: dict.iterkeys() is Python-2-only; iterating the dict directly is
    # equivalent and works on both Python 2 and 3.
    return not any('*' in pattern or '{' in pattern for pattern in patterns)
47 |
48 |
@defer.inlineCallbacks
def resolve_patterns(request, expression_values):
    """Evaluate every target of the trigger held in `request.body_json`.

    Side effects:
      * fills `expression_values` with a placeholder value (42) for each
        target variable t1, t2, ... so the expression can be syntax-checked;
      * sets `request.body_json["patterns"]` to the top-level (unresolved)
        graphite patterns and `request.body_json["is_simple_trigger"]`;
      * stores the evaluation context on `request.context`, including the
        set of resolved series names under 'time_series_names'.

    Raises whatever `evaluateTarget` raises for invalid targets.
    """
    now = int(time())
    # Evaluate over the last 10 minutes — enough to discover patterns/series.
    context = createRequestContext(str(now - 600), str(now), allowRealTimeAlerting=True)
    resolved = set()
    target_num = 1
    context['time_series_names'] = set()
    is_simple_trigger = True
    if len(request.body_json["targets"]) > 1:
        is_simple_trigger = False
    for target in request.body_json["targets"]:
        time_series = yield evaluateTarget(context, target)
        if is_simple_trigger and not is_simple_target(context):
            is_simple_trigger = False
        target_name = "t%s" % target_num
        for ts in time_series:
            context['time_series_names'].add(ts.name)
        # Placeholder value so getExpression() can validate references to t<N>.
        expression_values[target_name] = 42
        target_num += 1
    # Fix: .iteritems() is Python-2-only; .items() behaves identically here.
    for pattern, resolve in context['graphite_patterns'].items():
        for r in resolve:
            if r != pattern:
                resolved.add(r)
    # Keep only patterns that are not themselves the resolution of another.
    request.body_json["patterns"] = [pattern for pattern in context['graphite_patterns']
                                     if pattern not in resolved]
    request.body_json["is_simple_trigger"] = is_simple_trigger
    request.context = context
76 |
77 |
def check_trigger(f):
    """Decorator for render_* methods that validates the trigger JSON in
    `request.body_json` before invoking the wrapped method.

    Validation steps (each failure finishes the request with HTTP 400):
      1. required fields: "targets", plus "warn_value"/"error_value" unless
         an "expression" is supplied instead;
      2. "targets" must be a list;
      3. the trigger must survive trigger_reformat();
      4. every graphite target must evaluate (resolve_patterns);
      5. the alerting expression must parse (getExpression).
    """
    @defer.inlineCallbacks
    def decorator(*args, **kwargs):
        request = args[1]
        json = request.body_json
        request.graphite_patterns = []
        for field, alt in [("targets", None), ("warn_value", "expression"), ("error_value", "expression")]:
            if json.get(field) is None and json.get(alt) is None:
                defer.returnValue(bad_request(request, "%s is required" % field))
        # Fix: isinstance() is the idiomatic type check and also accepts
        # list subclasses, unlike `type(x) is not list`.
        if not isinstance(json["targets"], list):
            defer.returnValue(bad_request(request, "Invalid trigger targets"))
        try:
            request.body_json = trigger_reformat(json, json.get("id"), json.get("tags", []))
        except Exception as e:
            log.error("Invalid trigger format [{json}]: {e}", json=json, e=e)
            defer.returnValue(bad_request(request, "Invalid trigger format"))
        expression_values = {'warn_value': json.get('warn_value'),
                             'error_value': json.get('error_value'),
                             'PREV_STATE': state.NODATA}
        try:
            yield resolve_patterns(request, expression_values)
        except Exception as e:
            log.error("Invalid graphite targets [{targets}]: {e}", targets=request.body_json["targets"], e=e)
            defer.returnValue(bad_request(request, "Invalid graphite targets"))
        try:
            getExpression(json.get("expression"), **expression_values)
        except Exception as e:
            log.error("Invalid expression [{expression}]: {e}", expression=json.get("expression"), e=e)
            defer.returnValue(bad_request(request, "Invalid expression"))
        yield f(*args, **kwargs)
    return decorator
109 |
110 |
def delayed(f):
    """Adapt an inlineCallbacks-style render method to Twisted's synchronous
    render_* contract.

    The wrapped coroutine is started asynchronously; the render method itself
    immediately returns NOT_DONE_YET so the connection stays open until the
    coroutine finishes the request. Unhandled errors are logged and turned
    into an HTTP 500 response.
    """
    def decorator(resource, request):
        @defer.inlineCallbacks
        def wrapper():
            try:
                yield f(resource, request)
            except Exception as e:
                log.error("Error in delayed decorator wrapped function: {e}", e=e)
                request.setResponseCode(http.INTERNAL_SERVER_ERROR)
                request.finish()
        wrapper()
        return server.NOT_DONE_YET
    return decorator
124 |
--------------------------------------------------------------------------------
/moira/api/resources/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/moira/api/resources/__init__.py
--------------------------------------------------------------------------------
/moira/api/resources/contact.py:
--------------------------------------------------------------------------------
1 | from twisted.internet import defer
2 |
3 | from moira.api.request import delayed, check_json
4 | from moira.api.resources.redis import RedisResource
5 |
6 |
class Contact(RedisResource):
    """REST resource representing a single notification contact."""

    def __init__(self, db, contact_id):
        self.contact_id = contact_id
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_DELETE(self, request):
        """Remove this contact from the authenticated user's contact list."""
        user_login = request.login
        existing = yield self.db.getContact(self.contact_id)
        yield self.db.deleteUserContact(
            self.contact_id, user_login, request=request, existing=existing)
        request.finish()
21 |
22 |
class Contacts(RedisResource):
    """Collection resource for notification contacts."""

    def getChild(self, path, request):
        """Route /<id> to a single-contact resource; empty path is us."""
        return self if not path else Contact(self.db, path)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        """List every contact known to the system."""
        all_contacts = yield self.db.getAllContacts()
        self.write_json(request, {'list': all_contacts})

    @delayed
    @check_json
    @defer.inlineCallbacks
    def render_PUT(self, request):
        """Create or update a contact for the authenticated user."""
        contact_id = request.body_json.get("id")
        if contact_id is None:
            existing = None
        else:
            existing = yield self.db.getContact(contact_id)
        saved = yield self.db.saveUserContact(request.login, request.body_json,
                                              request=request, existing=existing)
        self.write_json(request, saved)
45 |
--------------------------------------------------------------------------------
/moira/api/resources/event.py:
--------------------------------------------------------------------------------
1 | from twisted.internet import defer
2 |
3 | from moira.api.request import delayed
4 | from moira.api.resources.redis import RedisResource
5 |
6 |
class Events(RedisResource):
    """Paged list of trigger events, optionally scoped to a single trigger."""

    def __init__(self, db, trigger_id=None):
        self.trigger_id = trigger_id
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        """Return one page of events plus paging metadata.

        Query args: "p" (page number, default 0) and "size" (default 100).
        """
        raw_page = request.args.get("p")
        raw_size = request.args.get("size")
        page = int(raw_page[0]) if raw_page is not None else 0
        size = int(raw_size[0]) if raw_size is not None else 100
        events, total = yield self.db.getEvents(trigger_id=self.trigger_id,
                                                start=page * size, size=size - 1)
        self.write_json(request, {"list": events, "page": page,
                                  "size": size, "total": total})

    def getChild(self, path, request):
        """Route /<trigger_id>; an empty path means "events of all triggers"."""
        return self if not path else Events(self.db, path)
27 |
--------------------------------------------------------------------------------
/moira/api/resources/metric.py:
--------------------------------------------------------------------------------
1 | from moira.graphite.datalib import createRequestContext
2 | from moira.graphite.evaluator import evaluateTarget
3 | from twisted.internet import defer
4 |
5 | from moira.api.request import bad_request
6 | from moira.api.request import delayed
7 | from moira.api.resources.redis import RedisResource
8 |
9 |
class Metrics(RedisResource):
    """REST resource exposing the metric values behind a single trigger."""

    def __init__(self, db, trigger_id):
        # Trigger whose targets/metrics this resource operates on.
        self.trigger_id = trigger_id
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        """Evaluate the trigger's targets over ['from', 'to'] query args and
        return {series_name: [{"ts": ..., "value": ...}, ...]} with None
        values filtered out."""
        json, trigger = yield self.db.getTrigger(self.trigger_id)
        if json is None:
            defer.returnValue(bad_request(request, "Trigger not found"))
            raise StopIteration  # unreachable: defer.returnValue raises

        fromTime = request.args.get('from')[0]
        endTime = request.args.get('to')[0]
        context = createRequestContext(fromTime, endTime, allowRealTimeAlerting=True)
        result = {}
        # Fall back to the legacy single-"target" field when "targets" is absent.
        for target in trigger.get("targets", [trigger.get("target")]):
            time_series = yield evaluateTarget(context, target)
            for time_serie in time_series:
                values = [(time_serie.start + time_serie.step * i, time_serie[i]) for i in range(0, len(time_serie))]
                result[time_serie.name] = [{"ts": ts, "value": value} for ts, value in values if value is not None]
        self.write_json(request, result)

    @delayed
    @defer.inlineCallbacks
    def render_DELETE(self, request):
        """Remove one metric (query arg 'name') from the trigger's last
        check and drop cached metrics for all of the trigger's patterns."""
        metric = request.args.get('name')[0]

        json, trigger = yield self.db.getTrigger(self.trigger_id)

        if json is None:
            defer.returnValue(bad_request(request, "Trigger not found"))
            raise StopIteration  # unreachable: defer.returnValue raises

        # Hold the check lock so the checker does not race this update.
        yield self.db.acquireTriggerCheckLock(self.trigger_id, 10)

        last_check = yield self.db.getTriggerLastCheck(self.trigger_id)

        if last_check is None:
            # NOTE(review): the check lock acquired above is not explicitly
            # released on this early return; presumably it expires via its
            # timeout — confirm.
            defer.returnValue(bad_request(request, "Trigger check not found"))
            raise StopIteration

        metrics = last_check.get('metrics', {})
        if metric in metrics:
            del last_check['metrics'][metric]

        for pattern in trigger.get("patterns"):
            yield self.db.delPatternMetrics(pattern)

        yield self.db.setTriggerLastCheck(self.trigger_id, last_check)
        yield self.db.delTriggerCheckLock(self.trigger_id)

        request.finish()
65 |
--------------------------------------------------------------------------------
/moira/api/resources/notification.py:
--------------------------------------------------------------------------------
1 | from twisted.internet import defer
2 |
3 | from moira.api.request import delayed
4 | from moira.api.resources.redis import RedisResource
5 |
6 |
class Notifications(RedisResource):
    """REST resource over the pending-notifications queue."""

    def __init__(self, db):
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        """List notifications in the ['start', 'end'] range (query args)."""
        notifications, total = yield self.db.getNotifications(request.args.get('start')[0],
                                                              request.args.get('end')[0])
        self.write_json(request, {"list": list(notifications), "total": total})

    @delayed
    @defer.inlineCallbacks
    def render_DELETE(self, request):
        """Remove the notification identified by the 'id' query arg."""
        result = yield self.db.removeNotification(request.args.get('id')[0])
        self.write_json(request, {"result": result})

    def getChild(self, path, request):
        if not path:
            return self
        # Fix: previously this fell through and implicitly returned None for
        # any non-empty child path, which Twisted treats as a broken resource.
        # Delegate to the base class so unknown children get the framework's
        # standard not-found handling.
        return RedisResource.getChild(self, path, request)
28 |
--------------------------------------------------------------------------------
/moira/api/resources/pattern.py:
--------------------------------------------------------------------------------
1 | from twisted.internet import defer
2 |
3 | from moira.api.request import delayed
4 | from moira.api.resources.redis import RedisResource
5 |
6 |
class Pattern(RedisResource):
    """REST resource for a single graphite pattern."""

    def __init__(self, db, pattern):
        self.pattern = pattern
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_DELETE(self, request):
        """Delete this pattern from the database and finish the request."""
        yield self.db.removePattern(self.pattern, request=request)
        request.finish()
18 |
19 |
class Patterns(RedisResource):
    """Collection resource listing every pattern with its triggers and metrics."""

    def __init__(self, db):
        RedisResource.__init__(self, db)

    def getChild(self, path, request):
        """Route /<pattern>; an empty path is the collection itself."""
        return self if not path else Pattern(self.db, path)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        """For each known pattern, collect its triggers and cached metrics."""
        items = []
        for pattern in (yield self.db.getPatterns()):
            trigger_ids = yield self.db.getPatternTriggers(pattern)
            triggers_list = []
            for trigger_id in trigger_ids:
                _, trigger = yield self.db.getTrigger(trigger_id)
                triggers_list.append(trigger)
            metrics = yield self.db.getPatternMetrics(pattern)
            items.append({
                "pattern": pattern,
                "triggers": triggers_list,
                "metrics": metrics})
        self.write_json(request, {"list": items})
48 |
--------------------------------------------------------------------------------
/moira/api/resources/redis.py:
--------------------------------------------------------------------------------
1 | import anyjson
2 | from twisted.internet import defer
3 | from twisted.web.resource import Resource
4 |
5 | from moira.api.request import check_trigger, check_json
6 | from moira.checker import state
7 |
8 |
class RedisResource(Resource):
    """Base class for API resources backed by the shared db instance.

    Provides JSON response helpers and the common trigger-save routine.
    """

    def __init__(self, db):
        Resource.__init__(self)
        self.db = db

    def write_json(self, request, result):
        """Serialize `result` to JSON and finish the request with it."""
        request.setHeader("Content-Type", "application/json")
        request.write(anyjson.serialize(result))
        request.finish()

    def write_dumped_json(self, request, result):
        """Finish the request with `result` that is already serialized."""
        request.setHeader("Content-Type", "application/json")
        request.write(str(result))
        request.finish()

    @check_json
    @check_trigger
    @defer.inlineCallbacks
    def save_trigger(self, request, trigger_id, message):
        """Validate (via the decorators) and persist the trigger held in
        `request.body_json` under `trigger_id`, then reply with
        {"id": trigger_id, "message": message}.

        While holding the trigger check lock, metrics that no longer match
        the trigger's targets are dropped from the last check; a fresh
        NODATA check is created for brand-new triggers.
        """
        _, existing = yield self.db.getTrigger(trigger_id)

        # Serialize with the checker so a concurrent check is not clobbered.
        yield self.db.acquireTriggerCheckLock(trigger_id, 10)
        last_check = yield self.db.getTriggerLastCheck(trigger_id)
        if last_check:
            # Keep only metrics still produced by the (possibly edited)
            # targets; request.context is set by the check_trigger decorator.
            for metric in list(last_check.get('metrics', {})):
                if metric not in request.context['time_series_names']:
                    del last_check['metrics'][metric]
        else:
            last_check = {
                "metrics": {},
                "state": state.NODATA,
                "score": 0
            }

        yield self.db.setTriggerLastCheck(trigger_id, last_check)

        yield self.db.delTriggerCheckLock(trigger_id)

        yield self.db.saveTrigger(trigger_id, request.body_json,
                                  request=request, existing=existing)

        self.write_json(request, {
            "id": trigger_id,
            "message": message
        })
55 |
--------------------------------------------------------------------------------
/moira/api/resources/subscription.py:
--------------------------------------------------------------------------------
1 | from moira.graphite.attime import parseATTime
2 | from moira.graphite.util import epoch
3 | from twisted.internet import defer
4 |
5 | from moira.api.request import delayed, check_json
6 | from moira.api.resources.redis import RedisResource
7 |
8 |
class Test(RedisResource):
    """PUT child resource that pushes a fake event to exercise delivery
    for one subscription."""

    def __init__(self, db, sub_id):
        self.sub_id = sub_id
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_PUT(self, request):
        """Queue a synthetic TEST event for this subscription."""
        test_event = {
            "sub_id": self.sub_id,
            "metric": "Test.metric.value",
            "value": 1,
            "old_state": "TEST",
            "state": "TEST",
            "timestamp": int(epoch(parseATTime("now")))
        }
        yield self.db.pushEvent(test_event, ui=False, request=request)
        request.finish()
27 |
28 |
class Subscription(RedisResource):
    """REST resource for one subscription; exposes a /test child resource."""

    def __init__(self, db, sub_id):
        self.sub_id = sub_id
        RedisResource.__init__(self, db)
        self.putChild("test", Test(db, sub_id))

    @delayed
    @defer.inlineCallbacks
    def render_DELETE(self, request):
        """Detach this subscription from the authenticated user."""
        existing = yield self.db.getSubscription(self.sub_id)
        yield self.db.removeUserSubscription(
            request.login, self.sub_id, request=request, existing=existing)
        request.finish()
42 |
43 |
class Subscriptions(RedisResource):
    """Collection resource for the authenticated user's subscriptions."""

    def __init__(self, db):
        RedisResource.__init__(self, db)

    def getChild(self, path, request):
        """Route /<sub_id>; an empty path is the collection itself."""
        return self if not path else Subscription(self.db, path)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        """List the full subscription objects of the current user."""
        sub_ids = yield self.db.getUserSubscriptions(request.login)
        subscriptions = []
        yield self.db.join(sub_ids, self.db.getSubscription, subscriptions)
        self.write_json(request, {'list': subscriptions})

    @delayed
    @check_json
    @defer.inlineCallbacks
    def render_PUT(self, request):
        """Create or update a subscription for the current user.

        Note: getSubscription() is deliberately passed through un-yielded —
        the db layer receives its pending result as `get_existing`; verify
        against saveUserSubscription's contract.
        """
        pending_existing = self.db.getSubscription(request.body_json.get('id'))
        saved = yield self.db.saveUserSubscription(request.login, request.body_json,
                                                   request=request,
                                                   get_existing=pending_existing)
        self.write_json(request, saved)
72 |
--------------------------------------------------------------------------------
/moira/api/resources/tags.py:
--------------------------------------------------------------------------------
1 | from twisted.internet import defer
2 | from twisted.web import http
3 |
4 | from moira.api.request import delayed, check_json
5 | from moira.api.resources.redis import RedisResource
6 |
7 |
class Stats(RedisResource):
    """Resource aggregating, per tag: its triggers, subscriptions and data."""

    def __init__(self, db):
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        # Three sequential db round-trips per tag; acceptable for a modest
        # tag count, O(3n) calls otherwise.
        tags = yield self.db.getTags()
        result = []
        for tag in tags:
            triggers = yield self.db.getTagTriggers(tag)
            subs = yield self.db.getTagSubscriptions(tag)
            data = yield self.db.getTag(tag)
            tag_data = {
                "name": tag,
                "triggers": triggers,
                "subscriptions": subs,
                "data": data}
            result.append(tag_data)
        self.write_json(request, {"list": result})
29 |
30 |
class Data(RedisResource):
    """Resource for attaching arbitrary JSON data to a single tag."""

    def __init__(self, db, tag):
        self.tag = tag
        RedisResource.__init__(self, db)

    @delayed
    @check_json
    @defer.inlineCallbacks
    def render_PUT(self, request):
        """PUT: replace this tag's stored data with the request's JSON body."""
        # Pass the current record along so the db layer sees the prior state.
        current = yield self.db.getTag(self.tag)
        yield self.db.setTag(self.tag, request.body_json, request=request, existing=current)
        request.finish()
44 |
45 |
class Tag(RedisResource):
    """Resource for a single tag; hosts /data for attached tag data."""

    def __init__(self, db, tag):
        self.tag = tag
        RedisResource.__init__(self, db)
        self.putChild("data", Data(db, tag))

    @delayed
    @defer.inlineCallbacks
    def render_DELETE(self, request):
        # A tag still referenced by triggers must not be removed; the caller
        # has to detach it from those triggers first.
        triggers = yield self.db.getTagTriggers(self.tag)
        if triggers:
            request.setResponseCode(http.BAD_REQUEST)
            request.write(
                "This tag is assigned to %s triggers. Remove tag from triggers first" %
                len(triggers))
            request.finish()
        else:
            existing = yield self.db.getTag(self.tag)
            yield self.db.removeTag(self.tag, request=request, existing=existing)
            self.write_json(request, {"message": "tag deleted"})
67 |
68 |
class Tags(RedisResource):
    """Collection resource for tags; hosts /stats for per-tag statistics."""

    def __init__(self, db):
        RedisResource.__init__(self, db)
        self.putChild("stats", Stats(db))

    def getChild(self, path, request):
        # Empty path resolves to the collection; otherwise the segment is a tag name.
        if not path:
            return self
        return Tag(self.db, path)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        # "list" duplicates "tags" with names coerced via the Python 2
        # `unicode` builtin (this module is Python 2 only).
        tags = yield self.db.getTags()
        self.write_json(request, {"tags": tags, "list": [unicode(k) for k in tags]})
85 |
--------------------------------------------------------------------------------
/moira/api/resources/trigger.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | from urllib import unquote
3 |
4 | from twisted.internet import defer
5 | from twisted.web import http
6 |
7 | from moira.api.request import delayed, check_json
8 | from moira.api.resources.metric import Metrics
9 | from moira.api.resources.redis import RedisResource
10 |
11 |
class State(RedisResource):
    """Resource exposing the last check result of a single trigger."""

    def __init__(self, db, trigger_id):
        self.trigger_id = trigger_id
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        """GET: return the last check payload, tagged with the trigger id.

        An empty object (plus trigger_id) is returned when the trigger has
        never been checked.
        """
        last_check = yield self.db.getTriggerLastCheck(self.trigger_id)
        if last_check is None:
            last_check = {}
        last_check["trigger_id"] = self.trigger_id
        self.write_json(request, last_check)
25 |
26 |
class Throttling(RedisResource):
    """Resource for reading and resetting a trigger's notification throttling."""

    def __init__(self, db, trigger_id):
        self.trigger_id = trigger_id
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        """GET: return the trigger's current throttling value."""
        throttling = yield self.db.getTriggerThrottling(self.trigger_id)
        self.write_json(request, {"throttling": throttling})

    @delayed
    @defer.inlineCallbacks
    def render_DELETE(self, request):
        """DELETE: drop the trigger's throttling state."""
        yield self.db.deleteTriggerThrottling(self.trigger_id)
        request.finish()
44 |
45 |
class Maintenance(RedisResource):
    """Resource for setting per-metric maintenance windows on a trigger."""

    def __init__(self, db, trigger_id):
        self.trigger_id = trigger_id
        RedisResource.__init__(self, db)

    @delayed
    @check_json
    @defer.inlineCallbacks
    def render_PUT(self, request):
        # Body is presumably a mapping of metric name -> maintenance-until
        # timestamp; confirm against setTriggerMetricsMaintenance.
        yield self.db.setTriggerMetricsMaintenance(self.trigger_id, request.body_json)
        request.finish()
58 |
59 |
class Trigger(RedisResource):
    """Resource for one trigger, with state/throttling/metrics/maintenance children."""

    def __init__(self, db, trigger_id):
        self.trigger_id = trigger_id
        RedisResource.__init__(self, db)
        self.putChild("state", State(db, trigger_id))
        self.putChild("throttling", Throttling(db, trigger_id))
        self.putChild("metrics", Metrics(db, trigger_id))
        self.putChild("maintenance", Maintenance(db, trigger_id))

    @delayed
    @defer.inlineCallbacks
    def render_PUT(self, request):
        # save_trigger is inherited from RedisResource (not visible here).
        yield self.save_trigger(request, self.trigger_id, "trigger updated")

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        # getTrigger returns (raw json, parsed struct); json None => not found.
        json, trigger = yield self.db.getTrigger(self.trigger_id)
        if json is None:
            request.setResponseCode(http.NOT_FOUND)
            request.finish()
        else:
            # The throttling value is folded into the returned trigger struct.
            throttling = yield self.db.getTriggerThrottling(self.trigger_id)
            trigger["throttling"] = throttling
            self.write_json(request, trigger)

    @delayed
    @defer.inlineCallbacks
    def render_DELETE(self, request):
        # Existing struct is fetched so the db layer can see the prior state.
        _, existing = yield self.db.getTrigger(self.trigger_id)
        yield self.db.removeTrigger(self.trigger_id, request=request, existing=existing)
        request.finish()
93 |
94 |
class Triggers(RedisResource):
    """Collection resource for triggers; /page serves the paginated listing."""

    def __init__(self, db):
        RedisResource.__init__(self, db)
        self.putChild("page", Page(db))

    def getChild(self, path, request):
        # Empty path resolves to the collection; otherwise the segment is a trigger id.
        if not path:
            return self
        return Trigger(self.db, path)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        # Full, unpaginated list of triggers with their check info.
        result = yield self.db.getTriggersChecks()
        self.write_json(request, {"list": result})

    @delayed
    @defer.inlineCallbacks
    def render_PUT(self, request):
        # New triggers get a random UUID; save_trigger is inherited from RedisResource.
        trigger_id = str(uuid.uuid4())
        yield self.save_trigger(request, trigger_id, "trigger created")
117 |
118 |
class Page(RedisResource):
    """Paginated trigger listing, honouring the web UI's filter cookies."""

    def __init__(self, db):
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        # Filters come from cookies set by the UI; paging from query args.
        filter_ok = request.getCookie('moira_filter_ok')
        filter_tags = request.getCookie('moira_filter_tags')
        page = request.args.get("p")
        size = request.args.get("size")
        # Query args arrive as lists of strings; defaults: first page, 10 items.
        page = 0 if page is None else int(page[0])
        size = 10 if size is None else int(size[0])
        filter_ok = False if filter_ok is None else filter_ok == 'true'
        filter_tags = [] if not filter_tags else unquote(filter_tags).split(',')
        if not filter_ok and len(filter_tags) == 0:
            # NOTE(review): (page * size, size - 1) reads like (start, count-1);
            # if getTriggersChecksPage expects (start, end) indices the second
            # argument would presumably be page * size + size - 1 -- confirm
            # against the db layer.
            triggers, total = yield self.db.getTriggersChecksPage(page * size, size - 1)
        else:
            triggers, total = yield self.db.getFilteredTriggersChecksPage(page, size, filter_ok, filter_tags)
        self.write_json(request, {"list": triggers, "page": page, "size": size, "total": total})
140 |
--------------------------------------------------------------------------------
/moira/api/resources/user.py:
--------------------------------------------------------------------------------
1 | from twisted.internet import defer
2 |
3 | from moira.api.request import delayed
4 | from moira.api.resources.redis import RedisResource
5 |
6 |
class Login(RedisResource):
    """Resource reporting the authenticated user's login; hosts /settings."""

    def __init__(self, db):
        RedisResource.__init__(self, db)
        self.putChild("settings", Settings(db))

    @delayed
    def render_GET(self, request):
        """GET: return the login extracted from the authentication header."""
        self.write_json(request, {'login': request.login})
17 |
18 |
class Settings(RedisResource):
    """Resource returning the user's contacts and subscriptions in one payload."""

    def __init__(self, db):
        RedisResource.__init__(self, db)

    @delayed
    @defer.inlineCallbacks
    def render_GET(self, request):
        login = request.login
        settings = {"login": login,
                    "subscriptions": [],
                    "contacts": []}
        subs = yield self.db.getUserSubscriptions(login)
        contacts = yield self.db.getUserContacts(login)
        # db.join resolves each id into its full record, filling the lists in place.
        yield self.db.join(contacts, self.db.getContact, settings["contacts"])
        yield self.db.join(subs, self.db.getSubscription, settings["subscriptions"])
        self.write_json(request, settings)
36 |
--------------------------------------------------------------------------------
/moira/api/server.py:
--------------------------------------------------------------------------------
1 | from moira.graphite import datalib
2 | from twisted.application import service, internet
3 | from twisted.internet import reactor
4 |
5 | from moira import config
6 | from moira import logs
7 | from moira.api.site import Site
8 | from moira.db import Db
9 |
10 |
def run():
    """API service entry point: wire config, logging, the db service and the
    HTTP site into one MultiService and run the reactor until shutdown."""

    config.read()
    logs.api()

    top_service = service.MultiService()

    db = Db()
    # The graphite data layer resolves metrics through this same db instance.
    datalib.db = db
    db.setServiceParent(top_service)

    http_service = internet.TCPServer(config.HTTP_PORT, Site(db), interface=config.HTTP_ADDR)
    http_service.setServiceParent(top_service)

    top_service.startService()

    # Stop all child services cleanly before the reactor exits.
    reactor.addSystemEventTrigger('before', 'shutdown', top_service.stopService)

    reactor.run()
30 |
--------------------------------------------------------------------------------
/moira/api/site.py:
--------------------------------------------------------------------------------
1 | from twisted.internet import reactor
2 | from twisted.web import server, http
3 | from twisted.web.resource import Resource
4 |
5 | from moira import config
6 | from moira.api.resources.contact import Contacts
7 | from moira.api.resources.event import Events
8 | from moira.api.resources.notification import Notifications
9 | from moira.api.resources.pattern import Patterns
10 | from moira.api.resources.subscription import Subscriptions
11 | from moira.api.resources.tags import Tags
12 | from moira.api.resources.trigger import Triggers
13 | from moira.api.resources.user import Login
14 |
15 |
class MoiraRequest(server.Request):
    """Request subclass that records its creation time (used by Site.log for
    latency) and exposes the authenticated login from x-webauth-user."""

    def __init__(self, channel, queued):
        self.body_json = None
        self.creation = reactor.seconds()
        server.Request.__init__(self, channel, queued)

    @property
    def login(self):
        """Login of the authenticated user, or '' when the header is absent."""
        user = self.getHeader('x-webauth-user')
        return user if user else ''
26 |
27 |
class Site(server.Site):
    """Twisted web site mounting all API resources under config.PREFIX."""

    requestFactory = MoiraRequest
    displayTracebacks = False

    def __init__(self, db):
        # Build a chain of empty Resources for every non-empty segment of
        # config.PREFIX (e.g. "api" -> resources mounted under /api/...),
        # accumulating the normalized prefix string in self.prefix.
        self.prefix = ""
        root = Resource()
        prefix = root
        for path in config.PREFIX.split('/'):
            if len(path):
                r = Resource()
                prefix.putChild(path, r)
                prefix = r
                self.prefix += "/%s" % path
        prefix.putChild("trigger", Triggers(db))
        prefix.putChild("tag", Tags(db))
        prefix.putChild("pattern", Patterns(db))
        prefix.putChild("event", Events(db))
        prefix.putChild("contact", Contacts(db))
        prefix.putChild("subscription", Subscriptions(db))
        prefix.putChild("user", Login(db))
        prefix.putChild("notification", Notifications(db))
        server.Site.__init__(self, root)

    def _escape(self, s):
        # Newer twisted versions expose _escape on twisted.web.http; fall
        # back to the Site implementation on older ones.
        if hasattr(http, '_escape'):
            return http._escape(s)
        return server.Site._escape(self, s)

    def log(self, request):
        # Custom access-log line: elapsed seconds, request line, status code
        # and the Content-Length request header (or "-" when absent).
        if hasattr(self, "logFile"):
            elapsed = reactor.seconds() - request.creation
            line = '- %.3f "%s" %d %s\n' % (
                elapsed,
                '%s %s %s' % (self._escape(request.method),
                              self._escape(request.uri),
                              self._escape(request.clientproto)),
                request.code,
                request.requestHeaders.getRawHeaders('Content-Length', ["-"])[0])
            self.logFile.write(line)
69 |
--------------------------------------------------------------------------------
/moira/cache.py:
--------------------------------------------------------------------------------
1 | from functools import wraps
2 |
3 | from twisted.internet import reactor, defer
4 |
5 | CACHE = {}
6 |
7 |
def cache(f):
    """Decorator memoizing a Deferred-returning function in the module-level
    CACHE dict.

    Caching only engages when the call carries both `cache_key` and
    `cache_ttl` keyword arguments; otherwise f is invoked directly. Both
    keywords are stripped before f is called.
    """
    @wraps(f)
    @defer.inlineCallbacks
    def wrapper(*args, **kwargs):
        if 'cache_key' in kwargs and 'cache_ttl' in kwargs:
            # Key combines the function object's repr with the caller key.
            key = "%s%s" % (f, kwargs['cache_key'])
            ttl = kwargs['cache_ttl']
            del kwargs['cache_key']
            del kwargs['cache_ttl']
            now = reactor.seconds()

            @defer.inlineCallbacks
            def get_value():
                result = yield f(*args, **kwargs)
                defer.returnValue(result)
            timestamp, result = CACHE.get(key, (0, None))
            if timestamp + ttl < now:
                # Entry is stale: the timestamp is refreshed with the OLD
                # value before recomputing -- presumably so concurrent callers
                # reuse the stale value instead of all invoking f at once;
                # confirm that this is the intended behavior.
                CACHE[key] = (now, result)
                result = yield get_value()
                CACHE[key] = (now, result)
        else:
            result = yield f(*args, **kwargs)
        defer.returnValue(result)
    return wrapper
32 |
--------------------------------------------------------------------------------
/moira/checker/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/moira/checker/__init__.py
--------------------------------------------------------------------------------
/moira/checker/check.py:
--------------------------------------------------------------------------------
1 | from time import time
2 | from twisted.internet import defer
3 | from moira.graphite import datalib
4 | from moira import config
5 | from moira.checker import expression
6 | from moira.checker import state
7 | from moira.checker import event
8 | from moira.logs import log
9 |
10 |
@defer.inlineCallbacks
def trigger(trigger, fromTime, now, cache_ttl):
    """Run one full check of `trigger`.

    Evaluates the trigger's targets over [start - ttl, now], updates every
    metric's state, emits state-change events via event.compare_states and
    stores the resulting last-check structure. `raise StopIteration` is the
    Python 2 inlineCallbacks idiom for an early return.
    """
    now = now or int(time())

    log.info("Checking trigger {id}", id=trigger.id)
    initialized = yield trigger.init(now, fromTime=fromTime)
    if not initialized:
        raise StopIteration

    if fromTime is None:
        fromTime = trigger.last_check.get("timestamp", now)

    # Look back one trigger TTL (default 600s) behind the starting point.
    fromTime = str(fromTime - (trigger.ttl or 600))
    requestContext = datalib.createRequestContext(fromTime, endTime=str(now), allowRealTimeAlerting=trigger.is_simple)

    # The new check starts from a copy of the previous per-metric states.
    check = {
        "metrics": trigger.last_check["metrics"].copy(),
        "state": state.OK,
        "timestamp": now,
        "score": trigger.last_check.get("score")
    }

    try:
        time_series = yield trigger.get_timeseries(requestContext)

        # Trim stored metric values that fell out of the retention window.
        for metric in requestContext['metrics']:
            yield trigger.db.cleanupMetricValues(metric, now - config.METRICS_TTL,
                                                 cache_key=metric, cache_ttl=cache_ttl)

        if not time_series:
            if trigger.ttl:
                # No data at all: degrade to the trigger's configured TTL state.
                check["state"] = trigger.ttl_state
                check["msg"] = "Trigger has no metrics"
                yield event.compare_states(trigger, check, trigger.last_check, now)
        else:

            # Seed metric states from the previous check for every non-stub
            # series of every target.
            for t_series in time_series.values():
                for tN in t_series:
                    if not tN.stub:
                        check["metrics"][tN.name] = tN.last_state.copy()

            for t1 in time_series[1]:

                log.debug("Checking timeserie {name}: {values}", name=t1.name, values=list(t1))
                log.debug("Checking interval: {start} - {end} ({duration}s), step: {step}",
                          start=t1.start, end=t1.end, step=t1.step, duration=t1.end - t1.start)
                metric_state = check["metrics"].get(t1.name)
                if not metric_state:
                    log.debug("No metric state for {name}.", name=t1.name)
                    continue

                # Never re-evaluate points at/before the checkpoint: the last
                # event timestamp, guarded by CHECKPOINT_GAP.
                checkpoint = max(t1.last_state["timestamp"] - config.CHECKPOINT_GAP,
                                 metric_state.get("event_timestamp", 0))
                log.debug("Checkpoint for {name}: {checkpoint}", name=t1.name, checkpoint=checkpoint)

                for value_timestamp in xrange(t1.start, now + t1.step, t1.step):

                    if value_timestamp <= checkpoint:
                        continue

                    expression_values = time_series.get_expression_values(t1, value_timestamp)

                    t1_value = expression_values["t1"]

                    log.debug("values for ts {timestamp}: {values}",
                              timestamp=value_timestamp, values=expression_values)
                    # Skip this point if any target lacks a value there.
                    if None in expression_values.values():
                        continue

                    expression_values.update({'warn_value': trigger.struct.get('warn_value'),
                                              'error_value': trigger.struct.get('error_value'),
                                              'PREV_STATE': metric_state['state']})

                    expression_state = expression.getExpression(trigger.struct.get('expression'),
                                                                **expression_values)

                    time_series.update_state(t1, check, expression_state, expression_values, value_timestamp)

                    yield event.compare_states(trigger, metric_state, t1.last_state,
                                               value_timestamp, value=t1_value,
                                               metric=t1.name)

                # compare with last_check timestamp in case if we have not run checker for a long time
                if trigger.ttl and metric_state["timestamp"] + trigger.ttl < trigger.last_check["timestamp"]:
                    log.info("Metric {name} TTL expired for state {state}", name=t1.name, state=metric_state)
                    if trigger.ttl_state == state.DEL and metric_state.get("event_timestamp") is not None:
                        # DEL policy: forget the metric (and its mirrors under
                        # the extra targets) and drop cached pattern metrics.
                        log.info("Remove metric {name}", name=t1.name)
                        del check["metrics"][t1.name]
                        for tN, tName in time_series.other_targets_names.iteritems():
                            log.info("Remove metric {name}", name=tName)
                            del check["metrics"][tName]
                        for pattern in trigger.struct.get("patterns"):
                            yield trigger.db.delPatternMetrics(pattern)
                        continue
                    time_series.update_state(t1, check, state.to_metric_state(trigger.ttl_state), None,
                                             trigger.last_check["timestamp"] - trigger.ttl)
                    yield event.compare_states(trigger, metric_state, t1.last_state, metric_state["timestamp"],
                                               metric=t1.name)

    except StopIteration:
        # Re-raise so inlineCallbacks treats it as a normal early return.
        raise
    except Exception as e:
        log.error("Trigger check failed: {e}", e=e)
        check["state"] = state.EXCEPTION
        check["msg"] = "Trigger evaluation exception"
        yield event.compare_states(trigger, check, trigger.last_check, now)
    # Aggregate score: sum of metric scores plus the trigger state's own score.
    scores = sum(map(lambda m: state.SCORES[m["state"]], check["metrics"].itervalues()))
    check["score"] = scores + state.SCORES[check["state"]]
    yield trigger.db.setTriggerLastCheck(trigger.id, check)
120 |
--------------------------------------------------------------------------------
/moira/checker/event.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from twisted.internet import defer
3 |
4 | from moira import config
5 | from moira.checker import state
6 | from moira.logs import log
7 |
8 |
@defer.inlineCallbacks
def compare_states(trigger,
                   current_state,
                   last_state,
                   timestamp,
                   value=None,
                   metric=None):
    """Compare a new state with the previous one and push a notification
    event when a transition (or a reminder for a persistent bad state)
    should be delivered.

    Mutates both state dicts in place (state, event_timestamp, suppressed).
    `raise StopIteration` is the Python 2 inlineCallbacks early-return idiom.
    """
    current_state_value = current_state["state"]
    last_state_value = last_state["state"]
    # Record the new state on the previous-state struct immediately.
    last_state["state"] = current_state_value

    if current_state.get("event_timestamp") is None:
        current_state["event_timestamp"] = timestamp

    event = {
        "trigger_id": trigger.id,
        "state": current_state_value,
        "old_state": last_state_value,
        "timestamp": timestamp,
        "metric": metric
    }

    if current_state_value == last_state_value:
        # No transition: bail out unless a configured reminder interval for
        # this (bad) state has elapsed, or a previously suppressed non-OK
        # event should now be delivered.
        remind_interval = config.BAD_STATES_REMINDER.get(current_state_value)
        if remind_interval is None or timestamp - last_state.get("event_timestamp", timestamp) < remind_interval:
            if not last_state.get("suppressed") or current_state_value == state.OK:
                raise StopIteration
        else:
            event["msg"] = "This metric has been in bad state for more than %s hours - please, fix." % \
                (remind_interval / 3600)
    current_state["event_timestamp"] = timestamp
    last_state["event_timestamp"] = timestamp
    if value is not None:
        event["value"] = value
    current_state["suppressed"] = False
    last_state["suppressed"] = False
    if trigger.isSchedAllows(timestamp):
        state_maintenance = current_state.get("maintenance", 0)
        if trigger.maintenance >= timestamp:
            # Whole-trigger (tag-level) maintenance window is active.
            current_state["suppressed"] = True
            log.info("Event {event} suppressed due to maintenance until {date}.",
                     event=str(event), date=datetime.fromtimestamp(trigger.maintenance))
        elif state_maintenance >= timestamp:
            # Per-metric maintenance window is active.
            current_state["suppressed"] = True
            log.info("Event {event} suppressed due to metric {metric} maintenance until {date}.",
                     event=str(event), metric=metric, date=datetime.fromtimestamp(state_maintenance))
        else:
            log.info("Writing new event: {event}", event=str(event))
            yield trigger.db.pushEvent(event)
    else:
        current_state["suppressed"] = True
        log.info("Event {event} suppressed due to trigger schedule", event=str(event))
61 |
--------------------------------------------------------------------------------
/moira/checker/expression.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import operator
3 |
4 | from moira.checker import state
5 |
# Default trigger expression: compare t1 against error/warn thresholds with
# an operator chosen at evaluation time (see getExpression below).
_default = ast.parse("ERROR if compare_operator(t1, error_value) else \
WARN if compare_operator(t1, warn_value) else OK", mode='eval')
DEFAULT = compile(_default, '', mode='eval')


class ExpressionError(Exception):
    """Raised when a user-supplied trigger expression is malformed or forbidden."""
    pass


# Compiled code objects keyed by expression source, so each distinct
# expression is parsed and validated only once.
cache = {}
16 |
17 |
def compile_expression(exp):
    """Validate and compile a user-supplied trigger expression.

    The AST is rejected when it is not a single expression or contains any
    call or lambda node (user expressions are later eval'd, so callables
    must not be reachable). Compiled code objects are memoized in `cache`.

    Raises ExpressionError on invalid or forbidden input.
    """
    compiled = cache.get(exp)
    if compiled is None:
        tree = ast.parse(exp)
        nodes = list(ast.walk(tree))
        if len(nodes) < 2 or not isinstance(nodes[1], ast.Expr):
            raise ExpressionError("%s is not Expression" % exp)
        for node in nodes:
            if isinstance(node, ast.Call):
                raise ExpressionError("Call method is forbidden")
            if isinstance(node, ast.Lambda):
                raise ExpressionError("Lambda is strongly forbidden")
        compiled = compile(exp, '', mode='eval')
        cache[exp] = compiled
    return compiled
34 |
35 |
def getExpression(trigger_expression=None, **kwargs):
    """Evaluate a trigger expression and return the resulting state value.

    kwargs (t1..tN, warn_value, error_value, PREV_STATE, ...) become the
    evaluation globals, alongside the state-name constants. Without a custom
    expression the DEFAULT threshold expression runs, with the comparison
    direction derived from warn_value vs error_value.
    """
    namespace = {"OK": state.OK,
                 "WARN": state.WARN,
                 "WARNING": state.WARN,
                 "ERROR": state.ERROR,
                 "NODATA": state.NODATA}
    namespace.update(kwargs)
    if trigger_expression:
        return eval(compile_expression(trigger_expression), namespace)
    # Rising thresholds (warn <= error) fire on >=, falling ones on <=.
    if namespace['warn_value'] <= namespace['error_value']:
        namespace['compare_operator'] = operator.ge
    else:
        namespace['compare_operator'] = operator.le
    return eval(DEFAULT, namespace)
50 |
--------------------------------------------------------------------------------
/moira/checker/master.py:
--------------------------------------------------------------------------------
1 | import anyjson
2 |
3 | import txredisapi as redis
4 |
5 | from twisted.application import service
6 | from twisted.internet import defer, reactor
7 | from twisted.internet.task import LoopingCall
8 |
9 | from moira import config
10 | from moira.logs import log
11 |
12 |
class MasterProtocol(redis.SubscriberProtocol):
    """Redis pub/sub subscriber receiving matched-metric events and fanning
    them out into per-trigger check tasks."""

    @defer.inlineCallbacks
    def messageReceived(self, ignored, channel, message, nocache=False):
        # Payload is JSON with "pattern" and "metric" keys.
        try:
            json = anyjson.deserialize(message)
            db = self.factory.db
            # Track metric-flow liveness for the master's nodata checker.
            db.last_data = reactor.seconds()
            pattern = json["pattern"]
            metric = json["metric"]
            yield db.addPatternMetric(pattern, metric)
            triggers = yield db.getPatternTriggers(pattern)
            if not triggers:
                # Pattern no longer referenced by any trigger: garbage-collect
                # the pattern and every metric recorded under it.
                yield db.removePattern(pattern)
                metrics = yield db.getPatternMetrics(pattern)
                for metric in metrics:
                    yield db.delMetric(metric)
                yield db.delPatternMetrics(pattern)

            for trigger_id in triggers:
                if nocache:
                    yield db.addTriggerCheck(trigger_id)
                else:
                    # Dedupe scheduling per trigger within CHECK_INTERVAL.
                    yield db.addTriggerCheck(trigger_id, cache_key=trigger_id, cache_ttl=config.CHECK_INTERVAL)
        except Exception as e:
            log.error("Failed to receive metric: {e}", e=e)
39 |
40 |
class MasterService(service.Service):
    """Service subscribing to the metric-event Redis channel and periodically
    scheduling nodata checks across all triggers."""

    def __init__(self, db, channel="metric-event"):
        self.db = db
        self.channel = channel
        # Initialize liveness so the nodata sweep is not disabled at startup.
        self.db.last_data = reactor.seconds()

    @defer.inlineCallbacks
    def startService(self):
        service.Service.startService(self)
        factory = redis.SubscriberFactory()
        factory.protocol = MasterProtocol
        factory.continueTrying = True
        factory.db = self.db
        yield self.db.startService()
        yield reactor.connectTCP(config.REDIS_HOST, config.REDIS_PORT, factory)
        self.rc = yield factory.deferred
        yield self.rc.subscribe(self.channel)
        log.info('Subscribed to {channel}', channel=self.channel)
        # Periodic nodata sweep, first run immediately.
        self.lc = LoopingCall(self.checkNoData)
        self.nodata_check = self.lc.start(config.NODATA_CHECK_INTERVAL, now=True)

    @defer.inlineCallbacks
    def checkNoData(self):
        # Skip the sweep entirely when no metric has arrived for longer than
        # STOP_CHECKING_INTERVAL (metric flow itself is presumed broken).
        try:
            now = reactor.seconds()
            if self.db.last_data + config.STOP_CHECKING_INTERVAL < now:
                log.info("Checking nodata disabled. No metrics for {seconds} seconds",
                         seconds=int(now - self.db.last_data))
            else:
                log.info("Checking nodata")
                triggers = yield self.db.getTriggers()
                for trigger_id in triggers:
                    yield self.db.addTriggerCheck(trigger_id, cache_key=trigger_id, cache_ttl=60)
        except Exception as e:
            log.error("NoData check failed: {e}", e=e)

    @defer.inlineCallbacks
    def stopService(self):
        # Stop the sweep loop, wait for a pending sweep, then drop Redis.
        yield self.lc.stop()
        yield self.nodata_check
        yield self.rc.disconnect()
83 |
--------------------------------------------------------------------------------
/moira/checker/server.py:
--------------------------------------------------------------------------------
1 | import multiprocessing
2 | import os
3 | import sys
4 |
5 | from moira.graphite import datalib
6 | from twisted.application import service
7 | from twisted.internet import reactor
8 | from twisted.internet.protocol import ProcessProtocol
9 |
10 | from moira import config
11 | from moira import logs
12 | from moira.logs import log
13 | from moira.checker.master import MasterService
14 | from moira.checker.worker import check
15 | from moira.db import Db
16 |
# Absolute path to the worker script spawned as a child process per core.
WORKER_PATH = os.path.abspath(
    os.path.join(
        os.path.abspath(
            os.path.dirname(__file__)), 'worker.py'))
21 |
22 |
class CheckerProcessProtocol(ProcessProtocol):
    """Protocol for a spawned checker worker; stops the reactor when any
    worker exits, taking the whole master down with it."""

    def connectionMade(self):
        log.info("Run worker - {pid}", pid=self.transport.pid)

    def processEnded(self, reason):
        log.info("Checker process ended with reason: {reason}", reason=reason)
        if reactor.running:
            reactor.stop()
32 |
33 |
class TopService(service.MultiService):
    """MultiService that also spawns one checker worker process per CPU core
    (cpu_count - 1, at least one) when started."""

    # NOTE(review): class-level mutable list is shared across instances; fine
    # for the single instance created in run(), but not instance-safe.
    checkers = []

    def startService(self):
        service.MultiService.startService(self)
        for i in range(max(1, multiprocessing.cpu_count() - 1)):
            # Workers inherit stdout/stderr and get a pipe on stdin; each is
            # numbered (-n) and given the config path and log directory.
            checker = reactor.spawnProcess(
                CheckerProcessProtocol(), sys.executable,
                ['moira-checker', WORKER_PATH, "-n", str(i), "-c", config.CONFIG_PATH, "-l", config.LOG_DIRECTORY],
                childFDs={0: 'w', 1: 1, 2: 2}, env=os.environ)
            self.checkers.append(checker)
46 |
47 |
def run():
    """Checker master entry point: either check one trigger (-t flag) and
    exit, or start the master service plus per-core worker processes."""

    config.read()
    logs.checker_master()

    if config.ARGS.t:
        # One-shot mode: check the given trigger id and return without
        # starting any services.
        check(config.ARGS.t)
        return

    top_service = TopService()

    db = Db()
    # The graphite data layer resolves metrics through this same db instance.
    datalib.db = db
    db.setServiceParent(top_service)

    sub_service = MasterService(db)
    sub_service.setServiceParent(top_service)

    top_service.startService()

    reactor.addSystemEventTrigger('before', 'shutdown', top_service.stopService)

    reactor.run()
71 |
--------------------------------------------------------------------------------
/moira/checker/state.py:
--------------------------------------------------------------------------------
# Trigger/metric state identifiers shared across the checker.
OK = "OK"
WARN = "WARN"
ERROR = "ERROR"
NODATA = "NODATA"
EXCEPTION = "EXCEPTION"
DEL = "DEL"

# Severity weights: a trigger's score sums its metrics' scores plus the
# score of its own state, so worse states dominate sorting.
SCORES = {
    OK: 0,
    DEL: 0,
    WARN: 1,
    ERROR: 100,
    NODATA: 1000,
    EXCEPTION: 100000
}


def to_metric_state(state):
    """Map a trigger TTL state onto the state stored for a metric.

    DEL means "remove the metric", so the metric itself records NODATA;
    every other state passes through unchanged.
    """
    return NODATA if state == DEL else state
22 |
--------------------------------------------------------------------------------
/moira/checker/timeseries.py:
--------------------------------------------------------------------------------
class TargetTimeSeries(dict):
    """Mapping of target number (1-based) -> list of evaluated time series.

    Target t1 may expand into many series; every extra target t2..tN must
    resolve to exactly one series, whose metric names are tracked in
    other_targets_names (see Trigger.get_timeseries).
    """

    def __init__(self, *arg, **kwargs):
        super(TargetTimeSeries, self).__init__(*arg, **kwargs)
        # Maps "tN" (N >= 2) -> metric name of that target's single series.
        self.other_targets_names = {}

    def get_expression_values(self, t1, timestamp):
        # Collect each target's value at `timestamp`: t1 from the given
        # series, tN (N > 1) from that target's only series. Stops early,
        # leaving a None placeholder, when any target has no point there.
        expression_values = {}
        for target_number in xrange(1, len(self) + 1):
            target_name = "t%s" % target_number
            tN = self[target_number][0] if target_number > 1 else t1
            # Python 2 integer division floors the timestamp onto the
            # series' sample index.
            value_index = (timestamp - tN.start) / tN.step
            tN_value = tN[value_index] if len(tN) > value_index else None
            expression_values[target_name] = tN_value
            if tN_value is None:
                break
        return expression_values

    def set_state_value(self, metric_state, expression_values, tN):
        # expression_values None means "no value known": drop any stale value.
        if expression_values is None:
            if "value" in metric_state:
                del metric_state["value"]
        else:
            metric_state["value"] = expression_values[tN]

    def update_state(self, t1, check, expression_state, expression_values, timestamp):
        # Apply the evaluated state to t1's metric and mirror it onto the
        # single-series extra targets' metrics.
        metric_state = check["metrics"][t1.name]
        metric_state["state"] = expression_state
        metric_state["timestamp"] = timestamp
        self.set_state_value(metric_state, expression_values, "t1")

        for tN, tName in self.other_targets_names.iteritems():
            other_metric_state = check["metrics"][tName]
            other_metric_state["state"] = expression_state
            other_metric_state["timestamp"] = timestamp
            self.set_state_value(other_metric_state, expression_values, tN)
37 |
--------------------------------------------------------------------------------
/moira/checker/trigger.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 |
3 | from moira.graphite.evaluator import evaluateTarget
4 | from twisted.internet import defer
5 |
6 | from moira.checker import state
7 | from moira.checker import check
8 | from moira.checker.timeseries import TargetTimeSeries
9 |
10 |
class Trigger(object):
    """One trigger under check: loads its struct and last check state,
    evaluates its targets, and knows its maintenance/schedule windows."""

    def __init__(self, id, db):
        self.id = id
        self.db = db

    @defer.inlineCallbacks
    def init(self, now, fromTime=None):
        # Load the trigger struct and last check; returns (via Deferred)
        # False when the trigger no longer exists.
        self.maintenance = 0
        json, self.struct = yield self.db.getTrigger(self.id)
        if json is None:
            defer.returnValue(False)

        self.is_simple = self.struct.get("is_simple_trigger", False)

        # NOTE(review): the `break` exits after the first tag, so only the
        # first tag's maintenance is ever considered despite the max-style
        # comparison -- looks unintentional; confirm before relying on
        # multi-tag maintenance.
        for tag in self.struct["tags"]:
            tag_data = yield self.db.getTag(tag)
            maintenance = tag_data.get('maintenance', 0)
            if maintenance > self.maintenance:
                self.maintenance = maintenance
                break
        self.ttl = self.struct.get("ttl")
        self.ttl_state = self.struct.get("ttl_state", state.NODATA)
        self.last_check = yield self.db.getTriggerLastCheck(self.id)
        # A trigger never checked before starts one hour back.
        begin = (fromTime or now) - 3600
        if self.last_check is None:
            self.last_check = {
                "metrics": {},
                "state": state.NODATA,
                "timestamp": begin
            }
        if self.last_check.get("timestamp") is None:
            self.last_check["timestamp"] = begin
        defer.returnValue(True)

    @defer.inlineCallbacks
    def get_timeseries(self, requestContext):
        # Evaluate every target into a TargetTimeSeries. Targets beyond the
        # first must resolve to exactly one series each; otherwise raises.
        targets = self.struct.get("targets", [])
        target_time_series = TargetTimeSeries()
        target_number = 1

        for target in targets:
            time_series = yield evaluateTarget(requestContext, target)

            if target_number > 1:
                if len(time_series) == 1:
                    target_time_series.other_targets_names["t%s" % target_number] = time_series[0].name
                elif not time_series:
                    raise Exception("Target #%s has no timeseries" % target_number)
                else:
                    raise Exception("Target #%s has more than one timeseries" % target_number)

            # Attach each series' previous per-metric state; never-seen
            # metrics start as NODATA one hour before the series start.
            for time_serie in time_series:
                time_serie.last_state = self.last_check["metrics"].get(
                    time_serie.name, {
                        "state": state.NODATA,
                        "timestamp": time_serie.start - 3600})
            target_time_series[target_number] = time_series
            target_number += 1

        defer.returnValue(target_time_series)

    @defer.inlineCallbacks
    def check(self, fromTime=None, now=None, cache_ttl=60):
        # One full check cycle; delegates to moira.checker.check.trigger.
        yield check.trigger(self, fromTime, now, cache_ttl)

    def isSchedAllows(self, ts):
        # True when timestamp `ts` falls inside the trigger schedule (or no
        # schedule is configured). The schedule carries tzOffset in minutes,
        # per-weekday enabled flags and start/end minute offsets in the day.
        sched = self.struct.get('sched')
        if sched is None:
            return True

        # Align to whole minutes and apply the schedule's timezone offset.
        timestamp = ts - ts % 60 - sched["tzOffset"] * 60
        date = datetime.fromtimestamp(timestamp)
        if not sched['days'][date.weekday()]['enabled']:
            return False
        day_start = datetime.fromtimestamp(timestamp - timestamp % (24 * 3600))
        start_datetime = day_start + timedelta(minutes=sched["startOffset"])
        end_datetime = day_start + timedelta(minutes=sched["endOffset"])
        if date < start_datetime:
            return False
        if date > end_datetime:
            return False
        return True
94 |
--------------------------------------------------------------------------------
/moira/checker/worker.py:
--------------------------------------------------------------------------------
1 | import random
2 |
3 | from twisted.internet import defer, reactor, task
4 |
5 | from moira.graphite import datalib
6 | from moira import config
7 | from moira.checker.trigger import Trigger
8 | from moira.db import Db
9 | from moira.metrics import spy, graphite
10 | from moira import logs
11 | from moira.logs import log
12 |
13 |
# Seconds between perform() iterations of the checker loop.
PERFORM_INTERVAL = 0.01
# Seconds to pause after a failed check iteration.
ERROR_TIMEOUT = 10
16 |
17 |
class TriggersCheck:
    """Looping service that continuously pulls trigger ids from redis
    and checks them; one instance per checker worker process."""

    def __init__(self, db):
        # db: moira.db.Db instance used for the check queue, locks and data.
        self.db = db

    def start(self):
        # Schedule perform() every PERFORM_INTERVAL seconds; `finished`
        # fires once the loop has been stopped.
        self.t = task.LoopingCall(self.perform)
        self.finished = self.t.start(PERFORM_INTERVAL, now=False)
        log.info("Checker service started")

    @defer.inlineCallbacks
    def stop(self):
        # Stop the looping call and wait for the current iteration to end.
        self.t.stop()
        yield self.finished

    @defer.inlineCallbacks
    def perform(self):
        """Drain the trigger check queue.

        For each queued trigger id: acquire the distributed check lock,
        run the check, release the lock and report the timing. Sleeps a
        short random interval between triggers and ERROR_TIMEOUT after a
        failure.
        """
        try:
            trigger_id = yield self.db.getTriggerToCheck()
            while trigger_id is not None:
                acquired = yield self.db.setTriggerCheckLock(trigger_id)
                if acquired is not None:
                    start = reactor.seconds()
                    trigger = Trigger(trigger_id, self.db)
                    yield trigger.check()
                    end = reactor.seconds()
                    yield self.db.delTriggerCheckLock(trigger_id)
                    spy.TRIGGER_CHECK.report(end - start)
                trigger_id = yield self.db.getTriggerToCheck()
                yield task.deferLater(reactor, random.uniform(PERFORM_INTERVAL * 10, PERFORM_INTERVAL * 20), lambda: None)
        except GeneratorExit:
            pass
        except Exception as e:
            spy.TRIGGER_CHECK_ERRORS.report(0)
            log.error("Failed to perform triggers check: {e}", e=e)
            yield task.deferLater(reactor, ERROR_TIMEOUT, lambda: None)
54 |
55 |
def run(callback):
    """Create the Db, expose it to graphite datalib, start it and spin
    the reactor; `callback(db)` fires once the db service is up."""
    database = Db()
    datalib.db = database
    database.startService().addCallback(callback)
    reactor.run()
64 |
65 |
def main(number):
    """Run checker worker `number`: report spy metrics to graphite and
    start the trigger check loop until reactor shutdown."""

    def get_metrics():
        # Each entry is (metric path, value); paths embed host and worker
        # number. get_metrics() is intentionally called per entry, in the
        # same order as before.
        metric_name = lambda kind: "checker.%s.%s.%s" % (kind, config.HOSTNAME, number)
        return [
            (metric_name("time"), spy.TRIGGER_CHECK.get_metrics()["sum"]),
            (metric_name("triggers"), spy.TRIGGER_CHECK.get_metrics()["count"]),
            (metric_name("errors"), spy.TRIGGER_CHECK_ERRORS.get_metrics()["count"]),
        ]

    graphite.sending(get_metrics)

    def start(db):
        # Launch the checking loop and make sure it drains on shutdown.
        service = TriggersCheck(db)
        service.start()
        reactor.addSystemEventTrigger('before', 'shutdown', service.stop)

    run(start)
91 |
92 |
def check(trigger_id):
    """Check a single trigger by id, then stop the reactor (one-shot mode)."""

    @defer.inlineCallbacks
    def start(db):
        yield Trigger(trigger_id, db).check()
        reactor.stop()

    run(start)
102 |
103 |
if __name__ == '__main__':

    # Entry point: read configuration, set up per-worker logging and
    # start checker number `-n`.
    config.read()
    logs.checker_worker()
    main(config.ARGS.n)
109 |
--------------------------------------------------------------------------------
/moira/config.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | import socket
4 |
5 | import anyjson
6 | import yaml
7 |
# Prefer ujson as the anyjson backend when it is installed: register it
# at the head of anyjson's implementation list and force its use.
try:
    import ujson

    # Smoke-test the module before registering it.
    ujson.loads("{}")
    if anyjson._modules[0][0] != 'ujson':
        anyjson._modules.insert(
            0,
            ("ujson",
             "dumps",
             TypeError,
             "loads",
             ValueError,
             "load"))
        anyjson.force_implementation('ujson')
except ImportError:
    ujson = None
24 |
# Default configuration values; read() overrides them from the yaml
# config file (CONFIG_PATH) and from command-line arguments.
CONFIG_PATH = '/etc/moira/config.yml'
REDIS_HOST = "localhost"
REDIS_PORT = 6379
DBID = 0
LOG_DIRECTORY = "stdout"
LOG_LEVEL = "info"
HTTP_PORT = 8081
HTTP_ADDR = ''
GRAPHITE = []
GRAPHITE_PREFIX = 'DevOps.moira'
GRAPHITE_INTERVAL = 10
NODATA_CHECK_INTERVAL = 60
CHECK_INTERVAL = 5
CHECK_LOCK_TTL = 30
STOP_CHECKING_INTERVAL = 30
METRICS_TTL = 3600
CHECKPOINT_GAP = 120
PREFIX = "/api"
# Short host name, used as a metric path component.
HOSTNAME = socket.gethostname().split('.')[0]
BAD_STATES_REMINDER = {'ERROR': 86400, 'NODATA': 86400}
# Parsed argparse namespace; populated by read().
ARGS = None
46 |
47 |
def get_parser():
    """Build the command-line argument parser shared by moira services."""
    p = argparse.ArgumentParser()
    p.add_argument('-c', help='path to configuration file (default: %s)' % CONFIG_PATH)
    p.add_argument('-l', help='path to log directory (default: %s)' % LOG_DIRECTORY)
    p.add_argument('-port', help='listening port (default: %s)' % HTTP_PORT, type=int)
    p.add_argument('-t', help='check single trigger by id and exit')
    p.add_argument('-n', help='checker number', type=int)
    p.add_argument('-v', help='verbosity log', default=False, const=True, nargs='?')
    return p
57 |
58 |
def read():
    """Load configuration from CONFIG_PATH (or -c) and apply CLI overrides.

    Populates the module-level settings; a missing file or missing
    optional keys keep the defaults declared above.
    """
    global REDIS_HOST
    global REDIS_PORT
    global DBID
    global LOG_DIRECTORY
    global LOG_LEVEL
    global HTTP_PORT
    global HTTP_ADDR
    global GRAPHITE_PREFIX
    global GRAPHITE_INTERVAL
    global NODATA_CHECK_INTERVAL
    global CHECK_INTERVAL
    global METRICS_TTL
    global ARGS
    global STOP_CHECKING_INTERVAL
    global CONFIG_PATH

    parser = get_parser()
    args = parser.parse_args()

    ARGS = args
    if args.c:
        CONFIG_PATH = args.c

    if os.path.exists(CONFIG_PATH):
        with open(CONFIG_PATH, 'r') as yml:
            # NOTE(review): yaml.load without an explicit Loader can
            # construct arbitrary objects; prefer yaml.safe_load for
            # config files.
            cfg = yaml.load(yml)
            REDIS_HOST = cfg['redis']['host']
            REDIS_PORT = cfg['redis']['port']
            DBID = cfg['redis'].get('dbid', 0)
            LOG_DIRECTORY = cfg['worker']['log_dir']
            LOG_LEVEL = cfg['worker'].get('log_level', 'info')
            HTTP_PORT = cfg['api']['port']
            HTTP_ADDR = cfg['api']['listen']
            if 'graphite' in cfg:
                # Every key starting with "uri" declares one host:port
                # metric receiver.
                for key in cfg['graphite']:
                    if key.startswith('uri'):
                        host, port = cfg['graphite'][key].split(':')
                        GRAPHITE.append((host, int(port)))
                GRAPHITE_PREFIX = cfg['graphite']['prefix']
                GRAPHITE_INTERVAL = cfg['graphite']['interval']
            NODATA_CHECK_INTERVAL = cfg['checker'].get('nodata_check_interval', 60)
            CHECK_INTERVAL = cfg['checker'].get('check_interval', 5)
            METRICS_TTL = cfg['checker'].get('metrics_ttl', 3600)
            STOP_CHECKING_INTERVAL = cfg['checker'].get('stop_checking_interval', 30)

    # Command-line flags win over the config file.
    if args.l:
        LOG_DIRECTORY = args.l
    if args.v:
        LOG_LEVEL = 'debug'
    if args.port:
        HTTP_PORT = args.port
111 |
--------------------------------------------------------------------------------
/moira/graphite/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/moira/graphite/__init__.py
--------------------------------------------------------------------------------
/moira/graphite/attime.py:
--------------------------------------------------------------------------------
1 | """Copyright 2008 Orbitz WorldWide
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License."""
14 |
15 | from datetime import datetime, timedelta
16 | from time import daylight
17 | import pytz
18 |
19 | months = [
20 | 'jan',
21 | 'feb',
22 | 'mar',
23 | 'apr',
24 | 'may',
25 | 'jun',
26 | 'jul',
27 | 'aug',
28 | 'sep',
29 | 'oct',
30 | 'nov',
31 | 'dec']
32 | weekdays = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']
33 |
34 |
def parseATTime(s, tzinfo=None):
    """Parse a graphite @-time string into a timezone-aware datetime.

    Accepts unix timestamps, "HH:MM_YYYYMMDD", and reference+offset
    forms like "now-1d" or "midnight+2h" (see parseTimeReference /
    parseTimeOffset). Defaults to UTC.
    """
    if tzinfo is None:
        tzinfo = pytz.utc
    s = s.strip().lower().replace('_', '').replace(',', '').replace(' ', '')
    if s.isdigit():
        if len(s) == 8 and int(s[:4]) > 1900 and int(
                s[4:6]) < 13 and int(s[6:]) < 32:
            pass  # Fall back because its not a timestamp, its YYYYMMDD form
        else:
            return datetime.fromtimestamp(int(s), tzinfo)
    elif ':' in s and len(s) == 13:
        # HH:MMYYYYMMDD form (after separator stripping above); `daylight`
        # from the time module is passed as the is_dst hint.
        return tzinfo.localize(datetime.strptime(s, '%H:%M%Y%m%d'), daylight)
    # Split an optional +/- offset off the reference part.
    if '+' in s:
        ref, offset = s.split('+', 1)
        offset = '+' + offset
    elif '-' in s:
        ref, offset = s.split('-', 1)
        offset = '-' + offset
    else:
        ref, offset = s, ''
    return (
        parseTimeReference(ref) +
        parseTimeOffset(offset)).astimezone(tzinfo)
58 |
59 |
def parseTimeReference(ref):
    """Parse the reference part of an @-time string (before any offset).

    Accepts '' / 'now', a time of day ('8:50am', 'noon', 'midnight',
    'teatime'), day words ('yesterday', 'today', 'tomorrow'),
    MM/DD/YY[YY], YYYYMMDD, month name + day ('jan8'), or a weekday
    name; raises on anything else.
    """
    if not ref or ref == 'now':
        return pytz.utc.localize(datetime.utcnow())

    # Time-of-day reference
    i = ref.find(':')
    hour, min = 0, 0
    if i != -1:
        hour = int(ref[:i])
        min = int(ref[i + 1:i + 3])
        ref = ref[i + 3:]
        if ref[:2] == 'am':
            ref = ref[2:]
        elif ref[:2] == 'pm':
            hour = (hour + 12) % 24
            ref = ref[2:]
    if ref.startswith('noon'):
        hour, min = 12, 0
        ref = ref[4:]
    elif ref.startswith('midnight'):
        hour, min = 0, 0
        ref = ref[8:]
    elif ref.startswith('teatime'):
        hour, min = 16, 0
        ref = ref[7:]

    # NOTE: uses the machine's local clock (naive datetime) for the base.
    refDate = datetime.now().replace(hour=hour, minute=min, second=0)

    # Day reference
    if ref in ('yesterday', 'today', 'tomorrow'):  # yesterday, today, tomorrow
        if ref == 'yesterday':
            refDate = refDate - timedelta(days=1)
        if ref == 'tomorrow':
            refDate = refDate + timedelta(days=1)
    elif ref.count('/') == 2:  # MM/DD/YY[YY]
        m, d, y = map(int, ref.split('/'))
        if y < 1900:
            y += 1900
        if y < 1970:
            y += 100
        refDate = refDate.replace(year=y)

        try:  # Fix for Bug #551771
            refDate = refDate.replace(month=m)
            refDate = refDate.replace(day=d)
        except:
            refDate = refDate.replace(day=d)
            refDate = refDate.replace(month=m)

    elif len(ref) == 8 and ref.isdigit():  # YYYYMMDD
        refDate = refDate.replace(year=int(ref[:4]))

        try:  # Fix for Bug #551771
            refDate = refDate.replace(month=int(ref[4:6]))
            refDate = refDate.replace(day=int(ref[6:8]))
        except:
            refDate = refDate.replace(day=int(ref[6:8]))
            refDate = refDate.replace(month=int(ref[4:6]))

    elif ref[:3] in months:  # MonthName DayOfMonth
        refDate = refDate.replace(month=months.index(ref[:3]) + 1)
        if ref[-2:].isdigit():
            refDate = refDate.replace(day=int(ref[-2:]))
        elif ref[-1:].isdigit():
            refDate = refDate.replace(day=int(ref[-1:]))
        else:
            raise Exception("Day of month required after month name")
    elif ref[:3] in weekdays:  # DayOfWeek (Monday, etc)
        # Step back to the most recent occurrence of that weekday.
        todayDayName = refDate.strftime("%a").lower()[:3]
        today = weekdays.index(todayDayName)
        twoWeeks = weekdays * 2
        dayOffset = today - twoWeeks.index(ref[:3])
        if dayOffset < 0:
            dayOffset += 7
        refDate -= timedelta(days=dayOffset)
    elif ref:
        raise Exception("Unknown day reference")
    return refDate
138 |
139 |
def parseTimeOffset(offset):
    """Parse a graphite offset string like "+2days1h" into a timedelta."""
    if not offset:
        return timedelta()

    # An explicit leading +/- applies to every component that follows.
    if offset[0].isdigit():
        sign = 1
    else:
        sign = {'+': 1, '-': -1}[offset[0]]
        offset = offset[1:]

    total = timedelta()
    while offset:
        # Consume the leading run of digits (the amount).
        pos = 0
        while pos < len(offset) and offset[pos].isdigit():
            pos += 1
        amount = int(offset[:pos])
        offset = offset[pos:]
        # Consume the following run of letters (the unit name).
        pos = 0
        while pos < len(offset) and offset[pos].isalpha():
            pos += 1
        unit = getUnitString(offset[:pos])
        offset = offset[pos:]
        # timedelta has no months/years; approximate as 30/365 days.
        if unit == 'months':
            unit = 'days'
            amount = amount * 30
        if unit == 'years':
            unit = 'days'
            amount = amount * 365
        total += timedelta(**{unit: sign * amount})

    return total
173 |
174 |
def getUnitString(s):
    """Map a unit-name prefix ("s", "min", "mon", ...) to a timedelta keyword.

    Order matters: "min" and "mon" must be tested before the bare "m"
    (which means minutes).
    """
    prefixes = (
        ('s', 'seconds'),
        ('min', 'minutes'),
        ('h', 'hours'),
        ('d', 'days'),
        ('w', 'weeks'),
        ('mon', 'months'),
        ('m', 'minutes'),
        ('y', 'years'),
    )
    for prefix, unit in prefixes:
        if s.startswith(prefix):
            return unit
    raise Exception("Invalid offset unit '%s'" % s)
193 |
--------------------------------------------------------------------------------
/moira/graphite/datalib.py:
--------------------------------------------------------------------------------
1 | """Copyright 2008 Orbitz WorldWide
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License."""
14 |
15 | from moira.graphite.util import epoch
16 | from moira.graphite.attime import parseATTime
17 | from twisted.internet import defer
18 |
19 | db = None
20 |
21 |
def createRequestContext(fromTime, endTime, allowRealTimeAlerting):
    """Build the evaluation context dict shared by the graphite functions."""
    context = {
        'startTime': parseATTime(fromTime),
        'endTime': parseATTime(endTime),
        'bootstrap': False,
        'allowRealTimeAlerting': allowRealTimeAlerting,
        'localOnly': False,
        'template': None,
        'graphite_patterns': {},
        'metrics': set(),
    }
    return context
31 |
32 |
class TimeSeries(list):
    """A list of datapoints for one metric with graphite metadata.

    Carries name, start/end timestamps, step (retention interval) and a
    consolidation function used to merge several raw points into one
    when ``valuesPerPoint`` > 1.
    """

    def __init__(self, name, start, end, step, values, consolidate='average'):
        list.__init__(self, values)
        self.name = name
        self.start = start
        self.end = end
        self.step = step
        self.consolidationFunc = consolidate
        self.valuesPerPoint = 1
        self.options = {}
        # True for placeholder series created when a pattern has no metrics.
        self.stub = False

    def __iter__(self):
        # Transparently consolidate on iteration when requested.
        if self.valuesPerPoint > 1:
            return self.__consolidatingGenerator(list.__iter__(self))
        else:
            return list.__iter__(self)

    def consolidate(self, valuesPerPoint):
        """Request merging of every `valuesPerPoint` raw points into one."""
        self.valuesPerPoint = int(valuesPerPoint)

    def __consolidatingGenerator(self, gen):
        """Yield consolidated values; all-None buckets yield None."""
        buf = []
        for x in gen:
            buf.append(x)
            if len(buf) == self.valuesPerPoint:
                while None in buf:
                    buf.remove(None)
                if buf:
                    yield self.__consolidate(buf)
                    buf = []
                else:
                    yield None
        # Flush the (possibly partial) trailing bucket.
        while None in buf:
            buf.remove(None)
        if buf:
            yield self.__consolidate(buf)
        else:
            yield None
        # Fix: the original `raise StopIteration` here is forbidden inside
        # a generator since PEP 479 (it becomes RuntimeError on Python
        # 3.7+); a plain return ends the generator identically.
        return

    def __consolidate(self, values):
        # Ignore None gaps; an empty bucket consolidates to None.
        usable = [v for v in values if v is not None]
        if not usable:
            return None
        if self.consolidationFunc == 'sum':
            return sum(usable)
        if self.consolidationFunc == 'average':
            return float(sum(usable)) / len(usable)
        if self.consolidationFunc == 'max':
            return max(usable)
        if self.consolidationFunc == 'min':
            return min(usable)
        raise Exception("Invalid consolidation function!")

    def __repr__(self):
        return 'TimeSeries(name=%s, start=%s, end=%s, step=%s)' % (
            self.name, self.start, self.end, self.step)

    def getInfo(self):
        """Pickle-friendly representation of the series"""
        return {
            'name': self.name,
            'start': self.start,
            'end': self.end,
            'step': self.step,
            'values': list(self),
        }
102 |
103 |
def unpackTimeSeries(dataList, retention, startTime, endTime, bootstrap, allowRealTimeAlerting):
    """Convert raw redis metric entries into per-slot value lists.

    Each element of `dataList` is a sequence of (value, timestamp) pairs
    where `value` is a "<ts> <number>" string; the number goes into the
    retention-sized time slot of its timestamp, missing slots become
    None. The point of the last (still open) slot is appended only in
    bootstrap mode, or when real-time alerting is allowed and the point
    exists.
    """
    def slot_of(timestamp):
        return int((timestamp - startTime) / retention)

    last_slot = slot_of(endTime)

    unpacked = []
    for data in dataList:
        slots = {}
        for value, timestamp in data:
            slots[slot_of(timestamp)] = float(value.split()[1])

        # note that right boundary is exclusive
        series = [slots.get(s) for s in range(last_slot)]

        last_point = slots.get(last_slot)
        if bootstrap or (allowRealTimeAlerting and last_point is not None):
            series.append(last_point)

        unpacked.append(series)
    return unpacked
128 |
129 |
@defer.inlineCallbacks
def fetchData(requestContext, pathExpr):
    """Fetch TimeSeries for every metric matching `pathExpr` from redis.

    Uses the time window and flags from `requestContext`. When the
    pattern matches no metric, a single empty stub series is returned so
    downstream functions still have something to operate on.
    """

    global db

    if db is None:
        raise Exception("Redis connection is not initialized")

    startTime = int(epoch(requestContext['startTime']))
    endTime = int(epoch(requestContext['endTime']))
    bootstrap = requestContext['bootstrap']
    allowRealTimeAlerting = requestContext['allowRealTimeAlerting']

    seriesList = []
    metrics = list((yield db.getPatternMetrics(pathExpr)))
    if len(metrics) == 0:
        # No metrics for the pattern: emit a stub so callers get a series.
        series = TimeSeries(pathExpr, startTime, startTime, 60, [])
        series.pathExpression = pathExpr
        series.stub = True
        seriesList.append(series)
    else:
        # All metrics of one pattern share the first metric's retention.
        first_metric = metrics[0]
        retention = yield db.getMetricRetention(first_metric, cache_key=first_metric, cache_ttl=60)
        if bootstrap:
            # in bootstrap mode in order to avoid overlapping of bootstrap time series with current time series
            # we have to fetch all points up to the last retention time slot boundary preceding endTime
            # not including that boundary because endTime is set to be equal startTime from the original requestContext
            endTime -= int((endTime - startTime) % retention) + 1
        dataList = yield db.getMetricsValues(metrics, startTime, endTime)
        valuesList = unpackTimeSeries(dataList, retention, startTime, endTime, bootstrap, allowRealTimeAlerting)
        for i, metric in enumerate(metrics):
            requestContext['metrics'].add(metric)
            series = TimeSeries(
                metric,
                startTime,
                endTime,
                retention,
                valuesList[i])
            series.pathExpression = pathExpr
            seriesList.append(series)

    defer.returnValue(seriesList)
172 |
--------------------------------------------------------------------------------
/moira/graphite/evaluator.py:
--------------------------------------------------------------------------------
1 | """Copyright 2008 Orbitz WorldWide
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License."""
14 |
15 | import re
16 |
17 | from moira.graphite.grammar import grammar
18 | from moira.graphite.datalib import TimeSeries, fetchData
19 | from twisted.internet import defer
20 |
21 |
@defer.inlineCallbacks
def evaluateTarget(requestContext, target):
    """Parse and evaluate one graphite target string.

    Always resolves to a list of TimeSeries objects, even when the
    expression evaluates to a single series.
    """
    tokens = grammar.parseString(target)
    result = yield evaluateTokens(requestContext, tokens)
    # Callers always expect a list of TimeSeries objects.
    defer.returnValue([result] if isinstance(result, TimeSeries) else result)
31 |
32 |
@defer.inlineCallbacks
def evaluateTokens(requestContext, tokens, replacements=None):
    """Recursively evaluate a parsed target expression tree.

    Handles template invocations, nested expressions, path expressions
    (data fetches), function calls and literals. `replacements` maps
    template variable names to substituted values. The `raise
    StopIteration` statements after defer.returnValue are unreachable
    (returnValue raises) and kept for the original Python 2 style.
    """
    if tokens.template:
        # Evaluate template arguments, then re-evaluate the template body
        # with those values available as replacements.
        arglist = dict()
        if tokens.template.kwargs:
            arglist.update(dict([(kwarg.argname, (yield evaluateTokens(requestContext, kwarg.args[0])))
                                 for kwarg in tokens.template.kwargs]))
        if tokens.template.args:
            arglist.update(dict([(str(i + 1), (yield evaluateTokens(requestContext, arg)))
                                 for i, arg in enumerate(tokens.template.args)]))
        if 'template' in requestContext:
            arglist.update(requestContext['template'])
        result = yield evaluateTokens(requestContext, tokens.template, arglist)
        defer.returnValue(result)
        raise StopIteration

    elif tokens.expression:
        result = yield evaluateTokens(requestContext, tokens.expression, replacements=replacements)
        # Record which concrete series each textual pattern resolved to.
        for exp in tokens.expression:
            if type(exp) is not unicode:
                continue
            for r in result:
                if not isinstance(r, TimeSeries):
                    continue
                resolve = requestContext['graphite_patterns'].get(exp, set())
                resolve.add(r.name)
                requestContext['graphite_patterns'][exp] = resolve
        defer.returnValue(result)
        raise StopIteration

    elif tokens.pathExpression:
        expression = tokens.pathExpression
        if replacements:
            # Substitute $name template variables before fetching.
            for name in replacements:
                if expression == '$' + name:
                    val = replacements[name]
                    if not isinstance(
                            val,
                            str) and not isinstance(
                            val,
                            basestring):
                        defer.returnValue(val)
                        raise StopIteration
                    elif re.match('^-?[\d.]+$', val):
                        defer.returnValue(float(val))
                        raise StopIteration
                    else:
                        defer.returnValue(val)
                        raise StopIteration
                else:
                    expression = expression.replace(
                        '$' + name, str(replacements[name]))
        timeseries = yield fetchData(requestContext, expression)
        defer.returnValue(timeseries)
        raise StopIteration

    elif tokens.call:

        if tokens.call.funcname == 'template':
            # if template propagates down here, it means the grammar didn't match the invocation
            # as tokens.template. this generally happens if you try to pass
            # non-numeric/string args
            raise ValueError(
                "invaild template() syntax, only string/numeric arguments are allowed")

        func = SeriesFunctions[tokens.call.funcname]
        args = [(yield evaluateTokens(requestContext, arg, replacements=replacements)) for arg in tokens.call.args]
        kwargs = dict([(kwarg.argname, (yield evaluateTokens(requestContext, kwarg.args[0], replacements=replacements)))
                       for kwarg in tokens.call.kwargs])
        try:
            defer.returnValue((yield func(requestContext, *args, **kwargs)))
        except NormalizeEmptyResultError:
            defer.returnValue([])
        raise StopIteration

    elif tokens.number:
        if tokens.number.integer:
            defer.returnValue(int(tokens.number.integer))
            raise StopIteration
        elif tokens.number.float:
            defer.returnValue(float(tokens.number.float))
            raise StopIteration
        elif tokens.number.scientific:
            defer.returnValue(float(tokens.number.scientific[0]))
            raise StopIteration

    elif tokens.string:
        # Strip the surrounding quotes from the quotedString token.
        defer.returnValue(tokens.string[1:-1])
        raise StopIteration

    elif tokens.boolean:
        defer.returnValue(tokens.boolean[0] == 'true')
        raise StopIteration
    else:
        raise ValueError("unknown token in target evaulator")
128 |
129 |
130 | # Avoid import circularities
131 | from moira.graphite.functions import (SeriesFunctions, NormalizeEmptyResultError) # noqa
132 |
--------------------------------------------------------------------------------
/moira/graphite/grammar.py:
--------------------------------------------------------------------------------
1 | """Copyright 2008 Orbitz WorldWide
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License."""
14 |
15 | from pyparsing import (
16 | ParserElement, Forward, Combine, Optional, Word, Literal, CaselessKeyword,
17 | CaselessLiteral, Group, FollowedBy, LineEnd, OneOrMore, ZeroOrMore,
18 | nums, alphas, alphanums, printables, delimitedList, quotedString,
19 | __version__,
20 | )
21 |
# Pyparsing grammar for graphite target expressions (consumed by
# evaluator.evaluateTokens). Packrat memoization speeds up this
# heavily-backtracking grammar.
ParserElement.enablePackrat()
grammar = Forward()

expression = Forward()

# Literals
intNumber = Combine(
    Optional('-') + Word(nums)
)('integer')

floatNumber = Combine(
    Optional('-') + Word(nums) + Literal('.') + Word(nums)
)('float')

sciNumber = Combine(
    (floatNumber | intNumber) + CaselessLiteral('e') + intNumber
)('scientific')

aString = quotedString('string')

# Use lookahead to match only numbers in a list (can't remember why this
# is necessary)
afterNumber = FollowedBy(",") ^ FollowedBy(")") ^ FollowedBy(LineEnd())
number = Group(
    (sciNumber + afterNumber) |
    (floatNumber + afterNumber) |
    (intNumber + afterNumber)
)('number')

boolean = Group(
    CaselessKeyword("true") |
    CaselessKeyword("false")
)('boolean')

argname = Word(alphas + '_', alphanums + '_')('argname')
funcname = Word(alphas + '_', alphanums + '_')('funcname')

# Symbols
leftParen = Literal('(').suppress()
rightParen = Literal(')').suppress()
comma = Literal(',').suppress()
equal = Literal('=').suppress()

# Function calls

# Symbols
leftBrace = Literal('{')
rightBrace = Literal('}')
leftParen = Literal('(').suppress()
rightParen = Literal(')').suppress()
comma = Literal(',').suppress()
equal = Literal('=').suppress()
backslash = Literal('\\').suppress()

symbols = '''(){},=.'"\\'''
arg = Group(
    boolean |
    number |
    aString |
    expression
)('args*')
kwarg = Group(argname + equal + arg)('kwargs*')

args = delimitedList(~kwarg + arg)  # lookahead to prevent failing on equals
kwargs = delimitedList(kwarg)

call = Group(
    funcname + leftParen +
    Optional(
        args + Optional(
            comma + kwargs
        )
    ) + rightParen
)('call')

# Metric pattern (aka. pathExpression)
validMetricChars = ''.join((set(printables) - set(symbols)))
escapedChar = backslash + Word(symbols, exact=1)
partialPathElem = Combine(
    OneOrMore(
        escapedChar | Word(validMetricChars)
    )
)

matchEnum = Combine(
    leftBrace +
    delimitedList(partialPathElem, combine=True) +
    rightBrace
)

pathElement = Combine(
    Group(partialPathElem | matchEnum) +
    ZeroOrMore(matchEnum | partialPathElem)
)
pathExpression = delimitedList(
    pathElement,
    delim='.',
    combine=True)('pathExpression')

# template(...) literal arguments: numbers and strings only.
litarg = Group(
    number | aString
)('args*')
litkwarg = Group(argname + equal + litarg)('kwargs*')
# lookahead to prevent failing on equals
litargs = delimitedList(~litkwarg + litarg)
litkwargs = delimitedList(litkwarg)

template = Group(
    Literal('template') + leftParen +
    (call | pathExpression) +
    Optional(comma + (litargs | litkwargs)) +
    rightParen
)('template')

# pyparsing 2.x renamed Forward assignment from `<<` to `<<=`;
# support both major versions.
if __version__.startswith('1.'):
    expression << Group(template | call | pathExpression)('expression')
    grammar << expression
else:
    expression <<= Group(template | call | pathExpression)('expression')
    grammar <<= expression
142 |
143 |
def enableDebug():
    """Turn on pyparsing debug tracing for every named grammar element
    in this module; non-parser globals are silently skipped."""
    for element_name, element in globals().items():
        try:
            element.setName(element_name)
            element.setDebug(True)
        except:
            pass
151 |
--------------------------------------------------------------------------------
/moira/graphite/util.py:
--------------------------------------------------------------------------------
1 | """Copyright 2008 Orbitz WorldWide
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License."""
14 |
15 | import pytz
16 | import calendar
17 |
18 |
def epoch(dt):
    """
    Returns the epoch timestamp of a timezone-aware datetime object.
    """
    utc_timetuple = dt.astimezone(pytz.utc).timetuple()
    return calendar.timegm(utc_timetuple)
24 |
--------------------------------------------------------------------------------
/moira/logs.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | from twisted.logger import Logger
5 | from twisted.logger import globalLogPublisher
6 | from twisted.logger import textFileLogObserver
7 | from twisted.logger import FilteringLogObserver, LogLevelFilterPredicate, LogLevel
8 | from twisted.python.logfile import DailyLogFile
9 |
10 | from moira import config
11 |
# Module-wide logger shared by all moira services.
log = Logger()

# Map config log level names to twisted LogLevel values.
levels = {
    'debug': LogLevel.debug,
    'info': LogLevel.info,
    'warn': LogLevel.warn,
    'warning': LogLevel.warn,
    'error': LogLevel.error,
    'critical': LogLevel.critical
}
22 |
23 |
class ZeroPaddingDailyLogFile(DailyLogFile):
    """DailyLogFile whose rotated-file suffix zero-pads each date
    component to two digits."""

    def suffix(self, tupledate):
        """Return the suffix given a (year, month, day) tuple or unixtime"""
        try:
            parts = []
            for part in tupledate:
                parts.append("0%d" % part if part < 10 else str(part))
            return ''.join(parts)
        except Exception:
            # try taking a float unixtime
            return ''.join(map(str, self.toDate(tupledate)))
33 |
34 |
def init(outFile):
    """Install a global filtering log observer writing to `outFile`
    at the level configured by config.LOG_LEVEL."""
    level = levels[config.LOG_LEVEL]
    predicate = LogLevelFilterPredicate(defaultLogLevel=level)
    observer = FilteringLogObserver(textFileLogObserver(outFile=outFile), [predicate])
    # Set the observer's private encoding attribute to utf-8.
    observer._encoding = "utf-8"
    globalLogPublisher.addObserver(observer)
    log.info("Start logging with {l}", l=level)
42 |
43 |
def api():
    """Initialize logging for the API service."""
    target = sys.stdout if config.LOG_DIRECTORY == "stdout" else daily("api.log")
    init(target)
46 |
47 |
def checker_master():
    """Initialize logging for the checker master process."""
    init(sys.stdout if config.LOG_DIRECTORY == "stdout" else daily("checker.log"))
51 |
52 |
def checker_worker():
    """Initialize logging for a checker worker (one log file per worker
    number)."""
    if config.LOG_DIRECTORY == "stdout":
        target = sys.stdout
    else:
        # Note: config.ARGS is only touched when a log file is needed.
        target = daily("checker-{0}.log".format(config.ARGS.n))
    init(target)
56 |
57 |
def audit():
    """Return a standalone audit Logger writing to audit.log (or stdout)."""
    if config.LOG_DIRECTORY == "stdout":
        target = sys.stdout
    else:
        target = daily("audit.log")
    observer = textFileLogObserver(outFile=target)
    observer._encoding = "utf-8"
    return Logger(observer=observer)
63 |
64 |
def daily(name):
    """Open a ZeroPaddingDailyLogFile called `name` in the configured
    log directory, creating the directory when missing."""
    directory = os.path.abspath(config.LOG_DIRECTORY)
    if not os.path.exists(directory):
        os.makedirs(directory)
    return ZeroPaddingDailyLogFile(name, directory)
70 |
--------------------------------------------------------------------------------
/moira/metrics/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/moira/metrics/__init__.py
--------------------------------------------------------------------------------
/moira/metrics/graphite.py:
--------------------------------------------------------------------------------
1 | import time
2 | from moira import config
3 | from moira.logs import log
4 | from twisted.internet import reactor
5 | from twisted.internet.endpoints import TCP4ClientEndpoint
6 | from twisted.internet.protocol import Factory, Protocol
7 | from twisted.internet.task import LoopingCall
8 |
9 |
class GraphiteProtocol(Protocol):
    """Writes metrics to a graphite endpoint in the plaintext line format."""

    def send_metrics(self, get_metrics):
        """Send every (name, value) pair from get_metrics() with one timestamp."""
        now = int(time.time())
        for name, value in get_metrics():
            line = "%s.%s %s %s\n" % (config.GRAPHITE_PREFIX, name, value, now)
            self.transport.write(line)

    def connectionLost(self, reason):
        """Log the disconnect reason and mark the protocol as disconnected."""
        log.error(str(reason))
        self.connected = 0
23 |
24 |
class GraphiteReplica(object):
    """One graphite endpoint of the cluster, with self-rescheduling reconnects."""

    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.connection = None      # GraphiteProtocol once a connection is up
        self.connecting = False     # True while a connect attempt is in flight

    def __str__(self):
        return "%s:%s" % (self.host, self.port)

    def connect(self, reconnecting=False):
        """Open a TCP connection unless an attempt is already in flight.

        Failed attempts reschedule themselves every 10 seconds with
        reconnecting=True so the in-flight guard does not block the retry.
        """
        if self.connecting and not reconnecting:
            return
        self.connecting = True
        endpoint = TCP4ClientEndpoint(reactor, self.host, self.port, 10)
        attempt = endpoint.connect(Factory.forProtocol(GraphiteProtocol))

        def on_connected(protocol):
            self.connecting = False
            log.info('Connected to {replica}', replica=self)
            self.connection = protocol

        def on_failed(error):
            log.error('Connect to {replica} failed: {error}', replica=self, error=error)
            reactor.callLater(10, self.connect, True)

        attempt.addCallbacks(on_connected, on_failed)

    def connected(self):
        """Truthy when an established connection exists."""
        return self.connection and self.connection.connected

    def send(self, get_metrics):
        """Forward metrics through the live connection (callers check connected())."""
        self.connection.send_metrics(get_metrics)
58 |
59 |
class GraphiteClusterClient(object):
    """Round-robins metric batches over a list of GraphiteReplica objects."""

    def __init__(self, replicas):
        self.replicas = replicas
        self.index = 0  # replica that will receive the next batch

    def connect(self):
        """Kick off a connection attempt on every replica."""
        for replica in self.replicas:
            replica.connect()

    def next(self):
        """Advance the round-robin cursor, wrapping at the end of the list."""
        self.index = (self.index + 1) % len(self.replicas)

    def send(self, get_metrics):
        """Send one batch to the next connected replica.

        Walks the ring starting at the current cursor; disconnected replicas
        get a reconnect attempt. If a full lap finds no live connection the
        batch is dropped with an error.
        """
        start = self.index
        candidate = self.replicas[self.index]
        while not candidate.connected():
            candidate.connect()
            self.next()
            if self.index == start:
                log.error("No graphite connection")
                return
            candidate = self.replicas[self.index]
        candidate.send(get_metrics)
        self.next()
        log.info("Sent metrics to {replica}", replica=candidate)
86 |
87 |
def sending(get_metrics):
    """Start periodic delivery of get_metrics() output to the graphite cluster.

    No-op when no graphite endpoints are configured. The loop fires every
    config.GRAPHITE_INTERVAL seconds, skipping an immediate first run.
    """
    if not config.GRAPHITE:
        return
    replicas = [GraphiteReplica(host, port) for host, port in config.GRAPHITE]
    client = GraphiteClusterClient(replicas)
    client.connect()
    loop = LoopingCall(client.send, get_metrics)
    loop.start(config.GRAPHITE_INTERVAL, now=False)
96 |
--------------------------------------------------------------------------------
/moira/metrics/spy.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 |
3 |
def get_total_seconds(td):
    """Return the duration of timedelta *td* in seconds.

    Falls back to manual arithmetic on Python < 2.7 where
    timedelta.total_seconds() does not exist (the fallback truncates to int).
    """
    if hasattr(timedelta, 'total_seconds'):
        return td.total_seconds()
    return int(
        (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / 10 ** 6)


class Spy(object):
    """Accumulates event counts/sizes and reports totals over the last minute."""

    def __init__(self):
        # Oldest-first list of {'timestamp', 'sum', 'count'} batches.
        self.eventList = []

    def trim_event_list(self, timestamp):
        """Drop batches older than 60 seconds from the front of the list."""
        while self.eventList:
            first_event = self.eventList[0]
            age = timestamp - first_event['timestamp']
            if get_total_seconds(age) > 60:
                del self.eventList[0]
            else:
                return

    def report(self, size):
        """Record one event of *size*, merging into the last batch if it is
        less than 10 seconds old, otherwise starting a new batch."""
        now = datetime.now()
        self.trim_event_list(now)
        if self.eventList:
            last_batch = self.eventList[-1]
            if get_total_seconds(now - last_batch['timestamp']) < 10:
                last_batch['sum'] += size
                last_batch['count'] += 1
                return
        self.eventList.append({'timestamp': now, 'sum': size, 'count': 1})

    def get_metrics(self):
        """Return {'sum', 'count'} aggregated over batches of the last 60 seconds."""
        now = datetime.now()
        count = 0
        summary = 0
        for batch in self.eventList:
            # Skip stale batches instead of breaking out: the list is ordered
            # oldest-first, so a stale entry at the front (possible when no
            # recent report() call trimmed it) must not hide newer batches.
            if get_total_seconds(now - batch['timestamp']) > 60:
                continue
            count += batch['count']
            summary += batch['sum']
        return {"sum": summary, "count": count}
49 |
50 |
# Process-wide Spy instances shared via module import; presumably fed by the
# checker (one for all trigger checks, one for failures) - verify against callers.
TRIGGER_CHECK = Spy()
TRIGGER_CHECK_ERRORS = Spy()
53 |
--------------------------------------------------------------------------------
/moira/tools/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/moira/tools/__init__.py
--------------------------------------------------------------------------------
/moira/tools/converter.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import txredisapi
4 | from twisted.internet import defer, reactor
5 |
6 | from moira.logs import log
7 | from moira import config
8 | from moira.db import Db, METRIC_OLD_PREFIX, METRIC_PREFIX
9 |
10 |
@defer.inlineCallbacks
def convert(db):
    """Migrate metric sorted-sets from the old key layout to the new one.

    Old members look like "<value> <timestamp>"; new members are written as
    "<timestamp> <value>" scored by timestamp under METRIC_PREFIX. Stops the
    reactor when every key has been processed.
    """

    log.info(db.rc)
    log.info("Reading metrics keys")
    keys = yield db.rc.keys(METRIC_OLD_PREFIX.format("*"))
    log.info("Converting ...")
    for key in keys:
        # NOTE(review): assumes exactly one ':' in the key; a name containing
        # ':' would make this unpack raise - confirm the key format.
        _, name = key.split(':')
        try:
            pipe = yield db.rc.pipeline()
            metrics = yield db.rc.zrange(key)
            for metric in metrics:
                value, timestamp = metric.split()
                pipe.zadd(METRIC_PREFIX.format(name), timestamp, "{0} {1}".format(timestamp, value))
            yield pipe.execute_pipeline()
        except txredisapi.ResponseError as e:
            log.error("Can not convert {key}: {e}", key=key, e=e)
        log.info("Metric {name} converted", name=name)

    yield db.stopService()
    reactor.stop()
33 |
34 |
def run():
    """Entry point: read config, set up stdout logging, run the conversion."""

    config.read()
    config.LOG_DIRECTORY = "stdout"
    # BUG FIX: `log` is a twisted.logger.Logger (see moira.logs), which has no
    # startLogging() method - the old call raised AttributeError on startup.
    # Route output through moira.logs.init, which attaches a stdout observer
    # at the configured level.
    from moira import logs
    logs.init(sys.stdout)

    db = Db()
    db.startService().addCallback(convert)

    reactor.run()


if __name__ == '__main__':

    run()
50 |
--------------------------------------------------------------------------------
/moira/trigger.py:
--------------------------------------------------------------------------------
def trigger_reformat(trigger, trigger_id, tags):
    """Normalize a raw trigger dict in place and return it.

    Ensures: 'id' is set when given, 'tags' is a list, warn/error values are
    floats (or None), 'name' is unicode, and 'ttl' is an int (or None).
    """
    if trigger_id:
        trigger["id"] = trigger_id
    # A bare string tag becomes a one-element list; other iterables are listed.
    trigger["tags"] = [tags] if type(tags) in (str, unicode) else list(tags)
    for field in ("warn_value", "error_value"):
        raw = trigger.get(field)
        trigger[field] = None if raw is None else float(raw)
    trigger["name"] = unicode(trigger.get("name"))
    ttl = trigger.get("ttl")
    trigger["ttl"] = int(ttl) if ttl else None
    return trigger
21 |
--------------------------------------------------------------------------------
/pkg/moira-api.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=moira-api - HTTP REST API service built on the Twisted Python framework
3 |
4 | [Service]
5 | ExecStart=/usr/bin/moira-api -c /etc/moira/worker.yml -l /var/log/moira/worker
6 | User=moira
7 | Group=moira
8 | Restart=always
9 |
10 | [Install]
11 | WantedBy=multi-user.target
12 |
--------------------------------------------------------------------------------
/pkg/moira-checker.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=moira-checker - Graphite metric checker service built on the Twisted Python framework
3 |
4 | [Service]
5 | ExecStart=/usr/bin/moira-checker -c /etc/moira/worker.yml -l /var/log/moira/worker
6 | User=moira
7 | Group=moira
8 | Restart=always
9 |
10 | [Install]
11 | WantedBy=multi-user.target
12 |
--------------------------------------------------------------------------------
/pkg/postinst:
--------------------------------------------------------------------------------
#!/bin/sh
# Package post-install hook: create the moira system user/group, prepare the
# log directory and register the services.

set -e

# Create the "moira" group and system user if they do not exist yet.
if ! getent group "moira" > /dev/null 2>&1 ; then
    groupadd -r "moira"
fi
if ! getent passwd "moira" > /dev/null 2>&1 ; then
    useradd -r -g moira -d /usr/share/moira -s /sbin/nologin \
    -c "Moira user" moira
fi

# Log directory owned by the service user (matches -l in the unit files).
mkdir -p /var/log/moira/worker
chown -R moira:moira /var/log/moira/worker
chmod 755 /var/log/moira/worker

# Prefer systemd; fall back to chkconfig on SysV systems.
if [ -x /bin/systemctl ] ; then
    /bin/systemctl daemon-reload
    /bin/systemctl enable moira-api.service
    /bin/systemctl enable moira-checker.service
elif [ -x /sbin/chkconfig ] ; then
    /sbin/chkconfig --add moira-api
    /sbin/chkconfig --add moira-checker
fi
25 |
26 |
--------------------------------------------------------------------------------
/pkg/worker.yml:
--------------------------------------------------------------------------------
1 | redis:
2 | host: localhost
3 | port: 6379
4 |
5 | worker:
6 | log_dir: /var/log/moira/worker
7 |
8 | api:
9 | port: 8081
10 | listen: '127.0.0.1'
11 |
12 | graphite:
13 | uri: localhost:2003
14 | prefix: DevOps.moira
15 | interval: 60
16 |
17 | checker:
18 | nodata_check_interval: 60
19 | check_interval: 10
20 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | anyjson
2 | pyparsing
3 | python-dateutil
4 | pytz
5 | Twisted==15.2.1
6 | txredisapi
7 | ujson
8 | pyyaml
9 |
--------------------------------------------------------------------------------
/scripts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/scripts/__init__.py
--------------------------------------------------------------------------------
/scripts/migration-v1.2.8.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | sys.path.insert(0,
4 | os.path.abspath(
5 | os.path.join(
6 | os.path.abspath(
7 | os.path.dirname(__file__)),
8 | '../')))
9 |
10 | import anyjson
11 |
12 | from time import time
13 |
14 | from twisted.internet import defer
15 | from twisted.internet import reactor
16 |
17 | from moira import config
18 | from moira import logs
19 | from moira.api.request import is_simple_target
20 | from moira.db import Db
21 | from moira.graphite import datalib
22 | from moira.graphite.datalib import createRequestContext
23 | from moira.graphite.evaluator import evaluateTarget
24 |
25 |
@defer.inlineCallbacks
def migrate_triggers(db):
    """Backfill the 'is_simple_trigger' flag on every stored trigger.

    A trigger counts as simple when it has exactly one target and that target
    evaluates as simple (see is_simple_target). Triggers that already carry
    the flag are skipped. Stops the reactor when finished.
    """
    now = int(time())

    trigger_ids = yield db.getTriggers()
    logs.log.info("triggers count: %d" % len(trigger_ids))

    converted_triggers_count = 0
    simple_triggers_count = 0
    complex_triggers_count = 0
    failed_triggers_count = 0
    for trigger_id in trigger_ids:
        try:
            json, _ = yield db.getTrigger(trigger_id)
            if json is None:
                continue

            trigger = anyjson.deserialize(json)
            if "is_simple_trigger" in trigger:
                # Already migrated on a previous run.
                continue

            logs.log.info("recalculating for trigger %s (%s)" % (trigger_id, trigger.get("name")))
            # Evaluate over the last 10 minutes just to classify the target.
            context = createRequestContext(str(now - 600), str(now), allowRealTimeAlerting=True)
            if len(trigger["targets"]) != 1:
                is_simple_trigger = False
            else:
                yield evaluateTarget(context, trigger["targets"][0])
                is_simple_trigger = is_simple_target(context)
            trigger["is_simple_trigger"] = is_simple_trigger
            logs.log.info(str(trigger["is_simple_trigger"]))

            yield db.saveTrigger(trigger_id, trigger)

            converted_triggers_count += 1
            if is_simple_trigger:
                simple_triggers_count += 1
            else:
                complex_triggers_count += 1
        except Exception, e:
            # Keep going: count the failure and move to the next trigger.
            failed_triggers_count += 1
            logs.log.error("conversion failed for trigger: %s" % e)

    logs.log.info("%d triggers converted, %d simple, %d complex, %d failed" %
                  (converted_triggers_count, simple_triggers_count, complex_triggers_count, failed_triggers_count))
    reactor.stop()
71 |
72 |
if __name__ == '__main__':
    # Bootstrap config and logging, wire the shared Db into graphite datalib,
    # then run the migration once the redis connection is up.
    config.read()
    logs.checker_worker()

    db = Db()
    datalib.db = db
    init = db.startService()
    init.addCallback(migrate_triggers)

    reactor.run()
83 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages

# Runtime dependencies are maintained in requirements.txt, one per line.
with open('requirements.txt') as f:
    required = f.read().splitlines()

# version.txt is produced at build time (it is git-ignored).
with open('version.txt') as f:
    version = f.read().strip()

setup(
    name="moira_worker",
    author="SKB Kontur",
    version=version,
    author_email="devops@skbkontur.ru",
    description="Moira checker and api modules",
    license="GPLv3",
    keywords="moira graphite alert monitoring",
    url="https://github.com/moira-alert",
    packages=find_packages(exclude=['tests']),
    long_description='Please, visit moira.readthedocs.org for more information',
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
    ],
    # Console entry points for the two services shipped by this package.
    entry_points={
        'console_scripts': ['moira-api = moira.api.server:run',
                            'moira-checker = moira.checker.server:run'],
    },
    install_requires=required,
)
30 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/moira-alert/worker/16b67706f31877a4cbcc1901492b1a23c65f4236/tests/__init__.py
--------------------------------------------------------------------------------
/tests/functional/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | sys.path.insert(0,
4 | os.path.abspath(
5 | os.path.join(
6 | os.path.abspath(
7 | os.path.dirname(__file__)),
8 | '../../')))
9 |
10 | from fakeredis import FakeStrictRedis, FakePipeline
11 | from StringIO import StringIO
12 | from twisted.trial import unittest
13 | from twisted.web import client
14 | from twisted.internet import reactor, protocol
15 | from twisted.internet.defer import Deferred, inlineCallbacks
16 | from moira.api.site import Site
17 | from moira.checker.master import MasterProtocol
18 | from moira.graphite import datalib
19 | from moira.checker.worker import TriggersCheck
20 | from moira.checker.trigger import Trigger
21 | from moira import db
22 | from moira.logs import log, init
23 | from moira import config
24 |
# Functional tests always log verbosely to stdout.
config.LOG_LEVEL = 'info'
init(sys.stdout)
27 |
28 |
def trigger(trigger_id):
    """Test-method decorator: attach Trigger(trigger_id) to the test instance.

    The wrapped method's first positional argument is the test case; its
    `trigger` attribute is set before the method body runs.
    """
    def decorator(test_method):
        def wrapper(*args, **kwargs):
            test_case = args[0]
            test_case.trigger = Trigger(trigger_id, test_case.db)
            return test_method(*args, **kwargs)
        return wrapper
    return decorator
37 |
38 |
class TwistedFakeTransaction(object):
    """Adapts a fake pipeline to txredisapi's transaction interface.

    Usable as a context manager (yields the underlying pipeline and resets it
    on exit) and via commit(), which executes the pipeline.
    """

    def __init__(self, pipeline):
        self.pipeline = pipeline

    def __getattr__(self, name):
        # Delegate unknown attributes to the pipeline. Use getattr() rather
        # than pipeline.__getattr__(name): normal attribute lookup must find
        # the pipeline's regular methods, which a direct __getattr__ call
        # would miss (raising AttributeError for every ordinary method).
        return getattr(self.pipeline, name)

    def __enter__(self):
        return self.pipeline

    def __exit__(self, exc_type, exc_value, traceback):
        self.pipeline.reset()

    def commit(self):
        return self.pipeline.execute()
56 |
class TwistedFakePipeline(FakePipeline):
    """FakePipeline exposing txredisapi's execute_pipeline() spelling."""

    def __init__(self, owner, transaction=True):
        super(TwistedFakePipeline, self).__init__(owner, transaction)

    def execute_pipeline(self):
        """txredisapi name for executing the queued commands."""
        return super(TwistedFakePipeline, self).execute()
64 |
65 |
class TwistedFakeRedis(FakeStrictRedis):
    """fakeredis adapted to the txredisapi call signatures used by moira.db.

    txredisapi and redis-py disagree on several method names and keyword
    arguments; each override below translates one call shape.
    """

    def __init__(self):
        super(TwistedFakeRedis, self).__init__()

    def zrange(self, key, start=0, end=-1, withscores=False):
        return FakeStrictRedis.zrange(self, key, start, end, withscores=withscores)

    def zrevrange(self, key, start=0, end=-1, withscores=False):
        # Reverse order via desc=True on the plain zrange.
        return FakeStrictRedis.zrange(self, key, start, end, desc=True, withscores=withscores)

    def zrangebyscore(self, key, min='-inf', max='+inf',
                      withscores=False, offset=None, count=None):
        # txredisapi's offset/count map to redis-py's start/num.
        return FakeStrictRedis.zrangebyscore(self, key, min, max, start=offset, num=count, withscores=withscores)

    def multi(self):
        # txredisapi-style transaction: wrap a transactional pipeline.
        return TwistedFakeTransaction(self.pipeline(transaction=True))

    def disconnect(self):
        # No real connection to tear down.
        pass

    def pipeline(self, transaction=True):
        return TwistedFakePipeline(self, transaction)

    def getset(self, name, value):
        # Emulate GETSET directly on the backing _db dict.
        val = self._db.get(name)
        self._db[name] = value
        return val

    def set(self, key, value, expire=None, pexpire=None,
            only_if_not_exists=False, only_if_exists=False):
        # txredisapi keyword names -> redis-py's ex/px/nx/xx.
        return FakeStrictRedis.set(self, key, value, ex=expire, px=pexpire, nx=only_if_not_exists,
                                   xx=only_if_exists)
99 |
100 |
class BodyReceiver(protocol.Protocol):
    """Buffers an HTTP response body; `finished` fires with the complete text
    once the connection closes."""

    def __init__(self):
        self.finished = Deferred()
        self.content = StringIO()

    def dataReceived(self, bytes):
        # Accumulate each incoming chunk.
        self.content.write(bytes)

    def connectionLost(self, reason):
        # Body delivery is over - hand everything buffered to the waiter.
        self.finished.callback(self.content.getvalue())
112 |
113 |
class WorkerTests(unittest.TestCase):
    """Base class for functional tests: fake redis plus a real HTTP API server."""

    @inlineCallbacks
    def setUp(self):
        # Real Db service backed by the in-memory fake redis.
        self.db = db.Db()
        self.db.rc = TwistedFakeRedis()
        yield self.db.startService()
        yield self.db.flush()
        datalib.db = self.db
        site = Site(self.db)
        self.protocol = MasterProtocol()
        self.protocol.factory = self
        # Port 0: let the OS pick a free port; it is baked into url_prefix.
        self.port = reactor.listenTCP(0, site, interface="127.0.0.1")
        self.client = client.Agent(reactor)
        self.url_prefix = 'http://localhost:{0}{1}/'.format(
            self.port.getHost().port, site.prefix)
        self.now = int(reactor.seconds())
        self.check = TriggersCheck(self.db)

    @inlineCallbacks
    def tearDown(self):
        yield self.db.stopService()
        yield self.port.stopListening()

    @inlineCallbacks
    def assert_trigger_metric(self, metric, value, state):
        """Assert the trigger's last check recorded (value, state) for *metric*.

        When *metric* is an int it acts as a placeholder: an arbitrary metric
        from the check is used (for tests that do not know the name upfront).
        """
        check = yield self.db.getTriggerLastCheck(self.trigger.id)
        log.info("Received check: {check}", check=check)
        self.assertIsNot(check, None)
        metric = [m for m in check["metrics"].itervalues()][0] \
            if isinstance(metric, int) \
            else check["metrics"].get(metric, {})
        self.assertEquals(value, metric.get("value"))
        self.assertEquals(state, metric.get("state"))
--------------------------------------------------------------------------------
/tests/functional/test_api.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import anyjson
3 | from . import trigger, WorkerTests, BodyReceiver
4 | from twisted.internet.defer import inlineCallbacks, returnValue
5 | from twisted.web import http, client
6 | from twisted.web.http_headers import Headers
7 | from StringIO import StringIO
8 | from moira.checker import state
9 |
10 |
11 | class ApiTests(WorkerTests):
12 |
    @inlineCallbacks
    def request(self, method, url, content=None, state=http.OK, add_headers=None):
        """Issue an API request as user 'tester' and return (response, body).

        Asserts the response code equals *state*; JSON responses are decoded
        into Python objects before being returned.
        """
        body = None if content is None else client.FileBodyProducer(
            StringIO(content))
        headers = {'Content-Type': ['application/json'], 'X-WebAuth-User': ['tester']}
        if add_headers:
            headers.update(add_headers)
        response = yield self.client.request(method,
                                             self.url_prefix + url,
                                             Headers(headers),
                                             body)
        self.assertEqual(state, response.code)
        body_receiver = BodyReceiver()
        response.deliverBody(body_receiver)
        body = yield body_receiver.finished
        if response.headers.getRawHeaders('content-type') == ['application/json']:
            body = anyjson.loads(body)
        returnValue((response, body))
31 |
32 | @trigger("not-existing")
33 | @inlineCallbacks
34 | def testTriggerNotFound(self):
35 | response, body = yield self.request('GET', 'trigger/{0}'.format(self.trigger.id), state=http.NOT_FOUND)
36 |
37 | @trigger("throttling")
38 | @inlineCallbacks
39 | def testThrottling(self):
40 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
41 | '{"name": "test trigger", "targets": ["DevOps.Metric"], \
42 | "warn_value": "1e-7", "error_value": 50, "tags": ["tag1", "tag2"] }',
43 | )
44 | yield self.db.setTriggerThrottling(self.trigger.id, self.now + 3600)
45 | yield self.db.addThrottledEvent(self.trigger.id, self.now + 3600, {'trigger_id': self.trigger.id})
46 | response, json = yield self.request('GET', 'trigger/{0}/throttling'.format(self.trigger.id))
47 | self.assertTrue(json['throttling'])
48 | response, json = yield self.request('DELETE', 'trigger/{0}/throttling'.format(self.trigger.id))
49 | response, json = yield self.request('GET', 'trigger/{0}/throttling'.format(self.trigger.id))
50 | self.assertFalse(json['throttling'])
51 |
    @inlineCallbacks
    def testPatternCleanup(self):
        """Replacing / deleting a trigger drops patterns that are no longer referenced."""
        response, body = yield self.request('PUT', 'trigger/name',
                                            '{"targets": ["DevOps.*.Metric"], \
                                                "warn_value": 1, "error_value": 2}')
        patterns = yield self.db.getPatterns()
        self.assertEqual(list(patterns), ["DevOps.*.Metric"])
        response, body = yield self.request('PUT', 'trigger/name',
                                            '{"targets": ["DevOps.*.OtherMetric"], \
                                                "warn_value": 1, "error_value": 2}')
        patterns = yield self.db.getPatterns()
        self.assertEqual(list(patterns), ["DevOps.*.OtherMetric"])
        response, body = yield self.request('DELETE', 'trigger/name')
        patterns = yield self.db.getPatterns()
        self.assertEqual(len(patterns), 0)
67 |
68 | @trigger("last-check-cleanup")
69 | @inlineCallbacks
70 | def testLastCheckCleanup(self):
71 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
72 | '{"targets": ["aliasByNode(DevOps.*.Metric, 1)"], \
73 | "warn_value": 1, "error_value": 2}')
74 | yield self.db.sendMetric('DevOps.*.Metric', 'DevOps.Node1.Metric', self.now - 60, 0)
75 | yield self.db.sendMetric('DevOps.*.Metric', 'DevOps.Node2.Metric', self.now - 60, 0)
76 | yield self.trigger.check()
77 | yield self.db.delPatternMetrics('DevOps.*.Metric')
78 | check = yield self.db.getTriggerLastCheck(self.trigger.id)
79 | self.assertTrue('Node1' in check['metrics'])
80 | yield self.db.sendMetric('DevOps.*.Metric', 'DevOps.Node1.Metric', self.now, 0)
81 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
82 | '{"targets": ["aliasByNode(DevOps.*.Metric, 1)"], \
83 | "warn_value": 1, "error_value": 2}')
84 | check = yield self.db.getTriggerLastCheck(self.trigger.id)
85 | self.assertTrue('Node1' in check['metrics'])
86 | self.assertFalse('Node2' in check['metrics'])
87 |
88 | @trigger("delete-tag")
89 | @inlineCallbacks
90 | def testTagDeletion(self):
91 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
92 | '{"name": "test trigger", "targets": ["sumSeries(*)"], \
93 | "warn_value": "1e-7", "error_value": 50, "tags": ["tag1", "tag2"] }',
94 | )
95 | response, body = yield self.request('GET', 'trigger/{0}'.format(self.trigger.id))
96 | response, body = yield self.request('DELETE', 'tag/tag1', state=http.BAD_REQUEST)
97 | response, body = yield self.request('DELETE', 'trigger/{0}'.format(self.trigger.id))
98 | response, body = yield self.request('DELETE', 'tag/tag1')
99 |
100 | @trigger("good-trigger")
101 | @inlineCallbacks
102 | def testTargetWithBracketsPUT(self):
103 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
104 | '{"name": "test trigger", "targets": ["aliasByNode(KE.Databases.{Mirroring-1,AG}.*.IsSynchronized,3)"], \
105 | "warn_value": "1e-7", "error_value": 50, "tags": ["tag1", "tag2"] }',
106 | )
107 |
108 | @trigger("good-trigger")
109 | @inlineCallbacks
110 | def testTargetWithNonAsciiNamePUT(self):
111 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
112 | '{"name": "Тестовый триггер", "targets": ["Metric.One"], \
113 | "warn_value": "1e-7", "error_value": 50, "tags": ["tag1", "tag2"] }',
114 | )
115 |
116 | @trigger("good-trigger")
117 | @inlineCallbacks
118 | def testSimpleTriggerPUT(self):
119 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
120 | '{"name": "test trigger", "targets": ["sumSeries(*)"], \
121 | "warn_value": "1e-7", "error_value": 50, "tags": ["tag1", "tag2"] }',
122 | )
123 | response, tags = yield self.request('GET', 'tag/stats')
124 | response, patterns = yield self.request('GET', 'pattern')
125 | self.assertEqual(2, len(tags["list"]))
126 | self.assertEqual(1, len(patterns["list"]))
127 | self.assertEqual(self.trigger.id, patterns["list"][0]["triggers"][0]["id"])
128 | response, triggers = yield self.request('GET', 'trigger')
129 | self.assertEqual(1, len(triggers["list"]))
130 |
131 | @trigger("trigger-update")
132 | @inlineCallbacks
133 | def testTriggerUpdate(self):
134 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
135 | '{"name": "test trigger", "targets": ["sumSeries(*)"], \
136 | "warn_value": "1", "error_value": 50, "tags": ["tag1", "tag2"] }',
137 | )
138 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
139 | '{"name": "test trigger", "targets": ["sumSeries(*)"], \
140 | "warn_value": "1", "error_value": 50, "tags": ["tag1"] }',
141 | )
142 |
143 | @trigger("good-trigger")
144 | @inlineCallbacks
145 | def testTriggersPaging(self):
146 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
147 | '{"name": "test trigger", "targets": ["sumSeries(*)"], \
148 | "warn_value": "1e-7", "error_value": 50, "tags": ["tag1", "tag2"] }')
149 | yield self.trigger.check()
150 | response, triggers = yield self.request('GET', 'trigger/page?p=0&size=10')
151 | self.assertEqual(1, len(triggers["list"]))
152 | self.assertEqual(0, triggers["page"])
153 | self.assertEqual(10, triggers["size"])
154 | self.assertEqual(1, triggers["total"])
155 |
156 | response, triggers = yield self.request('GET', 'trigger/page?p=0&size=10',
157 | add_headers={'Cookie': ['moira_filter_tags=tag1; \
158 | moira_filter_ok=true']})
159 | self.assertEqual(1, len(triggers["list"]))
160 | self.assertEqual(1, triggers["total"])
161 |
162 | response, triggers = yield self.request('GET', 'trigger/page?p=0&size=10',
163 | add_headers={'Cookie': ['moira_filter_tags=']})
164 | self.assertEqual(1, len(triggers["list"]))
165 | self.assertEqual(1, triggers["total"])
166 |
167 | response, triggers = yield self.request('GET', 'trigger/page?p=0&size=10',
168 | add_headers={'Cookie': ['moira_filter_tags=not-exising; \
169 | moira_filter_ok=true']})
170 | self.assertEqual(0, len(triggers["list"]))
171 | self.assertEqual(0, triggers["total"])
172 |
173 | @trigger("expression-trigger")
174 | @inlineCallbacks
175 | def testExpressionTriggerPUT(self):
176 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
177 | '{"name": "test trigger", "targets": ["sumSeries(*)"], \
178 | "tags": ["tag1", "tag2"], "expression": "ERROR if t1 > 1 else OK" }',
179 | )
180 | response, triggers = yield self.request('GET', 'trigger')
181 | self.assertEqual(1, len(triggers["list"]))
182 |
183 | @trigger("not-json-trigger")
184 | @inlineCallbacks
185 | def testSendNotJsonTrigger(self):
186 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
187 | "i am not json", http.BAD_REQUEST)
188 | self.flushLoggedErrors()
189 | self.assertEqual("Content is not json", body)
190 |
191 | @trigger("not-list-targets")
192 | @inlineCallbacks
193 | def testSendNotListTargets(self):
194 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
195 | '{ "name":"111", \
196 | "targets":{"target":"DevOps.system.*.loadavg.load"}, \
197 | "expression":"WARN if t1 > 10 else OK", \
198 | "tags":"1111" }', http.BAD_REQUEST)
199 | self.flushLoggedErrors()
200 | self.assertEqual("Invalid trigger targets", body)
201 |
202 | @trigger("invalid-expression-trigger")
203 | @inlineCallbacks
204 | def testSendInvalidExpressionTrigger(self):
205 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
206 | '{"name":"test trigger","targets":["metric"], \
207 | "warn_value":-0.1, "error_value":0.1,"ttl":600,"ttl_state":"NODATA", \
208 | "tags":["tag1"],"expression":"ERROR if"}', http.BAD_REQUEST)
209 | self.flushLoggedErrors()
210 | self.assertEqual("Invalid expression", body)
211 |
212 | @trigger("wrong-time-span")
213 | @inlineCallbacks
214 | def testSendWrongTimeSpan(self):
215 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
216 | '{"name": "test trigger", "targets": ["movingAverage(*, \\"10g\\")"], \
217 | "warn_value": "1e-7", "error_value": 50}', http.BAD_REQUEST)
218 | self.flushLoggedErrors()
219 | self.assertEqual("Invalid graphite targets", body)
220 |
221 | @trigger("without-warn-value")
222 | @inlineCallbacks
223 | def testSendWithoutWarnValue(self):
224 | response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
225 | '{"name": "test trigger", "targets": ["sumSeries(*)"], "error_value": 50 }',
226 | http.BAD_REQUEST)
227 | self.flushLoggedErrors()
228 | self.assertEqual("warn_value is required", body)
229 |
230 | @trigger("test-events")
231 | @inlineCallbacks
232 | def testEvents(self):
233 | yield self.db.pushEvent({
234 | "trigger_id": self.trigger.id,
235 | "state": state.OK,
236 | "old_state": state.WARN,
237 | "timestamp": self.now - 120,
238 | "metric": "test metric"
239 | })
240 | yield self.db.pushEvent({
241 | "trigger_id": self.trigger.id,
242 | "state": state.WARN,
243 | "old_state": state.OK,
244 | "timestamp": self.now,
245 | "metric": "test metric"
246 | })
247 | response, events = yield self.request('GET', 'event/{0}'.format(self.trigger.id))
248 | self.assertEqual(2, len(events['list']))
249 | response, events = yield self.request('GET', 'event')
250 | self.assertEqual(2, len(events['list']))
251 |
    @inlineCallbacks
    def testUserContact(self):
        """A contact can be created, shows up in user settings, and is deletable."""
        contact = {'value': 'tester@company.com',
                   'type': 'email'}
        response, saved = yield self.request('PUT', 'contact', anyjson.dumps(contact))
        # The server fills in the generated id and the authenticated user.
        contact['id'] = saved['id']
        contact['user'] = 'tester'
        self.assertEqual(contact, saved)
        response, settings = yield self.request('GET', 'user/settings')
        self.assertEqual([contact], settings["contacts"])
        response, settings = yield self.request('GET', 'user/settings')
        self.assertEqual(contact['id'], settings["contacts"][0]["id"])
        response, body = yield self.request('DELETE', 'contact/' + str(contact['id']))
        response, settings = yield self.request('GET', 'user/settings')
        self.assertEqual([], settings["contacts"])
267 |
    @inlineCallbacks
    def testUserSubscriptions(self):
        """Subscriptions follow tag membership and disappear when deleted."""
        contact = {'value': 'tester@company.com',
                   'type': 'email'}
        response, contact = yield self.request('PUT', 'contact', anyjson.dumps(contact))
        response, sub = yield self.request('PUT', 'subscription', anyjson.dumps({
            "contacts": [contact["id"]],
            "tags": ["devops", "tag1"]
        }))
        response, body = yield self.request('PUT', 'subscription/' + str(sub["id"]) + "/test")
        response, subscriptions = yield self.request('GET', 'subscription')
        self.assertEqual(sub['id'], subscriptions["list"][0]["id"])
        response, settings = yield self.request('GET', 'user/settings')
        self.assertEqual(sub['id'], settings["subscriptions"][0]["id"])
        subs = yield self.db.getTagSubscriptions("devops")
        self.assertEqual(sub["id"], subs[0]["id"])
        subs = yield self.db.getTagSubscriptions("tag1")
        self.assertEqual(sub["id"], subs[0]["id"])
        # Removing a tag from the subscription detaches it from that tag only.
        sub["tags"].remove("tag1")
        response, updated_sub = yield self.request('PUT', 'subscription', anyjson.serialize(sub))
        subs = yield self.db.getTagSubscriptions("tag1")
        self.assertEqual(len(subs), 0)
        response, updated_sub = yield self.request('DELETE', 'subscription/' + str(sub["id"]))
        subs = yield self.db.getTagSubscriptions("devops")
        self.assertEqual(len(subs), 0)
293 |
294 | @inlineCallbacks
295 | def testUserContactDelete(self):
296 | contact = {'value': 'tester@company.com',
297 | 'type': 'email'}
298 | response, contact = yield self.request('PUT', 'contact', anyjson.dumps(contact))
299 | response, sub = yield self.request('PUT', 'subscription', anyjson.dumps({
300 | "contacts": [contact["id"]],
301 | "tags": ["devops", "tag1"]
302 | }))
303 | response, body = yield self.request('PUT', 'subscription/' + str(sub["id"]) + "/test")
304 | response, body = yield self.request('DELETE', 'contact/' + str(contact["id"]))
305 | response, subscriptions = yield self.request('GET', 'subscription')
306 | self.assertNotIn(contact['id'], subscriptions["list"][0]["contacts"])
307 |
    @trigger("test-metrics")
    @inlineCallbacks
    def testMetricDeletion(self):
        """Delete a single metric of a pattern trigger via the API.

        Sends points for two metrics matching one pattern, creates a trigger
        on that pattern, then DELETEs metric1 and checks both the pattern
        index and the trigger's last-check data.
        """
        pattern = "devops.functest.*"
        metric1 = "devops.functest.m1"
        metric2 = "devops.functest.m2"
        yield self.db.sendMetric(pattern, metric1, self.now - 60, 1)
        yield self.db.sendMetric(pattern, metric1, self.now, 2)
        yield self.db.sendMetric(pattern, metric2, self.now, 3)
        response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
                                            '{"name": "test trigger", "targets": ["' + pattern + '"], \
                                            "warn_value": 5, "error_value": 10 }',
                                            )
        # The metrics endpoint returns the stored points for the window.
        response, metrics = yield self.request('GET', 'trigger/{0}/metrics?from={1}&to={2}'
                                               .format(self.trigger.id, self.now - 60, self.now))
        self.assertEqual(2, len(metrics))
        self.assertEqual([1, 2], [v['value'] for v in metrics[metric1]])
        metrics = yield self.db.getPatternMetrics(pattern)
        self.assertTrue(metric1 in metrics)
        self.assertTrue(metric2 in metrics)
        yield self.trigger.check()
        check = yield self.db.getTriggerLastCheck(self.trigger.id)
        self.assertEqual(2, len(check['metrics']))
        response, data = yield self.request('DELETE', 'trigger/{0}/metrics?name={1}'
                                            .format(self.trigger.id, metric1))
        metrics = yield self.db.getPatternMetrics(pattern)
        self.assertFalse(metric1 in metrics)
        # NOTE(review): only metric1 was deleted, yet metric2 is also expected
        # to be absent from the pattern index here — presumably the deletion
        # clears the whole pattern's metric set; confirm against the API code.
        self.assertFalse(metric2 in metrics)
        check = yield self.db.getTriggerLastCheck(self.trigger.id)
        self.assertEqual(1, len(check['metrics']))
338 |
    @trigger("test-trigger-maintenance")
    @inlineCallbacks
    def testTriggerMaintenance(self):
        """Tag maintenance suppresses events for triggers carrying that tag."""
        metric = "devops.functest.m"
        yield self.db.sendMetric(metric, metric, self.now - 60, 0)
        response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
                                            '{"name": "test trigger", "targets": ["' + metric + '"], \
                                            "warn_value": 0, "error_value": 1, "tags":["tag1"] }',
                                            )
        # Put tag1 into maintenance (timestamp self.now).
        response, _ = yield self.request('PUT', 'tag/tag1/data', anyjson.dumps({"maintenance": self.now}))
        # While maintenance is set, the check produces no events.
        yield self.trigger.check(now=self.now - 1)
        events, total = yield self.db.getEvents()
        self.assertEqual(0, total)
        # Clear the maintenance flag; a threshold-crossing value now fires.
        response, _ = yield self.request('PUT', 'tag/tag1/data', anyjson.dumps({}))
        yield self.db.sendMetric(metric, metric, self.now, 1)
        yield self.trigger.check()
        events, total = yield self.db.getEvents()
        self.assertEqual(1, total)
357 |
    @trigger("test-trigger-maintenance2")
    @inlineCallbacks
    def testTriggerMaintenance2(self):
        """Events suppressed under tag maintenance resume after it is cleared."""
        metric = "devops.functest.m"
        yield self.db.sendMetric(metric, metric, self.now - 60, 1)
        response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
                                            '{"name": "test trigger", "targets": ["' + metric + '"], \
                                            "warn_value": 1, "error_value": 2, "tags":["tag1"] }',
                                            )
        response, _ = yield self.request('PUT', 'tag/tag1/data', anyjson.dumps({"maintenance": self.now}))
        # While tag1 maintenance is set, the check produces no events.
        yield self.trigger.check(now=self.now - 1)
        events, total = yield self.db.getEvents()
        self.assertEqual(0, total)
        # Clear maintenance, then feed values crossing WARN over two minutes.
        response, _ = yield self.request('PUT', 'tag/tag1/data', anyjson.dumps({}))
        yield self.db.sendMetric(metric, metric, self.now, 0)
        yield self.db.sendMetric(metric, metric, self.now + 60, 1)
        yield self.db.sendMetric(metric, metric, self.now + 120, 1)
        yield self.trigger.check(now=self.now + 120)
        yield self.assert_trigger_metric(metric, 1, state.WARN)
        # Two events: the transitions that happened after maintenance ended.
        events, total = yield self.db.getEvents()
        self.assertEqual(2, total)
379 |
    @trigger("test-metric-maintenance")
    @inlineCallbacks
    def testMetricMaintenance(self):
        """Per-metric maintenance suppresses events for that metric only."""
        metric = "devops.functest.m"
        yield self.db.sendMetric(metric, metric, self.now - 60, 0)
        response, body = yield self.request('PUT', 'trigger/{0}'.format(self.trigger.id),
                                            '{"name": "test trigger", "targets": ["' + metric + '"], \
                                            "warn_value": 0, "error_value": 1, "tags":["tag1"] }',
                                            )
        # The initial check produces one event.
        yield self.trigger.check(now=self.now - 1)
        events, total = yield self.db.getEvents()
        self.assertEqual(1, total)
        # Put this single metric into maintenance on the trigger.
        response, _ = yield self.request('PUT', 'trigger/{0}/maintenance'.format(self.trigger.id),
                                         anyjson.dumps({metric: self.now}))
        # A state change during maintenance must not add an event.
        yield self.db.sendMetric(metric, metric, self.now, 1)
        yield self.trigger.check()
        events, total = yield self.db.getEvents()
        self.assertEqual(1, total)
        # After the maintenance timestamp has passed, events flow again.
        yield self.db.sendMetric(metric, metric, self.now + 60, -1)
        yield self.trigger.check(now=self.now + 60)
        events, total = yield self.db.getEvents()
        self.assertEqual(2, total)
402 |
403 | @inlineCallbacks
404 | def testUserLogin(self):
405 | response, user = yield self.request('GET', 'user')
406 | self.assertEqual('tester', user["login"])
407 |
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
import os
import sys

# Make the repository root importable when tests are run from this package.
sys.path.insert(0, os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..', '..')))
9 |
--------------------------------------------------------------------------------
/tests/unit/test_cache.py:
--------------------------------------------------------------------------------
1 | from twisted.trial import unittest
2 | from moira.cache import cache
3 | from twisted.internet.defer import inlineCallbacks
4 |
5 |
class Cache(unittest.TestCase):
    """Exercise the moira.cache.cache decorator."""

    @cache
    @inlineCallbacks
    def function(self, items):
        # The side effect lets the test count how many times the body ran.
        items.append(1)
        yield None

    @inlineCallbacks
    def testCache(self):
        calls = []
        # No cache key: the body always runs.
        yield self.function(calls)
        # First keyed call: the body runs and the result is cached.
        yield self.function(calls, cache_key=1, cache_ttl=10)
        self.assertEqual(len(calls), 2)
        # Second keyed call within the ttl: served from cache, body skipped.
        yield self.function(calls, cache_key=1, cache_ttl=10)
        self.assertEqual(len(calls), 2)
22 |
--------------------------------------------------------------------------------
/tests/unit/test_expression.py:
--------------------------------------------------------------------------------
1 | from twisted.trial import unittest
2 | from moira.checker import state, expression
3 |
4 |
class Expression(unittest.TestCase):
    """Tests for moira.checker.expression.getExpression."""

    def testDefault(self):
        # Rising thresholds (warn < error) and falling thresholds (warn > error).
        cases = [
            (10, 60, 90, state.OK),
            (60, 60, 90, state.WARN),
            (90, 60, 90, state.ERROR),
            (40, 30, 10, state.OK),
            (20, 30, 10, state.WARN),
            (10, 30, 10, state.ERROR),
        ]
        for t1, warn, error, expected in cases:
            self.assertEqual(
                expression.getExpression(t1=t1, warn_value=warn, error_value=error),
                expected)
        # Keyword expansion behaves the same as explicit keyword arguments.
        self.assertEqual(
            expression.getExpression(**{'t1': 10, 'warn_value': 30, 'error_value': 10}),
            state.ERROR)

    def testCustom(self):
        # A user-supplied conditional expression is evaluated over the targets.
        self.assertEqual(
            expression.getExpression("ERROR if t1 > 10 and t2 > 3 else OK", t1=11, t2=4),
            state.ERROR)
        # Function calls and lambdas must be rejected by the safe evaluator.
        for forbidden in ("ERROR if f.min(t1,t2) else OK", "(lambda f: ())"):
            with self.assertRaises(expression.ExpressionError):
                expression.getExpression(forbidden, t1=11, t2=4)
22 |
--------------------------------------------------------------------------------
/tests/unit/test_fetchdata.py:
--------------------------------------------------------------------------------
1 | from twisted.trial import unittest
2 | from moira.graphite.datalib import unpackTimeSeries
3 |
4 |
class FetchData(unittest.TestCase):
    """Tests for moira.graphite.datalib.unpackTimeSeries.

    Each scenario feeds raw redis data points into unpackTimeSeries and
    checks which values are visible at a given moment, both with
    allowRealTimeAlerting=False (conservative: only closed retention
    intervals are reported) and with allowRealTimeAlerting=True (the
    current, still-open interval is reported as well).
    """

    def generateRedisDataPoint(self, timestamp, value):
        # Redis keeps every point as a ("<timestamp> <value>", score) pair.
        return ("%f %f" % (timestamp, value), timestamp)

    def _unpacked(self, dataList, retention, startTime, now, realtime, bootstrap=False):
        # Thin wrapper so each test reads as a table of (now, expected) checks.
        return unpackTimeSeries(dataList, retention, startTime, now,
                                bootstrap=bootstrap,
                                allowRealTimeAlerting=realtime)

    def testConservativeShiftedSeries(self):
        retention = 10
        startTime = 0
        dataList = [[]]

        # time == 0
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, False), [[]])

        # time == 5: a point in a still-open interval is withheld
        dataList[0].append(self.generateRedisDataPoint(5, 100.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 5, False), [[]])

        # time == 9, 10, 11: the point appears once its interval closes
        self.assertEqual(self._unpacked(dataList, retention, startTime, 9, False), [[]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 10, False), [[100.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 11, False), [[100.]])

        # time == 15
        dataList[0].append(self.generateRedisDataPoint(15, 200.))

        # time == 25
        dataList[0].append(self.generateRedisDataPoint(25, 300.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 25, False), [[100., 200.]])

        # time == 29, 30
        self.assertEqual(self._unpacked(dataList, retention, startTime, 29, False), [[100., 200.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 30, False), [[100., 200., 300.]])

    def testRealTimeShiftedSeries(self):
        retention = 10
        startTime = 0
        dataList = [[]]

        # time == 0
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, True), [[]])

        # time == 5: real-time mode reports the open interval immediately
        dataList[0].append(self.generateRedisDataPoint(5, 100.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 5, True), [[100.]])

        # time == 9, 10, 11
        self.assertEqual(self._unpacked(dataList, retention, startTime, 9, True), [[100.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 10, True), [[100.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 11, True), [[100.]])

        # time == 15
        dataList[0].append(self.generateRedisDataPoint(15, 200.))

        # time == 25
        dataList[0].append(self.generateRedisDataPoint(25, 300.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 25, True), [[100., 200., 300.]])

        # time == 29, 30
        self.assertEqual(self._unpacked(dataList, retention, startTime, 29, True), [[100., 200., 300.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 30, True), [[100., 200., 300.]])

    def testConservativeAlignedSeries(self):
        retention = 10
        startTime = 0
        dataList = [[]]

        # time == 0
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, False), [[]])
        dataList[0].append(self.generateRedisDataPoint(0, 100.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, False), [[]])

        # time == 9, 10
        self.assertEqual(self._unpacked(dataList, retention, startTime, 9, False), [[]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 10, False), [[100.]])
        dataList[0].append(self.generateRedisDataPoint(10, 200.))

        # time == 20
        dataList[0].append(self.generateRedisDataPoint(20, 300.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, False), [[100., 200.]])

        # time == 29, 30
        self.assertEqual(self._unpacked(dataList, retention, startTime, 29, False), [[100., 200.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 30, False), [[100., 200., 300.]])

    def testRealtimeAlignedSeries(self):
        retention = 10
        startTime = 0
        dataList = [[]]

        # time == 0
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, True), [[]])
        dataList[0].append(self.generateRedisDataPoint(0, 100.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, True), [[100.]])

        # time == 9, 10
        self.assertEqual(self._unpacked(dataList, retention, startTime, 9, True), [[100.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 10, True), [[100.]])
        dataList[0].append(self.generateRedisDataPoint(10, 200.))

        # time == 20
        dataList[0].append(self.generateRedisDataPoint(20, 300.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, True), [[100., 200., 300.]])

        # time == 29, 30
        self.assertEqual(self._unpacked(dataList, retention, startTime, 29, True), [[100., 200., 300.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 30, True), [[100., 200., 300.]])

    def testNodataSeries(self):
        retention = 10
        startTime = 0
        dataList = [[]]

        # time == 0
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, False), [[]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, True), [[]])

        # time == 9, 10, 11: closed intervals with no data yield None slots
        self.assertEqual(self._unpacked(dataList, retention, startTime, 9, False), [[]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 9, True), [[]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 10, False), [[None]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 10, True), [[None]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 11, False), [[None]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 11, True), [[None]])

        # time == 20
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, False), [[None, None]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, True), [[None, None]])

    def testConservativeMultipleSeries(self):
        retention = 10
        startTime = 0
        dataList = [[], []]

        # time == 0
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, False), [[], []])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, True), [[], []])
        dataList[0].append(self.generateRedisDataPoint(0, 100.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, False), [[], []])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 0, True), [[100.], []])

        # time == 5
        dataList[1].append(self.generateRedisDataPoint(5, 150.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 5, False), [[], []])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 5, True), [[100.], [150.]])

        # time == 9, 10
        self.assertEqual(self._unpacked(dataList, retention, startTime, 9, False), [[], []])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 9, True), [[100.], [150.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 10, False), [[100.], [150.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 10, True), [[100.], [150.]])
        dataList[0].append(self.generateRedisDataPoint(10, 200.))

        # time == 15
        dataList[1].append(self.generateRedisDataPoint(15, 250.))

        # time == 20
        dataList[0].append(self.generateRedisDataPoint(20, 300.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, False), [[100., 200.], [150., 250.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, True), [[100., 200., 300.], [150., 250.]])

        # time == 25
        dataList[1].append(self.generateRedisDataPoint(25, 350.))

        # time == 29, 30
        self.assertEqual(self._unpacked(dataList, retention, startTime, 29, False), [[100., 200.], [150., 250.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 29, True), [[100., 200., 300.], [150., 250., 350.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 30, False), [[100., 200., 300.], [150., 250., 350.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 30, True), [[100., 200., 300.], [150., 250., 350.]])

    def testNonZeroStartTimeSeries(self):
        retention = 10
        startTime = 2
        dataList = [[]]

        # time == 11
        self.assertEqual(self._unpacked(dataList, retention, startTime, 11, False), [[]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 11, True), [[]])
        dataList[0].append(self.generateRedisDataPoint(11, 100.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 11, False), [[]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 11, True), [[100.]])

        # time == 12
        self.assertEqual(self._unpacked(dataList, retention, startTime, 12, False), [[100.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 12, True), [[100.]])

    def testBootstrapMode(self):
        retention = 10
        startTime = 0
        dataList = [[]]

        # time == 0, 10
        dataList[0].append(self.generateRedisDataPoint(0, 100.))
        dataList[0].append(self.generateRedisDataPoint(10, 200.))

        # time == 20: bootstrap pads the still-missing interval with None
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, True, bootstrap=True), [[100., 200., None]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, False, bootstrap=True), [[100., 200., None]])
        dataList[0].append(self.generateRedisDataPoint(20, 300.))
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, True, bootstrap=True), [[100., 200., 300.]])
        self.assertEqual(self._unpacked(dataList, retention, startTime, 20, False, bootstrap=True), [[100., 200., 300.]])
205 |
--------------------------------------------------------------------------------
/version.txt:
--------------------------------------------------------------------------------
1 | master
2 |
--------------------------------------------------------------------------------