├── .gitignore
├── .travis.yml
├── CHANGELOG.md
├── LICENSE
├── MANIFEST.in
├── README.rst
├── bin
└── grab_oncall.py
├── pygerduty
├── __init__.py
├── common.py
├── events.py
├── exceptions.py
├── v2.py
└── version.py
├── setup.py
├── tests
├── __init__.py
├── addon_test.py
├── client_test.py
├── collection_test.py
├── container_test.py
├── datetime_test.py
├── events_test.py
├── extensions_test.py
├── fixtures
│ ├── addon_update_request_v2.json
│ ├── addon_update_response_v2.json
│ ├── addon_v2.json
│ ├── contacts_v1.json
│ ├── contacts_v2.json
│ ├── event_request.json
│ ├── extensions_list_v2.json
│ ├── get_extension_v2.json
│ ├── get_incident_v2.json
│ ├── incident_get_v2.json
│ ├── incident_list_v2.json
│ ├── incident_postassign.json
│ ├── incident_preassign.json
│ ├── incident_put_v2.json
│ ├── incident_reassign.json
│ ├── incident_resp_v1.json
│ ├── incident_resp_v2.json
│ ├── incident_snooze.json
│ ├── notification_v2.json
│ ├── oncalls_filtered_v2.json
│ ├── oncalls_list_v2.json
│ ├── schedule_list_v1.json
│ ├── schedule_list_v2.json
│ ├── schedule_v2.json
│ ├── user_v1.json
│ └── user_v2.json
├── incident_test.py
├── oncalls_test.py
├── schedule_test.py
└── user_test.py
└── tox.ini
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[cod]
2 | MANIFEST
3 | .cache
4 | .eggs
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Packages
10 | *.egg
11 | *.egg-info
12 | .idea
13 | dist
14 | build
15 | eggs
16 | parts
17 | var
18 | sdist
19 | develop-eggs
20 | .installed.cfg
21 | lib
22 | lib64
23 |
24 | # Installer logs
25 | pip-log.txt
26 |
27 | # Unit test / coverage reports
28 | .coverage
29 | .tox
30 | nosetests.xml
31 |
32 | # Translations
33 | *.mo
34 |
35 | # Mr Developer
36 | .mr.developer.cfg
37 | .project
38 | .pydevproject
39 |
40 | # vim
41 | *.swp
42 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 |
3 | # Keep in-sync with tox.ini.
4 | env:
5 | - TOXENV=py27
6 | - TOXENV=py36
7 |
8 | install:
9 | - "pip install ."
10 | - "pip install tox"
11 |
12 | script: tox
13 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
6 |
7 | ## [Unreleased]
8 |
9 | ## [0.38.3] - 2020-04-22
10 |
11 | ### Fixed
12 | * Fixes for Python 3.7+ compatibility.
13 | * Fixed tests for Python 3.
14 |
15 | ## [0.38.2] - 2019-05-07
16 |
17 | ### Changed
18 | * Consolidated exception types to ease error handling between modules.
19 |
20 | ### Fixed
21 | * Better handling for `oncalls.list()` optional fields.
22 |
23 | ## [0.38.1] - 2019-04-03
24 |
25 | ### Fixed
26 | * Better handling for `oncalls.list()` optional fields.
27 |
28 | ## [0.38.0] - 2018-09-26
29 |
30 | ### Fixed
31 | * `Services.enable()/Services.disable()` fixed.
32 |
33 | ### Added
34 | * `oncalls` is a new endpoint for querying current oncalls.
35 | * `extensions` collection added for querying extensions.
36 |
37 |
38 | ## [0.37.0] - 2017-11-27
39 |
40 | ### Fixed
41 | * `incidents/{id}/snooze` was not deprecated and requires old logic from `_do_action`. Fixed.
42 | * `trigger_summary_data` attribute was removed from incident response, removed `has_subject` function which referenced this.
43 |
44 | ### Added
45 | * `incidents/reassign` is a new endpoint, added logic for this.
46 | * Tests for the new incidents behavior.
47 | * LogEntries default to adding include[]=channels for list and show to get "custom details".
48 |
49 | ## [0.36.3] - 2017-08-10
50 |
51 | ### Fixed
52 | - Bug with Incident.ack/resolve. This should now work.
53 |
54 | ### Changed
55 | - Renamed requester_id to requester to make it more clear you pass an e-mail now instead of user id on various Incident methods.
56 |
57 | ## [0.36.2] - 2017-08-07
58 |
59 | ### Fixed
60 | - `incidents/{id}/{verb}` has been deprecated with v2 of the PagerDuty API. Fixed.
61 |
62 | ### Added
63 | - Tests for the new incidents behavior.
64 |
65 | ## [0.36.1] - 2017-07-11
66 |
67 | ### Added
68 | - This CHANGELOG file.
69 | - `FinalSchedule`, `RenderSchedule` containers added to provide more readable responses.
70 |
71 | ### Removed
72 | - `Entries` collection and `entries` attribute from `Schedule` from v2 module as they're not supported in the v2 API.
73 |
74 | ### Changed
75 | - `Events` class now has a default `Requester` to improve ergonomics.
76 |
77 |
78 | ## [0.36.0] - 2017-07-05
79 |
80 | ### Added
81 | - Forked __init__.py into new v2, common, events modules. These are meant to support the v2 REST API without breaking existing users of pygerduty.
82 | - Lots of tests for v2 module.
83 | - Usage of v2 library to README
84 | - Overview of modules to README
85 |
86 | ### Changed
87 | - Moved inline json into fixture files to clean up unit tests.
88 |
89 | ### Fixed
90 | - setup.py test integrations to properly fail on Travis CI.
91 |
92 | ## [0.35.2] - 2016-12-07
93 | ## [0.35.1] - 2016-08-30
94 | ## [0.35.0] - 2016-06-04
95 | ## [0.34.0] - 2016-03-10
96 | ## [0.33.0] - 2016-03-07
97 | ## [0.32.1] - 2016-02-03
98 | ## [0.32.0] - 2016-02-02
99 | ## [0.31.0] - 2016-01-18
100 | ## [0.30.1] - 2016-01-04
101 | ## [0.30.0] - 2015-12-07
102 | ## [0.29.1] - 2015-11-07
103 | ## [0.29.0] - 2015-10-09
104 | ## [0.28.1] - 2015-09-30
105 | ## [0.28] - 2015-04-01
106 | ## [0.27] - 2015-03-04
107 | ## [0.26] - 2015-03-04
108 | ## [0.25] - 2015-01-02
109 | ## [0.24] - 2014-12-09
110 | ## [0.23] - 2014-12-08
111 | ## [0.22] - 2014-07-01
112 | ## [0.21] - 2014-06-22
113 | ## [0.20] - 2014-05-30
114 | ## [0.19] - 2014-02-19
115 | ## [0.18] - 2013-11-30
116 | ## [0.17] - 2013-10-23
117 | ## [0.16] - 2013-10-01
118 | ## [0.15] - 2013-09-25
119 | ## [0.14] - 2013-08-22
120 | ## [0.13] - 2013-08-07
121 | ## [0.12] - 2012-12-16
122 | ## [0.12] - 2012-12-16
123 | ## [0.11] - 2012-12-13
124 | ## [0.1] - 2012-12-12
125 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2012 Dropbox, Inc., http://www.dropbox.com
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of
4 | this software and associated documentation files (the "Software"), to deal in
5 | the Software without restriction, including without limitation the rights to
6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
7 | of the Software, and to permit persons to whom the Software is furnished to do
8 | so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
20 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include README.rst
3 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 |
2 | .. image:: https://travis-ci.org/dropbox/pygerduty.svg?branch=master
3 | :target: https://travis-ci.org/dropbox/pygerduty
4 |
5 | =========
6 | Pygerduty
7 | =========
8 |
9 | Python Library for PagerDuty's REST API and Events API. This library was originally written to support v1 and
10 | is currently being updated to be compatible with v2 of the API. See "Migrating from v1 to v2" for more details.
11 |
12 | This library is currently evolving and backwards compatibility cannot always be guaranteed at this time.
13 |
14 |
15 | Installation
16 | ============
17 |
18 | You can install with ``pip install pygerduty``.
19 |
20 | If you want to install from source, then ``python setup.py install``.
21 |
22 |
23 | Requirements
24 | ============
25 |
26 | Tested on Python 2.7, or >= 3.6.
27 |
28 | Known to work on Python 2.6.
29 |
30 | Documentation
31 | =============
32 |
33 | Pygerduty is a thin wrapper around PagerDuty's APIs. You will need to refer
34 | to the `PagerDuty Documentation <https://developer.pagerduty.com/>`_ for
35 | all available parameters to pass and all available attributes on responses.
36 |
37 | The main methods available to resources are list, show, create, update, and
38 | delete. Not all resources have endpoints for all of the above methods. Again,
39 | refer to the `PagerDuty Documentation <https://developer.pagerduty.com/>`_ to
40 | see all available endpoints.
41 |
42 | Top level resources will be accessible via the PagerDuty object and nested
43 | resources available on containers returned from their parent resource.
44 |
45 |
46 | Migrating from v1 to v2
47 | =======================
48 |
49 | In order to allow for a smooth transition between versions 1 and 2 of the library,
50 | version 1 library remains in the file called `__init__.py` inside of the pygerduty directory.
51 | Also in that directory you will see four other files:
52 |
53 | - `v2.py` — This file contains all updated logic compatible with v2 of the API.
54 | - `events.py` — PygerDuty also provides an Events API which is separate from the REST API that has had the recent update. Since the logic is mostly disjoint, we have created a new module for logic related to the Events API.
55 | - `common.py` — This file contains all common functions used by both `v2.py` and `events.py`.
56 | - `version.py` — Contains version info.
57 |
58 | See the examples below to see how this affects how you will instantiate a client in v2.
59 |
60 |
61 | Examples
62 | ========
63 |
64 | Instantiating a client:
65 |
66 | Version 1:
67 |
68 | ::
69 |
70 | import pygerduty
71 | pager = pygerduty.PagerDuty("foobar", "SOMEAPIKEY123456")
72 |
73 | Version 2:
74 |
75 | ::
76 |
77 | import pygerduty.v2
78 | pager = pygerduty.v2.PagerDuty("SOMEAPIKEY123456")
79 |
80 | Listing a resource:
81 |
82 | ::
83 |
84 | for schedule in pager.schedules.list():
85 | print(schedule.id, schedule.name)
86 |
87 | # PX7F8S3 Primary
88 | # PJ48C0S Tertiary
89 | # PCJ94SK Secondary
90 |
91 | Getting all schedules associated with a user:
92 |
93 | ::
94 |
95 | user = pager.users.show('PDSKF08')
96 | for schedule in user.schedules.list():
97 | print(schedule.id, schedule.name)
98 |
99 | # PDSARUD Ops
100 | # PTDSKJH Support
101 |
102 | Getting a resource by ID:
103 |
104 | ::
105 |
106 | schedule = pager.schedules.show("PX7F8S3")
107 |
108 | Creating a resource:
109 |
110 | ::
111 |
112 | user = next(pager.users.list(query="gary", limit=1))
113 | override = schedule.overrides.create(
114 | start="2012-12-16", end="2012-12-17", user_id=user.id)
115 |
116 | Delete a resource:
117 |
118 | ::
119 |
120 | schedule.overrides.delete(override.id)
121 |
122 |
123 | Updating a resource:
124 |
125 | ::
126 |
127 |     pager.users.update(user.id, name="Gary Example")
128 |
129 |
130 | Acknowledging a group by incidents:
131 |
132 | ::
133 |
134 | me = next(pager.users.list(query="me@you.com", limit=1))
135 |     for incident in pager.incidents.list(status='triggered'):
136 | incident.acknowledge(requester_id=me.id)
137 |
--------------------------------------------------------------------------------
/bin/grab_oncall.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import datetime
4 | import getpass
5 | import optparse
6 | import pygerduty
7 | import re
8 | import sys
9 |
# Named groups must match datetime.timedelta keyword arguments, since
# get_times() does timedelta(**parse_time_string(...)).
TIME_STRING_RE = re.compile(
    r'(?:(?P<days>\d+)d)?'
    r'(?:(?P<hours>\d+)h)?'
    r'(?:(?P<minutes>\d+)m)?'
    r'(?:(?P<seconds>\d+)s)?'
)


def parse_time_string(time_string):
    """Parse a duration string like '1d2h30m45s' into timedelta kwargs.

    :param time_string: duration with optional d/h/m/s components.
    :returns: dict with 'days', 'hours', 'minutes', 'seconds' int values;
        missing components default to 0.
    """
    times = TIME_STRING_RE.match(time_string).groupdict()
    # dict.items() (not the Python-2-only iteritems()) keeps this script
    # usable on both Python 2 and 3.
    for key, value in times.items():
        if value is None:
            times[key] = 0
        else:
            times[key] = int(value)

    return times
27 |
28 |
def get_times(time_string):
    """Return (now, now + parsed offset) as ISO-8601 'Z' timestamps."""
    offset = datetime.timedelta(**parse_time_string(time_string))
    start = datetime.datetime.utcnow()
    return isoformat(start), isoformat(start + offset)
36 |
37 |
def isoformat(dtime):
    """Render a naive UTC datetime as an ISO-8601 string with a Z suffix."""
    return dtime.isoformat() + "Z"
40 |
41 |
42 | def format_overrides(overrides):
43 | output = []
44 | format_str = "%-10s%-28s%-28s%-20s"
45 | output.append(format_str % ("ID:", "Start:", "End:", "User:"))
46 | for override in overrides:
47 | output.append(format_str % (
48 | override.id, override.start, override.end, override.user.name))
49 | return "\n".join(output)
50 |
51 |
def print_overrides(schedule):
    """Print editable overrides for the next two hours, or exit(1) if none.

    :param schedule: a pygerduty Schedule container with an `overrides`
        collection.
    """
    now = datetime.datetime.utcnow()
    since = isoformat(now)
    until = isoformat(now + datetime.timedelta(hours=2))
    overrides = schedule.overrides.list(
        editable=True, overflow=True, since=since, until=until)
    if not overrides:
        # print() call form keeps this script runnable on both Python 2 and 3
        # (the original py2-only print statements are a SyntaxError on py3).
        print("No Editable Overrides.")
        sys.exit(1)
    print(format_overrides(overrides))
62 |
63 |
def main():
    """Create, list, or remove schedule overrides from the command line.

    Requires --subdomain, --api_key, and --schedule, plus exactly one of:
    a positional time-string argument (creates an override), --list, or
    --remove <override_id>.
    """
    parser = optparse.OptionParser()

    parser.add_option("--list", default=False, action="store_true",
                      help="List editable overrides.")
    parser.add_option("--remove", default=None,
                      help="Remove from list of overrides.")

    parser.add_option("--user", default=getpass.getuser(),
                      help="User to create the override for.")
    parser.add_option("--schedule", default=None,
                      help="Schedule to add the override to.")
    parser.add_option("--api_key", default=None,
                      help="Integration API key.")
    parser.add_option("--subdomain", default=None,
                      help="PagerDuty subdomain.")

    options, args = parser.parse_args()

    time_string = None
    if len(args) >= 1:
        time_string = args[0]

    required_options = [options.subdomain, options.api_key, options.schedule]
    if not all(required_options):
        parser.print_help()
        sys.exit(1)

    # print() call form keeps this script runnable on both Python 2 and 3
    # (the original py2-only print statements are a SyntaxError on py3).
    if not any([time_string, options.list, options.remove]):
        print("Please provide either a time_string, --list, or --remove")
        parser.print_help()
        sys.exit(1)

    if (time_string and
            any([options.list, options.remove]) or
            all([options.list, options.remove])):

        print("Please provide a single time string argument "
              "OR action option (--list, --remove).")
        parser.print_help()
        sys.exit(1)

    pager = pygerduty.PagerDuty(options.subdomain, options.api_key)

    users = list(pager.users.list(query="%s" % options.user))
    if len(users) != 1:
        print("Expected 1 user. Found (%s)" % len(users))
        sys.exit(1)
    uid = users[0].id

    schedules = list(pager.schedules.list(query=options.schedule, limit=1))
    if len(schedules) != 1:
        print("Expected 1 schedule. Found (%s)" % len(schedules))
        sys.exit(1)
    schedule = schedules[0]

    if time_string:
        now, then = get_times(time_string)
        schedule.overrides.create(start=now, end=then, user_id=uid)
        print("Override created.")
    elif options.list:
        print_overrides(schedule)
    elif options.remove:
        schedule.overrides.delete(options.remove)
        print("Removed Override.")
    else:
        parser.print_help()
        sys.exit(1)
132 |
# Script entry point.
if __name__ == "__main__":
    main()
135 |
--------------------------------------------------------------------------------
/pygerduty/__init__.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import datetime
3 | import functools
4 | import json
5 | import time
6 | import re
7 |
8 | import six
9 | from six import string_types
10 | from six.moves import urllib
11 | from .exceptions import Error, IntegrationAPIError, BadRequest, NotFound
12 |
13 | __author__ = "Mike Cugini "
14 | from .version import __version__, version_info # noqa
15 |
# Extracts the log-entry id from an incident's trigger_details_html_url;
# Incident.get_trigger_log_entry() reads match.group('log_entry_id').
TRIGGER_LOG_ENTRY_RE = re.compile(
    r'log_entries/(?P<log_entry_id>[A-Z0-9]+)'
)

ISO8601_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
21 |
22 | # TODO:
23 | # Support for Log Entries
24 | # Support for Reports
25 |
26 |
class Collection(object):
    """Base class for all v1 API collections (incidents, users, ...).

    Subclasses customize behavior via class attributes:
      * ``name`` -- URL path segment (defaults to lowercased class name)
      * ``sname`` -- singular key used in request/response payloads
      * ``container`` -- Container subclass used to wrap each entity
      * ``paginated`` -- whether list() should auto-paginate
    """

    # Most v1 list endpoints are paginated; subclasses that are not
    # (e.g. Overrides, Entries) set this to False.
    paginated = True

    def __init__(self, pagerduty, base_container=None):
        # Derive name/sname/container from the class name unless the
        # subclass overrides them (e.g. ScheduleUsers sets name = 'users').
        self.name = getattr(self, "name", False) or _lower(self.__class__.__name__)
        self.sname = getattr(self, "sname", False) or _singularize(self.name)
        # Container subclass is looked up in module globals by the
        # capitalized singular name (e.g. 'incident' -> Incident).
        self.container = (
            getattr(self, "container", False) or globals()[_upper(self.sname)])

        self.pagerduty = pagerduty
        # When set, requests are nested under the parent's path,
        # e.g. users/<id>/contact_methods.
        self.base_container = base_container

    def create(self, **kwargs):
        """POST a new entity; returns it wrapped in the container class."""
        path = "{0}".format(self.name)
        if self.base_container:
            path = "{0}/{1}/{2}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name)

        data = {self.sname: {}}

        # requester_id needs to be up a level
        if "requester_id" in kwargs:
            data["requester_id"] = kwargs["requester_id"]
            del kwargs["requester_id"]

        data[self.sname] = kwargs

        response = self.pagerduty.request("POST", path, data=_json_dumper(data))
        return self.container(self, **response.get(self.sname, {}))

    def update(self, entity_id, **kwargs):
        """PUT changes to an existing entity; returns the updated container."""
        path = "{0}/{1}".format(self.name, entity_id)
        if self.base_container:
            path = "{0}/{1}/{2}/{3}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name, entity_id)

        data = {self.sname: {}}

        # requester_id needs to be up a level
        if "requester_id" in kwargs:
            data["requester_id"] = kwargs["requester_id"]
            del kwargs["requester_id"]

        data[self.sname] = kwargs

        response = self.pagerduty.request("PUT", path, data=_json_dumper(data))
        return self.container(self, **response.get(self.sname, {}))

    def _list_response(self, response):
        # Wrap each raw entity dict from the list payload in a container.
        entities = []
        for entity in response.get(self.name, []):
            entities.append(self.container(self, **entity))
        return entities

    def _list_no_pagination(self, **kwargs):
        """Issue a single GET and return one page of containers."""
        path = self.name
        if self.base_container:
            path = "{0}/{1}/{2}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name)

        # _suffix_path lets callers hit sub-endpoints such as .../on_call.
        suffix_path = kwargs.pop("_suffix_path", None)
        if suffix_path is not None:
            path += "/{0}".format(suffix_path)

        response = self.pagerduty.request("GET", path, query_params=kwargs)
        return self._list_response(response)

    def list(self, **kwargs):
        """Yield entities, auto-paginating unless the caller passes an
        explicit 'offset' or 'limit' (or the collection is unpaginated)."""
        # Some APIs are paginated. If they are, and the user isn't doing
        # pagination themselves, let's do it for them
        if not self.paginated or any(key in kwargs for key in ('offset', 'limit')):
            for i in self._list_no_pagination(**kwargs):
                yield i
        else:
            offset = 0
            limit = self.pagerduty.page_size
            seen_items = set()
            while True:
                these_kwargs = copy.copy(kwargs)
                these_kwargs.update({
                    'limit': limit,
                    'offset': offset,
                })
                this_paginated_result = self._list_no_pagination(**these_kwargs)
                if not this_paginated_result:
                    break
                for item in this_paginated_result:
                    # De-dupe by id across pages in case items shift
                    # between successive requests.
                    if item.id in seen_items:
                        continue
                    seen_items.add(item.id)
                    yield item
                offset += len(this_paginated_result)
                if len(this_paginated_result) > limit:
                    # sometimes pagerduty decides to ignore your limit and
                    # just return everything. it seems to only do this when
                    # you're near the last page.
                    break

    def count(self, **kwargs):
        """GET <name>/count and return the 'total' field (or None)."""
        path = "{0}/count".format(self.name)
        response = self.pagerduty.request("GET", path, query_params=kwargs)
        return response.get("total", None)

    def show(self, entity_id, **kwargs):
        """GET a single entity by id, wrapped in the container class."""
        path = "{0}/{1}".format(self.name, entity_id)
        if self.base_container:
            path = "{0}/{1}/{2}/{3}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name, entity_id)

        response = self.pagerduty.request(
            "GET", path, query_params=kwargs)
        # Some endpoints nest the entity under its singular name; others
        # return it at the top level of the response.
        if response.get(self.sname):
            return self.container(self, **response.get(self.sname, {}))
        else:
            return self.container(self, **response)

    def delete(self, entity_id):
        """DELETE an entity by id; returns the raw response."""
        path = "{0}/{1}".format(self.name, entity_id)
        if self.base_container:
            path = "{0}/{1}/{2}/{3}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name, entity_id)

        response = self.pagerduty.request("DELETE", path)
        return response
156 |
157 |
class MaintenanceWindows(Collection):
    """Maintenance windows with a type-filtered list endpoint and a flat
    (non-nested) update payload."""

    def list(self, **kwargs):
        """List windows; an optional 'type' kwarg is appended to the path."""
        if "type" in kwargs:
            path = "%s/%s" % (self.name, kwargs.pop("type"))
        else:
            path = self.name
        return self._list_response(
            self.pagerduty.request("GET", path, query_params=kwargs))

    def update(self, entity_id, **kwargs):
        """PUT kwargs directly (not nested under the singular name)."""
        result = self.pagerduty.request(
            "PUT", "%s/%s" % (self.name, entity_id), data=_json_dumper(kwargs))
        return self.container(self, **result.get(self.sname, {}))
173 |
174 |
class Incidents(Collection):
    """Incident collection supporting bulk updates."""

    def update(self, requester_id, *args):
        """PUT a batch of incident dicts on behalf of requester_id."""
        payload = {"requester_id": requester_id, self.name: args}
        result = self.pagerduty.request(
            "PUT", self.name, data=_json_dumper(payload))
        return self.container(self, **result.get(self.sname, {}))
181 |
182 |
class Services(Collection):
    """Service collection with enable/disable/key-rotation endpoints."""

    def disable(self, entity_id, requester_id):
        """PUT services/<id>/disable on behalf of requester_id."""
        body = _json_dumper({"requester_id": requester_id})
        return self.pagerduty.request(
            "PUT", "%s/%s/disable" % (self.name, entity_id), data=body)

    def enable(self, entity_id):
        """PUT services/<id>/enable with an empty body."""
        return self.pagerduty.request(
            "PUT", "%s/%s/enable" % (self.name, entity_id), data="")

    def regenerate_key(self, entity_id):
        """POST services/<id>/regenerate_key; returns the updated service."""
        result = self.pagerduty.request(
            "POST", "%s/%s/regenerate_key" % (self.name, entity_id), data="")
        return self.container(self, **result.get(self.sname, {}))
199 |
200 |
class Teams(Collection):
    """Teams collection; inherits default Collection behavior."""
    pass


class Alerts(Collection):
    """Alerts collection; inherits default Collection behavior."""
    pass


class Overrides(Collection):
    """Schedule overrides; the list endpoint is not paginated."""
    paginated = False


class Entries(Collection):
    """Schedule entries; the list endpoint is not paginated."""
    paginated = False
215 |
216 |
class EscalationPolicies(Collection):
    def on_call(self, **kwargs):
        # GET escalation_policies/on_call via Collection.list()'s
        # _suffix_path hook.
        return self.list(_suffix_path="on_call", **kwargs)
220 |
221 |
class EscalationRules(Collection):
    """Escalation rules nested under an escalation policy; not paginated."""

    paginated = False

    def update(self, entity_id, **kwargs):
        """PUT kwargs directly to the nested rule path."""
        parent = self.base_container
        path = "%s/%s/%s/%s" % (
            parent.collection.name, parent.id, self.name, entity_id)
        result = self.pagerduty.request("PUT", path, data=_json_dumper(kwargs))
        return self.container(self, **result.get(self.sname, {}))
231 |
232 |
class Schedules(Collection):
    def update(self, entity_id, **kwargs):
        """PUT a schedule; requires 'overflow' and 'schedule' kwargs."""
        body = {key: kwargs[key] for key in ("overflow", "schedule")}
        result = self.pagerduty.request(
            "PUT", "%s/%s" % (self.name, entity_id), data=_json_dumper(body))
        return self.container(self, **result.get(self.sname, {}))
240 |
241 |
class ScheduleUsers(Collection):
    """Users nested under a schedule."""
    # URL segment is 'users' even though the class name differs.
    name = 'users'
    paginated = False


class Users(Collection):
    def on_call(self, **kwargs):
        # GET users/on_call via Collection.list()'s _suffix_path hook.
        return self.list(_suffix_path="on_call", **kwargs)


class Restrictions(Collection):
    """Restrictions collection; inherits default Collection behavior."""
    pass


class NotificationRules(Collection):
    """A user's notification rules; not paginated."""
    paginated = False


class ContactMethods(Collection):
    """A user's contact methods; not paginated."""
    paginated = False


class EmailFilters(Collection):
    """A service's email filters; inherits default Collection behavior."""
    pass


class LogEntries(Collection):
    """Log entries collection; inherits default Collection behavior."""
    pass
270 |
271 |
class Notes(Collection):
    """Incident notes; unsupported operations raise NotImplementedError."""
    paginated = False

    def update(self, *args, **kwargs):
        # Deliberately unsupported for notes.
        raise NotImplementedError()

    def count(self, *args, **kwargs):
        raise NotImplementedError()

    def show(self, *args, **kwargs):
        raise NotImplementedError()

    def delete(self, *args, **kwargs):
        raise NotImplementedError()
286 |
287 |
class Container(object):
    """Base wrapper for a single API entity.

    The raw response dict is stored in ``_kwargs`` and exposed as
    attributes; nested dicts and lists are recursively wrapped in the
    matching Container subclass when one exists in module globals.
    """

    # Key used to pass attribute-rename maps (e.g. Schedule renames
    # "users" -> "schedule_users" to avoid clashing with its collection).
    ATTR_NAME_OVERRIDE_KEY = '_attr_name_override'

    def __init__(self, collection, **kwargs):
        # This class depends on the existence on the _kwargs attr.
        # Use object's __setattr__ to initialize.
        object.__setattr__(self, "_kwargs", {})

        self.collection = collection
        self.pagerduty = collection.pagerduty
        self._attr_overrides = kwargs.pop(Container.ATTR_NAME_OVERRIDE_KEY, None)

        def _check_kwarg(key, value):
            # Dict values become nested Containers; the subclass (and its
            # collection) are resolved from module globals by name.
            if isinstance(value, dict):
                value[Container.ATTR_NAME_OVERRIDE_KEY] = self._attr_overrides
                container = globals().get(_upper(_singularize(key)))
                if container is not None and issubclass(container, Container):
                    _collection = globals().get(_upper(_pluralize(key)),
                                                Collection)
                    return container(_collection(self.pagerduty), **value)
                else:
                    return Container(Collection(self.pagerduty), **value)
            return value

        for key, value in kwargs.items():
            if self._attr_overrides and key in self._attr_overrides:
                key = self._attr_overrides[key]
            if isinstance(value, list):
                # Wrap each list element, keyed by the singular name.
                self._kwargs[key] = []
                for item in value:
                    sname = _singularize(key)
                    self._kwargs[key].append(_check_kwarg(sname, item))
            else:
                self._kwargs[key] = _check_kwarg(key, value)

    def __getattr__(self, name):
        # Expose response fields as attributes.
        if name not in self._kwargs:
            raise AttributeError(name)
        return self._kwargs[name]

    def __setattr__(self, name, value):
        # Names not backed by response data are set as normal attributes;
        # known response fields update the underlying dict instead.
        if name not in self._kwargs:
            return object.__setattr__(self, name, value)
        self._kwargs[name] = value

    def __str__(self):
        attrs = ["{0}={1}".format(k, repr(v)) for k, v in self._kwargs.items()]
        return "<{0}: {1}>".format(self.__class__.__name__, ", ".join(attrs))

    def __repr__(self):
        return str(self)

    def to_json(self):
        """Convert back to a plain dict, undoing attribute-name overrides
        and recursively serializing nested Containers."""
        json_dict = {}
        overriden_attrs = dict()
        if self._attr_overrides:
            for key, value in self._attr_overrides.items():
                overriden_attrs[value] = key
        for key, value in self._kwargs.items():
            if key in overriden_attrs:
                key = overriden_attrs[key]
            if isinstance(value, Container):
                json_dict[key] = value.to_json()
            elif isinstance(value, list):
                json_dict[key] = []
                for v in value:
                    if isinstance(v, Container):
                        json_dict[key].append(v.to_json())
                    else:
                        json_dict[key].append(v)
            else:
                json_dict[key] = value
        return json_dict
361 |
362 |
class Incident(Container):
    """A single incident, with nested log-entry and note collections."""

    def __init__(self, *args, **kwargs):
        Container.__init__(self, *args, **kwargs)
        self.log_entries = LogEntries(self.pagerduty, self)
        self.notes = Notes(self.pagerduty, self)

    def _do_action(self, verb, requester_id, **kwargs):
        # PUT incidents/<id>/<verb> with requester_id folded into the body.
        payload = dict(kwargs)
        payload['requester_id'] = requester_id
        endpoint = '%s/%s/%s' % (self.collection.name, self.id, verb)
        return self.pagerduty.request('PUT', endpoint,
                                      data=_json_dumper(payload))

    def has_subject(self):
        """True when trigger_summary_data has a 'subject' attribute."""
        return hasattr(self.trigger_summary_data, 'subject')

    def resolve(self, requester_id):
        """Mark this incident resolved on behalf of requester_id."""
        self._do_action('resolve', requester_id=requester_id)

    def acknowledge(self, requester_id):
        """Acknowledge this incident on behalf of requester_id."""
        self._do_action('acknowledge', requester_id=requester_id)

    def snooze(self, requester_id, duration):
        """Snooze this incident for the given duration."""
        self._do_action('snooze', requester_id=requester_id, duration=duration)

    def get_trigger_log_entry(self, **kwargs):
        """Fetch the log entry referenced by trigger_details_html_url."""
        log_entry_id = TRIGGER_LOG_ENTRY_RE.search(
            self.trigger_details_html_url).group('log_entry_id')
        return self.log_entries.show(log_entry_id, **kwargs)

    def reassign(self, user_ids, requester_id):
        """Reassign this incident to a user or list of users

        :param user_ids: A non-empty list of user ids
        """
        if not user_ids:
            raise Error('Must pass at least one user id')
        self._do_action('reassign', requester_id=requester_id,
                        assigned_to_user=','.join(user_ids))
399 |
400 |
class Note(Container):
    """A note attached to an incident."""
    pass


class Alert(Container):
    """A single alert entity."""
    pass


class EmailFilter(Container):
    """An email filter attached to a service."""
    pass


class MaintenanceWindow(Container):
    """A maintenance window entity."""
    pass


class Override(Container):
    """A schedule override entity."""
    pass


class NotificationRule(Container):
    """A user's notification rule."""
    pass


class ContactMethod(Container):
    """A user's contact method."""
    pass


class EscalationPolicy(Container):
    """An escalation policy; exposes its nested escalation rules."""
    def __init__(self, *args, **kwargs):
        Container.__init__(self, *args, **kwargs)
        self.escalation_rules = EscalationRules(self.pagerduty, self)


class EscalationRule(Container):
    """A single rule within an escalation policy."""
    pass


class RuleObject(Container):
    """Generic container for 'rule_object' response payloads."""
    pass


class ScheduleLayer(Container):
    """One layer of a schedule's rotation."""
    pass
445 |
446 |
class Service(Container):
    """A service; exposes its nested email filters."""
    def __init__(self, *args, **kwargs):
        Container.__init__(self, *args, **kwargs)
        self.email_filters = EmailFilters(self.pagerduty, self)


class Schedule(Container):
    """A schedule; exposes overrides, users, and entries sub-collections."""
    def __init__(self, *args, **kwargs):
        # The json representation of Schedule has a field called
        # "users". Rename it to schedule_users to avoid conflict with
        # Users
        kwargs[Container.ATTR_NAME_OVERRIDE_KEY] = {"users": "schedule_users"}
        Container.__init__(self, *args, **kwargs)
        self.overrides = Overrides(self.pagerduty, self)
        self.users = ScheduleUsers(self.pagerduty, self)
        self.entries = Entries(self.pagerduty, self)


class ScheduleUser(Container):
    """A user as rendered inside a schedule payload."""
    pass


class Team(Container):
    """A team entity."""
    pass


class Restriction(Container):
    """A restriction entity."""
    pass


class User(Container):
    """A user; exposes notification-rule, contact-method, schedule,
    escalation-policy, and log-entry sub-collections."""
    def __init__(self, *args, **kwargs):
        Container.__init__(self, *args, **kwargs)
        self.notification_rules = NotificationRules(self.pagerduty, self)
        self.contact_methods = ContactMethods(self.pagerduty, self)
        self.schedules = Schedules(self.pagerduty, self)
        self.escalation_policies = EscalationPolicies(self.pagerduty, self)
        self.log_entries = LogEntries(self.pagerduty, self)


class Entry(Container):
    """A schedule entry."""
    pass


class LogEntry(Container):
    """An incident log entry."""
    pass
493 |
494 |
class PagerDuty(object):
    """Client for the legacy (v1) subdomain-scoped PagerDuty REST API and
    the generic Events API (trigger/acknowledge/resolve)."""

    INTEGRATION_API_URL =\
        "https://events.pagerduty.com/generic/2010-04-15/create_event.json"

    def __init__(self, subdomain, api_token, timeout=10, max_403_retries=0,
                 page_size=25, proxies=None, parse_datetime=False):
        """
        :param subdomain: Account subdomain ("acme" for acme.pagerduty.com).
        :param api_token: REST API token sent with every request.
        :param timeout: Socket timeout in seconds for each HTTP call.
        :param max_403_retries: Number of times a 403 response is retried
            (with linear backoff) before being re-raised.
        :param page_size: Default page size for paginated collections.
        :param proxies: Optional mapping for urllib's ProxyHandler.
        :param parse_datetime: When True, ISO8601 strings in responses are
            decoded into datetime objects.
        """
        self.api_token = api_token
        self.subdomain = subdomain
        self._host = "{0}.pagerduty.com".format(subdomain)
        self._api_base = "https://{0}/api/v1/".format(self._host)
        self.timeout = timeout
        self.max_403_retries = max_403_retries
        self.page_size = page_size

        self.json_loader = json.loads
        if parse_datetime:
            self.json_loader = _json_loader

        handlers = []
        if proxies:
            handlers.append(urllib.request.ProxyHandler(proxies))
        self.opener = urllib.request.build_opener(*handlers)

        # Collections
        self.incidents = Incidents(self)
        self.alerts = Alerts(self)
        self.schedules = Schedules(self)
        self.escalation_policies = EscalationPolicies(self)
        self.users = Users(self)
        self.services = Services(self)
        self.maintenance_windows = MaintenanceWindows(self)
        self.teams = Teams(self)
        self.log_entries = LogEntries(self)

    def create_event(self, service_key, description, event_type,
                     details, incident_key, **kwargs):
        """Send one event to the Events API and return its incident key.

        :raises IntegrationAPIError: if the API reports a non-success
            status.
        """
        # client/client_url/contexts only apply to "trigger" events; pull
        # them out of kwargs so the other event types can omit them.
        client = kwargs.pop('client', None)
        client_url = kwargs.pop('client_url', None)
        contexts = kwargs.pop('contexts', None)

        headers = {
            "Content-type": "application/json",
        }

        data = {
            "service_key": service_key,
            "event_type": event_type,
            "description": description,
            "details": details,
            "incident_key": incident_key,
            "client": client,
            "client_url": client_url,
            "contexts": contexts,
        }

        request = urllib.request.Request(PagerDuty.INTEGRATION_API_URL,
                                         data=_json_dumper(data).encode('utf-8'),
                                         headers=headers)
        response = self.execute_request(request)

        if response["status"] != "success":
            raise IntegrationAPIError(response["message"], event_type)
        return response["incident_key"]

    def resolve_incident(self, service_key, incident_key,
                         description=None, details=None):
        """ Causes the referenced incident to enter resolved state.
        Send a resolve event when the problem that caused the initial
        trigger has been fixed.
        """

        return self.create_event(service_key, description, "resolve",
                                 details, incident_key)

    def acknowledge_incident(self, service_key, incident_key,
                             description=None, details=None):
        """ Causes the referenced incident to enter the acknowledged state.
        Send an acknowledge event when someone is presently working on the
        incident.
        """

        return self.create_event(service_key, description, "acknowledge",
                                 details, incident_key)

    def trigger_incident(self, service_key, description,
                         incident_key=None, details=None,
                         client=None, client_url=None, contexts=None):
        """ Report a new or ongoing problem. When PagerDuty receives a trigger,
        it will either open a new incident, or add a new log entry to an
        existing incident.
        """

        return self.create_event(service_key, description, "trigger",
                                 details, incident_key,
                                 client=client, client_url=client_url, contexts=contexts)

    def execute_request(self, request, retry_count=0):
        """Perform *request* and return the decoded JSON body (or None for
        a non-JSON/empty body). 403s are retried up to max_403_retries.

        :raises BadRequest: on HTTP 400, with the decoded error payload.
        :raises NotFound: on HTTP 404.
        :raises urllib.error.HTTPError: for other unhandled statuses.
        """
        try:
            response = (self.opener.open(request, timeout=self.timeout).
                        read().decode("utf-8"))
        except urllib.error.HTTPError as err:
            # Floor division is required here: with Python 3's true
            # division, 2xx codes other than 200 (e.g. 201 Created) did
            # not compare equal to 2 and were wrongly re-raised.
            if err.code // 100 == 2:
                response = err.read().decode("utf-8")
            elif err.code == 400:
                raise BadRequest(self.json_loader(err.read().decode("utf-8")))
            elif err.code == 403:
                if retry_count < self.max_403_retries:
                    # Linear backoff: 1s, 2s, 3s, ...
                    time.sleep(1 * (retry_count + 1))
                    return self.execute_request(request, retry_count + 1)
                else:
                    raise
            elif err.code == 404:
                raise NotFound("URL ({0}) Not Found.".format(
                    request.get_full_url()))
            else:
                raise

        try:
            response = self.json_loader(response)
        except ValueError:
            response = None

        return response

    @staticmethod
    def _process_query_params(query_params):
        """Encode query params, expanding iterable values into repeated
        "key[]" parameters as the v1 API expects."""
        new_qp = []
        for key, value in query_params.items():
            if isinstance(value, (list, set, tuple)):
                for elem in value:
                    new_qp.append(("{0}[]".format(key), elem))
            else:
                new_qp.append((key, value))

        return urllib.parse.urlencode(new_qp)

    def request(self, method, path, query_params=None, data=None,
                extra_headers=None):
        """Issue an authenticated request against the v1 REST API and
        return the decoded JSON response."""
        auth = "Token token={0}".format(self.api_token)
        headers = {
            "Content-type": "application/json",
            "Authorization": auth
        }

        if extra_headers:
            headers.update(extra_headers)

        if query_params is not None:
            query_params = self._process_query_params(query_params)

        url = urllib.parse.urljoin(self._api_base, path)

        if query_params:
            url += "?{0}".format(query_params)

        if isinstance(data, six.text_type):
            data = data.encode("utf-8")

        request = urllib.request.Request(url, data=data, headers=headers)
        # urllib only knows GET/POST natively; force the verb we want.
        request.get_method = lambda: method.upper()

        return self.execute_request(request)
662 |
663 |
664 | def _lower(string):
665 | """Custom lower string function.
666 |
667 | Examples:
668 | FooBar -> foo_bar
669 | """
670 | if not string:
671 | return ""
672 |
673 | new_string = [string[0].lower()]
674 | for char in string[1:]:
675 | if char.isupper():
676 | new_string.append("_")
677 | new_string.append(char.lower())
678 |
679 | return "".join(new_string)
680 |
681 |
682 | def _upper(string):
683 | """Custom upper string function.
684 |
685 | Examples:
686 | foo_bar -> FooBar
687 | """
688 | return string.title().replace("_", "")
689 |
690 |
691 | def _singularize(string):
692 | """Hacky singularization function."""
693 |
694 | if string.endswith("ies"):
695 | return string[:-3] + "y"
696 | if string.endswith("s"):
697 | return string[:-1]
698 | return string
699 |
700 |
701 | def _pluralize(string):
702 | """Hacky pluralization function."""
703 |
704 | if string.endswith("y"):
705 | return string[:-1] + "ies"
706 | if not string.endswith("s"):
707 | return string + "s"
708 | return string
709 |
710 |
711 | class _DatetimeEncoder(json.JSONEncoder):
712 | def default(self, obj):
713 | if isinstance(obj, (datetime.date, datetime.datetime)):
714 | return obj.strftime(ISO8601_FORMAT)
715 | super(_DatetimeEncoder, self).default(obj)
716 |
717 |
def _datetime_decoder(obj):
    """json object_hook that converts ISO8601 string values to
    datetime objects in place; non-matching strings are left alone."""
    for key in obj:
        value = obj[key]
        if not isinstance(value, string_types):
            continue
        try:
            obj[key] = datetime.datetime.strptime(value, ISO8601_FORMAT)
        except ValueError:
            # Not a timestamp; keep the original string.
            pass
    return obj
726 |
727 |
# Module-level JSON helpers: dump with datetime support, and load with
# ISO8601 strings converted back into datetime objects.
_json_dumper = functools.partial(json.dumps, cls=_DatetimeEncoder)
_json_loader = functools.partial(json.loads, object_hook=_datetime_decoder)
730 |
--------------------------------------------------------------------------------
/pygerduty/common.py:
--------------------------------------------------------------------------------
1 | # Requester Module used by REST API module as well as EVENTS API module.
2 | import datetime
3 | import functools
4 | import json
5 | from .exceptions import Error, IntegrationAPIError, BadRequest, NotFound
6 | from six import string_types
7 | from six.moves import urllib
8 |
9 | ISO8601_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
10 |
11 |
class Requester(object):
    """Shared HTTP layer used by both the REST and Events API clients."""

    def __init__(self, timeout=10, proxies=None, parse_datetime=False):
        """
        :param timeout: Socket timeout in seconds for each request.
        :param proxies: Optional proxy mapping for urllib's ProxyHandler.
        :param parse_datetime: When True, ISO8601 strings in JSON
            responses are decoded into datetime objects.
        """
        self.timeout = timeout
        self.json_loader = json.loads

        if parse_datetime:
            self.json_loader = _json_loader

        handlers = []
        if proxies:
            handlers.append(urllib.request.ProxyHandler(proxies))
        self.opener = urllib.request.build_opener(*handlers)

    def execute_request(self, request):
        """Perform *request* and return the decoded JSON body, with any
        "self" keys renamed to "self_" (see clean_response). A non-JSON
        or empty body yields None.

        :raises BadRequest: on HTTP 400, with the decoded error payload.
        :raises NotFound: on HTTP 404.
        :raises urllib.error.HTTPError: for any other non-2xx status.
        """
        try:
            response = (self.opener.open(request, timeout=self.timeout).
                        read().decode("utf-8"))
        except urllib.error.HTTPError as err:
            # Floor division is required here: with Python 3's true
            # division, 2xx codes other than 200 (e.g. 201 Created) did
            # not compare equal to 2 and were wrongly re-raised.
            if err.code // 100 == 2:
                response = err.read().decode("utf-8")
            elif err.code == 400:
                raise BadRequest(self.json_loader(err.read().decode("utf-8")))
            elif err.code == 403:
                raise
            elif err.code == 404:
                raise NotFound("URL ({0}) Not Found.".format(
                    request.get_full_url()))
            elif err.code == 429:
                # Rate limited; surface the HTTPError to the caller.
                raise
            else:
                raise

        try:
            response = self.json_loader(response)
        except ValueError:
            response = None

        response = clean_response(response)

        return response
53 |
54 |
def clean_response(response):
    '''Recursively rename every dict key "self" to "self_" (in place)
    so the field does not collide with Python's "self". Returns the
    same object for convenience.'''
    if type(response) is list:
        for item in response:
            clean_response(item)
    elif type(response) is dict:
        # Iterate over a snapshot of the keys since we mutate the dict.
        for key in list(response):
            if key == 'self':
                response['self_'] = response.pop('self')
                clean_response(response['self_'])
            else:
                clean_response(response[key])
    return response
70 |
71 |
72 | def _lower(string):
73 | """Custom lower string function.
74 | Examples:
75 | FooBar -> foo_bar
76 | """
77 | if not string:
78 | return ""
79 |
80 | new_string = [string[0].lower()]
81 | for char in string[1:]:
82 | if char.isupper():
83 | new_string.append("_")
84 | new_string.append(char.lower())
85 |
86 | return "".join(new_string)
87 |
88 |
89 | def _upper(string):
90 | """Custom upper string function.
91 | Examples:
92 | foo_bar -> FooBar
93 | """
94 | return string.title().replace("_", "")
95 |
96 |
97 | def _singularize(string):
98 | """Hacky singularization function."""
99 |
100 | if string.endswith("ies"):
101 | return string[:-3] + "y"
102 | if string.endswith("s"):
103 | return string[:-1]
104 | return string
105 |
106 |
107 | def _pluralize(string):
108 | """Hacky pluralization function."""
109 |
110 | if string.endswith("y"):
111 | return string[:-1] + "ies"
112 | if not string.endswith("s"):
113 | return string + "s"
114 | return string
115 |
116 |
117 | class _DatetimeEncoder(json.JSONEncoder):
118 | def default(self, obj):
119 | if isinstance(obj, (datetime.date, datetime.datetime)):
120 | return obj.strftime(ISO8601_FORMAT)
121 | super(_DatetimeEncoder, self).default(obj)
122 |
123 |
def _datetime_decoder(obj):
    """json object_hook that converts ISO8601 string values to
    datetime objects in place; non-matching strings are left alone."""
    for key in obj:
        value = obj[key]
        if not isinstance(value, string_types):
            continue
        try:
            obj[key] = datetime.datetime.strptime(value, ISO8601_FORMAT)
        except ValueError:
            # Not a timestamp; keep the original string.
            pass
    return obj
132 |
133 |
# Module-level JSON helpers: dump with datetime support, and load with
# ISO8601 strings converted back into datetime objects.
_json_dumper = functools.partial(json.dumps, cls=_DatetimeEncoder)
_json_loader = functools.partial(json.loads, object_hook=_datetime_decoder)
136 |
--------------------------------------------------------------------------------
/pygerduty/events.py:
--------------------------------------------------------------------------------
1 | # Event module for pygerduty version 2. These methods are compatible with
2 | # Pagerduty Events API.
3 |
4 | from six.moves import urllib
5 | from .exceptions import Error, IntegrationAPIError, BadRequest, NotFound
6 | from .common import (
7 | _json_dumper,
8 | Requester,
9 | )
10 |
# Generic (legacy) Events API endpoint shared by trigger/acknowledge/resolve.
INTEGRATION_API_URL =\
    "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
13 |
14 |
class Events(object):
    """Client for the PagerDuty Events API, bound to one service key."""

    def __init__(self, service_key, requester=None):
        """
        :param service_key: Integration (service) key events are sent to.
        :param requester: Optional Requester instance; a default one is
            created when omitted.
        """
        self.service_key = service_key
        if requester is None:
            self.requester = Requester()
        else:
            self.requester = requester

    def create_event(self, description, event_type,
                     details, incident_key, **kwargs):
        """Send one event and return the incident key PagerDuty assigned.

        :raises IntegrationAPIError: when the API response status is not
            "success".
        """
        # client/client_url/contexts only apply to "trigger" events; pull
        # them out of kwargs so the other event types can omit them.
        client = kwargs.pop('client', None)
        client_url = kwargs.pop('client_url', None)
        contexts = kwargs.pop('contexts', None)

        headers = {
            "Content-type": "application/json",
            "Accept": "application/vnd.pagerduty+json;version=2",
        }

        data = {
            "service_key": self.service_key,
            "event_type": event_type,
            "description": description,
            "details": details,
            "incident_key": incident_key,
            "client": client,
            "client_url": client_url,
            "contexts": contexts,
        }

        request = urllib.request.Request(INTEGRATION_API_URL,
                                         data=_json_dumper(data).encode('utf-8'),
                                         headers=headers)

        response = self.requester.execute_request(request)

        # Idiom fix: "!=" instead of "not ... == ...".
        if response["status"] != "success":
            raise IntegrationAPIError(response["message"], event_type)
        return response["incident_key"]

    def resolve_incident(self, incident_key,
                         description=None, details=None):
        """ Causes the referenced incident to enter resolved state.
        Send a resolve event when the problem that caused the initial
        trigger has been fixed.
        """

        return self.create_event(description, "resolve",
                                 details, incident_key)

    def acknowledge_incident(self, incident_key,
                             description=None, details=None):
        """ Causes the referenced incident to enter the acknowledged state.
        Send an acknowledge event when someone is presently working on the
        incident.
        """

        return self.create_event(description, "acknowledge",
                                 details, incident_key)

    def trigger_incident(self, description, incident_key=None, details=None,
                         client=None, client_url=None, contexts=None):
        """ Report a new or ongoing problem. When PagerDuty receives a trigger,
        it will either open a new incident, or add a new log entry to an
        existing incident.
        """

        return self.create_event(description, "trigger",
                                 details, incident_key,
                                 client=client, client_url=client_url, contexts=contexts)
87 |
--------------------------------------------------------------------------------
/pygerduty/exceptions.py:
--------------------------------------------------------------------------------
class Error(Exception):
    """Base class for all pygerduty exceptions."""
    pass
3 |
4 |
class IntegrationAPIError(Error):
    """Raised when the Events API reports a failure creating an event."""

    def __init__(self, message, event_type):
        self.message = message
        self.event_type = event_type

    def __str__(self):
        return "Creating {0} event failed: {1}".format(
            self.event_type, self.message)
13 |
14 |
class BadRequest(Error):
    """Raised for HTTP 400 responses; carries the decoded error payload."""

    def __init__(self, payload, *args, **kwargs):
        # Error responses don't always contain all fields, so fall back
        # to sane defaults.
        error = payload.get("error", {})
        self.code = error.get('code', 99999)
        self.errors = error.get('errors', [])
        self.message = error.get('message', str(payload))

        Error.__init__(self, *args)

    def __str__(self):
        return "{0} ({1}): {2}".format(
            self.message, self.code, self.errors)
28 |
29 |
class NotFound(Error):
    """Raised when the API returns HTTP 404 for a request."""
    pass
32 |
--------------------------------------------------------------------------------
/pygerduty/v2.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import re
3 | import six
4 | from six.moves import urllib
5 | from .exceptions import Error, IntegrationAPIError, BadRequest, NotFound
6 | from .common import (
7 | Requester,
8 | _lower,
9 | _upper,
10 | _singularize,
11 | _pluralize,
12 | _json_dumper,
13 | )
14 |
15 | __author__ = "Mike Cugini "
16 | from .version import __version__, version_info # noqa
17 |
# Extracts the log entry id from an incident's trigger_details_html_url.
# The group name was missing ("(?P[A-Z0-9]+)" is not a valid pattern),
# which also broke the later match.group('log_entry_id') lookup in
# Incident.get_trigger_log_entry.
TRIGGER_LOG_ENTRY_RE = re.compile(
    r'log_entries/(?P<log_entry_id>[A-Z0-9]+)'
)
21 |
22 | # TODO:
23 | # Support for Log Entries
24 | # Support for Reports
25 |
26 |
class Collection(object):
    """Base class for v2 REST collections (e.g. /incidents, /users).

    Subclasses may override three class attributes: ``name`` (URL path
    segment), ``sname`` (singular payload key) and ``container`` (the
    Container subclass wrapping returned entities); by default they are
    derived from the subclass name.
    """
    # Most endpoints page their results; non-paginated subclasses set
    # this to False.
    paginated = True
    # Query parameters merged into every list()/show() call.
    default_query_params = {}

    def __init__(self, pagerduty, base_container=None):
        # Fall back to name-derived defaults unless the subclass set them
        # (e.g. ScheduleUsers sets name = 'users').
        self.name = getattr(self, "name", False) or _lower(self.__class__.__name__)
        self.sname = getattr(self, "sname", False) or _singularize(self.name)
        self.container = (
            getattr(self, "container", False) or globals()[_upper(self.sname)])

        self.pagerduty = pagerduty
        # When set, requests are nested under the parent resource's URL.
        self.base_container = base_container

    def create(self, **kwargs):
        """POST a new entity built from kwargs; *_id/*_ids kwargs are
        expanded into API reference objects. Returns the created
        container."""
        path = "{0}".format(self.name)
        if self.base_container:
            path = "{0}/{1}/{2}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name)

        data = {self.sname: {}}

        extra_headers = {}
        # requester_id travels in the From header, not the payload.
        if "requester_id" in kwargs:
            extra_headers["From"] = kwargs.pop("requester_id")
        new_kwargs = Collection.process_kwargs(kwargs)
        data[self.sname] = new_kwargs
        response = self.pagerduty.request("POST", path, data=_json_dumper(data), extra_headers=extra_headers)
        return self.container(self, **response.get(self.sname, {}))

    @staticmethod
    def process_kwargs(kwargs):
        """Expand foo_id/foo_ids kwargs into reference objects; other
        kwargs pass through unchanged."""
        new_kwargs = {}
        for kwarg_key, kwarg_value in kwargs.items():
            if kwarg_key.endswith('_id'):
                new_key = Collection.cut_suffix(kwarg_key)
                new_kwargs[new_key] = Collection.id_to_obj(new_key, kwarg_value)
            elif kwarg_key.endswith('_ids'):
                new_key = Collection.cut_suffix(kwarg_key)
                new_kwargs[_pluralize(new_key)] = Collection.ids_to_objs(new_key, kwarg_value)
            else:
                new_kwargs[kwarg_key] = kwarg_value
        return new_kwargs

    @staticmethod
    def cut_suffix(key):
        """Strip a trailing _id/_ids from key, if present."""
        if key.endswith('_id'):
            return key[:-3]
        elif key.endswith('_ids'):
            return key[:-4]
        else:
            return key

    @staticmethod
    def id_to_obj(key, value):
        """Build a single {"id": ..., "type": key} reference object."""
        return {
            "id": value,
            "type": key
        }

    @staticmethod
    def ids_to_objs(key, value):
        """Build a list of reference objects, one per id in value."""
        new_kwargs = []
        for v in value:
            new_kwarg = Collection.id_to_obj(key, v)
            new_kwargs.append(new_kwarg)
        return new_kwargs

    def _apply_default_kwargs(self, kwargs):
        # Merge default_query_params without overriding caller values.
        for k, v in self.default_query_params.items():
            if k not in kwargs:
                kwargs[k] = v
        return kwargs

    def update(self, entity_id, **kwargs):
        """PUT the given fields to the entity and return the updated
        container. A requester_id kwarg is sent via the From header."""
        path = "{0}/{1}".format(self.name, entity_id)
        if self.base_container:
            path = "{0}/{1}/{2}/{3}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name, entity_id)

        data = {self.sname: {}}

        extra_headers = {}
        if "requester_id" in kwargs:
            extra_headers["From"] = kwargs.pop("requester_id")

        data[self.sname] = kwargs

        response = self.pagerduty.request("PUT", path, data=_json_dumper(data),
                                          extra_headers=extra_headers)
        return self.container(self, **response.get(self.sname, {}))

    def _list_response(self, response):
        # Wrap each returned JSON entity in this collection's container.
        entities = []
        for entity in response.get(self.name, []):
            entities.append(self.container(self, **entity))
        return entities

    def _list_no_pagination(self, **kwargs):
        """Issue a single GET and return the wrapped entities; an
        optional _suffix_path kwarg is appended to the URL."""
        path = self.name
        if self.base_container:
            path = "{0}/{1}/{2}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name)
        suffix_path = kwargs.pop("_suffix_path", None)

        if suffix_path is not None:
            path += "/{0}".format(suffix_path)

        response = self.pagerduty.request("GET", path, query_params=kwargs)

        return self._list_response(response)

    def list(self, **kwargs):
        """Yield entities, transparently paginating unless the caller
        passes offset/limit themselves or the collection is not
        paginated."""
        kwargs = self._apply_default_kwargs(kwargs)
        # Some APIs are paginated. If they are, and the user isn't doing
        # pagination themselves, let's do it for them
        if not self.paginated or any(key in kwargs for key in ('offset', 'limit')):
            for i in self._list_no_pagination(**kwargs):
                yield i

        else:
            offset = 0
            limit = self.pagerduty.page_size
            # Track yielded ids so items that shift between pages are not
            # produced twice.
            seen_items = set()
            while True:
                these_kwargs = copy.copy(kwargs)
                these_kwargs.update({
                    'limit': limit,
                    'offset': offset,
                })
                this_paginated_result = self._list_no_pagination(**these_kwargs)

                if not this_paginated_result:
                    break

                for item in this_paginated_result:
                    if item.id in seen_items:
                        continue
                    seen_items.add(item.id)
                    yield item

                offset += len(this_paginated_result)
                if len(this_paginated_result) > limit:
                    # sometimes pagerduty decides to ignore your limit and
                    # just return everything. it seems to only do this when
                    # you're near the last page.
                    break

    def count(self, **kwargs):
        """Return the total number of entities matching kwargs (or None
        when the response lacks a total)."""
        path = "{0}/count".format(self.name)
        response = self.pagerduty.request("GET", path, query_params=kwargs)
        return response.get("total", None)

    def show(self, entity_id, **kwargs):
        """GET a single entity by id, wrapped in this collection's
        container."""
        kwargs = self._apply_default_kwargs(kwargs)
        path = "{0}/{1}".format(self.name, entity_id)
        if self.base_container:
            path = "{0}/{1}/{2}/{3}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name, entity_id)
        response = self.pagerduty.request(
            "GET", path, query_params=kwargs)

        # Some endpoints wrap the entity under its singular name, others
        # return it at the top level.
        if response.get(self.sname):
            return self.container(self, **response.get(self.sname, {}))
        else:
            return self.container(self, **response)

    def delete(self, entity_id):
        """DELETE the entity by id and return the raw response."""
        path = "{0}/{1}".format(self.name, entity_id)
        if self.base_container:
            path = "{0}/{1}/{2}/{3}".format(
                self.base_container.collection.name,
                self.base_container.id, self.name, entity_id)

        response = self.pagerduty.request("DELETE", path)
        return response
206 |
207 |
class MaintenanceWindows(Collection):
    """Maintenance windows, with listing by type sub-path."""

    def list(self, **kwargs):
        """List windows; a "type" kwarg selects a sub-resource path
        (e.g. "future") instead of being sent as a query parameter."""
        path = self.name
        if "type" in kwargs:
            path = "{0}/{1}".format(self.name, kwargs.pop("type"))

        response = self.pagerduty.request("GET", path, query_params=kwargs)
        return self._list_response(response)

    def update(self, entity_id, **kwargs):
        """Update a window; kwargs are sent as the raw request body."""
        path = "{0}/{1}".format(self.name, entity_id)
        response = self.pagerduty.request("PUT", path, data=_json_dumper(kwargs))
        return self.container(self, **response.get(self.sname, {}))
223 |
224 |
class Incidents(Collection):
    """Incident collection with bulk-update support."""

    def update(self, requester_id, *args):
        """Bulk-update incidents; *args are incident reference dicts and
        requester_id is the email placed in the From header."""
        extra_headers = {"From": requester_id}
        payload = {self.name: args}
        response = self.pagerduty.request(
            "PUT", "{0}".format(self.name), data=_json_dumper(payload),
            extra_headers=extra_headers)
        return self.container(self, **response.get(self.sname, {}))
232 |
233 |
class Services(Collection):
    """Service collection with enable/disable and key-rotation helpers."""

    def disable(self, entity_id, requester_id):
        """Set the service status to "disabled" on behalf of requester_id."""
        path = "{0}/{1}".format(self.name, entity_id)
        headers = {"From": requester_id}
        payload = {"service": {"status": "disabled"}}
        return self.pagerduty.request("PUT", path, data=_json_dumper(payload),
                                      extra_headers=headers)

    def enable(self, entity_id):
        """Set the service status back to "active"."""
        path = "{0}/{1}".format(self.name, entity_id)
        payload = {"service": {"status": "active"}}
        return self.pagerduty.request("PUT", path, data=_json_dumper(payload))

    def regenerate_key(self, entity_id):
        """Generate a new integration key and return the updated service."""
        path = "{0}/{1}/regenerate_key".format(self.name, entity_id)
        response = self.pagerduty.request("POST", path, data="")
        return self.container(self, **response.get(self.sname, {}))
252 |
253 |
class Teams(Collection):
    """Collection of teams."""
    pass
256 |
257 |
class Alerts(Collection):
    """Collection of alerts."""
    pass
260 |
261 |
class Overrides(Collection):
    """Schedule overrides; this endpoint is not paginated."""
    paginated = False
264 |
265 |
class EscalationPolicies(Collection):
    """Collection of escalation policies."""
    pass
268 |
269 |
class EscalationRules(Collection):
    """Escalation rules nested under an escalation policy (not paginated)."""
    paginated = False

    def update(self, entity_id, **kwargs):
        """PUT the given fields to the nested escalation-rule resource."""
        parent = self.base_container
        path = "{0}/{1}/{2}/{3}".format(
            parent.collection.name, parent.id, self.name, entity_id)
        response = self.pagerduty.request("PUT", path, data=_json_dumper(kwargs))
        return self.container(self, **response.get(self.sname, {}))
279 |
280 |
class Schedules(Collection):
    """Collection of schedules."""

    def update(self, entity_id, **kwargs):
        """Update a schedule; requires "overflow" and "schedule" kwargs,
        which form the request body."""
        path = "{0}/{1}".format(self.name, entity_id)
        payload = {
            "overflow": kwargs["overflow"],
            "schedule": kwargs["schedule"],
        }
        response = self.pagerduty.request("PUT", path, data=_json_dumper(payload))
        return self.container(self, **response.get(self.sname, {}))
288 |
289 |
class ScheduleUsers(Collection):
    """This class exists because Users returned from a Schedule query are not
    paginated, whereas responses for Users class are. This causes a pagination
    bug if removed."""
    # Reuse the /users path segment instead of the derived "schedule_users".
    name = 'users'
    paginated = False
296 |
297 |
class Users(Collection):
    """Collection of users (paginated)."""
    pass
300 |
301 |
class Restrictions(Collection):
    """Collection of schedule-layer restrictions."""
    pass
304 |
305 |
class NotificationRules(Collection):
    """A user's notification rules; this endpoint is not paginated."""
    paginated = False
308 |
309 |
class ContactMethods(Collection):
    """A user's contact methods; this endpoint is not paginated."""
    paginated = False
312 |
313 |
class EmailFilters(Collection):
    """Collection of service email filters."""
    pass
316 |
317 |
class Extensions(Collection):
    """Collection of extensions."""
    pass
320 |
321 |
class Addons(Collection):
    """Collection of add-ons."""
    pass
324 |
325 |
class Oncalls(Collection):
    """Collection of on-call records."""
    pass
328 |
329 |
class LogEntries(Collection):
    # https://support.pagerduty.com/v1/docs/retrieve-trigger-event-data-using-the-api#section-how-to-obtain-the-data # noqa
    # Always include channel data so trigger log entries carry the raw
    # event payload.
    default_query_params = {'include': ['channels']}
333 |
334 |
class Notes(Collection):
    """Incident notes. The API only supports creating and listing notes,
    so every other Collection operation is explicitly disabled."""
    paginated = False

    def update(self, *args, **kwargs):
        raise NotImplementedError()

    def count(self, *args, **kwargs):
        raise NotImplementedError()

    def show(self, *args, **kwargs):
        raise NotImplementedError()

    def delete(self, *args, **kwargs):
        raise NotImplementedError()
349 |
350 |
class Container(object):
    """Base wrapper turning API JSON objects into attribute-accessible
    Python objects, recursively converting nested dicts and lists."""

    # kwargs key used to pass a {json_name: attr_name} rename map down
    # into nested containers.
    ATTR_NAME_OVERRIDE_KEY = '_attr_name_override'

    def __init__(self, collection, **kwargs):
        # This class depends on the existence of the _kwargs attr.
        # Use object's __setattr__ to initialize, since our own
        # __setattr__ consults _kwargs.
        object.__setattr__(self, "_kwargs", {})

        self.collection = collection
        self.pagerduty = collection.pagerduty
        self._attr_overrides = kwargs.pop(Container.ATTR_NAME_OVERRIDE_KEY, None)

        def _check_kwarg(key, value):
            # Wrap nested dicts in the matching Container subclass when
            # one exists (looked up by name in module globals), else in a
            # plain Container; scalars pass through unchanged.
            if isinstance(value, dict):
                value[Container.ATTR_NAME_OVERRIDE_KEY] = self._attr_overrides
                container = globals().get(_upper(_singularize(key)))
                if container is not None and issubclass(container, Container):
                    _collection = globals().get(_upper(_pluralize(key)),
                                                Collection)
                    return container(_collection(self.pagerduty), **value)
                else:
                    return Container(Collection(self.pagerduty), **value)
            return value

        for key, value in kwargs.items():
            if self._attr_overrides and key in self._attr_overrides:
                key = self._attr_overrides[key]
            if isinstance(value, list):
                # Wrap each list element under the singularized key name.
                self._kwargs[key] = []
                for item in value:
                    sname = _singularize(key)
                    self._kwargs[key].append(_check_kwarg(sname, item))
            else:
                self._kwargs[key] = _check_kwarg(key, value)

    def __getattr__(self, name):
        # Unknown attributes fall back to the wrapped JSON fields.
        if name not in self._kwargs:
            raise AttributeError(name)
        return self._kwargs[name]

    def __setattr__(self, name, value):
        # Plain attributes go on the instance; JSON-backed fields are
        # updated in _kwargs so to_json() reflects the change.
        if name not in self._kwargs:
            return object.__setattr__(self, name, value)
        self._kwargs[name] = value

    def __str__(self):
        attrs = ["{0}={1}".format(k, repr(v)) for k, v in self._kwargs.items()]
        return "<{0}: {1}>".format(self.__class__.__name__, ", ".join(attrs))

    def __repr__(self):
        return str(self)

    def to_json(self):
        """Convert back to a JSON-compatible dict, undoing any attribute
        name overrides applied at construction time."""
        json_dict = {}
        overriden_attrs = dict()
        if self._attr_overrides:
            for key, value in self._attr_overrides.items():
                overriden_attrs[value] = key
        for key, value in self._kwargs.items():
            if key in overriden_attrs:
                key = overriden_attrs[key]
            if isinstance(value, Container):
                json_dict[key] = value.to_json()
            elif isinstance(value, list):
                json_dict[key] = []
                for v in value:
                    if isinstance(v, Container):
                        json_dict[key].append(v.to_json())
                    else:
                        json_dict[key].append(v)
            else:
                json_dict[key] = value
        return json_dict
424 |
425 |
class Extension(Container):
    """An extension resource."""
    pass
428 |
429 |
class Addon(Container):
    """An add-on resource; install/delete/list are not supported through
    this client."""

    def install(self, *args, **kwargs):
        raise NotImplementedError()

    def delete(self, *args, **kwargs):
        raise NotImplementedError()

    def list(self, *args, **kwargs):
        raise NotImplementedError()
439 |
440 |
class Oncall(Container):
    """An on-call record. The API returns no id for on-calls, so a
    synthetic "user:schedule:policy" id is built for de-duplication."""

    def __init__(self, *args, **kwargs):
        Container.__init__(self, *args, **kwargs)
        user_id = self.user.id if hasattr(self, 'user') and self.user else ''
        schedule_id = self.schedule.id if hasattr(self, 'schedule') and self.schedule else ''
        policy_id = self.escalation_policy.id if hasattr(self, 'escalation_policy') and self.escalation_policy else ''
        self.id = '%s:%s:%s' % (user_id, schedule_id, policy_id)
449 |
450 |
class Incident(Container):
    """An incident resource with nested log entries and notes, plus
    lifecycle helpers (resolve/acknowledge/snooze/reassign)."""

    def __init__(self, *args, **kwargs):
        Container.__init__(self, *args, **kwargs)
        self.log_entries = LogEntries(self.pagerduty, self)
        self.notes = Notes(self.pagerduty, self)

    def _do_action(self, verb, requester, **kwargs):
        """PUT a status change ("resolved"/"acknowledged") on this
        incident; requester is the email sent in the From header."""
        path = '{0}/{1}'.format(self.collection.name, self.id)
        data = {
            "incident": {
                "type": "incident_reference",
                "status": verb
            }
        }
        extra_headers = {'From': requester}
        return self.pagerduty.request('PUT', path, data=_json_dumper(data), extra_headers=extra_headers)

    def resolve(self, requester):
        """Resolve this incident.
        :param requester: The email address of the individual resolving.
        """
        self._do_action('resolved', requester=requester)

    def acknowledge(self, requester):
        """Acknowledge this incident.
        :param requester: The email address of the individual acknowledging.
        """
        self._do_action('acknowledged', requester=requester)

    def snooze(self, requester, duration):
        """Snooze incident.
        :param requester: The email address of the individual requesting snooze.
        :param duration: How long to snooze for (seconds, per the API).
        """
        path = '{0}/{1}/{2}'.format(self.collection.name, self.id, 'snooze')
        data = {"duration": duration}
        extra_headers = {"From": requester}
        return self.pagerduty.request('POST', path, data=_json_dumper(data), extra_headers=extra_headers)

    def get_trigger_log_entry(self, **kwargs):
        """Return the log entry that triggered this incident, located via
        trigger_details_html_url."""
        match = TRIGGER_LOG_ENTRY_RE.search(self.trigger_details_html_url)
        return self.log_entries.show(match.group('log_entry_id'), **kwargs)

    def reassign(self, user_ids, requester):
        """Reassign this incident to a user or list of users

        :param user_ids: A non-empty list of user ids
        :param requester: The email address of individual requesting reassign
        :raises Error: if user_ids is empty.
        """
        path = '{0}'.format(self.collection.name)
        assignments = []
        if not user_ids:
            raise Error('Must pass at least one user id')
        for user_id in user_ids:
            ref = {
                "assignee": {
                    "id": user_id,
                    "type": "user_reference"
                }
            }
            assignments.append(ref)
        data = {
            "incidents": [
                {
                    "id": self.id,
                    "type": "incident_reference",
                    "assignments": assignments
                }
            ]
        }
        extra_headers = {"From": requester}
        return self.pagerduty.request('PUT', path, data=_json_dumper(data), extra_headers=extra_headers)
522 |
523 |
class Note(Container):
    """A note attached to an incident."""
    pass
526 |
527 |
class Alert(Container):
    """An alert resource."""
    pass
530 |
531 |
class EmailFilter(Container):
    """An email filter attached to a service."""
    pass
534 |
535 |
class MaintenanceWindow(Container):
    """A maintenance window resource."""
    pass
538 |
539 |
class Override(Container):
    """A schedule override."""
    pass
542 |
543 |
class NotificationRule(Container):
    """A user's notification rule."""
    pass
546 |
547 |
class ContactMethod(Container):
    """A user's contact method."""
    pass
550 |
551 |
class EscalationPolicy(Container):
    """An escalation policy with its nested escalation_rules collection."""

    def __init__(self, *args, **kwargs):
        super(EscalationPolicy, self).__init__(*args, **kwargs)
        self.escalation_rules = EscalationRules(self.pagerduty, self)
556 |
557 |
class EscalationRule(Container):
    """A single rule within an escalation policy."""
    pass
560 |
561 |
class RuleObject(Container):
    """A target (user/schedule) referenced by an escalation rule."""
    pass
564 |
565 |
class ScheduleLayer(Container):
    """A single layer of a schedule."""
    pass
568 |
569 |
class Service(Container):
    """A service resource with its nested email_filters collection."""

    def __init__(self, *args, **kwargs):
        super(Service, self).__init__(*args, **kwargs)
        self.email_filters = EmailFilters(self.pagerduty, self)
574 |
575 |
class Schedule(Container):
    """Container for a schedule with its overrides and users sub-collections.

    The API payload carries a "users" field; it is renamed to
    ``schedule_users`` at parse time so the ``users`` attribute can hold
    the ``ScheduleUsers`` sub-collection instead.
    """

    def __init__(self, *args, **kwargs):
        kwargs[Container.ATTR_NAME_OVERRIDE_KEY] = {"users": "schedule_users"}
        super(Schedule, self).__init__(*args, **kwargs)
        self.overrides = Overrides(self.pagerduty, self)
        self.users = ScheduleUsers(self.pagerduty, self)
585 |
586 |
class ScheduleUser(Container):
    """Container subclass for a user within a schedule; adds no attributes or methods."""
    pass


class Team(Container):
    """Container subclass for team resources; adds no attributes or methods."""
    pass


class Restriction(Container):
    """Container subclass for restriction resources; adds no attributes or methods."""
    pass
597 |
598 |
class User(Container):
    """Container for a user, exposing the user-scoped sub-collections."""

    def __init__(self, *args, **kwargs):
        super(User, self).__init__(*args, **kwargs)
        # Sub-collections rooted at this user.
        self.notification_rules = NotificationRules(self.pagerduty, self)
        self.contact_methods = ContactMethods(self.pagerduty, self)
        self.schedules = Schedules(self.pagerduty, self)
        self.escalation_policies = EscalationPolicies(self.pagerduty, self)
        self.log_entries = LogEntries(self.pagerduty, self)
607 |
608 |
class Entry(Container):
    """Container subclass for schedule entries; adds no attributes or methods."""
    pass


class LogEntry(Container):
    """Container subclass for log-entry resources; adds no attributes or methods."""
    pass


class FinalSchedule(Container):
    """Container subclass for final-schedule data; adds no attributes or methods."""
    pass


class RenderSchedule(Container):
    """Container subclass for rendered-schedule data; adds no attributes or methods."""
    pass
623 |
624 |
class PagerDuty(object):
    """Client for the PagerDuty REST API v2 at ``api.pagerduty.com``."""

    def __init__(self, api_token, timeout=10, page_size=25,
                 proxies=None, parse_datetime=False):

        self.api_token = api_token
        self._host = "api.pagerduty.com"
        self._api_base = "https://{0}/".format(self._host)
        self.timeout = timeout
        self.page_size = page_size
        self.requester = Requester(timeout=timeout, proxies=proxies, parse_datetime=parse_datetime)

        # Top-level resource collections exposed by the client.
        self.incidents = Incidents(self)
        self.alerts = Alerts(self)
        self.schedules = Schedules(self)
        self.escalation_policies = EscalationPolicies(self)
        self.users = Users(self)
        self.services = Services(self)
        self.maintenance_windows = MaintenanceWindows(self)
        self.teams = Teams(self)
        self.log_entries = LogEntries(self)
        self.extensions = Extensions(self)
        self.addons = Addons(self)
        self.oncalls = Oncalls(self)

    @staticmethod
    def _process_query_params(query_params):
        """Encode query params, expanding sequence values to repeated ``key[]`` pairs."""
        pairs = []
        for key, value in query_params.items():
            if isinstance(value, (list, set, tuple)):
                pairs.extend(("{0}[]".format(key), item) for item in value)
            else:
                pairs.append((key, value))

        return urllib.parse.urlencode(pairs)

    def request(self, method, path, query_params=None, data=None,
                extra_headers=None):
        """Issue an authenticated HTTP request against the v2 REST API.

        ``extra_headers`` may override the defaults; text ``data`` is
        UTF-8 encoded before sending.
        """
        headers = {
            "Accept": "application/vnd.pagerduty+json;version=2",
            "Content-type": "application/json",
            "Authorization": "Token token={0}".format(self.api_token)
        }

        if extra_headers:
            headers.update(extra_headers)

        url = urllib.parse.urljoin(self._api_base, path)

        if query_params is not None:
            encoded = self._process_query_params(query_params)
            if encoded:
                url = "{0}?{1}".format(url, encoded)

        if isinstance(data, six.text_type):
            data = data.encode("utf-8")

        # urllib only supports GET/POST natively; force the verb we want.
        request = urllib.request.Request(url, data=data, headers=headers)
        request.get_method = lambda: method.upper()

        return self.requester.execute_request(request)
690 |
--------------------------------------------------------------------------------
/pygerduty/version.py:
--------------------------------------------------------------------------------
1 | version_info = (0, 38, 3)
2 | __version__ = ".".join(str(v) for v in version_info)
3 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from setuptools import setup
4 | from setuptools.command.test import test as TestCommand
5 | import sys
6 |
7 |
class PyTest(TestCommand):
    """setuptools ``test`` command that delegates test running to pytest."""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        # -v: verbose; -r a: summary line for every non-passing outcome.
        self.test_args = ["-v", "-r a"]
        self.test_suite = True

    def run_tests(self):
        # import here, cause outside the eggs aren't loaded
        import pytest
        sys.exit(pytest.main(self.test_args))
19 |
20 |
# Execute version.py in this namespace to pick up __version__ without
# importing the (possibly not-yet-installed) pygerduty package.
with open('pygerduty/version.py') as version_file:
    exec(compile(version_file.read(), version_file.name, 'exec'))

kwargs = {
    "name": "pygerduty",
    "version": str(__version__),  # noqa
    "packages": ["pygerduty"],
    "scripts": ["bin/grab_oncall.py"],
    "description": "Python Client Library for PagerDuty's REST API",
    "author": "Mike Cugini",
    "maintainer": "Mike Cugini",
    "author_email": "cugini@dropbox.com",
    "maintainer_email": "cugini@dropbox.com",
    "license": "MIT",
    "url": "https://github.com/dropbox/pygerduty",
    "download_url": "https://github.com/dropbox/pygerduty/archive/master.tar.gz",
    "classifiers": [
        "Programming Language :: Python",
        "Topic :: Software Development",
        "Topic :: Software Development :: Libraries",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    "install_requires": ["six"],
    # Pinned to versions that still support the Python versions tested in CI.
    "tests_require": [
        "httpretty<1",
        "pytest>=3.0,<5.0",
        "pyparsing<3",
        "zipp<2",
    ],
    "cmdclass": {"test": PyTest}
}

setup(**kwargs)
54 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dropbox/pygerduty/ef0f6c64737d38bbd1cb709d434595b1e2892d72/tests/__init__.py
--------------------------------------------------------------------------------
/tests/addon_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import httpretty
4 | import pygerduty
5 | import pygerduty.v2
6 |
7 |
8 | ###################
9 | # Version 2 Tests #
10 | ###################
11 |
@httpretty.activate
def test_get_addon_v2():
    """addons.show() deserializes a single add-on resource from the v2 API."""
    # Use a context manager so the fixture handle is closed deterministically
    # instead of leaking until GC.
    with open('tests/fixtures/addon_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/addons/PKX7F81",
        body=body, status=200)

    p = pygerduty.v2.PagerDuty("password")
    addon = p.addons.show("PKX7F81")

    assert addon.id == "PKX7F81"
    assert addon.type == "incident_show_addon"
    assert addon.name == "Service Runbook"
    assert addon.src == "https://intranet.example.com/runbook.html"
    assert addon.services[0].id == "PIJ90N7"
27 |
@httpretty.activate
def test_update_addon_v2():
    """addons.update() PUTs and deserializes the updated add-on."""
    # Context managers close the fixture handles instead of leaking them.
    with open('tests/fixtures/addon_update_request_v2.json') as f:
        body_req = f.read()
    with open('tests/fixtures/addon_update_response_v2.json') as f:
        body_resp = f.read()
    httpretty.register_uri(
        httpretty.PUT, "https://api.pagerduty.com/addons/PKX7F81",
        body=body_req,
        responses=[httpretty.Response(body=body_resp, status=200)])

    p = pygerduty.v2.PagerDuty("password")
    addon = p.addons.update("PKX7F81")

    assert addon.id == "PKX7F81"
    assert addon.type == "incident_show_addon"
    assert addon.name == "Service Runbook"
    assert addon.src == "https://intranet.example.com/runbook.html"
    assert addon.services[0].id == "PIJ90N7"
45 |
--------------------------------------------------------------------------------
/tests/client_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import httpretty
4 | import pygerduty
5 | import pygerduty.v2
6 | import pytest
7 |
8 | ###################
9 | # Version 1 Tests #
10 | ###################
11 |
@httpretty.activate
def test_unknown_subdomain_v1():
    """A 404 "Account Not Found" response must surface as NotFound."""
    error_body = '{"error":{"message":"Account Not Found","code":2007}}'
    httpretty.register_uri(
        httpretty.GET,
        "https://contosso.pagerduty.com/api/v1/users/ABCDEFG",
        body=error_body,
        status=404,
    )

    client = pygerduty.PagerDuty("contosso", "password")

    with pytest.raises(pygerduty.NotFound):
        client.users.show("ABCDEFG")
22 |
23 | ###################
24 | # Version 2 Tests #
25 | ###################
26 |
@httpretty.activate
def test_v2_domain():
    """A 404 from the v2 API must surface as pygerduty.common.NotFound."""
    error_body = '{"error": {"message":"API Not found", "code":207}}'
    httpretty.register_uri(
        httpretty.GET,
        "https://api.pagerduty.com/users/EFGHIJK",
        body=error_body,
        status=404,
    )

    client = pygerduty.v2.PagerDuty("password")

    with pytest.raises(pygerduty.common.NotFound):
        client.users.show("EFGHIJK")
36 |
--------------------------------------------------------------------------------
/tests/collection_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import pygerduty.v2
4 |
5 | ###################
6 | # Version 2 Tests #
7 | ###################
8 |
def test_id_to_obj():
    """cut_suffix strips ``_id`` and id_to_obj builds a typed reference."""
    key = pygerduty.v2.Collection.cut_suffix("escalation_policy_id")
    assert key == 'escalation_policy'

    ref = pygerduty.v2.Collection.id_to_obj(key, "PIJ90N7")
    assert ref == {
        "id": "PIJ90N7",
        "type": "escalation_policy",
    }
22 |
23 |
def test_ids_to_objs():
    """ids_to_objs maps each id in a list to a typed reference dict."""
    service_ids = ["PF9KMXH", "PIJ90N7"]

    key = pygerduty.v2.Collection.cut_suffix("service_ids")
    assert key == "service"

    refs = pygerduty.v2.Collection.ids_to_objs(key, service_ids)
    assert refs == [
        {"id": "PF9KMXH", "type": "service"},
        {"id": "PIJ90N7", "type": "service"},
    ]
46 |
47 |
def test_process_kwargs_id():
    """process_kwargs expands a single ``*_id`` entry into a reference object."""
    processed = pygerduty.v2.Collection.process_kwargs({
        "name": "default-email",
        "description": "default email service",
        "escalation_policy_id": "PIJ90N7",
        "service_key": "default-email",
    })

    expected = {
        "name": "default-email",
        "description": "default email service",
        "escalation_policy": {"id": "PIJ90N7", "type": "escalation_policy"},
        "service_key": "default-email",
    }
    assert processed == expected
68 |
69 |
def test_process_kwargs_ids():
    """process_kwargs expands a ``*_ids`` list into a list of references."""
    processed = pygerduty.v2.Collection.process_kwargs({
        "start_time": "2012-06-16T13:00:00-04:00Z",
        "end_time": "2012-06-16T14:00:00-04:00Z",
        "description": "Description goes here",
        "service_ids": ["PF9KMXH", "P45HJSK"],
    })

    expected = {
        "start_time": "2012-06-16T13:00:00-04:00Z",
        "end_time": "2012-06-16T14:00:00-04:00Z",
        "description": "Description goes here",
        "services": [
            {"id": "PF9KMXH", "type": "service"},
            {"id": "P45HJSK", "type": "service"},
        ],
    }
    assert processed == expected
99 |
100 |
101 |
102 |
--------------------------------------------------------------------------------
/tests/container_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import pygerduty
4 | import pygerduty.v2
5 |
6 | ###################
7 | # Version 1 Tests #
8 | ###################
9 |
def test_to_json_v1():
    """to_json recursively serializes nested v1 containers."""
    client = pygerduty.PagerDuty("contosso", "password")
    coll = pygerduty.Collection(client)

    inner = pygerduty.Container(coll, name='first')
    outer = pygerduty.Container(coll, container=inner)

    assert outer.to_json() == {'container': {'name': 'first'}}
18 |
19 |
def test_to_json_list_convertion_v1():
    """List-valued attributes survive a v1 to_json round trip unchanged."""
    client = pygerduty.PagerDuty("contosso", "password")
    coll = pygerduty.Collection(client)

    c = pygerduty.Container(coll, handlers=['first', 'second'])
    assert c.to_json() == {'handlers': ['first', 'second']}
26 |
27 | ###################
28 | # Version 2 Tests #
29 | ###################
30 |
31 |
def test_to_json_v2():
    """to_json recursively serializes nested v2 containers."""
    client = pygerduty.v2.PagerDuty("password")
    coll = pygerduty.v2.Collection(client)

    inner = pygerduty.v2.Container(coll, name='test1')
    outer = pygerduty.v2.Container(coll, container=inner)

    assert outer.to_json() == {'container': {'name': 'test1'}}
40 |
41 |
def test_to_json_list_convertion_v2():
    """List-valued attributes survive a v2 to_json round trip unchanged."""
    p = pygerduty.v2.PagerDuty("password")
    # Was pygerduty.Collection (the v1 class); this is a v2 test, so use the
    # v2 collection for consistency with test_to_json_v2 above.
    collection = pygerduty.v2.Collection(p)

    container = pygerduty.v2.Container(collection, handlers=['test1', 'test2'])

    assert {'handlers': ['test1', 'test2']} == container.to_json()
49 |
--------------------------------------------------------------------------------
/tests/datetime_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import datetime
4 | import httpretty
5 | import pygerduty
6 | import pygerduty.v2
7 | import pytest
8 | import uuid
9 |
10 | ###################
11 | # Version 1 Tests #
12 | ###################
13 |
@httpretty.activate
def test_loads_with_datetime_v1():
    """With parse_datetime=True, ISO-8601 fields deserialize to datetimes."""
    # Context manager closes the fixture handle instead of leaking it.
    with open('tests/fixtures/incident_resp_v1.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://acme.pagerduty.com/api/v1/incidents/PIJ90N7",
        body=body, status=200
    )

    pd = pygerduty.PagerDuty("acme", "password", parse_datetime=True)
    incident = pd.incidents.show("PIJ90N7")

    assert incident.last_status_change_on == datetime.datetime(2012, 12, 22, 0, 35, 22)
    assert incident.created_on == datetime.datetime(2012, 12, 22, 0, 35, 21)

    assert incident.assigned_to[0].at == datetime.datetime(2012, 12, 22, 0, 35, 21)

    assert incident.pending_actions[0].at == datetime.datetime(2014, 1, 1, 8, 0)
    assert incident.pending_actions[1].at == datetime.datetime(2014, 1, 1, 10, 0)
    assert incident.pending_actions[2].at == datetime.datetime(2014, 1, 1, 11, 0)
33 |
34 |
@httpretty.activate
def test_loads_without_datetime_v1():
    """With parse_datetime=False, timestamp fields stay raw strings."""
    # Context manager closes the fixture handle instead of leaking it.
    with open('tests/fixtures/incident_resp_v1.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://acme.pagerduty.com/api/v1/incidents/PIJ90N7",
        body=body, status=200
    )

    pd = pygerduty.PagerDuty("acme", "password", parse_datetime=False)
    incident = pd.incidents.show("PIJ90N7")

    assert incident.last_status_change_on == "2012-12-22T00:35:22Z"
    assert incident.created_on == "2012-12-22T00:35:21Z"

    assert incident.assigned_to[0].at == "2012-12-22T00:35:21Z"

    assert incident.pending_actions[0].at == "2014-01-01T08:00:00Z"
    assert incident.pending_actions[1].at == "2014-01-01T10:00:00Z"
    assert incident.pending_actions[2].at == "2014-01-01T11:00:00Z"
54 |
def test_datetime_encoder_decoder_v1():
    """v1 JSON dumper/loader round-trip datetimes and reject unknown types."""
    payload = {
        "d": datetime.datetime(2014, 1, 1, 8, 0),
        "s": "string",
        "i": 10,
    }

    # Round-trip must reproduce the original structure.
    assert pygerduty._json_loader(pygerduty._json_dumper(payload)) == payload

    # An unserializable type must raise through the encoder's default hook.
    with pytest.raises(TypeError) as excinfo:
        pygerduty._json_dumper({"test": uuid.uuid4()})
    excinfo.match(r".* is not JSON serializable")
69 |
70 |
71 | ###################
72 | # Version 2 Tests #
73 | ###################
74 |
@httpretty.activate
def test_loads_with_datetime_v2():
    """With parse_datetime=True, v2 ISO-8601 fields deserialize to datetimes."""
    # Context manager closes the fixture handle instead of leaking it.
    with open('tests/fixtures/incident_resp_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/incidents/PT4KHLK",
        body=body, status=200
    )

    pd = pygerduty.v2.PagerDuty("password", parse_datetime=True)
    incident = pd.incidents.show("PT4KHLK")

    assert incident.last_status_change_at == datetime.datetime(2015, 10, 6, 21, 38, 23)
    assert incident.created_at == datetime.datetime(2015, 10, 6, 21, 30, 42)

    assert incident.assignments[0].at == datetime.datetime(2015, 11, 10, 0, 31, 52)

    assert incident.pending_actions[0].at == datetime.datetime(2015, 11, 10, 1, 2, 52)
    assert incident.pending_actions[1].at == datetime.datetime(2015, 11, 10, 4, 31, 52)
93 |
94 |
@httpretty.activate
def test_loads_without_datetime_v2():
    """With parse_datetime=False, v2 timestamp fields stay raw strings."""
    # Context manager closes the fixture handle instead of leaking it.
    with open('tests/fixtures/incident_resp_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/incidents/PT4KHLK",
        body=body, status=200
    )

    pd = pygerduty.v2.PagerDuty("password", parse_datetime=False)
    incident = pd.incidents.show("PT4KHLK")

    assert incident.last_status_change_at == "2015-10-06T21:38:23Z"
    assert incident.created_at == "2015-10-06T21:30:42Z"

    assert incident.assignments[0].at == "2015-11-10T00:31:52Z"

    assert incident.pending_actions[0].at == "2015-11-10T01:02:52Z"
    assert incident.pending_actions[1].at == "2015-11-10T04:31:52Z"
113 |
114 |
def test_datetime_encoder_decoder_v2():
    """v2 JSON dumper/loader round-trip datetimes and reject unknown types."""
    payload = {
        "d": datetime.datetime(2014, 1, 1, 8, 0),
        "s": "string",
        "i": 10,
    }

    # Round-trip must reproduce the original structure.
    assert pygerduty.common._json_loader(pygerduty.common._json_dumper(payload)) == payload

    # An unserializable type must raise through the encoder's default hook.
    with pytest.raises(TypeError) as excinfo:
        pygerduty.common._json_dumper({"test": uuid.uuid4()})
    excinfo.match(r".* is not JSON serializable")
129 |
--------------------------------------------------------------------------------
/tests/events_test.py:
--------------------------------------------------------------------------------
1 | import httpretty
2 | import json
3 | import textwrap
4 | import pygerduty.events
5 |
6 | from pygerduty.events import INTEGRATION_API_URL
7 | from pygerduty.common import Requester
8 |
9 |
@httpretty.activate
def test_create_event():
    """create_event posts a trigger event and returns the incident key."""
    body = textwrap.dedent("""
    {
        "status": "success",
        "message": "Event processed",
        "incident_key": "srv01/HTTP"
    }
    """)
    httpretty.register_uri(
        httpretty.POST, INTEGRATION_API_URL,
        body=body, status=200)

    requester = Requester()
    p = pygerduty.events.Events('my_key', requester)
    # Context manager closes the fixture handle instead of leaking it.
    with open('tests/fixtures/event_request.json') as f:
        request_json = f.read()

    request = json.loads(request_json)

    response = p.create_event(
        request['description'],
        request['event_type'],
        request['details'],
        request['incident_key'],
    )

    assert response == 'srv01/HTTP'
37 |
38 |
--------------------------------------------------------------------------------
/tests/extensions_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import httpretty
4 | import pygerduty.v2
5 | import textwrap
6 |
7 | ###################
8 | # Version 2 Tests #
9 | ###################
10 |
11 |
@httpretty.activate
def test_get_extension_v2():
    """extensions.show() deserializes a single extension resource."""
    # Context manager closes the fixture handle instead of leaking it.
    with open('tests/fixtures/get_extension_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/extensions/PPGPXHO",
        body=body, status=200)
    p = pygerduty.v2.PagerDuty("password")
    extension = p.extensions.show("PPGPXHO")

    assert extension.self_ == 'https://api.pagerduty.com/extensions/PPGPXHO'
    assert extension.endpoint_url == 'https://example.com/recieve_a_pagerduty_webhook'
    assert len(extension.extension_objects) == 1
    ext_obj = extension.extension_objects[0]
    assert ext_obj.id == 'PIJ90N7'
26 |
27 |
@httpretty.activate
def test_list_extensions_v2():
    """extensions.list() paginates until the API reports no more results."""
    # Context manager closes the fixture handle instead of leaking it.
    with open('tests/fixtures/extensions_list_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/extensions", responses=[
            httpretty.Response(body=body, status=200),
            # Second page: empty, terminates pagination.
            httpretty.Response(body=textwrap.dedent("""\
                {
                    "limit": 25,
                    "more": false,
                    "offset": 1,
                    "extensions": [],
                    "total": null
                }
                """), status=200),
        ],
    )

    p = pygerduty.v2.PagerDuty("password")
    extensions = [s for s in p.extensions.list()]

    assert len(extensions) == 2
    assert extensions[0].self_ == 'https://api.pagerduty.com/extensions/PPGPXHO'
    assert extensions[0].endpoint_url == 'https://example.com/recieve_a_pagerduty_webhook'
    assert len(extensions[0].extension_objects) == 1
    ext_obj0 = extensions[0].extension_objects[0]
    assert ext_obj0.id == 'PIJ90N7'

    assert extensions[1].self_ == 'https://api.pagerduty.com/extensions/PPGPXHI'
    assert extensions[1].endpoint_url == 'https://example.com/recieve_a_pagerduty_webhook_2'
    assert len(extensions[1].extension_objects) == 1
    ext_obj1 = extensions[1].extension_objects[0]
    assert ext_obj1.id == 'PIJ90N8'
61 |
--------------------------------------------------------------------------------
/tests/fixtures/addon_update_request_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "addon": {
3 | "type": "full_page_addon",
4 | "name": "Service Runbook",
5 | "src": "https://intranet.example.com/runbook.html"
6 | }
7 | }
--------------------------------------------------------------------------------
/tests/fixtures/addon_update_response_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "addon": {
3 | "id": "PKX7F81",
4 | "type": "incident_show_addon",
5 | "name": "Service Runbook",
6 | "src": "https://intranet.example.com/runbook.html",
7 | "services": [
8 | {
9 | "id": "PIJ90N7",
10 | "type": "service",
11 | "summary": "My Application Service",
12 | "self": "https://api.pagerduty.com/services/PIJ90N7",
13 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
14 | }
15 | ]
16 | }
17 | }
--------------------------------------------------------------------------------
/tests/fixtures/addon_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "addon": {
3 | "id": "PKX7F81",
4 | "type": "incident_show_addon",
5 | "name": "Service Runbook",
6 | "src": "https://intranet.example.com/runbook.html",
7 | "services": [
8 | {
9 | "id": "PIJ90N7",
10 | "type": "service",
11 | "summary": "My Application Service",
12 | "self": "https://api.pagerduty.com/services/PIJ90N7",
13 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
14 | }
15 | ]
16 | }
17 | }
--------------------------------------------------------------------------------
/tests/fixtures/contacts_v1.json:
--------------------------------------------------------------------------------
1 | {
2 | "contact_methods": [
3 | {
4 | "address": "bart@example.com",
5 | "email": "bart@example.com",
6 | "id": "PZMO0JF",
7 | "label": "Default",
8 | "send_short_email": false,
9 | "type": "email",
10 | "user_id": "PIJ90N7"
11 | },
12 | {
13 | "address": "9373249222",
14 | "country_code": 1,
15 | "id": "PDGR818",
16 | "label": "Mobile",
17 | "phone_number": "9373249222",
18 | "type": "phone",
19 | "user_id": "PIJ90N7"
20 | },
21 | {
22 | "address": "9373249222",
23 | "country_code": 1,
24 | "enabled": true,
25 | "id": "P25E95E",
26 | "label": "Mobile",
27 | "phone_number": "9373249222",
28 | "type": "SMS",
29 | "user_id": "PIJ90N7"
30 | }
31 | ]
32 | }
33 |
--------------------------------------------------------------------------------
/tests/fixtures/contacts_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "contact_methods": [
3 | {
4 | "address": "betty@example.com",
5 | "id": "PZMO0JF",
6 | "self": "https://api.pagerduty.com/users/PXPGF42/contact_method/PZMO0JF",
7 | "label": "Default",
8 | "send_short_email": false,
9 | "type": "email",
10 | "html_url": null,
11 | "user_id": "PXPGF42"
12 | },
13 | {
14 | "address": "7483784787",
15 | "id": "PWEN34G",
16 | "self": "https://api.pagerduty.com/users/PXPGF42/contact_method/PWEN34G",
17 | "label": "Default",
18 | "send_short_email": false,
19 | "type": "SMS",
20 | "html_url": null,
21 | "user_id": "PXPGF42"
22 | },
23 | {
24 | "address": "7483784787",
25 | "id": "PZMP0KL",
26 | "self": "https://api.pagerduty.com/users/PXPGF42/contact_method/PZMP0KL",
27 | "label": "Default",
28 | "send_short_email": false,
29 | "type": "phone",
30 | "html_url": null,
31 | "user_id": "PXPGF42"
32 | }
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/tests/fixtures/event_request.json:
--------------------------------------------------------------------------------
1 | {
2 | "service_key": "e93facc04764012d7bfb002500d5d1a6",
3 | "incident_key": "srv01/HTTP",
4 | "event_type": "trigger",
5 | "description": "FAILURE for production/HTTP on machine srv01.acme.com",
6 | "client": "Sample Monitoring Service",
7 | "client_url": "https://monitoring.service.com",
8 | "details": {
9 | "ping time": "1500ms",
10 | "load avg": 0.75
11 | },
12 | "contexts": [
13 | {
14 | "type": "link",
15 | "href": "http://acme.pagerduty.com"
16 | },
17 | {
18 | "type": "link",
19 | "href": "http://acme.pagerduty.com"
20 | },
21 | {
22 | "text": "View the incident on PagerDuty",
23 | "type": "image",
24 | "src": "https://chart.googleapis.com/chart?chs=600x400&chd=t:6,2,9,5,2,5,7,4,8,2,1&cht=lc&chds=a&chxt=y&chm=D,0033FF,0,0,5,1"
25 | },
26 | {
27 | "type": "image",
28 | "src": "https://chart.googleapis.com/chart?chs=600x400&chd=t:6,2,9,5,2,5,7,4,8,2,1&cht=lc&chds=a&chxt=y&chm=D,0033FF,0,0,5,1",
29 | "href": "https://google.com"
30 | }
31 | ]
32 | }
33 |
--------------------------------------------------------------------------------
/tests/fixtures/extensions_list_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "extensions": [
3 | {
4 | "id": "PPGPXHO",
5 | "self": "https://api.pagerduty.com/extensions/PPGPXHO",
6 | "html_url": null,
7 | "endpoint_url": "https://example.com/recieve_a_pagerduty_webhook",
8 | "name": "My Webhook",
9 | "summary": "My Webhook",
10 | "type": "extension",
11 | "extension_schema": {
12 | "id": "PJFWPEP",
13 | "type": "extension_schema_reference",
14 | "summary": "Generic Webhook",
15 | "self": "https://api.pagerduty.com/extension_schemas/PJFWPEP",
16 | "html_url": null
17 | },
18 | "extension_objects": [
19 | {
20 | "id": "PIJ90N7",
21 | "type": "service_reference",
22 | "summary": "My Application Service",
23 | "self": "https://api.pagerduty.com/services/PIJ90N7",
24 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
25 | }
26 | ]
27 | },
28 | {
29 | "id": "PPGPXHI",
30 | "self": "https://api.pagerduty.com/extensions/PPGPXHI",
31 | "html_url": null,
32 | "endpoint_url": "https://example.com/recieve_a_pagerduty_webhook_2",
33 | "name": "My Webhook 2",
34 | "summary": "My Webhook w",
35 | "type": "extension",
36 | "extension_schema": {
37 | "id": "PJFWPEP",
38 | "type": "extension_schema_reference",
39 | "summary": "Generic Webhook",
40 | "self": "https://api.pagerduty.com/extension_schemas/PJFWPEP",
41 | "html_url": null
42 | },
43 | "extension_objects": [
44 | {
45 | "id": "PIJ90N8",
46 | "type": "service_reference",
47 | "summary": "My Application Service",
48 | "self": "https://api.pagerduty.com/services/PIJ90N8",
49 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N8"
50 | }
51 | ]
52 | }
53 | ],
54 | "limit": 25,
55 | "offset": 0,
56 | "total": null,
57 | "more": false
58 | }
59 |
--------------------------------------------------------------------------------
/tests/fixtures/get_extension_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "extension": {
3 | "id": "PPGPXHO",
4 | "self": "https://api.pagerduty.com/extensions/PPGPXHO",
5 | "html_url": null,
6 | "endpoint_url": "https://example.com/recieve_a_pagerduty_webhook",
7 | "name": "My Webhook",
8 | "summary": "My Webhook",
9 | "type": "extension",
10 | "extension_schema": {
11 | "id": "PJFWPEP",
12 | "type": "extension_schema_reference",
13 | "summary": "Generic Webhook",
14 | "self": "https://api.pagerduty.com/extension_schemas/PJFWPEP",
15 | "html_url": null
16 | },
17 | "extension_objects": [
18 | {
19 | "id": "PIJ90N7",
20 | "type": "service_reference",
21 | "summary": "My Application Service",
22 | "self": "https://api.pagerduty.com/services/PIJ90N7",
23 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
24 | }
25 | ]
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/tests/fixtures/get_incident_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "incident": {
3 | "id": "PT4KHLK",
4 | "type": "incident",
5 | "summary": "[#1234] The server is on fire.",
6 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
7 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
8 | "incident_number": 1234,
9 | "created_at": "2015-10-06T21:30:42Z",
10 | "status": "resolved",
11 | "title": "The server is on fire.",
12 | "resolve_reason": null,
13 | "alert_counts": {
14 | "all": 2,
15 | "triggered": 0,
16 | "resolved": 2
17 | },
18 | "pending_actions": [
19 | {
20 | "type": "unacknowledge",
21 | "at": "2015-11-10T01:02:52Z"
22 | },
23 | {
24 | "type": "resolve",
25 | "at": "2015-11-10T04:31:52Z"
26 | }
27 | ],
28 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
29 | "service": {
30 | "id": "PIJ90N7",
31 | "type": "generic_email_reference",
32 | "summary": "My Mail Service",
33 | "self": "https://api.pagerduty.com/services/PIJ90N7",
34 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
35 | },
36 | "assignments": [
37 | {
38 | "at": "2015-11-10T00:31:52Z",
39 | "assignee": {
40 | "id": "PXPGF42",
41 | "type": "user_reference",
42 | "summary": "Earline Greenholt",
43 | "self": "https://api.pagerduty.com/users/PXPGF42",
44 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
45 | }
46 | }
47 | ],
48 | "acknowledgements": [
49 | {
50 | "at": "2015-11-10T00:32:52Z",
51 | "acknowledger": {
52 | "id": "PXPGF42",
53 | "type": "user_reference",
54 | "summary": "Earline Greenholt",
55 | "self": "https://api.pagerduty.com/users/PXPGF42",
56 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
57 | }
58 | }
59 | ],
60 | "last_status_change_at": "2015-10-06T21:38:23Z",
61 | "last_status_change_by": {
62 | "id": "PXPGF42",
63 | "type": "user_reference",
64 | "summary": "Earline Greenholt",
65 | "self": "https://api.pagerduty.com/users/PXPGF42",
66 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
67 | },
68 | "first_trigger_log_entry": {
69 | "id": "Q02JTSNZWHSEKV",
70 | "type": "trigger_log_entry_reference",
71 | "summary": "Triggered through the API",
72 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
73 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
74 | },
75 | "escalation_policy": {
76 | "id": "PT20YPA",
77 | "type": "escalation_policy_reference",
78 | "summary": "Another Escalation Policy",
79 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
80 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
81 | },
82 | "teams": [
83 | {
84 | "id": "PQ9K7I8",
85 | "type": "team_reference",
86 | "summary": "Engineering",
87 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
88 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
89 | }
90 | ],
91 | "urgency": "high"
92 | }
93 | }
94 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_get_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "incident": {
3 | "id": "PT4KHLK",
4 | "type": "incident",
5 | "summary": "[#1234] The server is on fire.",
6 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
7 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
8 | "incident_number": 1234,
9 | "created_at": "2015-10-06T21:30:42Z",
10 | "status": "resolved",
11 | "title": "The server is on fire.",
12 | "resolve_reason": null,
13 | "alert_counts": {
14 | "all": 2,
15 | "triggered": 0,
16 | "resolved": 2
17 | },
18 | "pending_actions": [
19 | {
20 | "type": "unacknowledge",
21 | "at": "2015-11-10T01:02:52Z"
22 | },
23 | {
24 | "type": "resolve",
25 | "at": "2015-11-10T04:31:52Z"
26 | }
27 | ],
28 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
29 | "service": {
30 | "id": "PIJ90N7",
31 | "type": "generic_email_reference",
32 | "summary": "My Mail Service",
33 | "self": "https://api.pagerduty.com/services/PIJ90N7",
34 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
35 | },
36 | "priority": {
37 | "id": "P53ZZH5",
38 | "type": "priority_reference",
39 | "summary": "P2",
40 | "self": "https://api.pagerduty.com/priorities/P53ZZH5",
41 | "html_url": null
42 | },
43 | "assignments": [
44 | {
45 | "at": "2015-11-10T00:31:52Z",
46 | "assignee": {
47 | "id": "PXPGF42",
48 | "type": "user_reference",
49 | "summary": "Earline Greenholt",
50 | "self": "https://api.pagerduty.com/users/PXPGF42",
51 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
52 | }
53 | }
54 | ],
55 |     "acknowledgements": [],
56 | "last_status_change_at": "2015-10-06T21:38:23Z",
57 | "last_status_change_by": {
58 | "id": "PXPGF42",
59 | "type": "user_reference",
60 | "summary": "Earline Greenholt",
61 | "self": "https://api.pagerduty.com/users/PXPGF42",
62 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
63 | },
64 | "first_trigger_log_entry": {
65 | "id": "Q02JTSNZWHSEKV",
66 | "type": "trigger_log_entry_reference",
67 | "summary": "Triggered through the API",
68 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
69 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
70 | },
71 | "escalation_policy": {
72 | "id": "PT20YPA",
73 | "type": "escalation_policy_reference",
74 | "summary": "Another Escalation Policy",
75 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
76 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
77 | },
78 | "teams": [
79 | {
80 | "id": "PQ9K7I8",
81 | "type": "team_reference",
82 | "summary": "Engineering",
83 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
84 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
85 | }
86 | ],
87 | "urgency": "high"
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_list_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "incidents": [
3 | {
4 | "id": "PT4KHLK",
5 | "type": "incident",
6 | "summary": "[#1234] The server is on fire.",
7 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
8 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
9 | "incident_number": 1234,
10 | "created_at": "2015-10-06T21:30:42Z",
11 | "status": "resolved",
12 | "title": "The server is on fire.",
13 | "pending_actions": [
14 | {
15 | "type": "unacknowledge",
16 | "at": "2015-11-10T01:02:52Z"
17 | },
18 | {
19 | "type": "resolve",
20 | "at": "2015-11-10T04:31:52Z"
21 | }
22 | ],
23 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
24 | "service": {
25 | "id": "PIJ90N7",
26 | "type": "generic_email",
27 | "summary": "My Mail Service",
28 | "self": "https://api.pagerduty.com/services/PIJ90N7",
29 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
30 | },
31 | "assignments": [
32 | {
33 | "at": "2015-11-10T00:31:52Z",
34 | "assignee": {
35 | "id": "PXPGF42",
36 | "type": "user_reference",
37 | "summary": "Earline Greenholt",
38 | "self": "https://api.pagerduty.com/users/PXPGF42",
39 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
40 | }
41 | }
42 | ],
43 | "acknowledgements": [
44 | {
45 | "at": "2015-11-10T00:32:52Z",
46 | "acknowledger": {
47 | "id": "PXPGF42",
48 | "type": "user_reference",
49 | "summary": "Earline Greenholt",
50 | "self": "https://api.pagerduty.com/users/PXPGF42",
51 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
52 | }
53 | }
54 | ],
55 | "last_status_change_at": "2015-10-06T21:38:23Z",
56 | "last_status_change_by": {
57 | "id": "PXPGF42",
58 | "type": "user_reference",
59 | "summary": "Earline Greenholt",
60 | "self": "https://api.pagerduty.com/users/PXPGF42",
61 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
62 | },
63 | "first_trigger_log_entry": {
64 | "id": "Q02JTSNZWHSEKV",
65 | "type": "trigger_log_entry_reference",
66 | "summary": "Triggered through the API",
67 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
68 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
69 | },
70 | "escalation_policy": {
71 | "id": "PT20YPA",
72 | "type": "escalation_policy_reference",
73 | "summary": "Another Escalation Policy",
74 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
75 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
76 | },
77 | "teams": [
78 | {
79 | "id": "PQ9K7I8",
80 | "type": "team_reference",
81 | "summary": "Engineering",
82 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
83 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
84 | }
85 | ],
86 | "urgency": "high"
87 | }
88 | ],
89 | "limit": 1,
90 | "offset": 0,
91 | "total": null,
92 | "more": false
93 | }
94 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_postassign.json:
--------------------------------------------------------------------------------
1 | {
2 | "incident": {
3 | "id": "PT4KHLK",
4 | "type": "incident",
5 | "summary": "[#1234] The server is on fire.",
6 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
7 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
8 | "incident_number": 1234,
9 | "created_at": "2015-10-06T21:30:42Z",
10 | "status": "resolved",
11 | "title": "The server is on fire.",
12 | "resolve_reason": null,
13 | "alert_counts": {
14 | "all": 2,
15 | "triggered": 0,
16 | "resolved": 2
17 | },
18 | "pending_actions": [
19 | {
20 | "type": "unacknowledge",
21 | "at": "2015-11-10T01:02:52Z"
22 | },
23 | {
24 | "type": "resolve",
25 | "at": "2015-11-10T04:31:52Z"
26 | }
27 | ],
28 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
29 | "service": {
30 | "id": "PIJ90N7",
31 | "type": "generic_email_reference",
32 | "summary": "My Mail Service",
33 | "self": "https://api.pagerduty.com/services/PIJ90N7",
34 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
35 | },
36 | "priority": {
37 | "id": "P53ZZH5",
38 | "type": "priority_reference",
39 | "summary": "P2",
40 | "self": "https://api.pagerduty.com/priorities/P53ZZH5",
41 | "html_url": null
42 | },
43 | "assignments": [
44 | {
45 | "at": "2015-11-10T00:31:52Z",
46 | "assignee": {
47 | "id": "PXPGF42",
48 | "type": "user_reference",
49 | "summary": "Earline Greenholt",
50 | "self": "https://api.pagerduty.com/users/PXPGF42",
51 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
52 | }
53 | },
54 | {
55 | "at": "2015-11-10T00:32:32Z",
56 | "assignee": {
57 | "id": "PG23GSK",
58 | "type": "user_reference",
59 | "summary": "Vlad Green",
60 | "self": "https://api.pagerduty.com/users/PG23GSK",
61 | "html_url": "https://subdomain.pagerduty.com/users/PG23GSK"
62 | }
63 | }
64 | ],
65 | "acknowledgements": [
66 | {
67 | "at": "2015-11-10T00:32:52Z",
68 | "acknowledger": {
69 | "id": "PXPGF42",
70 | "type": "user_reference",
71 | "summary": "Earline Greenholt",
72 | "self": "https://api.pagerduty.com/users/PXPGF42",
73 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
74 | }
75 | }
76 | ],
77 | "last_status_change_at": "2015-10-06T21:38:23Z",
78 | "last_status_change_by": {
79 | "id": "PXPGF42",
80 | "type": "user_reference",
81 | "summary": "Earline Greenholt",
82 | "self": "https://api.pagerduty.com/users/PXPGF42",
83 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
84 | },
85 | "first_trigger_log_entry": {
86 | "id": "Q02JTSNZWHSEKV",
87 | "type": "trigger_log_entry_reference",
88 | "summary": "Triggered through the API",
89 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
90 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
91 | },
92 | "escalation_policy": {
93 | "id": "PT20YPA",
94 | "type": "escalation_policy_reference",
95 | "summary": "Another Escalation Policy",
96 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
97 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
98 | },
99 | "teams": [
100 | {
101 | "id": "PQ9K7I8",
102 | "type": "team_reference",
103 | "summary": "Engineering",
104 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
105 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
106 | }
107 | ],
108 | "urgency": "high"
109 | }
110 | }
111 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_preassign.json:
--------------------------------------------------------------------------------
1 | {
2 | "incident": {
3 | "id": "PT4KHLK",
4 | "type": "incident",
5 | "summary": "[#1234] The server is on fire.",
6 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
7 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
8 | "incident_number": 1234,
9 | "created_at": "2015-10-06T21:30:42Z",
10 | "status": "resolved",
11 | "title": "The server is on fire.",
12 | "resolve_reason": null,
13 | "alert_counts": {
14 | "all": 2,
15 | "triggered": 0,
16 | "resolved": 2
17 | },
18 | "pending_actions": [
19 | {
20 | "type": "unacknowledge",
21 | "at": "2015-11-10T01:02:52Z"
22 | },
23 | {
24 | "type": "resolve",
25 | "at": "2015-11-10T04:31:52Z"
26 | }
27 | ],
28 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
29 | "service": {
30 | "id": "PIJ90N7",
31 | "type": "generic_email_reference",
32 | "summary": "My Mail Service",
33 | "self": "https://api.pagerduty.com/services/PIJ90N7",
34 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
35 | },
36 | "priority": {
37 | "id": "P53ZZH5",
38 | "type": "priority_reference",
39 | "summary": "P2",
40 | "self": "https://api.pagerduty.com/priorities/P53ZZH5",
41 | "html_url": null
42 | },
43 | "assignments": [],
44 | "acknowledgements": [
45 | {
46 | "at": "2015-11-10T00:32:52Z",
47 | "acknowledger": {
48 | "id": "PXPGF42",
49 | "type": "user_reference",
50 | "summary": "Earline Greenholt",
51 | "self": "https://api.pagerduty.com/users/PXPGF42",
52 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
53 | }
54 | }
55 | ],
56 | "last_status_change_at": "2015-10-06T21:38:23Z",
57 | "last_status_change_by": {
58 | "id": "PXPGF42",
59 | "type": "user_reference",
60 | "summary": "Earline Greenholt",
61 | "self": "https://api.pagerduty.com/users/PXPGF42",
62 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
63 | },
64 | "first_trigger_log_entry": {
65 | "id": "Q02JTSNZWHSEKV",
66 | "type": "trigger_log_entry_reference",
67 | "summary": "Triggered through the API",
68 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
69 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
70 | },
71 | "escalation_policy": {
72 | "id": "PT20YPA",
73 | "type": "escalation_policy_reference",
74 | "summary": "Another Escalation Policy",
75 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
76 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
77 | },
78 | "teams": [
79 | {
80 | "id": "PQ9K7I8",
81 | "type": "team_reference",
82 | "summary": "Engineering",
83 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
84 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
85 | }
86 | ],
87 | "urgency": "high"
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_put_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "incident": {
3 | "id": "PT4KHLK",
4 | "type": "incident",
5 | "summary": "[#1234] The server is on fire.",
6 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
7 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
8 | "incident_number": 1234,
9 | "created_at": "2015-10-06T21:30:42Z",
10 | "status": "resolved",
11 | "title": "The server is on fire.",
12 | "resolve_reason": null,
13 | "alert_counts": {
14 | "all": 2,
15 | "triggered": 0,
16 | "resolved": 2
17 | },
18 | "pending_actions": [
19 | {
20 | "type": "unacknowledge",
21 | "at": "2015-11-10T01:02:52Z"
22 | },
23 | {
24 | "type": "resolve",
25 | "at": "2015-11-10T04:31:52Z"
26 | }
27 | ],
28 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
29 | "service": {
30 | "id": "PIJ90N7",
31 | "type": "generic_email_reference",
32 | "summary": "My Mail Service",
33 | "self": "https://api.pagerduty.com/services/PIJ90N7",
34 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
35 | },
36 | "priority": {
37 | "id": "P53ZZH5",
38 | "type": "priority_reference",
39 | "summary": "P2",
40 | "self": "https://api.pagerduty.com/priorities/P53ZZH5",
41 | "html_url": null
42 | },
43 | "assignments": [
44 | {
45 | "at": "2015-11-10T00:31:52Z",
46 | "assignee": {
47 | "id": "PXPGF42",
48 | "type": "user_reference",
49 | "summary": "Earline Greenholt",
50 | "self": "https://api.pagerduty.com/users/PXPGF42",
51 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
52 | }
53 | }
54 | ],
55 | "acknowledgements": [
56 | {
57 | "at": "2015-11-10T00:32:52Z",
58 | "acknowledger": {
59 | "id": "PXPGF42",
60 | "type": "user_reference",
61 | "summary": "Earline Greenholt",
62 | "self": "https://api.pagerduty.com/users/PXPGF42",
63 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
64 | }
65 | }
66 | ],
67 | "last_status_change_at": "2015-10-06T21:38:23Z",
68 | "last_status_change_by": {
69 | "id": "PXPGF42",
70 | "type": "user_reference",
71 | "summary": "Earline Greenholt",
72 | "self": "https://api.pagerduty.com/users/PXPGF42",
73 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
74 | },
75 | "first_trigger_log_entry": {
76 | "id": "Q02JTSNZWHSEKV",
77 | "type": "trigger_log_entry_reference",
78 | "summary": "Triggered through the API",
79 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
80 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
81 | },
82 | "escalation_policy": {
83 | "id": "PT20YPA",
84 | "type": "escalation_policy_reference",
85 | "summary": "Another Escalation Policy",
86 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
87 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
88 | },
89 | "teams": [
90 | {
91 | "id": "PQ9K7I8",
92 | "type": "team_reference",
93 | "summary": "Engineering",
94 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
95 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
96 | }
97 | ],
98 | "urgency": "high"
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_reassign.json:
--------------------------------------------------------------------------------
1 | {
2 | "incidents": [
3 | {
4 | "id": "PT4KHLK",
5 | "type": "incident",
6 | "summary": "[#1234] The server is on fire.",
7 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
8 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
9 | "incident_number": 1234,
10 | "created_at": "2015-10-06T21:30:42Z",
11 | "status": "resolved",
12 | "title": "The server is on fire.",
13 | "resolve_reason": null,
14 | "alert_counts": {
15 | "all": 2,
16 | "triggered": 0,
17 | "resolved": 2
18 | },
19 | "pending_actions": [
20 | {
21 | "type": "unacknowledge",
22 | "at": "2015-11-10T01:02:52Z"
23 | },
24 | {
25 | "type": "resolve",
26 | "at": "2015-11-10T04:31:52Z"
27 | }
28 | ],
29 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
30 | "service": {
31 | "id": "PIJ90N7",
32 | "type": "generic_email",
33 | "summary": "My Mail Service",
34 | "self": "https://api.pagerduty.com/services/PIJ90N7",
35 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
36 | },
37 | "assignments": [
38 | {
39 | "at": "2015-11-10T00:31:52Z",
40 | "assignee": {
41 | "id": "PXPGF42",
42 | "type": "user_reference",
43 | "summary": "Earline Greenholt",
44 | "self": "https://api.pagerduty.com/users/PXPGF42",
45 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
46 | }
47 | },
48 | {
49 | "at": "2015-11-10T00:32:32Z",
50 | "assignee": {
51 | "id": "PG23GSK",
52 | "type": "user_reference",
53 | "summary": "Vlad Green",
54 | "self": "https://api.pagerduty.com/users/PG23GSK",
55 | "html_url": "https://subdomain.pagerduty.com/users/PG23GSK"
56 | }
57 | }
58 | ],
59 | "acknowledgements": [
60 | {
61 | "at": "2015-11-10T00:32:52Z",
62 | "acknowledger": {
63 | "id": "PXPGF42",
64 | "type": "user_reference",
65 | "summary": "Earline Greenholt",
66 | "self": "https://api.pagerduty.com/users/PXPGF42",
67 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
68 | }
69 | }
70 | ],
71 | "last_status_change_at": "2015-10-06T21:38:23Z",
72 | "last_status_change_by": {
73 | "id": "PXPGF42",
74 | "type": "user_reference",
75 | "summary": "Earline Greenholt",
76 | "self": "https://api.pagerduty.com/users/PXPGF42",
77 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
78 | },
79 | "first_trigger_log_entry": {
80 | "id": "Q02JTSNZWHSEKV",
81 | "type": "trigger_log_entry_reference",
82 | "summary": "Triggered through the API",
83 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
84 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
85 | },
86 | "escalation_policy": {
87 | "id": "PT20YPA",
88 | "type": "escalation_policy_reference",
89 | "summary": "Another Escalation Policy",
90 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
91 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
92 | },
93 | "teams": [
94 | {
95 | "id": "PQ9K7I8",
96 | "type": "team_reference",
97 | "summary": "Engineering",
98 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
99 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
100 | }
101 | ],
102 | "urgency": "high"
103 | }
104 | ]
105 | }
106 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_resp_v1.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "PIJ90N7",
3 | "incident_number": 1,
4 | "created_on": "2012-12-22T00:35:21Z",
5 | "status": "triggered",
6 | "pending_actions": [
7 | {
8 | "type": "escalate",
9 | "at": "2014-01-01T08:00:00Z"
10 | },
11 | {
12 | "type": "unacknowledge",
13 | "at": "2014-01-01T10:00:00Z"
14 | },
15 | {
16 | "type": "resolve",
17 | "at": "2014-01-01T11:00:00Z"
18 | }
19 | ],
20 | "html_url": "https://acme.pagerduty.com/incidents/PIJ90N7",
21 | "incident_key": null,
22 | "service": {
23 | "id": "PBAZLIU",
24 | "name": "service",
25 | "description": "service description",
26 | "html_url": "https://acme.pagerduty.com/services/PBAZLIU"
27 | },
28 | "assigned_to_user": {
29 | "id": "PPI9KUT",
30 | "name": "Alan Kay",
31 | "email": "alan@pagerduty.com",
32 | "html_url": "https://acme.pagerduty.com/users/PPI9KUT"
33 | },
34 | "assigned_to": [
35 | {
36 | "at": "2012-12-22T00:35:21Z",
37 | "object": {
38 | "id": "PPI9KUT",
39 | "name": "Alan Kay",
40 | "email": "alan@pagerduty.com",
41 | "html_url": "https://acme.pagerduty.com/users/PPI9KUT",
42 | "type": "user"
43 | }
44 | }
45 | ],
46 | "trigger_summary_data": {
47 | "subject": "45645"
48 | },
49 | "trigger_details_html_url": "https://acme.pagerduty.com/incidents/PIJ90N7/log_entries/PIJ90N7",
50 | "last_status_change_on": "2012-12-22T00:35:22Z",
51 | "last_status_change_by": null,
52 | "urgency": "high"
53 | }
54 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_resp_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "incident": {
3 | "id": "PT4KHLK",
4 | "type": "incident",
5 | "summary": "[#1234] The server is on fire.",
6 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
7 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
8 | "incident_number": 1234,
9 | "created_at": "2015-10-06T21:30:42Z",
10 | "status": "resolved",
11 | "title": "The server is on fire.",
12 | "resolve_reason": null,
13 | "alert_counts": {
14 | "all": 2,
15 | "triggered": 0,
16 | "resolved": 2
17 | },
18 | "pending_actions": [
19 | {
20 | "type": "unacknowledge",
21 | "at": "2015-11-10T01:02:52Z"
22 | },
23 | {
24 | "type": "resolve",
25 | "at": "2015-11-10T04:31:52Z"
26 | }
27 | ],
28 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
29 | "service": {
30 | "id": "PIJ90N7",
31 | "type": "generic_email_reference",
32 | "summary": "My Mail Service",
33 | "self": "https://api.pagerduty.com/services/PIJ90N7",
34 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
35 | },
36 | "assignments": [
37 | {
38 | "at": "2015-11-10T00:31:52Z",
39 | "assignee": {
40 | "id": "PXPGF42",
41 | "type": "user_reference",
42 | "summary": "Earline Greenholt",
43 | "self": "https://api.pagerduty.com/users/PXPGF42",
44 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
45 | }
46 | }
47 | ],
48 | "acknowledgements": [
49 | {
50 | "at": "2015-11-10T00:32:52Z",
51 | "acknowledger": {
52 | "id": "PXPGF42",
53 | "type": "user_reference",
54 | "summary": "Earline Greenholt",
55 | "self": "https://api.pagerduty.com/users/PXPGF42",
56 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
57 | }
58 | }
59 | ],
60 | "last_status_change_at": "2015-10-06T21:38:23Z",
61 | "last_status_change_by": {
62 | "id": "PXPGF42",
63 | "type": "user_reference",
64 | "summary": "Earline Greenholt",
65 | "self": "https://api.pagerduty.com/users/PXPGF42",
66 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
67 | },
68 | "first_trigger_log_entry": {
69 | "id": "Q02JTSNZWHSEKV",
70 | "type": "trigger_log_entry_reference",
71 | "summary": "Triggered through the API",
72 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
73 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
74 | },
75 | "escalation_policy": {
76 | "id": "PT20YPA",
77 | "type": "escalation_policy_reference",
78 | "summary": "Another Escalation Policy",
79 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
80 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
81 | },
82 | "teams": [
83 | {
84 | "id": "PQ9K7I8",
85 | "type": "team_reference",
86 | "summary": "Engineering",
87 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
88 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
89 | }
90 | ],
91 | "urgency": "high"
92 | }
93 | }
94 |
--------------------------------------------------------------------------------
/tests/fixtures/incident_snooze.json:
--------------------------------------------------------------------------------
1 | {
2 | "incident": {
3 | "id": "PT4KHLK",
4 | "type": "incident",
5 | "summary": "[#1234] The server is on fire.",
6 | "self": "https://api.pagerduty.com/incidents/PT4KHLK",
7 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK",
8 | "incident_number": 1234,
9 | "created_at": "2015-10-06T21:30:42Z",
10 | "status": "resolved",
11 | "title": "The server is on fire.",
12 | "resolve_reason": null,
13 | "alert_counts": {
14 | "all": 2,
15 | "triggered": 0,
16 | "resolved": 2
17 | },
18 | "pending_actions": [
19 | {
20 | "type": "unacknowledge",
21 | "at": "2015-11-10T01:02:52Z"
22 | },
23 | {
24 | "type": "resolve",
25 | "at": "2015-11-10T04:31:52Z"
26 | }
27 | ],
28 | "incident_key": "baf7cf21b1da41b4b0221008339ff357",
29 | "service": {
30 | "id": "PIJ90N7",
31 | "type": "generic_email_reference",
32 | "summary": "My Mail Service",
33 | "self": "https://api.pagerduty.com/services/PIJ90N7",
34 | "html_url": "https://subdomain.pagerduty.com/services/PIJ90N7"
35 | },
36 | "priority": {
37 | "id": "P53ZZH5",
38 | "type": "priority_reference",
39 | "summary": "P2",
40 | "self": "https://api.pagerduty.com/priorities/P53ZZH5",
41 | "html_url": null
42 | },
43 | "assignments": [
44 | {
45 | "at": "2015-11-10T00:31:52Z",
46 | "assignee": {
47 | "id": "PXPGF42",
48 | "type": "user_reference",
49 | "summary": "Earline Greenholt",
50 | "self": "https://api.pagerduty.com/users/PXPGF42",
51 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
52 | }
53 | }
54 | ],
55 | "acknowledgements": [
56 | {
57 | "at": "2015-11-10T00:32:52Z",
58 | "acknowledger": {
59 | "id": "PXPGF42",
60 | "type": "user_reference",
61 | "summary": "Earline Greenholt",
62 | "self": "https://api.pagerduty.com/users/PXPGF42",
63 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
64 | }
65 | }
66 | ],
67 | "last_status_change_at": "2015-10-06T21:38:23Z",
68 | "last_status_change_by": {
69 | "id": "PXPGF42",
70 | "type": "user_reference",
71 | "summary": "Earline Greenholt",
72 | "self": "https://api.pagerduty.com/users/PXPGF42",
73 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
74 | },
75 | "first_trigger_log_entry": {
76 | "id": "Q02JTSNZWHSEKV",
77 | "type": "trigger_log_entry_reference",
78 | "summary": "Triggered through the API",
79 | "self": "https://api.pagerduty.com/log_entries/Q02JTSNZWHSEKV?incident_id=PT4KHLK",
80 | "html_url": "https://subdomain.pagerduty.com/incidents/PT4KHLK/log_entries/Q02JTSNZWHSEKV"
81 | },
82 | "escalation_policy": {
83 | "id": "PT20YPA",
84 | "type": "escalation_policy_reference",
85 | "summary": "Another Escalation Policy",
86 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
87 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
88 | },
89 | "teams": [
90 | {
91 | "id": "PQ9K7I8",
92 | "type": "team_reference",
93 | "summary": "Engineering",
94 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
95 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
96 | }
97 | ],
98 | "urgency": "high"
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/tests/fixtures/notification_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "notification_rules": [
3 | {
4 | "id": "PXPGF42",
5 | "type": "assignment_notification_rule",
6 | "summary": "Work",
7 | "self": "https://api.pagerduty.com/users/PXPGF42/notification_rules/PPSCXAN",
8 | "html_url": null,
9 | "start_delay_in_minutes": 0,
10 | "contact_method": {
11 | "id": "PXPGF42",
12 | "type": "contact_method_reference",
13 | "summary": "Work",
14 | "self": "https://api.pagerduty.com/users/PXPGF42/contact_methods/PXPGF42",
15 | "html_url": null
16 | },
17 | "created_at": "2016-02-01T16:06:27-05:00",
18 | "urgency": "high"
19 | }
20 | ]
21 | }
22 |
--------------------------------------------------------------------------------
/tests/fixtures/oncalls_filtered_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "oncalls": [
3 | {
4 | "user": {
5 | "id": "PT23IWX",
6 | "type": "user_reference",
7 | "summary": "Tim Wright",
8 | "self": "https://api.pagerduty.com/users/PT23IWX",
9 | "html_url": "https://subdomain.pagerduty.com/users/PT23IWX"
10 | },
11 | "schedule": {
12 | "id": "PI7DH85",
13 | "type": "schedule_reference",
14 | "summary": "Daily Engineering Rotation",
15 | "self": "https://api.pagerduty.com/schedules/PI7DH85",
16 | "html_url": "https://subdomain.pagerduty.com/schedules/PI7DH85"
17 | },
18 | "escalation_policy": {
19 | "id": "PT20YPA",
20 | "type": "escalation_policy_reference",
21 | "summary": "Engineering Escalation Policy",
22 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
23 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
24 | },
25 | "escalation_level": 2,
26 | "start": "2015-03-06T15:28:51-05:00",
27 | "end": "2015-03-07T15:28:51-05:00"
28 | }
29 | ],
30 | "limit": 25,
31 | "offset": 0,
32 | "more": false,
33 | "total": null
34 | }
35 |
--------------------------------------------------------------------------------
/tests/fixtures/oncalls_list_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "oncalls": [
3 | {
4 | "user": {
5 | "id": "PT23IWX",
6 | "type": "user_reference",
7 | "summary": "Tim Wright",
8 | "self": "https://api.pagerduty.com/users/PT23IWX",
9 | "html_url": "https://subdomain.pagerduty.com/users/PT23IWX"
10 | },
11 | "schedule": {
12 | "id": "PI7DH85",
13 | "type": "schedule_reference",
14 | "summary": "Daily Engineering Rotation",
15 | "self": "https://api.pagerduty.com/schedules/PI7DH85",
16 | "html_url": "https://subdomain.pagerduty.com/schedules/PI7DH85"
17 | },
18 | "escalation_policy": {
19 | "id": "PT20YPA",
20 | "type": "escalation_policy_reference",
21 | "summary": "Engineering Escalation Policy",
22 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
23 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
24 | },
25 | "escalation_level": 2,
26 | "start": "2015-03-06T15:28:51-05:00",
27 | "end": "2015-03-07T15:28:51-05:00"
28 | },
29 | {
30 | "user": {
31 | "id": "PT23IEW",
32 | "type": "user_reference",
33 | "summary": "Tim Wrong",
34 | "self": "https://api.pagerduty.com/users/PT23IEW",
35 | "html_url": "https://subdomain.pagerduty.com/users/PT23IEW"
36 | },
37 | "schedule": {
38 | "id": "PI7DD43",
39 | "type": "schedule_reference",
40 | "summary": "Daily Engineering Rotation",
41 | "self": "https://api.pagerduty.com/schedules/PI7DD43",
42 | "html_url": "https://subdomain.pagerduty.com/schedules/PI7DD43"
43 | },
44 | "escalation_policy": {
45 | "id": "PT20YPB",
46 | "type": "escalation_policy_reference",
47 | "summary": "Engineering Escalation Policy",
48 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPB",
49 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPB"
50 | },
51 | "escalation_level": 2,
52 | "start": "2015-03-06T15:28:51-05:00",
53 | "end": "2015-05-07T15:28:51-05:00"
54 | }
55 | ],
56 | "limit": 25,
57 | "offset": 0,
58 | "more": false,
59 | "total": null
60 | }
61 |
--------------------------------------------------------------------------------
/tests/fixtures/schedule_list_v1.json:
--------------------------------------------------------------------------------
1 | {
2 | "schedules": [
3 | {
4 | "id": "PWEVPB6",
5 | "name": "Primary",
6 | "time_zone": "Eastern Time (US & Canada)",
7 | "today": "2013-03-26",
8 | "escalation_policies": [
9 |
10 | ]
11 | },
12 | {
13 | "id": "PT57OLG",
14 | "name": "Secondary",
15 | "time_zone": "Eastern Time (US & Canada)",
16 | "today": "2013-03-26",
17 | "escalation_policies": [
18 |
19 | ]
20 | }
21 | ],
22 | "limit": 100,
23 | "offset": 0,
24 | "total": 2
25 | }
26 |
--------------------------------------------------------------------------------
/tests/fixtures/schedule_list_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "schedules": [
3 | {
4 | "id": "PI7DH85",
5 | "type": "schedule",
6 | "summary": "Daily Engineering Rotation",
7 | "self": "https://api.pagerduty.com/schedules/PI7DH85",
8 | "html_url": "https://contosso.pagerduty.com/schedules/PI7DH85",
9 | "name": "Daily Engineering Rotation",
10 | "time_zone": "America/New_York",
11 | "description": "Rotation schedule for engineering",
12 | "escalation_policies": [
13 | {
14 | "id": "PT20YPA",
15 | "type": "escalation_policy_reference",
16 | "summary": "Another Escalation Policy",
17 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
18 | "html_url": "https://contosso.pagerduty.com/escalation_policies/PT20YPA"
19 | }
20 | ],
21 | "users": [
22 | {
23 | "id": "PEYSGVF",
24 | "type": "user",
25 | "summary": "PagerDuty Admin",
26 | "self": "https://api.pagerduty.com/users/PEYSGVF",
27 | "html_url": "https://contosso.pagerduty.com/users/PEYSGVF"
28 | }
29 | ]
30 | }
31 | ],
32 | "limit": 100,
33 | "offset": 0,
34 | "total": null,
35 | "more": false
36 | }
37 |
--------------------------------------------------------------------------------
/tests/fixtures/schedule_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "schedule": {
3 | "id": "PI7DH85",
4 | "type": "schedule",
5 | "summary": "Daily Engineering Rotation",
6 | "self": "https://api.pagerduty.com/schedules/PI7DH85",
7 | "html_url": "https://subdomain.pagerduty.com/schedules/PI7DH85",
8 | "name": "Daily Engineering Rotation",
9 | "time_zone": "America/New_York",
10 | "description": "Rotation schedule for engineering",
11 | "escalation_policies": [
12 | {
13 | "id": "PT20YPA",
14 | "type": "escalation_policy_reference",
15 | "summary": "Another Escalation Policy",
16 | "self": "https://api.pagerduty.com/escalation_policies/PT20YPA",
17 | "html_url": "https://subdomain.pagerduty.com/escalation_policies/PT20YPA"
18 | }
19 | ],
20 | "users": [
21 | {
22 | "id": "PXPGF42",
23 | "type": "user",
24 | "summary": "Regina Phalange",
25 | "self": "https://api.pagerduty.com/users/PXPGF42",
26 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
27 | }
28 | ],
29 | "schedule_layers": [
30 | {
31 | "name": "Layer 1",
32 | "rendered_schedule_entries": [
33 | {
34 | "start": "2015-11-09T08:00:00-05:00",
35 | "end": "2015-11-09T17:00:00-05:00",
36 | "user": {
37 | "id": "PXPGF42",
38 | "type": "user",
39 | "summary": "Regina Phalange",
40 | "self": "https://api.pagerduty.com/users/PXPGF42",
41 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
42 | }
43 | }
44 | ],
45 | "rendered_coverage_percentage": 37.5,
46 | "id": "PG68P1M",
47 | "start": "2015-11-06T21:00:00-05:00",
48 | "end": null,
49 | "rotation_virtual_start": "2015-11-06T20:00:00-05:00",
50 | "rotation_turn_length_seconds": 86400,
51 | "users": [
52 | {
53 | "id": "PXPGF42",
54 | "type": "user",
55 | "summary": "Regina Phalange",
56 | "self": "https://api.pagerduty.com/users/PXPGF42",
57 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
58 | }
59 | ],
60 | "restrictions": [
61 | {
62 | "type": "daily_restriction",
63 | "start_time_of_day": "08:00:00",
64 | "duration_seconds": 32400
65 | }
66 | ]
67 | }
68 | ],
69 | "overrides_subschedule": {
70 | "name": "Overrides",
71 | "rendered_schedule_entries": [],
72 | "rendered_coverage_percentage": 0
73 | },
74 | "final_schedule": {
75 | "name": "Final Schedule",
76 | "rendered_schedule_entries": [
77 | {
78 | "start": "2015-11-10T08:00:00-05:00",
79 | "end": "2015-11-10T17:00:00-05:00",
80 | "user": {
81 | "id": "PXPGF42",
82 | "type": "user",
83 | "summary": "Regina Phalange",
84 | "self": "https://api.pagerduty.com/users/PXPGF42",
85 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42"
86 | }
87 | }
88 | ],
89 | "rendered_coverage_percentage": 37.5
90 | }
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/tests/fixtures/user_v1.json:
--------------------------------------------------------------------------------
1 | {
2 | "user": {
3 | "avatar_url": "https://secure.gravatar.com/avatar/6e1b6fc29a03fc3c13756bd594e314f7.png?d=mm&r=PG",
4 | "color": "dark-slate-grey",
5 | "email": "bart@example.com",
6 | "id": "PIJ90N7",
7 | "invitation_sent": true,
8 | "job_title": "Developer",
9 | "marketing_opt_out": true,
10 | "name": "Bart Simpson",
11 | "role": "admin",
12 | "time_zone": "Eastern Time (US & Canada)",
13 | "user_url": "/users/PIJ90N7"
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/tests/fixtures/user_v2.json:
--------------------------------------------------------------------------------
1 | {
2 | "user": {
3 | "id": "PXPGF42",
4 | "type": "user",
5 | "summary": "Earline Greenholt",
6 | "self": "https://api.pagerduty.com/users/PXPGF42",
7 | "html_url": "https://subdomain.pagerduty.com/users/PXPGF42",
8 | "name": "Earline Greenholt",
9 | "email": "125.greenholt.earline@graham.name",
10 | "time_zone": "America/Lima",
11 | "color": "green",
12 | "role": "admin",
13 | "avatar_url": "https://secure.gravatar.com/avatar/a8b714a39626f2444ee05990b078995f.png?d=mm&r=PG",
14 | "description": "I'm the boss",
15 | "invitation_sent": false,
16 | "contact_methods": [
17 | {
18 | "id": "PTDVERC",
19 | "type": "email_contact_reference",
20 | "summary": "Default",
21 | "self": "https://api.pagerduty.com/users/PXPGF42/contact_methods/PTDVERC",
22 | "html_url": null
23 | }
24 | ],
25 | "notification_rules": [
26 | {
27 | "id": "P8GRWKK",
28 | "start_delay_in_minutes": 0,
29 | "created_at": "2015-12-11T23:38:11.000Z",
30 | "contact_method": {
31 | "id": "PTDVERC",
32 | "type": "email_contact_reference",
33 | "summary": "Default",
34 | "self": "https://api.pagerduty.com/users/PXPGF42/contact_methods/PTDVERC",
35 | "html_url": null
36 | },
37 | "urgency": "low",
38 | "type": "assignment_notification_rule"
39 | }
40 | ],
41 | "job_title": "Director of Engineering",
42 | "teams": [
43 | {
44 | "id": "PQ9K7I8",
45 | "type": "team_reference",
46 | "summary": "Engineering",
47 | "self": "https://api.pagerduty.com/teams/PQ9K7I8",
48 | "html_url": "https://subdomain.pagerduty.com/teams/PQ9K7I8"
49 | }
50 | ]
51 | }
52 | }
53 |
54 |
--------------------------------------------------------------------------------
/tests/incident_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import httpretty
4 | import pygerduty.v2
5 | import textwrap
6 |
7 | ###################
8 | # Version 2 Tests #
9 | ###################
10 |
@httpretty.activate
def test_get_incident_v2():
    """Fetch one incident by ID via the v2 API and verify key fields.

    Uses a `with` block so the fixture file handle is closed deterministically
    (the previous `open(...).read()` leaked the handle).
    """
    with open('tests/fixtures/get_incident_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/incidents/PT4KHLK",
        body=body, status=200)
    p = pygerduty.v2.PagerDuty("password")
    incident = p.incidents.show("PT4KHLK")

    assert incident.self_ == 'https://api.pagerduty.com/incidents/PT4KHLK'
    assert len(incident.pending_actions) == 2
    assert incident.service.type == 'generic_email_reference'
    assert len(incident.assignments) == 1
24 |
25 |
@httpretty.activate
def test_list_incidents_v2():
    """List incidents and follow pagination until the empty second page.

    The second mocked response has `"more": false` and no incidents, which
    stops the paginator. Fixture file is opened with `with` to avoid leaking
    the handle.
    """
    with open('tests/fixtures/incident_list_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/incidents", responses=[
            httpretty.Response(body=body, status=200),
            httpretty.Response(body=textwrap.dedent("""\
                {
                    "limit": 25,
                    "more": false,
                    "offset": 1,
                    "incidents": [],
                    "total": null
                }
            """), status=200),
        ],
    )

    p = pygerduty.v2.PagerDuty("password")
    incidents = [s for s in p.incidents.list()]

    assert len(incidents) == 1
    assert incidents[0].created_at == '2015-10-06T21:30:42Z'
    assert incidents[0].self_ == 'https://api.pagerduty.com/incidents/PT4KHLK'
    assert len(incidents[0].pending_actions) == 2
51 |
52 |
@httpretty.activate
def test_verb_action_v2():
    """Acknowledge an incident and check the next GET reflects the change.

    The GET endpoint is registered with two responses: pre-acknowledge and
    post-acknowledge bodies. Fixture files are opened with `with` so their
    handles are closed (previously leaked).
    """
    with open('tests/fixtures/incident_get_v2.json') as f:
        body1 = f.read()
    with open('tests/fixtures/incident_put_v2.json') as f:
        body2 = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/incidents/PT4KHLK", responses=[
            httpretty.Response(body=body1, status=200),
            httpretty.Response(body=body2, status=200),
        ],
    )
    httpretty.register_uri(
        httpretty.PUT, "https://api.pagerduty.com/incidents/PT4KHLK",
        body=body2, status=200)
    p = pygerduty.v2.PagerDuty("password")
    incident1 = p.incidents.show('PT4KHLK')
    incident1.acknowledge(requester='eg@sample.com')
    incident2 = p.incidents.show('PT4KHLK')

    assert incident1.acknowledgements == []
    assert incident2.acknowledgements[0].acknowledger.id == 'PXPGF42'
73 |
74 |
@httpretty.activate
def test_snooze_v2():
    """Snooze an incident (POST /snooze) and verify the follow-up GET.

    Fixture files are opened with `with` to close handles deterministically
    (previously leaked via bare `open(...).read()`).
    """
    with open('tests/fixtures/incident_get_v2.json') as f:
        body1 = f.read()
    with open('tests/fixtures/incident_snooze.json') as f:
        body2 = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/incidents/PT4KHLK", responses=[
            httpretty.Response(body=body1, status=200),
            httpretty.Response(body=body2, status=200),
        ],
    )

    httpretty.register_uri(
        httpretty.POST, 'https://api.pagerduty.com/incidents/PT4KHLK/snooze',
        body=body2, status=200)

    p = pygerduty.v2.PagerDuty("password")
    incident1 = p.incidents.show('PT4KHLK')
    incident1.snooze(requester='test@dropbox.com', duration=2000)
    incident2 = p.incidents.show('PT4KHLK')

    assert incident2.self_ == "https://api.pagerduty.com/incidents/PT4KHLK"
    assert len(incident2.pending_actions) == 2
    assert incident2.service.type == 'generic_email_reference'
    assert len(incident2.assignments) == 1
99 |
100 |
@httpretty.activate
def test_reassign_v2():
    """Reassign an incident to two users and verify assignments change.

    GET returns the pre-assignment body first, then the post-assignment body.
    All fixture files are opened with `with` to avoid leaking handles.
    """
    with open('tests/fixtures/incident_preassign.json') as f:
        body1 = f.read()
    with open('tests/fixtures/incident_postassign.json') as f:
        body2 = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/incidents/PT4KHLK", responses=[
            httpretty.Response(body=body1, status=200),
            httpretty.Response(body=body2, status=200),
        ],
    )
    with open('tests/fixtures/incident_reassign.json') as f:
        body3 = f.read()
    httpretty.register_uri(
        httpretty.PUT, 'https://api.pagerduty.com/incidents',
        body=body3, status=200)

    p = pygerduty.v2.PagerDuty("password")
    incident1 = p.incidents.show('PT4KHLK')
    incident1.reassign(user_ids=['PXPGF42', 'PG23GSK'], requester='test@dropbox.com')
    incident2 = p.incidents.show('PT4KHLK')

    assert len(incident1.assignments) == 0
    assert len(incident2.assignments) == 2
123 |
--------------------------------------------------------------------------------
/tests/oncalls_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import httpretty
4 | import pygerduty.v2
5 | import textwrap
6 |
7 | ###################
8 | # Version 2 Tests #
9 | ###################
10 |
11 |
@httpretty.activate
def test_list_oncalls_v2():
    """List all oncalls and verify the user/schedule/policy references.

    Pagination stops at the empty second page (`"more": false`). The fixture
    is read with `with` so the handle is closed (previously leaked).
    """
    with open('tests/fixtures/oncalls_list_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/oncalls", responses=[
            httpretty.Response(body=body, status=200),
            httpretty.Response(body=textwrap.dedent("""\
                {
                    "limit": 25,
                    "more": false,
                    "offset": 1,
                    "oncalls": [],
                    "total": null
                }
            """), status=200),
        ],
    )

    p = pygerduty.v2.PagerDuty("password")
    oncalls = [s for s in p.oncalls.list()]

    assert len(oncalls) == 2
    assert oncalls[0].user.type == 'user_reference'
    assert oncalls[0].user.self_ == 'https://api.pagerduty.com/users/PT23IWX'
    assert oncalls[0].schedule.type == 'schedule_reference'
    assert oncalls[0].schedule.self_ == 'https://api.pagerduty.com/schedules/PI7DH85'
    assert oncalls[0].escalation_policy.type == 'escalation_policy_reference'
    assert oncalls[0].escalation_policy.self_ == 'https://api.pagerduty.com/escalation_policies/PT20YPA'
    assert oncalls[0].start == '2015-03-06T15:28:51-05:00'
    assert oncalls[0].end == '2015-03-07T15:28:51-05:00'

    assert oncalls[1].user.type == 'user_reference'
    assert oncalls[1].user.self_ == 'https://api.pagerduty.com/users/PT23IEW'
    assert oncalls[1].schedule.type == 'schedule_reference'
    assert oncalls[1].schedule.self_ == 'https://api.pagerduty.com/schedules/PI7DD43'
    assert oncalls[1].escalation_policy.type == 'escalation_policy_reference'
    assert oncalls[1].escalation_policy.self_ == 'https://api.pagerduty.com/escalation_policies/PT20YPB'
    assert oncalls[1].start == '2015-03-06T15:28:51-05:00'
    assert oncalls[1].end == '2015-05-07T15:28:51-05:00'
51 |
@httpretty.activate
def test_list_oncalls_filtered_v2():
    """List oncalls filtered by schedule ID; only the matching entry returns.

    Fixture file is opened with `with` to close the handle deterministically.
    """
    with open('tests/fixtures/oncalls_filtered_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/oncalls", responses=[
            httpretty.Response(body=body, status=200),
            httpretty.Response(body=textwrap.dedent("""\
                {
                    "limit": 25,
                    "more": false,
                    "offset": 1,
                    "oncalls": [],
                    "total": null
                }
            """), status=200),
        ],
    )

    p = pygerduty.v2.PagerDuty("password")
    oncalls = [s for s in p.oncalls.list(schedule_ids=['PI7DH85'])]

    assert len(oncalls) == 1
    assert oncalls[0].user.type == 'user_reference'
    assert oncalls[0].user.self_ == 'https://api.pagerduty.com/users/PT23IWX'
    assert oncalls[0].schedule.type == 'schedule_reference'
    assert oncalls[0].schedule.self_ == 'https://api.pagerduty.com/schedules/PI7DH85'
    assert oncalls[0].escalation_policy.type == 'escalation_policy_reference'
    assert oncalls[0].escalation_policy.self_ == 'https://api.pagerduty.com/escalation_policies/PT20YPA'
    assert oncalls[0].start == '2015-03-06T15:28:51-05:00'
    assert oncalls[0].end == '2015-03-07T15:28:51-05:00'
82 |
def test_oncall_ids():
    """An Oncall's id is 'user:schedule:escalation_policy', with an empty
    segment for any component that is missing or None."""
    p = pygerduty.v2.PagerDuty("password")
    collection = pygerduty.v2.Collection(p)

    # (constructor kwargs, expected synthesized id) — covers both explicit
    # None components and omitted components.
    cases = [
        (dict(user=None,
              schedule={'id': 'schedule'},
              escalation_policy={'id': 'escalation_policy'}),
         ':schedule:escalation_policy'),
        (dict(user={'id': 'user'},
              schedule=None,
              escalation_policy={'id': 'escalation_policy'}),
         'user::escalation_policy'),
        (dict(user={'id': 'user'},
              schedule={'id': 'schedule'},
              escalation_policy=None),
         'user:schedule:'),
        (dict(schedule={'id': 'schedule'},
              escalation_policy={'id': 'escalation_policy'}),
         ':schedule:escalation_policy'),
        (dict(user={'id': 'user'},
              escalation_policy={'id': 'escalation_policy'}),
         'user::escalation_policy'),
        (dict(user={'id': 'user'},
              schedule={'id': 'schedule'}),
         'user:schedule:'),
    ]

    for kwargs, expected in cases:
        oncall = pygerduty.v2.Oncall(collection=collection, **kwargs)
        assert oncall.id == expected
125 |
--------------------------------------------------------------------------------
/tests/schedule_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import httpretty
4 | import pygerduty.v2
5 | import textwrap
6 |
7 | ###################
8 | # Version 2 Tests #
9 | ###################
10 |
@httpretty.activate
def test_get_schedule_v2():
    """Fetch one schedule by ID and verify users and layers from the fixture.

    The fixture is read inside a `with` block so the file handle is closed
    (previously leaked).
    """
    with open('tests/fixtures/schedule_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/schedules/PI7DH85",
        body=body, status=200)
    p = pygerduty.v2.PagerDuty("password")
    schedule = p.schedules.show("PI7DH85")

    assert schedule.self_ == "https://api.pagerduty.com/schedules/PI7DH85"
    assert len(schedule.schedule_users) == 1
    assert len(schedule.schedule_layers) == 1
    assert schedule.schedule_layers[0].start == "2015-11-06T21:00:00-05:00"
24 |
25 |
@httpretty.activate
def test_list_schedules_v2():
    """List schedules, paginating until the empty second page.

    Fixture file is opened with `with` to close the handle deterministically.
    """
    with open('tests/fixtures/schedule_list_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/schedules", responses=[
            httpretty.Response(body=body, status=200),
            httpretty.Response(body=textwrap.dedent("""\
                {
                    "limit": 25,
                    "more": false,
                    "offset": 1,
                    "schedules": [],
                    "total": null
                }
            """), status=200),
        ]
    )
    p = pygerduty.v2.PagerDuty("password")
    schedules = [s for s in p.schedules.list()]

    assert len(schedules) == 1
    assert schedules[0].name == 'Daily Engineering Rotation'
    assert schedules[0].self_ == 'https://api.pagerduty.com/schedules/PI7DH85'
    assert len(schedules[0].escalation_policies) == 1
50 |
--------------------------------------------------------------------------------
/tests/user_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import httpretty
4 | import pygerduty
5 | import pygerduty.v2
6 |
7 | ###################
8 | # Version 1 Tests #
9 | ###################
10 |
@httpretty.activate
def test_get_user_v1():
    """Fetch one user via the v1 (subdomain) API and verify basic fields.

    The fixture is read inside a `with` block so the file handle is closed
    (previously leaked).
    """
    with open('tests/fixtures/user_v1.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://contosso.pagerduty.com/api/v1/users/PIJ90N7",
        body=body, status=200)

    p = pygerduty.PagerDuty("contosso", "password")
    user = p.users.show("PIJ90N7")

    assert user.id == "PIJ90N7"
    assert user.name == "Bart Simpson"
    assert user.role == "admin"
24 |
@httpretty.activate
def test_list_user_contact_methods_v1():
    """List a v1 user's contact methods and count each contact type.

    Fixes two defects: fixture handles were leaked via bare `open(...).read()`,
    and a stray trailing comma after the first `register_uri(...)` call turned
    that statement into an accidental one-element tuple.
    """
    with open('tests/fixtures/user_v1.json') as f:
        user_body = f.read()
    with open('tests/fixtures/contacts_v1.json') as f:
        contact_body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://contosso.pagerduty.com/api/v1/users/PIJ90N7",
        body=user_body, status=200)
    httpretty.register_uri(
        httpretty.GET, "https://contosso.pagerduty.com/api/v1/users/PIJ90N7/contact_methods",
        body=contact_body, status=200)

    p = pygerduty.PagerDuty("contosso", "password")
    user = p.users.show("PIJ90N7")
    contact_methods = [c for c in user.contact_methods.list()]

    assert len(contact_methods) == 3
    assert len([c for c in contact_methods if c.type == "email"]) == 1
    assert len([c for c in contact_methods if c.type == "phone"]) == 1
    assert len([c for c in contact_methods if c.type == "SMS"]) == 1
44 |
45 | ###################
46 | # Version 2 Tests #
47 | ###################
48 |
@httpretty.activate
def test_get_user_v2():
    """Fetch one user via the v2 API and verify basic fields.

    Fixture file is opened with `with` to close the handle deterministically.
    """
    with open('tests/fixtures/user_v2.json') as f:
        body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/users/PXPGF42",
        body=body, status=200)

    p = pygerduty.v2.PagerDuty("password")
    user = p.users.show("PXPGF42")

    assert user.id == "PXPGF42"
    assert user.name == "Earline Greenholt"
    assert user.role == "admin"
    assert user.self_ == 'https://api.pagerduty.com/users/PXPGF42'
63 |
@httpretty.activate
def test_list_user_contact_methods_v2():
    """List a v2 user's contact methods and count each contact type.

    Fixture files are opened with `with` so the handles are closed
    (previously leaked via bare `open(...).read()`).
    """
    with open('tests/fixtures/user_v2.json') as f:
        user_body = f.read()
    with open('tests/fixtures/contacts_v2.json') as f:
        contact_body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/users/PXPGF42",
        body=user_body, status=200)
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/users/PXPGF42/contact_methods",
        body=contact_body, status=200)

    p = pygerduty.v2.PagerDuty("password")

    user = p.users.show("PXPGF42")

    contact_methods = [c for c in user.contact_methods.list()]

    assert len(contact_methods) == 3
    assert len([c for c in contact_methods if c.type == "email"]) == 1
    assert len([c for c in contact_methods if c.type == "phone"]) == 1
    assert len([c for c in contact_methods if c.type == "SMS"]) == 1
    assert user.self_ == 'https://api.pagerduty.com/users/PXPGF42'
86 |
@httpretty.activate
def test_user_notification_rules_v2():
    """List a v2 user's notification rules and verify their type.

    Fixture files are opened with `with` so the handles are closed
    (previously leaked via bare `open(...).read()`).
    """
    with open('tests/fixtures/user_v2.json') as f:
        user_body = f.read()
    with open('tests/fixtures/notification_v2.json') as f:
        notification_body = f.read()
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/users/PXPGF42",
        body=user_body, status=200)
    httpretty.register_uri(
        httpretty.GET, "https://api.pagerduty.com/users/PXPGF42/notification_rules",
        body=notification_body, status=200)

    p = pygerduty.v2.PagerDuty("password")
    user = p.users.show("PXPGF42")

    notification_rules = [n for n in user.notification_rules.list()]

    assert len(notification_rules) == 1
    assert len([n for n in notification_rules if n.type == "assignment_notification_rule"]) == 1
    assert user.self_ == "https://api.pagerduty.com/users/PXPGF42"
106 |
107 |
def test_clean_response():
    """clean_response renames the reserved key "self" to "self_" at every
    nesting level (top-level dict, list elements, and nested dicts), leaving
    all other keys and values untouched."""
    # Raw API-shaped payload: "self" keys appear at three depths.
    mock_response = {
        "user" : {
            "id": "PHDGK84",
            "type": "user",
            "self": "https://api.pagerduty.com/users/PHDGK84",
            "name": "Snoopy",
            "contact_methods": [
                {
                    "address": "betty@example.com",
                    "id": "PZMO0JF",
                    "self": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMO0JF",
                    "label": "Default"
                },
                {
                    "address": "8928393498",
                    "id": "PZMN843",
                    "self": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMN843",
                    "label": "Default"
                }
            ],
            "notification_rules": [
                {
                    "id": "P8WETWW",
                    "contact_method": {
                        "id": "PZMO0JF",
                        "self": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMO0JF",
                    }
                }
            ]
        }
    }
    clean_response = pygerduty.common.clean_response(mock_response)

    # Expected result: identical structure, but every "self" key is "self_".
    assert clean_response == {
        "user" : {
            "id": "PHDGK84",
            "type": "user",
            "self_": "https://api.pagerduty.com/users/PHDGK84",
            "name": "Snoopy",
            "contact_methods": [
                {
                    "address": "betty@example.com",
                    "id": "PZMO0JF",
                    "self_": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMO0JF",
                    "label": "Default"
                },
                {
                    "address": "8928393498",
                    "id": "PZMN843",
                    "self_": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMN843",
                    "label": "Default"
                }
            ],
            "notification_rules": [
                {
                    "id": "P8WETWW",
                    "contact_method": {
                        "id": "PZMO0JF",
                        "self_": "https://api.pagerduty.com/users/PHDGK84/contact_method/PZMO0JF",
                    }
                }
            ]
        }
    }
173 |
174 |
175 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | project = pygerduty
3 |
4 | # Keep in-sync with .travis.yml.
5 | envlist = py27,py36
6 |
7 | [testenv]
8 | install_command = pip install {opts} {packages}
9 | deps = flake8
10 | commands =
11 | flake8 {[tox]project} setup.py
12 | {envpython} setup.py test
13 |
14 | [flake8]
15 | ignore = E265,E309,E501,F401,W504
16 |
--------------------------------------------------------------------------------