├── README.md
├── hapi-1.0.0
└── HAPI-data-access-schema-1.0.0.json
├── hapi-1.1.0
├── HAPI-data-access-schema-1.1.0.json
├── HAPI-data-access-schema-1.1.0.md
├── HAPI-data-access-spec-1.1.0.md
└── HAPI-data-access-spec-1.1.0.pdf
├── hapi-2.0.0
├── HAPI-data-access-spec-2.0.0.md
├── HAPI-data-access-spec-2.0.0.pdf
└── changelog.md
├── hapi-2.1.0
├── HAPI-data-access-spec-2.1.0.md
├── HAPI-data-access-spec-2.1.0.pdf
└── changelog.md
├── hapi-2.1.1
├── HAPI-data-access-spec-2.1.1.md
├── HAPI-data-access-spec-2.1.1.pdf
└── changelog.md
├── hapi-3.0.0
├── HAPI-data-access-spec-3.0.0.md
├── HAPI-data-access-spec-3.0.0.pdf
└── changelog.md
├── hapi-3.0.1
├── HAPI-data-access-spec-3.0.1.md
└── changelog.md
├── hapi-3.1.0
├── HAPI-data-access-spec-3.1.0.md
├── HAPI-data-access-spec-3.1.0.pdf
└── changelog.md
├── hapi-3.2.0
├── HAPI-data-access-spec-3.2.0.md
├── HAPI-data-access-spec-3.2.0.pdf
└── changelog.md
└── hapi-dev
├── HAPI-data-access-spec-dev.md
└── changelog.md
/README.md:
--------------------------------------------------------------------------------
1 | HAPI Data Access Specification
2 | ==============================
3 |
4 | The Heliophysics Application Programmer's Interface (HAPI) data access specification is a RESTful API and streaming format specification for delivering digital time series data.
5 |
6 | The HAPI specification describes a minimum set of capabilities needed for a server to allow access to the time series data values within one or more data collections.
7 |
8 | **Current stable release:** Version 3.2.0
9 | ([PDF](https://github.com/hapi-server/data-specification/raw/master/hapi-3.2.0/HAPI-data-access-spec-3.2.0.pdf)) ([HTML](https://github.com/hapi-server/data-specification/blob/master/hapi-3.2.0/HAPI-data-access-spec-3.2.0.md))
10 |
11 | Current [draft version](https://github.com/hapi-server/data-specification/blob/master/hapi-dev/HAPI-data-access-spec-dev.md)
12 |
13 | For a list of clients, servers, verifiers, and other tools, see http://hapi-server.org/
14 |
--------------------------------------------------------------------------------
/hapi-1.0.0/HAPI-data-access-schema-1.0.0.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-04/schema#",
3 | "title": "HAPI Header V1.0",
4 | "description": "dataset description for a HAPI-compliant server",
5 | "definitions" : {
6 | "datetime" : {
7 | "description" : "multiple regular expressions to handle all allowed day-of-year and year-month-day date formats",
8 | "anyOf" : [
9 | { "type" : "string", "pattern" : "^[0-9]{4}Z?$"},
10 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{3}T?Z?$"},
11 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,3}T[0-9]{1,2}Z?$"},
12 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,3}T[0-9]{1,2}:[0-9]{1,2}Z?$"},
13 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,3}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\\.?Z?$"},
14 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,3}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\\.[0-9]*Z?$"},
15 |
16 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T?Z?$"},
17 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}Z?$"},
18 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}Z?$"},
19 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\\.?Z?$"},
20 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\\.[0-9]*Z?$"}
21 | ]
22 | }
23 | },
24 | "type": "object",
25 | "properties": {
26 | "HAPI": {
27 | "description": "version of the server API",
28 | "type": "string"
29 | },
30 | "creationDate": {
31 | "description": "the date and time when this dataset was created",
32 | "$ref": "#/definitions/datetime"
33 | },
34 | "modificationDate": {
35 | "description": "the date and time when this dataset was last modified",
36 | "$ref": "#/definitions/datetime"
37 | },
38 | "contact": {
39 | "description": "the name of a person to contact for more information about the dataset",
40 | "type": "string"
41 | },
42 | "contactID": {
43 | "description": "the identifier for the contact person in an external data system",
44 | "type": "string"
45 | },
46 | "startDate": {
47 | "description": "the start date and time of the dataset",
48 | "$ref": "#/definitions/datetime"
49 | },
50 | "stopDate": {
51 | "description": "the end date and time of the dataset",
52 | "$ref": "#/definitions/datetime"
53 | },
54 | "format": {
55 | "description": "required when the header is a prefix to data and not allowed otherwise; this indicates whether the data following the header is csv or binary",
56 | "enum": [
57 | "csv",
58 | "binary",
59 | "json"
60 | ]
61 | },
62 | "sampleStartDate": {
63 | "description": "a sample start time for illustrative data in the dataset",
64 | "$ref": "#/definitions/datetime"
65 | },
66 | "sampleEndDate": {
67 | "description": "a sample end time for illustrative data in the dataset",
68 | "$ref": "#/definitions/datetime"
69 | },
70 | "description": {
71 | "description": "a short description for the contents of the dataset",
72 | "type": "string"
73 | },
74 | "resourceURL": {
75 | "description": "a link to more information about the dataset",
76 | "type": "string"
77 | },
78 | "resourceID": {
79 | "description": "an external identifier by which other datasets might know this dataset, such as a SPASE ID",
80 | "type": "string"
81 | },
82 | "parameters": {
83 | "type": "array",
84 | "items": {
85 | "type": "object",
86 | "properties": {
87 | "name": {
88 | "type": "string"
89 | },
90 | "type": {
91 | "enum": [
92 | "integer",
93 | "double",
94 | "isotime",
95 | "string"
96 | ]
97 | },
98 | "units": {
99 | "type": "string"
100 | },
101 | "size": {
102 | "type": "array",
103 | "items": {
104 | "type": "integer"
105 | },
106 | "minItems": 1
107 | },
108 | "length": {
109 | "type": "integer",
110 | "minimum": 1
111 | },
112 | "fill": {
113 | "type": "string"
114 | },
115 | "description": {
116 | "type": "string"
117 | },
118 | "bins": {
119 | "type" : "array",
120 | "items": {
121 | "type": "object",
122 | "properties": {
123 | "units": {
124 | "type": "string"
125 | },
126 | "name": {
127 | "type" : "string"
128 | },
129 | "description": {
130 | "type" : "string"
131 | },
132 | "centers" : {
133 | "type": "array",
134 | "items" : { "type" : "number" }
135 | },
136 | "ranges" : {
137 | "type": "array",
138 | "items" : {
139 | "type" : "array",
140 | "items": {"type" : "number"},
141 | "minItems" : 2,
142 | "maxItems" : 2
143 | }
144 | }
145 | },
146 | "required": [
147 | "name"
148 | ],
149 | "additionalProperties": false
150 | }
151 | }
152 | },
153 | "required": [
154 | "name",
155 | "type"
156 | ],
157 | "additionalProperties": false
158 | },
159 | "minItems": 1,
160 | "uniqueItems": true
161 | }
162 | },
163 | "required": [
164 | "HAPI",
165 | "startDate",
166 | "stopDate",
167 | "creationDate",
168 | "parameters"
169 | ],
170 | "additionalProperties": false
171 | }
172 |
--------------------------------------------------------------------------------
/hapi-1.1.0/HAPI-data-access-schema-1.1.0.json:
--------------------------------------------------------------------------------
1 | {
2 | "HAPI": {
3 | "id": "/HAPI",
4 | "description": "HAPI Version",
5 | "type": "string",
6 | "pattern": "^1.1$"
7 | },
8 | "HAPIDateTime": {
9 | "id": "/HAPIDateTime",
10 | "description" : "Regular expressions to handle all allowed HAPI date/time formats (a subset of ISO 8601)",
11 | "anyOf" : [
12 | { "type" : "string", "pattern" : "^[0-9]{4}Z?$"},
13 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{3}T?Z?$"},
14 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,3}T[0-9]{1,2}Z?$"},
15 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,3}T[0-9]{1,2}:[0-9]{1,2}Z?$"},
16 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,3}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\\.?Z?$"},
17 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,3}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\\.[0-9]*Z?$"},
18 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T?Z?$"},
19 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}Z?$"},
20 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}Z?$"},
21 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\\.?Z?$"},
22 | { "type" : "string", "pattern" : "^[0-9]{4}-[0-9]{1,2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}\\.[0-9]*Z?$"}
23 | ]
24 | },
25 | "HAPIStatus": {
26 | "id": "/HAPIStatus",
27 | "title": "info",
28 | "description": "Request status",
29 | "type": "object",
30 | "properties": {
31 | "required": ["HAPI","status"],
32 | "HAPI": {"$ref": "/HAPI"},
33 | "status": {
34 | "description": "Request status",
35 | "type": "object",
36 | "required": ["code","message"],
37 | "properties": {
38 | "code": {
39 | "description": "HAPI request status code",
40 | "type": "integer",
41 | "enum": [1200,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1500,1501]
42 | },
43 | "message": {
44 | "description": "HAPI request status message",
45 | "type": "string"
46 | }
47 | }
48 | }
49 | }
50 | },
51 | "capabilities": {
52 | "title": "capabilities",
53 | "description": "Server /capabilities response",
54 | "type": "object",
55 | "required": ["HAPI","outputFormats","status"],
56 | "properties": {
57 | "HAPI": {"$ref": "/HAPI"},
58 | "status": {"$ref": "/HAPIStatus"},
59 | "outputFormats": {
60 | "description": "Output formats",
61 | "type": "array",
62 | "items": {"type": "string"},
63 | "minItems": 1,
64 | "additionalItems": true,
65 | "uniqueItems": true
66 | }
67 | }
68 | },
69 | "catalog": {
70 | "title": "catalog",
71 | "description": "Server /catalog response",
72 | "type": "object",
73 | "required": ["HAPI","status","catalog"],
74 | "properties": {
75 | "HAPI": {"$ref": "/HAPI"},
76 | "status": {"$ref": "/HAPIStatus"},
77 | "catalog": {
78 | "description": "Catalog of datasets",
79 | "type": "array",
80 | "minItems": 1,
81 | "additionalItems": true,
82 | "uniqueItems": true,
83 | "items": {"type": "object", "required": ["id"]},
84 | "properties": {
85 | "id": {
86 | "description": "Dataset ID",
87 | "type": "string",
88 | "uniqueItems": true
89 | },
90 | "title": {
91 | "description": "Optional dataset title",
92 | "type": "string",
93 | "uniqueItems": true
94 | }
95 | }
96 | }
97 | }
98 | },
99 | "info": {
100 | "title": "info",
101 | "description": "Server /info response",
102 | "type": "object",
103 | "required": ["HAPI","status","startDate","stopDate","parameters"],
104 | "dependencies": {
105 | "sampleStartDate": ["sampleStopDate"],
106 | "sampleStopDate": ["sampleStartDate"]
107 | },
108 | "properties": {
109 | "HAPI": {"$ref": "/HAPI"},
110 | "status": {"$ref": "/HAPIStatus"},
111 | "format": {
112 | "type": "string"
113 | },
114 | "startDate": {
115 | "description": "start date/time of parameter data",
116 | "$ref": "/HAPIDateTime"
117 | },
118 | "stopDate": {
119 |         "description": "stop date/time of parameter data",
120 | "$ref": "/HAPIDateTime"
121 | },
122 | "sampleStartDate": {
123 | "description": "start of a sample time period for a dataset, where the time period should contain a manageable, representative example of valid, non-fill data",
124 | "$ref": "/HAPIDateTime"
125 | },
126 | "sampleStopDate": {
127 | "description": "stop date/time associated with sampleStartDate",
128 | "$ref": "/HAPIDateTime"
129 | },
130 | "cadence": {
131 | "description": "Time difference between records as an ISO 8601 duration. This is meant as a guide to the nominal cadence of the data and not necessarily a precise statement about the time between measurements.",
132 | "type": "string"
133 | },
134 | "description": {
135 | "description": "a brief description of the dataset",
136 | "type": "string"
137 | },
138 | "resourceURL": {
139 | "description": "a link to more information about the dataset",
140 | "type": "string"
141 | },
142 | "resourceID": {
143 | "description": "an external identifier by which other datasets might know this dataset, such as a DOI or SPASE ID",
144 | "type": "string"
145 | },
146 | "creationDate": {
147 | "description": "the date and time when this dataset was created",
148 | "$ref": "/HAPIDateTime"
149 | },
150 | "modificationDate": {
151 | "description": "the date and time when this dataset was last modified",
152 | "$ref": "/HAPIDateTime"
153 | },
154 | "contact": {
155 | "description": "the name of a person to contact for more information about the dataset",
156 | "type": "string"
157 | },
158 | "contactID": {
159 | "description": "the identifier for the contact person in an external data system",
160 | "type": "string"
161 | },
162 | "parameters": {
163 | "description": "Parameters in dataset",
164 | "type": "array",
165 | "minItems": 1,
166 | "additionalItems": true,
167 | "uniqueItems": true,
168 | "items": {
169 | "type": "object",
170 | "required": ["name","type","units","fill"],
171 | "properties": {
172 | "name": {
173 | "description": "Parameter name",
174 | "type": "string"
175 | },
176 | "type": {
177 | "description": "Parameter type",
178 | "type": "string",
179 | "enum": ["string","double","integer","isotime"]
180 | },
181 | "units": {
182 | "description": "Parameter units",
183 | "type": ["string","null"]
184 | },
185 | "length": {
186 | "description": "Number of characters in parameter (isotime and string parameters only)",
187 | "type": "integer"
188 | },
189 | "size": {
190 | "description": "Parameter size",
191 | "type": "array",
192 | "items": {"type": "integer"},
193 | "minItems": 1,
194 | "minimum": 1
195 | },
196 | "fill": {
197 | "description": "Parameter fill",
198 | "type": ["string","null"]
199 | },
200 | "bins": {
201 |               "description": "Bins for components of a parameter with a size having at least one element greater than 1",
202 | "type": "array",
203 | "items": {
204 | "type": "object",
205 | "required": ["name","centers"],
206 | "properties": {
207 | "name": {
208 | "description": "Name for the dimension (e.g., 'Frequency')",
209 | "type": "string"
210 | },
211 | "description": {
212 | "description": "A brief comment explaining what the bins represent",
213 | "type": "string"
214 | },
215 | "units": {
216 | "description": "The units for the bins (e.g., 'Hz')",
217 | "type": "string"
218 | },
219 | "centers": {
220 | "description": "The centers of each bin",
221 | "type": "array",
222 | "items": {
223 | "type": "number"
224 | },
225 | "minItems": 1
226 | },
227 | "ranges": {
228 | "description": "The boundaries for each bin",
229 | "type": "array",
230 | "items": {
231 | "type": "array",
232 | "items": {
233 | "type": "number"
234 | },
235 | "minItems": 1
236 | }
237 | }
238 | }
239 | }
240 | }
241 | }
242 | }
243 | }
244 | }
245 | }
246 | }
--------------------------------------------------------------------------------
/hapi-1.1.0/HAPI-data-access-schema-1.1.0.md:
--------------------------------------------------------------------------------
1 | The file `HAPI-data-access-schema-1.1.0.json` contains several JSON schema for HAPI server responses.
2 |
3 | To use it, one must resolve the references. For example, using `nodejs` and the `jsonschema` package,
4 |
5 | ```javascript
6 | var fs = require('fs');
7 | var schema = fs.readFileSync("HAPI-data-access-schema-1.1.0.json");
8 | var schema = JSON.parse(schema);
9 |
10 | // Capabilities response from server
11 | var json = {"outputFormats":["csv","binary"],"HAPI":"1.1","status":{"code":1200,"message":"OK"}};
12 |
13 | var Validator = require('jsonschema').Validator;
14 | var v = new Validator();
15 | v.addSchema(schema, '/HAPI');
16 | v.addSchema(schema, '/HAPIDateTime');
17 | v.addSchema(schema, '/HAPIStatus');
18 | vr = v.validate(json, schema['capabilities']);
19 | console.log(vr)
20 | ```
--------------------------------------------------------------------------------
/hapi-1.1.0/HAPI-data-access-spec-1.1.0.md:
--------------------------------------------------------------------------------
1 | # HAPI Data Access Specification
2 | Version 1.1.0 | Heliophysics Data and Model Consortium (HDMC) | May 16, 2017
3 |
4 |
5 |
6 | **Table of Contents**
7 |
8 | - [Introduction](#introduction)
9 | - [Endpoints](#endpoints)
10 | - [hapi](#hapi)
11 | - [capabilities](#capabilities)
12 | - [catalog](#catalog)
13 | - [info](#info)
14 | - [data](#data)
15 | - [Data Stream Content](#data-stream-content)
16 | - [Implications of the HAPI data model](#implications-of-the-hapi-data-model)
17 | - [Cross Origin Resource Sharing](#cross-origin-resource-sharing)
18 | - [HAPI Status Codes](#hapi-status-codes)
19 | - [HAPI Client Error Handling](#hapi-client-error-handling)
20 | - [Representation of Time](#representation-of-time)
21 | - [Additional Keyword / Value Pairs](#additional-keyword--value-pairs)
22 | - [More About](#more-about)
23 | - [Data Types](#data-types)
24 | - [The ‘size’ Attribute](#the-size-attribute)
25 | - ['fill' Values](#fill-values)
26 | - [Data Streams](#data-streams)
27 | - [Security Notes](#security-notes)
28 | - [Adoption](#adoption)
29 | - [References](#references)
30 | - [Contact](#contact)
31 |
32 |
33 |
34 | # Introduction
35 |
36 | This document describes the Heliophysics Application Programmer’s Interface (HAPI) specification, which is an API and streaming format specification for delivering digital time series data. The intent of HAPI is to enhance interoperability among time series data providers. The HAPI specification describes a lowest common denominator of services that any provider of time series data could implement. In fact, many providers already offer access to their data holdings through some kind of API. The hope is that this specification captures what many providers are already doing, but just codifies the specific details so that providers could use the same exact API. This would make it possible to obtain time series science data content seamlessly from many sources.
37 |
38 | This document is intended to be used by two groups of people: first by data providers who want to make time series data available through a HAPI server, and second by data users who want to understand how data is made available from a HAPI server, or perhaps to write client software to obtain data from an existing HAPI server.
39 |
40 | HAPI constitutes a minimum but complete set of capabilities needed for a server to allow access to the time series data values within one or more data collections. Because of this focus on access to data content, HAPI is very light on metadata and data discovery. Within the metadata offered by HAPI are optional ways to indicate where further descriptive details for any dataset could be found.
41 |
42 | The API itself is built using REST principles that emphasize URLs as stable endpoints through which clients can request data. Because it is based on well-established HTTP request and response rules, a wide range of HTTP clients can be used to interact with HAPI servers.
43 |
44 | The following definitions are provided first to ensure clarity in ensuing descriptions.
45 |
46 | **parameter** – a measured science quantity or a related ancillary quantity at one instant in time; may be scalar as a function of time, or an array at each time step; must have units; also must have a fill value that represents no measurement or absent information.
47 |
48 | **dataset** – a collection with a conceptually uniform set of parameters; one instance of all the parameters together with an associated time value constitutes a data record. A HAPI service presents a dataset as a seamless collection of time ordered records, offering a way to retrieve the parameters while hiding actual storage details.
49 |
50 | **request parameter** – keywords that appear after the ‘?’ in a URL with a GET request.
51 |
52 | Consider this example GET request:
53 |
http://hapi-server.org/hapi/data?id=alpha&time.min=2016-07-13
54 | The two request parameters are `id` and `time.min`. They are shown in bold and have values of `alpha` and `2016-07-13` respectively. This document will always use the full phrase "request parameter" to refer to these URL elements to draw a clear distinction from a parameter in a dataset.
55 |
56 | **Line Endings**
57 |
58 | HAPI servers must use a newline (ASCII code 10 in decimal, 0x0A in hexadecimal) for line endings.
59 |
60 | # Endpoints
61 |
62 | The HAPI specification consists of four required endpoints that give clients a precise way to first determine the data holdings of the server and then to request data from the server. The functionality of each endpoint is as follows:
63 |
64 | 1. describe the capabilities of the server; lists the output formats the server can emit (`csv`, `binary`, or `json`, described below)
65 | 2. list the catalog of datasets that are available; each dataset is associated with a unique id and may optionally have a title
66 | 3. show information about a dataset with a given id; a primary component of the description is the list of parameters in the dataset
67 | 4. stream data content for a dataset of a given id; the streaming request must have time bounds (specified by request parameters `time.min` and `time.max`) and may indicate a subset of parameters (default is all parameters)
68 |
69 | There is also an optional landing page endpoint for the HAPI service that returns human-readable HTML. Although there is recommended content for this landing page, it is not essential to the functioning of the server.
70 |
71 | The four required endpoints are REST-style services, in that the resulting HTTP response is the complete response for each endpoint. In particular, the `data` endpoint does not just give URLs or links to the data, but rather streams the data content in the HTTP response. The full specification for each endpoint is discussed below.
72 |
73 | All endpoints must be directly below a `hapi` path element in the URL:
74 | ```
75 | http://hapi-server.org/hapi/capabilities
76 | http://hapi-server.org/hapi/catalog
77 | http://hapi-server.org/hapi/info
78 | http://hapi-server.org/hapi/data
79 | ```
80 |
81 | All requests to a HAPI server are for retrieving resources and must not change the server state. Therefore, all HAPI endpoints must respond only to HTTP GET requests. POST requests should result in an error. This represents a RESTful approach in which GET requests are restricted to be read-only operations from the server. The HAPI specification does not allow any input to the server (which for RESTful services are often implemented using POST requests).
82 |
83 | The input specification for each endpoint (the request parameters and their allowed values) must be strictly enforced by the server. HAPI servers are not allowed to add additional request parameters beyond those in the specification. If a request URL contains any unrecognized or misspelled request parameters, a HAPI server must respond with an error status. ([See below](#hapi-status-codes) for more details on how a HAPI server returns status information to clients.) The principle being followed here is that the server must not silently ignore unrecognized request parameters, because this would falsely indicate to clients that the request parameter was understood and was taken into account when creating the output. For example, if a server is given a request parameter that is not part of the HAPI specification, such as ```averagingInterval=5s```, the server must report an error for two reasons: 1. additional request parametrs are not allowed, and 2. the server will not be doing any averaging.
84 |
85 | The outputs from a HAPI server to the `catalog`, `capabilities`, and `info` endpoints are JSON structures, the formats of which are described below in the sections detailing each endpoint. The `data` endpoint must be able to deliver Comma Separated Value (CSV) data, but may optionally deliver data content in binary format or JSON format. The structure of the response stream formats are described below.
86 |
87 | The following is the detailed specification for the four main HAPI endpoints as well as the optional landing page endpoint.
88 |
89 | ## hapi
90 |
91 | This root endpoint is optional and serves as a human-readable landing page for the server. Unlike the other endpoints, there is no strict definition for the output, but if present, it should include a brief description of the other endpoints, and links to documentation on how to use the server. An example landing page that can be easily customized for a new server is available here:
92 | https://github.com/hapi-server/data-specification/blob/master/example_hapi_landing_page.html
93 |
94 | There are many options for landing page content, such as an HTML view of the catalog, or links to commonly requested data.
95 |
96 | **Sample Invocation**
97 | ```
98 | http://hapi-server.org/hapi
99 | ```
100 |
101 | **Request Parameters**
102 |
103 | None
104 |
105 | **Response**
106 |
107 | The response is in HTML format with a mime type of `text/html`. The content for the landing page is not strictly defined, but should look something like the example below.
108 |
109 | **Example**
110 |
111 | Retrieve landing page for this server.
112 | ```
113 | http://hapi-server.org/hapi
114 | ```
115 | **Example Response:**
116 | ```
117 |
118 |
119 |
120 | HAPI Server
121 | This server supports the HAPI 1.1 specification for delivery of time series
122 | data. The server consists of the following 4 REST-like endpoints that will
123 | respond to HTTP GET requests.
124 |
125 |
126 | - capabilities describe the capabilities of the server; this lists the output formats the server can emit (CSV and binary)
127 | - catalog list the datasets that are available; each dataset is associated with a unique id
128 | - info obtain a description for dataset of a given id; the description defines the parameters in every dataset record
129 | - data stream data content for a dataset of a given id; the streaming request must have time bounds (specified by request parameters time.min and time.max) and may indicate a subset of parameters (default is all parameters)
130 |
131 | For more information, see this HAPI description at the SPASE web site.
132 |
133 |
134 | ```
135 |
136 | ## capabilities
137 |
138 | This endpoint describes relevant implementation capabilities for this server. Currently, the only possible variability from server to server is the list of output formats that are supported.
139 |
140 | A server must support `csv` output format, but `binary` output format and `json` output may optionally be supported. Servers may support custom output formats, which would be advertised here. All custom formats listed by a server must begin with the string `x_` to indicate that they are custom formats and avoid collisions with possible future additions to the specification.
141 |
142 | **Sample Invocation**
143 | ```
144 | http://hapi-server.org/hapi/capabilities
145 | ```
146 |
147 | **Request Parameters**
148 |
149 | None
150 |
151 | **Response**
152 |
153 | The server's response to this endpoint must be in JSON format [3] as defined by RFC-7159, and the response must indicate a mime type of `application/json`. Server capabilities are described using keyword-value pairs, with `outputFormats` being the only keyword currently in use.
154 |
155 | **Capabilities Object**
156 |
157 | | Name | Type | Description |
158 | | -------- | -------- | ----------- |
159 | | HAPI | string | **Required**
The version number of the HAPI specification this description complies with. |
160 | | status | Status object | **Required**
Server response status for this request.|
161 | | outputFormats | string array | **Required**
The list of output formats the serve can emit. All HAPI servers must support at least `csv` output format, with `binary` and `json` output formats being optional. |
162 |
163 | **Example**
164 |
165 | Retrieve a listing of capabilities of this server.
166 | ```
167 | http://hapi-server.org/hapi/capabilities
168 | ```
169 | **Example Response:**
170 | ```
171 | {
172 | "HAPI": "1.1",
173 | "status": { "code": 1200, "message": "OK"},
174 | "outputFormats": [ "csv", "binary", "json" ]
175 | }
176 | ```
177 | If a server only reports an output format of `csv`, then requesting `binary` data should cause the server to respond with an error status. There is a specific HAPI error code for this, namely 1409 "Bad request - unsupported output format" with a corresponding HTTP response code of 400. [See below](#hapi-status-codes) for more about error responses.
178 |
179 |
180 | ## catalog
181 |
182 | This endpoint provides a list of datasets available from this server.
183 |
184 | **Sample Invocation**
185 | ```
186 | http://hapi-server.org/hapi/catalog
187 | ```
188 |
189 | **Request Parameters**
190 |
191 | None
192 |
193 | **Response**
194 |
195 | The response is in JSON format [3] as defined by RFC-7159 and has a mime type of `application/json`. The catalog is a simple listing of identifiers for the datasets available through the server providing the catalog. Additional metadata about each dataset is available through the `info` endpoint (described below). The catalog takes no query parameters and always lists the full catalog.
196 |
197 | **Catalog Object**
198 |
199 | | Name | Type | Description |
200 | | ------ | ------- | ----------- |
201 | | HAPI | string | **Required**
The version number of the HAPI specification this description complies with. |
202 | | status | object | **Required**
Server response status for this request. (see [HAPI Status Codes](#hapi-status-codes))|
203 | | catalog | array of Dataset | **Required**
A list of datasets available from this server. |
204 |
205 | **Dataset Object**
206 |
207 | | Name | Type | Description |
208 | | ------ | ------- | ----------- |
209 | | id | string | **Required**
The computer friendly identifier that the host system uses to locate the dataset. Each identifier must be unique within the HAPI server where it is provided. |
210 | | title | string | **Optional**
A short human readable name for the dataset. If none is given, it defaults to the id. The suggested maximum length is 40 characters. |
211 |
212 | **Example**
213 |
214 | Retrieve a listing of datasets shared by this server.
215 | ```
216 | http://hapi-server.org/hapi/catalog
217 | ```
218 | **Example Response:**
219 | ```
220 | {
221 | "HAPI" : "1.1",
222 | "status": { "code": 1200, "message": "OK"},
223 | "catalog" :
224 | [
225 |    {"id": "ACE_MAG", "title": "ACE Magnetometer data"},
226 | {"id": "data/IBEX/ENA/AVG5MIN"},
227 | {"id": "data/CRUISE/PLS"},
228 | {"id": "any_identifier_here"}
229 | ]
230 | }
231 | ```
232 | The identifiers must be unique within a single HAPI server. Also, dataset identifiers in the catalog should be stable over time. Including rapidly changing version numbers or other revolving elements (dates, processing ids, etc.) in the datasets identifiers should be avoided. The intent of the HAPI specification is to allow data to be referenced using RESTful URLs that have a reasonable lifetime.
233 |
234 | Also, note that the identifiers can have slashes in them.
235 |
236 | ## info
237 |
238 | This endpoint provides a data header for a given dataset. The header is expressed in JSON format [3] as defined by RFC-7159 and has a mime type of `application/json`. The focus of the header is to provide enough metadata to allow automated reading of the data content that is streamed via the `data` endpoint. The header must include a list of the parameters in the dataset, as well as the date range covered by the dataset. There are also about ten optional metadata elements for capturing other high level information such as a brief description of the dataset, the typical cadence of the data, and ways to learn more about a dataset. A table below lists all required and optional dataset attributes in the header.
239 |
240 | Servers may include additional custom (server-specific) keywords or keyword/value pairs in the header, but any non-standard keywords must begin with the prefix `x_`.
241 |
242 | Each parameter listed in the header must itself be described by specific metadata elements, and a separate table below describes the required and optional parameter attributes.
243 |
244 | By default, all the parameters available in the dataset are listed in the header. However, a client may request a header for just a subset of the parameters. The subset of interest is specified as a comma separated list via the request parameter called `parameters`. (Note that the client would have to obtain the parameter names from a prior request.) This reduced header is potentially useful because it is also possible to request a subset of parameters when asking for data (see the `data` endpoint), and a reduced header can be requested that would then match the subset of parameters in the data. The server must ignore duplicates in the subset list, and the server's response must order the subset of parameters according to the ordering in the original, full list of parameters. This ensures that a data request for a subset of parameters can be interpreted properly even if no header is requested. (Although a way to write a client as safe as possible would be to always request the header, and rely on the parameter ordering in the header to guide interpretation of the data column ordering.)
245 |
246 | Note that the `data` endpoint may optionally prepend the info header to the data stream. In cases where the `data` endpoint response includes a header followed by `csv` or `binary` data, the header must always end with a newline. This enables the end of the JSON header to be more easily detected when it is in front of a binary data response. One good way to detect the end of the header is to calculate the number of open braces minus the number of closed braces. The last character in the header is the newline following the closing brace that makes open braces minus closed braces equal to zero. For `json` output, the header and data are all within a single JSON entity, and so newlines are not necessary.
247 |
248 |
249 | **Sample Invocation**
250 | ```
251 | http://hapi-server.org/hapi/info?id=ACE_MAG
252 | ```
253 |
254 | **Request Parameters**
255 |
256 | | Name | Description |
257 | | ---------- | ----------- |
258 | | id | **Required**
The identifier for the dataset. |
259 | | parameters | **Optional**
A subset of the parameters to include in the header. |
260 |
261 | **Response**
262 |
263 | The response is in JSON format [3] and provides metadata about one dataset.
264 |
265 | **Info Object**
266 |
267 | | Dataset Attribute | Type | Description |
268 | | ----------------- | ------- | ----------- |
269 | | HAPI | string | **Required**
The version number of the HAPI specification with which this description complies.|
270 | | status | object | **Required**
Server response status for this request. (see [HAPI Status Codes](#hapi-status-codes))|
271 | | format | string | **Required** (when header is prefixed to data stream)
Format of the data as `csv` or `binary` or `json`. |
272 | | parameters | array of Parameter | **Required**
Description of the parameters in the data. |
273 | | startDate | string | **Required**
[ISO 8601](https://github.com/hapi-server/data-specification#representation-of-time) date of first record of data in the entire dataset. |
274 | | stopDate | string | **Required**
ISO 8601 date for the last record of data in the entire dataset. For actively growing datasets, the end date can be approximate, but it is the server's job to report an accurate end date. |
275 | | sampleStartDate | string | **Optional**
ISO 8601 date giving the start of a sample time period for a dataset, where the time period must contain a manageable, representative example of valid, non-fill data. |
276 | | sampleStopDate | string | **Optional**
ISO 8601 date giving the end of a sample time period for a dataset, where the time period must contain a manageable, representative example of valid, non-fill data. |
277 | | description | string | **Optional**
A brief description of the dataset. |
278 | | resourceURL | string | **Optional**
URL linking to more detailed information about this dataset. |
279 | | resourceID | string | **Optional**
An identifier by which this data is known in another setting, for example, the SPASE ID. |
280 | | creationDate | string | **Optional**
ISO 8601 date and time of the dataset creation. |
281 | | modificationDate | string | **Optional**
Last modification time of the data content in the dataset as an ISO 8601 date. |
282 | | cadence | string | **Optional**
Time difference between records as an ISO 8601 duration. This is meant as a guide to the nominal cadence of the data and not a precise statement about the time between measurements. |
283 | | contact | string | **Optional**
Relevant contact person and possibly contact information. |
284 | | contactID | string | **Optional**
The identifier in the discovery system for information about the contact. For example, the SPASE ID of the person. |
285 |
286 | **Parameter**
287 |
288 | The focus of the header is to list the parameters in a dataset. The first parameter in the list must be a time value. This time column serves as the independent variable for the dataset. The time column parameter may have any name, but its type must be `isotime` and there must not be any fill values in the data stream for this column. Note that the HAPI specification does not clarify if the time values given are the start, middle, or end of the measurement intervals. There can be other parameters of type `isotime` in the parameter list. The table below describes the Parameter items and their allowed types.
289 |
290 |
291 | | Parameter Attribute | Type | Description |
292 | | ------------------- | ------- | ----------- |
293 | | name | string | **Required**
A short name for this parameter. It is recommended that all parameter names start with a letter or underscore, followed by letters, underscores or numbers. Parameter names in a dataset must be unique, and names are not allowed to differ only by having different case. |
294 | | type | string | **Required**
One of `string`, `double`, `integer`, `isotime`. Binary content for `double` is always 8 bytes in IEEE 754 format, `integer` is 4 bytes little-endian. There is no default length for `string` and `isotime` types. [See below](#data-types) for more information on data types. |
295 | | length | integer | **Required** for type `string` and `isotime`; **not allowed for others**
The number of bytes or characters that contain the value, where shorter strings can be null-terminated (byte value 0) or padded with spaces. This is relevant only when data is streamed in binary format, but strings in CSV should not be longer than this length. |
296 | | units | string | **Required**
The units for the data values represented by this parameter. For dimensionless quantities, the value can be ‘dimensionless’ or ```null```. For ```isotime``` parameters, the units must be ```UTC```. |
297 | | size | array of integers | **Required** for array parameters; **not allowed for others**
Must be a 1-D array whose values are the number of array elements in each dimension of this parameter. For example, `"size"=[7]` indicates that the value in each record is a 1-D array of length 7. For the `csv` and `binary` output, there must be 7 columns for this parameter -- one column for each array element, effectively unwinding this array. The `json` output for this data parameter must contain an actual JSON array (whose elements would be enclosed by `[ ]`). For arrays 2-D and higher, such as `"size"=[2,3]`, the later indices are the fastest moving, so that the CSV and binary columns for such a 2 by 3 would be `[0,0]`, `[0,1]`, `[0,2]` and then `[1,0]`, `[1,1]`, `[1,2]`. [See below](#the-size-attribute) for more about array sizes. **NOTE: array sizes of 2-D or higher are experimental at this point, and future versions of this specification may update the way 2-D or higher data is described.** |
298 | | fill | string | **Required**
A fill value indicates no valid data is present. If a parameter has no fill present for any records in the dataset, this can be indicated by using a JSON null for this attribute as in `"fill": null` [See below](#fill-values) for more about fill values, including the issues related to specifying numeric fill values as strings. Note that since the primary time column cannot have fill values, it must specify `"fill": null` in the header. |
299 | | description | string | **Optional**
A brief description of the parameter. |
300 | | bins | array of Bins object | **Optional**
For array parameters, each object in the `bins` array corresponds to one of the dimensions of the array, and describes values associated with each element in the corresponding dimension of the array. A table below describes all required and optional attributes within each `bins` object. If the parameter represents a 1-D frequency spectrum, the `bins` array will have one object describing the frequency values for each frequency bin. Within that object, the `centers` attribute points to an array of values to use for the central frequency of each channel, and the `ranges` attribute specifies a range (min to max) associated with each channel. At least one of these must be specified. The bins object has an optional `units` keyword (any string value is allowed), and `name` is required. See below for an example showing a parameter that holds a proton energy spectrum. The use of `bins` to describe values associated with 2-D or higher arrays is currently supported but should be considered experimental. |
301 |
302 | **Bins Object**
303 |
304 | The bins attribute of a parameter is an array of JSON objects. These objects have the attributes described below.
305 | **NOTE: Even though `ranges` and `centers` are marked as required, only one of the two must be specified.**
306 |
307 | | Bins Attribute | Type | Description |
308 | | ------------------- | ------- | ----------- |
309 | | name | string | **Required**
name for the dimension (e.g. "Frequency") |
310 | | centers | array of n doubles | **Required**
the centers of each bin |
311 | | ranges | array of n array of 2 doubles | **Required**
the boundaries for each bin |
312 | | units | string | **Optional**
the units for the bins |
313 | | description | string | **Optional**
brief comment explaining what the bins represent |
314 |
315 |
316 | **Example**
317 | ```
318 | http://hapi-server.org/hapi/info?id=ACE_MAG
319 | ```
320 | **Example Response:**
321 | ```
322 | { "HAPI": "1.1",
323 | "status": { "code": 1200, "message": "OK"},
324 | "startDate": "1998-001",
325 | "stopDate" : "2017-100",
326 | "parameters": [
327 | { "name": "Time",
328 | "type": "isotime",
329 | "units": "UTC",
330 | "length": 24 },
331 | { "name": "radial_position",
332 | "type": "double",
333 | "units": "km",
334 | "description": "radial position of the spacecraft" },
335 | { "name": "quality flag",
336 | "type": "integer",
337 |             "units": "none",
338 |             "description": "0=OK and 1=bad" },
339 | { "name": "mag_GSE",
340 | "type": "double",
341 | "units": "nT",
342 | "size" : [3],
343 | "description": "hourly average Cartesian magnetic field in nT in GSE" }
344 | ]
345 | }
346 | ```
347 |
348 | **Subsetting the Parameters**
349 |
350 | Clients may request a response that includes only a subset of the parameters in a dataset. When creating a header for a subset of parameters (via the `info` endpoint), or a data stream for a subset of parameters (via the `data` endpoint, described next), the logic on the server is the same in terms of what dataset parameters are included in the response. The primary time parameter (always required to be the first parameter in the list) is always included, even if not requested. These examples clarify the way a server must respond to various types of dataset parameter subsetting requests:
351 |
352 | - request: do not ask for any specific parameters (i.e., there is no request parameter called ‘parameters’); response: all columns
353 | - request: ask for just the primary time parameter; response: just the primary time column
354 | - request: ask for a single parameter other than the primary time column (like ‘parameters=Bx’); response: primary time column and the one requested data column
355 | - request: ask for two or more parameters other than the primary time column; response: primary time column followed by the requested parameters in the order they occurred in the original, non-subsetted dataset header (not in the order of the subset request)
356 |
357 |
358 | ## data
359 |
360 | Provides access to a dataset and allows for selecting time ranges and parameters to return. Data is returned as a stream in CSV[2], binary, or JSON format. The [Data Stream Content](#data-stream-content) section describes the stream structure and layout for each format.
361 |
362 | The resulting data stream can be thought of as a stream of records, where each record contains one value for each of the dataset parameters. Each data record must contain a data value or a fill value (of the same data type) for each parameter.
363 |
364 | **Request Parameters**
365 |
366 | | Name | Description |
367 | | ------------ | ----------- |
368 | | id | **Required**
The identifier for the dataset |
369 | | time.min | **Required**
The inclusive begin time for the data to include in the response |
370 | | time.max | **Required**
The exclusive end time for the data to include in the response |
371 | | parameters | **Optional**
A comma separated list of parameters to include in the response. Default is all parameters.|
372 | | include | **Optional**
Has one possible value of "header" to indicate that the info header should precede the data. The header lines will be prefixed with the "#" character. |
373 | | format | **Optional**
The desired format for the data stream. Possible values are "csv", "binary", and "json". |
374 |
375 | **Response**
376 |
377 | Response is in one of three formats: CSV format as defined by RFC-4180 with a mime type of "text/csv"; binary format where floating point numbers are in IEEE 754[5] format and byte order is LSB and a mime type of `application/octet-stream`; JSON format with the structure as described below and a mime type of `application/json`. The default data format is CSV. See the section on Data Stream Content for more details.
378 |
379 | If the header is requested, then for binary and CSV formats, each line of the header must begin with a hash (#) character. For JSON output, no prefix character should be used, because the data object will just be another JSON element within the response. Other than the possible prefix character, the contents of the header should be the same as returned from the info endpoint. When a data stream has an attached header, the header must contain an additional "format" attribute to indicate if the content after the header is "csv", "binary", or "json". Note that when a header is included in a CSV response, the data stream is not strictly in CSV format.
380 |
381 | The first parameter in the data must be a time column (type of "isotime") and this must be the independent variable for the dataset. If a subset of parameters is requested, the time column is always provided, even if it is not requested.
382 |
383 | Note that the `time.min` request parameter represents an inclusive lower bound and `time.max` request parameter is the exclusive upper bound. The server must return data records within these time constraints, i.e., no extra records outside the requested time range. This enables concatenation of results from adjacent time ranges.
384 |
385 | There is an interaction between the `info` endpoint and the `data` endpoint, because the header from the `info` endpoint describes the record structure of data emitted by the `data` endpoint. Thus after a single call to the `info` endpoint, a client could make multiple calls to the `data` endpoint (for multiple time ranges, for example) with the expectation that each data response would contain records described by the single call to the `info` endpoint. The `data` endpoint can optionally prefix the data stream with header information, potentially obviating the need for the `info` endpoint. But the `info` endpoint is useful in that it allows clients to learn about a dataset without having to make a data request.
386 |
387 | Both the `info` and `data` endpoints take an optional request parameter (recall the definition of request parameter in the introduction) called `parameters` that allows users to restrict the dataset parameters listed in the header and data stream, respectively. This enables clients (that already have a list of dataset parameters from a previous info or data request) to request a header for a subset of parameters that will match a data stream for the same subset of parameters. Consider the following dataset header for a fictional dataset with the identifier MY_MAG_DATA.
388 |
389 | An `info` request like this:
390 | ```
391 | http://hapi-server.org/hapi/info?id=MY_MAG_DATA
392 | ```
393 | would result in a header listing of all the dataset parameters:
394 | ```
395 | { "HAPI": "1.1",
396 | "status": { "code": 1200, "message": "OK"},
397 | "startDate": "2005-01-21T12:05:00.000",
398 | "stopDate" : "2010-10-18T00:00:00",
399 | "parameters": [
400 | { "name": "Time",
401 | "type": "isotime",
402 | "units": "UTC",
403 | "length": 24 },
404 | { "name": "Bx", "type": "double", "units": "nT" },
405 | { "name": "By", "type": "double", "units": "nT" },
406 |        { "name": "Bz", "type": "double", "units": "nT" }
407 | ]
408 | }
409 | ```
410 | An `info` request like this:
411 | ```
412 | http://hapi-server.org/hapi/info?id=MY_MAG_DATA&parameters=Bx
413 | ```
414 | would result in a header listing only the one dataset parameter:
415 | ```
416 | { "HAPI": "1.1",
417 | "status": { "code": 1200, "message": "OK"},
418 | "startDate": "2005-01-21T12:05:00.000",
419 | "stopDate" : "2010-10-18T00:00:00",
420 | "parameters": [
421 | { "name": "Time",
422 | "type": "isotime",
423 | "units": "UTC",
424 | "length": 24 },
425 |        { "name": "Bx", "type": "double", "units": "nT" }
426 | ]
427 | }
428 | ```
429 | Note that the time parameter is included even though it was not requested.
430 |
431 | ### Data Stream Content
432 |
433 | The three possible output formats are `csv`, `binary`, and `json`. A HAPI server must support `csv`, while `binary` and `json` are optional.
434 |
435 | In the CSV stream, each record is one line of text, with commas between the values for each dataset parameter. An array parameter (i.e., the value of a parameter within one record is an array) will have multiple columns resulting from placing each element in the array into its own column. For 1-D arrays, the ordering of the unwound columns is just the index ordering of the array elements. For 2-D arrays or higher, the right-most array index is the fastest moving index when mapping array elements to columns.
436 |
437 | It is up to the server to decide how much precision to include in the ASCII values when generating CSV output.
438 |
439 | The binary data output is best described as a binary translation of the CSV stream, with full numerical precision and no commas. Recall that the dataset header provides type information for each dataset parameter, and this definitively indicates the number of bytes and the byte structure of each parameter, and thus of each binary record in the stream. Array parameters are unwound in the same way for binary as for CSV data (as described in the previous paragraph). All numeric values are little endian (LSB), integers are always four bytes, and floating point values are always IEEE 754 double precision values.
440 |
441 | Dataset parameters of type `string` and `isotime` (which are just strings of ISO 8601 dates) must have in their header a length element. Records containing strings shorter than this length must be null-terminated or padded with spaces.
442 |
443 | For the JSON output, an additional `data` element added to the header contains the array of data records. These records are very similar to the CSV output, except that strings must be quoted and arrays must be delimited with array brackets in standard JSON fashion. An example helps illustrate what the JSON format looks like. Consider a dataset with four parameters: time, a scalar value, an 1-D array value with array length of 3, and a string value. The header with the data object might look like this:
444 |
445 | ```
446 | { "HAPI": "1.1",
447 | "status": { "code": 1200, "message": "OK"},
448 | "startDate": "2005-01-21T12:05:00.000",
449 | "stopDate" : "2010-10-18T00:00:00",
450 | "parameters": [
451 | { "name": "Time", "type": "isotime", "units": "UTC", "length": 24 },
452 | { "name": "quality_flag", "type": "integer", "description": "0=ok; 1=bad" },
453 | { "name": "mag_GSE", "type": "double", "units": "nT", "size" : [3],
454 | "description": "hourly average Cartesian magnetic field in nT in GSE" },
455 | { "name": "region", "type": "string", "length": 20, "units" : null}
456 | ],
457 | "format": "json",
458 | "data" : [
459 | ["2010-001T12:01:00",0,[0.44302,0.398,-8.49],"sheath"],
460 | ["2010-001T12:02:00",0,[0.44177,0.393,-9.45],"sheath"],
461 | ["2010-001T12:03:00",0,[0.44003,0.397,-9.38],"sheath"],
462 | ["2010-001T12:04:00",1,[0.43904,0.399,-9.16],"sheath"]
463 | ]
464 |
465 | }
466 | ```
467 |
468 | The data element is a JSON array of records. Each record is itself an array of parameters. The time and string values are in quotes, and any data parameter in the record that is an array must be inside square brackets. This data element appears as the last JSON element in the header.
469 |
470 | The record-oriented arrangement of the JSON format is designed to allow a streaming client reader to begin reading (and processing) the JSON data stream before it is complete. Note also that servers can start streaming the data as soon as records are available. In other words, the JSON format can be read and written without first having to hold all the records in memory. This may require some custom elements in the JSON parser, but preserving this streaming capability is important for keeping the HAPI spec scalable. Note that if pulling all the data content into memory is not a problem, then ordinary JSON parsers will also have no trouble with this JSON arrangement.
471 |
472 | **Errors While Streaming Data**
473 |
474 | If the server encounters an error while streaming the data and can no longer continue, it will have to terminate the stream. The `status` code (both HTTP and HAPI) and message will already have been set in the header and is unlikely to represent the error. Clients will have to be able to detect an abnormally terminated stream, and should treat this aborted condition the same as an internal server error. See [HAPI Status Codes](#hapi-status-codes) for more about error conditions.
475 |
476 | **Examples**
477 |
478 | Two examples of data requests and responses are given – one with the header and one without.
479 |
480 | **Data with Header**
481 |
482 | Note that in the following request, the header is to be included, so the same header from the `info` endpoint will be prepended to the data, but with a ‘#’ character as a prefix for every header line.
483 | ```
484 | http://hapi-server.org/hapi/data?id=path/to/ACE_MAG&time.min=2016-01-01&time.max=2016-02-01&include=header
485 | ```
486 | **Example Response: Data with Header**
487 | ```
488 | #{
489 | # "HAPI": "1.1",
490 | # "status": { "code": 1200, "message": "OK"},
491 | # "format": "csv",
492 | # "startDate": "1998-001",
493 | # "stopDate" : "2017-001",
494 | # "parameters": [
495 | # { "name": "Time",
496 | # "type": "isotime",
497 | # "units": "UTC",
498 | # "length": 24
499 | # },
500 | # { "name": "radial_position",
501 | # "type": "double",
502 | # "units": "km",
503 | # "description": "radial position of the spacecraft"
504 | # },
505 | # { "name": "quality flag",
506 | # "type": "integer",
507 | #                         "units": null,
508 | #                         "description": "0=OK and 1=bad"
509 | # },
510 | # { "name": "mag_GSE",
511 | # "type": "double",
512 | # "units": "nT",
513 | # "size" : [3],
514 | # "description": "hourly average Cartesian magnetic field in nT in GSE"
515 | # }
516 | # ]
517 | #}
518 | 2016-01-01T00:00:00.000,6.848351,0,0.05,0.08,-50.98
519 | 2016-01-01T01:00:00.000,6.890149,0,0.04,0.07,-45.26
520 | ...
521 | ...
522 | 2016-01-01T02:00:00.000,8.142253,0,2.74,0.17,-28.62
523 | ```
524 |
525 | **Data Only**
526 |
527 | The following example is the same, except it lacks the request to include the header.
528 | ```
529 | http://hapi-server.org/hapi/data?id=path/to/ACE_MAG&time.min=2016-01-01&time.max=2016-02-01
530 | ```
531 | **Example Response: Data Only**
532 |
533 | Consider a dataset that contains a time field, two scalar fields and one array field of length 3. The response will look something like:
534 | ```
535 | 2016-01-01T00:00:00.000,6.848351,0,0.05,0.08,-50.98
536 | 2016-01-01T01:00:00.000,6.890149,0,0.04,0.07,-45.26
537 | ...
538 | ...
539 | 2016-01-01T02:00:00.000,8.142253,0,2.74,0.17,-28.62
540 | ```
541 | Note that there is no leading row with column names. The CSV standard [2] indicates that such a header row is optional. Leaving out this row avoids the complication of having to name individual columns representing array elements within an array parameter. Recall that an array parameter has only a single name. The place HAPI specifies parameter names is via the `info` endpoint, which also provides size details for each parameter (scalar or array, and array size if needed). The size of each parameter must be used to determine how many columns it will use in the CSV data. By not specifying a row of column names, HAPI avoids the need to have a naming convention for columns representing elements within an array parameter.
542 |
543 | # Implications of the HAPI data model
544 |
545 | Because HAPI requires a single time column to be the first column, this requires each record (one row of data) to be associated with one time value (the first value in the row). This has implications for serving files with multiple time arrays in them. Suppose a file contains 1 second data, 3 second data, and 5 second data, all from the same measurement but averaged differently. A HAPI server could expose this data, but not as a single dataset.
546 | To a HAPI server, each time resolution could be presented as a separate dataset, each with its own unique time array.
547 |
548 | # Cross Origin Resource Sharing
549 |
550 | Because of the increasing importance of JavaScript clients that use AJAX requests, HAPI servers are strongly encouraged to implement Cross Origin Resource Sharing (CORS) https://www.w3.org/TR/cors/. This will allow AJAX requests by browser clients from any domain. For servers with only public data, enabling CORS is fairly common, and not implementing CORS limits the type of clients that can interface with a HAPI server. Server implementors are strongly encouraged to pursue deeper understanding before proceeding with CORS. For testing purposes, the following headers have been sufficient for browser clients to HAPI servers:
551 |
552 | ```
553 | Access-Control-Allow-Origin: *
554 | Access-Control-Allow-Methods: GET
555 | Access-Control-Allow-Headers: Content-Type
556 | ```
557 |
558 | # HAPI Status Codes
559 |
560 | There are two levels of error reporting a HAPI server must perform. Because every HAPI server response is an HTTP response, an appropriate HTTP status must be set for each response. Although the HTTP codes are robust, they are more difficult for clients to extract -- a HAPI client using a high-level URL retrieving mechanism may not have easy access to HTTP header content. Therefore, every HAPI response with a header must also include a `status` object indicating if the request succeeded or not. The two status indicators (HAPI and HTTP) must be consistent, i.e., if one indicates success, so must the other.
561 |
562 | The status information returned from an endpoint is as follows:
563 |
564 |
565 | |Name |Type |Description|
566 | |--------|---------|-----------|
567 | |code | integer |specific value indicating the category of the outcome of the request - see [HAPI Status Codes](#hapi-status-codes)|
568 | |message | string |human readable description of the status - must conceptually match the intent of the integer code|
569 |
570 |
571 | HAPI servers must categorize the response status using at least the following three status codes: 1200 - OK, 1400 - Bad Request, and 1500 - Internal Server Error. These are intentionally analogous to the similar HTTP codes 200 - OK, 400 - Bad Request, and 500 - Internal Server Error. Note that HAPI code numbers are 1000 higher than the HTTP codes to avoid collisions. For these three simple status categorizations, the HTTP code can be derived from the HAPI code by just subtracting 1000. The following table summarizes the minimum required status response categories.
572 |
573 |
574 | | HTTP code |HAPI status `code`| HAPI status `message` |
575 | |--------------:|-------------:|-------------------------|
576 | | 200 | 1200 | OK |
577 | | 400 | 1400 | Bad request - user input error |
578 | | 500 | 1500 | Internal server error |
579 |
580 | The exact wording in the message does not need to match what is shown here. The conceptual message must be consistent with the status, but the wording is allowed to be different (or in another language, for example).
581 |
582 | The `capabilities` and `catalog` endpoints just need to indicate "1200 - OK" or "1500 - Internal Server Error" since they do not take any request parameters. The `info` and `data` endpoints do take request parameters, so their status response must include "1400 - Bad Request" when appropriate.
583 |
584 | Servers may optionally provide a more specific error code for the following common types of input processing problems. It is recommended but not required that a server implement this more complete set of status responses. Servers may add their own codes, but must use numbers outside the 1200s, 1400s, and 1500s to avoid collisions with possible future HAPI codes.
585 |
586 |
587 | | HTTP code |HAPI status `code`| HAPI status `message` |
588 | |--------------:|-------------:|-------------------------|
589 | | 200 | 1200 | OK |
590 | | 200 | 1201 | OK - no data for time range |
591 | | 400 | 1400 | Bad request - user input error |
592 | | 400 | 1401 | Bad request - unknown request parameter |
593 | | 400 | 1402 | Bad request - error in start time |
594 | | 400 | 1403 | Bad request - error in stop time |
595 | | 400 | 1404 | Bad request - start time after stop time |
596 | | 400 | 1405 | Bad request - time outside valid range |
597 | | 404 | 1406 | Bad request - unknown dataset id |
598 | | 404 | 1407 | Bad request - unknown dataset parameter |
599 | | 400 | 1408 | Bad request - too much time or data requested |
600 | | 400 | 1409 | Bad request - unsupported output format |
601 | | 500 | 1500 | Internal server error |
602 | | 500 | 1501 | Internal server error - upstream request error |
603 |
604 | Note that there is an OK status to indicate that the request was properly fulfilled, but that no data was found. This can be very useful
605 | feedback to clients and users, who may otherwise suspect server problems if no data is returned.
606 |
607 | Note also the response 1408 indicating that the server will not fulfill the request, since it is too large. This gives a HAPI server a way to let clients know about internal limits within the server.
608 |
609 | In cases where the server cannot create a full response (such as an `info` request or `data` request for an unknown dataset), the JSON header response must include the HAPI version and a HAPI status object indicating that an error has occurred.
610 | ```
611 | {
612 | "HAPI": "1.1",
613 | "status": { "code": 1401, "message": "Bad request - unknown request parameter"}
614 | }
615 | ```
616 |
617 | If no JSON header was requested, then the HTTP error will be the only indicator of a problem. Similarly, for the `data` endpoint, clients may request data with no JSON header, and in this case, the HTTP status is the only place a client can determine the response status.
618 |
619 | ## HAPI Client Error Handling
620 |
621 | Because web servers are not required to limit HTTP return codes to those in the above table, HAPI clients should be able to handle the full range of HTTP responses. Also, the HAPI server code may not be the only software to interact with a URL-based request from a HAPI server. There may be a load balancer or upstream request routing or caching mechanism in place. Therefore, it is good client-side practice to be able to handle any HTTP errors.
622 |
623 | # Representation of Time
624 |
625 | The HAPI specification is focused on access to time series data, so understanding how the server parses and emits time values is important.
626 |
627 | When making a request to the server, the time range (`time.min` and `time.max`) values must each be valid time strings according to the ISO 8601 standard. Only two flavors of ISO 8601 time strings are allowed, namely those formatted as year-month-day (yyyy-mm-ddThh\:mm\:ss.sss) or day-of-year (yyyy-dddThh\:mm\:ss.sss). Servers should be able to handle either of these time string formats, but do not need to handle some of the more esoteric ISO 8601 formats, such as year + week-of-year. Any date or time elements missing from the string are assumed to take on their smallest possible value. For example, the string `2017-01-10T12` is the same as `2017-01-10T12:00:00.000`. Servers should be able to parse and properly interpret these types of truncated time strings.
628 |
629 | Time values in the outgoing data stream must be ISO 8601 strings. A server may use either the yyyy-mm-ddThh:mm:ss or the yyyy-dddThh:mm:ss form, but should use just one format within any given dataset. Emitting truncated time strings is allowed, and again missing date or time elements are assumed to have the lowest value. Therefore, clients must be able to transparently handle truncated ISO strings of both flavors. For `binary` and `csv` data, a truncated time string is indicated by setting the `length` attribute for the time parameter. See https://en.wikipedia.org/wiki/ISO_8601.
630 |
631 | The data returned from a request should strictly fall within the limits of `time.min` and `time.max`, i.e., servers should not pad the data with extra records outside the requested time range. Furthermore, note that the `time.min` value is inclusive (data at or beyond this time can be included), while `time.max` is exclusive (data at or beyond this time shall not be included in the response).
632 |
633 | The primary time column is not allowed to contain any fill values. Each record must be identified with a valid time value. If other columns contain parameters of type `isotime` (i.e., time columns that are not the primary time column), there may be fill values in these columns. Note that the `fill` definition is required for all types, including `isotime` parameters. The fill value for a (non-primary) `isotime` parameter does not have to be a valid time string - it can be any string, but it must be the same length string as the time variable.
634 |
635 | Note that the ISO 8601 time format allows arbitrary precision on the time values. HAPI servers should therefore also accept time values with high precision. As a practical limit, servers should at least handle time values down to the nanosecond or picosecond level.
636 |
637 | # Additional Keyword / Value Pairs
638 |
639 | While the HAPI server strictly checks all request parameters (servers must return an error code given any unrecognized request parameter as described earlier), the JSON content output by a HAPI server may contain additional, user-defined metadata elements. All non-standard metadata keywords must begin with the prefix `x_` to indicate to HAPI clients that these are extensions. Custom clients could make use of the additional keywords, but standard clients would ignore the extensions. By using the standard prefix, the custom keywords will not conflict with any future keywords added to the HAPI standard. Servers using these extensions may wish to include additional, domain-specific characters after the `x_` to avoid possible collisions with extensions from other servers.
640 |
641 |
642 | # More About
643 | ## Data Types
644 |
645 | Note that there are only a few supported data types: isotime, string, integer, and double. This is intended to keep the client code simple in terms of dealing with the data stream. However, the spec may be expanded in the future to include other types, such as 4 byte floating point values (which would be called float), or 2 byte integers (which would be called short).
646 |
647 | ## The ‘size’ Attribute
648 |
649 | The 'size' attribute is required for array parameters and not allowed for others. The length of the `size` array indicates the number of dimensions, and each element in the size array indicates the number of elements in that dimension. For example, the size attribute for a 1-D array would be a 1-D JSON array of length one, with the one element in the JSON array indicating the number of elements in the data array. For a spectrum, this number of elements is the number of wavelengths or energies in the spectrum. Thus `"size":[9]` refers to a data parameter that is a 1-D array of length 9, and in the `csv` and `binary` output formats, there will be 9 columns for this data parameter. In the `json` output for this data parameter, each record will contain a JSON array of 9 elements (enclosed in brackets `[ ]`).
650 |
651 | For arrays of size 2-D or higher, the column orderings need to be specified for the `csv` and `binary` output formats. In both cases, the later indices are faster moving, so that if you have a 2-D array of `"size":[2,5]` then the 5 item index changes the most quickly. Items in each record will be ordered like this `[0,0] [0,1] [0,2] [0,3] [0,4] [1,0] [1,1] [1,2] [1,3] [1,4]` and the ordering is similarly done for higher dimensions.
652 |
653 | ## 'fill' Values
654 |
655 | Note that fill values for all types must be specified as a string. For `double` and `integer` types, the string should correspond to a numeric value. In other words, using a string like `invalid_int` would not be allowed for an integer fill value. Care should be taken to ensure that the string value given will have an exact numeric representation, and special care should be taken for `double` values which can suffer from round-off problems. For integers, string fill values must correspond to an integer value that is small enough to fit into a 4 byte integer. For `double` parameters, the fill string must parse to an exact IEEE 754 double representation. One suggestion is to use large negative integers, such as `-1.0E30`. The string `NaN` is allowed, in which case `csv` output should contain the string `NaN` for fill values. For double NaN values, the bit pattern for quiet NaN should be used, as opposed to the signaling NaN, which should not be used (see reference [6]). For `string` and `isotime` parameters, the string `fill` value is used at face value, and it should have a length that fits in the length of the data parameter.
656 |
657 | ## Examples
658 |
659 | The following two examples illustrate two different ways to represent a magnetic field dataset. The first lists a time column and three scalar data columns, Bx, By, and Bz for the Cartesian components.
660 | ```
661 | {
662 | "HAPI": "1.1",
663 | "status": { "code": 1200, "message": "OK"},
664 | "startDate": "2016-01-01T00:00:00.000",
665 | "stopDate": "2016-01-31T24:00:00.000",
666 | "parameters": [
667 | {"name" : "timestamp", "type": "isotime", "units": "UTC", "length": 24},
668 | {"name" : "bx", "type": "double", "units": "nT"},
669 | {"name" : "by", "type": "double", "units": "nT"},
670 | {"name" : "bz", "type": "double", "units": "nT"}
671 | ]
672 | }
673 | ```
674 | This example shows a header for the same conceptual data (time and three magnetic field components), but with the three components grouped into a one-dimensional array of size 3.
675 | ```
676 | {
677 | "HAPI": "1.1",
678 | "status": { "code": 1200, "message": "OK"},
679 | "startDate": "2016-01-01T00:00:00.000",
680 | "stopDate": "2016-01-31T24:00:00.000",
681 | "parameters": [
682 | { "name" : "timestamp", "type": "isotime", "units": "UTC", "length": 24 },
683 | { "name" : "b_field", "type": "double", "units": "nT","size": [3] }
684 | ]
685 | }
686 | ```
687 | These two different representations affect how a subset of parameters could be requested from a server. The first example, by listing Bx, By, and Bz as separate parameters, allows clients to request individual components:
688 | ```
689 | http://hapi-server.org/hapi/data?id=MY_MAG_DATA&time.min=2001&time.max=2010&parameters=Bx
690 | ```
691 | This request would just return a time column (always included as the first column) and a Bx column. But in the second example, the components are all inside a single parameter named `b_field` and so a request for this parameter must always return all the components of the parameter. There is no way to request individual elements of an array parameter.
692 |
693 | The following example shows a proton energy spectrum and illustrates the use of the ‘bins’ element. Note also that the uncertainty of the values associated with the proton spectrum are a separate variable. There is currently no way in the HAPI spec to explicitly link a variable to its uncertainties.
694 | ```
695 | {"HAPI": "1.1",
696 | "status": { "code": 1200, "message": "OK"},
697 | "startDate": "2016-01-01T00:00:00.000",
698 | "stopDate": "2016-01-31T24:00:00.000",
699 | "parameters": [
700 | { "name": "Time",
701 | "type": "isotime",
702 | "units": "UTC",
703 | "length": 24
704 | },
705 |     { "name": "qual_flag",
706 |       "type": "integer",
707 |       "units": null
708 |     },
709 | { "name": "maglat",
710 | "type": "double",
711 | "units": "degrees",
712 | "description": "magnetic latitude"
713 | },
714 | { "name": "MLT",
715 | "type": "string",
716 | "length": 5,
717 | "units": "hours:minutes",
718 | "description": "magnetic local time in HH:MM"
719 | },
720 |     { "name": "proton_spectrum",
721 |       "type": "double",
722 |       "size": [3],
723 |       "units": "particles/(sec ster cm^2 keV)",
724 |       "bins": [ {
725 |           "name": "energy",
726 |           "units": "keV",
727 |           "centers": [ 15, 25, 35 ]
728 |       } ]
729 |     },
730 |     { "name": "proton_spectrum_uncerts",
731 |       "type": "double",
732 |       "size": [3],
733 |       "units": "particles/(sec ster cm^2 keV)",
734 |       "bins": [ {
735 |           "name": "energy",
736 |           "units": "keV",
737 |           "centers": [ 15, 25, 35 ]
738 |       } ]
739 |     }
740 |   ]
741 | }
742 | ```
743 | This shows how "ranges" can specify the bins:
744 | ```
745 | {
746 | "HAPI": "1.1",
747 | "status": { "code": 1200, "message": "OK"},
748 | "startDate": "2016-01-01T00:00:00.000",
749 | "stopDate": "2016-01-31T24:00:00.000",
750 | "parameters": [
751 | {
752 | "length": 24,
753 | "name": "Time",
754 | "type": "isotime",
755 | "units": "UTC"
756 | },
757 | {
758 | "bins": [{
759 | "ranges": [
760 | [ 0, 30 ],
761 | [ 30, 60 ],
762 | [ 60, 90 ],
763 | [ 90, 120 ],
764 | [ 120, 150 ],
765 | [ 150, 180 ]
766 | ],
767 | "units": "degrees"
768 | }],
769 | "fill": -1.0E38,
770 | "name": "pitchAngleSpectrum",
771 | "size": [6],
772 | "type": "double",
773 | "units": "particles/sec/cm^2/ster/keV"
774 | }
775 | ]
776 | }
777 |
778 | ```
779 |
780 | # Security Notes
781 |
782 | When the server sees a request parameter that it does not recognize, it should throw an error.
783 |
784 | So given this query
785 |
786 | ```
787 | http://hapi-server.org/hapi/data?id=DATA&time.min=T1&time.max=T2&fields=mag_GSE&avg=5s
788 | ```
789 | the server should throw an error with a status of "1400 - Bad Request" with HTTP status of 400. The server could optionally be more specific with "1401 - Bad request - unknown request parameter", also with an HTTP code of 400 - Bad Request.
790 |
791 | In following general security practices, HAPI servers should carefully screen incoming request parameter names and values. Unknown request parameters and values, including incorrectly formatted time values, should **not** be echoed in the error response.
792 |
793 | # Adoption
794 |
795 | In terms of adopting HAPI as a data delivery mechanism, data providers will likely not want to change existing services, so a HAPI compliant access mechanism could be added alongside existing services. Several demonstration servers exist, but there are not yet any libraries or tools available for providers to use or adapt. These will be made available as they are created. The goal is to create a reference implementation as a full-fledged example that providers could adapt. On the client side, there are also demonstration level capabilities, and Autoplot currently can access HAPI compliant servers. Eventually, libraries in several languages will be made available to assist in writing clients that extract data from HAPI servers. However, even without example code, the HAPI specification is designed to be simple enough so that even small data providers could add HAPI compliant access to their holdings.
796 |
797 |
798 | # References
799 |
800 | [1] ISO 8601:2004, http://dotat.at/tmp/ISO_8601-2004_E.pdf
801 | [2] CSV format, https://tools.ietf.org/html/rfc4180
802 | [3] JSON Format, https://tools.ietf.org/html/rfc7159
803 | [4] "JSON Schema", http://json-schema.org/
804 | [5] IEEE Computer Society (August 29, 2008). "IEEE Standard for Floating-Point Arithmetic". IEEE. doi:10.1109/IEEESTD.2008.4610935. ISBN 978-0-7381-5753-5. IEEE Std 754-2008
805 | [6] IEEE Standard 754 Floating Point Numbers, http://steve.hollasch.net/cgindex/coding/ieeefloat.html
806 |
807 | # Contact
808 |
809 | Todd King (tking@igpp.ucla.edu)
810 | Jon Vandegriff (jon.vandegriff@jhuapl.edu)
811 | Robert Weigel (rweigel@gmu.edu)
812 | Robert Candey (Robert.M.Candey@nasa.gov)
813 | Aaron Roberts (aaron.roberts@nasa.gov)
814 | Bernard Harris (bernard.t.harris@nasa.gov)
815 | Nand Lal (nand.lal-1@nasa.gov)
816 | Jeremy Faden (faden@cottagesystems.com)
817 |
--------------------------------------------------------------------------------
/hapi-1.1.0/HAPI-data-access-spec-1.1.0.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hapi-server/data-specification/41ab442cd41e644639af39502cb2a99b2df61985/hapi-1.1.0/HAPI-data-access-spec-1.1.0.pdf
--------------------------------------------------------------------------------
/hapi-2.0.0/HAPI-data-access-spec-2.0.0.md:
--------------------------------------------------------------------------------
1 | HAPI Data Access Specification
2 | ==============================
3 |
4 | Version 2.0.0 \| Heliophysics Data and Model Consortium (HDMC) \|
5 |
6 |
7 |
8 | Table of Contents
9 |
10 | - [HAPI Data Access Specification](#hapi-data-access-specification)
11 | - [Introduction](#introduction)
12 | - [Endpoints](#endpoints)
13 | - [hapi](#hapi)
14 | - [capabilities](#capabilities)
15 | - [catalog](#catalog)
16 | - [info](#info)
17 | - [data](#data)
18 | - [Data Stream Content](#data-stream-content)
19 | - [Implications of the HAPI data model](#implications-of-the-hapi-data-model)
20 | - [Cross Origin Resource Sharing](#cross-origin-resource-sharing)
21 | - [HAPI Status Codes](#hapi-status-codes)
22 | - [HAPI Client Error Handling](#hapi-client-error-handling)
23 | - [Representation of Time](#representation-of-time)
24 | - [Incoming time values](#incoming-time-values)
25 | - [Outgoing time values](#outgoing-time-values)
26 | - [Additional Keyword / Value Pairs](#additional-keyword--value-pairs)
27 | - [More About](#more-about)
28 | - [Data Types](#data-types)
29 | - [The ‘size’ Attribute](#the-size-attribute)
30 | - ['fill' Values](#fill-values)
31 | - [Examples](#examples)
32 | - [Security Notes](#security-notes)
33 | - [Adoption](#adoption)
34 | - [References](#references)
35 | - [Contact](#contact)
36 | - [Appendix A: Sample Landing Page](#appendix-a-sample-landing-page)
37 | - [Appendix B: JSON Object of HAPI Error Codes](#appendix-b-json-object-of-hapi-error-codes)
38 |
39 |
40 |
41 |
42 | Introduction
43 | ============
44 |
45 | This document describes the Heliophysics Application Programmer’s Interface
46 | (HAPI) specification, which is an API and streaming format specification for
47 | delivering digital time series data. The intent of HAPI is to enhance
48 | interoperability among time series data providers. The HAPI specification
49 | describes a lowest common denominator of services that any provider of time
50 | series data could implement. In fact, many providers already offer access to
51 | their data holdings through some kind of API. The hope is that this
52 | specification captures what many providers are already doing, but just codifies
53 | the specific details so that providers could use the same exact API. This would
54 | make it possible to obtain time series science data content seamlessly from many
55 | sources.
56 |
57 | This document is intended to be used by two groups of people: first by data
58 | providers who want to make time series data available through a HAPI server, and
59 | second by data users who want to understand how data is made available from a
60 | HAPI server, or perhaps to write client software to obtain data from an existing
61 | HAPI server.
62 |
63 | HAPI constitutes a minimum but complete set of capabilities needed for a server
64 | to allow access to the time series data values within one or more data
65 | collections. Because of this focus on access to data content, HAPI is very light
66 | on metadata and data discovery. Within the metadata offered by HAPI are optional
67 | ways to indicate where further descriptive details for any dataset could be
68 | found.
69 |
70 | The API itself is built using REST principles that emphasize URLs as stable
71 | endpoints through which clients can request data. Because it is based on
72 | well-established HTTP request and response rules, a wide range of HTTP clients
73 | can be used to interact with HAPI servers.
74 |
75 | The following definitions are provided first to ensure clarity in ensuing
76 | descriptions.
77 |
78 | **parameter** – a measured science quantity or a related ancillary quantity at
79 | one instant in time; may be scalar as a function of time, or an array at each
80 | time step; must have units; also must have a fill value that represents no
81 | measurement or absent information.
82 |
83 | **dataset** – a collection with a conceptually uniform set of parameters; one
84 | instance of all the parameters associated with a time value
85 | constitutes a data record. A HAPI service presents a dataset as a seamless
86 | collection of time ordered records, offering a way to retrieve the parameters
87 | while hiding actual storage details.
88 |
89 | **request parameter** – keywords that appear after the ‘?’ in a URL with a GET
90 | request.
91 |
92 | Consider this example GET request: http://hapi-server.org/hapi/data?**id**=alpha&**time.min**=2016-07-13
93 |
94 | The two request parameters are `id` and `time.min`. They are shown in bold and
95 | have values of `alpha` and `2016-07-13` respectively. This document will always
96 | use the full phrase "request parameter" to refer to these URL elements to draw a
97 | clear distinction from a parameter in a dataset.
98 |
99 | Endpoints
100 | =========
101 |
102 | The HAPI specification consists of four required endpoints that give clients a
103 | precise way to first determine the data holdings of the server and then to
104 | request data from the server. The functionality of each endpoint is as follows:
105 |
106 | 1. describe the capabilities of the server; lists the output formats the server
107 | can emit (`csv`, `binary`, or `json`, described below)
108 |
109 | 2. list the catalog of datasets that are available; each dataset is associated
110 | with a unique id and may optionally have a title
111 |
112 | 3. show information about a dataset with a given id; a primary component of the
113 | description is the list of parameters in the dataset
114 |
115 | 4. stream data content for a dataset of a given id; the streaming request must
116 | have time bounds (specified by request parameters `time.min` and `time.max`)
117 | and may indicate a subset of parameters (default is all parameters)
118 |
119 | There is also an optional landing page endpoint for the HAPI service that
120 | returns human-readable HTML. Although there is recommended content for this
121 | landing page, it is not essential to the functioning of the server.
122 |
123 | The four required endpoints are REST-style services, in that the resulting HTTP
124 | response is the complete response for each endpoint. In particular, the `data`
125 | endpoint does not just give URLs or links to the data, but rather streams the
126 | data content in the HTTP response. The full specification for each endpoint is
127 | discussed below.
128 |
129 | All endpoints must be directly below a `hapi` path element in the URL:
130 |
131 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
132 | http://hapi-server.org/hapi/capabilities
133 | http://hapi-server.org/hapi/catalog
134 | http://hapi-server.org/hapi/info
135 | http://hapi-server.org/hapi/data
136 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
137 |
138 | All requests to a HAPI server are for retrieving resources and must not change
139 | the server state. Therefore, all HAPI endpoints must respond only to HTTP GET
140 | requests. POST requests should result in an error. This represents a RESTful
141 | approach in which GET requests are restricted to be read-only operations from
142 | the server. The HAPI specification does not allow any input to the server (which
143 | for RESTful services are often implemented using POST requests).
144 |
145 | The input specification for each endpoint (the request parameters and their
146 | allowed values) must be strictly enforced by the server. HAPI servers are not
147 | allowed to add additional request parameters beyond those in the specification.
148 | If a request URL contains any unrecognized or misspelled request parameters, a
149 | HAPI server must respond with an error status. ([See below](#hapi-status-codes)
150 | for more details on how a HAPI server returns status information to clients.)
151 | The principle being followed here is that the server must not silently ignore
152 | unrecognized request parameters, because this would falsely indicate to clients
153 | that the request parameter was understood and was taken into account when
154 | creating the output. For example, if a server is given a request parameter that
155 | is not part of the HAPI specification, such as `averagingInterval=5s`, the
156 | server must report an error for two reasons: 1. additional request parameters are
157 | not allowed, and 2. the server will not be doing any averaging.
158 |
159 | The outputs from a HAPI server to the `catalog`, `capabilities`, and `info`
160 | endpoints are JSON structures, the formats of which are described below in the
161 | sections detailing each endpoint. The `data` endpoint must be able to deliver
162 | Comma Separated Value (CSV) data, but may optionally deliver data content in
163 | binary format or JSON format. The structure of the response stream formats are
164 | described below.
165 |
166 | The following is the detailed specification for the four main HAPI endpoints as
167 | well as the optional landing page endpoint.
168 |
169 | hapi
170 | ----
171 |
172 | This root endpoint is optional and serves as a human-readable landing page for
173 | the server. Unlike the other endpoints, there is no strict definition for the
174 | output, but if present, it should include a brief description of the other
175 | endpoints, and links to documentation on how to use the server. An example
176 | landing page that can be easily customized for a new server is given in Appendix A.
177 |
178 | There are many options for landing page content, such as an HTML view of the
179 | catalog, or links to commonly requested data.
180 |
181 | **Sample Invocation**
182 |
183 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
184 | http://hapi-server.org/hapi
185 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
186 |
187 | **Request Parameters**
188 |
189 | None
190 |
191 | **Response**
192 |
193 | The response is in HTML format with a mime type of `text/html`. The content for
194 | the landing page is not strictly defined, but should look something like the
195 | example below.
196 |
197 | **Example**
198 |
199 | Retrieve landing page for this server.
200 |
201 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
202 | http://hapi-server.org/hapi
203 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
204 |
205 | **Example Response:**
206 |
207 | See Appendix A.
208 |
209 | capabilities
210 | ------------
211 |
212 | This endpoint describes relevant implementation capabilities for this server.
213 | Currently, the only possible variability from server to server is the list of
214 | output formats that are supported.
215 |
216 | A server must support `csv` output format, but `binary` output format and `json`
217 | output may optionally be supported. Servers may support custom output formats,
218 | which would be advertised here. All custom formats listed by a server must begin
219 | with the string `x_` to indicate that they are custom formats and avoid
220 | collisions with possible future additions to the specification.
221 |
222 | **Sample Invocation**
223 |
224 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
225 | http://hapi-server.org/hapi/capabilities
226 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
227 |
228 | **Request Parameters**
229 |
230 | None
231 |
232 | **Response**
233 |
234 | The server's response to this endpoint must be in JSON format [3] as defined by
235 | RFC-7159, and the response must indicate a mime type of `application/json`.
236 | Server capabilities are described using keyword-value pairs, with
237 | `outputFormats` being the only keyword currently in use.
238 |
239 | **Capabilities Object**
240 |
241 | | Name | Type | Description |
242 | |---------------|---------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
243 | | HAPI | string | **Required** The version number of the HAPI specification this description complies with. |
244 | | status | Status object | **Required** Server response status for this request. |
245 | | outputFormats | string array  | **Required** The list of output formats the server can emit. All HAPI servers must support at least `csv` output format, with `binary` and `json` output formats being optional.  |
246 |
247 | **Example**
248 |
249 | Retrieve a listing of capabilities of this server.
250 |
251 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
252 | http://hapi-server.org/hapi/capabilities
253 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
254 |
255 | **Example Response:**
256 |
257 | ```javascript
258 | {
259 | "HAPI": "2.0",
260 | "status": { "code": 1200, "message": "OK"},
261 | "outputFormats": [ "csv", "binary", "json" ]
262 | }
263 | ```
264 |
265 | If a server only reports an output format of `csv`, then requesting `binary`
266 | data should cause the server to respond with an error status. There is a
267 | specific HAPI error code for this, namely `1409 "Bad request - unsupported output
268 | format"` with a corresponding HTTP response code of 400. [See
269 | below](#hapi-status-codes) for more about error responses.
270 |
271 | catalog
272 | -------
273 |
274 | This endpoint provides a list of datasets available from this server.
275 |
276 | **Sample Invocation**
277 |
278 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
279 | http://hapi-server.org/hapi/catalog
280 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
281 |
282 | **Request Parameters**
283 |
284 | None
285 |
286 | **Response**
287 |
288 | The response is in JSON format [3] as defined by RFC-7159 and has a mime type of
289 | `application/json`. The catalog is a simple listing of identifiers for the
290 | datasets available through the server providing the catalog. Additional metadata
291 | about each dataset is available through the `info` endpoint (described below).
292 | The catalog takes no query parameters and always lists the full catalog.
293 |
294 | **Catalog Object**
295 |
296 | | Name | Type | Description |
297 | |---------|------------------|----------------------------------------------------------------------------------------------------|
298 | | HAPI | string | **Required** The version number of the HAPI specification this description complies with. |
299 | | status | object | **Required** Server response status for this request. (see [HAPI Status Codes](#hapi-status-codes)) |
300 | | catalog | array of Dataset | **Required** A list of datasets available from this server. |
301 |
302 | **Dataset Object**
303 |
304 | | Name | Type | Description |
305 | |-------|--------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
306 | | id | string | **Required** The computer friendly identifier that the host system uses to locate the dataset. Each identifier must be unique within the HAPI server where it is provided. |
307 | | title | string | **Optional** A short human readable name for the dataset. If none is given, it defaults to the id. The suggested maximum length is 40 characters. |
308 |
309 | **Example**
310 |
311 | Retrieve a listing of datasets shared by this server.
312 |
313 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
314 | http://hapi-server.org/hapi/catalog
315 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
316 |
317 | **Example Response:**
318 |
319 | ```javascript
320 | {
321 | "HAPI" : "2.0",
322 | "status": { "code": 1200, "message": "OK"},
323 | "catalog" :
324 | [
325 |        {"id": "ACE_MAG", "title": "ACE Magnetometer data"},
326 | {"id": "data/IBEX/ENA/AVG5MIN"},
327 | {"id": "data/CRUISE/PLS"},
328 | {"id": "any_identifier_here"}
329 | ]
330 | }
331 | ```
332 |
333 | The identifiers must be unique within a single HAPI server. Also, dataset
334 | identifiers in the catalog should be stable over time. Including rapidly
335 | changing version numbers or other revolving elements (dates, processing ids,
336 | etc.) in the datasets identifiers should be avoided. The intent of the HAPI
337 | specification is to allow data to be referenced using RESTful URLs that have a
338 | reasonable lifetime.
339 |
340 | Also, note that the identifiers can have slashes in them.
341 |
342 | info
343 | ----
344 |
345 | This endpoint provides a data header for a given dataset. The header is
346 | expressed in JSON format [3] as defined by RFC-7159 and has a mime type of
347 | `application/json`. The focus of the header is to provide enough metadata to
348 | allow automated reading of the data content that is streamed via the `data`
349 | endpoint. The header must include a list of the parameters in the dataset, as
350 | well as the date range covered by the dataset. There are also about ten optional
351 | metadata elements for capturing other high level information such as a brief
352 | description of the dataset, the typical cadence of the data, and ways to learn
353 | more about a dataset. A table below lists all required and optional dataset
354 | attributes in the header.
355 |
356 | Servers may include additional custom (server-specific) keywords or
357 | keyword/value pairs in the header, but any non-standard keywords must begin with
358 | the prefix `x_`.
359 |
360 | Each parameter listed in the header must itself be described by specific
361 | metadata elements, and a separate table below describes the required and
362 | optional parameter attributes.
363 |
364 | By default, all the parameters available in the dataset are listed in the
365 | header. However, a client may request a header for just a subset of the
366 | parameters. The subset of interest is specified as a comma separated list via
367 | the request parameter called `parameters`. (Note that the client would have to
368 | obtain the parameter names from a prior request.) This reduced header is
369 | potentially useful because it is also possible to request a subset of parameters
370 | when asking for data (see the `data` endpoint), and a reduced header can be
371 | requested that would then match the subset of parameters in the data. The server
372 | must ignore duplicates in the subset list, and the server's response must order
373 | the subset of parameters according to the ordering in the original, full list of
374 | parameters. This ensures that a data request for a subset of parameters can be
375 | interpreted properly even if no header is requested. (Although a way to write a
376 | client as safe as possible would be to always request the header, and rely on
377 | the parameter ordering in the header to guide interpretation of the data column
378 | ordering.)
379 |
380 | Note that the `data` endpoint may optionally prepend the info header to the data
381 | stream. In cases where the `data` endpoint response includes a header followed
382 | by `csv` or `binary` data, the header must always end with a newline. This
383 | enables the end of the JSON header to be more easily detected when it is in
384 | front of a binary data response. One good way to detect the end of the header is
385 | to calculate the number of open braces minus the number of closed braces. The last
386 | character in the header is the newline following the closing brace that makes
387 | open braces minus closed braces equal to zero. For `json` output, the header and
388 | data are all within a single JSON entity, and so newlines are not necessary.
389 |
390 | **Sample Invocation**
391 |
392 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
393 | http://hapi-server.org/hapi/info?id=ACE_MAG
394 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
395 |
396 | **Request Parameters**
397 |
398 | | Name | Description |
399 | |------------|-------------------------------------------------------------------|
400 | | id | **Required** The identifier for the dataset. |
401 | | parameters | **Optional** A subset of the parameters to include in the header. |
402 |
403 | **Response**
404 |
405 | The response is in JSON format [3] and provides metadata about one dataset.
406 |
407 | **Info Object**
408 |
409 | | Dataset Attribute | Type | Description |
410 | |-------------------|--------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
411 | | HAPI | string | **Required** The version number of the HAPI specification with which this description complies. |
412 | | status | object | **Required** Server response status for this request. (see [HAPI Status Codes](#hapi-status-codes)) |
413 | | format | string | **Required** (when header is prefixed to data stream) Format of the data as `csv` or `binary` or `json`. |
414 | | parameters | array of Parameter | **Required** Description of the parameters in the data. |
415 | | startDate | string | **Required** [Restricted ISO 8601](#representation-of-time) date/time of first record of data in the entire dataset. |
416 | | stopDate | string | **Required** [Restricted ISO 8601](#representation-of-time) date/time of the last record of data in the entire dataset. For actively growing datasets, the end date can be approximate, but it is the server's job to report an accurate end date. |
417 | | timeStampLocation | string | **Optional** Indicates the positioning of the time stamp within the measurement window. Must be one of `BEGIN`, `CENTER`, `END` or `OTHER`. If this attribute is absent, clients are to assume a default value of `CENTER`, which is meant to indicate the exact middle of the measurement window. A value of `OTHER` indicates that the location of the time stamp in the measurement window is either more complex than the options here, or it is not known. |
418 | | cadence | string | **Optional** Time difference between records as an ISO 8601 duration. This is meant as a guide to the nominal cadence of the data and not a precise statement about the time between measurements. |
419 | | sampleStartDate | string | **Optional** [Restricted ISO 8601](#representation-of-time) date/time of the start of a sample time period for a dataset, where the time period must contain a manageable, representative example of valid, non-fill data. **Required** if `sampleStopDate` given. |
420 | | sampleStopDate | string | **Optional** [Restricted ISO 8601](#representation-of-time) date/time of the end of a sample time period for a dataset, where the time period must contain a manageable, representative example of valid, non-fill data. **Required** if `sampleStartDate` given. |
421 | | description | string | **Optional** A brief description of the dataset. |
422 | | resourceURL | string | **Optional** URL linking to more detailed information about this dataset. |
423 | | resourceID | string | **Optional** An identifier by which this data is known in another setting, for example, the SPASE ID. |
424 | | creationDate | string | **Optional** [Restricted ISO 8601](#representation-of-time) date/time of the dataset creation. |
425 | | modificationDate | string | **Optional** [Restricted ISO 8601](#representation-of-time) date/time of the modification of the any content in the dataset. |
426 | | contact | string | **Optional** Relevant contact person and possibly contact information. |
427 | | contactID | string | **Optional** The identifier in the discovery system for information about the contact. For example, the SPASE ID of the person. |
428 |
429 |
430 | **Parameter**
431 |
432 | The focus of the header is to list the parameters in a dataset. The first
433 | parameter in the list must be a time value. This time column serves as the
434 | independent variable for the dataset. The time column parameter may have any
435 | name, but its type must be `isotime` and there must not be any fill values in
436 | the data stream for this column. Note that the HAPI specification does not
437 | clarify if the time values given are the start, middle, or end of the measurement
438 | intervals. There can be other parameters of type `isotime` in the parameter
439 | list. The table below describes the Parameter items and their allowed types.
440 |
441 | | Parameter Attribute | Type | Description |
442 | |---------------------|----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
443 | | name | string | **Required** A short name for this parameter. It is recommended that all parameter names start with a letter or underscore, followed by letters, underscores or numbers. This allows the parameter names to become variable names in computer languages. Parameter names in a dataset must be unique, and names are not allowed to differ only by having different case. Note that because parameter names can appear in URLs that can serve as permanent links to data, changing them will have negative implications, such as breaking links to data. Therefore, parameter names should be stable over time. |
444 | | type | string | **Required** One of `string`, `double`, `integer`, `isotime`. Binary content for `double` is always 8 bytes in IEEE 754 format, `integer` is 4 bytes signed little-endian. There is no default length for `string` and `isotime` types. [See below](#data-types) for more information on data types. |
445 | | length | integer | **Required** For type `string` and `isotime`; **not allowed for others**. Relevant only when data is streamed in binary format. The maximum number of bytes that the string may contain. If a string has fewer than this maximum number of bytes, the string must be padded with ASCII null bytes. |
446 | | units | string | **Required** The units for the data values represented by this parameter. For dimensionless quantities, the value can be ‘dimensionless’ or `null`. For `isotime` parameters, the units must be `UTC`. |
447 | | size | array of integers | **Required** For array parameters; **not allowed for others**. Must be a 1-D array whose values are the number of array elements in each dimension of this parameter. For example, `"size"=[7]` indicates that the value in each record is a 1-D array of length 7. For the `csv` and `binary` output, there must be 7 columns for this parameter -- one column for each array element, effectively unwinding this array. The `json` output for this data parameter must contain an actual JSON array (whose elements would be enclosed by `[ ]`). For arrays 2-D and higher, such as `"size"=[2,3]`, the later indices are the fastest moving, so that the CSV and binary columns for such a 2 by 3 would be `[0,0]`, `[0,1]`, `[0,2]` and then `[1,0]`, `[1,1]`, `[1,2]`. Note that `"size":[1]` is allowed but discouraged, because clients may interpret it as either an array of length 1 or as a scalar. Similarly, an array size of 1 in any dimension is discouraged, because of ambiguity in the way clients would treat this structure. **NOTE: array sizes of 2-D or higher are experimental at this point, and future versions of this specification may update the way 2-D or higher data is described.** [See below](#the-size-attribute) for more about array sizes. |
448 | | fill | string | **Required** A fill value indicates no valid data is present. If a parameter has no fill present for any records in the dataset, this can be indicated by using a JSON null for this attribute as in `"fill": null` [See below](#fill-values) for more about fill values, **including the issues related to specifying numeric fill values as strings**. Note that since the primary time column cannot have fill values, it must specify `"fill": null` in the header. |
449 | | description | string | **Optional** A brief description of the parameter. |
450 | | bins | array of Bins object | **Optional** For array parameters, each object in the `bins` array corresponds to one of the dimensions of the array, and describes values associated with each element in the corresponding dimension of the array. A table below describes all required and optional attributes within each `bins` object. If the parameter represents a 1-D frequency spectrum, the `bins` array will have one object describing the frequency values for each frequency bin. Within that object, the `centers` attribute points to an array of values to use for the central frequency of each channel, and the `ranges` attribute specifies a range (min to max) associated with each channel. At least one of these must be specified. The bins object has a required `units` keyword (any string value is allowed), and `name` is also required. See below for an example showing a parameter that holds a proton energy spectrum. The use of `bins` to describe values associated with 2-D or higher arrays is currently supported but should be considered experimental. |
451 |
452 | **Bins Object**
453 |
454 | The bins attribute of a parameter is an array of JSON objects. These objects
455 | have the attributes described below. **NOTE: Even though** `ranges` **and**
456 | `centers` **are marked as required, only one of the two must be specified.**
457 |
458 | | Bins Attribute | Type | Description |
459 | |----------------|-------------------------------|-----------------------------------------------------------------|
460 | | name | string | **Required** Name for the dimension (e.g. "Frequency"). |
461 | | centers | array of n doubles | **Required** The centers of each bin. |
462 | | ranges | array of n array of 2 doubles | **Required** The boundaries for each bin. |
463 | | units | string | **Required** The units for the bin ranges and/or center values. |
464 | | description | string | **Optional** Brief comment explaining what the bins represent. |
465 |
466 | Note that some dimensions of a multi-dimensional parameter may not represent binned data. Each dimension must be described in the `bins` object, but any dimension not representing binned data should indicate this by using `"centers": null` and not including the `ranges` attribute.
467 |
468 |
469 | **Example**
470 |
471 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
472 | http://hapi-server.org/hapi/info?id=ACE_MAG
473 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
474 |
475 | **Example Response:**
476 |
477 | ```javascript
478 | { "HAPI": "2.0",
479 | "status": { "code": 1200, "message": "OK"},
480 | "startDate": "1998-001Z",
481 | "stopDate" : "2017-100Z",
482 | "parameters": [
483 | { "name": "Time",
484 | "type": "isotime",
485 | "units": "UTC",
486 | "fill": null,
487 | "length": 24 },
488 | { "name": "radial_position",
489 | "type": "double",
490 | "units": "km",
491 | "fill": null,
492 | "description": "radial position of the spacecraft" },
493 | { "name": "quality flag",
494 | "type": "integer",
495 | "units": "none",
496 | "fill": null,
497 |       "description": "0=OK and 1=bad " },
498 | { "name": "mag_GSE",
499 | "type": "double",
500 | "units": "nT",
501 | "fill": "-1e31",
502 | "size" : [3],
503 | "description": "hourly average Cartesian magnetic field in nT in GSE" }
504 | ]
505 | }
506 | ```
507 |
508 | **Subsetting the Parameters**
509 |
510 | Clients may request a response that includes only a subset of the parameters in
511 | a dataset. When creating a header for a subset of parameters (via the `info`
512 | endpoint), or a data stream for a subset of parameters (via the `data` endpoint,
513 | described next), the logic on the server is the same in terms of what dataset
514 | parameters are included in the response. The primary time parameter (always
515 | required to be the first parameter in the list) is always included, even if not
516 | requested. These examples clarify the way a server must respond to various types
517 | of dataset parameter subsetting requests:
518 |
519 | - **request:** do not ask for any specific parameters (i.e., there is no request
520 | parameter called ‘parameters’);
521 | **example:** `http://hapi-server.org/hapi/data?id=MY_MAG_DATA&time.min=1999Z&time.max=2000Z`
522 | **response:** all columns
523 |
524 | - **request:** ask for just the primary time parameter;
525 | **example:** `http://hapi-server.org/hapi/data?id=MY_MAG_DATA¶meters=Epoch&time.min=1999Z&time.max=2000Z`
526 | **response:** just the primary time column
527 |
528 | - **request:** ask for a single parameter other than the primary time column (like ‘parameters=Bx’);
529 | **example:** `http://hapi-server.org/hapi/data?id=MY_MAG_DATA¶meters=Bx&time.min=1999Z&time.max=2000Z`
530 | **response:** primary time column and the one requested data column
531 |
532 | - **request:** ask for two or more parameters other than the primary time column;
533 | **example:** `http://hapi-server.org/hapi/data?id=MY_MAG_DATA¶meters=Bx,By&time.min=1999Z&time.max=2000Z`
534 | **response:** primary time column followed by the requested parameters in the
535 | order they occurred in the original, non-subsetted dataset header (not in
536 | the order of the subset request)
537 |
538 | - **request:** including the `parameters` option, but not specifying any parameter names;
539 | **example:** `http://hapi-server.org/hapi/data?id=MY_MAG_DATA¶meters=&time.min=1999Z&time.max=2000Z`
540 |   **response:** this is an error condition; server should report a user input error
541 |
542 |
543 | data
544 | ----
545 |
546 | Provides access to a dataset and allows for selecting time ranges and parameters
547 | to return. Data is returned as a stream in CSV [2], binary, or JSON format. The
548 | [Data Stream Content](#data-stream-content) section describes the stream
549 | structure and layout for each format.
550 |
551 | The resulting data stream can be thought of as a stream of records, where each
552 | record contains one value for each of the dataset parameters. Each data record
553 | must contain a data value or a fill value (of the same data type) for each
554 | parameter.
555 |
556 | **Request Parameters**
557 |
558 | | Name | Description |
559 | |------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
560 | | id | **Required** The identifier for the dataset. |
561 | | time.min | **Required** The inclusive begin time for the data to include in the response. |
562 | | time.max | **Required** The exclusive end time for the data to include in the response. |
563 | | parameters | **Optional** A comma separated list of parameters to include in the response. Default is all parameters. |
564 | | include | **Optional** Has one possible value of "header" to indicate that the info header should precede the data. The header lines will be prefixed with the "\#" character. |
565 | | format | **Optional** The desired format for the data stream. Possible values are "csv", "binary", and "json". |
566 |
567 | **Response**
568 |
569 | Response is in one of three formats: CSV format as defined by RFC-4180 with a
570 | mime type of `text/csv`; binary format where floating point numbers are in IEEE
571 | 754 [5] format and byte order is LSB and a mime type of
572 | `application/octet-stream`; JSON format with the structure as described below
573 | and a mime type of `application/json`. The default data format is CSV. See the
574 | section on Data Stream Content for more details.
575 |
576 | If the header is requested, then for binary and CSV formats, each line of the
577 | header must begin with a hash (\#) character. For JSON output, no prefix
578 | character should be used, because the data object will just be another JSON
579 | element within the response. Other than the possible prefix character, the
580 | contents of the header should be the same as returned from the info endpoint.
581 | When a data stream has an attached header, the header must contain an additional
582 | "format" attribute to indicate if the content after the header is "csv",
583 | "binary", or "json". Note that when a header is included in a CSV response, the
584 | data stream is not strictly in CSV format.
585 |
586 | The first parameter in the data must be a time column (type of "isotime") and
587 | this must be the independent variable for the dataset. If a subset of parameters
588 | is requested, the time column is always provided, even if it is not requested.
589 |
590 | Note that the `time.min` request parameter represents an inclusive lower bound
591 | and `time.max` request parameter is the exclusive upper bound. The server must
592 | return data records within these time constraints, i.e., no extra records
593 | outside the requested time range. This enables concatenation of results from
594 | adjacent time ranges.
595 |
596 | There is an interaction between the `info` endpoint and the `data` endpoint,
597 | because the header from the `info` endpoint describes the record structure of
598 | data emitted by the `data` endpoint. Thus after a single call to the `info`
599 | endpoint, a client could make multiple calls to the `data` endpoint (for
600 | multiple time ranges, for example) with the expectation that each data response
601 | would contain records described by the single call to the `info` endpoint. The
602 | `data` endpoint can optionally prefix the data stream with header information,
603 | potentially obviating the need for the `info` endpoint. But the `info` endpoint
604 | is useful in that it allows clients to learn about a dataset without having to
605 | make a data request.
606 |
607 | Both the `info` and `data` endpoints take an optional request parameter (recall
608 | the definition of request parameter in the introduction) called `parameters`
609 | that allows users to restrict the dataset parameters listed in the header and
610 | data stream, respectively. This enables clients (that already have a list of
611 | dataset parameters from a previous info or data request) to request a header for
612 | a subset of parameters that will match a data stream for the same subset of
613 | parameters. Consider the following dataset header for a fictional dataset with
614 | the identifier MY\_MAG\_DATA.
615 |
616 | An `info` request like this:
617 |
618 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
619 | http://hapi-server.org/hapi/info?id=MY_MAG_DATA
620 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
621 |
622 | would result in a header listing of all the dataset parameters:
623 |
624 | ```javascript
625 | { "HAPI": "2.0",
626 | "status": { "code": 1200, "message": "OK"},
627 | "startDate": "2005-01-21T12:05:00.000Z",
628 | "stopDate" : "2010-10-18T00:00:00Z",
629 | "parameters": [
630 | { "name": "Time",
631 | "type": "isotime",
632 | "units": "UTC",
633 | "fill": null,
634 | "length": 24 },
635 | { "name": "Bx", "type": "double", "units": "nT", "fill": "-1e31"},
636 | { "name": "By", "type": "double", "units": "nT", "fill": "-1e31"},
637 |        { "name": "Bz", "type": "double", "units": "nT", "fill": "-1e31"}
638 | ]
639 | }
640 | ```
641 |
642 | An `info` request like this:
643 |
644 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
645 | http://hapi-server.org/hapi/info?id=MY_MAG_DATA¶meters=Bx
646 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
647 |
648 | would result in a header listing only the one dataset parameter:
649 |
650 | ```javascript
651 | { "HAPI": "2.0",
652 | "status": { "code": 1200, "message": "OK"},
653 | "startDate": "2005-01-21T12:05:00.000Z",
654 | "stopDate" : "2010-10-18T00:00:00Z",
655 | "parameters": [
656 | { "name": "Time",
657 | "type": "isotime",
658 | "units": "UTC",
659 | "fill": null,
660 | "length": 24 },
661 |        { "name": "Bx", "type": "double", "units": "nT", "fill": "-1e31" }
662 | ]
663 | }
664 | ```
665 |
666 | Note that the time parameter is included even though it was not requested.
667 |
668 | ### Data Stream Content
669 |
670 | The three possible output formats are `csv`, `binary`, and `json`. A HAPI server
671 | must support `csv`, while `binary` and `json` are optional.
672 |
673 | **CSV Output**
674 |
675 | The format of the CSV stream should follow the guidelines for CSV data as described
676 | by [2] (RFC 4180). Each CSV record is one line of text, with commas between the
677 | values for each dataset parameter. Any value containing a comma must be surrounded
678 | with double quotes, and any double quote within a value must be escaped by a preceding
679 | double quote. An array parameter (i.e., the value of a
680 | parameter within one record is an array) will have multiple columns resulting
681 | from placing each element in the array into its own column. For 1-D arrays, the
682 | ordering of the unwound columns is just the index ordering of the array
683 | elements. For 2-D arrays or higher, the right-most array index is the fastest
684 | moving index when mapping array elements to columns.
685 |
686 | It is up to the server to decide how much precision to include in the ASCII
687 | values when generating CSV output.
688 |
689 | Client programs interpreting the HAPI CSV stream are encouraged to use
690 | existing CSV parsing libraries to be able to interpret the full range
691 | of possible CSV values, including quoted commas and escaped quotes.
692 | However, it is expected that a simplistic CSV parser would probably
693 | handle more than 90% of known cases.
694 |
695 | **Binary Output**
696 |
697 | The binary data output is best described as a binary translation of the CSV
698 | stream, with full numerical precision and no commas or newlines. Recall that the dataset
699 | header provides type information for each dataset parameter, and this
700 | definitively indicates the number of bytes and the byte structure of each
701 | parameter, and thus of each binary record in the stream. Array parameters are
702 | unwound in the same way for binary as for CSV data as described above.
703 | All numeric values are little endian (LSB), integers are always signed and four
704 | byte, and floating point values are always IEEE 754 double precision values.
705 |
706 | Dataset parameters of type `string` and `isotime` (which are just strings of ISO
707 | 8601 dates) have a maximum length specified in the info header. This length indicates how
708 | many bytes to read for each string value. If the string content is less than
709 | the length, the remaining bytes must be padded with ASCII null bytes. If a string
710 | uses all the bytes specified in the length, no null terminator or padding is needed.
711 |
712 |
713 | **JSON Output**
714 |
715 | For the JSON output, an additional `data` element added to the header contains
716 | the array of data records. These records are very similar to the CSV output,
717 | except that strings must be quoted and arrays must be delimited with array
718 | brackets in standard JSON fashion. An example helps illustrate what the JSON
719 | format looks like. Consider a dataset with four parameters: time, a scalar
720 | value, an 1-D array value with array length of 3, and a string value. The header
721 | with the data object might look like this:
722 |
723 | ```javascript
724 | { "HAPI": "2.0",
725 | "status": { "code": 1200, "message": "OK"},
726 | "startDate": "2005-01-21T12:05:00.000Z",
727 | "stopDate" : "2010-10-18T00:00:00Z",
728 | "parameters": [
729 | { "name": "Time", "type": "isotime", "units": "UTC", "fill": null, "length": 24 },
730 | { "name": "quality_flag", "type": "integer", "description": "0=ok; 1=bad", "fill": null },
731 | { "name": "mag_GSE", "type": "double", "units": "nT", "fill": "-1e31", "size" : [3],
732 | "description": "hourly average Cartesian magnetic field in nT in GSE" },
733 | { "name": "region", "type": "string", "length": 20, "fill": "???", "units" : null}
734 | ],
735 | "format": "json",
736 | "data" : [
737 | ["2010-001T12:01:00Z",0,[0.44302,0.398,-8.49],"sheath"],
738 | ["2010-001T12:02:00Z",0,[0.44177,0.393,-9.45],"sheath"],
739 | ["2010-001T12:03:00Z",0,[0.44003,0.397,-9.38],"sheath"],
740 | ["2010-001T12:04:00Z",1,[0.43904,0.399,-9.16],"sheath"]
741 | ]
742 |
743 | }
744 | ```
745 |
746 | The data element is a JSON array of records. Each record is itself an array of
747 | parameters. The time and string values are in quotes, and any data parameter in
748 | the record that is an array must be inside square brackets. This data element
749 | appears as the last JSON element in the header.
750 |
751 | The record-oriented arrangement of the JSON format is designed to allow a
752 | streaming client reader to begin reading (and processing) the JSON data stream
753 | before it is complete. Note also that servers can start streaming the data as
754 | soon as records are available. In other words, the JSON format can be read and
755 | written without first having to hold all the records in memory. This may require
756 | some custom elements in the JSON parser, but preserving this streaming
757 | capability is important for keeping the HAPI spec scalable. Note that if pulling
758 | all the data content into memory is not a problem, then ordinary JSON parsers
759 | will also have no trouble with this JSON arrangement.
760 |
761 | **Errors While Streaming Data**
762 |
763 | If the server encounters an error while streaming the data and can no longer
764 | continue, it will have to terminate the stream. The `status` code (both HTTP and
765 | HAPI) and message will already have been set in the header and is unlikely to
766 | represent the error. Clients will have to be able to detect an abnormally
767 | terminated stream, and should treat this aborted condition the same as an
768 | internal server error. See [HAPI Status Codes](#hapi-status-codes) for more
769 | about error conditions.
770 |
771 | **Examples**
772 |
773 | Two examples of data requests and responses are given – one with the header and
774 | one without.
775 |
776 | **Data with Header**
777 |
778 | Note that in the following request, the header is to be included, so the same
779 | header from the `info` endpoint will be prepended to the data, but with a ‘\#’
780 | character as a prefix for every header line.
781 |
782 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
783 | http://hapi-server.org/hapi/data?id=path/to/ACE_MAG&time.min=2016-01-01Z&time.max=2016-02-01Z&include=header
784 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
785 |
786 | **Example Response: Data with Header**
787 |
788 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
789 | #{
790 | # "HAPI": "2.0",
791 | # "status": { "code": 1200, "message": "OK"},
792 | # "format": "csv",
793 | # "startDate": "1998-001Z",
794 | # "stopDate" : "2017-001Z",
795 | # "parameters": [
796 | # { "name": "Time",
797 | # "type": "isotime",
798 | # "units": "UTC",
799 | # "fill": null,
800 | # "length": 24
801 | # },
802 | # { "name": "radial_position",
803 | # "type": "double",
804 | # "units": "km",
805 | # "fill": null,
806 | # "description": "radial position of the spacecraft"
807 | # },
808 | # { "name": "quality flag",
809 | # "type": "integer",
810 | #        "units": null,
811 | # "fill": null,
812 | #        "description": "0=OK and 1=bad "
813 | # },
814 | # { "name": "mag_GSE",
815 | # "type": "double",
816 | # "units": "nT",
817 | # "fill": "-1e31",
818 | # "size" : [3],
819 | # "description": "hourly average Cartesian magnetic field in nT in GSE"
820 | # }
821 | # ]
822 | #}
823 | 2016-01-01T00:00:00.000Z,6.848351,0,0.05,0.08,-50.98
824 | 2016-01-01T01:00:00.000Z,6.890149,0,0.04,0.07,-45.26
825 | ...
826 | ...
827 | 2016-01-01T02:00:00.000Z,8.142253,0,2.74,0.17,-28.62
828 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
829 |
830 | **Data Only**
831 |
832 | The following example is the same, except it lacks the request to include the
833 | header.
834 |
835 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
836 | http://hapi-server.org/hapi/data?id=path/to/ACE_MAG&time.min=2016-01-01&time.max=2016-02-01
837 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
838 |
839 | **Example Response: Data Only**
840 |
841 | Consider a dataset that contains a time field, two scalar fields and one array
842 | field of length 3. The response will look something like:
843 |
844 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
845 | 2016-01-01T00:00:00.000Z,6.848351,0,0.05,0.08,-50.98
846 | 2016-01-01T01:00:00.000Z,6.890149,0,0.04,0.07,-45.26
847 | ...
848 | ...
849 | 2016-01-01T02:00:00.000Z,8.142253,0,2.74,0.17,-28.62
850 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
851 |
852 | Note that there is no leading row with column names. The CSV standard [2]
853 | indicates that such a header row is optional. Leaving out this row avoids the
854 | complication of having to name individual columns representing array elements
855 | within an array parameter. Recall that an array parameter has only a single
856 | name. The place HAPI specifies parameter names is via the `info` endpoint, which
857 | also provides size details for each parameter (scalar or array, and array size
858 | if needed). The size of each parameter must be used to determine how many
859 | columns it will use in the CSV data. By not specifying a row of column names,
860 | HAPI avoids the need to have a naming convention for columns representing
861 | elements within an array parameter.
862 |
863 | Implications of the HAPI data model
864 | ===================================
865 |
866 | Because HAPI requires a single time column to be the first column, this requires
867 | each record (one row of data) to be associated with one time value (the first
868 | value in the row). This has implications for serving files with multiple time
869 | arrays in them. Suppose a file contains 1 second data, 3 second data, and 5
870 | second data, all from the same measurement but averaged differently. A HAPI
871 | server could expose this data, but not as a single dataset. To a HAPI server,
872 | each time resolution could be presented as a separate dataset, each with its own
873 | unique time array.
874 |
875 | Cross Origin Resource Sharing
876 | =============================
877 |
878 | Because of the increasing importance of JavaScript clients that use AJAX
879 | requests, HAPI servers are strongly encouraged to implement Cross Origin
880 | Resource Sharing (CORS) https://www.w3.org/TR/cors/. This will allow AJAX
881 | requests by browser clients from any domain. For servers with only public data,
882 | enabling CORS is fairly common, and not implementing CORS limits the type of
883 | clients that can interface with a HAPI server. Server implementors are strongly
884 | encouraged to pursue deeper understanding before proceeding with CORS. For
885 | testing purposes, the following headers have been sufficient for browser clients
886 | to HAPI servers:
887 |
888 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
889 | Access-Control-Allow-Origin: *
890 | Access-Control-Allow-Methods: GET
891 | Access-Control-Allow-Headers: Content-Type
892 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
893 |
894 | HAPI Status Codes
895 | =================
896 |
897 | There are two levels of error reporting a HAPI server must perform. Because
898 | every HAPI server response is an HTTP response, an appropriate HTTP status must
899 | be set for each response. Although the HTTP codes are robust, they are more
900 | difficult for clients to extract -- a HAPI client using a high-level URL
901 | retrieving mechanism may not have easy access to HTTP header content. Therefore,
902 | every HAPI response with a header must also include a `status` object indicating
903 | if the request succeeded or not. The two status indicators (HAPI and HTTP) must
904 | be consistent, i.e., if one indicates success, so must the other.
905 |
906 | The status information returned from an endpoint is as follows:
907 |
908 | | Name | Type | Description |
909 | |---------|---------|--------------------------------------------------------------------------------------------------------------------|
910 | | code | integer | Specific value indicating the category of the outcome of the request - see [HAPI Status Codes](#hapi-status-codes). |
911 | | message | string | Human readable description of the status - must conceptually match the intent of the integer code. |
912 |
913 | HAPI servers must categorize the response status using at least the following
914 | three status codes: 1200 - OK, 1400 - Bad Request, and 1500 - Internal Server
915 | Error. These are intentionally analogous to the similar HTTP codes 200 - OK, 400 -
916 | Bad Request, and 500 - Internal Server Error. Note that HAPI code numbers are
917 | 1000 higher than the HTTP codes to avoid collisions. For these three simple
918 | status categorizations, the HTTP code can be derived from the HAPI code by just
919 | subtracting 1000. The following table summarizes the minimum required status
920 | response categories.
921 |
922 | | HTTP code | HAPI status `code` | HAPI status `message` |
923 | |-----------|--------------------|--------------------------------|
924 | | 200 | 1200 | OK |
925 | | 400 | 1400 | Bad request - user input error |
926 | | 500 | 1500 | Internal server error |
927 |
928 | The exact wording in the message does not need to match what is shown here. The
929 | conceptual message must be consistent with the status, but the wording is
930 | allowed to be different (or in another language, for example).
931 |
932 | The `capabilities` and `catalog` endpoints just need to indicate "1200 - OK" or
933 | "1500 - Internal Server Error" since they do not take any request parameters.
934 | The `info` and `data` endpoints do take request parameters, so their status
935 | response must include "1400 - Bad Request" when appropriate.
936 |
937 | Servers may optionally provide a more specific error code for the following
938 | common types of input processing problems. For convenience, a JSON object with these error codes is given in Appendix B. It is recommended but not required
939 | that a server implement this more complete set of status responses. Servers may
940 | add their own codes, but must use numbers outside the 1200s, 1400s, and 1500s to
941 | avoid collisions with possible future HAPI codes.
942 |
943 | | HTTP code | HAPI status `code` | HAPI status `message` |
944 | |-----------|--------------------|------------------------------------------------|
945 | | 200 | 1200 | OK |
946 | | 200 | 1201 | OK - no data for time range |
947 | | 400 | 1400 | Bad request - user input error |
948 | | 400 | 1401 | Bad request - unknown API parameter name |
949 | | 400 | 1402 | Bad request - error in start time |
950 | | 400 | 1403 | Bad request - error in stop time |
951 | | 400 | 1404 | Bad request - start time equal to or after stop time |
952 | | 400 | 1405 | Bad request - time outside valid range |
953 | | 404 | 1406 | Bad request - unknown dataset id |
954 | | 404 | 1407 | Bad request - unknown dataset parameter |
955 | | 400 | 1408 | Bad request - too much time or data requested |
956 | | 400 | 1409 | Bad request - unsupported output format |
957 | | 400 | 1410 | Bad request - unsupported include value |
958 | | 500 | 1500 | Internal server error |
959 | | 500 | 1501 | Internal server error - upstream request error |
960 |
961 | Note that there is an OK status to indicate that the request was properly
962 | fulfilled, but that no data was found. This can be very useful feedback to
963 | clients and users, who may otherwise suspect server problems if no data is
964 | returned.
965 |
966 | Note also the response 1408 indicating that the server will not fulfill the
967 | request, since it is too large. This gives a HAPI server a way to let clients
968 | know about internal limits within the server.
969 |
970 | In cases where the server cannot create a full response (such as an `info`
971 | request or `data` request for an unknown dataset), the JSON header response must
972 | include the HAPI version and a HAPI status object indicating that an error has
973 | occurred.
974 |
975 | ```javascript
976 | {
977 | "HAPI": "2.0",
978 | "status": { "code": 1401, "message": "Bad request - unknown request parameter"}
979 | }
980 | ```
981 |
982 | If no JSON header was requested, then the HTTP error will be the only indicator
983 | of a problem. Similarly, for the `data` endpoint, clients may request data with
984 | no JSON header, and in this case, the HTTP status is the only place a client can
985 | determine the response status.
986 |
987 | HAPI Client Error Handling
988 | --------------------------
989 |
990 | Because web servers are not required to limit HTTP return codes to those in the
991 | above table, HAPI clients should be able to handle the full range of HTTP
992 | responses. Also, the HAPI server code may not be the only software to interact
993 | with a URL-based request from a HAPI server. There may be a load balancer or
994 | upstream request routing or caching mechanism in place. Therefore, it is good
995 | client-side practice to be able to handle any HTTP errors.
996 |
997 | Representation of Time
998 | ======================
999 |
1000 | The HAPI specification is focused on access to time series data, so
1001 | understanding how the server parses and emits time values is important.
1002 | Time values are always strings, and the format is based on the ISO 8601
1003 | standard: https://en.wikipedia.org/wiki/ISO_8601.
1004 |
1005 | The name of the time parameter is not constrained by this specification.
1006 | However, it is strongly recommended that the time column name be "Time"
1007 | or "Epoch" or some easily recognizable label.
1008 |
1009 | Incoming time values
1010 | --------------------
1011 |
1012 | Servers must require incoming time values from clients (i.e.,
1013 | the `time.min` and `time.max` values on a data request) to be
1014 | valid ISO 8601 time values.
1015 | The full ISO 8601 specification allows many esoteric options, but servers must only
1016 | accept a subset of the full ISO 8601 specification,
1017 | namely one of either year-month-day (yyyy-mm-ddThh:mm:ss.sssZ) or day-of-year (yyyy-dddThh:mm:ss.sssZ).
1018 | Any date or time elements missing from the string are assumed to
1019 | take on their smallest possible value. For example, the string `2017-01-15T23:00:00.000Z`
1020 | could be given in truncated form as `2017-01-15T23Z`. Servers should be able to parse and
1021 | properly interpret these truncated time strings. When clients provide
1022 | a date at day resolution only, the T must not be included, so servers should be
1023 | able to parse day-level time strings without the T, as in `2017-01-15Z`.
1024 |
1025 | Note that in the ISO 8601 specification, a trailing Z on the time string
1026 | indicates that no time zone offset should be applied (so the time zone is
1027 | GMT+0). If a server receives an input value without the trailing Z, it should
1028 | still interpret the time zone as GMT+0 rather than a local time zone. This is true
1029 | for time strings with all fields present and for truncated time strings with some
1030 | fields missing.
1031 |
1032 | | Example time range request | comments |
1033 | |------------------------------|------------------------------------------------|
1034 | | `time.min=2017-01-15T00:00:00.000Z&time.max=2017-01-16T00:00:00.000Z` | OK - fully specified time value with proper trailing Z |
1035 | | `time.min=2017-01-15Z&time.max=2017-01-16Z` | OK - truncated time value that assumes 00:00.000 for the time |
1036 | | `time.min=2017-01-15&time.max=2017-01-16` | OK - truncated with missing trailing Z, but GMT+0 should be assumed |
1037 |
1038 |
1039 | There is no restriction on the earliest date or latest date a HAPI server can accept, but as
1040 | a practical limit, clients are likely to be written to handle dates only in the range from
1041 | years 1700 to 2100.
1042 |
1043 | Outgoing time values
1044 | --------------------
1045 |
1046 | Time values in the outgoing data stream must be ISO 8601 strings. A server may
1047 | use one of either the yyyy-mm-ddThh:mm:ssZ or the yyyy-dddThh:mm:ssZ form, but must
1048 | use one format and length within any given dataset. The time values must not have any local
1049 | time zone offset, and they must indicate this by including the trailing Z.
1050 | Time or date elements may be omitted from the end
1051 | to indicate that the missing time or date elements should be given their lowest possible
1052 | value. For date values at day resolution (i.e., no time values), the T must be
1053 | omitted, but the Z is still required. Note that this implies that clients must be able
1054 | to parse potentially truncated ISO strings of both Year-Month-Day and Year-Day-of-year flavors.
1055 |
1056 | For `binary` and `csv` data, the length of time string, truncated or not, is indicated
1057 | with the `length` attribute for the time parameter, which refers to the number of printable
1058 | characters in the string. Every time string must have the same length and so padding of time strings is not needed.
1059 |
1060 | The data returned from a request should strictly fall within the limits of
1061 | `time.min` and `time.max`, i.e., servers should not pad the data with extra
1062 | records outside the requested time range. Furthermore, note that the `time.min`
1063 | value is inclusive (data at or beyond this time can be included), while
1064 | `time.max` is exclusive (data at or beyond this time shall not be included in
1065 | the response).
1066 |
1067 | The primary time column is not allowed to contain any fill values. Each record
1068 | must be identified with a valid time value. If other columns contain parameters
1069 | of type `isotime` (i.e., time columns that are not the primary time column),
1070 | there may be fill values in these columns. Note that the `fill` definition is
1071 | required for all types, including `isotime` parameters. The fill value for a
1072 | (non-primary) `isotime` parameter does not have to be a valid time string - it
1073 | can be any string, but it must be the same length string as the time variable.
1074 |
1075 | Note that the ISO 8601 time format allows arbitrary precision on the time
1076 | values. HAPI servers should therefore also accept time values with high
1077 | precision. As a practical limit, servers should at least handle time values down
1078 | to the nanosecond or picosecond level.
1079 |
1080 | HAPI metadata (in the `info` header for a dataset) allows a server to specify
1081 | where time stamps fall within the measurement window. The `timeStampLocation`
1082 | attribute for a dataset is an enumeration with possible values of `BEGIN`, `CENTER`,
1083 | `END`, or `OTHER`. This attribute is optional, but the default value is `CENTER`,
1084 | which refers to the exact middle of the measurement window. If the location of
1085 | the time stamp is not known or is more complex than any of the allowed options,
1086 | the server can report `OTHER` for the `timeStampLocation`. Clients are likely
1087 | to use `CENTER` for `OTHER`, simply because there is not much else they can do.
1088 | Note that the optional `cadence` attribute is not meant to be accurate
1089 | enough to use as a way to compute an alternate time stamp location. In other words,
1090 | given a `timeStampLocation` of `BEGIN` and a `cadence` of 10 seconds,
1091 | it may not always work to just add 5 seconds to get to the center of the
1092 | measurement interval for this dataset. This is because the `cadence` provides
1093 | a nominal duration, and the actual duration of each measurement may vary significantly
1094 | throughout the dataset.
1095 | Some datasets may have specific parameters devoted to accumulation time, or other
1096 | measurement window parameters, but HAPI metadata does not capture this level
1097 | of measurement window details.
1098 |
1099 | Additional Keyword / Value Pairs
1100 | ================================
1101 |
1102 | While the HAPI server strictly checks all request parameters (servers must
1103 | return an error code given any unrecognized request parameter as described
1104 | earlier), the JSON content output by a HAPI server may contain additional,
1105 | user-defined metadata elements. All non-standard metadata keywords must begin
1106 | with the prefix `x_` to indicate to HAPI clients that these are extensions.
1107 | Custom clients could make use of the additional keywords, but standard clients
1108 | would ignore the extensions. By using the standard prefix, the custom keywords
1109 | will not conflict with any future keywords added to the HAPI standard. Servers
1110 | using these extensions may wish to include additional, domain-specific
1111 | characters after the `x_` to avoid possible collisions with extensions from
1112 | other servers.
1113 |
1114 | More About
1115 | ==========
1116 |
1117 | Data Types
1118 | ----------
1119 |
1120 | Note that there are only a few supported data types: isotime, string, integer,
1121 | and double. This is intended to keep the client code simple in terms of dealing
1122 | with the data stream. However, the spec may be expanded in the future to include
1123 | other types, such as 4 byte floating point values (which would be called float),
1124 | or 2 byte integers (which would be called short).
1125 |
1126 | The ‘size’ Attribute
1127 | --------------------
1128 |
1129 | The 'size' attribute is required for array parameters and not allowed for
1130 | others. The length of the `size` array indicates the number of dimensions, and
1131 | each element in the size array indicates the number of elements in that
1132 | dimension. For example, the size attribute for a 1-D array would be a 1-D JSON
1133 | array of length one, with the one element in the JSON array indicating the
1134 | number of elements in the data array. For a spectrum, this number of elements is
1135 | the number of wavelengths or energies in the spectrum. Thus `"size":[9]` refers
1136 | to a data parameter that is a 1-D array of length 9, and in the `csv` and
1137 | `binary` output formats, there will be 9 columns for this data parameter. In the
1138 | `json` output for this data parameter, each record will contain a JSON array of
1139 | 9 elements (enclosed in brackets `[ ]`).
1140 |
1141 | For arrays of size 2-D or higher, the column orderings need to be specified for
1142 | the `csv` and `binary` output formats. In both cases, the later indices are
1143 | faster moving, so that if you have a 2-D array of `"size":[2,5]` then the 5 item
1144 | index changes the most quickly. Items in each record will be ordered like this
1145 | `[0,0] [0,1] [0,2] [0,3] [0,4] [1,0] [1,1] [1,2] [1,3] [1,4]` and the
1146 | ordering is similarly done for higher dimensions.
1147 |
1148 | 'fill' Values
1149 | -------------
1150 |
1151 | Note that fill values for all types must be specified as a string. For `double`
1152 | and `integer` types, the string should correspond to a numeric value. In other
1153 | words, using a string like `invalid_int` would not be allowed for an integer
1154 | fill value. Care should be taken to ensure that the string value given will have
1155 | an exact numeric representation, and special care should be taken for `double`
1156 | values which can suffer from round-off problems. For integers, string fill
1157 | values must correspond to an integer value that is small enough to fit into a 4
1158 | byte signed integer. For `double` parameters, the fill string must parse to an exact
1159 | IEEE 754 double representation. One suggestion is to use large negative
1160 | integers, such as `-1.0E30`. The string `NaN` is allowed, in which case `csv`
1161 | output should contain the string `NaN` for fill values. For double NaN values,
1162 | the bit pattern for quiet NaN should be used, as opposed to the signaling NaN,
1163 | which should not be used (see reference [6]). For `string` and `isotime`
1164 | parameters, the string `fill` value is used at face value, and it should have a
1165 | length that fits in the length of the data parameter.
1166 |
1167 | Examples
1168 | --------
1169 |
1170 | The following two examples illustrate two different ways to represent a magnetic
1171 | field dataset. The first lists a time column and three scalar data columns, Bx,
1172 | By, and Bz for the Cartesian components.
1173 |
1174 | ```javascript
1175 | {
1176 | "HAPI": "2.0",
1177 | "status": { "code": 1200, "message": "OK"},
1178 | "startDate": "2016-01-01T00:00:00.000Z",
1179 | "stopDate": "2016-01-31T24:00:00.000Z",
1180 | "parameters": [
1181 | {"name" : "timestamp", "type": "isotime", "units": "UTC", "fill": null, "length": 24},
1182 | {"name" : "bx", "type": "double", "units": "nT", "fill": "-1e31"},
1183 | {"name" : "by", "type": "double", "units": "nT", "fill": "-1e31"},
1184 | {"name" : "bz", "type": "double", "units": "nT", "fill": "-1e31"}
1185 | ]
1186 | }
1187 | ```
1188 |
1189 | This example shows a header for the same conceptual data (time and three
1190 | magnetic field components), but with the three components grouped into a
1191 | one-dimensional array of size 3.
1192 |
1193 | ```javascript
1194 | {
1195 | "HAPI": "2.0",
1196 | "status": { "code": 1200, "message": "OK"},
1197 | "startDate": "2016-01-01T00:00:00.000Z",
1198 | "stopDate": "2016-01-31T24:00:00.000Z",
1199 | "parameters": [
1200 |        { "name" : "timestamp", "type": "isotime", "units": "UTC", "fill": null, "length": 24 },
1201 |        { "name" : "b_field", "type": "double", "units": "nT", "fill": "-1e31", "size": [3] }
1202 | ]
1203 | }
1204 | ```
1205 |
1206 | These two different representations affect how a subset of parameters could be
1207 | requested from a server. The first example, by listing Bx, By, and Bz as
1208 | separate parameters, allows clients to request individual components:
1209 |
1210 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1211 | http://hapi-server.org/hapi/data?id=MY_MAG_DATA&time.min=2001Z&time.max=2010Z¶meters=Bx
1212 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1213 |
1214 | This request would just return a time column (always included as the first
1215 | column) and a Bx column. But in the second example, the components are all
1216 | inside a single parameter named `b_field` and so a request for this parameter
1217 | must always return all the components of the parameter. There is no way to
1218 | request individual elements of an array parameter.
1219 |
1220 | The following example shows a proton energy spectrum and illustrates the use of
1221 | the ‘bins’ element. Note also that the uncertainty of the values associated with
1222 | the proton spectrum are a separate variable. There is currently no way in the
1223 | HAPI spec to explicitly link a variable to its uncertainties.
1224 |
1225 | ```javascript
1226 | {"HAPI": "2.0",
1227 | "status": { "code": 1200, "message": "OK"},
1228 | "startDate": "2016-01-01T00:00:00.000Z",
1229 | "stopDate": "2016-01-31T24:00:00.000Z",
1230 | "parameters": [
1231 | { "name": "Time",
1232 | "type": "isotime",
1233 | "units": "UTC",
1234 | "fill": null,
1235 | "length": 24
1236 | },
1237 | { "name": "qual_flag",
1238 |        "type": "integer",
1239 | "units": null,
1240 | "fill": null
1241 | },
1242 | { "name": "maglat",
1243 | "type": "double",
1244 | "units": "degrees",
1245 | "fill": null,
1246 | "description": "magnetic latitude"
1247 | },
1248 | { "name": "MLT",
1249 | "type": "string",
1250 | "length": 5,
1251 | "units": "hours:minutes",
1252 | "fill": "??:??",
1253 | "description": "magnetic local time in HH:MM"
1254 | },
1255 | { "name": "proton_spectrum",
1256 | "type": "double",
1257 | "size": [3],
1258 | "units": "particles/(sec ster cm^2 keV)",
1259 | "fill": "-1e31",
1260 | "bins": [ {
1261 | "name": "energy",
1262 | "units": "keV",
1263 |            "centers": [ 15, 25, 35 ]
1264 |         } ] },
1265 | { "name": "proton_spectrum_uncerts",
1266 | "type": "double",
1267 | "size": [3],
1268 | "units": "particles/(sec ster cm^2 keV)",
1269 | "fill": "-1e31",
1270 | "bins": [ {
1271 | "name": "energy",
1272 | "units": "keV",
1273 |            "centers": [ 15, 25, 35 ]
1274 | } ]
1275 | }
1276 |
1277 | ]
1278 | }
1279 | ```
1280 |
1281 | This shows how "ranges" can specify the bins:
1282 |
1283 | ```javascript
1284 | {
1285 | "HAPI": "2.0",
1286 | "status": { "code": 1200, "message": "OK"},
1287 | "startDate": "2016-01-01T00:00:00.000Z",
1288 | "stopDate": "2016-01-31T24:00:00.000Z",
1289 | "parameters": [
1290 | {
1291 | "length": 24,
1292 | "name": "Time",
1293 | "type": "isotime",
1294 | "fill": null,
1295 | "units": "UTC"
1296 | },
1297 | {
1298 | "bins": [{
1299 | "ranges": [
1300 | [ 0, 30 ],
1301 | [ 30, 60 ],
1302 | [ 60, 90 ],
1303 | [ 90, 120 ],
1304 | [ 120, 150 ],
1305 | [ 150, 180 ]
1306 | ],
1307 | "units": "degrees"
1308 | }],
1309 | "fill": "-1e31",
1310 | "name": "pitchAngleSpectrum",
1311 | "size": [6],
1312 | "type": "double",
1313 | "units": "particles/sec/cm^2/ster/keV"
1314 | }
1315 | ]
1316 | }
1317 | ```
1318 |
1319 | Security Notes
1320 | ==============
1321 |
1322 | When the server sees a request parameter that it does not recognize, it should
1323 | throw an error.
1324 |
1325 | So given this query
1326 |
1327 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1328 | http://hapi-server.org/hapi/data?id=DATA&time.min=T1&time.max=T2&fields=mag_GSE&avg=5s
1329 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1330 |
1331 | the server should throw an error with a status of "1400 - Bad Request" with HTTP
1332 | status of 400. The server could optionally be more specific with "1401 -
1333 | Bad request - unknown API parameter name" with an HTTP code of 400 - Bad Request.
1334 |
1335 | In following general security practices, HAPI servers should carefully screen
1336 | incoming request parameter names and values. Unknown request parameters and values,
1337 | including incorrectly formatted time values, should **not** be echoed in the
1338 | error response.
1339 |
1340 | Adoption
1341 | ========
1342 |
1343 | In terms of adopting HAPI as a data delivery mechanism, data providers will
1344 | likely not want to change existing services, so a HAPI compliant access
1345 | mechanism could be added alongside existing services. Several demonstration
1346 | servers exist, but there are not yet any libraries or tools available for
1347 | providers to use or adapt. These will be made available as they are created. The
1348 | goal is to create a reference implementation as a full-fledged example that
1349 | providers could adapt. On the client side, there are also demonstration level
1350 | capabilities, and Autoplot currently can access HAPI compliant servers.
1351 | Eventually, libraries in several languages will be made available to assist in
1352 | writing clients that extract data from HAPI servers. However, even without
1353 | example code, the HAPI specification is designed to be simple enough so that
1354 | even small data providers could add HAPI compliant access to their holdings.
1355 |
1356 | References
1357 | ==========
1358 |
1359 | [1] ISO 8601:2004, http://dotat.at/tmp/ISO_8601-2004_E.pdf
1360 | [2] CSV format, https://tools.ietf.org/html/rfc4180
1361 | [3] JSON Format, https://tools.ietf.org/html/rfc7159
1362 | [4] "JSON Schema", http://json-schema.org/
1363 | [5] IEEE Computer Society (August 29, 2008). "IEEE Standard for Floating-Point Arithmetic". IEEE. http://doi.org/10.1109/IEEESTD.2008.4610935. ISBN 978-0-7381-5753-5. IEEE Std 754-2008
1364 | [6] IEEE Standard 754 Floating Point Numbers, http://steve.hollasch.net/cgindex/coding/ieeefloat.html
1365 |
1366 | Contact
1367 | =======
1368 |
1369 | Todd King (tking\@igpp.ucla.edu)
1370 | Jon Vandegriff (jon.vandegriff\@jhuapl.edu)
1371 | Robert Weigel (rweigel\@gmu.edu)
1372 | Robert Candey (Robert.M.Candey\@nasa.gov)
1373 | Aaron Roberts (aaron.roberts\@nasa.gov)
1374 | Bernard Harris (bernard.t.harris\@nasa.gov)
1375 | Nand Lal (nand.lal-1\@nasa.gov)
1376 | Jeremy Faden (faden\@cottagesystems.com)
1377 |
1378 | Appendix A: Sample Landing Page
1379 | ===========================================
1380 | ```html
1381 |
1382 |
1383 |
1384 | HAPI Server
1385 | This server supports the HAPI 1.0 specification for delivery of time series
1386 | data. The server consists of the following 4 REST-like endpoints that will
1387 | respond to HTTP GET requests.
1388 |
1389 |
1390 | - capabilities describe the capabilities of the server; this lists the output formats the server can emit (CSV and binary)
1391 | - catalog list the datasets that are available; each dataset is associated with a unique id
1392 | - info obtain a description for dataset of a given id; the description defines the parameters in every dataset record
1393 | - data stream data content for a dataset of a given id; the streaming request must have time bounds (specified by request parameters time.min and time.max) and may indicate a subset of parameters (default is all parameters)
1394 |
1395 | For more information, see this HAPI description at the SPASE web site.
1396 |
1397 |
1398 | ```
1399 |
1400 | Appendix B: JSON Object of HAPI Error Codes
1401 | ===========================================
1402 |
1403 | ```javascript
1404 | {
1405 | "1400": {"status":{"code": 1400, "message": "HAPI error 1400: user input error"}},
1406 | "1401": {"status":{"code": 1401, "message": "HAPI error 1401: unknown API parameter name"}},
1407 | "1402": {"status":{"code": 1402, "message": "HAPI error 1402: error in start time"}},
1408 | "1403": {"status":{"code": 1403, "message": "HAPI error 1403: error in stop time"}},
1409 | "1404": {"status":{"code": 1404, "message": "HAPI error 1404: start time equal to or after stop time"}},
1410 | "1405": {"status":{"code": 1405, "message": "HAPI error 1405: time outside valid range"}},
1411 | "1406": {"status":{"code": 1406, "message": "HAPI error 1406: unknown dataset id"}},
1412 | "1407": {"status":{"code": 1407, "message": "HAPI error 1407: unknown dataset parameter"}},
1413 | "1408": {"status":{"code": 1408, "message": "HAPI error 1408: too much time or data requested"}},
1414 | "1409": {"status":{"code": 1409, "message": "HAPI error 1409: unsupported output format"}},
1415 | "1410": {"status":{"code": 1410, "message": "HAPI error 1410: unsupported include value"}},
1416 | "1500": {"status":{"code": 1500, "message": "HAPI error 1500: internal server error"}},
1417 | "1501": {"status":{"code": 1501, "message": "HAPI error 1501: upstream request error"}}
1418 | }
1419 | ```
1420 |
--------------------------------------------------------------------------------
/hapi-2.0.0/HAPI-data-access-spec-2.0.0.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hapi-server/data-specification/41ab442cd41e644639af39502cb2a99b2df61985/hapi-2.0.0/HAPI-data-access-spec-2.0.0.pdf
--------------------------------------------------------------------------------
/hapi-2.0.0/changelog.md:
--------------------------------------------------------------------------------
1 | Difference between 1.1 and 2.0: https://www.diffchecker.com/sBWweoDa
2 |
3 | Summary of changes:
4 | 1. time values output by the server now must end with "Z" (not backwards compatable) [diff](https://github.com/hapi-server/data-specification/commit/9bcb2f43014a05380425c8e2be24b457da7c5542)
5 | 2. the "units" on the bins are required (not backwards compatable) [diff](https://github.com/hapi-server/data-specification/commit/30d8c967e252069a256019b71537694cd7fe7f97)
6 | 3. incoming time values (in request URL) should have a trailing "Z" to indicate GMT+0, but are interpreted as such even if there is no trailing Z [diff](https://github.com/hapi-server/data-specification/commit/a7a528b455b57a02987fb50b5e8c4890b721e774)
7 | 4. the length value for strings is now consistently described; all unused characters in the string should be filled with null characters (so no null terminator is needed if the string is exactly the given length)
8 | [diff 1](https://github.com/hapi-server/data-specification/commit/4d8395578c42a7a20fd8cd0895c6cedb5b28abc7) [diff 2](https://github.com/hapi-server/data-specification/commit/32f2b219fdd6e8f7c33546077f9dc95f908b0752) [diff 3](https://github.com/hapi-server/data-specification/commit/c0aa8b9ca2f3c0d573a40019bdaf5c6a1edb1008) [diff4](https://github.com/hapi-server/data-specification/commit/533120bcd3622b58ae0f776f2e19fd854b4d3b54)
9 | 5. in CSV output, the use of quotes for string values with commas is clarified [diff](https://github.com/hapi-server/data-specification/commit/a1c444298f4cb0c7da2dfeb25b40879a80d100b0)
10 | 6. the subset of ISO8601 that we use is better described [diff](https://github.com/hapi-server/data-specification/commit/a7a528b455b57a02987fb50b5e8c4890b721e774)
11 | 7. added `timeStampLocation` [diff](https://github.com/hapi-server/data-specification/commit/9ee24e0e3f9c09243b0762664e13adbf446024b8) [diff](https://github.com/hapi-server/data-specification/commit/2603550251fefee14d4f2192095981c35753a2f3)
12 |
--------------------------------------------------------------------------------
/hapi-2.1.0/HAPI-data-access-spec-2.1.0.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hapi-server/data-specification/41ab442cd41e644639af39502cb2a99b2df61985/hapi-2.1.0/HAPI-data-access-spec-2.1.0.pdf
--------------------------------------------------------------------------------
/hapi-2.1.0/changelog.md:
--------------------------------------------------------------------------------
1 | # Changelog for HAPI specification version 2.1.0
2 |
3 | ## Documentation
4 |
5 | 1. replaced "hapi-server.org" with just "server" in URLs
6 | 2. clarified what the length attribute means for data parameters
7 | 3. clarified how to specify array sizes when one array dimension size is 1
8 | 4. more clarification and examples for error messages from HAPI servers
9 | 5. fixed multiple typos and made wording improvements
10 |
11 | ## Schema
12 |
13 | 1. changed the definition of 'units' to allow for different units in the elements of an array
14 | 2. changed the definition of 'label' to allow for different labels for each element in the array
15 | 3. deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
16 | 4. now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
17 |
18 | ## Server API
19 |
20 | 1. add HAPI 1411 error `out of order or duplicate parameters`
21 | 2. clarified server responses when time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
22 |
--------------------------------------------------------------------------------
/hapi-2.1.1/HAPI-data-access-spec-2.1.1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hapi-server/data-specification/41ab442cd41e644639af39502cb2a99b2df61985/hapi-2.1.1/HAPI-data-access-spec-2.1.1.pdf
--------------------------------------------------------------------------------
/hapi-2.1.1/changelog.md:
--------------------------------------------------------------------------------
1 | Changelog for HAPI 2.1.1
2 |
3 | These are all small clarifications on relatively infrequent situations.
4 |
5 | Pull request for going from 2.1.0 to 2.1.1:
6 | https://github.com/hapi-server/data-specification/pull/93
7 |
8 | This URL shows differences between 2.1.0 and 2.1.1:
9 | https://github.com/hapi-server/data-specification/compare/b85e1db..8969633
10 |
11 | * updated version number to 2.1 when used in text and in example output (a57ab6b)
12 | * clarified how to indicate a dimensionless quantity within an array of units for an array-valued parameter with non-uniform units; see Issue #85
13 | * clarified the use of scalar and array values for labels and units that describe an array-valued parameter; see Issue #91
14 | * clarified that `null` is not allowed as a value within a `centers` or `ranges` array in a `bins` description; see Issue #86
15 |
16 | In a future release, there will be occasion to use `null` values for some bin definitions, but only when the bin `centers` and `ranges` are able to be specified as time varying elements within the data (as opposed to fixed quantities in the `info` metadata). This is expected to be included in version 3.0.
17 |
18 | Changelog for HAPI version 2.1.0
19 |
20 | This URL generates a diff:
21 |
22 | https://github.com/hapi-server/data-specification/compare/4702968b13439af684d43416b442c534bf569f6c..4a02df680b76d757a7cbf4a06e55c53b9b91e310
23 |
24 | Changes:
25 |
26 | deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
27 |
28 | replaced "hapi-server.org" with just "server" in URLs
29 |
30 | clarified what the length attribute means for data parameters
31 |
32 | clarified how to specify array sizes when one array dimension size is 1
33 |
34 | changed the definition of 'units' to allow for different units in the elements of an array
35 |
36 | changed the definition of 'label' to allow for different labels for each element in the array
37 |
38 | now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
39 |
40 | clarified that reordering parameters in a request has no effect on the order of the parameters in the data returned by the server (order of the fields returned is always the same regardless of the order in which they were requested)
41 |
42 | clarified server responses when time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
43 |
44 | more clarification and examples for error messages from HAPI servers
45 |
46 | fixed multiple typos and made wording improvements
47 |
--------------------------------------------------------------------------------
/hapi-3.0.0/HAPI-data-access-spec-3.0.0.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hapi-server/data-specification/41ab442cd41e644639af39502cb2a99b2df61985/hapi-3.0.0/HAPI-data-access-spec-3.0.0.pdf
--------------------------------------------------------------------------------
/hapi-3.0.0/changelog.md:
--------------------------------------------------------------------------------
1 | # Changelog for HAPI 3.0.0
2 |
3 | ## v2 to v3 API Changes
4 | Non-backward compatible changes to the request interface in HAPI 3.0:
5 |
6 | * The URL parameter id was replaced with dataset.
7 | * time.min and time.max were replaced with start and stop, respectively.
8 | * Addition of a new endpoint, "about", for server description metadata.
9 |
10 | These changes were discussed in issue #77. HAPI 3 servers must accept both the old and these new parameter names, but the HAPI 2 specification requires an error response if the new URL parameter names are used. In a future version, the deprecated older names will no longer be valid.
11 |
12 | ## v2 to v3 Schema Changes
13 | * Ability to specify time-varying bins (#83)
14 | * Ability to use JSON references in info response (#82)
15 | * Ability to indicate a units schema (if one is being used for units strings) (#81)
16 | * Added citation element
17 |
18 | # Changelog for HAPI 2.1.1
19 |
20 | These are all small clarifications on relatively infrequent situations.
21 |
22 | Pull request for going from 2.1.0 to 2.1.1:
23 | https://github.com/hapi-server/data-specification/pull/93
24 |
25 | This URL shows differences between 2.1.0 and 2.1.1:
26 | https://github.com/hapi-server/data-specification/compare/b85e1db..8969633
27 |
28 | * updated version number to 2.1 when used in text and in example output
29 | * clarified how to indicate a dimensionless quantity within an array of units for an array-valued parameter with non-uniform units; see Issue #85
30 | * clarified the use of scalar and array values for labels and units that describe an array-valued parameter; see Issue #91
31 | * clarified that `null` is not allowed as a value within a `centers` or `ranges` array in a `bins` description; see Issue #86
32 |
33 | In a future release, there will be occasion to use `null` values for some bin definitions, but only when the bin `centers` and `ranges` are able to be specified as time varying elements within the data (as opposed to fixed quantities in the `info` metadata). This is expected to be included in version 3.0.
34 |
35 | # Changelog for HAPI version 2.1.0
36 |
37 | This URL generates a diff:
38 |
39 | https://github.com/hapi-server/data-specification/compare/4702968b13439af684d43416b442c534bf569f6c..4a02df680b76d757a7cbf4a06e55c53b9b91e310
40 |
41 | Changes:
42 |
43 | deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
44 |
45 | replaced "hapi-server.org" with just "server" in URLs
46 |
47 | clarified what the length attribute means for data parameters
48 |
49 | clarified how to specify array sizes when one array dimension size is 1
50 |
51 | changed the definition of 'units' to allow for different units in the elements of an array
52 |
53 | changed the definition of 'label' to allow for different labels for each element in the array
54 |
55 | now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
56 |
57 | clarified that reordering parameters in a request has no effect on the order of the parameters in the data returned by the server (order of the fields returned is always the same regardless of the order in which they were requested)
58 |
59 | clarified server responses when time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
60 |
61 | more clarification and examples for error messages from HAPI servers
62 |
63 | fixed multiple typos and made wording improvements
64 |
--------------------------------------------------------------------------------
/hapi-3.0.1/changelog.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | [1 Changes from 2.1.1 to 3.0.0](#1-changes-from211-to300)
4 | [1.1 v2 to v3 API Changes](#11-v2-to-v3-api-changes)
5 | [1.2 v2 to v3 Schema Changes](#12-v2-to-v3-schema-changes)
6 | [2 Changes from 2.1.0 to 2.1.1](#2-changes-from210-to211)
7 | [3 Changes from 2.1.0 to 3.0.0](#3-changes-from210-to300)
8 | [4 Changes from 3.0.0 to 3.0.1](#4-changes-from-300-to-301)
9 |
10 |
11 | # 1 Changes from 2.1.1 to 3.0.0
12 |
13 | ## 1.1 v2 to v3 API Changes
14 | Non-backward compatible changes to the request interface in HAPI 3.0:
15 |
16 | * The URL parameter id was replaced with dataset.
17 | * time.min and time.max were replaced with start and stop, respectively.
18 | * Addition of a new endpoint, "about", for server description metadata.
19 |
20 | These changes were discussed in issue #77. HAPI 3 servers must accept both the old and these new parameter names, but the HAPI 2 specification requires an error response if the new URL parameter names are used. In a future version, the deprecated older names will no longer be valid.
21 |
22 | ## 1.2 v2 to v3 Schema Changes
23 | * Ability to specify time-varying bins (#83)
24 | * Ability to use JSON references in info response (#82)
25 | * Ability to indicate a units schema (if one is being used for units strings) (#81)
26 |
27 | # 2 Changes from 2.1.0 to 2.1.1
28 |
29 | These are all small clarifications on relatively infrequent situations.
30 |
31 | Pull request for going from 2.1.0 to 2.1.1:
32 | https://github.com/hapi-server/data-specification/pull/93
33 |
34 | This URL shows differences between 2.1.0 and 2.1.1:
35 | https://github.com/hapi-server/data-specification/compare/b85e1db..8969633
36 |
37 | * updated version number to 2.1 when used in text and in example output
38 | * clarified how to indicate a dimensionless quantity within an array of units for an array-valued parameter with non-uniform units; see Issue #85
39 | * clarified the use of scalar and array values for labels and units that describe an array-valued parameter; see Issue #91
40 | * clarified that `null` is not allowed as a value within a `centers` or `ranges` array in a `bins` description; see Issue #86
41 |
42 | In a future release, there will be occasion to use `null` values for some bin definitions, but only when the bin `centers` and `ranges` are able to be specified as time varying elements within the data (as opposed to fixed quantities in the `info` metadata). This is expected to be included in version 3.0.
43 |
44 | # 3 Changes from 2.1.0 to 3.0.0
45 |
46 | This URL generates a diff:
47 |
48 | https://github.com/hapi-server/data-specification/compare/4702968b13439af684d43416b442c534bf569f6c..4a02df680b76d757a7cbf4a06e55c53b9b91e310
49 |
50 | Changes:
51 |
52 | deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
53 |
54 | replaced "hapi-server.org" with just "server" in URLs
55 |
56 | clarified what the length attribute means for data parameters
57 |
58 | clarified how to specify array sizes when one array dimension size is 1
59 |
60 | changed the definition of 'units' to allow for different units in the elements of an array
61 |
62 | changed the definition of 'label' to allow for different labels for each element in the array
63 |
64 | now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
65 |
66 | clarified that reordering parameters in a request has no effect on the order of the parameters in the data returned by the server (order of the fields returned is always the same regardless of the order in which they were requested)
67 |
68 | clarified server responses when time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
69 |
70 | more clarification and examples for error messages from HAPI servers
71 |
72 | fixed multiple typos and made wording improvements
73 |
74 | # 4 Changes from 3.0.0 to 3.0.1
75 |
76 | Added statement that `dataset` and `parameters` may not contain Unicode but that this support will be added in 3.1. See [GitHub Issue #128](https://github.com/hapi-server/data-specification/issues/128).
--------------------------------------------------------------------------------
/hapi-3.1.0/HAPI-data-access-spec-3.1.0.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hapi-server/data-specification/41ab442cd41e644639af39502cb2a99b2df61985/hapi-3.1.0/HAPI-data-access-spec-3.1.0.pdf
--------------------------------------------------------------------------------
/hapi-3.1.0/changelog.md:
--------------------------------------------------------------------------------
1 | Changelog for the HAPI Specification
2 | ==================
3 |
4 | For each version, there may be up to three types of changes to the specification:
5 | * API changes - new or different features to the request interface
6 | * Response Format Changes - new or different items in the server responses
7 | * Clarifications - better description or handling of corner cases, etc
8 |
9 | Some API and Response format changes may be non-backwards compatible, but this is so far only true in Version 3.0.
10 |
11 | # Version 3.1
12 |
13 | Version 3.1 is backwards compatible with 3.0. It adds support for three optional aspects in the `info` response:
14 |
15 | List of changes from 3.0.1 is [here](https://github.com/hapi-server/data-specification/compare/cfed14f74995b39598b43e1976be702f2c8350c4..964f44f8bbe07f5d3fd97fb8adb07ab71debb328)
16 |
17 | ## Response Format Changes
18 |
19 | 1. support for vector quantities: parameters that are vector quantities can optionally specify a coordinate system and can identify vector components as such; datasets can optionally specify a coordinate system schema ([#115](https://github.com/hapi-server/data-specification/issues/115))
20 | 1. a dataset may optionally include other types of metadata inside a separate block ([#117](https://github.com/hapi-server/data-specification/issues/117))
21 | 1. each dataset may optionally indicate a maximum time range to request data ([#136](https://github.com/hapi-server/data-specification/issues/136))
22 |
23 |
24 | # Version 3.0.1
25 |
26 | ## Clarifications
27 |
28 | Added statement that `dataset` and `parameters` may not contain Unicode but that this support will be added in 3.1. See [GitHub Issue #128](https://github.com/hapi-server/data-specification/issues/128).
29 |
30 |
31 | # Version 3.0
32 |
33 | ## API Changes
34 |
35 | Non-backward compatible changes to the request interface in HAPI 3.0:
36 |
37 | * The URL parameter id was replaced with dataset.
38 | * time.min and time.max were replaced with start and stop, respectively.
39 | * Addition of a new endpoint, "about", for server description metadata.
40 |
41 | These changes were discussed in issue #77. HAPI 3 servers must accept both the old and these new parameter names, but the HAPI 2 specification requires an error response if the new URL parameter names are used. In a future version, the deprecated older names will no longer be valid.
42 |
43 | ## Response Format Changes
44 | * Ability to specify time-varying bins (#83)
45 | * Ability to use JSON references in info response (#82)
46 | * Ability to indicate a units schema (if one is being used for units strings) (#81)
47 |
48 |
49 | This URL generates a diff:
50 |
51 | https://github.com/hapi-server/data-specification/compare/4702968b13439af684d43416b442c534bf569f6c..4a02df680b76d757a7cbf4a06e55c53b9b91e310
52 |
53 | ## Clarifications:
54 |
55 | deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
56 |
57 | replaced "hapi-server.org" with just "server" in URLs
58 |
59 | clarified what the length attribute means for data parameters
60 |
61 | clarified how to specify array sizes when one array dimension size is 1
62 |
63 | changed the definition of 'units' to allow for different units in the elements of an array
64 |
65 | changed the definition of 'label' to allow for different labels for each element in the array
66 |
67 | now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
68 |
69 | clarified that reordering parameters in a request has no effect on the order of the parameters in the data returned by the server (order of the fields returned is always the same regardless of the order in which they were requested)
70 |
71 | clarified server responses when time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
72 |
73 | more clarification and examples for error messages from HAPI servers
74 |
75 | fixed multiple typos and made wording improvements
76 |
77 |
78 |
79 | # Version 2.1.1
80 |
81 | These are all small clarifications on relatively infrequent situations.
82 |
83 | Pull request for going from 2.1.0 to 2.1.1:
84 | https://github.com/hapi-server/data-specification/pull/93
85 |
86 | This URL shows differences between 2.1.0 and 2.1.1:
87 | https://github.com/hapi-server/data-specification/compare/b85e1db..8969633
88 |
89 | ## Clarifications
90 |
91 | * updated version number to 2.1 when used in text and in example output
92 | * clarified how to indicate a dimensionless quantity within an array of units for an array-valued parameter with non-uniform units; see Issue #85
93 | * clarified the use of scalar and array values for labels and units that describe an array-valued parameter; see Issue #91
94 | * clarified that `null` is not allowed as a value within a `centers` or `ranges` array in a `bins` description; see Issue #86
95 |
96 | In a future release, there will be occasion to use `null` values for some bin definitions, but only when the bin `centers` and `ranges` are able to be specified as time varying elements within the data (as opposed to fixed quantities in the `info` metadata). This is expected to be included in version 3.0.
97 |
98 | # Version 2.1.0
99 |
100 | ## Clarifications
101 |
102 | 1. replaced "hapi-server.org" with just "server" in example URLs
103 | 2. clarified what the length attribute means for data parameters
104 | 3. clarified how to specify array sizes when one array dimension size is 1
105 | 4. more clarification and examples for error messages from HAPI servers
106 | 5. fixed multiple typos and made wording improvements
107 |
108 | ## Response Format Changes
109 |
110 | 1. changed the definition of 'units' to allow for different units in the elements of an array
111 | 2. changed the definition of 'label' to allow for different labels for each element in the array
112 | 3. deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
113 | 4. now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
114 |
115 | ## API Changes
116 |
117 | 1. add HAPI 1411 error `out of order or duplicate parameters`
118 | 2. clarified server responses when time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
119 |
120 |
121 | # Version 2.0
122 |
123 | Difference between 1.1 and 2.0: https://www.diffchecker.com/sBWweoDa
124 |
125 | Summary of changes:
126 | 1. time values output by the server now must end with "Z" (not backwards compatible) [diff](https://github.com/hapi-server/data-specification/commit/9bcb2f43014a05380425c8e2be24b457da7c5542)
127 | 2. the "units" on the bins are required (not backwards compatible) [diff](https://github.com/hapi-server/data-specification/commit/30d8c967e252069a256019b71537694cd7fe7f97)
128 | 3. incoming time values (in request URL) should have a trailing "Z" to indicate GMT+0, but are interpreted as such even if there is no trailing Z [diff](https://github.com/hapi-server/data-specification/commit/a7a528b455b57a02987fb50b5e8c4890b721e774)
129 | 4. the length value for strings is now consistently described; all unused characters in the string should be filled with null characters (so no null terminator is needed if the string is exactly the given length)
130 | [diff 1](https://github.com/hapi-server/data-specification/commit/4d8395578c42a7a20fd8cd0895c6cedb5b28abc7) [diff 2](https://github.com/hapi-server/data-specification/commit/32f2b219fdd6e8f7c33546077f9dc95f908b0752) [diff 3](https://github.com/hapi-server/data-specification/commit/c0aa8b9ca2f3c0d573a40019bdaf5c6a1edb1008) [diff4](https://github.com/hapi-server/data-specification/commit/533120bcd3622b58ae0f776f2e19fd854b4d3b54)
131 | 5. in CSV output, the use of quotes for string values with commas is clarified [diff](https://github.com/hapi-server/data-specification/commit/a1c444298f4cb0c7da2dfeb25b40879a80d100b0)
132 | 6. the subset of ISO8601 that we use is better described [diff](https://github.com/hapi-server/data-specification/commit/a7a528b455b57a02987fb50b5e8c4890b721e774)
133 | 7. added `timeStampLocation` [diff](https://github.com/hapi-server/data-specification/commit/9ee24e0e3f9c09243b0762664e13adbf446024b8) [diff](https://github.com/hapi-server/data-specification/commit/2603550251fefee14d4f2192095981c35753a2f3)
134 |
135 | # Version 1.1
136 |
137 |
138 | # Version 1.0
139 |
140 | Initial version!
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
--------------------------------------------------------------------------------
/hapi-3.2.0/HAPI-data-access-spec-3.2.0.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hapi-server/data-specification/41ab442cd41e644639af39502cb2a99b2df61985/hapi-3.2.0/HAPI-data-access-spec-3.2.0.pdf
--------------------------------------------------------------------------------
/hapi-3.2.0/changelog.md:
--------------------------------------------------------------------------------
1 | Changelog for the HAPI Specification
2 | ==================
3 |
4 | For each version, there may be up to three types of changes to the specification:
5 | * API changes - new or different features to the request interface
6 | * Response Format Changes - new or different items in the server responses
7 | * Clarifications - better description or handling of corner cases, etc
8 |
9 | Some API and Response format changes may be non-backward compatible, but this is so far only true in Version 3.0.
10 |
11 | # Version 3.2
12 |
13 | ## API Changes
14 |
15 | Version 3.2 is backward compatible with 3.1.
16 |
17 | There is a new optional way to query the `catalog` endpoint, which now takes a request parameter
18 | called `depth` to indicate how much detail the catalog response should include. The catalog
19 | can now include all the elements in each dataset's `info` response.
20 | The `capabilities` endpoint advertises if this functionality is supported. ([#164](https://github.com/hapi-server/data-specification/pull/164))
21 |
22 | The spec document now suggests a way for HAPI clients to identify themselves as bots or
23 | non-human users to a HAPI server. Doing this can help server administrators / developers in logging actual
24 | science usage, as opposed to web scraping or mirroring activity. This is not a change in the spec.
25 | ([#174](https://github.com/hapi-server/data-specification/pull/174))
26 |
27 | ## Response Format Changes
28 |
29 | There is now an error code for when the `depth` request parameter to the `catalog` endpoint is invalid.
30 | Only specific values are allowed for `depth`. ([#191](https://github.com/hapi-server/data-specification/pull/191))
31 |
32 | The `capabilities` response now includes a way to ping the server to test that it is functioning. The way to do
33 | this is to make a simple data request, and the optional new info in the `capabilities` response allows a
34 | server to identify exactly what data request to use for this kind of ping.
35 | ([#172](https://github.com/hapi-server/data-specification/pull/172))
36 |
37 | There have been many requests for HAPI to also serve images. To accommodate this, string parameters
38 | now can be identified as being URIs which then point to image files. This also enables HAPI to
39 | more uniformly offer lists of any kind of file. This file listing capability should be viewed as
40 | a different kind of service from the usual numeric data serving capability offered by HAPI.
41 | See 3.6.16 for more details and also ([#166](https://github.com/hapi-server/data-specification/pull/166))
42 |
43 |
44 | ## Clarifications
45 |
46 | The error message text was made more precise for HAPI error codes related to invalid start and stop times in a data request.
47 | ([#163](https://github.com/hapi-server/data-specification/pull/163))
48 |
49 | When describing response formats offered by HAPI, it is now emphasized that the output formats offered by HAPI are
50 | transport formats meant for streaming data and are not intended to be used as traditional file formats.
51 | ([#159](https://github.com/hapi-server/data-specification/pull/159))
52 |
53 | Clarified that a HAPI request with an empty string after `parameters=` is the same as
54 | not requesting any specific parameters, which defaults to requesting all parameters.
55 | ([#201](https://github.com/hapi-server/data-specification/pull/201))
56 |
57 | # Version 3.1
58 |
59 | Version 3.1 is backward compatible with 3.0. It adds support for three optional aspects in the `info` response:
60 |
61 | List of changes from 3.0.1 is [here](https://github.com/hapi-server/data-specification/compare/cfed14f74995b39598b43e1976be702f2c8350c4..964f44f8bbe07f5d3fd97fb8adb07ab71debb328)
62 |
63 | ## Response Format Changes
64 |
65 | 1. support for vector quantities: parameters that are vector quantities can optionally specify a coordinate system and can identify vector components as such; datasets can optionally specify a coordinate system schema ([#115](https://github.com/hapi-server/data-specification/issues/115))
66 | 1. a dataset may optionally include other types of metadata inside a separate block ([#117](https://github.com/hapi-server/data-specification/issues/117))
67 | 1. each dataset may optionally indicate a maximum time range to request data ([#136](https://github.com/hapi-server/data-specification/issues/136))
68 |
69 |
70 | # Version 3.0.1
71 |
72 | ## Clarifications
73 |
74 | Added statement that `dataset` and `parameters` may not contain Unicode but that this support will be added in 3.1. See [GitHub Issue #128](https://github.com/hapi-server/data-specification/issues/128).
75 |
76 |
77 | # Version 3.0
78 |
79 | ## API Changes
80 |
81 | Non-backward compatible changes to the request interface in HAPI 3.0:
82 |
83 | * The URL parameter `id` was replaced with `dataset`.
84 | * `time.min` and `time.max` were replaced with `start` and `stop`, respectively.
85 | * Addition of a new endpoint, `about`, for server description metadata.
86 |
87 | These changes were discussed in issue #77. HAPI 3 servers must accept both the old and these new parameter names, but the HAPI 2 specification requires an error response if the new URL parameter names are used. In a future version, the deprecated older names will no longer be valid.
88 |
89 | ## Response Format Changes
90 | * Ability to specify time-varying bins (#83)
91 | * Ability to use JSON references in info response (#82)
92 | * Ability to indicate a units schema (if one is being used for units strings) (#81)
93 |
94 |
95 | This URL generates a diff:
96 |
97 | https://github.com/hapi-server/data-specification/compare/4702968b13439af684d43416b442c534bf569f6c..4a02df680b76d757a7cbf4a06e55c53b9b91e310
98 |
99 | ## Clarifications:
100 |
101 | deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
102 |
103 | replaced "hapi-server.org" with just "server" in URLs
104 |
105 | clarified what the length attribute means for data parameters
106 |
107 | clarified how to specify array sizes when one array dimension size is 1
108 |
109 | changed the definition of 'units' to allow for different units in the elements of an array
110 |
111 | changed the definition of 'label' to allow for different labels for each element in the array
112 |
113 | now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
114 |
115 | clarified that reordering parameters in a request has no effect on the order of the parameters in the data returned by the server (order of the fields returned is always the same regardless of the order in which they were requested)
116 |
117 | clarified server responses when time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
118 |
119 | more clarification and examples for error messages from HAPI servers
120 |
121 | fixed multiple typos and made wording improvements
122 |
123 |
124 |
125 | # Version 2.1.1
126 |
127 | These are all small clarifications on relatively infrequent situations.
128 |
129 | Pull request for going from 2.1.0 to 2.1.1:
130 | https://github.com/hapi-server/data-specification/pull/93
131 |
132 | This URL shows differences between 2.1.0 and 2.1.1:
133 | https://github.com/hapi-server/data-specification/compare/b85e1db..8969633
134 |
135 | ## Clarifications
136 |
137 | * updated version number to 2.1 when used in text and in example output
138 | * clarified how to indicate a dimensionless quantity within an array of units for an array-valued parameter with non-uniform units; see Issue #85
139 | * clarified the use of scalar and array values for labels and units that describe an array-valued parameter; see Issue #91
140 | * clarified that `null` is not allowed as a value within a `centers` or `ranges` array in a `bins` description; see Issue #86
141 |
142 | In a future release, there will be an occasion to use `null` values for some bin definitions, but only when the bin `centers` and `ranges` are able to be specified as time-varying elements within the data (as opposed to fixed quantities in the `info` metadata). This is expected to be included in version 3.0.
143 |
144 | # Version 2.1.0
145 |
146 | ## Clarifications
147 |
148 | 1. replaced "hapi-server.org" with just "server" in example URLs
149 | 2. clarified what the length attribute means for data parameters
150 | 3. clarified how to specify array sizes when one array dimension size is 1
151 | 4. more clarification and examples for error messages from HAPI servers
152 | 5. fixed multiple typos and made wording improvements
153 |
154 | ## Response Format Changes
155 |
156 | 1. changed the definition of 'units' to allow for different units in the elements of an array
157 | 2. changed the definition of 'label' to allow for different labels for each element in the array
158 | 3. deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
159 | 4. now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
160 |
161 | ## API Changes
162 |
163 | 1. add HAPI 1411 error `out of order or duplicate parameters`
164 | 2. clarified server responses when the time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
165 |
166 |
167 | # Version 2.0
168 |
169 | Difference between 1.1 and 2.0: https://www.diffchecker.com/sBWweoDa
170 |
171 | Summary of changes:
172 | 1. time values output by the server now must end with "Z" (not backward compatible) [diff](https://github.com/hapi-server/data-specification/commit/9bcb2f43014a05380425c8e2be24b457da7c5542)
173 | 2. the "units" on the bins are required (not backward compatible) [diff](https://github.com/hapi-server/data-specification/commit/30d8c967e252069a256019b71537694cd7fe7f97)
174 | 3. incoming time values (in request URL) should have a trailing "Z" to indicate GMT+0, but are interpreted as such even if there is no trailing Z [diff](https://github.com/hapi-server/data-specification/commit/a7a528b455b57a02987fb50b5e8c4890b721e774)
175 | 4. the length value for strings is now consistently described; all unused characters in the string should be filled with null characters (so no null terminator is needed if the string is exactly the given length)
176 | [diff 1](https://github.com/hapi-server/data-specification/commit/4d8395578c42a7a20fd8cd0895c6cedb5b28abc7) [diff 2](https://github.com/hapi-server/data-specification/commit/32f2b219fdd6e8f7c33546077f9dc95f908b0752) [diff 3](https://github.com/hapi-server/data-specification/commit/c0aa8b9ca2f3c0d573a40019bdaf5c6a1edb1008) [diff 4](https://github.com/hapi-server/data-specification/commit/533120bcd3622b58ae0f776f2e19fd854b4d3b54)
177 | 5. in CSV output, the use of quotes for string values with commas is clarified [diff](https://github.com/hapi-server/data-specification/commit/a1c444298f4cb0c7da2dfeb25b40879a80d100b0)
178 | 6. the subset of ISO8601 that we use is better described [diff](https://github.com/hapi-server/data-specification/commit/a7a528b455b57a02987fb50b5e8c4890b721e774)
179 | 7. added `timeStampLocation` [diff](https://github.com/hapi-server/data-specification/commit/9ee24e0e3f9c09243b0762664e13adbf446024b8) [diff](https://github.com/hapi-server/data-specification/commit/2603550251fefee14d4f2192095981c35753a2f3)
180 |
181 | # Version 1.1
182 |
183 | # Version 1.0
184 |
185 | Initial version!
186 |
--------------------------------------------------------------------------------
/hapi-dev/changelog.md:
--------------------------------------------------------------------------------
1 | Changelog for the HAPI Specification
2 | ==================
3 |
4 | For each version, there may be up to three types of changes to the specification:
5 | * API changes - new or different features to the request interface
6 | * Response Format Changes - new or different items in the server responses
7 | * Clarifications - better description or handling of corner cases, etc
8 |
9 | Some API and Response format changes may be non-backward compatible, but this is so far only true in Version 3.0.
10 |
11 | # Version 3.3
12 |
13 | Note 3.3 is fully backward compatible with 3.2.
14 |
15 | ## API Changes
16 |
17 | * add new request parameter option `resolve_references=false` to `catalog` and `info` endpoints to tell the server not to perform JSON reference substitution ([#220](https://github.com/hapi-server/data-specification/pull/220))
18 |
19 | ## Response Format Changes
20 |
21 | * added optional `location` and `geoLocation` attributes to `/info` response for indicating where measurements were made ([#238](https://github.com/hapi-server/data-specification/pull/238))
22 | * added altitude quantity to list of valid vector component types since this is common for geo-location values ([#233](https://github.com/hapi-server/data-specification/pull/233))
23 | * now have distinct `serverCitation` in `about` endpoint and `datasetCitation` in `info` endpoint, and plain `citation` is deprecated ([#235](https://github.com/hapi-server/data-specification/pull/235))
24 | * added `resourceID` in `/about` and `provenance` and `licenseURL` in `/info` so that HAPI can describe FAIR data and added a section on how FAIR principles map to HAPI ([#224](https://github.com/hapi-server/data-specification/pull/224)). Also added appendix section describing relationship between HAPI and FAIR.
25 | * added optional `warning` and `note` attributes to `about` and `info` endpoints ([#223](https://github.com/hapi-server/data-specification/pull/223))
26 | * added new units schema for VOUnits to enumerated list of allowed schemas ([#260](https://github.com/hapi-server/data-specification/pull/260))
27 |
28 | ## Clarifications
29 |
30 | * clarified how extra trailing slash should be handled ([#248](https://github.com/hapi-server/data-specification/issues/248))
31 | * clarified how to name a custom (i.e., non-standard) endpoint ([#245](https://github.com/hapi-server/data-specification/pull/245))
32 | * clarified how scalar parameters can also contain vector components ([#244](https://github.com/hapi-server/data-specification/pull/244))
33 | * clarified requirements for which of bin centers and ranges need to be present ([#237](https://github.com/hapi-server/data-specification/pull/237))
34 | * clarified that HAPI is RESTful rather than strictly based on the original REST concept ([#236](https://github.com/hapi-server/data-specification/pull/236))
35 | * clarified that any non-standard data format in the `outputFormats` of the `capabilities` endpoint needs to begin with `x_` ([#222](https://github.com/hapi-server/data-specification/pull/222))
36 | * clarified the difference between `title` (a short label) in `/catalog` endpoint versus the `description` (few lines of details) in `/info` endpoint ([#221](https://github.com/hapi-server/data-specification/pull/221))
37 | * clarified expectations for `id` and `title` in `about` endpoint (acronyms ok in id, but expand in title; don't include word HAPI) ([#219](https://github.com/hapi-server/data-specification/pull/219))
38 | * fixed some typos and inconsistencies ([#241](https://github.com/hapi-server/data-specification/pull/241))
39 | * rearranged `info` section for clarity ([#247](https://github.com/hapi-server/data-specification/pull/247))
40 |
41 | # Version 3.2
42 |
43 | ## API Changes
44 |
45 | Version 3.2 is backward compatible with 3.1.
46 |
47 | There is a new optional way to query the `catalog` endpoint, which now takes a request parameter
48 | called `depth` to indicate how much detail the catalog response should include. The catalog
49 | can now include all the elements in each dataset's `info` response.
50 | The `capabilities` endpoint advertises if this functionality is supported. ([#164](https://github.com/hapi-server/data-specification/pull/164))
51 |
52 | The spec document now suggests a way for HAPI clients to identify themselves as bots or
53 | non-human users to a HAPI server. Doing this can help server administrators / developers in logging actual
54 | science usage, as opposed to web scraping or mirroring activity. This is not a change in the spec.
55 | ([#174](https://github.com/hapi-server/data-specification/pull/174))
56 |
57 | ## Response Format Changes
58 |
59 | There is now an error code for when the `depth` request parameter to the `catalog` endpoint is invalid.
60 | Only specific values are allowed for `depth`. ([#191](https://github.com/hapi-server/data-specification/pull/191))
61 |
62 | The `capabilities` endpoint now includes a way to ping the server to test that it is functioning. The way to do
63 | this is to make a simple data request, and the optional new info in the `capabilities` response allows a
64 | server to identify exactly what data request to use for this kind of ping.
65 | ([#172](https://github.com/hapi-server/data-specification/pull/172))
66 |
67 | There have been many requests for HAPI to also serve images. To accommodate this, string parameters
68 | now can be identified as being URIs which then point to image files. This also enables HAPI to
69 | more uniformly offer lists of any kind of file. This file listing capability should be viewed as
70 | a different kind of service from the usual numeric data serving capability offered by HAPI.
71 | See 3.6.16 for more details and also ([#166](https://github.com/hapi-server/data-specification/pull/166))
72 |
73 | ## Clarifications
74 |
75 | The error message text was made more precise for HAPI error codes related to invalid start and stop times in a data request.
76 | ([#163](https://github.com/hapi-server/data-specification/pull/163))
77 |
78 | When describing response formats offered by HAPI, it is now emphasized that the output formats offered by HAPI are
79 | transport formats meant for streaming data and are not intended to be used as traditional file formats.
80 | ([#159](https://github.com/hapi-server/data-specification/pull/159))
81 |
82 | Clarified that a HAPI request with an empty string after `parameters=` is the same as
83 | not requesting any specific parameters, which defaults to requesting all parameters.
84 | ([#201](https://github.com/hapi-server/data-specification/pull/201))
85 |
86 | # Version 3.1
87 |
88 | Version 3.1 is backward compatible with 3.0. It adds support for three optional aspects in the `info` response:
89 |
90 | List of changes from 3.0.1 is [here](https://github.com/hapi-server/data-specification/compare/cfed14f74995b39598b43e1976be702f2c8350c4..964f44f8bbe07f5d3fd97fb8adb07ab71debb328)
91 |
92 | ## Response Format Changes
93 |
94 | 1. support for vector quantities: parameters that are vector quantities can optionally specify a coordinate system and can identify vector components as such; datasets can optionally specify a coordinate system schema ([#115](https://github.com/hapi-server/data-specification/issues/115))
95 | 1. a dataset may optionally include other types of metadata inside a separate block ([#117](https://github.com/hapi-server/data-specification/issues/117))
96 | 1. each dataset may optionally indicate a maximum time range to request data ([#136](https://github.com/hapi-server/data-specification/issues/136))
97 |
98 |
99 | # Version 3.0.1
100 |
101 | ## Clarifications
102 |
103 | Added statement that `dataset` and `parameters` may not contain Unicode but that this support will be added in 3.1. See [GitHub Issue #128](https://github.com/hapi-server/data-specification/issues/128).
104 |
105 |
106 | # Version 3.0
107 |
108 | ## API Changes
109 |
110 | Non-backward compatible changes to the request interface in HAPI 3.0:
111 |
112 | * The URL parameter `id` was replaced with `dataset`.
113 | * `time.min` and `time.max` were replaced with `start` and `stop`, respectively.
114 | * Addition of a new endpoint, `about`, for server description metadata.
115 |
116 | These changes were discussed in issue #77. HAPI 3 servers must accept both the old and these new parameter names, but the HAPI 2 specification requires an error response if the new URL parameter names are used. In a future version, the deprecated older names will no longer be valid.
117 |
118 | ## Response Format Changes
119 | * Ability to specify time-varying bins (#83)
120 | * Ability to use JSON references in info response (#82)
121 | * Ability to indicate a units schema (if one is being used for units strings) (#81)
122 |
123 |
124 | This URL generates a diff:
125 |
126 | https://github.com/hapi-server/data-specification/compare/4702968b13439af684d43416b442c534bf569f6c..4a02df680b76d757a7cbf4a06e55c53b9b91e310
127 |
128 | ## Clarifications:
129 |
130 | deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
131 |
132 | replaced "hapi-server.org" with just "server" in URLs
133 |
134 | clarified what the length attribute means for data parameters
135 |
136 | clarified how to specify array sizes when one array dimension size is 1
137 |
138 | changed the definition of 'units' to allow for different units in the elements of an array
139 |
140 | changed the definition of 'label' to allow for different labels for each element in the array
141 |
142 | now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
143 |
144 | clarified that reordering parameters in a request has no effect on the order of the parameters in the data returned by the server (order of the fields returned is always the same regardless of the order in which they were requested)
145 |
146 | clarified server responses when time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
147 |
148 | more clarification and examples for error messages from HAPI servers
149 |
150 | fixed multiple typos and made wording improvements
151 |
152 |
153 |
154 | # Version 2.1.1
155 |
156 | These are all small clarifications on relatively infrequent situations.
157 |
158 | Pull request for going from 2.1.0 to 2.1.1:
159 | https://github.com/hapi-server/data-specification/pull/93
160 |
161 | This URL shows differences between 2.1.0 and 2.1.1:
162 | https://github.com/hapi-server/data-specification/compare/b85e1db..8969633
163 |
164 | ## Clarifications
165 |
166 | * updated version number to 2.1 when used in text and in example output
167 | * clarified how to indicate a dimensionless quantity within an array of units for an array-valued parameter with non-uniform units; see Issue #85
168 | * clarified the use of scalar and array values for labels and units that describe an array-valued parameter; see Issue #91
169 | * clarified that `null` is not allowed as a value within a `centers` or `ranges` array in a `bins` description; see Issue #86
170 |
171 | In a future release, there will be an occasion to use `null` values for some bin definitions, but only when the bin `centers` and `ranges` are able to be specified as time-varying elements within the data (as opposed to fixed quantities in the `info` metadata). This is expected to be included in version 3.0.
172 |
173 | # Version 2.1.0
174 |
175 | ## Clarifications
176 |
177 | 1. replaced "hapi-server.org" with just "server" in example URLs
178 | 2. clarified what the length attribute means for data parameters
179 | 3. clarified how to specify array sizes when one array dimension size is 1
180 | 4. more clarification and examples for error messages from HAPI servers
181 | 5. fixed multiple typos and made wording improvements
182 |
183 | ## Response Format Changes
184 |
185 | 1. changed the definition of 'units' to allow for different units in the elements of an array
186 | 2. changed the definition of 'label' to allow for different labels for each element in the array
187 | 3. deprecated the use of all uppercase for the time stamp location options in favor of all lowercase, which is more consistent with the rest of the specification
188 | 4. now allow multi-dimensional data to not have bins in every dimension; any dimensions with null for 'centers' and nothing present for 'ranges' will not be considered to have any binning in that dimension
189 |
190 | ## API Changes
191 |
192 | 1. add HAPI 1411 error `out of order or duplicate parameters`
193 | 2. clarified server responses when the time range has no data (HTTP status of 200 and HAPI status of 1201 and return no data content) or data is all fill (HTTP status of 200 and HAPI status of 1200 and return the fill values)
194 |
195 |
196 | # Version 2.0
197 |
198 | Difference between 1.1 and 2.0: https://www.diffchecker.com/sBWweoDa
199 |
200 | Summary of changes:
201 | 1. time values output by the server now must end with "Z" (not backward compatible) [diff](https://github.com/hapi-server/data-specification/commit/9bcb2f43014a05380425c8e2be24b457da7c5542)
202 | 2. the "units" on the bins are required (not backward compatible) [diff](https://github.com/hapi-server/data-specification/commit/30d8c967e252069a256019b71537694cd7fe7f97)
203 | 3. incoming time values (in request URL) should have a trailing "Z" to indicate GMT+0, but are interpreted as such even if there is no trailing Z [diff](https://github.com/hapi-server/data-specification/commit/a7a528b455b57a02987fb50b5e8c4890b721e774)
204 | 4. the length value for strings is now consistently described; all unused characters in the string should be filled with null characters (so no null terminator is needed if the string is exactly the given length)
205 | [diff 1](https://github.com/hapi-server/data-specification/commit/4d8395578c42a7a20fd8cd0895c6cedb5b28abc7) [diff 2](https://github.com/hapi-server/data-specification/commit/32f2b219fdd6e8f7c33546077f9dc95f908b0752) [diff 3](https://github.com/hapi-server/data-specification/commit/c0aa8b9ca2f3c0d573a40019bdaf5c6a1edb1008) [diff 4](https://github.com/hapi-server/data-specification/commit/533120bcd3622b58ae0f776f2e19fd854b4d3b54)
206 | 5. in CSV output, the use of quotes for string values with commas is clarified [diff](https://github.com/hapi-server/data-specification/commit/a1c444298f4cb0c7da2dfeb25b40879a80d100b0)
207 | 6. the subset of ISO8601 that we use is better described [diff](https://github.com/hapi-server/data-specification/commit/a7a528b455b57a02987fb50b5e8c4890b721e774)
208 | 7. added `timeStampLocation` [diff](https://github.com/hapi-server/data-specification/commit/9ee24e0e3f9c09243b0762664e13adbf446024b8) [diff](https://github.com/hapi-server/data-specification/commit/2603550251fefee14d4f2192095981c35753a2f3)
209 |
210 | # Version 1.1
211 |
212 | # Version 1.0
213 |
214 | Initial version!
215 |
--------------------------------------------------------------------------------