├── LICENSE.txt
├── README.md
├── examples
├── datapackage-m-example.pbix
└── datapackage-m-example.xlsx
├── functions
├── DataPackage.Helper.pq
├── DataPackage.Table.pq
└── DataPackage.Tables.pq
├── images
├── excel
│ ├── datapackage-m-excel-illustration-01.PNG
│ ├── datapackage-m-excel-illustration-01.png
│ ├── datapackage-m-excel-illustration-02.PNG
│ ├── datapackage-m-excel-illustration-02.png
│ ├── datapackage-m-excel-illustration-03.PNG
│ ├── datapackage-m-excel-illustration-03.png
│ ├── datapackage-m-excel-illustration-04.PNG
│ ├── datapackage-m-excel-illustration-04.png
│ └── datapackage-m-in-action-excel.gif
├── frictionless-data
│ └── frictionless-color-full-logo.svg
└── power-bi
│ ├── datapackage-m-in-action-power-bi.gif
│ ├── datapackage-m-power-bi-illustration-01.png
│ ├── datapackage-m-power-bi-illustration-02.png
│ ├── datapackage-m-power-bi-illustration-03.png
│ └── datapackage-m-power-bi-illustration-04.png
├── templates
├── datapackage-m-template.pbit
└── datapackage-m-template.xltx
└── tests
├── data-package-examples
├── README.md
├── countries-and-currencies
│ ├── README.md
│ ├── data
│ │ ├── countries-using-usd-and-gbp.csv
│ │ └── currencies.csv
│ └── datapackage.json
├── cpi-data-via-url
│ ├── README.md
│ └── datapackage.json
├── cpi
│ ├── README.md
│ ├── data
│ │ └── cpi.csv
│ └── datapackage.json
├── currencies-encoding-tests
│ ├── README.md
│ ├── data
│ │ ├── currencies-gb2312.csv
│ │ ├── currencies-iso-8859-1.csv
│ │ ├── currencies-iso-8859-2.csv
│ │ ├── currencies-iso-8859-3.csv
│ │ ├── currencies-no-encoding-property.csv
│ │ ├── currencies-utf-8.csv
│ │ └── currencies-windows-1252.csv
│ └── datapackage.json
├── donation-codes-schema
│ ├── README.md
│ └── tableschema.json
├── donation-codes-via-url
│ ├── README.md
│ └── datapackage.json
├── donation-codes
│ ├── README.md
│ ├── data
│ │ └── donation-codes.csv
│ └── datapackage.json
├── donations
│ ├── README.md
│ ├── data
│ │ └── donations.csv
│ └── datapackage.json
├── finance-vix
│ ├── README.md
│ ├── data
│ │ └── vix-daily.csv
│ └── datapackage.json
├── geo-countries
│ ├── README.md
│ ├── data
│ │ └── countries.geojson
│ └── datapackage.json
├── inflation
│ ├── README.md
│ ├── data
│ │ ├── inflation-consumer.csv
│ │ └── inflation-gdp.csv
│ └── datapackage.json
├── iso-639-1-language-codes
│ ├── README.md
│ ├── data
│ │ └── ISO-639-1-codes.csv
│ └── datapackage.json
├── open-data-day-tweets-2018
│ ├── README.md
│ ├── data
│ │ └── subsetofopendatadaytweets.csv
│ ├── datapackage.json
│ ├── docs
│ │ └── geotagged_tweets.png
│ └── scripts
│ │ └── opendataday.R
├── periodic-table
│ ├── README.md
│ ├── data.csv
│ └── datapackage.json
├── text-file
│ ├── README.md
│ ├── datapackage.json
│ └── text-file.txt
└── units-and-prefixes
│ ├── README.md
│ ├── data
│ ├── unit-prefixes.csv
│ └── units.csv
│ └── datapackage.json
└── datapackage-m-tests.pbix
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018-2020 Nimble Learn Ltd (http://www.nimblelearn.com)
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Data Package M (datapackage-m)
2 |
3 | A set of functions written in [Power Query M](https://docs.microsoft.com/en-us/powerquery-m/) for working with [Tabular Data Packages](https://specs.frictionlessdata.io/tabular-data-package) in [Power BI Desktop](https://powerbi.microsoft.com/en-us/desktop/) and [Power Query for Excel](https://powerquery.microsoft.com/en-us/excel/) (also known as 'Get & Transform' in Excel 2016 and later). Data Package M functions implement several Frictionless Data [specifications](https://specs.frictionlessdata.io/) that help you to go from data to insight, *faster*.
4 |
5 |
6 | ## What is Frictionless Data?
7 |
8 | 
9 |
10 | A Tabular Data Package is a simple format for publishing and sharing tabular data. Tabular Data Packages extend and specialise the Data Package specification, and both come under the umbrella of [Frictionless Data](https://frictionlessdata.io/).
11 |
12 | Visit [https://frictionlessdata.io](https://frictionlessdata.io) to learn more.
13 |
14 |
15 | ## Data Package M in Power BI
16 |
17 | 
18 |
19 | 
20 | *Invoking the `DataPackage.Tables` function*
21 |
22 | 
23 | *Viewing the resources for a GDP Tabular Data Package and previewing the resource data*
24 |
25 | 
26 | *Getting a table from a GDP Tabular Data Package resource after navigating from the resource list*
27 |
28 | 
29 | *Creating a quick visualisation of the GDP data*
30 |
31 | ## Data Package M in Power Query for Excel
32 |
33 | 
34 |
35 | 
36 | *Invoking the `DataPackage.Tables` function*
37 |
38 | 
39 | *Viewing the resources for a GDP Tabular Data Package and previewing the resource data*
40 |
41 | 
42 | *Getting a table from a GDP Tabular Data Package resource after navigating from the resource list*
43 |
44 | 
45 | *The GDP table is loaded to a new Excel Worksheet and/or [Data Model](https://support.office.com/en-us/article/create-a-data-model-in-excel-87e7a54c-87dc-488e-9410-5c75dbcb0f7b) and ready for analysis*
46 |
47 | ## Data Package M Functions
48 |
49 | | Function Name | Query/Shared Name* | Description |
50 | | :----------------- | :---------------- | :-------------------------------------------------------------------------- |
51 | | DataPackage.Table | DataPackageTable | Returns a [Tabular Data Resource](https://specs.frictionlessdata.io/tabular-data-resource/) as a `table` |
52 | | DataPackage.Tables | DataPackageTables | Returns a `table` that lists the [Data Resources](https://specs.frictionlessdata.io/data-resource/) contained within a Data Package |
53 | | DataPackage.Helper | DataPackageHelper | Returns a Data Package helper function as a `function` |
54 |
55 | *This is the name that appears in the Power Query Editor in the 'Queries' pane. When invoking the functions through the Power Query Editor's 'Invoke Function' dialog, this name will appear in the auto-generated Power Query M expressions. This is also the name that will be exposed to the shared function library when the functions have been [set up](#setup).
56 |
57 |
58 | ### DataPackage.Table
59 |
60 | | Parameter | Type | Description |
61 | | :--------------------- | :----------- | :------------------------------------------------------- |
62 | | dataPackageIdentifier | text | A valid [Data Package Identifier](https://specs.frictionlessdata.io/data-package-identifier/) |
63 | | dataResourceIndex | number | A valid Data Resource index |
64 | | dataResourceName | text | A valid Data Resource name |
65 | | ignoreTableSchemaTypes | logical | Controls whether the Table Schema is applied to the data |
66 |
67 |
68 | ### DataPackage.Tables
69 |
70 | | Parameter | Type | Description |
71 | | :--------------------- | :----------- | :------------------------------------------------------- |
72 | | dataPackageIdentifier | text | A valid [Data Package Identifier](https://specs.frictionlessdata.io/data-package-identifier/) |
73 | | ignoreTableSchemaTypes | logical | Controls whether the [Table Schema](https://specs.frictionlessdata.io/table-schema/) is applied to the data |
74 |
75 | Any Data Resource that is detected as being tabular will contain the data table in the 'data' column. Data Package properties that are inferred and added by Data Package M have their name prefixed with a double underscore, e.g. '__fullpath'.
76 |
77 |
78 | ### DataPackage.Helper
79 |
80 | | Parameter | Type | Description |
81 | | :--------------------- | :----------- | :------------------------------------------------------- |
82 | | functionName | text | A valid Data Package helper function name |
83 |
84 | This is a special function that acts as a library of Data Package helper functions. As the returned functions are only used as helpers for the `DataPackage.Table` and `DataPackage.Tables` functions, please see the comments inline with the Power Query M expressions to understand how they work. Advanced Power Query M users may wish to use these helper functions to work with Data Package metadata more directly.
85 |
86 |
87 | ## Table Schema Type Conversions
88 |
89 | Type conversion is attempted for the most common [Table Schema](https://specs.frictionlessdata.io/table-schema/) types:
90 |
91 | | Table Schema Type | M Type |
92 | | :-----------------| :------- |
93 | | string | text |
94 | | number | number |
95 | | integer | number |
96 | | boolean | logical |
97 | | date | date |
98 | | datetime | datetime |
99 | | time | time |
100 |
101 | Unhandled types are defaulted to the `text` type. Setting the `ignoreTableSchemaTypes` property to `true` when invoking `DataPackage.Table` or `DataPackage.Tables` will stop the Table Schema from being applied. This can be useful when one or more values in a column cause an error when the Table Schema type conversions are attempted.
102 |
103 |
104 | ## Implemented Frictionless Data Specifications
105 |
106 | The Data Package M functions are aligned closely with the v1 Frictionless Data specifications. The below table maps the significant Data Package M features to the corresponding specification(s):
107 |
108 | | Feature | Specification | Notes |
109 | | :------------------------------------------- | :--------------------------------------------------- | :-------------------------------------- |
110 | | Data Package Identifier resolution | [Data Package Identifier](https://specs.frictionlessdata.io/data-package-identifier/) | Identifier Strings only |
111 | | Remote and local resource path handling | [Data Resource](https://specs.frictionlessdata.io/data-resource/), [Tabular Data Resource](https://specs.frictionlessdata.io/tabular-data-resource/) | Includes handling path arrays (i.e. data in multiple files) |
112 | | Tabular Data Resource metadata handling | [Tabular Data Resource](https://specs.frictionlessdata.io/tabular-data-resource/) | 'dialect' is partially handled. 'encoding' is handled for the most common encoding types**|
113 | | Table Schema type conversions | [Table Schema](https://specs.frictionlessdata.io/table-schema/), [Tabular Data Resource](https://specs.frictionlessdata.io/tabular-data-resource/) | [Partial support](#table-schema-type-conversions). Includes resolving and applying remote schemas. |
114 | | Inline data handling | [Tabular Data Resource](https://specs.frictionlessdata.io/tabular-data-resource/) | |
115 | | Compressed resource handling | [Compression Resources Pattern](https://specs.frictionlessdata.io//patterns/#compression-of-resources) | Gzip compression support only |
116 |
117 | **The currently handled encoding types are gb2312, x-cp20949, euc-jp, iso-8859-1, iso-8859-2, iso-8859-3, iso-8859-4, iso-8859-5, iso-8859-6, iso-8859-7, iso-8859-8, iso-8859-9, iso-8859-13, iso-8859-15, us-ascii, utf-32be, utf-32, utf-16, utf-8, utf-7, and windows-1252.
118 |
119 | ## Setup
120 |
121 | ### Option 1: Power BI Desktop Template
122 |
123 | 1. Download the [latest release](https://github.com/nimblelearn/datapackage-m/releases).
124 | 2. Open the 'datapackage-m-template.pbit' file found in the 'templates' folder with Power BI Desktop.
125 | 3. Open the Power Query Editor window and invoke the `DataPackageTable` or `DataPackageTables` function with a valid [Data Package Identifier String](https://specs.frictionlessdata.io/data-package-identifier/). Invoking the `DataPackageTable` function also requires a valid resource name or index.
126 |
127 | For instructions on how to open the Power Query Editor window in Power BI Desktop [click here](https://powerbi.microsoft.com/en-us/documentation/powerbi-desktop-query-overview/).
128 |
129 | ### Option 2: Excel Template
130 |
131 | 1. Download the [latest release](https://github.com/nimblelearn/datapackage-m/releases).
132 | 2. Open the 'datapackage-m-template.xltx' file found in the 'templates' folder with Excel 2010 or later. For Excel 2010 and 2013, you MUST have the [Power Query for Excel](https://www.microsoft.com/en-gb/download/details.aspx?id=39379) add-in installed (installing the latest version is recommended). Power Query is built into Excel 2016 and later, and can be found under the 'Data' tab in the 'Get & Transform Data' section.
133 | 3. Open the Power Query Editor window and invoke the `DataPackageTable` or `DataPackageTables` function with a valid [Data Package Identifier String](https://specs.frictionlessdata.io/data-package-identifier/). Invoking the `DataPackageTable` function also requires a valid resource name or index.
134 |
135 | For instructions on how to open the Power Query Editor window in Excel [click here](https://support.microsoft.com/en-us/office/create-power-query-formulas-in-excel-6bc50988-022b-4799-a709-f8aafdee2b2f).
136 |
137 | ### Option 3: Create New Functions from the Power Query M Files
138 |
139 | This scenario is mostly applicable if you have an existing Power BI or Excel file, or if you're an advanced Power Query M user.
140 |
141 | 1. Download the [latest release](https://github.com/nimblelearn/datapackage-m/releases).
142 | 2. For each .pq file in the 'functions' folder, create a blank query, copy and paste the Power Query M expression into the Advanced Editor, click 'Done', and give the function the same name as its .pq file but without the dot (.) or extension (e.g. for [DataPackage.Table.pq](./functions/DataPackage.Table.pq) the name should be 'DataPackageTable').
143 |
144 | You're now ready to invoke the Data Package M functions through the Power Query 'Invoke Function' dialog or via Power Query M expressions as shown in the examples below.
145 |
146 |
147 | ## Examples
148 |
149 |
150 | ### Example Files
151 |
152 | You can invoke the `DataPackage.Table` and `DataPackage.Tables` functions through the Power BI Desktop or Power Query for Excel user interface (UI) using the 'Invoke Function' dialog.
153 |
154 | Download the [latest release](https://github.com/nimblelearn/datapackage-m/releases) and try one of the example files found in the 'examples' folder.
155 |
156 | | Example File | Description |
157 | | :------------------------- | :------------------------------------------------------------------------------------------- |
158 | | datapackage-m-example.pbix | A simple Power BI Desktop example based on a [GDP](https://datahub.io/core/gdp) Data Package |
159 | | datapackage-m-example.xlsx | A simple Excel Workbook example based on a [GDP](https://datahub.io/core/gdp) Data Package |
160 |
161 |
162 | ### Power Query M Expression Examples
163 |
164 | The following examples show the *recommended* way to invoke the Data Package M functions when using them in your Power Query M expressions.
165 |
166 | #### Getting the List of Resources from a Data Package (Remote)
167 |
168 | ```text
169 | let
170 | // Setup the shared function reference
171 | DataPackage.Tables = DataPackageTables,
172 |
173 | // Invoke the function
174 | Source = DataPackage.Tables("https://datahub.io/core/gdp/datapackage.json")
175 | in
176 | Source
177 | ```
178 |
179 | #### Getting the List of Resources from a Data Package (Local)
180 |
181 | ```text
182 | let
183 | // Setup the shared function reference
184 | DataPackage.Tables = DataPackageTables,
185 |
186 | // Invoke the function
187 | Source = DataPackage.Tables("C:\gdp\datapackage.json")
188 | in
189 | Source
190 | ```
191 |
192 | #### Getting the Data for a Resource Using Its Index (Remote)
193 |
194 | ```text
195 | let
196 | // Setup the shared function reference
197 | DataPackage.Table = DataPackageTable,
198 |
199 | // Invoke the function
200 | Source = DataPackage.Table("https://datahub.io/core/gdp/datapackage.json", 0)
201 | in
202 | Source
203 | ```
204 |
205 | #### Getting the Data for a Resource Using Its Index (Local)
206 |
207 | ```text
208 | let
209 | // Setup the shared function reference
210 | DataPackage.Table = DataPackageTable,
211 |
212 | // Invoke the function
213 | Source = DataPackage.Table("C:\gdp\datapackage.json", 0)
214 | in
215 | Source
216 | ```
217 |
218 | #### Getting the Data for a Resource Using Its Name (Remote)
219 |
220 | ```text
221 | let
222 | // Setup the shared function reference
223 | DataPackage.Table = DataPackageTable,
224 |
225 | // Invoke the function
226 | Source = DataPackage.Table("https://datahub.io/core/gdp/datapackage.json", null, "gdp")
227 | in
228 | Source
229 | ```
230 |
231 | #### Getting the Data for a Resource Using Its Name (Local)
232 |
233 | ```text
234 | let
235 | // Setup the shared function reference
236 | DataPackage.Table = DataPackageTable,
237 |
238 | // Invoke the function
239 | Source = DataPackage.Table("C:\gdp\datapackage.json", null, "gdp")
240 | in
241 | Source
242 | ```
243 |
244 | #### Getting the Data for a Resource Without Table Schema Type Conversion
245 |
246 | ```text
247 | let
248 | // Setup the shared function reference
249 | DataPackage.Table = DataPackageTable,
250 |
251 | // Invoke the function
252 | Source = DataPackage.Table("https://datahub.io/core/gdp/datapackage.json", null, "gdp", true)
253 | in
254 | Source
255 | ```
256 |
257 | #### Invoking a Data Package Helper Function Directly
258 |
259 | ```text
260 | let
261 | // Setup the shared function reference
262 | DataPackage.Helper = DataPackageHelper,
263 |
264 | // Get the required helper function by name
265 | DataPackage.Package = DataPackage.Helper("DataPackage.Package"),
266 |
267 | // Invoke the helper function
268 | Source = DataPackage.Package("https://datahub.io/core/gdp/datapackage.json")
269 | in
270 | Source
271 | ```
272 |
273 | ## Try Data Package M with the Core Datasets
274 |
275 | * [Core Datasets on DataHub](https://datahub.io/core/)
276 | * [Core Datasets on GitHub](https://github.com/datasets/)
277 |
278 |
279 | ## Licensing
280 |
281 | This work was created by [Nimble Learn](https://www.nimblelearn.com) and has been published with the MIT License. The full license can be viewed in [plain text](./LICENSE.txt).
282 |
283 |
284 | ## Notes
285 |
286 | * When prompted for the 'Privacy Level' by Power BI or Power Query for Excel, choose either 'Public' or 'Organizational'.
287 |
288 | * If the field values in a CSV file do not match the expected field type, as defined in the [Table Schema](https://specs.frictionlessdata.io/table-schema/), the invalid values in the column will return an error (Expression.Error). You can get around this by setting the `ignoreTableSchemaTypes` parameter to `true`.
289 |
290 |
291 | ## Known Issues
292 |
293 | ### Power BI Service Data Refresh Support
294 |
295 | Data refresh only works from Power BI Desktop and Power Query for Excel but not from the Power BI service. The Power BI service performs a static analysis on all the Power Query M expressions in a Power BI file to determine whether it can be refreshed by the service. One scenario where a data refresh is not supported is when the [Web.Contents](https://docs.microsoft.com/en-gb/powerquery-m/web-contents) function is used with a [dynamically generated URL](https://ideas.powerbi.com/forums/265200-power-bi-ideas/suggestions/10927416-web-contents-should-support-scheduled-refresh-ev). This is one of the functions that Data Package M uses to dynamically handle Data Packages and this currently prevents the Power BI Service from being able to refresh the data.
296 |
297 | If you require Power BI service data refresh support, you can try the [Data Package Connector](https://github.com/nimblelearn/datapackage-connector). This is a [Power BI Custom Connector](https://docs.microsoft.com/en-us/power-bi/connect-data/desktop-connector-extensibility) that's based on the same Data Package M functions and supports data refresh in the Power BI service through an [On-premises data gateway](https://docs.microsoft.com/en-us/power-bi/connect-data/service-gateway-onprem).
298 |
--------------------------------------------------------------------------------
/examples/datapackage-m-example.pbix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/examples/datapackage-m-example.pbix
--------------------------------------------------------------------------------
/examples/datapackage-m-example.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/examples/datapackage-m-example.xlsx
--------------------------------------------------------------------------------
/functions/DataPackage.Helper.pq:
--------------------------------------------------------------------------------
1 | /*
2 | MIT License
3 |
4 | Copyright (c) 2018-2020 Nimble Learn Ltd (http://www.nimblelearn.com)
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 | */
24 |
25 | // A function that returns a Data Package helper function
26 | let
27 | DataPackage.Helper = (functionName as text) as function =>
28 | let
29 | /*
30 | Resolve a Data Package Identifier and return the Data Package metadata as a record along with some inferred helper properties.
31 | The inferred helper properties are prefixed with a double underscore e.g. '__fullpath'.
32 | */
33 | DataPackage.Package = (dataPackageIdentifier as text) as record =>
34 | let
35 | // Base path pattern to use when resolving Data Package names against the Core Data Package registry
36 | CoreRegistryBasePathPattern = "https://datahub.io/core/{name}/",
37 |
38 | // The data package spec states that the descriptor file MUST be named datapackage.json
39 | Descriptor = "datapackage.json",
40 |
41 | // Expand the path if needed. This is a preliminary step for resolving the Data Package identifier.
42 | ExpandedPath = if Text.Contains(dataPackageIdentifier, "\") = false and Text.Contains(dataPackageIdentifier, "/") = false then
43 | // Treat this as an identifier for a Data Package in the Core datasets registry on datahub.io
44 | Text.Replace(CoreRegistryBasePathPattern, "{name}", dataPackageIdentifier)
45 | // Resolve the github.com URL to the corresponding raw.githubusercontent.com URL
46 | else if Text.Contains(dataPackageIdentifier,"github.com") then
47 | Text.Replace(
48 | Text.Replace(
49 | Text.Replace(
50 | dataPackageIdentifier,
51 | "github.com",
52 | "raw.githubusercontent.com"
53 | ),
54 | "/blob/master",
55 | "/master"
56 | ),
57 | "/tree/master",
58 | "/master"
59 | )
60 | else
61 | dataPackageIdentifier,
62 |
63 | // Build the base path for the Data Package
64 | BasePath = Text.Replace(
65 | if Text.StartsWith(ExpandedPath, "http") then
66 | (
67 | if Text.End(ExpandedPath, 1) <> "/" and Text.Contains(ExpandedPath, "datapackage.json") = false then
68 | ExpandedPath & "/"
69 | else
70 | ExpandedPath
71 | )
72 | else
73 | (
74 | if Text.End(ExpandedPath, 1) <> "\" and Text.Contains(ExpandedPath, "datapackage.json") = false then
75 | ExpandedPath & "\"
76 | else
77 | ExpandedPath
78 | ),
79 | "datapackage.json",
80 | ""
81 | ),
82 |
83 | // Build the fully qualified path of the descriptor file
84 | DescriptorPath = BasePath & Descriptor,
85 |
86 | // Infer the Data Package location type
87 | DescriptorPathType = if Text.StartsWith(DescriptorPath, "http") then
88 | "remote"
89 | else
90 | "local",
91 |
92 | // Deserialise the Data Package metadata into a record
93 | DataPackage = Json.Document(DataPackage.ResourceContents(DescriptorPath, DescriptorPathType)),
94 |
95 | AddResourcePathType = (path as any) =>
96 | let
97 | Path = if path is list then
98 | // The path type must be of a single type when data is split across multiple files, so we only need to consider the first path in the list
99 | path{0}
100 | else
101 | path,
102 | PathType = if Text.StartsWith(Path, "http") then
103 | // The path is remote
104 | "remote"
105 | else
106 | // Inherit the DataPackage descriptor path type
107 | DescriptorPathType
108 | in
109 | PathType,
110 |
111 | AddResourceFullPath = (pathOuter as any, pathTypeOuter as text, descriptorPathTypeOuter as text) =>
112 | let
113 | // Get the fully qualified file path
114 | GetFullPath = (path as text, pathType as text, descriptorPathType as text) =>
115 | let
116 | // Flag whether the path is absolute or relative
117 | IsRelativePath = if Text.StartsWith(path, "http") then
118 | // This is an absolute path i.e. a URL
119 | false
120 | else
121 | // This is a relative path and should inherit the base path of the DataPackage descriptor
122 | true,
123 |
124 | AbsolutePath = if pathType = "remote" and IsRelativePath = false then
125 | // No resolution required; the path is already absolute
126 | path
127 | else if pathType = "remote" and IsRelativePath = true then
128 | // Resolve the relative remote path to an absolute URL
129 | BasePath & path
130 | else if pathType = "local" and IsRelativePath = true then
131 | // Resolve the relative local path to an absolute Windows file location
132 | BasePath & Text.Replace(path, "/", "\")
133 | else
134 | try error "Unhandled path type."
135 | in
136 | AbsolutePath,
137 |
138 | FullPath = if pathOuter is list then
139 | // The path type must be the same when data is split across files
140 | List.Transform(pathOuter, each GetFullPath(_, pathTypeOuter, descriptorPathTypeOuter))
141 | else
142 | GetFullPath(pathOuter, pathTypeOuter, descriptorPathTypeOuter)
143 | in
144 | FullPath,
145 |
146 | // Resolve the schema descriptor. It may be inline or a reference to the location of a schema descriptor.
147 | AddResourceResolveSchema = (schema as any) => if schema is record then
148 | // The schema object is already present; no resolution is required.
149 | schema
150 | // Treat this as a reference to a schema in a remote location
151 | else if schema is text then
152 | // The schema property points to an external schema descriptor; resolution is required.
153 | Json.Document(DataPackage.ResourceContents(schema, "remote"))
154 | else
155 | try error "The schema property is not valid.",
156 |
157 | // Resolve the dialect descriptor. It may be inline or a reference to the location of a dialect descriptor
158 | AddResourceResolveDialect = (dialect as any) => if dialect is record or dialect = null then
159 | // The dialect object is already present; no resolution is required
160 | dialect
161 | // The assumption has been made that reference will be to a dialect in a remote location
162 | else if dialect is text then
163 | // The dialect property points to an external dialect descriptor; resolution is required.
164 | Json.Document(DataPackage.ResourceContents(dialect, "remote"))
165 | else
166 | try error "The dialect property is not valid.",
167 |
168 | // Extend the Data Package record with the inferred fields (i.e. properties)
169 | ExtendedResources = [
170 | resources = List.Transform(
171 | DataPackage[resources],
172 | each Record.Combine(
173 | {
174 | _,
175 | [
176 | __pathtype = AddResourcePathType([path]),
177 | __fullpath = AddResourceFullPath([path], __pathtype, descriptorpathtype),
178 | __resolvedschema = AddResourceResolveSchema([schema]?),
179 | __resolveddialect = AddResourceResolveDialect([dialect]?)
180 | ]
181 | }
182 | )
183 | ),
184 | descriptorpathtype = DescriptorPathType
185 | ],
186 |
187 | // Rename the original 'resources' field to 'replacedresources' so that it can still be accessed in its unaltered state
188 | ExtendedDataPackage = Record.Combine(
189 | {
190 | Record.RenameFields(DataPackage, {"resources", "replacedresources"}),
191 | ExtendedResources
192 | }
193 | )
194 | in
195 | ExtendedDataPackage,
196 |
197 | // Returns the data content of a Data Resource. If the file is Gzip compressed, it will be decompressed.
198 | DataPackage.ResourceContents = (path as text, pathType as text, optional compression as text) =>
199 | let
200 | PathType = Text.Lower(pathType),
201 | Compression = if compression = null then
202 | "none"
203 | else
204 | Text.Lower(compression),
205 | WebContents = Web.Contents(path),
206 | FileContents = File.Contents(path),
207 | CompressionErrorMessage = "Unsupported compression type. Allowed values: ""none"", ""gz"".",
208 | PathTypeErrorMessage = "Unsupported path type. Allowed values: ""remote"", ""local"".",
209 | Contents = if PathType = "remote" then
210 | if Compression = "none" then
211 | WebContents
212 | else if Compression = "gz" then
213 | Binary.Decompress(WebContents, Compression.GZip)
214 | else
215 | try error CompressionErrorMessage
216 | else if PathType = "local" then
217 | if Compression = "none" then
218 | FileContents
219 | else if Compression = "gz" then
220 | Binary.Decompress(FileContents, Compression.GZip)
221 | else
222 | try error CompressionErrorMessage
223 | else
224 | try error PathTypeErrorMessage
225 | in
226 | Contents,
227 |
// Returns the untyped Data Resource (no Table Schema applied) after using the Data Package
// metadata to determine how the data should be parsed.
//
// dataResource: a resource record from the deserialised Data Package metadata. The '__fullpath',
// '__pathtype', '__resolveddialect' properties are injected by DataPackage.Package during
// resolution (they are not part of the raw datapackage.json).
DataPackage.Resource = (dataResource as record) as table =>
    let
        // Get the fully qualified file path. This property will only exist if the data is not inline
        Path = dataResource[__fullpath]?,

        // If the data is inline then the tabular data will be represented as JSON in the 'data' property
        Data = dataResource[data]?,

        // Determine whether special handling of multiple data file paths is required: http://frictionlessdata.io/specs/data-resource/
        MultipleDataFilePath = Path is list,

        PathType = dataResource[__pathtype],

        // Resolve the CSV dialect 'header' flag: prefer the resolved dialect, fall back to the
        // inline dialect, and default to true when neither specifies it (per the CSV Dialect spec)
        Header = if (dataResource[__resolveddialect]?)[header]? = null then
            if (dataResource[dialect]?)[header]? = null then
                true
            else
                (dataResource[dialect]?)[header]?
        else
            (dataResource[__resolveddialect]?)[header]?,

        // The handling of the compression property isn't defined by the spec so this may need to be reworked in the near-future.
        // The extension is taken as everything after the last '.' of the (first) path.
        InferredExtension = if MultipleDataFilePath then
            Text.Range(Path{0}, Text.PositionOf(Path{0}, ".", Occurrence.Last) + 1)
        else
            Text.Range(Path, Text.PositionOf(Path, ".", Occurrence.Last) + 1),
        InferredCompression = if List.Contains({"csv", "txt"}, InferredExtension) then
            "none"
        else
            InferredExtension,
        Compression = if dataResource[compression]? = null then
            InferredCompression
        else
            dataResource[compression]?,

        // NOTE(review): the delimiter is read from the resource root here; the Table Schema spec
        // places it under 'dialect' — confirm whether root-level 'delimiter' is intentional
        Delimiter = if dataResource[delimiter]? = null then
            ","
        else
            dataResource[delimiter]?,

        // Mapping common encodings to their corresponding code page identifiers. The mappings were sourced from: https://docs.microsoft.com/en-us/windows/desktop/intl/code-page-identifiers.
        CodePageIdentifiers = [
            #"gb2312" = 936, // ANSI/OEM Simplified Chinese (PRC, Singapore); Chinese Simplified (GB2312)
            #"x-cp20949" = 20949, // Korean Wansung
            #"euc-jp" = 20932, // Japanese (JIS 0208-1990 and 0212-1990)
            #"iso-8859-1" = 28591, // ISO 8859-1 Latin 1; Western European (ISO)
            #"iso-8859-2" = 28592, // ISO 8859-2 Central European; Central European (ISO)
            #"iso-8859-3" = 28593, // ISO 8859-3 Latin 3
            #"iso-8859-4" = 28594, // ISO 8859-4 Baltic
            #"iso-8859-5" = 28595, // ISO 8859-5 Cyrillic
            #"iso-8859-6" = 28596, // ISO 8859-6 Arabic
            #"iso-8859-7" = 28597, // ISO 8859-7 Greek
            #"iso-8859-8" = 28598, // ISO 8859-8 Hebrew; Hebrew (ISO-Visual)
            #"iso-8859-9" = 28599, // ISO 8859-9 Turkish
            #"iso-8859-13" = 28603, // ISO 8859-13 Estonian
            #"iso-8859-15" = 28605, // ISO 8859-15 Latin 9
            #"us-ascii" = 20127, // US-ASCII (7-bit)
            #"utf-32be" = 12001, // Unicode UTF-32, big endian byte order; available only to managed applications
            #"utf-32" = 12000, // Unicode UTF-32, little endian byte order; available only to managed applications
            #"utf-16" = 1200, // Unicode UTF-16, little endian byte order (BMP of ISO 10646); available only to managed applications
            #"utf-8" = 65001, // Unicode (UTF-8)
            #"utf-7" = 65000, // Unicode (UTF-7)
            #"windows-1252"= 1252 // ANSI Latin 1; Western European (Windows)
        ],
        Encoding = if dataResource[encoding]? = null then
            65001 // UTF-8 encoding is used when this property is missing
        else
            // Look-up the corresponding code page identifier; unknown encodings fall back to UTF-8
            (
                try Record.Field(
                    CodePageIdentifiers,
                    Text.Lower(Text.From(dataResource[encoding]?))
                )
                otherwise 65001
            ),

        DocumentOptions = [
            Delimiter = Delimiter,
            Encoding = Encoding
        ],

        // Loads a single CSV file (remote or local, optionally gzipped) as an untyped document
        LoadCsvFile = (path as text) =>
            Csv.Document(
                DataPackage.ResourceContents(
                    path,
                    PathType,
                    Compression
                ),
                DocumentOptions
            ),

        // Parses inline JSON Tabular Data: either a list of lists ('arrays', first row = header)
        // or a list of records ('objects')
        LoadInlineJSON = (data as text) =>
            let
                JsonTabularData = Json.Document(data),
                TabularDataRowType = if JsonTabularData is list and JsonTabularData{0}? is list then
                    "arrays"
                else if JsonTabularData is list and JsonTabularData{0}? is record then
                    "objects"
                else
                    // FIX(review): was 'try error ...' which returns an error *record* instead of
                    // raising, producing a confusing downstream failure
                    error "Invalid JSON Tabular Data",
                DataTable = if TabularDataRowType = "arrays" then
                    #table(
                        JsonTabularData{0},
                        List.RemoveFirstN(JsonTabularData, 1)
                    )
                else if TabularDataRowType = "objects" then
                    Table.FromRecords(JsonTabularData)
                else
                    // FIX(review): as above, raise a real error rather than wrapping it with 'try'
                    error "Invalid JSON Tabular Data"
            in
                DataTable,

        // Load the data from a CSV file or inline JSON depending on whether the 'path' or 'data' property has a value
        LoadedData = if Path <> null then
            if MultipleDataFilePath then
                Table.Combine(
                    List.Transform(
                        Path,
                        each LoadCsvFile(_)
                    )
                )
            else
                LoadCsvFile(Path)
        else
            // The JSON Tabular Data specifies the header so the 'promote to header' step isn't needed
            LoadInlineJSON(Data),

        // Only CSV data (Data = null) with a truthy dialect header needs its first row promoted
        DataTable = if Header = true and Data = null then
            Table.PromoteHeaders(LoadedData)
        else
            LoadedData
    in
        DataTable,
363 |
// Attempt to convert Table Schema field type values into appropriate M type values.
//
// fieldType: a Table Schema type name (e.g. "string", "integer", "boolean", "date").
// fieldValue: the raw cell value; it is normalised to text before conversion.
// Returns the converted M value, or the text representation when the type is unhandled
// or the value doesn't match a recognised form (e.g. an unrecognised boolean token).
DataPackage.ConvertFieldValue = (fieldType as text, fieldValue as any) as any =>
    let
        // FIX(review): renamed from 'FieldTypeAsText' — this holds the *value*, not the type
        FieldValueAsText = Text.From(fieldValue),

        // Lower-cased once for the boolean token comparisons below
        LoweredFieldValue = Text.Lower(FieldValueAsText),

        MTypeValue = if fieldType = "string" then
            FieldValueAsText
        else if fieldType = "number" then
            Number.From(FieldValueAsText)
        else if fieldType = "integer" then
            Number.From(FieldValueAsText)
        else if fieldType = "boolean" then
            if List.Contains({"yes", "y", "true", "t", "1"}, LoweredFieldValue) then
                true
            else if List.Contains({"no", "n", "false", "f", "0"}, LoweredFieldValue) then
                false
            else
                // Unrecognised truthy/falsy token: fall back to the text representation
                FieldValueAsText
        else if fieldType = "object" or fieldType = "array" then
            Json.Document(FieldValueAsText)
        else if fieldType = "date" then
            Date.From(FieldValueAsText)
        else if fieldType = "datetime" then
            DateTime.From(FieldValueAsText)
        else if fieldType = "time" then
            Time.From(FieldValueAsText)
        // Any field types that are not handled yet (e.g. year, yearmonth, duration, geopoint)
        else
            FieldValueAsText
    in
        MTypeValue,
394 |
// Map JSON Table Schema field types to the name of the equivalent M type.
// Unrecognised schema types default to "text".
DataPackage.ConvertFieldType = (fieldType as text) as text =>
    let
        // Schema types whose M type shares the same name
        PassThroughTypes = {"number", "date", "datetime", "time"},

        // Schema types that map to a differently-named M type
        TypeOverrides = [string = "text", integer = "number", boolean = "logical"],

        MType = if List.Contains(PassThroughTypes, fieldType) then
            fieldType
        else
            Record.FieldOrDefault(TypeOverrides, fieldType, "text")
    in
        MType,
410 |
// Map the Data Package helper functions to record fields so they can be looked up by name.
// Each field value is one of the function bindings defined earlier in this file.
HelperFunctions = [
    DataPackage.Package = DataPackage.Package,
    DataPackage.ResourceContents = DataPackage.ResourceContents,
    DataPackage.Resource = DataPackage.Resource,
    DataPackage.ConvertFieldType = DataPackage.ConvertFieldType,
    DataPackage.ConvertFieldValue = DataPackage.ConvertFieldValue
],

// Return the requested helper function. Record.Field raises an error when functionName
// is not one of the field names above.
HelperFunction = Record.Field(HelperFunctions, functionName)
in
HelperFunction,
424 | // Add documentation
425 | DataPackage.HelperWithDocumentation = type function (
426 | functionName as (
427 | type text meta [
428 | Documentation.FieldCaption = "Function Name",
429 | Documentation.FieldDescription = "A valid Data Package helper function name",
430 | Documentation.SampleValues = {"DataPackage.Package"}
431 | ]
432 | )
433 | ) as function meta [
434 | Documentation.Name = "DataPackage.Helper",
435 | Documentation.LongDescription = "Returns a Data Package helper function as a function.
436 |
Example:
437 |
438 |
439 | let
440 | // Setup the shared function reference
441 | DataPackage.Helper = DataPackageHelper,
442 |
443 | // Get the required helper function by name
444 | DataPackage.Package = DataPackage.Helper(""DataPackage.Package""),
445 |
446 | // Invoke the helper function
447 | Source = DataPackage.Package(""https://datahub.io/core/gdp/datapackage.json"")
448 | in
449 | Source
450 |
451 |
452 | More documentation available at: https://github.com/nimblelearn/datapackage-m",
453 | Documentation.Examples = null
454 | ]
455 | in
456 | Value.ReplaceType(DataPackage.Helper, DataPackage.HelperWithDocumentation)
--------------------------------------------------------------------------------
/functions/DataPackage.Table.pq:
--------------------------------------------------------------------------------
1 | /*
2 | MIT License
3 |
4 | Copyright (c) 2018 Nimble Learn Ltd (http://www.nimblelearn.com)
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 | */
24 |
25 | // Returns a Tabular Data Resource as a table
26 | let
DataPackage.Table = (
    dataPackageIdentifier as text, // A valid Data Package Identifier
    optional dataResourceIndex as number, // A valid Data Resource index
    optional dataResourceName as text, // A valid Data Resource name
    optional ignoreTableSchemaTypes as logical // Controls whether the Table Schema is applied to the data
) as table =>
    let
        // Set up local references for shared functions
        DataPackage.Helper = DataPackageHelper,
        DataPackage.Package = DataPackage.Helper("DataPackage.Package"),
        DataPackage.Resource = DataPackage.Helper("DataPackage.Resource"),
        DataPackage.ConvertFieldType = DataPackage.Helper("DataPackage.ConvertFieldType"),
        DataPackage.ConvertFieldValue = DataPackage.Helper("DataPackage.ConvertFieldValue"),

        // Ensure ignoreTableSchemaTypes has a true/false value
        ignoreTableSchemaTypes = if ignoreTableSchemaTypes = null then
            false
        else
            ignoreTableSchemaTypes,

        // Deserialise the Data Package metadata
        DataPackage = DataPackage.Package(dataPackageIdentifier),

        // Assign the list of resources to a variable
        DataResources = DataPackage[resources],

        // Load the selected data resource: by name when given, otherwise by index.
        // Evaluation is lazy, so this is never evaluated when both selectors are null.
        DataResource = if dataResourceName <> null then
            // Get the resource by name
            List.Select(DataResources, each _[name] = dataResourceName){0}
        else
            // Get the resource by index
            DataResources{dataResourceIndex},
        UntypedData = DataPackage.Resource(DataResource),

        // Get the schema metadata
        Schema = DataResource[__resolvedschema],
        FieldNames = List.Transform(Schema[fields], each [name]),
        FieldTypes = List.Transform(Schema[fields], each [type]),

        // Map the Table Schema types to M type names (e.g. "integer" -> "number")
        MFieldTypes = List.Transform(FieldTypes, each DataPackage.ConvertFieldType(_)),

        // {field name, schema type, M type name} triples
        FieldNamesAndTypes = List.Zip({FieldNames, FieldTypes, MFieldTypes}),

        // Map each M type name that DataPackage.ConvertFieldType can return to an actual M type value
        MTypeValues = [
            text = type text,
            number = type number,
            logical = type logical,
            date = type date,
            datetime = type datetime,
            time = type time
        ],

        // FIX(review): the transformation lists were previously generated as M source text and
        // round-tripped through Expression.Evaluate, which broke on field names containing double
        // quotes and added needless code generation. Build the lists directly instead.
        TypeTransformations = List.Transform(
            FieldNamesAndTypes,
            each {_{0}, Record.Field(MTypeValues, _{2})}
        ),
        ValueTransformations = List.Transform(
            FieldNamesAndTypes,
            each {_{0}, (fieldValue) => DataPackage.ConvertFieldValue(_{1}, fieldValue)}
        ),

        // List.Zip already yields the {old, new} pairs Table.RenameColumns expects
        RenamedColumns = List.Zip({Table.ColumnNames(UntypedData), FieldNames}),

        // Apply the column names taken from the schema
        UntypedDataWithRenamedColumns = Table.RenameColumns(UntypedData, RenamedColumns),

        // Make the field values M data type compatible
        ConformedData = Table.TransformColumns(UntypedDataWithRenamedColumns, ValueTransformations),

        // Apply the M data types to the columns
        ConformedAndTypedData = Table.TransformColumnTypes(ConformedData, TypeTransformations),

        // A resource selector (name or index) is required; raise a clear error when both are
        // missing, otherwise return the (optionally schema-typed) resource data
        DataTable = if dataResourceName <> null or dataResourceIndex <> null then
            // Determine whether to apply the JSON Table Schema to the data
            if ignoreTableSchemaTypes = false then
                ConformedAndTypedData
            else
                UntypedDataWithRenamedColumns
        else
            error "A valid Data Resource name or index was not provided."
    in
        DataTable,
131 |
132 | // Add documentation
133 | DataPackage.TableWithDocumentation = type function (
134 | dataPackageIdentifier as (
135 | type text meta [
136 | Documentation.FieldCaption = "Data Package Identifier",
137 | Documentation.FieldDescription = "A valid Data Package Identifier",
138 | Documentation.SampleValues = {"https://datahub.io/core/gdp/datapackage.json"}
139 | ]
140 | ),
141 | optional dataResourceIndex as (
142 | type number meta [
143 | Documentation.FieldCaption = "Data Resource Index",
144 | Documentation.FieldDescription = "A valid Data Resource index",
145 | Documentation.SampleValues = {0}
146 | ]
147 | ),
148 | optional dataResourceName as (
149 | type text meta [
150 | Documentation.FieldCaption = "Data Resource Name",
151 | Documentation.FieldDescription = "A valid Data Resource name",
152 | Documentation.SampleValues = {"gdp"}
153 | ]
154 | ),
155 | optional ignoreTableSchemaTypes as (
156 | type logical meta [
157 | Documentation.FieldCaption = "Ignore Table Schema Types",
158 | Documentation.FieldDescription = "Controls whether the Table Schema is applied to the data",
159 | Documentation.SampleValues = {false}
160 | ]
161 | )
162 | ) as table meta [
163 | Documentation.Name = "DataPackage.Table",
164 | Documentation.LongDescription = "Returns a Tabular Data Resource as a table.
165 |
Example:
166 |
167 |
168 | let
169 | // Setup the shared function reference
170 | DataPackage.Table = DataPackageTable,
171 |
172 | // Invoke the function
173 | Source = DataPackage.Table(""https://datahub.io/core/gdp/datapackage.json"", null, ""gdp"")
174 | in
175 | Source
176 |
177 |
178 | More documentation available at: https://github.com/nimblelearn/datapackage-m",
179 | Documentation.Examples = null
180 | ]
181 | in
182 | Value.ReplaceType(DataPackage.Table, DataPackage.TableWithDocumentation)
--------------------------------------------------------------------------------
/functions/DataPackage.Tables.pq:
--------------------------------------------------------------------------------
1 | /*
2 | MIT License
3 |
4 | Copyright (c) 2018 Nimble Learn Ltd (http://www.nimblelearn.com)
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 | */
24 |
25 | // Returns a table listing the Data Resources contained within a Data Package
26 | let
// Returns a table listing the Data Resources contained within a Data Package, with the
// resource data loaded into a 'data' column for CSV-backed resources.
DataPackage.Tables = (
    dataPackageIdentifier as text, // This can be a remote or local Data Package identifier
    optional ignoreTableSchemaTypes as logical // Ignore the Table Schema due to there being unhandled field values, types, and/or formatting
) as table =>
    let
        // Local references to the shared functions
        DataPackage.Table = DataPackageTable,
        DataPackage.Helper = DataPackageHelper,
        DataPackage.Package = DataPackage.Helper("DataPackage.Package"),
        DataPackage.Resource = DataPackage.Helper("DataPackage.Resource"),

        // Default the optional flag to false
        ignoreTableSchemaTypes = if ignoreTableSchemaTypes = null then
            false
        else
            ignoreTableSchemaTypes,

        // Deserialise the Data Package metadata
        DataPackage = DataPackage.Package(dataPackageIdentifier),
        DataResources = DataPackage[resources],

        // The resource properties surfaced as columns (declared once; used for expanding,
        // naming, and projecting below)
        ResourceColumnNames = {
            "profile",
            "name",
            "path",
            "__fullpath",
            "__pathtype",
            "title",
            "description",
            "format",
            "mediatype",
            "encoding",
            "bytes",
            "hash",
            "schema",
            "__resolvedschema",
            "dialect",
            "__resolveddialect",
            "sources",
            "licenses"
        },

        // Convert the list of resource records into a table and expand the record column
        DataResourcesAsTable = Table.FromList(
            DataResources,
            Splitter.SplitByNothing(),
            {"ResourceProperties"},
            null,
            ExtraValues.Error
        ),
        ExpandedDataResourcesAsTable = Table.ExpandRecordColumn(
            DataResourcesAsTable,
            "ResourceProperties",
            ResourceColumnNames,
            ResourceColumnNames
        ),

        // Loads the resource data as a table when the path looks like a CSV file, otherwise null.
        // The path may be text or a list of paths (a resource with multiple data files), in which
        // case only the first path is inspected.
        TryLoadResourceData = (resourcePath as any, resourceIndex as number) =>
            let
                PathToInspect = if resourcePath is list then resourcePath{0} else resourcePath,
                LooksLikeCsv = Text.Contains(PathToInspect, ".csv") or Text.Contains(PathToInspect, ".csv.gz")
            in
                if LooksLikeCsv then
                    DataPackage.Table(dataPackageIdentifier, resourceIndex, null, ignoreTableSchemaTypes)
                else
                    null,

        // Add a 0-based index plus a 'data' column holding each resource's loaded table
        ExpandedDataResourcesWithDataAsTable = Table.AddColumn(
            Table.AddIndexColumn(ExpandedDataResourcesAsTable, "index", 0, 1),
            "data",
            each TryLoadResourceData([path], Number.From([index]))
        ),
        ExpandedDataResourcesWithDataAsBufferedTable = Table.Buffer(ExpandedDataResourcesWithDataAsTable),

        // Project the Data Package properties as columns, with 'index' and 'data' first
        DataTables = Table.SelectColumns(
            ExpandedDataResourcesWithDataAsBufferedTable,
            {"index", "data"} & ResourceColumnNames
        )
    in
        DataTables,
146 |
147 | // Add documentation
148 | DataPackage.TablesWithDocumentation = type function (
149 | dataPackageIdentifier as (
150 | type text meta [
151 | Documentation.FieldCaption = "Data Package Identifier",
152 | Documentation.FieldDescription = "A valid Data Package Identifier",
153 | Documentation.SampleValues = {"https://datahub.io/core/gdp/datapackage.json"}
154 | ]
155 | ),
156 | optional ignoreTableSchemaTypes as (
157 | type logical meta [
158 | Documentation.FieldCaption = "Ignore Table Schema Types",
159 | Documentation.FieldDescription = "Controls whether the Table Schema is applied to the data",
160 | Documentation.SampleValues = {false}
161 | ]
162 | )
163 | ) as table meta [
164 | Documentation.Name = "DataPackage.Tables",
165 | Documentation.LongDescription = "Returns a table that lists the Data Resources contained within a Data Package.
166 |
Example:
167 |
168 |
169 | let
170 | // Setup the shared function reference
171 | DataPackage.Tables = DataPackageTables,
172 |
173 | // Invoke the function
174 | Source = DataPackage.Tables(""https://datahub.io/core/gdp/datapackage.json"")
175 | in
176 | Source
177 |
178 |
179 | More documentation available at: https://github.com/nimblelearn/datapackage-m",
180 | Documentation.Examples = null
181 | ]
182 | in
183 | Value.ReplaceType(DataPackage.Tables, DataPackage.TablesWithDocumentation)
--------------------------------------------------------------------------------
/images/excel/datapackage-m-excel-illustration-01.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-excel-illustration-01.PNG
--------------------------------------------------------------------------------
/images/excel/datapackage-m-excel-illustration-01.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-excel-illustration-01.png
--------------------------------------------------------------------------------
/images/excel/datapackage-m-excel-illustration-02.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-excel-illustration-02.PNG
--------------------------------------------------------------------------------
/images/excel/datapackage-m-excel-illustration-02.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-excel-illustration-02.png
--------------------------------------------------------------------------------
/images/excel/datapackage-m-excel-illustration-03.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-excel-illustration-03.PNG
--------------------------------------------------------------------------------
/images/excel/datapackage-m-excel-illustration-03.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-excel-illustration-03.png
--------------------------------------------------------------------------------
/images/excel/datapackage-m-excel-illustration-04.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-excel-illustration-04.PNG
--------------------------------------------------------------------------------
/images/excel/datapackage-m-excel-illustration-04.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-excel-illustration-04.png
--------------------------------------------------------------------------------
/images/excel/datapackage-m-in-action-excel.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/excel/datapackage-m-in-action-excel.gif
--------------------------------------------------------------------------------
/images/frictionless-data/frictionless-color-full-logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
100 |
--------------------------------------------------------------------------------
/images/power-bi/datapackage-m-in-action-power-bi.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/power-bi/datapackage-m-in-action-power-bi.gif
--------------------------------------------------------------------------------
/images/power-bi/datapackage-m-power-bi-illustration-01.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/power-bi/datapackage-m-power-bi-illustration-01.png
--------------------------------------------------------------------------------
/images/power-bi/datapackage-m-power-bi-illustration-02.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/power-bi/datapackage-m-power-bi-illustration-02.png
--------------------------------------------------------------------------------
/images/power-bi/datapackage-m-power-bi-illustration-03.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/power-bi/datapackage-m-power-bi-illustration-03.png
--------------------------------------------------------------------------------
/images/power-bi/datapackage-m-power-bi-illustration-04.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/images/power-bi/datapackage-m-power-bi-illustration-04.png
--------------------------------------------------------------------------------
/templates/datapackage-m-template.pbit:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/templates/datapackage-m-template.pbit
--------------------------------------------------------------------------------
/templates/datapackage-m-template.xltx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/templates/datapackage-m-template.xltx
--------------------------------------------------------------------------------
/tests/data-package-examples/README.md:
--------------------------------------------------------------------------------
1 | # Example Data Packages for Testing
2 |
3 | A subset of the Data Packages available in the [Example Data Packages](https://github.com/frictionlessdata/example-data-packages) GitHub Repository. These samples are used for testing Data Package M with local Data Package scenarios.
--------------------------------------------------------------------------------
/tests/data-package-examples/countries-and-currencies/README.md:
--------------------------------------------------------------------------------
1 | A Foreign Key example linking two CSV files in the same data package
2 |
3 |
--------------------------------------------------------------------------------
/tests/data-package-examples/countries-and-currencies/data/countries-using-usd-and-gbp.csv:
--------------------------------------------------------------------------------
1 | name,currency_alphabetic_code
2 | American Samoa,USD
3 | "Bonaire, Sint Eustatius and Saba",USD
4 | British Indian Ocean Territory,USD
5 | Ecuador,USD
6 | El Salvador,USD
7 | Guam,USD
8 | Guernsey,GBP
9 | Haiti,USD
10 | Isle of Man,GBP
11 | Jersey,GBP
12 | Marshall Islands,USD
13 | "Micronesia, Federated States of",USD
14 | Northern Mariana Islands,USD
15 | Palau,USD
16 | Panama,USD
17 | Puerto Rico,USD
18 | Timor-Leste,USD
19 | Turks and Caicos Islands,USD
20 | United Kingdom,GBP
21 | United States,USD
22 | United States Minor Outlying Islands,USD
23 | "Virgin Islands, British",USD
24 | "Virgin Islands, U.S.",USD
25 |
--------------------------------------------------------------------------------
/tests/data-package-examples/countries-and-currencies/data/currencies.csv:
--------------------------------------------------------------------------------
1 | currency_alphabetic_code,currency,symbol
2 | USD,US Dollar,$
3 | GBP,Pound Sterling,£
4 |
--------------------------------------------------------------------------------
/tests/data-package-examples/countries-and-currencies/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "name": "countries-and-currencies",
4 | "title": "Countries and Currencies",
5 | "version": "v0.1.0",
6 | "licenses": [
7 | {
8 | "name": "CC0-1.0",
9 | "title": "Creative Commons CCZero 1.0",
10 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
11 | }
12 | ],
13 | "resources": [
14 | {
15 | "profile": "tabular-data-resource",
16 | "name": "currencies",
17 | "title": "Currencies",
18 | "path": "data/currencies.csv",
19 | "encoding": "UTF-8",
20 | "format": "csv",
21 | "mediatype": "text/csv",
22 | "licenses": [
23 | {
24 | "name": "CC0-1.0",
25 | "title": "Creative Commons CCZero 1.0",
26 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
27 | }
28 | ],
29 | "schema": {
30 | "fields": [
31 | {
32 | "name": "currency_alphabetic_code",
33 | "type": "string",
34 | "format": "default",
35 | "title": "Currency Alphabetic Code"
36 | },
37 | {
38 | "name": "currency",
39 | "type": "string",
40 | "format": "default"
41 | },
42 | {
43 | "name": "symbol",
44 | "type": "string",
45 | "format": "default"
46 | }
47 | ],
48 | "missingValues": [
49 | ""
50 | ]
51 | },
52 | "dialect": {
53 | "delimiter": ",",
54 | "quoteChar": "\"",
55 | "header": true,
56 | "doubleQuote": true,
57 | "lineTerminator": "\r\n",
58 | "skipInitialSpace": true,
59 | "caseSensitiveHeader": false
60 | },
61 | "primaryKeys": [
62 | "currency_alphabetic_code"
63 | ]
64 | },
65 | {
66 | "profile": "tabular-data-resource",
67 | "name": "countries-using-usd-and-gbp",
68 | "title": "Countries using USD and GBP currencies",
69 | "path": "data/countries-using-usd-and-gbp.csv",
70 | "encoding": "UTF-8",
71 | "format": "csv",
72 | "mediatype": "text/csv",
73 | "licenses": [
74 | {
75 | "name": "CC0-1.0",
76 | "title": "Creative Commons CCZero 1.0",
77 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
78 | }
79 | ],
80 | "schema": {
81 | "fields": [
82 | {
83 | "name": "name",
84 | "type": "string",
85 | "format": "default",
86 | "title": "Country Name"
87 | },
88 | {
89 | "name": "currency_alphabetic_code",
90 | "type": "string",
91 | "format": "default",
  92 |             "title": "Currency Alphabetic Code"
93 | }
94 | ],
95 | "missingValues": [
96 | ""
97 | ]
98 | },
99 | "dialect": {
100 | "delimiter": ",",
101 | "quoteChar": "\"",
102 | "header": true,
103 | "doubleQuote": true,
104 | "lineTerminator": "\r\n",
105 | "skipInitialSpace": true,
106 | "caseSensitiveHeader": false
107 | },
108 | "primaryKeys": [
109 | "name"
110 | ],
111 | "foreignKeys": [
112 | {
113 | "fields": [
114 | "currency_alphabetic_code"
115 | ],
116 | "reference": {
117 | "resource": "currencies",
118 | "fields": [
119 | "currency_alphabetic_code"
120 | ]
121 | }
122 | }
123 | ]
124 | }
125 | ]
126 | }
127 |
--------------------------------------------------------------------------------
/tests/data-package-examples/cpi-data-via-url/README.md:
--------------------------------------------------------------------------------
1 | Example data package that references its data via a URL
2 |
3 |
4 | Consumer price index (2010 = 100) by The World Bank.
5 |
6 | ## Data
7 |
8 | To learn more about this data, [view the metadata from The World Bank](http://databank.worldbank.org/data/reports.aspx?source=2&type=metadata&series=FP.CPI.TOTL)
9 |
10 | Used in the following Frictionless Data Guides:
11 | - [Joining Data](https://frictionlessdata.io/guides/joining-data-in-python/)
12 |
13 | ## Preparation
14 |
15 | This data was downloaded from The World Bank and a data package hand edited to describe the data. The data was validated against the table schema in the `datapackage.json` file using [goodtables.io](http://goodtables.io).
16 |
17 | The CPI column `description` in the `schema` conflicts with the dataset link. **Do not use the data for analysis.**
18 |
19 | ## License
20 |
21 | The World Bank: [Consumer price index (2010 = 100)](https://data.worldbank.org/indicator/FP.CPI.TOTL) is licensed under [CC-BY 4.0](https://datacatalog.worldbank.org/public-licenses#cc-by).
22 |
--------------------------------------------------------------------------------
/tests/data-package-examples/cpi-data-via-url/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cpi",
3 | "title": "Annual Consumer Price Index (CPI)",
4 | "description": "Annual Consumer Price Index (CPI) for most countries in the world. Reference year is 2005.",
5 | "profile": "tabular-data-package",
6 | "licenses": [{
7 | "name": "CC-BY-4.0",
8 | "title": "Creative Commons Attribution 4.0",
9 | "path": "https://creativecommons.org/licenses/by/4.0/"
10 | }],
11 | "keywords": ["CPI", "World", "Consumer Price Index", "Annual Data", "The World Bank"],
12 | "version": "2.0.0",
13 | "sources": [{
14 | "title": "The World Bank",
15 | "path": "http://data.worldbank.org/indicator/FP.CPI.TOTL"
16 | }],
17 | "resources": [{
18 | "path": "https://raw.githubusercontent.com/frictionlessdata/example-data-packages/master/cpi/data/cpi.csv",
19 | "name": "cpi",
20 | "profile": "tabular-data-resource",
21 | "schema": {
22 | "fields": [{
23 | "name": "Country Name",
24 | "type": "string"
25 | },
26 | {
27 | "name": "Country Code",
28 | "type": "string"
29 | },
30 | {
31 | "name": "Year",
32 | "type": "year"
33 | },
34 | {
35 | "name": "CPI",
36 | "description": "CPI (where 2005=100)",
37 | "type": "number"
38 | }
39 | ]
40 | }
41 | }]
42 | }
43 |
--------------------------------------------------------------------------------
/tests/data-package-examples/cpi/README.md:
--------------------------------------------------------------------------------
1 | Consumer price index (2010 = 100) by The World Bank.
2 |
3 | ## Data
4 |
5 | To learn more about this data, [view the metadata from The World Bank](http://databank.worldbank.org/data/reports.aspx?source=2&type=metadata&series=FP.CPI.TOTL)
6 |
7 | Used in the following Frictionless Data Guides:
8 | - [Joining Data](https://frictionlessdata.io/guides/joining-data-in-python/)
9 |
10 | ## Preparation
11 |
12 | This data was downloaded from The World Bank and a data package hand edited to describe the data. The data was validated against the table schema in the `datapackage.json` file using [goodtables.io](http://goodtables.io).
13 |
14 | The CPI column `description` in the `schema` conflicts with the dataset link. **Do not use the data for analysis.**
15 |
16 | ## License
17 |
18 | The World Bank: [Consumer price index (2010 = 100)](https://data.worldbank.org/indicator/FP.CPI.TOTL) is licensed under [CC-BY 4.0](https://datacatalog.worldbank.org/public-licenses#cc-by).
19 |
--------------------------------------------------------------------------------
/tests/data-package-examples/cpi/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cpi",
3 | "title": "Annual Consumer Price Index (CPI)",
4 | "description": "Annual Consumer Price Index (CPI) for most countries in the world. Reference year is 2005.",
5 | "profile": "tabular-data-package",
6 | "licenses": [{
7 | "name": "CC-BY-4.0",
8 | "title": "Creative Commons Attribution 4.0",
9 | "path": "https://creativecommons.org/licenses/by/4.0/"
10 | }],
11 | "keywords": ["CPI", "World", "Consumer Price Index", "Annual Data", "The World Bank"],
12 | "version": "2.0.0",
13 | "sources": [{
14 | "title": "The World Bank",
15 | "path": "http://data.worldbank.org/indicator/FP.CPI.TOTL"
16 | }],
17 | "resources": [{
18 | "path": "data/cpi.csv",
19 | "name": "cpi",
20 | "profile": "tabular-data-resource",
21 | "schema": {
22 | "fields": [{
23 | "name": "Country Name",
24 | "type": "string"
25 | },
26 | {
27 | "name": "Country Code",
28 | "type": "string"
29 | },
30 | {
31 | "name": "Year",
32 | "type": "year"
33 | },
34 | {
35 | "name": "CPI",
36 | "description": "CPI (where 2005=100)",
37 | "type": "number"
38 | }
39 | ]
40 | }
41 | }]
42 | }
43 |
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/README.md:
--------------------------------------------------------------------------------
1 | Multiple versions of the same file with different encodings.
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/data/currencies-gb2312.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/tests/data-package-examples/currencies-encoding-tests/data/currencies-gb2312.csv
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/data/currencies-iso-8859-1.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/tests/data-package-examples/currencies-encoding-tests/data/currencies-iso-8859-1.csv
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/data/currencies-iso-8859-2.csv:
--------------------------------------------------------------------------------
1 | currency_alphabetic_code,currency,symbol
2 | USD,US Dollar,$
3 | GBP,Pound Sterling,?
4 |
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/data/currencies-iso-8859-3.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/tests/data-package-examples/currencies-encoding-tests/data/currencies-iso-8859-3.csv
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/data/currencies-no-encoding-property.csv:
--------------------------------------------------------------------------------
1 | currency_alphabetic_code,currency,symbol
2 | USD,US Dollar,$
3 | GBP,Pound Sterling,£
4 |
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/data/currencies-utf-8.csv:
--------------------------------------------------------------------------------
1 | currency_alphabetic_code,currency,symbol
2 | USD,US Dollar,$
3 | GBP,Pound Sterling,£
4 |
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/data/currencies-windows-1252.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/tests/data-package-examples/currencies-encoding-tests/data/currencies-windows-1252.csv
--------------------------------------------------------------------------------
/tests/data-package-examples/currencies-encoding-tests/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "name": "currencies-encoding-tests",
4 | "title": "Countries and Currencies (Encoding Tests)",
5 | "version": "v0.1.0",
6 | "licenses": [
7 | {
8 | "name": "CC0-1.0",
9 | "title": "Creative Commons CCZero 1.0",
10 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
11 | }
12 | ],
13 | "resources": [
14 | {
15 | "profile": "tabular-data-resource",
16 | "name": "currencies-gb2312.csv",
17 | "title": "Currencies (gb2312)",
18 | "path": "data/currencies-gb2312.csv",
19 | "encoding": "gb2312",
20 | "format": "csv",
21 | "mediatype": "text/csv",
22 | "licenses": [
23 | {
24 | "name": "CC0-1.0",
25 | "title": "Creative Commons CCZero 1.0",
26 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
27 | }
28 | ],
29 | "schema": {
30 | "fields": [
31 | {
32 | "name": "currency_alphabetic_code",
33 | "type": "string",
34 | "format": "default",
35 | "title": "Currency Alphabetic Code"
36 | },
37 | {
38 | "name": "currency",
39 | "type": "string",
40 | "format": "default"
41 | },
42 | {
43 | "name": "symbol",
44 | "type": "string",
45 | "format": "default"
46 | }
47 | ],
48 | "missingValues": [
49 | ""
50 | ]
51 | },
52 | "dialect": {
53 | "delimiter": ",",
54 | "quoteChar": "\"",
55 | "header": true,
56 | "doubleQuote": true,
57 | "lineTerminator": "\r\n",
58 | "skipInitialSpace": true,
59 | "caseSensitiveHeader": false
60 | },
61 | "primaryKeys": [
62 | "currency_alphabetic_code"
63 | ]
64 | },
65 | {
66 | "profile": "tabular-data-resource",
67 | "name": "currencies-iso-8859-1.csv",
68 | "title": "Currencies (iso-8859-1)",
69 | "path": "data/currencies-iso-8859-1.csv",
70 | "encoding": "iso-8859-1",
71 | "format": "csv",
72 | "mediatype": "text/csv",
73 | "licenses": [
74 | {
75 | "name": "CC0-1.0",
76 | "title": "Creative Commons CCZero 1.0",
77 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
78 | }
79 | ],
80 | "schema": {
81 | "fields": [
82 | {
83 | "name": "currency_alphabetic_code",
84 | "type": "string",
85 | "format": "default",
86 | "title": "Currency Alphabetic Code"
87 | },
88 | {
89 | "name": "currency",
90 | "type": "string",
91 | "format": "default"
92 | },
93 | {
94 | "name": "symbol",
95 | "type": "string",
96 | "format": "default"
97 | }
98 | ],
99 | "missingValues": [
100 | ""
101 | ]
102 | },
103 | "dialect": {
104 | "delimiter": ",",
105 | "quoteChar": "\"",
106 | "header": true,
107 | "doubleQuote": true,
108 | "lineTerminator": "\r\n",
109 | "skipInitialSpace": true,
110 | "caseSensitiveHeader": false
111 | },
112 | "primaryKeys": [
113 | "currency_alphabetic_code"
114 | ]
115 | },
116 | {
117 | "profile": "tabular-data-resource",
118 | "name": "currencies-no-encoding-property.csv",
119 | "title": "Currencies (no-encoding-property)",
120 | "path": "data/currencies-no-encoding-property.csv",
121 | "format": "csv",
122 | "mediatype": "text/csv",
123 | "licenses": [
124 | {
125 | "name": "CC0-1.0",
126 | "title": "Creative Commons CCZero 1.0",
127 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
128 | }
129 | ],
130 | "schema": {
131 | "fields": [
132 | {
133 | "name": "currency_alphabetic_code",
134 | "type": "string",
135 | "format": "default",
136 | "title": "Currency Alphabetic Code"
137 | },
138 | {
139 | "name": "currency",
140 | "type": "string",
141 | "format": "default"
142 | },
143 | {
144 | "name": "symbol",
145 | "type": "string",
146 | "format": "default"
147 | }
148 | ],
149 | "missingValues": [
150 | ""
151 | ]
152 | },
153 | "dialect": {
154 | "delimiter": ",",
155 | "quoteChar": "\"",
156 | "header": true,
157 | "doubleQuote": true,
158 | "lineTerminator": "\r\n",
159 | "skipInitialSpace": true,
160 | "caseSensitiveHeader": false
161 | },
162 | "primaryKeys": [
163 | "currency_alphabetic_code"
164 | ]
165 | },
166 | {
167 | "profile": "tabular-data-resource",
168 | "name": "currencies-iso-8859-2.csv",
169 | "title": "Currencies (iso-8859-2) - '£' will appear as '?'",
170 | "path": "data/currencies-iso-8859-2.csv",
171 | "encoding": "iso-8859-2",
172 | "format": "csv",
173 | "mediatype": "text/csv",
174 | "licenses": [
175 | {
176 | "name": "CC0-1.0",
177 | "title": "Creative Commons CCZero 1.0",
178 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
179 | }
180 | ],
181 | "schema": {
182 | "fields": [
183 | {
184 | "name": "currency_alphabetic_code",
185 | "type": "string",
186 | "format": "default",
187 | "title": "Currency Alphabetic Code"
188 | },
189 | {
190 | "name": "currency",
191 | "type": "string",
192 | "format": "default"
193 | },
194 | {
195 | "name": "symbol",
196 | "type": "string",
197 | "format": "default"
198 | }
199 | ],
200 | "missingValues": [
201 | ""
202 | ]
203 | },
204 | "dialect": {
205 | "delimiter": ",",
206 | "quoteChar": "\"",
207 | "header": true,
208 | "doubleQuote": true,
209 | "lineTerminator": "\r\n",
210 | "skipInitialSpace": true,
211 | "caseSensitiveHeader": false
212 | },
213 | "primaryKeys": [
214 | "currency_alphabetic_code"
215 | ]
216 | },
217 | {
218 | "profile": "tabular-data-resource",
219 | "name": "currencies-iso-8859-3.csv",
220 | "title": "Currencies (iso-8859-3)",
221 | "path": "data/currencies-iso-8859-3.csv",
222 | "encoding": "iso-8859-3",
223 | "format": "csv",
224 | "mediatype": "text/csv",
225 | "licenses": [
226 | {
227 | "name": "CC0-1.0",
228 | "title": "Creative Commons CCZero 1.0",
229 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
230 | }
231 | ],
232 | "schema": {
233 | "fields": [
234 | {
235 | "name": "currency_alphabetic_code",
236 | "type": "string",
237 | "format": "default",
238 | "title": "Currency Alphabetic Code"
239 | },
240 | {
241 | "name": "currency",
242 | "type": "string",
243 | "format": "default"
244 | },
245 | {
246 | "name": "symbol",
247 | "type": "string",
248 | "format": "default"
249 | }
250 | ],
251 | "missingValues": [
252 | ""
253 | ]
254 | },
255 | "dialect": {
256 | "delimiter": ",",
257 | "quoteChar": "\"",
258 | "header": true,
259 | "doubleQuote": true,
260 | "lineTerminator": "\r\n",
261 | "skipInitialSpace": true,
262 | "caseSensitiveHeader": false
263 | },
264 | "primaryKeys": [
265 | "currency_alphabetic_code"
266 | ]
267 | },
268 | {
269 | "profile": "tabular-data-resource",
270 | "name": "currencies-utf-8.csv",
271 | "title": "Currencies (utf-8)",
272 | "path": "data/currencies-utf-8.csv",
273 | "encoding": "utf-8",
274 | "format": "csv",
275 | "mediatype": "text/csv",
276 | "licenses": [
277 | {
278 | "name": "CC0-1.0",
279 | "title": "Creative Commons CCZero 1.0",
280 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
281 | }
282 | ],
283 | "schema": {
284 | "fields": [
285 | {
286 | "name": "currency_alphabetic_code",
287 | "type": "string",
288 | "format": "default",
289 | "title": "Currency Alphabetic Code"
290 | },
291 | {
292 | "name": "currency",
293 | "type": "string",
294 | "format": "default"
295 | },
296 | {
297 | "name": "symbol",
298 | "type": "string",
299 | "format": "default"
300 | }
301 | ],
302 | "missingValues": [
303 | ""
304 | ]
305 | },
306 | "dialect": {
307 | "delimiter": ",",
308 | "quoteChar": "\"",
309 | "header": true,
310 | "doubleQuote": true,
311 | "lineTerminator": "\r\n",
312 | "skipInitialSpace": true,
313 | "caseSensitiveHeader": false
314 | },
315 | "primaryKeys": [
316 | "currency_alphabetic_code"
317 | ]
318 | },
319 | {
320 | "profile": "tabular-data-resource",
321 | "name": "currencies-windows-1252.csv",
322 | "title": "Currencies (windows-1252)",
323 | "path": "data/currencies-windows-1252.csv",
324 | "encoding": "windows-1252",
325 | "format": "csv",
326 | "mediatype": "text/csv",
327 | "licenses": [
328 | {
329 | "name": "CC0-1.0",
330 | "title": "Creative Commons CCZero 1.0",
331 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
332 | }
333 | ],
334 | "schema": {
335 | "fields": [
336 | {
337 | "name": "currency_alphabetic_code",
338 | "type": "string",
339 | "format": "default",
340 | "title": "Currency Alphabetic Code"
341 | },
342 | {
343 | "name": "currency",
344 | "type": "string",
345 | "format": "default"
346 | },
347 | {
348 | "name": "symbol",
349 | "type": "string",
350 | "format": "default"
351 | }
352 | ],
353 | "missingValues": [
354 | ""
355 | ]
356 | },
357 | "dialect": {
358 | "delimiter": ",",
359 | "quoteChar": "\"",
360 | "header": true,
361 | "doubleQuote": true,
362 | "lineTerminator": "\r\n",
363 | "skipInitialSpace": true,
364 | "caseSensitiveHeader": false
365 | },
366 | "primaryKeys": [
367 | "currency_alphabetic_code"
368 | ]
369 | }
370 | ]
371 | }
372 |
--------------------------------------------------------------------------------
/tests/data-package-examples/donation-codes-schema/README.md:
--------------------------------------------------------------------------------
 1 | A table schema to be referenced via a URL from data packages
2 |
3 | ## Known usage
4 |
 5 | This schema is referenced from donation-codes-via-url in this repository
6 |
--------------------------------------------------------------------------------
/tests/data-package-examples/donation-codes-schema/tableschema.json:
--------------------------------------------------------------------------------
1 | {
2 | "fields": [{
3 | "format": "default",
4 | "type": "string",
5 | "name": "donation code",
6 | "title": "Donation Code",
7 | "constraints": {
8 | "required": true,
9 | "unique": true
10 | }
11 | }, {
12 | "name": "description",
13 | "type": "string",
14 | "format": "default",
15 | "constraints": {
16 | "required": true,
17 | "unique": true
18 | },
19 | "title": "Donation Code Description"
20 | }],
21 | "missingValues": [""]
22 | }
23 |
--------------------------------------------------------------------------------
/tests/data-package-examples/donation-codes-via-url/README.md:
--------------------------------------------------------------------------------
1 | Data package that references its data, schema and dialect via URLs
2 |
--------------------------------------------------------------------------------
/tests/data-package-examples/donation-codes-via-url/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "name": "donation-codes",
4 | "title": "Donation Codes",
5 | "version": "0.1.0",
6 | "licenses": [{
7 | "name": "CC-BY-4.0",
8 | "title": "Creative Commons Attribution 4.0",
9 | "path": "https://creativecommons.org/licenses/by/4.0/"
10 | }],
11 | "resources": [{
12 | "profile": "tabular-data-resource",
13 | "path": "https://raw.githubusercontent.com/frictionlessdata/example-data-packages/master/donation-codes/data/donation-codes.csv",
14 | "name": "donation-codes",
15 | "title": "Donation Codes",
16 | "encoding": "UTF-8",
17 | "format": "csv",
18 | "mediatype": "text/csv",
19 | "schema": "https://raw.githubusercontent.com/frictionlessdata/example-data-packages/master/donation-codes-schema/tableschema.json",
20 | "dialect": "https://raw.githubusercontent.com/frictionlessdata/example-data-packages/master/resources/comma-separated-value-file-dialect.json"
21 | }]
22 | }
23 |
--------------------------------------------------------------------------------
/tests/data-package-examples/donation-codes/README.md:
--------------------------------------------------------------------------------
1 | Test data for foreign keys across data packages
--------------------------------------------------------------------------------
/tests/data-package-examples/donation-codes/data/donation-codes.csv:
--------------------------------------------------------------------------------
1 | donation code,description
2 | A,Cash
3 | B,Services
4 | C,Goods
5 |
--------------------------------------------------------------------------------
/tests/data-package-examples/donation-codes/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "resources": [{
4 | "profile": "tabular-data-resource",
5 | "encoding": "UTF-8",
6 | "schema": {
7 | "fields": [{
8 | "format": "default",
9 | "type": "string",
10 | "name": "donation code",
11 | "title": "Donation Code",
12 | "constraints": {
13 | "required": true,
14 | "unique": true
15 | }
16 | }, {
17 | "name": "description",
18 | "type": "string",
19 | "format": "default",
20 | "constraints": {
21 | "required": true,
22 | "unique": true
23 | },
24 | "title": "Donation Code Description"
25 | }],
26 | "missingValues": [""]
27 | },
28 | "format": "csv",
29 | "mediatype": "text/csv",
30 | "licenses": [{
31 | "name": "CC-BY-4.0",
32 | "title": "Creative Commons Attribution 4.0",
33 | "path": "https://creativecommons.org/licenses/by/4.0/"
34 | }],
35 | "name": "donation-codes",
36 | "title": "Donation Codes",
37 | "primaryKeys": ["donation code"],
38 | "path": "data/donation-codes.csv"
39 | }],
40 | "licenses": [{
41 | "name": "CC-BY-4.0",
42 | "title": "Creative Commons Attribution 4.0",
43 | "path": "https://creativecommons.org/licenses/by/4.0/"
44 | }],
45 | "name": "donation-codes",
46 | "title": "Donation Codes",
47 | "version": "0.1.0"
48 | }
49 |
--------------------------------------------------------------------------------
/tests/data-package-examples/donations/README.md:
--------------------------------------------------------------------------------
1 | Test data for foreign keys across data packages
--------------------------------------------------------------------------------
/tests/data-package-examples/donations/data/donations.csv:
--------------------------------------------------------------------------------
1 | id,amount $,code
2 | 1,99.00,A
3 | 2,100.00,B
4 | 3,12.34,A
5 | 4,1654.00,C
6 | 5,432.19,B
7 |
--------------------------------------------------------------------------------
/tests/data-package-examples/donations/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "resources": [{
4 | "profile": "tabular-data-resource",
5 | "encoding": "UTF-8",
6 | "schema": {
7 | "fields": [{
8 | "name": "id",
9 | "type": "integer",
10 | "format": "default",
11 | "constraints": {
12 | "required": true,
13 | "unique": true
14 | }
15 | }, {
16 | "name": "amount $",
17 | "type": "number",
18 | "format": "default",
19 | "constraints": {
20 | "required": true,
21 | "minimum": "0"
22 | },
23 | "title": "Donation Amount"
24 | }, {
25 | "name": "code",
26 | "type": "string",
27 | "format": "default",
28 | "title": "Donation Code",
29 | "constraints": {
30 | "required": true
31 | }
32 | }],
33 | "missingValues": [""]
34 | },
35 | "format": "csv",
36 | "mediatype": "text/csv",
37 | "licenses": [{
38 | "name": "CC-BY-4.0",
39 | "title": "Creative Commons Attribution 4.0",
40 | "path": "https://creativecommons.org/licenses/by/4.0/"
41 | }],
42 | "name": "donations",
43 | "title": "Donations",
44 | "primaryKeys": ["id"],
45 | "foreignKeys": [{
46 | "fields": ["code"],
47 | "reference": {
48 | "package": "https://raw.githubusercontent.com/frictionlessdata/example-data-packages/master/donation-codes/datapackage.json",
49 | "resource": "donation-codes",
50 | "fields": ["donation code"]
51 | }
52 | }],
53 | "path": "data/donations.csv"
54 | }],
55 | "licenses": [{
56 | "name": "CC-BY-4.0",
57 | "title": "Creative Commons Attribution 4.0",
58 | "path": "https://creativecommons.org/licenses/by/4.0/"
59 | }],
60 | "name": "donations",
61 | "title": "Donations",
62 | "version": "0.1.0"
63 | }
64 |
--------------------------------------------------------------------------------
/tests/data-package-examples/finance-vix/README.md:
--------------------------------------------------------------------------------
1 | Example Data Package providing Volatility Index (VIX) Historical Price Data by [Cboe](http://www.cboe.com/)
2 |
3 | ## Data
4 |
5 | The data source is http://www.cboe.com/micro/vix/historical.aspx. This data will not be updated. Do not use the data for analysis.
6 |
7 | ## Known usage
8 |
9 | This data package is used in the Frictionless Data [tutorials](https://frictionlessdata.io/docs/)
10 |
11 | ## License
12 |
13 | This data package is available under [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/).
14 |
15 | [Contact us](https://github.com/frictionlessdata/example-data-packages/issues/new) if you are aware of any copyright in the data.
16 |
--------------------------------------------------------------------------------
/tests/data-package-examples/finance-vix/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "name": "finance-vix",
4 | "title": "VIX - CBOE Volatility Index",
5 | "homepage": "http://www.cboe.com/micro/VIX/",
6 | "version": "0.0.1",
7 | "sources": [{
8 | "title": "CBOE VIX Page",
9 | "path": "http://www.cboe.com/micro/vix/historical.aspx"
10 | }],
11 | "resources": [{
12 | "profile": "tabular-data-resource",
13 | "name": "vix-daily",
14 | "path": "data/vix-daily.csv",
15 | "format": "csv",
16 | "mediatype": "text/csv",
17 | "schema": {
18 | "fields": [{
19 | "name": "Date",
20 | "type": "date",
21 | "format": "%m/%d/%Y"
22 | },
23 | {
24 | "name": "VIXOpen",
25 | "type": "number"
26 | },
27 | {
28 | "name": "VIXHigh",
29 | "type": "number"
30 | },
31 | {
32 | "name": "VIXLow",
33 | "type": "number"
34 | },
35 | {
36 | "name": "VIXClose",
37 | "type": "number"
38 | }
39 | ]
40 | }
41 | }]
42 | }
43 |
--------------------------------------------------------------------------------
/tests/data-package-examples/geo-countries/README.md:
--------------------------------------------------------------------------------
1 | Countries in GeoJSON format
2 |
3 | ## Data
4 |
5 | This GeoJSON file contains for each country:
6 |
7 | - the `geometry` of polygon that provides the rough outline of the country
8 | - an `ADMIN` property that is the common name of the country
 9 | - the `ISO_A3` property that is a 3-character code for the country, according to [ISO3166 standard](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3)
10 |
11 | The data is used in the Frictionless Data Guide, [Joining Data](https://frictionlessdata.io/guides/joining-data-in-python/).
12 |
13 | ## Preparation
14 |
15 | The provenance of this data is unknown.
16 |
17 | ## License
18 |
19 | In the absence of any known provenance, the data is licensed under [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/).
20 |
21 | If you are aware of any copyright, please [contact Open Knowledge International](https://okfn.org/contact/).
22 |
--------------------------------------------------------------------------------
/tests/data-package-examples/geo-countries/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "geo-countries",
3 | "title": "Country Polygons as GeoJSON (Example)",
4 | "description": "Example Data Package providing GeoJSON polygons for all the world's countries",
5 | "licenses": [{
6 | "id": "CC0-1.0",
7 | "title": "CC0 1.0",
8 | "url": "https://creativecommons.org/publicdomain/zero/1.0/"
9 | }],
10 | "resources": [
11 | {
12 | "name": "countries",
13 | "path": "data/countries.geojson",
14 | "format": "geojson",
15 | "mediatype": "application/json",
16 | "schema": {
17 | "fields": [
18 | {
19 | "name": "ADMIN",
20 | "description": "Common name of the country",
21 | "type": "string"
22 | },
23 | {
24 | "name": "ISO_A3",
  25 |         "description": "3-character code for the country, according to ISO3166 standard",
26 | "type": "string"
27 | }
28 | ]
29 | }
30 | }
31 | ]
32 | }
33 |
--------------------------------------------------------------------------------
/tests/data-package-examples/inflation/README.md:
--------------------------------------------------------------------------------
1 | Annual growth rate of the GDP implicit deflator and Inflation measured by the consumer price index
2 |
3 | ## Data
4 |
5 | Data source unknown. This data will not be updated. Do not use the data for analysis.
6 |
7 | ## Known usage
8 |
9 | No known usage
10 |
11 | ## License
12 |
13 | No known copyright. [Contact us](https://github.com/frictionlessdata/example-data-packages/issues/new) if you are aware of any copyright in the data.
14 |
15 | This data package is available under [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/).
16 |
--------------------------------------------------------------------------------
/tests/data-package-examples/inflation/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "name": "inflation",
4 | "title": "Annual inflation by GDP deflator and consumer prices",
5 | "description": "Annual growth rate of the GDP implicit deflator and Inflation measured by the consumer price index",
6 | "resources": [{
7 | "profile": "tabular-data-resource",
8 | "name": "inflation-gdp",
9 | "path": "data/inflation-gdp.csv",
10 | "schema": {
11 | "fields": [{
12 | "name": "Country",
13 | "type": "string"
14 | },
15 | {
16 | "name": "Country Code",
17 | "type": "string"
18 | },
19 | {
20 | "name": "Year",
21 | "type": "year"
22 | },
23 | {
24 | "name": "Inflation",
25 | "type": "number"
26 | }
27 | ]
28 | }
29 | },
30 | {
31 | "profile": "tabular-data-resource",
32 | "name": "inflation-consumer-gdp",
33 | "path": "data/inflation-consumer.csv",
34 | "schema": {
35 | "fields": [{
36 | "name": "Country",
37 | "type": "string"
38 | },
39 | {
40 | "name": "Country Code",
41 | "type": "string"
42 | },
43 | {
44 | "name": "Year",
45 | "type": "year"
46 | },
47 | {
48 | "name": "Inflation",
49 | "type": "number"
50 | }
51 | ]
52 | }
53 | }
54 | ]
55 | }
56 |
--------------------------------------------------------------------------------
/tests/data-package-examples/iso-639-1-language-codes/README.md:
--------------------------------------------------------------------------------
1 | ISO 639-1 assigns a two letter code to each language.
2 |
3 | ## Data
4 |
5 | Data derived from https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes with the columns for non-ISO-639-1 standards dropped.
6 |
7 | Data sourced on 2018-02-24. This data will not be updated. Do not use the data for analysis.
8 |
9 | This data package implements the [Language support](https://frictionlessdata.io/specs/patterns/#language-support) pattern. Some properties in the [data package](datapackage.json) have been given both English and Spanish values.
10 |
11 | This pattern is not implemented in data package validation tools. The CSV file has been validated using https://try.goodtables.io
12 |
13 | ## License
14 |
15 | Data derived from [List of ISO 639-1 codes](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) by [Wikipedia](https://wikimediafoundation.org/wiki/Home) is licensed under [Creative Commons Attribution-ShareAlike License](http://creativecommons.org/licenses/by-sa/3.0/)
16 |
--------------------------------------------------------------------------------
/tests/data-package-examples/iso-639-1-language-codes/data/ISO-639-1-codes.csv:
--------------------------------------------------------------------------------
1 | ISO language name,Native name (endonym),639-1,Notes
2 | Abkhazian,"аҧсуа бызшәа, аҧсшәа",ab,also known as Abkhaz
3 | Afar,Afaraf,aa,
4 | Afrikaans,Afrikaans,af,
5 | Akan,Akan,ak,"macrolanguage, Twi is [tw/twi], Fanti is [fat]"
6 | Albanian,Shqip,sq,"macrolanguage, ""Albanian Phylozone"" in 639-6"
7 | Amharic,አማርኛ,am,
8 | Arabic,العربية,ar,"macrolanguage, Standard Arabic is [arb]"
9 | Aragonese,aragonés,an,
10 | Armenian,Հայերեն,hy,
11 | Assamese,অসমীয়া,as,
12 | Avaric,"авар мацӀ, магӀарул мацӀ",av,also known as Avar
13 | Avestan,avesta,ae,ancient
14 | Aymara,aymar aru,ay,macrolanguage
15 | Azerbaijani,azərbaycan dili,az,macrolanguage
16 | Bambara,bamanankan,bm,
17 | Bashkir,башҡорт теле,ba,
18 | Basque,"euskara, euskera",eu,
19 | Belarusian,беларуская мова,be,
20 | Bengali,বাংলা,bn,also known as Bangla
21 | Bihari languages,भोजपुरी,bh,"collective language code for Bhojpuri, Magahi, and Maithili"
22 | Bislama,Bislama,bi,"Language formed from English and Ni-Vanuatu, with some French influence."
23 | Bosnian,bosanski jezik,bs,
24 | Breton,brezhoneg,br,
25 | Bulgarian,български език,bg,
26 | Burmese,ဗမာစာ,my,
27 | "Catalan, Valencian","català, valencià",ca,
28 | Central Khmer,"ខ្មែរ, ខេមរភាសា, ភាសាខ្មែរ",km,also known as Khmer or Cambodian
29 | Chamorro,Chamoru,ch,
30 | Chechen,нохчийн мотт,ce,
31 | "Chichewa, Chewa, Nyanja","chiCheŵa, chinyanja",ny,
32 | Chinese,"中文 (Zhōngwén), 汉语, 漢語",zh,macrolanguage
33 | "Church Slavic, Church Slavonic, Old Church Slavonic, Old Slavonic, Old Bulgarian",ѩзыкъ словѣньскъ,cu,"ancient, in use by Orthodox Church"
34 | Chuvash,чӑваш чӗлхи,cv,
35 | Cornish,Kernewek,kw,
36 | Corsican,"corsu, lingua corsa",co,
37 | Cree,ᓀᐦᐃᔭᐍᐏᐣ,cr,macrolanguage
38 | Croatian,hrvatski jezik,hr,
39 | Czech,"čeština, český jazyk",cs,
40 | Danish,dansk,da,
41 | "Divehi, Dhivehi, Maldivian",ދިވެހި,dv,
42 | "Dutch, Flemish","Nederlands, Vlaams",nl,
43 | Dzongkha,རྫོང་ཁ,dz,
44 | English,English,en,
45 | Esperanto,Esperanto,eo,"constructed, initiated from L.L. Zamenhof, 1887"
46 | Estonian,"eesti, eesti keel",et,macrolanguage
47 | Ewe,Eʋegbe,ee,
48 | Faroese,føroyskt,fo,
49 | Fijian,vosa Vakaviti,fj,
50 | Finnish,"suomi, suomen kieli",fi,
51 | French,"français, langue française",fr,
52 | Fulah,"Fulfulde, Pulaar, Pular",ff,"macrolanguage, also known as Fula"
53 | "Gaelic, Scottish Gaelic",Gàidhlig,gd,
54 | Galician,Galego,gl,
55 | Ganda,Luganda,lg,
56 | Georgian,ქართული,ka,
57 | German,Deutsch,de,
58 | Greek (modern),ελληνικά,el,
59 | Guaraní,Avañe'ẽ,gn,macrolanguage
60 | Gujarati,ગુજરાતી,gu,
61 | "Haitian, Haitian Creole",Kreyòl ayisyen,ht,
62 | Hausa,(Hausa) هَوُسَ,ha,
63 | Hebrew (modern),עברית,he,
64 | Herero,Otjiherero,hz,
65 | Hindi,"हिन्दी, हिंदी",hi,
66 | Hiri Motu,Hiri Motu,ho,
67 | Hungarian,magyar,hu,
68 | Icelandic,Íslenska,is,
69 | Ido,Ido,io,"constructed by De Beaufront, 1907, as variation of Esperanto"
70 | Igbo,Asụsụ Igbo,ig,
71 | Indonesian,Bahasa Indonesia,id,Covered by macrolanguage [ms/msa]
72 | Interlingua,Interlingua,ia,constructed by International Auxiliary Language Association
73 | Interlingue,Originally called Occidental; then Interlingue after WWII,ie,"constructed by Edgar de Wahl, first published in 1922"
74 | Inuktitut,ᐃᓄᒃᑎᑐᑦ,iu,macrolanguage
75 | Inupiaq,"Iñupiaq, Iñupiatun",ik,macrolanguage
76 | Irish,Gaeilge,ga,
77 | Italian,Italiano,it,
78 | Japanese,日本語 (にほんご),ja,
79 | Javanese,"ꦧꦱꦗꦮ, Basa Jawa",jv,
80 | "Kalaallisut, Greenlandic","kalaallisut, kalaallit oqaasii",kl,
81 | Kannada,ಕನ್ನಡ,kn,
82 | Kanuri,Kanuri,kr,macrolanguage
83 | Kashmiri,"कश्मीरी, كشميري",ks,
84 | Kazakh,қазақ тілі,kk,
85 | "Kikuyu, Gikuyu",Gĩkũyũ,ki,
86 | Kinyarwanda,Ikinyarwanda,rw,
87 | "Kirghiz, Kyrgyz","Кыргызча, Кыргыз тили",ky,
88 | Komi,коми кыв,kv,macrolanguage
89 | Kongo,Kikongo,kg,macrolanguage
90 | Korean,한국어,ko,
91 | "Kuanyama, Kwanyama",Kuanyama,kj,
92 | Kurdish,"Kurdî, كوردی",ku,macrolanguage
93 | Lao,ພາສາລາວ,lo,
94 | Latin,"latine, lingua latina",la,ancient
95 | Latvian,Latviešu Valoda,lv,macrolanguage
96 | "Limburgan, Limburger, Limburgish",Limburgs,li,
97 | Lingala,Lingála,ln,
98 | Lithuanian,lietuvių kalba,lt,
99 | Luba-Katanga,Kiluba,lu,also known as Luba-Shaba
100 | "Luxembourgish, Letzeburgesch",Lëtzebuergesch,lb,
101 | Macedonian,македонски јазик,mk,
102 | Malagasy,fiteny malagasy,mg,macrolanguage
103 | Malay,"Bahasa Melayu, بهاس ملايو",ms,"macrolanguage, Standard Malay is [zsm], Indonesian is [id/ind]"
104 | Malayalam,മലയാളം,ml,
105 | Maltese,Malti,mt,
106 | Manx,"Gaelg, Gailck",gv,
107 | Maori,te reo Māori,mi,also known as Māori
108 | Marathi,मराठी,mr,also known as Marāṭhī
109 | Marshallese,Kajin M̧ajeļ,mh,
110 | Mongolian,Монгол хэл,mn,macrolanguage
111 | Nauru,Dorerin Naoero,na,also known as Nauruan
112 | "Navajo, Navaho",Diné bizaad,nv,
113 | Ndonga,Owambo,ng,
114 | Nepali,नेपाली,ne,
115 | North Ndebele,isiNdebele,nd,also known as Northern Ndebele
116 | Northern Sami,Davvisámegiella,se,
117 | Norwegian,Norsk,no,"macrolanguage, Bokmål is [nb/nob], Nynorsk is [nn/nno]"
118 | Norwegian Bokmål,Norsk Bokmål,nb,Covered by macrolanguage [no/nor]
119 | Norwegian Nynorsk,Norsk Nynorsk,nn,Covered by macrolanguage [no/nor]
120 | Occitan,"occitan, lenga d'òc",oc,
121 | Ojibwa,ᐊᓂᔑᓈᐯᒧᐎᓐ,oj,"macrolanguage, also known as Ojibwe"
122 | Oriya,ଓଡ଼ିଆ,or,also known as Odia
123 | Oromo,Afaan Oromoo,om,macrolanguage
124 | "Ossetian, Ossetic",ирон æвзаг,os,
125 | Pali,पाऴि,pi,"ancient, also known as Pāli"
126 | "Panjabi, Punjabi",ਪੰਜਾਬੀ,pa,
127 | "Pashto, Pushto",پښتو,ps,macrolanguage
128 | Persian,فارسی,fa,"macrolanguage, also known as Farsi"
129 | Polabian,"wenske rec, Wenske",pox,
130 | Polish,"język polski, Polszczyzna",pl,
131 | Portuguese,Português,pt,
132 | Quechua,"Runa Simi, Kichwa",qu,macrolanguage
133 | "Romanian, Moldavian, Moldovan",Română,ro,"The identifiers mo and mol are deprecated, leaving ro and ron (639-2/T) and rum (639-2/B) the current language identifiers to be used for the variant of the Romanian language also known as Moldavian and Moldovan in English and moldave in French. The identifiers mo and mol will not be assigned to different items, and recordings using these identifiers will not be invalid."
134 | Romansh,Rumantsch Grischun,rm,
135 | Rundi,Ikirundi,rn,also known as Kirundi
136 | Russian,русский,ru,
137 | Samoan,gagana fa'a Samoa,sm,
138 | Sango,yângâ tî sängö,sg,
139 | Sanskrit,संस्कृतम्,sa,"ancient, still spoken, also known as Saṃskṛta"
140 | Sardinian,sardu,sc,macrolanguage
141 | Serbian,српски језик,sr,The ISO 639-2/T code srp deprecated the ISO 639-2/B code scc[1]
142 | Shona,chiShona,sn,
143 | "Sichuan Yi, Nuosu",ꆈꌠ꒿ Nuosuhxop,ii,Standard form of Yi languages
144 | Sindhi,"सिन्धी, سنڌي، سندھی",sd,
145 | "Sinhala, Sinhalese",සිංහල,si,
146 | Slovak,"Slovenčina, Slovenský Jazyk",sk,
147 | Slovenian,"Slovenski Jezik, Slovenščina",sl,also known as Slovene
148 | Somali,"Soomaaliga, af Soomaali",so,
149 | South Ndebele,isiNdebele,nr,also known as Southern Ndebele
150 | Southern Sotho,Sesotho,st,
151 | "Spanish, Castilian",Español,es,
152 | Sundanese,Basa Sunda,su,
153 | Swahili,Kiswahili,sw,macrolanguage
154 | Swati,SiSwati,ss,also known as Swazi
155 | Swedish,Svenska,sv,
156 | Tagalog,Wikang Tagalog,tl,Note: Filipino (Pilipino) has the code [fil]
157 | Tahitian,Reo Tahiti,ty,One of the Reo Mā`ohi (languages of French Polynesia)
158 | Tajik,"тоҷикӣ, toçikī, تاجیکی",tg,
159 | Tamil,தமிழ்,ta,
160 | Tatar,"татар теле, tatar tele",tt,
161 | Telugu,తెలుగు,te,
162 | Thai,ไทย,th,
163 | Tibetan,བོད་ཡིག,bo,also known as Standard Tibetan
164 | Tigrinya,ትግርኛ,ti,
165 | Tonga (Tonga Islands),Faka Tonga,to,
166 | Tsonga,Xitsonga,ts,
167 | Tswana,Setswana,tn,
168 | Turkish,Türkçe,tr,
169 | Turkmen,"Türkmen, Түркмен",tk,
170 | Twi,Twi,tw,Covered by macrolanguage [ak/aka]
171 | "Uighur, Uyghur","ئۇيغۇرچە, Uyghurche",ug,
172 | Ukrainian,Українська,uk,
173 | Urdu,اردو,ur,
174 | Uzbek,"Oʻzbek, Ўзбек, أۇزبېك",uz,macrolanguage
175 | Venda,Tshivenḓa,ve,
176 | Vietnamese,Tiếng Việt,vi,
177 | Volapük,Volapük,vo,constructed
178 | Walloon,Walon,wa,
179 | Welsh,Cymraeg,cy,
180 | Western Frisian,Frysk,fy,also known as Frisian
181 | Wolof,Wollof,wo,
182 | Xhosa,isiXhosa,xh,
183 | Yiddish,ייִדיש,yi,macrolanguage
184 | Yoruba,Yorùbá,yo,
185 | "Zhuang, Chuang","Saɯ cueŋƅ, Saw cuengh",za,macrolanguage
186 | Zulu,isiZulu,zu,
187 |
--------------------------------------------------------------------------------
/tests/data-package-examples/iso-639-1-language-codes/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "name": "iso-639-1-language-codes",
4 | "languages": ["en", "es"],
5 | "title": {
6 | "": "ISO 639-1 Language Codes",
7 | "es": "ISO 639-1 Códigos de idioma"
8 | },
9 | "description": {
10 | "": "ISO 639-1 two-letter language codes",
11 | "es": "ISO 639-1 códigos de idioma de dos letras"
12 | },
13 | "version": "0.1.0",
14 | "keywords": {
15 | "": "language",
16 | "es": "idioma"
17 | },
18 | "licenses": [{
19 | "title": {
20 | "": "Creative Commons Attribution Share-Alike 3.0",
21 | "es": "Reconocimiento-CompartirIgual 3.0 España"
22 | },
23 | "path": {
24 | "": "https://creativecommons.org/licenses/by-sa/3.0/",
25 | "es": "https://creativecommons.org/licenses/by-sa/3.0/es/"
26 | }
27 | }],
28 | "contributors": [{
29 | "title": "Joe Bloggs",
30 | "email": "joe@bloggs.com",
31 | "path": "http://www.bloggs.com",
32 | "role": {
33 | "": "author",
34 | "es": "autor"
35 | },
36 | "organization": {
37 | "": "International Organization for Standardization",
38 | "es": "Organización Internacional para la Estandarización"
39 | }
40 | }],
41 | "resources": [{
42 | "profile": "tabular-data-resource",
43 | "path": "data/ISO-639-1-codes.csv",
44 | "name": "iso-639-1-codes",
45 | "title": {
46 | "": "ISO 639-1 language codes",
47 | "es": "ISO 639-1 códigos de idioma"
48 | },
49 | "description": {
50 | "": "ISO 639-1: two-letter language codes",
51 | "es": "ISO 639-1 códigos de idioma de dos letras"
52 | },
53 | "encoding": "UTF-8",
54 | "format": "csv",
55 | "mediatype": "text/csv",
56 | "sources": [{
57 | "title": "List of ISO 639-1 codes",
58 | "path": "https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes"
59 | }],
60 | "schema": {
61 | "fields": [{
62 | "name": "ISO language name",
63 | "title": {
64 | "": "ISO language name",
65 | "es": "ISO Nombre del lenguaje"
66 | },
67 | "type": "string",
68 | "format": "default",
69 | "constraints": {
70 | "required": true
71 | }
72 | },
73 | {
74 | "name": "Native name (endonym)",
75 | "title": {
76 | "": "Native name (endonym)",
77 | "es": "Nombre nativo (endónimo)"
78 | },
79 | "type": "string",
80 | "format": "default"
81 | },
82 | {
83 | "name": "639-1",
84 | "type": "string",
85 | "format": "default"
86 | },
87 | {
88 | "name": "Notes",
89 | "title": {
90 | "": "Notes",
91 | "es": "Notas"
92 | },
93 | "type": "string",
94 | "format": "default"
95 | }
96 | ],
97 | "missingValues": [
98 | ""
99 | ],
100 | "primaryKeys": [
101 | "639-1"
102 | ]
103 | },
104 | "dialect": {
105 | "delimiter": ",",
106 | "quoteChar": "\"",
107 | "header": true,
108 | "doubleQuote": true,
109 | "lineTerminator": "\r\n",
110 | "skipInitialSpace": true,
111 | "caseSensitiveHeader": false
112 | }
113 | }]
114 | }
115 |
--------------------------------------------------------------------------------
/tests/data-package-examples/open-data-day-tweets-2018/README.md:
--------------------------------------------------------------------------------
1 | # Open Data Day 2018 Tweets
2 |
3 | [The 7th International Open Data Day](http://opendataday.org) was marked on March 3, 2018. On the day, people used the [#opendataday](https://twitter.com/hashtag/OpenDataDay) and [#odd18](https://twitter.com/hashtag/ODD18) hashtags to share insights from over 400 community events.
4 |
5 | ## Data
6 |
7 | The data was obtained on March 7, 2018 from two hashtags on Twitter: [#opendataday](https://twitter.com/hashtag/OpenDataDay) and [#odd18](https://twitter.com/hashtag/ODD18).
8 |
9 | This data package contains a [stripped-down version](data/subsetofopendatadaytweets.csv) of the tweets data, in line with [Twitter's Developer Policy](https://developer.twitter.com/en/developer-terms/policy.html). Other resources included herein:
10 | - a [datapackage.json](datapackage.json) file with schema and metadata
11 | - an [R script](scripts/opendataday.R) used to mine Open Data Day tweets via the Twitter Search API
12 | - a [simple map](docs/geotagged_tweets.png) generated from the analysis of tweets with geotagged information
13 |
14 | ## Preparation
15 |
16 | To obtain / update the data, you'll need
17 |
18 | - R v3.4.3 ([installation instructions](https://www.r-project.org))
19 | - Twitter API and access tokens from [http://apps.twitter.com](http://apps.twitter.com)
20 |
21 | Run [scripts/opendataday.R](scripts/opendataday.R) to obtain your own copy of the data and read the blog post detailing the data collection, analysis and publishing process for this data package [here](http://okfnlabs.org/blog/2018/03/08/open-data-day-tweets.html).
22 |
23 | ## License
24 |
25 | The data is licensed under [Twitter's Developer Policy](https://developer.twitter.com/en/developer-terms/policy.html). Everything else is licensed under the [CC0 Public Domain Waiver](https://creativecommons.org/publicdomain/zero/1.0/).
26 |
--------------------------------------------------------------------------------
/tests/data-package-examples/open-data-day-tweets-2018/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "resources": [
4 | {
5 | "name": "opendataday-tweets",
6 | "path": "data/subsetofopendatadaytweets.csv",
7 | "schema": {
8 | "fields": [
9 | {
10 | "name": "tweetid",
11 | "type": "integer",
12 | "format": "default",
13 | "description": "tweet id"
14 | },
15 | {
16 | "name": "retweetCount",
17 | "type": "integer",
18 | "format": "default",
19 | "description": "number of retweets",
20 | "constraints": {
21 | "minimum": 0
22 | }
23 | }
24 | ]
25 | },
26 | "profile": "tabular-data-resource"
27 | }
28 | ],
29 | "keywords": [
30 | "open data day",
31 | "odd18"
32 | ],
33 | "name": "opendataday-tweets-2018",
34 | "title": "Open Data Day Tweets",
35 | "description": "A stripped-down version of open data day data mined from Twitter in March 2018. Data Brevity is as a result of Twitter's Developer Policy.",
36 | "homepage": "https://okfnlabs.org/blog/2018/03/08/open-data-day-tweets.html",
37 | "version": "1.0.0",
38 | "licenses": [
39 | {
40 | "title": "Twitter Developer Agreement",
41 | "path": "https://developer.twitter.com/en/developer-terms/agreement"
42 | }
43 | ],
44 | "contributors": [
45 | {
46 | "title": "Serah Rono",
47 | "path": "https://twitter.com/serahrono",
48 | "role": "author"
49 | }
50 | ]
51 | }
52 |
--------------------------------------------------------------------------------
/tests/data-package-examples/open-data-day-tweets-2018/docs/geotagged_tweets.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/tests/data-package-examples/open-data-day-tweets-2018/docs/geotagged_tweets.png
--------------------------------------------------------------------------------
/tests/data-package-examples/open-data-day-tweets-2018/scripts/opendataday.R:
--------------------------------------------------------------------------------
1 | # Mine Open Data Day 2018 tweets via the Twitter Search API, analyse them,
2 | 
3 | install.packages("twitteR")  # package name must be a quoted string
4 | library(twitteR)
5 | 
6 | # Set up Twitter API and Access Tokens. Create this in dev.twitter.com
7 | 
8 | api_key <- "YOUR_API_KEY"
9 | api_secret <- "YOUR_API_SECRET"
10 | access_token <- "YOUR_ACCESS_TOKEN"
11 | access_secret <- "YOUR_ACCESS_SECRET"
12 | 
13 | setup_twitter_oauth(api_key, api_secret, access_token, access_secret)
14 | 
15 | # read tweets from the two official open data day hashtags, #opendataday and #odd18
16 | 
17 | opendataday <- searchTwitteR("#opendataday", n = 18000)
18 | odd18 <- searchTwitteR("#odd18", n = 18000)
19 | 
20 | # view list of tweets mined from the #opendataday and #odd18 hashtags
21 | opendataday
22 | odd18
23 | 
24 | # convert the mined list of tweets from either hashtag to a dataframe
25 | 
26 | opendataday_df <- twListToDF(opendataday)
27 | odd18_df <- twListToDF(odd18)
28 | 
29 | # combine dataframes from the two hashtags using the row bind function
30 | 
31 | alltweets_df <- rbind(opendataday_df, odd18_df)
32 | 
33 | # write your scraped data to a csv file
34 | 
35 | write.csv(opendataday_df, file="data/opendataday_raw.csv")  # was tweets_opendataday_df (undefined)
36 | write.csv(odd18_df, file="data/odd18_raw.csv")  # was tweets_odd18_df (undefined)
37 | write.csv(alltweets_df, file="data/allopendatadaytweets.csv")
38 | 
39 | # analysis 1: how many open data day attendees tweeted from android phones?
40 | 
41 | 
42 | library(dplyr)
43 | 
44 | android_tweets <- filter(alltweets_df, grepl("Twitter for Android", statusSource))
45 | tally(android_tweets)
46 | 
47 | 
48 | # analysis 2: which tweets mention GitHub resources?
49 | 
50 | # install and load dplyr library for data analysis
51 | 
52 | install.packages("dplyr")  # package name must be a quoted string
53 | library(dplyr)
54 | 
55 | # filter out tweets with the github url in them, then read tweets for context
56 | 
57 | github_resources <- filter(alltweets_df, grepl("github.com", text))  # search the tweet text, not statusSource (the client app)
58 | tally(github_resources)
59 | 
60 | # analysis 3: where in the world did people send open data day tweets from?
61 | 
62 | # convert latitude and longitude variables to numeric data types (char by default)
63 | 
64 | alltweets_df$latitude <- as.numeric(alltweets_df$latitude)
65 | alltweets_df$longitude <- as.numeric(alltweets_df$longitude)
66 | 
67 | # install and load the leaflet library
68 | 
69 | install.packages("leaflet")  # package name must be a quoted string
70 | library(leaflet)
71 | 
72 | # create simple map using the leaflet library to show where tweets are from
73 | 
74 | map <- leaflet() %>%
75 |   addTiles() %>%
76 |   addCircles(data = alltweets_df, lat = ~ latitude, lng = ~ longitude)
77 | 
78 | # view map
79 | 
80 | map
81 | 
82 | # before sharing, strip down tweets data to comply with Twitter's terms of use. Final dataset has tweet IDs and retweet count
83 | 
84 | notretweets_df <- dplyr::filter(alltweets_df, grepl("FALSE", isRetweet))
85 | subsetoftweets <- select(notretweets_df, tweetid = id, retweetCount)  # rename to match the datapackage schema field "tweetid"
86 | write.csv(subsetoftweets, file="data/subsetofopendatadaytweets.csv", row.names = FALSE)  # no index column; schema has exactly two fields
87 | 
88 | # install and load datapackage-r library
89 | 
90 | install.packages("devtools")
91 | devtools::install_github("frictionlessdata/datapackage-r")
92 | library(datapackage.r)
93 | 
94 | # load CSV file and infer schema and create a datapackage. Alternatively, use the UI at create.frictionlessdata.io
95 | 
96 | filepath = 'data/subsetofopendatadaytweets.csv'  # was 'data/subsetoftweets.csv', which is never written
97 | schema = tableschema.r::infer(filepath)
98 | 
--------------------------------------------------------------------------------
/tests/data-package-examples/periodic-table/README.md:
--------------------------------------------------------------------------------
1 | Periodic Table
2 |
3 | ## Data
4 |
5 | Learn more about the [Periodic Table](https://en.wikipedia.org/wiki/Periodic_table)
6 |
7 | Used in the following Frictionless Data Guides:
8 | - [Using Data Packages in Python]( https://frictionlessdata.io/guides/using-data-packages-in-python/)
9 | - [Creating Data Packages in Python](https://frictionlessdata.io/guides/creating-tabular-data-packages-in-python/)
10 |
11 | ## Preparation
12 |
13 | This `README.md` has been created retrospectively. The preparation steps and provenance are unknown.
14 |
15 | *Do not use this data for analysis.*
16 |
17 | ## License
18 |
19 | This data package is available under [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/).
20 |
--------------------------------------------------------------------------------
/tests/data-package-examples/periodic-table/data.csv:
--------------------------------------------------------------------------------
1 | atomic number,symbol,name,atomic mass,metal or nonmetal?
2 | 1,H,Hydrogen,1.00794,nonmetal
3 | 2,He,Helium,4.002602,noble gas
4 | 3,Li,Lithium,6.941,alkali metal
5 | 4,Be,Beryllium,9.012182,alkaline earth metal
6 | 5,B,Boron,10.811,metalloid
7 | 6,C,Carbon,12.0107,nonmetal
8 | 7,N,Nitrogen,14.0067,nonmetal
9 | 8,O,Oxygen,15.9994,nonmetal
10 | 9,F,Fluorine,18.9984032,halogen
11 | 10,Ne,Neon,20.1797,noble gas
12 | 11,Na,Sodium,22.98976928,alkali metal
13 | 12,Mg,Magnesium,24.305,alkaline earth metal
14 | 13,Al,Aluminum,26.9815386,metal
15 | 14,Si,Silicon,28.0855,metalloid
16 | 15,P,Phosphorus,30.973762,nonmetal
17 | 16,S,Sulfur,32.065,nonmetal
18 | 17,Cl,Chlorine,35.453,halogen
19 | 18,Ar,Argon,39.948,noble gas
20 | 19,K,Potassium,39.0983,alkali metal
21 | 20,Ca,Calcium,40.078,alkaline earth metal
22 | 21,Sc,Scandium,44.955912,transition metal
23 | 22,Ti,Titanium,47.867,transition metal
24 | 23,V,Vanadium,50.9415,transition metal
25 | 24,Cr,Chromium,51.9961,transition metal
26 | 25,Mn,Manganese,54.938045,transition metal
27 | 26,Fe,Iron,55.845,transition metal
28 | 27,Co,Cobalt,58.933195,transition metal
29 | 28,Ni,Nickel,58.6934,transition metal
30 | 29,Cu,Copper,63.546,transition metal
31 | 30,Zn,Zinc,65.38,transition metal
32 | 31,Ga,Gallium,69.723,metal
33 | 32,Ge,Germanium,72.64,metalloid
34 | 33,As,Arsenic,74.9216,metalloid
35 | 34,Se,Selenium,78.96,nonmetal
36 | 35,Br,Bromine,79.904,halogen
37 | 36,Kr,Krypton,83.798,noble gas
38 | 37,Rb,Rubidium,85.4678,alkali metal
39 | 38,Sr,Strontium,87.62,alkaline earth metal
40 | 39,Y,Yttrium,88.90585,transition metal
41 | 40,Zr,Zirconium,91.224,transition metal
42 | 41,Nb,Niobium,92.90638,transition metal
43 | 42,Mo,Molybdenum,95.96,transition metal
44 | 43,Tc,Technetium,98,transition metal
45 | 44,Ru,Ruthenium,101.07,transition metal
46 | 45,Rh,Rhodium,102.9055,transition metal
47 | 46,Pd,Palladium,106.42,transition metal
48 | 47,Ag,Silver,107.8682,transition metal
49 | 48,Cd,Cadmium,112.411,transition metal
50 | 49,In,Indium,114.818,metal
51 | 50,Sn,Tin,118.71,metal
52 | 51,Sb,Antimony,121.76,metalloid
53 | 52,Te,Tellurium,127.6,metalloid
54 | 53,I,Iodine,126.90447,halogen
55 | 54,Xe,Xenon,131.293,noble gas
56 | 55,Cs,Cesium,132.9054519,alkali metal
57 | 56,Ba,Barium,137.327,alkaline earth metal
58 | 57,La,Lanthanum,138.90547,lanthanoid
59 | 58,Ce,Cerium,140.116,lanthanoid
60 | 59,Pr,Praseodymium,140.90765,lanthanoid
61 | 60,Nd,Neodymium,144.242,lanthanoid
62 | 61,Pm,Promethium,145,lanthanoid
63 | 62,Sm,Samarium,150.36,lanthanoid
64 | 63,Eu,Europium,151.964,lanthanoid
65 | 64,Gd,Gadolinium,157.25,lanthanoid
66 | 65,Tb,Terbium,158.92535,lanthanoid
67 | 66,Dy,Dysprosium,162.5,lanthanoid
68 | 67,Ho,Holmium,164.93032,lanthanoid
69 | 68,Er,Erbium,167.259,lanthanoid
70 | 69,Tm,Thulium,168.93421,lanthanoid
71 | 70,Yb,Ytterbium,173.054,lanthanoid
72 | 71,Lu,Lutetium,174.9668,transition metal
73 | 72,Hf,Hafnium,178.49,transition metal
74 | 73,Ta,Tantalum,180.94788,transition metal
75 | 74,W,Tungsten,183.84,transition metal
76 | 75,Re,Rhenium,186.207,transition metal
77 | 76,Os,Osmium,190.23,transition metal
78 | 77,Ir,Iridium,192.217,transition metal
79 | 78,Pt,Platinum,195.084,transition metal
80 | 79,Au,Gold,196.966569,transition metal
81 | 80,Hg,Mercury,200.59,transition metal
82 | 81,Tl,Thallium,204.3833,metal
83 | 82,Pb,Lead,207.2,metal
84 | 83,Bi,Bismuth,208.9804,metal
85 | 84,Po,Polonium,209,metalloid
86 | 85,At,Astatine,210,halogen
87 | 86,Rn,Radon,222,noble gas
88 | 87,Fr,Francium,223,alkali metal
89 | 88,Ra,Radium,226,alkaline earth metal
90 | 89,Ac,Actinium,227,actinoid
91 | 90,Th,Thorium,232.03806,actinoid
92 | 91,Pa,Protactinium,231.03588,actinoid
93 | 92,U,Uranium,238.02891,actinoid
94 | 93,Np,Neptunium,237,actinoid
95 | 94,Pu,Plutonium,244,actinoid
96 | 95,Am,Americium,243,actinoid
97 | 96,Cm,Curium,247,actinoid
98 | 97,Bk,Berkelium,247,actinoid
99 | 98,Cf,Californium,251,actinoid
100 | 99,Es,Einsteinium,252,actinoid
101 | 100,Fm,Fermium,257,actinoid
102 | 101,Md,Mendelevium,258,actinoid
103 | 102,No,Nobelium,259,actinoid
104 | 103,Lr,Lawrencium,262,transition metal
105 | 104,Rf,Rutherfordium,267,transition metal
106 | 105,Db,Dubnium,268,transition metal
107 | 106,Sg,Seaborgium,271,transition metal
108 | 107,Bh,Bohrium,272,transition metal
109 | 108,Hs,Hassium,270,transition metal
110 | 109,Mt,Meitnerium,276,transition metal
111 | 110,Ds,Darmstadtium,281,transition metal
112 | 111,Rg,Roentgenium,280,transition metal
113 | 112,Cn,Copernicium,285,transition metal
114 | 113,Uut,Ununtrium,284,metal
115 | 114,Uuq,Ununquadium,289,metal
116 | 115,Uup,Ununpentium,288,halogen
117 | 116,Uuh,Ununhexium,293,noble gas
118 | 117,Uus,Ununseptium,294,alkali metal
119 | 118,Uuo,Ununoctium,294,alkaline earth metal
--------------------------------------------------------------------------------
/tests/data-package-examples/periodic-table/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "period-table",
3 | "title": "Periodic Table",
4 | "profile": "tabular-data-package",
5 | "licenses": [{
6 | "id": "CC0-1.0",
7 | "title": "CC0 1.0",
8 | "url": "https://creativecommons.org/publicdomain/zero/1.0/"
9 | }],
10 | "resources": [{
11 | "path": "data.csv",
12 | "name": "data",
13 | "profile": "tabular-data-resource",
14 | "format": "csv",
15 | "mediatype": "text/csv",
16 | "encoding": "UTF-8",
17 | "schema": {
18 | "fields": [{
19 | "name": "atomic number",
20 | "type": "integer",
21 | "format": "default"
22 | },
23 | {
24 | "name": "symbol",
25 | "type": "string",
26 | "format": "default"
27 | },
28 | {
29 | "name": "name",
30 | "type": "string",
31 | "format": "default"
32 | },
33 | {
34 | "name": "atomic mass",
35 | "type": "number",
36 | "format": "default"
37 | },
38 | {
39 | "name": "metal or nonmetal?",
40 | "type": "string",
41 | "format": "default"
42 | }
43 | ]
44 | }
45 | }]
46 | }
47 |
--------------------------------------------------------------------------------
/tests/data-package-examples/text-file/README.md:
--------------------------------------------------------------------------------
1 | An example of a txt file providing non-tabular data inside a data package.
2 |
3 | The data is an example. Do not buy a lottery ticket based on this data.
4 |
--------------------------------------------------------------------------------
/tests/data-package-examples/text-file/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "text-file",
3 | "title": "Text File Data Package",
4 | "description": "An example of a text file in a non-tabular data package",
5 | "licenses": [{
6 | "name": "CC0-1.0",
7 | "title": "CC0 1.0",
8 | "path": "https://creativecommons.org/publicdomain/zero/1.0/"
9 | }],
10 | "contributors": [
11 | {
12 | "title": "Joe Bloggs",
13 | "email": "joe@example.com",
14 | "path": "http://www.example.com/bloggs",
15 | "role": "author",
16 | "organization": "Bloggs and Associates"
17 | },
18 | {
19 | "title": "Mary Shelley",
20 | "email": "mshelly@example.com",
21 | "role": "contributor"
22 | }
23 | ],
24 | "version": "0.1.0",
25 | "created": "2018-03-04T05:45:00Z",
26 | "resources": [{
27 | "name": "text-file",
28 | "path": "text-file.txt",
29 | "title": "Text File Data Resource",
30 | "description": "An example of a text file as a non-tabular data resource",
31 | "format": "txt"
32 | }]
33 | }
34 |
--------------------------------------------------------------------------------
/tests/data-package-examples/text-file/text-file.txt:
--------------------------------------------------------------------------------
1 | This is just a simple text file. It is not tabular. It includes some data. Tomorrow's winning lottery numbers are 10, 35, 17 and 42.
2 |
--------------------------------------------------------------------------------
/tests/data-package-examples/units-and-prefixes/README.md:
--------------------------------------------------------------------------------
1 | These tables represent the values for units and their prefixes as specified in the draft [Units specification](http://specs.okfnlabs.org/units/). It contains:
2 |
3 | - An inventory of standard units with unique identifiers
4 | - A set of unit prefixes
5 |
--------------------------------------------------------------------------------
/tests/data-package-examples/units-and-prefixes/data/unit-prefixes.csv:
--------------------------------------------------------------------------------
1 | UID,name,symbol,factor
2 | da,deca,da,10
3 | h,hecto,h,100
4 | k,kilo,k,1000
5 | M,mega,M,1000000
6 | G,giga,G,1000000000
7 | T,tera,T,1000000000000
8 | P,peta,P,1000000000000000.0
9 | E,exa,E,1e+18
10 | Z,zetta,Z,1e+21
11 | Y,yotta,Y,1e+24
12 | d,deci,d,0.1
13 | c,centi,c,0.01
14 | m,milli,m,0.001
15 | μ,micro,μ,1e-06
16 | n,nano,n,1e-09
17 | p,pico,p,1e-12
18 | f,femto,f,1e-15
19 | a,atto,a,1e-18
20 | z,zepto,z,1e-21
21 | y,yocto,y,1e-24
22 |
--------------------------------------------------------------------------------
/tests/data-package-examples/units-and-prefixes/data/units.csv:
--------------------------------------------------------------------------------
1 | UID,name,symbol,physical quantity,dimensions
2 | η,amagat,η,number density,length^-3 x item
3 | Bq,becquerel,Bq,radioactivity,time^-1
4 | C,coulomb,C,electric charge,time x electric_current
5 | F,farad,F,electrical capacitance,length^-2 x mass^-1 x time^4 x electric_current^2
6 | Gy,gray,Gy,radiation absorbed dose,length^2 x time^-2
7 | Hz,hertz,Hz,frequency,time^-1
8 | H,henry,H,inductance,length^2 x mass x time^-2 x electric_current^-2
9 | kat,katal,kat,catalytic activity,time^-1 x amount_of_substance
10 | lm,lumen,lm,luminous flux,luminous_intensity
11 | lx,lux,lx,illuminance,length^-2 x luminous_intensity
12 | Ohm,ohm,Ω,electric resistance,length^2 x mass x time^-3 x electric_current^-2
13 | S,siemens,S,electric conductance,length^-2 x mass^-1 x time^3 x electric_current^2
14 | Sv,sievert,Sv,radiation dose equivalent,length^2 x time^-2
15 | T,tesla,T,magnetic flux density,mass x time^-2 x electric_current^-1
16 | Wb,weber,Wb,magnetic flux,length^2 x mass x time^-2 x electric_current^-1
17 | m^2,square metre,m^2,area,length^2
18 | m^3,cubic metre,m^3,volume,length^3
19 | m/s,metre per second,m/s,velocity,length x time^-1
20 | m/s^2,metre per square second,m/s^2,acceleration,length x time^-2
21 | cm^-1,per centimetre,cm^-1,length^-1,
22 | cm/s^2,centimetre per square second,cm/s^2,acceleration,length x time^-2
23 | A,ampere,A,electric current,electric_current
24 | bit,bit,bit,information,information
25 | cd,candela,cd,luminous intensity,luminous_intensity
26 | K,kelvin,K,temperature,temperature
27 | m,metre,m,length,length
28 | mol,mole,mol,amount of substance,amount_of_substance
29 | s,second,s,time,time
30 | kg,kilogram,kg,mass,mass
31 | g,gram,g,mass,mass
32 | km,kilometre,km,length,length
33 | μm,micron,μm,length,length
34 | J,joule,J,energy,length^2 x mass x time^-2
35 | N,newton,N,force,length x mass x time^-2
36 | W,watt,W,power,length^2 x mass x time^-3
37 | V,volt,V,electric potential difference,length^2 x mass x time^-3 x electric_current^-1
38 | Pa,pascal,Pa,pressure,length^-1 x mass x time^-2
39 | acre,acre,acre,area,length^2
40 | a,are,a,area,length^2
41 | atm,atmosphere,atm,pressure,length^-1 x mass x time^-2
42 | bar,bar,bar,pressure,length^-1 x mass x time^-2
43 | b,barn,b,area,length^2
44 | bhp,boiler horsepower,bhp,power,length^2 x mass x time^-3
45 | btu_39f,british thermal unit (39 °F),BTU,energy,length^2 x mass x time^-2
46 | btu_60f,british thermal unit (60 °F),BTU,energy,length^2 x mass x time^-2
47 | btu_63f,british thermal unit (63 °F),BTU,energy,length^2 x mass x time^-2
48 | btu_iso,british thermal unit (ISO),BTU,energy,length^2 x mass x time^-2
49 | btu_it,british thermal unit (IT),BTU,energy,length^2 x mass x time^-2
50 | btu_mean,british thermal unit (mean),BTU,energy,length^2 x mass x time^-2
51 | btu_thermo,british thermal unit (thermochemical),BTU,energy,length^2 x mass x time^-2
52 | btu_59f,british thermal unit (59 °F),BTU,energy,length^2 x mass x time^-2
53 | bu_imp,UK bushel,bu (Imp),volume,length^3
54 | bu_us,US bushel,bu (US lvl),volume,length^3
55 | cal,calorie,cal,energy,length^2 x mass x time^-2
56 | cp,candle power,cp,luminous flux,luminous_intensity
57 | CHU,celsius heat unit,CHU,energy,length^2 x mass x time^-2
58 | cmHg,centimetre of mercury,cmHg,pressure,length^-1 x mass x time^-2
59 | cmH2O,centimetre of water,cmH2O,pressure,length^-1 x mass x time^-2
60 | clo,clo,clo,thermal resistance,mass^-1 x time^3 x temperature
61 | c_us,cup,c (US),volume,length^3
62 | Ci,curie,Ci,radioactivity,time^-1
63 | dyn,dyne,dyn,force,length x mass x time^-2
64 | dyn_cm,dyne centimetre,dyn cm,energy,length^2 x mass x time^-2
65 | hp_elec,electric horsepower,hp,power,length^2 x mass x time^-3
66 | eV,electron volt,eV,energy,length^2 x mass x time^-2
67 | erg,erg,erg,energy,length^2 x mass x time^-2
68 | Fd,faraday,F,electric charge,time x electric_current
69 | fc,footcandle,fc,illuminance,length^-2 x luminous_intensity
70 | ftH2O,foot of water,ftH2O,pressure,length^-1 x mass x time^-2
71 | Fr,franklin,Fr,electric charge,time x electric_current
72 | γ,gamma,γ,magnetic flux density,mass x time^-2 x electric_current^-1
73 | gauss,gauss,G,magnetic flux density,mass x time^-2 x electric_current^-1
74 | Eh,hartree,Eh,energy,length^2 x mass x time^-2
75 | ha,hectare,ha,area,length^2
76 | hhd,hogshead,hhd,volume,length^3
77 | inHg,inch of mercury,inHg,pressure,length^-1 x mass x time^-2
78 | inH2O,inch of water,inH2O,pressure,length^-1 x mass x time^-2
79 | kcal,kilocalorie,kcal,energy,length^2 x mass x time^-2
80 | kgf,kilogram force,kgf,force,length x mass x time^-2
81 | kn,knot,kn,velocity,length x time^-1
82 | La,lambert,La,illuminance,length^-2 x luminous_intensity
83 | L,litre,L,volume,length^3
84 | Mx,maxwell,Mx,magnetic flux,length^2 x mass x time^-2 x electric_current^-1
85 | hp,metric horsepower,hp,power,length^2 x mass x time^-3
86 | mbar,millibar,mbar,pressure,length^-1 x mass x time^-2
87 | mmHg,millimetre of mercury,mmHg,pressure,length^-1 x mass x time^-2
88 | bbl,petroleum barrel,bbl,volume,length^3
89 | p,poncelet,p,power,length^2 x mass x time^-3
90 | pdl,poundal,pdl,force,length x mass x time^-2
91 | lbf,pound force,lbf,force,length x mass x time^-2
92 | quad,quad,quad,energy,length^2 x mass x time^-2
93 | rd,rad,rad,radiation absorbed dose,length^2 x time^-2
94 | rem,rem,rem,radiation dose equivalent,length^2 x time^-2
95 | reyn,reyn,reyn,dynamic viscosity,length^-1 x mass x time^-1
96 | rood,rood,rood,area,length^2
97 | Rd,rutherford,rd,radioactivity,time^-1
98 | Ry,rydberg,Ry,energy,length^2 x mass x time^-2
99 | sn,sthene,sn,force,length x mass x time^-2
100 | St,stoke,St,kinematic viscosity,length^2 x time^-1
101 | thm,therm,thm,energy,length^2 x mass x time^-2
102 | th,thermie,th,energy,length^2 x mass x time^-2
103 | tog,tog,tog,thermal resistance,mass^-1 x time^3 x temperature
104 | bbl_imp,UK barrel,bl (Imp),volume,length^3
105 | oz_fl_uk,UK fluid ounce,fl oz,volume,length^3
106 | gal_uk,UK gallon,gal,volume,length^3
107 | gi_uk,UK gill,gi,volume,length^3
108 | hp_uk,UK horsepower,hp,power,length^2 x mass x time^-3
109 | gal_dry_us,US dry gallon,gal,volume,length^3
110 | bbl_dry_us,US dry barrel,bl (US),volume,length^3
111 | oz_fl,US fluid ounce,fl oz,volume,length^3
112 | gi_us,US gill,gi,volume,length^3
113 | bbl_fl_us,US liquid barrel,fl bl (US),volume,length^3
114 | gal,US liquid gallon,gal,volume,length^3
115 | kWh,kilowatt hour,kWh,energy,length^2 x mass x time^-2
116 | lbf/in^2,pound force per square inch,psi,pressure,length^-1 x mass x time^-2
117 | angstrom,angstrom,Å,length,length
118 | ua,astronomical unit,AU,length,length
119 | Bi,biot,Bi,electric current,electric_current
120 | byte,byte,byte,information,information
121 | kt,carat,kt,mass,mass
122 | ch,chain,ch,length,length
123 | d,day,d,time,time
124 | deg_c,degree celsius,°C,temperature,temperature
125 | deg_f,degree fahrenheit,°F,temperature,temperature
126 | deg_r,degree rankine,°R,temperature,temperature
127 | dram,dram,dram,mass,mass
128 | me,electron mass,me,mass,mass
129 | ell,ell,ell,length,length
130 | ftm,fathom,ftm,length,length
131 | fm,fermi,fm,length,length
132 | ft,foot,ft,length,length
133 | fur,furlong,fur,length,length
134 | gr,grain,gr,mass,mass
135 | h,hour,h,time,time
136 | cwt_long,hundredweight long,cwt,mass,mass
137 | cwt_short,hundredweight short,cwt,mass,mass
138 | in,inch,in,length,length
139 | ly,light year,ly,length,length
140 | ln,line,ln,length,length
141 | lnk,link,lnk,length,length
142 | ton_uk,long ton,ton,mass,mass
143 | mi,mile,mi,length,length
144 | min,minute,min,time,time
145 | month,month,month,time,time
146 | nl,nautical league,nl,length,length
147 | nmi,nautical mile,nmi,length,length
148 | oz,ounce,oz,mass,mass
149 | pc,parsec,pc,length,length
150 | dwt,pennyweight,dwt,mass,mass
151 | pt,point,pt,length,length
152 | lb,pound,lb,mass,mass
153 | lbmol,pound mole,lbmol,amount of substance,amount_of_substance
154 | ton_us,short ton,ton,mass,mass
155 | d_sid,sidereal day,d,time,time
156 | year_sid,sidereal year,yr,time,time
157 | lea,statute league,lea,length,length
158 | st,stone,st,mass,mass
159 | t,tonne,t,mass,mass
160 | u,unified atomic mass,u,mass,mass
161 | foot_survey_us,US survey foot,ft,length,length
162 | week,week,wk,time,time
163 | yd,yard,yd,length,length
164 | year,year,yr,time,time
165 | unity,,,dimensionless,
166 | percent,percent,%,dimensionless,
167 | rad,radian,rad,plane angle,
168 | sr,steradian,sr,solid angle,
169 | centiradian,centiradian,crad,plane angle,
170 | arc_min,arcminute,′,plane angle,
171 | arc_sec,arcsecond,″,plane angle,
172 | degree,degree,°,plane angle,
173 | grad,grad,grad,plane angle,
174 | rev,revolution,rev,plane angle,
175 | sphere,sphere,sphere,solid angle,
176 |
--------------------------------------------------------------------------------
/tests/data-package-examples/units-and-prefixes/datapackage.json:
--------------------------------------------------------------------------------
1 | {
2 | "profile": "tabular-data-package",
3 | "name": "units-and-prefixes",
4 | "title": "Units and Unit Prefixes",
5 | "version": "v0.1.0",
6 | "contributors": [{
7 | "title": "Andrew Berkeley",
8 | "role": "author"
9 | },
10 | {
11 | "title": "James Smith",
12 | "role": "author",
13 | "organization": "Open Data Institute"
14 | },
15 | {
16 | "title": "Rufus Pollock",
17 | "role": "author",
18 | "organization": "Open Knowledge International"
19 | }
20 | ],
21 | "licenses": [{
22 | "name": "CC-BY-4.0",
23 | "title": "Creative Commons Attribution 4.0",
24 | "path": "https://creativecommons.org/licenses/by/4.0/"
25 | }],
26 | "resources": [{
27 | "profile": "tabular-data-resource",
28 | "path": "data/units.csv",
29 | "name": "units",
30 | "title": "Standard Units",
31 | "description": "Standard Units for the Frictionless Data specification",
32 | "format": "csv",
33 | "mediatype": "text/csv",
34 | "encoding": "utf-8",
35 | "sources": [{
36 | "title": "Units",
37 | "path": "http://specs.okfnlabs.org/units/"
38 | }],
39 | "schema": {
40 | "fields": [{
41 | "name": "UID",
42 | "title": "Unique Unit Identifier",
43 | "type": "string",
44 | "format": "default",
45 | "constraints": {
46 | "required": true,
47 | "unique": true
48 | }
49 | }, {
50 | "name": "name",
51 | "type": "string",
52 | "format": "default"
53 | }, {
54 | "name": "symbol",
55 | "type": "string",
56 | "format": "default"
57 | }, {
58 | "name": "physical quantity",
59 | "type": "string",
60 | "format": "default"
61 | }, {
62 | "name": "dimensions",
63 | "type": "string",
64 | "format": "default"
65 | }],
66 | "missingValues": [
67 | ""
68 | ],
69 | "primaryKey": [
70 | "UID"
71 | ]
72 | },
73 | "dialect": {
74 | "caseSensitiveHeader": false,
75 | "delimiter": ",",
76 | "doubleQuote": true,
77 | "header": true,
78 | "lineTerminator": "\r\n",
79 | "quoteChar": "\"",
80 | "skipInitialSpace": true
81 | }
82 | },
83 | {
84 | "profile": "tabular-data-resource",
85 | "path": "data/unit-prefixes.csv",
86 | "name": "unit-prefixes",
87 | "title": "Unit Prefixes",
88 | "description": "Standard Unit Prefixes for the Frictionless Data specification",
89 | "format": "csv",
90 | "mediatype": "text/csv",
91 | "encoding": "utf-8",
92 | "sources": [{
93 | "title": "Units",
94 | "path": "http://specs.okfnlabs.org/units/"
95 | }],
96 | "schema": {
97 | "fields": [{
98 | "name": "UID",
99 | "title": "Unique Unit Prefix Identifier",
100 | "type": "string",
101 | "format": "default",
102 | "constraints": {
103 | "required": true,
104 | "unique": true
105 | }
106 | },
107 | {
108 | "name": "name",
109 | "type": "string",
110 | "format": "default"
111 | },
112 | {
113 | "name": "symbol",
114 | "type": "string",
115 | "format": "default"
116 | },
117 | {
118 | "name": "factor",
119 | "type": "any",
120 | "format": "default"
121 | }
122 | ],
123 | "missingValues": [
124 | ""
125 | ],
126 | "primaryKey": [
127 | "UID"
128 | ]
129 | },
130 | "dialect": {
131 | "caseSensitiveHeader": false,
132 | "delimiter": ",",
133 | "doubleQuote": true,
134 | "header": true,
135 | "lineTerminator": "\r\n",
136 | "quoteChar": "\"",
137 | "skipInitialSpace": true
138 | }
139 | }
140 | ]
141 | }
142 |
--------------------------------------------------------------------------------
/tests/datapackage-m-tests.pbix:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblelearn/datapackage-m/1ec8534367710f4b01755cf548ff9377c1b10241/tests/datapackage-m-tests.pbix
--------------------------------------------------------------------------------