23 |
35 |
36 |
50 |
51 |
52 |
53 | {% set private_datasets = data.private_datasets or source_config.get('private_datasets') %}
54 |
55 |
66 |
67 | {% set existing_group = source_config.default_groups or data.default_groups %}
68 | {% set groups_available = h.groups_available() %}
69 | {% if groups_available %}
70 |
86 | {% endif %}
87 |
88 | {% endblock extra_config %}
89 |
90 |
{% block delete_button %}
{# JSON-encoded i18n warning text — presumably consumed by the delete
   confirmation dialog (NOTE(review): confirm against the parent template). #}
{% set locale = h.dump_json({'content': _('Warning: Apart from deleting this source, this command will remove all its datasets, as well as all previous job reports. Are you sure you want to continue?')}) %}

{% block delete_button_text %}{{ _('Delete') }}{% endblock %}

{% endblock %}
96 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/validation/xml/fgdc-std-012-2002/fgdc-std-001-1998-sect03.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/tests/test_fix_spatial.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import ckan.plugins as p
4 | import ckan.tests.factories as factories
5 | import ckan.tests.helpers as helpers
6 |
7 | from utils import populate_locations_table
8 |
9 |
@pytest.mark.usefixtures("with_plugins")
class TestSpatialField(object):
    """Tests for the automatic transformation of the ``spatial`` package extra.

    Each test creates a dataset whose ``spatial`` extra is supplied in a
    legacy format (bbox string, place name, coordinate-pair list, signed
    coordinates) and checks that ``package_create`` rewrote it as a GeoJSON
    Polygon.  Where applicable, the dataset must then be findable through a
    bounding-box (``ext_bbox``) search.
    """

    @classmethod
    def setup_class(cls):
        # The place-name tests (e.g. 'California') require the locations
        # table to be populated.
        populate_locations_table()
        cls.user = factories.Sysadmin(name='spatial_user')

    def _create_dataset(self, name, title, spatial_value):
        """Create a dataset with the given ``spatial`` extra and return it."""
        context = {'user': self.user['name'], 'ignore_auth': True}
        pkg = {
            'title': title,
            'name': name,
            'extras': [
                {'key': 'spatial', 'value': spatial_value}
            ]
        }
        return p.toolkit.get_action('package_create')(context, pkg)

    @staticmethod
    def _assert_spatial_extra(dataset, expected_spatial, old_geo=None):
        """Assert the dataset has a ``spatial`` extra equal to *expected_spatial*.

        If *old_geo* is given, also assert that the original value was
        preserved in the ``old-spatial`` extra.
        """
        spatial_extra_exists = False
        for extra in dataset['extras']:
            if extra['key'] == 'spatial':
                spatial_extra_exists = True
                assert extra['value'] == expected_spatial
            if old_geo is not None and extra['key'] == 'old-spatial':
                assert extra['value'] == old_geo

        assert spatial_extra_exists is True

    @staticmethod
    def _assert_bbox_search_finds(dataset, bbox):
        """Assert a bounding-box search for *bbox* returns exactly *dataset*."""
        result = helpers.call_action(
            'package_search',
            extras={'ext_bbox': bbox})
        assert result['count'] == 1
        assert result['results'][0]['id'] == dataset['id']

    def test_numeric_spatial_transformation(self):
        """A 'minx,miny,maxx,maxy' string becomes a GeoJSON polygon."""
        old_geo = '10.0,0.0,15.0,5.0'
        dataset = self._create_dataset('spatial-num', 'Spatial num', old_geo)

        expected_spatial = ('{"type": "Polygon", "coordinates": [[[10.0, 0.0], [10.0, 5.0], [15.0, 5.0], '
                            '[15.0, 0.0], [10.0, 0.0]]]}')
        self._assert_spatial_extra(dataset, expected_spatial)
        self._assert_bbox_search_finds(dataset, '9,-1,16,4')

    def test_string_spatial_transformation(self):
        """A known place name is resolved through the locations table."""
        old_geo = 'California'
        dataset = self._create_dataset('spatial-str', 'Spatial String', old_geo)

        expected_spatial = ('{"type":"Polygon",'
                            '"coordinates":[[[-124.3926,32.5358],[-124.3926,42.0022],[-114.1252,42.0022],'
                            '[-114.1252,32.5358],[-124.3926,32.5358]]]}')
        self._assert_spatial_extra(dataset, expected_spatial)
        self._assert_bbox_search_finds(dataset, '-125,31,-113,43')

    def test_list_spatial_transformation(self):
        """A '[[minx, miny], [maxx, maxy]]' pair list becomes a polygon."""
        old_geo = '[[20.0, 10.0], [25.0, 15.0]]'
        dataset = self._create_dataset('spatial-list', 'Spatial List', old_geo)

        expected_spatial = ('{"type": "Polygon", "coordinates": [[[20.0, 10.0], [20.0, 15.0], [25.0, 15.0], '
                            '[25.0, 10.0], [20.0, 10.0]]]}')
        self._assert_spatial_extra(dataset, expected_spatial)
        self._assert_bbox_search_finds(dataset, '19,9,26,16')

    def test_spatial_plus_sign(self):
        """Explicit '+' signs on coordinates are accepted and normalized away."""
        old_geo = '-179.231086,-14.601813,+179.859681,+71.441059'
        dataset = self._create_dataset('spatial-plus', 'Spatial Plus Sign', old_geo)

        expected_spatial = ('{"type": "Polygon", "coordinates": [[[-179.231086, -14.601813], '
                            '[-179.231086, 71.441059], [179.859681, 71.441059], [179.859681, '
                            '-14.601813], [-179.231086, -14.601813]]]}')
        self._assert_spatial_extra(dataset, expected_spatial)

    def test_bad_string_transformation(self):
        """An unknown place name yields an empty ``spatial`` extra, and the
        original value is kept in the ``old-spatial`` extra."""
        old_geo = 'US Domestic'
        dataset = self._create_dataset('spatial-usd', 'Spatial US Domestic', old_geo)

        self._assert_spatial_extra(dataset, expected_spatial="", old_geo=old_geo)
--------------------------------------------------------------------------------
/ckanext/geodatagov/validation/xml/fgdc-std-012-2002/fgdc-std-012-2002-sect03.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 | Bit representation of data value in raster cell.
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 | Specification for the independent axes in the coordinate system in which spatial data are located.
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 | Number of axes used in spatial data matrix.
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 | Description of individual axis in spatial data matrix.
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 | Designation assigned to an axis.
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 | The maximum number of data points along the corresponding axis.
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://github.com/GSA/ckanext-geodatagov/actions)
2 | [](https://badge.fury.io/py/ckanext-geodatagov)
3 |
4 | # Data.gov
5 |
[Data.gov](http://data.gov) is an open data website created by the [U.S. General Services Administration](https://github.com/GSA/) that is based on two robust open source projects: [CKAN](http://ckan.org) and [WordPress](http://wordpress.org). The data catalog at [catalog.data.gov](https://catalog.data.gov) is powered by CKAN, while the content seen at [Data.gov](https://www.data.gov) is powered by WordPress.
7 |
8 | **For all code, bugs, and feature requests related to Data.gov, see the project wide Data.gov [issue tracker](https://github.com/GSA/data.gov/issues).**
9 |
10 | Currently this repository is only used for source version control on the code for the CKAN extension for geospatial data, but you can see all of the Data.gov relevant repos listed in the [GSA Data.gov README file](https://github.com/GSA/data.gov/blob/master/README.md).
11 |
12 | ## CKAN Extension for Geospatial Data
13 |
14 | Most Data.gov specific CKAN customizations are contained within this extension, but the extension also provides additional geospatial capabilities.
15 |
16 | ### Customization
17 |
18 | Due to CKAN 2.3 and 2.8 migrations, some features should be removed or moved to the official community versions:
19 | - [Stop rolling up the extras](https://github.com/GSA/ckanext-geodatagov/issues/178)
20 | - [Move to the official search by geolocation](https://github.com/GSA/datagov-deploy/issues/2440) (probably sharing our version that has improvements)
21 | - Do a general analysis of this extension to detect other personalized functionalities that should be discontinued.
22 |
23 | ### Requirements
24 |
25 | Package | Notes
26 | ---------------------------------------------------------------------- | -------------
27 | [ckanext-harvest](https://github.com/ckan/ckanext-harvest/) | --
28 | [ckanext-spatial](https://github.com/ckan/ckanext-spatial) | --
29 | [PyZ3950](https://github.com/asl2/PyZ3950) | --
[werkzeug](https://github.com/nickumia-reisys/werkzeug) | This only affects the tests. For all intents and purposes, this should be tracking [upstream](https://github.com/pallets/werkzeug)
31 |
32 | This extension is compatible with these versions of CKAN.
33 |
34 | CKAN version | Compatibility
35 | ------------ | -------------
36 | <=2.8 | no
37 | 2.9 | 0.1.37 (last supported)
38 | 2.10 | >=0.2.0
39 |
40 | ## Tests
41 |
42 | All the tests live in the [/ckanext/geodatagov/tests](/ckanext/geodatagov/tests) folder. [Github actions](https://github.com/GSA/ckanext-geodatagov/blob/main/.github/workflows/test.yml) is configured to run the tests against CKAN 2.10 when you open a pull request.
43 |
44 | ## Using the Docker Dev Environment
45 |
46 | ### Build Environment
47 |
48 | To start environment, run:
49 | ```docker compose build```
50 | ```docker compose up```
51 |
52 | CKAN will start at localhost:5000
53 |
54 | To shut down environment, run:
55 |
56 | ```docker compose down```
57 |
58 | To docker exec into the CKAN image, run:
59 |
60 | ```docker compose exec app /bin/bash```
61 |
62 | ### Testing
63 |
64 | They follow the guidelines for [testing CKAN
65 | extensions](https://docs.ckan.org/en/2.10/extensions/testing-extensions.html#testing-extensions).
66 |
67 | To run the extension tests, start the containers with `make up`, then:
68 |
69 | $ make test
70 |
71 | Lint the code.
72 |
73 | $ make lint
74 |
75 | ### Debugging
76 |
77 | We have not determined a good way for most IDE native debugging, however you can use the built in
78 | Python pdb debugger. Simply run `make debug`, which will run docker with an interactive shell.
79 | Add `import pdb; pdb.set_trace()` anywhere you want to start debugging, and if the code is triggered
80 | you should see a command prompt waiting in the shell. Use a pdb cheat sheet when starting to learn
81 | like [this](https://kapeli.com/cheat_sheets/Python_Debugger.docset/Contents/Resources/Documents/index).
82 |
83 | When you edit/add/remove code, the server is smart enough to restart. If you are editing logic that is
84 | not part of the webserver (ckan command, etc) then you should be able to run the command after edits
85 | and get the same debugger prompt.
86 |
87 | ### Matrix builds
88 |
89 | The existing development environment assumes a full catalog.data.gov test setup. This makes
90 | it difficult to develop and test against new versions of CKAN (or really any
91 | dependency) because everything is tightly coupled and would require us to
92 | upgrade everything at once which doesn't really work. A new make target
93 | `test-new` is introduced with a new docker-compose file.
94 |
95 | The "new" development environment drops as many dependencies as possible. It is
96 | not meant to have feature parity with
97 | [GSA/catalog.data.gov](https://github.com/GSA/catalog.data.gov/). Tests should
98 | mock external dependencies where possible.
99 |
100 | In order to support multiple versions of CKAN, or even upgrade to new versions
101 | of CKAN, we support development and testing through the `CKAN_VERSION`
102 | environment variable.
103 |
104 | $ make CKAN_VERSION=2.11 test
105 |
106 | ### Command line interface
107 |
The following operations can be run from the command line as described below:
109 |
110 | geodatagov sitemap-to-s3 [{upload_to_s3}] [{page_size}] [{max_per_page}]
111 | - Generates sitemap and uploads to s3
112 |
113 | geodatagov db-solr-sync [{dryrun}] [{cleanup_solr}] [{update_solr}]
114 | - DB Solr sync.
115 |
116 | geodatagov tracking-update [{start_date}]
117 | - ckan tracking update with customized options and output
118 |
119 | ## Credit / Copying
120 |
121 | Original work written by the HealthData.gov team. It has been modified in support of Data.gov.
122 |
123 | As a work of the United States Government, this package is in the public
124 | domain within the United States. Additionally, we waive copyright and
125 | related rights in the work worldwide through the CC0 1.0 Universal
126 | public domain dedication (which can be found at http://creativecommons.org/publicdomain/zero/1.0/).
127 |
128 | ## Ways to Contribute
129 | We're so glad you're thinking about contributing to ckanext-datajson!
130 |
131 | Before contributing to ckanext-datajson we encourage you to read our
132 | [CONTRIBUTING](CONTRIBUTING.md) guide, our [LICENSE](LICENSE.md), and our README
133 | (you are here), all of which should be in this repository. If you have any
134 | questions, you can email the Data.gov team at
135 | [datagov@gsa.gov](mailto:datagov@gsa.gov).
136 |
--------------------------------------------------------------------------------
/ADR.md:
--------------------------------------------------------------------------------
1 |
2 | ADRs for CKANEXT_GEODATAGOV
3 | ==============================================
4 |
5 | # 1. Fix encoding issue for waf harvester
6 |
7 | Date: 2021-07-16
8 |
9 | ## Status
10 |
11 | Accepted
12 |
13 | ## Context
14 |
15 | We are using the upstream ckan version of ckanext-spatial. They upgraded the extension to PY3; however, their harvester tests were removed. The waf harvester was not being encoded properly to support PY2 and PY3 so our tests were failing.
16 |
17 | ## Decision
18 |
19 | We decided to fix the bug and submit a PR [upstream](https://github.com/ckan/ckanext-spatial/pull/252).
20 |
21 | ## Consequences
22 |
23 | - Until the fix is merged upstream, the ckanext-geodatagov repo will be tracking a pinned version of ckanext-spatial fork which adds complexity.
24 | - All of the customization of the GSA fork of ckanext-spatial is disregarded. The GSA fork was messy already.
25 |
26 |
27 |
28 | # 2. Fix JSON Serialization of dictionary
29 |
30 | Date: 2021-07-19
31 |
32 | ## Status
33 |
34 | Accepted
35 |
36 | ## Context
37 |
We are using the upstream ckan version of ckanext-harvest. They upgraded the extension to PY3; however, there is a PY3-compatibility issue that caused our tests to fail.
39 |
40 | ## Decision
41 |
42 | We decided to fix the bug and submit a PR [upstream](https://github.com/ckan/ckanext-harvest/pull/450).
43 |
44 | ## Consequences
45 |
46 | - Until the fix is merged upstream, the ckanext-geodatagov repo will be tracking a pinned version of ckanext-spatial fork
47 | which adds complexity.
48 | - All of the customization of the GSA fork of ckanext-spatial is disregarded. The GSA fork was messy already.
49 |
50 |
51 | # 3. Use catalog.data.gov Solr Image
52 |
53 | Date: 2021-06-21
54 |
55 | ## Status
56 |
57 | Accepted
58 |
59 | ## Context
60 |
61 | The Solr dev image that ckanext-datajson uses was incompatible with ckanext-geodatagov. There was a 'solrsearch issue' that popped up with no clear resolution.
62 |
63 | ## Decision
64 |
Using the catalog.data.gov Solr image stopped Solr from throwing exceptions.
66 |
67 | ## Consequences
68 |
69 | - Consequences unknown.
70 | - All of the ckanext repos shouldn't be using varying versions of solr/postgres/etc..
71 |
72 |
73 | # 4. Fix CKAN Test Suite, specifically reset_db()
74 |
75 | Date: 2021-06-24
76 |
77 | ## Status
78 |
79 | Accepted
80 |
81 | ## Context
82 |
If all of the tables are not initialized, the 'reset_db' function attempts to delete all of the tables and reinitialize everything. Because geodatagov requires the postgis tables, which have a complicated initialization, the ckan function doesn't support their maintenance (the current code doesn't support it; that doesn't mean it can't or won't). This is the [logic](https://github.com/ckan/ckan/blob/e2d9d1610e63d2256739a09ba2a18e59a29a45db/ckan/model/__init__.py#L225-L236) that breaks it. Either way, if reset_db() is called too early, the postgis tables will be deleted and the code will break. If reset_db() is called too late, the db can't initialize and the code breaks.
84 |
85 | ## Decision
86 |
87 | Implement two customizations.
88 | - https://github.com/GSA/ckanext-geodatagov/pull/190/commits/627a8ad689d50b446527ea39ff4b9290203929a9
89 | - https://github.com/GSA/ckanext-geodatagov/pull/190/commits/8e34ee0164ac1ce454d4c8944ee5fbc5d025b2ed
90 |
91 | ## Consequences
92 |
93 | - Consequences unknown.
94 | - If the commands called in the test_category_tags.py is called anywhere else, the tests fail.
95 | - If the commands are repeated in multiple files, the tests fail.
96 | - If any test needs to be run in isolation, the test_category_tags.py test needs to precede it, otherwise the independent test will fail..
97 |
98 |
99 | # 5. Track PY2 pip requirements separately from PY3
100 |
101 | Date: 2021-07-08
102 |
103 | ## Status
104 |
105 | Accepted
106 |
107 | ## Context
108 |
There are a few libraries that either operate differently in py2 and py3, or have different support for py2 and py3, so we needed to use two separate versions.
110 |
111 | PY2:
112 | - https://github.com/asl2/PyZ3950.git#egg=PyZ3950
113 | - OWSLib == 0.8.6
114 | - pyproj 1.9.6
115 | - factory-boy==2.1.1
116 | - werkzeug (no customization; it installed based on other dependencies)
117 |
118 | PY3:
119 | - https://github.com/danizen/PyZ3950.git#egg=PyZ3950
120 | - OWSLib >= 0.18.0
121 | - pyproj 2.6.1
122 | - factory-boy==2.12.0
123 | - https://github.com/nickumia-reisys/werkzeug@e1f6527604ab30e4b46b5430a5fb97e7a7055cd7#egg=werkzeug
124 |
125 | The PY3 upgrade for ckanext-harvest and ckanext-spatial had small bugs that were submitted as PRs upstream, until they are accepted, the local change needs to be tracked.
126 | - https://github.com/nickumia-reisys/ckanext-harvest.git@9d1f647d247c16b6c3acba26e321e9500cafb18c#egg=ckanext-harvest
127 | - https://github.com/GSA/ckanext-spatial.git@93c430ffc36ba7e306652fd511efd0d1e7081381#egg=ckanext-spatial
128 |
129 | ## Decision
130 |
131 | See [commit](https://github.com/GSA/ckanext-geodatagov/pull/190/commits/0cbd146d286fc1467fd2f3fba4800f7ba66b76ce)
132 |
133 | ## Consequences
134 |
135 | - A lot of specificity
136 |
137 |
138 | # 6. Remove csw harvester tests
139 |
140 | Date: 2021-07-16
141 |
142 | ## Status
143 |
144 | Accepted
145 |
146 | ## Context
147 |
148 | We don't have any customizations to the csw harvesting capability, so we no longer need to test our unique cases.
149 |
150 | ## Decision
151 |
152 | Remove [tests](https://github.com/GSA/ckanext-geodatagov/pull/190/commits/18927273785a8b2f06939c259f909c0d1ae36faf).
153 |
154 | ## Consequences
155 |
156 | - ckanext-spatial or ckanext-harvester are not testing csw harvesting, so there are missing tests overall.
157 |
158 |
159 | # 6. Rewrite source form test
160 |
161 | Date: 2021-07-19
162 |
163 | ## Status
164 |
165 | Unreviewed
166 |
167 | ## Context
168 |
169 | The CKAN test suite no longer supports forms in web pages; so custom parsing needs to be done to extract form options and data. The new tests leverage [this](https://docs.python.org/3/library/html.parser.html). The CKAN test suite changed the return type of the test app from [2.8](https://github.com/ckan/ckan/blob/2.8/ckan/tests/helpers.py#L147-L159) to [2.9](https://github.com/ckan/ckan/blob/2.9/ckan/tests/helpers.py#L194-L240).
170 |
171 | ## Decision
172 |
173 | Write [custom test functions](https://github.com/GSA/ckanext-geodatagov/pull/190/commits/18927273785a8b2f06939c259f909c0d1ae36faf).
174 |
175 | ## Consequences
176 |
177 | - ckanext-spatial or ckanext-harvester are not testing csw harvesting, so there are missing tests overall.
178 |
179 |
180 | # 7. Remove test_source_form test
181 |
182 | Date: 2022-12-12
183 |
184 | ## Status
185 |
186 | Unreviewed
187 |
188 | ## Context
189 |
190 | The test was trying to create a harvest source with a post request to `/harvest/new`; however, we suspect something in ckanext-harvest changed and broke this functionality. Since we are doing harvest tests in catalog.data.gov, we thought it was acceptable to remove this test altogether.
191 |
192 | ## Decision
193 |
194 | Remove test
195 |
196 | ## Consequences
197 |
198 | - Less tests?
199 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/validation/xml/fgdc-std-012-2002/fgdc-std-012-2002-sect05.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 | Function converting set of values on one scale to another.
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 | A function in successive powers of the independent variable, or the ratio of such functions, used in a transformation, one example of which is scaling, derivation of a set of values on one scale or coordinate system from the value in another, in the sense derived value = polynomial (initial value).
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 | The polynomial function when not a ratio, and the dividend of the ratio when it is.
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 | The number of nonzero terms in the numerator of the polynomial.
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 | The divisor of a polynomial function that is a ratio. (<i>Note: if absent, assumed equal to 1.</i>)
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 | The number of nonzero terms in the denominator of the polynomial.
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 | The coefficient of one term in the numerator or denominator of a polynomial function.(<i>Note: For a polynomial numerator or denominator of order m, there will be m+1 coefficients. Any of these coefficients, except that of the m power term, may be zero. When the function is linear, the coefficient of the zero-power term is the offset and the coefficient of the first power term is the scale factor.</i>)
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 | Text description of the function used to derive a set of values on one scale from their value in another, using a function that is not a polynomial.
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/validation/xml/fgdc-std-001.2-2001/fgdc-std-001.2-2001.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 | Federal Geographic Data Committee's Shoreline Metadata Profile of the Content Standard for Digital Geospatial Metadata (FGDC-STD-001.2-2001), June 2001 version of the standard. Data about the content, quality, condition, and other characteristics of data.
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/validation/xml/fgdc-std-012-2002/fgdc-std-012-2002-locainfo.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Information about the location of a set of one or more points.(<i>Note: this section provides a means of describing position in a coordinate system relevant to the calling element and is used by other sections of the metadata extensions. This section is never used alone. It differs from the Spatial Reference Information in that it provides positions in a coordinate system relevant to metadata elements, whereas the Spatial Reference Information refers only to positions at which the data are located.</i>)
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 | Number of coordinate positions.
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 | Definition of axes of coordinate system in which location of positions is provided.
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 | Coordinate system which is not georeferenced and for which georeferencing information is unavailable or irrelevant.
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 | Coordinate system that can be georeferenced.
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 | Physical dimension corresponding to value of unity in x and y coordinate directions as defined in Coordinate System or referencing element, where the coordinates correspond to physical dimensions.
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 | Physical dimension corresponding to value of unity in z coordinate directions Coordinate System or referencing element, where the coordinates correspond to physical dimensions.
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 | Location of a coordinate point described by the referencing element.
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 | Location of point along x-axis.
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 | Location of point along y-axis.
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 | Location of point along z-axis.
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/tests/test_relink.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import datetime
4 | import pytest
5 |
6 | from ckan.common import config
7 | from ckan.lib.search.common import make_connection
8 | import ckan.model as model
9 | from ckanext.geodatagov.rebuild import rebuild
10 | from ckan.tests import factories
11 | from click.testing import CliRunner
12 | from ckanext.harvest.model import HarvestObject
13 | from ckanext.harvest.tests import factories as harvest_factories
14 | from ckanext.harvest.logic import HarvestJobExists
15 |
16 | import ckanext.geodatagov.cli as cli
17 |
18 |
19 | log = logging.getLogger(__name__)
20 |
21 |
class TestRelink(object):
    """Tests for the ``harvest_object_relink`` CLI command.

    ``setup_class`` builds two harvest sources with one dataset each.  Every
    dataset gets an older, non-current harvest object (``hoid1``) and a
    current one (``hoid2``); after indexing, all harvest objects are flipped
    to ``current=False`` (with ``hoid1`` carrying the newer
    ``import_finished``) so the relink command has work to do.

    NOTE(review): relies on ``create_harvest_job``, which is defined
    elsewhere in this module (outside this excerpt) — verify it returns a
    HarvestJob for the given source.
    """

    @classmethod
    def setup_class(cls):
        organization = factories.Organization()
        # create two harvest sources
        cls.source1 = harvest_factories.HarvestSourceObj(
            url="http://test1",
            name="test-ho-id1",
            title="Test relink 1",
            source_type="ckan",
            frequency="MANUAL"
        )
        cls.source2 = harvest_factories.HarvestSourceObj(
            url="http://test2",
            name="test-ho-id2",
            title="Test relink 2",
            source_type="ckan",
            frequency="MANUAL"
        )

        # dataset 1 is for source 1
        cls.dataset1 = factories.Dataset(owner_org=organization["id"])
        # with false hoid1 and true hoid2
        cls.dataset1_hoid1 = HarvestObject(
            package_id=cls.dataset1['id'],
            job=create_harvest_job(cls.source1),
            import_finished=datetime.datetime.utcnow(),
            state='COMPLETE',
            report_status='',
            current=False
        )
        # NOTE(review): this harvest object's job comes from source2 —
        # presumably deliberate, so that relinking source1 only promotes
        # hoid1; confirm against the relink command's source filtering.
        cls.dataset1_hoid2 = HarvestObject(
            package_id=cls.dataset1['id'],
            job=create_harvest_job(cls.source2),
            import_finished=datetime.datetime.utcnow(),
            state='COMPLETE',
            current=True
        )
        cls.dataset1_hoid1.save()
        cls.dataset1_hoid2.save()

        # dataset 2 is for source 2
        cls.dataset2 = factories.Dataset(owner_org=organization["id"])
        # with false hoid1 and true hoid2
        cls.dataset2_hoid1 = HarvestObject(
            package_id=cls.dataset2['id'],
            job=create_harvest_job(cls.source2),
            import_finished=datetime.datetime.utcnow(),
            state='COMPLETE',
            report_status='',
            current=False
        )
        cls.dataset2_hoid2 = HarvestObject(
            package_id=cls.dataset2['id'],
            job=create_harvest_job(cls.source2),
            import_finished=datetime.datetime.utcnow(),
            state='COMPLETE',
            current=True
        )
        cls.dataset2_hoid1.save()
        cls.dataset2_hoid2.save()

        # Reindex so Solr reflects the harvest objects created above.
        rebuild()

        # check solr is using the current=True harvest object hoid2
        assert get_solr_hoid(cls.dataset1['id']) == cls.dataset1_hoid2.id
        assert get_solr_hoid(cls.dataset2['id']) == cls.dataset2_hoid2.id

        # make all harvest objects current=False, but hoid1 with newer import_finished
        cls.dataset1_hoid1.current = False
        cls.dataset1_hoid1.import_finished = datetime.datetime.utcnow()
        cls.dataset1_hoid1.save()
        cls.dataset1_hoid2.current = False
        cls.dataset1_hoid2.save()

        cls.dataset2_hoid1.current = False
        cls.dataset2_hoid1.import_finished = datetime.datetime.utcnow()
        cls.dataset2_hoid1.save()
        cls.dataset2_hoid2.current = False
        cls.dataset2_hoid2.save()

    @pytest.fixture
    def cli_result_source1(self):
        """Invoke the relink command restricted to source1; return the result."""
        runner = CliRunner()
        raw_cli_output = runner.invoke(
            cli.harvest_object_relink,
            args=[self.source1.id],
        )

        return raw_cli_output

    @pytest.fixture
    def cli_result_all(self):
        """Invoke the relink command with no source filter; return the result."""
        runner = CliRunner()
        raw_cli_output = runner.invoke(
            cli.harvest_object_relink,
            args=[],
        )

        return raw_cli_output

    @pytest.mark.order1
    def test_relink_source1(self, cli_result_source1):
        """run harvest_object_relink and analyze results"""
        # check successful cli run
        assert cli_result_source1.exit_code == 0

        # check harvest object with newer import_finished is now current
        assert get_hoid_current(self.dataset1_hoid1.id) is True
        assert get_hoid_current(self.dataset1_hoid2.id) is False

        # check that solr has current harvest object for source1 dataset
        assert get_solr_hoid(self.dataset1['id']) == self.dataset1_hoid1.id

        # check that solr has not changed for source2 dataset
        assert get_solr_hoid(self.dataset2['id']) == self.dataset2_hoid2.id

    @pytest.mark.order2
    def test_relink_all(self, cli_result_all):
        """run harvest_object_relink and analyze results"""
        # check successful cli run
        assert cli_result_all.exit_code == 0

        # check harvest object with newer import_finished is now current
        assert get_hoid_current(self.dataset2_hoid1.id) is True
        assert get_hoid_current(self.dataset2_hoid2.id) is False

        # check that solr has current harvest object for both sources' datasets
        assert get_solr_hoid(self.dataset1['id']) == self.dataset1_hoid1.id
        assert get_solr_hoid(self.dataset2['id']) == self.dataset2_hoid1.id
153 |
154 |
def get_hoid_current(id):
    """
    Return the current value for a particular harvest object in DB.

    Returns None (instead of raising TypeError, as the previous
    `.first()[0]` did) when no harvest object with that id exists;
    for an existing row the returned value is unchanged.
    """
    # Query.scalar() yields the first column of the first row, or None
    # when the query matches nothing.
    return model.Session.query(
        HarvestObject.current).filter(HarvestObject.id == id).scalar()
161 |
162 |
def get_solr_hoid(id):
    """
    Return the harvest_object_id for a particular package id in Solr.

    Looks up the active Solr document for *id* on this site and extracts
    the "harvest_object_id" entry from its validated_data_dict extras.
    Returns None when the document or the extra is missing.
    """
    fq = '+site_id:"%s" +state:active +id:%s' % (config.get('ckan.site_id'), id)

    response = make_connection().search(
        "*:*", fq=fq, rows=10, fl='validated_data_dict')

    if not response.docs:
        return None

    data_dict = json.loads(response.docs[0].get("validated_data_dict"))
    # first matching extra wins, mirroring the original break-on-first loop
    return next(
        (extra["value"]
         for extra in data_dict.get("extras", [])
         if extra["key"] == "harvest_object_id"),
        None,
    )
184 |
185 |
def create_harvest_job(source):
    """Create (or reuse) a harvest job for *source*, save it and return it."""
    try:
        job = harvest_factories.HarvestJobObj(source=source)
    except HarvestJobExists:
        # a job already exists for this source; fall back to the first one
        job = source.get_jobs()[0]
    job.save()
    return job
198 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/validation/xml/fgdc-std-012-2002/fgdc-std-001-1998-sect05.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/validation/xml/fgdc-std-012-2002/fgdc-std-001-1998-sect09.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Information about the date and time of an event.
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 | Means of encoding a single date and time.
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 | The year (and optionally month, or month and day).
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 | The hour (and optionally minute, or minute and second) of the day.
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 | Means of encoding multiple individual dates and times.
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 | Means of encoding a range of dates and times.
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 | The first year (and optionally month, or month and day) of the event.
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 | The first hour (and optionally minute, or minute and second) of the day for the event.
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 | The last year (and optionally month, or month and day) for the event.
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 | The last hour (and optionally minute, or minute and second) of the day for the event.
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/tests/test_waf-collection.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import pytest
4 |
5 | from ckan import model
6 | from ckanext.geodatagov.harvesters.waf_collection import WAFCollectionHarvester
7 | from ckanext.spatial.validation import all_validators
8 | import ckanext.harvest.model as harvest_model
9 | from ckan.tests.factories import Organization
10 | from ckan.tests.helpers import call_action
11 |
12 | from factories import HarvestJobObj, WafCollectionHarvestSourceObj
13 | from utils import PORT, reset_db_and_solr
14 |
15 |
16 | log = logging.getLogger(__name__)
17 |
18 |
@pytest.mark.usefixtures("with_plugins")
class TestWafCollectionHarvester(object):
    """Integration tests for WAFCollectionHarvester.

    Harvests the waf-collection1/ and waf-collection2/ fixtures served by
    the local test web server on PORT. The run_* helpers drive the harvest
    pipeline one stage at a time (gather -> fetch -> import) and raise on
    the first error reported by a stage.
    """

    def setup_method(self):
        # start every test from a clean database and Solr index
        reset_db_and_solr()

        self.organization = Organization()

    def run_gather(self, url, source_config):
        """Run the gather stage for a WAF-collection source at *url*.

        *source_config* is a JSON string whose keys are expanded into the
        harvest-source factory as attributes. Returns the gathered harvest
        object ids (one per dataset), or None when the gather stage yields
        nothing. Raises Exception on the first gather error. Side effects:
        sets self.job, self.harvester and self.harvest_objects for the
        later run_fetch()/run_import() calls.
        """

        sc = json.loads(source_config)
        existing_profiles = [v.name for v in all_validators]
        log.info('Existing validator profiles: {}'.format(existing_profiles))
        source = WafCollectionHarvestSourceObj(url=url,
                                               owner_org=self.organization['id'],
                                               # config=source_config,
                                               **sc)
        self.job = HarvestJobObj(source=source)

        self.harvester = WAFCollectionHarvester()

        # gather stage
        log.info('GATHERING %s', url)
        obj_ids = self.harvester.gather_stage(self.job)
        log.info('job.gather_errors=%s', self.job.gather_errors)
        if len(self.job.gather_errors) > 0:
            raise Exception(self.job.gather_errors[0])

        log.info('obj_ids=%s', obj_ids)
        if obj_ids is None or len(obj_ids) == 0:
            # nothing to see
            return

        self.harvest_objects = []
        for obj_id in obj_ids:
            harvest_object = harvest_model.HarvestObject.get(obj_id)
            log.info('ho guid=%s', harvest_object.guid)
            log.info('ho content=%s', harvest_object.content)
            self.harvest_objects.append(harvest_object)

        # this is a list of harvestObjects IDs. One per dataset
        return obj_ids

    def run_fetch(self):
        """Run the fetch stage over every gathered harvest object.

        Raises Exception on the first fetch error.
        """
        # fetch stage
        for harvest_object in self.harvest_objects:
            log.info('FETCHING %s' % harvest_object.id)
            result = self.harvester.fetch_stage(harvest_object)

            log.info('ho errors=%s', harvest_object.errors)
            log.info('result 1=%s', result)
            if len(harvest_object.errors) > 0:
                raise Exception(harvest_object.errors[0])

    def run_import(self):
        """Run the import stage and return the created model.Package list.

        Raises Exception on the first import error.
        """
        # import stage
        datasets = []
        for harvest_object in self.harvest_objects:
            log.info('IMPORTING %s' % harvest_object.id)
            result = self.harvester.import_stage(harvest_object)

            log.info('ho errors 2=%s', harvest_object.errors)
            log.info('result 2=%s', result)
            if len(harvest_object.errors) > 0:
                raise Exception(harvest_object.errors[0])

            log.info('ho pkg id=%s', harvest_object.package_id)
            dataset = model.Package.get(harvest_object.package_id)
            datasets.append(dataset)
            log.info('dataset name=%s', dataset.name)

        return datasets

    def get_datasets_from_waf_collection1_sample(self):
        """Harvest the waf-collection1/ folder end-to-end (gather, fetch,
        import), mark the job finished, and return the imported datasets."""
        url = f'http://127.0.0.1:{PORT}/waf-collection1/index.html'

        collection_metadata = f"http://127.0.0.1:{PORT}/waf-collection1/cfg/SeriesCollection_tl_2013_county.shp.iso.xml"
        config = '{"collection_metadata_url": "%s", "validator_profiles": ["iso19139ngdc"], "private_datasets": false}' %\
            collection_metadata
        self.run_gather(url=url, source_config=config)
        self.run_fetch()
        datasets = self.run_import()
        self.job.status = 'Finished'
        self.job.save()

        return datasets

    def test_waf_collection1_datasets_count(self):
        """Harvest waf-collection1/ and check exactly one dataset is
        imported, with the expected generated name."""

        datasets = self.get_datasets_from_waf_collection1_sample()
        assert len(datasets) == 1
        dataset = datasets[0]
        assert dataset.name == 'tiger-line-shapefile-2013-nation-u-s-current-county-and-equivalent-national-shapefile'

    def test_waf_collection1_datasets_as_child(self):
        """Harvest waf-collection1/ and check the imported dataset is a
        "child": its rolled-up extras carry collection_package_id but not
        collection_metadata (which only the parent has)."""

        datasets = self.get_datasets_from_waf_collection1_sample()
        dataset = datasets[0]

        # harvested extras are stored JSON-encoded under extras_rollup
        extras = json.loads(dataset.extras['extras_rollup'])
        print(f'extras: {extras}')
        keys = [key for key in list(extras.keys())]
        assert 'collection_package_id' in keys
        assert 'collection_metadata' not in keys

    def test_waf_collection1_parent_exists(self):
        """Harvest waf-collection1/ and check the parent dataset referenced
        by collection_package_id exists and carries the
        collection_metadata=true extra."""

        datasets = self.get_datasets_from_waf_collection1_sample()
        dataset = datasets[0]
        extras = json.loads(dataset.extras['extras_rollup'])

        parent = call_action('package_show', context={'user': 'dummy'}, id=extras['collection_package_id'])
        parent_keys = [extra['key'] for extra in parent['extras']]
        assert 'collection_metadata' in parent_keys
        assert 'true' == [extra['value'] for extra in parent['extras'] if extra['key'] == 'collection_metadata'][0]

    def test_waf_collection1_parent_title(self):
        """Harvest waf-collection1/ and check the parent dataset has the
        expected title and (truncated) name."""

        datasets = self.get_datasets_from_waf_collection1_sample()
        dataset = datasets[0]
        extras = json.loads(dataset.extras['extras_rollup'])

        parent = call_action('package_show', context={'user': 'dummy'}, id=extras['collection_package_id'])

        assert parent['title'] == ('TIGER/Line Shapefile, 2013, '
                                   'Series Information File for the Current county and Equivalent National Shapefile')
        assert parent['name'] == ('tiger-line-shapefile-2013-'
                                  'series-information-file-for-the-current-county-and-equivalent-nationa')

    def test_waf_collection_transformation_failed(self):
        """Harvest waf-collection2/ (ISO 19110 content) and check the
        import stage fails with a transformation error."""
        url = f'http://127.0.0.1:{PORT}/waf-collection2/index.html'

        collection_metadata = f"http://127.0.0.1:{PORT}/waf-collection2/cfg/SeriesCollection_tl_2013_county.shp.iso.xml"
        config = '{"collection_metadata_url": "%s", "validator_profiles": ["iso19139ngdc"], "private_datasets": false}' %\
            collection_metadata
        self.run_gather(url=url, source_config=config)

        self.run_fetch()

        # we don't manage IS0 19110
        with pytest.raises(Exception) as e:
            self.run_import()
        assert 'Transformation to ISO failed' in str(e.value)
172 |
--------------------------------------------------------------------------------
/ckanext/geodatagov/validation/xml/fgdc-std-001.2-2001/fgdc-std-001.2-2001-sect09.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Information about the date and time of an event.
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 | Means of encoding a single date and time.
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 | The year (and optionally month, or month and day).
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 | The hour and minute, and (optionally second) of the day.
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 | Means of encoding multiple individual dates and times.
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 | Means of encoding a range of dates and times.
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 | The first year (and optionally month, or month and day) of the event.
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 | The first hour and minute, or (optionally second) of the day for the event.
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 | The last year (and optionally month, or month and day) for the event.
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 | The last hour and minute, or (optionally second) of the day for the event.
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
--------------------------------------------------------------------------------