├── .gitignore
├── dataqs
├── cmap
│ ├── __init__.py
│ ├── resources
│ │ ├── cmap.nc
│ │ └── cmap.sld
│ ├── tasks.py
│ └── tests.py
├── gfms
│ ├── __init__.py
│ ├── resources
│ │ ├── test_gfms.zip
│ │ └── gfms.sld
│ ├── tasks.py
│ └── tests.py
├── spei
│ ├── __init__.py
│ ├── resources
│ │ ├── test_spei.zip
│ │ └── spei.sld
│ ├── tasks.py
│ ├── tests.py
│ └── spei.py
├── wqp
│ ├── __init__.py
│ ├── resources
│ │ ├── test_wqp_ph.rss
│ │ ├── test_wqp_ph.zip
│ │ ├── create_table.sql
│ │ └── wqp_api_ph_map.sld
│ ├── tasks.py
│ └── tests.py
├── airnow
│ ├── __init__.py
│ ├── resources
│ │ ├── test_airnow.zip
│ │ └── airnow.sld
│ └── tasks.py
├── aqicn
│ ├── __init__.py
│ ├── tasks.py
│ ├── tests.py
│ └── resources
│ │ └── aqicn.sld
├── gdacs
│ ├── __init__.py
│ ├── tasks.py
│ ├── resources
│ │ └── gdacs.sld
│ ├── tests.py
│ └── gdacs.py
├── gistemp
│ ├── __init__.py
│ ├── resources
│ │ ├── gistemp1200_ERSSTv4.nc
│ │ └── gistemp.sld
│ ├── tasks.py
│ ├── tests.py
│ └── gistemp.py
├── hadghcnd
│ ├── __init__.py
│ ├── resources
│ │ ├── HadGHCND_TXTN_anoms_1950-1960_15052015.nc
│ │ ├── HadGHCND_temperatures.sld
│ │ └── HadGHCND_anomalies.sld
│ ├── tasks.py
│ └── tests.py
├── hifld
│ ├── __init__.py
│ ├── resources
│ │ ├── line.sld
│ │ ├── polygon.sld
│ │ └── point.sld
│ └── tasks.py
├── landscan
│ ├── __init__.py
│ ├── tasks.py
│ ├── resources
│ │ └── landscan.sld
│ └── landscan.py
├── nasa_gpm
│ ├── __init__.py
│ ├── resources
│ │ ├── test_gpm.zip
│ │ └── gpm.sld
│ ├── tasks.py
│ └── tests.py
├── udatp
│ ├── __init__.py
│ ├── resources
│ │ ├── uodtest.nc
│ │ ├── uodtest.tif
│ │ ├── uod_air_mean_401.sld
│ │ └── uod_precip_total_401.sld
│ ├── tasks.py
│ └── tests.py
├── worldclim
│ ├── __init__.py
│ ├── tasks.py
│ └── resources
│ │ ├── worldclim_isotherm.sld
│ │ ├── worldclim_diurnal.sld
│ │ ├── worldclim_temp.sld
│ │ ├── worldclim_precip.sld
│ │ ├── worldclim_precip_seasonality.sld
│ │ ├── worldclim_temp_seasonality.sld
│ │ └── worldclim_precip_annual.sld
├── forecastio
│ ├── __init__.py
│ ├── resources
│ │ ├── test_forecastio.zip
│ │ └── forecastio.sld
│ ├── tasks.py
│ └── tests.py
├── usgs_quakes
│ ├── __init__.py
│ ├── tasks.py
│ ├── tests.py
│ └── resources
│ │ └── test_quakes.json
├── __init__.py
├── mmwr
│ ├── __init__.py
│ ├── tasks.py
│ ├── tests.py
│ └── resources
│ │ └── mmwr.sld
├── whisp
│ ├── __init__.py
│ ├── resources
│ │ ├── whispers_archive.zip
│ │ └── whisp.sld
│ ├── tasks.py
│ └── tests.py
└── csv_helpers.py
├── ansible
├── .gitignore
├── roles
│ ├── geonode
│ │ ├── .gitignore
│ │ └── templates
│ │ │ ├── create_db_store.py.j2
│ │ │ ├── create_django_admin.py.j2
│ │ │ └── local_settings.py.j2
│ ├── .gitignore
│ ├── geoserver
│ │ ├── templates
│ │ │ ├── setenv.sh
│ │ │ └── tomcat7
│ │ ├── handlers
│ │ │ └── main.yml
│ │ └── tasks
│ │ │ └── main.yml
│ ├── nginx
│ │ ├── handlers
│ │ │ └── main.yml
│ │ ├── tasks
│ │ │ └── main.yml
│ │ └── templates
│ │ │ └── nginx.conf.j2
│ ├── uwsgi
│ │ ├── handlers
│ │ │ └── main.yml
│ │ ├── templates
│ │ │ ├── uwsgi.conf.j2
│ │ │ └── vassals-default.skel.j2
│ │ └── tasks
│ │ │ └── main.yml
│ ├── postgres_postgis
│ │ ├── handlers
│ │ │ └── main.yml
│ │ ├── templates
│ │ │ └── pg_hba.conf
│ │ └── tasks
│ │ │ └── main.yml
│ ├── common
│ │ └── tasks
│ │ │ └── main.yml
│ ├── dataqs
│ │ ├── tasks
│ │ │ ├── geoserver_permissions.yml
│ │ │ └── main.yml
│ │ ├── handlers
│ │ │ └── main.yml
│ │ ├── templates
│ │ │ ├── datastore.properties
│ │ │ ├── coverage.json.j2
│ │ │ └── celery.conf
│ │ └── files
│ │ │ └── geonode_patch.diff
│ └── oracle_java
│ │ └── tasks
│ │ └── main.yml
├── requirements.txt
├── inventory
│ └── localhost
├── ansible.cfg
├── playbook.yml
├── Vagrantfile
├── group_vars
│ └── all
└── README.md
├── dev-requirements.txt
├── local_settings.py.template
├── flake8.cfg
├── license.txt
├── .travis.yml
├── setup.py
└── README.rst
/.gitignore:
--------------------------------------------------------------------------------
1 | .vagrant
2 |
--------------------------------------------------------------------------------
/dataqs/cmap/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/gfms/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/spei/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/wqp/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/airnow/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/aqicn/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/gdacs/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/gistemp/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/hadghcnd/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/hifld/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/landscan/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/nasa_gpm/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/udatp/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/worldclim/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ansible/.gitignore:
--------------------------------------------------------------------------------
1 | *.retry
2 |
--------------------------------------------------------------------------------
/dataqs/forecastio/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/usgs_quakes/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/wqp/resources/test_wqp_ph.rss:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dataqs/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = 'mbertrand'
2 |
--------------------------------------------------------------------------------
/dataqs/mmwr/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = 'mbertrand'
2 |
--------------------------------------------------------------------------------
/dataqs/whisp/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = 'mbertrand'
2 |
--------------------------------------------------------------------------------
/ansible/roles/geonode/.gitignore:
--------------------------------------------------------------------------------
1 | group_vars/all/secret
2 |
--------------------------------------------------------------------------------
/dev-requirements.txt:
--------------------------------------------------------------------------------
1 | mock==1.3.0
2 | httpretty==0.8.14
3 |
--------------------------------------------------------------------------------
/ansible/requirements.txt:
--------------------------------------------------------------------------------
1 | zenoamaro.supervisord
2 | geerlingguy.redis
3 |
--------------------------------------------------------------------------------
/ansible/roles/.gitignore:
--------------------------------------------------------------------------------
1 | geerlingguy.redis
2 | zenoamaro.supervisord
3 |
--------------------------------------------------------------------------------
/ansible/inventory/localhost:
--------------------------------------------------------------------------------
1 | [geoservices]
2 | localhost ansible_connection=local
3 |
--------------------------------------------------------------------------------
/ansible/roles/geoserver/templates/setenv.sh:
--------------------------------------------------------------------------------
1 | CATALINA_OPTS="$CATALINA_OPTS -Xms4096m -Xmx4096m -XX:MaxPermSize=512m"
--------------------------------------------------------------------------------
/dataqs/cmap/resources/cmap.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/cmap/resources/cmap.nc
--------------------------------------------------------------------------------
/dataqs/udatp/resources/uodtest.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/udatp/resources/uodtest.nc
--------------------------------------------------------------------------------
/ansible/roles/geoserver/handlers/main.yml:
--------------------------------------------------------------------------------
1 | - name: restart tomcat
2 | service: name=tomcat7 state=restarted
3 | sudo: yes
4 |
--------------------------------------------------------------------------------
/dataqs/gfms/resources/test_gfms.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/gfms/resources/test_gfms.zip
--------------------------------------------------------------------------------
/dataqs/spei/resources/test_spei.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/spei/resources/test_spei.zip
--------------------------------------------------------------------------------
/dataqs/udatp/resources/uodtest.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/udatp/resources/uodtest.tif
--------------------------------------------------------------------------------
/dataqs/nasa_gpm/resources/test_gpm.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/nasa_gpm/resources/test_gpm.zip
--------------------------------------------------------------------------------
/dataqs/wqp/resources/test_wqp_ph.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/wqp/resources/test_wqp_ph.zip
--------------------------------------------------------------------------------
/ansible/roles/nginx/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: restart nginx
4 | service: name=nginx state=restarted
5 | sudo: yes
6 |
--------------------------------------------------------------------------------
/ansible/roles/uwsgi/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: restart uwsgi
4 | service: name=uwsgi state=restarted
5 | sudo: yes
6 |
--------------------------------------------------------------------------------
/dataqs/airnow/resources/test_airnow.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/airnow/resources/test_airnow.zip
--------------------------------------------------------------------------------
/dataqs/whisp/resources/whispers_archive.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/whisp/resources/whispers_archive.zip
--------------------------------------------------------------------------------
/local_settings.py.template:
--------------------------------------------------------------------------------
1 | from django.conf import settings
2 |
3 | INSTALLED_APPS = settings.INSTALLED_APPS + (
4 | 'dataqs',
5 | )
6 |
--------------------------------------------------------------------------------
/dataqs/forecastio/resources/test_forecastio.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/forecastio/resources/test_forecastio.zip
--------------------------------------------------------------------------------
/dataqs/gistemp/resources/gistemp1200_ERSSTv4.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/gistemp/resources/gistemp1200_ERSSTv4.nc
--------------------------------------------------------------------------------
/ansible/roles/postgres_postgis/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: restart postgres_postgis
4 | service: name=postgresql state=restarted
5 | sudo: yes
6 |
--------------------------------------------------------------------------------
/ansible/ansible.cfg:
--------------------------------------------------------------------------------
1 | [defaults]
2 | forks=1
3 | host_key_checking=False
4 | record_host_keys=False
5 | ssh.insert_key=False
6 | roles_path=roles
7 | ask_sudo_pass=True
8 |
--------------------------------------------------------------------------------
/dataqs/hadghcnd/resources/HadGHCND_TXTN_anoms_1950-1960_15052015.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenGeoscience/dataqs/HEAD/dataqs/hadghcnd/resources/HadGHCND_TXTN_anoms_1950-1960_15052015.nc
--------------------------------------------------------------------------------
/ansible/roles/common/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: get UbuntuGIS repository
4 | apt_repository: repo='ppa:ubuntugis/ppa'
5 | sudo: yes
6 |
7 | - name: ensure apt cache is up to date
8 | apt: update_cache=yes
9 | sudo: yes
10 |
--------------------------------------------------------------------------------
/ansible/roles/dataqs/tasks/geoserver_permissions.yml:
--------------------------------------------------------------------------------
1 | - name: reset geoserver owner
2 | command: chown -R tomcat7:vagrant '/data/geodata'
3 | sudo: yes
4 |
5 | - name: reset geoserver permissions
6 | command: chmod -R g+rws '/data/geodata/'
7 | sudo: yes
8 |
--------------------------------------------------------------------------------
/dataqs/cmap/tasks.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from celery import shared_task
4 | from dataqs.cmap.cmap import CMAPProcessor
5 |
6 |
7 | @shared_task
8 | def cmap_task():
9 | processor = CMAPProcessor()
10 | processor.run()
11 |
--------------------------------------------------------------------------------
/ansible/roles/uwsgi/templates/uwsgi.conf.j2:
--------------------------------------------------------------------------------
1 | # Emperor uWSGI script
2 |
3 | description "uWSGI Emperor"
4 | start on runlevel [2345]
5 | stop on runlevel [06]
6 |
7 | respawn
8 |
9 | exec uwsgi --master --die-on-term --emperor /etc/uwsgi --uid www-data --gid www-data --logto /var/log/uwsgi/emperor.log
10 |
--------------------------------------------------------------------------------
/flake8.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length: 80
3 | show-pep8: False
4 | show-source: True
5 | max-complexity: 20
6 | format: pylint
7 | ignore: D100,D101,D102,D103,D104,D105,D200,D201,D202,D203,D204,D205,D208,D209,D300,D400,D401,D402,E123,E226,E241,E402,N802,N803,N806,N812
8 | exclude: __init__.py, ogr2ogr.py, docs
9 |
--------------------------------------------------------------------------------
/ansible/roles/dataqs/handlers/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: restart supervisor for celery
3 | command: service supervisor restart
4 | sudo: yes
5 |
6 | - name: reset geoserver permissions
7 | file: path=/var/lib/tomcat7/webapps/geoserver/data recurse=yes owner=tomcat7 group=tomcat7 state=directory mode=g+rws
8 | sudo: yes
9 |
--------------------------------------------------------------------------------
/ansible/playbook.yml:
--------------------------------------------------------------------------------
1 | - hosts: geoservices
2 | remote_user: "{{ deploy_user }}"
3 | roles:
4 | - common
5 | - {role: geerlingguy.redis, sudo: yes}
6 | - zenoamaro.supervisord
7 | - postgres_postgis
8 | - oracle_java
9 | - geoserver
10 | - nginx
11 | - uwsgi
12 | - geonode
13 | - dataqs
14 |
--------------------------------------------------------------------------------
/ansible/roles/dataqs/templates/datastore.properties:
--------------------------------------------------------------------------------
1 | SPI=org.geotools.data.postgis.PostgisNGDataStoreFactory
2 | host=localhost
3 | port=5432
4 | database={{db_data_instance}}
5 | schema=public
6 | user={{db_user}}
7 | passwd={{db_password}}
8 | Loose\ bbox=true
9 | Estimated\ extends=false
10 | validate\ connections=true
11 | Connection\ timeout=10
12 | preparedStatements=true
13 |
--------------------------------------------------------------------------------
/ansible/roles/postgres_postgis/templates/pg_hba.conf:
--------------------------------------------------------------------------------
1 | local all postgres trust
2 | local all geonode trust
3 | local all all trust
4 | host all all 127.0.0.1/32 md5
5 | host all all ::1/128 md5
6 |
--------------------------------------------------------------------------------
/ansible/roles/geonode/templates/create_db_store.py.j2:
--------------------------------------------------------------------------------
1 | from geoserver.catalog import Catalog
2 |
3 | cat = Catalog('http://localhost:8080/geoserver/rest')
4 | ds = cat.create_datastore('{{ app_name }}','geonode')
5 | ds.connection_parameters.update(host='localhost', port='5432', database='{{ db_data_instance }}', user='{{ db_user }}', passwd='{{ db_password }}', dbtype='postgis', schema='public')
6 | cat.save(ds)
7 |
--------------------------------------------------------------------------------
/ansible/roles/geonode/templates/create_django_admin.py.j2:
--------------------------------------------------------------------------------
1 | import os
2 | os.environ['DJANGO_SETTINGS_MODULE'] = '{{ app_name }}.settings'
3 | from django.contrib.auth import get_user_model
4 |
5 | User = get_user_model()
6 | for user in User.objects.filter(username='{{ geonode_admin_user }}'):
7 | user.delete()
8 | User.objects.create_superuser('{{ geonode_admin_user }}', '{{ geonode_admin_email }}', '{{ geonode_admin_password }}')
9 |
--------------------------------------------------------------------------------
/ansible/roles/nginx/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ###
2 | # Installs nginx
3 | #
4 | ---
5 |
6 | - name: ensure apt cache is up to date
7 | apt: update_cache=yes
8 | sudo: yes
9 |
10 | - name: ensure nginx is installed
11 | apt: name=nginx-extras
12 | sudo: yes
13 |
14 | - name: write nginx configuration for multiple subdomains
15 | template: src=nginx.conf.j2 dest=/etc/nginx/sites-available/default
16 | sudo: yes
17 | notify:
18 | - restart nginx
19 |
--------------------------------------------------------------------------------
/ansible/roles/dataqs/templates/coverage.json.j2:
--------------------------------------------------------------------------------
1 | {
2 | "coverage": {
3 | "enabled": true,
4 | "metadata": {
5 | "entry": [
6 | {
7 | "@key": "time",
8 | "dimensionInfo": {
9 | "defaultValue": "",
10 | "enabled": true,
11 | "presentation": "LIST",
12 | "units": "ISO8601"
13 | }
14 | }
15 | ]
16 | }
17 | }
18 | }
19 |
20 |
--------------------------------------------------------------------------------
/license.txt:
--------------------------------------------------------------------------------
1 | Copyright 2015 OpenGeoScience
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
--------------------------------------------------------------------------------
/ansible/roles/oracle_java/tasks/main.yml:
--------------------------------------------------------------------------------
1 | - name: Install add-apt-repository
2 | sudo: yes
3 | apt: name=software-properties-common state=latest
4 |
5 | - name: Add Oracle Java Repository
6 | sudo: yes
7 | apt_repository: repo='ppa:webupd8team/java'
8 |
9 | - name: Accept Java 8 License
10 | sudo: yes
11 | debconf: name='oracle-java8-installer' question='shared/accepted-oracle-license-v1-1' value='true' vtype='select'
12 |
13 | - name: Install Oracle Java 8
14 | sudo: yes
15 | apt: name={{item}} state=latest
16 | with_items:
17 | - oracle-java8-installer
18 | - ca-certificates
19 | - oracle-java8-set-default
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 |
3 | sudo: required
4 | dist: trusty
5 |
6 | python:
7 | - "2.7"
8 |
9 | before_install:
10 | - sudo add-apt-repository ppa:ubuntugis/ppa -y
11 | - sudo apt-get -qq update
12 | - sudo apt-get install python-dev libgdal1-dev gdal-bin
13 |
14 | install:
15 | - export CPLUS_INCLUDE_PATH=/usr/include/gdal
16 | - export C_INCLUDE_PATH=/usr/include/gdal
17 | - pip install -r dev-requirements.txt
18 | - pip install -e .
19 | - git clone -b 2.4.x https://github.com/GeoNode/geonode.git
20 | - cp local_settings.py.template geonode/geonode/local_settings.py
21 | - pip install -e geonode
22 |
23 | script:
24 | - flake8 --config flake8.cfg dataqs
25 | - pushd geonode
26 | - python manage.py test dataqs
27 | - popd
28 |
--------------------------------------------------------------------------------
/ansible/roles/uwsgi/templates/vassals-default.skel.j2:
--------------------------------------------------------------------------------
1 | [uwsgi]
2 | ;unix socket (referenced in nginx configuration)
3 | socket = /tmp/%n.sock
4 |
5 | # set mode of created UNIX socket
6 | chmod-socket = 666
7 |
8 | # place timestamps into log
9 | log-date = true
10 |
11 | vacuum = true
12 |
13 | ; project-level logging to the logs/ folder
14 | # TODO
15 | # This option does not work for some reason, as it does
16 | # not create a file
17 | #logto = /var/log/nginx/uwsgi-%n.log
18 |
19 | chdir = {{app_code_dir}}/%n
20 | module = django.core.handlers.wsgi:WSGIHandler()
21 |
22 | ;enable-threads = true
23 |
24 | virtualenv = {{virtualenv_dir}}/{{app_name}}
25 | vacuum = true
26 | env = DJANGO_SETTINGS_MODULE=%n.settings
27 | pidfile = /tmp/%n.pid
28 | ;harakiri = 20 # respawn processes taking more than 20 seconds
29 | max-requests = 5000 # respawn processes after serving 5000 requests
30 |
--------------------------------------------------------------------------------
/dataqs/hifld/resources/line.sld:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 | {table}
10 |
11 | {title}
12 |
13 |
14 | {table}
15 | {title}
16 |
17 |
18 | #0000FF
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/ansible/roles/uwsgi/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ###
2 | # Install uwsgi from pypi and configure it with upstart in emperor mode.
3 | #
4 | ---
5 |
6 | - name: ensure web server packages are installed
7 | apt: name={{item}}
8 | sudo: yes
9 | with_items:
10 | - python-dev
11 | - python-pip
12 |
13 | - name: add uwsgi from pypi
14 | pip: name=uwsgi
15 | sudo: yes
16 |
17 | - name: create /etc/uwsgi to place apps
18 | file: path=/etc/uwsgi owner=www-data group=www-data state=directory mode=0755
19 | sudo: yes
20 |
21 | - name: write default vassals configuration file
22 | template: src=vassals-default.skel.j2 dest=/etc/uwsgi/vassals-default.skel
23 | sudo: yes
24 |
25 | - name: make the uwsgi plugins dir
26 | command: mkdir -p /usr/lib/uwsgi/plugins
27 | sudo: yes
28 |
29 | - name: write uwsgi configuration for upstart in emperor mode
30 | template: src=uwsgi.conf.j2 dest=/etc/init/uwsgi.conf
31 | sudo: yes
32 | notify:
33 | - restart uwsgi
34 |
--------------------------------------------------------------------------------
/dataqs/hifld/resources/polygon.sld:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 | {title}
10 |
11 | {table}
12 | {title}
13 |
14 | name
15 |
16 | {table}
17 | {title}
18 |
19 |
20 | #AAAAAA
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/ansible/Vagrantfile:
--------------------------------------------------------------------------------
1 | # -*- mode: ruby -*-
2 | # vi: set ft=ruby :
3 | Vagrant.configure("2") do |config|
4 | config.vm.box = "trusty64server"
5 | config.vm.box_url = "https://oss-binaries.phusionpassenger.com/vagrant/boxes/latest/ubuntu-14.04-amd64-vbox.box"
6 | config.ssh.username = 'vagrant'
7 | config.ssh.forward_agent = true
8 |
9 | config.vm.define :geoservices do |geoservices|
10 | geoservices.vm.network :public_network, :bridge => 'eth0', :auto_config => false
11 | # Using private network fixes the thumbnail issue
12 | # Ref: https://github.com/Kitware/minerva/pull/98#issuecomment-139023062
13 | config.vm.network "private_network", ip: "192.168.33.12"
14 | geoservices.vm.provider :virtualbox do |vb|
15 | vb.customize [ "modifyvm", :id, "--name", "geoservices","--memory", 10000 ]
16 | end
17 | geoservices.vm.provision "ansible" do |ansible|
18 | ansible.playbook = "playbook.yml"
19 | ansible.galaxy_role_file = "requirements.txt"
20 | ansible.verbose = "vv"
21 | ansible.host_key_checking = false
22 | end
23 | end
24 | end
25 |
--------------------------------------------------------------------------------
/dataqs/spei/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.spei.spei import SPEIProcessor
23 |
24 |
25 | @shared_task
26 | def spei_task():
27 | processor = SPEIProcessor()
28 | processor.run()
29 |
--------------------------------------------------------------------------------
/dataqs/gfms/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 |
22 | from celery import shared_task
23 | from dataqs.gfms.gfms import GFMSProcessor
24 |
25 |
26 | @shared_task
27 | def gfms_task():
28 | processor = GFMSProcessor()
29 | processor.run()
30 |
--------------------------------------------------------------------------------
/dataqs/hifld/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.hifld.hifld import HIFLDProcessor
23 |
24 |
25 | @shared_task
26 | def hifld_task():
27 | processor = HIFLDProcessor()
28 | processor.run()
29 |
--------------------------------------------------------------------------------
/dataqs/whisp/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.whisp.whisp import WhispProcessor
23 |
24 |
@shared_task
def whisp_task():
    """Celery task: run the Whisp processor.

    Renamed from ``wqp_task``, which was a copy-paste leftover from
    dataqs/wqp/tasks.py and duplicated the real WQP task's name.
    NOTE(review): the Celery-registered task name changes from
    ``dataqs.whisp.tasks.wqp_task`` to ``...whisp_task`` — confirm no
    beat schedule references the old string name.
    """
    processor = WhispProcessor()
    processor.run()


# Backward-compatible alias so existing Python imports of ``wqp_task``
# from this module keep working.
wqp_task = whisp_task
29 |
--------------------------------------------------------------------------------
/dataqs/gdacs/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 |
22 | from celery import shared_task
23 | from dataqs.gdacs.gdacs import GDACSProcessor
24 |
25 |
@shared_task
def gdacs_task():
    """Kick off the GDACS processor from Celery."""
    runner = GDACSProcessor()
    runner.run()
30 |
--------------------------------------------------------------------------------
/dataqs/mmwr/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.mmwr.mmwr import MortalityProcessor
23 |
24 |
@shared_task
def mmwr_task():
    """Celery task: run the MMWR mortality data processor.

    Uses the bare ``@shared_task`` form (equivalent to ``@shared_task()``)
    for consistency with every other dataqs task module.
    """
    processor = MortalityProcessor()
    processor.run()
29 |
--------------------------------------------------------------------------------
/dataqs/nasa_gpm/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.nasa_gpm.nasa_gpm import GPMProcessor
23 |
24 |
@shared_task
def nasa_gpm_task():
    """Run the NASA GPM precipitation processor as a Celery job."""
    GPMProcessor().run()
29 |
--------------------------------------------------------------------------------
/dataqs/gistemp/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 |
22 | from celery import shared_task
23 | from dataqs.gistemp.gistemp import GISTEMPProcessor
24 |
25 |
@shared_task
def gistemp_task():
    """Celery entry point for the GISTEMP surface-temperature processor."""
    job = GISTEMPProcessor()
    job.run()
30 |
--------------------------------------------------------------------------------
/dataqs/wqp/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.wqp.wqp import WaterQualityPortalProcessor
23 |
24 |
@shared_task
def wqp_task():
    """Run the Water Quality Portal processor in the background."""
    WaterQualityPortalProcessor().run()
29 |
--------------------------------------------------------------------------------
/dataqs/hadghcnd/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 |
22 | from celery import shared_task
23 | from dataqs.hadghcnd.hadghcnd import HadGHCNDProcessor
24 |
25 |
@shared_task
def hadghcnd_task():
    """Celery task wrapping the HadGHCND processor."""
    worker = HadGHCNDProcessor()
    worker.run()
30 |
--------------------------------------------------------------------------------
/dataqs/landscan/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 |
22 | from celery import shared_task
23 | from dataqs.landscan.landscan import LandscanProcessor
24 |
25 |
@shared_task
def landscan_task():
    """Run the Landscan population processor via Celery."""
    LandscanProcessor().run()
30 |
--------------------------------------------------------------------------------
/dataqs/aqicn/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.aqicn.aqicn import AQICNProcessor
23 |
24 |
@shared_task
def aqicn_task(countries):
    """Celery task: run the AQICN processor for the given *countries*."""
    AQICNProcessor(countries=countries).run()
29 |
--------------------------------------------------------------------------------
/dataqs/udatp/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 |
22 | from celery import shared_task
23 | from dataqs.udatp.udatp import UoDAirTempPrecipProcessor
24 |
25 |
@shared_task
def udatp_task():
    """Run the UoD air-temperature/precipitation processor as a Celery job."""
    proc = UoDAirTempPrecipProcessor()
    proc.run()
30 |
--------------------------------------------------------------------------------
/dataqs/usgs_quakes/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.usgs_quakes.usgs_quakes import USGSQuakeProcessor
23 |
24 |
@shared_task
def usgs_quake_task():
    """Celery entry point for the USGS earthquake feed processor."""
    USGSQuakeProcessor().run()
29 |
--------------------------------------------------------------------------------
/ansible/roles/postgres_postgis/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ###
2 | # Install PostGIS and Postgres 9.3 from the main apt repo.
3 | #
4 | ---
5 |
6 | - name: install PostGIS
7 | apt: pkg={{ item }} state=present
8 | sudo: yes
9 | with_items:
10 | - python-psycopg2
11 | - postgresql-9.3
12 | - postgresql-contrib-9.3
13 | - postgresql-9.3-postgis-2.1
14 |
15 | - name: setup postgres cluster to default to utf8
16 | sudo: yes
17 | shell: "service postgresql stop && pg_dropcluster 9.3 main ; pg_createcluster -e UTF-8 9.3 main && service postgresql start"
18 |
19 | - name: update postgres client configuration
20 | template: src=pg_hba.conf dest=/etc/postgresql/9.3/main/pg_hba.conf
21 | sudo: yes
22 | notify:
23 | - restart postgres_postgis
24 |
25 | - name: restart postgres_post_configuration
26 | command: /etc/init.d/postgresql restart
27 | sudo: yes
28 |
29 | - name: create database user
30 | postgresql_user: name={{db_user}}
31 | password={{db_password}}
32 | role_attr_flags=LOGIN,CREATEDB,NOSUPERUSER
33 | # in case the user already exists
34 | ignore_errors: True
35 | notify:
36 | - restart postgres_postgis
37 |
--------------------------------------------------------------------------------
/dataqs/airnow/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 |
22 | from celery import shared_task
23 | from dataqs.airnow.airnow import AirNowGRIB2HourlyProcessor
24 |
25 |
@shared_task
def airnow_grib_hourly_task():
    """Run the hourly AirNow GRIB2 processor in the background."""
    job = AirNowGRIB2HourlyProcessor()
    job.run()
30 |
--------------------------------------------------------------------------------
/dataqs/forecastio/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 |
22 | from celery import shared_task
23 | from dataqs.forecastio.forecastio_air import ForecastIOAirTempProcessor
24 |
25 |
@shared_task
def forecast_io_task():
    """Celery task wrapping the Forecast.io air-temperature processor."""
    ForecastIOAirTempProcessor().run()
30 |
--------------------------------------------------------------------------------
/dataqs/hifld/resources/point.sld:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 | {table}
9 |
10 | {table}
11 | {title}
12 |
13 | {title}
14 |
15 |
16 |
17 |
18 | x
19 |
20 | #888800
21 |
22 |
23 | #ffffbb
24 |
25 |
26 | 10
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/dataqs/landscan/resources/landscan.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | Landscan
4 |
5 | Landscan
6 | Population - Landscan
7 |
8 | name
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/dataqs/gfms/resources/gfms.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | gfms_latest_4ed42a7a
4 |
5 | gfms_latest_4ed42a7a
6 | GFMS Flood Detection
7 |
8 | name
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/dataqs/worldclim/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | from celery import shared_task
22 | from dataqs.worldclim.worldclim import WorldClimCurrentProcessor, \
23 | WorldClimPastProcessor, WorldClimFutureProcessor
24 |
25 |
@shared_task
def worldclim_current_task():
    """Run the WorldClim current-climate processor via Celery."""
    WorldClimCurrentProcessor().run()
30 |
31 |
@shared_task
def worldclim_past_task():
    """Run the WorldClim past-climate processor via Celery."""
    job = WorldClimPastProcessor()
    job.run()
36 |
37 |
@shared_task
def worldclim_future_task():
    """Run the WorldClim future-projection processor via Celery."""
    WorldClimFutureProcessor().run()
42 |
--------------------------------------------------------------------------------
/dataqs/airnow/resources/airnow.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | airnow
4 |
5 | airnow
6 |
7 |
8 |
9 |
10 | grid
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/ansible/roles/dataqs/files/geonode_patch.diff:
--------------------------------------------------------------------------------
1 | diff --git a/geonode/__init__.py b/geonode/__init__.py
2 | index 10f2581..5349416 100644
3 | --- a/geonode/__init__.py
4 | +++ b/geonode/__init__.py
5 | @@ -18,6 +18,7 @@
6 | #########################################################################
7 |
8 | import os
9 | +import celery_app as celery
10 |
11 | __version__ = (2, 4, 0, 'alpha', 0)
12 |
13 | diff --git a/geonode/celery_app.py b/geonode/celery_app.py
14 | index 0a22b10..d079943 100644
15 | --- a/geonode/celery_app.py
16 | +++ b/geonode/celery_app.py
17 | @@ -5,8 +5,14 @@ from celery import Celery
18 |
19 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geonode.settings')
20 |
21 | +from django.conf import settings
22 | app = Celery('geonode')
23 |
24 | # Using a string here means the worker will not have to
25 | # pickle the object when using Windows.
26 | app.config_from_object('django.conf:settings')
27 | +app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
28 | +
29 | +@app.task(bind=True)
30 | +def debug_task(self):
31 | + print('Request: {0!r}'.format(self.request))
32 | diff --git a/geonode/settings.py b/geonode/settings.py
33 | index bbced0e..ca86eed 100644
34 | --- a/geonode/settings.py
35 | +++ b/geonode/settings.py
36 | @@ -929,4 +929,6 @@ if 'geonode.geoserver' in INSTALLED_APPS:
37 | }
38 | baselayers = MAP_BASELAYERS
39 | MAP_BASELAYERS = [LOCAL_GEOSERVER]
40 | - MAP_BASELAYERS.extend(baselayers)
41 | \ No newline at end of file
42 | + MAP_BASELAYERS.extend(baselayers)
43 | +
44 | +INSTALLED_APPS += DATAQS_APPS
45 | \ No newline at end of file
--------------------------------------------------------------------------------
/dataqs/nasa_gpm/resources/gpm.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | nasa_gpm
4 |
5 | nasa_gpm
6 | NASA GPM Precipitation Estimate
7 | 1
8 |
9 | name
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
import os
from setuptools import setup

# Long description for the package index is taken from the project README.
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
    README = readme.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='dataqs',
    version='0.1',
    packages=['dataqs'],
    include_package_data=True,
    # NOTE(review): the Python sources in this repo carry Apache 2.0 headers,
    # while this metadata says BSD ("example license" template text) — confirm
    # the intended license before release.
    license='BSD License',  # example license
    description='A simple GeoNode app to download, process, and import '
                'spatial data into PostGIS.',
    long_description=README,
    url='http://www.example.com/',
    author='Matt Bertrand ',
    author_email='matt@epidemico.com',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',  # example license
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        # Replace these appropriately if you are stuck on Python 2.
        'Programming Language :: Python :: 2.7',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
    # Runtime dependencies; rasterio/gdal are pinned, presumably for ABI
    # compatibility with the deployed GeoServer/PostGIS stack — TODO confirm.
    install_requires=[
        'psycopg2',
        'requests',
        'celery',
        'geopy',
        'fiona',
        'unicodecsv',
        'shapely',
        'pymongo',
        'numpy',
        'rasterio==0.31.0',
        'gdal==2.1.0'
    ]
)
--------------------------------------------------------------------------------
/dataqs/hadghcnd/resources/HadGHCND_temperatures.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | gistemp1200_ersstv4
4 |
5 | gistemp1200_ersstv4
6 | gistemp1200_ersstv4
7 |
8 | name
9 |
10 |
11 | 1.0
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/ansible/roles/geoserver/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ###
2 | # Install GeoServer from Custom GeoNode Build
3 | #
4 | ---
5 |
6 | - name: ensure apt cache is up to date
7 | apt: update_cache=yes
8 | sudo: yes
9 |
10 | - name: install Tomcat
11 | apt: pkg={{ item }} state=present
12 | sudo: yes
13 | with_items:
14 | - tomcat7
15 | - unzip
16 |
17 | - name: copy the Tomcat setenv.sh file to /usr/share/tomcat7/bin
18 | template: src=setenv.sh dest=/usr/share/tomcat7/bin/setenv.sh
19 | sudo: yes
20 |
21 | - name: copy the tomcat7 file to /etc/default
22 | template: src=tomcat7 dest=/etc/default/tomcat7
23 | sudo: yes
24 |
25 | - name: Create Geoserver data directory
26 | file: path=/data/ state=directory
27 | sudo: yes
28 |
29 | - name: Download Geoserver (remote)
30 | sudo: yes
31 | get_url:
32 | dest=/tmp/geoserver.war
33 | url={{ geoserver_url }}
34 | owner=0
35 | group=0
36 | mode=0644
37 |
38 | - name: Unzip Geoserver WAR file
39 | command: unzip -o -d '/tmp/geoserver' '/tmp/geoserver.war'
40 | sudo: yes
41 |
42 | - name: Copy Geoserver web.xml file with custom data directory
43 | template: src=web.xml dest=/tmp/geoserver/WEB-INF/web.xml
44 | sudo: yes
45 |
46 | - name: Move data files
47 | command: mv -n '/tmp/geoserver/data' '/data/geodata'
48 | sudo: yes
49 |
50 | - name: Move web app
51 | command: mv -n '/tmp/geoserver' '/var/lib/tomcat7/webapps/geoserver'
52 | sudo: yes
53 |
54 | - name: reset geoserver owner
55 | file:
56 | state: directory
57 | owner: tomcat7
58 | group: "{{ deploy_user }}"
59 | path: "/data"
60 | mode: 0774
61 | recurse: true
62 | sudo: yes
63 |
64 | - name: Restart Tomcat
65 | service: name=tomcat7 state=restarted
66 | sudo: yes
67 |
--------------------------------------------------------------------------------
/dataqs/hadghcnd/resources/HadGHCND_anomalies.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | gistemp1200_ersstv4
4 |
5 | gistemp1200_ersstv4
6 | gistemp1200_ersstv4
7 |
8 | name
9 |
10 |
11 | 1.0
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/dataqs/cmap/resources/cmap.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | cmap
4 |
5 | cmap
6 | cmap
7 |
8 | name
9 |
10 |
11 | 1.0
12 |
13 |
14 | {latest_band}
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/dataqs/spei/resources/spei.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | {name}
4 |
5 | {name}
6 | SPEI Drought Index
7 |
8 | spei03_inv_a1c0d6fd
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/dataqs/udatp/resources/uod_air_mean_401.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | uod_air_mean_401
4 |
5 | uod_air_mean_401
6 | uod_air_mean_401
7 |
8 | name
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/dataqs/udatp/resources/uod_precip_total_401.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | uod_precip_total_401
4 |
5 | uod_precip_total_401
6 | uod_precip_total_401
7 |
8 | name
9 |
10 |
11 | 1.0
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/dataqs/worldclim/resources/worldclim_isotherm.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | worldclim_isotherm
4 |
5 | worldclim_isotherm
6 |
7 | name
8 |
9 |
10 |
11 | grid
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/ansible/roles/geonode/templates/local_settings.py.j2:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
4 |
5 | SITEURL = "{{site_url}}"
6 |
7 | DATABASES = {
8 | 'default': {
9 | 'ENGINE': 'django.db.backends.postgresql_psycopg2',
10 | 'NAME': '{{db_metadata_instance}}',
11 | 'USER': '{{db_user}}',
12 | 'PASSWORD': '{{db_password}}',
13 | 'PORT': '5432'
14 | },
15 | # vector datastore for uploads
16 | '{{ app_name }}' : {
17 | 'ENGINE': 'django.contrib.gis.db.backends.postgis',
18 | 'NAME': '{{db_data_instance}}',
19 | 'USER' : '{{db_user}}',
20 | 'PASSWORD' : '{{db_password}}',
21 | 'HOST' : 'localhost',
22 | 'PORT' : '5432'
23 | }
24 | }
25 |
26 | # OGC (WMS/WFS/WCS) Server Settings
27 | OGC_SERVER = {
28 | 'default' : {
29 | 'BACKEND' : 'geonode.geoserver',
30 | 'LOCATION' : 'http://localhost:8080/geoserver/',
31 | 'PUBLIC_LOCATION' : SITEURL + 'geoserver/',
32 | 'USER' : '{{ geoserver_user }}',
33 | 'PASSWORD' : '{{ geoserver_password }}',
34 | 'MAPFISH_PRINT_ENABLED' : True,
35 | 'PRINT_NG_ENABLED' : True,
36 | 'GEONODE_SECURITY_ENABLED' : True,
37 | 'GEOGIG_ENABLED' : False,
38 | 'WMST_ENABLED' : False,
39 | 'BACKEND_WRITE_ENABLED': True,
40 | 'WPS_ENABLED' : False,
41 | 'LOG_FILE': '/tmp/geoserver.log',
42 | # Set to name of database in DATABASES dictionary to enable
43 | 'DATASTORE': '{{ app_name }}', #'datastore',
44 | }
45 | }
46 |
47 | CATALOGUE = {
48 | 'default': {
49 | 'ENGINE': 'geonode.catalogue.backends.pycsw_local',
50 | 'URL': '%scatalogue/csw' % SITEURL,
51 | }
52 | }
53 |
54 | MEDIA_ROOT = "/var/www/{{ app_name }}/uploaded"
55 | STATIC_ROOT = "/var/www/{{ app_name }}/static"
56 |
57 | POSTGIS_VERSION = (2, 1, 7)
58 |
--------------------------------------------------------------------------------
/dataqs/forecastio/resources/forecastio.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | forecast_io_airtemp_4326
4 |
5 | forecast_io_airtemp_4326
6 |
7 |
8 |
9 |
10 | grid
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/dataqs/worldclim/resources/worldclim_diurnal.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | worldclim_diurnal
4 |
5 | worldclim_diurnal
6 |
7 | name
8 |
9 |
10 |
11 | grid
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/dataqs/worldclim/resources/worldclim_temp.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | worldclim_temp
4 |
5 | worldclim_temp
6 |
7 | name
8 |
9 |
10 |
11 | grid
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/dataqs/worldclim/resources/worldclim_precip.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | worldclim_precip
4 |
5 | worldclim_precip
6 |
7 | name
8 |
9 |
10 |
11 | grid
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/dataqs/worldclim/resources/worldclim_precip_seasonality.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | worldclim_precip_seasonality
4 |
5 | worldclim_precip_seasonality
6 |
7 | name
8 |
9 |
10 |
11 | grid
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/dataqs/worldclim/resources/worldclim_temp_seasonality.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | worldclim_temp_seasonality
4 |
5 | worldclim_temp_seasonality
6 |
7 | name
8 |
9 |
10 |
11 | grid
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/dataqs/worldclim/resources/worldclim_precip_annual.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | worldclim_precip
4 |
5 | worldclim_precip
6 |
7 | name
8 |
9 |
10 |
11 | grid
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/dataqs/gistemp/resources/gistemp.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | gistemp1200_ersstv4
4 |
5 | gistemp1200_ersstv4
6 | gistemp1200_ersstv4
7 |
8 | name
9 |
10 |
11 | 1.0
12 |
13 |
14 | {latest_band}
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/ansible/roles/geoserver/templates/tomcat7:
--------------------------------------------------------------------------------
1 | # Run Tomcat as this user ID. Not setting this or leaving it blank will use the
2 | # default of tomcat7.
3 | TOMCAT7_USER=tomcat7
4 |
5 | # Run Tomcat as this group ID. Not setting this or leaving it blank will use
6 | # the default of tomcat7.
7 | TOMCAT7_GROUP=tomcat7
8 |
9 | # The home directory of the Java development kit (JDK). You need at least
10 | # JDK version 1.5. If JAVA_HOME is not set, some common directories for
11 | # OpenJDK, the Sun JDK, and various J2SE 1.5 versions are tried.
12 | JAVA_HOME=/usr/lib/jvm/java-8-oracle
13 |
14 | # You may pass JVM startup parameters to Java here. If unset, the default
15 | # options will be: -Djava.awt.headless=true -Xmx128m -XX:+UseConcMarkSweepGC
16 | #
17 | # Use "-XX:+UseConcMarkSweepGC" to enable the CMS garbage collector (improved
18 | # response time). If you use that option and you run Tomcat on a machine with
19 | # exactly one CPU chip that contains one or two cores, you should also add
20 | # the "-XX:+CMSIncrementalMode" option.
21 | JAVA_OPTS="-Djava.awt.headless=true -XX:+UseConcMarkSweepGC"
22 |
23 | # To enable remote debugging uncomment the following line.
24 | # You will then be able to use a java debugger on port 8000.
25 | #JAVA_OPTS="${JAVA_OPTS} -Xdebug -Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=n"
26 |
27 | # Java compiler to use for translating JavaServer Pages (JSPs). You can use all
28 | # compilers that are accepted by Ant's build.compiler property.
29 | #JSP_COMPILER=javac
30 |
31 | # Use the Java security manager? (yes/no, default: no)
32 | #TOMCAT7_SECURITY=no
33 |
34 | # Number of days to keep logfiles in /var/log/tomcat7. Default is 14 days.
35 | #LOGFILE_DAYS=14
36 | # Whether to compress logfiles older than today's
37 | #LOGFILE_COMPRESS=1
38 |
39 | # Location of the JVM temporary directory
40 | # WARNING: This directory will be destroyed and recreated at every startup !
41 | #JVM_TMP=/tmp/tomcat7-temp
42 |
43 | # If you run Tomcat on port numbers that are all higher than 1023, then you
44 | # do not need authbind. It is used for binding Tomcat to lower port numbers.
45 | # NOTE: authbind works only with IPv4. Do not enable it when using IPv6.
46 | # (yes/no, default: no)
47 | #AUTHBIND=no
--------------------------------------------------------------------------------
/ansible/roles/nginx/templates/nginx.conf.j2:
--------------------------------------------------------------------------------
1 | server {
2 | listen 80;
3 | server_name {{ server_name }};
4 |
5 | charset utf-8;
6 |
7 | server_tokens off;
8 | more_set_headers 'Server: Kitware ;)';
9 |
10 | access_log /var/log/nginx/{{ app_name }}.access.log;
11 | error_log /var/log/nginx/{{ app_name }}.error.log info;
12 |
13 | client_max_body_size 200M;
14 |
15 | location / {
16 | root /var/www/{{ app_name }};
17 | try_files $uri @wsgiapp;
18 | }
19 |
20 | location @wsgiapp {
21 | uwsgi_pass unix:///tmp/{{ app_name }}.sock;
22 | include /etc/nginx/uwsgi_params;
23 | }
24 |
25 | location /geoserver/ {
26 | proxy_set_header X-Forwarded-Host $host;
27 | proxy_set_header X-Forwarded-Server $host;
28 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
29 | proxy_pass http://127.0.0.1:8080;
30 | if ($request_method = 'OPTIONS') {
31 | add_header 'Access-Control-Allow-Origin' '*';
32 | #
33 | add_header 'Access-Control-Allow-Credentials' 'true';
34 | add_header 'Access-Control-Allow-Methods' 'GET, OPTIONS';
35 | #
36 | # Custom headers and headers various browsers *should* be OK with but aren't
37 | #
38 | add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type';
39 | #
40 | # Tell client that this pre-flight info is valid for 30 min.
41 | #
42 | add_header 'Access-Control-Max-Age' 1800;
43 | add_header 'Content-Type' 'text/plain charset=UTF-8';
44 | add_header 'Content-Length' 0;
45 | return 204;
46 | }
47 | if ($request_method = 'GET') {
48 | add_header 'Access-Control-Allow-Origin' '*';
49 | add_header 'Access-Control-Allow-Credentials' 'true';
50 | add_header 'Access-Control-Allow-Methods' 'GET, OPTIONS';
51 | add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type';
52 | }
53 | }
54 |
55 | }
56 |
--------------------------------------------------------------------------------
/dataqs/cmap/tests.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import os
3 | import unittest
4 | from datetime import date
5 | from django.test import TestCase
6 | from dataqs.cmap.cmap import CMAPProcessor
7 |
8 | script_dir = os.path.dirname(os.path.realpath(__file__))
9 |
10 |
class CMAPPTest(TestCase):
    """
    Tests the dataqs.cmap module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        # Fresh processor for every test; dl_file is a small NetCDF fixture.
        self.processor = CMAPProcessor()
        self.dl_file = os.path.join(script_dir, 'resources/cmap.nc')

    def tearDown(self):
        # Remove any temporary files created by the processor.
        self.processor.cleanup()

    def test_convert(self):
        """
        Verify that a NetCDF file is converted via netcdf and cdo apps.
        Skip if these apps are not installed.
        """
        try:
            converted_nc = self.processor.convert(self.dl_file)
            self.assertTrue(os.path.exists(converted_nc))
        except OSError:
            # cdo and/or netcdf not installed
            raise unittest.SkipTest()

    def test_extract_band(self):
        """
        Verify that a GeoTIFF file is created.
        """
        dl_tif = self.processor.extract_band(
            self.dl_file, 1, os.path.join(self.processor.tmp_dir, 'cmap.tif'))
        self.assertTrue(os.path.exists(dl_tif))

    def test_get_title(self):
        """
        Verify that the correct title is returned.
        """
        title = self.processor.get_title(451)
        # assertEqual: assertEquals is a deprecated alias in unittest
        self.assertEqual(
            'CPC Merged Analysis of Precipitation, 1979/01 - 2016/07', title)

    def test_get_date(self):
        """
        Verify that the correct date is returned.
        """
        band_date = self.processor.get_date(451)
        self.assertEqual(band_date, date(2016, 7, 1))

    def test_cleanup(self):
        """
        Make sure temporary files are deleted.
        """
        self.processor.extract_band(
            self.dl_file, 1, os.path.join(self.processor.tmp_dir, 'cmap.tif'))
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        self.assertEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
71 |
--------------------------------------------------------------------------------
/ansible/group_vars/all:
--------------------------------------------------------------------------------
1 | ---
2 | # defaults file for geonode
3 |
4 | # app_name should be a shorthand, lowercase, no spaces version of the
5 | # application name since this variable will be used as a directory name
6 | # Also, it would be the name of the branch in the worldmap repo.
7 | # For example worldmap, my_geonode, master, feature1.
8 | app_name: geonode
9 |
10 | # github user who owns the repository
11 | github_user: GeoNode
12 |
13 | # version
14 | app_version: master
15 |
16 | # remote git repository to pull application code from, for example:
17 | code_repository: "https://github.com/{{ github_user }}/{{ app_name }}.git"
18 |
19 | # TODO: Allow for branch name
20 |
21 | # Server name for nginx.
22 | # TODO: Production IP address
23 | server_name: localhost:8080
24 |
25 | # geoserver url
26 | geoserver_root_url: http://localhost:8080/geoserver
27 |
28 | # geoserver rest endpoint
29 | geoserver_rest: "{{ geoserver_root_url }}/rest"
30 |
31 | # geonode workspace (probably shouldn't change this)
32 | geonode_workspace: geonode
33 |
34 | db_data_instance: "{{ app_name }}"
35 | db_metadata_instance: "{{ app_name }}_app"
36 | db_password: "{{ app_name }}"
37 | db_user: "{{ app_name }}"
38 |
39 | # Django settings module to use when running Manage commands
40 | # for example, txt2react.settings
41 | main_module: "{{ app_name }}"
42 |
43 | # name of the user created by the Fabric script that Ansible will use to
44 | # handle server tasks
45 | deploy_user: vagrant
46 |
47 | # server directory for the virtualenv that will be created to run the web app
48 | virtualenv_dir: /home/{{deploy_user}}/venvs
49 |
50 | # place where virtualenv binaries are stored
51 | virtualenv_bin: "{{virtualenv_dir}}/{{app_name}}/bin"
52 |
53 | # server directory to put the code into that is pulled from the code
54 | # repository
55 | app_code_dir: /home/{{deploy_user}}
56 |
57 | # app root directory
58 | geonode_root: "{{app_code_dir}}/{{app_name}}/geonode"
59 |
  60 | # location of the requirements.txt file in the code
61 | requirements_file: "{{app_code_dir}}/requirements.txt"
62 |
63 | # root domain name the server should run as. do not include a subdomain,
64 | # for example, txt2react.com
65 | site_url: "http://192.168.33.12/"
66 |
67 | ssh_dir: ~/.ssh/
68 |
69 | geoserver_url: http://build.geonode.org/geoserver/latest/geoserver.war
70 |
71 | # geoserver admin user
72 | geoserver_user: admin
73 | geoserver_password: geoserver
74 |
75 | # geonode admin user
76 | geonode_admin_user: admin
77 | geonode_admin_email: "{{ geonode_admin_user }}@geonode.com"
78 | geonode_admin_password: geonode
79 |
80 | gpm_email: "setme@email.com"
81 | healthmap_apikey: "setme"
82 |
--------------------------------------------------------------------------------
/dataqs/csv_helpers.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import csv
21 | import codecs
22 | import cStringIO
23 |
24 |
class UTF8Recoder:
    """
    Iterator that reads an encoded stream and reencodes the input to UTF-8.
    """

    def __init__(self, f, encoding):
        # Wrap the raw stream in a decoder for the source encoding.
        reader_factory = codecs.getreader(encoding)
        self.reader = reader_factory(f)

    def __iter__(self):
        return self

    def next(self):
        # Decode one line in the source encoding, re-emit it as UTF-8 bytes.
        line = self.reader.next()
        return line.encode("utf-8")
34 |
35 | def next(self):
36 | return self.reader.next().encode("utf-8")
37 |
38 |
class UnicodeReader:
    """
    A CSV reader which will iterate over lines in the CSV file "f",
    which is encoded in the given encoding.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Recode the input stream to UTF-8 before handing it to csv.reader,
        # which (in Python 2) only handles byte streams safely.
        recoded = UTF8Recoder(f, encoding)
        self.reader = csv.reader(recoded, dialect=dialect, **kwds)

    def __iter__(self):
        return self

    def next(self):
        # Each cell comes back as UTF-8 bytes; decode to unicode objects.
        return [unicode(cell, "utf-8") for cell in self.reader.next()]
55 |
56 |
class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Rows are first serialized into an in-memory UTF-8 buffer, then
        # transcoded into the target encoding on every write.
        self.queue = cStringIO.StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        encoded_cells = [cell.encode("utf-8") for cell in row]
        self.writer.writerow(encoded_cells)
        # Pull the UTF-8 serialization of this row out of the buffer ...
        utf8_data = self.queue.getvalue()
        # ... transcode it into the target encoding ...
        recoded = self.encoder.encode(utf8_data.decode("utf-8"))
        # ... and write it to the real output stream.
        self.stream.write(recoded)
        # Reset the buffer for the next row.
        self.queue.truncate(0)

    def writerows(self, rows):
        for r in rows:
            self.writerow(r)
85 |
--------------------------------------------------------------------------------
/dataqs/spei/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import zipfile
22 | import os
23 | from django.test import TestCase
24 | from dataqs.spei.spei import SPEIProcessor
25 | import httpretty
26 |
27 | script_dir = os.path.dirname(os.path.realpath(__file__))
28 |
29 |
def get_mock_image():
    """
    Return the canned test GeoTIFF stored in the test resources archive.
    (The previous docstring, "HTML for Boston", was a copy-paste error.)
    """
    archive_path = os.path.join(script_dir, 'resources/test_spei.zip')
    # Use a context manager so the archive handle is closed after reading
    # (the original left the ZipFile open).
    with zipfile.ZipFile(archive_path) as zf:
        return zf.read('test_spei.tif')
38 |
39 |
class SpieTest(TestCase):
    """
    Tests the dataqs.spei module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        # httpretty intercepts HTTP so no real network requests are made.
        self.processor = SPEIProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def test_download(self):
        """
        Verify that files are downloaded.
        """
        imgurl = "{}spei03.nc".format(self.processor.base_url)
        httpretty.register_uri(httpretty.GET, imgurl,
                               body=get_mock_image())
        imgfile = self.processor.download(imgurl, 'spei03.tif')
        self.assertTrue(os.path.exists(os.path.join(
            self.processor.tmp_dir, imgfile)))

    def test_cleanup(self):
        """
        Temporary files should be gone after cleanup.
        """
        imgurl = "{}spei03.nc".format(self.processor.base_url)
        httpretty.register_uri(httpretty.GET, imgurl,
                               body=get_mock_image())
        self.processor.download(imgurl, 'spei03.tif')
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        # assertEqual: assertEquals is a deprecated alias in unittest
        self.assertEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
80 |
--------------------------------------------------------------------------------
/dataqs/gistemp/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import os
22 | import httpretty
23 | from django.test import TestCase
24 | from dataqs.gistemp.gistemp import GISTEMPProcessor
25 |
26 | script_dir = os.path.dirname(os.path.realpath(__file__))
27 |
28 |
def get_mock_image():
    """
    Return a canned test image (1 band of original NetCDF raster).
    """
    nc_path = os.path.join(script_dir, 'resources/gistemp1200_ERSSTv4.nc')
    with open(nc_path, 'rb') as nc_file:
        return nc_file.read()
36 |
37 |
class GISTEMPTest(TestCase):
    """
    Tests the dataqs.gistemp module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        # httpretty intercepts HTTP so no real network requests are made.
        self.processor = GISTEMPProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def test_download(self):
        """
        Verify that a file is downloaded.
        """
        httpretty.register_uri(httpretty.GET,
                               self.processor.base_url,
                               body=get_mock_image())
        imgfile = self.processor.download(
            self.processor.base_url,
            '{}.nc'.format(self.processor.layer_name))
        self.assertTrue(os.path.exists(
            os.path.join(self.processor.tmp_dir, imgfile)))

    def test_cleanup(self):
        """
        Make sure temporary files are deleted after cleanup.
        """
        httpretty.register_uri(httpretty.GET,
                               self.processor.base_url,
                               body=get_mock_image())
        self.processor.download(self.processor.base_url,
                                '{}.nc'.format(self.processor.layer_name))
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        # assertEqual: assertEquals is a deprecated alias in unittest
        self.assertEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
77 |
--------------------------------------------------------------------------------
/dataqs/gdacs/resources/gdacs.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | gdacs_alerts_3bb19f61
4 |
5 | gdacs_alerts_3bb19f61
6 | GDACS Alerts
7 |
8 | name
9 |
10 | Earthquake
11 |
12 |
13 | gdacs_eventtype
14 | EQ
15 |
16 |
17 |
18 |
19 |
20 |
21 | #F30A40
22 |
23 |
24 |
25 | 8
26 |
27 |
28 |
29 |
30 | Earthquake (copy)
31 | Flood
32 |
33 |
34 | gdacs_eventtype
35 | FL
36 |
37 |
38 |
39 |
40 |
41 | circle
42 |
43 | #0A1EF2
44 |
45 |
46 |
47 | 8
48 |
49 |
50 |
51 |
52 | Flood (copy)
53 | Cyclone
54 |
55 |
56 | gdacs_eventtype
57 | TC
58 |
59 |
60 |
61 |
62 |
63 | triangle
64 |
65 | #0AF31A
66 |
67 |
68 |
69 | 10
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
   1 | ===========
   2 | Data Queues
   3 | ===========
4 |
5 | dataqs (Data Queues) is a simple Django app to download, process,
6 | and import spatial data into GeoServer/GeoNode.
7 |
8 |
9 | Quick start
10 | -----------
11 |
12 | 1. Add "dataqs" to your INSTALLED_APPS setting like this::
13 |
14 | INSTALLED_APPS = (
15 | ...
16 | 'dataqs',
17 | 'dataqs.forecastio',
18 | 'dataqs.gfms',
19 | 'dataqs.airnow',
20 | 'dataqs.wqp',
21 | 'dataqs.aqicn',
22 | #etc
23 | )
24 |
25 | 2. In your settings.py or local_settings.py file, add a CELERYBEAT_SCHEDULE
26 | setting to specify when Celery should run data_queues tasks::
27 |
28 | from celery.schedules import crontab
29 | CELERYBEAT_SCHEDULE = {
30 | 'gfms': {
31 | 'task': 'dataqs.gfms.tasks.gfms_task',
32 | 'schedule': crontab(minute='3'),
33 | 'args': ()
34 | },
35 | 'forecast_io': {
36 | 'task': 'dataqs.forecastio.tasks.forecast_io_task',
37 | 'schedule': crontab(minute='1'),
38 | 'args': ()
  39 |         },
  40 |         'aqicn': {
  41 |             'task': 'dataqs.aqicn.tasks.aqicn_task',
  42 |             'schedule': crontab(hour='*/6', minute='0'),
  43 |             'args': ([],)
  44 |         },
44 | }
45 |
46 | 3. Also add the following settings::
47 |
48 | #Location of GeoServer data directory
49 | GS_DATA_DIR = '/usr/share/geoserver/data'
50 |
51 | #Directory where temporary dataqs geoprocessing files should be downloaded
52 | GS_TMP_DIR = GS_DATA_DIR + '/tmp'
53 |
54 | #AirNow API username:password
55 | #(sign up for a free account at http://airnowapi.org/account/request/)
56 | AIRNOW_ACCOUNT = 'your_airnow_username:your_airnow_password'
57 |
58 | #NASA GPM FTP ACCOUNT
59 | #(sign up at http://registration.pps.eosdis.nasa.gov/registration/)
60 | GPM_ACCOUNT = 'your_gpm_email_account'
61 |
62 | #HIFLD: Dictionary of layers to process in the form of:
63 | HIFLD_LAYERS = [
64 | {
65 | 'name': 'US State Boundaries',
66 | 'url': 'https://hifld-dhs-gii.opendata.arcgis.com/datasets/718791120f6549708cb642dac6ff0dbf_0.geojson',
67 | 'table': 'state_boundaries',
68 | 'sld': 'polygon'
69 | },
70 | {
71 | 'name': 'Cities and Towns NTAD',
72 | 'url': 'https://hifld-dhs-gii.opendata.arcgis.com/datasets/6a1e404a10754e59bac4bfa50db3f487_0.geojson',
73 | 'table': 'cities_towns',
74 | 'sld': 'point'
75 | },
76 | {
77 | 'name': 'Roads and Railroad Tunnels',
78 | 'url': 'https://hifld-dhs-gii.opendata.arcgis.com/datasets/2f6abb736360437ba363e0a1210b4d36_0.geojson',
79 | 'table': 'roads_tunnels',
80 | 'sld': 'line'
81 | }
82 | ]
83 |
84 | #Time to wait before updating Geoserver mosaic (keep at 0 unless Geoserver
85 | #is on a different server. In that case, there will need to be an automated
86 | #rsync between GS_TMP_DIR where celery is running and
87 | #GS_DATA_DIR where GeoServer is running.
88 | RSYNC_WAIT_TIME = 0
89 |
90 | 4. In order to run the spei processor, the following must be installed::
91 |
92 | sudo apt-get install netcdf-bin
93 | sudo apt-get install cdo
94 |
--------------------------------------------------------------------------------
/ansible/roles/dataqs/templates/celery.conf:
--------------------------------------------------------------------------------
1 | [program:celery_worker_high]
2 | command={{virtualenv_bin}}/celery --app=geonode.celery_app:app worker -n gdworker1.%%h
3 | directory=/home/{{deploy_user}}/epigeonode
4 | user={{ deploy_user }}
5 | numprocs=1
6 | stdout_logfile=/tmp/celery-worker-high.log
7 | stderr_logfile=/tmp/celery-worker-high-error.log
8 | autostart=true
9 | autorestart=false
10 | startsecs=10
11 |
12 |
13 | ; Need to wait for currently executing tasks to finish at shutdown.
14 |
15 |
16 | ; Increase this if you have very long running tasks.
17 | stopwaitsecs = 600
18 |
19 | ; When resorting to send SIGKILL to the program to terminate it
20 | ; send SIGKILL to its whole process group instead,
21 | ; taking care of its children as well.
22 | killasgroup=true
23 |
24 | ; if rabbitmq is supervised, set its priority higher
25 | ; so it starts first
26 | priority=1
27 |
28 | [program:celery_worker_low]
29 | command={{virtualenv_bin}}/celery --app=geonode.celery_app:app worker -n gdworker2.%%h
30 | directory=/home/{{deploy_user}}/epigeonode
31 | user={{ deploy_user }}
32 | numprocs=1
33 | stdout_logfile=/tmp/celery-worker-low.log
34 | stderr_logfile=/tmp/celery-worker-low-error.log
35 | autostart=true
36 | autorestart=false
37 | startsecs=10
38 |
39 | ; Need to wait for currently executing tasks to finish at shutdown.
40 | ; Increase this if you have very long running tasks.
41 | stopwaitsecs = 600
42 |
43 | ; When resorting to send SIGKILL to the program to terminate it
44 | ; send SIGKILL to its whole process group instead,
45 | ; taking care of its children as well.
46 | killasgroup=true
47 |
48 | ; if rabbitmq is supervised, set its priority higher
49 | ; so it starts first
50 | priority=1
51 |
52 | [program:celerybeat]
53 | command={{virtualenv_bin}}/celery --app=geonode.celery_app:app beat --loglevel=INFO
54 | directory=/home/{{deploy_user}}/epigeonode
55 | user={{ deploy_user }}
56 | numprocs=1
57 | stdout_logfile=/tmp/celery-beat.log
58 | stderr_logfile=/tmp/celery-beat.log
59 | autostart=true
60 | autorestart=true
61 | startsecs=10
62 |
63 | ; Need to wait for currently executing tasks to finish at shutdown.
64 | ; Increase this if you have very long running tasks.
65 | stopwaitsecs = 600
66 |
67 | ; When resorting to send SIGKILL to the program to terminate it
68 | ; send SIGKILL to its whole process group instead,
69 | ; taking care of its children as well.
70 | killasgroup=true
71 |
72 | ; if rabbitmq is supervised, set its priority higher
73 | ; so it starts first
74 | priority=10
75 |
76 | [program:celery-flower]
77 | command={{virtualenv_bin}}/celery --app=geonode.celery_app:app flower --loglevel=INFO --persistent=True
78 | directory=/home/{{deploy_user}}/epigeonode
79 | user={{ deploy_user }}
80 | numprocs=1
81 | stdout_logfile=/tmp/celery-flower.log
82 | stderr_logfile=/tmp/celery-flower.log
83 | autostart=true
84 | autorestart=true
85 | startsecs=10
86 |
87 | ; Need to wait for currently executing tasks to finish at shutdown.
88 | ; Increase this if you have very long running tasks.
89 | stopwaitsecs = 600
90 |
91 | ; When resorting to send SIGKILL to the program to terminate it
92 | ; send SIGKILL to its whole process group instead,
93 | ; taking care of its children as well.
94 | killasgroup=true
95 |
96 | ; if rabbitmq is supervised, set its priority higher
97 | ; so it starts first
98 | priority=998
99 |
--------------------------------------------------------------------------------
/dataqs/wqp/resources/create_table.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE IF NOT EXISTS {tablename}
2 | (
3 | "OrganizationIdentifier" character varying,
4 | "OrganizationFormalName" character varying,
5 | "ActivityIdentifier" character varying,
6 | "ActivityTypeCode" character varying,
7 | "ActivityMediaName" character varying,
8 | "ActivityMediaSubdivisionName" character varying,
9 | "ActivityStartDate" timestamp with time zone,
10 | "ActivityStartTime_Time" character varying,
11 | "ActivityStartTime_TimeZoneCode" character varying,
12 | "ActivityEndDate" timestamp with time zone,
13 | "ActivityEndTime_Time" character varying,
14 | "ActivityEndTime_TimeZoneCode" character varying,
15 | "ActivityDepthHeightMeasure_MeasureValue" float,
16 | "ActivityDepthHeightMeasure_MeasureUnitCode" character varying,
17 | "ActivityDepthAltitudeReferencePointText" character varying,
18 | "ActivityTopDepthHeightMeasure_MeasureValue" character varying,
19 | "ActivityTopDepthHeightMeasure_MeasureUnitCode" character varying,
20 | "ActivityBottomDepthHeightMeasure_MeasureValue" character varying,
21 | "ActivityBottomDepthHeightMeasure_MeasureUnitCode" character varying,
22 | "ProjectIdentifier" character varying,
23 | "ActivityConductingOrganizationText" character varying,
24 | "MonitoringLocationIdentifier" character varying,
25 | "ActivityCommentText" character varying,
26 | "SampleAquifer" character varying,
27 | "HydrologicCondition" character varying,
28 | "HydrologicEvent" character varying,
29 | "SampleCollectionMethod_MethodIdentifier" character varying,
30 | "SampleCollectionMethod_MethodIdentifierContext" character varying,
31 | "SampleCollectionMethod_MethodName" character varying,
32 | "SampleCollectionEquipmentName" character varying,
33 | "ResultDetectionConditionText" character varying,
34 | "CharacteristicName" character varying,
35 | "ResultSampleFractionText" character varying,
36 | "ResultMeasureValue" float,
37 | "ResultMeasure_MeasureUnitCode" character varying,
38 | "MeasureQualifierCode" character varying,
39 | "ResultStatusIdentifier" character varying,
40 | "StatisticalBaseCode" character varying,
41 | "ResultValueTypeName" character varying,
42 | "ResultWeightBasisText" character varying,
43 | "ResultTimeBasisText" character varying,
44 | "ResultTemperatureBasisText" character varying,
45 | "ResultParticleSizeBasisText" character varying,
46 | "PrecisionValue" character varying,
47 | "ResultCommentText" character varying,
48 | "USGSPCode" character varying,
49 | "ResultDepthHeightMeasure_MeasureValue" float,
50 | "ResultDepthHeightMeasure_MeasureUnitCode" character varying,
51 | "ResultDepthAltitudeReferencePointText" character varying,
52 | "SubjectTaxonomicName" character varying,
53 | "SampleTissueAnatomyName" character varying,
54 | "ResultAnalyticalMethod_MethodIdentifier" character varying,
55 | "ResultAnalyticalMethod_MethodIdentifierContext" character varying,
56 | "ResultAnalyticalMethod_MethodName" character varying,
57 | "MethodDescriptionText" character varying,
58 | "LaboratoryName" character varying,
59 | "AnalysisStartDate" character varying,
60 | "ResultLaboratoryCommentText" character varying,
61 | "DetectionQuantitationLimitTypeName" character varying,
62 | "DetectionQuantitationLimitMeasure_MeasureValue" character varying,
63 | "DetectionQuantitationLimitMeasure_MeasureUnitCode" character varying,
64 | "PreparationStartDate" character varying,
65 | "ProviderName" character varying,
66 | CONSTRAINT wqp_{tablename}_pkey PRIMARY KEY ("ActivityIdentifier")
67 | )
--------------------------------------------------------------------------------
/dataqs/gdacs/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import os
21 | import datetime
22 | from django.test import TestCase
23 | from dataqs.gdacs.gdacs import GDACSProcessor
24 | import httpretty
25 | from xml.etree import ElementTree as et
26 |
27 | script_dir = os.path.dirname(os.path.realpath(__file__))
28 |
29 |
class GdacsTest(TestCase):
    """
    Tests for the dataqs.gdacs module. The processors are tightly coupled
    to a running GeoNode instance, so only standalone behavior (download
    and cleanup of temporary files) is exercised here.
    """

    def setUp(self):
        now = datetime.datetime.utcnow()
        week_ago = now - datetime.timedelta(days=7)
        self.edate = now.strftime("%Y-%m-%d")
        self.sdate = week_ago.strftime("%Y-%m-%d")
        self.processor = GDACSProcessor(edate=self.edate, sdate=self.sdate)
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def _fetch_rss(self):
        # Serve the canned RSS fixture for the processor's base URL and
        # download it through the processor; returns the local file path.
        with open(os.path.join(script_dir, 'resources/test_gdacs.rss')) as inf:
            body = inf.read()
        httpretty.register_uri(httpretty.GET, self.processor.base_url,
                               body=body)
        url = self.processor.base_url.format(
            self.processor.params['sdate'], self.processor.params['edate'])
        rssfile = self.processor.download(url, self.processor.prefix + ".rss")
        return os.path.join(self.processor.tmp_dir, rssfile)

    def test_download(self):
        """
        The downloaded RSS file should exist and contain the expected
        number of channel items.
        """
        rsspath = self._fetch_rss()
        self.assertTrue(os.path.exists(rsspath))
        with open(rsspath) as rssin:
            rss_tree = et.fromstring(rssin.read())
        self.assertEquals(65, len(rss_tree.findall('channel/item')))

    def test_cleanup(self):
        """
        Temporary files should be gone after cleanup.
        """
        rsspath = self._fetch_rss()
        self.assertTrue(os.path.exists(rsspath))
        self.processor.cleanup()
        self.assertFalse(os.path.exists(rsspath))
83 |
--------------------------------------------------------------------------------
/dataqs/usgs_quakes/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import json
22 | import os
23 | import datetime
24 | from django.test import TestCase
25 | from dataqs.usgs_quakes.usgs_quakes import USGSQuakeProcessor
26 | import httpretty
27 |
28 | script_dir = os.path.dirname(os.path.realpath(__file__))
29 |
30 |
class UsgsQuakesTest(TestCase):
    """
    Tests for the dataqs.usgs_quakes module. The processors are tightly
    coupled to a running GeoNode instance, so only standalone behavior
    (download and cleanup of temporary files) is exercised here.
    """

    def setUp(self):
        now = datetime.datetime.utcnow()
        week_ago = now - datetime.timedelta(days=7)
        self.edate = now.strftime("%Y-%m-%d")
        self.sdate = week_ago.strftime("%Y-%m-%d")
        self.processor = USGSQuakeProcessor(edate=self.edate, sdate=self.sdate)
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def _mock_download(self):
        # Serve the canned GeoJSON fixture and download it through the
        # processor; returns the downloaded filename.
        with open(os.path.join(
                script_dir, 'resources/test_quakes.json')) as inf:
            body = inf.read()
        dl_url = self.processor.base_url.format(
            self.processor.params['sdate'], self.processor.params['edate'])
        httpretty.register_uri(httpretty.GET, dl_url, body=body)
        return self.processor.download(
            dl_url, filename=self.processor.prefix + '.rss')

    def test_download(self):
        """
        The downloaded file should parse as GeoJSON containing features.
        """
        jsonfile = self._mock_download()
        jsonpath = os.path.join(self.processor.tmp_dir, jsonfile)
        with open(jsonpath) as json_in:
            quakejson = json.load(json_in)
        self.assertTrue("features" in quakejson)

    def test_cleanup(self):
        """
        Temporary files should be gone after cleanup.
        """
        self._mock_download()
        pattern = os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')
        self.assertNotEqual([], glob.glob(pattern))
        self.processor.cleanup()
        self.assertEquals([], glob.glob(pattern))
86 |
--------------------------------------------------------------------------------
/dataqs/usgs_quakes/resources/test_quakes.json:
--------------------------------------------------------------------------------
1 | {"type":"FeatureCollection","metadata":{"generated":1453771259000,"url":"http://earthquake.usgs.gov/fdsnws/event/1/query?format=geojson&starttime=2016-01-01&endtime=2016-01-02","title":"USGS Earthquakes","status":200,"api":"1.1.1","count":232},"features":[{"type":"Feature","properties":{"mag":2.2,"place":"47km NE of Nikolski, Alaska","time":1451692324000,"updated":1452036507860,"tz":-540,"url":"http://earthquake.usgs.gov/earthquakes/eventpage/ak12383570","detail":"http://earthquake.usgs.gov/fdsnws/event/1/query?eventid=ak12383570&format=geojson","felt":null,"cdi":null,"mmi":null,"alert":null,"status":"reviewed","tsunami":0,"sig":74,"net":"ak","code":"12383570","ids":",ak12383570,","sources":",ak,","types":",general-link,geoserve,nearby-cities,origin,tectonic-summary,","nst":null,"dmin":null,"rms":0.65,"gap":null,"magType":"ml","type":"earthquake","title":"M 2.2 - 47km NE of Nikolski, Alaska"},"geometry":{"type":"Point","coordinates":[-168.3619,53.237,12.1]},"id":"ak12383570"},
2 | {"type":"Feature","properties":{"mag":0.6,"place":"2km S of Devore, California","time":1451691683670,"updated":1451921778596,"tz":-480,"url":"http://earthquake.usgs.gov/earthquakes/eventpage/ci37509488","detail":"http://earthquake.usgs.gov/fdsnws/event/1/query?eventid=ci37509488&format=geojson","felt":null,"cdi":null,"mmi":null,"alert":null,"status":"reviewed","tsunami":0,"sig":6,"net":"ci","code":"37509488","ids":",ci37509488,","sources":",ci,","types":",cap,general-link,geoserve,nearby-cities,origin,phase-data,scitech-link,","nst":17,"dmin":0.07017,"rms":0.21,"gap":62,"magType":"ml","type":"earthquake","title":"M 0.6 - 2km S of Devore, California"},"geometry":{"type":"Point","coordinates":[-117.4028333,34.2031667,2.99]},"id":"ci37509488"},
3 | {"type":"Feature","properties":{"mag":3.1,"place":"99km SSW of Chernabura Island, Alaska","time":1451690012000,"updated":1452036535857,"tz":-660,"url":"http://earthquake.usgs.gov/earthquakes/eventpage/ak12439432","detail":"http://earthquake.usgs.gov/fdsnws/event/1/query?eventid=ak12439432&format=geojson","felt":0,"cdi":1,"mmi":null,"alert":null,"status":"reviewed","tsunami":0,"sig":148,"net":"ak","code":"12439432","ids":",ak12439432,","sources":",ak,","types":",dyfi,general-link,geoserve,nearby-cities,origin,tectonic-summary,","nst":null,"dmin":null,"rms":0.45,"gap":null,"magType":"ml","type":"earthquake","title":"M 3.1 - 99km SSW of Chernabura Island, Alaska"},"geometry":{"type":"Point","coordinates":[-160.4066,54.0392,16.3]},"id":"ak12439432"},
4 | {"type":"Feature","properties":{"mag":5,"place":"56km WNW of Ndoi Island, Fiji","time":1451689493270,"updated":1451690547184,"tz":-720,"url":"http://earthquake.usgs.gov/earthquakes/eventpage/us10004asd","detail":"http://earthquake.usgs.gov/fdsnws/event/1/query?eventid=us10004asd&format=geojson","felt":0,"cdi":1,"mmi":null,"alert":null,"status":"reviewed","tsunami":0,"sig":385,"net":"us","code":"10004asd","ids":",us10004asd,","sources":",us,","types":",cap,dyfi,geoserve,nearby-cities,origin,phase-data,tectonic-summary,","nst":null,"dmin":3.777,"rms":0.86,"gap":90,"magType":"mb","type":"earthquake","title":"M 5.0 - 56km WNW of Ndoi Island, Fiji"},"geometry":{"type":"Point","coordinates":[-179.2291,-20.5278,655.8]},"id":"us10004asd"},
5 | {"type":"Feature","properties":{"mag":1.2,"place":"22km NNE of Badger, Alaska","time":1451607076000,"updated":1452036153596,"tz":-540,"url":"http://earthquake.usgs.gov/earthquakes/eventpage/ak12368055","detail":"http://earthquake.usgs.gov/fdsnws/event/1/query?eventid=ak12368055&format=geojson","felt":null,"cdi":null,"mmi":null,"alert":null,"status":"reviewed","tsunami":0,"sig":22,"net":"ak","code":"12368055","ids":",ak12368055,","sources":",ak,","types":",general-link,geoserve,nearby-cities,origin,","nst":null,"dmin":null,"rms":0.27,"gap":null,"magType":"ml","type":"earthquake","title":"M 1.2 - 22km NNE of Badger, Alaska"},"geometry":{"type":"Point","coordinates":[-147.3342,64.9859,0]},"id":"ak12368055"}],"bbox":[-179.2291,-50.5751,-0.48,166.6026,68.2349,655.8]}
--------------------------------------------------------------------------------
/dataqs/whisp/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import json
22 | import httpretty
23 | import os
24 | from django.test import TestCase
25 | from dataqs.whisp.whisp import WhispProcessor
26 | import mock
27 |
28 | script_dir = os.path.dirname(os.path.realpath(__file__))
29 |
30 |
def test_data():
    """Return the canned Whisp HTML fixture used by the tests below."""
    fixture = os.path.join(script_dir, 'resources/test_whisp.html')
    with open(fixture) as html:
        return html.read()
34 |
35 |
def mock_insert_row(self, data):
    """
    Stand-in for WhispProcessor.insert_row: dump the row to a JSON file
    in the processor's temp directory instead of a database table.
    """
    # BUG FIX: the original used '().json'.format(...), which contains no
    # placeholder, so the processor prefix was never interpolated into the
    # filename (and cleanup's prefix glob could never match the file).
    with open(os.path.join(
            self.tmp_dir, '{}.json'.format(self.prefix)), 'w') as testfile:
        json.dump(data, testfile)


class WhispTest(TestCase):
    """
    Tests the dataqs.whisp module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        self.processor = WhispProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def _json_output(self):
        # Path of the file written by mock_insert_row above; must use the
        # same '{}.json' pattern as the mock.
        return os.path.join(
            self.processor.tmp_dir,
            '{}.json'.format(self.processor.prefix))

    def _verify_record(self):
        # The mocked insert_row should have written one record containing
        # both an event type and a geometry.
        testfile = self._json_output()
        self.assertTrue(os.path.exists(testfile))
        with open(testfile) as test:
            test_json = json.load(test)
            self.assertTrue(test_json['eventtype'])
            self.assertTrue(test_json['the_geom'])

    @mock.patch('dataqs.whisp.whisp.WhispProcessor.insert_row', mock_insert_row)
    def test_scrape(self):
        """
        Verify that the correct records can be read from html
        :return:
        """
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url,
            body=test_data(),
            content_type='text/html')
        self.processor.scrape()
        self._verify_record()
        self.processor.cleanup()

    @mock.patch('dataqs.whisp.whisp.WhispProcessor.insert_row', mock_insert_row)
    def test_archive_import(self):
        """
        Verify that the correct records can be read from archive
        :return:
        """
        self.processor.import_archive()
        self._verify_record()
        self.processor.cleanup()

    @mock.patch('dataqs.whisp.whisp.WhispProcessor.insert_row', mock_insert_row)
    def test_cleanup(self):
        """
        All temp files matching the processor prefix are removed by cleanup.
        """
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url,
            body=test_data(),
            content_type='text/html')
        self.processor.scrape()
        self.processor.cleanup()
        self.assertEquals([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
105 |
--------------------------------------------------------------------------------
/dataqs/landscan/landscan.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 | import os
20 | from urllib import urlretrieve
21 | import zipfile
22 |
23 | import gdal
24 |
25 | from dataqs.helpers import style_exists
26 | from dataqs.processor_base import GeoDataProcessor
27 |
28 | script_dir = os.path.dirname(os.path.realpath(__file__))
29 |
30 |
class LandscanProcessor(GeoDataProcessor):
    """
    Processor that downloads the sample LandScan 2011 population raster
    for Cyprus, converts it to GeoTIFF, and publishes it to GeoNode.
    """

    layer = "landscan"
    description = "LandScan 2011 Global Population Project"

    def get_landscan(self):
        """
        Download and extract the sample LandScan image for Cyprus.

        :return: directory into which the zip archive was extracted
        """
        url = "http://web.ornl.gov/sci/landscan/" + \
              "landscan2011/LS11sample_Cyprus.zip"

        zip_path = os.path.join(self.tmp_dir, "landscan.zip")
        urlretrieve(url, zip_path)

        landscan_dir = os.path.join(self.tmp_dir, "landscan")
        # Context manager guarantees the archive handle is closed even if
        # extraction raises (the original could leak the handle on error).
        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
            zip_ref.extractall(landscan_dir)

        return landscan_dir

    def convert_landscan(self, landscan_dir):
        """
        Convert the extracted ArcGrid coverage into a GeoTIFF file.

        :param landscan_dir: directory tree containing the ArcGrid (.adf)
            data produced by get_landscan()
        :return: path of the generated GeoTIFF
        :raises IOError: if no directory of .adf files can be found
        """
        # Locate the directory whose entries are all .adf files; that
        # directory is what GDAL opens as the ArcGrid dataset.
        grid_dir = None
        for subdir, dirs, files in os.walk(landscan_dir):
            for d in dirs:
                entries = os.listdir(os.path.join(subdir, d))
                if entries and all(f.endswith('.adf') for f in entries):
                    grid_dir = os.path.join(subdir, d)

        # Fail with a clear message instead of the unbound-variable
        # NameError the original raised when no ArcGrid directory existed.
        if grid_dir is None:
            raise IOError(
                'No ArcGrid (.adf) directory found under {}'.format(
                    landscan_dir))

        src_ds = gdal.Open(grid_dir)
        proj = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
        src_ds.SetProjection(proj)
        driver = gdal.GetDriverByName("GTiff")

        # Write a copy of the source dataset in GeoTIFF format.
        tiff_output = os.path.join(self.tmp_dir, "landscan.tiff")
        driver.CreateCopy(tiff_output, src_ds, 0)

        return tiff_output

    def import_landscan(self, landscan_tiff):
        """
        Publish the GeoTIFF to GeoServer/GeoNode and apply the default
        style if none exists yet.

        :param landscan_tiff: path of the GeoTIFF to publish
        """
        self.post_geoserver(landscan_tiff, self.layer)
        if not style_exists(self.layer):
            with open(os.path.join(
                    script_dir, 'resources/landscan.sld')) as sld:
                self.set_default_style(self.layer,
                                       self.layer, sld.read())
        self.update_geonode(self.layer, title="Population - Landscan",
                            store=self.layer,
                            description=self.description,
                            extra_keywords=['category:Population'])
        self.truncate_gs_cache(self.layer)

    def run(self):
        """
        Retrieve, convert, and publish the LandScan sample data.
        """
        landscan_dir = self.get_landscan()
        landscan_tiff = self.convert_landscan(landscan_dir)
        self.import_landscan(landscan_tiff)
109 |
# Allow running the processor directly as a standalone script.
if __name__ == '__main__':
    processor = LandscanProcessor()
    processor.run()
113 |
--------------------------------------------------------------------------------
/dataqs/hadghcnd/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import os
22 | from datetime import date
23 |
24 | import gdal
25 | import httpretty
26 | from dataqs.hadghcnd.hadghcnd import HadGHCNDProcessor
27 | from django.test import TestCase
28 |
29 | script_dir = os.path.dirname(os.path.realpath(__file__))
30 |
31 |
def get_mock_image():
    """
    Return a canned test image (1 band of original NetCDF raster)
    as raw bytes.
    """
    nc_path = os.path.join(
        script_dir, 'resources/HadGHCND_TXTN_anoms_1950-1960_15052015.nc')
    with open(nc_path, 'rb') as ncfile:
        return ncfile.read()
40 |
41 |
class HadGHCNDTest(TestCase):
    """
    Tests the dataqs.hadghcnd module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        self.processor = HadGHCNDProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def test_download(self):
        """
        Verify that a file is downloaded.
        """
        httpretty.register_uri(httpretty.GET,
                               self.processor.base_url,
                               body=get_mock_image())
        layer = self.processor.layers.keys()[0]
        imgfile = self.processor.download(
            self.processor.base_url, layer)
        self.assertTrue(os.path.exists(
            os.path.join(self.processor.tmp_dir, imgfile)))

    def test_extract_band(self):
        """
        A band extracted from the NetCDF file should produce a
        single-band GeoTIFF.
        """
        httpretty.register_uri(httpretty.GET,
                               self.processor.base_url,
                               body=get_mock_image())
        layer = self.processor.layers.keys()[0]
        # BUG FIX: rstrip('.tgz') strips any trailing '.', 't', 'g', 'z'
        # characters (a character set), not the '.tgz' suffix; slice the
        # suffix off explicitly instead.
        if layer.endswith('.tgz'):
            layer = layer[:-len('.tgz')]
        imgfile = self.processor.download(
            self.processor.base_url, layer)
        ncds_gdal_name = 'NETCDF:{}:tmin'.format(
            os.path.join(self.processor.tmp_dir, imgfile))
        bandout = '{}_test.tif'.format(self.processor.prefix)
        outpath = self.processor.extract_band(ncds_gdal_name, 1, bandout,
                                              projection='WGS84')
        self.assertTrue(os.path.exists(outpath))
        img = gdal.Open(outpath)
        try:
            self.assertEquals(1, img.RasterCount)
        finally:
            # Release the GDAL dataset handle so the file can be removed.
            del img

    def test_date(self):
        """Ordinal day numbers should map to the expected dates."""
        self.assertEquals(self.processor.get_date(712224), date(1950, 1, 1))
        self.assertEquals(self.processor.get_date(735964), date(2014, 12, 31))

    def test_cleanup(self):
        """
        Temporary files should be gone after cleanup.
        """
        httpretty.register_uri(httpretty.GET,
                               self.processor.base_url,
                               body=get_mock_image())
        layer = self.processor.layers.keys()[0]
        self.processor.download(self.processor.base_url, layer)
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        self.assertEquals([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
104 |
--------------------------------------------------------------------------------
/ansible/README.md:
--------------------------------------------------------------------------------
1 | ### Introduction
2 |
3 | This Ansible Role will install GeoNode and required dependencies onto an Ubuntu
4 | 14.04 (Trusty) host. It includes tasks for PostgreSQL+PostGIS, GeoServer, GeoNode,
nginx, uwsgi and also includes tasks for using AWS RDS databases. It is meant
6 | to be used with a GeoNode template project by following the workflow
7 | described here http://github.com/geonode/geonode-project
8 |
9 | Tested with a known minimal working Ansible version of 1.9.3.
10 |
11 | ### Installing roles from ansible galaxy
12 |
13 | The ansible playbook that performs the provisioning depends on a few roles provided in the
Ansible Galaxy. You can install these roles with the following command in this directory:
15 |
16 | ```
17 | ansible-galaxy install -r requirements.txt
18 | ```
19 |
20 | ### Role Variables
21 |
22 | * `app_name` - GeoNode project name (default: `geonode`)
23 | * `github_user` - GitHub username that owns the project (default: `GeoNode`)
24 | * `code_repository` - URL to the Code Repository (default: `https://github.com/{{ github_user }}/{{ app_name }}.git`)
25 |
26 | The `app_name` variable will be used to set the database names and credentials. You can override this behavior with the following variables.
27 |
28 | * `db_data_instance` - Database instance for spatial data (default: `{{ app_name }}`)
29 | * `db_metadata_instance` - Database instance for the application metadata (default: `{{ app_name }}_app`)
30 | * `db_password` - Database password (default: `{{ app_name }}`)
31 | * `db_user` - Database user (default: `{{ app_name }}`)
32 |
33 | You can also change the war used to deploy geoserver with the following variable.
34 |
35 | * `geoserver_url` - GeoServer war URL (default: `http://build.geonode.org/geoserver/latest/geoserver.war`)
36 |
37 | ### Dataqs Processors
38 |
39 | * roles/dataqs/templates/dataq_settings.py:
40 | * Change the 'DATAQS_APPS' setting to add/remove individual dataqs processors
41 | * Change the 'CELERYBEAT_SCHEDULE' setting to add/remove/modify scheduled dataqs celery tasks
42 |
43 | ### Setting up a vagrant box
44 |
45 | To configure a local development virtual machine, you will need to have virtualbox and vagrant installed.
46 | Note: You may need to change the IP configuration in the VagrantFile to a valid ip on the local network
47 |
48 | $ vagrant up geoservices
49 | $ vagrant ssh geoservices
50 |
51 |
52 | Note: You may need to bring the vagrant box down and up for geonode to work.
53 |
54 | $ vagrant halt
55 | $ vagrant up
56 |
57 |
58 | ## Deploying to ec2 (or other server)
59 |
Several variables have to be set correctly before deploying to a remote server. This can be achieved by creating a custom inventory with the group ```[geoservices]``` and the host you will deploy to.
61 |
62 | ```
63 | [geoservices]
64 | XXX.XXX.XXX.XXX ansible_ssh_private_key_file=PATH_TO_PEM_FILE ansible_user=ubuntu deploy_user=ubuntu site_url=http://ec2-XXX-XXX-XXX-XXX.us-west-2.compute.amazonaws.com/ server_name=XXX-XXX-XXX-XXX-XXX.us-west-2.compute.amazonaws.com
65 | ```
66 |
67 | Replace X's with the IP address of the remote server
68 |
69 | * `ansible_user` - will be the user ansible SSHes in as
70 | * `deploy_user` - will be the user used to deploy and install all the software (usually the same as ansible_user)
71 | * `ansible_ssh_private_key_file` - the PEM file that corresponds to the ansible_user and provides passwordless ssh access
72 | * `site_url` - the url of the website - used by geonode to identify its base URL
73 | * `server_name` - the fully qualified domain name of the server
74 |
To deploy, run ```ansible-playbook -i /path/to/inventory playbook.yml``` from this directory.
76 |
77 | Alternately, variables may be placed in a local variables file, e.g.:
78 |
79 | /path/to/local_vars.yml
80 | ```yaml
81 | ansible_ssh_private_key_file: PATH_TO_PEM_FILE
82 | ansible_user: ubuntu
83 | deploy_user: ubuntu
84 | site_url: http://ec2-XXX-XXX-XXX-XXX.us-west-2.compute.amazonaws.com/
85 | server_name: ec2-XXX-XXX-XXX-XXX.us-west-2.compute.amazonaws.com
86 | ```
87 |
88 | With an inventory:
89 |
90 | /path/to/inventory
91 | ```
92 | [geoservices]
93 | XXX.XXX.XXX.XXX
94 | ```
95 |
96 | To deploy, run:
97 |
98 | ```
99 | ansible-playbook -i /path/to/inventory -e @/path/to/local_vars.yml playbook.yml
100 | ```
101 |
--------------------------------------------------------------------------------
/dataqs/mmwr/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import json
22 | import httpretty
23 | import os
24 | import datetime
25 | from django.test import TestCase
26 | from dataqs.mmwr.mmwr import MortalityProcessor
27 | import unicodecsv as csv
28 |
29 | script_dir = os.path.dirname(os.path.realpath(__file__))
30 |
31 |
def test_data():
    """Return the canned MMWR report text used by the tests below."""
    # Name the handle 'infile' rather than 'csv' so it does not shadow the
    # module-level 'unicodecsv as csv' import inside this function.
    with open(os.path.join(script_dir, 'resources/test_mmwr.txt')) as infile:
        return infile.read()
35 |
36 |
class MMWRTest(TestCase):
    """
    Tests for the dataqs.mmwr module. The processors are tightly coupled
    to a running GeoNode instance, so only standalone behavior is
    exercised here.
    """

    def setUp(self):
        self.processor = MortalityProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def _run_report(self):
        # Serve the canned report for week 2 of 2016 and run csv
        # generation for the matching report date.
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url.format(week=2, year=2016),
            body=test_data())
        self.processor.generate_csv(datetime.date(2016, 1, 15))

    def _output_path(self, ext):
        # Path of a generated file ('txt' or 'csv') in the temp dir.
        return os.path.join(
            self.processor.tmp_dir,
            '{}.{}'.format(self.processor.prefix, ext))

    def test_download(self):
        """
        The raw report text should be written verbatim to the temp dir.
        """
        self._run_report()
        output = self._output_path('txt')
        self.assertTrue(os.path.exists(output))
        with open(output) as ofile:
            self.assertEquals(test_data(), ofile.read())

    def test_generate_csv(self):
        """
        The generated csv should carry the expected headers and
        coordinates that match the known location table.
        """
        self._run_report()
        output = self._output_path('csv')
        self.assertTrue(os.path.exists(output))
        with open(output) as ofile:
            reader = csv.reader(ofile)
            headers = reader.next()
            with open(os.path.join(script_dir, 'resources/mmwr.json')) as locs:
                locations = json.load(locs)
            self.assertEquals(
                ['place', 'lng', 'lat', 'all', 'a65',
                 'a45_64', 'a25_44', 'a01-24', 'a01', 'flu',
                 'report_date'], headers)
            for row in reader:
                self.assertIn(row[0], locations)
                self.assertEquals(locations[row[0]][1], float(row[1]))
                self.assertEquals(locations[row[0]][0], float(row[2]))

    def test_cleanup(self):
        """
        Temp files matching the processor prefix are removed by cleanup.
        """
        self._run_report()
        pattern = os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')
        self.assertNotEqual([], glob.glob(pattern))
        self.processor.cleanup()
        self.assertEquals([], glob.glob(pattern))
108 |
--------------------------------------------------------------------------------
/dataqs/wqp/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import zipfile
22 | import httpretty
23 | import os
24 | from django.test import TestCase
25 | from dataqs.wqp.wqp import WaterQualityPortalProcessor
26 | import unicodecsv as csv
27 |
28 | script_dir = os.path.dirname(os.path.realpath(__file__))
29 |
30 |
def get_mock_response(filename):
    """
    Return the named member of the canned WQP test zip archive.

    :param filename: name of the CSV member inside test_wqp_ph.zip
    :return: the member's raw bytes
    """
    archive_path = os.path.join(script_dir, 'resources/test_wqp_ph.zip')
    with zipfile.ZipFile(archive_path) as archive:
        return archive.read(filename)
39 |
40 |
class WaterQualityTest(TestCase):
    """
    Tests the dataqs.wqp module.

    Since each processor is highly dependent on a running GeoNode
    instance for most functions, only independent functions are
    tested here.
    """

    def setUp(self):
        self.processor = WaterQualityPortalProcessor(days=7)
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def test_download(self):
        """
        Verify that files are downloaded.
        """
        # Register a canned response for both WQP query types.
        for qtype in ('Result', 'Station'):
            url = ('http://www.waterqualitydata.us/{}/search?'.format(qtype) +
                   'countrycode=US&startDateLo=12-27-2015' +
                   '&startDateHi=01-26-2016' +
                   '&characteristicName=pH')
            httpretty.register_uri(
                httpretty.GET, url,
                body=get_mock_response('test_wqp_ph_{}.csv'.format(qtype)))

        downloaded = self.processor.download('pH')
        self.assertTrue('Result' in downloaded)
        self.assertTrue('Station' in downloaded)

        station_path = os.path.join(
            self.processor.tmp_dir, downloaded['Station'])
        result_path = os.path.join(
            self.processor.tmp_dir, downloaded['Result'])
        self.assertTrue(os.path.exists(station_path),
                        "Station file not found")
        self.assertTrue(os.path.exists(result_path), "Result file not found")

        # Every result row must reference a known monitoring station.
        station_ids = []
        with open(station_path) as stations_csv:
            for record in csv.DictReader(stations_csv):
                station_ids.append(record['MonitoringLocationIdentifier'])

        with open(result_path) as results_csv:
            for record in csv.DictReader(results_csv):
                self.assertEquals(record['CharacteristicName'], 'pH')
                self.assertTrue(
                    record['MonitoringLocationIdentifier'] in station_ids)

    def test_safe_name(self):
        """
        Verify that the correct safe name is returned for indicators
        """
        self.assertEquals('inorganicnitrogennitrateandnitrite',
                          self.processor.safe_name(
                              'Inorganic nitrogen (nitrate and nitrite)'))
        self.assertEquals('temperaturewater',
                          self.processor.safe_name(
                              'Temperature, water'))

    def test_cleanup(self):
        """
        Verify that no stray files exist after cleanup
        """
        self.processor.cleanup()
        self.assertEquals([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
108 |
--------------------------------------------------------------------------------
/dataqs/aqicn/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import json
21 | import os
22 | import datetime
23 | from django.test import TestCase
24 | import dataqs
25 | from dataqs.aqicn.aqicn import AQICNProcessor
26 | import httpretty
27 | from mock import patch
28 |
29 | script_dir = os.path.dirname(os.path.realpath(__file__))
30 | tmpfile = os.path.join(script_dir, 'test_city.json')
31 |
32 |
def get_mock_response(filename):
    """
    Return a canned HTML response read from the test resources folder.

    :param filename: resource file name under resources/
    :return: the file's contents as a string
    """
    resource_path = os.path.join(script_dir, 'resources/{}'.format(filename))
    with open(resource_path) as html_file:
        return html_file.read()
40 |
41 |
def mock_saveData(self, city):
    """
    Save data to a JSON file instead of to the database.

    datetime values are not JSON-serializable, so they are first
    flattened to YYYY-MM-DD strings.
    """
    for key, value in list(city.items()):
        if isinstance(value, datetime.datetime):
            city[key] = value.strftime('%Y-%m-%d')
    with open(tmpfile, 'w') as outfile:
        outfile.write(json.dumps(city))
51 |
52 |
def mock_worker_init(self, table, cities):
    """
    Replacement for AQICNWorker.__init__ that skips any real setup and
    only records the attributes the tests rely on.
    """
    self.prefix = table
    self.archive = self.prefix + "_archive"
    self.cities = cities
    self.max_wait = 5
58 |
59 |
class AQICNTest(TestCase):
    """
    Tests the dataqs.aqicn module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        self.processor = AQICNProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()

    def test_download(self):
        """
        Verify that the master url is retrieved.
        """
        httpretty.register_uri(
            httpretty.GET,
            self.processor.base_url,
            body=get_mock_response('test_aqicn_cities.html'),
            content_type='text/html')
        content = self.processor.download()
        # BUG FIX: the expected-title literal was split across two
        # physical lines (a syntax error).  Restored as a single
        # string; the original may have carried surrounding markup --
        # TODO confirm against the test_aqicn_cities.html fixture.
        self.assertIn('Air Pollution in the World - aqicn.org', content)

    def test_getCities(self):
        """
        Verify that the processor creates a correct cities dictionary structure
        """
        self.processor.getCities()
        cities = self.processor.cities
        self.assertIsNotNone(cities)
        for city in cities:
            self.assertIsNotNone(city['city'], city)
            self.assertIsNotNone(city['country'], city)
            self.assertIsNotNone(city['url'], city)

    @patch('dataqs.aqicn.aqicn.AQICNWorker.__init__', mock_worker_init)
    @patch('dataqs.aqicn.aqicn.AQICNWorker.save_data', mock_saveData)
    def test_handleCity(self):
        """
        Verify that the correct AQI for a city is returned.
        """
        boston = u'http://aqicn.org/city/boston/'
        httpretty.register_uri(
            httpretty.GET,
            boston,
            body=get_mock_response('test_aqicn_boston.html'),
            content_type='text/html')
        cities = [{'city': u'Boston', 'country': u'USA', 'url': boston}]
        worker = dataqs.aqicn.aqicn.AQICNWorker('aqicn', cities)
        worker.handle_city(0, cities[0])
        # save_data is mocked to write JSON to a temp file; read it back
        # and check the parsed measurements.
        with open(tmpfile) as jsonfile:
            city_json = json.load(jsonfile)
            self.assertEquals(city_json['data']['cur_aqi'], u'25')
            self.assertEquals(city_json['data']['cur_pm25'], u'25')
            self.assertEquals(city_json['data']['cur_o3'], u'11')
            self.assertEquals(city_json['data']['cur_so2'], u'2')
120 |
--------------------------------------------------------------------------------
/ansible/roles/dataqs/tasks/main.yml:
--------------------------------------------------------------------------------
###
# Installs geonode customizations from epidemico
---
# System packages needed by the dataqs processors: GDAL headers for
# building the Python bindings, plus the cdo/netcdf command-line tools
# used by the NetCDF-based processors.
- name: get necessary apt packages
  apt: name={{item}}
  with_items:
    - libgdal-dev
    - cdo
    - netcdf-bin
  sudo: yes

- name: install python dependencies
  pip: virtualenv={{virtualenv_dir}}/{{app_name}} name={{item}}
  with_items:
    - redis
    - flower

# GDAL's headers are not on the default include path, so point the
# compiler at them while pip builds the package.
- name: install dataqs python package
  pip: virtualenv={{virtualenv_dir}}/{{app_name}} name=git+https://github.com/OpenGeoscience/dataqs.git#egg=dataqs
  environment:
    CPLUS_INCLUDE_PATH: "/usr/include/gdal"
    C_INCLUDE_PATH: "/usr/include/gdal"

- name: add user to tomcat7 group
  user: name={{deploy_user}} append=yes groups=tomcat7
  sudo: yes

# mode=1775 sets the sticky bit so group members can write but only
# owners can delete their own files.
- name: create work directory
  file: path=/home/{{deploy_user}}/epigeonode state=directory mode=1775 owner={{deploy_user}} group=tomcat7
  sudo: yes

- name: Create temp data directory
  file: path=/data/tmp state=directory mode=1775 owner={{deploy_user}} group=tomcat7
  sudo: yes

- include: geoserver_permissions.yml

- name: patch geonode application
  patch: >
    src=geonode_patch.diff
    basedir={{app_code_dir}}/{{app_name}}
    strip=1
  notify: restart uwsgi

- name: copy settings to geonode application
  template: src=dataq_settings.py dest={{geonode_root}}/dataq_settings.py
  notify: restart uwsgi

- name: import dataq settings in geonode configuration
  lineinfile: name="{{geonode_root}}/local_settings.py" state=present
              insertafter="EOF" line="from dataq_settings import *"
  notify: restart uwsgi

- name: enable dataq apps in geonode configuration
  lineinfile: name={{geonode_root}}/local_settings.py state=present
              insertafter=EOF line="dataqs_extend()"
  notify: restart uwsgi

- name: install supervisor config for workers
  template: src=celery.conf dest=/etc/supervisor/conf.d/ owner=root group=root mode=0644
  sudo: yes
  notify:
    - restart supervisor for celery

# NOTE(review): geoserver_permissions.yml is re-included around each
# store-creation step below, presumably because the store scripts
# create files whose ownership/permissions must be re-fixed for
# GeoServer -- confirm before consolidating the duplicate includes.
- include: geoserver_permissions.yml

# Each of the following runs a processor module directly to create its
# GeoServer store/layer; failures are ignored so a rerun is idempotent.
- name: Create the usgs_quakes store
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/usgs_quakes/usgs_quakes.py"
  ignore_errors: yes

- include: geoserver_permissions.yml

- name: Create the wqp store
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/wqp/wqp.py"
  ignore_errors: yes

- include: geoserver_permissions.yml

- name: Create the gdacs store
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/gdacs/gdacs.py"
  ignore_errors: yes

- include: geoserver_permissions.yml

- name: Create the gfms coverage store
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/gfms/gfms.py"
  ignore_errors: yes

- include: geoserver_permissions.yml

- name: Create the gistemp coverage store
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/gistemp/gistemp.py"
  ignore_errors: yes

- include: geoserver_permissions.yml

- name: Create the forecastio coverage store
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/forecastio/forecastio_air.py"
  ignore_errors: yes

- include: geoserver_permissions.yml

- name: Create the hifld layers
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/hifld/hifld.py"
  ignore_errors: yes

- include: geoserver_permissions.yml

- name: Create the cmap layer
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/cmap/cmap.py"
  ignore_errors: yes

- include: geoserver_permissions.yml

- name: Create the landscan layer
  command: "{{ app_code_dir }}/venvs/geonode/bin/python {{app_code_dir}}/venvs/geonode/src/dataqs/dataqs/landscan/landscan.py"
  ignore_errors: yes

# Register all newly created GeoServer layers with GeoNode.
- name: Django updatelayers
  django_manage: command=updatelayers
                 app_path={{app_code_dir}}/{{app_name}}
                 virtualenv={{virtualenv_dir}}/{{app_name}}
                 settings={{main_module}}.settings
--------------------------------------------------------------------------------
/dataqs/udatp/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import os
22 | import unittest
23 | from datetime import date
24 | from django.test import TestCase
25 | from dataqs.udatp.udatp import UoDAirTempPrecipProcessor
26 | from mock import patch
27 |
28 | script_dir = os.path.dirname(os.path.realpath(__file__))
29 |
30 |
def mock_retrbinary_nc(self, name, writer):
    """
    Mocks the ftplib.FTP.retrbinary method by streaming the canned
    NetCDF test file through the supplied writer callback.
    """
    fixture = os.path.join(script_dir, 'resources/uodtest.nc')
    with open(fixture, 'rb') as fixture_file:
        writer(fixture_file.read())
    return None
38 |
39 |
def mock_retrbinary_tif(self, name, writer):
    """
    Mocks the ftplib.FTP.retrbinary method by streaming the canned
    GeoTIFF test file through the supplied writer callback.
    """
    fixture = os.path.join(script_dir, 'resources/uodtest.tif')
    with open(fixture, 'rb') as fixture_file:
        writer(fixture_file.read())
    return None
47 |
48 |
def mock_none(self, *args):
    """Stand-in for FTP methods that should do nothing during tests."""
    return None
54 |
55 |
class UoDAirTempPrecipTest(TestCase):
    """
    Tests the dataqs.udatp module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        self.processor = UoDAirTempPrecipProcessor()

    def tearDown(self):
        # Remove any files the processor wrote to its temp directory.
        self.processor.cleanup()

    # The stacked patches replace every FTP call with a no-op and make
    # retrbinary stream a canned NetCDF fixture instead of hitting the
    # network.
    @patch('ftplib.FTP', autospec=True)
    @patch('ftplib.FTP.retrbinary', mock_retrbinary_nc)
    @patch('ftplib.FTP.connect', mock_none)
    @patch('ftplib.FTP.login', mock_none)
    @patch('ftplib.FTP.cwd', mock_none)
    def test_download(self, ftp_mock):
        """
        Verify that a file is downloaded
        """
        cdf_files = self.processor.download()
        for cdf in cdf_files:
            self.assertTrue(os.path.exists(cdf))

    @patch('ftplib.FTP', autospec=True)
    @patch('ftplib.FTP.retrbinary', mock_retrbinary_nc)
    @patch('ftplib.FTP.connect', mock_none)
    @patch('ftplib.FTP.login', mock_none)
    @patch('ftplib.FTP.cwd', mock_none)
    def test_cleanup(self, ftp_mock):
        # After cleanup() no files with the processor prefix remain.
        self.processor.download()
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        self.assertEquals([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))

    def test_date(self):
        # 1380 appears to be a month index the processor maps to
        # December 2015 -- see UoDAirTempPrecipProcessor.get_date for
        # the epoch it counts from (not visible here).
        last_date = self.processor.get_date(1380)
        self.assertEquals(last_date, date(2015, 12, 1))

    @patch('ftplib.FTP', autospec=True)
    @patch('ftplib.FTP.retrbinary', mock_retrbinary_nc)
    @patch('ftplib.FTP.connect', mock_none)
    @patch('ftplib.FTP.login', mock_none)
    @patch('ftplib.FTP.cwd', mock_none)
    def test_convert(self, ftp_mock):
        # Conversion shells out to cdo/netcdf tools; skip when they are
        # not installed on the test host.
        cdf_files = self.processor.download()
        for cdf in cdf_files:
            try:
                self.processor.convert(cdf)
                self.assertNotEqual([], glob.glob(cdf.replace(
                    '.nc', '.classic.lng.nc')))
            except OSError:
                # cdo and/or netcdf not installed
                raise unittest.SkipTest()

    @patch('ftplib.FTP', autospec=True)
    @patch('ftplib.FTP.retrbinary', mock_retrbinary_tif)
    @patch('ftplib.FTP.connect', mock_none)
    @patch('ftplib.FTP.login', mock_none)
    @patch('ftplib.FTP.cwd', mock_none)
    def test_extract_band(self, ftp_mock):
        # Downloads a canned GeoTIFF (retrbinary is mocked with the tif
        # fixture) and extracts band 1 from it.
        cdf = self.processor.download()[0]
        tif = cdf.replace('.nc', '.tif')
        self.processor.extract_band(cdf, 1, tif)
        self.assertTrue(os.path.isfile(tif))
125 |
--------------------------------------------------------------------------------
/dataqs/spei/spei.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | import logging
22 | import os
23 | from dataqs.processor_base import GeoDataProcessor
24 | from dataqs.helpers import get_band_count, gdal_translate, cdo_invert, \
25 | nc_convert, style_exists
26 |
27 | logger = logging.getLogger("dataqs.processors")
28 | script_dir = os.path.dirname(os.path.realpath(__file__))
29 |
30 |
class SPEIProcessor(GeoDataProcessor):
    """
    Class for processing data from the SPEI Global Drought Monitor
    (http://sac.csic.es/spei/map/maps.html)
    """
    prefix = "spei"
    # Layer name -> human-readable title published to GeoNode.
    spei_files = {
        'spei01': 'SPEI Global Drought Monitor (past month)',
        'spei03': 'SPEI Global Drought Monitor (past 3 months)'}
    base_url = "http://notos.eead.csic.es/spei/nc/"
    description = """The SPEI Global Drought Monitor (http://sac.csic.es/spei/)
offers near real-time information about drought conditions at the global scale,
with a 0.5 degrees spatial resolution and a monthly time resolution. SPEI
time-scales between 1 and 48 months are provided. The calibration period for the
SPEI is January 1950 to December 2010.
\n\nThe dataset is updated during the first days of the following month based on
the most reliable and updated sources of climatic data. Mean temperature data
are obtained from the NOAA NCEP CPC GHCN_CAMS gridded dataset. Monthly
precipitation sums data are obtained from the Global Precipitation Climatology
Centre (GPCC). Data from the 'first guess' GPCC product, with an original
resolution of 1º, are interpolated to the resolution of 0.5º.\n\nCurrently,
the SPEI Global Drought Monitor is based on the Thortnthwaite
equation for estimating potential evapotranspiration, PET. This is due to the
lack of real-time data sources for computing more robust PET estimations which
have larger data requirements. The main advantage of the SPEI Global Drought
Monitor is thus its near real-time character, a characteristic best suited for
drought monitoring and early warning purposes. For long-term analysis, however,
other datasets are to be preferred that rely on more robust methods of PET
estimation. Use of the SPEIbase dataset, which is based on the FAO-56
Penman-Monteith model, is thus recommended for climatological studies of
drought.\n\nSource: http://notos.eead.csic.es/spei/nc/"""

    def convert(self, nc_file):
        """
        Convert a downloaded SPEI NetCDF file in self.tmp_dir into an
        EPSG:4326 GeoTIFF.

        :param nc_file: name of the NetCDF file within self.tmp_dir
        :return: name of the generated GeoTIFF file
        """
        tif_file = "{}.tif".format(nc_file)
        nc_transform = nc_convert(os.path.join(self.tmp_dir, nc_file))
        cdo_transform = cdo_invert(os.path.join(self.tmp_dir, nc_transform))
        # Only the highest-numbered band is translated -- presumably the
        # most recent monthly slice of the time series; confirm against
        # the helpers in dataqs.helpers.
        band = get_band_count(cdo_transform)
        gdal_translate(cdo_transform, os.path.join(self.tmp_dir, tif_file),
                       bands=[band], projection='EPSG:4326')
        return tif_file

    def run(self):
        """
        Retrieve and process all SPEI image files listed in the SPEIProcess
        object's spei_files property.
        """
        for layer_name in self.spei_files.keys():
            self.download("{}{}.nc".format(self.base_url, layer_name))
            # NOTE(review): convert() receives the bare layer name, not
            # the "<layer>.nc" filename downloaded above -- verify that
            # nc_convert resolves this path as intended.
            tif_file = self.convert(layer_name)
            self.post_geoserver(tif_file, layer_name)
            if not style_exists(layer_name):
                with open(os.path.join(script_dir,
                                       'resources/spei.sld')) as sld:
                    self.set_default_style(layer_name, layer_name, sld.read())
            self.update_geonode(layer_name,
                                title=self.spei_files[layer_name],
                                description=self.description,
                                store=layer_name,
                                extra_keywords=['category:Agriculture'])
            self.truncate_gs_cache(layer_name)
        self.cleanup()
92 |
93 |
if __name__ == '__main__':
    # Allow running the processor directly as a script.
    SPEIProcessor().run()
97 |
--------------------------------------------------------------------------------
/dataqs/gfms/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import zipfile
22 | import os
23 | import datetime
24 | from django.test import TestCase
25 | import re
26 | from dataqs.gfms.gfms import GFMSProcessor
27 | import httpretty
28 |
29 | script_dir = os.path.dirname(os.path.realpath(__file__))
30 |
31 |
def get_mock_image():
    """
    Return the canned GFMS test image bytes from the resource archive.
    """
    archive_path = os.path.join(script_dir, 'resources/test_gfms.zip')
    with zipfile.ZipFile(archive_path) as archive:
        return archive.read('test_gfms.bin')
40 |
41 |
class GFMSTest(TestCase):
    """
    Tests the dataqs.gfms module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        self.processor = GFMSProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def test_find_current(self):
        """
        Verify that current file is for today's date
        """
        today = datetime.datetime.utcnow()
        # Zero-pad month and day to match the _YYYYMMDD fragment in the
        # remote file names.
        strmonth, strday = (
            '{0:02d}'.format(x) for x in [today.month, today.day])
        img = self.processor.get_most_current()
        date_str = '_{}{}{}'.format(today.year, strmonth, strday)
        self.assertTrue(date_str in img)

    def test_find_future(self):
        """
        Verify that future file is for a future date
        """
        today = datetime.datetime.now()
        month = today.strftime("%m")
        year = today.strftime("%Y")
        day = today.strftime("%d")
        imgs_url = self.processor.base_url + "{year}/{year}{month}".format(
            year=year, month=month)
        # NOTE(review): this literal appears to have lost its contents
        # in an export (''.format(...) always yields the empty string);
        # the original presumably templated a directory-listing entry
        # containing a %Y%m%d%H timestamp. Recover it from version
        # control before relying on this test.
        mock_imgs = [''.format(
            year, month, day, i) for i in range(23)]
        httpretty.register_uri(httpretty.GET, imgs_url,
                               body='\n'.join(mock_imgs))
        img = self.processor.get_latest_future()
        # NOTE(review): prefer a raw string (r'\d{10}') to avoid an
        # invalid-escape-sequence warning on newer Pythons.
        date_match = re.search('\d{10}', img)
        self.assertIsNotNone(date_match)
        future_date = datetime.datetime.strptime(
            date_match.group(), '%Y%m%d%H')
        self.assertGreater(future_date, today)

    def test_download(self):
        """
        Verify that a file is downloaded
        """
        current_url = self.processor.get_most_current()
        httpretty.register_uri(httpretty.GET, current_url,
                               body=get_mock_image())
        imgfile = self.processor.download(current_url)
        self.assertTrue(os.path.exists(
            os.path.join(self.processor.tmp_dir, imgfile)))

    def test_convert_image(self):
        # Download the canned binary image and convert it to GeoTIFF.
        current_url = self.processor.get_most_current()
        httpretty.register_uri(httpretty.GET, current_url,
                               body=get_mock_image())
        imgfile = self.processor.download(current_url)
        tif_file = self.processor.convert(imgfile)
        self.assertTrue(os.path.exists(os.path.join(
            self.processor.tmp_dir, tif_file)))

    def test_cleanup(self):
        # After cleanup() no files with the processor prefix remain.
        current_url = self.processor.get_most_current()
        httpretty.register_uri(httpretty.GET, current_url,
                               body=get_mock_image())
        imgfile = self.processor.download(current_url)
        self.processor.convert(imgfile)
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        self.assertEquals([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
120 |
--------------------------------------------------------------------------------
/dataqs/aqicn/resources/aqicn.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | aqicn
4 |
5 | aqicn
6 | aqicn
7 | 1
8 | Air Quality Index
9 |
10 | name
11 |
12 | rule1
13 | Good (<= 50)
14 |
15 |
16 |
17 | aqi
18 | 50.0
19 |
20 |
21 |
22 |
23 |
24 | circle
25 |
26 | #10A624
27 |
28 |
29 | 10
30 |
31 |
32 |
33 |
34 | rule1
35 | Moderate (50-100)
36 |
37 |
38 |
39 |
40 | aqi
41 | 50.0
42 |
43 |
44 | aqi
45 | 100.0
46 |
47 |
48 |
49 |
50 |
51 |
52 | circle
53 |
54 | #F5F108
55 |
56 |
57 | 10
58 |
59 |
60 |
61 |
62 | rule1
63 | Unhealthy for Sensitive Groups (101-150)
64 |
65 |
66 |
67 |
68 | aqi
69 | 101.0
70 |
71 |
72 | aqi
73 | 150.0
74 |
75 |
76 |
77 |
78 |
79 |
80 | circle
81 |
82 | #EFAE17
83 |
84 |
85 | 10
86 |
87 |
88 |
89 |
90 | rule1
91 | Unhealthy (> 150)
92 |
93 |
94 |
95 |
96 | aqi
97 | 228.0
98 |
99 |
100 | aqi
101 | 603.0
102 |
103 |
104 |
105 |
106 |
107 |
108 | circle
109 |
110 | #FF0000
111 |
112 |
113 | 10
114 |
115 |
116 |
117 |
118 |
119 |
120 |
--------------------------------------------------------------------------------
/dataqs/gdacs/gdacs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | import logging
22 | import os
23 | import datetime
24 | from django.db import connections
25 | from geonode.geoserver.helpers import ogc_server_settings
26 | from dataqs.helpers import ogr2ogr_exec, layer_exists, style_exists
27 | from dataqs.processor_base import GeoDataProcessor, DEFAULT_WORKSPACE
28 |
29 | logger = logging.getLogger("dataqs.processors")
30 | script_dir = os.path.dirname(os.path.realpath(__file__))
31 |
32 |
class GDACSProcessor(GeoDataProcessor):
    """
    Class for processing data from the Global Disaster Alerts &
    Coordination System website (http://gdacs.org/)
    """

    prefix = "gdacs_alerts"
    layer_title = 'Flood, Quake, Cyclone Alerts - GDACS'
    # Default (empty) parameter set; copied per instance in __init__.
    params = {}
    base_url = \
        "http://www.gdacs.org/rss.aspx?profile=ARCHIVE&fromarchive=true&" + \
        "from={}&to={}&alertlevel=&country=&eventtype=EQ,TC,FL&map=true"
    description = """GDACS (Global Disaster and Alert Coordination System) is a
collaboration platform for organisations providing information on humanitarian
disasters. From a technical point of view, GDACS links information of all
participating organisations using a variety of systems to have a harmonized list
of data sources.In 2011, the GDACS platform was completely revised to collect,
store and distribute resources explicitly by events. The system matches
information from all organisations (by translating unique identifiers), and make
these resources available for GDACS users and developers in the form of GDACS
Platform Services. The GDACS RSS feed automatically include a list of available
resources.\n\nSource: http://www.gdacs.org/resources.aspx"""

    def __init__(self, *args, **kwargs):
        """
        :param sdate: optional start date string (YYYY-MM-DD);
            defaults to 7 days ago.
        :param edate: optional end date string (YYYY-MM-DD);
            defaults to today.
        """
        # BUG FIX: ``params`` was a class-level dict that the original
        # code mutated in place, so all instances (and repeated task
        # runs) shared and accumulated the same parameters.  Copy it so
        # each instance owns its own dict.
        self.params = dict(self.params)
        self.params.update(kwargs)

        if 'sdate' not in self.params:
            today = datetime.date.today()
            self.params['sdate'] = (
                today - datetime.timedelta(days=7)).strftime("%Y-%m-%d")

        if 'edate' not in self.params:
            self.params['edate'] = datetime.date.today().strftime("%Y-%m-%d")

        super(GDACSProcessor, self).__init__(*args)

    def run(self):
        """
        Download the GDACS archive RSS for the configured date range,
        load it into the GeoNode datastore with ogr2ogr, and publish or
        update the corresponding GeoServer/GeoNode layer.
        """
        # Use the module logger instead of the original debug print().
        logger.debug(self.base_url.format(
            self.params['sdate'], self.params['edate']))
        rss = self.download(self.base_url.format(
            self.params['sdate'], self.params['edate']),
            filename=self.prefix + ".rss")
        db = ogc_server_settings.datastore_db
        ogr2ogr_exec("-append -skipfailures -f PostgreSQL \
            \"PG:host={db_host} user={db_user} password={db_pass} \
            dbname={db_name}\" {rss} -nln {table}".format(
            db_host=db["HOST"], db_user=db["USER"],
            db_pass=db["PASSWORD"], db_name=db["NAME"],
            rss="{}".format(os.path.join(self.tmp_dir, rss)),
            table=self.prefix))
        datastore = ogc_server_settings.server.get('DATASTORE')
        if not layer_exists(self.prefix, datastore, DEFAULT_WORKSPACE):
            # First run for this table: enforce uniqueness on the feed's
            # guid column so -append re-runs do not duplicate alerts.
            c = connections[datastore].cursor()
            try:
                c.execute(
                    'ALTER TABLE {tb} ADD CONSTRAINT {tb}_guid UNIQUE (guid);'.
                    format(tb=self.prefix))
            finally:
                # Always release the cursor (the original leaked it on
                # the success path).
                c.close()
        self.post_geoserver_vector(self.prefix)
        if not style_exists(self.prefix):
            with open(os.path.join(
                    script_dir, 'resources/gdacs.sld')) as sld:
                self.set_default_style(self.prefix, self.prefix, sld.read())
        self.update_geonode(self.prefix, title=self.layer_title,
                            description=self.description, store=datastore,
                            extra_keywords=['category:Disaster Alerts'])
        self.truncate_gs_cache(self.prefix)
        self.cleanup()
105 |
if __name__ == '__main__':
    # Allow running the processor directly as a script.
    GDACSProcessor().run()
109 |
--------------------------------------------------------------------------------
/dataqs/nasa_gpm/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import zipfile
22 | import os
23 | import datetime
24 | from django.test import TestCase
25 | from dataqs.nasa_gpm.nasa_gpm import GPMProcessor
26 | from mock import patch
27 |
28 | script_dir = os.path.dirname(os.path.realpath(__file__))
29 |
30 |
def mock_retrbinary(self, name, writer):
    """
    Mocks the ftplib.FTP.retrbinary method: feeds the canned test image
    (matching the requested file's 3-character extension) to ``writer``.
    """
    suffix = name[-3:]
    archive = zipfile.ZipFile(
        os.path.join(script_dir, 'resources/test_gpm.zip'))
    payload = archive.read('test_gpm.{}'.format(suffix))
    writer(payload)
    return None
39 |
40 |
def mock_nlst(self, *args):
    """
    Mocks the ftplib.FTP.nlst method, returns a list of files based on date.

    Produces one filename per day for the last 7 days (today first,
    descending), matching the GPM IMERG naming scheme.
    """
    template = '3B-HHR-E.MS.MRG.3IMERG.{}-S120000-E175959.1050.V03E.1day.tif'
    now = datetime.datetime.utcnow()
    # Comprehension replaces the manual while-loop, and avoids binding a
    # local named ``file`` (which shadows the Python 2 builtin).
    return [
        template.format(
            (now - datetime.timedelta(days=offset)).strftime('%Y%m%d'))
        for offset in range(7)
    ]
55 |
56 |
def mock_none(self, *args):
    """
    Generic no-op stand-in for FTP methods whose return value the tests
    never inspect (connect, login, cwd, ...).
    """
    return None
62 |
63 |
class NasaGpmTest(TestCase):
    """
    Tests the dataqs.nasa_gpm module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        # Fresh processor per test; tearDown removes its temp files.
        self.processor = GPMProcessor()

    def tearDown(self):
        self.processor.cleanup()

    @patch('ftplib.FTP', autospec=True)
    @patch('ftplib.FTP.retrbinary', mock_retrbinary)
    @patch('ftplib.FTP.nlst', mock_nlst)
    @patch('ftplib.FTP.connect', mock_none)
    @patch('ftplib.FTP.login', mock_none)
    @patch('ftplib.FTP.cwd', mock_none)
    def test_download(self, mock_ftp):
        """
        Verify that files are downloaded.
        """
        today = datetime.datetime.utcnow()
        imgfile = self.processor.download()[0]
        self.assertTrue(os.path.exists(os.path.join(
            self.processor.tmp_dir, imgfile)))
        self.assertTrue('3B-HHR-E.MS.MRG.3IMERG.{}'.format(
            datetime.datetime.strftime(today, '%Y%m%d')) in imgfile)

    def test_parse_name(self):
        """
        Layer title should contain date of image
        :return:
        """
        f = '3B-HHR-E.MS.MRG.3IMERG.20151027-S133000-E135959.0810.V03E.1day.tif'
        title = self.processor.parse_name(f)[0]
        self.assertTrue('NASA Global Precipitation Estimate (1day) - 2015-10-27'
                        in title)

    @patch('ftplib.FTP', autospec=True)
    @patch('ftplib.FTP.retrbinary', mock_retrbinary)
    @patch('ftplib.FTP.nlst', mock_nlst)
    @patch('ftplib.FTP.connect', mock_none)
    @patch('ftplib.FTP.login', mock_none)
    @patch('ftplib.FTP.cwd', mock_none)
    def test_convert_image(self, mock_ftp):
        """
        Converted image should be created in the temp directory.
        :return:
        """
        dl_tif = self.processor.download()[0]
        convert_tif = self.processor.convert(dl_tif)
        self.assertTrue(os.path.exists(os.path.join(
            self.processor.tmp_dir, convert_tif)))

    @patch('ftplib.FTP', autospec=True)
    @patch('ftplib.FTP.retrbinary', mock_retrbinary)
    @patch('ftplib.FTP.nlst', mock_nlst)
    @patch('ftplib.FTP.connect', mock_none)
    @patch('ftplib.FTP.login', mock_none)
    @patch('ftplib.FTP.cwd', mock_none)
    def test_cleanup(self, mock_ftp):
        """
        Temporary files should be gone after cleanup
        :return:
        """
        dl_tif = self.processor.download()[0]
        self.processor.convert(dl_tif)
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
138 |
--------------------------------------------------------------------------------
/dataqs/forecastio/tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | import glob
21 | import zipfile
22 | import os
23 | import datetime
24 | from urlparse import urljoin
25 | from django.test import TestCase
26 | from dataqs.forecastio.forecastio_air import ForecastIOAirTempProcessor
27 | import httpretty
28 |
29 | script_dir = os.path.dirname(os.path.realpath(__file__))
30 |
31 |
def get_test_image():
    """
    Return the canned test GeoTIFF bytes bundled in the test resources.
    """
    archive_path = os.path.join(
        script_dir, 'resources/test_forecastio.zip')
    return zipfile.ZipFile(archive_path).read('test_forecastio.tif')
40 |
41 |
class ForecastIOAirTempTest(TestCase):
    """
    Tests the dataqs.forecastio module. Since each processor is highly
    dependent on a running GeoNode instance for most functions, only
    independent functions are tested here.
    """

    def setUp(self):
        self.processor = ForecastIOAirTempProcessor()
        httpretty.enable()

    def tearDown(self):
        httpretty.disable()
        self.processor.cleanup()

    def _register_image(self, today):
        """
        Register a canned image response with httpretty for the given
        datetime's forecast.io image URL.

        Extracted helper: this setup was duplicated verbatim in three
        test methods.

        :param today: datetime used to build the image URL
        :return: (img_url, raw_name) for self.processor.download()
        """
        strmonth, strday, strhour = (
            '{0:02d}'.format(x) for x in [today.month, today.day, today.hour])
        img_url = urljoin(self.processor.base_url, '{}/{}/{}/{}.tif'.format(
            today.year, strmonth, strday, strhour))
        raw_name = "{prefix}_{hour}.tif".format(
            prefix=self.processor.prefix, hour=strhour)
        httpretty.register_uri(httpretty.GET, img_url,
                               body=get_test_image(),
                               content_type="image/tif")
        return img_url, raw_name

    def test_download(self):
        """
        Verify that files are downloaded.
        """
        img_url, raw_name = self._register_image(datetime.datetime.utcnow())
        imgfile = self.processor.download(img_url, filename=raw_name)
        self.assertTrue(os.path.exists(os.path.join(
            self.processor.tmp_dir, imgfile)))

    def test_parse_name(self):
        """
        Layer title should contain date of image
        :return:
        """
        today = datetime.datetime(2006, 11, 21, 16, 00)
        title = self.processor.parse_name(today)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(
            'Global (near-surface) Air Temperature - 2006-11-21 16:00 UTC',
            title)

    def test_convert_image(self):
        """
        Verifies that the original image is translated into a new one with the
        expected name and location.
        :return:
        """
        today = datetime.datetime.utcnow()
        img_url, raw_name = self._register_image(today)
        imgfile = self.processor.download(img_url, filename=raw_name)
        tif_file = self.processor.convert(imgfile, today)
        self.assertTrue(tif_file.endswith('0000000Z.tif'))
        self.assertTrue(os.path.exists(os.path.join(
            self.processor.tmp_dir, tif_file)))

    def test_cleanup(self):
        """
        Verifies that no images are left over after cleanup
        :return:
        """
        img_url, raw_name = self._register_image(datetime.datetime.utcnow())
        self.processor.download(img_url, filename=raw_name)
        self.assertNotEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
        self.processor.cleanup()
        self.assertEqual([], glob.glob(os.path.join(
            self.processor.tmp_dir, self.processor.prefix + '*')))
132 |
--------------------------------------------------------------------------------
/dataqs/gistemp/gistemp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | ###############################################################################
5 | # Copyright Kitware Inc. and Epidemico Inc.
6 | #
7 | # Licensed under the Apache License, Version 2.0 ( the "License" );
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | ###############################################################################
19 |
20 | from __future__ import absolute_import
21 | import logging
22 | import os
23 | import re
24 | import shutil
25 | from datetime import date
26 | from dateutil.relativedelta import relativedelta
27 | from dataqs.processor_base import GeoDataMosaicProcessor, GS_DATA_DIR
28 | from dataqs.helpers import get_band_count, gdal_translate, \
29 | nc_convert, style_exists, cdo_fixlng, gunzip
30 |
31 | logger = logging.getLogger("dataqs.processors")
32 | script_dir = os.path.dirname(os.path.realpath(__file__))
33 |
34 |
class GISTEMPProcessor(GeoDataMosaicProcessor):
    """
    Processor for Land-Ocean Temperature Index, ERSSTv4, 1200km smoothing
    from the NASA Goddard Institute for Space Studies' Surface Temperature
    Analysis (GISTEMP).
    More info at http://data.giss.nasa.gov/gistemp/
    """
    prefix = "gistemp"
    base_url = "http://data.giss.nasa.gov/pub/gistemp/gistemp1200_ERSSTv4.nc.gz"
    layer_name = 'gistemp1200_ERSSTv4'
    title = 'Global Monthly Air Temperature Anomalies, 1880/01/01 - {}'
    abstract = """The GISTEMP analysis recalculates consistent temperature
anomaly series from 1880 to the present for a regularly spaced array of virtual
stations covering the whole globe. Those data are used to investigate regional
and global patterns and trends. Graphs and tables are updated around the
middle of every month using current data files from NOAA GHCN v3 (meteorological
stations), ERSST v4 (ocean areas), and SCAR (Antarctic stations).

The displayed image is based on the most current month.

Citations:
- GISTEMP Team, 2016: GISS Surface Temperature Analysis (GISTEMP).
NASA Goddard Institute for Space Studies. Dataset accessed monthly
since 8/2016 at http://data.giss.nasa.gov/gistemp/.
- Hansen, J., R. Ruedy, M. Sato, and K. Lo, 2010: Global surface
temperature change, Rev. Geophys., 48, RG4004, doi:10.1029/2010RG000345.

"""

    def convert(self, nc_file):
        """
        Convert the raw NetCDF file to a GDAL-readable form and normalize
        its longitudes with cdo.

        :param nc_file: path of the downloaded NetCDF file
        :return: path of the converted file
        """
        nc_transform = nc_convert(nc_file)
        cdo_transform = cdo_fixlng(nc_transform)
        return cdo_transform

    def extract_band(self, tif, band, outname):
        """
        Extract a single band into its own compressed GeoTIFF.

        :param tif: path of the multi-band source image
        :param band: 1-based band index to extract
        :param outname: output filename (created in self.tmp_dir)
        :return: full path of the extracted image
        """
        outfile = os.path.join(self.tmp_dir, outname)
        gdal_translate(tif, outfile, bands=[band],
                       projection='EPSG:4326',
                       options=['TILED=YES', 'COMPRESS=LZW'])
        return outfile

    def get_date(self, months):
        """
        Map a 1-based band index to its calendar month: band 1 is
        1880-01-01, band 2 is 1880-02-01, and so on.

        :param months: 1-based band index
        :return: datetime.date for that band's month
        """
        start_month = date(1880, 1, 1)
        return start_month + relativedelta(months=months - 1)

    def get_title(self, months):
        """
        Build the layer title ending with the date of the given band.

        :param months: 1-based index of the latest band
        :return: formatted title string
        """
        end_month = self.get_date(months)
        return self.title.format(end_month.strftime('%Y/%m/%d'))

    def run(self):
        """
        Retrieve and process the latest NetCDF file: download, gunzip,
        convert, extract any bands not yet in the mosaic, publish them to
        GeoServer, then update style/metadata in GeoNode and clean up.
        """
        gzfile = self.download(
            self.base_url, '{}.nc.gz'.format(self.layer_name))
        ncfile = gunzip(os.path.join(self.tmp_dir, gzfile))
        cdf_file = self.convert(ncfile)
        bands = get_band_count(cdf_file)
        img_list = self.get_mosaic_filenames(self.layer_name)
        for band in range(1, bands + 1):
            # Raw string: '[\-\.]+' contains invalid escape sequences in a
            # plain literal (DeprecationWarning on Python 3).
            band_date = re.sub(r'[\-\.]+', '', self.get_date(band).isoformat())
            img_name = '{}_{}T000000000Z.tif'.format(self.layer_name, band_date)
            # Only process bands not already present in the mosaic.
            if img_name not in img_list:
                band_tif = self.extract_band(cdf_file, band, img_name)
                dst_file = self.data_dir.format(gsd=GS_DATA_DIR,
                                                ws=self.workspace,
                                                layer=self.layer_name,
                                                file=img_name)
                dst_dir = os.path.dirname(dst_file)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                # Defensive check before moving into the mosaic directory.
                if dst_file.endswith('.tif'):
                    shutil.move(os.path.join(self.tmp_dir, band_tif), dst_file)
                    self.post_geoserver(dst_file, self.layer_name)

        if not style_exists(self.layer_name):
            with open(os.path.join(script_dir,
                                   'resources/gistemp.sld')) as sld:
                self.set_default_style(self.layer_name, self.layer_name,
                                       sld.read().format(latest_band=bands))
        self.update_geonode(
            self.layer_name, title=self.get_title(bands),
            description=self.abstract,
            store=self.layer_name,
            bounds=('-180.0', '180.0', '-90.0', '90.0',
                    'EPSG:4326'),
            extra_keywords=['category:Climatology Meteorology'])
        self.truncate_gs_cache(self.layer_name)
        self.cleanup()
124 |
125 |
# Allow running this processor directly as a standalone script.
if __name__ == '__main__':
    processor = GISTEMPProcessor()
    processor.run()
129 |
--------------------------------------------------------------------------------
/dataqs/mmwr/resources/mmwr.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | {layername}
4 |
5 | {layername}
6 | All Deaths
7 | 1
8 | MMWWR
9 |
10 | name
11 |
12 | rule1
13 | <= 75
14 |
15 |
16 |
17 | all
18 | 75.0
19 |
20 |
21 |
22 |
23 |
24 | circle
25 |
26 | #0000FF
27 |
28 |
29 | 6
30 |
31 |
32 |
33 |
34 | rule1
35 | > 75 AND <= 150
36 |
37 |
38 |
39 |
40 | all
41 | 75
42 |
43 |
44 | all
45 | 150
46 |
47 |
48 |
49 |
50 |
51 |
52 | circle
53 |
54 | #12C308
55 |
56 |
57 | 6
58 |
59 |
60 |
61 |
62 | rule1
63 | > 150 AND <= 250
64 |
65 |
66 |
67 |
68 | all
69 | 150
70 |
71 |
72 | all
73 | 250
74 |
75 |
76 |
77 |
78 |
79 |
80 | circle
81 |
82 | #FFFF00
83 |
84 |
85 | 6
86 |
87 |
88 |
89 |
90 | rule1
91 | > 250 AND <= 400
92 |
93 |
94 |
95 |
96 | all
97 | 250
98 |
99 |
100 | all
101 | 400
102 |
103 |
104 |
105 |
106 |
107 |
108 | circle
109 |
110 | #FF7F00
111 |
112 |
113 | 6
114 |
115 |
116 |
117 |
118 | rule1
119 | > 400
120 |
121 |
122 |
123 | all
124 | 400
125 |
126 |
127 |
128 |
129 |
130 | circle
131 |
132 | #FF0000
133 |
134 |
135 | 6
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
--------------------------------------------------------------------------------
/dataqs/wqp/resources/wqp_api_ph_map.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | wqp_api_ph_map
4 |
5 | wqp_api_ph_map
6 | pH (Water Quality)
7 | 1
8 |
9 | name
10 |
11 | < 3
12 |
13 |
14 | ResultMeasureValue
15 | 3.0
16 |
17 |
18 |
19 |
20 |
21 | circle
22 |
23 | #0000FF
24 | 0.8
25 |
26 |
27 | 10
28 |
29 |
30 |
31 |
32 | > 3 AND <= 7
33 |
34 |
35 |
36 | ResultMeasureValue
37 | 3.0
38 |
39 |
40 | ResultMeasureValue
41 | 7.0
42 |
43 |
44 |
45 |
46 |
47 |
48 | circle
49 |
50 | #0BB70E
51 | 0.8
52 |
53 |
54 | 10
55 |
56 |
57 |
58 |
59 | > 7.0 AND <= 7.5
60 |
61 |
62 |
63 | ResultMeasureValue
64 | 7.0
65 |
66 |
67 | ResultMeasureValue
68 | 7.5
69 |
70 |
71 |
72 |
73 |
74 |
75 | circle
76 |
77 | #FFFF00
78 | 0.8
79 |
80 |
81 | 10
82 |
83 |
84 |
85 |
86 | > 7.5 AND <= 8
87 |
88 |
89 |
90 | ResultMeasureValue
91 | 7.5
92 |
93 |
94 | ResultMeasureValue
95 | 8.0
96 |
97 |
98 |
99 |
100 |
101 |
102 | circle
103 |
104 | #FF7F00
105 | 0.8
106 |
107 |
108 | 10
109 |
110 |
111 |
112 |
113 | > 8
114 |
115 |
116 | ResultMeasureValue
117 | 8.0
118 |
119 |
120 |
121 |
122 |
123 | circle
124 |
125 | #FF0000
126 | 0.8
127 |
128 |
129 | 10
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
--------------------------------------------------------------------------------
/dataqs/whisp/resources/whisp.sld:
--------------------------------------------------------------------------------
1 |
2 |
3 | whisp
4 |
5 | whisp
6 | WHISPers events
7 | 1
8 | Wildlife Health Information Events
9 |
10 | name
11 |
12 | rule1
13 | Morbidity
14 |
15 |
16 | eventtype
17 | Morbidity
18 |
19 |
20 |
21 |
22 |
23 | circle
24 |
25 | #0000FF
26 | 0.5
27 |
28 |
29 | 0.1
30 |
31 |
32 | 10
33 |
34 |
35 |
36 |
37 | rule1
38 | Morbidity < 5
39 |
40 |
41 | eventtype
42 | Morbidity < 5
43 |
44 |
45 |
46 |
47 |
48 | circle
49 |
50 | #0A902D
51 | 0.5
52 |
53 |
54 | 0.1
55 |
56 |
57 | 10
58 |
59 |
60 |
61 |
62 | rule1
63 | Mortality
64 |
65 |
66 | eventtype
67 | Mortality
68 |
69 |
70 |
71 |
72 |
73 | circle
74 |
75 | #FFFF00
76 | 0.5
77 |
78 |
79 | 0.1
80 |
81 |
82 | 10
83 |
84 |
85 |
86 |
87 | rule1
88 | Mortality < 5
89 |
90 |
91 | eventtype
92 | Mortality < 5
93 |
94 |
95 |
96 |
97 |
98 | circle
99 |
100 | #FF7F00
101 | 0.5
102 |
103 |
104 | 0.1
105 |
106 |
107 | 10
108 |
109 |
110 |
111 |
112 | rule1
113 | Surveillance
114 |
115 |
116 | eventtype
117 | Surveillance
118 |
119 |
120 |
121 |
122 |
123 | circle
124 |
125 | #FF0000
126 | 0.5
127 |
128 |
129 | 0.1
130 |
131 |
132 | 10
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
--------------------------------------------------------------------------------