├── nansat
│   ├── mappers
│   │   ├── __init__.py
│   │   ├── obpg.py
│   │   ├── hdf4_mapper.py
│   │   ├── mapper_arome.py
│   │   ├── mapper_mod44w.py
│   │   ├── mapper_ecmwf_metno.py
│   │   ├── mapper_amsre_uham_leadfraction.py
│   │   ├── mapper_quikscat.py
│   │   ├── mapper_opendap_globcurrent_thredds.py
│   │   ├── mapper_opendap_occci.py
│   │   ├── mapper_opendap_sentinel1_wind.py
│   │   ├── mapper_netcdf_cf_sentinel1.py
│   │   ├── mapper_cmems.py
│   │   ├── mapper_opendap_sstcci.py
│   │   ├── mapper_kmss.py
│   │   ├── mapper_opendap_siwtacsst.py
│   │   ├── mapper_goci_l1.py
│   │   ├── mapper_ascat.py
│   │   ├── mapper_case2reg.py
│   │   ├── mapper_metno_local_hires_seaice.py
│   │   ├── globcolour.py
│   │   ├── mapper_topography.py
│   │   ├── mapper_emodnet.py
│   │   ├── mapper_opendap_osisaf.py
│   │   ├── mapper_opendap_globcurrent.py
│   │   ├── mapper_opendap_sentinel2.py
│   │   ├── mapper_opendap_mywave.py
│   │   ├── get_inv.pl
│   │   ├── get_grib.pl
│   │   ├── mapper_amsr2_l3.py
│   │   ├── mapper_pathfinder52.py
│   │   ├── mapper_opendap_arome.py
│   │   ├── mapper_metno_hires_seaice.py
│   │   ├── mapper_ncep_wind.py
│   │   ├── mapper_hirlam.py
│   │   ├── mapper_hirlam_wind_netcdf.py
│   │   ├── scatterometers.py
│   │   ├── mapper_viirs_l1.py
│   │   └── mapper_opendap_sentinel1.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── mappers
│   │   │   ├── __init__.py
│   │   │   ├── test_mapper_opendap_arome.py
│   │   │   ├── test_mapper_opendap_mywave.py
│   │   │   └── test_mapper_opendap_ostia.py
│   │   ├── data
│   │   │   ├── gcps.tif
│   │   │   ├── map.tif
│   │   │   ├── arctic.nc
│   │   │   ├── complex.nc
│   │   │   ├── points.shp
│   │   │   ├── points.shx
│   │   │   ├── stere.tif
│   │   │   ├── nansat_logo_s.png
│   │   │   ├── points.dbf
│   │   │   └── some_xml_file.xml
│   │   ├── test_pixelfunctions.py
│   │   ├── nansat_test_data.py
│   │   ├── nansat_test_base.py
│   │   ├── test_utils.py
│   │   ├── test_geolocation.py
│   │   ├── test_nsr.py
│   │   ├── test_tools.py
│   │   └── test_pointbrowser.py
│   ├── fonts
│   │   ├── DejaVuSans.ttf
│   │   └── LICENSE
│   ├── warnings.py
│   ├── pixelfunctions
│   │   ├── MakefileWin
│   │   ├── README.txt
│   │   ├── Makefile
│   │   ├── _pixfun_py2.c
│   │   ├── README_Nansat
│   │   ├── pixfunplugin.c
│   │   └── _pixfun_py3.c
│   ├── exceptions.py
│   ├── __init__.py
│   ├── nsr.py
│   └── geolocation.py
├── nansat_integration_tests
│   ├── __init__.py
│   ├── test_open_issues.py
│   ├── test_radarsat2.py
│   └── test_mappers.py
├── .coveragerc
├── .dockerignore
├── docs
│   ├── source
│   │   ├── modules.rst
│   │   ├── images
│   │   │   └── nansat_logo_transp.png
│   │   ├── packages_and_modules.rst
│   │   ├── tutorials.rst
│   │   ├── acknowledgments.rst
│   │   ├── about.rst
│   │   ├── documenting.rst
│   │   ├── nansat.rst
│   │   ├── features.rst
│   │   ├── nansat.tests.rst
│   │   └── release_nansat.rst
│   ├── environment.yml
│   ├── Makefile
│   ├── make.bat
│   └── index.rst
├── provisioning
│   ├── galaxy_requirements.yml
│   ├── conda_env_requirements.yml
│   ├── roles
│   │   └── nansat
│   │       └── tasks
│   │           └── main.yml
│   └── site.yml
├── MANIFEST.in
├── pyproject.toml
├── .readthedocs.yml
├── .gitignore
├── Dockerfile
├── utilities
│   ├── nansatinfo
│   ├── nansat_geotiffimage
│   ├── nansat_translate
│   ├── nansat_show
│   └── nansat_add_coastline
├── Vagrantfile
└── .github
    └── workflows
        └── ci.yml
/nansat/mappers/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/nansat/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/nansat/tests/mappers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/nansat_integration_tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | relative_files = True
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | build
2 | dist
3 | provisioning
4 | nansat_integration_tests
5 |
--------------------------------------------------------------------------------
/docs/source/modules.rst:
--------------------------------------------------------------------------------
1 | nansat
2 | ======
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | nansat
8 |
--------------------------------------------------------------------------------
/nansat/tests/data/gcps.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/gcps.tif
--------------------------------------------------------------------------------
/nansat/tests/data/map.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/map.tif
--------------------------------------------------------------------------------
/nansat/fonts/DejaVuSans.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/fonts/DejaVuSans.ttf
--------------------------------------------------------------------------------
/nansat/tests/data/arctic.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/arctic.nc
--------------------------------------------------------------------------------
/nansat/tests/data/complex.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/complex.nc
--------------------------------------------------------------------------------
/nansat/tests/data/points.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/points.shp
--------------------------------------------------------------------------------
/nansat/tests/data/points.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/points.shx
--------------------------------------------------------------------------------
/nansat/tests/data/stere.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/stere.tif
--------------------------------------------------------------------------------
/provisioning/galaxy_requirements.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - src: andrewrothstein.miniconda
3 | - src: andrewrothstein.conda-env
4 |
5 |
--------------------------------------------------------------------------------
/nansat/tests/data/nansat_logo_s.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/nansat_logo_s.png
--------------------------------------------------------------------------------
/docs/source/images/nansat_logo_transp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/docs/source/images/nansat_logo_transp.png
--------------------------------------------------------------------------------
/nansat/tests/data/points.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nansencenter/nansat/HEAD/nansat/tests/data/points.dbf
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | # Include the license file
2 | include LICENSE
3 |
4 | # Include the pixel function files
5 | recursive-include nansat/pixelfunctions *
6 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools", "setuptools_scm"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [tool.setuptools_scm]
6 |
--------------------------------------------------------------------------------
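
Because the build backend above delegates versioning to setuptools_scm, the package version is derived from the git tag at build time rather than hard-coded in the sources. A minimal sketch (not part of the repository) of how the resulting version string can be inspected from an installed nansat, assuming Python 3.8+ for importlib.metadata:

    # Read the version that setuptools_scm recorded in the installed package metadata.
    from importlib.metadata import version

    print(version("nansat"))  # a tagged release like "X.Y.Z", or "X.Y.Z.devN+g<sha>" between tags
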
/docs/source/packages_and_modules.rst:
--------------------------------------------------------------------------------
1 | Packages and modules
2 | =====================
3 |
4 | .. toctree::
5 | :maxdepth: 1
6 |
7 | modules.rst
8 | nansat.mappers.rst
9 |
--------------------------------------------------------------------------------
/nansat/tests/data/some_xml_file.xml:
--------------------------------------------------------------------------------
1 |
2 | testValue
3 |
4 |
5 | Deep Value
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/provisioning/conda_env_requirements.yml:
--------------------------------------------------------------------------------
1 | ---
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.6
6 | - gdal
7 | - numpy
8 | - pillow
9 | - netcdf4
10 | - python-dateutil
11 | - nose
12 | - coveralls
13 | - mock
14 | - urllib3
15 | - pythesint
16 |
--------------------------------------------------------------------------------
/docs/environment.yml:
--------------------------------------------------------------------------------
1 | dependencies:
2 | - ipykernel
3 | - mock
4 | - nbconvert
5 | - notebook
6 | - pandoc
7 | - pip
8 | - python-dateutil
9 | - recommonmark
10 | - sphinx
11 | - sphinx_rtd_theme
12 | - pip
13 | - pip:
14 | - nbsphinx
15 | - sphinx-autobuild
16 |
--------------------------------------------------------------------------------
/provisioning/roles/nansat/tasks/main.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: nansat | Add PYTHONPATH to bashrc
3 | lineinfile: dest="/home/vagrant/.bashrc" line="export PYTHONPATH=/vagrant"
4 |
5 | - name: nansat | Compile pixel functions
6 | shell: 'python setup.py build_ext --inplace'
7 | args:
8 | chdir: '/vagrant'
9 |
10 | - name: nansat | Run nosetests
11 | shell: 'nosetests nansat'
12 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: 2
3 |
4 | build:
5 | os: "ubuntu-22.04"
6 | tools:
7 | python: "mambaforge-latest"
8 | jobs:
9 | pre_build:
10 | # add cloned repository to python importable paths
11 | - 'echo "$(pwd)" >> $(python -c "import sysconfig;print(sysconfig.get_paths()[\"purelib\"])")/workdir.pth'
12 |
13 | conda:
14 | environment: docs/environment.yml
15 |
--------------------------------------------------------------------------------
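
The pre_build job above drops the checkout directory into a *.pth file inside site-packages, which makes the cloned nansat importable by Sphinx without a pip install. A short illustration (not from the repository) of the mechanism it relies on:

    # Every line of a *.pth file found in site-packages is appended to sys.path at
    # interpreter start-up, so listing the repository root there enables `import nansat`.
    import sysconfig

    site_packages = sysconfig.get_paths()["purelib"]  # the directory scanned for *.pth files
    print("a workdir.pth placed here adds its lines to sys.path:", site_packages)
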
/nansat/warnings.py:
--------------------------------------------------------------------------------
1 | # Name: warnings.py
2 | # Purpose: Definitions of nansat warnings
3 | # Authors: Morten W. Hansen
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 |
8 | class NansatFutureWarning(Warning):
9 | pass
10 |
--------------------------------------------------------------------------------
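
NansatFutureWarning is an ordinary Warning subclass used to flag API that is scheduled for removal. A hedged sketch of how such a warning can be emitted and filtered with the standard warnings machinery (the deprecated function below is invented for illustration):

    import warnings

    from nansat.warnings import NansatFutureWarning


    def old_helper():
        # Hypothetical deprecated function, shown only to demonstrate the warning class.
        warnings.warn("old_helper() will be removed in a future release",
                      NansatFutureWarning, stacklevel=2)


    with warnings.catch_warnings():
        warnings.simplefilter("ignore", NansatFutureWarning)
        old_helper()  # the warning is suppressed inside this block
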
/.gitignore:
--------------------------------------------------------------------------------
1 | gidea/
2 | *.pyc
3 | *.swp
4 | *.so
5 | *.o
6 | *.png
7 | !docs/source/images/*.png
8 | .project
9 | .pydevproject
10 | test_data/
11 | build/
12 | dist/
13 | nansat.egg-info/
14 | docs/_build/
15 | #docs/_static/
16 | #docs/_templates/
17 | .project
18 | .pydevproject
19 | .idea
20 | .settings
21 | .vagrant
22 | provisioning/roles/andrewrothstein*
23 | provisioning/site.retry
24 | .vscode
25 | .devcontainer
26 | .coverage
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG BASE_IMAGE=nansencenter/nansat_base
2 | FROM ${BASE_IMAGE}
3 | # Necessary to access the BASE_IMAGE variable during the build
4 | ARG BASE_IMAGE
5 |
6 | ARG NANSAT_RELEASE
7 |
8 | COPY . /tmp/nansat/
9 | WORKDIR /tmp/nansat
10 | RUN apt update \
11 | && apt install -y --no-install-recommends g++ \
12 | && pip install . \
13 | && rm -rf /tmp/nansat \
14 | && apt autoremove -y \
15 | && apt clean \
16 | && rm -rf /var/lib/apt/lists/*
17 |
18 | WORKDIR /src
19 |
--------------------------------------------------------------------------------
/docs/source/tutorials.rst:
--------------------------------------------------------------------------------
1 | Nansat tutorials
2 | ================
3 |
 4 | The package `nansat-lectures
 5 | <https://github.com/nansencenter/nansat-lectures>`_ contains several Jupyter
6 | notebooks with examples of how to use Nansat. Unfortunately, we have not been able to keep them
7 | fully updated. The most recently updated notebooks should, however, work.
8 |
9 |
10 | .. toctree::
11 | :maxdepth: 2
12 |
13 | notebooks/nansat-introduction.ipynb
14 |
--------------------------------------------------------------------------------
/nansat/tests/test_pixelfunctions.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import importlib
3 | import unittest
4 | pixfun_module_name = 'nansat._pixfun_py{0}'.format(sys.version_info[0])
5 |
6 | class TestPixelFunctions(unittest.TestCase):
7 | def test_import_pixel_functions(self):
8 | try:
9 | pixfun = importlib.import_module(pixfun_module_name)
10 | pixfun.registerPixelFunctions()
11 | except ImportError:
12 | self.fail('Cannot import pixel functions')
13 |
--------------------------------------------------------------------------------
/docs/source/acknowledgments.rst:
--------------------------------------------------------------------------------
1 | Please acknowledge Nansat
2 | ==========================
3 |
4 | We appreciate acknowledgments of Nansat. If you use Nansat in scientific publications, please add a
5 | reference to the following paper:
6 |
7 | Korosov A.A., Hansen M.W., Dagestad K.-F., Yamakawa A., Vines A., Riechert M., (2016). Nansat: a
8 | Scientist-Orientated Python Package for Geospatial Data Processing. Journal of Open Research
9 | Software. 4(1), p.e39. DOI: http://doi.org/10.5334/jors.120
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utilities/nansatinfo:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Analogue to gdalinfo, but for Nansat datasets
4 | # Refers to Nansat band numbers
5 |
6 | import sys
7 | from os.path import dirname, abspath
8 |
9 | try:
10 | from nansat import Nansat
11 | except ImportError: # development
12 | sys.path.append(dirname(dirname(abspath(__file__))))
13 | from nansat import Nansat
14 |
15 | if (len(sys.argv) != 2):
16 | sys.exit('Usage: nansatinfo <filename>')
17 |
18 | n = Nansat(sys.argv[1])
19 | print(n)
20 |
--------------------------------------------------------------------------------
/nansat/tests/mappers/test_mapper_opendap_arome.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from nansat.mappers.mapper_opendap_arome import Mapper
3 | from nansat import Nansat
4 |
5 |
6 | class AROMEOpenDAPTests(unittest.TestCase):
7 |
8 | def test_get_date(self):
9 | res = Mapper.get_date('https://thredds.met.no/thredds/dodsC/aromearcticarchive/2017/'
10 | '10/30/arome_arctic_full_2_5km_20171030T21Z.nc')
11 | self.assertIsInstance(res, str)
12 | self.assertEqual(res, '2017-10-30T21:00Z')
13 |
--------------------------------------------------------------------------------
/nansat/tests/mappers/test_mapper_opendap_mywave.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from nansat.mappers.mapper_opendap_mywave import Mapper
3 |
4 |
5 | class MyWaveOpenDAPTests(unittest.TestCase):
6 |
7 | def setUp(self):
8 | self.src = 'http://thredds.met.no/thredds/dodsC/fou-hi/mywavewam4archive' \
9 | '/2017/10/29/MyWave_wam4_WAVE_20171029T18Z.nc'
10 |
11 | def test_get_date(self):
12 | res = Mapper.get_date(self.src)
13 | self.assertIsInstance(res, str)
14 | self.assertEqual(res, '2017-10-29T18:00:00Z')
15 |
--------------------------------------------------------------------------------
/nansat/tests/mappers/test_mapper_opendap_ostia.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from nansat.mappers.mapper_opendap_ostia import Mapper
3 |
4 |
5 | class MyWaveOpenDAPTests(unittest.TestCase):
6 |
7 | def setUp(self):
8 | self.src = 'https://podaac-opendap.jpl.nasa.gov/opendap/allData/ghrsst/data/L4/GLOB/' \
9 | 'UKMO/OSTIA/2016/006/20160106-UKMO-L4HRfnd-GLOB-v01-fv02-OSTIA.nc.bz2'
10 |
11 | def test_get_date(self):
12 | res = Mapper.get_date(self.src)
13 | self.assertIsInstance(res, str)
14 | self.assertEqual(res, '2016-01-06T00:00:00Z')
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = Nansat
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/nansat/pixelfunctions/MakefileWin:
--------------------------------------------------------------------------------
 1 | # nmake -f MakefileWin
2 | # NB : Copy gdal_i.lib in this folder!!
3 |
4 | cc = cl
5 | link = link
6 | rm = del
7 | TARGET = gdal_PIXFUN
8 |
9 | $(TARGET).dll : pixelfunctions.obj pixfunplugin.obj gdal_i.lib
10 | $(link) -nologo -DLL pixelfunctions.obj pixfunplugin.obj gdal_i.lib -out:$(TARGET).dll -implib:$(TARGET).lib
11 |
12 | pixelfunctions.obj : pixelfunctions.c
13 | $(cc) -nologo -c pixelfunctions.c
14 |
15 | pixfunplugin.obj : pixfunplugin.c
16 | $(cc) -nologo -c pixfunplugin.c
17 |
18 | clean :
19 | $(rm) $(TARGET).dll
20 | $(rm) $(TARGET).exp
21 | $(rm) $(TARGET).lib
22 | $(rm) $(TARGET).def
23 | $(rm) *.obj
24 |
--------------------------------------------------------------------------------
/provisioning/site.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - hosts: localhost
3 | vars:
4 | project_home: '/vagrant'
5 | env_name: 'py3nansat'
6 | conda_dir: '/home/vagrant/anaconda'
7 |
8 | roles:
9 | - role: andrewrothstein.miniconda
10 | miniconda_escalate: no
11 | miniconda_parent_dir: '{{ conda_dir | dirname }}'
12 | miniconda_make_sys_default: yes
13 |
14 | - role: andrewrothstein.conda-env
15 | conda_env_name: '{{ env_name }}'
16 | conda_env_environment: conda_env_requirements.yml
17 | conda_env_conda_dir: '{{ conda_dir }}'
18 | conda_env_escalate: no
19 |
20 | - nansat
21 |
22 | environment:
23 | PYTHONPATH: "/vagrant"
24 | PATH: "{{ conda_dir }}/envs/{{ env_name }}/bin:{{ ansible_env.PATH }}"
25 |
--------------------------------------------------------------------------------
/nansat/mappers/obpg.py:
--------------------------------------------------------------------------------
1 | # Name: obpg
2 | # Purpose: Base class for mapping for L2 data from the OBPG web-site
3 | # Authors: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | from nansat.vrt import VRT
8 |
9 |
10 | class OBPGL2BaseClass(VRT):
11 | ''' Base Class for Mappers for SeaWIFS/MODIS/MERIS/VIIRS L2 data from OBPG
12 | '''
13 |
14 | titles = ['HMODISA Level-2 Data',
15 | 'MODISA Level-2 Data',
16 | 'HMODIST Level-2 Data',
17 | 'MERIS Level-2 Data',
18 | 'GOCI Level-2 Data',
19 | 'VIIRSN Level-2 Data',
20 | 'SeaWiFS Level-2 Data']
21 |
--------------------------------------------------------------------------------
/utilities/nansat_geotiffimage:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Utility to make an 8-bit Geotiff figure for one band of a Nansat dataset
4 | # Scaling ("minmax") and colormap from VKW is applied
5 |
6 | import sys
7 | from os.path import dirname, abspath
8 |
9 | try:
10 | from nansat import Nansat
11 | except ImportError: # development
12 | sys.path.append(dirname(dirname(abspath(__file__))))
13 | from nansat import Nansat
14 |
15 | tmpVRTfileName = 'tmp.VRT'
16 |
17 | def Usage():
18 | sys.exit('Usage: nansat_geotiffimage <band_no> <infile> <outfile>')
19 |
20 | if (len(sys.argv) <= 2):
21 | Usage()
22 |
23 | try:
24 | bandNo = int(sys.argv[1])
25 | infileName = sys.argv[2]
26 | outfileName = sys.argv[3]
27 | except:
28 | Usage()
29 |
30 | n = Nansat(infileName)
31 | n.write_geotiffimage(outfileName, bandNo)
32 |
--------------------------------------------------------------------------------
/utilities/nansat_translate:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Analog to gdal_translate for Nansat datasets.
4 | # Any gdal_translate-options can be used,
5 | # but band number refers to the Nansat dataset
6 |
7 | import sys
8 | import os
9 | from os.path import dirname, abspath
10 |
11 | try:
12 | from nansat import Nansat
13 | except ImportError: # development
14 | sys.path.append(dirname(dirname(abspath(__file__))))
15 | from nansat import Nansat
16 |
17 | tmpfile = 'nansat_translate_test.VRT'
18 |
19 | if (len(sys.argv) <= 2):
20 | sys.exit('Usage: nansat_translate <infile> <outfile> [options]')
21 |
22 | outfile = sys.argv[2]
23 | options = " ".join(sys.argv[3:])
24 |
25 | n = Nansat(sys.argv[1])
26 | n.vrt.export(tmpfile)
27 | os.system('gdal_translate ' + tmpfile + ' ' + outfile + ' ' + options)
28 | os.remove(tmpfile)
29 |
--------------------------------------------------------------------------------
/docs/source/about.rst:
--------------------------------------------------------------------------------
1 | .. image:: _images/nansat_logo_transp.png
2 | :align: right
3 | :width: 250px
4 | :target: https://github.com/nansencenter/nansat
5 |
 6 | `Nansat <https://github.com/nansencenter/nansat>`_ is a scientist-friendly Python toolbox for processing 2D
7 | satellite earth observation data.
8 |
9 | The main **goal** of Nansat is to facilitate:
10 |
11 | - easy development and testing of scientific algorithms,
12 | - easy analysis of geospatial data, and
13 | - efficient operational processing.
14 |
15 | You can also find a detailed description of Nansat in our `paper
16 | <http://doi.org/10.5334/jors.120>`_ published in `Journal of
17 | Open Research Software <https://openresearchsoftware.metajnl.com/>`_ in 2016.
18 |
19 | ... and you can join the
20 | `mailing list `_.
21 |
--------------------------------------------------------------------------------
/nansat/pixelfunctions/README.txt:
--------------------------------------------------------------------------------
1 | :Author: Antonio Valentino
2 | :Contact: a_valentino@users.sf.net
3 | :Date: 2011-05-13
4 | :Copyright: This document has been placed in the public domain.
5 |
6 | GDAL pixfun plugin
7 | ==================
8 |
9 | This package provides:
10 |
11 | * the implementation of a set of GDALDerivedPixelFunc(s) to be used with
12 | source raster band of virtual GDAL datasets
13 | * a fake GDAL driver to register pixel functions
14 |
15 | .. note:: using the plugin mechanism is a hack aimed to enable python users
16 | to use pixel functions without C++ coding
17 |
18 | To use the plugin just build the gdal_PIXFUN.so shared object::
19 |
20 | make
21 |
22 | and set the GDAL_DRIVER_PATH accordingly::
23 |
24 | export GDAL_DRIVER_PATH=<path to the plugin directory>:$GDAL_DRIVER_PATH
25 |
26 | .. seealso:: http://lists.osgeo.org/pipermail/gdal-dev/2011-May/028737.html
27 |
--------------------------------------------------------------------------------
/nansat/tests/nansat_test_data.py:
--------------------------------------------------------------------------------
1 | #------------------------------------------------------------------------------
2 | # Name: nansat_test_data.py
3 | # Purpose: Get/Create directories to store test data and results of tests
4 | #
5 | # Author: Anton Korosov
6 | #
7 | # Created: 29.09.2014
8 | # Copyright: (c) NERSC
9 | # Licence: This file is part of NANSAT. You can redistribute it or modify
10 | # under the terms of GNU General Public License, v.3
11 | # http://www.gnu.org/licenses/gpl-3.0.html
12 | #------------------------------------------------------------------------------
13 | import os
14 |
15 | tests_path = os.path.dirname(os.path.abspath(__file__))
16 | test_data_path = os.path.join(tests_path, 'data')
17 | tmp_data_path = os.path.join(tests_path, 'data', 'test_data')
18 |
19 | if not os.path.exists(tmp_data_path):
20 | os.mkdir(tmp_data_path)
21 |
--------------------------------------------------------------------------------
/nansat_integration_tests/test_open_issues.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from nansat.nansat import Nansat
4 | from nansat.domain import Domain
5 | from nansat.nsr import NSR
6 |
7 | doppler_installed = True
8 | try:
9 | from sardoppler.sardoppler import Doppler
10 | except Exception as e:
11 | print(e)
12 | doppler_installed = False
13 |
14 | class TestOpenIssues(unittest.TestCase):
15 |
16 | def test_issue_189(self):
17 | fn = '/mnt/10.11.12.232/sat_downloads_asar/level-0/2010-01/descending/VV/gsar_rvl/RVL_ASA_WS_20100110211812087.gsar'
18 | if doppler_installed:
19 | n = Doppler(fn)
20 | xlon, xlat = n.get_corners()
21 | d = Domain(NSR(3857),
22 | '-lle %f %f %f %f -tr 1000 1000' % (
23 | xlon.min(), xlat.min(), xlon.max(), xlat.max()))
24 | n.reproject(d, eResampleAlg=1, tps=True)
25 | inci = n['incidence_angle']
26 |
27 |
--------------------------------------------------------------------------------
/nansat/pixelfunctions/Makefile:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 |
3 | .PHONY: all clean check dist
4 |
5 | OBJS = pixfunplugin.o pixelfunctions.o
6 | CFLAGS := -fPIC -Wall -Wno-long-long -pedantic \
7 | $(shell gdal-config --cflags) $(CFLAGS)
8 |
9 | #CFLAGS := -O0 -g3 -ggdb3 $(CFLAGS)
10 | CFLAGS := -O3 $(CFLAGS)
11 |
12 | ARCHIVE = pixfun-plugin-$(shell date +%Y%m%d)
13 |
14 | TARGET = gdal_PIXFUN.so
15 | #TARGET = gdal_PIXFUN.dylib
16 |
17 | all: $(TARGET)
18 |
19 | clean:
20 | $(RM) $(TARGET) *.o *~
21 |
22 | dist:
23 | $(RM) $(ARCHIVE).tar.gz
24 | mkdir -p $(ARCHIVE)/tests/data
25 | cp $(OBJS:.o=.c) Makefile README.txt $(ARCHIVE)
26 | cp tests/*.py $(ARCHIVE)/tests
27 | cp tests/data/*.vrt tests/data/*.tif $(ARCHIVE)/tests/data
28 | tar cvfz $(ARCHIVE).tar.gz $(ARCHIVE)
29 | $(RM) -r $(ARCHIVE)
30 |
31 | check: $(TARGET)
32 | cd tests && python test_pixfun.py
33 |
34 | $(TARGET): $(OBJS)
35 | $(CC) -shared -o $@ $(OBJS) $(shell gdal-config --libs)
36 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=Nansat
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/nansat/pixelfunctions/_pixfun_py2.c:
--------------------------------------------------------------------------------
 1 | #include <Python.h>
 2 | #include <gdal.h>
3 |
4 | extern CPLErr CPL_STDCALL GDALRegisterDefaultPixelFunc();
5 |
6 | /* Docstrings */
7 | static char module_docstring[] =
8 | "";
9 | static char pixfun_docstring[] =
10 | "";
11 |
12 | /* The only available function */
13 | static PyObject *registerPixelFunctions(PyObject *self, PyObject *args);
14 |
15 | /* Module specification */
16 | static PyMethodDef module_methods[] = {
17 | {"registerPixelFunctions", registerPixelFunctions, METH_VARARGS, pixfun_docstring},
18 | {NULL, NULL, 0, NULL}
19 | };
20 |
21 | /* Initialize the module */
22 | PyMODINIT_FUNC init_pixfun_py2(void)
23 | {
24 | PyObject *m = Py_InitModule3("_pixfun_py2", module_methods, module_docstring);
25 | if (m == NULL)
26 | return;
27 | }
28 |
29 | static PyObject *registerPixelFunctions(PyObject *self, PyObject *args)
30 | {
31 | GDALRegisterDefaultPixelFunc();
32 | Py_INCREF(Py_None);
33 | return Py_None;
34 | }
35 |
--------------------------------------------------------------------------------
/utilities/nansat_show:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Utility to quickly view one band of a file supported by Nansat/GDAL
4 |
5 | # Could be extended to use more of the features of the Figure-class
6 |
7 | import sys
8 | import os
9 | from os.path import dirname, abspath
10 | from PIL import Image
11 |
12 | try:
13 | from nansat import Nansat
14 | except ImportError: # development
15 | sys.path.append(dirname(dirname(abspath(__file__))))
16 | from nansat import Nansat
17 |
18 | def Usage():
19 | sys.exit('Usage: nansat_show <band_no> <infile> [<outfile>]')
20 |
21 | if (len(sys.argv) < 3):
22 | Usage()
23 |
24 | bandNo = int(sys.argv[1])
25 | try:
26 | n = Nansat(sys.argv[2])
27 | except:
28 | Usage()
29 |
30 | try:
31 | outfile = sys.argv[3]
32 | delete = False
33 | except:
34 | outfile = 'tmp.png'
35 | delete = True
36 |
37 | n.write_figure(outfile, bandNo, legend=True)
38 | Image.open(outfile).show()
39 |
40 | if delete:
41 | os.remove(outfile)
42 |
--------------------------------------------------------------------------------
/Vagrantfile:
--------------------------------------------------------------------------------
1 | # -*- mode: ruby -*-
2 | # vi: set ft=ruby :
3 |
4 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
5 | VAGRANTFILE_API_VERSION = "2"
6 |
7 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
8 |
9 | config.vm.box = "ubuntu/trusty64"
10 | config.vm.box_url = "https://atlas.hashicorp.com/ubuntu/trusty64"
11 |
12 | config.vm.define "nansat", primary: true do |nansat|
13 | end
14 |
15 | config.vm.provider "virtualbox" do |v|
16 | v.memory = 2000
17 | v.cpus = 1
18 | end
19 |
20 | # If true, then any SSH connections made will enable agent forwarding.
21 | #config.ssh.forward_agent = true
22 | #config.ssh.forward_x11 = true
23 |
24 | config.vm.provision "ansible_local" do |ansible|
25 | ansible.playbook = "provisioning/site.yml"
26 | ansible.galaxy_role_file = 'provisioning/galaxy_requirements.yml'
27 | ansible.galaxy_command = 'ansible-galaxy install --role-file=%{role_file} --roles-path=%{roles_path} --force -c'
28 | end
29 |
30 | end
31 |
--------------------------------------------------------------------------------
/docs/source/documenting.rst:
--------------------------------------------------------------------------------
1 | Documenting Nansat
2 | =====================
3 |
4 | Documentation should follow the `conventions
5 | `_.
6 |
7 | .. note::
8 |
9 | Documentation for classes should be given after the class definition, not within the
10 | ``__init__``-method.
11 |
12 | To build documentation locally, the best is to create a virtual environment with the sphinx
13 | environment installed. This is done as follows:
14 |
15 | .. code-block:: bash
16 |
17 | cd docs
18 | conda env create -n build_docs --file environment.yml
19 | source activate build_docs
20 |
21 | Then, the following commands should build the documentation:
22 |
23 | .. code-block:: bash
24 |
25 | make clean
26 | sphinx-apidoc -fo source/ ../nansat
27 | make html
28 |
29 | Some documentation remains to be written. This is marked by ``TODO`` in the rst source files. Find
30 | open tasks by:
31 |
32 | .. code-block:: bash
33 |
34 | cd docs/source
35 | grep TODO *
36 |
--------------------------------------------------------------------------------
/nansat/exceptions.py:
--------------------------------------------------------------------------------
1 | # Name: exceptions.py
2 | # Purpose: Definitions of nansat exceptions
3 | # Authors: Morten W. Hansen
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | from __future__ import absolute_import
8 |
9 | class NansatProjectionError(Exception):
10 | """ Cannot get the projection """
11 | pass
12 |
13 |
14 | class NansatGDALError(Exception):
15 | """ Error from GDAL """
16 | pass
17 |
18 |
19 | class NansatReadError(Exception):
20 | """ Exception if a file cannot be read with Nansat """
21 | pass
22 |
23 | class NansatGeolocationError(Exception):
24 | """ Exception if geolocation is wrong (e.g., all lat/lon values are 0) """
25 | pass
26 |
27 | class NansatMissingProjectionError(Exception):
28 | """ Exception raised if no (sub-) dataset has projection """
29 |
30 | class WrongMapperError(Exception):
31 | """ Error for handling data that does not fit a given mapper """
32 | pass
33 |
34 |
--------------------------------------------------------------------------------
/nansat/mappers/hdf4_mapper.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_modisL1
2 | # Purpose: Mapping for MODIS-L1 data
3 | # Authors: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | from dateutil.parser import parse
8 | import warnings
9 |
10 | from nansat.utils import gdal, ogr
11 | from nansat.exceptions import WrongMapperError
12 | from nansat.vrt import VRT
13 |
14 |
15 | class HDF4Mapper(VRT):
16 |
17 | def find_metadata(self, iMetadata, iKey, default=''):
18 | """ Find metadata which has similar key
19 |
20 | Parameters
21 | ----------
22 | iMetadata : dict
23 | input metadata, usually gdalMetadata
24 | iKey : str
25 | key to search for
26 | default : str
27 | default value
28 |
29 | """
30 | value = default
31 | for key in iMetadata:
32 | if iKey in key:
33 | value = iMetadata[key]
34 | break
35 |
36 | return value
37 |
--------------------------------------------------------------------------------
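
find_metadata above matches iKey as a substring of the metadata keys, which is convenient because GDAL exposes HDF4 attributes with long, product-specific prefixes. A stand-alone sketch of the same lookup on an invented metadata dictionary (keys and values are hypothetical):

    # Substring lookup equivalent to HDF4Mapper.find_metadata.
    def find_metadata(i_metadata, i_key, default=''):
        for key in i_metadata:
            if i_key in key:
                return i_metadata[key]
        return default

    gdal_metadata = {
        "CoreMetadata.0:EQUATORCROSSINGDATE": "2011-06-29",  # hypothetical key/value
        "CoreMetadata.0:EQUATORCROSSINGTIME": "12:34:56",
    }
    print(find_metadata(gdal_metadata, "EQUATORCROSSINGDATE"))     # 2011-06-29
    print(find_metadata(gdal_metadata, "MISSING", default="n/a"))  # n/a
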
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. Nansat documentation master file, created by
2 | sphinx-quickstart on Thu Jan 11 09:17:40 2018.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to Nansat's documentation!
7 | ==================================
8 |
9 | .. include:: source/about.rst
10 |
11 | .. toctree::
12 | :maxdepth: 2
13 | :caption: Contents:
14 |
15 | .. About Nansat
16 |
17 | .. ============
18 |
19 | .. * Background scientifics needs
20 |
21 | .. * Data formats readable with Nansat
22 |
23 | .. * Projects employing Nansat
24 |
25 | .. * How to get support
26 |
27 | .. * Contributing to Nansat
28 |
29 | .. toctree::
30 | :maxdepth: 1
31 | :caption: User documentation
32 |
33 | source/installation.rst
34 | source/tutorials.rst
35 | source/packages_and_modules.rst
36 | source/acknowledgments.rst
37 |
38 |
39 | .. toctree::
40 | :maxdepth: 2
41 | :caption: Feature documentation
42 |
43 | source/features.rst
44 |
45 |
46 | .. toctree::
47 | :maxdepth: 2
48 | :caption: Developer documentation
49 |
50 | source/conventions.rst
51 | source/about_mappers.rst
52 | source/release_nansat.rst
53 | source/documenting.rst
54 |
55 | Indices and tables
56 | ==================
57 |
58 | * :ref:`genindex`
59 | * :ref:`modindex`
60 | * :ref:`search`
61 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_arome.py:
--------------------------------------------------------------------------------
1 | from dateutil.parser import parse
2 |
3 | import json
4 | import pythesint as pti
5 |
6 | from nansat.mappers.mapper_netcdf_cf import Mapper as NetcdfCF
7 | from nansat.exceptions import WrongMapperError
8 |
9 | class Mapper(NetcdfCF):
10 |
11 | def __init__(self, *args, **kwargs):
12 |
13 | mm = args[2] # metadata
14 | if not mm:
15 | raise WrongMapperError
16 | if 'NC_GLOBAL#source' not in list(mm.keys()):
17 | raise WrongMapperError
18 | if not 'arome' in mm['NC_GLOBAL#source'].lower() and \
19 | not 'meps' in mm['NC_GLOBAL#source'].lower():
20 | raise WrongMapperError
21 |
22 | super(Mapper, self).__init__(*args, **kwargs)
23 |
24 | self.dataset.SetMetadataItem('time_coverage_start',
25 | (parse(mm['NC_GLOBAL#min_time'], ignoretz=True, fuzzy=True).isoformat()))
26 | self.dataset.SetMetadataItem('time_coverage_end',
27 | (parse(mm['NC_GLOBAL#max_time'], ignoretz=True, fuzzy=True).isoformat()))
28 |
29 | # Get dictionary describing the instrument and platform according to
30 | # the GCMD keywords
31 | mm = pti.get_gcmd_instrument('computer')
32 | ee = pti.get_gcmd_platform('models')
33 |
34 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
35 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
36 |
--------------------------------------------------------------------------------
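
Mappers like this one are not instantiated directly; Nansat probes each registered mapper and keeps the first that does not raise WrongMapperError, here keyed on the NC_GLOBAL#source attribute containing 'arome' or 'meps'. A hedged usage sketch (the file name is hypothetical and a matching local AROME/MEPS NetCDF file is assumed):

    from nansat import Nansat

    # Nansat selects mapper_arome automatically because of the NC_GLOBAL#source attribute.
    n = Nansat("arome_arctic_full_2_5km_20171030T21Z.nc")  # hypothetical local file
    print(n.get_metadata("time_coverage_start"))
    print(n.get_metadata("platform"))
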
/nansat/mappers/mapper_mod44w.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_mod44w
2 | # Purpose: Mapping for MOD44W watermask data
3 | # Authors: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os.path
8 | import json
9 |
10 | import pythesint as pti
11 |
12 | from nansat.vrt import VRT
13 | from nansat.exceptions import WrongMapperError
14 |
15 |
16 | class Mapper(VRT):
17 | ''' VRT with mapping of WKV for MOD44W product (MODIS watermask at 250 m)'''
18 |
19 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
20 | ''' Create VRT '''
21 |
22 | fileBaseName = os.path.basename(filename)
23 | if not fileBaseName == 'MOD44W.vrt':
24 | raise WrongMapperError
25 |
26 | metaDict = [{'src': {'SourceFilename': filename, 'SourceBand': 1},
27 | 'dst': {'wkv': 'land_binary_mask'}}]
28 |
29 | # create empty VRT dataset with geolocation only
30 | self._init_from_gdal_dataset(gdalDataset)
31 |
32 | # add bands with metadata and corresponding values to the empty VRT
33 | self.create_bands(metaDict)
34 |
35 | mm = pti.get_gcmd_instrument('MODIS')
36 | ee = pti.get_gcmd_platform('TERRA')
37 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
38 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
39 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_ecmwf_metno.py:
--------------------------------------------------------------------------------
1 | from dateutil.parser import parse
2 |
3 | import json
4 | import pythesint as pti
5 |
6 | from nansat.mappers.mapper_netcdf_cf import Mapper as NetcdfCF
7 | from nansat.exceptions import WrongMapperError
8 |
9 | class Mapper(NetcdfCF):
10 |
11 | def __init__(self, *args, **kwargs):
12 |
13 | mm = args[2] # metadata
14 | if not mm:
15 | raise WrongMapperError
16 | if 'NC_GLOBAL#source' not in list(mm.keys()):
17 | raise WrongMapperError
18 | if 'NC_GLOBAL#institution' not in list(mm.keys()):
19 | raise WrongMapperError
20 | if not ('ecmwf' in mm['NC_GLOBAL#source'].lower() and 'met.no' in
21 | mm['NC_GLOBAL#institution'].lower()):
22 | raise WrongMapperError
23 |
24 | super(Mapper, self).__init__(*args, **kwargs)
25 |
26 | self.dataset.SetMetadataItem('time_coverage_start',
27 | (parse(mm['NC_GLOBAL#min_time'], ignoretz=True, fuzzy=True).isoformat()))
28 | self.dataset.SetMetadataItem('time_coverage_end',
29 | (parse(mm['NC_GLOBAL#max_time'], ignoretz=True, fuzzy=True).isoformat()))
30 |
31 | # Get dictionary describing the instrument and platform according to
32 | # the GCMD keywords
33 | mm = pti.get_gcmd_instrument('computer')
34 | ee = pti.get_gcmd_platform('models')
35 |
36 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
37 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
38 |
39 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_amsre_uham_leadfraction.py:
--------------------------------------------------------------------------------
1 | #-------------------------------------------------------------------------------
2 | # Name: mapper_amsre_UHAM_lead_fraction.py
3 | # Purpose:
4 | #
5 | # Author: Morten Wergeland Hansen
6 | # Modified: Morten Wergeland Hansen
7 | #
8 | # Created: 18.02.2015
9 | # Last modified:24.02.2015 09:26
10 | # Copyright: (c) NERSC
11 | # License:
12 | #-------------------------------------------------------------------------------
13 | import datetime
14 | from osgeo import gdal, osr
15 | from nansat.nsr import NSR
16 | from nansat.vrt import VRT
17 |
18 | from nansat.exceptions import WrongMapperError
19 |
20 | class Mapper(VRT):
21 |
22 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
23 |
24 | title_correct = False
25 | if not gdalMetadata:
26 | raise WrongMapperError
27 | for key, val in list(gdalMetadata.items()):
28 | if 'title' in key:
29 | if not val == 'Daily AMSR-E Arctic lead area fraction [in percent]':
30 | raise WrongMapperError
31 | else:
32 | title_correct = True
33 |
34 | if not title_correct:
35 | raise WrongMapperError
36 |
37 | # initiate VRT for the NSIDC 10 km grid
38 | self._init_from_dataset_params(1216, 1792, (-3850000, 6250, 0.0, 5850000, 0.0, -6250),
39 | NSR(3411).wkt)
40 |
41 | src = {
42 | 'SourceFilename': 'NETCDF:"%s":lf' % filename,
43 | 'SourceBand': 1,
44 | }
45 | dst = {
46 | 'name': 'leadFraction',
47 | 'long_name': 'AMSRE sea ice lead fraction',
48 | }
49 |
50 | self.create_band(src, dst)
51 | self.dataset.FlushCache()
52 |
53 |
54 |
--------------------------------------------------------------------------------
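
The mapper hard-codes the 6.25 km NSIDC polar stereographic grid (EPSG 3411) through a raster size and a GDAL geotransform. A short worked sketch of the extent those numbers imply, assuming the first two arguments of _init_from_dataset_params are the raster x and y size:

    # GDAL geotransform for a north-up grid: (x_min, x_res, 0, y_max, 0, -y_res).
    x_size, y_size = 1216, 1792
    x_min, x_res, _, y_max, _, y_res = (-3850000, 6250, 0.0, 5850000, 0.0, -6250)

    x_max = x_min + x_size * x_res   # -3850000 + 1216 * 6250 =  3750000
    y_min = y_max + y_size * y_res   #  5850000 - 1792 * 6250 = -5350000
    print((x_min, y_min, x_max, y_max))  # grid extent in EPSG:3411 metres
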
/utilities/nansat_add_coastline:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Utility to make a figure with coastline from any file supported by Nansat
4 | # Useful to e.g. check accuracy of reprojections
5 |
6 | import sys
7 | from os.path import dirname, abspath
8 | from numpy import linspace, flipud
9 | import matplotlib.pyplot as plt
10 | from matplotlib import cm
11 | from mpl_toolkits.basemap import Basemap
12 |
13 | try:
14 | from nansat import Nansat
15 | except ImportError: # development
16 | sys.path.append(dirname(dirname(abspath(__file__))))
17 | from nansat import Nansat
18 |
19 | bandNo = 1 # Could be an input parameter
20 |
21 | def Usage():
22 | sys.exit('Usage: nansat_add_coastline <infile> [<outfile>]')
23 |
24 | if (len(sys.argv) < 1):
25 | Usage()
26 |
27 | inFileName = sys.argv[1]
28 | try:
29 | n = Nansat(inFileName)
30 | except:
31 | Usage()
32 |
33 | # Currently only implemented for lonlat-projection (Plate Carree)
34 | if n.vrt.dataset.GetProjection()[0:4] != 'GEOG':
35 | sys.exit('Utility currently only implemented for datasets with '\
36 | 'geographical (lonlat / Plate Carree) coordinate systems')
37 |
38 | try:
39 | outFileName = sys.argv[2]
40 | except:
41 | outFileName = inFileName + '_coastline.png'
42 |
43 | imsize = n.vrt.dataset.RasterXSize, n.vrt.dataset.RasterYSize
44 | lon, lat = n.get_corners()
45 | fig = plt.figure()
46 | ax = plt.axes([0,0,1,1])
47 | fig.set_size_inches(imsize[0]/1, imsize[1]/1)
48 | m = Basemap(llcrnrlon=lon[1], llcrnrlat=lat[1], urcrnrlon=lon[2],urcrnrlat=lat[2], resolution='i',projection='cyl')
49 | lons = linspace(lon[1], lon[2], imsize[0])
50 | lats = linspace(lat[1], lat[2], imsize[1])
51 | m.imshow(flipud(n[bandNo]), cm.gray, interpolation='nearest', extent=[lon[1]-5,lon[2]+5,lat[1],lat[2]])
52 | m.drawcoastlines(linewidth=100, color='blue')
53 | plt.savefig(outFileName, dpi=1)
54 |
--------------------------------------------------------------------------------
/nansat/__init__.py:
--------------------------------------------------------------------------------
1 | # Name: __init__.py
2 | # Purpose: Use the current folder as a package
3 | # Authors: Asuka Yamakawa, Anton Korosov, Knut-Frode Dagestad,
4 | # Morten W. Hansen, Alexander Myasoyedov,
5 | # Dmitry Petrenko, Evgeny Morozov, Aleksander Vines
6 | # Created: 29.06.2011
7 | # Copyright: (c) NERSC 2011 - 2015
8 | # Licence:
9 | # This file is part of NANSAT.
10 | # NANSAT is free software: you can redistribute it and/or modify
11 | # it under the terms of the GNU General Public License as published by
12 | # the Free Software Foundation, version 3 of the License.
13 | # http://www.gnu.org/licenses/gpl-3.0.html
14 | # This program is distributed in the hope that it will be useful,
15 | # but WITHOUT ANY WARRANTY without even the implied warranty of
16 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
17 | from __future__ import absolute_import
18 | import os
19 | import sys
20 | import warnings
21 | import importlib
22 | pixfun_module_name = 'nansat._pixfun_py{0}'.format(sys.version_info[0])
23 |
24 | # check if pixel functions were compiled using setup_tools
25 | try:
26 | pixfun = importlib.import_module(pixfun_module_name)
27 | pixfun.registerPixelFunctions()
28 | except ImportError as e:
29 | print(e)
30 | warnings.warn('''Cannot register C pixel functions!
31 | Either nansat was not installed using setup.py or
32 | pixel functions were not compiled automatically.
33 | For development, use "python setup.py build_ext --inplace"
34 | to compile pixel functions manually into the source tree.
35 | ''')
36 | from nansat.nsr import NSR
37 | from nansat.domain import Domain
38 | from nansat.nansat import Nansat
39 | from nansat.figure import Figure
40 |
41 | __all__ = ['NSR', 'Domain', 'Nansat', 'Figure']
42 |
43 | os.environ['LOG_LEVEL'] = '30'
44 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_quikscat.py:
--------------------------------------------------------------------------------
1 | import re
2 | import json
3 | import numpy as np
4 | from datetime import datetime
5 | import pythesint as pti
6 |
7 | from nansat.utils import gdal
8 |
9 | from nansat.vrt import VRT
10 | from nansat.geolocation import Geolocation
11 | from nansat.nsr import NSR
12 | from nansat.domain import Domain
13 | #from nansat.mappers.mapper_netcdf_cf import Mapper as NetcdfCF
14 | from nansat.mappers.scatterometers import Mapper as ScatterometryMapper
15 | from nansat.exceptions import WrongMapperError
16 |
17 | #class Mapper(NetcdfCF):
18 | class Mapper(ScatterometryMapper):
19 | """ Nansat mapper for QuikScat """
20 |
21 | def __init__(self, filename, gdal_dataset, metadata, quartile=0, *args, **kwargs):
22 |
23 | if not 'quikscat' in metadata.get('NC_GLOBAL#source', '').lower():
24 | raise WrongMapperError
25 |
26 | super(Mapper, self).__init__(filename, gdal_dataset, metadata, quartile=quartile, *args, **kwargs)
27 |
28 | lat = self.dataset.GetRasterBand(self._latitude_band_number(gdal_dataset)).ReadAsArray()
29 | lon = self.dataset.GetRasterBand(self._longitude_band_number(gdal_dataset)).ReadAsArray()
30 | lon = ScatterometryMapper.shift_longitudes(lon)
31 | self.set_gcps(lon, lat, gdal_dataset)
32 |
33 | # Get dictionary describing the instrument and platform according to
34 | # the GCMD keywords
35 | mm = pti.get_gcmd_instrument('seawinds')
36 | ee = pti.get_gcmd_platform('quikscat')
37 | provider = metadata['NC_GLOBAL#institution']
38 | if provider.lower()=='jpl':
39 | provider = 'NASA/JPL/QUIKSCAT'
40 | provider = pti.get_gcmd_provider(provider)
41 |
42 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
43 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
44 | self.dataset.SetMetadataItem('data_center', json.dumps(provider))
45 | self.dataset.SetMetadataItem('entry_title', metadata['NC_GLOBAL#title'])
46 | self.dataset.SetMetadataItem('ISO_topic_category',
47 | json.dumps(pti.get_iso19115_topic_category('Oceans')))
48 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_globcurrent_thredds.py:
--------------------------------------------------------------------------------
 1 | # Name: mapper_opendap_globcurrent_thredds.py
 2 | # Purpose: Nansat mapping for GlobCurrent data, stored online in THREDDS
3 | # Author: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import json
8 |
9 | import numpy as np
10 |
11 | import pythesint as pti
12 |
13 | from nansat.nsr import NSR
14 | from nansat.mappers.opendap import Opendap
15 |
16 | #https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-8DAY
17 | #https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-MONTHLY
18 | class Mapper(Opendap):
19 | ''' VRT with mapping of WKV for GlobCurrent data '''
20 |
21 | baseURLs = ['http://tds0.ifremer.fr/thredds/dodsC/CLS-L4']
22 | timeVarName = 'time'
23 | xName = 'lon'
24 | yName = 'lat'
25 | timeCalendarStart = '1950-01-01'
26 |
27 | srcDSProjection = NSR().wkt
28 |
29 | def __init__(self, filename, gdalDataset, gdalMetadata,
30 | date=None, ds=None, bands=None, cachedir=None,
31 | **kwargs):
32 | ''' Create GlobCurrent VRT
33 | Parameters:
34 | filename : URL
35 | date : str
36 | 2010-05-01
37 | ds : netCDF.Dataset
38 | previously opened dataset
39 |
40 | '''
41 | self.test_mapper(filename)
42 | self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)
43 |
44 | # add instrument and platform
45 | mm = pti.get_gcmd_instrument('Passive Remote Sensing')
46 | ee = pti.get_gcmd_platform('Earth Observation Satellites')
47 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
48 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
49 | self.dataset.SetMetadataItem('Data Center', 'FR/IFREMER/CERSAT')
50 | self.dataset.SetMetadataItem('Entry Title', 'GLOBCURRENT')
51 |
52 | def convert_dstime_datetimes(self, dsTime):
53 | ''' Convert time variable to np.datetime64 '''
54 | dsDatetimes = np.array([np.datetime64(self.timeCalendarStart) + int(day)
55 | for day in dsTime]).astype('M8[s]')
56 |
57 | return dsDatetimes
58 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_occci.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_occci_online.py
2 | # Purpose: Nansat mapping for OC CCI data, stored online in THREDDS
3 | # Author: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import json
8 |
9 | import numpy as np
10 |
11 | import pythesint as pti
12 |
13 | from nansat.nsr import NSR
14 | from nansat.mappers.opendap import Opendap
15 |
16 | #https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-8DAY
17 | #https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-MONTHLY
18 | class Mapper(Opendap):
19 | ''' VRT with mapping of WKV for OC CCI data '''
20 | baseURLs = ['https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL',
21 | 'https://www.oceancolour.org/thredds/dodsC/CCI_ALL',
22 | 'https://esgf-data1.ceda.ac.uk/thredds/dodsC/esg_esacci/ocean_colour/data/v2-release/geographic/netcdf/'
23 | ]
24 | timeVarName = 'time'
25 | xName = 'lon'
26 | yName = 'lat'
27 | timeCalendarStart = '1970-01-01'
28 | srcDSProjection = NSR().wkt
29 |
30 | def __init__(self, filename, gdalDataset, gdalMetadata,
31 | date=None, ds=None, bands=None, cachedir=None,
32 | **kwargs):
33 | ''' Create OC CCI VRT
34 | Parameters:
35 | filename : URL
36 | date : str
37 | 2010-05-01
38 | ds : netCDF.Dataset
39 | previously opened dataset
40 |
41 | '''
42 | self.test_mapper(filename)
43 | self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)
44 |
45 | # add instrument and platform
46 | mm = pti.get_gcmd_instrument('Passive Remote Sensing')
47 | ee = pti.get_gcmd_platform('Earth Observation Satellites')
48 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
49 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
50 |
51 | def convert_dstime_datetimes(self, dsTime):
52 | ''' Convert time variable to np.datetime64 '''
53 | dsDatetimes = np.array([np.datetime64(self.timeCalendarStart) + day
54 | for day in dsTime]).astype('M8[s]')
55 |
56 | return dsDatetimes
57 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_sentinel1_wind.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | from dateutil.parser import parse
4 | from datetime import datetime
5 | from netCDF4 import Dataset
6 |
7 | from nansat.nsr import NSR
8 | from nansat.mappers.opendap import Opendap
9 |
10 | class Mapper(Opendap):
11 |
12 | baseURLs = ['http://thredds.nersc.no/thredds/dodsC/sarvind/SarVind',]
13 | timeVarName = 'time'
14 | xName = 'x'
15 | yName = 'y'
16 |
17 | def __init__(self, filename, gdal_dataset, gdal_metadata,
18 | ds=None, bands=None, cachedir=None, *args, **kwargs):
19 |
20 | self.test_mapper(filename)
21 | ds = Dataset(filename)
22 | self.srcDSProjection = ds.variables['stereographic'].spatial_ref
23 | self.timeCalendarStart = parse(ds.variables[self.timeVarName].units, fuzzy=True)
24 | self.create_vrt(filename, gdal_dataset, gdal_metadata, self.get_date(filename), ds, bands,
25 | cachedir)
26 |
27 | @staticmethod
28 | def get_date(filename):
29 | """Extract date and time parameters from filename and return
30 | it as a formatted (isoformat) string
31 |
32 | Parameters
33 | ----------
34 |
35 | filename: str
36 | OPeNDAP URL or name of the Sentinel-1 wind file
37 |
38 | Returns
39 | -------
40 | str, YYYY-mm-ddThh:MMZ
41 |
42 | """
43 | _, filename = os.path.split(filename)
44 | t = datetime.strptime(filename.split('_')[5], '%Y%m%dT%H%M%S')
45 | return datetime.strftime(t, '%Y-%m-%dT%H:%M:%SZ')
46 |
47 | def convert_dstime_datetimes(self, ds_time):
48 | """ Convert time variable to np.datetime64
49 |
50 | The time unit is days.
51 | """
52 | hours = (ds_time[0] - np.floor(ds_time[0]))*24
53 | minutes = (hours - np.floor(hours))*60
54 | secs = (minutes - np.floor(minutes))*60
55 | millisecs = int(np.round((secs - np.floor(secs))*10**3))
56 | ds_datetimes = np.array(
57 | [np.datetime64(self.timeCalendarStart) +
58 | np.timedelta64(int(np.floor(ds_time[0])), 'D') +
59 | np.timedelta64(int(np.floor(hours)), 'h') +
60 | np.timedelta64(int(np.floor(minutes)), 'm') +
61 | np.timedelta64(int(np.floor(secs)), 's') +
62 | np.timedelta64(millisecs, 'ms')])
63 | return ds_datetimes
64 |
--------------------------------------------------------------------------------
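
convert_dstime_datetimes above decomposes a fractional day count into whole days, hours, minutes, seconds and milliseconds before adding them to the calendar start. A stand-alone numerical sketch of the same decomposition (reference date and time value are invented):

    import numpy as np

    start = np.datetime64("1990-01-01")  # hypothetical 'days since ...' reference
    ds_time = 100.5                      # hypothetical value: 100 days and 12 hours

    hours = (ds_time - np.floor(ds_time)) * 24
    minutes = (hours - np.floor(hours)) * 60
    secs = (minutes - np.floor(minutes)) * 60

    t = (start
         + np.timedelta64(int(np.floor(ds_time)), "D")
         + np.timedelta64(int(np.floor(hours)), "h")
         + np.timedelta64(int(np.floor(minutes)), "m")
         + np.timedelta64(int(np.floor(secs)), "s"))
    print(t)  # 1990-04-11T12:00:00
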
/nansat/tests/nansat_test_base.py:
--------------------------------------------------------------------------------
1 | # ------------------------------------------------------------------------------
2 | # Name: nansat_test_base.py
3 | # Purpose: Basic class for Nansat tests
4 | #
5 | # Author: Anton Korosov
6 | #
7 | # Created: 20.03.2018
8 | # Copyright: (c) NERSC
9 | # Licence: This file is part of NANSAT. You can redistribute it or modify
10 | # under the terms of GNU General Public License, v.3
11 | # http://www.gnu.org/licenses/gpl-3.0.html
12 | # ------------------------------------------------------------------------------
13 | from __future__ import print_function, absolute_import, division
14 | import os
15 | import sys
16 | import unittest
17 | import tempfile
18 | import pythesint
19 |
20 | from mock import patch, PropertyMock, Mock, MagicMock, DEFAULT
21 |
22 | from nansat.tests import nansat_test_data as ntd
23 |
24 |
25 | class NansatTestBase(unittest.TestCase):
26 |
27 | def setUp(self):
28 | self.test_file_gcps = os.path.join(ntd.test_data_path, 'gcps.tif')
29 | self.test_file_stere = os.path.join(ntd.test_data_path, 'stere.tif')
30 | self.test_file_complex = os.path.join(ntd.test_data_path, 'complex.nc')
31 | self.test_file_arctic = os.path.join(ntd.test_data_path, 'arctic.nc')
32 | self.tmp_data_path = ntd.tmp_data_path
33 | self.default_mapper = 'generic'
34 | fd, self.tmp_filename = tempfile.mkstemp(suffix='.nc')
35 | os.close(fd)
36 |
37 | if not os.path.exists(self.test_file_gcps):
38 | raise ValueError('No test data available')
39 | # Mock several Pythesint functions to avoid network connection
40 | self.patcher = patch.multiple(pythesint, get_wkv_variable=DEFAULT,
41 | get_gcmd_instrument=DEFAULT,
42 | get_gcmd_platform=DEFAULT)
43 | self.mock_pti = self.patcher.start()
44 | self.mock_pti['get_gcmd_instrument'].return_value=dict(short_name='MODIS')
45 | self.mock_pti['get_gcmd_platform'].return_value=dict(short_name='AQUA')
46 | self.mock_pti['get_wkv_variable'].return_value=dict(short_name='swathmask')
47 |
48 | def tearDown(self):
49 | self.patcher.stop()
50 | try:
51 | os.unlink(self.tmp_filename)
52 | except OSError:
53 | pass
54 |
55 |
--------------------------------------------------------------------------------
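To make the intent of NansatTestBase concrete, here is a minimal sketch of a test case that reuses it. It is not part of the test suite; it assumes nansat, mock and the bundled test data are installed, and that the Nansat constructor accepts a mapper keyword as used elsewhere in the tests:

import unittest

from nansat import Nansat
from nansat.tests.nansat_test_base import NansatTestBase


class ExampleTest(NansatTestBase):

    def test_open_gcps_file(self):
        # pythesint lookups are mocked in NansatTestBase.setUp, so no network is needed
        n = Nansat(self.test_file_gcps, mapper=self.default_mapper)
        self.assertGreater(n.shape()[0], 0)


if __name__ == '__main__':
    unittest.main()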
/nansat/mappers/mapper_netcdf_cf_sentinel1.py:
--------------------------------------------------------------------------------
1 | from nansat.mappers.sentinel1 import Sentinel1
2 | from nansat.mappers.mapper_netcdf_cf import Mapper as NetCDF_CF_Mapper
3 |
4 | class Mapper(Sentinel1, NetCDF_CF_Mapper):
5 |
6 | def __init__(self, filename, gdal_dataset, gdal_metadata, *args, **kwargs):
7 | NetCDF_CF_Mapper.__init__(self, filename, gdal_dataset, gdal_metadata, *args, **kwargs)
8 | Sentinel1.__init__(self, filename, flip_gcp_line=True)
9 | self.add_calibrated_nrcs()
10 | self.add_nrcs_VV_from_HH()
11 |
12 | def add_calibrated_nrcs(self):
13 | polarizations = [self.ds.polarisation[i:i+2] for i in range(0,len(self.ds.polarisation),2)]
14 | for pol in polarizations:
15 | amp_fn = 'NETCDF:"' + self.input_filename + '":Amplitude_%s' %pol
16 | bdict_amp = self._get_band_from_subfile(amp_fn)
17 | s0_fn = 'NETCDF:"' + self.input_filename + '":sigmaNought_%s' %pol
18 | bdict_s0 = self._get_band_from_subfile(s0_fn)
19 | src = [
20 | bdict_amp['src'],
21 | bdict_s0['src']
22 | ]
23 | dst = {
24 | 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
25 | 'PixelFunctionType': 'Sentinel1Calibration',
26 | 'polarization': pol,
27 | 'suffix': pol,
28 | }
29 | self.create_band(src, dst)
30 | self.dataset.FlushCache()
31 |
32 | def add_nrcs_VV_from_HH(self):
33 |         if 'Amplitude_HH' not in self.ds.variables.keys():
34 | return
35 | amp_fn = 'NETCDF:"' + self.input_filename + '":Amplitude_HH'
36 | bdict_amp = self._get_band_from_subfile(amp_fn)
37 | s0_fn = 'NETCDF:"' + self.input_filename + '":sigmaNought_HH'
38 | bdict_s0 = self._get_band_from_subfile(s0_fn)
39 | src = [
40 | bdict_amp['src'],
41 | bdict_s0['src'],
42 | {'SourceFilename': self.band_vrts['inciVRT'].filename, 'SourceBand': 1}
43 | ]
44 | dst = {
45 | 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
46 | 'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
47 | 'polarization': 'VV',
48 | 'suffix': 'VV'}
49 | self.create_band(src, dst)
50 | self.dataset.FlushCache()
51 |
--------------------------------------------------------------------------------
/docs/source/nansat.rst:
--------------------------------------------------------------------------------
1 | nansat package
2 | ==============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | nansat.mappers
10 | nansat.tests
11 |
12 | Submodules
13 | ----------
14 |
15 | nansat.domain module
16 | --------------------
17 |
18 | .. automodule:: nansat.domain
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | nansat.exceptions module
24 | ------------------------
25 |
26 | .. automodule:: nansat.exceptions
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | nansat.exporter module
32 | ----------------------
33 |
34 | .. automodule:: nansat.exporter
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | nansat.figure module
40 | --------------------
41 |
42 | .. automodule:: nansat.figure
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 | nansat.geolocation module
48 | -------------------------
49 |
50 | .. automodule:: nansat.geolocation
51 | :members:
52 | :undoc-members:
53 | :show-inheritance:
54 |
55 | nansat.nansat module
56 | --------------------
57 |
58 | .. automodule:: nansat.nansat
59 | :members:
60 | :undoc-members:
61 | :show-inheritance:
62 |
63 | nansat.node module
64 | ------------------
65 |
66 | .. automodule:: nansat.node
67 | :members:
68 | :undoc-members:
69 | :show-inheritance:
70 |
71 | nansat.nsr module
72 | -----------------
73 |
74 | .. automodule:: nansat.nsr
75 | :members:
76 | :undoc-members:
77 | :show-inheritance:
78 |
79 | nansat.pointbrowser module
80 | --------------------------
81 |
82 | .. automodule:: nansat.pointbrowser
83 | :members:
84 | :undoc-members:
85 | :show-inheritance:
86 |
87 | nansat.tools module
88 | -------------------
89 |
90 | .. automodule:: nansat.tools
91 | :members:
92 | :undoc-members:
93 | :show-inheritance:
94 |
95 | nansat.vrt module
96 | -----------------
97 |
98 | .. automodule:: nansat.vrt
99 | :members:
100 | :undoc-members:
101 | :show-inheritance:
102 |
103 | nansat.warnings module
104 | ----------------------
105 |
106 | .. automodule:: nansat.warnings
107 | :members:
108 | :undoc-members:
109 | :show-inheritance:
110 |
111 |
112 | Module contents
113 | ---------------
114 |
115 | .. automodule:: nansat
116 | :members:
117 | :undoc-members:
118 | :show-inheritance:
119 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_cmems.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | from dateutil.parser import parse
3 |
4 | import json
5 | import pythesint as pti
6 |
7 | from nansat.vrt import VRT
8 | from nansat.mappers.mapper_netcdf_cf import Mapper as NetcdfCF
9 | from nansat.exceptions import WrongMapperError
10 |
11 |
12 | # Dictionary with mappings of GCMD metadata keywords:
13 | def get_gcmd_keywords_mapping():
14 | gcmd_keywords_mapping = {
15 | 'MERCATOR PSY4QV3R1': {
16 | 'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
17 | 'platform': json.dumps(pti.get_gcmd_platform('models')),
18 | },
19 | 'NERSC-HYCOM model fields': {
20 | 'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
21 | 'platform': json.dumps(pti.get_gcmd_platform('models')),
22 | },
23 | 'MERCATOR BIOMER4V1R2': {
24 | 'instrument': json.dumps(pti.get_gcmd_instrument('computer')),
25 | 'platform': json.dumps(pti.get_gcmd_platform('models')),
26 | },
27 | }
28 | return gcmd_keywords_mapping
29 |
30 | class Mapper(NetcdfCF):
31 |
32 | def __init__(self, *args, **kwargs):
33 |
34 | filename = args[0]
35 | gdal_metadata = VRT._remove_strings_in_metadata_keys(args[2],
36 | ['NC_GLOBAL#', 'NANSAT_', 'GDAL_'])
37 |
38 | gcmd_keywords_mapping = get_gcmd_keywords_mapping()
39 | for key, val in list(gcmd_keywords_mapping.items()):
40 | if 'source' in list(gdal_metadata.keys()) and key in gdal_metadata['source']:
41 | instrument = gcmd_keywords_mapping[key]['instrument']
42 | platform = gcmd_keywords_mapping[key]['platform']
43 |
44 |         if 'instrument' not in locals():
45 | raise WrongMapperError
46 |
47 | super(Mapper, self).__init__(*args, **kwargs)
48 |
49 | time_coverage_start, time_coverage_end = self.time_coverage()
50 |
51 | self.dataset.SetMetadataItem('time_coverage_start',
52 | (time_coverage_start.isoformat()))
53 | self.dataset.SetMetadataItem('time_coverage_end',
54 | (time_coverage_end.isoformat()))
55 | self.dataset.SetMetadataItem('instrument', instrument)
56 | self.dataset.SetMetadataItem('platform', platform)
57 |
58 | def time_coverage(self):
59 | times = self.times()
60 | return times[0].astype(datetime.datetime), \
61 | times[-1].astype(datetime.datetime)
62 |
--------------------------------------------------------------------------------
/nansat/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | #------------------------------------------------------------------------------
2 | # Name:         test_utils.py
3 | # Purpose:      Test the nansat.utils module
4 | #
5 | # Author: Morten Wergeland Hansen, Asuka Yamakawa
6 | # Modified: Morten Wergeland Hansen
7 | #
8 | # Created: 18.06.2014
9 | # Last modified:16.04.2015 10:48
10 | # Copyright: (c) NERSC
11 | # Licence: This file is part of NANSAT. You can redistribute it or modify
12 | # under the terms of GNU General Public License, v.3
13 | # http://www.gnu.org/licenses/gpl-3.0.html
14 | #------------------------------------------------------------------------------
15 | from __future__ import unicode_literals, absolute_import
16 | import os
17 | import unittest
18 | import datetime
19 | import warnings
20 |
21 | from mock import patch
22 |
23 | try:
24 | import matplotlib
25 | except ImportError:
26 | MATPLOTLIB_IS_INSTALLED = False
27 | else:
28 | MATPLOTLIB_IS_INSTALLED = True
29 | import matplotlib.pyplot as plt
30 | from matplotlib.colors import hex2color
31 |
32 | from nansat.utils import get_random_color, parse_time, register_colormaps
33 | from nansat.tests import nansat_test_data as ntd
34 |
35 |
36 | class UtilsTest(unittest.TestCase):
37 | @unittest.skipUnless(MATPLOTLIB_IS_INSTALLED, 'Matplotlib is required')
38 | def test_get_random_color(self):
39 | ''' Should return HEX code of random color '''
40 | c0 = get_random_color()
41 | c1 = get_random_color(c0)
42 | c2 = get_random_color(c1, 300)
43 |
44 | self.assertEqual(type(hex2color(c0)), tuple)
45 | self.assertEqual(type(hex2color(c1)), tuple)
46 | self.assertEqual(type(hex2color(c2)), tuple)
47 |
48 | @patch('nansat.utils.MATPLOTLIB_IS_INSTALLED', False)
49 | def test_get_random_color__matplotlib_missing(self):
50 | with self.assertRaises(ImportError):
51 | c0 = get_random_color()
52 |
53 | def test_parse_time(self):
54 | dt = parse_time('2016-01-19')
55 |
56 | self.assertEqual(type(dt), datetime.datetime)
57 |
58 | def test_parse_time_incorrect(self):
59 | dt = parse_time('2016-01-19Z')
60 |
61 | self.assertEqual(type(dt), datetime.datetime)
62 |
63 | @unittest.skipUnless(MATPLOTLIB_IS_INSTALLED, 'Matplotlib is required')
64 | def test_register_colormaps(self):
65 | register_colormaps()
66 | self.assertIn('obpg', plt.colormaps())
67 | self.assertIn('ak01', plt.colormaps())
68 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_sstcci.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_opendap_sstcci.py
2 | # Purpose: Nansat mapping for ESA SST CCI data, stored online in THREDDS
3 | # Author: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os
8 |
9 | from dateutil.parser import parse
10 | import json
11 |
12 | import numpy as np
13 |
14 | import pythesint as pti
15 |
16 | from nansat.nsr import NSR
17 | from nansat.mappers.opendap import Opendap
18 |
19 | class Mapper(Opendap):
20 |     ''' VRT with mapping of WKV for ESA SST CCI L4 analysis data '''
21 | #http://dap.ceda.ac.uk/data/neodc/esacci/sst/data/lt/Analysis/L4/v01.1/2010/05/01/20100501120000-ESACCI-L4_GHRSST-SSTdepth-OSTIA-GLOB_LT-v02.0-fv01.1.nc
22 | baseURLs = ['http://dap.ceda.ac.uk/data/neodc/esacci/sst/data/lt/Analysis/L4/v01.1/']
23 | timeVarName = 'time'
24 | xName = 'lon'
25 | yName = 'lat'
26 | timeCalendarStart = '1981-01-01'
27 |
28 | srcDSProjection = NSR().wkt
29 | def __init__(self, filename, gdalDataset, gdalMetadata,
30 | date=None, ds=None, bands=None, cachedir=None,
31 | **kwargs):
32 |         ''' Create SST CCI VRT
33 | Parameters:
34 | filename : URL
35 | date : str
36 | 2010-05-01
37 | ds : netCDF.Dataset
38 | previously opened dataset
39 |
40 | '''
41 | self.test_mapper(filename)
42 |         fname = os.path.split(filename)[1]  # the date is encoded at the start of the filename
43 | date = '%s-%s-%s' % (fname[0:4], fname[4:6], fname[6:8])
44 |
45 | self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)
46 |
47 | # add instrument and platform
48 | mm = pti.get_gcmd_instrument('Passive Remote Sensing')
49 | ee = pti.get_gcmd_platform('Earth Observation Satellites')
50 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
51 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
52 |
53 | def convert_dstime_datetimes(self, dsTime):
54 | ''' Convert time variable to np.datetime64 '''
55 | dsDatetimes = np.array([(np.datetime64(self.timeCalendarStart).astype('M8[s]') +
56 | np.timedelta64(int(day), 'D').astype('m8[s]') +
57 | np.timedelta64(int(24*(day - int(day))), 'h').astype('m8[s]'))
58 | for day in dsTime]).astype('M8[s]')
59 |
60 | return dsDatetimes
61 |
--------------------------------------------------------------------------------
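As a sanity check of the date handling in the mapper above, the following standalone sketch slices the date out of the example filename quoted in the mapper's own comment (only os is needed):

import os

url = ('http://dap.ceda.ac.uk/data/neodc/esacci/sst/data/lt/Analysis/L4/v01.1/2010/05/01/'
       '20100501120000-ESACCI-L4_GHRSST-SSTdepth-OSTIA-GLOB_LT-v02.0-fv01.1.nc')
fname = os.path.split(url)[1]                              # basename of the URL
date = '%s-%s-%s' % (fname[0:4], fname[4:6], fname[6:8])   # first 8 characters hold the date
print(date)  # 2010-05-01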
/nansat/mappers/mapper_kmss.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_kmssL1
2 | # Purpose: Mapping for KMSS-L1 data
3 | # Author: Evgeny Morozov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os
8 | from datetime import datetime
9 |
10 | from nansat.utils import gdal, ogr
11 | from nansat.exceptions import WrongMapperError
12 | from nansat.vrt import VRT
13 | from nansat.domain import Domain
14 |
15 |
16 | class Mapper(VRT):
17 | ''' VRT with mapping of WKV for KMSS TOA tiff data'''
18 |
19 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
20 | ''' Create VRT '''
21 |         if (os.path.split(filename)[1][0:4] != '101_' and
22 |                 os.path.split(filename)[1][0:4] != '102_'):
23 | raise WrongMapperError
24 |
25 | try:
26 | product = gdalDataset.GetDriver().LongName
27 | except:
28 | raise WrongMapperError
29 |
30 | if (product != 'GeoTIFF' or filename[-3:] != 'tif' or
31 | gdalDataset.RasterCount != 3):
32 | raise WrongMapperError
33 |
34 | metaDict = [{'src': {'SourceFilename': filename, 'SourceBand': 1},
35 | 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
36 | 'wavelength': '555'}},
37 | {'src': {'SourceFilename': filename, 'SourceBand': 2},
38 | 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
39 | 'wavelength': '655'}},
40 | {'src': {'SourceFilename': filename, 'SourceBand': 3},
41 | 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
42 | 'wavelength': '800'}}
43 | ]
44 | # from https://gsics.nesdis.noaa.gov/wiki/Development/StandardVariableNames
45 |
46 | # add DataType into 'src' and name into 'dst'
47 | for bandDict in metaDict:
48 | if 'DataType' not in bandDict['src']:
49 | bandDict['src']['DataType'] = 2
50 | if 'wavelength' in bandDict['dst']:
51 | bandDict['dst']['name'] = ('toa_radiance_' +
52 | bandDict['dst']['wavelength'])
53 |
54 | # create empty VRT dataset with geolocation only
55 | self._init_from_gdal_dataset(gdalDataset)
56 |
57 | # add bands with metadata and corresponding values to the empty VRT
58 | self.create_bands(metaDict)
59 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_siwtacsst.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_opendap_siwtacsst.py
2 | # Purpose: Nansat mapping for met.no Arctic SST (SIW-TAC) data, stored online in THREDDS
3 | # Author: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os
8 | import json
9 | import datetime as dt
10 |
11 | import numpy as np
12 |
13 | import pythesint as pti
14 |
15 | from nansat.nsr import NSR
16 | from nansat.mappers.opendap import Dataset, Opendap
17 |
18 | #http://thredds.met.no/thredds/dodsC/myocean/siw-tac/sst-metno-arc-sst03/20121001000000-METNO-L4_GHRSST-SSTfnd-METNO_OI-ARC-v02.0-fv01.0.nc
19 | #http://thredds.met.no/thredds/dodsC/myocean/siw-tac/sst-metno-arc-sst03_V1/20120808-METNO-L4UHfnd-ARC-v01-fv01-METNO_OI.nc
20 | #http://thredds.met.no/thredds/dodsC/sea_ice/SST-METNO-ARC-SST_L4-OBS-V2-V1/sst_arctic_aggregated
21 |
22 |
23 | class Mapper(Opendap):
24 |     """VRT with mapping of WKV for met.no Arctic L4 SST"""
25 | baseURLs = ['http://thredds.met.no/thredds/dodsC/myocean/siw-tac/sst-metno-arc-sst03/',
26 | 'http://thredds.met.no/thredds/dodsC/myocean/siw-tac/sst-metno-arc-sst03_V1/',
27 | 'http://thredds.met.no/thredds/dodsC/sea_ice/SST-METNO-ARC-SST_L4-OBS-V2-V1/']
28 | timeVarName = 'time'
29 | xName = 'lon'
30 | yName = 'lat'
31 | t0 = dt.datetime(1981, 1, 1)
32 | srcDSProjection = NSR().wkt
33 |
34 | def __init__(self, filename, gdalDataset, gdalMetadata, date=None, ds=None, bands=None,
35 | cachedir=None, **kwargs):
36 |         ''' Create VRT for met.no Arctic SST
37 | Parameters:
38 | filename : URL
39 | date : str
40 | 2010-05-01
41 | ds : netCDF.Dataset
42 | previously opened dataset
43 |
44 | '''
45 | self.test_mapper(filename)
46 | self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)
47 |
48 | # add instrument and platform
49 | mm = pti.get_gcmd_instrument('Passive Remote Sensing')
50 | ee = pti.get_gcmd_platform('Earth Observation Satellites')
51 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
52 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
53 |
54 | def convert_dstime_datetimes(self, dsTime):
55 | ''' Convert time variable to np.datetime64 '''
56 |
57 | dsDatetimes = np.array([np.datetime64(self.t0 + dt.timedelta(seconds=int(day)))
58 | for day in dsTime]).astype('M8[s]')
59 | return dsDatetimes
60 |
--------------------------------------------------------------------------------
/nansat/pixelfunctions/README_Nansat:
--------------------------------------------------------------------------------
1 | These pixel functions are downloaded as a patch provided by Antonio Valentino:
2 | http://trac.osgeo.org/gdal/ticket/3367
3 |
4 | To use these in Nansat we simply have to:
5 | - compile the contents of this folder with "make"
6 | - include the present folder in the environment variable "GDAL_DRIVER_PATH"
7 |
8 | Note that this must be performed on each user's machine, as compiled code is machine-specific!
9 |
10 | The file pixelfunctions.c contains all the functions listed in the ticket above.
11 | We may add more pixel functions to this file as needed, and then use them as described at
12 | http://www.gdal.org/gdal_vrttut.html
13 |
14 | As the pixel functions are a bit awkward to implement, we probably don't want to use them
15 | for our scientific algorithms, where we use a trial-and-error approach. But they can be very useful for
16 | common operations such as:
17 | - returning speed (sqrt(u*u+v*v)) when we have two bands with U and V
18 | - returning the angle from North for the same input
19 | - returning the difference of two bands
20 | - ...
21 |
22 | I have tested it, and can confirm that it works like a charm!
23 |
24 | Knut-Frode
25 |
26 |
27 | *Usage of GenericPixelFunctionPixelLine
28 | This is a generic pixel function for one pixel band, one line band and full size band(s).
29 | Bands 1 to N-2 (papoSources) must be full size bands (XSize x YSize); the (N-1)th band must be a line band and the Nth (last) band a pixel band.
30 | Here N is the total number of bands.
31 |
32 | !!N.B.!!
33 | The order of the bands may not be the same as the order written in the mappers.
34 |
35 | e.g.
36 | ------------------------------------------------------------
37 | 'src': [ {'SourceFilename': self.adsVRTs[0].fileName,
38 | 'SourceBand': 1},
39 | {'SourceFilename': self.calibrationVRT.fileName,
40 | 'SourceBand': 1},
41 | {'SourceFilename': fileName, 'SourceBand': 1}]
42 | --------------------------------------------------------------
43 | In this example the 1st band is the line band, the 2nd the pixel band and the last one a full size band. But inside the pixel function the 1st is the full size band, the 2nd the line band and the 3rd the pixel band. I do not know how the order is changed. It is good to check the vrt file, since the order in the vrt file is the same as in the pixel function.
44 |
45 | *Usage of GenericPixelFunctionLine
46 | This is a generic pixel function for one line band and full size band(s).
47 | Bands 1 to N-1 (papoSources) must be full size bands (XSize x YSize) and the Nth (last) band must be a line band.
48 | Here N is the total number of bands.
49 | Asuka
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
--------------------------------------------------------------------------------
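As a companion to the src/dst example in README_Nansat above, here is a hypothetical mapper fragment (not taken from any mapper in this repository) that builds a band from the 'diff' pixel function, i.e. the difference of two existing bands; the filename variable, band numbers and the band name are placeholders:

# fragment of a Mapper.__init__; 'filename' and create_band() come from the VRT base class
src = [{'SourceFilename': filename, 'SourceBand': 1},
       {'SourceFilename': filename, 'SourceBand': 2}]
dst = {'name': 'band_difference',
       'PixelFunctionType': 'diff'}
self.create_band(src, dst)
self.dataset.FlushCache()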
/nansat/mappers/mapper_goci_l1.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_goci_l1
2 | # Purpose: Mapping for GOCI-L1B data
3 | # Authors: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 |
8 | from nansat.utils import gdal, osr
9 | from nansat.exceptions import WrongMapperError
10 | from nansat.vrt import VRT
11 |
12 |
13 | class Mapper(VRT):
14 |     ''' VRT with mapping of WKV for GOCI Level-1B '''
15 |
16 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
17 |         ''' Create GOCI L1B VRT '''
18 |
19 | # raise error in case of not GOCI L1B
20 | try:
21 | title = gdalMetadata['HDFEOS_POINTS_Scene_Header_Scene_Title']
22 | except (TypeError, KeyError):
23 | raise WrongMapperError
24 | if not title == 'GOCI Level-1B Data':
25 | raise WrongMapperError
26 |
27 | # set GOCI projection parameters
28 | lat_0 = gdalMetadata['HDFEOS_POINTS_Map_Projection_Central_Latitude_(parallel)']
29 | lon_0 = gdalMetadata['HDFEOS_POINTS_Map_Projection_Central_Longitude_(meridian)']
30 | rasterXSize = int(gdalMetadata['HDFEOS_POINTS_Scene_Header_number_of_columns'].split(' ')[0])
31 | rasterYSize = int(gdalMetadata['HDFEOS_POINTS_Scene_Header_number_of_rows'].split(' ')[0])
32 |         proj4 = '+proj=ortho +lat_0=%s +lon_0=%s +units=m +ellps=WGS84 +datum=WGS84 +no_defs' % (lat_0, lon_0)
33 | srs = osr.SpatialReference()
34 | srs.ImportFromProj4(proj4)
35 | projection = srs.ExportToWkt()
36 | geoTransform = (-1391500.0, 500.0, 0.0, 1349500.0, 0.0, -500.0)
37 |
38 | # create empty VRT dataset with georeference only
39 | self._init_from_dataset_params(rasterXSize, rasterYSize, geoTransform, projection)
40 |
41 | # add bands from subdatasets
42 | subDatasets = gdalDataset.GetSubDatasets()
43 | metaDict = []
44 | wavelengths = ['412', '443', '490', '555', '660', '680', '745', '865']
45 | for subDSI in range(8):
46 | metaEntry = {'src': {'SourceFilename': subDatasets[subDSI][0],
47 | 'SourceBand': 1,
48 | 'ScaleRatio': 1e-6},
49 | 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
50 | 'wavelength': wavelengths[subDSI],
51 | 'suffix': wavelengths[subDSI]}}
52 | metaDict.append(metaEntry)
53 |
54 | # add bands with metadata and corresponding values to the empty VRT
55 | self.create_bands(metaDict)
56 |
--------------------------------------------------------------------------------
/nansat/pixelfunctions/pixfunplugin.c:
--------------------------------------------------------------------------------
1 | /******************************************************************************
2 | * $Id: pixfunplugin.c 2958 2011-02-11 10:45:29Z valentino $
3 | *
4 | * Project: GDAL
5 | * Purpose: Provide a fake GDAL driver to register a small set of pixel
6 | * functions to be used with the virtual driver.
7 | * It is a hack aimed to enable python users to use a small set
8 | * of custom pixel functions without C++ coding.
9 | * Author: Antonio Valentino
10 | *
11 | ******************************************************************************
12 | * Copyright (c) 2008-2011 Antonio Valentino
13 | *
14 | * Permission is hereby granted, free of charge, to any person obtaining a
15 | * copy of this software and associated documentation files (the "Software"),
16 | * to deal in the Software without restriction, including without limitation
17 | * the rights to use, copy, modify, merge, publish, distribute, sublicense,
18 | * and/or sell copies of the Software, and to permit persons to whom the
19 | * Software is furnished to do so, subject to the following conditions:
20 | *
21 | * The above copyright notice and this permission notice shall be included
22 | * in all copies or substantial portions of the Software.
23 | *
24 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
25 | * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
26 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
27 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
28 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
29 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
30 | * DEALINGS IN THE SOFTWARE.
31 | *****************************************************************************/
32 |
33 | #include "gdal.h"
34 |
35 | /* CPL_CVSID("$Id: pixfunplugin.c 2958 2011-02-11 10:45:29Z valentino $"); */
36 |
37 | CPL_C_START
38 | CPL_DLL void GDALRegister_PIXFUN(void);
39 | CPL_C_END
40 |
41 | extern CPLErr CPL_STDCALL GDALRegisterDefaultPixelFunc();
42 |
43 | /************************************************************************/
44 | /* GDALRegister_PIXFUN() */
45 | /************************************************************************/
46 | void GDALRegister_PIXFUN(void)
47 | {
48 | const char PLUGINNAME[]="Pixfun";
49 |
50 | if (! GDAL_CHECK_VERSION(PLUGINNAME))
51 | return;
52 |
53 | GDALRegisterDefaultPixelFunc();
54 | CPLDebug("PIXFUN", "Plugin %s %s", PLUGINNAME, "$Revision: 2958 $");
55 |
56 | } /* GDALRegister_PIXFUN */
57 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_ascat.py:
--------------------------------------------------------------------------------
1 | import re
2 | import json
3 | import numpy as np
4 | from datetime import datetime
5 | import pythesint as pti
6 |
7 | from netCDF4 import Dataset
8 |
9 | from nansat.utils import gdal
10 |
11 | from nansat.vrt import VRT
12 | from nansat.geolocation import Geolocation
13 | from nansat.nsr import NSR
14 | from nansat.domain import Domain
15 | from nansat.mappers.scatterometers import Mapper as ScatterometryMapper
16 | from nansat.exceptions import WrongMapperError
17 |
18 | class Mapper(ScatterometryMapper):
19 | """ Nansat mapper for ASCAT """
20 |
21 | def __init__(self, filename, gdal_dataset, metadata, quartile=0, *args, **kwargs):
22 |
23 |         if 'ascat' not in metadata.get('NC_GLOBAL#source', '').lower():
24 | raise WrongMapperError
25 |
26 | super(Mapper, self).__init__(filename, gdal_dataset, metadata, quartile=quartile, *args, **kwargs)
27 |
28 | lat = self.dataset.GetRasterBand(self._latitude_band_number(gdal_dataset)).ReadAsArray()
29 | lon = self.dataset.GetRasterBand(self._longitude_band_number(gdal_dataset)).ReadAsArray()
30 | lon = ScatterometryMapper.shift_longitudes(lon)
31 | self.set_gcps(lon, lat, gdal_dataset)
32 |
33 | # Get dictionary describing the instrument and platform according to
34 | # the GCMD keywords
35 | ii = pti.get_gcmd_instrument('ascat')
36 | pp = pti.get_gcmd_platform(metadata['NC_GLOBAL#source'].split(' ')[0])
37 | provider = pti.get_gcmd_provider(re.split('[^a-zA-Z]',
38 | metadata['NC_GLOBAL#institution'])[0])
39 |
40 | # TODO: Validate that the found instrument and platform are indeed what
41 | # we want....
42 |
43 | self.dataset.SetMetadataItem('instrument', json.dumps(ii))
44 | self.dataset.SetMetadataItem('platform', json.dumps(pp))
45 | self.dataset.SetMetadataItem('data_center', json.dumps(provider))
46 | self.dataset.SetMetadataItem('entry_title', metadata['NC_GLOBAL#title'])
47 | self.dataset.SetMetadataItem('ISO_topic_category',
48 | json.dumps(pti.get_iso19115_topic_category('Oceans')))
49 |
50 | def times(self):
51 | """ Get times from time variable
52 | """
53 | ds = Dataset(self.input_filename)
54 |
55 | # Get datetime object of epoch and time_units string
56 | time_units = self._time_reference(ds=ds)
57 |
58 | # Get all times - slight difference from NetCDF-CF mappers times method...
59 | times = ds.variables[self._timevarname(ds=ds)][:,0]
60 |
61 | # Create numpy array of np.datetime64 times (provide epoch to save time)
62 | tt = np.array([self._time_count_to_np_datetime64(tn,
63 | time_units=time_units) for tn in times])
64 |
65 | return tt
66 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_case2reg.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_case2reg.py
2 | # Purpose: Mapper for the BEAM/Visat output of Case2Regional algorithm
3 | # Authors:     Asuka Yamakawa, Anton Korosov, Morten Wergeland Hansen
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os.path
8 | import numpy as np
9 |
10 | from nansat.vrt import VRT
11 | from nansat.node import Node
12 | from nansat.nsr import NSR
13 | from nansat.exceptions import WrongMapperError
14 |
15 | import pythesint as pti
16 |
17 | from nansat.mappers import mapper_generic as mg
18 |
19 | class Mapper(mg.Mapper):
20 | '''Mapping for the BEAM/Visat output of Case2Regional algorithm'''
21 | def __init__(self, filename, gdalDataset, gdalMetadata,
22 | wavelengths=[None, 413, 443, 490, 510, 560, 620, 665,
23 | 681, 709, 753, None, 778, 864], **kwargs):
24 |
25 | fPathName, fExt = os.path.splitext(filename)
26 | fPath, fName = os.path.split(fPathName)
27 | if fExt != '.nc' or 'MER_' not in fName or 'N1_C2IOP' not in fName:
28 | raise WrongMapperError
29 |
30 | # get all metadata using the GENERIC Mapper
31 | mg.Mapper.__init__(self, filename, gdalDataset, gdalMetadata)
32 |
33 | #add metadata for Rrs bands
34 | rrsDict = pti.get_wkv_variable('surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_air')
35 |
36 | for bi in range(1, 1+self.dataset.RasterCount):
37 | b = self.dataset.GetRasterBand(bi)
38 | bMetadata = b.GetMetadata()
39 | rawName = bMetadata.get('name', '')
40 | if 'reflec_' in rawName:
41 | refNumber = int(rawName.split('_')[1])
42 | wavelength = wavelengths[refNumber]
43 | b.SetMetadataItem('name', 'Rrs_' + str(wavelength))
44 | b.SetMetadataItem('wavelength', str(wavelength))
45 | for rrsKey in rrsDict:
46 |                     b.SetMetadataItem(rrsKey, str(rrsDict[rrsKey]))
47 |
48 | src = [{
49 | 'SourceFilename': b.GetMetadataItem('SourceFilename'),
50 | 'SourceBand': b.GetMetadataItem('SourceBand'),
51 | 'DataType': 6}]
52 | dst = {
53 | 'wkv': 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_water',
54 | 'suffix': str(wavelength),
55 | 'wavelength': str(wavelength),
56 | 'PixelFunctionType': 'NormReflectanceToRemSensReflectance'}
57 | self.create_band(src, dst)
58 |
59 | self.dataset.FlushCache()
60 |
--------------------------------------------------------------------------------
/docs/source/features.rst:
--------------------------------------------------------------------------------
1 | Differentiating between land and water
2 | ---------------------------------------
3 |
4 | To add simple land- or water-masks to your figures, you can use the watermask() method in the main
5 | Nansat class. Download the prepared `MODIS 250M water-mask product
6 | `_ from our server and add the path to the directory with this
7 | data to an environment variable named MOD44WPATH (e.g. ``MOD44WPATH=/Data/sat/auxdata/mod44w``).
8 |
9 | Distance to the Nearest coast
10 | ------------------------------
11 |
12 | To get information about the distance to the nearest coastline within the domain of interest, you can
13 | use the ``distance2coast`` function from ``nansat.tools``. Download NASA's Ocean
14 | Biology Processing Group `0.1x0.1 degree Distance to the Nearest Coast product
15 | `_ and make the data available to Nansat locally.
19 |
20 | Digital Elevation Models (DEMs)
21 | --------------------------------
22 |
23 | Global Multi-resolution Terrain Elevation Data 2010 (GMTED2010)
24 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
25 |
26 | The GMTED2010 datasets are provided by the `U.S. Geological Survey
27 | `_. We have prepared a GDAL vrt file that can be used
28 | together with `mapper_topography.py `__
29 | to open the 30 arcseconds Digital Elevation Model (DEM) with Nansat. To use it, the vrt file must be
30 | downloaded from ``_ and stored in the same folder as
31 | the tif files of *mean elevation* available at
32 | ``_.
33 |
34 | In case you want to use a different DEM, the procedure is as follows:
35 |
36 | #. Download the relevant GDAL readable files to a local folder
37 | #. Generate a vrt file using *gdalbuildvrt*, e.g.:
38 |
39 | .. code-block:: bash
40 |
41 | gdalbuildvrt gmted2010_30.vrt *.tif
42 |
43 | #. Update *mapper_topography.py* to accept the new kind of file(s)
44 |
45 |
46 |
47 | Global 30 Arc-Second Elevation (GTOPO30)
48 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
49 |
50 | We have also created a vrt-file for the GTOPO30 dataset. This is available as
51 | ``_. The vrt-file should be placed in the same folder as
52 | the .DEM files available at ``_.
53 |
54 |
55 |
--------------------------------------------------------------------------------
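The watermask and DEM recipes in features.rst above boil down to a few lines of Python. This is a sketch only: 'my_scene.nc' is a placeholder for any Nansat-readable dataset, MOD44WPATH is assumed to be set, and gmted2010_30.vrt is assumed to have been built with gdalbuildvrt as described:

from nansat import Nansat

n = Nansat('my_scene.nc')
wm = n.watermask()      # Nansat object holding the MOD44W mask band
print(wm.bands())

dem = Nansat('gmted2010_30.vrt')
print(dem.bands())      # expect a band with wkv height_above_reference_ellipsoid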
/nansat/tests/test_geolocation.py:
--------------------------------------------------------------------------------
1 | #------------------------------------------------------------------------------
2 | # Name:         test_geolocation.py
3 | # Purpose:      Test the Geolocation class
4 | #
5 | # Author: Anton Korosov
6 | #
7 | # Created: 15.01.2018
8 | # Copyright: (c) NERSC
9 | # Licence: This file is part of NANSAT. You can redistribute it or modify
10 | # under the terms of GNU General Public License, v.3
11 | # http://www.gnu.org/licenses/gpl-3.0.html
12 | #------------------------------------------------------------------------------
13 | import unittest
14 | import os
15 |
16 | import numpy as np
17 |
18 | from nansat.vrt import VRT
19 | from nansat.geolocation import Geolocation
20 | from nansat.utils import gdal, osr
21 | from nansat.tests import nansat_test_data as ntd
22 |
23 | class GeolocationTest(unittest.TestCase):
24 | def setUp(self):
25 | self.test_file = os.path.join(ntd.test_data_path, 'gcps.tif')
26 |
27 | def test_init(self):
28 | lon, lat = np.meshgrid(np.linspace(0,5,10), np.linspace(10,20,30))
29 | x_vrt = VRT.from_array(lon)
30 | y_vrt = VRT.from_array(lat)
31 |
32 | ga = Geolocation(x_vrt, y_vrt)
33 |
34 | self.assertIsInstance(ga, Geolocation)
35 | self.assertEqual(ga.data['X_DATASET'], x_vrt.filename)
36 | self.assertEqual(ga.data['Y_DATASET'], y_vrt.filename)
37 | self.assertEqual(ga.data['LINE_OFFSET'], '0')
38 | self.assertEqual(ga.data['LINE_STEP'], '1')
39 | self.assertEqual(ga.data['PIXEL_OFFSET'], '0')
40 | self.assertEqual(ga.data['PIXEL_STEP'], '1')
41 | srs = osr.SpatialReference()
42 | status = srs.ImportFromWkt(ga.data['SRS'])
43 | self.assertEqual(status, 0)
44 | self.assertEqual(srs.ExportToProj4().strip(), '+proj=longlat +datum=WGS84 +no_defs')
45 | self.assertEqual(ga.data['X_BAND'], '1')
46 | self.assertEqual(ga.data['Y_BAND'], '1')
47 | self.assertEqual(ga.x_vrt, x_vrt)
48 | self.assertEqual(ga.y_vrt, y_vrt)
49 |
50 | def test_from_dataset(self):
51 | ds = gdal.Open(self.test_file)
52 | g = Geolocation.from_dataset(ds)
53 | self.assertIsInstance(g, Geolocation)
54 |
55 | def test_from_filenames(self):
56 | lon, lat = np.meshgrid(np.linspace(0,5,10), np.linspace(10,20,30))
57 | x_vrt = VRT.from_array(lon)
58 | y_vrt = VRT.from_array(lat)
59 | g = Geolocation.from_filenames(x_vrt.filename, y_vrt.filename)
60 | self.assertIsInstance(g, Geolocation)
61 | self.assertEqual(g.data['X_DATASET'], x_vrt.filename)
62 | self.assertEqual(g.data['Y_DATASET'], y_vrt.filename)
63 | self.assertEqual(g.data['LINE_OFFSET'], '0')
64 | self.assertEqual(g.data['LINE_STEP'], '1')
65 | self.assertEqual(g.data['PIXEL_OFFSET'], '0')
66 | self.assertEqual(g.data['PIXEL_STEP'], '1')
67 |
--------------------------------------------------------------------------------
/nansat/tests/test_nsr.py:
--------------------------------------------------------------------------------
1 | #------------------------------------------------------------------------------
2 | # Name:         test_nsr.py
3 | # Purpose:      Test the NSR class
4 | #
5 | # Author: Morten Wergeland Hansen, Anton Korosov, Asuka Yamakawa
6 | # Modified: Morten Wergeland Hansen
7 | #
8 | # Created: 18.06.2014
9 | # Last modified:27.08.2014 10:58
10 | # Copyright: (c) NERSC
11 | # License:
12 | #------------------------------------------------------------------------------
13 | import unittest
14 | from nansat import NSR
15 | from nansat.utils import osr
16 |
17 | from nansat.exceptions import NansatProjectionError
18 |
19 |
20 | class NSRTest(unittest.TestCase):
21 | def test_init_empty(self):
22 | nsr = NSR()
23 |
24 | self.assertEqual(type(nsr), NSR)
25 | self.assertEqual(nsr.Validate(), 0)
26 |
27 | def test_init_from_none(self):
28 | nsr = NSR(None)
29 |
30 | self.assertEqual(type(nsr), NSR)
31 | self.assertRaises(RuntimeError, nsr.Validate)
32 |
33 | def test_init_from_0(self):
34 | nsr = NSR(0)
35 |
36 | self.assertEqual(type(nsr), NSR)
37 | self.assertEqual(nsr.Validate(), 0)
38 |
39 | def test_init_from_EPSG(self):
40 | nsr = NSR(4326)
41 |
42 | self.assertEqual(type(nsr), NSR)
43 | self.assertEqual(nsr.Validate(), 0)
44 | self.assertTrue('4326' in nsr.ExportToWkt())
45 |
46 | def test_init_from_proj4(self):
47 | nsr = NSR('+proj=longlat')
48 |
49 | self.assertEqual(type(nsr), NSR)
50 | self.assertEqual(nsr.Validate(), 0)
51 | self.assertTrue('longlat' in nsr.ExportToProj4())
52 |
53 | def test_init_from_proj4_unicode(self):
54 | nsr = NSR(u'+proj=longlat')
55 |
56 | self.assertEqual(type(nsr), NSR)
57 | self.assertEqual(nsr.Validate(), 0)
58 | self.assertTrue('longlat' in nsr.ExportToProj4())
59 |
60 | def test_init_from_wkt(self):
61 | nsr = NSR(
62 | 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'\
63 | 'AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,'\
64 | 'AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],'\
65 | 'AXIS["Latitude",NORTH],AXIS["Longitude",EAST],AUTHORITY["EPSG","4326"]]')
66 |
67 | self.assertEqual(type(nsr), NSR)
68 | self.assertEqual(nsr.Validate(), 0)
69 | self.assertTrue('longlat' in nsr.ExportToProj4())
70 |
71 | def test_init_from_NSR(self):
72 | nsr = NSR(NSR(4326))
73 |
74 | self.assertEqual(type(nsr), NSR)
75 | self.assertEqual(nsr.Validate(), 0)
76 | self.assertTrue('longlat' in nsr.ExportToProj4())
77 |
78 | def test_dont_init_from_invalid(self):
79 | self.assertRaises(NansatProjectionError, NSR, -10)
80 | self.assertRaises(NansatProjectionError, NSR, 'some crap')
81 | ss = osr.SpatialReference()
82 | self.assertRaises(NansatProjectionError, NSR, ss)
83 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_metno_local_hires_seaice.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_metno_local_hires_seaice.py
2 | # Purpose: Nansat mapping for high resolution sea ice
3 | # from met.no Thredds server
4 | # Authors: Knut-Frode Dagestad
5 | # Licence: This file is part of NANSAT. You can redistribute it or modify
6 | # under the terms of GNU General Public License, v.3
7 | # http://www.gnu.org/licenses/gpl-3.0.html
8 |
9 | # High resolution (1 km) manual ice concentration, based on SAR imagery
10 | #
11 | # Mapper is called with keyword (fake filename):
12 | # 'metno_local_hires_seaice_20140109'
13 | # and [optional] keyword: iceFolder = <path to a local folder with met.no ice charts>
15 | #
16 | # The closest available data within +/- 3 days is returned
17 | from __future__ import print_function, absolute_import, unicode_literals
18 | import sys
19 | import os
20 | from datetime import datetime, timedelta
21 |
22 | from nansat.utils import gdal, ogr
23 | from nansat.exceptions import WrongMapperError
24 | from nansat.vrt import VRT
25 | import nansat.mappers.mapper_generic as mg
26 |
27 |
28 | class Mapper(mg.Mapper):
29 | """Create VRT with mapping of WKV for Met.no seaice"""
30 |
31 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
32 | """Create VRT"""
33 |
34 | try:
35 | ice_folder_name = kwargs['iceFolder']
36 | except:
37 | #iceFolderName = '/vol/istjenesten/data/metnoCharts/'
38 | ice_folder_name = '/vol/data/metnoCharts/'
39 |
40 | keyword_base = 'metno_local_hires_seaice'
41 |
42 | if filename[0:len(keyword_base)] != keyword_base:
43 | raise WrongMapperError
44 |
45 | keyword_time = filename[len(keyword_base)+1:]
46 | requested_time = datetime.strptime(keyword_time, '%Y%m%d')
47 | # Search for nearest available file, within the closest 3 days
48 | found_dataset = False
49 | for delta_day in [0, -1, 1, -2, 2, -3, 3]:
50 | valid_time = (requested_time + timedelta(days=delta_day) +
51 | timedelta(hours=15))
52 | filename = (ice_folder_name + 'ice_conc_svalbard_' +
53 | valid_time.strftime('%Y%m%d1500.nc'))
54 | if os.path.exists(filename):
55 | print('Found file:')
56 | print(filename)
57 | gdal_dataset = gdal.Open(filename)
58 |                 gdal_metadata = gdal_dataset.GetMetadata()
59 | mg.Mapper.__init__(self, filename, gdal_dataset, gdal_metadata)
60 | found_dataset = True
61 | # Modify GeoTransform from netCDF file
62 | # - otherwise a shift is seen!
63 | self.dataset.SetGeoTransform(
64 | (-1243508 - 1000, 1000, 0, -210526 - 7000, 0, -1000))
65 | break # Data is found for this day
66 |
67 | if found_dataset is False:
68 |             raise AttributeError("No local Svalbard-ice files available")
70 |
--------------------------------------------------------------------------------
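The fake-filename convention described in the header comments of the mapper above would be used roughly like this; the date and the iceFolder path are placeholders, and it is assumed that keyword arguments passed to Nansat are forwarded to the mapper:

from nansat import Nansat

n = Nansat('metno_local_hires_seaice_20140109',
           iceFolder='/vol/data/metnoCharts/')
print(n.bands())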
/docs/source/nansat.tests.rst:
--------------------------------------------------------------------------------
1 | nansat.tests package
2 | ====================
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | nansat.tests.mappers
10 |
11 | Submodules
12 | ----------
13 |
14 | nansat.tests.nansat\_test\_base module
15 | --------------------------------------
16 |
17 | .. automodule:: nansat.tests.nansat_test_base
18 | :members:
19 | :undoc-members:
20 | :show-inheritance:
21 |
22 | nansat.tests.nansat\_test\_data module
23 | --------------------------------------
24 |
25 | .. automodule:: nansat.tests.nansat_test_data
26 | :members:
27 | :undoc-members:
28 | :show-inheritance:
29 |
30 | nansat.tests.test\_domain module
31 | --------------------------------
32 |
33 | .. automodule:: nansat.tests.test_domain
34 | :members:
35 | :undoc-members:
36 | :show-inheritance:
37 |
38 | nansat.tests.test\_exporter module
39 | ----------------------------------
40 |
41 | .. automodule:: nansat.tests.test_exporter
42 | :members:
43 | :undoc-members:
44 | :show-inheritance:
45 |
46 | nansat.tests.test\_figure module
47 | --------------------------------
48 |
49 | .. automodule:: nansat.tests.test_figure
50 | :members:
51 | :undoc-members:
52 | :show-inheritance:
53 |
54 | nansat.tests.test\_geolocation module
55 | -------------------------------------
56 |
57 | .. automodule:: nansat.tests.test_geolocation
58 | :members:
59 | :undoc-members:
60 | :show-inheritance:
61 |
62 | nansat.tests.test\_nansat module
63 | --------------------------------
64 |
65 | .. automodule:: nansat.tests.test_nansat
66 | :members:
67 | :undoc-members:
68 | :show-inheritance:
69 |
70 | nansat.tests.test\_node module
71 | ------------------------------
72 |
73 | .. automodule:: nansat.tests.test_node
74 | :members:
75 | :undoc-members:
76 | :show-inheritance:
77 |
78 | nansat.tests.test\_nsr module
79 | -----------------------------
80 |
81 | .. automodule:: nansat.tests.test_nsr
82 | :members:
83 | :undoc-members:
84 | :show-inheritance:
85 |
86 | nansat.tests.test\_pixelfunctions module
87 | ----------------------------------------
88 |
89 | .. automodule:: nansat.tests.test_pixelfunctions
90 | :members:
91 | :undoc-members:
92 | :show-inheritance:
93 |
94 | nansat.tests.test\_pointbrowser module
95 | --------------------------------------
96 |
97 | .. automodule:: nansat.tests.test_pointbrowser
98 | :members:
99 | :undoc-members:
100 | :show-inheritance:
101 |
102 | nansat.tests.test\_tools module
103 | -------------------------------
104 |
105 | .. automodule:: nansat.tests.test_tools
106 | :members:
107 | :undoc-members:
108 | :show-inheritance:
109 |
110 | nansat.tests.test\_vrt module
111 | -----------------------------
112 |
113 | .. automodule:: nansat.tests.test_vrt
114 | :members:
115 | :undoc-members:
116 | :show-inheritance:
117 |
118 |
119 | Module contents
120 | ---------------
121 |
122 | .. automodule:: nansat.tests
123 | :members:
124 | :undoc-members:
125 | :show-inheritance:
126 |
--------------------------------------------------------------------------------
/nansat/mappers/globcolour.py:
--------------------------------------------------------------------------------
1 | # Name: globcolour
2 | # Purpose: Data and methods shared by GLOBCOLOUR mappers
3 | # Authors: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | from copy import deepcopy
8 |
9 |
10 | class Globcolour():
11 | ''' Mapper for GLOBCOLOR L3M products'''
12 |
13 | # detect wkv from metadata 'Parameter'
14 | varname2wkv = {'CHL1_mean': 'mass_concentration_of_chlorophyll_a_in_sea_water',
15 | 'CHL2_mean': 'mass_concentration_of_chlorophyll_a_in_sea_water',
16 | 'KD490_mean': 'volume_attenuation_coefficient_of_downwelling_radiative_flux_in_sea_water',
17 | 'L412_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
18 | 'L443_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
19 | 'L490_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
20 | 'L510_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
21 | 'L531_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
22 | 'L555_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
23 | 'L620_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
24 | 'L670_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
25 | 'L681_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
26 | 'L709_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
27 | 'CDM_mean': 'volume_absorption_coefficient_of_radiative_flux_in_sea_water_due_to_dissolved_organic_matter',
28 | 'BBP_mean': 'volume_backscattering_coefficient_of_radiative_flux_in_sea_water_due_to_suspended_particles',
29 | 'PAR_mean': 'surface_downwelling_photosynthetic_radiative_flux_in_air',
30 | }
31 |
32 | def make_rrsw_meta_entry(self, nlwMetaEntry):
33 | '''Make metaEntry for calculation of Rrsw'''
34 | iWKV = nlwMetaEntry['dst']['wkv']
35 | if 'solar_irradiance' not in nlwMetaEntry['dst']:
36 | return None
37 | if iWKV == 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water':
38 | solarIrradiance = nlwMetaEntry['dst']['solar_irradiance']
39 | wavelength = nlwMetaEntry['dst']['wavelength']
40 | metaEntry = deepcopy(nlwMetaEntry)
41 | metaEntry['dst']['wkv'] = 'surface_ratio_of_upwelling_radiance_emerging_from_sea_water_to_downwelling_radiative_flux_in_water'
42 | metaEntry['dst']['expression'] = ('self["nLw_%s"] / %s'
43 | % (wavelength, solarIrradiance))
44 | else:
45 | metaEntry = None
46 |
47 | return metaEntry
48 |
--------------------------------------------------------------------------------
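A small self-contained sketch of what Globcolour.make_rrsw_meta_entry returns; the filename, wavelength and solar irradiance value below are made up for illustration:

from nansat.mappers.globcolour import Globcolour

nlw_entry = {'src': {'SourceFilename': 'L3m_GLOB_nLw_443.nc', 'SourceBand': 1},
             'dst': {'wkv': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water',
                     'wavelength': '443',
                     'solar_irradiance': '191.0'}}

rrsw_entry = Globcolour().make_rrsw_meta_entry(nlw_entry)
print(rrsw_entry['dst']['wkv'])         # ..._to_downwelling_radiative_flux_in_water
print(rrsw_entry['dst']['expression'])  # self["nLw_443"] / 191.0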
/nansat/mappers/mapper_topography.py:
--------------------------------------------------------------------------------
1 | # -------------------------------------------------------------------------------
2 | # Name: mapper_topography.py
3 | # Purpose: Mapping for the global 30 arc-second elevation
4 | #
5 | # Author: Morten Wergeland Hansen
6 | # Modified: Morten Wergeland Hansen
7 | #
8 | # Created: 04.06.2015
9 | # Last modified:08.06.2015 10:27
10 | # Copyright: (c) NERSC
11 | # Licence: This file is part of NANSAT. You can redistribute it or modify
12 | # under the terms of GNU General Public License, v.3
13 | # http://www.gnu.org/licenses/gpl-3.0.html
14 | # -------------------------------------------------------------------------------
15 | from __future__ import division, unicode_literals, absolute_import, print_function
16 | import re
17 | import os.path
18 |
19 | from nansat.vrt import VRT
20 |
21 | from nansat.exceptions import WrongMapperError
22 |
23 |
24 | class Mapper(VRT):
25 | """
26 | Mapping for the GTOPO30 (``_) and the GMTED2010
27 | (``_) global elevation models.
28 |
29 | Parameters
30 | ----------
31 | filename : string
32 | The vrt filename, e.g., ``gtopo30.vrt``
33 | gdal_dataset : osgeo.gdal.Dataset
34 | The GDAL dataset returned by ``gdal.Open(filename)``
35 |
36 | Example
37 | -------
38 | You can create your own ``gtopo30.vrt`` file with gdal, e.g.:
39 |
40 | .. code-block:: bash
41 |
42 |        gdalbuildvrt gtopo30.vrt [E,W]*.DEM
43 |
44 | Note
45 | ----
46 |     The filename is either the name of a GTOPO30 DEM file or a GMTED2010 tif file, or a vrt file
47 |     that aggregates the DEM files of the given dataset. The GTOPO30 vrt does not
48 |     contain the Antarctic, because this is in polar stereographic projection.
49 |
50 | Remember to update this mapper by adding allowed filenames to the list of accepted filenames
51 | (accepted_names) if you create or apply new DEM datasets.
52 |
53 | """
54 |
55 | def __init__(self, filename, gdal_dataset, *args, **kwargs):
56 |
57 | bn = os.path.basename(filename)
58 | accepted_names = [
59 | 'gmted2010_30.vrt',
60 | 'gtopo30.vrt',
61 | '.*.DEM',
62 | '.*_gmted_mea.*.tif',
63 | ]
64 |
65 | correct_mapper = False
66 | for accepted_name in accepted_names:
67 | m = re.search(accepted_name, filename)
68 | if m and m.group(0):
69 | correct_mapper = True
70 | break
71 |
72 | #if not bn=='gtopo30.vrt' and not os.path.splitext(bn)[1]=='.DEM':
73 | if not correct_mapper:
74 | raise WrongMapperError
75 |
76 | metaDict = [{'src': {'SourceFilename': filename, 'SourceBand': 1},
77 | 'dst': {'wkv': 'height_above_reference_ellipsoid'}}]
78 |
79 | # create empty VRT dataset with geolocation only
80 | self._init_from_gdal_dataset(gdal_dataset)
81 |
82 | # add bands with metadata and corresponding values to the empty VRT
83 | self.create_bands(metaDict)
84 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_emodnet.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_emodnet.py
2 | # Purpose: Mapper for bathymetry data from EMODNet
3 | # http://portal.emodnet-bathymetry.eu/
4 | # Authors: Anton Korosov
5 | # Licence: This file is part of NANSAT. You can redistribute it or modify
6 | # under the terms of GNU General Public License, v.3
7 | # http://www.gnu.org/licenses/gpl-3.0.html
8 | import os
9 | from dateutil.parser import parse
10 |
11 | import numpy as np
12 |
13 | from nansat.nsr import NSR
14 | from nansat.vrt import VRT
15 | from nansat.node import Node
16 | from nansat.utils import gdal, ogr
17 | from nansat.exceptions import WrongMapperError
18 |
19 |
20 | class Mapper(VRT):
21 | def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
22 | **kwargs):
23 | # check if mapper fits
24 | if not gdalMetadata:
25 | raise WrongMapperError
26 | if not os.path.splitext(inputFileName)[1] == '.mnt':
27 | raise WrongMapperError
28 | try:
29 | mbNorthLatitude = float(gdalMetadata['NC_GLOBAL#mbNorthLatitude'])
30 | mbSouthLatitude = float(gdalMetadata['NC_GLOBAL#mbSouthLatitude'])
31 | mbEastLongitude = float(gdalMetadata['NC_GLOBAL#mbEastLongitude'])
32 | mbWestLongitude = float(gdalMetadata['NC_GLOBAL#mbWestLongitude'])
33 | mbProj4String = gdalMetadata['NC_GLOBAL#mbProj4String']
34 | Number_lines = int(gdalMetadata['NC_GLOBAL#Number_lines'])
35 | Number_columns = int(gdalMetadata['NC_GLOBAL#Number_columns'])
36 | Element_x_size = float(gdalMetadata['NC_GLOBAL#Element_x_size'])
37 | Element_y_size = float(gdalMetadata['NC_GLOBAL#Element_y_size'])
38 | except:
39 | raise WrongMapperError
40 |
41 | # find subdataset with DEPTH
42 | subDatasets = gdalDataset.GetSubDatasets()
43 | dSourceFile = None
44 | for subDataset in subDatasets:
45 | if subDataset[0].endswith('.mnt":DEPTH'):
46 | dSourceFile = subDataset[0]
47 | if dSourceFile is None:
48 | raise WrongMapperError
49 | dSubDataset = gdal.Open(dSourceFile)
50 | dMetadata = dSubDataset.GetMetadata()
51 |
52 | try:
53 | scale_factor = dMetadata['DEPTH#scale_factor']
54 | add_offset = dMetadata['DEPTH#add_offset']
55 | except:
56 | raise WrongMapperError
57 |
58 | geoTransform = [mbWestLongitude, Element_x_size, 0,
59 | mbNorthLatitude, 0, -Element_y_size]
60 |
61 | # create empty VRT dataset with geolocation only
62 | self._init_from_dataset_params(Number_columns, Number_lines, geoTransform, NSR(mbProj4String).wkt, metadata=gdalMetadata)
63 |
64 | metaDict = [{'src': {'SourceFilename': dSourceFile,
65 | 'SourceBand': 1,
66 | 'ScaleRatio' : scale_factor,
67 | 'ScaleOffset' : add_offset},
68 | 'dst': {'wkv': 'depth'}}]
69 |
70 | # add bands with metadata and corresponding values to the empty VRT
71 | self.create_bands(metaDict)
72 |
--------------------------------------------------------------------------------
/nansat/pixelfunctions/_pixfun_py3.c:
--------------------------------------------------------------------------------
1 | #include <Python.h>
2 | #include "gdal.h"
3 |
4 | extern CPLErr CPL_STDCALL GDALRegisterDefaultPixelFunc();
5 |
6 | /* Docstrings */
7 | static char module_docstring[] =
8 | "";
9 | static char pixfun_docstring[] =
10 | "";
11 |
12 | /* The only available function */
13 | static PyObject *registerPixelFunctions(PyObject *self, PyObject *args);
14 |
15 | /* Module specification */
16 | /* deprecated in Py3
17 | static PyMethodDef module_methods[] = {
18 | {"registerPixelFunctions", registerPixelFunctions, METH_VARARGS, pixfun_docstring},
19 | {NULL, NULL, 0, NULL}
20 | };
21 | */
22 |
23 | static PyMethodDef module_methods[] = {
24 | {"registerPixelFunctions", (PyCFunction) registerPixelFunctions, METH_NOARGS, pixfun_docstring},
25 | {NULL, NULL, 0, NULL}
26 | };
27 |
28 |
29 | /* Initialize the module */
30 | /* DEPRECATED
31 | PyMODINIT_FUNC init_pixfun(void)
32 | {
33 | PyObject *m = Py_InitModule3("_pixfun", module_methods, module_docstring);
34 | if (m == NULL)
35 | return;
36 | }
37 | */
38 |
39 | /* deprecated :
40 | PyMODINIT_FUNC init_uniqueCombinations(void)
41 | {
42 | Py_InitModule3("uniqueCombinations", uniqueCombinations_funcs,
43 | "Extension module uniqueCombinations v. 0.01");
44 | }
45 | */
46 |
47 | static struct PyModuleDef _pixfun_py3 =
48 | {
49 | PyModuleDef_HEAD_INIT,
50 | "_pixfun_py3", /* name of module */
51 | "usage: _pixfun_py3.registerPixelFunctions\n", /* module documentation, may be NULL */
52 | -1, /* size of per-interpreter state of the module, or -1 if the module keeps state in global variables. */
53 | module_methods
54 | };
55 |
56 | PyMODINIT_FUNC PyInit__pixfun_py3(void)
57 | {
58 | return PyModule_Create(&_pixfun_py3);
59 | }
60 |
61 |
62 |
63 | static PyObject *registerPixelFunctions(PyObject *self, PyObject *args)
64 | {
65 | GDALRegisterDefaultPixelFunc();
66 | Py_INCREF(Py_None);
67 | return Py_None;
68 | }
69 |
70 | /***********************************/
71 |
72 | /* deprecated:
73 | static PyMethodDef uniqueCombinations_funcs[] = {
74 | {"uniqueCombinations", (PyCFunction)uniqueCombinations,
75 | METH_NOARGS, uniqueCombinations_docs},
76 | {NULL}
77 | };
78 | use instead of the above: */
79 |
80 | /* NEW
81 | static PyMethodDef module_methods[] = {
82 | {"uniqueCombinations", (PyCFunction) uniqueCombinations, METH_NOARGS, uniqueCombinations_docs},
83 | {NULL}
84 | };
85 | */
86 |
87 |
88 | /* deprecated :
89 | PyMODINIT_FUNC init_uniqueCombinations(void)
90 | {
91 | Py_InitModule3("uniqueCombinations", uniqueCombinations_funcs,
92 | "Extension module uniqueCombinations v. 0.01");
93 | }
94 | */
95 |
96 | /* NEW
97 | static struct PyModuleDef Combinations =
98 | {
99 | PyModuleDef_HEAD_INIT,
100 | "Combinations", // name of module
101 | "usage: Combinations.uniqueCombinations(lstSortableItems, comboSize)\n", // module documentation, may be NULL
102 | -1, // size of per-interpreter state of the module, or -1 if the module keeps state in global variables.
103 | module_methods
104 | };
105 |
106 | PyMODINIT_FUNC PyInit_Combinations(void)
107 | {
108 | return PyModule_Create(&Combinations);
109 | }
110 | */
111 |
112 |
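/* Example usage from Python (illustrative sketch): once this extension is
 * compiled, importing the module and calling the single exported function
 * registers GDAL's default pixel functions:
 *
 *     import _pixfun_py3
 *     _pixfun_py3.registerPixelFunctions()
 *
 * The call forwards to GDALRegisterDefaultPixelFunc() and returns None.
 */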
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_osisaf.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_opendap_osisaf.py
2 | # Purpose: Nansat mapping for OSISAF sea ice data, stored online in THREDDS
3 | # Author: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os
8 |
9 | import json
10 | import numpy as np
11 | import datetime as dt
12 |
13 | import pythesint as pti
14 |
15 | from nansat.nsr import NSR
16 | from nansat.mappers.opendap import Dataset, Opendap
17 |
18 | # http://thredds.met.no/thredds/dodsC/osisaf/met.no/ice/conc/2016/04/ice_conc_sh_polstere-100_multi_201604261200.nc ice_conc
19 | # http://thredds.met.no/thredds/dodsC/osisaf/met.no/ice/drift_lr/merged/2016/04/ice_drift_nh_polstere-625_multi-oi_201604151200-201604171200.nc dX dY
20 | # http://thredds.met.no/thredds/dodsC/osisaf/met.no/ice/type/2016/04/ice_type_nh_polstere-100_multi_201604151200.nc ice_type
21 | # http://thredds.met.no/thredds/dodsC/osisaf/met.no/ice/edge/2016/04/ice_edge_nh_polstere-100_multi_201604241200.nc ice_edge
22 | # http://thredds.met.no/thredds/dodsC/osisaf_test/met.no/ice/drift_lr/merged/2013/09/ice_drift_nh_polstere-625_multi-oi_201309171200-201309191200.nc dX dY
23 |
24 |
25 | class Mapper(Opendap):
26 |     ''' VRT with mapping of WKV for OSISAF sea ice products '''
27 | baseURLs = ['http://thredds.met.no/thredds/dodsC/cryoclim/met.no/osisaf-nh',
28 | 'http://thredds.met.no/thredds/dodsC/osisaf_test/met.no/ice/',
29 | 'http://thredds.met.no/thredds/dodsC/osisaf/met.no/ice/']
30 | timeVarName = 'time'
31 | xName = 'xc'
32 | yName = 'yc'
33 | t0 = dt.datetime(1978, 1, 1)
34 | srcDSProjection = NSR().wkt
35 |
36 | def __init__(self, filename, gdalDataset, gdalMetadata, date=None, ds=None, bands=None,
37 | cachedir=None, **kwargs):
38 |         ''' Create OSISAF VRT
39 | Parameters:
40 | filename : URL
41 | date : str
42 | 2010-05-01
43 | ds : netCDF.Dataset
44 | previously opened dataset
45 |
46 | '''
47 | self.test_mapper(filename)
48 | ds = Dataset(filename)
49 | proj4str = '%s +units=%s' % (ds.variables['Polar_Stereographic_Grid'].proj4_string,
50 | ds.variables['xc'].units)
51 | self.srcDSProjection = NSR(proj4str).wkt
52 | if filename[-3:] == '.nc':
53 | date = self.t0 + dt.timedelta(seconds=ds.variables['time'][0])
54 | date = date.strftime('%Y-%m-%d')
55 |
56 | self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)
57 |
58 | # add instrument and platform
59 | mm = pti.get_gcmd_instrument('Passive Remote Sensing')
60 | ee = pti.get_gcmd_platform('Earth Observation Satellites')
61 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
62 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
63 |
64 | def convert_dstime_datetimes(self, dsTime):
65 | ''' Convert time variable to np.datetime64 '''
66 |
67 | dsDatetimes = np.array([np.datetime64(self.t0 + dt.timedelta(seconds=day))
68 | for day in dsTime]).astype('M8[s]')
69 | return dsDatetimes
70 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_globcurrent.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_opendap_globcurrent.py
2 | # Purpose: Nansat mapping for GLOBCURRENT data, stored online in HYRAX
3 | # Author: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os
8 | import warnings
9 |
10 | from dateutil.parser import parse
11 | import json
12 | import numpy as np
13 |
14 | import pythesint as pti
15 |
16 | from nansat.nsr import NSR
17 | from nansat.mappers.opendap import Opendap
18 |
19 | class Mapper(Opendap):
20 |     ''' VRT with mapping of WKV for GLOBCURRENT data '''
21 | #http://www.ifremer.fr/opendap/cerdap1/globcurrent/v2.0/global_025_deg/total_hs/2010/001/20100101000000-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc
22 | #http://tds0.ifremer.fr/thredds/dodsC/CLS-L4-CUREUL_HS-ALT_SUM-V02.0_FULL_TIME_SERIE
23 | baseURLs = ['http://www.ifremer.fr/opendap/cerdap1/globcurrent/v2.0/']
24 | timeVarName = 'time'
25 | xName = 'lon'
26 | yName = 'lat'
27 | timeCalendarStart = '1950-01-01'
28 |
29 | srcDSProjection = NSR().wkt
30 |
31 | def __init__(self, filename, gdalDataset, gdalMetadata,
32 | date=None, ds=None, bands=None, cachedir=None,
33 | **kwargs):
34 |         ''' Create GLOBCURRENT VRT
35 | Parameters:
36 | filename : URL
37 | date : str
38 | 2010-05-01
39 | ds : netCDF.Dataset
40 | previously opened dataset
41 |
42 | '''
43 | self.test_mapper(filename)
44 | fname = os.path.split(filename)[1]
45 | date = '%s-%s-%sT%s:00Z' % (fname[0:4], fname[4:6], fname[6:8], fname[8:10])
46 |
47 | self.create_vrt(filename, gdalDataset, gdalMetadata, date, ds, bands, cachedir)
48 |
49 | # add instrument and platform
50 | pi = []
51 | if 'jason-1' in self.ds.sensor.lower():
52 | pi.append([pti.get_gcmd_platform('jason-1'), pti.get_gcmd_instrument('poseidon-2')])
53 | if 'envisat' in self.ds.sensor.lower():
54 | pi.append([pti.get_gcmd_platform('envisat'), pti.get_gcmd_instrument('ra-1')])
55 | if 'jason-2' in self.ds.sensor.lower():
56 | pi.append([pti.get_gcmd_platform('ostm/jason-2'),
57 | pti.get_gcmd_instrument('poseidon-3')])
58 |
59 | if pi:
60 | self.dataset.SetMetadataItem('platform/instrument', json.dumps(pi))
61 | else:
62 | warnings.warn('Could not provide source metadata - please check and submit a git ' \
63 | 'issue if necessary')
64 |
65 | self.dataset.SetMetadataItem('Data Center', 'FR/IFREMER/CERSAT')
66 | self.dataset.SetMetadataItem('Entry Title', 'GLOBCURRENT')
67 |
68 | def convert_dstime_datetimes(self, dsTime):
69 | ''' Convert time variable to np.datetime64 '''
70 | dsDatetimes = np.array([(np.datetime64(self.timeCalendarStart).astype('M8[s]') +
71 | np.timedelta64(int(day), 'D').astype('m8[s]') +
72 | np.timedelta64(int(24*(day - int(day))), 'h').astype('m8[s]'))
73 | for day in dsTime]).astype('M8[s]')
74 |
75 | return dsDatetimes
76 |
--------------------------------------------------------------------------------
/docs/source/release_nansat.rst:
--------------------------------------------------------------------------------
1 | Releasing Nansat
2 | ==================
3 |
4 | General release procedure
5 | -------------------------
6 |
7 | In setup.py, make sure the version is correct by checking MAJOR, MINOR and MICRO, and that 'dev'
8 | has been removed from VERSION.
9 |
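The version definition in setup.py typically looks roughly like the following (the numbers below
are hypothetical, shown only for illustration)::

    MAJOR = 1
    MINOR = 2
    MICRO = 0
    VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)  # no 'dev' suffix for a release
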
10 | Releases should be made from the master branch. **Make sure that Nansat works on all operating
11 | systems (Windows, Linux and OS X), with all combinations of python (py27, py35, py36) before
12 | releasing.**
13 |
14 | When you are ready to release a new version, create a tag for the release and push it.
15 | The following creates a tag for version 0.6.17 and pushes it.
16 |
17 | ::
18 |
19 | git tag "v0.6.17"
20 | git push origin --tags
21 |
22 | Go to https://github.com/nansencenter/nansat/releases and click "Draft a new release".
23 |
24 | Select the tag you just created and write a release title; for consistency, use the format
25 | Nansat-0.6.17.
26 |
27 | Write release notes that describe the changes made in this release.
28 |
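If you prefer the command line and have the GitHub CLI installed, the same release can be drafted
with, for example::

    gh release create v0.6.17 --title "Nansat-0.6.17" --notes "Describe the changes here"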
29 |
30 | Releasing on PyPI
31 | -----------------
32 |
33 | First, wait until Nansat passes all tests on Travis CI, AppVeyor and Coveralls. Then execute:
34 |
35 | ::
36 |
37 | conda create -n release_nansat -c conda-forge -y python pythesint scipy basemap netcdf4 gdal pillow mock nose urllib3 twine
38 | source activate release_nansat
39 | python setup.py sdist
40 | # Check the dist file that was just created
41 | ls dist
42 | # Should be a file of the form 'nansat-1.0.20.tar.gz'
43 | twine upload dist/nansat-1.0.20.tar.gz
44 |
45 | Packaging documentation is found at `PyPA Packaging and Distributing Projects
46 | <https://packaging.python.org/>`_.
47 |
48 | To avoid having to enter a password when uploading, you can configure $HOME/.pypirc as described
49 | in the link above.
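
A minimal $HOME/.pypirc for token-based uploads could look like this (the token value below is a
placeholder)::

    [distutils]
    index-servers =
        pypi

    [pypi]
    username = __token__
    password = pypi-XXXXXXXXXXXX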
50 |
51 | Releasing on Anaconda
52 | ---------------------
53 |
54 | We release Nansat through the conda-forge channel on Anaconda. First, wait until Nansat passes
55 | all tests on Travis CI, AppVeyor and Coveralls. Then execute:
56 |
57 | ::
58 |
59 | # install (or update) conda-smithy
60 | conda install -n root -c conda-forge conda-smithy
61 | git clone git@github.com:conda-forge/nansat-feedstock.git
62 | cd nansat-feedstock
63 | conda smithy rerender -c auto
64 | git push
65 |
66 | Information on how to use conda-smithy can be found at `The tool for managing conda-forge feedstocks
67 | <https://github.com/conda-forge/conda-smithy>`_.
68 |
69 |
70 | Releasing on Docker
71 | -------------------
72 | To release a stable version of Nansat as a Docker image, build the images below and push the final one to Docker Hub::
73 |
74 |
75 | # build the base image with conda
76 | docker build . -t nansat:conda --target conda
77 | # build the image for compiling Nansat
78 | docker build . -t nansat:dev --target dev
79 | # build the image for distributing Nansat
80 | docker build . -t nansat:latest --target latest
81 | # find the id of the nansat:latest image (e.g. bb38976d03cf)
82 | docker images
83 | # tag the image (bb38976d03cf is the id of the nansat:latest image)
84 | docker tag bb38976d03cf akorosov/nansat:latest
85 | # authenticate on Docker Hub
86 | docker login --username=yourhubusername
87 | # push the image to Docker Hub
88 | docker push akorosov/nansat:latest
89 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_sentinel2.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_opendap_sentinel2.py
2 | # Purpose: Nansat mapping for ESA Sentinel-2 data from the Norwegian ground segment
3 | # Author: Morten W. Hansen
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os
8 | from datetime import datetime
9 | import json
10 | import warnings
11 |
12 | import numpy as np
13 | from netCDF4 import Dataset
14 | from nansat.utils import gdal
15 |
16 | import pythesint as pti
17 |
18 | from nansat.mappers.opendap import Opendap
19 | from nansat.nsr import NSR
20 |
21 |
22 | class Mapper(Opendap):
23 |
24 | baseURLs = [
25 | 'http://nbstds.met.no/thredds/dodsC/NBS/S2A',
26 | 'http://nbstds.met.no/thredds/dodsC/NBS/S2B',
27 | ]
28 |
29 | timeVarName = 'time'
30 | xName = 'x'
31 | yName = 'y'
32 | timeCalendarStart = '1981-01-01'
33 | GCP_STEP=100
34 |
35 | def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
36 | ds=None, bands=None, cachedir=None, *args, **kwargs):
37 |
38 | self.test_mapper(filename)
39 | timestamp = date if date else self.get_date(filename)
40 |
41 | self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
42 |
43 | mditem = 'entry_title'
44 | if not self.dataset.GetMetadataItem(mditem):
45 | try:
46 | self.dataset.SetMetadataItem(mditem, str(self.ds.getncattr('title')))
47 | except AttributeError:
48 | self.dataset.SetMetadataItem(mditem, filename)
49 | mditem = 'data_center'
50 | if not self.dataset.GetMetadataItem(mditem):
51 | self.dataset.SetMetadataItem('data_center', json.dumps(pti.get_gcmd_provider('NO/MET')))
52 | mditem = 'ISO_topic_category'
53 | if not self.dataset.GetMetadataItem(mditem):
54 | self.dataset.SetMetadataItem(mditem,
55 | pti.get_iso19115_topic_category('Imagery/Base Maps/Earth Cover')['iso_topic_category'])
56 |
57 | mm = pti.get_gcmd_instrument('multi-spectral')
58 | ee = pti.get_gcmd_platform('sentinel-2')
59 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
60 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
61 |
62 | @staticmethod
63 | def get_date(filename):
64 | """Extract date and time parameters from filename and return
65 | it as a formatted (isoformat) string
66 |
67 | Parameters
68 | ----------
69 |
70 | filename: str
71 |             Path or OPeNDAP URL of the input dataset
72 |
73 | Returns
74 | -------
75 | str, YYYY-mm-ddThh:MMZ
76 |
77 | """
78 | _, filename = os.path.split(filename)
79 | t = datetime.strptime(filename.split('_')[2], '%Y%m%dT%H%M%S')
80 | return datetime.strftime(t, '%Y-%m-%dT%H:%M:%SZ')
81 |
82 | def convert_dstime_datetimes(self, ds_time):
83 | """Convert time variable to np.datetime64"""
84 | ds_datetimes = np.array(
85 | [(np.datetime64(self.timeCalendarStart).astype('M8[s]')
86 | + np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
87 | return ds_datetimes
88 |
89 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_mywave.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_opendap_mywave.py
2 | # Purpose: Nansat mapping for MyWaveWAV data provided by MET.NO
3 | # Author: Artem Moiseev
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 |
8 | from nansat.mappers.opendap import Opendap
9 | from nansat.exceptions import WrongMapperError
10 | from nansat.nsr import NSR
11 | from netCDF4 import Dataset
12 | from datetime import datetime
13 | import numpy as np
14 | import json
15 | import pythesint as pti
16 | import os
17 |
18 |
19 | class Mapper(Opendap):
20 |
21 | baseURLs = ['http://thredds.met.no/thredds/dodsC/fou-hi/mywavewam4archive',
22 | 'https://thredds.met.no/thredds/dodsC/sea/mywavewam4/mywavewam4_be']
23 |
24 | timeVarName = 'time'
25 | xName = 'rlon'
26 | yName = 'rlat'
27 | timeCalendarStart = '1970-01-01'
28 |
29 | def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
30 | ds=None, bands=None, cachedir=None, *args, **kwargs):
31 |
32 | self.test_mapper(filename)
33 | timestamp = date if date else self.get_date(filename)
34 | ds = Dataset(filename)
35 | try:
36 | self.srcDSProjection = NSR(ds.variables['projection_3'].proj4 +
37 | ' +to_meter=0.0174532925199 +wktext')
38 | except KeyError:
39 | raise WrongMapperError
40 |
41 | self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
42 |
43 | self.dataset.SetMetadataItem('instrument', json.dumps(pti.get_gcmd_instrument('Computer')))
44 | self.dataset.SetMetadataItem('platform', json.dumps(pti.get_gcmd_platform('MODELS')))
45 | self.dataset.SetMetadataItem('Data Center', json.dumps(pti.get_gcmd_provider('NO/MET')))
46 | self.dataset.SetMetadataItem('Entry Title', str(ds.getncattr('title')))
47 |         self.dataset.SetMetadataItem('ISO Topic Category',
48 |                                      json.dumps(pti.get_iso19115_topic_category('Oceans')))
49 | self.dataset.SetMetadataItem('gcmd_location',
50 | json.dumps(pti.get_gcmd_location('sea surface')))
51 |
52 | @staticmethod
53 | def get_date(filename):
54 | """Extract date and time parameters from filename and return
55 | it as a formatted (isoformat) string
56 |
57 | Parameters
58 | ----------
59 |
60 | filename: str
61 |
62 | Returns
63 | -------
64 | str, YYYY-mm-ddThh:MM:00Z
65 |
66 | Examples
67 | --------
68 | >>> Mapper.get_date('/path/to/MyWave_wam4_WAVE_20171029T18Z.nc')
69 | '2017-10-29T18:00:00Z'
70 | """
71 | _, filename = os.path.split(filename)
72 | t = datetime.strptime(filename.split('_')[-1], '%Y%m%dT%HZ.nc')
73 | return datetime.strftime(t, '%Y-%m-%dT%H:%M:00Z')
74 |
75 | def convert_dstime_datetimes(self, ds_time):
76 | """Convert time variable to np.datetime64"""
77 | ds_datetimes = np.array(
78 | [(np.datetime64(self.timeCalendarStart).astype('M8[s]')
79 | + np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
80 | return ds_datetimes
81 |
--------------------------------------------------------------------------------
/nansat/nsr.py:
--------------------------------------------------------------------------------
1 | # Name: nsr.py
2 | # Purpose: Container of NSR class
3 | # Authors: Anton Korosov
4 | # Created: 01.02.2014
5 | # Copyright: (c) NERSC 2011 - 2014
6 | # Licence:
7 | # This file is part of NANSAT.
8 | # NANSAT is free software: you can redistribute it and/or modify
9 | # it under the terms of the GNU General Public License as published by
10 | # the Free Software Foundation, version 3 of the License.
11 | # http://www.gnu.org/licenses/gpl-3.0.html
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
15 | from __future__ import absolute_import, unicode_literals
16 | import sys
17 | from nansat.utils import osr
18 | osr.UseExceptions()
19 |
20 | from nansat.exceptions import NansatProjectionError
21 |
22 |
23 | class NSR(osr.SpatialReference, object):
24 | """Nansat Spatial Reference. Overrides constructor of osr.SpatialReference.
25 |
26 | Parameters
27 | ----------
28 | srs : 0, PROJ4 or EPSG or WKT or osr.SpatialReference, NSR
29 | Specifies spatial reference system (SRS)
30 | PROJ4:
31 | string with proj4 options [http://trac.osgeo.org/proj/] e.g.:
32 | '+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs'
33 | '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=75 +lon_0=0 +no_defs'
34 | EPSG:
35 | integer with EPSG number, [http://spatialreference.org/],
36 | e.g. 4326
37 | WKT:
38 | string with Well Know Text of SRS. E.g.:
39 | 'GEOGCS["WGS 84",
40 | DATUM["WGS_1984",
41 | SPHEROID["WGS 84",6378137,298.257223563,
42 | AUTHORITY["EPSG","7030"]],
43 | TOWGS84[0,0,0,0,0,0,0],
44 | AUTHORITY["EPSG","6326"]],
45 | PRIMEM["Greenwich",0,
46 | AUTHORITY["EPSG","8901"]],
47 | UNIT["degree",0.0174532925199433,
48 | AUTHORITY["EPSG","9108"]],
49 | AUTHORITY["EPSG","4326"]]'
50 |
51 | """
52 |
53 | def __init__(self, srs=0):
54 | """Create Spatial Reference System from input parameter"""
55 | if sys.version_info.major == 2:
56 | str_types = (str, unicode)
57 | else:
58 | str_types = str
59 | # create SRS
60 | osr.SpatialReference.__init__(self)
61 |
62 | # parse input parameters
63 |         if srs == 0:
64 | # generate default WGS84 SRS
65 | self.ImportFromEPSG(4326)
66 | elif isinstance(srs, str_types):
67 | # parse as proj4 string
68 | try:
69 | self.ImportFromProj4(str(srs))
70 | except RuntimeError:
71 | # parse as WKT string
72 | try:
73 | self.ImportFromWkt(str(srs))
74 | except RuntimeError:
75 | raise NansatProjectionError('Proj4 or WKT (%s) is wrong' % srs)
76 | elif isinstance(srs, int):
77 | # parse as EPSG code
78 | try:
79 | self.ImportFromEPSG(srs)
80 | except RuntimeError:
81 | raise NansatProjectionError('EPSG %d is wrong' % srs)
82 | elif type(srs) in [osr.SpatialReference, NSR]:
83 | # parse from input Spatial Reference
84 | try:
85 | self.ImportFromWkt(srs.ExportToWkt())
86 | except RuntimeError:
87 | raise NansatProjectionError('NSR %s is wrong' % srs)
88 |
89 | @property
90 | def wkt(self):
91 | """Well Known Text representation of SRS"""
92 | return self.ExportToWkt()
93 |
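# Example usage (illustrative sketch; inputs follow the class docstring):
#
#     NSR()                                      # default WGS84 (EPSG:4326)
#     NSR(4326)                                  # from an EPSG code
#     NSR('+proj=stere +lat_0=75 +lon_0=0 +datum=WGS84 +ellps=WGS84').wkt   # from PROJ4, as WKT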
--------------------------------------------------------------------------------
/nansat/mappers/get_inv.pl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/perl -w
2 | $curl="curl";
3 |
4 | # get_inv.pl wesley ebisuzaki
5 | # v0.9 1/2005
6 | #
7 | # gets a wgrib inventory from the net
8 | # adds cURL compatible "range" field (byte address of grib record)
9 | #
10 | # requires:
11 | # curl: http://curl.haxx.se open source MIT/X derivate license
12 | # perl: open source
13 | #
14 | # configuration:
15 | # line 1: #!{location of perl} -w
16 | # line 2: curl="{location of curl executable}";
17 | #
18 | # v0.9 1st version
19 | # v0.9.3 2/2005
20 | # v0.9.4 5/2006 grib2 support
21 | # v0.9.5 7/2006 grib2 support - no need for -range option in inventory
22 | # v0.9.6 1/2009 someone actually used the -range option in the inventory, remove error message
23 | #
24 | $version="get_inv.pl v0.9.6 1/2009 wesley ebisuzaki\n";
25 | $file='';
26 | foreach $_ (@ARGV) {
27 | SWITCH: {
28 | /^-v$|^--version$/ && do { print STDERR $version; exit 8; };
29 | /^ftp:|^http:/ && do {
30 | if ($file eq '') { $file = $_; last SWITCH; }
31 | else {
32 | print STDERR "error: multiple URLs found\n";
33 | exit 8;
34 | }
35 | };
36 | /^-h$|^--help$/ && do {
37 | print STDERR "$0: gets wgrib inventory from net, adds range field\n";
38 | print STDERR " usage: $0 URL-of-wgrib-inventory\n";
39 | print STDERR " ex: $0 http://nomad3.ncep.noaa.gov/pub/gfs/rotating/gblav.t00z.pgrbf12.inv\n\n";
40 | print STDERR "This program is part of a package to select GRIB records (messages)\n";
41 | print STDERR "to download. By selecting specific records, the download times can\n";
42 | print STDERR "be significantly reduced. This program uses cURL and supports grib\n";
43 | print STDERR "data on http: (most) and ftp: servers that include wgrib inventories.\n";
44 | print STDERR "ref: http://www.cpc.ncep.noaa.gov/products/wesley/fast_downloading_grib.html\n";
45 | exit 8;
46 | };
47 | print STDERR "error: unknown parameter\n";
48 | exit 8;
49 | }
50 | }
51 |
52 | if ($file eq '') {
53 | print STDERR $version;
54 | print STDERR "\n$0: gets wgrib inventory from net, adds range field\n";
55 | print STDERR " usage: $0 URL-of-wgrib-inventory\n";
56 | exit 8;
57 | }
58 |
59 | open (In, "$curl -f -s $file |");
60 |
61 | $last=0;
62 | $lastnum = -1;
63 | $has_range = 0;
64 | while (<In>) {
65 | if (/:range=/) {
66 | # grib2 inventory has range field
67 | $has_range = 1;
68 | print $_;
69 | }
70 | else {
71 | # grib1/2 inventory, figure range field
72 | chomp;
73 | ($f1,$num,$rest) = split(/:/,$_,3);
74 |
75 | # check for missing file
76 | if (! defined($num) || "$num" eq "") {
77 | sleep(5);
78 | print STDERR "ERROR: Bad URL or not wgrib inventory: $file\n";
79 | sleep(3);
80 | exit 7;
81 | }
82 |
83 | if ($lastnum != $num && $last != 0) {
84 | $n = $num - 1;
85 | for ($i = 0; $i < $last; $i++) {
86 | print "$old_lines[$i]:range=$lastnum-$n\n";
87 | }
88 | $lastnum = $num;
89 | $last = 1;
90 | $old_lines[0] = $_;
91 | }
92 | else {
93 | $old_lines[$last++] = $_;
94 | $lastnum = $num;
95 | }
96 | }
97 | }
98 |
99 | if ($has_range == 1) { exit 0; }
100 |
101 | $f1="";
102 | $rest="";
103 | if ($last != 0) {
104 | for ($i = 0; $i < $last; $i++) {
105 | print "$old_lines[$i]:range=$lastnum\n";
106 | }
107 | }
108 | else {
109 | sleep(5);
110 | print STDERR "missing wgrib inventory\n";
111 | sleep(3);
112 | if (! -t STDIN) {
113 | # not a terminal .. sleep longer
114 | sleep(60);
115 | }
116 | exit 6;
117 | }
118 | exit 0;
119 |
--------------------------------------------------------------------------------
/nansat/mappers/get_grib.pl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/perl -w
2 | $curl="curl";
3 |
4 | # get_grib.pl wesley ebisuzaki
5 | # v0.9 1/2005
6 | #
7 | # gets a grib file from the net, uses a wgrib inventory with a
8 | # "range inventory" to control the ftp/http transfer
9 | #
10 | # requires:
11 | # curl: http://curl.haxx.se open source MIT/X derivate license
12 | # perl: open source
13 | #
14 | # configuration:
15 | # line 1: #!{location of perl} -w
16 | # line 2: curl="{location of curl executable}";
17 | #
18 | # v0.9.5 6/2006 error with grib-1, last record
19 | # v0.9.6 10/2006 better error return codes
20 |
21 | $version="get_grib.pl v0.9.6 10/2006 wesley ebisuzaki\n";
22 |
23 | $file="";
24 | $url="";
25 |
26 | foreach $_ (@ARGV) {
27 | SWITCH: {
28 | /^-v$|^--version$/ && do { print STDERR $version; exit 8; };
29 | /^http:/ && do {
30 | if ($url eq '') { $url = $_; last SWITCH; }
31 | else {
32 | print STDERR "error: multiple URLs found\n";
33 | exit 8;
34 | }
35 | };
36 | /^-h$|^--help$/ && do {
37 | print STDERR "\n$0: gets selected grib records from net\n";
38 | print STDERR " usage: cat {enhanced wgrib inv} | grep FIELD | $0 URL-of-gribfile output-file\n\n";
39 | print STDERR " ex: get_inv.pl http://nomad3.ncep.noaa.gov/pub/gfs/rotating/gblav.t00z.pgrbf12.inv | \\\n";
40 | print STDERR " grep \":HGT:500 mb:\" | \\\n";
41 | print STDERR " $0 http://nomad3.ncep.noaa.gov/pub/gfs/rotating/gblav.t00z.pgrbf12 out.grb\n\n";
42 | print STDERR "This program is part of a package to select GRIB records (messages)\n";
43 | print STDERR "to download. By selecting specific records, the download times can\n";
44 | print STDERR "be significantly reduced. This program uses cURL and supports grib\n";
45 | print STDERR "data on http: (most) and ftp: servers that include wgrib inventories.\n";
46 | print STDERR "ref: http://www.cpc.ncep.noaa.gov/products/wesley/fast_downloading_grib.html\n";
47 | exit 8;
48 | };
49 | if ($file eq "") {
50 | $file = $_;
51 | last SWITCH;
52 | }
53 | print STDERR "error: multiple URLs found\n";
54 | sleep(5);
55 | exit 8;
56 | }
57 | }
58 |
59 | if ($file eq '' || $url eq '') {
60 | print STDERR $version;
61 | print STDERR "\n$0: gets gribfile from net using wgrib inventory with range field\n";
62 | print STDERR " usage: cat {wgrib inv with range field} | $0 URL-of-wgrib-inventory\n";
63 | if ($file eq '') {
64 |         print STDERR "\n\n MISSING OUTPUT FILE!\n\n";
65 | }
66 | else {
67 |         print STDERR "\n\n MISSING URL!\n\n";
68 | }
69 | exit 8;
70 | }
71 |
72 |
73 | $range="";
74 | $lastfrom='';
75 | $lastto=-100;
76 | while (<STDIN>) {
77 | chomp;
78 | $from='';
79 | /:range=([0-9]*)/ && do {$from=$1};
80 | $to='';
81 | /:range=[0-9]*-([0-9]*)/ && do {$to=$1};
82 |
83 | if ($lastto+1 == $from) {
84 | # delay writing out last range specification
85 | $lastto = $to;
86 | }
87 | elsif ($lastto ne $to) {
88 | # write out last range specification
89 | if ($lastfrom ne '') {
90 | if ($range eq '') { $range="$lastfrom-$lastto"; }
91 | else { $range="$range,$lastfrom-$lastto"; }
92 | }
93 | $lastfrom=$from;
94 | $lastto=$to;
95 | }
96 | # print "$_\n";
97 | }
98 | if ($lastfrom ne '') {
99 | if ($range eq '') { $range="$lastfrom-$lastto"; }
100 | else { $range="$range,$lastfrom-$lastto"; }
101 | }
102 |
103 | unlink $file;
104 | if ($range ne "") {
105 | $err=system("$curl -f -v -s -r \"$range\" $url -o $file.tmp");
106 | $err = $err >> 8;
107 | if ($err != 0) {
108 | print STDERR "error in getting file $err\n";
109 | sleep(20);
110 | exit $err;
111 | }
112 | if (! rename "$file.tmp", "$file") {
113 | sleep(30);
114 | }
115 | }
116 | else {
117 | sleep(10);
118 | print STDERR "No download! No matching grib fields\n";
119 | sleep(30);
120 | exit 8;
121 | }
122 |
123 | exit 0;
124 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_amsr2_l3.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_amsr2_l3
2 | # Purpose: Mapping for Level-3 AMSR2 data from https://gcom-w1.jaxa.jp
3 | # Authors: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | from dateutil.parser import parse
8 | import datetime
9 | import os.path
10 | import glob
11 | import json
12 |
13 | import pythesint as pti
14 |
15 | import numpy as np
16 |
17 | from nansat.vrt import VRT
18 | from nansat.nsr import NSR
19 | from nansat.utils import gdal, ogr
20 | from nansat.exceptions import WrongMapperError
21 |
22 |
23 | class Mapper(VRT):
24 | ''' Mapper for Level-3 AMSR2 data from https://gcom-w1.jaxa.jp'''
25 |
26 | freqs = [6, 7, 10, 18, 23, 36, 89]
27 |
28 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
29 |         ''' AMSR2 L3 VRT '''
30 |
31 | # test the product
32 | try:
33 | assert gdalMetadata['PlatformShortName'] == 'GCOM-W1'
34 | assert gdalMetadata['SensorShortName'] == 'AMSR2'
35 | assert gdalMetadata['ProductName'] == 'AMSR2-L3'
36 |         except (AssertionError, KeyError, TypeError):
37 | raise WrongMapperError
38 |
39 | # get list of similar (same date, A/D orbit) files in the directory
40 | iDir, iFile = os.path.split(filename)
41 | iFileMask = iFile[:30] + '%02d' + iFile[32:]
42 | simFiles = []
43 | for freq in self.freqs:
44 | simFile = os.path.join(iDir, iFileMask % freq)
45 | #print simFile
46 | if os.path.exists(simFile):
47 | simFiles.append(simFile)
48 |
49 | metaDict = []
50 | for freq in self.freqs:
51 | simFile = os.path.join(iDir, iFileMask % freq)
52 | if simFile not in simFiles:
53 | continue
54 | #print 'simFile', simFile
55 | # open file, get metadata and get parameter name
56 | simSupDataset = gdal.Open(simFile)
57 | if simSupDataset is None:
58 | # skip this similar file
59 | #print 'No dataset: %s not a supported SMI file' % simFile
60 | continue
61 | # get subdatasets from the similar file
62 | simSubDatasets = simSupDataset.GetSubDatasets()
63 | for simSubDataset in simSubDatasets:
64 | #print 'simSubDataset', simSubDataset
65 | if 'Brightness_Temperature' in simSubDataset[0]:
66 | # get SourceFilename from subdataset
67 | metaEntry = {
68 | 'src': {'SourceFilename': simSubDataset[0],
69 | 'SourceBand': 1,
70 | 'ScaleRatio': 0.0099999998,
71 | 'ScaleOffset': 0},
72 | 'dst': {'wkv': 'brightness_temperature',
73 | 'frequency': '%02d' % freq,
74 | 'polarisation': simSubDataset[0][-2:-1],
75 | 'suffix': ('%02d%s' %
76 | (freq, simSubDataset[0][-2:-1]))}}
77 | metaDict.append(metaEntry)
78 |
79 | # initiate VRT for the NSIDC 10 km grid
80 | self._init_from_dataset_params(760, 1120, (-3850000, 10000, 0.0, 5850000, 0.0, -10000), NSR(3411).wkt)
81 |
82 | # add bands with metadata and corresponding values to the empty VRT
83 | self.create_bands(metaDict)
84 |
85 | # Adding valid time to dataset
86 | self.dataset.SetMetadataItem('time_coverage_start', parse(gdalMetadata['ObservationStartDateTime']).isoformat())
87 | self.dataset.SetMetadataItem('time_coverage_end', parse(gdalMetadata['ObservationStartDateTime']).isoformat())
88 |
89 | mm = pti.get_gcmd_instrument('AMSR2')
90 | ee = pti.get_gcmd_platform('GCOM-W1')
91 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
92 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
93 |
94 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_pathfinder52.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_pathfinder52
2 | # Purpose: Mapping for NOAA AVHRR PATHFINDER52 DATA
3 | # Authors: Anton Korosov, Dmitry Petrenko
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | from dateutil.parser import parse
8 |
9 | import numpy as np
10 |
11 | import nansat.vrt as vrt
12 | from nansat.nsr import NSR
13 | from nansat.exceptions import WrongMapperError
14 |
15 |
16 | class Mapper(vrt.VRT):
17 | ''' Mapper PATHFINDER (local files)
18 |
19 | TODO:
20 | * remote files
21 | '''
22 |
23 | def __init__(self, filename, gdalDataset, gdalMetadata, minQual=4,
24 | **kwargs):
25 | ''' Create VRT '''
26 |
27 | if not 'AVHRR_Pathfinder-PFV5.2' in filename:
28 | raise WrongMapperError(filename)
29 |
30 | subDatasets = gdalDataset.GetSubDatasets()
31 | metaDict = []
32 |         qualName = ''
33 |
34 | for subDataset in subDatasets:
35 | subDatasetName = subDataset[0].split(':')[2]
36 |
37 | if '//' in subDatasetName:
38 | h5Style = True
39 | else:
40 | h5Style = False
41 |
42 | if h5Style:
43 | subDatasetName = subDatasetName.replace('//', '')
44 |
45 | if subDatasetName == 'quality_level':
46 | qualName = subDataset[0]
47 |
48 | subGDALDataset = vrt.gdal.Open(subDataset[0])
49 | subGDALMetadata = subGDALDataset.GetRasterBand(1).GetMetadata()
50 | if h5Style:
51 | metaPrefix = subDatasetName + '_'
52 | else:
53 | metaPrefix = ''
54 |
55 | subWKV = subGDALMetadata.get(metaPrefix + 'standard_name', '')
56 | subScaleRatio = subGDALMetadata.get(metaPrefix + 'scale_factor',
57 | '1')
58 | subScaleOffset = subGDALMetadata.get(metaPrefix + 'add_offset',
59 | '0')
60 | metaEntry = {'src': {'SourceFilename': subDataset[0],
61 |                              'SourceBand': 1,
62 | 'ScaleRatio': subScaleRatio,
63 | 'ScaleOffset': subScaleOffset},
64 | 'dst': {'wkv': subWKV}}
65 |
66 | # append band metadata to metaDict
67 | metaDict.append(metaEntry)
68 |
69 | # create empty VRT dataset with geolocation only
70 | self._init_from_gdal_dataset(subGDALDataset)
71 |
72 | # add mask
73 | if qualName != '':
74 | qualDataset = vrt.gdal.Open(qualName)
75 | qualArray = qualDataset.ReadAsArray()
76 | qualArray[qualArray < minQual] = 1
77 | qualArray[qualArray >= minQual] = 128
78 | self.band_vrts = {'maskVRT': vrt.VRT(array=qualArray.astype('int8'))}
79 | metaDict.append({'src': {'SourceFilename': (self.
80 | band_vrts['maskVRT'].
81 | filename),
82 | 'SourceBand': 1,
83 | 'SourceType': 'SimpleSource',
84 | 'DataType': 1},
85 | 'dst': {'name': 'mask'}})
86 |
87 | # add bands with metadata and corresponding values to the empty VRT
88 | self.create_bands(metaDict)
89 |
90 | # append fixed projection and geotransform
91 | self.dataset.SetProjection(NSR().wkt)
92 | self.dataset.SetGeoTransform((-180, 0.0417, 0, 90, 0, -0.0417))
93 |
94 | # set TIMEstart_time
95 | if h5Style:
96 | startTimeKey = 'start_time'
97 | else:
98 | startTimeKey = 'NC_GLOBAL#start_time'
99 | self.dataset.SetMetadataItem('time_coverage_start', subGDALDataset.GetMetadataItem(startTimeKey))
100 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_opendap_arome.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_arome.py
2 | # Purpose: Nansat mapping for AROME-Arctic and MEPS (MetCoOp Ensemble
3 | # Prediction System) data provided by MET.NO
4 | # Author: Artem Moiseev
5 | # Licence: This file is part of NANSAT. You can redistribute it or modify
6 | # under the terms of GNU General Public License, v.3
7 | # http://www.gnu.org/licenses/gpl-3.0.html
8 |
9 | from nansat.mappers.mapper_arome import Mapper as MapperArome
10 | from nansat.mappers.opendap import Opendap
11 | from nansat.exceptions import WrongMapperError
12 | from nansat.nsr import NSR
13 | import pythesint as pti
14 | import os
15 | from datetime import datetime
16 | from netCDF4 import Dataset
17 | import numpy as np
18 | import json
19 |
20 |
21 | class Mapper(Opendap, MapperArome):
22 |
23 | baseURLs = ['http://thredds.met.no/thredds/catalog/arome25/catalog.html',
24 | 'https://thredds.met.no/thredds/dodsC/aromearcticarchive',
25 | 'http://thredds.met.no/thredds/dodsC/aromearcticarchive',
26 | 'https://thredds.met.no/thredds/dodsC/meps25epsarchive',
27 | 'http://thredds.met.no/thredds/dodsC/meps25epsarchive']
28 | timeVarName = 'time'
29 | xName = 'x'
30 | yName = 'y'
31 | timeCalendarStart = '1970-01-01'
32 |
33 | def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
34 | ds=None, bands=None, cachedir=None, *args, **kwargs):
35 |
36 | self.test_mapper(filename)
37 | timestamp = date if date else self.get_date(filename)
38 | ds = Dataset(filename)
39 |
40 | try:
41 | self.srcDSProjection = NSR(ds.variables['projection_lambert'].proj4)
42 | except KeyError:
43 | raise WrongMapperError
44 |
45 | self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
46 |
47 | mm = pti.get_gcmd_instrument('Computer')
48 | ee = pti.get_gcmd_platform('ecmwfifs')
49 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
50 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
51 |
52 | md_item = 'Data Center'
53 | if not self.dataset.GetMetadataItem(md_item):
54 | self.dataset.SetMetadataItem(md_item, 'NO/MET')
55 | md_item = 'Entry Title'
56 | if not self.dataset.GetMetadataItem(md_item):
57 | self.dataset.SetMetadataItem(md_item, str(ds.getncattr('title')))
58 | md_item = 'summary'
59 | if not self.dataset.GetMetadataItem(md_item):
60 | summary = """
61 | AROME_Arctic is a convection-permitting atmosphere model covering parts of the Barents
62 | Sea and the Nordic Arctic. It has horizontal resolution of 2.5 km and 65 vertical
63 | levels. AROME_Arctic runs for 66 hours four times a day (00,06,12,18) with three-hourly
64 | cycling for data assimilation. Boundary data is from ECMWF. Model code based on HARMONIE
65 | cy40h1.1
66 | """
67 | self.dataset.SetMetadataItem(md_item, str(summary))
68 |
69 | @staticmethod
70 | def get_date(filename):
71 | """Extract date and time parameters from filename and return
72 | it as a formatted string
73 |
74 | Parameters
75 | ----------
76 |
77 | filename: str
78 |             Path or OPeNDAP URL of the input dataset
79 |
80 | Returns
81 | -------
82 | str, YYYY-mm-ddThh:MMZ
83 |
84 | Examples
85 | --------
86 | >>> Mapper.get_date('/path/to/arome_arctic_full_2_5km_20171030T21Z.nc')
87 | '2017-10-30T21:00Z'
88 | """
89 | _, filename = os.path.split(filename)
90 | t = datetime.strptime(filename.split('_')[-1], '%Y%m%dT%HZ.nc')
91 | return datetime.strftime(t, '%Y-%m-%dT%H:%MZ')
92 |
93 | def convert_dstime_datetimes(self, ds_time):
94 | """Convert time variable to np.datetime64"""
95 | ds_datetimes = np.array(
96 | [(np.datetime64(self.timeCalendarStart).astype('M8[s]')
97 | + np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
98 | return ds_datetimes
99 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_metno_hires_seaice.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_metno_hires_seaice.py
2 | # Purpose: Nansat mapping for high resolution sea ice
3 | # from met.no Thredds server
4 | # Authors: Knut-Frode Dagestad
5 | # Licence: This file is part of NANSAT. You can redistribute it or modify
6 | # under the terms of GNU General Public License, v.3
7 | # http://www.gnu.org/licenses/gpl-3.0.html
8 |
9 | # High resolution (1 km) manual ice concentration, based on SAR imagery
10 | # http://thredds.met.no/thredds/catalog/myocean/siw-tac/siw-metno-svalbard/
11 | #
12 | # Mapper may be called with full URL:
13 | # 'http://thredds.met.no/thredds/dodsC/myocean/siw-tac/siw-metno-svalbard/2014/01/ice_conc_svalbard_201401091500.nc
14 | # or with keyword (fake filename):
15 | # 'metno_hires_seaice:20140109'
16 | #
17 | # The latter syntax will construct the URL,
18 | # and will return the closest available data within +/- 3 days
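#
# Example (illustrative; assumes Nansat is installed and the data is available):
#   from nansat import Nansat
#   n = Nansat('metno_hires_seaice:20140109')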
19 | import sys
20 | try:
21 |     from urllib.request import urlopen  # Python 3
22 | except ImportError:
23 |     from urllib2 import urlopen  # Python 2 fallback
24 |
25 | from datetime import datetime, timedelta
26 |
27 | from nansat.utils import gdal, ogr, osr
28 | from nansat.exceptions import WrongMapperError
29 | from nansat.vrt import VRT
30 |
31 |
32 | class Mapper(VRT):
33 | ''' Create VRT with mapping of WKV for Met.no seaice '''
34 |
35 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
36 | ''' Create VRT '''
37 |
38 | ThreddsBase = 'http://thredds.met.no/thredds/dodsC/myocean/siw-tac/siw-metno-svalbard/'
39 | # First check if mapper is called with keyword syntax:
40 | # filename = metno_hires_seaice:YYYYmmdd
41 | keywordBase = 'metno_hires_seaice'
42 | foundDataset = False
43 | if filename[0:len(keywordBase)] == keywordBase:
44 | keywordTime = filename[len(keywordBase)+1:]
45 | requestedTime = datetime.strptime(keywordTime, '%Y%m%d')
46 | # Search for nearest available file, within the closest 3 days
47 | for deltaDay in [0, -1, 1, -2, 2, -3, 3]:
48 | validTime = (requestedTime + timedelta(days=deltaDay) +
49 | timedelta(hours=15))
50 | filename = (ThreddsBase +
51 | validTime.strftime(
52 | '%Y/%m/ice_conc_svalbard_%Y%m%d1500.nc'))
53 | try:
54 |                     urlopen(filename + '.dds')
55 | foundDataset = True
56 | # Data is found for this day
57 | break
58 | except:
59 | # No data for this day
60 | pass
61 |
62 | if not foundDataset:
63 | raise WrongMapperError
64 |
65 | # Then check if a valid OPeNDAP URL is given
66 | # (or has been constructed from keyword)
67 | if filename[0:len(ThreddsBase)] != ThreddsBase:
68 |             raise AttributeError("Not Met.no Svalbard-ice Thredds URL")
69 | else:
70 | timestr = filename[-15:-3]
71 | validTime = datetime.strptime(timestr, '%Y%m%d%H%M')
72 |
73 | filename = filename + '?ice_concentration[0][y][x]'
74 | srcProjection = osr.SpatialReference()
75 |         srcProjection.ImportFromProj4('+proj=stere +lon_0=0.0 +lat_0=90 +datum=WGS84 +ellps=WGS84 +units=km +no_defs')
76 | srcProjection = srcProjection.ExportToWkt()
77 |
78 | # From thredds web, with manual shift
79 | srcGeotransform = (-1243.008 - 1, 1, 0, -3190.026 - 7, 0, 1)
80 |
81 | # create empty VRT dataset with geolocation only
82 | self._init_from_dataset_params(3812, 2980, srcGeotransform, srcProjection)
83 |
84 | metaDict = [{'src': {'SourceFilename': filename,
85 |                      'SourceBand': 1},
86 | 'dst': {'name': 'sea_ice_area_fraction',
87 | 'wkv': 'sea_ice_area_fraction'}}]
88 |
89 | # Add band
90 | self.create_bands(metaDict)
91 |
92 | # Set time
93 | self.logger.info('Valid time: %s', str(validTime))
94 | self.dataset.SetMetadataItem('time_coverage_start',
95 | validTime.isoformat())
96 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_ncep_wind.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_ncep_wind
2 | # Purpose: Nansat mapping for NCEP GFS model data subset as downloaded
3 | # by mapper_ncep_wind_online
4 | # Author: Knut-Frode Dagestad
5 | # Licence: This file is part of NANSAT. You can redistribute it or modify
6 | # under the terms of GNU General Public License, v.3
7 | # http://www.gnu.org/licenses/gpl-3.0.html
8 | #
9 | # Made for GRIB files downloaded from http://nomads.ncep.noaa.gov/data/gfs4/
10 | import datetime
11 | import json
12 | import pythesint as pti
13 |
14 | from nansat.vrt import VRT
15 | from nansat.exceptions import WrongMapperError
16 |
17 |
18 | class Mapper(VRT):
19 | ''' VRT with mapping of WKV for NCEP GFS '''
20 |
21 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
22 | ''' Create NCEP VRT '''
23 |
24 | if not gdalDataset:
25 | raise WrongMapperError(filename)
26 |
27 | geotransform = gdalDataset.GetGeoTransform()
28 | if (geotransform != (-0.25, 0.5, 0.0, 90.25, 0.0, -0.5) or
29 | gdalDataset.RasterCount != 2): # Not water proof
30 | raise WrongMapperError(filename)
31 |
32 | metaDict = [{'src': {'SourceFilename': filename,
33 | 'SourceBand': 1},
34 | 'dst': {'wkv': 'eastward_wind',
35 | 'height': '10 m'}},
36 | {'src': {'SourceFilename': filename,
37 | 'SourceBand': 2},
38 | 'dst': {'wkv': 'northward_wind',
39 | 'height': '10 m'}},
40 | {'src': [{'SourceFilename': filename,
41 | 'SourceBand': 1,
42 | 'DataType': gdalDataset.GetRasterBand(1).DataType
43 | },
44 | {'SourceFilename': filename,
45 | 'SourceBand': 2,
46 | 'DataType': gdalDataset.GetRasterBand(2).DataType
47 | }],
48 | 'dst': {'wkv': 'wind_speed',
49 | 'PixelFunctionType': 'UVToMagnitude',
50 | 'name': 'windspeed',
51 |                               'height': '10 m'
52 | }},
53 | {'src': [{'SourceFilename': filename,
54 | 'SourceBand': 1,
55 | 'DataType': gdalDataset.GetRasterBand(1).DataType
56 | },
57 | {'SourceFilename': filename,
58 | 'SourceBand': 2,
59 | 'DataType': gdalDataset.GetRasterBand(2).DataType
60 | }],
61 | 'dst': {'wkv': 'wind_from_direction',
62 | 'PixelFunctionType': 'UVToDirectionFrom',
63 | 'name': 'winddirection',
64 |                               'height': '10 m'
65 | }
66 | }]
67 |
68 | # create empty VRT dataset with geolocation only
69 | self._init_from_gdal_dataset(gdalDataset)
70 |
71 | # add bands with metadata and corresponding values to the empty VRT
72 | self.create_bands(metaDict)
73 |
74 | # Adding valid time from the GRIB file to dataset
75 | validTime = gdalDataset.GetRasterBand(1).GetMetadata()['GRIB_VALID_TIME']
76 | self.dataset.SetMetadataItem('time_coverage_start',
77 | (datetime.datetime.utcfromtimestamp(
78 | int(validTime.strip().split(' ')[0])).isoformat()))
79 | self.dataset.SetMetadataItem('time_coverage_end',
80 | (datetime.datetime.utcfromtimestamp(
81 | int(validTime.strip().split(' ')[0])).isoformat()))
82 |
83 | # Get dictionary describing the instrument and platform according to
84 | # the GCMD keywords
85 | mm = pti.get_gcmd_instrument('computer')
86 | ee = pti.get_gcmd_platform('ncep-gfs')
87 |
88 | # TODO: Validate that the found instrument and platform are indeed what we
89 | # want....
90 |
91 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
92 | self.dataset.SetMetadataItem('platform', json.dumps(ee))
93 |
--------------------------------------------------------------------------------
/nansat/tests/test_tools.py:
--------------------------------------------------------------------------------
1 | #------------------------------------------------------------------------------
2 | # Name: test_tools.py
3 | # Purpose: Test tools from nansat.tools
4 | # Author: Artem Moiseev
5 | # Created: 17.01.2020
6 | # Copyright: (c) NERSC
7 | # Licence: This file is part of NANSAT. You can redistribute it or modify
8 | # under the terms of GNU General Public License, v.3
9 | # http://www.gnu.org/licenses/gpl-3.0.html
10 | #------------------------------------------------------------------------------
11 |
12 | import os
13 | import unittest
14 | from mock import patch, DEFAULT
15 |
16 | import numpy as np
17 | try:
18 | import matplotlib.pyplot as plt
19 | plt.switch_backend('Agg')
20 | except ImportError:
21 | MATPLOTLIB_IS_INSTALLED = False
22 | else:
23 | MATPLOTLIB_IS_INSTALLED = True
24 | try:
25 | import cartopy
26 | except ImportError:
27 | CARTOPY_IS_INSTALLED = False
28 | else:
29 | CARTOPY_IS_INSTALLED = True
30 |
31 | import nansat
32 | from nansat.domain import Domain
33 | from nansat.figure import Image
34 | from nansat.nansat import Nansat as NANSAT
35 | from nansat.tests import nansat_test_data as ntd
36 | from nansat.tools import (distance2coast,
37 | register_colormaps,
38 | get_domain_map,
39 | show_domain_map,
40 |                           save_domain_map)
41 |
42 |
43 |
44 | class ToolsTest(unittest.TestCase):
45 | def setUp(self):
46 | self.d = Domain(4326, "-te 25 70 35 72 -ts 100 100")
47 | # define a test Nansat object
48 | test_domain = Domain(4326, "-lle -180 -90 180 90 -ts 500 500")
49 | self.n = NANSAT.from_domain(test_domain, array=np.ones([500,500]))
50 |
51 | @patch('nansat.tools.os.getenv')
52 | def test_distance2coast_source_not_exists_envvar(self, mock_getenv):
53 | mock_getenv.return_value='/path/dos/not/exist'
54 | with self.assertRaises(IOError) as err:
55 | distance2coast(self.d)
56 | self.assertEqual('Distance to the nearest coast product does not exist - '
57 | 'see Nansat documentation to get it (the path is '
58 | '/path/dos/not/exist)', str(err.exception))
59 |
60 | def test_distance2coast_source_not_exists_attribute(self):
61 | with self.assertRaises(IOError) as err:
62 | distance2coast(self.d, distance_src='/path/dos/not/exist')
63 | self.assertEqual('Distance to the nearest coast product does not exist - '
64 | 'see Nansat documentation to get it (the path is '
65 | '/path/dos/not/exist)', str(err.exception))
66 |
67 | @patch.multiple('nansat.tools', Nansat=DEFAULT, os=DEFAULT)
68 | def test_distance2coast_integration(self, Nansat, os):
69 | Nansat.return_value = self.n
70 | os.path.exists.return_value=True
71 | result = distance2coast(self.d)
72 | self.assertEqual(type(result), NANSAT)
73 |
74 | def test_warning(self):
75 | register_colormaps()
76 | with self.assertWarns(UserWarning) as w:
77 | register_colormaps()
78 |
79 | @unittest.skipUnless(CARTOPY_IS_INSTALLED, 'Cartopy is required')
80 | def test_get_domain_map(self):
81 | ax = get_domain_map(self.d)
82 | self.assertIsInstance(ax, plt.Axes)
83 |
84 | def test_get_domain_map_no_cartopy(self):
85 | nansat.tools.CARTOPY_IS_INSTALLED = False
86 | with self.assertRaises(ImportError) as err:
87 | ax = get_domain_map(self.d)
88 | self.assertIn('Cartopy is not installed', str(err.exception))
89 | nansat.tools.CARTOPY_IS_INSTALLED = CARTOPY_IS_INSTALLED
90 |
91 | @unittest.skipUnless(CARTOPY_IS_INSTALLED, 'Cartopy is required')
92 | def test_save_domain_map(self):
93 | tmpfilename = os.path.join(ntd.tmp_data_path, 'domain_save_map.png')
94 | save_domain_map(self.d, tmpfilename)
95 | self.assertTrue(os.path.exists(tmpfilename))
96 | i = Image.open(tmpfilename)
97 | i.verify()
98 | # with cartopy>=0.20.0, the dpi attribute is not
99 | # always (100, 100)
100 | self.assertEqual(tuple(np.round(i.info['dpi'], 1)), (100, 100))
101 |
--------------------------------------------------------------------------------
/nansat/tests/test_pointbrowser.py:
--------------------------------------------------------------------------------
1 | # ------------------------------------------------------------------------------
2 | # Name: test_pointbrowser.py
3 | # Purpose: Test the PointBrowser class
4 | #
5 | # Author: Aleksander Vines, Anton Korosov
6 | #
7 | # Created: 2015-10-22
8 | # Copyright: (c) NERSC
9 | # Licence: This file is part of NANSAT. You can redistribute it or modify
10 | # under the terms of GNU General Public License, v.3
11 | # http://www.gnu.org/licenses/gpl-3.0.html
12 | # ------------------------------------------------------------------------------
13 | import os
14 | import unittest
15 | from mock import patch, PropertyMock, Mock, MagicMock, DEFAULT
16 |
17 | import numpy as np
18 | from nansat.pointbrowser import PointBrowser
19 |
20 | try:
21 | import matplotlib
22 | import matplotlib.pyplot as plt
23 | plt.switch_backend('Agg')
24 | except ImportError:
25 | MATPLOTLIB_IS_INSTALLED = False
26 | else:
27 | MATPLOTLIB_IS_INSTALLED = True
28 |
29 |
30 | class PointBrowserTest(unittest.TestCase):
31 | @unittest.skipUnless(MATPLOTLIB_IS_INSTALLED, 'Matplotlib is required')
32 | def setUp(self):
33 | self.data = np.zeros((4, 4))
34 | self.event = MagicMock()
35 |
36 | def test_init(self):
37 | """ Create Pointbrowser """
38 | pb = PointBrowser(self.data, force_interactive=False)
39 | self.assertIsInstance(pb.fig, plt.Figure)
40 | self.assertTrue(np.alltrue(pb.data == self.data))
41 | self.assertTrue(np.alltrue(pb.ax.get_images()[0].get_array() == self.data))
42 | self.assertEqual(pb.fmt, 'x-k')
43 | self.assertEqual(pb.points, [])
44 | self.assertEqual(pb.coordinates, [[]])
45 |
46 | def test_onclick(self):
47 | """ Mimic click """
48 | self.event = MagicMock()
49 | self.event.xdata = 10
50 | self.event.ydata = 10
51 | self.event.key = None
52 | pb = PointBrowser(self.data, force_interactive=False)
53 |
54 | pb.onclick(self.event)
55 | self.assertIsInstance(pb.points[0][0], matplotlib.lines.Line2D)
56 | self.assertEqual(pb.coordinates, [[(self.event.xdata, self.event.ydata)]])
57 |
58 | def test_onclick_none(self):
59 | """ Mimic click outside figure """
60 | self.event.xdata = None
61 | self.event.ydata = None
62 | self.event.key = None
63 | pb = PointBrowser(self.data, force_interactive=False)
64 |
65 | pb.onclick(self.event)
66 | self.assertEqual(pb.points, [])
67 | self.assertEqual(pb.coordinates, [[]])
68 |
69 | def test_onclick_key_z(self):
70 | """ Mimic click with 'z' pressed """
71 | self.event.xdata = 10
72 | self.event.ydata = 10
73 | self.event.key = 'z'
74 | pb = PointBrowser(self.data, force_interactive=False)
75 |
76 | pb.onclick(self.event)
77 | self.assertEqual(pb.points, [])
78 | self.assertEqual(pb.coordinates, [[]])
79 |
80 | def test_onclick_key(self):
81 | """ Mimic click with 'anykey' pressed """
82 | self.event = MagicMock()
83 | self.event.xdata = 10
84 | self.event.ydata = 10
85 | self.event.key = 'newkey'
86 | pb = PointBrowser(self.data, force_interactive=False)
87 |
88 | pb.onclick(self.event)
89 | self.assertIsInstance(pb.points[0][0], matplotlib.lines.Line2D)
90 | self.assertEqual(pb.coordinates, [[],[(self.event.xdata, self.event.ydata)]])
91 |
92 | def test_convert_coordinates(self):
93 | """ Mimic click with 'anykey' pressed """
94 | pb = PointBrowser(self.data, force_interactive=False)
95 | pb.coordinates = [[[1,2,3],[4,5,6]]]
96 | new_coordinates = pb._convert_coordinates()
97 | self.assertTrue(np.all(new_coordinates[0] == np.array([[1,2,3], [4,5,6]]).T))
98 |
99 | @patch('nansat.pointbrowser.plt')
100 | def test_get_points(self, plt_mock):
101 | plt_mock.show.return_value = None
102 | pb = PointBrowser(self.data, force_interactive=False)
103 | points = pb.get_points()
104 | self.assertTrue(pb.fig.canvas.mpl_connect.called)
105 | self.assertTrue(plt_mock.show.called)
106 | self.assertEqual(points, [])
107 |
108 |
109 | if __name__ == "__main__":
110 | unittest.main()
111 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_hirlam.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_hirlam.py
2 | # Purpose: Nansat mapping for Hirlam model data
3 | # (GRIB files from www.yr.no)
4 | # Authors: Knut-Frode Dagestad, Morten W. Hansen
5 | # Licence: This file is part of NANSAT. You can redistribute it or modify
6 | # under the terms of GNU General Public License, v.3
7 | # http://www.gnu.org/licenses/gpl-3.0.html
8 | import datetime
9 | import json
10 |
11 | import numpy
12 |
13 | from nansat.vrt import VRT
14 | from nansat.exceptions import WrongMapperError
15 |
16 | import pythesint as pti
17 |
18 |
19 | class Mapper(VRT):
20 | ''' VRT with mapping of WKV for HIRLAM '''
21 |
22 | def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
23 |
24 | try:
25 | geo_transform = gdalDataset.GetGeoTransform()[0:5]
26 | except AttributeError:
27 | raise WrongMapperError
28 | if geo_transform != (-12.1, 0.2, 0.0, 81.95, 0.0):
29 | raise WrongMapperError
30 |
31 | metaDict = [{'src': {'SourceFilename': filename,
32 | 'SourceBand': 2,
33 | 'NODATA': 9999},
34 | 'dst': {'wkv': 'eastward_wind',
35 | 'height': '10 m'}
36 | },
37 | {'src': {'SourceFilename': filename,
38 | 'SourceBand': 3,
39 | 'NODATA': 9999},
40 | 'dst': {'wkv': 'northward_wind',
41 | 'height': '10 m'}
42 | },
43 | {'src': [{'SourceFilename': filename,
44 | 'SourceBand': 2,
45 | 'DataType': gdalDataset.GetRasterBand(2).DataType
46 | },
47 | {'SourceFilename': filename,
48 | 'SourceBand': 3,
49 | 'DataType': gdalDataset.GetRasterBand(3).DataType
50 | }],
51 | 'dst': {'wkv': 'wind_speed',
52 | 'name': 'windspeed',
53 | 'height': '10 m',
54 | 'PixelFunctionType': 'UVToMagnitude',
55 | 'NODATA': 9999}
56 | },
57 | {'src': [{'SourceFilename': filename,
58 | 'SourceBand': 2,
59 | 'DataType': gdalDataset.GetRasterBand(2).DataType
60 | },
61 | {'SourceFilename': filename,
62 | 'SourceBand': 3,
63 | 'DataType': gdalDataset.GetRasterBand(3).DataType
64 | }],
65 | 'dst': {'wkv': 'wind_from_direction',
66 | 'name': 'winddirection',
67 | 'height': '10 m',
68 | 'PixelFunctionType': 'UVToDirectionFrom',
69 | 'NODATA': 9999
70 | }
71 | }]
72 |
73 | # create empty VRT dataset with geolocation only
74 | self._init_from_gdal_dataset(gdalDataset, metadata=gdalMetadata)
75 |
76 | # Create bands
77 | self.create_bands(metaDict)
78 |
79 | # set source, start_date, stop_date
80 | self.dataset.SetMetadataItem('source', 'HIRLAM')
81 |
82 | # Adding valid time from the GRIB file to dataset
83 | start_date = gdalDataset.GetRasterBand(1).GetMetadata()['GRIB_VALID_TIME']
84 | self.dataset.SetMetadataItem('time_coverage_start',
85 | datetime.datetime.utcfromtimestamp(
86 | int(start_date.strip().split(' ')[0])).isoformat() + '+00:00')
87 |
88 | stop_date = gdalDataset.GetRasterBand(gdalDataset.RasterCount).GetMetadata()['GRIB_VALID_TIME']
89 | self.dataset.SetMetadataItem('time_coverage_end',
90 | datetime.datetime.utcfromtimestamp(
91 | int(stop_date.strip().split(' ')[0])).isoformat() + '+00:00')
92 |
93 | mm = pti.get_gcmd_instrument('computer')
94 | self.dataset.SetMetadataItem('instrument', json.dumps(mm))
95 | ee = pti.get_gcmd_platform('merged analysis')
96 |         self.dataset.SetMetadataItem('platform', json.dumps(ee))
97 |
--------------------------------------------------------------------------------
/nansat/mappers/mapper_hirlam_wind_netcdf.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_hirlam2nc.py
2 | # Purpose: Mapper for Hirlam wind data converted from felt to netCDF
3 | # Authors: Knut-Frode Dagestad
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import datetime
8 |
9 | from nansat.utils import gdal, ogr
10 | from nansat.exceptions import WrongMapperError
11 | from nansat.vrt import VRT
12 |
13 |
14 | class Mapper(VRT):
15 | def __init__(self, filename, gdalDataset, gdalMetadata, logLevel=30,
16 | **kwargs):
17 |
18 | if not gdalMetadata:
19 | raise WrongMapperError
20 |
21 | isHirlam = False
22 | for key in gdalMetadata.keys():
23 | if 'creation by fimex from file' in gdalMetadata[key]:
24 | isHirlam = True
25 |
26 | if not isHirlam:
27 | raise WrongMapperError
28 |
29 | #GeolocMetaDict = [{'src':
30 | # {'SourceFilename': 'NETCDF:"' + filename + '":longitude',
31 | # 'SourceBand': 1,
32 | # 'ScaleRatio': 1,
33 | # 'ScaleOffset': 0},
34 | # 'dst': {}},
35 | # {'src':
36 | # {'SourceFilename': 'NETCDF:"' + filename + '":latitude',
37 | # 'SourceBand': 1,
38 | # 'ScaleRatio': 1,
39 | # 'ScaleOffset': 0},
40 | # 'dst': {}}]
41 |
42 | subDataset = gdal.Open('NETCDF:"' + filename + '":x_wind_10m')
43 | #self.GeolocVRT = VRT(srcRasterXSize=subDataset.RasterXSize,
44 | # srcRasterYSize=subDataset.RasterYSize)
45 | #self.GeolocVRT.create_bands(GeolocMetaDict)
46 |
47 | #GeolocObject = GeolocationArray(xVRT=self.GeolocVRT,
48 | # yVRT=self.GeolocVRT,
49 | # xBand=1, yBand=2,
50 | # lineOffset=0, pixelOffset=0,
51 | # lineStep=1, pixelStep=1)
52 |
53 | ## create empty VRT dataset with geolocation only
54 | #VRT.__init__(self, srcRasterXSize = subDataset.RasterXSize,
55 | # srcRasterYSize = subDataset.RasterYSize,
56 | # geolocationArray = GeolocObject,
57 | # srcProjection = GeolocObject.d['SRS'])
58 |         lon = gdal.Open(
59 |             'NETCDF:"' + filename + '":longitude').ReadAsArray()
60 |         lat = gdal.Open(
61 |             'NETCDF:"' + filename + '":latitude').ReadAsArray()
62 | self._init_from_lonlat(lon, lat)
63 |
64 | # Add bands with wind components
65 | metaDict = [{'src': {'SourceFilename': ('NETCDF:"' + filename +
66 | '":x_wind_10m'),
67 | 'NODATA': -32767},
68 | 'dst': {'name': 'U',
69 | 'wkv': 'eastward_wind'}},
70 | {'src': {'SourceFilename': ('NETCDF:"' + filename +
71 | '":y_wind_10m'),
72 | 'NODATA': -32767},
73 | 'dst': {'name': 'V',
74 | 'wkv': 'northward_wind'}}]
75 |
76 | # Add pixel function with wind speed
77 | metaDict.append({'src': [{'SourceFilename': ('NETCDF:"' + filename +
78 | '":x_wind_10m'),
79 | 'SourceBand': 1,
80 | 'DataType': 6},
81 | {'SourceFilename': ('NETCDF:"' + filename +
82 | '":y_wind_10m'),
83 | 'SourceBand': 1,
84 | 'DataType': 6}],
85 | 'dst': {'wkv': 'wind_speed',
86 | 'name': 'windspeed',
87 | 'height': '10 m',
88 | 'PixelFunctionType': 'UVToMagnitude',
89 | 'NODATA': 9999}})
90 |
91 | # add bands with metadata and corresponding values
92 | # to the empty VRT
93 | self.create_bands(metaDict)
94 |
95 | # Add valid time
96 | validTime = datetime.datetime.utcfromtimestamp(
97 | int(subDataset.GetRasterBand(1).
98 | GetMetadata()['NETCDF_DIM_time']))
99 | self.dataset.SetMetadataItem('time_coverage_start', validTime.isoformat())
100 |
--------------------------------------------------------------------------------
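
The mapper above addresses individual netCDF variables through GDAL's subdataset syntax, NETCDF:"<file>":<variable>. A minimal sketch of that access pattern, assuming a hypothetical input file hirlam_wind.nc converted by fimex:

from nansat.utils import gdal

filename = 'hirlam_wind.nc'  # hypothetical file name
# open a single variable of the netCDF file as a GDAL dataset
sub = gdal.Open('NETCDF:"' + filename + '":x_wind_10m')
if sub is not None:
    u10 = sub.GetRasterBand(1).ReadAsArray()
    print(sub.RasterXSize, sub.RasterYSize, u10.dtype)
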
/nansat/mappers/scatterometers.py:
--------------------------------------------------------------------------------
1 | import json
2 | import numpy as np
3 | from datetime import datetime
4 | import pythesint as pti
5 |
6 | from nansat.utils import gdal
7 |
8 | from nansat.vrt import VRT
9 | from nansat.geolocation import Geolocation
10 | from nansat.nsr import NSR
11 | from nansat.domain import Domain
12 | from nansat.mappers.mapper_netcdf_cf import Mapper as NetcdfCF
13 | from nansat.exceptions import WrongMapperError
14 |
15 | class Mapper(NetcdfCF):
16 | """ Nansat mapper for scatterometers """
17 |
18 | def __init__(self, filename, gdal_dataset, metadata, quartile=0, *args, **kwargs):
19 |
20 | super(Mapper, self).__init__(filename, gdal_dataset, metadata, *args, **kwargs)
21 |
22 | intervals = [0,1,2,3]
23 |         if quartile not in intervals:
24 | raise ValueError('quartile must be one of [0,1,2,3]')
25 |
26 |         y_size = self.dataset.RasterYSize // 4
27 | y_offset = [y_size*qq for qq in intervals][quartile]
28 |
29 | # Crop
30 | self.set_offset_size('y', y_offset, y_size)
31 |
32 | # Add quartile to metadata
33 | self.dataset.SetMetadataItem('quartile', str(quartile))
34 |
35 | # Create band of times
36 | # TODO: resolve nansat issue #263 (https://github.com/nansencenter/nansat/issues/263)
37 |
38 | tt = self.times()[int(y_offset) : int(y_offset + y_size)]
39 | self.dataset.SetMetadataItem('time_coverage_start', tt[0].astype(datetime).isoformat())
40 | self.dataset.SetMetadataItem('time_coverage_end', tt[-1].astype(datetime).isoformat())
41 | time_stamps = (tt - tt[0]) / np.timedelta64(1, 's')
42 | self.band_vrts['time'] = VRT.from_array(
43 | np.tile(time_stamps, (self.dataset.RasterXSize, 1)).transpose()
44 | )
45 | self.create_band(
46 | src = {
47 | 'SourceFilename': self.band_vrts['time'].filename,
48 | 'SourceBand': 1,
49 | },
50 | dst = {
51 | 'name': 'timestamp',
52 | 'time_coverage_start': tt[0].astype(datetime).isoformat(),
53 | 'units': 'seconds since time_coverage_start',
54 | }
55 | )
56 |
57 | # Set projection to wkt
58 | #self.dataset.SetProjection(NSR().wkt)
59 |
60 | def set_gcps(self, lon, lat, gdal_dataset):
61 | """ Set gcps """
62 | self.band_vrts['new_lon_VRT'] = VRT.from_array(lon)
63 | self.dataset.SetGCPs(VRT._lonlat2gcps(lon, lat, n_gcps=400), NSR().wkt)
64 |
65 | # Add geolocation from correct longitudes and latitudes
66 | self._add_geolocation(
67 | Geolocation(self.band_vrts['new_lon_VRT'], self, x_band=1, y_band=self._latitude_band_number(gdal_dataset))
68 | )
69 |
70 | def _geoloc_band_number(self, gdal_dataset, sub_filename_index, long_name):
71 | """ Return the band number associated to a specific geolocation sub-file and long_name
72 | """
73 | band_index = [ii for ii, ll in enumerate(self._get_sub_filenames(gdal_dataset)) if
74 | ':'+sub_filename_index in ll]
75 | if band_index:
76 | band_num = band_index[0] + 1
77 | else:
78 | raise WrongMapperError
79 | # Check that it is actually latitudes
80 | band = gdal.Open(
81 | self._get_sub_filenames(gdal_dataset)[band_index[0]]
82 | )
83 | if not band.GetRasterBand(1).GetMetadata()['long_name'] == long_name:
84 | raise ValueError('Cannot find %s band'%long_name)
85 | return band_num
86 |
87 | def _latitude_band_number(self, gdal_dataset):
88 | return self._geoloc_band_number(gdal_dataset, 'lat', 'latitude')
89 |
90 | def _longitude_band_number(self, gdal_dataset):
91 | return self._geoloc_band_number(gdal_dataset, 'lon', 'longitude')
92 |
93 | @staticmethod
94 | def shift_longitudes(lon):
95 | """ Apply correction of longitudes (they are defined on 0:360 degrees but also contain
96 |         negative values)
97 |
98 | TODO: consider making this core to nansat - different ways of defining longitudes (-180:180
99 |         or 0:360 degrees) often cause problems...
100 | """
101 | return np.mod(lon+180., 360.) - 180.
102 |
103 | def _create_empty(self, gdal_dataset, gdal_metadata):
104 | lat = gdal.Open(
105 | self._get_sub_filenames(gdal_dataset)[self._latitude_band_number(gdal_dataset)]
106 | )
107 | super(NetcdfCF, self).__init__(lat.RasterXSize, lat.RasterYSize, metadata=gdal_metadata)
108 |
--------------------------------------------------------------------------------
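
shift_longitudes wraps longitudes that are nominally on a 0:360 axis (but may contain negative values) into the -180:180 range. A small self-contained check of the formula, with illustrative values:

import numpy as np

def shift_longitudes(lon):
    # same formula as Mapper.shift_longitudes: wrap into [-180, 180)
    return np.mod(lon + 180., 360.) - 180.

lon = np.array([0., 90., 180., 270., 350., -10.])
print(shift_longitudes(lon))  # [   0.   90. -180.  -90.  -10.  -10.]
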
/nansat_integration_tests/test_radarsat2.py:
--------------------------------------------------------------------------------
1 | # ------------------------------------------------------------------------------
2 | # Name: test_radarsat2.py
3 | # Purpose: End to end testing of Nansat for Radarsat-2
4 | #
5 | # Author: Morten Wergeland Hansen
6 | # Modified: Morten Wergeland Hansen, Aleksander Vines
7 | #
8 | # Created: 17.04.2015
9 | # Last modified:23.12.2015 13:21
10 | # Copyright: (c) NERSC
11 | # License:
12 | # ------------------------------------------------------------------------------
13 | import os
14 | import sys
15 | import numpy as np
16 |
17 | from nansat.nansat import Nansat
18 | from nansat_integration_tests.mapper_test_archive import DataForTestingMappers
19 |
20 |
21 | class TestRadarsat(object):
22 |
23 | def test_all_rs2_files(self):
24 | sys.stderr.write('\ntest_all_rs2_files\n')
25 | testData = DataForTestingMappers()
26 | rs2Index = [i for i,
27 | v in enumerate(testData.mapperData) if v['mapperName'] == 'radarsat2']
28 | for index in rs2Index:
29 | rsfile = testData.mapperData[index][0]
30 | # yield self.export2thredds, rsfile
31 | yield self.export, rsfile
32 | yield self.incidence_angle, rsfile
33 | yield self.export_band, rsfile
34 | yield self.resize, rsfile
35 |
36 | def export2thredds(self, rsfile):
37 | sys.stderr.write('\nexport2thredds:'+rsfile)
38 | ncfile = 'test.nc'
39 | orig = Nansat(rsfile)
40 | orig.export2thredds(ncfile, bands={'incidence_angle': {}})
41 | copy = Nansat(ncfile)
42 | inc0 = orig['incidence_angle']
43 | inc1 = copy['incidence_angle']
44 | np.testing.assert_allclose(inc0, inc1)
45 | os.unlink(ncfile)
46 |
47 | def export(self, rsfile):
48 | sys.stderr.write('\nexport:'+rsfile+'\n')
49 | ncfile = 'test.nc'
50 | orig = Nansat(rsfile)
51 | sys.stderr.write('\nExporting\n')
52 | orig.export(ncfile)
53 | sys.stderr.write('\nOpening Copy\n')
54 | copy = Nansat(ncfile)
55 | inc0 = orig['incidence_angle']
56 | inc1 = copy['incidence_angle']
57 | sys.stderr.write('\nGet orig grids\n')
58 | lon0, lat0 = orig.get_geolocation_grids()
59 | sys.stderr.write('\nGet copy grids\n')
60 | lon1, lat1 = copy.get_geolocation_grids()
61 | sys.stderr.write('\nGet orig sigma0_HH\n')
62 | sigma0_0 = orig['sigma0_HH']
63 | sys.stderr.write('\nGet copy sigma0_HH\n')
64 | sigma0_1 = copy['sigma0_HH']
65 | sys.stderr.write('\nAsserting\n')
66 | np.testing.assert_allclose(lon0, lon1)
67 | np.testing.assert_allclose(lat0, lat1)
68 | # If the next tests fail, it could indicate that the data is flipped
69 | # check by pyplot.imshow orig vs copy...
70 | np.testing.assert_allclose(inc0, inc1)
71 | np.testing.assert_allclose(sigma0_0, sigma0_1)
72 | os.unlink(ncfile)
73 |
74 | def incidence_angle(self, rsfile):
75 | sys.stderr.write('\nincidence_angle:'+rsfile+'\n')
76 | n = Nansat(rsfile)
77 | inc_min = float(n.get_metadata()['NEAR_RANGE_INCIDENCE_ANGLE'])-0.5
78 | inc_max = float(n.get_metadata()['FAR_RANGE_INCIDENCE_ANGLE'])+0.5
79 | inc = n['incidence_angle']
80 |         assert np.all(np.greater_equal(inc[~np.isnan(inc)], inc_min))
81 |         assert np.all(np.less_equal(inc[~np.isnan(inc)], inc_max))
82 |
83 | def export_band(self, rsfile):
84 | sys.stderr.write('\nexport_band:'+rsfile+'\n')
85 | orig = Nansat(rsfile)
86 | ncfile = 'test.nc'
87 | orig.export(ncfile, bands=[orig.get_band_number('incidence_angle')])
88 | copy = Nansat(ncfile)
89 | inc0 = orig['incidence_angle']
90 | inc1 = copy['incidence_angle']
91 | np.testing.assert_allclose(inc0, inc1)
92 | os.unlink(ncfile)
93 |
94 | def resize(self, rsfile):
95 | sys.stderr.write('\nresize:'+rsfile+'\n')
96 | n = Nansat(rsfile)
97 | inc_max = float(n.get_metadata()['FAR_RANGE_INCIDENCE_ANGLE'])+0.5
98 | n.resize(0.5, eResampleAlg=0)
99 | assert (np.nanmax(n['incidence_angle']) <= inc_max)
100 | n.undo()
101 | n.resize(0.5, eResampleAlg=1)
102 | assert (np.nanmax(n['incidence_angle']) <= inc_max)
103 | n.undo()
104 | n.resize(0.5, eResampleAlg=2)
105 | assert (np.nanmax(n['incidence_angle']) <= inc_max)
106 | n.undo()
107 | n.resize(0.5, eResampleAlg=3)
108 | assert (np.nanmax(n['incidence_angle']) <= inc_max)
109 | n.undo()
110 | n.resize(0.5, eResampleAlg=4)
111 | assert (np.nanmax(n['incidence_angle']) <= inc_max)
112 | n.undo()
113 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: "Unit tests and build"
3 | on:
4 | push:
5 | branches: ['**']
6 | release:
7 | types: [prereleased, released]
8 | env:
9 | IMAGE_NAME: "${{ vars.DOCKER_ORG }}/nansat"
10 | BASE_IMAGE_NAME: "${{ vars.DOCKER_ORG }}/nansat_base"
11 | BASE_IMAGE_TAG: '3.0.0'
12 | jobs:
13 | tests_and_docker_build:
14 | name: 'Run unit tests and build docker image'
15 | runs-on: 'ubuntu-20.04'
16 | env:
17 | latest: ${{ matrix.python_version == '3.11' && 'true' || '' }}
18 | strategy:
19 | matrix:
20 | python_version:
21 | - '3.7'
22 | - '3.8'
23 | - '3.9'
24 | - '3.10'
25 | - '3.11'
26 | steps:
27 | - name: 'Checkout repository'
28 | uses: actions/checkout@v4
29 | with:
30 | fetch-depth: 0
31 |
32 | - name: Set up Docker Buildx
33 | uses: docker/setup-buildx-action@v3
34 |
35 | - name: Cache Docker layers
36 | uses: actions/cache@v4
37 | with:
38 | path: /tmp/.buildx-cache
39 | key: ${{ runner.os }}-buildx-python${{ matrix.python_version }}-${{ github.sha }}
40 | restore-keys: |
41 | ${{ runner.os }}-buildx-python${{ matrix.python_version }}-
42 |
43 | - name: Login to DockerHub
44 | uses: docker/login-action@v3
45 | with:
46 | username: ${{ vars.DOCKER_USER }}
47 | password: ${{ secrets.DOCKER_PASS }}
48 |
49 | - name: 'Run tests'
50 | env:
51 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
52 | run: >
53 | docker run --rm
54 | -v "$(pwd):/src"
55 | -e "GITHUB_ACTIONS=$GITHUB_ACTIONS"
56 | -e "GITHUB_REF=$GITHUB_REF"
57 | -e "GITHUB_SHA=$GITHUB_SHA"
58 | -e "GITHUB_HEAD_REF=$GITHUB_HEAD_REF"
59 | -e "GITHUB_REPOSITORY=$GITHUB_REPOSITORY"
60 | -e "GITHUB_RUN_ID=$GITHUB_RUN_ID"
61 | -e "GITHUB_TOKEN=$GITHUB_TOKEN"
62 | "${BASE_IMAGE_NAME}:${BASE_IMAGE_TAG}-python${{ matrix.python_version }}"
63 | bash -c "
64 | pip install -e /src &&
65 | coverage run --omit=nansat/mappers/*,nansat/tests/*,nansat/nansatmap.py --source=nansat -m unittest discover nansat.tests"
66 |
67 | - name: 'Install Python 3.11'
68 | if: ${{ env.latest }}
69 | uses: actions/setup-python@v5
70 | with:
71 | python-version: '3.11'
72 |
73 | - name: 'Upload coverage to coveralls.io'
74 | if: ${{ env.latest }}
75 | env:
76 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
77 | run: pip install coveralls && coveralls --service=github
78 |
79 | - name: Build docker image
80 | uses: docker/build-push-action@v5
81 | with:
82 | context: .
83 | build-args: |
84 | BASE_IMAGE=${{ env.BASE_IMAGE_NAME }}:${{ env.BASE_IMAGE_TAG }}-python${{ matrix.python_version }}
85 | push: ${{ github.ref_type == 'tag' }}
86 | tags: |
87 | ${{ env.IMAGE_NAME }}:${{ github.ref_name }}-python${{ matrix.python_version }}
88 | ${{ env.latest && format('{0}:{1}', env.IMAGE_NAME, github.ref_name) || '' }}
89 | ${{ env.IMAGE_NAME }}:latest-python${{ matrix.python_version }}
90 | ${{ env.latest && format('{0}:latest', env.IMAGE_NAME) || '' }}
91 | cache-from: type=local,src=/tmp/.buildx-cache
92 | cache-to: type=local,dest=/tmp/.buildx-cache-new
93 |
94 | # Temp fix
95 | # https://github.com/docker/build-push-action/issues/252
96 | # https://github.com/moby/buildkit/issues/1896
97 | - name: Move cache
98 | run: |
99 | rm -rf /tmp/.buildx-cache
100 | mv /tmp/.buildx-cache-new /tmp/.buildx-cache
101 |
102 | publish_python_package:
103 | name: Publish the Python package
104 | runs-on: 'ubuntu-20.04'
105 | needs: 'tests_and_docker_build'
106 | if: github.event_name == 'release'
107 | steps:
108 | - name: 'Checkout repository'
109 | uses: actions/checkout@v4
110 | with:
111 | fetch-depth: 0
112 |
113 | - name: Build package
114 | run: docker run --rm -v "$(pwd):/src" "$BASE_IMAGE_NAME" bash -c "pip install build && python -m build -s"
115 | shell: bash
116 |
117 | - name: 'Deploy package to the Github release'
118 | env:
119 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
120 | GITHUB_REPOSITORY: ${{ github.repository }}
121 | uses: svenstaro/upload-release-action@v2
122 | with:
123 | repo_token: ${{ secrets.GITHUB_TOKEN }}
124 | file: 'dist/*'
125 | file_glob: true
126 | tag: ${{ github.ref }}
127 |
128 | - name: Publish to PyPI
129 | uses: pypa/gh-action-pypi-publish@release/v1
130 | with:
131 | repository-url: ${{ vars.PYPI_REPOSITORY_URL }}
132 | password: ${{ secrets.PYPI_TOKEN }}
133 | ...
--------------------------------------------------------------------------------
/nansat/fonts/LICENSE:
--------------------------------------------------------------------------------
1 | Fonts are (c) Bitstream (see below). DejaVu changes are in public domain.
2 | Glyphs imported from Arev fonts are (c) Tavmjong Bah (see below)
3 |
4 | Bitstream Vera Fonts Copyright
5 | ------------------------------
6 |
7 | Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Bitstream Vera is
8 | a trademark of Bitstream, Inc.
9 |
10 | Permission is hereby granted, free of charge, to any person obtaining a copy
11 | of the fonts accompanying this license ("Fonts") and associated
12 | documentation files (the "Font Software"), to reproduce and distribute the
13 | Font Software, including without limitation the rights to use, copy, merge,
14 | publish, distribute, and/or sell copies of the Font Software, and to permit
15 | persons to whom the Font Software is furnished to do so, subject to the
16 | following conditions:
17 |
18 | The above copyright and trademark notices and this permission notice shall
19 | be included in all copies of one or more of the Font Software typefaces.
20 |
21 | The Font Software may be modified, altered, or added to, and in particular
22 | the designs of glyphs or characters in the Fonts may be modified and
23 | additional glyphs or characters may be added to the Fonts, only if the fonts
24 | are renamed to names not containing either the words "Bitstream" or the word
25 | "Vera".
26 |
27 | This License becomes null and void to the extent applicable to Fonts or Font
28 | Software that has been modified and is distributed under the "Bitstream
29 | Vera" names.
30 |
31 | The Font Software may be sold as part of a larger software package but no
32 | copy of one or more of the Font Software typefaces may be sold by itself.
33 |
34 | THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
35 | OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY,
36 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT,
37 | TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL BITSTREAM OR THE GNOME
38 | FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING
39 | ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
40 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
41 | THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE
42 | FONT SOFTWARE.
43 |
44 | Except as contained in this notice, the names of Gnome, the Gnome
45 | Foundation, and Bitstream Inc., shall not be used in advertising or
46 | otherwise to promote the sale, use or other dealings in this Font Software
47 | without prior written authorization from the Gnome Foundation or Bitstream
48 | Inc., respectively. For further information, contact: fonts at gnome dot
49 | org.
50 |
51 | Arev Fonts Copyright
52 | ------------------------------
53 |
54 | Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved.
55 |
56 | Permission is hereby granted, free of charge, to any person obtaining
57 | a copy of the fonts accompanying this license ("Fonts") and
58 | associated documentation files (the "Font Software"), to reproduce
59 | and distribute the modifications to the Bitstream Vera Font Software,
60 | including without limitation the rights to use, copy, merge, publish,
61 | distribute, and/or sell copies of the Font Software, and to permit
62 | persons to whom the Font Software is furnished to do so, subject to
63 | the following conditions:
64 |
65 | The above copyright and trademark notices and this permission notice
66 | shall be included in all copies of one or more of the Font Software
67 | typefaces.
68 |
69 | The Font Software may be modified, altered, or added to, and in
70 | particular the designs of glyphs or characters in the Fonts may be
71 | modified and additional glyphs or characters may be added to the
72 | Fonts, only if the fonts are renamed to names not containing either
73 | the words "Tavmjong Bah" or the word "Arev".
74 |
75 | This License becomes null and void to the extent applicable to Fonts
76 | or Font Software that has been modified and is distributed under the
77 | "Tavmjong Bah Arev" names.
78 |
79 | The Font Software may be sold as part of a larger software package but
80 | no copy of one or more of the Font Software typefaces may be sold by
81 | itself.
82 |
83 | THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
84 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
85 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
86 | OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL
87 | TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
88 | INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
89 | DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
90 | FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
91 | OTHER DEALINGS IN THE FONT SOFTWARE.
92 |
93 | Except as contained in this notice, the name of Tavmjong Bah shall not
94 | be used in advertising or otherwise to promote the sale, use or other
95 | dealings in this Font Software without prior written authorization
96 | from Tavmjong Bah. For further information, contact: tavmjong @ free
97 | . fr.
98 |
99 | $Id: LICENSE 2133 2007-11-28 02:46:28Z lechimp $
100 |
--------------------------------------------------------------------------------
/nansat_integration_tests/test_mappers.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, unicode_literals, division
2 | import unittest
3 | import sys
4 | import datetime
5 | import json
6 |
7 | import pythesint as pti
8 |
9 | from nansat import Nansat
10 | from nansat.nansat import _import_mappers
11 | from nansat_integration_tests.mapper_test_archive import DataForTestingMappers, DataForTestingOnlineMappers
12 |
13 |
14 | class TestDataForTestingMappers(unittest.TestCase):
15 | def test_create_test_data(self):
16 | ''' should create TestData instance '''
17 | t = DataForTestingMappers()
18 | self.assertTrue(hasattr(t, 'mapperData'))
19 |
20 |
21 | # https://nose.readthedocs.org/en/latest/writing_tests.html#test-generators
22 | # The x-flag results in the test stopping at first failure or error - use it
23 | # for easier debugging:
24 | # nosetests -v -x integration_tests.test_mappers:TestAllMappers.test_mappers_basic
25 | class TestAllMappers(object):
26 |
27 | @classmethod
28 | def setup_class(cls):
29 | ''' Download testing data '''
30 | cls.testData = DataForTestingMappers()
31 |
32 | def test_mappers_basic(self):
33 | ''' Basic mapper test '''
34 | for kwargs in self.testData.mapperData:
35 | fileName = kwargs.pop('fileName')
36 | mapperName = kwargs.pop('mapperName')
37 | # Test call to Nansat, mapper not specified
38 | sys.stderr.write('\n -> '+fileName+'\n')
39 | yield self.open_with_nansat, fileName, None, kwargs
40 | # Test call to Nansat, mapper specified
41 | sys.stderr.write('\n'+mapperName+' -> '+fileName+'\n')
42 | yield self.open_with_nansat, fileName, mapperName, kwargs
43 |
44 | def open_with_nansat(self, filePath, mapper=None, kwargs=None):
45 | ''' Ensures that you can open the filePath as a Nansat object '''
46 | if kwargs is None:
47 | kwargs = {}
48 |
49 | try:
50 | if mapper:
51 | n = Nansat(filePath, mapperName=mapper, **kwargs)
52 | else:
53 | n = Nansat(filePath, **kwargs)
54 | except Exception as e:
55 |             raise Exception('%s: %s' % (filePath, str(e)))
56 | assert type(n) == Nansat
57 |
58 | def test_mappers_advanced(self):
59 | ''' Run tests to check DIF/GCMD metadata content in all mappers'''
60 | for dd in self.testData.mapperData:
61 | sys.stderr.write('\nMapper '+dd['mapperName']+' -> '+dd['fileName']+'\n')
62 | n = Nansat(dd['fileName'], mapperName=dd['mapperName'])
63 | yield self.is_correct_mapper, n, dd['mapperName']
64 | yield self.has_metadata_time_coverage_start, n
65 | yield self.has_metadata_time_coverage_end, n
66 | yield self.has_metadata_platform, n
67 | yield self.has_metadata_instrument, n
68 |
69 | # Test that SAR objects have sigma0 intensity bands in addition
70 | # to complex bands
71 | if n.has_band(
72 | 'surface_backwards_scattering_coefficient_of_radar_wave'
73 | ):
74 | yield self.exist_intensity_band, n
75 |
76 | def has_metadata_time_coverage_start(self, n):
77 | ''' Has start time '''
78 | assert type(n.time_coverage_start) == datetime.datetime
79 |
80 | def has_metadata_time_coverage_end(self, n):
81 | assert type(n.time_coverage_end) == datetime.datetime
82 |
83 | def has_metadata_platform(self, n):
84 | meta1 = json.loads(n.get_metadata('platform'))
85 | meta1ShortName = meta1['Short_Name']
86 | meta2 = pti.get_gcmd_platform(meta1ShortName)
87 |
88 | assert type(meta1) == dict
89 | assert meta1 == meta2
90 |
91 | def has_metadata_instrument(self, n):
92 | meta1 = json.loads(n.get_metadata('instrument'))
93 | meta1ShortName = meta1['Short_Name']
94 | meta2 = pti.get_gcmd_instrument(meta1ShortName)
95 |
96 | assert type(meta1) == dict
97 | assert meta1 == meta2
98 |
99 | def is_correct_mapper(self, n, mapper):
100 | assert n.mapper == mapper
101 |
102 | def exist_intensity_band(self, n):
103 | ''' Test if intensity bands exist for complex data '''
104 | allBandNames = []
105 | complexBandNames = []
106 | for iBand in range(n.vrt.dataset.RasterCount):
107 | iBandName = n.get_metadata(band=iBand + 1)['name']
108 | allBandNames.append(iBandName)
109 | if '_complex' in iBandName:
110 | complexBandNames.append(iBandName)
111 |
112 | for iComplexName in complexBandNames:
113 | assert iComplexName.replace('_complex', '') in allBandNames
114 |
115 | class TestOnlineMappers(TestAllMappers):
116 | @classmethod
117 | def setup_class(cls):
118 | ''' Download testing data '''
119 | cls.testData = DataForTestingOnlineMappers()
120 |
121 |
122 | if __name__ == '__main__':
123 | # nansatMappers = _import_mappers()
124 | # for mapper in nansatMappers:
125 | # test_name = 'test_%s'%mapper
126 | unittest.main()
127 |
128 |
--------------------------------------------------------------------------------
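
has_metadata_platform and has_metadata_instrument above check that the JSON metadata written by a mapper round-trips through pythesint's GCMD vocabularies. A condensed sketch of the same check outside the test harness, assuming the GCMD vocabularies are available locally (the 'merged analysis' keyword is taken from mapper_hirlam.py):

import json
import pythesint as pti

# store metadata the way a mapper does
platform_json = json.dumps(pti.get_gcmd_platform('merged analysis'))

# ...and verify it the way has_metadata_platform does
meta1 = json.loads(platform_json)
meta2 = pti.get_gcmd_platform(meta1['Short_Name'])
assert isinstance(meta1, dict) and meta1 == meta2
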
/nansat/geolocation.py:
--------------------------------------------------------------------------------
1 | # Name: geolocation.py
2 | # Purpose: Container of Geolocation class
3 | # Authors: Anton Korosov
4 | # Created: 14.01.2018
5 | # Copyright: (c) NERSC 2011 - 2018
6 | # Licence:
7 | # This file is part of NANSAT.
8 | # NANSAT is free software: you can redistribute it and/or modify
9 | # it under the terms of the GNU General Public License as published by
10 | # the Free Software Foundation, version 3 of the License.
11 | # http://www.gnu.org/licenses/gpl-3.0.html
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
15 | from __future__ import absolute_import
16 |
17 | from nansat.utils import gdal, osr
18 |
19 | from nansat.nsr import NSR
20 |
21 |
22 | class Geolocation(object):
23 | """Container for GEOLOCATION data
24 |
25 | Keeps references to bands with X and Y coordinates, offset and step
26 | of pixel and line. All information is stored in dictionary self.data
27 |
28 |     Instance of Geolocation is used in VRT and is usually created in
29 |     a Mapper.
30 | """
31 | # instance attributes
32 | data = None
33 | x_vrt = None
34 | y_vrt = None
35 |
36 | def __init__(self, x_vrt, y_vrt, **kwargs):
37 | """Create Geolocation object from input VRT objects
38 |
39 | Parameters
40 | -----------
41 |         x_vrt : VRT
42 |             dataset with X-coordinates
43 |         y_vrt : VRT
44 |             dataset with Y-coordinates
45 | **kwargs : dict
46 | parameters for self._init_data()
47 |
48 | Note
49 | ----
50 |         Modifies self.x_vrt, self.y_vrt, self.data
51 |
52 | """
53 | # dictionary with all metadata
54 | self.data = dict()
55 | # VRT objects
56 | self.x_vrt = x_vrt
57 | self.y_vrt = y_vrt
58 |
59 | self._init_data(x_vrt.filename, y_vrt.filename, **kwargs)
60 |
61 | def _init_data(self, x_filename, y_filename, x_band=1, y_band=1, srs=None,
62 | line_offset=0, line_step=1,
63 | pixel_offset=0, pixel_step=1):
64 | """Init data of Geolocation object from input parameters
65 | Parameters
66 | -----------
67 | x_filename : str
68 | name of file for X-dataset
69 | y_filename : str
70 | name of file for Y-dataset
71 | x_band : int
72 | number of the band in the X-dataset
73 | y_band : int
74 | number of the band in the Y-dataset
75 | srs : str
76 | WKT
77 | line_offset : int
78 | offset of first line
79 | line_step : int
80 | step of lines
81 | pixel_offset : int
82 | offset of first pixel
83 | pixel_step : int
84 | step of pixels
85 |
86 | Notes
87 | -----
88 | Saves everything in self.data dict
89 |
90 | """
91 | if srs is None:
92 | srs = NSR().wkt
93 | self.data['SRS'] = srs
94 | self.data['X_DATASET'] = x_filename
95 | self.data['Y_DATASET'] = y_filename
96 | self.data['X_BAND'] = str(x_band)
97 | self.data['Y_BAND'] = str(y_band)
98 | self.data['LINE_OFFSET'] = str(line_offset)
99 | self.data['LINE_STEP'] = str(line_step)
100 | self.data['PIXEL_OFFSET'] = str(pixel_offset)
101 | self.data['PIXEL_STEP'] = str(pixel_step)
102 |
103 | @classmethod
104 | def from_dataset(cls, dataset):
105 | """Create geolocation from GDAL dataset
106 | Parameters
107 | ----------
108 | dataset : gdal.Dataset
109 | input dataset to copy Geolocation metadata from
110 | """
111 | self = cls.__new__(cls) # empty object
112 | self.x_vrt = None
113 | self.y_vrt = None
114 | self.data = dataset.GetMetadata('GEOLOCATION')
115 | return self
116 |
117 | @classmethod
118 | def from_filenames(cls, x_filename, y_filename, **kwargs):
119 | """Create geolocation from names of files with geolocation
120 | Parameters
121 | ----------
122 | x_filename : str
123 | name of file for X-dataset
124 | y_filename : str
125 | name of file for Y-dataset
126 | **kwargs : dict
127 | parameters for self._init_data()
128 | """
129 | self = cls.__new__(cls) # empty object
130 | self.x_vrt = None
131 | self.y_vrt = None
132 | self.data = {}
133 | self._init_data(x_filename, y_filename, **kwargs)
134 | return self
135 |
136 | def get_geolocation_grids(self):
137 | """Read values of geolocation grids"""
138 | lon_dataset = gdal.Open(self.data['X_DATASET'])
139 | lon_grid = lon_dataset.GetRasterBand(int(self.data['X_BAND'])).ReadAsArray()
140 | lat_dataset = gdal.Open(self.data['Y_DATASET'])
141 | lat_grid = lat_dataset.GetRasterBand(int(self.data['Y_BAND'])).ReadAsArray()
142 | return lon_grid, lat_grid
143 |
--------------------------------------------------------------------------------
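
Geolocation.from_filenames above fills the GDAL GEOLOCATION metadata dictionary directly from file names, without opening the datasets. A minimal sketch, assuming hypothetical single-band rasters lon.tif and lat.tif holding longitude and latitude:

from nansat.geolocation import Geolocation

# hypothetical files; each holds one band of coordinates
geoloc = Geolocation.from_filenames('lon.tif', 'lat.tif', x_band=1, y_band=1)

# the dict passed to GDAL as the GEOLOCATION metadata domain
print(geoloc.data['X_DATASET'], geoloc.data['Y_DATASET'])
print(geoloc.data['LINE_STEP'], geoloc.data['PIXEL_STEP'], geoloc.data['SRS'][:20])
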
/nansat/mappers/mapper_viirs_l1.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_viirs_l1.py
2 | # Purpose: Mapping for VIIRS Level-1B data
3 | # Authors: Anton Korosov
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | from __future__ import absolute_import, unicode_literals, division
8 |
9 | import os
10 | import glob
11 | from datetime import datetime, timedelta
12 | from math import ceil
13 | try:
14 | from scipy.ndimage.filters import gaussian_filter
15 | except ImportError:
16 | IMPORT_SCIPY = False
17 | else:
18 | IMPORT_SCIPY = True
19 |
20 | from nansat.nsr import NSR
21 | from nansat.vrt import VRT
22 | from nansat.utils import gdal, ogr
23 | from nansat.exceptions import WrongMapperError, NansatReadError
24 |
25 |
26 | class Mapper(VRT):
27 | ''' VRT with mapping of WKV for VIIRS Level 1B '''
28 |
29 | def __init__(self, filename, gdalDataset, gdalMetadata,
30 | GCP_COUNT0=5, GCP_COUNT1=20, pixelStep=1,
31 | lineStep=1, **kwargs):
32 | ''' Create VIIRS VRT '''
33 |
34 | if not 'GMTCO_npp_' in filename:
35 | raise WrongMapperError(filename)
36 | ifiledir = os.path.split(filename)[0]
37 |         ifiles = glob.glob(os.path.join(ifiledir, 'SVM??_npp_d*_obpg_ops.h5'))
38 | ifiles.sort()
39 |
40 | if not IMPORT_SCIPY:
41 | raise NansatReadError('VIIRS data cannot be read because scipy is not installed')
42 |
43 | viirsWavelengths = [None, 412, 445, 488, 555, 672, 746, 865, 1240,
44 | 1378, 1610, 2250, 3700, 4050, 8550, 10736, 12013]
45 |
46 | # create empty VRT dataset with geolocation only
47 | xDatasetSource = ('HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Longitude'
48 | % filename)
49 | xDatasetBand = 1
50 | xDataset = gdal.Open(xDatasetSource)
51 | self._init_from_gdal_dataset(xDataset)
52 |
53 | metaDict = []
54 | for ifile in ifiles:
55 | ifilename = os.path.split(ifile)[1]
56 | print(ifilename)
57 | bNumber = int(ifilename[3:5])
58 | print(bNumber)
59 | bWavelength = viirsWavelengths[bNumber]
60 | print(bWavelength)
61 | SourceFilename = ('HDF5:"%s"://All_Data/VIIRS-M%d-SDR_All/Radiance'
62 | % (ifile, bNumber))
63 | print(SourceFilename)
64 | metaEntry = {'src': {'SourceFilename': SourceFilename,
65 | 'SourceBand': 1},
66 | 'dst': {'wkv': 'toa_outgoing_spectral_radiance',
67 | 'wavelength': str(bWavelength),
68 | 'suffix': str(bWavelength)}
69 | }
70 | metaDict.append(metaEntry)
71 |
72 | # add bands with metadata and corresponding values to the empty VRT
73 | self.create_bands(metaDict)
74 |
75 | xVRTArray = xDataset.ReadAsArray()
76 | xVRTArray = gaussian_filter(xVRTArray, 5).astype('float32')
77 | xVRT = VRT.from_array(xVRTArray)
78 |
79 | yDatasetSource = ('HDF5:"%s"://All_Data/VIIRS-MOD-GEO-TC_All/Latitude'
80 | % filename)
81 | yDatasetBand = 1
82 | yDataset = gdal.Open(yDatasetSource)
83 | yVRTArray = yDataset.ReadAsArray()
84 | yVRTArray = gaussian_filter(yVRTArray, 5).astype('float32')
85 | yVRT = VRT.from_array(yVRTArray)
86 |
87 | # estimate pixel/line step
88 | self.logger.debug('pixel/lineStep %f %f' % (pixelStep, lineStep))
89 |
90 | # ==== ADD GCPs and Pojection ====
91 | # get lat/lon matrices
92 | longitude = xVRT.dataset.GetRasterBand(1).ReadAsArray()
93 | latitude = yVRT.dataset.GetRasterBand(1).ReadAsArray()
94 |
95 | # estimate step of GCPs
96 | step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT0))
97 | step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT1))
98 | self.logger.debug('gcpCount: %d %d %d %d, %d %d',
99 | latitude.shape[0], latitude.shape[1],
100 | GCP_COUNT0, GCP_COUNT1, step0, step1)
101 |
102 | # generate list of GCPs
103 | gcps = []
104 | k = 0
105 | for i0 in range(0, latitude.shape[0], step0):
106 | for i1 in range(0, latitude.shape[1], step1):
107 | # create GCP with X,Y,pixel,line from lat/lon matrices
108 | lon = float(longitude[i0, i1])
109 | lat = float(latitude[i0, i1])
110 | if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90):
111 | gcp = gdal.GCP(lon, lat, 0,
112 | i1 * pixelStep, i0 * lineStep)
113 | self.logger.debug('%d %d %d %f %f',
114 | k, gcp.GCPPixel, gcp.GCPLine,
115 | gcp.GCPX, gcp.GCPY)
116 | gcps.append(gcp)
117 | k += 1
118 |
119 | # append GCPs and lat/lon projection to the vsiDataset
120 | self.dataset.SetGCPs(gcps, NSR().wkt)
121 |
122 | # remove geolocation array
123 | self._remove_geolocation()
124 |
125 |
--------------------------------------------------------------------------------
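
The GCP loop above subsamples the smoothed longitude/latitude grids into gdal.GCP objects before attaching them with a geographic SRS. A condensed sketch of the same pattern on a synthetic grid (array sizes and GCP counts are illustrative):

import numpy as np
from nansat.utils import gdal
from nansat.nsr import NSR

# synthetic lon/lat grids standing in for the smoothed VIIRS geolocation
lat, lon = np.meshgrid(np.linspace(60, 70, 200),
                       np.linspace(-10, 10, 300), indexing='ij')

GCP_COUNT0, GCP_COUNT1 = 5, 20
step0 = max(1, int(float(lat.shape[0]) / GCP_COUNT0))
step1 = max(1, int(float(lat.shape[1]) / GCP_COUNT1))

gcps = []
for i0 in range(0, lat.shape[0], step0):
    for i1 in range(0, lat.shape[1], step1):
        # GCP(X, Y, Z, pixel, line)
        gcps.append(gdal.GCP(float(lon[i0, i1]), float(lat[i0, i1]), 0, i1, i0))

print(len(gcps), NSR().wkt[:16])
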
/nansat/mappers/mapper_opendap_sentinel1.py:
--------------------------------------------------------------------------------
1 | # Name: mapper_opendap_sentinel1.py
2 | # Purpose: Nansat mapping for ESA Sentinel-1 data from the Norwegian ground segment
3 | # Author: Morten W. Hansen
4 | # Licence: This file is part of NANSAT. You can redistribute it or modify
5 | # under the terms of GNU General Public License, v.3
6 | # http://www.gnu.org/licenses/gpl-3.0.html
7 | import os
8 | from datetime import datetime
9 | import json
10 | import warnings
11 |
12 | import numpy as np
13 | from netCDF4 import Dataset
14 | from nansat.utils import gdal
15 |
16 | try:
17 | import scipy
18 | except ImportError:
19 | IMPORT_SCIPY = False
20 | else:
21 | IMPORT_SCIPY = True
22 |
23 | import pythesint as pti
24 |
25 | from nansat.mappers.sentinel1 import Sentinel1
26 | from nansat.mappers.opendap import Opendap
27 | from nansat.vrt import VRT
28 | from nansat.nsr import NSR
29 | from nansat.utils import initial_bearing
30 | from nansat.exceptions import NansatReadError
31 |
32 | class Mapper(Opendap, Sentinel1):
33 |
34 | baseURLs = [
35 | 'https://nbstds.met.no/thredds/dodsC/NBS/S1A',
36 | 'https://nbstds.met.no/thredds/dodsC/NBS/S1B',
37 | 'http://nbstds.met.no/thredds/dodsC/NBS/S1A',
38 | 'http://nbstds.met.no/thredds/dodsC/NBS/S1B',
39 | ]
40 |
41 | timeVarName = 'time'
42 | xName = 'x'
43 | yName = 'y'
44 | timeCalendarStart = '1981-01-01'
45 | srcDSProjection = NSR().wkt
46 |
47 | def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
48 | ds=None, bands=None, cachedir=None, *args, **kwargs):
49 |
50 | self.test_mapper(filename)
51 |
52 | if not IMPORT_SCIPY:
53 | raise NansatReadError('Sentinel-1 data cannot be read because scipy is not installed')
54 |
55 | timestamp = date if date else self.get_date(filename)
56 |
57 | self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
58 |
59 | Sentinel1.__init__(self, filename)
60 | self.add_calibrated_nrcs(filename)
61 | self.add_nrcs_VV_from_HH(filename)
62 |
63 | def add_calibrated_nrcs(self, filename):
64 | layer_time_id, layer_date = Opendap.get_layer_datetime(None,
65 | self.convert_dstime_datetimes(self.get_dataset_time()))
66 | polarizations = [self.ds.polarisation[i:i+2] for i in range(0,len(self.ds.polarisation),2)]
67 | for pol in polarizations:
68 | dims = list(self.ds.variables['dn_%s' %pol].dimensions)
69 | dims[dims.index(self.timeVarName)] = layer_time_id
70 | src = [
71 | self.get_metaitem(filename, 'Amplitude_%s' %pol, dims)['src'],
72 | self.get_metaitem(filename, 'sigmaNought_%s' %pol, dims)['src']
73 | ]
74 | dst = {
75 | 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
76 | 'PixelFunctionType': 'Sentinel1Calibration',
77 | 'polarization': pol,
78 | 'suffix': pol,
79 | }
80 | self.create_band(src, dst)
81 | self.dataset.FlushCache()
82 |
83 | def add_nrcs_VV_from_HH(self, filename):
84 | if not 'Amplitude_HH' in self.ds.variables.keys():
85 | return
86 | layer_time_id, layer_date = Opendap.get_layer_datetime(None,
87 | self.convert_dstime_datetimes(self.get_dataset_time()))
88 | dims = list(self.ds.variables['dn_HH'].dimensions)
89 | dims[dims.index(self.timeVarName)] = layer_time_id
90 | src = [
91 | self.get_metaitem(filename, 'Amplitude_HH', dims)['src'],
92 | self.get_metaitem(filename, 'sigmaNought_HH', dims)['src'],
93 | {'SourceFilename': self.band_vrts['inciVRT'].filename, 'SourceBand': 1}
94 | ]
95 | dst = {
96 | 'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
97 | 'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
98 | 'polarization': 'VV',
99 | 'suffix': 'VV'}
100 | self.create_band(src, dst)
101 | self.dataset.FlushCache()
102 |
103 | @staticmethod
104 | def get_date(filename):
105 | """Extract date and time parameters from filename and return
106 | it as a formatted (isoformat) string
107 |
108 | Parameters
109 | ----------
110 |
111 | filename: str
112 |             name of the Sentinel-1 dataset (local file path or OPeNDAP URL)
113 |
114 | Returns
115 | -------
116 | str, YYYY-mm-ddThh:MMZ
117 |
118 | """
119 | _, filename = os.path.split(filename)
120 | t = datetime.strptime(filename.split('_')[4], '%Y%m%dT%H%M%S')
121 | return datetime.strftime(t, '%Y-%m-%dT%H:%M:%SZ')
122 |
123 | def convert_dstime_datetimes(self, ds_time):
124 | """Convert time variable to np.datetime64"""
125 | ds_datetimes = np.array(
126 | [(np.datetime64(self.timeCalendarStart).astype('M8[s]')
127 | + np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
128 | return ds_datetimes
129 |
130 | def get_geotransform(self):
131 | """ Return fake and temporary geotransform. This will be replaced by gcps in
132 | Sentinel1.__init__
133 | """
134 | xx = self.ds.variables['lon'][0:100:50, 0].data
135 | yy = self.ds.variables['lat'][0, 0:100:50].data
136 | return xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]
137 |
138 |
--------------------------------------------------------------------------------
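
get_date above extracts the acquisition start time from the fifth underscore-separated field of the standard Sentinel-1 product name. A quick illustration with a made-up but correctly structured file name:

from datetime import datetime

# illustrative Sentinel-1 product name; only the fifth '_' field is parsed
filename = 'S1A_IW_GRDM_1SDV_20200101T054321_20200101T054346_030000_036FC9_1234.nc'
t = datetime.strptime(filename.split('_')[4], '%Y%m%dT%H%M%S')
print(datetime.strftime(t, '%Y-%m-%dT%H:%M:%SZ'))  # 2020-01-01T05:43:21Z
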