├── .gitignore
├── AUTHORS.md
├── LICENSE.txt
├── README.md
├── bufr_table_archives.txt
├── bufrdc_tables.zip
├── bufrtables_ecCodes.zip
├── bufrtables_libDWD.zip
├── datacat.table
├── docs
├── .gitignore
├── Makefile
└── source
│ ├── _static
│ ├── 207003.bufr
│ ├── 207003.json
│ └── LICENSE.txt
│ ├── api.rst
│ ├── cmd.rst
│ ├── conf.py
│ ├── examples.rst
│ ├── iasi_plot.rst
│ ├── index.rst
│ ├── json.rst
│ ├── tables.rst
│ └── usage.rst
├── examples
├── iasi_ctp.png
├── iasi_plot.py
├── iasi_search.py
├── iasi_show.py
├── iedx61.png
└── region_config.cfg
├── operator.table
├── setup.py
├── test
├── .gitignore
├── metop_mhs.bufr
└── test_trollbufr.py
└── trollbufr
├── .gitignore
├── __init__.py
├── bufr.py
├── bufr_main.py
├── coder
├── .gitignore
├── __init__.py
├── bdata.py
├── bufr_sect.py
├── bufr_types.py
├── errors.py
├── functions.py
├── load_tables.py
├── operator.py
├── parse.py.skel
├── parse_bufrdc.py
├── parse_eccodes.py
├── parse_libdwd.py
├── subset.py
└── tables.py
├── load_file.py
├── update.py
└── version.py
/.gitignore:
--------------------------------------------------------------------------------
1 | /data/
2 | /tables/
3 | /bufrtables_bufr-tools.zip
4 | /eccodes-2.0.0-Source.tar.gz
5 | /eccodes-2.0.2-Source.tar.gz
6 | /grib_accessor_class_apply_operators.c
7 | /TODO
8 | /.project
9 | /.pydevproject
10 | /build/
11 | /dist/
12 | /trollbufr.egg-info/
13 | /gallery/
14 | /.settings/
15 | /test_verify/
16 | /plot/
17 |
--------------------------------------------------------------------------------
/AUTHORS.md:
--------------------------------------------------------------------------------
1 | # Project Contributors
2 |
3 | The following people have made contributions to this project:
4 |
5 | - [Alex Maul (alexmaul)](https://github.com/alexmaul)
6 | - [Barry Baker (bbakernoaa)](https://github.com/bbakernoaa)
7 | - [Gerrit Holl (gerritholl)](https://github.com/gerritholl)
8 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # trollBUFR
2 | A pure pythonic reader/writer for BUFR, the meteorological
3 | "Binary Universal Format for data Representation"
4 |
5 | _(C) 2016-2018 Alexander Maul_
6 |
7 | ## Read-the-docs
8 | [http://trollbufr.readthedocs.io](http://trollbufr.readthedocs.io/)
9 |
10 | ## BUFR-Tables
11 | TDCF table sets are provided as ZIP-archives in different formats, or go to:
12 | * [ECMWF eccodes](https://software.ecmwf.int/wiki/display/ECC/ecCodes+Home).
13 | * [DWD's OpenData server](https://opendata.dwd.de/weather/lib/bufr/).
14 |
15 | If files `operator.table` and/or `datacat.table` are not present in your table
16 | directory, there are standard ones in this project's root.
17 | These files are not required for decoding, but optional for readable output.
18 |
19 | ## Command-line program "trollbufr"
20 | Command-line interface created by setup-py.
21 |
22 | * It reads BUFR (with abbreviated heading line, if present) from file(s) and
23 | writes human-readable to stdout.
24 | * Decoding to JSON-formatted file.
25 | * Encoding JSON-formatted values to BUFR.
26 |
27 | ## News
28 | `Vers. 0.10.0`
29 | All code supports Python2 *and* Python3, without code-conversion (i.e. by 2to3).
30 |
31 | With Python3 the `setup.py` installs the command-line script with a different
32 | name `trollbufr3`, to reflect its usage of Python3 and to have both versions
33 | callable.
34 |
35 | `Vers. 0.6.0`
36 | With version 0.6.0 a feature for encoding a JSON formatted file into binary BUFR
37 | is added.
38 |
39 | The command-line options are changed:
40 |
41 | * print information and descriptor sections: `-d` became `-m`.
42 | * decode BUFR: `-r` became `-d`.
43 |
44 | New is `-j` to write any output in a JSON format.
45 |
46 | ## To-Do
47 | There are still things to do:
48 |
49 | * Implement the remaining obscure operators
50 |
--------------------------------------------------------------------------------
/bufr_table_archives.txt:
--------------------------------------------------------------------------------
1 | https://opendata.dwd.de/weather/lib/bufr/bufrtables_libdwd.tar.bz2
2 | https://opendata.dwd.de/weather/lib/bufr/bufrtables_ecCodes-local-dwd.tar.bz2
3 | https://opendata.dwd.de/weather/lib/bufr/bufrtables_bufr-tools.tar.bz2
4 |
--------------------------------------------------------------------------------
/bufrdc_tables.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pytroll/trollbufr/8979860f4fdd733b91eb1db9e1fc08b07764e3ec/bufrdc_tables.zip
--------------------------------------------------------------------------------
/bufrtables_ecCodes.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pytroll/trollbufr/8979860f4fdd733b91eb1db9e1fc08b07764e3ec/bufrtables_ecCodes.zip
--------------------------------------------------------------------------------
/bufrtables_libDWD.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pytroll/trollbufr/8979860f4fdd733b91eb1db9e1fc08b07764e3ec/bufrtables_libDWD.zip
--------------------------------------------------------------------------------
/datacat.table:
--------------------------------------------------------------------------------
1 | # BUFR Table A - Data category
2 | 0|Surface data - land
3 | 1|Surface data - sea
4 | 2|Vertical soundings (other than satellite)
5 | 3|Vertical soundings (satellite)
6 | 4|Single level upper-air data (other than satellite)
7 | 5|Single level upper-air data (satellite)
8 | 6|Radar data
9 | 7|Synoptic features
10 | 8|Physical/chemical constituents
11 | 9|Dispersal and transport
12 | 10|Radiological data
13 | 11|BUFR tables, complete replacement or update
14 | 12|Surface data (satellite)
15 | 13|Forecasts
16 | 14|Warnings
17 | 20|Status information
18 | 21|Radiances (satellite measured)
19 | 22|Radar (satellite) but not altimeter and scatterometer
20 | 23|Lidar (satellite)
21 | 24|Scatterometry (satellite)
22 | 25|Altimetry (satellite)
23 | 26|Spectrometry (satellite)
24 | 27|Gravity measurement (satellite)
25 | 28|Precision orbit (satellite)
26 | 29|Space environment (satellite)
27 | 30|Calibration datasets (satellite)
28 | 31|Oceanographic data
29 | 101|Image data
30 | 240|For experimental use
31 | 241|For experimental use
32 | 242|For experimental use
33 | 243|For experimental use
34 | 244|For experimental use
35 | 245|For experimental use
36 | 246|For experimental use
37 | 247|For experimental use
38 | 248|For experimental use
39 | 249|For experimental use
40 | 250|For experimental use
41 | 251|For experimental use
42 | 252|For experimental use
43 | 253|For experimental use
44 | 254|For experimental use
45 | 255|Other category, or local use
46 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | /build/
2 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
16 |
17 | .PHONY: help
18 | help:
19 | 	@echo "Please use \`make <target>' where <target> is one of"
20 | @echo " html to make standalone HTML files"
21 | @echo " dirhtml to make HTML files named index.html in directories"
22 | @echo " singlehtml to make a single large HTML file"
23 | @echo " pickle to make pickle files"
24 | @echo " json to make JSON files"
25 | @echo " htmlhelp to make HTML files and a HTML help project"
26 | @echo " qthelp to make HTML files and a qthelp project"
27 | @echo " applehelp to make an Apple Help Book"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " epub3 to make an epub3"
31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
32 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
34 | @echo " text to make text files"
35 | @echo " man to make manual pages"
36 | @echo " texinfo to make Texinfo files"
37 | @echo " info to make Texinfo files and run them through makeinfo"
38 | @echo " gettext to make PO message catalogs"
39 | @echo " changes to make an overview of all changed/added/deprecated items"
40 | @echo " xml to make Docutils-native XML files"
41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
42 | @echo " linkcheck to check all external links for integrity"
43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
44 | @echo " coverage to run coverage check of the documentation (if enabled)"
45 | @echo " dummy to check syntax errors of document sources"
46 |
47 | .PHONY: clean
48 | clean:
49 | rm -rf $(BUILDDIR)/*
50 |
51 | .PHONY: html
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | .PHONY: dirhtml
58 | dirhtml:
59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
60 | @echo
61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
62 |
63 | .PHONY: singlehtml
64 | singlehtml:
65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | @echo
67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 |
69 | .PHONY: pickle
70 | pickle:
71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
72 | @echo
73 | @echo "Build finished; now you can process the pickle files."
74 |
75 | .PHONY: json
76 | json:
77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
78 | @echo
79 | @echo "Build finished; now you can process the JSON files."
80 |
81 | .PHONY: htmlhelp
82 | htmlhelp:
83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
84 | @echo
85 | @echo "Build finished; now you can run HTML Help Workshop with the" \
86 | ".hhp project file in $(BUILDDIR)/htmlhelp."
87 |
88 | .PHONY: qthelp
89 | qthelp:
90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
91 | @echo
92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/trollBUFR.qhcp"
95 | @echo "To view the help file:"
96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/trollBUFR.qhc"
97 |
98 | .PHONY: applehelp
99 | applehelp:
100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
101 | @echo
102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
103 | @echo "N.B. You won't be able to view it unless you put it in" \
104 | "~/Library/Documentation/Help or install it in your application" \
105 | "bundle."
106 |
107 | .PHONY: devhelp
108 | devhelp:
109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
110 | @echo
111 | @echo "Build finished."
112 | @echo "To view the help file:"
113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/trollBUFR"
114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/trollBUFR"
115 | @echo "# devhelp"
116 |
117 | .PHONY: epub
118 | epub:
119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
120 | @echo
121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
122 |
123 | .PHONY: epub3
124 | epub3:
125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
126 | @echo
127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
128 |
129 | .PHONY: latex
130 | latex:
131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
132 | @echo
133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
135 | "(use \`make latexpdf' here to do that automatically)."
136 |
137 | .PHONY: latexpdf
138 | latexpdf:
139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
140 | @echo "Running LaTeX files through pdflatex..."
141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
143 |
144 | .PHONY: latexpdfja
145 | latexpdfja:
146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
147 | @echo "Running LaTeX files through platex and dvipdfmx..."
148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
150 |
151 | .PHONY: text
152 | text:
153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
154 | @echo
155 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
156 |
157 | .PHONY: man
158 | man:
159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
160 | @echo
161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
162 |
163 | .PHONY: texinfo
164 | texinfo:
165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
166 | @echo
167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
168 | @echo "Run \`make' in that directory to run these through makeinfo" \
169 | "(use \`make info' here to do that automatically)."
170 |
171 | .PHONY: info
172 | info:
173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
174 | @echo "Running Texinfo files through makeinfo..."
175 | make -C $(BUILDDIR)/texinfo info
176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
177 |
178 | .PHONY: gettext
179 | gettext:
180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
181 | @echo
182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
183 |
184 | .PHONY: changes
185 | changes:
186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
187 | @echo
188 | @echo "The overview file is in $(BUILDDIR)/changes."
189 |
190 | .PHONY: linkcheck
191 | linkcheck:
192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
193 | @echo
194 | @echo "Link check complete; look for any errors in the above output " \
195 | "or in $(BUILDDIR)/linkcheck/output.txt."
196 |
197 | .PHONY: doctest
198 | doctest:
199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
200 | @echo "Testing of doctests in the sources finished, look at the " \
201 | "results in $(BUILDDIR)/doctest/output.txt."
202 |
203 | .PHONY: coverage
204 | coverage:
205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
206 | @echo "Testing of coverage in the sources finished, look at the " \
207 | "results in $(BUILDDIR)/coverage/python.txt."
208 |
209 | .PHONY: xml
210 | xml:
211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
212 | @echo
213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
214 |
215 | .PHONY: pseudoxml
216 | pseudoxml:
217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
218 | @echo
219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
220 |
221 | .PHONY: dummy
222 | dummy:
223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
224 | @echo
225 | @echo "Build finished. Dummy builder generates no files."
226 |
--------------------------------------------------------------------------------
/docs/source/_static/207003.bufr:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pytroll/trollbufr/8979860f4fdd733b91eb1db9e1fc08b07764e3ec/docs/source/_static/207003.bufr
--------------------------------------------------------------------------------
/docs/source/_static/207003.json:
--------------------------------------------------------------------------------
1 | [{
2 | "index": 0,
3 | "bufr": [
4 | ["BUFR", 3],
5 | [0, 0, 98, 0, false, 21, 202, 15, 0, 2012, 11, 2, 0, 0, 0],
6 | [],
7 | [2, true, true, ["310060"]],
8 | [
9 | [224, 160, 620, 3, 2012, 11, 2, 0, 0, 27.584, 6675220.0, 2628450.5, 696570.75, 4.96669, 24.54144, 25.41, 282.91, 150.05, 111.28, 1, 1, 9, 7, 5258, 597, 829880.0, 1.0, 0, null, null, 2048, 0, [
10 | [2, 65000.0, 109500.0, 1, 713, 0, 1024],
11 | [3, 121000.0, 175000.0, 714, 1146, 0, 1024],
12 | [4, 215500.0, 255000.0, 1147, 1305, 0, 1024]
13 | ], null, 0, 0, [
14 | [1, 0.0462895],
15 | [2, 0.0454931],
16 | [3, 0.0421172],
17 | [4, 0.0453741],
18 | [5, 0.0431189]
19 | ]],
20 | [224, 160, 620, 3, 2012, 11, 2, 0, 0, 27.584, 6675220.0, 2628450.5, 696570.75, 5.05004, 24.3926, 24.2, 281.97, 150.22, 111.22, 1, 1, 9, 8, 5258, 538, 829880.0, 1.0, 0, null, null, 2048, 0, [
21 | [2, 65000.0, 109500.0, 1, 713, 0, 1024],
22 | [3, 121000.0, 175000.0, 714, 1146, 0, 1024],
23 | [4, 215500.0, 255000.0, 1147, 1305, 0, 1024]
24 | ], null, 0, 0, [
25 | [1, 0.0469285],
26 | [2, 0.0458891],
27 | [3, 0.041389],
28 | [4, 0.0447059],
29 | [5, 0.0430633]
30 | ]]
31 | ],
32 | ["7777"]
33 | ],
34 | "heading": null,
35 | "file": "207003.bufr"
36 | }]
37 |
--------------------------------------------------------------------------------
/docs/source/_static/LICENSE.txt:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | .. api_
2 |
3 | API
4 | =====
5 |
6 | Classes and modules
7 | --------------------
8 |
9 | .. automodule:: trollbufr.bufr
10 | :members:
11 |
12 |
--------------------------------------------------------------------------------
/docs/source/cmd.rst:
--------------------------------------------------------------------------------
1 | Command-line scripts
2 | ====================
3 |
4 | Two command-line scripts will be created by `python setup.py install`.
5 |
6 | Both print the available options with description for each option to STDOUT::
7 |
8 | trollbufr -h
9 | trollbufr --help
10 |
11 | ::
12 |
13 | trollbufr_update -h
14 | trollbufr_update --help
15 |
16 | Executing them without any command-line option, they will just print the list
17 | of options to STDOUT.
18 |
19 | trollbufr
20 | ---------
21 | This is a command-line script to call `bufr_main.py`.
22 |
23 | - Reading data elements::
24 |
25 | trollbufr -t tables -d data/mw/TestBulletin_051
26 |
27 | Decoding all BUFR in the file TestBulletin_051, for each reading the data
28 | elements and writing the descriptor, short name, value, unit to STDOUT.
29 | The table format defaults to "eccodes".
30 |
31 | ::
32 |
33 | trollbufr -t tables -o Test.txt -d data/mw/TestBulletin_051
34 |
35 | Does the same as the first example, but writes to the file Test.txt.
36 |
37 | - Simple list of descriptors::
38 |
39 | trollbufr -t tables -T libdwd -smb 1 data/mw/TestBulletin_051
40 |
41 | Using the table-format `libdwd` and decoding only the first BUFR in the file,
42 | writing the un-expanded list of descriptors (without names, etc.) to STDOUT.
43 |
44 | - Encoding data from a JSON-formatted file as BUFR::
45 |
46 | trollbufr -t tables -e -o Test.bin data/TestBulletin_1.json
47 |
48 | Encodes the JSON-formatted content of the file TestBulletin_1.json and
49 | writes the resulting BUFR to the file Test.bin instead of STDOUT.
50 |
51 |
52 | trollbufr_update
53 | ----------------
54 | A command-line script to download archive file(s) from Internet resources in
55 | order to update the BUFR table files.
56 |
57 | - URL(s) on command-line, strip first two elements from path on extract::
58 |
59 | trollbufr_update -t tables -s 2 -U https://opendata.dwd.de/weather/lib/bufr/bufrtables_libdwd.tar.bz2
60 |
61 | - Set of URLs in a file -- only download, no extract::
62 |
63 | trollbufr_update -t tables --download -F bufr_table_archives.txt
64 |
65 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # trollBUFR documentation build configuration file, created by
4 | # sphinx-quickstart on Tue Nov 29 12:56:03 2016.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | # If extensions (or modules to document with autodoc) are in another directory,
16 | # add these directories to sys.path here. If the directory is relative to the
17 | # documentation root, use os.path.abspath to make it absolute, like shown here.
18 | #
19 | import os
20 | import sys
21 | sys.path.insert(0, os.path.abspath('../..'))
22 | import trollbufr
23 | from trollbufr.version import version
24 |
25 | # -- General configuration ------------------------------------------------
26 |
27 | # If your documentation needs a minimal Sphinx version, state it here.
28 | #
29 | # needs_sphinx = '1.0'
30 |
31 | # Add any Sphinx extension module names here, as strings. They can be
32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
33 | # ones.
34 | extensions = [
35 | 'sphinx.ext.autodoc',
36 | ]
37 |
38 | # Add any paths that contain templates here, relative to this directory.
39 | templates_path = ['_templates']
40 |
41 | # The suffix(es) of source filenames.
42 | # You can specify multiple suffix as a list of string:
43 | #
44 | # source_suffix = ['.rst', '.md']
45 | source_suffix = '.rst'
46 |
47 | # The encoding of source files.
48 | #
49 | # source_encoding = 'utf-8-sig'
50 |
51 | # The master toctree document.
52 | master_doc = 'index'
53 |
54 | # General information about the project.
55 | project = u'trollBUFR'
56 | copyright = u'2016, Alexander Maul'
57 | author = u'Alexander Maul'
58 |
59 | # The version info for the project you're documenting, acts as replacement for
60 | # |version| and |release|, also used in various other places throughout the
61 | # built documents.
62 | #
63 | # The short X.Y version.
64 | #version = __version__
65 | # The full version, including alpha/beta/rc tags.
66 | release = version
67 |
68 | # The language for content autogenerated by Sphinx. Refer to documentation
69 | # for a list of supported languages.
70 | #
71 | # This is also used if you do content translation via gettext catalogs.
72 | # Usually you set "language" from the command line for these cases.
73 | language = None
74 |
75 | # There are two options for replacing |today|: either, you set today to some
76 | # non-false value, then it is used:
77 | #
78 | # today = ''
79 | #
80 | # Else, today_fmt is used as the format for a strftime call.
81 | #
82 | # today_fmt = '%B %d, %Y'
83 |
84 | # List of patterns, relative to source directory, that match files and
85 | # directories to ignore when looking for source files.
86 | # This patterns also effect to html_static_path and html_extra_path
87 | exclude_patterns = []
88 |
89 | # The reST default role (used for this markup: `text`) to use for all
90 | # documents.
91 | #
92 | # default_role = None
93 |
94 | # If true, '()' will be appended to :func: etc. cross-reference text.
95 | #
96 | # add_function_parentheses = True
97 |
98 | # If true, the current module name will be prepended to all description
99 | # unit titles (such as .. function::).
100 | #
101 | # add_module_names = True
102 |
103 | # If true, sectionauthor and moduleauthor directives will be shown in the
104 | # output. They are ignored by default.
105 | #
106 | # show_authors = False
107 |
108 | # The name of the Pygments (syntax highlighting) style to use.
109 | pygments_style = 'sphinx'
110 |
111 | # A list of ignored prefixes for module index sorting.
112 | # modindex_common_prefix = []
113 |
114 | # If true, keep warnings as "system message" paragraphs in the built documents.
115 | # keep_warnings = False
116 |
117 | # If true, `todo` and `todoList` produce output, else they produce nothing.
118 | todo_include_todos = False
119 |
120 |
121 | # -- Options for HTML output ----------------------------------------------
122 |
123 | # The theme to use for HTML and HTML Help pages. See the documentation for
124 | # a list of builtin themes.
125 | #
126 | html_theme = 'alabaster'
127 |
128 | # Theme options are theme-specific and customize the look and feel of a theme
129 | # further. For a list of options available for each theme, see the
130 | # documentation.
131 | #
132 | # html_theme_options = {}
133 |
134 | # Add any paths that contain custom themes here, relative to this directory.
135 | # html_theme_path = []
136 |
137 | # The name for this set of Sphinx documents.
138 | # " v documentation" by default.
139 | #
140 | # html_title = u'trollBUFR v0.1'
141 |
142 | # A shorter title for the navigation bar. Default is the same as html_title.
143 | #
144 | # html_short_title = None
145 |
146 | # The name of an image file (relative to this directory) to place at the top
147 | # of the sidebar.
148 | #
149 | # html_logo = None
150 |
151 | # The name of an image file (relative to this directory) to use as a favicon of
152 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
153 | # pixels large.
154 | #
155 | # html_favicon = None
156 |
157 | # Add any paths that contain custom static files (such as style sheets) here,
158 | # relative to this directory. They are copied after the builtin static files,
159 | # so a file named "default.css" will overwrite the builtin "default.css".
160 | html_static_path = ['_static']
161 |
162 | # Add any extra paths that contain custom files (such as robots.txt or
163 | # .htaccess) here, relative to this directory. These files are copied
164 | # directly to the root of the documentation.
165 | #
166 | # html_extra_path = []
167 |
168 | # If not None, a 'Last updated on:' timestamp is inserted at every page
169 | # bottom, using the given strftime format.
170 | # The empty string is equivalent to '%b %d, %Y'.
171 | #
172 | # html_last_updated_fmt = None
173 |
174 | # If true, SmartyPants will be used to convert quotes and dashes to
175 | # typographically correct entities.
176 | #
177 | # html_use_smartypants = True
178 |
179 | # Custom sidebar templates, maps document names to template names.
180 | #
181 | # html_sidebars = {}
182 |
183 | # Additional templates that should be rendered to pages, maps page names to
184 | # template names.
185 | #
186 | # html_additional_pages = {}
187 |
188 | # If false, no module index is generated.
189 | #
190 | # html_domain_indices = True
191 |
192 | # If false, no index is generated.
193 | #
194 | # html_use_index = True
195 |
196 | # If true, the index is split into individual pages for each letter.
197 | #
198 | # html_split_index = False
199 |
200 | # If true, links to the reST sources are added to the pages.
201 | #
202 | # html_show_sourcelink = True
203 |
204 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
205 | #
206 | # html_show_sphinx = True
207 |
208 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
209 | #
210 | # html_show_copyright = True
211 |
212 | # If true, an OpenSearch description file will be output, and all pages will
213 | # contain a tag referring to it. The value of this option must be the
214 | # base URL from which the finished HTML is served.
215 | #
216 | # html_use_opensearch = ''
217 |
218 | # This is the file name suffix for HTML files (e.g. ".xhtml").
219 | # html_file_suffix = None
220 |
221 | # Language to be used for generating the HTML full-text search index.
222 | # Sphinx supports the following languages:
223 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
224 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
225 | #
226 | # html_search_language = 'en'
227 |
228 | # A dictionary with options for the search language support, empty by default.
229 | # 'ja' uses this config value.
230 | # 'zh' user can custom change `jieba` dictionary path.
231 | #
232 | # html_search_options = {'type': 'default'}
233 |
234 | # The name of a javascript file (relative to the configuration directory) that
235 | # implements a search results scorer. If empty, the default will be used.
236 | #
237 | # html_search_scorer = 'scorer.js'
238 |
239 | # Output file base name for HTML help builder.
240 | htmlhelp_basename = 'trollBUFRdoc'
241 |
242 | # -- Options for LaTeX output ---------------------------------------------
243 |
244 | latex_elements = {
245 | # The paper size ('letterpaper' or 'a4paper').
246 | #
247 | # 'papersize': 'letterpaper',
248 |
249 | # The font size ('10pt', '11pt' or '12pt').
250 | #
251 | # 'pointsize': '10pt',
252 |
253 | # Additional stuff for the LaTeX preamble.
254 | #
255 | # 'preamble': '',
256 |
257 | # Latex figure (float) alignment
258 | #
259 | # 'figure_align': 'htbp',
260 | }
261 |
262 | # Grouping the document tree into LaTeX files. List of tuples
263 | # (source start file, target name, title,
264 | # author, documentclass [howto, manual, or own class]).
265 | latex_documents = [
266 | (master_doc, 'trollBUFR.tex', u'trollBUFR Documentation',
267 | u'Alexander Maul', 'manual'),
268 | ]
269 |
270 | # The name of an image file (relative to this directory) to place at the top of
271 | # the title page.
272 | #
273 | # latex_logo = None
274 |
275 | # For "manual" documents, if this is true, then toplevel headings are parts,
276 | # not chapters.
277 | #
278 | # latex_use_parts = False
279 |
280 | # If true, show page references after internal links.
281 | #
282 | # latex_show_pagerefs = False
283 |
284 | # If true, show URL addresses after external links.
285 | #
286 | # latex_show_urls = False
287 |
288 | # Documents to append as an appendix to all manuals.
289 | #
290 | # latex_appendices = []
291 |
292 | # If false, will not define \strong, \code, \titleref, \crossref ... but only
293 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
294 | # packages.
295 | #
296 | # latex_keep_old_macro_names = True
297 |
298 | # If false, no module index is generated.
299 | #
300 | # latex_domain_indices = True
301 |
302 |
303 | # -- Options for manual page output ---------------------------------------
304 |
305 | # One entry per manual page. List of tuples
306 | # (source start file, name, description, authors, manual section).
307 | man_pages = [
308 | (master_doc, 'trollbufr', u'trollBUFR Documentation',
309 | [author], 1)
310 | ]
311 |
312 | # If true, show URL addresses after external links.
313 | #
314 | # man_show_urls = False
315 |
316 |
317 | # -- Options for Texinfo output -------------------------------------------
318 |
319 | # Grouping the document tree into Texinfo files. List of tuples
320 | # (source start file, target name, title, author,
321 | # dir menu entry, description, category)
322 | texinfo_documents = [
323 | (master_doc, 'trollBUFR', u'trollBUFR Documentation',
324 | author, 'trollBUFR', 'One line description of project.',
325 | 'Miscellaneous'),
326 | ]
327 |
328 | # Documents to append as an appendix to all manuals.
329 | #
330 | # texinfo_appendices = []
331 |
332 | # If false, no module index is generated.
333 | #
334 | # texinfo_domain_indices = True
335 |
336 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
337 | #
338 | # texinfo_show_urls = 'footnote'
339 |
340 | # If true, do not generate a @detailmenu in the "Top" node's menu.
341 | #
342 | # texinfo_no_detailmenu = False
343 |
--------------------------------------------------------------------------------
/docs/source/examples.rst:
--------------------------------------------------------------------------------
1 | .. examples_
2 |
3 | Examples
4 | ========
5 |
6 | Using the trollbufr-API, the class :class:`~trollbufr.bufr.Bufr` presents the
7 | main entry point.
8 |
9 | trollbufr.bufr_main.py
10 | ~~~~~~~~~~~~~~~~~~~~~~
11 | This gives good examples how to handle the class :class:`~trollbufr.bufr.Bufr`.
12 |
13 | trollbufr.bufr.Bufr
14 | ~~~~~~~~~~~~~~~~~~~
15 | Simple example for handling class :class:`Bufr`::
16 |
17 | bufr = Bufr(args.tables_type, args.tables_path)
18 | for fn in glob("*.bufr"):
19 | for blob, size, header in load_file.next_bufr(fn):
20 | print "HEADER\t%s" % header
21 | bufr.decode_meta(blob, tables=False)
22 | tabl = bufr.load_tables()
23 | print "META:\n%s" % bufr.get_meta_str()
24 | for report in bufr.next_subset():
25 | print "SUBSET\t#%d/%d" % report.subs_num
26 | for k, m, (v, q) in report.next_data():
27 | print k, v
28 |
29 | Read all files named \*.bufr, parse each file for BUFR bulletins; then decode
30 | them, writing each descriptor and the associated data value to STDOUT.
31 |
32 | It can be done even shorter::
33 |
34 | bufr = Bufr(args.tables_type, args.tables_path)
35 | for fn_in in glob("*.bufr"):
36 | for blob, _, header in load_file.next_bufr(fn_in):
37 | json_bufr = bufr.decode(blob,
38 | load_tables=True,
39 | as_array=args.array)
40 | print json_bufr
41 |
42 | Here each BUFR is decoded, including loading tables as required, in one go and
43 | the resulting values are printed as a list/dict structure.
44 |
45 |
--------------------------------------------------------------------------------
/docs/source/iasi_plot.rst:
--------------------------------------------------------------------------------
1 | WMO file formats
2 | ================
3 |
4 | Whether you like it or not, the *Binary Universal Form for the Representation of
5 | meteorological data* (BUFR) file format (see e.g. the WMO_ satellite file format pages)
6 | is widely used even in satellite meteorology.
7 | The format is used mostly for satellite sounder data, like NOAA AMSU and MHS and Metop
8 | IASI, which traditionally have been the primary satellite data going into the *Numerical
9 | Weather Prediction* (NWP) models.
10 |
11 | Here we will demonstrate how trollbufr (trollbufr_) can be used to read satellite
12 | data in BUFR format.
13 |
14 | The Grib file format is another WMO file format. Traditionally this has been the
15 | file format used for the output of NWP models, but it is occasionally also used for storing
16 | satellite products. For reading Grib files in python we refer to the excellent
17 | pygrib_ package.
18 |
19 | Use python-bufr
20 | ---------------
21 |
22 | The trollbufr does not depend on the ECMWF or any other third-party BUFR
23 | library, but their (or any other provider's) tables, including EUMETSAT's local
24 | extension tables.
25 | Although this example uses numpy, trollbufr does not require it.
26 |
27 | Please see the package documentation at trollbufr_ for installation and setup.
28 |
29 | You will just need to tell your environment where to find the BUFR tables and
30 | make sure your PYTHONPATH includes the place where you have the trollbufr installed:
31 |
32 | export BUFR_TABLES=/path/bufrtables/
33 |
34 | In the BUFR_TABLES directory you will have files (and symlinks) like shown here:
35 |
36 | .. code-block:: ini
37 |
38 | lrwxrwxrwx 1 a000680 users 24 26 jan 14.19 D0000000000254013001.TXT -> D0000000000098013001.TXT*
39 | lrwxrwxrwx 1 a000680 users 24 26 jan 14.19 C0000000000254013001.TXT -> C0000000000098013001.TXT*
40 | lrwxrwxrwx 1 a000680 users 24 26 jan 14.09 B0000000000254013001.TXT -> B0000000000098013001.TXT*
41 | lrwxrwxrwx 1 a000680 users 24 26 jan 14.00 D0000000000254010001.TXT -> D0000000000098013001.TXT*
42 | lrwxrwxrwx 1 a000680 users 24 26 jan 14.00 D0000000000099010001.TXT -> D0000000000098013001.TXT*
43 | ...
44 |
45 | Example on EUMETCast IASI level 2 product
46 | -----------------------------------------
47 |
48 | >>> TESTFILE = "./sample_data/iasi_20120206_190254_metopa_27506_eps_o_clp.l2_bufr"
49 | >>> import bufr
50 | >>> import numpy as np
51 | >>> bfr = bufr.BUFRFile(TESTFILE)
52 |
53 | Import the required modules:
54 |
55 | >>> from trollbufr.bufr import Bufr
56 | >>> from trollbufr import load_file
57 | >>> import numpy as np
58 | >>> import sys
59 | >>> testfile = "./sample_data/IEDX61_EUMC_020505.bin"
60 |
61 | Initialise the Bufr-object:
62 |
63 | >>> bfr = Bufr("eccodes", "tables")
64 |
65 | Decode the BUFR's meta-data and print it to STDOUT:
66 |
67 | >>> for blob, size, header in load_file.next_bufr(testfile):
68 | >>> bfr.decode(blob)
69 | >>> print "\n", testfile, header, "\n", bfr.get_meta_str()
70 |
71 | Let's have a look at the descriptor-list of the BUFR:
72 |
73 | >>> print "\n".join( bufr.get_descr_full() )
74 |
75 | .. code-block:: ini
76 |
77 | 001007 : 'SATELLITE IDENTIFIER' (code) [CODE TABLE]
78 | 001031 : 'IDENTIFICATION OF ORIGINATING/GENERATING CENTRE (SEE NOTE 10)' (code) [CODE TABLE]
79 | 025060 : 'SOFTWARE IDENTIFICATION (SEE NOTE 2)' (long) [Numeric]
80 | 002019 : 'SATELLITE INSTRUMENTS' (code) [CODE TABLE]
81 | 002020 : 'SATELLITE CLASSIFICATION' (code) [CODE TABLE]
82 | 004001 : 'YEAR' (long) [a]
83 | 004002 : 'MONTH' (long) [mon]
84 | ...
85 | 005001 : 'LATITUDE (HIGH ACCURACY)' (double) [deg]
86 | 006001 : 'LONGITUDE (HIGH ACCURACY)' (double) [deg]
87 | ...
88 | 008003 : 'VERTICAL SIGNIFICANCE (SATELLITE OBSERVATIONS)' (code) [CODE TABLE]
89 | 112003 : LOOP, 12 desc., 3 times
90 | 202129 : OPERATOR 2: 129
91 | 201131 : OPERATOR 1: 131
92 | 007004 : 'PRESSURE' (long) [Pa]
93 | 201000 : OPERATOR 1: 0
94 | 202000 : OPERATOR 2: 0
95 | 012101 : 'TEMPERATURE/DRY-BULB TEMPERATURE' (double) [K]
96 | 202130 : OPERATOR 2: 130
97 | 201135 : OPERATOR 1: 135
98 | 020081 : 'CLOUD AMOUNT IN SEGMENT' (long) [%]
99 | 201000 : OPERATOR 1: 0
100 | 202000 : OPERATOR 2: 0
101 | 020056 : 'CLOUD PHASE' (code) [CODE TABLE]
102 |
103 | In the BUFR file data are laid out sequentially; the pressure fields have the
104 | descriptors 007004. They are associated with the latitude 005001 and longitude
105 | 006001.
106 |
107 | Now lets just check what fields and their values are in the file,
108 | the first subset is enough to get an impression:
109 |
110 | >>> for subset in bfr.next_subset():
111 | >>> for k, m, (v, q) in subset.next_data():
112 | >>> print k, m, v
113 | >>> break
114 |
115 | .. code-block:: ini
116 |
117 | Edition : 4
118 | Master-table : 0
119 | Centre : EUMETSAT OPERATION CENTRE
120 | Sub-Centre : NO SUB-CENTRE
121 | Update sequence number : 0
122 | Type of data : observed
123 | Data category : Vertical soundings (satellite)
124 | International data sub-category : 255
125 | Local data sub-category : 226
126 | Version number of master table : 19
127 | Version number of local table : 1
128 | Most typical time : 2016-12-02 05:02:00
129 | Optional section present : no
130 | Compression : yes
131 | Number of data subsets : 2040
132 | None SUB None
133 | 1007 None 4
134 | 1031 None 254
135 | 25060 None 602
136 | 2019 None 221
137 | 2020 None 61
138 | 4001 None 2016
139 | 4002 None 12
140 | 4003 None 2
141 | 4004 None 5
142 | 4005 None 2
143 | 4006 None 59
144 | 5040 None 52516
145 | 5041 None 175
146 | 5001 None 71.1056
147 | 6001 None 177.8049
148 | ...
149 | None RPL 1 None
150 | 7004 None 83637
151 | 12101 None 247.0
152 | 20081 None 86.13
153 | 20056 None 2
154 | None RPL 2 None
155 | 7004 None None
156 | 12101 None None
157 | 20081 None None
158 | 20056 None None
159 | None RPL 3 None
160 | 7004 None None
161 | 12101 None None
162 | 20081 None None
163 | 20056 None None
164 | None RPL END None
165 | 2040 2040 2040
166 |
167 | We want to look at the Cloud Top Pressure, but we see that there are actually
168 | three PRESSURE fields (repetitions RPL 1-3) in the file.
169 | The descriptors are printed as numbers, omitting any leading "0". So we are
170 | looking for descriptors 5001, 6001, and 7004.
171 |
172 | Since for the last two 7004 there's "None" in the third column, it seems the
173 | only field that contains data is the first one. Let us extract all the data
174 | and the geolocation:
175 |
176 | >>> bfr = bufr.BUFRFile(TESTFILE)
177 | >>> lon = []
178 | >>> lat = []
179 | >>> pres = []
180 | >>> for subset in bfr.next_subset():
181 | >>> gotit = 0
182 | >>> for k, m, (v, q) in subset.next_data():
183 | >>> if gotit:
184 | >>> continue
185 | >>> if k == 5001:
186 | >>> lat.append((0, 0, v))
187 | >>> if k == 6001:
188 | >>> lon.append((0, 0, v))
189 | >>> if k == 7004:
190 | >>> pres.append((0, 0, v))
191 | >>> gotit = 1
192 | >>> lons = np.concatenate(lon)
193 | >>> lats = np.concatenate(lat)
194 | >>> pres = np.concatenate(pres) / 100.0 # hPa
195 | >>> pres = np.ma.masked_greater(pres, 1.0e+6)
196 |
197 |
198 | Now we have an IASI granule with the level 2 CTP parameter.
199 | It is geolocated, so we could project it to a user area and map projection.
200 | We use pyresample_ for that of course, and a predefined area from a local configuration
201 | file (see further below):
202 |
203 | >>> import pyresample as pr
204 | >>> from pyresample import kd_tree, geometry
205 | >>> from pyresample import utils
206 | >>> swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
207 | >>> area_def = utils.parse_area_file('./region_config.cfg', 'scan2')[0]
208 | >>> result = kd_tree.resample_nearest(swath_def, pres,
209 | area_def,
210 | radius_of_influence=12000,
211 | epsilon=100,
212 | fill_value=None)
213 | >>> pr.plot.save_quicklook('/tmp/iasi_ctp.png',
214 | area_def, result, label='IASI - Cloud Top Pressure',
215 | coast_res = 'h')
216 |
217 |
218 | .. image:: images/iasi_ctp.png
219 |
220 | The local area configuration is actually, in this case, taken from another project,
221 | namely the nwcsaf_. The NWCSAF PPS software uses the same configuration style as
222 | implemented in pyresample. In this particular case the area *scan2* is defined as
223 | shown below:
224 |
225 | .. code-block:: ini
226 |
227 | REGION: scan2 {
228 | NAME: Scandinavia - 2km area
229 | PCS_ID: ps60n
230 | PCS_DEF: proj=stere,ellps=bessel,lat_0=90,lon_0=14,lat_ts=60
231 | XSIZE: 1024
232 | YSIZE: 1024
233 | AREA_EXTENT: (-1268854.1266382949, -4150234.8425892727, 779145.8733617051, -2102234.8425892727)
234 | };
235 |
236 | .. _WMO: http://www.wmo.int/pages/prog/sat/formatsandstandards_en.php
237 | .. _pygrib: http://code.google.com/p/pygrib/
238 | .. _trollbufr: http://github.com/pytroll/trollbufr
239 | .. _pyresample: http://github.com/pytroll/pyresample
240 | .. _nwcsaf: http://nwcsaf.smhi.se/
241 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. trollBUFR documentation master file, created by
2 | sphinx-quickstart on Tue Nov 29 12:56:03 2016.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to trollBUFR's documentation!
7 | =====================================
8 |
9 | Contents
10 | --------
11 | .. toctree::
12 | :maxdepth: 2
13 |
14 | tables
15 | cmd
16 | usage
17 | json
18 | api
19 | examples
20 |
21 | trollBUFR
22 | ---------
23 | The Python package ``trollbufr`` provides an API and some command-line scripts
24 | to read/decode and write/encode BUFR messages.
25 |
26 | Where does the *troll* come from? -- Well, this package lives in the context of
27 | PyTroll_, an Open-Source-Software collaboration of several national
28 | meteorological services and universities.
29 |
30 | About BUFR
31 | ----------
32 | BUFR stands for "Binary Universal Form for the Representation of meteorological data".
33 | It is a binary message format, developed as a "table-driven code form" (TDCF)
34 | by members of the World Meteorological Organisation (WMO_).
35 |
36 | Its main use is meteorological data exchange and storage. It is used
37 | in other fields of geo-science as well, e.g. oceanography, and
38 | satellite-derived products.
39 |
40 | Find further information and detailed description at
41 | http://www.wmo.int/pages/prog/www/WMOCodes.html
42 |
43 | News
44 | ----
45 | With version 0.6.0 a feature for encoding a JSON formatted file into binary BUFR
46 | is added.
47 |
48 | The command-line options are changed:
49 |
50 | - `print information and descriptor sections`: ``-d`` became ``-m``.
51 | - `decode BUFR`: ``-r`` became ``-d``.
52 |
53 | New is ``-j`` to write any output in a JSON format.
54 |
55 | To-Do
56 | -----
57 | There are still things to do:
58 |
59 | - Create working unit-tests.
60 | - Implement the remaining obscure operators.
61 |
62 | So, get involved at PyTroll_ or GitHub_!
63 |
64 | License
65 | -------
66 |
67 | trollBUFR is licensed under `GNU LGPL-3 <_static/LICENSE.txt>`_.
68 |
69 | Indices and tables
70 | ==================
71 |
72 | - :ref:`genindex`
73 | - :ref:`modindex`
74 | - :ref:`search`
75 |
76 | .. _PyTroll: http://www.pytroll.org
77 | .. _GitHub: https://github.com/pytroll/trollbufr
78 | .. _WMO: http://www.wmo.int
--------------------------------------------------------------------------------
/docs/source/json.rst:
--------------------------------------------------------------------------------
1 | .. json_
2 |
3 | JSON Format for Input/Output
4 | ============================
5 |
6 | .. contents::
7 |
8 | Purpose
9 | -------
10 | The JSON object notation format was chosen because numerous readers are
11 | available, and it's also human-readable.
12 | The structure has its focus on values, rather than on descriptive elements, as it
13 | is the same with binary BUFR.
14 |
15 | Decode BUFR to JSON Output
16 | --------------------------
17 | To decode a BUFR into a JSON object, either use the instance method
18 | `Bufr.decode()` or the command-line parameter ``-j | --decode-json``.
19 |
20 | Encode JSON Input to BUFR
21 | -------------------------
22 | To encode a file with a JSON object into binary BUFR, either use the instance
23 | method `Bufr.encode()` or the command-line parameter ``-e | --encode``.
24 |
25 | JSON Structure
26 | --------------
27 | The content of an input or output file, formatted following the JSON
28 | structure, consists of one list ``[...]`` as the top-level element.
29 |
30 | Each entry in this list represents one BUFR message.
31 |
32 | One BUFR message is built as a dictionary ``{...}`` with the following keys:
33 |
34 | - `"index"` : integer value, equal to the list index of this BUFR in the top-level
35 | list.
36 |
37 | - `"file"` : optional, set if the command-line scripts are used for decoding and
38 | encoding:
39 |
40 | - Original file name, if a file was decoded.
41 | - File name used for encoding.
42 |
43 | - `"heading"` : optional,
44 |
45 | - Set if the decoded file contained a WMO bulletin.
46 | - Used as WMO abbreviated heading line for a WMO bulletin on encoding.
47 |
48 |
49 | - `"bufr"` : the value representation of the BUFR.
50 |
51 | Example::
52 |
53 | [{
54 | "index": 0,
55 | "bufr": [ ... ],
56 | "heading": null,
57 | "file": "207003.bufr"
58 | }]
59 |
60 | The value representation of one BUFR is one list ``[...]``, its elements are
61 | the BUFR sections 0 to 4, where each is a list of values.
62 |
63 | Length identifier
64 | ~~~~~~~~~~~~~~~~~
65 | In contrast to the binary BUFR, there are NO length values denoting either the
66 | length of a section, or the amount of repetitions. On encoding they will be
67 | calculated, thus making the handling of delayed replication easier.
68 |
69 | Booleans, None, Strings, and Numbers
70 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
71 | With JSON, boolean values are encoded as ``true`` or ``false``, which is the
72 | case for some flag-values in sections 1 and 3.
73 |
74 | In the data section 4, for any `missing value` the keyword ``null`` is set in
75 | the JSON structure.
76 |
77 | Strings, or character sequences, are stored as sequences of ITA-5 (which is
78 | equivalent to US-ASCII) characters, surrounded with double-quotes ``"``.
79 | When encoding a JSON object as BUFR, the underlying functions take care of
80 | padding/truncating the strings to match the width as defined by the descriptor.
81 |
82 | Numbers are ... numbers. Either integer or decimal-point values.
83 |
84 | Section 0 -- Indicator section
85 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
86 | Always has exactly two elements:
87 |
88 | - the keyword ``"BUFR"``.
89 | - the BUFR edition number.
90 |
91 | ::
92 |
93 | ["BUFR", 3],
94 |
95 | Section 1 -- Identification section
96 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
97 | The identification elements, or meta-data, of this BUFR.
98 |
99 | The elements meaning follows the same order as encoded in BUFR according to
100 | the BUFR edition.
101 |
102 | Although the representative time is encoded in BUFR Ed.3 without a value for
103 | the seconds, they are always set in the JSON structure -- in which case they
104 | are set to zero.
105 | On encoding a JSON structure into BUFR Ed.3, the value for seconds will be
106 | ignored.
107 |
108 | ::
109 |
110 | [0, 0, 98, 0, false, 21, 202, 15, 0, 2012, 11, 2, 0, 0, 0],
111 |
112 | Section 2 -- Optional section (local data)
113 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
114 | This section is optional in encoding a BUFR, but the corresponding list entry
115 | is required.
116 |
117 | Empty section::
118 |
119 | [],
120 |
121 | If the local data section 2 should not be present in the BUFR when
122 | encoding or decoding, the list representing the values of this section shall be
123 | left empty.
124 |
125 | Section with values::
126 |
127 | ["03", "59", "7d", "ca", "7d", "20", "00", "53", "10", "94"],
128 |
129 | If data for local application use either was encoded in the BUFR or should be
130 | used when encoding a BUFR, the numerical ASCII values of all bytes shall be
131 | listed, each wrapped with double-quotes to set string values.
132 |
133 | Section 3 -- Data description section
134 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
135 | Values listed in the following order:
136 |
137 | - Number of data subsets.
138 | - Data category flag.
139 | - Data compression flag.
140 | - Collection of descriptors which define the form and content of individual
141 | data elements.
142 |
143 | ::
144 |
145 | [2, true, true, ["310060"]],
146 |
147 | Section 4 -- Data section
148 | ~~~~~~~~~~~~~~~~~~~~~~~~~
149 | Binary data.
150 |
151 |
152 | Section 5 -- End section
153 | ~~~~~~~~~~~~~~~~~~~~~~~~
154 | Always has exactly one element: the keyword ``"7777"``.
155 |
156 | ::
157 |
158 | ["7777"]
159 |
160 | Full Example
161 | ~~~~~~~~~~~~
162 |
163 | .. include:: _static/207003.json
164 | :code: javascript
165 |
166 |
--------------------------------------------------------------------------------
/docs/source/tables.rst:
--------------------------------------------------------------------------------
1 | Tables
2 | ======
3 |
4 | Tables are essential for writing and reading BUFR messages. Each BUFR has
5 | references to a specific version of a set of tables. They contain information
6 | how to read the data described within the BUFR.
7 |
8 | Download a TDCF table archive:
9 |
10 | - zip-files (``bufrtables_*.zip``) in this project.
11 | - e.g. from `ECMWF eccodes `_.
12 | - DWD's `OpenData server `_.
13 | - Or other sources.
14 |
15 | If the files ``operator.table`` and/or ``datacat.table`` are not present in the
16 | table directory, there are standard ones in this project's root.
17 | These files are not required for decoding, but optional for human-readable output.
18 |
19 | Either set environment variable ``$BUFR_TABLES`` to the base directory, where
20 | the table archives were extracted into, or provide this path to the Bufr
21 | constructor, resp. at command-line.
22 |
--------------------------------------------------------------------------------
/docs/source/usage.rst:
--------------------------------------------------------------------------------
1 | .. usage_
2 |
3 | Usage
4 | =====
5 |
6 | The class Bufr provides methods for decoding and encoding BUFR with different
7 | strategies:
8 |
9 | - Decode BUFR meta-data from BUFR section 0, 1, and 3.
10 | - Decode data section 4 only.
11 | - Load tables corresponding to BUFR section 1.
12 | - Do all three steps in one go.
13 | - Encode a JSON formatted file as BUFR and write this to a file.
14 |
15 | Decoding the data section can be done descriptor by descriptor via a generator,
16 | or decode it in one step, creating a JSON-like data structure.
17 | The second will have better performance with BUFR using the internal compression.
18 |
19 |
20 | Usually follow these first steps to decode a BUFR:
21 |
22 | 1. Instantiate class Bufr
23 | 2. Load BUFR data in string
24 | 3. Decode BUFR meta-data
25 | 4. Load tables
26 |
27 | To retrieve the descriptor/value pairs from a generator:
28 |
29 | 5. Get iterator over subsets
30 | 6. Get iterator over data elements and iterate
31 |
32 | Repeat 5+6 for each subset.
33 |
34 | To decode a BUFR and retrieve all values as a JSON-like structure:
35 |
36 | 5. Decode all values from all subsets, and get a dict object.
37 |
38 | Repeat 2-6 for new BUFR, re-using already loaded tables.
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/examples/iasi_ctp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pytroll/trollbufr/8979860f4fdd733b91eb1db9e1fc08b07764e3ec/examples/iasi_ctp.png
--------------------------------------------------------------------------------
/examples/iasi_plot.py:
--------------------------------------------------------------------------------
1 | import logging
2 | handler = logging.StreamHandler()
3 | handler.setFormatter(logging.Formatter(
4 | "[%(levelname)s: %(module)s] %(message)s"))
5 | handler.setLevel(logging.WARNING)
6 | logging.getLogger('').setLevel(logging.WARNING)
7 | logging.getLogger('').addHandler(handler)
8 |
9 |
10 | from trollbufr.bufr import Bufr
11 | from trollbufr import load_file
12 | import numpy as np
13 | import sys
14 |
15 | if len(sys.argv) != 3:
16 | print "SYNTAX:", sys.argv[0], ""
17 | sys.exit(1)
18 | testfile = sys.argv[1]
19 | pngfile = sys.argv[2]
20 |
21 | lon = []
22 | lat = []
23 | pres = []
24 | bfr = Bufr("eccodes", "tables")
25 | for blob, size, header in load_file.next_bufr(testfile):
26 | bfr.decode(blob)
27 | print header, bfr.get_meta()['datetime']
28 | for subset in bfr.next_subset():
29 | gotit = 0
30 | for k, m, (v, q) in subset.next_data():
31 | if gotit:
32 | continue
33 | if k == 5001:
34 | lat.append((0, 0, v))
35 | if k == 6001:
36 | lon.append((0, 0, v))
37 | if k == 7004:
38 | pres.append((0, 0, v))
39 | gotit = 1
40 | print len(lon), len(lat), len(pres)
41 |
42 | lons = np.concatenate(lon)
43 | lats = np.concatenate(lat)
44 | pres = np.concatenate(pres) / 100.0 # hPa
45 | pres = np.ma.masked_greater(pres, 1.0e+6)
46 |
47 | import pyresample as pr
48 | from pyresample import kd_tree, geometry
49 | from pyresample import utils
50 |
51 | swath_def = geometry.SwathDefinition(lons=lons, lats=lats)
52 | area_def = utils.parse_area_file('region_config.cfg', 'scanX')[0]
53 |
54 | result = kd_tree.resample_nearest(swath_def, pres,
55 | area_def,
56 | radius_of_influence=12000,
57 | epsilon=100,
58 | fill_value=None)
59 | pr.plot.save_quicklook(pngfile,
60 | area_def, result, label='IASI - Cloud Top Pressure',
61 | coast_res='l')
62 |
63 |
--------------------------------------------------------------------------------
/examples/iasi_search.py:
--------------------------------------------------------------------------------
1 | from trollbufr.bufr import Bufr
2 | from trollbufr import load_file
3 |
4 | import sys
5 | import glob
6 | import logging
7 | handler = logging.StreamHandler()
8 | handler.setFormatter(logging.Formatter(
9 | "[%(levelname)s: %(module)s] %(message)s"))
10 | # handler.setLevel(logging.DEBUG)
11 | # logging.getLogger('').setLevel(logging.DEBUG)
12 | handler.setLevel(logging.WARNING)
13 | logging.getLogger('').setLevel(logging.WARNING)
14 | logging.getLogger('').addHandler(handler)
15 |
16 | fp = sys.argv[1]
17 |
18 | bfr = Bufr("eccodes", "tables")
19 | for fn in glob.glob(fp):
20 | print fn
21 | i=0
22 | for blob, size, header in load_file.next_bufr(fn):
23 | try:
24 | bfr.decode(blob)
25 | lon = lat = 0
26 | for subset in bfr.next_subset():
27 | for k, m, (v, q) in subset.next_data():
28 | if k == 5001:
29 | lat = v
30 | if k == 6001:
31 | lon = v
32 | break
33 |
34 | if header.startswith("IEDX"):
35 | print i,header, lon, lat,
36 | if lon > -10 and lon < 30 and lat > 50 and lat < 70:
37 | print "<------"
38 | else:
39 | print
40 | except StandardError as e:
41 | print "ERR:",e
42 | i+=1
43 | print "---"
44 |
--------------------------------------------------------------------------------
/examples/iasi_show.py:
--------------------------------------------------------------------------------
1 | import logging
2 | handler = logging.StreamHandler()
3 | handler.setFormatter(logging.Formatter(
4 | "[%(levelname)s: %(module)s] %(message)s"))
5 | handler.setLevel(logging.WARNING)
6 | logging.getLogger('').setLevel(logging.WARNING)
7 | logging.getLogger('').addHandler(handler)
8 |
9 |
10 | from trollbufr.bufr import Bufr
11 | from trollbufr import load_file
12 | import sys
13 |
14 | if len(sys.argv) != 2:
15 | print "SYNTAX:", sys.argv[0], ""
16 | sys.exit(1)
17 | testfile = sys.argv[1]
18 |
19 | bfr = Bufr("eccodes", "tables")
20 | for blob, size, header in load_file.next_bufr(testfile):
21 | bfr.decode(blob)
22 | print "\n", testfile, header, "\n", bfr.get_meta_str()
23 | for subset in bfr.next_subset():
24 | for k, m, (v, q) in subset.next_data():
25 | print k, m, v
26 | break
27 |
28 | with open(header.replace(" ","_"), "w") as fh:
29 | fh.write(blob[0:])
30 |
--------------------------------------------------------------------------------
/examples/iedx61.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pytroll/trollbufr/8979860f4fdd733b91eb1db9e1fc08b07764e3ec/examples/iedx61.png
--------------------------------------------------------------------------------
/examples/region_config.cfg:
--------------------------------------------------------------------------------
1 | REGION: scan2 {
2 | NAME: Scandinavia - 2km area
3 | PCS_ID: ps60n
4 | PCS_DEF: proj=stere,ellps=bessel,lat_0=90,lon_0=14,lat_ts=60
5 | XSIZE: 1024
6 | YSIZE: 1024
7 | AREA_EXTENT: (-1268854.1266382949, -4150234.8425892727, 779145.8733617051, -2102234.8425892727)
8 | };
9 | REGION: scanX {
10 | NAME: Scandinavia - 2km area
11 | PCS_ID: ps60n
12 | PCS_DEF: proj=stere,ellps=bessel,lat_0=90,lon_0=14,lat_ts=60
13 | XSIZE: 1024
14 | YSIZE: 1024
15 | AREA_EXTENT: (-4000000.0000000000, -6000000.0000000000, 2000000.0000000000, -1000000.0000000000)
16 | };
17 |
--------------------------------------------------------------------------------
/operator.table:
--------------------------------------------------------------------------------
1 | #Edition, FXY, OperatorName_en, OperationDefinition_en
2 |
3 | 3,201YYY, Change data width, Add (YYY-128) bits to the data width given for each data element in Table B
4 | 3,202YYY, Change scale, Add YYY-128 to the scale for each data element in Table B
5 | 3,203YYY, Change reference values, Subsequent element descriptors define new reference values for corresponding Table B entries. Each new reference value is represented by YYY bits in the Data section. Definition of new reference values is concluded by coding this operator with YYY = 255. Negative reference values shall be represented by a positive integer with the left-most bit (bit 1) set to 1.
6 | 3,204YYY, Add associated field, Precede each data element with YYY bits of information. This operation associates a data field (e.g. quality control information) of YYY bits with each data element.
7 | 3,205YYY, Signify character, YYY characters (CCITT International Alphabet No. 5) are inserted as a data field of YYY x 8 bits in length.
8 | 3,206YYY, Signify data width for the immediately following local descriptor, YYY bits of data are described by the immediately following descriptor.
9 | 3,221YYY, Data not present, Data values present in Section 4 (Data section) corresponding to the following YYY descriptors shall be limited to data from Classes 1-9
10 | 3,222000, Quality information follows, The values of Class 33 elements which follow relate to the data defined by the data present bit-map.
11 | 3,223000, Substituted values operator, The substituted values which follow relate to the data defined by the data present bit-map.
12 | 3,223255, Substituted values marker operator, This operator shall signify a data item containing a substituted value; the element descriptor for the substituted value is obtained by the application of the data present bit-map associated with the substituted values operator.
13 | 3,224000, First-order statistical values follow, The statistical values which follow relate to the data defined by the data present bit-map.
14 | 3,224255, First-order statistical values marker operator, This operator shall signify a data item containing a first-order statistical value of the type indicated by the preceding 0 08 023 element descriptor; the element descriptor to which the first-order statistic relates is obtained by the application of the data present bit-map associated with the first-order statistical values follow operator; first-order statistical values shall be represented as defined by this element descriptor.
15 | 3,225000, Difference statistical values follow, The statistical values which follow relate to the data defined by the data present bit-map.
16 | 3,225255, Difference statistical values marker operator, This operator shall signify a data item containing a difference statistical value of the type indicated by the preceding 0 08 024 element descriptor; the element descriptor to which the difference statistical value relates is obtained by the application of the data present bit-map associated with the difference statistical values follow operator; difference statistical values shall be represented as defined by this element descriptor
17 | 3,232000, Replaced/retained values follow, The replaced/retained values which follow relate to the data defined by the data present bit-map.
18 | 3,232255, Replaced/retained value marker operator, This operator shall signify a data item containing the original of an element which has been replaced by a substituted value. The element descriptor for the retained value is obtained by the application of the data present bit-map associated with the substituted values operator.
19 | 3,235000, Cancel backward data reference, This operator terminates all previously defined back-ward reference and cancels any previously defined data present bit-map; it causes the next data present bit-map to refer to the data descriptors which immediately precede the operator to which it relates.
20 | 3,236000, Define data present bit-map, This operator defines the data present bit-map which follows for possible re-use; only one data present bitmap may be defined between this operator and the cancel use defined data present bit-map operator.
21 | 3,237000, Use defined data present bit-map, This operator causes the defined data present bit-map to be used again.
22 | 3,237255, Cancel use defined data present bit-map, This operator cancels the re-use of the defined data present bit-map.
23 |
24 | 4,201YYY, Change data width, Add (YYY-128) bits to the data width given for each data element in Table B
25 | 4,202YYY, Change scale, Add YYY-128 to the scale for each data element in Table B
26 | 4,203YYY, Change reference values, Subsequent element descriptors define new reference values for corresponding Table B entries. Each new reference value is represented by YYY bits in the Data section. Definition of new reference values is concluded by coding this operator with YYY = 255. Negative reference values shall be represented by a positive integer with the left-most bit (bit 1) set to 1.
27 | 4,204YYY, Add associated field, Precede each data element with YYY bits of information. This operation associates a data field (e.g. quality control information) of YYY bits with each data element.
28 | 4,205YYY, Signify character, YYY characters (CCITT International Alphabet No. 5) are inserted as a data field of YYY x 8 bits in length.
29 | 4,206YYY, Signify data width for the immediately following local descriptor, YYY bits of data are described by the immediately following descriptor.
30 | 4,207YYY, Increase scale, reference value and data width
31 | 4,208YYY, Change width of CCITT IA5 field, YYY characters from CCITT International Alphabet No. 5 (representing YYY x 8 bits in length) replace the specified data width given for each CCITT IA5 element in Table B.
32 | 4,209YYY, IEEE floating point representation, For elements in Table B other than CCITT IA5
33 | 4,221YYY, Data not present, Data values present in Section 4 (Data section) corresponding to the following YYY descriptors shall be limited to data from Classes 01-09
34 | 4,222000, Quality information follows, The values of Class 33 elements which follow relate to the data defined by the data present bit-map.
35 | 4,223000, Substituted values operator, The substituted values which follow relate to the data defined by the data present bit-map.
36 | 4,223255, Substituted values marker operator, This operator shall signify a data item containing a substituted value; the element descriptor for the substituted value is obtained by the application of the data present bit-map associated with the substituted values operator.
37 | 4,224000, First-order statistical values follow, The statistical values which follow relate to the data defined by the data present bit-map.
38 | 4,224255, First-order statistical values marker operator, This operator shall signify a data item containing a first-order statistical value of the type indicated by the preceding 0 08 023 element descriptor; the element descriptor to which the first-order statistic relates is obtained by the application of the data present bit-map associated with the first-order statistical values follow operator; first-order statistical values shall be represented as defined by this element descriptor.
39 | 4,225000, Difference statistical values follow, The statistical values which follow relate to the data defined by the data present bit-map.
40 | 4,225255, Difference statistical values marker operator, This operator shall signify a data item containing a difference statistical value of the type indicated by the preceding 0 08 024 element descriptor; the element descriptor to which the difference statistical value relates is obtained by the application of the data present bit-map associated with the difference statistical values follow operator; difference statistical values shall be represented as defined by this element descriptor
41 | 4,232000, Replaced/retained values follow, The replaced/retained values which follow relate to the data defined by the data present bit-map.
42 | 4,232255, Replaced/retained value marker operator, This operator shall signify a data item containing the original of an element which has been replaced by a substituted value. The element descriptor for the retained value is obtained by the application of the data present bit-map associated with the substituted values operator.
43 | 4,235000, Cancel backward data reference, This operator terminates all previously defined back-ward reference and cancels any previously defined data present bit-map; it causes the next data present bit-map to refer to the data descriptors which immediately precede the operator to which it relates.
44 | 4,236000, Define data present bit-map, This operator defines the data present bit-map which follows for possible re-use; only one data present bit-map may be defined between this operator and the cancel use defined data present bit-map operator.
45 | 4,237000, Use defined data present bit-map, This operator causes the defined data present bit-map to be used again.
46 | 4,237255, Cancel use defined data present bit-map, This operator cancels the re-use of the defined data present bit-map.
47 | 4,241000, Define event, This operator denotes the beginning of the definition of an event
48 | 4,241255, Cancel define event, This operator denotes the conclusion of the event definition that was begun via the previous 2 41 000 operator.
49 | 4,242000, Define conditioning event, This operator denotes the beginning of the definition of a conditioning event.
50 | 4,242255, Cancel define conditioning event, This operator denotes the conclusion of the conditioning event definition that was begun via the previous 2 42 000 operator.
51 | 4,243000, Categorical forecast values follow, The values which follow are categorical forecast values.
52 | 4,243255, Cancel categorical forecast values follow, This operator denotes the conclusion of the definition of categorical forecast values that was begun via the previous 2 43 000 operator.
53 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Author(s):
5 | #
6 | # Alexander Maul
7 | #
8 | # This program is free software: you can redistribute it and/or modify
9 | # it under the terms of the GNU General Public License as published by
10 | # the Free Software Foundation, either version 3 of the License, or
11 | # (at your option) any later version.
12 | #
13 | # This program is distributed in the hope that it will be useful,
14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 | # GNU General Public License for more details.
17 | #
18 | # You should have received a copy of the GNU General Public License
19 | # along with this program. If not, see .
20 |
"""
"""
# Workaround for a Python bug: http://bugs.python.org/issue15881#msg170215
import multiprocessing  # noqa: F401

from setuptools import setup, find_packages
import imp
import sys

# Load the version module from its file, without importing the (possibly
# not yet installed) trollbufr package itself.
version = imp.load_source("trollbufr.version", "trollbufr/version.py")

requires = ["bitstring", "six"]

# Console-script names carry a "3" suffix when installed under Python 3,
# so installations for both interpreter versions can coexist.
if sys.version_info[0] < 3:
    scripts_with_python_version = [
        "trollbufr = trollbufr.bufr_main:run",
        "trollbufr_update = trollbufr.update:run",
    ]
else:
    scripts_with_python_version = [
        "trollbufr3 = trollbufr.bufr_main:run",
        "trollbufr3_update = trollbufr.update:run",
    ]

setup(
    name="trollbufr",
    version=version.version,
    description="Reading meteorological data format BUFR in pure Python",
    author="Alexander Maul",
    author_email="alexander.maul@dwd.de",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: GNU Lesser General Public License v3 " +
        "or later (LGPLv3+)",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Topic :: Scientific/Engineering",
    ],
    test_suite="bufr.tests.suite",
    entry_points={"console_scripts": scripts_with_python_version},
    packages=["trollbufr", "trollbufr.coder"],
    install_requires=requires,
    python_requires=">=2.6",
    zip_safe=False,
)
65 |
--------------------------------------------------------------------------------
/test/.gitignore:
--------------------------------------------------------------------------------
1 | /bufrtables/
2 |
--------------------------------------------------------------------------------
/test/metop_mhs.bufr:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pytroll/trollbufr/8979860f4fdd733b91eb1db9e1fc08b07764e3ec/test/metop_mhs.bufr
--------------------------------------------------------------------------------
/test/test_trollbufr.py:
--------------------------------------------------------------------------------
1 | """Unit tests for trollbufr package."""
2 | # trollbufr unittest
3 | #
4 | # Copyright (C) 2017-2021 trollbufr developers
5 | #
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU General Public License as published by
8 | # the Free Software Foundation, either version 3 of the License, or
9 | # (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU General Public License
17 | # along with this program. If not, see .
18 | #
19 | import os
20 | import unittest
21 |
22 | test_dir = os.path.dirname(os.path.abspath(__file__))
23 |
24 |
def test_bufr_read(monkeypatch):
    """Test reading data and data quality on Metop-A MHS BUFR file."""
    monkeypatch.setenv("BUFR_TABLES", os.path.join(test_dir, "bufrtables"))
    monkeypatch.setenv("BUFR_TABLES_TYPE", "bufrdc")
    # Import after the environment is patched, so table loading sees it.
    from trollbufr import load_file
    from trollbufr.bufr import Bufr
    test_file = os.path.join(test_dir, "metop_mhs.bufr")
    bufr = Bufr(os.environ["BUFR_TABLES_TYPE"], os.environ["BUFR_TABLES"])
    # Load test file and iterate over BUFR
    for blob, size, header in load_file.next_bufr(test_file):
        # test header for first BUFR
        assert header == "IEMX01 EUMP 150722"
        assert size == 48598
        # decode BUFR message
        bufr.decode(blob)
        # iterate over subsets
        for report in bufr.next_subset():
            i = 0
            # iterate over all descriptor/data sets
            for k, m, (v, q) in report.next_data():
                i += 1
                if i > 4:
                    # BUG FIX: was `if i >= 4: continue`, which -- combined
                    # with the `i <= 3` branch below -- skipped every
                    # iteration and left the i == 4 assertions unreachable.
                    # After the fourth descriptor/data set just count.
                    continue
                if i <= 3:
                    # type-marker for first 3 descriptor is not None
                    assert m is not None
                    continue
                # fourth entry: assert descriptor, data value, quality
                assert m is not None
                assert k == 8070
                assert v == 3
                assert q is None
                # look-up and assert name and unit
                kn, ku = bufr.get_tables().lookup_elem(k)
                assert kn.strip() == "TOVS/ATOVS PRODUCT QUALIFIER"
                assert ku.strip() == "CODE TABLE 8070"
            # assert there were 88 descriptors in the subset
            assert i == 88
            # leave for-loops, all tests are done
            break
        break
67 |
68 |
if __name__ == "__main__":
    # BUG FIX: the original called `unittest.run()`, which does not exist
    # (AttributeError); `unittest.main()` is the stdlib entry point.
    # NOTE(review): test_bufr_read is a pytest-style test (it uses the
    # `monkeypatch` fixture), so `unittest.main()` will not collect it;
    # running this module via pytest is the intended way.
    unittest.main()
71 |
--------------------------------------------------------------------------------
/trollbufr/.gitignore:
--------------------------------------------------------------------------------
1 | /__pycache__/
2 | /*.pyc
3 |
--------------------------------------------------------------------------------
/trollbufr/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | trollBUFR
4 | =========
5 | A pure pythonic reader for BUFR, the meteorological "Binary Universal Format for data Representation".
6 |
7 | """
8 |
--------------------------------------------------------------------------------
/trollbufr/bufr_main.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016,2017 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | '''
25 | trollbufr.reader
26 | ================
27 | Command-line interface, reads BUFR (with abbreviated heading line,
28 | if present) from file(s) and writes human-readable to stdout.
29 | '''
30 | from __future__ import print_function
31 | from __future__ import absolute_import
32 |
33 | import sys
34 | import os
35 | from argparse import ArgumentParser, RawDescriptionHelpFormatter
36 | from trollbufr.version import version
37 | from trollbufr.bufr import Bufr
38 | from trollbufr.coder.bufr_types import TabBType
39 | from trollbufr import load_file
40 | from trollbufr.coder import load_tables
41 |
42 | import logging
43 | logger = logging.getLogger("trollbufr")
44 |
45 |
def read_bufr_data(args):
    """Read BUFR(s), decode data section and write to file-handle.

    Depending on command argument "--array", either process the subsets in
    sequence, which is ideal for un-compressed BUFR, or process each descriptor
    per all subsets at once, which improves performance for compressed BUFR.

    :param args: parsed command-line arguments; uses out_file, in_file,
        tables_type, tables_path, bulletin, array, and sparse.
    """
    try:
        fh_out = open(args.out_file, "w")
    except (TypeError, IOError, OSError):
        # No usable output file (e.g. args.out_file is None): write to
        # stdout. (Was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt.)
        fh_out = sys.stdout

    def _print_mark(descr_entry):
        # Echo a marker entry (replication/sequence boundary); a list
        # value is joined into a single string.
        if isinstance(descr_entry.value, list):
            descr_value = "".join([str(x) for x in descr_entry.value])
        else:
            descr_value = descr_entry.value
        print("    ", descr_entry.mark, descr_value, file=fh_out)

    bufr = Bufr(args.tables_type, args.tables_path)
    for fn_in in args.in_file:
        print("FILE\t%s" % os.path.basename(fn_in), file=fh_out)
        i = 0
        for blob, size, header in load_file.next_bufr(fn_in):
            # With "--bulletin N" only the N-th bulletin per file is decoded.
            if args.bulletin is not None and i != args.bulletin:
                i += 1
                continue
            print("BUFR\t#%d (%d B)" % (i, size), file=fh_out)
            i += 1
            print("HEADER\t%s" % header, file=fh_out)
            try:
                bufr.decode_meta(blob, load_tables=False)
                tabl = bufr.load_tables()
                print("META:\n%s" % bufr.get_meta_str(), file=fh_out)
                for report in bufr.next_subset(args.array and bufr.is_compressed):
                    print("SUBSET\t#%d/%d" % report.subs_num, file=fh_out)
                    if args.sparse or (args.array and bufr.is_compressed):
                        # Sparse listing: descriptor and raw value only.
                        for descr_entry in report.next_data():
                            if descr_entry.mark is not None:
                                _print_mark(descr_entry)
                                continue
                            if descr_entry.value is None:
                                print("%06d: ///" % (descr_entry.descr), file=fh_out)
                            elif descr_entry.quality is not None:
                                print("%06d: %s (%s)" % (descr_entry.descr,
                                                         str(descr_entry.value),
                                                         descr_entry.quality), file=fh_out)
                            else:
                                print("%06d: %s" % (descr_entry.descr,
                                                    str(descr_entry.value)), file=fh_out)
                    else:
                        # Verbose listing: resolve element names, units, and
                        # code-/flag-table values via the loaded tables.
                        for descr_entry in report.next_data():
                            if descr_entry.mark is not None:
                                _print_mark(descr_entry)
                                continue
                            descr_info = tabl.lookup_elem(descr_entry.descr)
                            if descr_info.type in (TabBType.CODE, TabBType.FLAG):
                                if descr_entry.value is None:
                                    print("%06d %-40s = Missing value"
                                          % (descr_entry.descr, descr_info.name), file=fh_out)
                                else:
                                    v = tabl.lookup_codeflag(descr_entry.descr,
                                                             descr_entry.value)
                                    print("%06d %-40s = %s"
                                          % (descr_entry.descr,
                                             descr_info.name,
                                             str(v)), file=fh_out)
                            else:
                                # Suppress pseudo-units in the output.
                                if descr_info.unit in ("CCITT IA5", "Numeric"):
                                    dinf_unit = ""
                                else:
                                    dinf_unit = descr_info.unit
                                if descr_entry.value is None:
                                    print("%06d %-40s = /// %s"
                                          % (descr_entry.descr,
                                             descr_info.name, dinf_unit), file=fh_out)
                                elif descr_entry.quality is not None:
                                    print("%06d %-40s = %s %s (%s)"
                                          % (descr_entry.descr,
                                             descr_info.name,
                                             str(descr_entry.value),
                                             dinf_unit,
                                             descr_entry.quality), file=fh_out)
                                else:
                                    print("%06d %-40s = %s %s"
                                          % (descr_entry.descr,
                                             descr_info.name,
                                             str(descr_entry.value),
                                             dinf_unit), file=fh_out)
            except Exception as e:
                # Report the error for this bulletin and continue with the next.
                print("ERROR\t%s" % e, file=fh_out)
                if logger.isEnabledFor(logging.DEBUG):
                    logger.exception(e)
                else:
                    logger.warning(e)
    if fh_out is not sys.stdout:
        fh_out.close()
153 |
154 |
def read_bufr_to_json(args):
    """Read and decode BUFR, write as JSON formatted file.

    Each bulletin becomes one JSON object recording its heading, source file,
    index, decode status, error message (if any), and the decoded content.

    :param args: parsed command-line arguments (in_file, out_file, bulletin,
                 sparse, array, tables_type, tables_path).
    """
    bufr = Bufr(args.tables_type, args.tables_path)
    json_data = []
    bufr_i = -1
    for fn_in in args.in_file:
        for blob, _, header in load_file.next_bufr(fn_in):
            bufr_i += 1
            # With "--bulletin N" only the N-th bulletin is processed.
            if args.bulletin is not None and bufr_i != args.bulletin:
                continue
            json_data_item = {"heading": header,
                              "file": os.path.basename(fn_in),
                              "index": bufr_i,
                              "status": False,
                              "error": None,
                              "bufr": None,
                              }
            try:
                json_bufr = bufr.decode(blob,
                                        load_tables=True,
                                        as_array=args.array)
            except Exception as e:
                # Record the error for this bulletin; include the traceback
                # in the log only at DEBUG level.
                logger.error(e, exc_info=logger.isEnabledFor(logging.DEBUG))
                json_data_item["error"] = str(e)
            else:
                json_data_item["status"] = True
                json_data_item["bufr"] = json_bufr
            finally:
                json_data.append(json_data_item)
    import json
    try:
        fh_out = open(args.out_file, "w")
    except (TypeError, OSError):
        # No (or unusable) output file given: fall back to STDOUT.
        # NOTE: the previous "open(...) or sys.stdout" raised TypeError
        # whenever -o was omitted, since open() never returns a falsy value.
        fh_out = sys.stdout
    try:
        if args.sparse:
            json.dump(json_data, fh_out)
        else:
            json.dump(json_data, fh_out, indent=3, separators=(',', ': '))
    finally:
        # Never close the interpreter's STDOUT.
        if fh_out is not sys.stdout:
            fh_out.close()
192 |
193 |
def read_bufr_desc(args):
    """Read BUFR(s), decode meta-data and descriptor list, write to file-handle.

    :param args: parsed command-line arguments (in_file, out_file, bulletin,
                 sparse, tables_type, tables_path).
    """
    try:
        fh_out = open(args.out_file, "w")
    except (TypeError, OSError):
        # No (or unusable) output file given: fall back to STDOUT.
        # (Narrowed from a bare "except:", which also swallowed e.g.
        # KeyboardInterrupt.)
        fh_out = sys.stdout
    for fn_in in args.in_file:
        print("FILE\t%s" % os.path.basename(fn_in), file=fh_out)
        i = 0
        for blob, size, header in load_file.next_bufr(fn_in):
            # With "--bulletin N" only the N-th bulletin is processed.
            if args.bulletin is not None and i != args.bulletin:
                i += 1
                continue
            print("BUFR\t#%d (%d B)" % (i, size), file=fh_out)
            i += 1
            print("HEADER\t%s" % header, file=fh_out)
            try:
                bufr = Bufr(args.tables_type, args.tables_path)
                # Tables are only needed for the full descriptor listing.
                bufr.decode_meta(blob, load_tables=(not args.sparse))
                print("META\n%s" % bufr.get_meta_str(), file=fh_out)
                if args.sparse:
                    d = bufr.get_descr_short()
                else:
                    d = bufr.get_descr_full()
                print("DESC :\n%s" % "\n".join(d), file=fh_out)
            except Exception as e:
                # Report the error for this bulletin, then continue with the
                # next one (same pattern as read_bufr_data).
                print("ERROR\t%s" % e, file=fh_out)
                if logger.isEnabledFor(logging.DEBUG):
                    logger.exception(e)
                else:
                    logger.warning(e)
    if fh_out is not sys.stdout:
        fh_out.close()
226 |
227 |
def write_bufr(args):
    """Read JSON file(s), encode content as BUFR and write to file-handle.

    :param args: parsed command-line arguments (in_file, out_file,
                 tables_type, tables_path).
    """
    import json
    # BUFR output is binary: the STDOUT fallback must be the underlying
    # byte buffer, not the text wrapper (writing bytes to the text-mode
    # sys.stdout raises TypeError on Python 3).
    stdout_bytes = getattr(sys.stdout, "buffer", sys.stdout)
    try:
        fh_out = open(args.out_file, "wb")
    except (TypeError, OSError):
        fh_out = stdout_bytes
    multi_bul = False
    for fn_in in args.in_file:
        with open(fn_in, "r") as fh_in:
            json_data = json.load(fh_in)
        for json_data_msg in json_data:
            # Skip entries without a decoded BUFR payload.
            if "bufr" not in json_data_msg or json_data_msg["bufr"] is None:
                continue
            bufr = Bufr(tab_fmt=args.tables_type,
                        tab_path=args.tables_path)
            bin_data = bufr.encode(json_data_msg["bufr"],
                                   load_tables=True)
            if json_data_msg["heading"] is not None:
                # Separate consecutive bulletins with an empty GTS-style line.
                if multi_bul:
                    fh_out.write(b"\r\r\n\r\r\n")
                fh_out.write(("%s\r\r\n" % json_data_msg["heading"]).encode())
            fh_out.write(bin_data)
            multi_bul = True
    if fh_out is not stdout_bytes:
        fh_out.close()
254 |
255 |
def run(argv=None):
    '''Command line options.

    Parse the command line, configure logging, and dispatch to one of the
    operator functions (read_bufr_desc, read_bufr_data, read_bufr_to_json,
    write_bufr).

    :param argv: optional extra arguments; appended to sys.argv when given
        (parse_args() below always reads sys.argv).
    :return: process exit code (0 on success/interrupt, 1 on error).
    '''
    if argv is None:
        argv = sys.argv
    else:
        # NOTE(review): extends the real sys.argv rather than parsing argv
        # in isolation -- repeated calls in one process accumulate arguments.
        sys.argv.extend(argv)
    program_version = version
    try:
        # Setup argument parser
        parser = ArgumentParser(description=__import__('__main__').__doc__,
                                formatter_class=RawDescriptionHelpFormatter
                                )
        parser.add_argument('-V', '--version',
                            action='version',
                            version="TrollBUFR %s" % program_version
                            )
        parser.add_argument("-v", "--verbose", dest="verbose",
                            action="count",
                            help="set verbosity level [default: 0]"
                            )
        parser.add_argument("-s", "--sparse", dest="sparse",
                            action="store_true",
                            help="sparse output, no tables loaded"
                            )
        parser.add_argument("-a", "--array", dest="array",
                            action="store_true",
                            help="values as array (compressed BUFR only!)"
                            )
        parser.add_argument("-o", "--output", dest="out_file",
                            metavar="file",
                            help="write to file instead STDOUT"
                            )
        # Operator group: at least one of -m/-d/-j/-e is required (checked
        # manually after parsing).
        group_op = parser.add_argument_group(title="operator",
                                             description="what to do (at least one required)"
                                             )
        group_op.add_argument("-m", "--meta", dest="desc",
                              action="store_true",
                              help="print info/descriptor"
                              )
        group_op.add_argument("-d", "--decode", dest="reader",
                              action="store_true",
                              help="decode and print data"
                              )
        group_op.add_argument("-j", "--decode-json", dest="json_dump",
                              action="store_true",
                              help="decode and dump data in JSON format"
                              )
        group_op.add_argument("-e", "--encode", dest="json_encode",
                              action="store_true",
                              help="encode data from JSON file as BUFR"
                              )
        # Table settings: path defaults to $BUFR_TABLES, format to the first
        # parser registered in load_tables.
        group_tab = parser.add_argument_group(title="table setting")
        group_tab.add_argument("-t", "--tables_path",
                               default=os.getenv("BUFR_TABLES"),
                               help="path to tables, if not set in $BUFR_TABLES",
                               metavar="path"
                               )
        group_tab.add_argument("-T", "--tables_type",
                               default=load_tables.list_parser()[0],
                               choices=load_tables.list_parser(),
                               help="type of table format [%s], default: %s" % (
                                   "|".join(load_tables.list_parser()),
                                   load_tables.list_parser()[0]
                               ),
                               metavar="name"
                               )
        parser.add_argument("-b", "--bulletin", dest="bulletin",
                            default=None,
                            type=int,
                            metavar="N",
                            help="decode only bulletin #N in file (starts with '0')"
                            )
        parser.add_argument(dest="in_file",
                            help="file(s) with BUFR or JSON content",
                            metavar="file",
                            nargs='+'
                            )
        # Process arguments
        args = parser.parse_args()

        # Root-logger setup: -v -> INFO, -vv -> DEBUG with a more detailed
        # format line; default is WARN.
        handler = logging.StreamHandler()
        log_formater_line = "[%(levelname)s] %(message)s"
        if not args.verbose:
            loglevel = logging.WARN
        else:
            if args.verbose == 1:
                loglevel = logging.INFO
            elif args.verbose >= 2:
                loglevel = logging.DEBUG
                log_formater_line = "[%(levelname)s: %(module)s:%(lineno)d] %(message)s"
        handler.setFormatter(logging.Formatter(log_formater_line))
        handler.setLevel(loglevel)
        logging.getLogger('').setLevel(loglevel)
        logging.getLogger('').addHandler(handler)

        logger.debug(args)
        if args.tables_path is None:
            sys.stderr.write("No path to tables given!")
            return 1
        if not (args.desc or args.reader or args.json_dump or args.json_encode):
            sys.stderr.write("Unknown operation!")
            return 1

        # Developer switch: flip to True to profile the chosen operation.
        PROFILE = False
        if PROFILE:
            import cProfile
            import pstats
            pr = cProfile.Profile()
            pr.enable()

        # -m may be combined with one of -d/-j/-e; the latter three are
        # mutually exclusive (first match wins).
        if args.desc:
            read_bufr_desc(args)
        if args.reader:
            read_bufr_data(args)
        elif args.json_dump:
            read_bufr_to_json(args)
        elif args.json_encode:
            write_bufr(args)

        if PROFILE:
            pr.disable()
            sortby = 'cumulative'
            ps = pstats.Stats(pr, stream=sys.stderr).sort_stats(sortby)
            ps.print_stats()

    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop; not an error.
        return 0
    except Exception as e:
        # Full traceback only at DEBUG level, otherwise a single warning.
        if logger.isEnabledFor(logging.DEBUG):
            logger.exception(e)
        else:
            logger.warning(e)
        return 1
    return 0
390 |
391 |
if __name__ == "__main__":
    # Script entry point: propagate run()'s integer return code as exit status.
    sys.exit(run())
394 |
--------------------------------------------------------------------------------
/trollbufr/coder/.gitignore:
--------------------------------------------------------------------------------
1 | /__pycache__/
2 | /*.pyc
3 |
--------------------------------------------------------------------------------
/trollbufr/coder/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Modules/classes for reading a BUFR.
4 |
5 | No entry point for pybufr-API here.
6 | """
7 |
--------------------------------------------------------------------------------
/trollbufr/coder/bdata.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | """
25 | Class around the BUFR byte string.
26 |
27 | Created on Nov 17, 2016
28 |
29 | @author: amaul
30 | """
31 | from bitstring import Bits, BitStream, ConstBitStream
32 | import six
33 |
34 |
class Blob(object):
    """Wrapper around the raw BUFR byte (octet) string.

    Holds a bitstring (Const)BitStream and its bit-position pointer, with
    helpers to read/write unsigned integers, byte strings, and alignment
    padding at arbitrary bit widths.
    """

    # Underlying bitstring buffer; assigned in __init__.
    _data = None

    def __init__(self, bin_data=None, rw=False):
        """Initialising the class with an byte (octet) array or make a new, empty one.

        :param bin_data: Byte array.
        :param rw: If bin_data is set, make it read/write-able, otherwise it's read-only.
        """
        if bin_data is None:
            # New, empty, writable stream (used when encoding).
            self._data = BitStream()
        elif rw:
            self._data = BitStream(bytes=bin_data)
        else:
            self._data = ConstBitStream(bytes=bin_data)
        self.reset()

    def __str__(self):
        # "<total octets>B <octet position>/<bits consumed in octet>"
        return "%dB %d/%d" % (len(self._data) // 8, self._data.pos // 8, self._data.pos % 8)

    def __len__(self):
        """Total length in bits."""
        return len(self._data)

    def __getitem__(self, x):
        # A tuple argument is interpreted as a (start, end) bit-slice.
        if isinstance(x, tuple):
            return self._data[x[0]:x[1]]
        else:
            return self._data[x]

    def reset(self, x=0):
        """Reset internal pointer to octet position x or start."""
        self._data.pos = x * 8

    def get_bytes(self):
        """Return the whole buffer as a byte string."""
        return self._data.bytes

    def get_point(self):
        """Current pointer position in octets."""
        return self._data.pos // 8

    def set_point(self, point):
        """Set pointer to octet position `point`."""
        self._data.pos = point * 8

    def get_bitcons(self):
        """Bits consumed within the current octet."""
        return self._data.pos % 8

    def set_bitcons(self, consumed):
        """Advance pointer by `consumed` bits."""
        self._data.pos += consumed

    # Properties for octet position and bit-in-octet position.
    p = property(get_point, set_point)
    bc = property(get_bitcons, set_bitcons)

    def read(self, fmt):
        """Read a single value per bitstring format `fmt`."""
        return self._data.read(fmt)

    def readlist(self, fmt):
        """Read a list of values per bitstring format `fmt`."""
        return self._data.readlist(fmt)

    def writelist(self, fmt, json_data):
        """Append values to the buffer per bitstring format template `fmt`."""
        self._data += Bits(fmt.format(*json_data))

    def read_align(self, even=False):
        """Advance the pointer to the next octet (optionally even-octet) boundary.

        :return: number of bits skipped.
        """
        p = self._data.pos
        self._data.bytealign()
        if even and (self._data.pos // 8) & 1:
            self._data.pos += 8
        return self._data.pos - p

    def write_align(self, even=False):
        """Pad the buffer with zero-bits to an octet (optionally even-octet) boundary."""
        width = (8 - (len(self._data) % 8)) & 7
        self._data += ("uint:{}={}").format(width, 0)
        if even and (len(self._data) // 8) & 1:
            self._data += ("uint:{}={}").format(8, 0)

    def read_skip(self, width):
        """Skip width bits.

        Move internal pointer when some bits don't need processing.
        :return: Void.
        """
        self._data.pos += width

    def write_skip(self, width):
        """Skip width bits.

        Move internal pointer when some bits don't need processing.
        :return: Void.
        """
        self._data += ("uintbe:{}={}" if not width & 7 else
                       "uint:{}={}").format(width, 0)

    def read_bytes(self, width=1):
        """Read `width` octets as a byte string."""
        return self._data.read("bytes:%d" % width)

    def read_bits(self, width):
        """Read width bits from internal buffer.

        :return: character buffer, which needs further decoding.
        """
        if width & 7:
            return self._data.read("uint:%d" % width)
        else:
            # Byte-aligned widths use the big-endian byte-wise form.
            return self._data.read("uintbe:%d" % width)

    def write_bytes(self, value, width=None):
        """Append a byte string, truncated or NUL-padded to `width` bits.

        :param value: character array (String)
        :param width: the string's width in bits, not octets.
        """
        if isinstance(value, six.text_type):
            value = value.encode("latin-1")
        value_len = len(value)
        if width is None:
            width = value_len
        else:
            width //= 8
        if value_len > width:
            value = value[:width]
        elif value_len < width:
            value += b"\x00" * (width - value_len)
        self._data += Bits(bytes=value)
        return len(self._data)

    def write_uint(self, value, width):
        """Append an unsigned int of `width` bits; returns new length in bits."""
        value = int(value)
        self._data += ("uintbe:{}={}" if width % 8 == 0 else
                       "uint:{}={}").format(width, value)
        return len(self._data)

    def set_uint(self, value, width, bitpos):
        """Overwrite `width` bits at absolute bit position `bitpos` with an unsigned int.

        Fix: the byte-aligned branch previously used a hard-coded 24 bit
        length (and the condition `width // 8 == 0` mis-classified widths
        9..23); the requested width is now honoured in both branches. All
        existing callers pass width=24, for which behavior is unchanged.
        """
        if width % 8:
            # Not byte-aligned: plain unsigned int of exactly `width` bits.
            bins = Bits(uint=value, length=width)
        else:
            # Byte-aligned: big-endian byte-wise unsigned int.
            bins = Bits(uintbe=value, length=width)
        self._data[bitpos: bitpos + width] = bins
170 |
--------------------------------------------------------------------------------
/trollbufr/coder/bufr_sect.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Author(s):
7 | #
8 | # Alexander Maul
9 | #
10 | # This program is free software: you can redistribute it and/or modify
11 | # it under the terms of the GNU General Public License as published by
12 | # the Free Software Foundation, either version 3 of the License, or
13 | # (at your option) any later version.
14 | #
15 | # This program is distributed in the hope that it will be useful,
16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | # GNU General Public License for more details.
19 | #
20 | # You should have received a copy of the GNU General Public License
21 | # along with this program. If not, see .
22 | '''
23 | Functions decoding the sections 0-5 for all the meta-bin_data.
24 | No bin_data json_data from section 4 are decoded.
25 |
26 | Created on Nov 18, 2016
27 | Ported to Py3 09/2018
28 |
29 | @author: amaul
30 | '''
31 | from .errors import SUPPORTED_BUFR_EDITION, BufrEncodeError
32 | from .functions import str2dtg, dtg2str
33 |
34 | """
35 | Section 0
36 | =========
37 | 0-3 "BUFR"
38 | 4-6 Total length
39 | 7 BUFR-edition
40 | """
41 |
42 |
def decode_sect0(bin_data, offset):
    """Decode BUFR section 0 (indicator section).

    :return: offset, length, {size, edition}
    """
    magic, total_size, edition = bin_data.readlist("bytes:4, uintbe:24, uint:8")
    if magic != b"BUFR":
        # Not a BUFR start marker.
        return -1, -1, {}
    return bin_data.get_point(), 8, {"size": total_size, "edition": edition}
52 |
53 |
def encode_sect0(bin_data, edition=4):
    """Encode BUFR section 0 (indicator section).

    The total-size field is written as zero and patched later by
    encode_bufr_size().

    :return: section start offset, meta-dict
    """
    meta = {"edition": edition}
    # Workaround: write the magic separately, since in Py3 bitstring seems
    # to evaluate "bytes:" in a writelist format string incorrectly.
    bin_data.write_bytes("BUFR")
    bin_data.writelist("uintbe:24={}, uint:8={}", (0, edition))
    return 0, meta
65 |
66 |
def encode_bufr_size(bin_data):
    """Set total size of BUFR (octets) in the size field of section 0."""
    total_octets = len(bin_data) // 8
    # The size field sits at bit 32 (after the 4-octet "BUFR" magic).
    bin_data.set_uint(total_octets, 24, 32)
70 |
71 |
72 | """
73 | Section 1
74 | =========
75 | BUFR Vers. 3
76 | -----------
77 | 0-2 Section length
78 | 3 Master table
79 | 4 Sub-centre
80 | 5 Centre
81 | 6 Update sequence number (0 = original, 1.. = updates)
82 | 7 Flag: 00 = no sect2, 01 = sect2 present, 02-ff = reserved
83 | 8 Data category
84 | 9 Sub-category
85 | 10 Master table version
86 | 11 Local table version
87 | 12 Year [yy]
88 | 13 Month
89 | 14 Day
90 | 15 Hour
91 | 16 Minute
92 | 17-n Reserved
93 |
94 | BUFR Vers. 4
95 | ------------
96 | 0-2 Section length
97 | 3 Master table
98 | 4-5 Centre
99 | 6-7 Sub-centre
100 | 8 Update sequence number (0 = original, 1.. = updates)
101 | 9 Flag, 0x00 = no sect2, 0x01 = sect2 present, 0x02-0xff = reserved
102 | 10 Data-category
103 | 11 Internat. sub-category
104 | 12 Local sub-category
105 | 13 Master table version
106 | 14 Local table version
107 | 15-16 Year [yyyy]
108 | 17 Month
109 | 18 Day
110 | 19 Hour
111 | 20 Minute
112 | 21 Second
113 | (22-n Reserved)
114 | """
115 |
116 |
def decode_sect1(bin_data, offset, edition=4):
    """Decode BUFR section 1 (identification section).

    :param bin_data: Blob positioned at the start of section 1.
    :param offset: octet offset of section 1 within the BUFR.
    :param edition: BUFR edition (3 or 4); selects the field layout.
    :return: offset, length, {master, center, subcenter, update, cat, cat_int, cat_loc, mver, lver, datetime, sect2}
    """
    # Field names and bit layout differ between editions: ed.3 has 8-bit
    # centre/sub-centre ids and a 5-octet yy date, ed.4 has 16-bit ids,
    # an extra international sub-category, and a 7-octet yyyy date.
    key_offs = {
        3: (("length", "master", "subcenter", "center", "update", "sect2", "cat", "cat_loc", "mver", "lver", "datetime"),
            "uint:24, uint:8, uint:8, uint:8, uint:8, bool, pad:7, uint:8, uint:8, uint:8, uint:8, bytes:5"
            ),
        4: (("length", "master", "center", "subcenter", "update", "sect2", "cat", "cat_int", "cat_loc", "mver", "lver", "datetime"),
            "uint:24, uint:8, uint:16, uint:16, uint:8, bool, pad:7, uint:8, uint:8, uint:8, uint:8, uint:8, bytes:7"
            ),
    }
    vals = bin_data.readlist(key_offs[edition][1])
    rd = dict(list(zip(key_offs[edition][0], vals)))
    # Convert the raw date/time octets into a datetime value.
    rd["datetime"] = str2dtg(rd["datetime"], ed=edition)
    l = rd.pop("length")
    if bin_data.get_point() < offset + l:
        # Remaining octets before the section end are "local use" data;
        # keep them verbatim as a list of hex strings.
        rd["sect1_local_use"] = bin_data.readlist("hex:8," * (offset + l - bin_data.get_point()))
        if edition == 3 and rd["sect1_local_use"] == [b"00"]:
            # Ed.3 pads the section to even length; a lone NUL octet is
            # just padding, not local data.
            rd.pop("sect1_local_use")
    bin_data.reset(offset + l)
    return offset + l, l, rd
139 |
140 |
def encode_sect1(bin_data, json_data, edition=4):
    """Encode BUFR section 1 (identification section).

    :param json_data: either a dict with the keys listed in key_offs below
        (plus "datetime" and optional "sect1_local_use"), or a list/tuple
        with slots
        (master, center, subcenter, update, sect2, cat, cat_int, cat_loc,
        mver, lver, yy, mo, dy, hh, mi, ss[, local-use-list])
    :param edition: BUFR edition (3 or 4); selects the field layout.
    :return: section start offset, meta-dict
    """
    # Per-edition field order and bit-format template; the leading 24-bit
    # length field is written as 0 and patched at the end.
    key_offs = {3: (("master", "subcenter", "center",
                     "update", "sect2",
                     "cat", "cat_loc",
                     "mver", "lver"),
                    "uint:24=0, uint:8={}, uint:8={}, uint:8={},"
                    + "uint:8={}, bool={}, pad:7, "
                    + "uint:8={}, uint:8={}, "
                    + "uint:8={}, uint:8={}"
                    ),
                4: (("master", "center", "subcenter",
                     "update", "sect2",
                     "cat", "cat_int", "cat_loc",
                     "mver", "lver"),
                    "uint:24=0, uint:8={}, uint:16={}, uint:16={}, "
                    + "uint:8={}, bool={}, pad:7, "
                    + "uint:8={}, uint:8={}, uint:8={}, "
                    + "uint:8={}, uint:8={}"
                    )
                }
    loc_use = None
    if isinstance(json_data, dict):
        # Dict form: pull the values in template order.
        if "sect1_local_use" in json_data:
            loc_use = json_data.pop("sect1_local_use")
        ord_val = [json_data[k] for k in key_offs[edition][0]]
        rd = json_data
        rd["datetime"] = dtg2str(json_data["datetime"], edition)
    else:
        # Sequence form: an optional trailing list/tuple is local-use data,
        # the last six scalars are the date/time components.
        if isinstance(json_data[-1], (list, tuple)):
            loc_use = json_data[-1]
            json_data = json_data[:-1]
        ord_val = json_data
        rd = dict(list(zip(key_offs[edition][0], json_data[:-6])))
        rd["datetime"] = dtg2str(json_data[-6:], edition)
    section_start = len(bin_data)
    bin_data.writelist(key_offs[edition][1], ord_val)
    # Date/time is 5 octets (yy) in ed.3, 7 octets (yyyy + seconds) in ed.4.
    if edition == 3:
        bin_data.write_bytes(rd["datetime"], 5 * 8)
    else:
        bin_data.write_bytes(rd["datetime"], 7 * 8)
    if loc_use:
        bin_data.writelist("hex:8={}," * len(loc_use), loc_use)
    # Ed.3 requires the section padded to an even number of octets.
    (edition == 3) and bin_data.write_align(True)
    rd["length"] = (len(bin_data) - section_start) // 8
    bin_data.set_uint(rd["length"], 24, section_start)
    return section_start // 8, rd
193 |
194 |
195 | """
196 | Section 2
197 | =========
198 | 0-2 Section length
199 | 3 Reserved
200 | 4-n Local data
201 | """
202 |
203 |
def decode_sect2(bin_data, offset):
    """Decode BUFR section 2 (optional local-use section).

    :return: offset, length, {data_start, data_end, sect2_data}
    """
    length = bin_data.readlist("uint:24, pad:8")[0]
    # Local data follows the 3-octet length and 1 reserved octet;
    # keep it verbatim as a list of hex strings.
    local_data = bin_data.readlist("hex:8," * (length - 4))
    bin_data.reset(offset + length)
    return (offset + length,
            length,
            {"data_start": offset + 4,
             "data_end": offset + length,
             "sect2_data": local_data})
212 |
213 |
def encode_sect2(bin_data, json_data):
    """Encode BUFR section 2 from a list of hex-string octets.

    :return: section start offset (octets)
    """
    section_start = len(bin_data)
    # Length field (patched below) plus one reserved octet.
    bin_data.writelist("uint:24={}, pad:8", (0,))
    bin_data.writelist("hex:8={}," * len(json_data), json_data)
    section_size = (len(bin_data) - section_start) // 8
    bin_data.set_uint(section_size, 24, section_start)
    return section_start // 8
224 |
225 |
226 | """
227 | Section 3
228 | =========
229 | 0-2 Section length
230 | 3 Reserved
231 | 4-5 Number of subsets
232 | 6 Flag: &128 = other|observation, &64 = not compressed|compressed
233 | 7-n List of descriptors
234 | FXXYYY: F = 2bit, & 0xC000 ; XX = 6bit, & 0x3F00 ; YYY = 8bit, & 0xFF
235 | F=0: element/Tab.B, F=1: repetition, F=2: operator/Tab.C, F=3: sequence/Tab.D
236 | """
237 |
238 |
def decode_sect3(bin_data, offset):
    """Decode BUFR section 3 (data description section).

    Use {}["descr"] for the bin_data-iterator iter_data().

    :return: offset, length, {subsets, obs, comp, descr}
    """
    length, subsets, obs, comp = bin_data.readlist(
        "uint:24, pad:8, uint:16, bool, bool, pad:6")
    descriptors = []
    # Each descriptor FXXYYY is packed as 2+6+8 bits.
    while bin_data.get_point() < offset + length - 1:
        df, dx, dy = bin_data.readlist("uint:2, uint:6, uint:8")
        descriptors.append(int("%d%02d%03d" % (df, dx, dy)))
    bin_data.reset(offset + length)
    return (offset + length,
            length,
            {"subsets": subsets,
             "obs": obs,
             "comp": comp,
             "descr": descriptors})
256 |
257 |
def encode_sect3(bin_data, json_data, edition=4):
    """Encode BUFR section 3 (data description section).

    :param json_data: sequence with slots (subsets, obs, comp, descriptor-list)
    :return: section start offset, meta-dict
    """
    if edition not in SUPPORTED_BUFR_EDITION:
        raise BufrEncodeError()
    section_start = len(bin_data)
    # Length field (patched below), reserved octet, subset count, flags.
    bin_data.writelist("uint:24=0, pad:8, uint:16={}, bool={}, bool={}, pad:6",
                       (json_data[:3]))
    rd = {"subsets": json_data[0], "obs": json_data[1], "comp": json_data[2]}
    descr_list = []
    for descr_str in json_data[3]:
        # Split the textual descriptor FXXYYY into its 2+6+8 bit parts.
        bin_data.writelist("uint:2={}, uint:6={}, uint:8={}",
                           (int(descr_str[0:1]), int(descr_str[1:3]), int(descr_str[3:])))
        descr_list.append(int(descr_str))
    # Ed.3: pad section to even number of octets.
    bin_data.write_align(edition == 3)
    rd["descr"] = descr_list
    bin_data.set_uint((len(bin_data) - section_start) // 8, 24, section_start)
    return section_start // 8, rd
278 |
279 |
280 | """
281 | Section 4
282 | =========
283 | 0-2 Section length
284 | 3 Reserved
285 | 4-n Data
286 | """
287 |
288 |
def decode_sect4(bin_data, offset):
    """Decode the header of BUFR section 4 (data section).

    Only the length field is read; the data payload itself is left for the
    subset decoder, which uses the returned start/end offsets.

    :return: offset, length, {data_start, data_end}
    """
    length = bin_data.read("uint:24")
    section_end = offset + length
    bin_data.reset(section_end)
    return section_end, length, {"data_start": offset + 4, "data_end": section_end}
296 |
297 |
def encode_sect4(bin_data, edition=4):
    """Encode the header of BUFR section 4 (data section).

    :return: section start offset (octets)
    """
    if edition not in SUPPORTED_BUFR_EDITION:
        raise BufrEncodeError()
    section_start = len(bin_data)
    # Size field is zero for now; encode_sect4_size() patches it later.
    bin_data.writelist("uint:24={}, pad:8", (0,))
    return section_start // 8
307 |
308 |
def encode_sect4_size(bin_data, section_start, section_end):
    """Set size of the BUFR data section in the size field of section 4."""
    section_size = section_end - section_start
    # Offsets are octets; set_uint expects a bit position.
    bin_data.set_uint(section_size, 24, section_start * 8)
312 |
313 |
314 | """
315 | Section 5
316 | =========
317 | 0-3 "7777"
318 | """
319 |
320 |
def decode_sect5(bin_data, offset):
    """Decode BUFR section 5 (end section), checking the "7777" trailer.

    :return: offset, length, {}
    """
    if bin_data.read("bytes:4") != b"7777":
        # Missing end marker: signal failure to the caller.
        return -1, -1, {}
    return offset + 4, 4, {}
329 |
330 |
def encode_sect5(bin_data):
    """Append BUFR section 5 (end section, the "7777" trailer).

    :return: section start offset (octets)
    """
    section_start = len(bin_data)
    bin_data.write_bytes(b"7777")
    return section_start // 8
338 |
--------------------------------------------------------------------------------
/trollbufr/coder/bufr_types.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016-2018 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | """
25 | Collection of classes.
26 |
27 | Created on Mar 13, 2018
28 |
29 | @author: amaul
30 | """
31 | from copy import deepcopy
32 |
33 | from collections import namedtuple
# One decoded item as yielded by the subset iterator: the descriptor number,
# an optional marker string for non-data entries (see below), the decoded
# value, and an optional associated quality value.
DescrDataEntry = namedtuple("DescrDataEntry", "descr mark value quality")
"""Description of 'mark' for non-data types:
SEQ desc : Following descriptors by expansion of sequence descriptor desc.
SEQ END : End of sequence expansion.
RPL #n : Descriptor replication number #n begins.
RPL END : End of descriptor replication.
RPL NIL : Descriptor replication evaluated to zero replications.
REP #n : Descriptor and data repetition, all descriptor and data between
         this and REP END are to be repeated #n times.
REP END : End of descriptor and data repetition.
OPR desc : Operator, which read and returned data values.
BMP DEF : Use the data present bit-map to refer to the data descriptors
          which immediately precede the operator to which it relates.
          The bitmap is returned in the named tuple item 'value'.
BMP USE : Re-use the previously defined data present bit-map. Same as "BMP DEF".
LOC desc : Local descriptor skipped, its bit-width was declared by operator.
"""

# Table-B metadata for one descriptor, used when rendering textual output.
DescrInfoEntry = namedtuple("DescrInfoEntry", "name shortname unit type")
"""From table-B lookup for textual output."""

# Keys of the metadata dict assembled from BUFR sections 1 and 3.
BufrMetadataKeys = ("master",  # BUFR master version, WMO=0.
                    "center",  # Originating center.
                    "subcenter",  # Originating sub-center.
                    "update",  # Update number.
                    "sect2",  # Section 2 (local data) present.
                    "cat",  # Data category.
                    "cat_int",  # International data category.
                    "cat_loc",  # Local data category.
                    "mver",  # Master table version.
                    "lver",  # Local table version.
                    "datetime",  # Associated date/time.
                    "obs",  # Observed data.
                    "comp",  # Compression used.
                    )
69 |
70 |
class TabBType(object):
    """Types of Table-B entries.

    Integer constants identifying how a table-B element's value is to be
    decoded/encoded and rendered (e.g. code/flag tables get a lookup for
    textual output).
    """
    NUMERIC = 0
    LONG = 1
    DOUBLE = 2
    CODE = 3
    FLAG = 4
    STRING = 5
79 |
80 |
class AlterState(object):
    """Holds the current value-altering state set by operator descriptors."""

    def __init__(self):
        self.reset()

    def __str__(self):
        parts = (self.wnum, self.wchr, self.refmul, self.scale,
                 self.assoc[-1], self.ieee, self.refval)
        return ("wnum={} wchr={} refmul={} scale={} "
                "assoc={} ieee={} refval={}").format(*parts)

    def reset(self):
        """Restore all alterations to their neutral default values."""
        # Add to width, for number data fields.
        self.wnum = 0
        # Change width for string data fields.
        self.wchr = 0
        # {desc: ref}, dict with new reference values for descriptors.
        self.refval = {}
        # Multiplier, for all reference values of following descriptors (207yyy).
        self.refmul = 1
        # Add to scale, for number data fields.
        self.scale = 0
        # Add width for associated quality field. A stack, always use last value.
        self.assoc = [0]
        # 0|32|64: all numerical values encoded as IEEE floating point number.
        self.ieee = 0
107 |
108 |
class BackrefRecord(object):
    """Records descriptor/alter objects for later re-play with applied bitmaps."""

    def __init__(self):
        self.restart()

    def restart(self):
        """Clear record of descriptor/alter pairs.

        Requires appending new pairs before a bitmap can be applied.
        """
        # Recorder for back-referenced descriptors
        self._backref_record = []
        # Stack for re-play
        self._backref_stack = []
        # Index for stack; -1 means "no bitmap applied yet"
        self._stack_idx = -1
        # Recorder is active or paused
        self._recording = True

    def __str__(self):
        return "Record *{}, Stack *{}, Idx {}".format(
            len(self._backref_record),
            len(self._backref_stack),
            self._stack_idx)

    def append(self, descr, alter):
        """Append descriptor and a copy of the alter state to the record.

        A None alter is recorded as a pristine AlterState. Ignored while
        recording is paused.
        """
        if not self._recording:
            return
        if alter is None:
            alt = AlterState()
        else:
            # Deep-copy: the caller keeps mutating its alter object.
            alt = deepcopy(alter)
        self._backref_record.append((descr, alt))

    def apply(self, bitmap):
        """Apply bitmap to record, creating a stack of descriptor/alter pairs.

        The bitmap is right-aligned against the record; a 0 bit selects the
        corresponding entry ("data present") for re-play.
        """
        self._backref_stack = [self._backref_record[len(self._backref_record) - i]
                               for i in range(len(bitmap), 0, -1)
                               if bitmap[len(bitmap) - i] == 0]
        self._stack_idx = 0

    def __next__(self):
        """Return next descriptor/alter pair from stack, or None when exhausted.

        Deliberately returns None instead of raising StopIteration; the
        caller treats exhaustion as a normal condition.
        """
        # Guard both ends: after restart()/renew() the index is -1 with an
        # empty stack, where the old ">=" test let Python's negative
        # indexing raise an IndexError instead of returning None.
        if not 0 <= self._stack_idx < len(self._backref_stack):
            return None
        r = self._backref_stack[self._stack_idx]
        self._stack_idx += 1
        return r

    def next(self):
        """Py2-style alias for __next__()."""
        return self.__next__()

    def pause(self, paused=True):
        """Pause or re-activate recording."""
        self._recording = not paused

    def reset(self):
        """Reset stack index to start, for "re-use bitmap"."""
        self._stack_idx = 0
        self._recording = True

    def renew(self):
        """Clear stack (created from bitmap), for "cancel use bitmap"."""
        self._stack_idx = -1
        self._backref_stack = []
        self._recording = True
179 |
--------------------------------------------------------------------------------
/trollbufr/coder/errors.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | '''
25 | Created on Nov 4, 2016
26 |
27 | @author: amaul
28 | '''
29 |
30 | SUPPORTED_BUFR_EDITION = (3, 4)
31 | """List of supported BUFR editions."""
32 |
33 |
34 | class BufrDecodeError(Exception):
35 | '''Error class, raised if anything prevents further decoding'''
36 |
37 | def __init__(self, msg):
38 | super(BufrDecodeError).__init__(type(self))
39 | self.msg = "BufrDecodeError: %s" % msg
40 |
41 | def __str__(self):
42 | return self.msg
43 |
44 | def __unicode__(self):
45 | return self.msg
46 |
47 |
48 | class BufrDecodeWarning(Warning):
49 | '''Warning class, raised at recoverable faults'''
50 |
51 | def __init__(self, msg):
52 | super(BufrDecodeError).__init__(type(self))
53 | self.msg = "BufrDecodeWarning: %s" % msg
54 |
55 | def __str__(self):
56 | return self.msg
57 |
58 | def __unicode__(self):
59 | return self.msg
60 |
61 |
62 | class BufrEncodeError(Exception):
63 | '''Error class, raised if anything prevents further encoding'''
64 |
65 | def __init__(self, msg):
66 | super(BufrEncodeError).__init__(type(self))
67 | self.msg = "BufrEncodeError: %s" % msg
68 |
69 | def __str__(self):
70 | return self.msg
71 |
72 | def __unicode__(self):
73 | return self.msg
74 |
75 |
76 | class BufrTableWarning(Warning):
77 | '''Warning class, raised at recoverable faults'''
78 |
79 | def __init__(self, msg):
80 | super(BufrTableWarning).__init__(type(self))
81 | self.msg = "BufrTableWarning: %s" % msg
82 |
83 | def __str__(self):
84 | return self.msg
85 |
86 | def __unicode__(self):
87 | return self.msg
88 |
89 |
90 | class BufrTableError(Exception):
91 | '''Error class, raised if anything prevents further decoding'''
92 |
93 | def __init__(self, msg):
94 | super(BufrTableError).__init__(type(self))
95 | self.msg = "BufrTableError: %s" % msg
96 |
97 | def __str__(self):
98 | return self.msg
99 |
100 | def __unicode__(self):
101 | return self.msg
102 |
--------------------------------------------------------------------------------
/trollbufr/coder/functions.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016-2018 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | """
25 | Collection of functions handling bits and bytes.
26 |
27 | Created on Oct 28, 2016
28 |
29 | @author: amaul
30 | """
31 | import sys
32 | import datetime
33 | import logging
34 | import struct
35 | from .errors import BufrDecodeError, BufrEncodeError, BufrTableError
36 | from .bufr_types import AlterState, TabBType
37 |
38 | logger = logging.getLogger("trollbufr")
39 |
40 | if sys.version_info >= (3, 0):
41 | def ordx(x):
42 | return x
43 | else:
44 | def ordx(x):
45 | return ord(x)
46 |
47 |
48 | def octets2num(bin_data, offset, count):
49 | """Convert character slice of length count from bin_data (high->low) to int.
50 |
51 | Returns offset+count, the character after the converted characters,
52 | and the integer value.
53 |
54 | :return: offset,value
55 | """
56 | v = 0
57 | i = count - 1
58 | for b in bin_data[offset: offset + count]:
59 | v |= ordx(b) << 8 * i
60 | i -= 1
61 | return offset + count, v
62 |
63 |
64 | def calc_width(bin_data, tab_b_elem=None, alter=None, fix_width=None, fix_typ=None):
65 | """Read a raw value integer from the data section.
66 |
67 | The number of bits are either fixed or determined from Tab.B and previous
68 | alteration operators.
69 | Compression is taken into account.
70 |
71 | :return: raw value integer
72 | """
73 | loc_typ = tab_b_elem.typ if tab_b_elem is not None else fix_typ
74 | if fix_width is not None:
75 | loc_width = fix_width
76 | if logger.isEnabledFor(logging.DEBUG):
77 | logger.debug("OCTETS : FXW w+a:_+_ fw:%d qual:_ bc:%d #%d", # ->ord(%02X)",
78 | fix_width, bin_data.bc, bin_data.p, # ord(bin_data[bin_data.p])
79 | )
80 | elif tab_b_elem is not None and (31000 <= tab_b_elem.descr < 32000):
81 | # replication/repetition descriptor (group 31) is never altered.
82 | loc_width = tab_b_elem.width
83 | if logger.isEnabledFor(logging.DEBUG):
84 | logger.debug("OCTETS %06d: NAL w+a:_+_ fw:_ qual:_ bc:%d #%d", # ->ord(%02X)",
85 | tab_b_elem.descr, bin_data.bc, bin_data.p, # ord(bin_data[bin_data.p])
86 | )
87 | elif tab_b_elem is not None and alter is not None:
88 | if loc_typ == TabBType.STRING and alter.wchr:
89 | loc_width = alter.wchr
90 | elif loc_typ in (TabBType.DOUBLE, TabBType.LONG):
91 | if alter.ieee:
92 | loc_width = alter.ieee
93 | else:
94 | loc_width = tab_b_elem.width + alter.wnum
95 | else:
96 | loc_width = tab_b_elem.width
97 | if logger.isEnabledFor(logging.DEBUG):
98 | logger.debug("OCTETS %06d: w+a:%d%+d fw:_ qual:%d bc:%d #%d", # ->ord(%02X)",
99 | tab_b_elem.descr, tab_b_elem.width, alter.wnum, alter.assoc[-1],
100 | bin_data.bc, bin_data.p, # ord(bin_data[bin_data.p])
101 | )
102 | else:
103 | raise BufrDecodeError("Can't determine width.")
104 | return loc_width, loc_typ
105 |
106 |
107 | def get_val(bin_data, subs_num, tab_b_elem=None, alter=None, fix_width=None, fix_typ=None):
108 | loc_width, loc_typ = calc_width(bin_data, tab_b_elem, alter, fix_width, fix_typ)
109 | rval = bin_data.read_bits(loc_width)
110 | if fix_width is not None:
111 | if loc_typ == TabBType.STRING:
112 | return rval2str(rval)
113 | else:
114 | return rval
115 | else:
116 | return rval2num(tab_b_elem, alter, rval)
117 |
118 |
119 | def get_val_comp(bin_data, subs_num, tab_b_elem=None, alter=None, fix_width=None, fix_typ=None):
120 | loc_width, loc_typ = calc_width(bin_data, tab_b_elem, alter, fix_width, fix_typ)
121 | rval = cset2octets(bin_data,
122 | loc_width,
123 | subs_num,
124 | loc_typ or TabBType.LONG)
125 | if fix_width is not None:
126 | if loc_typ == TabBType.STRING:
127 | return rval2str(rval)
128 | else:
129 | return rval
130 | else:
131 | return rval2num(tab_b_elem, alter, rval)
132 |
133 |
134 | def get_val_array(bin_data, subs_num, tab_b_elem=None, alter=None, fix_width=None, fix_typ=None):
135 | loc_width, loc_typ = calc_width(bin_data, tab_b_elem, alter, fix_width, fix_typ)
136 | rval_ary = cset2array(bin_data,
137 | loc_width,
138 | subs_num[1],
139 | loc_typ or TabBType.LONG)
140 | if fix_width is None:
141 | rval_ary = [rval2num(tab_b_elem, alter, rval) for rval in rval_ary]
142 | elif loc_typ == TabBType.STRING:
143 | rval_ary = [rval2str(rval) for rval in rval_ary]
144 | return rval_ary
145 |
146 |
147 | def cset2octets(bin_data, loc_width, subs_num, btyp):
148 | """Like Blob.read_bits(), but for compressed data.
149 |
150 | :return: octets
151 | """
152 | min_val = bin_data.read_bits(loc_width)
153 | cwidth = bin_data.read_bits(6)
154 | n = None
155 | v = None
156 | if btyp == TabBType.STRING:
157 | cwidth *= 8
158 | try:
159 | if cwidth == 0 or min_val == all_one(loc_width):
160 | # All equal or all missing
161 | v = min_val
162 | else:
163 | # Data compressed
164 | bin_data.read_skip(cwidth * subs_num[0])
165 | n = bin_data.read_bits(cwidth)
166 | if n == all_one(cwidth):
167 | v = all_one(loc_width)
168 | else:
169 | v = min_val + n
170 | bin_data.read_skip(cwidth * (subs_num[1] - subs_num[0] - 1))
171 | finally:
172 | logger.debug("CSET subnum %s loc_width %d min_val %d cwidth %d cval %s rval %d",
173 | subs_num, loc_width, min_val, cwidth, n, v)
174 | return v
175 |
176 |
177 | def cset2array(bin_data, loc_width, subs_cnt, btyp):
178 | """Like Blob.read_bits(), but for compressed data.
179 |
180 | :return: octets
181 | """
182 | min_val = bin_data.read_bits(loc_width)
183 | cwidth = bin_data.read_bits(6)
184 | single_val = None
185 | val_ary = [None] * subs_cnt
186 | if btyp == TabBType.STRING:
187 | cwidth *= 8
188 | try:
189 | if cwidth == 0 or min_val == all_one(loc_width):
190 | # All equal or all missing
191 | val_ary = [min_val] * subs_cnt
192 | else:
193 | # Data compressed
194 | for i in range(subs_cnt):
195 | single_val = bin_data.read_bits(cwidth)
196 | if single_val == all_one(cwidth):
197 | val_ary[i] = all_one(loc_width)
198 | else:
199 | val_ary[i] = min_val + single_val
200 | finally:
201 | logger.debug("CSET subnum %s loc_width %d min_val %d cwidth %d cval %s rval %s",
202 | subs_cnt, loc_width, min_val, cwidth, single_val, val_ary)
203 | return val_ary
204 |
205 |
206 | def rval2str(rval):
207 | """Each byte of the integer rval is taken as a character,
208 | they are joined into a string.
209 | """
210 | octets = []
211 | while rval:
212 | if rval & 0xFF >= 0x20:
213 | octets.append(chr(rval & 0xFF))
214 | rval >>= 8
215 | octets.reverse()
216 | val = "".join(octets)
217 | return val
218 |
219 |
220 | _IEEE_INF = {32: ("f", 0x7f7fffff), 64: ("d", 0x7fefffffffffffff)}
221 | """The "missing-value" bit-masks for IEEE float/double."""
222 |
223 |
224 | def rval2num(tab_b_elem, alter, rval):
225 | """Return bit-sequence rval as a value.
226 |
227 | Return the numeric value for all bits in rval decoded with descriptor descr,
228 | or type str if tab_b_elem describes a string.
229 | If the value was interpreted as "missing", None is returned.
230 |
231 | type(value):
232 | * numeric: int, float
233 | * codetable/flags: int
234 | * IA5 characters: string
235 |
236 | :return: value
237 |
238 | :raise: KeyError if descr is not in table.
239 | """
240 | # Default return value is "missing value"
241 | val = None
242 |
243 | # Alter = {'wnum':0, 'wchr':0, 'refval':0, 'scale':0, 'assoc':0}
244 | if tab_b_elem.typ == TabBType.CODE or tab_b_elem.typ == TabBType.FLAG:
245 | loc_width = tab_b_elem.width
246 | loc_refval = tab_b_elem.refval
247 | loc_scale = tab_b_elem.scale
248 | elif tab_b_elem.typ == TabBType.STRING:
249 | loc_width = alter.wchr or tab_b_elem.width
250 | loc_refval = tab_b_elem.refval
251 | loc_scale = tab_b_elem.scale
252 | else:
253 | loc_width = tab_b_elem.width + alter.wnum
254 | loc_refval = alter.refval.get(tab_b_elem.descr, tab_b_elem.refval * alter.refmul)
255 | loc_scale = tab_b_elem.scale + alter.scale
256 | if (rval == all_one(loc_width)
257 | and (tab_b_elem.descr < 31000 or tab_b_elem.descr >= 31020)):
258 | # First, test if all bits are set, which usually means "missing value".
259 | # The delayed replication and repetition descr are special nut-cases.
260 | logger.debug("rval %d ==_(1<<%d)%d #%06d/%d", rval, loc_width,
261 | all_one(loc_width), tab_b_elem.descr,
262 | tab_b_elem.descr // 1000)
263 | val = None
264 | elif alter.ieee and (tab_b_elem.typ == TabBType.DOUBLE
265 | or tab_b_elem.typ == TabBType.LONG):
266 | # IEEE 32b or 64b floating point number, INF means "missing value".
267 | if alter.ieee not in _IEEE_INF:
268 | raise BufrDecodeError("Invalid IEEE size %d" % alter.ieee)
269 | if not rval ^ _IEEE_INF[alter.ieee][1]:
270 | val = struct.unpack(_IEEE_INF[alter.ieee][0], rval)
271 | else:
272 | val = None
273 | elif tab_b_elem.typ == TabBType.DOUBLE or loc_scale > 0:
274 | # Float/double: add reference, divide by scale
275 | val = float(rval + loc_refval) / 10 ** loc_scale
276 | elif tab_b_elem.typ == TabBType.LONG:
277 | # Integer: add reference, divide by scale
278 | val = int((rval + loc_refval) / 10 ** loc_scale)
279 | elif tab_b_elem.typ == TabBType.STRING:
280 | val = rval2str(rval)
281 | else:
282 | val = rval
283 |
284 | logger.debug("EVAL-RV %06d: typ:%s width:%d ref:%d scal:%d%+d val:(%d)->(%s)",
285 | tab_b_elem.descr, tab_b_elem.typ, loc_width, loc_refval,
286 | tab_b_elem.scale, alter.scale, rval, str(val))
287 |
288 | return val
289 |
290 |
291 | def num2rval(tab_b_elem, alter, value):
292 | """Create the bit-sequence for a value.
293 |
294 | Encode a numeric value for with descriptor descr, apply altering if provided.
295 | If the value is "missing", it's encoded as "all bits are set to 1".
296 |
297 | :return: raw value
298 |
299 | :raise: KeyError if descr is not in table.
300 | """
301 | if alter is None:
302 | # If alter is None, we make a new, empty object for default values.
303 | alter = AlterState()
304 | # Alter = {'wnum':0, 'wchr':0, 'refval':0, 'scale':0, 'assoc':0}
305 | if tab_b_elem.typ == TabBType.CODE or tab_b_elem.typ == TabBType.FLAG:
306 | loc_width = tab_b_elem.width
307 | loc_refval = tab_b_elem.refval
308 | loc_scale = tab_b_elem.scale
309 | elif tab_b_elem.typ == TabBType.STRING:
310 | loc_width = alter.wchr or tab_b_elem.width
311 | loc_refval = tab_b_elem.refval
312 | loc_scale = tab_b_elem.scale
313 | value = value.encode("latin1") if value is not None else value
314 | else:
315 | loc_width = tab_b_elem.width + alter.wnum
316 | loc_refval = alter.refval.get(tab_b_elem.descr, tab_b_elem.refval * alter.refmul)
317 | loc_scale = tab_b_elem.scale + alter.scale
318 | if value is None and (tab_b_elem.descr < 31000 or tab_b_elem.descr >= 31020):
319 | # First, for "missing value" set all bits to 1.
320 | # The delayed replication and repetition descr are special cases.
321 | if tab_b_elem.typ == TabBType.STRING:
322 | rval = b"\xff" * (loc_width // 8)
323 | else:
324 | rval = all_one(loc_width)
325 | elif alter.ieee and (tab_b_elem.typ == TabBType.DOUBLE or tab_b_elem.typ == TabBType.LONG):
326 | # IEEE 32b or 64b floating point number, INF means "missing value".
327 | if alter.ieee not in _IEEE_INF:
328 | raise BufrEncodeError("Invalid IEEE size %d" % alter.ieee)
329 | fmt = _IEEE_INF[alter.ieee][0]
330 | if value is None:
331 | value = _IEEE_INF[alter.ieee][1]
332 | rval = struct.pack(fmt, value)
333 | elif tab_b_elem.typ == TabBType.LONG or tab_b_elem.typ == TabBType.DOUBLE or loc_scale > 0:
334 | # Float/double/integer: add reference, divide by scale
335 | rval = int(round((value * 10 ** loc_scale) - loc_refval))
336 | else:
337 | rval = value
338 |
339 | logger.debug("EVAL-N %06d: typ:%s width:%d>%d ref:%d scal:%d%+d val:(%s)->(%s)",
340 | tab_b_elem.descr, tab_b_elem.typ, tab_b_elem.width, loc_width,
341 | loc_refval, tab_b_elem.scale, alter.scale, value, str(rval))
342 |
343 | return rval, loc_width
344 |
345 |
346 | def num2cval(tab_b_elem, alter, fix_width, value_list):
347 | """Process and compress a list of values and apply num2rval() to each.
348 |
349 | Returns the
350 | * uncompressed bit-width according the descriptor,
351 | * minimum value,
352 | * bit-width required for "value - minimum",
353 | * list of re-calculated values.
354 |
355 | :return: loc_width, min_value, min_width, recal_val
356 | """
357 | rval_list = []
358 | value_list_sansnone = [x for x in value_list if x is not None]
359 | logger.debug("value_list:\n%s\nvalue_list_sansnone:\n%s\n", value_list, value_list_sansnone)
360 | if (not any(True for x in value_list if x is not None)
361 | or (max(value_list_sansnone) == min(value_list_sansnone)
362 | and len(value_list) == len(value_list_sansnone)
363 | )
364 | ):
365 | # All values are "missing", or all are equal
366 | if tab_b_elem and alter:
367 | min_value, loc_width = num2rval(tab_b_elem, alter, value_list[0])
368 | elif tab_b_elem and alter is None and fix_width is None:
369 | min_value, loc_width = value_list[0], tab_b_elem.width
370 | else:
371 | min_value, loc_width = value_list[0], fix_width
372 | min_width = 0
373 | recal_val = []
374 | recal_max_val = 0
375 | elif tab_b_elem is not None and tab_b_elem.typ == TabBType.STRING:
376 | for v in value_list:
377 | rval_list.extend(num2rval(tab_b_elem, alter, v))
378 | min_width = loc_width = rval_list[1]
379 | min_value = ""
380 | recal_max_val = -1
381 | recal_val = [(v if v != all_one(loc_width) else None)
382 | for v in rval_list[::2]]
383 | recal_val = [(v if v is not None else all_one(min_width))
384 | for v in recal_val]
385 | else:
386 | if tab_b_elem and alter:
387 | for v in value_list:
388 | rval_list.extend(num2rval(tab_b_elem, alter, v))
389 | else:
390 | for v in value_list:
391 | rval_list.extend((v, fix_width))
392 | loc_width = rval_list[1]
393 | min_value = min(x for x in rval_list[::2] if x is not None)
394 | min_width = 0
395 | recal_val = [(v - min_value if v != all_one(loc_width) else None)
396 | for v in rval_list[::2]]
397 | recal_max_val = max(x for x in recal_val if x is not None)
398 | min_width = recal_max_val.bit_length()
399 | if recal_max_val == all_one(min_width):
400 | min_width += 1
401 | recal_val = [(v if v is not None else all_one(min_width))
402 | for v in recal_val]
403 |
404 | logger.debug("lw:%s mval:%s mwi:%s max:%s recal_vaL:%s", loc_width,
405 | min_value, min_width, recal_max_val, recal_val)
406 |
407 | return loc_width, min_value, min_width, recal_val
408 |
409 |
410 | def add_val(blob, value_list, value_list_idx, tab_b_elem=None, alter=None, fix_width=None, fix_typ=None):
411 | """Append a value to the BUFR bitstream.
412 |
413 | Exactly one number or string is transformed with num2rval(), this is for
414 | not-compressed BUFR.
415 |
416 | :param blob: bitstream object.
417 | :param value_list: single value or value list, with latter index is required.
418 | :param value_list_idx: index to value_list, ignored if value_list is single value.
419 | :param tab_b_elem: descriptor
420 | :param alter: alteration object
421 | :param fix_width: fix bit-width, if descriptor is not applicable.
422 | :param fix_typ: fix type, if descriptor is not applicable.
423 | """
424 | if isinstance(value_list, (list, tuple)):
425 | val_buf = value_list[value_list_idx]
426 | else:
427 | # Take value_list as a simple value (e.g.: int), ignore value_list_idx.
428 | val_buf = value_list
429 | if fix_width is not None:
430 | loc_width = fix_width
431 | loc_value = val_buf
432 | elif tab_b_elem is not None and (31000 <= tab_b_elem.descr < 32000):
433 | # replication/repetition descriptor (group 31) is never altered.
434 | loc_width = tab_b_elem.width
435 | loc_value = val_buf
436 | elif tab_b_elem is not None and alter is not None:
437 | loc_value, loc_width = num2rval(tab_b_elem, alter, val_buf)
438 | else:
439 | raise BufrEncodeError("Can't determine width.")
440 | if loc_value is None:
441 | loc_value = all_one(loc_width)
442 | if (tab_b_elem is not None and tab_b_elem.typ == TabBType.STRING) or fix_typ == TabBType.STRING:
443 | blob.write_bytes(loc_value, loc_width)
444 | else:
445 | blob.write_uint(loc_value, loc_width)
446 |
447 |
448 | def add_val_comp(blob, value_list, value_list_idx, tab_b_elem=None, alter=None, fix_width=None, fix_typ=None):
449 | """Append a set of values to a compressed BUFR bitstream.
450 |
451 | value_list_idx serves as index to value_list, or as multiplicator for a single value.
452 |
453 | :param blob: bitstream object.
454 | :param value_list: value list or single value, with latter index is required.
455 | :param value_list_idx: index to value_list, or number of subsets if value_list is single value.
456 | :param tab_b_elem: descriptor
457 | :param alter: alteration object
458 | :param fix_width: fix bit-width, if descriptor is not applicable.
459 | :param fix_typ: fix type, if descriptor is not applicable.
460 | """
461 | if tab_b_elem is None and fix_width is None:
462 | raise BufrEncodeError("Can't determine width.")
463 | val_l = mk_value_list(value_list, value_list_idx)
464 | if tab_b_elem is not None and (31000 <= tab_b_elem.descr < 32000):
465 | # Replication/repetition descriptor (group 31) is never altered.
466 | alter = None
467 | loc_width, min_value, min_width, recal_val = num2cval(tab_b_elem, alter, fix_width, val_l)
468 | if (tab_b_elem is not None and tab_b_elem.typ == TabBType.STRING) or fix_typ == TabBType.STRING:
469 | # Special handling for strings.
470 | blob.write_bytes(min_value, loc_width)
471 | blob.write_uint(min_width // 8, 6)
472 | for value in recal_val:
473 | blob.write_bytes(value, min_width)
474 | else:
475 | blob.write_uint(min_value, loc_width)
476 | blob.write_uint(min_width, 6)
477 | for value in recal_val:
478 | blob.write_uint(value, min_width)
479 |
480 |
481 | def mk_value_list(value_list, value_list_idx):
482 | """Make a list of values from all subsets."""
483 | if isinstance(value_list, (list, tuple)):
484 | # Build a list of this value from all subsets.
485 | try:
486 | val_l = [x[value_list_idx] for x in value_list]
487 | except Exception as e:
488 | logger.error("%d # %s", value_list_idx, value_list)
489 | raise e
490 | else:
491 | # If value_list is not a list but a simple value (e.g.: int),
492 | # take value_list_idx as the numer of subsets and multiply them.
493 | # --> Same value for all subsets.
494 | val_l = [value_list] * value_list_idx
495 | return val_l
496 |
497 |
498 | def all_one(x):
499 | """Set all bits of width x to '1'."""
500 | return (1 << x) - 1
501 |
502 |
503 | def b2s(n):
504 | """Builds a string with characters 0 and 1, representing the bits of an integer n."""
505 | a = 2 if n // 256 else 1
506 | m = 1 << 8 * a - 1
507 | return "".join([('1' if n & (m >> i) else '0') for i in range(0, 8 * a)])
508 |
509 |
510 | def str2dtg(octets, ed=4):
511 | """Interpret octet sequence as datetime object.
512 |
513 | Ed.3: year [yy], month, day, hour, minute
514 | Ed.4: year [yyyy], month, day, hour, minute, second
515 | """
516 | if ed == 3:
517 | o, yy = octets2num(octets, 0, 1)
518 | elif ed == 4:
519 | o, yy = octets2num(octets, 0, 2)
520 | o, mo = octets2num(octets, o, 1)
521 | o, dy = octets2num(octets, o, 1)
522 | o, hr = octets2num(octets, o, 1)
523 | o, mi = octets2num(octets, o, 1)
524 | if ed == 3:
525 | if yy > 50:
526 | yy += 1900
527 | else:
528 | yy += 2000
529 | sc = 0
530 | elif ed == 4:
531 | o, sc = octets2num(octets, o, 1)
532 | return datetime.datetime(yy, mo, dy, hr, mi, sc)
533 |
534 |
535 | def dtg2str(date_time, ed=4):
536 | """Convert datetime list to string.
537 |
538 | :param date_time: date/time list (y, m, d, H, M, S)
539 | :return: a string of octets according:
540 | BUFR Ed.3: year [yy], month, day, hour, minute
541 | BUFR Ed.4: year [yyyy], month, day, hour, minute, second
542 | """
543 | octets = []
544 | yy = date_time[0]
545 | if ed == 3:
546 | octets.append(chr((yy % 100) & 0xFF))
547 | elif ed == 4:
548 | octets.append(chr((yy >> 8) & 0xFF))
549 | octets.append(chr(yy & 0xFF))
550 | for i in date_time[1:5]:
551 | octets.append(chr(i & 0xFF))
552 | if ed == 4:
553 | octets.append(chr(date_time[5] & 0xFF))
554 | return "".join(octets)
555 |
556 |
557 | def descr_is_nil(desc):
558 | """True if descriptor is null."""
559 | return desc == 0
560 |
561 |
562 | def descr_is_data(desc):
563 | """True if descriptor is Tab-B bin_data descriptor."""
564 | return 0 < desc < 100000
565 |
566 |
567 | def descr_is_loop(desc):
568 | """True if descriptor is replication/repetition."""
569 | return 100000 <= desc < 200000
570 |
571 |
572 | def descr_is_oper(desc):
573 | """True if descriptor is operator."""
574 | return 200000 <= desc < 300000
575 |
576 |
577 | def descr_is_seq(desc):
578 | """True if descriptor is sequence."""
579 | return 300000 <= desc < 400000
580 |
581 | # Yet not existent.
582 | # def descr_is_dseq(desc):
583 | # """True if descriptor is delayed sequence (Ed.5)."""
584 | # return 400000 <= desc < 500000
585 |
586 |
587 | def get_descr_list(tables, desc3):
588 | """List all expanded descriptors.
589 |
590 | :param tables: Table-set.
591 | :param desc3: list of descriptors to expand.
592 | :return: desc_list, has_backref
593 | """
594 | desc_list = []
595 | stack = [(desc3, 0)]
596 | try:
597 | while stack:
598 | dl, di = stack.pop()
599 | while di < len(dl):
600 | if descr_is_nil(dl[di]):
601 | di += 1
602 | elif descr_is_data(dl[di]) or descr_is_loop(dl[di]) or descr_is_oper(dl[di]):
603 | desc_list.append(dl[di])
604 | di += 1
605 | elif descr_is_seq(dl[di]):
606 | desc_list.append(dl[di])
607 | stack.append((dl, di + 1))
608 | dl = tables.tab_d[dl[di]]
609 | di = 0
610 | except KeyError as e:
611 | raise BufrTableError("Unknown descriptor: {}".format(e))
612 | has_backref = any(True
613 | for d in desc_list
614 | if 222000 <= d < 240000)
615 | return desc_list, has_backref
616 |
--------------------------------------------------------------------------------
/trollbufr/coder/load_tables.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | '''
25 | Created on Sep 15, 2016
26 |
27 | @author: amaul
28 | '''
29 | import logging
30 | import os
31 | from importlib import import_module
32 | from .errors import BufrTableError
33 | from .tables import Tables
34 |
35 | logger = logging.getLogger("trollbufr")
36 |
37 | MODULE_PATTERN = "trollbufr.coder.parse_%s"
38 |
39 | BUFR_TABLES_DEFAULT = "%s/.local/share/trollbufr" % (os.getenv('HOME'))
40 |
41 | _text_tab_loaded = "Table loaded: '%s'"
42 |
43 |
44 | class TableCache(object):
45 |
46 | _CACHE_MAX_SIZE = 10
47 |
48 | def __init__(self, base_path, tabf="eccodes"):
49 | self._base_path = base_path
50 | self._tabf = tabf
51 | self._cache = []
52 |
53 | def __str__(self):
54 | kl = (k for k, _ in self._cache)
55 | return ", ".join("-".join(str(x) for x in k) for k in kl)
56 |
57 | def load(self, master, center, subcenter, master_vers, local_vers):
58 | key = (master, center, subcenter, master_vers, local_vers)
59 | for ckey, tables in self._cache:
60 | if ckey == key:
61 | logger.debug("Tables from cache: %s", "-".join(str(x) for x in key))
62 | break
63 | else:
64 | tables = load_all(master, center, subcenter, master_vers, local_vers, self._base_path, self._tabf)
65 | self._cache.append((key, tables))
66 | if len(self._cache) > TableCache._CACHE_MAX_SIZE:
67 | self._cache = self._cache[1:]
68 | return tables
69 |
70 |
71 | def list_parser():
72 | return ["eccodes", "libdwd", "bufrdc"]
73 |
74 |
75 | def load_differ(tables, master, center, subcenter, master_vers, local_vers, base_path, tabf="eccodes"):
76 | """Load tables, if the versions differ from those already loaded."""
77 | if tables is None or tables.differs(master, master_vers, local_vers, center, subcenter):
78 | tables = load_all(master, center, subcenter, master_vers, local_vers, base_path, tabf)
79 | else:
80 | logger.debug("Table loading not neccessary")
81 | return tables
82 |
83 |
84 | def load_all(master, center, subcenter, master_vers, local_vers, base_path, tabf="eccodes"):
85 | """Load all given versions of tables"""
86 | try:
87 | tparse = import_module(MODULE_PATTERN % tabf)
88 | except:
89 | raise BufrTableError("Unknown table parser '%s'!" % tabf)
90 | if base_path is None:
91 | base_path = BUFR_TABLES_DEFAULT
92 | tables = Tables(master, master_vers, local_vers, center, subcenter)
93 | #
94 | # Table A (centres)
95 | try:
96 | mp, _ = tparse.get_file("A", base_path, master, center, subcenter, master_vers, local_vers)
97 | tparse.load_tab_a(tables, mp)
98 | logger.debug(_text_tab_loaded, mp)
99 | except Exception as e:
100 | logger.warning(e)
101 | #
102 | # Table B (elements)
103 | try:
104 | mp, lp = tparse.get_file("B", base_path, master, center, subcenter, master_vers, local_vers)
105 | # International (master) table
106 | tparse.load_tab_b(tables, mp)
107 | logger.debug(_text_tab_loaded, mp)
108 | # Local table
109 | if local_vers:
110 | tparse.load_tab_b(tables, lp)
111 | logger.debug(_text_tab_loaded, lp)
112 | except Exception as e:
113 | logger.error(e)
114 | raise e
115 | #
116 | # Table C (operators)
117 | try:
118 | mp, _ = tparse.get_file("C", base_path, master, center, subcenter, master_vers, local_vers)
119 | tparse.load_tab_c(tables, mp)
120 | logger.debug(_text_tab_loaded, mp)
121 | except Exception as e:
122 | logger.warning(e)
123 | #
124 | # Table D (sequences)
125 | try:
126 | mp, lp = tparse.get_file("D", base_path, master, center, subcenter, master_vers, local_vers)
127 | # International (master) table
128 | tparse.load_tab_d(tables, mp)
129 | logger.debug(_text_tab_loaded, mp)
130 | # Local table
131 | if local_vers:
132 | tparse.load_tab_d(tables, lp)
133 | logger.debug(_text_tab_loaded, lp)
134 | except Exception as e:
135 | logger.error(e)
136 | raise e
137 | #
138 | # Table CF (code/flags)
139 | try:
140 | mp, lp = tparse.get_file("CF", base_path, master, center, subcenter, master_vers, local_vers)
141 | # International (master) table
142 | tparse.load_tab_cf(tables, mp)
143 | logger.debug(_text_tab_loaded, mp)
144 | # Local table
145 | if local_vers:
146 | tparse.load_tab_cf(tables, lp)
147 | logger.debug(_text_tab_loaded, lp)
148 | except Exception as er:
149 | logger.warning(er)
150 |
151 | return tables
152 |
--------------------------------------------------------------------------------
/trollbufr/coder/operator.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016-2018 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | """
25 | Functions implementing the set of operator descriptors.
26 |
27 | Created on Mar 31, 2017
28 |
29 | @author: amaul
30 | """
31 | from . import functions as fun
32 | from .errors import BufrDecodeError, BufrEncodeError
33 | from .bufr_types import DescrDataEntry, TabBType
34 | import logging
35 |
36 | logger = logging.getLogger("trollbufr")
37 |
38 |
def eval_oper(subset, descr):
    """Evaluate operator, read octets from data section if necessary.

    Delegates to the read-side handler selected by the XX part of the
    operator descriptor 2XXYYY.

    :return: di, None|DescrDataEntry(desc,mark,value,qual)
    :raises BufrDecodeError: for an operator not in the dispatch table.
    """
    # Dispatch table: XX part of 2XXYYY -> read-side handler.
    dispatch = {
        1: fun_01,  # Change data width
        2: fun_02,  # Change scale
        3: fun_03_r,  # Set of new reference values
        4: fun_04,  # Add associated field, shall be followed by 031021
        5: fun_05_r,  # Signify with characters, plain language text as returned value
        6: fun_06_r,  # Length of local descriptor
        7: fun_07,  # Change scale, reference, width
        8: fun_08,  # Change data width for characters
        9: fun_09,  # IEEE floating point representation
        21: fun_21,  # Data not present
        22: fun_22_r,  # Quality Assessment Information
        23: fun_fail,  # Substituted values operator / Substituted values marker
        24: fun_24_r,  # First-order statistical values follow / marker operator
        25: fun_25_r,  # Difference statistical values follow / marker operator
        32: fun_fail,  # Replaced/retained values follow / marker operator
        35: fun_35,  # Cancel backward data reference
        36: fun_36_r,  # Define data present bit-map
        37: fun_37_r,  # Use data present bit-map / Cancel use data present bit-map
        41: fun_fail,  # Define event / Cancel event
        42: fun_fail,  # Define conditioning event / Cancel conditioning event
        43: fun_fail,  # Categorial forecast values follow / Cancel categorial forecast
    }
    logger.debug("OP %d", descr)
    # Extract the XX part from descriptor 2XXYYY.
    xx = descr // 1000 - 200
    if xx not in dispatch:
        raise BufrDecodeError("Operator %06d unknown." % descr)
    return dispatch[xx](subset, descr)
74 |
75 |
def prep_oper(subset, descr):
    """Evaluate operator, write octets to data section if necessary.

    Delegates to the write-side handler selected by the XX part of the
    operator descriptor 2XXYYY.

    :return: di, None|DescrDataEntry, vi
    :raises BufrEncodeError: for an operator not in the dispatch table.
    """
    # Dispatch table: XX part of 2XXYYY -> write-side handler.
    dispatch = {
        1: fun_01,  # Change data width
        2: fun_02,  # Change scale
        3: fun_03_w,  # Set of new reference values
        4: fun_04,  # Add associated field, shall be followed by 031021
        5: fun_05_w,  # Signify with characters, plain language text as returned value
        6: fun_fail,  # Length of local descriptor
        7: fun_07,  # Change scale, reference, width
        8: fun_08,  # Change data width for characters
        9: fun_09,  # IEEE floating point representation
        21: fun_21,  # Data not present
        22: fun_noop,  # Quality Assessment Information
        23: fun_fail,  # Substituted values operator / Substituted values marker
        24: fun_24_w,  # First-order statistical values follow / marker operator
        25: fun_25_w,  # Difference statistical values follow / marker operator
        32: fun_fail,  # Replaced/retained values follow / marker operator
        35: fun_35,  # Cancel backward data reference
        36: fun_36_w,  # Define data present bit-map
        37: fun_37_w,  # Use data present bit-map / Cancel use data present bit-map
        41: fun_fail,  # Define event / Cancel event
        42: fun_fail,  # Define conditioning event / Cancel conditioning event
        43: fun_fail,  # Categorial forecast values follow / Cancel categorial forecast
    }
    logger.debug("OP %d", descr)
    # Extract the XX part from descriptor 2XXYYY.
    xx = descr // 1000 - 200
    if xx not in dispatch:
        raise BufrEncodeError("Operator %06d unknown." % descr)
    return dispatch[xx](subset, descr)
111 |
112 |
113 | '''
114 | Template for future operator functions.
115 | The subset object is passed to them because they might need access to the
116 | subset's private attributes and methods.
117 |
118 | def funXY(subset, dl, di, de):
119 | """"""
120 | an = descr % 1000
121 | if an==0:
122 | """Define/use/follows"""
123 | pass
124 | elif an==255:
125 | """Cancel"""
126 | pass
127 | return di,None
128 | '''
129 |
130 |
def fun_01(subset, descr):
    """Operator 201YYY: change data width.

    YYY == 0 cancels the alteration; otherwise the width delta is YYY - 128.
    """
    yyy = descr % 1000
    if yyy:
        subset._alter.wnum = yyy - 128
    else:
        subset._alter.wnum = 0
    return None
136 |
137 |
def fun_02(subset, descr):
    """Operator 202YYY: change scale.

    YYY == 0 cancels the alteration; otherwise the scale delta is YYY - 128.
    """
    yyy = descr % 1000
    if yyy:
        subset._alter.scale = yyy - 128
    else:
        subset._alter.scale = 0
    return None
143 |
144 |
def fun_03_r(subset, descr):
    """Operator 203YYY: new reference values (decode side).

    YYY == 0 drops all redefined reference values; any other YYY reads a
    set of new reference values from the data section.
    """
    if descr % 1000 == 0:
        subset._alter.refval = {}
    else:
        subset._read_refval()
    logger.debug("OP refval -> %s" % subset._alter.refval)
    return None
154 |
155 |
def fun_03_w(subset, descr):
    """Operator 203YYY: new reference values (encode side).

    YYY == 0 drops all redefined reference values; any other YYY writes a
    set of new reference values to the data section.
    """
    if descr % 1000 == 0:
        subset._alter.refval = {}
    else:
        subset._write_refval()
    logger.debug("OP refval -> %s" % subset._alter.refval)
    return None
165 |
166 |
def fun_04(subset, descr):
    """Operator 204YYY: add associated field (shall be followed by 031021).

    Maintains a stack of associated-field widths; the width added last is
    in effect.  YYY == 0 pops the stack (a [0] sentinel is kept when it
    would become empty); any other YYY pushes current-width + YYY.
    """
    yyy = descr % 1000
    if yyy:
        subset._alter.assoc.append(subset._alter.assoc[-1] + yyy)
    else:
        subset._alter.assoc.pop()
        if not subset._alter.assoc:
            # Keep the sentinel so assoc[-1] stays valid.
            subset._alter.assoc = [0]
    return None
178 |
179 |
def fun_05_r(subset, descr):
    """Operator 205YYY: read YYY characters as plain-language text."""
    nchar = descr % 1000
    text = subset.get_val(subset._blob,
                          subset.subs_num,
                          fix_width=nchar * 8,
                          fix_typ=TabBType.STRING)
    logger.debug("OP text -> '%s'", text)
    # Special entry for plain characters: no mark, text is the value.
    return DescrDataEntry(descr, None, text, None)
191 |
192 |
def fun_05_w(subset, descr):
    """Operator 205YYY: write YYY characters of plain-language text."""
    nchar = descr % 1000
    logger.debug("OP text %d B -> '%s'", nchar, subset._vl[subset._vi])
    # Write the current value as a fixed-width character string.
    subset.add_val(subset._blob,
                   subset._vl,
                   subset._vi,
                   fix_width=nchar * 8,
                   fix_typ=TabBType.STRING)
    return None
203 |
204 |
def fun_06_r(subset, descr):
    """Operator 206YYY: length of local descriptor.

    Reads the YYY data bits belonging to the (unknown) local descriptor
    that follows, then skips that descriptor.
    """
    width = descr % 1000
    skipped = subset.get_val(subset._blob,
                             subset.subs_num,
                             fix_width=width)
    # Advance the descriptor index onto the announced local descriptor.
    subset._di += 1
    logger.debug("OP skip local desc '%s', %d bit = %d",
                 subset._dl[subset._di], width, skipped)
    return DescrDataEntry(descr,
                          "LOC %06d" % (subset._dl[subset._di]),
                          skipped,
                          None)
219 |
220 |
def fun_07(subset, descr):
    """Operator 207YYY: increase scale, reference value, and data width.

    YYY == 0 cancels all three alterations.  Otherwise: scale += YYY,
    reference values are multiplied by 10**YYY, and the data width grows
    by ((10 * YYY) + 2) // 3 bits.
    """
    yyy = descr % 1000
    if yyy == 0:
        # Cancel the alterations.
        subset._alter.scale = 0
        subset._alter.refmul = 1
        subset._alter.wnum = 0
    else:
        subset._alter.scale = yyy
        # BUG FIX: "10 ^ yyy" was bitwise XOR; the reference-value
        # multiplier must be the power of ten.
        subset._alter.refmul = 10 ** yyy
        subset._alter.wnum = ((10 * yyy) + 2) // 3
    return None
233 |
234 |
def fun_08(subset, descr):
    """Operator 208YYY: change data width for character data.

    YYY == 0 cancels the alteration; otherwise character fields become
    YYY octets (YYY * 8 bits) wide.
    """
    yyy = descr % 1000
    if yyy:
        subset._alter.wchr = yyy * 8
    else:
        subset._alter.wchr = 0
    return None
240 |
241 |
def fun_09(subset, descr):
    """Operator 209YYY: IEEE floating point representation.

    Stores YYY in the alteration state (0 cancels the representation).
    """
    subset._alter.ieee = descr % 1000
    return None
247 |
248 |
def fun_21(subset, descr):
    """Operator 221YYY: data not present.

    Remembers the count YYY of following descriptors whose data is not
    present, so the caller can skip them.
    """
    subset._skip_data = descr % 1000
    return None
254 |
255 |
def fun_22_r(subset, descr):
    """Operator 222000: quality assessment information follows."""
    # Operator's long name from table C, with a generic fallback.
    entry = subset._tables.tab_c.get(descr, ("Operator",))
    # Informational entry only; no data is read for the operator itself.
    result = DescrDataEntry(descr, "OPR", entry[0], None)
    # Pause back-reference recording while QA information is processed.
    subset._backref_record.pause(True)
    return result
264 |
265 |
def fun_24_r(subset, descr):
    """Operator 224YYY (decode): first-order statistical values."""
    return fun_statistic_read(subset, descr, descr % 1000)
270 |
271 |
def fun_24_w(subset, descr):
    """Operator 224YYY (encode): first-order statistical values."""
    return fun_statistic_write(subset, descr, descr % 1000)
276 |
277 |
def fun_25_r(subset, descr):
    """Operator 225YYY (decode): difference statistical values."""
    return fun_statistic_read(subset, descr, descr % 1000)
282 |
283 |
def fun_25_w(subset, descr):
    """Operator 225YYY (encode): difference statistical values."""
    return fun_statistic_write(subset, descr, descr % 1000)
288 |
289 |
def fun_statistic_read(subset, descr, an):
    """Decode-side handler shared by the statistical operators 224/225.

    :param an: the YYY part of the operator descriptor.
    :return: DescrDataEntry or None.
    :raises BufrDecodeError: for a YYY other than 0 or 255.
    """
    if an == 0:
        # "Statistical values follow": return the operator's long name.
        en = subset._tables.tab_c.get(descr, ("Operator",))
        l_rval = DescrDataEntry(descr, "OPR", en[0], None)
    elif an == 255:
        # Marker operator: re-read the next back-referenced element with
        # its recorded alteration state.
        bar = next(subset._backref_record)
        if bar:
            val = subset.get_val(subset._blob,
                                 subset.subs_num,
                                 tab_b_elem=bar[0],
                                 alter=bar[1])
            l_rval = DescrDataEntry(descr, None, val, bar[0])
        else:
            l_rval = None
    else:
        # BUG FIX: the message was never %-formatted (descr was passed as
        # a separate exception argument).
        raise BufrDecodeError("Unknown operator '%d'!" % descr)
    return l_rval
311 |
312 |
def fun_statistic_write(subset, descr, an):
    """Encode-side handler shared by the statistical operators 224/225.

    :param an: the YYY part of the operator descriptor.
    :raises BufrEncodeError: for a YYY other than 0 or 255.
    """
    if an == 0:
        # "Statistical values follow": nothing to encode for the operator
        # itself; back-references are filtered by the bitmap.
        pass
    elif an == 255:
        # Marker operator: write the current value with the
        # back-referenced element's descriptor and alteration state.
        bar = next(subset._backref_record)
        subset.add_val(subset._blob, subset._vl, subset._vi, tab_b_elem=bar[0], alter=bar[1])
        subset._vi += 1
    else:
        # BUG FIX: the message was never %-formatted (descr was passed as
        # a separate exception argument).
        raise BufrEncodeError("Unknown operator '%d'!" % descr)
    return None
326 |
327 |
def fun_35(subset, descr):
    """Cancel backward data reference.

    Only the exact descriptor 235000 restarts the back-reference record;
    any other 235YYY is a no-op.
    """
    if descr == 235000:
        subset._backref_record.restart()
    return None
333 |
334 |
def fun_36_r(subset, descr):
    """Operator 236000: define data present bit-map (decode side).

    The bit-map is announced either by a replication descriptor or by an
    explicit run of 031031 descriptors; its bits are read from the data
    section, recorded, and applied to the back-reference record.

    :raises BufrDecodeError: if the descriptor list does not define a
        valid bit-map.
    """
    # Evaluate following replication descr.
    subset._di += 1
    # BUG FIX: pre-set am/an so an unexpected descriptor here raises
    # BufrDecodeError below instead of an unrelated NameError.
    am = 0
    an = 0
    if fun.descr_is_loop(subset._dl[subset._di]):
        # How bitmaps should be done: via a replication.
        am, an, _ = subset.eval_loop_descr(record=False)
    elif subset._dl[subset._di] == 31031:
        # Bitmap not as replication, but explicit 031031-list in sect3.
        am = 1
        an = 0
        while subset._dl[subset._di] == 31031:
            # Count 031031.
            an += 1
            subset._di += 1
        else:
            # Set descriptor index to last of the 031031.
            subset._di -= 1
    if am != 1 or subset._dl[subset._di] != 31031:
        raise BufrDecodeError("Fault in replication defining bitmap!")
    # Read 'an' single-bit values; compressed/array mode wraps each value
    # in a sequence, hence the [0] in the first variant.
    if subset._as_array:
        subset._bitmap = [subset.get_val(subset._blob,
                                         subset.subs_num,
                                         fix_width=1)[0]
                          for _ in range(an)]
    else:
        subset._bitmap = [subset.get_val(subset._blob,
                                         subset.subs_num,
                                         fix_width=1)
                          for _ in range(an)]
    logger.debug("APPLY BITMAP (%d) %s", len(subset._bitmap), subset._bitmap)
    subset._backref_record.apply(subset._bitmap)
    l_rval = DescrDataEntry(descr, "BMP DEF", subset._bitmap, None)
    return l_rval
369 |
370 |
def fun_36_w(subset, _):
    """Operator 236000: define data present bit-map (encode side).

    The value-list entry at the current index must be a bitmap: a list of
    single-item lists, e.g. [[1], [1], [0], [1]].
    """
    if subset.is_compressed:
        raw = fun.mk_value_list(subset._vl, subset._vi)[0]
    else:
        raw = subset._vl[subset._vi]
    # Unwrap the single-item lists into a flat bit list.
    subset._bitmap = [bit[0] for bit in raw]
    subset._backref_record.apply(subset._bitmap)
    return None
381 |
382 |
def fun_37_r(subset, descr):
    """Use (237000) or cancel use (237255) defined data present bit-map.

    :return: DescrDataEntry for 237000, otherwise None.
    """
    # BUG FIX: l_rval was only assigned in the 237000 branch, so 237255
    # raised NameError on return; default to None.
    l_rval = None
    if descr == 237000:
        l_rval = DescrDataEntry(descr, "BMP USE", subset._bitmap, None)
        subset._backref_record.reset()
    elif descr == 237255:
        subset._bitmap = []
        subset._backref_record.renew()
    return l_rval
392 |
393 |
def fun_37_w(subset, descr):
    """Use (237000) or cancel use (237255) defined data present bit-map.

    Skip a bitmap list if one is present in the json data set.
    """
    if descr == 237000:
        # The data set may repeat the bitmap here; skip it if the current
        # value entry is a list/tuple.
        if subset.is_compressed:
            current = subset._vl[0][subset._vi]
        else:
            current = subset._vl[subset._vi]
        if isinstance(current, (list, tuple)):
            subset._vi += 1
        subset._backref_record.reset()
    elif descr == 237255:
        subset._bitmap = []
        subset._backref_record.renew()
    return None
412 |
413 |
def fun_noop(*_):
    """Operator requiring no further action; accepts and ignores any arguments."""
    return None
420 |
421 |
def fun_fail(_, descr):
    """Placeholder for operators defined by the BUFR standard but not
    (yet) implemented here.

    :raises NotImplementedError: always.
    """
    raise NotImplementedError("Operator %06d not implemented." % descr)
431 |
--------------------------------------------------------------------------------
/trollbufr/coder/parse.py.skel:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c)
5 | #
6 | # Author(s):
7 | #
8 | #
9 | # This program is free software: you can redistribute it and/or modify
10 | # it under the terms of the GNU General Public License as published by
11 | # the Free Software Foundation, either version 3 of the License, or
12 | # (at your option) any later version.
13 | #
14 | # This program is distributed in the hope that it will be useful,
15 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 | # GNU General Public License for more details.
18 | #
19 | # You should have received a copy of the GNU General Public License
20 | # along with this program. If not, see .
21 | '''
22 | Created on
23 |
24 | @author:
25 | '''
26 |
27 | import logging
28 | import os
29 |
30 | from tab.errors import BufrTableError
31 | from tab.tables import TabBelem
32 |
33 | logger = logging.getLogger("trollbufr")
34 |
35 | """
36 | ##### Description of recognized table format #####
37 |
38 |
39 | ##################################################
40 | """
41 |
# Fallback directory for locally installed table files.
_default_table_dir = "%s/.local/share/trollbufr" % (os.getenv('HOME'))
# File name per table identifier.  Skeleton: the empty entries must be
# filled in for the concrete table format (they may contain %-placeholders
# for version numbers, see get_file()).
_table_file_names = {
    "A": "datacat.table",
    "B": "",
    "C": "operator.table",
    "D": "",
    "CF": "",
}
# Log-message templates.
_text_tab_loaded = "Table loaded: '%s'"
_text_file_not_found = "Table not found: '%s'"
52 |
def load_tab_a(tables, fname):
    """Load table A (data category) from 'fname' into object Tables.

    Skeleton stub: implement for the concrete table format; return True
    on success (see the sibling parser modules for examples).
    """
    pass
56 |
def load_tab_b(tables, fname):
    """Load table B (elements) from 'fname' into object Tables.

    Skeleton stub: implement for the concrete table format; return True
    on success.
    """
    pass
60 |
def load_tab_c(tables, fname):
    """Load table C (operators) from 'fname' into object Tables.

    Skeleton stub: implement for the concrete table format; return True
    on success.
    """
    pass
64 |
def load_tab_d(tables, fname):
    """Load table D (sequences) from 'fname' into object Tables.

    Skeleton stub: implement for the concrete table format; return True
    on success.
    """
    pass
68 |
def load_tab_cf(tables, fname):
    """
    Load table E (code- and flagtables) into object Tables.
    fname is a directory for ecCodes, a file for libDWD.

    Skeleton stub: implement for the concrete table format; return True
    on success.
    """
    pass
75 |
def get_file(tabnum, base_path, master, center, subcenter, master_vers, local_vers):
    """Return a (master, local) pair of table-file paths for table 'tabnum'.

    Master tables are searched in base_path itself, local tables in a
    per-centre sub-directory "<center>_<subcenter>" (zero-padded).
    """
    master_path = base_path
    local_path = os.path.join(base_path, "%05d_%05d" % (center, subcenter))
    name = _table_file_names[tabnum]
    if '%' in name:
        # The file name carries a version-number placeholder.
        return (os.path.join(master_path, name % (master_vers)),
                os.path.join(local_path, name % (local_vers)))
    return (os.path.join(master_path, name),
            os.path.join(local_path, name))
88 |
89 |
--------------------------------------------------------------------------------
/trollbufr/coder/parse_bufrdc.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | '''
25 | Created on Dec 07, 2016
26 |
27 | @author:
28 | '''
29 |
30 | import logging
31 | import os
32 |
33 | from .errors import BufrTableError
34 | from .tables import TabBElem
35 |
36 | logger = logging.getLogger("trollbufr")
37 |
38 | """
39 | ##### Table files naming convention (Ed.3+4) #####
40 |
41 | vssswwwwwxxxxxyyyzzz
42 |
43 | v - Bufr table (B, C, D)
44 | sss - Master table number (000)
45 | wwwww - Originating subcentre
46 | xxxxx - Originating centre
47 | yyy - Version number of master table used
48 | zzz - Version number of local table used
49 |
50 | e.g. B0000000000098013001.TXT
51 | C0000000000098013001.TXT
52 | D0000000000098013001.TXT
53 |
54 |
55 | ##### Description of recognized table format #####
56 |
57 | B0000000000254019001.TXT
58 | 000001 TABLE A: ENTRY CCITTIA5 0 0 24 CHARACTER 0 3
59 |
60 | C0000000000098014001.TXT
61 | 001003 0008 0000 01 ATARCTICA
62 | 0001 01 REGION I
63 | 0002 01 REGION II
64 | 0003 01 REGION III
65 | 0004 01 REGION IV
66 | 0005 01 REGION V
67 | 0006 01 REGION VI
68 | 0007 01 MISSING VALUE
69 |
70 | D0000000000098014001.TXT
71 | 300002 2 000002
72 | 000003
73 |
74 | ----- path/file -----
75 | ./[BCD]<3:d><7:d><3:center><3:mver><3:lver>
76 |
77 | ----- elements --- B0000000000254019001.TXT -----
78 | # 1:sp 6:fxy 1:sp 64:name 1:sp 24:unit 1:sp 3:scale 1:sp 12:ref 1:sp 3:width
79 | fxy name unit scale reference width
80 | 000001 TABLE A: ENTRY CCITTIA5 0 0 24 [...]
81 |
82 | ----- sequence --- D0000000000098014001.TXT -----
83 | # first line per sequence: "FXY1 N FXY2"
84 | # following lines per seq: " FXY2"
85 | # N = number of FXY2 in sequence.
86 | #
87 | 300002 2 000002
88 | 000003
89 |
90 | ----- code/flag --- C0000000000098014001.TXT -----
91 | # line format:
92 | fxy n code xx name
93 | 001003 0008 0000 01 ATARCTICA
94 | 0001 01 REGION I
95 | 0002 01 REGION II
96 |
97 | ##################################################
98 | """
99 |
# Fallback directory for locally installed table files.
_default_table_dir = "%s/.local/share/trollbufr" % (os.getenv('HOME'))
# File name patterns per table identifier, following the bufrdc naming
# convention documented above: [BCD]<sss><wwwww><xxxxx:center><yyy:master
# version><zzz:local version>.TXT.  "C" maps to the generic operator table
# shipped with trollbufr; "CF" (code/flags) uses the bufrdc "C" files.
_table_file_names = {
    "A": "A" + "0" * 19 + ".TXT",
    "B": "B%03d%07d%03d%03d%03d.TXT",
    "C": "operator.table",
    "D": "D%03d%07d%03d%03d%03d.TXT",
    "CF": "C%03d%07d%03d%03d%03d.TXT",
}
_text_file_not_found = "Table not found: '%s'"
109 |
110 |
def load_tab_a(tables, fname):
    """Load table A (data category) from 'fname' into object Tables.

    Not implemented for the bufrdc table format: always returns False.
    The commented-out block below is a template for a '|'-separated
    "code|meaning" file, should one become available.
    """
    # if not os.path.exists(fname):
    # raise BufrTableError(_text_file_not_found % fname)
    # with open(fname, "r") as fh:
    # for line in fh:
    # if line[0]=="#" or len(line) < 3:
    # continue
    # d = None
    # e = None
    # lim = '|'
    # el = line.rstrip().split(lim)
    # # 0 1
    # # code|meaning
    # d = el[0]
    # e = el[1]
    # tables.tab_a[int(d)] = e
    # return True
    return False
130 |
131 |
def load_tab_b(tables, fname):
    """Load table B (elements) from 'fname' into object Tables.

    Parses fixed-column bufrdc rows: FXY in columns 1-6, name in 8-72,
    unit in 73-97, scale in 98-100, reference value in 101-113 and data
    width in 114-117.  Corrupt lines are logged and skipped (best effort).

    :raises BufrTableError: if the file does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        for line in fh:
            try:
                if line[0] == "#" or len(line) < 3:
                    continue
                descr = int(line[1:7])
                full_name = line[8:73].rstrip()
                unit = line[73:98].rstrip()
                scale = int(line[98:101])
                refval = int(line[101:114])
                width = int(line[114:118])
                # Derive the element type from the unit string.
                if unit == "CCITTIA5":
                    typ = "A"
                elif unit.startswith("CODE") or unit.startswith("FLAG"):
                    typ = unit[0:1]
                else:
                    typ = "N"
                # descr, typ, unit, abbrev, full_name, scale, refval, width
                elem = TabBElem(descr, typ, unit, None, full_name,
                                scale, refval, width)
                tables.tab_b[descr] = elem
            except Exception as exc:
                # Best-effort: skip the corrupt line, keep loading.
                logger.warning("Corrupt table %s (%s)", fname, line[0:8])
                logger.warning(exc)
    return True
161 |
162 |
def load_tab_c(tables, fname):
    """Load table C (operators) from 'fname' into object Tables.

    Not implemented for the bufrdc table format: always returns False
    (the generic operator.table is used instead, see _table_file_names).
    The commented-out block below is a template for a ','-separated file.
    """
    # if not os.path.exists(fname):
    # raise BufrTableError(_text_file_not_found % fname)
    # with open(fname, "r") as fh:
    # for line in fh:
    # if line[0]=="#" or len(line) < 3:
    # continue
    # d = None
    # e = None
    # el = line.rstrip().split(',')
    # # 0 1 2 3
    # # Edition, FXY, OperatorName_en, OperationDefinition_en
    # d = el[1]
    # e = (el[2], el[3])
    # if d.endswith("YYY"):
    # tables.tab_c[int(d[0:3])] = e
    # else:
    # tables.tab_c[int(d)] = e
    # return True
    return False
184 |
185 |
def load_tab_d(tables, fname):
    """Load table D (sequences) from 'fname' into object Tables.

    Fixed-column bufrdc format: the first line of a sequence holds the
    sequence FXY (columns 1-6) and its first element FXY (columns 10-16);
    continuation lines hold one element FXY each with blank sequence
    columns.

    :raises BufrTableError: if the file is missing or unparseable.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        desc = None
        elem = []
        for line in fh:
            if line[0] == "#" or len(line) < 3:
                continue
            try:
                le = (line[1:7], line[7:10], line[10:17])
                if not le[0].isspace():
                    # A new sequence starts: store the finished one.
                    if len(elem):
                        tables.tab_d[int(desc)] = tuple(elem)
                        elem = []
                    desc = le[0]
                elem.append(int(le[-1]))
            except Exception as exc:
                # BUG FIX: was "except BaseException", which also caught
                # KeyboardInterrupt/SystemExit; narrow and chain the cause.
                logger.error(exc)
                raise BufrTableError(exc) from exc
        # Store the last pending sequence.
        if len(elem):
            tables.tab_d[int(desc)] = tuple(elem)
    return True
211 |
212 |
def load_tab_cf(tables, fname):
    """
    Load table CF (code- and flagtables) into object Tables.
    fname is a directory for ecCodes, a file for libDWD.

    Fixed-column bufrdc "C" files: FXY in columns 0-5, code figure in
    12-15, name from column 20 on.  A line with blank columns 17-18
    continues the previous entry's name.

    :raises BufrTableError: if the file is missing or unparseable.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        # BUG FIX: '["" * 5]' built a ONE-element list ([""]), so the
        # first continuation line crashed on la[4]; a five-element list
        # of empty strings was intended.
        la = [""] * 5
        for line in fh:
            if line[0] == "#" or len(line) < 3:
                continue
            row = line.rstrip()
            try:
                le = [row[0:6], row[7:11], row[12:16], row[17:19], row[20:]]
                if le[3].isspace():
                    # Continuation line: extend the previous entry's name.
                    la[4] += le[4]
                    le = la
                if le[4].startswith("RESERVED") or le[4].startswith("NOT DEFINED"):
                    continue
                if not le[0].isspace():
                    desc = int(le[0])
                tables.tab_cf.setdefault(desc, {})[int(le[2])] = le[4]
                la = le
            except Exception as exc:
                # BUG FIX: was "except BaseException"; narrow and chain.
                logger.error(exc)
                raise BufrTableError(exc) from exc
    return True
241 |
242 |
def get_file(tabnum, base_path, master, center, subcenter, master_vers, local_vers):
    """Return a (master, local) pair of table-file paths for table 'tabnum'.

    Both files live directly in base_path; centre and version numbers are
    encoded in the file name per the bufrdc naming convention.
    """
    pattern = _table_file_names[tabnum]
    master_file = os.path.join(base_path, pattern % (0, 0, 0, master_vers, 0))
    local_file = os.path.join(base_path, pattern % (0, 0, center, master_vers, local_vers))
    return (master_file, local_file)
248 |
--------------------------------------------------------------------------------
/trollbufr/coder/parse_eccodes.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | '''
25 | Created on Sep 15, 2016
26 |
27 | @author: amaul
28 | '''
29 |
30 | import glob
31 | import logging
32 | import os
33 | import re
34 |
35 | from .errors import BufrTableError
36 | from .tables import TabBElem
37 |
38 | logger = logging.getLogger("trollbufr")
39 |
40 | """
41 | ##### Description of recognized table format =eccodes= #####
42 |
43 | ----- path/file -----
44 | ./MasterTable/"wmo"/MasterVersion
45 | ./MasterTable/"local"/LocalVersion/Centre/SubCentre
46 | codetables/XY.table
47 | element.table
48 | sequence.def
49 |
50 | ----- codetable -----
51 | N N Value
52 |
53 | ----- element.table -----
54 | 0 1 2 3 4 5 6 7 8 9 10
55 | #code|abbreviation|type|name|unit|scale|reference|width|crex_unit|crex_scale|crex_width
56 | 000001|tableAEntry|string|TABLE A: ENTRY|CCITT IA5|0|0|24|Character|0|3
57 |
58 | ----- sequence.def -----
59 | "300002" = [ 000002, 000003 ]
60 |
61 | ----- operators.table -----
62 | #code|abbreviation|type|name|unit|scale|reference|width|crex_unit|crex_scale|crex_width
63 | 222000|qualityInformationFollows|long|The values of class 33 elements which follow relate to the data defined by the data present bit-map|OPERATOR|0|0|0|0|0|
64 |
65 | ----- common code tables -----
66 | #code|meaning
67 | 0|Surface data - land
68 |
69 | ############################################################
70 | """
71 |
# File names per table identifier, relative to the version directory built
# by get_file(); "A" and "C" live three directory levels up (shared across
# master/local versions), "CF" is a directory of per-descriptor files.
_table_file_names = {
    "A": "../../../datacat.table",
    "B": "element.table",
    "C": "../../../operators.table",
    "D": "sequence.def",
    "CF": "codetables",
}
_text_file_not_found = "Table not found: '%s'"
80 |
81 |
def load_tab_a(tables, fname):
    """Load table A (data category) from 'fname' into object Tables.

    Lines have the form "code|meaning"; '#' starts a comment line.

    :raises BufrTableError: if the file does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        for line in fh:
            # Skip comments and too-short lines.
            if line.startswith("#") or len(line) < 3:
                continue
            code, meaning = line.rstrip().split('|')[:2]
            tables.tab_a[int(code)] = meaning
    return True
100 |
101 |
def load_tab_b(tables, fname):
    """Load table B (elements) from 'fname' into object Tables.

    Parses '|'-separated ecCodes element.table rows:
    code|abbreviation|type|name|unit|scale|reference|width|crex_unit|...

    :raises BufrTableError: if the file does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        for line in fh:
            if line.startswith("#") or len(line) < 3:
                continue
            cols = line.rstrip().split('|')
            # ecCodes uses the single type "table" for code/flag tables;
            # it is stored as "code" here (see the commented alternative
            # in the repository history for a unit-based distinction).
            if cols[2] == "table":
                typ = "code"
            else:
                typ = cols[2]
            code = int(cols[0])
            # descr, typ, unit, abbrev, full_name, scale, refval, width
            tables.tab_b[code] = TabBElem(code, typ, cols[4], cols[1],
                                          cols[3], int(cols[5]),
                                          int(cols[6]), int(cols[7]))
    return True
127 |
128 |
def load_tab_c(tables, fname):
    """Load table C (operators) from 'fname' into object Tables.

    Parses '|'-separated ecCodes rows; only the code, abbreviation and
    name columns are used.  A code ending in "YYY" (e.g. 201YYY) is keyed
    by its leading three digits, others by the full numeric code.

    :raises BufrTableError: if the file does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        for line in fh:
            if line.startswith("#") or len(line) < 3:
                continue
            cols = line.rstrip().split('|')
            code = cols[0]
            entry = (cols[1].strip(), cols[3].strip())
            key = int(code[0:3]) if code.endswith("YYY") else int(code)
            tables.tab_c[key] = entry
    return True
150 |
151 |
def load_tab_d(tables, fname):
    """Load table D (sequences) from 'fname' into object Tables.

    Parses ecCodes' sequence.def, e.g.: "300002" = [ 000002, 000003 ]
    A sequence array may span several physical lines.

    :raises BufrTableError: if the file does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    # BUG FIX: the pattern is now a raw string -- "\d"/"\s" in a normal
    # string are invalid escape sequences (a warning in modern Python).
    re_fl = re.compile(r"\"(?P<desc>\d+)\"\s*=\s*\[(?P<exp>[0-9, ]+)\]")
    with open(fname, "r") as fh:
        cline = ""
        for line in fh:
            if line[0] == "#" or len(line) < 3:
                continue
            # Some eccodes' sequence.tab have newline inside a
            # sequence-array; cline buffers lines until the collected
            # text matches the full-sequence regex.
            cline += line.strip()
            m = re_fl.match(cline)
            if m is None:
                continue
            expansion = [int(fxy) for fxy in m.group('exp').split(',')]
            tables.tab_d[int(m.group('desc'))] = tuple(expansion)
            cline = ""
    return True
181 |
182 |
def load_tab_cf(tables, fname):
    """
    Load table E (code- and flagtables) into object Tables.
    fname is a directory for ecCodes, a file for libDWD.

    Each "<descriptor>.table" file in the directory holds lines of the
    form "N N Value"; the descriptor number is taken from the file name.

    :raises BufrTableError: if the directory does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    for fn_etab in glob.glob(os.path.join(fname, "*.table")):
        # Descriptor number is the file name's stem.
        desc = os.path.basename(fn_etab).split('.')
        with open(fn_etab, "r") as fh:
            for line in fh:
                if line[0] == "#" or len(line) < 3:
                    continue
                try:
                    e = line.rstrip().split(' ', 2)
                    if e[2].startswith("Reserved") or e[2].startswith("Not used"):
                        continue
                    tables.tab_cf.setdefault(int(desc[0]), {})[int(e[0])] = e[2].replace("\" ", "")
                except IndexError:
                    # BUG FIX: Logger.warn() is deprecated; use warning().
                    logger.warning("Table parse: no values: '%s' in '%s'", line.strip(), fn_etab)
    return True
204 |
205 |
def get_file(tabnum, base_path, master, center, subcenter, master_vers, local_vers):
    """Return a (master, local) pair of table-file paths for table 'tabnum'.

    Master tables live under <base>/<master>/wmo/<master_vers>, local
    tables under <base>/<master>/local/<local_vers>/<center>/<subcenter>.
    """
    master_dir = os.path.join(base_path, str(master), "wmo", str(master_vers))
    local_dir = os.path.join(base_path, str(master), "local",
                             str(local_vers), str(center), str(subcenter))
    name = _table_file_names[tabnum]
    if '%' in name:
        # The file name carries a version-number placeholder.
        return (os.path.join(master_dir, name % (master_vers)),
                os.path.join(local_dir, name % (local_vers)))
    return (os.path.join(master_dir, name),
            os.path.join(local_dir, name))
216 |
--------------------------------------------------------------------------------
/trollbufr/coder/parse_libdwd.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | '''
25 | Created on Sep 15, 2016
26 |
27 | @author: amaul
28 | '''
29 |
30 | import logging
31 | import os
32 | import re
33 |
34 | from .errors import BufrTableError
35 | from .tables import TabBElem
36 |
37 | logger = logging.getLogger("trollbufr")
38 |
39 | """
40 | ##### Description of recognized table format =libdwd= #####
41 |
42 | ----- path/file -----
43 | ./
44 | ./local_CENTRE_SUBCENTRE/
45 |
46 | ----- table_b_VVV -----
47 | 0 1 2 3 4 5 6
48 | "FXYlibDWDTypeunitscalereferenceValuedataWidth_Bitsdescriptor_name"
49 | 000001 A CCITT IA5 0 0 24 TABLE A: ENTRY
50 |
51 | ----- table_d_VVV -----
52 | # first line per sequence: "FXY1FXY2"
53 | # following lines per seq: "FXY2"
54 | #
55 | 300002 000002
56 | 000003
57 |
58 | ----- codeflags_VVV -----
59 | 0 1 2 3 4 5 6
60 | # line format: "FXYlibDWDTypecodeFigureFromcodeFigureToentrynameentryNameSub1entryNameSub2"
61 | 001003 C 0 Antarctica
62 | 001003 C 1 Region I
63 |
64 | ----- operator.table -----
65 | #Edition, FXY, OperatorName_en, OperationDefinition_en
66 | 3,201YYY, Change data width, Add (YYY-128) bits to the data width given for each data element in Table B
67 |
68 | ----- common code tables -----
69 | #code|meaning
70 | 0|Surface data - land
71 |
72 | ###########################################################
73 | """
74 |
# File name (patterns) of the individual tables for the libDWD layout;
# "%03d" is filled with the master resp. local table version number.
_table_file_names = {
    "A": "datacat.table",
    "B": "table_b_%03d",
    "C": "operator.table",
    "D": "table_d_%03d",
    "CF": "codeflags_%03d",
}
# Message used for BufrTableError when a table file is absent.
_text_file_not_found = "Table not found: '%s'"
83 |
84 |
def load_tab_a(tables, fname):
    """Load table A (data category) from 'fname' into object Tables.

    Each non-comment line has the format "code|meaning".

    :param tables: Tables object whose tab_a dict is filled.
    :param fname: path of the table file.
    :return: True on completion.
    :raises BufrTableError: if fname does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        for line in fh:
            # Skip comments and (nearly) empty lines.
            if line[0] == "#" or len(line) < 3:
                continue
            # Removed dead pre-assignments (d/e = None) and the
            # single-use `lim` variable of the original.
            fields = line.rstrip().split('|')
            tables.tab_a[int(fields[0])] = fields[1]
    return True
103 |
104 |
def load_tab_b(tables, fname):
    """Load table B (elements) from 'fname' into object Tables.

    :raises BufrTableError: if fname does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    try:
        # Line layout (tab or blank separated):
        # FXY  type  unit  scale  referenceValue  dataWidth_Bits  descriptor_name
        line_re = re.compile(
            r"^(\d+)(?:\t|\s+)(\w)(?:\t|\s+)(.+?)(?:\t|\s+)([0-9-]+)(?:\t|\s+)([0-9-]+)(?:\t|\s+)([0-9-]+)(?:\t|\s+)(.+)$")
        with open(fname, "r") as fh:
            for line in fh:
                # Skip comments and (nearly) empty lines.
                if line[0] == "#" or len(line) < 3:
                    continue
                match = line_re.match(line)
                if match is None:
                    continue
                descr = int(match.group(1))
                # TabBElem(descr, typ, unit, abbrev, full_name, scale, refval, width)
                tables.tab_b[descr] = TabBElem(
                    descr, match.group(2), match.group(3), None, match.group(7),
                    int(match.group(4)), int(match.group(5)), int(match.group(6)))
    except Exception as err:
        # Best effort: log the problem and keep whatever was parsed so far.
        logger.error(err, exc_info=1)
    return True
128 |
129 |
def load_tab_c(tables, fname):
    """Load table C (operators) from 'fname' into object Tables.

    Each non-comment line has the format
    "Edition, FXY, OperatorName_en, OperationDefinition_en".
    Operator families like "201YYY" are keyed by their leading "FXX" part.

    :param tables: Tables object whose tab_c dict is filled.
    :param fname: path of the table file.
    :return: True on completion.
    :raises BufrTableError: if fname does not exist.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        for line in fh:
            # Skip comments and (nearly) empty lines.
            if line[0] == "#" or len(line) < 3:
                continue
            # Removed the dead pre-assignments (d/e = None) of the original.
            fields = line.rstrip().split(',')
            fxy = fields[1]
            entry = (fields[2].strip(), fields[3].strip())
            if fxy.endswith("YYY"):
                # Whole operator family, e.g. "201YYY" -> key 201.
                tables.tab_c[int(fxy[0:3])] = entry
            else:
                tables.tab_c[int(fxy)] = entry
    return True
150 |
151 |
def load_tab_d(tables, fname):
    """Load table D (sequences) from 'fname' into object Tables.

    File format: the first line of a sequence is "FXY1<TAB>FXY2", each
    continuation line is "<TAB>FXY2" (empty first field).

    :param tables: Tables object whose tab_d dict is filled.
    :param fname: path of the table file.
    :return: True on completion.
    :raises BufrTableError: if fname does not exist or a line is malformed.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        desc = None
        seq = []
        for line in fh:
            # Skip comments and (nearly) empty lines.
            if line[0] == "#" or len(line) < 3:
                continue
            try:
                fields = line.split('\t')
                if len(fields[0]):
                    # Start of a new sequence: flush the previous one.
                    if len(seq):
                        tables.tab_d[int(desc)] = tuple(seq)
                        seq = []
                    desc = fields[0]
                seq.append(int(fields[-1]))
            except (ValueError, IndexError) as err:
                # Was `except BaseException as e`, which both shadowed the
                # accumulator list `e` and caught far too much.
                raise BufrTableError(err)
        # Flush the last sequence (was a redundant for/else in the original).
        if len(seq):
            tables.tab_d[int(desc)] = tuple(seq)
    return True
176 |
177 |
def load_tab_cf(tables, fname):
    """
    Load table E (code- and flagtables) into object Tables.
    fname is a directory for ecCodes, a file for libDWD.

    Line layout (tab separated):
    "FXY  libDWDType  codeFigureFrom  codeFigureTo  entry name ...".

    :return: True on completion.
    :raises BufrTableError: if fname does not exist or a line is malformed.
    """
    if not os.path.exists(fname):
        raise BufrTableError(_text_file_not_found % fname)
    with open(fname, "r") as fh:
        for line in fh:
            # Skip comments and (nearly) empty lines.
            if line[0] == "#" or len(line) < 3:
                continue
            fields = line.rstrip().split('\t')
            # Skip placeholder entries.
            if fields[4].startswith("Reserved") or fields[4].startswith("Not used"):
                continue
            try:
                if fields[3] == 'A':
                    # 'A' in the code-figure-to column is stored under key -1
                    # (presumably "all code figures" -- TODO confirm).
                    value = -1
                else:
                    value = int(fields[2])
                tables.tab_cf.setdefault(int(fields[0]), {})[int(value)] = fields[4]
            except (ValueError, IndexError) as err:
                # The original `logger.warn("Table parse error: ", e)` passed
                # the exception as a stray argument without a placeholder,
                # which itself raises a logging error; also `as e` shadowed
                # the field list `e`.
                logger.warning("Table parse error: %s", err)
                raise BufrTableError(err)
    return True
202 |
203 |
def get_file(tabnum, base_path, master, center, subcenter, master_vers, local_vers):
    """Build the pair of file paths (master table, local table) for one table.

    For the libDWD layout the master tables live directly in base_path, the
    local ones in a "local_CCCCC_SSSSS" sub-directory.

    :param tabnum: key into _table_file_names ("A", "B", "C", "D", "CF").
    :return: tuple (master_path, local_path).
    """
    master_dir = base_path
    local_dir = os.path.join(base_path, "local_%05d_%05d" % (center, subcenter))
    fn_pattern = _table_file_names[tabnum]
    if '%' in fn_pattern:
        # The file name carries the table version number.
        return (os.path.join(master_dir, fn_pattern % (master_vers)),
                os.path.join(local_dir, fn_pattern % (local_vers)))
    return (os.path.join(master_dir, fn_pattern),
            os.path.join(local_dir, fn_pattern))
214 |
--------------------------------------------------------------------------------
/trollbufr/coder/tables.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 |
25 | '''
26 | Created on Sep 15, 2016
27 |
28 | @author: amaul
29 | '''
30 | from .bufr_types import TabBType, DescrInfoEntry
31 |
32 | import logging
33 | logger = logging.getLogger("trollbufr")
34 |
35 |
class Tables(object):
    '''
    Container for all loaded BUFR tables (A, B, C, D and code/flag tables),
    together with the identification numbers they were loaded for.
    '''
    # Master table number.
    _master = 0
    # Version of the master table.
    _vers_master = 0
    # Version of the local table.
    _vers_local = 0
    # Originating centre.
    _centre = 0
    # Originating sub-centre.
    _centre_sub = 0

    def __init__(self, master=0, master_vers=0, local_vers=0, centre=0, subcentre=0):
        '''Store the table identification and create empty lookup dicts.'''
        self._master = master
        self._vers_master = master_vers
        self._vers_local = local_vers
        self._centre = centre
        self._centre_sub = subcentre
        # Table A: { code -> meaning }
        self.tab_a = dict()
        # Table B: { desc -> TabBElem }
        self.tab_b = dict()
        # Table C: { desc -> (name, definition) }
        self.tab_c = dict()
        # Table D: { desc -> (desc, ...) }
        self.tab_d = dict()
        # Code/flag tables: { desc -> {num: value} }
        self.tab_cf = dict()

    def differs(self, master, master_vers, local_vers, centre, subcentre):
        """Test if the version etc. numbers differ from the table currently loaded"""
        loaded = (self._master, self._vers_master, self._vers_local,
                  self._centre, self._centre_sub)
        requested = (master, master_vers, local_vers, centre, subcentre)
        return loaded != requested

    def lookup_codeflag(self, descr, val):
        """Interprets value val according the code/flag tables.

        Returns val if it's not of type code table or flag table.
        """
        if not isinstance(descr, int):
            descr = int(descr)
        sval = val
        if descr < 100000:
            # KeyError for an unknown element descriptor propagates,
            # as in all previous versions.
            elem = self.tab_b[descr]
            code_map = self.tab_cf.get(descr)
            if code_map is None:
                return sval
            if elem.typ == TabBType.CODE:
                sval = code_map.get(val)
                logger.debug("CODE %06d: %d -> %s", descr, val, sval)
            elif elem.typ == TabBType.FLAG:
                # Collect the names of all bits set in val; bit numbering
                # starts at the most significant bit of the element width.
                set_names = [name for bit, name in code_map.items()
                             if val & (1 << (elem.width - bit))]
                sval = "|".join(set_names)
                logger.debug("FLAG %06d: %d -> %s", descr, val, sval)
        return sval or "N/A"

    def lookup_elem(self, descr):
        """Returns name, short-name, unit, and type in a named tuple
        associated with table B or C descriptor.
        """
        if descr < 100000:
            # Table B element descriptor.
            elem = self.tab_b.get(descr)
            if elem is None:
                return DescrInfoEntry("UNKN", None, "", None)
            return DescrInfoEntry(elem.full_name, elem.abbrev, elem.unit, elem.typ)
        if 200000 < descr < 300000:
            # Table C operator; fall back to the operator family (FXX).
            oper = self.tab_c.get(descr, self.tab_c.get(descr // 1000))
            if oper is None:
                return DescrInfoEntry("UNKN", None, "", "oper")
            return DescrInfoEntry(oper[0], None, "", "oper")
        return DescrInfoEntry(None, None, None, None)

    def lookup_common(self, val):
        """Returns meaning for data category value."""
        meaning = self.tab_a.get(val)
        logger.debug("COMMONS %d -> %s", val, meaning)
        return meaning or "UNKN"
124 |
125 |
class TabBElem(object):
    """One table B entry: an element descriptor with type, unit, scale,
    reference value and bit width."""

    def __init__(self, descr, typ_str, unit, abbrev, full_name, scale, refval, width):
        """
        :param descr: element descriptor (FXY).
        :param typ_str: type marker, one letter for libDWD ("A"/"N"/"C"/"F")
            or a word for ecCodes ("string"/"long"/"double"/"code"/"flag").
        :param unit: unit string.
        :param abbrev: short name (may be None).
        :param full_name: element name.
        :param scale: decimal scale factor.
        :param refval: reference value.
        :param width: data width in bits.
        :raises ValueError: for an unrecognized typ_str.
        """
        # Kept inside __init__ so TabBType is resolved lazily at call time.
        type_list = {"A": TabBType.STRING,
                     "N": TabBType.NUMERIC,
                     "C": TabBType.CODE,
                     "F": TabBType.FLAG,
                     "long": TabBType.LONG,
                     "double": TabBType.DOUBLE,
                     "code": TabBType.CODE,
                     "flag": TabBType.FLAG,
                     "string": TabBType.STRING}
        self.descr = descr
        self.typ = type_list.get(typ_str, None)
        if self.typ == TabBType.NUMERIC:
            # Distinguish float from integer by the scale factor.
            if scale > 0:
                self.typ = TabBType.DOUBLE
            else:
                self.typ = TabBType.LONG
        elif self.typ is None:
            # Was `raise BaseException(...)`: BaseException escapes the
            # `except Exception` handlers of the table parsers; ValueError
            # is the appropriate and catchable exception here.
            raise ValueError("Invalid entry typ_str '%s'" % typ_str)
        self.unit = unit
        self.abbrev = abbrev
        self.full_name = full_name
        self.scale = scale
        self.refval = refval
        self.width = width

    def __str__(self):
        """Human readable form; numeric descriptors are zero-padded."""
        if isinstance(self.descr, int):
            return "%06d : '%s' (%s) [%s]" % (self.descr, self.full_name, self.typ, self.unit)
        else:
            return "%s : '%s' (%s) [%s]" % (self.descr, self.full_name, self.typ, self.unit)
159 |
--------------------------------------------------------------------------------
/trollbufr/load_file.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | '''
25 | Created on Oct 27, 2016
26 |
27 | @author: amaul
28 | '''
29 | import re
30 | from trollbufr.coder import functions as f
31 | from trollbufr.coder.bdata import Blob
32 | from trollbufr.coder.errors import BufrDecodeWarning
33 |
34 | import logging
35 | logger = logging.getLogger("trollbufr")
36 |
"""This RE matches any Abbreviated Heading Line"""
# Pattern structure: "TTAAii CCCC YYGGgg" (4 letters + 2 digits, 4 letters,
# 6 digits) with an optional trailing 3-letter indicator starting A/C/R.
_re_ahl = re.compile(b"[^A-Z0-9]*?([A-Z]{4}[0-9]{2} [A-Z]{4} [0-9]{6}(?: [ACR][A-Z]{2})?)[^A-Z0-9]+")
39 |
40 |
def next_bufr(path=None, bin_data=None):
    '''
    Generator:
    Load octets from file, if path is given; otherwise use character-array in bin_data.
    Parse though bin_data for next BUFR.
    If present recognize a bulletins' abbreviated header line (AHL).

    :param path: optional file to read the octets from.
    :param bin_data: byte buffer to scan (ignored if path is given).
    :return: yields (bufr, size, header) per bulletin, where bufr is a Blob,
        size the octet count from section 0, header the AHL string or None.
    :raises ValueError: if neither path nor bin_data provide data.
    :raises BufrDecodeWarning: if a bulletin does not end with "7777".
    '''
    if path is not None:
        # Read whole file
        with open(path, "rb") as fh:
            bin_data = fh.read()
        logger.info("FILE %s", path)
    if bin_data is None:
        raise ValueError("No bin_data!")
    offs = 0
    while offs < len(bin_data):
        # Locate the next "BUFR" start marker (C-speed find instead of the
        # former per-byte slicing loop).
        bstart = bin_data.find(b"BUFR", offs)
        if bstart < 0:
            # reached end-of-bin_data
            return
        # At start of file or after previous bufr look for AHL
        m = _re_ahl.search(bin_data[offs:bstart])
        logger.debug("SEARCH AHL : %d - %d %s : %d matches > %s",
                     offs,
                     bstart,
                     bin_data[bstart: bstart + 4],
                     0 if m is None else len(m.groups()),
                     0 if m is None else m.groups()[0]
                     )
        header = (m.groups()[0]).decode() if m is not None else None
        # Bufr starts here; section 0 is "BUFR" plus 3 octets total size.
        offs = bstart + 4
        offs, size = f.octets2num(bin_data, offs, 3)
        # Set end of bufr and skip to there
        bend = bstart + size
        offs = bend
        # The bufr is corrupt if section 5 ("7777") is not where size says.
        if bin_data[bend - 4: bend] != b"7777":
            logger.error("End '7777' not found")
            raise BufrDecodeWarning("Bufr offset/length error!")
        bufr = Blob(bin_data[bstart: bend])
        logger.debug("LOADED %d B, %d - %d", bend - bstart, bstart, bend)
        # This generator returns one entry per bulletin
        yield (bufr, size, header)
102 |
103 |
if __name__ == "__main__":
    # Manual smoke test: print every BUFR found in the file given as argv[1].
    import sys
    print(sys.argv)
    for b in next_bufr(path=sys.argv[1]):
        print(b)
109 |
--------------------------------------------------------------------------------
/trollbufr/update.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016,2018 Alexander Maul
5 | #
6 | # Ported to Py3 09/2018
7 | #
8 | # Author(s):
9 | #
10 | # Alexander Maul
11 | #
12 | # This program is free software: you can redistribute it and/or modify
13 | # it under the terms of the GNU General Public License as published by
14 | # the Free Software Foundation, either version 3 of the License, or
15 | # (at your option) any later version.
16 | #
17 | # This program is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 | # GNU General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU General Public License
23 | # along with this program. If not, see .
24 | """
25 | TrollBUFR - table update.
26 | """
27 | from __future__ import print_function
28 | from __future__ import absolute_import
29 |
30 | import sys
31 | import os
32 | import urllib.request, urllib.error, urllib.parse
33 | import tarfile
34 | import zipfile
35 | from argparse import ArgumentParser
36 | from argparse import RawDescriptionHelpFormatter
37 |
38 | import logging
39 | logger = logging.getLogger("trollbufr")
40 |
__version__ = "0.2.0"

# Process exit codes.
E_OK = 0   # success
E_ARG = 1  # argument / environment error
E_ERR = 2  # runtime error
46 |
47 |
def arg_parser():
    """Parse the command line and configure the root logger.

    Verbosity: default WARN, -v INFO, -vv DEBUG (with code location in the
    log format).

    :return: the parsed argparse namespace.
    """
    program_name = os.path.basename(sys.argv[0])
    # Setup argument parser
    parser = ArgumentParser(description=__import__('__main__').__doc__,
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument("-v", "--verbose", dest="verbose", action="count",
                        help="set verbosity level [default: %(default)s]")
    parser.add_argument('-V', '--version', action='version',
                        version="%s %s" % (program_name, __version__))
    parser.add_argument("-t", "--tables-path", metavar="PATH",
                        default=os.getenv("BUFR_TABLES"),
                        help="path to tables, if not set in $BUFR_TABLES")
    parser.add_argument("-s", "--strip", type=int, metavar="N",
                        help="strip N top-level dirs on un-tar")
    parser.add_argument("--download", action="store_true",
                        help="only download table archives")
    parser.add_argument("-F", "--url-file", metavar="FILE",
                        help="File with URL list")
    parser.add_argument("-U", "--url", metavar="URL", nargs="+",
                        help="URL for table archive")
    args = parser.parse_args()
    # Setup logger
    log_formater_line = "[%(levelname)s] %(message)s"
    if not args.verbose:
        loglevel = logging.WARN
    elif args.verbose == 1:
        loglevel = logging.INFO
    else:
        loglevel = logging.DEBUG
        log_formater_line = "[%(levelname)s: %(module)s:%(lineno)d] %(message)s"
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(log_formater_line))
    handler.setLevel(loglevel)
    root_logger = logging.getLogger('')
    root_logger.setLevel(loglevel)
    root_logger.addHandler(handler)
    # Return arguments
    return args
105 |
106 |
def download_all(args, url_list):
    """Download every archive URL into the tables directory.

    :param args: parsed arguments; uses args.tables_path.
    :param url_list: list of archive URLs.
    :return: list of paths of the successfully downloaded archives
        (empty if the tables path does not exist).
    """
    arc_list = []
    if not os.path.exists(args.tables_path):
        # Was `return E_ARG` (an int) from inside the loop, which made the
        # caller crash iterating over it; return an empty list instead.
        logger.warning("Path does not exist: %s", args.tables_path)
        return arc_list
    for url in url_list:
        try:
            arc_name = url.split("/")[-1]
            arc_dest = os.path.join(args.tables_path, arc_name)
            logger.info("Download %s", url)
            response = urllib.request.urlopen(url)
            # urlopen().read() yields bytes, so the destination must be
            # opened in binary mode ("w" raised TypeError under Python 3).
            with open(arc_dest, "wb") as dest:
                dest.write(response.read())
        except Exception as e:
            logger.warning("%s : %s", url, e)
        else:
            arc_list.append(arc_dest)
    return arc_list
126 |
127 |
def un_tar(args, arc_dest):
    """Extract (compressed) TAR file below args.tables_path.

    Strips args.strip leading path components from each member name.
    NOTE(review): member names are not sanitized; a malicious archive with
    ".." components could write outside args.tables_path.
    """
    logger.info("Extract %s", arc_dest)
    with tarfile.open(arc_dest, "r") as tar_h:
        for member in tar_h:
            # TAR member names always use "/" as separator, independent of
            # the local os.path.sep.
            name_parts = member.name.split("/")[args.strip:]
            if not name_parts:
                continue
            new_name = os.path.join(args.tables_path, *name_parts)
            if member.isdir():
                logger.debug("mkdir: %s", new_name)
                # exist_ok replaces the former bare `except: pass`.
                os.makedirs(new_name, exist_ok=True)
            elif member.isfile():
                logger.debug("write: %s", new_name)
                # Archive content is bytes; text mode ("w") raised
                # TypeError under Python 3.
                with open(new_name, "wb") as fh:
                    fh.write(tar_h.extractfile(member).read())
149 |
150 |
def un_zip(args, arc_dest):
    """Extract ZIP file below args.tables_path.

    Strips args.strip leading path components from each member name.
    NOTE(review): member names are not sanitized; a malicious archive with
    ".." components could write outside args.tables_path.
    """
    logger.info("Extract %s", arc_dest)
    with zipfile.ZipFile(arc_dest, "r") as zip_h:
        for member in zip_h.infolist():
            # ZIP member names always use "/" as separator, independent of
            # the local os.path.sep.
            name_parts = member.filename.split("/")[args.strip:]
            if not name_parts:
                continue
            new_name = os.path.join(args.tables_path, *name_parts)
            if member.filename.endswith("/"):
                # Directory entry.
                logger.debug("mkdir: %s", new_name)
                # exist_ok replaces the former bare `except: pass`.
                os.makedirs(new_name, exist_ok=True)
            else:
                logger.debug("write: %s", new_name)
                # Archive content is bytes; text mode ("w") raised
                # TypeError under Python 3.
                with open(new_name, "wb") as fh:
                    fh.write(zip_h.open(member).read())
172 |
173 |
def run(argv=None):
    """Entry point: download the table archives and extract them.

    :param argv: optional extra command line arguments (appended to
        sys.argv before parsing).
    :return: exit code E_OK, E_ARG or E_ERR.
    """
    if argv is None:
        argv = sys.argv
    else:
        sys.argv.extend(argv)
    args = arg_parser()
    # Collect the archive URLs, either from -U or from the -F list file.
    if args.url:
        url_list = args.url
    elif args.url_file:
        url_list = []
        with open(args.url_file, "r") as fh_url:
            for line in fh_url:
                url_list.append(line.strip())
    else:
        logger.error("URL or URL-file missing!\n")
        return E_ERR
    try:
        # (removed leftover debug `print(args)`)
        logger.debug("Sources: %s", url_list)
        logger.debug("Destination: %s", args.tables_path)
        arc_list = download_all(args, url_list)
        if not args.download:
            # Extract each archive and delete it on success.
            for arc_dest in arc_list:
                try:
                    if tarfile.is_tarfile(arc_dest):
                        un_tar(args, arc_dest)
                    elif zipfile.is_zipfile(arc_dest):
                        un_zip(args, arc_dest)
                    else:
                        logger.warning("Unknown archive format: %s", arc_dest)
                except Exception as e:
                    logger.warning("Extract %s : %s", arc_dest, e)
                else:
                    os.remove(arc_dest)
    except KeyboardInterrupt:
        # Interactive abort is not an error.
        return E_OK
    except Exception as e:
        logger.error(e)
        return E_ERR
    return E_OK
215 |
216 |
if __name__ == "__main__":
    # Propagate the exit code of the table update to the shell.
    sys.exit(run())
219 |
--------------------------------------------------------------------------------
/trollbufr/version.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright (c) 2016-2018 Alexander Maul
5 | #
6 | # Author(s):
7 | #
8 | # Alexander Maul
9 | #
10 | # This program is free software: you can redistribute it and/or modify
11 | # it under the terms of the GNU General Public License as published by
12 | # the Free Software Foundation, either version 3 of the License, or
13 | # (at your option) any later version.
14 | #
15 | # This program is distributed in the hope that it will be useful,
16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | # GNU General Public License for more details.
19 | #
20 | # You should have received a copy of the GNU General Public License
21 | # along with this program. If not, see .
22 |
23 | """Version file.
24 | Ported to Py3 09/2018
25 | """
26 |
27 | __major__ = "0"
28 | __minor__ = "11"
29 | __patch__ = "0"
30 |
31 | version = ".".join([__major__, __minor__, __patch__])
32 |
--------------------------------------------------------------------------------