├── .gitignore
├── Dockerfile
├── LICENSE.txt
├── MANIFEST.in
├── Makefile
├── README.rst
├── bin
├── specdal_gui
├── specdal_info
├── specdal_pipeline
└── specdalqt
├── doc
├── Makefile
├── api.rst
├── conf.py
├── data_model.rst
├── index.rst
├── installation.rst
├── introduction.rst
├── specdal_gui.rst
├── specdal_info.rst
└── specdal_pipeline.rst
├── requirements.txt
├── runDocker
├── setup.py
└── specdal
├── __init__.py
├── containers
├── __init__.py
├── collection.py
└── spectrum.py
├── examples
├── .gitignore
├── __init__.py
├── grouping.ipynb
├── process_collection.ipynb
└── process_spectrum.ipynb
├── filters
├── __init__.py
├── filter_std.py
├── filter_threshold.py
├── filter_white.py
├── is_monotonic.py
└── split_good_bad.py
├── gui
├── __init__.py
├── gui.py
├── pyqt
│ ├── Assets
│ │ ├── ajax-loader.gif
│ │ ├── icons8-csv-32.png
│ │ ├── icons8-cursor-32.png
│ │ ├── icons8-empty-flag-32.png
│ │ ├── icons8-flag-filled-32.png
│ │ ├── icons8-flag-save-32.png
│ │ ├── icons8-folder-24.png
│ │ ├── icons8-interpolate-32.png
│ │ ├── icons8-jump-correct-32.png
│ │ ├── icons8-math-32.png
│ │ ├── icons8-normal-distribution-histogram-32.png
│ │ ├── icons8-opened-folder-32.png
│ │ ├── icons8-proximal-join.png
│ │ ├── icons8-show-flag-32.png
│ │ └── icons8-stitch-32.png
│ ├── Makefile
│ ├── __init__.py
│ ├── __main__.py
│ ├── collection_plotter.py
│ ├── export_collection.py
│ ├── op_config.ui
│ ├── qt_viewer.ui
│ ├── qt_viewer_ui.py
│ ├── save_dialog.ui
│ └── viewer.py
├── select.png
└── viewer.py
├── operators
├── __init__.py
├── derivative.py
├── interpolate.py
├── jump_correct.py
├── proximal_join.py
└── stitch.py
├── readers
├── __init__.py
├── asd.py
├── pico.py
├── sed.py
└── sig.py
└── tests
├── test_collection.py
├── test_groupby.py
├── test_proximal_join.py
├── test_reader.py
├── test_resampler.py
├── test_spectrum.py
└── test_stitcher.py
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__*
2 | *~
3 | *.pyc
4 | /dist/
5 | /build/
6 | /*.egg-info
7 | *.ipynb_checkpoints*
8 | /_build/
9 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 |
3 | WORKDIR /home/app/
4 |
5 | # install things required for python3.6.9 installation
6 | RUN apt-get -y update
7 | RUN apt-get -y install git
8 | RUN apt-get -y install libssl-dev openssl wget
9 |
10 | # download and install python
11 | RUN wget https://www.python.org/ftp/python/3.6.9/Python-3.6.9.tgz
12 | RUN tar xzvf Python-3.6.9.tgz
13 | RUN apt-get -y install build-essential liblzma-dev
14 | RUN Python-3.6.9/configure
15 | RUN apt-get -y install zlib1g-dev libbz2-dev
16 | RUN make
17 | RUN make install
18 | RUN pip3 install --upgrade pip
19 | RUN pip3 install pyqt5
20 |
21 | # download and install specdal
22 | RUN git clone https://github.com/EnSpec/SpecDAL.git && pip install SpecDAL/
23 |
24 | WORKDIR ../
25 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) [2017] [Clayton Kingdon, Young Lee]
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include specdal/gui/*.png
2 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | default:
2 | make -C specdal/gui/pyqt
3 | python setup.py install
4 |
5 | clean:
6 | pip uninstall SpecDAL
7 |
8 | test_gui: default
9 | bin/specdal_gui
10 |
11 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Visit our `ReadTheDocs <http://specdal-test.readthedocs.io/en/latest/>`_.
2 |
3 | Introduction
4 | ============
5 |
6 | ``specdal`` is a Python package for loading and manipulating field
7 | spectroscopy data. It currently supports readers for ASD, SVC, and PSR
8 | spectrometers. ``specdal`` provides useful functions and command line
9 | scripts for processing and aggregating the data.
10 |
11 | Features
12 | ========
13 |
14 | 1. Command line interface
15 |
16 | - specdal_info: lightweight script to read and display content of
17 | spectral files
18 |
19 | - specdal_pipeline: default script to convert spectral files into
20 | datasets and figures
21 |
22 | 2. Python interface
23 |
24 | - readers for .asd, .sig, .sed spectral files
25 | - Pico files (WIP)
26 |
27 | - spectral functions that operate on pandas objects
28 |
29 | - interpolation
30 |
31 | - jump_correction
32 |
33 | - joining proximal measurements (WIP)
34 |
35 | - ``Spectrum`` and ``Collection`` classes which wrap around pandas
36 | objects to provide simpler interface for spectral functions
37 |
38 | 3. GUI (under development)
39 |
40 | Tutorials
41 | =========
42 |
43 | See the Jupyter notebooks `here
44 | <https://github.com/EnSpec/SpecDAL/tree/master/specdal/examples>`_.
45 |
46 |
47 | Installation
48 | ============
49 |
50 | SpecDAL can be installed from PyPI using pip. For a more detailed
51 | walkthrough, see
52 | http://specdal-test.readthedocs.io/en/latest/installation.html
53 |
54 | Installation from Source
55 | ========================
56 |
57 | **Warning:** This method of installation will override any other versions of SpecDAL
58 | in your current environment. A virtual environment can be used to preserve other installations.
59 |
60 | SpecDAL can also be installed from source. Open a terminal and run the command:
61 |
62 | ``git clone https://github.com/EnSpec/SpecDAL.git && pip install SpecDAL/``
63 |
64 | The SpecDAL python package and ``specdal_pipeline`` command-line tool will be
65 | installed on your system (see ``specdal_pipeline --help`` for usage).
66 |
67 |
68 | Example Usage
69 | =============
70 |
71 | For a description of all command line arguments: ``specdal_pipeline --help``.
72 |
73 | To produce an individual plot and textfile for every spectrum file
74 | in directory ``/path/to/spectra/`` and store the results in ``specdal_output/``:
75 |
76 | ``specdal_pipeline -o specdal_output /path/to/spectra/``
77 |
78 | To only output whole-dataset images and files:
79 |
80 | ``specdal_pipeline -oi -o specdal_output /path/to/spectra/``
81 |
82 | To only output images, with no data files:
83 |
84 | ``specdal_pipeline -od -o specdal_output /path/to/spectra/``
85 |
86 |
87 | To group input files by the first 3 underscore-separated components
88 | of their filename (such that ``foo_bar_baz_001.asd`` and
89 | ``foo_bar_baz_002.asd`` will appear in one group, and
90 | ``foo_bar_qux_001.asd`` in another):
91 |
92 | ``specdal_pipeline -g -gi 0 1 2 -- /path/to/spectra/``
93 |
94 | To also output the mean and median of every group of spectra:
95 |
96 | ``specdal_pipeline -g -gi 0 1 2 -gmean -gmedian /path/to/spectra/``
97 |
98 | To remove all white reference spectra from the output dataset (leaves input files intact):
99 |
100 | ``specdal_pipeline --filter_white /path/to/spectra/``
101 |
102 | To remove all white reference spectra from the dataset, as well as spectra
103 | with a 750-1200 nm reflectance that is greater than 1 standard deviation from the mean,
104 | or with a 500-600 nm reflectance that is greater than 2 standard deviations from the mean:
105 |
106 | ``specdal_pipeline --filter_white --filter_std 750 1200 1 500 600 2 -- /path/to/spectra/``
107 |
108 | To perform the filtering above, and then group the remaining spectra by filename:
109 |
110 | ``specdal_pipeline --filter_white --filter_std 750 1200 1 500 600 2
111 | -g -gi 0 1 2 /path/to/spectra/``
112 |
113 | To group the spectra by filename, and then perform filtering on each group:
114 |
115 | ``specdal_pipeline --filter_white --filter_std 750 1200 1 500 600 2
116 | -g -gi 0 1 2 --filter_on group /path/to/spectra/``
117 |
118 | Usage with Docker
119 | =================
120 |
121 | Steps:
122 |
123 | - Download and save the ``Dockerfile`` and ``runDocker`` files into the directory that contains the folders or files you want to process.
124 |
125 | - Download and install docker software from: https://www.docker.com/get-started
126 |
127 | - Run the following in terminal from directory where the Dockerfile and runDocker are stored
128 |
129 | ``docker build -t specdal --no-cache -f Dockerfile .``
130 |
131 | ``bash runDocker``
132 |
133 | That will take you inside the docker called 'specdal' where you can run ``specdal_pipeline`` command as shown in the example usage above. Your current directory on the laptop will get mapped to ``/home/`` in the docker.
134 |
135 | Once the image is built, you only need to run ``bash runDocker`` to enter the container again. Building the image will take some time, and it requires approximately 1.4 GB of disk space.
136 |
--------------------------------------------------------------------------------
/bin/specdal_gui:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | python -m specdal.gui.pyqt
3 |
--------------------------------------------------------------------------------
/bin/specdal_info:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import numpy as np
3 | import argparse
4 | import sys
5 | from os.path import abspath, expanduser
6 | import os
7 | sys.path.insert(0, os.path.abspath('..'))
8 | from specdal.containers.spectrum import Spectrum
9 | from specdal.readers import read
10 |
11 | parser = argparse.ArgumentParser(description='SpecDAL Info')
12 |
13 | # io options
14 | parser.add_argument('input_files', metavar='FILE', nargs='+',
15 | action='store', type=str,
16 | help='input directory containing input files')
17 | parser.add_argument('--raw', action='store_true', default=False,
18 | help='output raw dataframe and metadata and exit')
19 | parser.add_argument('--list_measure_types', action='store_true', default=False,
20 | help='list measurement types and exit')
21 | parser.add_argument('--list_metadata_fields', action='store_true', default=False,
22 | help='list metadata fields and exit')
23 | parser.add_argument('--measure_type', # choices=['pct_reflect',
24 | # 'tgt_count',
25 | # "ref_count",
26 | # "tgt_radiance",
27 | # "ref_radiance",
28 | # "tgt_irradiance",
29 | # "ref_irradiance",
30 | # "tgt_reflect",
31 | # "ref_reflect"],
32 | default='pct_reflect',
33 | help='type of measurement to read')
34 | parser.add_argument('--metadata', metavar='FIELD', nargs='*',
35 | help='specify metadata fields to display')
36 | parser.add_argument('-n', '--N', type=int, default=2,
37 | help='number of spectra to display from head and tail')
38 | # misc
39 | parser.add_argument('-d', '--debug', action='store_true')
40 | args = parser.parse_args()
41 | if args.debug:
42 | print('args = {}'.format(args))
43 |
44 | ################################################################################
45 | # main
46 | ################################################################################
47 |
48 | for f in args.input_files:
49 | assert os.path.isfile(f)
50 | print('\n\n'+'-'*80)
51 | print('{:<20}\t{}\n'.format('file:', f))
52 |
53 | if args.list_measure_types or args.list_metadata_fields or args.raw:
54 | data, meta = read(f)
55 | if args.raw:
56 | assert 2*args.N < data.shape[0]
57 | #print('{}\n{}'.format('data:', data.iloc[np.r_[0:args.N, -args.N:0]]))
58 | print('{}\n{}'.format('data:', data.iloc[0:args.N].to_string(col_space=10)))
59 | print('{}\n{}'.format('...', data.iloc[-args.N:].to_string(
60 | header=None, index_names=False, col_space=10)))
61 | print('metadata:')
62 | for key, item in meta.items():
63 | print(' {:<20}\t{}'.format(key+':', item))
64 | continue
65 | if args.list_measure_types:
66 | print('{:<20}\t{}'.format('measure_types: ', data.columns.values))
67 | if args.list_metadata_fields:
68 | print('{:<20}\t{}'.format('metadata_fields: ', list(meta.keys())))
69 | continue
70 |
71 | s = Spectrum(name=f, filepath=f, measure_type=args.measure_type)
72 |
73 | assert 2*args.N < s.measurement.size
74 |
75 | print('{:<20}\t{}'.format('measure_type:', s.measure_type))
76 | print('{:<20}'.format('metadata:'))
77 | for i, (key, item) in enumerate(s.metadata.items()):
78 | if args.metadata is not None:
79 | if key not in args.metadata:
80 | continue
81 | print(' {:<20}\t{}'.format(key+':', item))
82 | print('{:<20}'.format('data:'))
83 | print('{:>2}'.format(s.measurement.iloc[0:args.N].to_string()))
84 | print('...\n{}'.format(s.measurement.iloc[-args.N:].to_string(header=None)))
85 |
--------------------------------------------------------------------------------
/bin/specdal_pipeline:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import argparse
4 | from argparse import RawTextHelpFormatter
5 | import sys
6 | from os.path import abspath, expanduser
7 | import os
8 | import matplotlib
9 | matplotlib.use('Agg')
10 | from matplotlib import pyplot as plt
11 | sys.path.insert(0, os.path.abspath('..'))
12 | from specdal.containers.collection import Collection, proximal_join, df_to_collection
13 | import pandas as pd
14 | from specdal import filters
15 | import shutil
16 |
17 | parser = argparse.ArgumentParser(description='SpecDAL Pipeline',
18 | formatter_class=RawTextHelpFormatter)
19 | # io options
20 | parser.add_argument('input_dir', metavar='INPUT_PATH', action='store',
21 | help='directory containing input files')
22 | parser.add_argument('--proximal_reference', default=None, metavar='PATH',
23 | action='store',
24 | help='directory containing proximal reference spectral files')
25 | parser.add_argument('-o', '--output_dir', metavar='PATH',
26 | default='./specdal_output', action='store',
27 | help='directory to store the csv files and figures')
28 | parser.add_argument('-op', '--prefix', metavar='PREFIX',
29 | type=str, action='store', default='dataset',
30 | help='option to specify prefix for output dataset files')
31 | parser.add_argument('-of', '--omit_figures', action='store_true',
32 | help='option to omit output png figures')
33 | parser.add_argument('-od', '--omit_data', action='store_true',
34 | help='option to omit output csv files')
35 | parser.add_argument('-oi', '--omit_individual', action='store_true',
36 | help='option to omit output of individual csv file for each spectrum file')
37 | # interpolation
38 | parser.add_argument('-i', '--interpolate', default=None,
39 | choices=['slinear', 'cubic'],
40 | help='specify the interpolation method.\n'
41 | 'method descriptions can be found on scipy docs:\n'
42 | 'https://docs.scipy.org/doc/scipy-0.19.1/reference/generated/scipy.interpolate.interp1d.html')
43 | parser.add_argument('-is', '--interpolate_spacing', metavar='SPC',
44 | action="store", type=int, default=1,
45 | help='specify desired spacing for interpolation in nanometers\n')
46 | ## overlap stitcher
47 | parser.add_argument('-s', '--stitch', default=None,
48 | choices=['mean', 'median', 'min', 'max','first','last'],
49 | help='specify overlap stitching method;\n'
50 | 'not necessary if data at detector edges does not overlap')
51 |
52 | parser.add_argument('-sr', '--stitch_reference', default=None, metavar='REF',
53 | type=int, help='specify the reference detector')
54 | # jump corrector
55 | parser.add_argument('-j', '--jump_correct', default=None,
56 | choices=['additive'],
57 | help='specify jump correction method;')
58 | parser.add_argument('-js', '--jump_correct_splices', metavar='WVL',
59 | default=[1000, 1800], type=int, nargs='+',
60 | help='wavelengths of jump locations')
61 | parser.add_argument('-jr', '--jump_correct_reference', metavar='REF',
62 | type=int, action='store', default=0,
63 | help='specify the reference detector '
64 | '(e.g. VNIR is 1, SWIR1 is 2)')
65 | # groupby
66 | parser.add_argument('-g', '--group_by', action='store_true',
67 | help='create groups using filenames')
68 | parser.add_argument('-gs', '--group_by_separator', type=str,
69 | metavar='S', default='_',
70 | help='specify filename separator character to define groups')
71 | parser.add_argument('-gi', '--group_by_indices', metavar='I', nargs='*', type=int,
72 | help='specify the indices of the split filenames to define a group')
73 | parser.add_argument('-gmean', '--group_mean', dest='aggr', action='append_const',
74 | default=[],
75 | const='mean', help='calculate group means and append to group figures')
76 | parser.add_argument('-gmedian', '--group_median', dest='aggr', action='append_const',
77 | const='median', help='calculate group median and append to group figures')
78 | parser.add_argument('-gstd', '--group_std', dest='aggr', action='append_const',
79 | const='std', help='calculate group standard deviation and append to group figures')
80 |
81 | #error filter
82 | parser.add_argument('-fstd','--filter_std',metavar='wl0 wl1 n_std', nargs="+",type=float,
83 | help='Remove spectra from dataset with a pct_reflect'
84 | ' over n_std\naway from the mean between wavelengths wl0 and wl1.'
85 | '\nCan specify multiple sets of wavenumber ranges and thresholds')
86 |
87 | parser.add_argument('-fthresh','--filter_threshold',metavar="wl0 wl1 LO HI",type=float,
88 | nargs='+',
89 | help='Remove spectra from the dataset with a pct_reflect outside\n(LO,HI)'
90 | 'in the wavenumber range wl0 wl1. Can specify multiple\nsets of wavenumber'
91 | ' ranges and thresholds')
92 |
93 | parser.add_argument('-fwhite','--filter_white',action='store_true',
94 | help='Remove white reference spectra from dataset')
95 |
96 | parser.add_argument('-fg','--filter_group',metavar='method',
97 | choices=['mean', 'median', 'min', 'max'],default='mean',
98 | help='How to combine the wavelengths selected by --filter_group.')
99 |
100 | parser.add_argument('-fo','--filter_on',metavar='set',
101 | choices=['collection','group','both'],default='collection',
102 | help='What subset of the data to apply filter on'
103 | ' (collection, group or both)')
104 |
105 | parser.add_argument('-yl','--ylim',metavar=('ymin','ymax'),type=float,
106 | nargs=2,help='Force the y axis of plots to display between ymin and ymax')
107 | # misc
108 | parser.add_argument('-q', '--quiet', default=False, action='store_true')
109 | parser.add_argument('-f', '--force', default=False, action='store_true',
110 | help='if output path exists, remove previous output and run')
111 |
112 | args = parser.parse_args()
113 |
114 | ################################################################################
115 | # main
116 | ################################################################################
117 | VERBOSE = not args.quiet
118 |
119 | def print_if_verbose(*args, **kwargs):
120 | if VERBOSE:
121 | print(*args, **kwargs)
122 |
123 | indir = abspath(expanduser(args.input_dir))
124 | outdir = abspath(expanduser(args.output_dir))
125 | datadir = os.path.join(outdir, 'data')
126 | figdir = os.path.join(outdir, 'figures')
127 |
128 | if not os.path.exists(indir):
129 | raise FileNotFoundError("path " + indir + " does not exist")
130 |
131 | if os.path.exists(outdir):
132 | while not args.force:
133 | # prompt user for action
134 | ans = input(outdir + ' already exists. Are you sure you want to remove its contents? [y/n]: ')
135 | ans = ans.strip().lower()
136 | if ans == 'y':
137 | args.force = True
138 | elif ans == 'n':
139 | print('exiting pipeline...')
140 | sys.exit(0)
141 | print('removing {}'.format(outdir))
142 | shutil.rmtree(outdir)
143 |
144 | # make output directories
145 | for d in (outdir, datadir, figdir):
146 | os.makedirs(d, exist_ok=True)
147 |
148 | c = Collection(name=args.prefix)
149 | print_if_verbose('Reading target measurements from ' + indir)
150 | c.read(directory=indir)
151 |
152 | if args.proximal_reference:
153 | print_if_verbose('Reading base measurements from ' + args.proximal_reference)
154 | c_base = Collection(name=args.prefix + '_base')
155 | c_base.read(directory=args.proximal_reference)
156 |
157 | if args.stitch:
158 | print_if_verbose('Stitching...')
159 | c.stitch(method=args.stitch,jump_reference=args.stitch_reference)
160 | if args.proximal_reference:
161 | c_base.stitch(method=args.stitch,jump_reference=args.stitch_reference)
162 |
163 | if args.interpolate:
164 | print_if_verbose('interpolating...')
165 | c.interpolate(spacing=args.interpolate_spacing, method=args.interpolate)
166 | if args.proximal_reference:
167 | c_base.interpolate(spacing=args.interpolate_spacing, method=args.interpolate)
168 |
169 | if args.jump_correct:
170 | print_if_verbose('Jump correcting...')
171 | c.jump_correct(splices=args.jump_correct_splices,
172 | reference=args.jump_correct_reference,
173 | method=args.jump_correct)
174 | if args.proximal_reference:
175 | c_base.jump_correct(splices=args.jump_correct_splices,
176 | reference=args.jump_correct_reference,
177 | method=args.jump_correct)
178 |
179 | if args.proximal_reference:
180 | print_if_verbose('Joining proximal data...')
181 | c = proximal_join(c_base, c, on='gps_time_tgt', direction='nearest')
182 |
183 |
184 | #filter bad
185 | def do_filters(c):
186 | if args.filter_std or args.filter_threshold or args.filter_white:
187 | print_if_verbose('Filtering...',end=' ')
188 | if not filters.is_monotonic(c):
189 | print("ERROR: Attempting to filter unstitched spectra. See specdal_pipeline --help")
190 | sys.exit(1)
191 | c_bads = []
192 | #TODO: Nicer way to select from various filter methods
193 | #or a way to chain filtering methods
194 | if args.filter_white:
195 | c, c_bad = filters.filter_white(c)
196 | if not (c_bad.data is None):
197 | c_bads.append(c_bad.data)
198 |
199 | if args.filter_std and c.data is not None:
200 | if len(args.filter_std)%3 != 0:
201 | print("Incorrect parameters for --filter_std. See specdal_pipeline --help")
202 | sys.exit(1)
203 |
204 | for i in range(0,len(args.filter_std),3):
205 | wl1,wl2,std_thresh = args.filter_std[i:i+3]
206 | c, c_bad = filters.filter_std(c, wl1, wl2, std_thresh,
207 | group = args.filter_group)
208 | if not (c_bad.data is None):
209 | c_bads.append(c_bad.data)
210 |
211 | if args.filter_threshold and c.data is not None:
212 | if len(args.filter_threshold)%4 != 0:
213 | print("Incorrect parameters for --filter_threshold. See specdal_pipeline --help")
214 | sys.exit(1)
215 | for i in range(0,len(args.filter_threshold),4):
216 | wl1,wl2,low,high = args.filter_threshold[i:i+4]
217 | c, c_bad = filters.filter_threshold(c, wl1, wl2, low, high,
218 | group = args.filter_group)
219 | if not (c_bad.data is None):
220 | c_bads.append(c_bad.data)
221 |
222 |
223 | if len(c_bads) > 0:
224 | c_bad = df_to_collection(pd.concat(c_bads,axis=1).T,name=c.name+'_rejected')
225 | print_if_verbose('Rejected {} spectra'.format(len(c_bad.spectra)),end=' ')
226 | if len(c_bad.spectra):
227 | if not args.omit_figures:
228 | c_bad.plot(legend=False)
229 | if args.ylim:
230 | plt.ylim(*args.ylim)
231 | plt.savefig(os.path.join(figdir, c.name + "_rejected.png"), bbox_inches="tight")
232 | plt.close()
233 | if not args.omit_data:
234 | c_bad.to_csv(os.path.join(datadir, c.name + '_rejected.csv'))
235 | if args.filter_std or args.filter_threshold or args.filter_white:
236 | print_if_verbose('')
237 | return c
238 |
239 | if args.filter_on in ('collection','both'):
240 | c = do_filters(c)
241 | # group by
242 | groups = None
243 | if args.group_by:
244 | print_if_verbose('Grouping...')
245 | groups = c.groupby(separator=args.group_by_separator,
246 | indices=args.group_by_indices)
247 | if args.filter_on in ('group','both'):
248 | bad_keys = []
249 | for key in groups:
250 | groups[key] = do_filters(groups[key])
251 | #reject the groups with no good data
252 | if groups[key].data is None:
253 | bad_keys.append(key)
254 | for key in bad_keys:
255 | groups.pop(key)
256 |
257 | # output individual spectra
258 | if not args.omit_individual:
259 | if not args.omit_figures:
260 | print_if_verbose('Saving individual spectrum outputs...')
261 | indiv_datadir = os.path.join(datadir, 'indiv')
262 | indiv_figdir = os.path.join(figdir, 'indiv')
263 | os.mkdir(indiv_datadir)
264 | os.mkdir(indiv_figdir)
265 | for spectrum in c.spectra:
266 | if not args.omit_data:
267 | spectrum.to_csv(os.path.join(indiv_datadir, spectrum.name + '.csv'))
268 | if not args.omit_figures:
269 | spectrum.plot(legend=False)
270 | if args.ylim:
271 | plt.ylim(*args.ylim)
272 | plt.savefig(os.path.join(indiv_figdir, spectrum.name + '.png'), bbox_inches='tight')
273 | plt.close()
274 |
275 | # output whole and group data
276 | if not args.omit_data:
277 | print_if_verbose('Saving entire and grouped data outputs...')
278 | c.to_csv(os.path.join(datadir, c.name + ".csv"))
279 | if groups:
280 | for group_id, group_coll in groups.items():
281 | group_coll.to_csv(os.path.join(datadir, group_id + '.csv'))
282 |
283 | # calculate group aggregates
284 | if len(args.aggr) > 0:
285 | print_if_verbose('Calculating group aggregates...')
286 | for aggr in args.aggr:
287 | aggr_coll = Collection(name=c.name+'_'+aggr,
288 | spectra=[getattr(group_coll, aggr)(append=True)
289 | for group_coll in groups.values()],
290 | measure_type=c.measure_type)
291 | # output
292 | print_if_verbose('Saving group {} outputs...'.format(aggr))
293 | aggr_coll.to_csv(os.path.join(datadir, aggr_coll.name + '.csv'))
294 | aggr_coll.plot(legend=False)
295 | if args.ylim:
296 | plt.ylim(*args.ylim)
297 | plt.savefig(os.path.join(figdir, aggr_coll.name + '.png'), bbox_inches='tight')
298 | plt.close()
299 |
300 | # output whole and group figures (possibly with aggregates appended)
301 | if not args.omit_figures:
302 | print_if_verbose('Saving entire and grouped figure outputs...')
303 | c.plot(legend=False)
304 | if args.ylim:
305 | plt.ylim(*args.ylim)
306 | plt.savefig(os.path.join(figdir, c.name + ".png"), bbox_inches="tight")
307 | plt.close()
308 | if groups:
309 | for group_id, group_coll in groups.items():
310 | group_coll.plot(legend=False)
311 | if args.ylim:
312 | plt.ylim(*args.ylim)
313 | plt.savefig(os.path.join(figdir, group_id + ".png"), bbox_inches="tight")
314 | plt.close()
315 |
--------------------------------------------------------------------------------
/bin/specdalqt:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | python -m specdal.gui.pyqt
3 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = python -msphinx
7 | SPHINXPROJ = specdal
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/doc/api.rst:
--------------------------------------------------------------------------------
1 | =============
2 | API Reference
3 | =============
4 |
5 | This is the class and function reference page of SpecDAL.
6 |
7 | Spectrum
8 | ========
9 |
10 | .. automodule:: specdal.containers.spectrum
11 | :members:
12 |
13 | Collection
14 | ==========
15 |
16 | .. autoclass:: specdal.containers.collection.Collection
17 | :members:
18 |
19 | .. autofunction:: specdal.containers.collection.df_to_collection
20 |
21 | Operators
22 | =========
23 |
24 | Specdal's operators perform on both pandas and specdal objects. In the
25 | following operations, pandas series and dataframes correspond to
26 | specdal's spectrum and collection, respectively.
27 |
28 | .. automodule:: specdal.operators
29 | :members:
30 |
31 |
32 | Readers
33 | =======
34 |
35 | Specdal's readers parse a variety of input formats into the common
36 | specdal.containers.spectrum.Spectrum data type. Readers are used
37 | internally by Spectrum and Collection when constructed with the
38 | filename argument, but can also be used individually.
39 |
40 | .. autofunction:: specdal.readers.read
41 |
42 | .. autofunction:: specdal.readers.asd.read_asd
43 |
44 | .. autofunction:: specdal.readers.sig.read_sig
45 |
46 | .. autofunction:: specdal.readers.sed.read_sed
47 |
48 | .. autofunction:: specdal.readers.pico.read_pico
49 |
50 | Filters
51 | =======
52 |
53 | Specdal's filters operate on Collection objects, splitting
54 | them into "good" and "bad" spectra based on certain criteria.
55 |
56 | .. automodule:: specdal.filters
57 | :members:
58 |
59 | GUI
60 | ===
61 |
62 | Specdal provides a Tkinter-based GUI for plotting and manually
63 | flagging spectra from Collections.
64 |
65 | .. automodule:: specdal.gui.viewer
66 | :members:
67 |
--------------------------------------------------------------------------------
/doc/conf.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# specdal documentation build configuration file, created by
# sphinx-quickstart on Wed Oct 4 14:05:24 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('../specdal'))


# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              # 'numpydoc'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'specdal'
copyright = '2017, Young Lee'
author = 'Young Lee'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# NOTE(review): setup.py declares version='0.2.2' -- confirm which is current.
version = '2.0.0'
# The full version, including alpha/beta/rc tags.
release = '2.0.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# Sphinx >= 5 rejects None here; 'en' is the equivalent explicit value and
# is accepted by older Sphinx versions as well.
language = 'en'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'classic'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
    '**': [
        # 'about.html',
        # 'navigation.html',
        'relations.html',  # needs 'show_related': True theme option to display
        'searchbox.html',
        # 'donate.html',
    ]
}


# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'specdaldoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'specdal.tex', 'specdal Documentation',
     'Young Lee', 'manual'),
]


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'specdal', 'specdal Documentation',
     [author], 1)
]


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'specdal', 'specdal Documentation',
     author, 'specdal', 'One line description of project.',
     'Miscellaneous'),
]
--------------------------------------------------------------------------------
/doc/data_model.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | Data Model
3 | ==========
4 |
5 | SpecDAL relies on Pandas data structures to represent spectroscopy
6 | measurements. A single measurement is stored in pandas.Series while a
7 | collection of measurements is stored in pandas.DataFrame. SpecDAL
8 | provides Spectrum and Collection classes that wrap Series and
9 | DataFrames along with spectral metadata. Spectral operators, such as
10 | interpolation, are provided as functions on pandas objects or as
11 | methods of specdal's classes.
12 |
13 |
14 | Pandas Representation of Spectra
15 | ================================
16 |
17 | Series - single spectrum
18 | ------------------------
19 |
20 | DataFrame - collection of spectra
21 | ---------------------------------
22 |
23 |
24 |
25 | Spectrum and Collection Classes
26 | ===============================
27 |
28 | Spectrum - single spectrum
29 | --------------------------
30 |
31 | Collection - collection of spectra
32 | ----------------------------------
33 |
34 |
35 | Operators
36 | =========
37 |
38 |
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | SpecDAL Reference
2 | =================
3 |
4 | .. toctree::
5 |    :maxdepth: 2
6 |    :caption: Contents:
7 | 
8 |    introduction
9 |    installation
10 |    data_model
11 |    api
12 |    specdal_pipeline
13 |    specdal_info
13 |
14 |
15 | Indices and tables
16 | ==================
17 |
18 | * :ref:`genindex`
19 | * :ref:`modindex`
20 | * :ref:`search`
21 |
--------------------------------------------------------------------------------
/doc/installation.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Installation
3 | ============
4 |
5 | SpecDAL is available via pip (``pip install specdal``) or on `Github
6 | <https://github.com/EnSpec/SpecDAL>`_. This page provides
7 | detailed walkthrough of the installation process intended for users
8 | who are not comfortable in Python environment.
9 |
10 | Prerequisites
11 | =============
12 |
13 | - python3
14 | - pip3
15 |
16 | Setting up the virtual environment (recommended)
17 | ------------------------------------------------
18 |
19 | Although not necessary, it is good practice to install Python packages
20 | in a virtual environment. Virtual environments provide an isolated and
21 | self-contained environment for your Python session, which can help
22 | prevent conflicts across packages. We will walk through the process of
23 | creating one on Ubuntu Linux for demonstration.
24 |
25 | - Install virtualenv using pip installer.
26 |
27 | ::
28 |
29 | $ pip install --user virtualenv
30 |
31 | - Create a directory for storing virtual environments.
32 |
33 | ::
34 |
35 | $ mkdir ~/venv
36 |
37 | - Create a new virtual environment called ``specdal_env`` running python3
38 | by default.
39 |
40 | ::
41 |
42 | $ virtualenv -p python3 ~/venv/specdal_env
43 |
44 | If you're curious, you can navigate to that directory and find all
45 | the components that make up a Python environment. For example,
46 | packages are installed in ``~/venv/specdal_env/lib`` and binaries
47 | are stored in ``~/venv/specdal_env/bin``.
48 |
49 | - Before starting a Python session, we can activate the virtual
50 | environment as follows.
51 |
52 | ::
53 |
54 |     $ source ~/venv/specdal_env/bin/activate
55 | 
56 |   Note: On Windows, there should be an executable
57 |   ``~/venv/specdal_env/bin/activate.exe`` with a similar effect.
58 |
59 | You'll notice the name of your virtual environment in
60 | parentheses.
61 |
62 | ::
63 |
64 | (specdal_env) $
65 |
66 | - Once in this environment, we can install and use ``SpecDAL`` or
67 | other packages.
68 |
69 | ::
70 |
71 | (specdal_env) $ ... # install specdal
72 | (specdal_env) $ ... # write and run programs
73 |
74 | - When we're done, we can exit the virtual environment.
75 |
76 | ::
77 |
78 | $ deactivate
79 |
80 | Install via pip
81 | ===============
82 |
83 | - Stable version
84 |
85 | ::
86 |
87 | $ pip3 install specdal --upgrade
88 |
89 | - Latest development version
90 |
91 | ::
92 |
93 | $ pip3 install specdal --pre
94 |
95 | Install from Github
96 | ===================
97 |
98 | SpecDAL can be found on Enspec's Github `repo
99 | <https://github.com/EnSpec/SpecDAL>`_. Stable release can be
100 | found on ``master`` branch and the development version on ``dev``
101 | branch.
102 |
103 | Github walkthrough
104 | ------------------
105 |
106 | 1. Open terminal or Git-bash and navigate to the desired directory,
107 | ``~/specdal`` for this demo.
108 |
109 | ``cd ~/specdal``
110 |
111 | 2. The following command will clone the SpecDAL's Github repository.
112 |
113 | ::
114 |
115 | $ git clone https://github.com/EnSpec/SpecDAL.git
116 |
117 | You'll notice a new subdirectory ``SpecDAL`` with the source code.
118 |
119 | 3. Install SpecDAL.
120 |
121 | ::
122 |
123 | $ cd ./SpecDAL
124 | $ python setup.py install
125 |
126 | Install in development mode
127 | ---------------------------
128 |
129 | If you'd like to modify SpecDAL's source, it's useful to install the
130 | package in development mode.
131 |
132 | - Install in development mode
133 |
134 | ::
135 |
136 | $ python setup.py develop
137 |
138 | - Modify the source and run/test it.
139 |
140 | - Uninstall development mode
141 |
142 | ::
143 |
144 | $ python setup.py develop --uninstall
145 |
--------------------------------------------------------------------------------
/doc/introduction.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Introduction
3 | ============
4 |
5 | SpecDAL is a Python package for loading and manipulating field
6 | spectroscopy data. It currently supports readers for ASD, SVC, and PSR
7 | spectrometers. SpecDAL provides useful functions and command line
8 | scripts for processing and aggregating the data.
9 |
10 | Interface
11 | =========
12 |
13 | There are three options for using SpecDAL.
14 |
15 | 1. Python interface
16 |
17 | The lowest level interface is for users to import ``specdal`` as a
18 | Python module. Functions in ``specdal`` are written to operate
19 | directly on Pandas Series and DataFrames. ``specdal`` also provides
20 | classes that wrap around Pandas objects for convenience to users
21 | not familiar with Pandas.
22 |
23 |    Users at this level are encouraged to check out the :doc:`data model <data_model>`,
24 |    `Notebook examples
25 |    <https://github.com/EnSpec/SpecDAL/tree/master/specdal/examples>`_
26 |    , and the :doc:`API <api>`.
27 |
28 | 2. Command line interface
29 |
30 | Alternatively, users can utilize the command line scripts that
31 | ``specdal`` provides. The following scripts are currently
32 | distributed:
33 |
34 |    - :doc:`specdal_info <specdal_info>`: displays key information in a spectral file
35 |
36 |
37 |    - :doc:`specdal_pipeline <specdal_pipeline>`: converts a directory of spectral files into
38 | .csv files and figures
39 |
40 | 3. Graphical User Interface (GUI)
41 |
42 | At the highest level, ``SpecDAL`` provides a GUI that requires no
43 | programming. GUI can be handy for tasks such as outlier detection.
44 | GUI is provided as an executable, ``specdal_gui`` on Linux/Mac and
45 | ``specdal_gui.exe`` on Windows.
46 |
47 |
48 | Examples
49 | ========
50 |
51 | Check out the example Notebooks `here
52 | <https://github.com/EnSpec/SpecDAL/tree/master/specdal/examples>`_.
53 |
--------------------------------------------------------------------------------
/doc/specdal_gui.rst:
--------------------------------------------------------------------------------
1 | ==================
2 | Specdal Gui Script
3 | ==================
4 |
5 | Specdal provides a graphical interface ``specdal_gui`` for visually
6 | processing data files in a directory. The gui toolbar provides
7 | tools for stitching, jump correcting, and grouping spectra, and
8 | several options for manually filtering spectra.
9 |
10 | Usage
11 | =====
12 | ``specdal_gui``
13 |
14 |
--------------------------------------------------------------------------------
/doc/specdal_info.rst:
--------------------------------------------------------------------------------
1 | ===================
2 | Specdal Info Script
3 | ===================
4 |
5 | Usage
6 | =====
7 | ::
8 |
9 | usage: specdal_info [-h] [--raw] [--list_measure_types]
10 | [--list_metadata_fields] [--measure_type MEASURE_TYPE]
11 | [--metadata [FIELD [FIELD ...]]] [-n N] [-d]
12 | FILE [FILE ...]
13 |
14 |
15 | Command Line Arguments
16 | ======================
17 | positional arguments:
18 | ``FILE`` input directory containing input files
19 |
20 | optional arguments:
21 | ``-h, --help`` show this help message and exit
22 |
23 | ``--raw`` output raw dataframe and metadata and exit
24 |
25 | ``--list_measure_types`` list measurement types and exit
26 |
27 | ``--list_metadata_fields``
28 | list metadata fields and exit
29 |
30 | ``--measure_type MEASURE_TYPE``
31 | type of measurement to read
32 |
33 | ``--metadata [FIELD [FIELD ...]]``
34 | specify metadata fields to display
35 |
36 | ``-n N, --N N`` number of spectra to display from head and tail
37 |
38 | ``-d, --debug``
39 |
40 |
--------------------------------------------------------------------------------
/doc/specdal_pipeline.rst:
--------------------------------------------------------------------------------
1 | =======================
2 | Specdal Pipeline Script
3 | =======================
4 |
5 | Specdal provides a command line script ``specdal_pipeline`` for batch
6 | processing of spectral data files in a directory. A typical input to
7 | ``specdal_pipeline`` is a directory containing spectral files
8 | (i.e. .asd files), which will be converted into .csv files and figures
9 | of spectra. User can provide arguments to customize the processing
10 | operations (i.e. jump correction, groupby) and output (i.e. .csv file
11 | of group means). This page describes the usage and provides examples.
12 |
13 | Usage
14 | =====
15 | ::
16 |
17 | usage: specdal_pipeline [-h] [--proximal_reference PATH] [-o PATH]
18 | [-op PREFIX] [-of] [-od] [-oi] [-i {slinear,cubic}]
19 | [-is SPC] [-s {mean,median,min,max}] [-j {additive}]
20 | [-js WVL [WVL ...]] [-jr REF] [-g] [-gs S]
21 | [-gi [I [I ...]]] [-gmean] [-gmedian] [-gstd]
22 | [-fstd wl0 wl1 n_std [wl0 wl1 n_std ...]]
23 | [-fthresh wl0 wl1 LO HI [wl0 wl1 LO HI ...]] [-fwhite]
24 | [-fg method] [-fo set] [-yl ymin ymax] [-q] [-f]
25 | INPUT_PATH
26 |
27 | Command Line Arguments
28 | ======================
29 |
30 | positional arguments:
31 | ``INPUT_PATH`` directory containing input files
32 |
33 | optional arguments:
34 | ``-h, --help`` show this help message and exit
35 |
36 | ``--proximal_reference PATH``
37 | directory containing proximal reference spectral files
38 |
39 | ``-o PATH, --output_dir PATH``
40 | directory to store the csv files and figures
41 |
42 | ``-op PREFIX, --prefix PREFIX``
43 | option to specify prefix for output dataset files
44 |
45 | ``-of, --omit_figures`` option to omit output png figures
46 |
47 | ``-od, --omit_data`` option to omit output csv files
48 |
49 | ``-oi, --omit_individual``
50 | option to omit output of individual csv file for each spectrum file
51 |
52 | ``-i {slinear,cubic}, --interpolate {slinear,cubic}``
53 | specify the interpolation method.
54 | method descriptions can be found on scipy docs:
55 | https://docs.scipy.org/doc/scipy-0.19.1/reference/generated/scipy.interpolate.interp1d.html
56 |
57 | ``-is SPC, --interpolate_spacing SPC``
58 | specify desired spacing for interpolation in nanometers
59 |
60 | ``-s {mean,median,min,max}, --stitch {mean,median,min,max}``
61 | specify overlap stitching method;
62 | not necessary if data at detector edges does not overlap
63 |
64 | ``-j {additive}, --jump_correct {additive}``
65 | specify jump correction method;
66 |
67 | ``-js WVL [WVL ...], --jump_correct_splices WVL [WVL ...]``
68 | wavelengths of jump locations
69 |
70 | ``-jr REF, --jump_correct_reference REF``
71 | specify the reference detector (e.g. VNIR is 1, SWIR1 is 2)
72 |
73 | ``-g, --group_by``    create groups using filenames
74 |
75 | ``-gs S, --group_by_separator S``
76 | specify filename separator character to define groups
77 |
78 | ``-gi [I [I ...]], --group_by_indices [I [I ...]]``
79 | specify the indices of the split filenames to define a group
80 |
81 | ``-gmean, --group_mean`` calculate group means and append to group figures
82 |
83 | ``-gmedian, --group_median``
84 | calculate group median and append to group figures
85 |
86 | ``-gstd, --group_std`` calculate group standard deviation and append to group figures
87 |
88 | ``-fstd wl0 wl1 n_std [wl0 wl1 n_std ...], --filter_std wl0 wl1 n_std [wl0 wl1 n_std ...]``
89 | Remove spectra from dataset with a pct_reflect over n_std
90 | away from the mean between wavelengths wl0 and wl1.
91 | Can specify multiple sets of wavenumber ranges and thresholds
92 |
93 | ``-fthresh wl0 wl1 LO HI [wl0 wl1 LO HI ...], --filter_threshold wl0 wl1 LO HI [wl0 wl1 LO HI ...]``
94 | Remove spectra from the dataset with a pct_reflect outside
95 | (LO,HI)in the wavenumber range wl0 wl1. Can specify multiple
96 | sets of wavenumber ranges and thresholds
97 |
98 | ``-fwhite, --filter_white``
99 | Remove white reference spectra from dataset
100 |
101 | ``-fg method, --filter_group method``
102 | How to combine the wavelengths selected by --filter_group.
103 |
104 | ``-fo set, --filter_on set``
105 | What subset of the data to apply filter on (collection, group or both)
106 |
107 | ``-yl ymin ymax, --ylim ymin ymax``
108 | Force the y axis of plots to display between ymin and ymax
109 |
110 | ``-q, --quiet``
111 |
112 | ``-f, --force`` if output path exists, remove previous output and run
113 |
114 | Examples
115 | ========
116 | For a description of all command line arguments: ``specdal_pipeline --help``.
117 |
118 | To produce an individual plot and textfile for every spectrum file
119 | in directory ``/path/to/spectra/`` and store the results in ``specdal_output/``:
120 | ``specdal_pipeline -o specdal_output /path/to/spectra/``
121 |
122 | To only output whole-dataset images and files:
123 | ``specdal_pipeline -oi -o specdal_output /path/to/spectra/``
124 |
125 | To only output images, with no data files:
126 | ``specdal_pipeline -od -o specdal_output /path/to/spectra/``
127 |
128 |
129 | To group input files by the first 3 underscore-separated components
130 | of their filename (such that ``foo_bar_baz_001.asd`` and
131 | ``foo_bar_baz_002.asd`` will appear in one group, and
132 | ``foo_bar_qux_001.asd`` in another):
133 | ``specdal_pipeline -g -gi 0 1 2 -- /path/to/spectra/``
134 |
135 | To also output the mean and median of every group of spectra:
136 | ``specdal_pipeline -g -gi 0 1 2 -gmean -gmedian /path/to/spectra/``
137 |
138 | To remove all white reference spectra from the output dataset (leaves input files intact):
139 | ``specdal_pipeline --filter_white /path/to/spectra/``
140 |
141 | Filtering (WIP)
142 | ===============
143 | specdal_pipeline also provides the option to automatically filter spectra out of
144 | the dataset. This feature is not fully tested and may cause issues.
145 |
146 | To remove all spectra
147 | with a 750-1200 nm reflectance that is greater than 1 standard deviation from the mean,
148 | or with a 500-600 nm reflectance that is greater than 2 standard deviations from the mean:
149 |
150 | ``specdal_pipeline --filter_std 750 1200 1 500 600 2 -- /path/to/spectra/``
151 |
152 | To perform the filtering above, and then group the remaining spectra by filename:
153 |
154 | ``specdal_pipeline --filter_std 750 1200 1 500 600 2
155 | -g -gi 0 1 2 /path/to/spectra/``
156 |
157 | To group the spectra by filename, and then perform filtering on each group:
158 |
159 | ``specdal_pipeline --filter_std 750 1200 1 500 600 2
160 | -g -gi 0 1 2 --filter_on group /path/to/spectra/``
161 |
162 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | pandas >= 0.20.0
3 | matplotlib
4 | scipy
5 | numpydoc
6 |
--------------------------------------------------------------------------------
/runDocker:
--------------------------------------------------------------------------------
#!/bin/bash
# Start an interactive shell in the 'specdal' image with the current
# directory mounted at /home, running as the invoking user's uid:gid.
# The container is removed on exit (--rm); the flag was duplicated before.
docker container run -it --rm -v ${PWD}:/home -u $(id -u ${USER}):$(id -g ${USER}) specdal
3 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# Use setuptools rather than distutils: 'entry_points', 'install_requires',
# 'python_requires' and 'zip_safe' are setuptools-only keywords that
# distutils.core.setup silently ignores (emitting "unknown distribution
# option" warnings), so the console entry point and dependency metadata
# were never actually installed.
from setuptools import setup

setup(name='specdal',
      # NOTE(review): doc/conf.py declares version 2.0.0 -- confirm which is current.
      version='0.2.2',
      description='Package for processing spectroscopy data',
      long_description=open('README.rst').read(),
      scripts=[
          'bin/specdal_pipeline',
          'bin/specdal_info',
          # NOTE(review): this script and the 'gui_scripts' entry point below
          # both install an executable named 'specdal_gui' -- confirm the
          # duplication is intended.
          'bin/specdal_gui',
          'bin/specdalqt',
      ],
      entry_points={
          'gui_scripts': ['specdal_gui = specdal.gui.viewer:main'],
      },
      url='https://github.com/EnSpec/SpecDAL/',
      author='Young Lee',
      author_email='ylee546@wisc.edu',
      license='MIT',
      packages=['specdal', 'specdal.gui', 'specdal.gui.pyqt',
                'specdal.readers', 'specdal.containers',
                'specdal.operators', 'specdal.filters'],
      install_requires=['numpy', 'pandas', 'matplotlib', 'scipy', 'pyqt5'],
      # NOTE(review): package_data paths are relative to each package
      # directory; 'specdal/gui/select.png' under the '' key looks wrong --
      # verify that select.png is actually shipped.
      package_data={'': ['specdal/gui/select.png'],
                    'specdal.gui.pyqt': ['Assets/*.*']},
      include_package_data=True,
      zip_safe=False,
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Science/Research',
          'Topic :: Scientific/Engineering :: Atmospheric Science',
          'Programming Language :: Python :: 3',
      ],
      python_requires='>=3'
      )
36 |
--------------------------------------------------------------------------------
/specdal/__init__.py:
--------------------------------------------------------------------------------
# Public package API: re-export the container classes and the
# format-dispatching reader so callers can write e.g.
# `from specdal import Spectrum, Collection, read`.
from .containers.spectrum import Spectrum
from .containers.collection import Collection, df_to_collection, proximal_join
from .readers import read

# __all__ = ['spectrum', 'collection', 'reader']
6 |
--------------------------------------------------------------------------------
/specdal/containers/__init__.py:
--------------------------------------------------------------------------------
from os.path import dirname, basename, isfile
import glob

# Advertise every sibling module (except this __init__) through __all__ so
# that `from specdal.containers import *` pulls the submodules in.
_py_files = glob.glob(dirname(__file__) + "/*.py")
__all__ = [
    basename(path)[:-3]
    for path in _py_files
    if isfile(path) and not path.endswith('__init__.py')
]

from .spectrum import Spectrum
from .collection import *
8 |
--------------------------------------------------------------------------------
/specdal/containers/collection.py:
--------------------------------------------------------------------------------
1 | # collection.py provides class for representing multiple
2 | # spectra. Collection class is essentially a wrapper around
3 | # pandas.DataFrame.
4 | import pandas as pd
5 | import numpy as np
6 | from collections import OrderedDict, defaultdict
7 | from .spectrum import Spectrum
8 | import specdal.operators as op
9 | from itertools import groupby
10 | from specdal.readers import read
11 | import copy
12 | import logging
13 | from os.path import abspath, expanduser, splitext
14 | import os
15 | import sys
16 |
17 | logging.basicConfig(level=logging.WARNING,
18 | format="%(levelname)s:%(name)s:%(message)s\n")
19 | ################################################################################
20 | # key functions for forming groups
21 | def separator_keyfun(spectrum, separator, indices):
22 | elements = spectrum.name.split(separator)
23 | return separator.join([elements[i] for i in indices if i 0:
52 | metadata_dict = df[meta_cols].transpose().to_dict()
53 | measurement_dict = df[wave_cols].transpose().to_dict('series')
54 | for spectrum_name in df.index:
55 | c.append(Spectrum(name=spectrum_name,
56 | measurement=measurement_dict[spectrum_name],
57 | measure_type=measure_type,
58 | metadata=metadata_dict[spectrum_name]))
59 | return c
60 |
def proximal_join(base, rover, on='gps_time_tgt', direction='nearest'):
    '''
    Perform proximal join and return a new collection.

    Parameters
    ----------

    base: DataFrame or specdal.Collection object

    rover: DataFrame or specdal.Collection object

    on: str
        metadata field used as the join key (default 'gps_time_tgt')

    direction: str
        how rows are matched on the key; forwarded to
        specdal.operators.proximal_join (default 'nearest')

    Returns
    -------
    result: proximally joined dataset
        a specdal.Collection if either input was a Collection,
        otherwise a pandas.DataFrame

    Notes
    -----
    Exits the process (SystemExit) when either dataset's wavelength
    index is not strictly increasing; stitch the overlaps first.
    '''
    result = None
    return_collection = False
    name = 'proximally_joined'
    # ensure that wavelength indices are monotonically increasing
    if (pd.Series(rover.data.index).diff()[1:] <= 0).any() or \
       (pd.Series(base.data.index).diff()[1:] <= 0).any():
        logging.error("Cannot proximally join dataset with non-increasing"
                      " wavelengths. Try stitching.")
        sys.exit(1)

    # warn (but proceed) when the datasets were not interpolated to a
    # common wavelength grid
    if not (all([b.interpolated for b in base.spectra]) and all(
            [r.interpolated for r in rover.spectra])):
        logging.warning("Proximal join should be done on datasets interpolated "
                        "to the same wavelengths.")
    # Collections are flattened to DataFrames (with the join key as an
    # extra column) before delegating to the operator.
    if isinstance(base, Collection):
        return_collection = True
        base = base.data_with_meta(fields=[on])
    if isinstance(rover, Collection):
        return_collection = True
        name = rover.name
        rover = rover.data_with_meta(fields=[on])
    result = op.proximal_join(base, rover, on=on, direction=direction)
    if return_collection:
        result = df_to_collection(result, name=name)
    return result
103 |
104 | ################################################################################
105 | # main Collection class
106 | class Collection(object):
107 | """
108 | Represents a dataset consisting of a collection of spectra
109 | """
    def __init__(self, name, directory=None, spectra=None,
                 measure_type='pct_reflect', metadata=None, flags=None):
        """
        Parameters
        ----------
        name: str
            name of the collection
        directory: str, optional
            if given, every supported spectral file in it is read and
            appended (see ``read``)
        spectra: iterable of Spectrum, optional
            initial spectra; names must be unique within the collection
        measure_type: str
            measurement column to read from files (default 'pct_reflect')
        metadata: optional
            collection-level metadata, stored as-is
        flags: iterable of str, optional
            names of spectra to mark as flagged (unknown names are dropped)
        """
        self.name = name
        self.spectra = spectra          # property setter builds an OrderedDict
        self.measure_type = measure_type
        self.metadata = metadata
        self.flags = flags              # property setter builds a defaultdict
        if directory:
            self.read(directory, measure_type)
119 | @property
120 | def spectra(self):
121 | """
122 | A list of Spectrum objects in the collection
123 | """
124 | return list(self._spectra.values())
125 |
126 |
127 | @property
128 | def spectra_dict(self):
129 | return self._spectra
130 |
131 | @spectra.setter
132 | def spectra(self, value):
133 | self._spectra = OrderedDict()
134 | if value is not None:
135 | # assume value is an iterable such as list
136 | for spectrum in value:
137 | assert spectrum.name not in self._spectra
138 | self._spectra[spectrum.name] = spectrum
139 | @property
140 | def flags(self):
141 | """
142 | A dict of flags for each spectrum in the collection
143 | """
144 | return self._flags
145 | @flags.setter
146 | def flags(self, value):
147 | '''
148 | TODO: test this
149 | '''
150 | self._flags = defaultdict(lambda: False)
151 | if value is not None:
152 | for v in value:
153 | if v in self._spectra:
154 | self._flags[v] = True
    def flag(self, spectrum_name):
        """Mark *spectrum_name* as flagged (sets its flag entry to True)."""
        self.flags[spectrum_name] = True
157 |
158 | def unflag(self, spectrum_name):
159 | del self.flags[spectrum_name]
160 |
161 | def as_flagged(self):
162 | """ Return a collection with just the flagged spectra """
163 | flags = set(self.flags)
164 | spectra = [s for s in self.spectra if s.name in flags]
165 | return Collection(self.name+'_flagged', None,
166 | spectra=spectra, metadata=self.metadata, flags=self.flags)
    def as_unflagged(self):
        """ Return a collection with just the unflagged spectra """
        flags = set(self.flags)
        spectra = [s for s in self.spectra if not s.name in flags]
        return Collection(self.name+'_unflagged', None,
                          spectra=spectra, metadata=self.metadata, flags=None)
173 |
174 | def _check_uniform_wavelengths(self):
175 | warning =\
176 | """Multiple wavelength spacings found in dataset. This may indicate input files
177 | from multiple datasets are being processed simultaneously, and can cause
178 | unpredictable behavior."""
179 | wavelengths0 = self.spectra[0].measurement.index
180 | for s in self.spectra[1:]:
181 | if len(s.measurement.index) != len(wavelengths0):
182 | logging.warning(warning)
183 | break
184 | if not (s.measurement.index == wavelengths0).all():
185 | logging.warning(warning)
186 | break
187 |
188 | @property
189 | def data(self):
190 | '''
191 | Get measurements as a Pandas.DataFrame
192 | '''
193 | try:
194 | self._check_uniform_wavelengths()
195 | objs = [s.measurement for s in self.spectra]
196 | keys = [s.name for s in self.spectra]
197 | return pd.concat(objs=objs, keys=keys, axis=1)
198 | except pd.core.indexes.base.InvalidIndexError as err:
199 | # typically from duplicate index due to overlapping wavelengths
200 | if not all([s.stitched for s in self.spectra]):
201 | logging.warning('{}: Try after stitching the overlaps'.format(err))
202 | raise err
203 | except Exception as e:
204 | print("Unexpected exception occurred")
205 | raise e
206 |
207 | def _unflagged_data(self):
208 | try:
209 | spectra = [s for s in self.spectra if not s.name in self.flags]
210 | return pd.concat(objs=[s.measurement for s in spectra],
211 | axis=1, keys=[s.name for s in spectra])
212 | except (ValueError, pd.core.indexes.base.InvalidIndexError) as err:
213 | # typically from duplicate index due to overlapping wavelengths
214 | if not all([s.stitched for s in self.spectra]):
215 | logging.warning('{}: Try after stitching the overlaps'.format(err))
216 | return None
217 | except Exception as e:
218 | print("Unexpected exception occurred")
219 | raise e
220 |
221 |
222 | def append(self, spectrum):
223 | """
224 | insert spectrum to the collection
225 | """
226 | assert spectrum.name not in self._spectra
227 | assert isinstance(spectrum, Spectrum)
228 | self._spectra[spectrum.name] = spectrum
229 |
230 | def data_with_meta(self, data=True, fields=None):
231 | """
232 | Get dataframe with additional columns for metadata fields
233 |
234 | Parameters
235 | ----------
236 |
237 | data: boolean
238 | whether to return the measurement data or not
239 |
240 | fields: list
241 | names of metadata fields to include as columns.
242 | If None, all the metadata will be included.
243 |
244 | Returns
245 | -------
246 | pd.DataFrame: self.data with additional columns
247 |
248 | """
249 | if fields is None:
250 | fields = ['file', 'instrument_type', 'integration_time',
251 | 'measurement_type', 'gps_time_tgt', 'gps_time_ref',
252 | 'wavelength_range']
253 | meta_dict = {}
254 | for field in fields:
255 | meta_dict[field] = [s.metadata[field] if field in s.metadata
256 | else None for s in self.spectra]
257 | meta_df = pd.DataFrame(meta_dict, index=[s.name for s in self.spectra])
258 | if data:
259 | result = pd.merge(meta_df, self.data.transpose(),
260 | left_index=True, right_index=True)
261 | else:
262 | result = meta_df
263 | return result
264 |
265 | ##################################################
266 | # object methods
267 | def __getitem__(self, key):
268 | return self._spectra[key]
269 | def __delitem__(self, key):
270 | self._spectra.__delitem__(key)
271 | self._flags.__delitem__(key)
272 | def __missing__(self, key):
273 | pass
274 | def __len__(self):
275 | return len(self._spectra)
276 | def __contains__(self, item):
277 | self._spectra.__contains__(item)
278 | ##################################################
279 | # reader
280 | def read(self, directory, measure_type='pct_reflect',
281 | ext=[".asd", ".sed", ".sig",".pico",".light"], recursive=False,
282 | verbose=False):
283 | """
284 | read all files in a path matching extension
285 | """
286 | directory = abspath(expanduser(directory))
287 | for dirpath, dirnames, filenames in os.walk(directory):
288 | if not recursive:
289 | # only read given path
290 | if dirpath != directory:
291 | continue
292 | for f in sorted(filenames):
293 | f_name, f_ext = splitext(f)
294 | if f_ext not in list(ext):
295 | # skip to next file
296 | continue
297 | filepath = os.path.join(dirpath, f)
298 | try:
299 | spectrum = Spectrum(name=f_name, filepath=filepath,
300 | measure_type=measure_type,
301 | verbose=verbose)
302 | self.append(spectrum)
303 | except UnicodeDecodeError:
304 | logging.warning("Input file {} contains non-unicode "
305 | "character. Please inspect input file.".format(
306 | f_name))
307 | except KeyError:
308 | logging.warning("Input file {} missing metadata key. "
309 | "Please inspect input file.".format(f_name))
310 | ##################################################
311 | # wrapper around spectral operations
312 | def interpolate(self, spacing=1, method='slinear'):
313 | '''
314 | '''
315 | for spectrum in self.spectra:
316 | spectrum.interpolate(spacing, method)
317 | def stitch(self, method='max'):
318 | '''
319 | '''
320 | for spectrum in self.spectra:
321 | try:
322 | spectrum.stitch(method)
323 | except Exception as e:
324 | logging.error("Error occurred while stitching {}".format(spectrum.name))
325 | raise e
326 | def jump_correct(self, splices, reference, method='additive'):
327 | '''
328 | '''
329 | for spectrum in self.spectra:
330 | spectrum.jump_correct(splices, reference, method)
331 | ##################################################
332 | # group operations
    def groupby(self, separator, indices, filler=None):
        """
        Group the spectra using a separator pattern applied to their names.

        Parameters
        ----------
        separator: str
            pattern splitting each spectrum name into fields
        indices: list of int
            positions of the name fields that define the group key
        filler: str or None
            if given, pads missing fields (uses the filler key function)

        Returns
        -------
        OrderedDict consisting of specdal.Collection objects for each group
            key: group name
            value: collection object

        """
        # choose the key function: the filler variant pads names that have
        # fewer fields than requested
        args = [separator, indices]
        key_fun = separator_keyfun
        if filler is not None:
            args.append(filler)
            key_fun = separator_with_filler_keyfun
        # itertools.groupby only groups adjacent items, so the spectra must
        # be sorted by the same key first
        spectra_sorted = sorted(self.spectra,
                                key=lambda x: key_fun(x, *args))
        groups = groupby(spectra_sorted,
                         lambda x: key_fun(x, *args))
        result = OrderedDict()
        for g_name, g_spectra in groups:
            # deep-copy so each grouped collection is independent of this one
            coll = Collection(name=g_name,
                              spectra=[copy.deepcopy(s) for s in g_spectra])
            result[coll.name] = coll
        return result
359 |
360 | def plot(self, *args, **kwargs):
361 | '''
362 | '''
363 | self.data.plot(*args, **kwargs)
364 | pass
365 | def to_csv(self, *args, **kwargs):
366 | '''
367 | '''
368 | self.data.transpose().to_csv(*args, **kwargs)
369 | ##################################################
370 | # aggregate
371 | def mean(self, append=False, ignore_flagged=True):
372 | '''
373 | '''
374 | data = self._unflagged_data() if ignore_flagged else data
375 | spectrum = Spectrum(name=self.name + '_mean',
376 | measurement=data.mean(axis=1),
377 | measure_type=self.measure_type)
378 | if append:
379 | self.append(spectrum)
380 | return spectrum
381 | def median(self, append=False, ignore_flagged=True):
382 | '''
383 | '''
384 | data = self._unflagged_data() if ignore_flagged else data
385 | spectrum = Spectrum(name=self.name + '_median',
386 | measurement=data.median(axis=1),
387 | measure_type=self.measure_type)
388 | if append:
389 | self.append(spectrum)
390 | return spectrum
391 | def min(self, append=False, ignore_flagged=True):
392 | '''
393 | '''
394 | data = self._unflagged_data() if ignore_flagged else data
395 | spectrum = Spectrum(name=self.name + '_min',
396 | measurement=data.min(axis=1),
397 | measure_type=self.measure_type)
398 | if append:
399 | self.append(spectrum)
400 | return spectrum
401 | def max(self, append=False, ignore_flagged=True):
402 | '''
403 | '''
404 | data = self._unflagged_data() if ignore_flagged else data
405 | spectrum = Spectrum(name=self.name + '_max',
406 | measurement=data.max(axis=1),
407 | measure_type=self.measure_type)
408 | if append:
409 | self.append(spectrum)
410 | return spectrum
411 | def std(self, append=False, ignore_flagged=True):
412 | '''
413 | '''
414 | data = self._unflagged_data() if ignore_flagged else data
415 | spectrum = Spectrum(name=self.name + '_std',
416 | measurement=data.std(axis=1),
417 | measure_type=self.measure_type)
418 | if append:
419 | self.append(spectrum)
420 | return spectrum
421 |
422 |
--------------------------------------------------------------------------------
/specdal/containers/spectrum.py:
--------------------------------------------------------------------------------
1 | # spectrum.py provides class for representing a single
2 | # spectrum. Spectrum class is essentially a wrapper around
3 | # pandas.Series.
4 | import pandas as pd
5 | import numpy as np
6 | import specdal.operators as op
7 | from collections import OrderedDict
8 | from specdal.readers import read
9 | import logging
10 | import os
11 |
12 | logging.basicConfig(level=logging.WARNING,
13 | format="%(levelname)s:%(name)s:%(message)s\n")
class Spectrum(object):
    """Class that represents a single spectrum

    Parameters
    ----------

    name: string
        Name of the spectrum.

    filepath: string (optional)
        Path to the file to read from.

    measurement: pandas.Series
        Spectral measurement

    metadata: OrderedDict
        Metadata associated with spectrum

    Notes
    -----

    Spectrum object stores a single spectral measurement using
    pandas.Series with index named: "wavelength".

    """
    def __init__(self, name=None, filepath=None, measurement=None,
                 measure_type='pct_reflect', metadata=None,
                 interpolated=False, stitched=False, jump_corrected=False,
                 verbose=False):
        if name is None:
            # derive the name from the file name when not given explicitly
            assert filepath is not None
            name = os.path.splitext(os.path.basename(filepath))[0]
        self.name = name
        self.measurement = measurement
        self.measure_type = measure_type
        self.metadata = metadata
        # processing-state flags maintained by the operator wrappers below
        self.interpolated = interpolated
        self.stitched = stitched
        self.jump_corrected = jump_corrected
        if filepath:
            # reading replaces measurement and metadata with file contents
            self.read(filepath, measure_type, verbose=verbose)
    def __str__(self):
        """Return a short human-readable summary (first/last samples and metadata)."""
        string = "\nname:\t\t{!s},\n".format(self.name)
        string += "measure_type:\t{!s}\n".format(self.measure_type)
        string += "measurements:\twave |measurement\n"
        string += "\t\t------|-----------\n"
        string += "\t\t {0:.1f}|{1:.3f}\n".format(
            self.measurement.head(1).index.values[0],
            self.measurement.head(1).values[0])
        string += "\t\t ...|...\n"
        string += "\t\t{0:.1f}|{1:.3f}\n".format(self.measurement.tail(1).index.values[0],
                                                 self.measurement.tail(1).values[0])
        string += "metadata:"
        for i, (key, item) in enumerate(self.metadata.items()):
            if i > 0:
                string += "\t"
            string += "\t{}:{}\n".format(key, item)
        return string
    ##################################################
    # reader
    def read(self, filepath, measure_type, verbose=False):
        '''
        Read measurement from a file.
        '''
        data, meta = read(filepath, verbose=verbose)
        self.metadata = meta
        if measure_type == 'pct_reflect' and 'pct_reflect' not in data:
            # derive reflectance from raw target/reference columns
            self.measurement = self.get_pct_reflect(data)
            return
        assert measure_type in data  # TODO: handle this
        self.measurement = data[measure_type]
    ##################################################
    # wrappers around spectral operations
    def interpolate(self, spacing=1, method='slinear'):
        '''Resample the measurement to an even wavelength spacing (in place).'''
        self.measurement = op.interpolate(self.measurement, spacing, method)
        self.interpolated = True
    def stitch(self, method='mean'):
        '''Stitch together overlapping detector regions (in place).'''
        self.measurement = op.stitch(self.measurement, method)
        self.stitched = True
    def jump_correct(self, splices, reference, method="additive"):
        '''Correct jumps at detector splice wavelengths (in place).'''
        self.measurement = op.jump_correct(self.measurement, splices, reference, method)
        self.jump_corrected = True
    def get_pct_reflect(self, dataframe):
        """
        Helper function to calculate pct_reflect from other columns

        Returns
        -------
        pd.Series object for pct_reflect, or None (with a warning) when the
        dataframe lacks the required column pairs
        """
        columns = dataframe.columns.values
        pct_reflect = None
        # special case for piccolo: subtract dark counts before the ratio
        if all(x in columns for x in ["tgt_count", "ref_count",
                                      "tgt_count_dark", "ref_count_dark"]):
            pct_reflect = (dataframe["tgt_count"]-dataframe["tgt_count_dark"])/(
                dataframe["ref_count"]-dataframe["ref_count_dark"])
        elif all(x in columns for x in ["tgt_count", "ref_count"]):
            pct_reflect = dataframe["tgt_count"]/dataframe["ref_count"]
        elif all(x in columns for x in ["tgt_radiance", "ref_radiance"]):
            pct_reflect = dataframe["tgt_radiance"]/dataframe["ref_radiance"]
        elif all(x in columns for x in ["tgt_reflect", "ref_reflect"]):
            pct_reflect = dataframe["tgt_reflect"]/dataframe["ref_reflect"]
        elif all(x in columns for x in ["tgt_irradiance", "ref_irradiance"]):
            pct_reflect = dataframe["tgt_irradiance"]/dataframe["ref_irradiance"]

        if pct_reflect is not None:
            pct_reflect.name = 'pct_reflect'
        else:
            logging.warning("Dataframe lacks columns to compute pct_reflect.")
        return pct_reflect
    ##################################################
    # wrapper around plot function
    def plot(self, *args, **kwargs):
        '''Plot the measurement; arguments forwarded to pandas.Series.plot.'''
        return self.measurement.plot(*args, **kwargs)
    def to_csv(self, *args, **kwargs):
        '''Write the measurement as a single csv row; arguments forwarded to pandas.'''
        return pd.DataFrame(self.measurement).transpose().to_csv(
            *args, **kwargs)
    ##################################################
    # wrapper around pandas series operators
    def __add__(self, other):
        """Add a Spectrum (same measure_type; NaNs dropped) or a scalar."""
        new_measurement = None
        new_name = self.name + '+'
        if isinstance(other, Spectrum):
            assert self.measure_type == other.measure_type
            new_measurement = self.measurement.__add__(other.measurement).dropna()
            new_name += other.name
        else:
            new_measurement = self.measurement.__add__(other)
        return Spectrum(name=new_name, measurement=new_measurement,
                        measure_type=self.measure_type)
    # BUG FIX: earlier revisions defined __isub__/__imul__/__itruediv__/
    # __ifloordiv__ twice each (plus a misspelled __iiadd__) with empty
    # bodies. Because they returned None, statements like `s -= x` silently
    # rebound `s` to None. They are removed so unsupported in-place
    # operations raise TypeError instead of corrupting the variable.
    # (`s += x` still works via __add__.)
171 |
172 |
--------------------------------------------------------------------------------
/specdal/examples/.gitignore:
--------------------------------------------------------------------------------
1 | .ipynb_checkpoints*
2 | *~
--------------------------------------------------------------------------------
/specdal/examples/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/examples/__init__.py
--------------------------------------------------------------------------------
/specdal/filters/__init__.py:
--------------------------------------------------------------------------------
from os.path import dirname, basename, isfile
import glob
# Build __all__ dynamically: every .py module in this package except
# __init__.py itself, so `from specdal.filters import *` exposes them.
modules = glob.glob(dirname(__file__)+"/*.py")
__all__ = [ basename(f)[:-3] for f in modules if isfile(f) and not f.endswith('__init__.py')]

# Re-export the individual filter functions at package level for convenience.
from .filter_std import filter_std
from .filter_threshold import filter_threshold
from .filter_white import filter_white
from .is_monotonic import is_monotonic
10 |
--------------------------------------------------------------------------------
/specdal/filters/filter_std.py:
--------------------------------------------------------------------------------
1 | from .split_good_bad import split_good_bad
2 |
def filter_std(collection,wavelength0,wavelength1,std_thresh,group='mean'):
    """Filter the spectra from collection that have a standard deviation
    outside a certain threshold.

    Parameters
    ----------
    collection: specdal.containers.collection.Collection
        the collection to filter

    wavelength0: float
        the starting wavelength to filter

    wavelength1: float
        the ending wavelength to filter

    std_thresh: float
        remove spectra outside of std_thresh standard deviations from the mean

    group: string
        if there are multiple data points between wavelength0 and wavelength1,
        average them this way. Options: "mean", "median", "min", "max"

    Returns
    -------
    good: specdal.containers.Collection
        A new collection made of the spectra that passed the filter

    bad: specdal.containers.Collection
        A new collection made of the spectra that failed the filter

    Raises
    ------
    ValueError
        if ``group`` is not one of the supported aggregation names
    """
    #extract the relevant wavelength range
    data = collection.data.loc[wavelength0:wavelength1]
    mean = data.mean(axis=1)
    std = data.std(axis=1)
    #number of standard deviations from mean at each wavelength
    n_std = data.sub(mean,axis=0).div(std,axis=0).abs()

    if group == 'mean':
        good = n_std.mean() < std_thresh
    elif group == 'median':
        good = n_std.median() < std_thresh
    elif group == 'min':
        good = n_std.min() < std_thresh
    elif group == 'max':
        # BUG FIX: this branch previously used n_std.min(), so group='max'
        # silently behaved exactly like group='min'.
        good = n_std.max() < std_thresh
    else:
        # previously an unknown group fell through to a confusing NameError
        raise ValueError(
            "group must be one of 'mean', 'median', 'min', 'max'")
    #TODO: work around transposing
    return split_good_bad(collection,good)
50 |
51 |
--------------------------------------------------------------------------------
/specdal/filters/filter_threshold.py:
--------------------------------------------------------------------------------
1 | from .split_good_bad import split_good_bad
2 |
def filter_threshold(collection,wavelength0,wavelength1,low,high,group='mean'):
    """Filter the spectra from collection that have a value outside of
    (low,high).

    Parameters
    ----------
    collection: specdal.containers.collection.Collection
        the collection to filter

    wavelength0: float
        the starting wavelength to filter

    wavelength1: float
        the ending wavelength to filter

    low: float
        minimum allowed value between wavelength0 and wavelength1

    high: float
        maximum allowed value between wavelength0 and wavelength1

    group: string
        if there are multiple data points between wavelength0 and wavelength1,
        average them this way. Options: "mean", "median", "min", "max"

    Returns
    -------
    good: specdal.containers.Collection
        A new collection made of the spectra that passed the filter

    bad: specdal.containers.Collection
        A new collection made of the spectra that failed the filter
    """
    window = collection.data.loc[wavelength0:wavelength1]
    # collapse the wavelength window to one value per spectrum
    if group == 'mean':
        stat = window.mean(axis=0)
    elif group == 'median':
        stat = window.median(axis=0)
    elif group == 'min':
        stat = window.min(axis=0)
    elif group == 'max':
        stat = window.max(axis=0)
    good = (stat > low) & (stat < high)
    return split_good_bad(collection,good)
49 |
--------------------------------------------------------------------------------
/specdal/filters/filter_white.py:
--------------------------------------------------------------------------------
1 | from specdal.containers.collection import Collection,df_to_collection
2 | from .split_good_bad import split_good_bad
3 |
def filter_white(collection,wavelength0=0,wavelength1=10000,group='mean'):
    """Filter white reference spectra from collection

    Returns
    -------
    good: specdal.containers.Collection
        A new collection made of the spectra that passed the filter

    bad: specdal.containers.Collection
        A new collection made of the spectra that failed the filter
    """
    window = collection.data.loc[wavelength0:wavelength1]
    avg = window.mean(axis=0)
    spread = window.std(axis=0)
    # a flat-ish spectrum sitting near reflectance 1 is probably white
    is_white = (avg > 0.9) & (avg < 1.1) & (spread < .03)
    keep = ~is_white
    if keep.all():
        # nothing white found: return everything plus an empty "bad" collection
        return collection,Collection(collection.name+'_filtered')
    return split_good_bad(collection,keep)
24 |
25 |
--------------------------------------------------------------------------------
/specdal/filters/is_monotonic.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
def is_monotonic(collection):
    """Return True if the collection's wavelength index is strictly increasing.

    Returns False when the index is not strictly increasing or the check
    cannot be performed at all (e.g. missing/invalid data).
    """
    try:
        # diff() > 0 for every consecutive pair <=> strictly increasing index
        return (pd.Series(collection.data.index).diff()[1:]>0).all()
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        return False
7 |
--------------------------------------------------------------------------------
/specdal/filters/split_good_bad.py:
--------------------------------------------------------------------------------
1 | from specdal.containers.collection import Collection,df_to_collection
2 |
def split_good_bad(collection,is_good):
    """
    Split a collection in two using a boolean mask over its spectra.

    Given: A collection and a per-spectrum boolean mask (True = keep)
    Return: 2 collections, one of the flagged-good data, one of the
    flagged-bad data
    """
    #TODO: work around transposing
    by_spectrum = collection.data.T
    good_col = df_to_collection(by_spectrum[is_good],
                                name=collection.name)
    bad_col = df_to_collection(by_spectrum[~is_good],
                               name=collection.name+'_filtered')
    return good_col,bad_col
17 |
18 |
--------------------------------------------------------------------------------
/specdal/gui/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/__init__.py
--------------------------------------------------------------------------------
/specdal/gui/gui.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import tkinter as tk
4 | from tkinter import ttk
5 | from tkinter import filedialog
6 | import tkinter.simpledialog as tksd
7 | sys.path.insert(0, os.path.abspath("../.."))
8 | import matplotlib
9 | matplotlib.use('TkAgg')
10 | from specdal.spectrum import Spectrum
11 | from specdal.collection import Collection
12 | from viewer import Viewer
13 | from collections import OrderedDict
14 |
15 | # ~/data/specdal/aidan_data2/PSR/
16 |
class SpecdalGui(tk.Tk):
    """GUI entry point for Specdal

    Top-level Tk window composed of a menubar, a CollectionList on the
    left, and a Viewer filling the remaining space.
    """
    def __init__(self, collections=None):
        tk.Tk.__init__(self)

        # create menubar
        self.config(menu=Menubar(self))

        # create list
        self.collectionList = CollectionList(self, collections)
        self.collectionList.pack(side=tk.LEFT, fill=tk.Y)

        # create viewer
        self.viewer = Viewer(self, self.collectionList.currentCollection,
                             with_toolbar=False)
        self.viewer.pack(side=tk.LEFT, fill=tk.BOTH)

    def read_dir(self):
        """Prompt for a directory and load it as a new, auto-named Collection."""
        directory = filedialog.askdirectory()
        if not directory:
            # user cancelled the dialog
            return
        self.collectionList.add_collection(
            Collection(name="collection" + str(self.collectionList.listbox.size()), directory=directory))

    def group_by(self, collection=None):
        """Ask the user for a separator and field indices, then add each
        resulting group of the (current or given) collection to the list."""
        separator = tksd.askstring("separator", "Enter separator pattern", initialvalue="_")
        if separator is None:
            return
        indices = tksd.askstring("indices", "Enter indices to group by (comma separated)", initialvalue="0")
        if indices is None:
            return
        # e.g. "0, 1" -> [0, 1]
        indices = list(map(int, indices.replace(" ", "").split(",")))
        if collection is None:
            collection = self.collectionList.currentCollection
        groups = collection.groupby(separator=separator, indices=indices, filler=None)
        for gname, gcoll in groups.items():
            # prefix with the parent collection name to keep list entries unique
            gcoll.name = collection.name + " (" + gcoll.name + ")"
            self.collectionList.add_collection(gcoll)
55 |
class CollectionList(tk.Frame):
    """Stores and manages collections

    Keeps an OrderedDict of Collection objects mirrored in a tk Listbox;
    selecting a list entry makes it the current collection and shows it in
    the parent window's viewer.
    """
    def __init__(self, parent, collections=None):
        tk.Frame.__init__(self, parent)
        self.collections = OrderedDict()  # name -> Collection
        self.currentCollection = None

        # gui
        self.scrollbar = ttk.Scrollbar(self)
        self.listbox = tk.Listbox(self, yscrollcommand=self.scrollbar.set,
                                  width=30)
        self.scrollbar.config(command=self.listbox.yview)
        self.listbox.pack(side=tk.LEFT, fill=tk.Y)
        self.scrollbar.pack(side=tk.LEFT, fill=tk.Y)
        # BUG FIX: the event sequence was an empty string (''), which is not
        # a valid Tk binding and never fired; bind the Listbox selection
        # virtual event so clicking an entry updates the viewer.
        self.listbox.bind('<<ListboxSelect>>', lambda x:
                          self.master.viewer.set_collection(
                              self.set_cur(pos=self.get_selection()[0][0])))

        # load provided collections
        if collections:
            for c in collections:
                self.add_collection(c)
            self.set_cur()

    def set_cur(self, name=None, pos=0):
        """Make the named (or pos-th) collection current and return it."""
        if name is None:
            # TODO: check whether pos is valid
            name = self.listbox.get(pos)
        self.currentCollection = self.get_collection(name)
        return self.currentCollection

    def add_collection(self, collection):
        """Register a Collection and show it in the listbox."""
        assert isinstance(collection, Collection)
        self.collections[collection.name] = collection
        # add to listbox
        self.listbox.insert(tk.END, collection.name)

    def get_collection(self, name):
        """Return the collection with the given name, or None if absent."""
        if name in self.collections:
            return self.collections[name]

    def get_selection(self):
        ''' return indices (tuple) and names (list) '''
        idx = self.listbox.curselection()
        all_names = list(self.collections)
        names = [ all_names[i] for i in idx ]
        return idx, names

    def remove_selection(self):
        """Remove every selected collection from the listbox and the dict."""
        idx, names = self.get_selection()
        # remove from listbox (reverse order keeps earlier indices valid)
        for i in sorted(idx, reverse=True):
            self.listbox.delete(i)
        # remove from dict
        for name in names:
            if self.currentCollection.name == name:
                # current collection is going away; fall back to the first
                self.set_cur()
            self.collections.__delitem__(name)
114 |
def not_implemented_message(feature_name):
    """Show a placeholder info dialog for menu items that are not implemented."""
    # BUG FIX: tkinter.messagebox is a submodule that must be imported
    # explicitly; `tk.messagebox` only resolves if some other module happened
    # to import it first. Also removed a dead trailing `pass`.
    from tkinter import messagebox
    messagebox.showinfo(feature_name, "Not implemented")
118 |
class Menubar(tk.Menu):
    """Main menubar: File / Edit / View / Operator cascades.

    Unimplemented actions are routed to not_implemented_message; the rest
    delegate to the parent SpecdalGui's viewer and collection list.
    """
    # parent is the SpecdalGui class
    def __init__(self, parent):
        tk.Menu.__init__(self, parent)
        # File
        fileMenu = tk.Menu(self, tearoff=0)
        fileMenu.add_command(label="open", command=lambda: not_implemented_message("open"))
        fileMenu.add_command(label="read file", command=lambda: not_implemented_message("read file"))
        fileMenu.add_command(label="read directory", command=lambda: self.master.read_dir())
        fileMenu.add_command(label="read csv", command=lambda: not_implemented_message("read csv"))
        fileMenu.add_command(label="save", command=lambda: not_implemented_message("save"))
        fileMenu.add_command(label="save as", command=lambda: not_implemented_message("save as"))
        fileMenu.add_command(label="close", command=lambda: not_implemented_message("close"))
        self.add_cascade(label="File", menu=fileMenu)

        # Edit
        editMenu = tk.Menu(self, tearoff=0)
        editMenu.add_command(label="flag/unflag", command=lambda: self.master.viewer.toggle_flag())
        editMenu.add_command(label="remove collection", command=lambda: self.master.collectionList.remove_selection())

        editMenu.add_command(label="setting", command=lambda: not_implemented_message("setting"))
        self.add_cascade(label="Edit", menu=editMenu)

        # View: toggles forwarded to the viewer
        viewMenu = tk.Menu(self, tearoff=0)
        viewMenu.add_command(label="Collection/Spectra Mode", command=lambda: self.master.viewer.toggle_mode())
        viewMenu.add_command(label="Show/Hide Flagged", command=lambda: self.master.viewer.toggle_show_flagged())
        viewMenu.add_command(label="Mean", command=lambda: self.master.viewer.toggle_mean())
        viewMenu.add_command(label="Median", command=lambda: self.master.viewer.toggle_median())
        viewMenu.add_command(label="Max", command=lambda: self.master.viewer.toggle_max())
        viewMenu.add_command(label="Min", command=lambda: self.master.viewer.toggle_min())
        viewMenu.add_command(label="Std", command=lambda: self.master.viewer.toggle_std())
        self.add_cascade(label="View", menu=viewMenu)

        # Operators: spectral operations applied through the viewer
        operatorMenu = tk.Menu(self, tearoff=0)
        operatorMenu.add_command(label="Groupby", command=lambda: self.master.group_by())
        operatorMenu.add_command(label="Stitch", command=lambda: self.master.viewer.stitch())
        operatorMenu.add_command(label="Jump Correct", command=lambda: self.master.viewer.jump_correct())
        self.add_cascade(label="Operator", menu=operatorMenu)
161 |
def read_test_data(path='~/data/specdal/aidan_data2/ASD', n_flag=30):
    """Load a collection for manual testing and flag its first spectra.

    Parameters
    ----------
    path: str
        directory of spectra to load (default is a developer-local path;
        generalized from the previous hard-coded value)
    n_flag: int
        number of leading spectra to flag (previously hard-coded to 30)
    """
    c = Collection("Test Collection", directory=path)
    for i in range(n_flag):
        c.flag(c.spectra[i].name)
    return c
168 |
def main():
    """Create the Specdal GUI window and enter the Tk event loop."""
    SpecdalGui().mainloop()
172 |
173 |
174 | if __name__ == "__main__":
175 | main()
176 |
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/ajax-loader.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/ajax-loader.gif
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-csv-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-csv-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-cursor-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-cursor-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-empty-flag-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-empty-flag-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-flag-filled-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-flag-filled-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-flag-save-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-flag-save-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-folder-24.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-folder-24.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-interpolate-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-interpolate-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-jump-correct-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-jump-correct-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-math-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-math-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-normal-distribution-histogram-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-normal-distribution-histogram-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-opened-folder-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-opened-folder-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-proximal-join.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-proximal-join.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-show-flag-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-show-flag-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Assets/icons8-stitch-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/Assets/icons8-stitch-32.png
--------------------------------------------------------------------------------
/specdal/gui/pyqt/Makefile:
--------------------------------------------------------------------------------
1 | default: qt_viewer_ui.py op_config_ui.py save_dialog_ui.py
2 | true
3 |
4 | qt_viewer_ui.py: qt_viewer.ui
5 | pyuic5 qt_viewer.ui > qt_viewer_ui.py
6 |
7 | op_config_ui.py: op_config.ui
8 | pyuic5 op_config.ui > op_config_ui.py
9 |
10 | save_dialog_ui.py: save_dialog.ui
11 | pyuic5 save_dialog.ui > save_dialog_ui.py
12 |
13 | test: qt_viewer_ui.py op_config_ui.py save_dialog_ui.py
14 | python viewer.py
15 |
--------------------------------------------------------------------------------
/specdal/gui/pyqt/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/pyqt/__init__.py
--------------------------------------------------------------------------------
/specdal/gui/pyqt/__main__.py:
--------------------------------------------------------------------------------
1 | from . import viewer
2 | viewer.run()
3 |
--------------------------------------------------------------------------------
/specdal/gui/pyqt/collection_plotter.py:
--------------------------------------------------------------------------------
1 | from PyQt5 import QtGui, QtCore, QtWidgets
2 | import numpy as np
3 | import pandas as pd
4 | import os
5 | import sys
6 | import matplotlib
7 | matplotlib.use('Qt5Agg')
8 | import matplotlib.pyplot as plt
9 | from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg
10 | from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT
11 | from matplotlib.figure import Figure
12 | from matplotlib.patches import Rectangle
13 |
def set_or_none(iterable):
    """Coerce *iterable* to a set for O(1) membership tests.

    ``None`` and inputs that are already sets are returned unchanged.
    """
    if iterable is None or isinstance(iterable, set):
        return iterable
    return set(iterable)
18 |
class SpectrumArtist():
    """Wraps a matplotlib line artist with flag/selection/visibility state.

    ``show_flagged`` and ``show_unselected`` are class-level toggles shared
    by every instance; flipping them changes which spectra get drawn the
    next time an instance's look is refreshed.
    """
    show_flagged = True
    show_unselected = True

    def __init__(self, artist):
        # `artist` is the underlying line; we only call its
        # set_color / set_linestyle methods.
        self.artist = artist
        self._flagged = False
        self._selected = False
        self._visible = True
        self.style = '-'   # dashed when selected
        self.color = 'k'   # red when flagged

    @property
    def flagged(self):
        return self._flagged

    @flagged.setter
    def flagged(self, value):
        self._flagged = value
        # Flagged spectra render red, everything else black.
        self.color = 'r' if value else 'k'
        self._update_look()

    @property
    def selected(self):
        return self._selected

    @selected.setter
    def selected(self, value):
        self._selected = value
        # Selected spectra render dashed, everything else solid.
        self.style = '--' if value else '-'
        self._update_look()

    @property
    def visible(self):
        return self._visible

    @visible.setter
    def visible(self, value):
        self._visible = value
        # Hiding is done by switching the linestyle off rather than
        # removing the artist from the axes.
        self.artist.set_linestyle(self.style if value else 'None')

    def _calculate_visibility(self):
        # Hidden when unselected while unselected spectra are suppressed,
        # or flagged while flagged spectra are suppressed.
        hidden = ((not self.selected and not self.show_unselected)
                  or (self.flagged and not self.show_flagged))
        self.visible = not hidden

    def _update_look(self):
        """Recompute visibility, then push color/style to the artist."""
        self._calculate_visibility()
        if not self.visible:
            return
        self.artist.set_color(self.color)
        self.artist.set_linestyle(self.style)
76 |
77 |
78 |
class CollectionCanvas(FigureCanvasQTAgg):
    """Matplotlib canvas embedded in Qt that renders a spectral collection.

    Provides rubber-band rectangle selection (emitting ``selected`` with the
    box bounds on mouse-up) and per-spectrum flag/selection styling via
    :class:`SpectrumArtist` wrappers stored in ``self.artist_dict``.
    Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.).
    """

    # Emitted at the end of a drag with the selection box as (x0, x1, y0, y1).
    selected = QtCore.pyqtSignal(tuple)

    def __init__(self, parent=None, width=5, height=4, dpi=100):
        fig = Figure(figsize=(width, height), dpi=dpi)
        self.ax = fig.add_subplot(111)
        self.ax.grid(True)
        # NOTE(review): these two attributes appear unused inside this class;
        # kept in case external code reads them — confirm before removing.
        self._flag_style = 'r'
        self._unselected_style = '-'

        fig.tight_layout()
        FigureCanvasQTAgg.__init__(self, fig)
        self.setParent(parent)

        FigureCanvasQTAgg.setSizePolicy(self,
                                        QtWidgets.QSizePolicy.Expanding,
                                        QtWidgets.QSizePolicy.Expanding)
        FigureCanvasQTAgg.updateGeometry(self)

    @property
    def show_unselected(self):
        # Bug fix: this getter previously returned None (`pass`); report the
        # shared SpectrumArtist toggle the setter writes.
        return SpectrumArtist.show_unselected

    @show_unselected.setter
    def show_unselected(self, value):
        SpectrumArtist.show_unselected = value

    @property
    def show_flagged(self):
        # Bug fix: this getter previously returned None (`pass`); report the
        # shared SpectrumArtist toggle the setter writes.
        return SpectrumArtist.show_flagged

    @show_flagged.setter
    def show_flagged(self, value):
        SpectrumArtist.show_flagged = value

    def rectangleStartEvent(self, event):
        """Record the anchor point of a rubber-band selection."""
        self._rect = None
        self._rect_start = event

    def rectangleMoveEvent(self, event):
        """Redraw the rubber-band rectangle as the mouse drags."""
        try:
            dx = event.xdata - self._rect_start.xdata
            dy = event.ydata - self._rect_start.ydata
        except TypeError:
            # xdata/ydata are None when the cursor leaves the axes bounds.
            return

        if self._rect is not None:
            self._rect.remove()

        self._rect = Rectangle((self._rect_start.xdata, self._rect_start.ydata),
                               dx, dy, color='k', ls='--', lw=1, fill=False)
        self.ax.add_patch(self._rect)
        self.ax.draw_artist(self._rect)

    def rectangleEndEvent(self, event):
        """Finish the selection and emit its bounding box.

        A plain click (no drag, so no rectangle was ever drawn) is widened
        into a small box around the click point so single spectra can still
        be picked.
        """
        class FakeEvent(object):
            def __init__(self, x, y):
                self.xdata, self.ydata = x, y

        if self._rect is not None:
            self._rect.remove()
        else:
            # make a small, fake rectangle: +/-10 in x, +/-1% of y-range in y
            dy = (self.ax.get_ylim()[1] - self.ax.get_ylim()[0]) / 100.
            self._rect_start = FakeEvent(event.xdata - 10, event.ydata + dy)
            event = FakeEvent(event.xdata + 10, event.ydata - dy)

        x0 = min(self._rect_start.xdata, event.xdata)
        x1 = max(self._rect_start.xdata, event.xdata)
        y0 = min(self._rect_start.ydata, event.ydata)
        y1 = max(self._rect_start.ydata, event.ydata)
        self.selected.emit((x0, x1, y0, y1))

    def _onMouseDown(self, event):
        # Only select when no pan/zoom tool is active.
        if self.ax.get_navigate_mode() is None:
            # Cache the background so the rectangle can be blitted cheaply.
            self._bg_cache = self.copy_from_bbox(self.ax.bbox)
            self.clicked = True
            self.rectangleStartEvent(event)

    def _onMouseUp(self, event):
        if self.ax.get_navigate_mode() is None:
            self.restore_region(self._bg_cache)
            self.blit(self.ax.bbox)
            self.clicked = False
            self.rectangleEndEvent(event)

    def _onMouseMove(self, event):
        if self.ax.get_navigate_mode() is None:
            if(self.clicked):
                self.restore_region(self._bg_cache)
                self.rectangleMoveEvent(event)
                self.blit(self.ax.bbox)

    def setupMouseNavigation(self):
        """Connect the mouse handlers that implement rectangle selection."""
        self.clicked = False
        self.select_mode = 'rectangle'
        self._bg_cache = None

        self._cids = [
            self.mpl_connect('button_press_event', self._onMouseDown),
            self.mpl_connect('button_release_event', self._onMouseUp),
            self.mpl_connect('motion_notify_event', self._onMouseMove),
        ]

    def suspendMouseNavigation(self):
        """Disconnect the handlers registered by setupMouseNavigation()."""
        for cid in self._cids:
            self.mpl_disconnect(cid)

    def update_selected(self, selected_keys, only_add=False):
        """Mark the given spectra selected; others are deselected unless
        *only_add* is True.

        Parameters
        ----------
        selected_keys : iterable of str or None
            Spectrum names to select. None is treated as an empty selection.
        only_add : bool
            When True, current selections are kept and *selected_keys* are
            added to them.
        """
        # convert to set for better lookup time
        selected_keys = set_or_none(selected_keys)
        if selected_keys is None:
            # Robustness fix: previously `key in None` raised TypeError.
            selected_keys = set()
        if only_add:
            # if we're only adding, just select
            for key in selected_keys:
                self.artist_dict[key].selected = True
        else:
            # otherwise, unselect everything that isn't selected
            for key in self.artist_dict.keys():
                self.artist_dict[key].selected = key in selected_keys
        self.draw()

    def set_flagged(self, flagged_keys, selected_keys=None, flag=True):
        """Set the flagged state of the given spectra to *flag* and redraw.

        *selected_keys* is accepted for interface compatibility but unused.
        """
        # convert to set for better lookup time
        flagged_keys = set_or_none(flagged_keys)
        selected_keys = set_or_none(selected_keys)
        # Robustness fix: tolerate None (previously raised TypeError).
        for key in (flagged_keys or ()):
            self.artist_dict[key].flagged = flag

        self.draw()

    def add_flagged(self, unflagged_keys, selected_keys=None):
        """Flag the given spectra."""
        self.set_flagged(unflagged_keys, selected_keys, True)

    def remove_flagged(self, unflagged_keys, selected_keys=None):
        """Unflag the given spectra."""
        self.set_flagged(unflagged_keys, selected_keys, False)

    def update_artists(self, collection, new_lim=False):
        """Re-plot *collection* and rebuild the SpectrumArtist wrappers.

        Parameters
        ----------
        collection : specdal Collection or None
            None is a no-op.
        new_lim : bool
            When False, current axis limits are captured before re-plotting.
        """
        if collection is None:
            return
        # save limits
        if new_lim == False:
            xlim = self.ax.get_xlim()
            ylim = self.ax.get_ylim()
        # NOTE(review): the limits saved above are never restored — this
        # looks like an unfinished feature; left unchanged to preserve
        # behavior. Confirm intent before restoring them after plot().
        # plot
        self.ax.clear()
        collection.plot(ax=self.ax, style='k', picker=1)
        keys = [s.name for s in collection.spectra]
        artists = self.ax.lines
        # Relies on collection.plot() drawing one line per spectrum, in
        # the same order as collection.spectra.
        self.artist_dict = {key: SpectrumArtist(artist)
                            for key, artist in zip(keys, artists)}
        for key in collection.flags:
            self.artist_dict[key].flagged = True
        self.ax.legend().remove()
        self.ax.grid(True)
        self.draw()
241 |
class ToolBar(NavigationToolbar2QT):
    """Matplotlib navigation toolbar extended with SpecDAL-specific actions.

    Adds icon buttons (select, load, flag, operators, ...) exposed through
    ``self.icons`` and rebinds the stock Save button to "Export Dataset".
    """

    def __init__(self, canvas_, parent, ax):
        NavigationToolbar2QT.__init__(self, canvas_, parent, coordinates=False)
        self._xlim = (0, 1)   # home x-limits, updated via setHome()
        self._ylim = (0, 1)   # home y-limits, updated via setHome()
        self._ax = ax
        self._canvas_ = canvas_
        self._addActions()

    def home(self):
        """Override home method to return to home of most recent plot."""
        self._ax.set_xlim(*self._xlim)
        self._ax.set_ylim(*self._ylim)
        self._canvas_.draw()

    def setHome(self, xlim, ylim):
        """Record the axis limits that the home button restores."""
        self._xlim = xlim
        self._ylim = ylim

    def _rebind_save(self):
        # find the save button from the matplotlib toolbar and rebind its action
        # This will probably break at some point (matches on tooltip text).
        action = [a for a in self.__actions if "Save" in a.toolTip()][0]
        # (removed leftover debug print of the tooltip)
        action.triggered.disconnect()
        action.setToolTip("Export Dataset")
        self.icons["save"] = action

    def _update_pan_zoom(self):
        """Wrap pan/zoom so activating either unchecks the select tool."""
        def pan2():
            self.icons["select"].setChecked(False)
            self.pan()

        def zoom2():
            self.icons["select"].setChecked(False)
            self.zoom()

        self._actions["pan"].triggered.disconnect()
        self._actions["pan"].triggered.connect(pan2)
        self._actions["zoom"].triggered.disconnect()
        self._actions["zoom"].triggered.connect(zoom2)

    @property
    def __actions(self):
        # All QAction children of the toolbar (stock + custom).
        return [child for child in self.children()
                if isinstance(child, QtWidgets.QAction)]

    def _addActions(self):
        """Create the SpecDAL actions from the bundled Assets icons."""
        path = os.path.split(os.path.abspath(__file__))[0]
        dir_ = os.path.join(path, "Assets")
        self.icons = {}

        def _icon_of(name, fname, description, idx=None):
            # Append the action, or insert it before the idx-th existing one.
            icon = QtGui.QIcon(os.path.join(dir_, fname))
            if idx is None:
                action = self.addAction(icon, description)
            else:
                action = QtWidgets.QAction(icon, description)
                self.insertAction(self.__actions[idx], action)
            self.icons[name] = action
            return action

        self._rebind_save()
        _icon_of("select", "icons8-cursor-32.png", "Select spectra with left mouse", 5)
        _icon_of("load", "icons8-opened-folder-32.png", "Load Collection")
        _icon_of("flag", "icons8-flag-filled-32.png", "Flag Selection")
        _icon_of("unflag", "icons8-empty-flag-32.png", "Unflag Selection")
        _icon_of("vis", "icons8-show-flag-32.png", "Show/Hide Flags")
        _icon_of("export", "icons8-flag-save-32.png", "Export Flags")
        _icon_of("operators", "icons8-math-32.png", "Operator Configuration")
        _icon_of("stats", "icons8-normal-distribution-histogram-32.png", "Plot Statistics")
        _icon_of("stitch", "icons8-stitch-32.png", "Stitch")
        _icon_of("jump", "icons8-jump-correct-32.png", "Jump Correct")
        _icon_of("interpolate", "icons8-interpolate-32.png", "Interpolate")
        _icon_of("proximal", "icons8-proximal-join.png", "Proximal Join")
        # NOTE(review): icons8-restart-32.png is not present in the Assets
        # directory; the action still works but renders without an icon.
        _icon_of("reset", "icons8-restart-32.png", "Revert Operators")
        self.insertSeparator(self.icons['flag'])
        self.insertSeparator(self.icons['operators'])

        self.icons["select"].setCheckable(True)
        self._update_pan_zoom()

    def triggered(self, key):
        """Return the triggered signal of the named custom action."""
        return self.icons[key].triggered

    def returnToSelectMode(self):
        """Deactivate any active pan/zoom tool."""
        if self._ax.get_navigate_mode() == 'PAN':
            # Turn panning off
            self.pan()
        elif self._ax.get_navigate_mode() == 'ZOOM':
            # Turn zooming off
            self.zoom()
332 |
--------------------------------------------------------------------------------
/specdal/gui/pyqt/export_collection.py:
--------------------------------------------------------------------------------
1 | import os
2 | from matplotlib import pyplot as plt
3 | from PyQt5 import QtCore
4 |
class CollectionExporter(QtCore.QThread):
    """Background thread that writes a spectral collection to disk.

    Runs on a QThread so large exports do not block the GUI event loop.
    """

    def export(self,collection,configuration):
        """Store the job parameters and start the worker thread.

        configuration is a dict with keys: 'path' (output directory),
        'flags' (bool: include flagged spectra), and 'data'/'figures'
        (each a dict with 'individual' and 'dataset' bools).
        """
        self.collection = collection
        self.configuration = configuration
        self.start()

    def run(self):
        # QThread entry point; executes the export described by
        # self.configuration.
        c = self.collection
        configuration = self.configuration
        if not configuration['flags']:
            # Drop flagged spectra from the export.
            c = c.as_unflagged()
        # output individual spectra
        outdir = configuration['path']
        datadir = os.path.join(outdir, 'data')
        figdir = os.path.join(outdir, 'figures')
        os.makedirs(datadir,exist_ok=True)
        os.makedirs(figdir,exist_ok=True)
        if configuration['data']['individual']:
            # One CSV per spectrum under data/indiv/.
            indiv_datadir = os.path.join(datadir, 'indiv')
            os.makedirs(indiv_datadir,exist_ok=True)
            for spectrum in c.spectra:
                spectrum.to_csv(os.path.join(indiv_datadir, spectrum.name + '.csv'))

        if configuration['figures']['individual']:
            # One PNG per spectrum under figures/indiv/.
            indiv_figdir = os.path.join(figdir, 'indiv')
            os.makedirs(indiv_figdir,exist_ok=True)
            for spectrum in c.spectra:
                spectrum.plot(legend=False)
                plt.savefig(os.path.join(indiv_figdir, spectrum.name + '.png'), bbox_inches='tight')
                # Close each figure to avoid accumulating open figures.
                plt.close()

        # output whole and group data
        if configuration['data']['dataset']:
            c.to_csv(os.path.join(datadir, c.name + ".csv"))

        if configuration['figures']['dataset']:
            # output whole and group figures (possibly with aggregates appended)
            c.plot(legend=False)
            plt.savefig(os.path.join(figdir, c.name + ".png"), bbox_inches="tight")
            plt.close()
45 |
46 |
--------------------------------------------------------------------------------
/specdal/gui/pyqt/op_config.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | Dialog
4 |
5 |
6 |
7 | 0
8 | 0
9 | 420
10 | 518
11 |
12 |
13 |
14 | Operator Configuration
15 |
16 |
17 | -
18 |
19 |
20 | Plot Statistics
21 |
22 |
23 | true
24 |
25 |
26 | false
27 |
28 |
29 |
-
30 |
31 |
-
32 |
33 |
34 | Mi&nimum
35 |
36 |
37 |
38 | -
39 |
40 |
41 | &Mean
42 |
43 |
44 | false
45 |
46 |
47 |
48 | -
49 |
50 |
51 | M&edian
52 |
53 |
54 |
55 | -
56 |
57 |
58 | M&aximum
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 | -
68 |
69 |
70 | Stitch
71 |
72 |
73 | true
74 |
75 |
76 | false
77 |
78 |
79 |
-
80 |
81 |
82 | Stitching Method:
83 |
84 |
85 |
86 | -
87 |
88 |
89 |
90 | 0
91 | 0
92 |
93 |
94 |
-
95 |
96 | Maximum
97 |
98 |
99 | -
100 |
101 | Minimum
102 |
103 |
104 | -
105 |
106 | Mean
107 |
108 |
109 | -
110 |
111 | Median
112 |
113 |
114 | -
115 |
116 | Interpolated
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 | -
125 |
126 |
127 | Jump Correct
128 |
129 |
130 | true
131 |
132 |
133 | false
134 |
135 |
136 |
-
137 |
138 |
-
139 |
140 |
141 | Wavelength Splices:
142 |
143 |
144 |
145 | -
146 |
147 |
148 |
149 | 0
150 | 0
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 | -
159 |
160 |
161 | 0
162 |
163 |
164 |
165 | -
166 |
167 |
168 | Reference Band:
169 |
170 |
171 |
172 |
173 |
174 | -
175 |
176 |
177 | QLabel { color : orange; font-weight: bold; }
178 |
179 |
180 | ⚠ Couldn't parse wavelength splices
181 |
182 |
183 | true
184 |
185 |
186 |
187 |
188 |
189 |
190 | -
191 |
192 |
193 | Interpolate
194 |
195 |
196 | true
197 |
198 |
199 | false
200 |
201 |
202 |
-
203 |
204 |
205 | Interpolation Spacing (nm):
206 |
207 |
208 |
209 | -
210 |
211 |
212 |
213 | 0
214 | 0
215 |
216 |
217 |
218 |
219 | -
220 |
221 |
222 | Interpolation Method:
223 |
224 |
225 |
226 | -
227 |
228 |
-
229 |
230 | Linear Spline
231 |
232 |
233 | -
234 |
235 | Cubic
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 | -
244 |
245 |
246 | Proximal Reference
247 |
248 |
249 | true
250 |
251 |
252 | false
253 |
254 |
255 |
-
256 |
257 |
258 | Reference Directory:
259 |
260 |
261 |
262 | -
263 |
264 |
265 | Select Directory ...
266 |
267 |
268 |
269 |
270 |
271 |
272 | -
273 |
274 |
275 | Qt::Vertical
276 |
277 |
278 |
279 | 20
280 | 40
281 |
282 |
283 |
284 |
285 | -
286 |
287 |
288 | Qt::Horizontal
289 |
290 |
291 | QDialogButtonBox::Cancel|QDialogButtonBox::Ok
292 |
293 |
294 |
295 |
296 |
297 |
298 |
299 |
300 | buttonBox
301 | accepted()
302 | Dialog
303 | accept()
304 |
305 |
306 | 248
307 | 254
308 |
309 |
310 | 157
311 | 274
312 |
313 |
314 |
315 |
316 | buttonBox
317 | rejected()
318 | Dialog
319 | reject()
320 |
321 |
322 | 316
323 | 260
324 |
325 |
326 | 286
327 | 274
328 |
329 |
330 |
331 |
332 |
333 |
--------------------------------------------------------------------------------
/specdal/gui/pyqt/qt_viewer.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | MainWindow
4 |
5 |
6 |
7 | 0
8 | 0
9 | 1082
10 | 629
11 |
12 |
13 |
14 | SpecDAL GUI
15 |
16 |
17 |
18 | -
19 |
20 |
21 | 0
22 |
23 |
24 |
25 | -
26 |
27 |
28 |
-
29 |
30 |
31 | Qt::Horizontal
32 |
33 |
34 | 12
35 |
36 |
37 | true
38 |
39 |
40 |
41 |
42 | 6
43 | 0
44 |
45 |
46 |
47 | QFrame::StyledPanel
48 |
49 |
50 | QFrame::Sunken
51 |
52 |
53 |
54 | 0
55 |
56 |
57 | 0
58 |
59 |
60 | 0
61 |
62 |
63 | 0
64 |
65 |
-
66 |
67 |
68 |
69 |
70 |
71 |
72 | -
73 |
74 |
75 | 0
76 |
77 |
-
78 |
79 |
80 | Select Group:
81 |
82 |
83 |
84 | -
85 |
86 |
-
87 |
88 | --
89 |
90 |
91 |
92 |
93 | -
94 |
95 |
96 |
97 | 0
98 | 0
99 |
100 |
101 |
102 | Select Spectra by Name
103 |
104 |
105 |
106 |
107 |
108 | Select by Name
109 |
110 |
111 |
112 | -
113 |
114 |
115 |
116 | 0
117 | 0
118 |
119 |
120 |
121 | Select
122 |
123 |
124 |
125 | -
126 |
127 |
128 |
129 | 0
130 | 0
131 |
132 |
133 |
134 | Name Group
135 |
136 |
137 |
138 | -
139 |
140 |
141 |
142 | 0
143 | 0
144 |
145 |
146 |
147 |
148 |
149 |
150 | Name for Selection
151 |
152 |
153 |
154 |
155 |
156 | -
157 |
158 |
159 | Only Show Selected
160 |
161 |
162 |
163 | -
164 |
165 |
166 |
167 | 0
168 | 0
169 |
170 |
171 |
172 | QAbstractItemView::ExtendedSelection
173 |
174 |
175 |
176 | -
177 |
178 |
179 |
180 |
181 |
182 | Qt::AlignCenter
183 |
184 |
185 |
186 | -
187 |
188 |
189 | 0
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
266 |
267 |
268 | Open
269 |
270 |
271 |
272 |
273 | Save
274 |
275 |
276 |
277 |
278 | Plot Config
279 |
280 |
281 |
282 |
283 | Flag Selection
284 |
285 |
286 |
287 |
288 | Show/Hide Flagged
289 |
290 |
291 |
292 |
293 | Export Flags
294 |
295 |
296 |
297 |
298 | Stitch
299 |
300 |
301 |
302 |
303 | Jump Correct
304 |
305 |
306 |
307 |
308 | Mean
309 |
310 |
311 |
312 |
313 | Median
314 |
315 |
316 |
317 |
318 | Mode
319 |
320 |
321 |
322 |
323 | Max
324 |
325 |
326 |
327 |
328 | Min
329 |
330 |
331 |
332 |
333 | Mean
334 |
335 |
336 |
337 |
338 | Median
339 |
340 |
341 |
342 |
343 | Maximum
344 |
345 |
346 |
347 |
348 | Minimum
349 |
350 |
351 |
352 |
353 | Standard Deviation
354 |
355 |
356 |
357 |
358 | Select All
359 |
360 |
361 |
362 |
363 | Clear Selection
364 |
365 |
366 |
367 |
368 | Invert Selection
369 |
370 |
371 |
372 |
373 | Move Selection to Top
374 |
375 |
376 |
377 |
378 | Filter Selection by name...
379 |
380 |
381 |
382 |
383 | Unflag Selection
384 |
385 |
386 |
387 |
388 |
389 |
390 |
--------------------------------------------------------------------------------
/specdal/gui/pyqt/qt_viewer_ui.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Form implementation generated from reading ui file 'qt_viewer.ui'
4 | #
5 | # Created by: PyQt5 UI code generator 5.11.3
6 | #
7 | # WARNING! All changes made in this file will be lost!
8 |
9 | from PyQt5 import QtCore, QtGui, QtWidgets
10 |
class Ui_MainWindow(object):
    """Auto-generated (pyuic5 5.11.3) UI definition for the SpecDAL main window.

    Do not edit by hand: regenerate from ``qt_viewer.ui`` via the package
    Makefile (``pyuic5 qt_viewer.ui > qt_viewer_ui.py``) — manual changes
    are lost on regeneration.
    """

    def setupUi(self, MainWindow):
        """Build all widgets, layouts, menus and actions on *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1082, 629)
        # --- central widget: toolbar row above a splitter (plot | sidebar) ---
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")
        self.toolbarLayout = QtWidgets.QHBoxLayout()
        self.toolbarLayout.setContentsMargins(-1, -1, -1, 0)
        self.toolbarLayout.setObjectName("toolbarLayout")
        self.verticalLayout.addLayout(self.toolbarLayout)
        self.widget = QtWidgets.QWidget(self.centralwidget)
        self.widget.setObjectName("widget")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.widget)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.splitter = QtWidgets.QSplitter(self.widget)
        self.splitter.setOrientation(QtCore.Qt.Horizontal)
        self.splitter.setHandleWidth(12)
        self.splitter.setChildrenCollapsible(True)
        self.splitter.setObjectName("splitter")
        # --- left pane: frame hosting the matplotlib plot (plotLayout) ---
        self.frame = QtWidgets.QFrame(self.splitter)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(6)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
        self.frame.setSizePolicy(sizePolicy)
        self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.frame.setObjectName("frame")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.frame)
        self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.plotLayout = QtWidgets.QVBoxLayout()
        self.plotLayout.setObjectName("plotLayout")
        self.horizontalLayout_2.addLayout(self.plotLayout)
        # --- right pane: group/name selection controls + spectra list ---
        self.layoutWidget = QtWidgets.QWidget(self.splitter)
        self.layoutWidget.setObjectName("layoutWidget")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.layoutWidget)
        self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.gridLayout = QtWidgets.QGridLayout()
        self.gridLayout.setContentsMargins(-1, -1, -1, 0)
        self.gridLayout.setObjectName("gridLayout")
        self.label = QtWidgets.QLabel(self.layoutWidget)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 2, 0, 1, 1)
        self.groupBox = QtWidgets.QComboBox(self.layoutWidget)
        self.groupBox.setObjectName("groupBox")
        self.groupBox.addItem("")
        self.gridLayout.addWidget(self.groupBox, 2, 1, 1, 1)
        self.nameSelection = QtWidgets.QLineEdit(self.layoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.nameSelection.sizePolicy().hasHeightForWidth())
        self.nameSelection.setSizePolicy(sizePolicy)
        self.nameSelection.setWhatsThis("")
        self.nameSelection.setObjectName("nameSelection")
        self.gridLayout.addWidget(self.nameSelection, 0, 0, 1, 1)
        self.selectByName = QtWidgets.QPushButton(self.layoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.selectByName.sizePolicy().hasHeightForWidth())
        self.selectByName.setSizePolicy(sizePolicy)
        self.selectByName.setObjectName("selectByName")
        self.gridLayout.addWidget(self.selectByName, 0, 1, 1, 1)
        self.createGroup = QtWidgets.QPushButton(self.layoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.createGroup.sizePolicy().hasHeightForWidth())
        self.createGroup.setSizePolicy(sizePolicy)
        self.createGroup.setObjectName("createGroup")
        self.gridLayout.addWidget(self.createGroup, 1, 1, 1, 1)
        self.groupName = QtWidgets.QLineEdit(self.layoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.groupName.sizePolicy().hasHeightForWidth())
        self.groupName.setSizePolicy(sizePolicy)
        self.groupName.setText("")
        self.groupName.setObjectName("groupName")
        self.gridLayout.addWidget(self.groupName, 1, 0, 1, 1)
        self.verticalLayout_2.addLayout(self.gridLayout)
        self.onlyShowSelected = QtWidgets.QCheckBox(self.layoutWidget)
        self.onlyShowSelected.setObjectName("onlyShowSelected")
        self.verticalLayout_2.addWidget(self.onlyShowSelected)
        self.spectraList = QtWidgets.QListWidget(self.layoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.spectraList.sizePolicy().hasHeightForWidth())
        self.spectraList.setSizePolicy(sizePolicy)
        self.spectraList.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
        self.spectraList.setObjectName("spectraList")
        self.verticalLayout_2.addWidget(self.spectraList)
        self.loadLabel = QtWidgets.QLabel(self.layoutWidget)
        self.loadLabel.setText("")
        self.loadLabel.setAlignment(QtCore.Qt.AlignCenter)
        self.loadLabel.setObjectName("loadLabel")
        self.verticalLayout_2.addWidget(self.loadLabel)
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_3.setContentsMargins(-1, 0, -1, -1)
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.verticalLayout_2.addLayout(self.horizontalLayout_3)
        self.gridLayout_2.addWidget(self.splitter, 0, 0, 1, 1)
        self.verticalLayout.addWidget(self.widget)
        MainWindow.setCentralWidget(self.centralwidget)
        # --- menu bar: File / Edit / Flags / Operators / Selection ---
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1082, 25))
        self.menubar.setObjectName("menubar")
        self.menuFile = QtWidgets.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        self.menuEdit = QtWidgets.QMenu(self.menubar)
        self.menuEdit.setObjectName("menuEdit")
        self.menuFlags = QtWidgets.QMenu(self.menubar)
        self.menuFlags.setObjectName("menuFlags")
        self.menuOperators = QtWidgets.QMenu(self.menubar)
        self.menuOperators.setObjectName("menuOperators")
        self.menuPlot_Metric = QtWidgets.QMenu(self.menuOperators)
        self.menuPlot_Metric.setObjectName("menuPlot_Metric")
        self.menuSelection = QtWidgets.QMenu(self.menubar)
        self.menuSelection.setObjectName("menuSelection")
        MainWindow.setMenuBar(self.menubar)
        # --- actions (text assigned in retranslateUi) ---
        self.actionOpen = QtWidgets.QAction(MainWindow)
        self.actionOpen.setObjectName("actionOpen")
        self.actionSave = QtWidgets.QAction(MainWindow)
        self.actionSave.setObjectName("actionSave")
        self.actionPlot_Config = QtWidgets.QAction(MainWindow)
        self.actionPlot_Config.setObjectName("actionPlot_Config")
        self.actionFlag_Selection = QtWidgets.QAction(MainWindow)
        self.actionFlag_Selection.setObjectName("actionFlag_Selection")
        self.actionShow_Hide_Flagged = QtWidgets.QAction(MainWindow)
        self.actionShow_Hide_Flagged.setObjectName("actionShow_Hide_Flagged")
        self.actionExport_Flags = QtWidgets.QAction(MainWindow)
        self.actionExport_Flags.setObjectName("actionExport_Flags")
        self.actionStitch = QtWidgets.QAction(MainWindow)
        self.actionStitch.setObjectName("actionStitch")
        self.actionJump_Correct = QtWidgets.QAction(MainWindow)
        self.actionJump_Correct.setObjectName("actionJump_Correct")
        self.actionMean = QtWidgets.QAction(MainWindow)
        self.actionMean.setObjectName("actionMean")
        self.actionMedian = QtWidgets.QAction(MainWindow)
        self.actionMedian.setObjectName("actionMedian")
        self.actionMode = QtWidgets.QAction(MainWindow)
        self.actionMode.setObjectName("actionMode")
        self.actionMax = QtWidgets.QAction(MainWindow)
        self.actionMax.setObjectName("actionMax")
        self.actionMin = QtWidgets.QAction(MainWindow)
        self.actionMin.setObjectName("actionMin")
        self.actionMean_2 = QtWidgets.QAction(MainWindow)
        self.actionMean_2.setObjectName("actionMean_2")
        self.actionMedian_2 = QtWidgets.QAction(MainWindow)
        self.actionMedian_2.setObjectName("actionMedian_2")
        self.actionMaximum = QtWidgets.QAction(MainWindow)
        self.actionMaximum.setObjectName("actionMaximum")
        self.actionMinimum = QtWidgets.QAction(MainWindow)
        self.actionMinimum.setObjectName("actionMinimum")
        self.actionStandard_Deviation = QtWidgets.QAction(MainWindow)
        self.actionStandard_Deviation.setObjectName("actionStandard_Deviation")
        self.actionClear = QtWidgets.QAction(MainWindow)
        self.actionClear.setObjectName("actionClear")
        self.actionClear_Selection = QtWidgets.QAction(MainWindow)
        self.actionClear_Selection.setObjectName("actionClear_Selection")
        self.actionInvert_Selection = QtWidgets.QAction(MainWindow)
        self.actionInvert_Selection.setObjectName("actionInvert_Selection")
        self.actionMove_Selection_to_Top = QtWidgets.QAction(MainWindow)
        self.actionMove_Selection_to_Top.setObjectName("actionMove_Selection_to_Top")
        self.actionFilter_Selection_by_name = QtWidgets.QAction(MainWindow)
        self.actionFilter_Selection_by_name.setObjectName("actionFilter_Selection_by_name")
        self.actionUnflag_Selection = QtWidgets.QAction(MainWindow)
        self.actionUnflag_Selection.setObjectName("actionUnflag_Selection")
        # --- attach actions to their menus ---
        self.menuFile.addAction(self.actionOpen)
        self.menuFile.addAction(self.actionSave)
        self.menuEdit.addAction(self.actionPlot_Config)
        self.menuFlags.addAction(self.actionFlag_Selection)
        self.menuFlags.addAction(self.actionUnflag_Selection)
        self.menuFlags.addAction(self.actionShow_Hide_Flagged)
        self.menuFlags.addAction(self.actionExport_Flags)
        self.menuPlot_Metric.addAction(self.actionMean_2)
        self.menuPlot_Metric.addAction(self.actionMedian_2)
        self.menuPlot_Metric.addAction(self.actionMaximum)
        self.menuPlot_Metric.addAction(self.actionMinimum)
        self.menuPlot_Metric.addAction(self.actionStandard_Deviation)
        self.menuOperators.addAction(self.actionStitch)
        self.menuOperators.addAction(self.actionJump_Correct)
        self.menuOperators.addAction(self.menuPlot_Metric.menuAction())
        self.menuSelection.addAction(self.actionClear)
        self.menuSelection.addAction(self.actionClear_Selection)
        self.menuSelection.addAction(self.actionInvert_Selection)
        self.menuSelection.addAction(self.actionMove_Selection_to_Top)
        self.menubar.addAction(self.menuFile.menuAction())
        self.menubar.addAction(self.menuEdit.menuAction())
        self.menubar.addAction(self.menuFlags.menuAction())
        self.menubar.addAction(self.menuOperators.menuAction())
        self.menubar.addAction(self.menuSelection.menuAction())

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Assign all user-visible (translatable) strings."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "SpecDAL GUI"))
        self.label.setText(_translate("MainWindow", "Select Group:"))
        self.groupBox.setItemText(0, _translate("MainWindow", "--"))
        self.nameSelection.setToolTip(_translate("MainWindow", "Select Spectra by Name"))
        self.nameSelection.setPlaceholderText(_translate("MainWindow", "Select by Name"))
        self.selectByName.setText(_translate("MainWindow", "Select"))
        self.createGroup.setText(_translate("MainWindow", "Name Group"))
        self.groupName.setPlaceholderText(_translate("MainWindow", "Name for Selection"))
        self.onlyShowSelected.setText(_translate("MainWindow", "Only Show Selected"))
        self.menuFile.setTitle(_translate("MainWindow", "File"))
        self.menuEdit.setTitle(_translate("MainWindow", "Edit"))
        self.menuFlags.setTitle(_translate("MainWindow", "Flags"))
        self.menuOperators.setTitle(_translate("MainWindow", "Operators"))
        self.menuPlot_Metric.setTitle(_translate("MainWindow", "Plot Metric..."))
        self.menuSelection.setTitle(_translate("MainWindow", "Selection"))
        self.actionOpen.setText(_translate("MainWindow", "Open"))
        self.actionSave.setText(_translate("MainWindow", "Save"))
        self.actionPlot_Config.setText(_translate("MainWindow", "Plot Config"))
        self.actionFlag_Selection.setText(_translate("MainWindow", "Flag Selection"))
        self.actionShow_Hide_Flagged.setText(_translate("MainWindow", "Show/Hide Flagged"))
        self.actionExport_Flags.setText(_translate("MainWindow", "Export Flags"))
        self.actionStitch.setText(_translate("MainWindow", "Stitch"))
        self.actionJump_Correct.setText(_translate("MainWindow", "Jump Correct"))
        self.actionMean.setText(_translate("MainWindow", "Mean"))
        self.actionMedian.setText(_translate("MainWindow", "Median"))
        self.actionMode.setText(_translate("MainWindow", "Mode"))
        self.actionMax.setText(_translate("MainWindow", "Max"))
        self.actionMin.setText(_translate("MainWindow", "Min"))
        self.actionMean_2.setText(_translate("MainWindow", "Mean"))
        self.actionMedian_2.setText(_translate("MainWindow", "Median"))
        self.actionMaximum.setText(_translate("MainWindow", "Maximum"))
        self.actionMinimum.setText(_translate("MainWindow", "Minimum"))
        self.actionStandard_Deviation.setText(_translate("MainWindow", "Standard Deviation"))
        self.actionClear.setText(_translate("MainWindow", "Select All"))
        self.actionClear_Selection.setText(_translate("MainWindow", "Clear Selection"))
        self.actionInvert_Selection.setText(_translate("MainWindow", "Invert Selection"))
        self.actionMove_Selection_to_Top.setText(_translate("MainWindow", "Move Selection to Top"))
        self.actionFilter_Selection_by_name.setText(_translate("MainWindow", "Filter Selection by name..."))
        self.actionUnflag_Selection.setText(_translate("MainWindow", "Unflag Selection"))
254 |
255 |
--------------------------------------------------------------------------------
/specdal/gui/pyqt/save_dialog.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | Dialog
4 |
5 |
6 |
7 | 0
8 | 0
9 | 330
10 | 324
11 |
12 |
13 |
14 | Export Dataset
15 |
16 |
17 | -
18 |
19 |
20 | Export Data
21 |
22 |
23 | true
24 |
25 |
26 |
-
27 |
28 |
29 | Export Whole Dataset
30 |
31 |
32 |
33 | -
34 |
35 |
36 | Export Individual Spectra
37 |
38 |
39 |
40 |
41 |
42 |
43 | -
44 |
45 |
46 | Export Figures
47 |
48 |
49 | false
50 |
51 |
52 | true
53 |
54 |
55 |
-
56 |
57 |
58 | Export Whole Dataset
59 |
60 |
61 |
62 | -
63 |
64 |
65 | Export Individual Spectra
66 |
67 |
68 |
69 |
70 |
71 |
72 | -
73 |
74 |
75 | Include Flagged Spectra
76 |
77 |
78 |
79 | -
80 |
81 |
-
82 |
83 |
84 | Export Directory:
85 |
86 |
87 |
88 | -
89 |
90 |
91 | Select Directory ...
92 |
93 |
94 |
95 |
96 |
97 | -
98 |
99 |
100 | Qt::Vertical
101 |
102 |
103 |
104 | 20
105 | 40
106 |
107 |
108 |
109 |
110 | -
111 |
112 |
113 | Qt::Horizontal
114 |
115 |
116 | QDialogButtonBox::Cancel|QDialogButtonBox::Ok
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 | buttonBox
126 | accepted()
127 | Dialog
128 | accept()
129 |
130 |
131 | 248
132 | 254
133 |
134 |
135 | 157
136 | 274
137 |
138 |
139 |
140 |
141 | buttonBox
142 | rejected()
143 | Dialog
144 | reject()
145 |
146 |
147 | 316
148 | 260
149 |
150 |
151 | 286
152 | 274
153 |
154 |
155 |
156 |
157 |
158 |
--------------------------------------------------------------------------------
/specdal/gui/select.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/EnSpec/SpecDAL/4a89f5de6d8feb9472813da9767eafb78c0fe19a/specdal/gui/select.png
--------------------------------------------------------------------------------
/specdal/operators/__init__.py:
--------------------------------------------------------------------------------
from os.path import dirname, basename, isfile
import glob
# enumerate sibling modules so `from specdal.operators import *` exposes them
modules = glob.glob(dirname(__file__)+"/*.py")
__all__ = [ basename(f)[:-3] for f in modules if isfile(f) and not f.endswith('__init__.py')]

# re-export the operator functions at package level; note the order matters:
# stitch.py does `from . import interpolate` and picks up the *function*
# bound here, so `.interpolate` must be imported before `.stitch`
from .proximal_join import proximal_join, get_column_types
from .interpolate import interpolate
from .stitch import stitch
from .jump_correct import jump_correct
from .derivative import derivative
12 |
--------------------------------------------------------------------------------
/specdal/operators/derivative.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 | ################################################################################
4 | # derivative: calculate derivative of a spectrum
def derivative(series):
    '''
    Calculate the first spectral derivative of a spectrum.

    Parameters
    ----------
    series: pandas.Series
        measurement values indexed by wavelength

    Returns
    -------
    pandas.Series with the same index holding d(measurement)/d(wavelength),
    estimated with second-order central differences (one-sided at the ends).

    Notes
    -----
    This was previously an unimplemented stub that returned None, so no
    caller can depend on the old (useless) return value.
    '''
    # np.gradient accepts the wavelength coordinates directly, so uneven
    # spacing is handled correctly
    values = np.gradient(series.values, series.index.values)
    return pd.Series(values, index=series.index, name=series.name)
10 |
11 |
--------------------------------------------------------------------------------
/specdal/operators/interpolate.py:
--------------------------------------------------------------------------------
1 | # Operator.py defines operations on pd.Series that consists of
2 | # wavelength as index and measurement as values
3 | import pandas as pd
4 | import numpy as np
5 |
def get_monotonic_series(series):
    """Return a list of series with monotonic index.

    Overlapping detector ranges make the wavelength index jump backwards;
    each backwards jump starts a new section.

    Parameters
    ----------
    series : pandas.Series
        measurement indexed by wavelength

    Returns
    -------
    list of pandas.Series, one per monotonically increasing section

    Notes
    -----
    TODO: test what happens if not strictly monotonic
    i.e. index: 1, 2, 3, 3
    """
    # `Index.is_monotonic` was removed in pandas 2.0;
    # `is_monotonic_increasing` is the identically-behaved replacement
    # available on all supported pandas versions.
    if series.index.is_monotonic_increasing:
        return [series]
    # positional indices where the wavelength steps backwards: each one
    # is the head of a new monotonic section
    index = pd.Series(series.index)
    head_positions = index[index.diff() < 0].index

    N = head_positions.size

    result = [series.iloc[:head_positions[0]]]
    result += [series.iloc[head_positions[i]:head_positions[i+1]]
               for i in range(0, N-1)]
    result += [series.iloc[head_positions[N-1]:]]
    return result
24 |
# interpolate: interpolate at given spacing
def interpolate(series, spacing=1, method='slinear'):
    """
    Interpolate the array into given spacing

    Parameters
    ----------
    series: pandas.Series object

    spacing: int
        wavelength spacing to interpolate at (in nm)

    method: string
        "slinear" or "cubic"
        (both require scipy to be installed)

    Returns
    -------
    pandas.Series resampled onto integer wavelengths at `spacing` nm
    steps, with NaN values dropped.
    """
    seqs = []
    # handle overlapping detector ranges one monotonic section at a time
    for seq in get_monotonic_series(series):
        int_index = np.round(seq.index)
        # fill in gaps at 1 nm wavelength
        # NOTE: Index.reindex returns a (new_index, indexer) tuple;
        # [0] keeps the regularly spaced wavelength index
        int_index = int_index.reindex(np.arange(int_index.min(),
                                                int_index.max() + 1,
                                                spacing))[0]
        # union of measured and target wavelengths, so the interpolator
        # can draw on the measured points directly
        tmp_index = seq.index.union(int_index)
        seq = seq.reindex(tmp_index)
        # interpolate
        seq = seq.interpolate(method=method)
        # select the integer indices
        seqs.append(seq.loc[int_index])
    return pd.concat(seqs).dropna()
55 |
--------------------------------------------------------------------------------
/specdal/operators/jump_correct.py:
--------------------------------------------------------------------------------
1 | # Operator.py defines operations on pd.Series that consists of
2 | # wavelength as index and measurement as values
3 | import pandas as pd
4 | import numpy as np
5 |
6 | ################################################################################
7 | # jump_correct: resolve jumps in non-overlapping wavelengths
def jump_correct(series, splices, reference, method="additive"):
    """
    Correct for jumps in non-overlapping wavelengths

    Parameters
    ----------
    series: pandas.Series
        wavelength-indexed measurement

    splices: list
        list of wavelength values where jumps occur

    reference: int
        position of the reference band (0-based)

    method: string
        correction method; only "additive" is implemented

    Raises
    ------
    NotImplementedError
        if `method` is not a supported correction method (previously the
        function silently returned None in that case)
    """
    if method == "additive":
        return jump_correct_additive(series, splices, reference)
    raise NotImplementedError(
        "jump correction method '{}' is not supported".format(method))
23 |
def jump_correct_additive(series, splices, reference):
    """ Perform additive jump correction (ASD)

    The series is cut into contiguous wavelength bands at each splice
    wavelength; each band is then shifted vertically so that it meets its
    neighbor, working outward from the reference band.  The shift is
    written back into `series` in place via ``series.update``.

    Parameters
    ----------
    series: pandas.Series
        wavelength-indexed measurement; modified in place and returned

    splices: list
        wavelength values where the detector jumps occur

    reference: int
        0-based position of the band that is kept fixed
    """
    # if asd, get the locations from the metadata
    # stop if overlap exists
    def get_sequence_num(wavelength):
        """ return the sequence id after cutting at splices """
        # NOTE(review): relies on the loop variable `i` leaking out of the
        # for-loop; empty `splices` would raise NameError -- confirm
        # callers always pass at least one splice
        for i, splice in enumerate(splices):
            if wavelength <= splice:
                return i
        return i+1
    def translate_y(ref, mov, right=True):
        # translates the mov sequence to stitch with ref sequence
        # the offset makes the facing endpoints of ref and mov coincide
        if right:
            diff = ref.iloc[-1] - mov.iloc[0]
        else:
            diff = ref.iloc[0] - mov.iloc[-1]
        mov = mov + diff
        # write the shifted band back into the full series; presumably
        # get_group() below reads the live (updated) values so successive
        # shifts chain outward -- TODO confirm across pandas versions
        series.update(mov)
    groups = series.groupby(get_sequence_num)
    for i in range(reference, groups.ngroups-1, 1):
        # move sequences on the right of reference
        translate_y(groups.get_group(i),
                    groups.get_group(i+1),
                    right=True)
    for i in range(reference, 0, -1):
        # move sequences on the left of reference
        translate_y(groups.get_group(i),
                    groups.get_group(i-1),
                    right=False)
    return series
54 |
55 |
--------------------------------------------------------------------------------
/specdal/operators/proximal_join.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 | import warnings
4 | import logging as logging
5 |
6 | logging.basicConfig(level=logging.WARNING,
7 | format="%(levelname)s:%(name)s:%(message)s\n")
def get_column_types(df):
    '''
    Returns a tuple (wvl_cols, meta_cols), given a dataframe.

    Parameters
    ----------
    df : pandas.DataFrame
        frame whose columns mix wavelength values and metadata fields

    Notes
    -----
    Wavelength column is defined as columns with a numerical name (i.e. decimal).
    Everything else is considered metadata column.
    '''
    # strip the decimal point *literally* before the digit test; on older
    # pandas str.replace defaults to regex=True, where '.' matches any
    # character and would blank out every column name
    isdigit = df.columns.map(str).str.replace('.', '', regex=False).str.isdigit()
    wvl_cols = df.columns[isdigit].sort_values()
    meta_cols = df.columns.difference(wvl_cols)
    return wvl_cols, meta_cols
21 |
def proximal_join(base_df, rover_df, on='gps_time_tgt', direction='nearest'):
    '''
    Perform proximal join and return a new dataframe.

    Params
    ------
    base_df: pandas.DataFrame
        DataFrame of reference measurements

    rover_df: pandas.DataFrame
        DataFrame of target measurements

    on: string
        metadata column used as the join key; must exist in both frames

    direction: string
        forwarded to pandas.merge_asof ('nearest', 'backward', 'forward')

    Returns
    -------
    proximal: pandas.DataFrame object
        proximally processed dataset ( rover_df / base_df )

    Notes
    -----

    As a side-effect, the rover dataframe is sorted by the key
    Both base_df and rover_df must have the column specified by on.
    This column must be the same type in base and rover.
    '''
    # remove spectra with missing join metadata from the dataset
    bad_rover = rover_df[on].isnull()
    bad_base = base_df[on].isnull()
    if bad_rover.any():
        logging.warning(
            "Removing {} spectra with missing {} key from dataset."
            .format(bad_rover.sum(),on))

    if bad_base.any():
        logging.warning(
            "Removing {} reference spectra with missing {} key from dataset."
            .format(bad_base.sum(),on))
    rover_df = rover_df[~bad_rover]
    base_df = base_df[~bad_base]
    # split numeric (wavelength) columns from metadata columns
    rover_wvl_cols, rover_meta_cols = get_column_types(rover_df)
    base_wvl_cols, base_meta_cols = get_column_types(base_df)

    # join the (sorted) keys
    # merge_asof pairs every rover row with its nearest base row; the
    # reset_index keeps each frame's original row labels so both can be
    # realigned afterwards
    joined = pd.merge_asof(rover_df[on].sort_values().reset_index(),
                           base_df[on].sort_values().reset_index(),
                           on=on,
                           direction=direction,
                           suffixes=('_rover', '_base'))
    # realign both frames row-for-row along the matched pairs
    rover_df = rover_df.loc[joined['index_rover']]
    base_df = base_df.loc[joined['index_base']]
    # give base the rover index so the element-wise division lines up
    base_df.index = rover_df.index
    metadata = pd.merge(rover_df[rover_meta_cols], base_df[base_meta_cols],
                        left_index=True, right_index=True,
                        suffixes=('_rover', '_base'))
    # target / reference, wavelength column by wavelength column
    proximal = rover_df[rover_wvl_cols]/base_df[base_wvl_cols]
    proximal = pd.merge(metadata, proximal, left_index=True,
                        right_index=True) # retrieve metadata
    return proximal
79 |
80 |
81 |
--------------------------------------------------------------------------------
/specdal/operators/stitch.py:
--------------------------------------------------------------------------------
1 | # Operator.py defines operations on pd.Series that consists of
2 | # wavelength as index and measurement as values
3 | import pandas as pd
4 | import numpy as np
5 | from . import interpolate
6 |
7 | def _stitch_zero(series,wnum,idx,method='max'):
8 | return pd.concat([series.iloc[0:idx],series.iloc[idx+1:]])
9 |
10 | def _stitch_region(series,wnum,idx,method='max'):
11 | #the radiances to the left of the negative step
12 | left_idx = wnum.loc[:idx-1][wnum.loc[:idx-1]>wnum[idx]].index
13 | #the radiances to the right of the negative step
14 | right_idx = wnum.loc[idx:][wnum.loc[idx:]0).all()
38 | return pd.concat([series.iloc[0:left_idx[0]-1],merged,
39 | series.iloc[right_idx[-1]+1:]])
40 |
41 | ################################################################################
42 | # stitch: resolve overlaps in wavelengths
def stitch(series, method='max', jump_reference=None):
    """
    Stitch the regions with overlapping wavelength

    Parameters
    ----------
    series: pandas.Series object

    method: string
        How to compute final value in case of overlap. "mean","median","min", or "max".
        "first" instead truncates overlapping sections where they
        intersect (see stitch_by_intersect).

    jump_reference: int or None
        NOTE(review): accepted but never forwarded -- stitch_by_intersect
        is called with its default reference; confirm whether it should
        be passed through

    """
    #find indices of overlap

    if method == 'first':
        return stitch_by_intersect(series)

    # repeatedly remove the first non-increasing wavelength step until
    # the index is strictly increasing
    while (pd.Series(series.index).diff()[1:]<=0).any():
        # find non-positive steps in wavenumber index
        wnum = pd.Series(series.index)
        wnum_step = wnum.diff()
        neg_idx = wnum.index[wnum_step <= 0]
        # stitch at the first non-positive index
        if wnum_step[neg_idx[0]] == 0:
            # exact duplicate wavelength: drop the repeated sample
            series = _stitch_zero(series,wnum,neg_idx[0],method)
        else:
            # overlapping region: merge it using the requested method
            series = _stitch_region(series,wnum,neg_idx[0],method)

    assert (pd.Series(series.index).diff()[1:] > 0).all(), "Stitched wavenumbers not strictly increasing!"
    return series
73 |
def _intersection(p1, p2):
    """Find the intersection of two partially-overlapping series.

    Both series are first resampled onto integer wavelengths so their
    indices are comparable.

    Returns
    -------
    the index label one step (1 nm) left of where |p1 - p2| is smallest.
    """
    # `interpolate` here is the function re-exported by the operators
    # package (see specdal/operators/__init__.py)
    p1 = interpolate(p1)
    p2 = interpolate(p2)
    # (removed an unused local `diff` that duplicated this expression)
    return (p1 - p2).abs().idxmin() - 1
80 |
81 | def _jump_correct(parts,reference_idx):
82 | """Jump correct a stitch"""
83 | # jump correct backwards from the reference
84 | reference = parts[reference_idx]
85 | for i in range(reference_idx-1,-1,-1):
86 | jump = parts[i].iloc[-1] - reference.iloc[0]
87 | parts[i] -= jump
88 | reference = parts[i]
89 | # jump correct forwards from the reference
90 | reference = parts[reference_idx]
91 | for i in range(reference_idx+1,len(parts)):
92 | jump = parts[i].iloc[0] - reference.iloc[-1]
93 | parts[i] -= jump
94 | reference = parts[i]
95 |
def stitch_by_intersect(series, jump_reference=1):
    """Stitch overlapping detector ranges by truncating each section where
    it intersects the next.

    Parameters
    ----------
    series: pandas.Series
        wavelength-indexed measurement, possibly with overlapping ranges

    jump_reference: int or None
        index of the section held fixed during additive jump correction;
        None skips jump correction

    Returns
    -------
    pandas.Series with a strictly increasing wavelength index.
    """
    if (pd.Series(series.index).diff()[1:]<=0).any():
        parts = []
        # find non-positive steps in wavenumber index
        wnum = pd.Series(series.index)
        wnum_step = wnum.diff()
        neg_idxs = [0] + list(wnum.index[wnum_step <= 0]) + [None]
        # chop the spectrum up into sections of increasing wavenumber
        for i1, i2 in zip(neg_idxs,neg_idxs[1:]):
            parts.append(series.iloc[i1:i2])
        # find where sections of increasing wavenumber intersect eachother
        # BUGFIX: np.inf replaces pd.np.Inf -- the `pd.np` alias was
        # removed in pandas 2.0
        bounds = [0] + [_intersection(p1,p2)
                        for p1, p2 in zip(parts,parts[1:])] + [np.inf]
        assert len(bounds) == len(parts) + 1
        # truncate sections to the points where they intersect
        truncated_parts = []
        for b0,b1,p in zip(bounds,bounds[1:],parts):
            p = interpolate(p)
            truncated_parts.append(p[(p.index > b0) & (p.index <= b1)])

        if jump_reference is not None:
            _jump_correct(truncated_parts,jump_reference)

        series = pd.concat(truncated_parts)
    assert (pd.Series(series.index).diff()[1:] > 0).all(), "Stitched wavenumbers not strictly increasing!"
    return series
122 |
--------------------------------------------------------------------------------
/specdal/readers/__init__.py:
--------------------------------------------------------------------------------
1 | #via https://stackoverflow.com/a/1057534
2 | from os.path import dirname, basename, isfile
3 | from os.path import abspath, expanduser, splitext, join, split
4 | import glob
5 | from .asd import read_asd
6 | from .sed import read_sed
7 | from .sig import read_sig
8 | from .pico import read_pico
9 |
# enumerate reader modules so `from specdal.readers import *` exposes them
modules = glob.glob(dirname(__file__)+"/*.py")
__all__ = [ basename(f)[:-3] for f in modules if isfile(f) and not f.endswith('__init__.py')]

# map file extension -> reader function; piccolo spectra may arrive as
# .pico, or split into .light / .dark files handled by the same parser
SUPPORTED_READERS = {
    '.asd':read_asd,
    '.sig':read_sig,
    '.sed':read_sed,
    '.pico':read_pico,
    '.light':read_pico,
    '.dark':read_pico,
}
21 |
def read(filepath, read_data=True, read_metadata=True, verbose=False):
    """Calls a reader function based on the extension of the passed filename.
    .asd: read_asd
    .sig: read_sig
    .sed: read_sed
    .pico: read_pico

    Parameters
    ----------
    filepath : str
        path to the spectrum file; '~' is expanded
    read_data : bool
        whether to parse the measurement data
    read_metadata : bool
        whether to parse the file metadata
    verbose : bool
        print progress information while reading

    Returns
    -------
    2-tuple of (pd.DataFrame, OrderedDict) for data, metadata

    Raises
    ------
    ValueError
        if the file extension has no registered reader.  (This used to be
        an `assert`, which is stripped under `python -O` and carried no
        useful message.)
    """
    ext = splitext(filepath)[1]
    if ext not in SUPPORTED_READERS:
        raise ValueError(
            "Unsupported file extension '{}'; supported extensions: {}"
            .format(ext, ', '.join(sorted(SUPPORTED_READERS))))
    reader = SUPPORTED_READERS[ext]
    return reader(abspath(expanduser(filepath)), read_data,
                  read_metadata, verbose)
34 |
35 |
--------------------------------------------------------------------------------
/specdal/readers/asd.py:
--------------------------------------------------------------------------------
1 | # readers.py provides functions to read .asd spectrum files for data and
2 | # metadata.
3 |
4 |
5 | import pandas as pd
6 | import numpy as np
7 | from os.path import abspath, expanduser, splitext, basename, join, split
8 | import glob
9 | from collections import OrderedDict
10 | import json
11 | import struct
12 |
# file-format versions this reader understands; versions from 'as6' on
# also carry a reference spectrum after the target spectrum block
ASD_VERSIONS = ['ASD', 'asd', 'as6', 'as7', 'as8']
ASD_HAS_REF = {'ASD': False, 'asd': False, 'as6': True, 'as7': True,
               'as8': True}
# spectrum-type byte -> output column name (None = type not supported)
ASD_DATA_TYPES = OrderedDict([("RAW_TYPE", "tgt_count"),
                              ("REF_TYPE", "tgt_reflect"),
                              ("RAD_TYPE", "tgt_radiance"),
                              ("NOUNITS_TYPE", None),
                              ("IRRAD_TYPE", "tgt_irradiance"),
                              ("QI_TYPE", None),
                              ("TRANS_TYPE", None),
                              ("UNKNOWN_TYPE", None),
                              ("ABS_TYPE", None)])
# packed layout of the 56-byte GPS block ('=' = native order, no padding);
# unpacked field-by-field in read_asd
ASD_GPS_DATA = struct.Struct("= 5d 2b cl 2b 5B 2c")
26 |
def read_asd(filepath, read_data=True, read_metadata=True, verbose=False):
    """
    Read asd file for data and metadata

    Parameters
    ----------
    filepath : str
        path to the .asd file; '~' is expanded
    read_data : bool
        parse the spectrum (and reference, when the version has one)
    read_metadata : bool
        parse instrument and GPS metadata
    verbose : bool
        print the file name while reading

    Return
    ------
    2-tuple of (pd.DataFrame, OrderedDict) for data, metadata
    """
    data = None
    metadata = None
    if read_metadata:
        metadata = OrderedDict()
        raw_metadata = {}
    with open(abspath(expanduser(filepath)), 'rb') as f:
        if verbose:
            print('reading {}'.format(filepath))
        binconts = f.read()
        # the first 3 bytes identify the file-format version
        version = binconts[0:3].decode('utf-8')
        assert(version in ASD_VERSIONS) # TODO: define ASD_VERSIONS
        # read spectrum type (single byte at fixed offset 186)
        spectrum_type_index = struct.unpack('B', binconts[186:(186 + 1)])[0]
        spectrum_type = list(ASD_DATA_TYPES.keys())[spectrum_type_index]
        # read wavelength info
        wavestart = struct.unpack('f', binconts[191:(191 + 4)])[0]
        wavestep = struct.unpack('f', binconts[195:(195 + 4)])[0] # in nm
        num_channels = struct.unpack('h', binconts[204:(204 + 2)])[0]
        wavestop = wavestart + num_channels*wavestep - 1
        if read_data:
            # read data
            tgt_column = ASD_DATA_TYPES[spectrum_type]
            ref_column = tgt_column.replace('tgt', 'ref')
            # data-format byte: 0 -> float32, 2 -> float64
            data_format = struct.unpack('B', binconts[199:(199 + 1)])[0]
            fmt = 'f'*num_channels
            if data_format == 2:
                fmt = 'd'*num_channels
            if data_format == 0:
                fmt = 'f'*num_channels
            # data to DataFrame
            # BUGFIX: the block size must match the struct format; the old
            # `num_channels*8` made struct.unpack fail for float32 files,
            # which use 4 bytes per channel
            size = struct.calcsize(fmt)
            # Read the spectrum block data
            waves = np.linspace(wavestart, wavestop, num_channels)
            spectrum = np.array(struct.unpack(fmt, binconts[484:(484 + size)]))
            reference = None
            if ASD_HAS_REF[version]:
                # read reference spectrum, stored after the target block
                start = 484 + size
                ref_flag = struct.unpack('??', binconts[start: start + 2])[0]
                first, last = start + 18, start + 20
                ref_desc_length = struct.unpack('H', binconts[first:last])[0]
                # reference values follow the variable-length description
                first = start + 20 + ref_desc_length
                last = first + size
                reference = np.array(struct.unpack(fmt, binconts[first:last]))
            data = pd.DataFrame({tgt_column : spectrum,
                                 ref_column: reference}, index=waves)
            data.index.name = 'wavelength'
            # BUGFIX: dropna returns a new frame; the result was previously
            # discarded, leaving an all-NaN reference column for versions
            # that carry no reference spectrum
            data = data.dropna(axis=1, how='all')
        if read_metadata:
            metadata['file'] = f.name
            metadata['instrument_type'] = 'ASD'
            # read splice wavelength
            splice1 = struct.unpack('f', binconts[444:(444 + 4)])[0]
            splice2 = struct.unpack('f', binconts[448:(448 + 4)])[0]
            # integration time
            integration_time = struct.unpack('= L', binconts[390:(390 + 4)])[0] # in ms
            # gps info (56-byte packed block at offset 344)
            gps_struct = ASD_GPS_DATA.unpack(binconts[344:(344+56)])
            gps_true_heading, gps_speed, gps_latitude, gps_longitude, gps_altitude = gps_struct[:5]
            gps_flags = gps_struct[5:7] # unpack this into bits
            gps_hardware_mode = gps_struct[7]
            gps_timestamp = gps_struct[8]
            gps_flags2 = gps_struct[9:11] # unpack this into bits
            gps_satellites = gps_struct[11:16]
            gps_filler = gps_struct[16:18]
            # metadata
            metadata['integration_time'] = integration_time
            metadata['measurement_type'] = spectrum_type
            metadata['gps_time_tgt'] = gps_timestamp
            metadata['gps_time_ref'] = None
            metadata['wavelength_range'] = (wavestart, wavestop)
            # metadata['splice'] = (splice1, splice2)
            # metadata['resolution'] = wavestep
    return data, metadata
109 |
--------------------------------------------------------------------------------
/specdal/readers/pico.py:
--------------------------------------------------------------------------------
1 | # readers.py provides functions to read spectrum files for data and
2 | # metadata.
3 |
4 | import pandas as pd
5 | import numpy as np
6 | from os.path import abspath, expanduser, splitext, basename, join, split
7 | import glob
8 | from collections import OrderedDict
9 | import json
10 |
11 | PICO_GPS_KEYS = "gps","GPS start","GPS"
12 |
class PiccoloFileError(Exception):
    """Raised when a piccolo file is malformed (missing one of the four
    required spectra) or when its .pico.dark companion cannot be found."""
    pass
16 |
17 | def _find_pico_dark(pico_light_path):
18 | """
19 | Recent piccolo versions store dark and light spectra in different locations
20 | Default naming conventions are a bit tricky (timestamp changes between
21 | light and dark), so we need to check every dark file in the directory
22 | """
23 | #easy case - there's just one dark and one light file, so they have
24 | #the same time stamp
25 | first_end = "0000.pico.light"
26 | if pico_light_path.endswith(first_end):
27 | return pico_light_path[:-len(first_end)]+"0000.pico.dark"
28 | #harder case - there's multiple light files per dark, so find the dark
29 | #with the closest timestamp before this one
30 | #assume there's not that many dark files
31 | dark_files = glob.glob(join(split(pico_light_path)[0],"*.dark"))
32 | #insert our light file in here, its dark file will come immediately before
33 | #it when sorted
34 | dark_files.append(pico_light_path)
35 | dark_files.sort()
36 | dark_idx = dark_files.index(pico_light_path)-1
37 | if dark_idx == -1:
38 | raise PiccoloFileError("Unable to find .pico.dark file for {}"
39 | .format(pico_light_path))
40 | #TODO: It's still possible there's not a matching dark file
41 | #(eg we've chosen the wrong dark file)
42 | return dark_files[dark_idx]
43 |
def read_pico(filepath, read_data=True, read_metadata=True, verbose=False):
    """
    Read pico file for data and metadata

    Parameters
    ----------
    filepath : str
        path to a .pico / .pico.light / .light / .dark file
    read_data : bool
        parse the four component spectra into a DataFrame
    read_metadata : bool
        parse capture metadata into an OrderedDict
    verbose : bool
        print the file name while reading

    Return
    ------
    2-tuple of (pd.DataFrame, OrderedDict) for data, metadata

    Raises
    ------
    PiccoloFileError
        if any of the four required spectra is missing (read_data only)
    """
    data = None
    metadata = None
    raw_metadata = {}
    with open(abspath(expanduser(filepath)), 'r') as f:
        if verbose:
            print('reading {}'.format(filepath))
        raw_metadata = json.load(f)

    #dark spectra are stored in a different file for some piccolo formats
    if filepath.endswith('.pico.light'):
        with open(_find_pico_dark(filepath),'r') as f:
            dark_metadata = json.load(f)
            raw_metadata['Spectra'] += dark_metadata['Spectra']

    #TODO: How to handle multiple spectrometers per file?
    #For now, just return the first one

    spectrometer = raw_metadata["Spectra"][0]["Metadata"]["name"]
    #the 4 spectra we need to get a complete measurement
    downwelling_light = None
    downwelling_dark = None
    upwelling_light = None
    upwelling_dark = None
    #figure out which of the 4 spectra we need
    for spectrum in raw_metadata["Spectra"]:
        meta = spectrum["Metadata"]
        if meta["name"] == spectrometer:
            if meta["Dark"] and meta["Direction"]=="Upwelling":
                upwelling_dark = spectrum
            elif meta["Dark"] and meta["Direction"]=="Downwelling":
                downwelling_dark = spectrum
            elif meta["Direction"] == "Upwelling":
                upwelling_light = spectrum
            elif meta["Direction"] == "Downwelling":
                downwelling_light = spectrum

    if read_data:
        if(downwelling_light is None or downwelling_dark is None or
           upwelling_light is None or upwelling_dark is None):
            raise PiccoloFileError("Piccolo File missing necessary spectrum")
        #Pico always in raw counts
        # the polynomial maps pixel index -> wavelength; coefficients are
        # stored lowest-order first, np.poly1d wants highest-order first
        wavelength_coeffs = downwelling_light["Metadata"][
                "WavelengthCalibrationCoefficients"]
        wavelength_idxs = range(len(downwelling_light["Pixels"]))
        wavelengths = np.poly1d(wavelength_coeffs[::-1])(wavelength_idxs)
        #TODO: How to get ref data for pico?
        columns = ("wavelength","tgt_count","ref_count","tgt_count_dark",
                   "ref_count_dark")
        data = pd.DataFrame(
            columns = columns,
            data = np.array((wavelengths,
                             upwelling_light["Pixels"],
                             downwelling_light["Pixels"],
                             upwelling_dark["Pixels"],
                             downwelling_dark["Pixels"],
                             )).T
        )
        # BUGFIX: index by wavelength like the other readers; with the
        # default integer index, wavelength_range below reported row
        # numbers instead of wavelengths
        data = data.set_index("wavelength")

    if read_metadata:
        metadata = OrderedDict()
        metadata['file'] = f.name
        metadata['instrument_type'] = spectrometer
        metadata['integration_time'] = downwelling_light["Metadata"]["IntegrationTime"]
        # BUGFIX: the old loop read the hard-coded "gps" key instead of
        # gps_key and then self-assigned a not-yet-existing
        # metadata['gps_time_tgt'] entry, raising KeyError whenever a GPS
        # block was present
        metadata['gps_time_ref'] = None
        metadata['gps_time_tgt'] = None
        for gps_key in PICO_GPS_KEYS:
            if gps_key in downwelling_light:
                metadata['gps_time_ref'] = downwelling_light.get(gps_key, {}).get("time", None)
                # NOTE(review): pico files show no separate target GPS
                # time; reuse the reference time -- confirm with the
                # instrument documentation
                metadata['gps_time_tgt'] = metadata['gps_time_ref']
        metadata['wavelength_range'] = None
        if read_data:
            metadata['wavelength_range'] = (data.index.min(), data.index.max())
    return data, metadata
124 |
--------------------------------------------------------------------------------
/specdal/readers/sed.py:
--------------------------------------------------------------------------------
1 | # readers.py provides functions to read .sed spectrum files for data and
2 | # metadata.
3 |
4 | import pandas as pd
5 | import numpy as np
6 | from os.path import abspath, expanduser, splitext, basename, join, split
7 | import glob
8 | from collections import OrderedDict
9 | import json
10 |
11 | SED_COLUMNS = {
12 | "Wvl": "wavelength",
13 | "Rad. (Target)": "tgt_reflect",
14 | 'Rad. (Ref.)': "ref_reflect",
15 | "Tgt./Ref. %": "pct_reflect",
16 | "Irrad. (Ref.)": "ref_irradiance",
17 | "Irrad. (Target)": "tgt_irradiance",
18 | "Norm. DN (Ref.)": "ref_count",
19 | "Norm. DN (Target)": "tgt_count",
20 | "Reflect. %": "pct_reflect",
21 | "Reflect. [1.0]":"dec_reflect",
22 | "Chan.#":"channel_num",
23 | }
24 |
def read_sed(filepath, read_data=True, read_metadata=True, verbose=False):
    """
    Read sed file for data and metadata

    Parameters
    ----------
    filepath : str
        path to the .sed file; '~' is expanded
    read_data : bool
        parse the tab-separated measurement table after the 'Data:' line
    read_metadata : bool
        parse the 'key: value' header into an OrderedDict
    verbose : bool
        print the file name while reading

    Return
    ------
    2-tuple of (pd.DataFrame, OrderedDict) for data, metadata
    """
    data = None
    metadata = None
    raw_metadata = {}
    # first, get raw metadata and line number of data
    with open(abspath(expanduser(filepath)), 'r') as f:
        if verbose:
            print('reading {}'.format(filepath))
        for i, line in enumerate(f):
            if line[0:5] == 'Data:':
                break
            field = line.strip().split(': ')
            if len(field) > 1:
                raw_metadata[field[0]] = field[1]
        if read_data:
            data = pd.read_csv(filepath, skiprows=i+1,
                               sep='\t')
            data.columns = [SED_COLUMNS[col] for col in data.columns]
            data = data.set_index("wavelength")
            if "pct_reflect" in data:
                data["pct_reflect"] = data["pct_reflect"]/100
            if "dec_reflect" in data:
                data["pct_reflect"] = data["dec_reflect"]
        if read_metadata:
            metadata = OrderedDict()
            metadata['file'] = f.name
            metadata['instrument_type'] = 'SED'
            ################################################################################
            # Average the integration times
            # TODO: check if this is valid
            metadata['integration_time'] = np.mean(
                list(map(int, raw_metadata['Integration'].split(','))))
            ################################################################################
            metadata['measurement_type'] = raw_metadata['Measurement']
            metadata['gps_time_tgt'] = None
            metadata['gps_time_ref'] = None
            # BUGFIX: 'GPS Time' is an optional header field; .get avoids
            # a KeyError when it is absent ('n/a' is the absent sentinel)
            if raw_metadata.get('GPS Time', 'n/a') != 'n/a':
                # TODO: WILL THIS BE A TUPLE?
                metadata['gps_time_tgt'] = raw_metadata['GPS Time']
            metadata['wavelength_range'] = tuple(map(int, raw_metadata['Wavelength Range'].split(',')))
    return data, metadata
73 |
--------------------------------------------------------------------------------
/specdal/readers/sig.py:
--------------------------------------------------------------------------------
1 | # readers.py provides functions to read .sig spectrum files for data and
2 | # metadata.
3 |
4 | import pandas as pd
5 | import numpy as np
6 | from os.path import abspath, expanduser, splitext, basename, join, split
7 | import glob
8 | from collections import OrderedDict
9 | import json
10 |
def read_sig(filepath, read_data=True, read_metadata=True, verbose=False):
    """
    Read sig file for data and metadata

    Parameters
    ----------
    filepath : str
        path to the .sig file; '~' is expanded
    read_data : bool
        parse the whitespace-separated table after the 'data=' line
    read_metadata : bool
        parse the 'key= value' header into an OrderedDict
    verbose : bool
        print the file name while reading

    Return
    ------
    2-tuple of (pd.DataFrame, OrderedDict) for data, metadata

    Raises
    ------
    ValueError
        if the 'units' header value is not a recognized measurement type
        (previously this fell through to a NameError on `colnames`)
    """
    data = None
    metadata = None
    raw_metadata = {}
    # first, get raw metadata and line number of data
    with open(abspath(expanduser(filepath)), 'r') as f:
        if verbose:
            print('reading {}'.format(filepath))
        for i, line in enumerate(f):
            if line[0:5] == 'data=':
                break
            field = line.strip().split('= ')
            if len(field) > 1:
                raw_metadata[field[0]] = field[1].strip()
        if read_data:
            # read data
            if raw_metadata['units'] == "Counts, Counts":
                colnames = ["wavelength", "ref_counts",
                            "tgt_counts", "pct_reflect"]
            elif raw_metadata['units'] == "Radiance, Radiance":
                colnames = ["wavelength", "ref_radiance",
                            "tgt_radiance", "pct_reflect"]
            else:
                raise ValueError("Unsupported units in {}: {}".format(
                    filepath, raw_metadata['units']))
            data = pd.read_csv(filepath, skiprows=i+1,
                               sep=r"\s+", index_col=0,
                               header=None, names=colnames
                               )
            if "pct_reflect" in data:
                data["pct_reflect"] = data["pct_reflect"]/100
        if read_metadata:
            metadata = OrderedDict()
            metadata['file'] = f.name
            metadata['instrument_type'] = 'SIG'
            ################################################################################
            # Average the integration times
            # TODO: check if this is valid
            metadata['integration_time'] = np.mean(
                list(map(float, raw_metadata['integration'].split(', '))))
            ################################################################################
            metadata['measurement_type'] = raw_metadata['units'].split(', ')[0]
            # a missing or malformed 'gpstime' header leaves both times None;
            # the old bare `except:` swallowed every exception type
            try:
                metadata['gps_time_ref'], metadata['gps_time_tgt'] = tuple(
                    map(float, raw_metadata['gpstime'].replace(' ', '').split(',')))
            except (KeyError, ValueError):
                metadata['gps_time_tgt'] = None
                metadata['gps_time_ref'] = None

            metadata['wavelength_range'] = None
            if read_data:
                metadata['wavelength_range'] = (data.index.min(), data.index.max())
    return data, metadata
68 |
--------------------------------------------------------------------------------
/specdal/tests/test_collection.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import numpy as np
4 | import pandas as pd
5 | import pandas.util.testing as pdt
6 | import unittest
7 | from collections import OrderedDict
8 | sys.path.insert(0, os.path.abspath("../../"))
9 | from specdal.spectrum import Spectrum
10 | from specdal.collection import Collection, proximal_join, df_to_collection
11 |
class collectionTests(unittest.TestCase):
    """Tests for specdal Collection: name uniqueness, data access,
    DataFrame round-trips, and proximal joins."""

    def setUp(self):
        pass

    def test_collection_assert_unique_name1(self):
        """Collection() must reject two spectra that share a name."""
        s1 = Spectrum(name='s1',
                      measurement=pd.Series([1, 2, 3, 4],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'))
        s2 = Spectrum(name='s1',
                      measurement=pd.Series([1, 2, 3, 4],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'))
        self.assertRaises(AssertionError, Collection, name='c1', spectra=[s1, s2])

    def test_collection_assert_unique_name2(self):
        """Collection.append() must reject a spectrum whose name is taken.

        BUG FIX: this method was previously also named
        test_collection_assert_unique_name1, so it shadowed the
        constructor test above and only one of the two ever ran.
        """
        s1 = Spectrum(name='s1',
                      measurement=pd.Series([1, 2, 3, 4],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'))
        s2 = Spectrum(name='s1',
                      measurement=pd.Series([1, 2, 3, 4],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'))
        c1 = Collection(name='c1')
        c1.append(s1)
        self.assertRaises(AssertionError, c1.append, s2)

    def test_collection_data_unstitched(self):
        """.data must raise while spectra have misaligned/duplicate
        wavelengths; stitch() should then succeed."""
        s1 = Spectrum(name='s1',
                      measurement=pd.Series([1, 2, 3, 4],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'))
        # s2 has off-grid wavelengths; s3 has a duplicated wavelength (3).
        s2 = Spectrum(name='s2',
                      measurement=pd.Series([10, 11, 12, 13],
                                            index=pd.Index([0.8, 2.2, 3.7, 5],
                                                           name='wavelength'),
                                            name='pct_reflect'))
        s3 = Spectrum(name='s3',
                      measurement=pd.Series([100, 200, 300, 400, 500],
                                            index=pd.Index([1, 2, 3, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'))
        c1 = Collection(name='c1', spectra=[s1, s2, s3])
        self.assertRaises(ValueError, lambda: c1.data)
        # NOTE(review): only checks stitch() doesn't raise -- TODO assert
        # that c1.data becomes accessible afterwards.
        c1.stitch()

    def test_collection_data_with_meta(self):
        """data_with_meta() runs on a collection whose spectra carry
        metadata dicts."""
        m1 = OrderedDict()
        m2 = OrderedDict()
        m3 = OrderedDict()
        for i, meta in enumerate([m1, m2, m3]):
            meta['file'] = 'f{}'.format(i)
            meta['instrument_type'] = 'asd'
            meta['integration_time'] = '15'
            meta['measurement_type'] = 'pct_reflect'
            meta['gps_time'] = (100 + i, 200 + i)
            meta['wavelength_range'] = (1, 4)
        s1 = Spectrum(name='s1',
                      measurement=pd.Series([1, 2, 3, 4],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata=m1)
        s2 = Spectrum(name='s2',
                      measurement=pd.Series([10, 11, 12, 13],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata=m2)
        s3 = Spectrum(name='s3',
                      measurement=pd.Series([100, 200, 300, 400],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata=m3)
        c1 = Collection(name='c1', spectra=[s1, s2, s3])
        c1.data_with_meta()
        # TODO: test correctness

    def test_df_to_collection(self):
        """Round-trip: metadata columns land in s.metadata, measurement
        columns in s.measurement."""
        df = pd.DataFrame({'file': ['f1', 'f2', 'f3'],
                           'gps_time': [1, 2, 3],
                           '500': [.1, .2, .3],
                           '501': [11, 22, 33]},
                          index=['s1', 's2', 's3'])
        c = df_to_collection(df, name='c1')
        for s in c.spectra:
            self.assertEqual(df.loc[s.name]['file'], s.metadata['file'])
            self.assertEqual(df.loc[s.name]['gps_time'], s.metadata['gps_time'])
            self.assertEqual(df.loc[s.name]['500'], s.measurement.loc['500'])
            self.assertEqual(df.loc[s.name]['501'], s.measurement.loc['501'])

    def test_df_to_collection_without_metadata(self):
        """df_to_collection works when the frame has only measurements."""
        df = pd.DataFrame({'500': [.1, .2, .3],
                           '501': [11, 22, 33]},
                          index=['s1', 's2', 's3'])
        c = df_to_collection(df, name='c1')
        for s in c.spectra:
            self.assertEqual(df.loc[s.name]['500'], s.measurement.loc['500'])
            self.assertEqual(df.loc[s.name]['501'], s.measurement.loc['501'])

    def test_df_to_collection_without_measurements(self):
        """df_to_collection works when the frame has only metadata."""
        df = pd.DataFrame({'file': ['f1', 'f2', 'f3'],
                           'gps_time': [1, 2, 3]},
                          index=['s1', 's2', 's3'])
        c = df_to_collection(df, name='c1')
        for s in c.spectra:
            self.assertEqual(df.loc[s.name]['file'], s.metadata['file'])
            self.assertEqual(df.loc[s.name]['gps_time'], s.metadata['gps_time'])

    def test_proximal_join(self):
        """Rover spectra paired with nearest-in-gps_time base spectra;
        identical measurements give a ratio of 1.0 everywhere."""
        s1 = Spectrum(name='s1',
                      measurement=pd.Series([1, 2, 3, 4],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata={'gps_time_tgt': 1})
        s2 = Spectrum(name='s2',
                      measurement=pd.Series([10, 11, 12, 13],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata={'gps_time_tgt': 5})
        s3 = Spectrum(name='s3',
                      measurement=pd.Series([100, 200, 300, 400],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata={'gps_time_tgt': 10})
        base = Collection(name='base', spectra=[s1, s2, s3])
        s4 = Spectrum(name='s4',
                      measurement=pd.Series([1, 2, 3, 4],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata={'gps_time_tgt': 2})
        s5 = Spectrum(name='s5',
                      measurement=pd.Series([10, 11, 12, 13],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata={'gps_time_tgt': 5})
        s6 = Spectrum(name='s6',
                      measurement=pd.Series([100, 200, 300, 400],
                                            index=pd.Index([1, 2, 3, 4],
                                                           name='wavelength'),
                                            name='pct_reflect'),
                      metadata={'gps_time_tgt': 9})
        rover = Collection(name='rover', spectra=[s4, s5, s6])
        proximal = proximal_join(base, rover)
        proximal_df_correct = pd.DataFrame({'s4': [1.0, 1.0, 1.0, 1.0],
                                            's5': [1.0, 1.0, 1.0, 1.0],
                                            's6': [1.0, 1.0, 1.0, 1.0]},
                                           index=pd.Index([1, 2, 3, 4],
                                                          name='wavelength'))
        pdt.assert_frame_equal(proximal.data, proximal_df_correct)
166 |
167 |
168 |
def main():
    # Hand control to unittest's command-line test runner.
    unittest.main()


if __name__ == "__main__":
    main()
174 |
--------------------------------------------------------------------------------
/specdal/tests/test_groupby.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import numpy as np
4 | import pandas as pd
5 | import pandas.util.testing as pdt
6 | import unittest
7 |
8 | sys.path.insert(0, os.path.abspath("../../"))
9 | from specdal.spectrum import Spectrum
10 | from specdal.collection import Collection
11 |
class GroupByTests(unittest.TestCase):
    """Tests for Collection.groupby keyed on '_'-separated name parts."""

    def setUp(self):
        # 3 * 3 * 2 * 4 = 72 spectra named like 'A_a_0_0001'.
        # (An earlier comment claimed 36, which doesn't match these loops.)
        self.c = Collection(name='For Groups')
        for a in ('A', 'B', 'C'):
            for b in ('a', 'b', 'c'):
                for c in ('0', '1'):
                    for d in ('0001', '0002', '0003', '0004'):
                        self.c.append(Spectrum('_'.join([a, b, c, d])))

    def test_groups(self):
        """Grouping on name parts 0 and 2 yields groups such as 'A_0'
        whose members all share those parts.

        BUG FIX: this test previously only printed member names and
        asserted nothing, so it could never fail.
        """
        groups = self.c.groupby(separator='_', indices=[0, 2])
        members = list(groups['A_0'].spectra)
        # 3 choices for part 1 ('a','b','c') x 4 for part 3 ('0001'..'0004').
        self.assertEqual(len(members), 12)
        for s in members:
            parts = s.name.split('_')
            self.assertEqual(parts[0], 'A')
            self.assertEqual(parts[2], '0')
    # NOTE(review): disabled test below is kept verbatim; its expected
    # counts (e.g. 4 groups for indices=[2], 36 for indices=[0, 1, 2])
    # do not match the fixture built in setUp -- confirm the intended
    # fixture before re-enabling.
    '''
    def test_num_groups(self):
        groups = self.c.groupby(separator='_', indices=[0])
        self.assertEqual(len(groups), 3)
        groups = self.c.groupby(separator='_', indices=[1])
        self.assertEqual(len(groups), 3)
        groups = self.c.groupby(separator='_', indices=[2])
        self.assertEqual(len(groups), 4)
        groups = self.c.groupby(separator='_', indices=[0, 1])
        self.assertEqual(len(groups), 9)
        groups = self.c.groupby(separator='_', indices=[0, 2])
        self.assertEqual(len(groups), 12)
        groups = self.c.groupby(separator='_', indices=[1, 2])
        self.assertEqual(len(groups), 12)
        groups = self.c.groupby(separator='_', indices=[0, 1, 2])
        self.assertEqual(len(groups), 36)
    '''
def main():
    # Run every test case defined in this module.
    unittest.main()


if __name__ == '__main__':
    main()
50 |
--------------------------------------------------------------------------------
/specdal/tests/test_proximal_join.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import numpy as np
4 | import pandas as pd
5 | import pandas.util.testing as pdt
6 | import unittest
7 | from collections import OrderedDict
8 | sys.path.insert(0, os.path.abspath("../../"))
9 | # from specdal.operator import proximal_join
10 | from specdal.spectrum import Spectrum
11 | from specdal.collection import Collection, proximal_join, df_to_collection
12 |
class proximalTests(unittest.TestCase):
    """Tests for proximal_join, which pairs rover spectra with the
    nearest-in-time base spectra using the 'gps_time_tgt' column."""

    def setUp(self):
        pass

    def test_proximal_join(self):
        # Integer columns 1-3 are wavelengths; 'gps_time_tgt' is the
        # join key. Rows s1..s4 are individual spectra.
        rover_df = pd.DataFrame({'gps_time_tgt':[1, 4, 6, 8],
                                 1:[100, 100, 100, 100],
                                 2:[100, 100, 100, 100],
                                 3:[100, 100, 100, 100]},
                                index=pd.Index(['s1', 's2', 's3', 's4']))
        base_df = pd.DataFrame({'gps_time_tgt':[1, 3, 7, 10],
                                1:[2, 2, 2, 2],
                                2:[50, 50, 50, 10],
                                3:[1, 2, 3, 4]},
                               index=pd.Index(['s1', 's2', 's3', 's4']))
        # NOTE(review): the result is discarded, so this only verifies
        # that calling proximal_join on raw DataFrames does not raise.
        # TODO: assert the joined values (see the expected frame below).
        proximal_join(base_df, rover_df)
        # proximal_df_correct = pd.DataFrame({'s4':[1.0, 1.0, 1.0, 1.0],
        #                                     's5':[1.0, 1.0, 1.0, 1.0],
        #                                     's6':[1.0, 1.0, 1.0, 1.0]},
        #                                    index=pd.Index([1, 2, 3, 4],
        #                                                   name='wavelength'))
        # pdt.assert_frame_equal(proximal.data, proximal_df_correct)

    def test_proximal_join_on_unsorted(self):
        # Same idea, but gps_time_tgt values are deliberately unsorted
        # and the frames go through df_to_collection first.
        rover_df = pd.DataFrame({'gps_time_tgt':[4, 8, 1, 6],
                                 4:[100, 100, 100, 100],
                                 5:[100, 100, 100, 100],
                                 6:[100, 100, 100, 100]},
                                index=pd.Index(['s1', 's2', 's3', 's4']))
        base_df = pd.DataFrame({'gps_time_tgt':[10, 3, 7, 1],
                                4:[2, 2, 2, 2],
                                5:[50, 50, 50, 10],
                                6:[1, 2, 3, 4]},
                               index=pd.Index(['s1', 's2', 's3', 's4']))
        rover_c = df_to_collection(rover_df, name='rover')
        base_c = df_to_collection(base_df, name='base')
        proximal_c = proximal_join(rover_c, base_c)
        # c = df_to_collection(base_df, name='hi')
        # NOTE(review): prints instead of asserting -- TODO replace with
        # assertions on proximal_c.data once expected output is pinned.
        print(proximal_c.data)
        print([s.name for s in proximal_c.spectra])
51 |
def main():
    # Delegate to unittest's CLI runner.
    unittest.main()


if __name__ == "__main__":
    main()
57 |
--------------------------------------------------------------------------------
/specdal/tests/test_reader.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import glob
4 | import numpy as np
5 | import pandas as pd
6 | import pandas.util.testing as pdt
7 | import unittest
8 |
9 | sys.path.insert(0, os.path.abspath("../../"))
10 | from specdal.reader import read_asd, read_sig, read_sed, read_pico
11 |
# NOTE(review): absolute, machine-specific fixture paths -- these reader
# tests can only run on a machine where the files exist. TODO: ship small
# sample files with the repo or skip when the paths are missing.
ASD_FNAME = "/media/mwestphall/2TB/Big_Bio/Big_Bio_2015/LeafLevel/07202015/ASD/_164_4_LESCA_1_T_1_00001.asd"
SIG_FNAME = "/media/mwestphall/2TB/Big_Bio/Big_Bio_2015/LeafLevel/07192015/SVC/136_1_ANDGE_1_B_1_000.sig"
SED_PSRPLS_FNAME = "/media/mwestphall/2TB/Big_Bio/Big_Bio_2015/LeafLevel/07102015/PSR+/149_1_ANDGE_1_M_1__00013.sed"
SED_PSM_FNAME = "/media/mwestphall/2TB/Big_Bio/Big_Bio_2015/LeafLevel/06302015/PSM/157_1_AGRSM_1_M_1__00205.sed"
PICO_FNAME = "/media/mwestphall/2TB/pdata/spectra/20170831-121053_untitled_bat0000__0000.pico.light"
17 |
class readerTests(unittest.TestCase):
    """Smoke tests for the per-format file readers.

    Each reader returns a (data, metadata) pair; the read_data /
    read_metadata flags select which half is populated, with the
    other returned as None.

    CLEANUP: removed a dead, syntactically-broken commented-out test
    block (test_read_asd / test_read_sed) that could never be revived
    as written.
    """

    def setUp(self):
        pass

    def test_asd_reader(self):
        """Default call returns both data and metadata."""
        data, meta = read_asd(ASD_FNAME)
        self.assertTrue(data is not None)
        self.assertTrue(meta is not None)

    def test_sig_reader(self):
        data, meta = read_sig(SIG_FNAME)
        self.assertTrue(data is not None)
        self.assertTrue(meta is not None)

    def test_sed_reader(self):
        data, meta = read_sed(SED_PSM_FNAME)
        self.assertTrue(data is not None)
        self.assertTrue(meta is not None)

    def test_pico_reader(self):
        data, meta = read_pico(PICO_FNAME)
        self.assertTrue(data is not None)
        self.assertTrue(meta is not None)

    def test_asd_reader_data(self):
        """read_metadata=False must leave metadata as None."""
        data, meta = read_asd(ASD_FNAME,
                              read_data=True, read_metadata=False, verbose=False)
        self.assertTrue(data is not None)
        self.assertTrue(meta is None)

    def test_sig_reader_data(self):
        data, meta = read_sig(SIG_FNAME,
                              read_data=True, read_metadata=False, verbose=False)
        self.assertTrue(data is not None)
        self.assertTrue(meta is None)

    def test_sed_reader_data(self):
        data, meta = read_sed(SED_PSM_FNAME,
                              read_data=True, read_metadata=False, verbose=False)
        self.assertTrue(data is not None)
        self.assertTrue(meta is None)

    def test_pico_reader_data(self):
        data, meta = read_pico(PICO_FNAME,
                               read_data=True, read_metadata=False, verbose=False)
        self.assertTrue(data is not None)
        self.assertTrue(meta is None)

    def test_asd_reader_metadata(self):
        """read_data=False must leave data as None."""
        data, meta = read_asd(ASD_FNAME,
                              read_data=False, read_metadata=True, verbose=False)
        self.assertTrue(data is None)
        self.assertTrue(meta is not None)

    def test_sig_reader_metadata(self):
        data, meta = read_sig(SIG_FNAME,
                              read_data=False, read_metadata=True, verbose=False)
        self.assertTrue(data is None)
        self.assertTrue(meta is not None)

    def test_sed_reader_metadata(self):
        data, meta = read_sed(SED_PSM_FNAME,
                              read_data=False, read_metadata=True, verbose=False)
        self.assertTrue(data is None)
        self.assertTrue(meta is not None)

    def test_pico_reader_metadata(self):
        data, meta = read_pico(PICO_FNAME,
                               read_data=False, read_metadata=True, verbose=False)
        self.assertTrue(data is None)
        self.assertTrue(meta is not None)
def main():
    # Discover and run this module's tests via unittest.
    unittest.main()


if __name__ == "__main__":
    main()
100 |
--------------------------------------------------------------------------------
/specdal/tests/test_resampler.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import numpy as np
4 | import pandas as pd
5 | import pandas.util.testing as pdt
6 | import unittest
7 |
8 | sys.path.insert(0, os.path.abspath("../../"))
9 | from specdal.operator import resample
10 |
class resamplerTests(unittest.TestCase):
    """Tests that resample() produces evenly spaced wavelength indices."""

    def setUp(self):
        pass

    @staticmethod
    def _measurement(wavelengths):
        # Build a 4-point measurement series over the given wavelengths.
        return pd.Series([1, 2, 3, 4],
                         index=pd.Index(wavelengths, name='wavelength'),
                         name='measurement')

    def test_index_without_gap(self):
        # Near-integer wavelengths snap onto a contiguous integer grid.
        resampled = resample(self._measurement([0.8, 2.1, 2.9, 4]))
        expected = pd.Index([1, 2, 3, 4], name='wavelength', dtype=float)
        pdt.assert_index_equal(resampled.index, expected)

    def test_index_with_gap(self):
        # Gaps between samples are filled so the grid stays contiguous.
        resampled = resample(self._measurement([0.8, 3.1, 6, 8]))
        expected = pd.Index([1, 2, 3, 4, 5, 6, 7, 8],
                            name='wavelength', dtype=float)
        pdt.assert_index_equal(resampled.index, expected)

    def test_index_with_gap_spacing(self):
        # An explicit spacing controls the step of the output grid.
        resampled = resample(self._measurement([0.8, 3.1, 6, 8]), spacing=2)
        expected = pd.Index([1, 3, 5, 7], name='wavelength', dtype=float)
        pdt.assert_index_equal(resampled.index, expected)
38 |
def main():
    # Entry point: run the resampler test suite.
    unittest.main()


if __name__ == "__main__":
    main()
45 |
46 |
47 |
--------------------------------------------------------------------------------
/specdal/tests/test_spectrum.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import numpy as np
4 | import pandas as pd
5 | import pandas.util.testing as pdt
6 | import unittest
7 |
8 | sys.path.insert(0, os.path.abspath("../../"))
9 | from specdal.spectrum import Spectrum
10 |
# NOTE(review): absolute, machine-specific fixture paths (duplicated from
# test_reader.py) -- test_spectrum_read only works where these files exist.
ASD_FNAME = "/media/mwestphall/2TB/Big_Bio/Big_Bio_2015/LeafLevel/07202015/ASD/_164_4_LESCA_1_T_1_00001.asd"
SIG_FNAME = "/media/mwestphall/2TB/Big_Bio/Big_Bio_2015/LeafLevel/07192015/SVC/136_1_ANDGE_1_B_1_000.sig"
SED_PSRPLS_FNAME = "/media/mwestphall/2TB/Big_Bio/Big_Bio_2015/LeafLevel/07102015/PSR+/149_1_ANDGE_1_M_1__00013.sed"
SED_PSM_FNAME = "/media/mwestphall/2TB/Big_Bio/Big_Bio_2015/LeafLevel/06302015/PSM/157_1_AGRSM_1_M_1__00205.sed"
PICO_FNAME = "/media/mwestphall/2TB/pdata/spectra/20170831-121053_untitled_bat0000__0000.pico.light"
16 |
class spectrumTests(unittest.TestCase):
    """Tests for the Spectrum container."""

    def setUp(self):
        pass

    def test_spectrum_read(self):
        """Spectra can be constructed directly from files of each format
        (smoke test: only checks that construction does not raise)."""
        s1 = Spectrum(name='s1',
                      filepath=ASD_FNAME)
        s2 = Spectrum(name='s2',
                      filepath=SED_PSM_FNAME)
        s3 = Spectrum(name='s3',
                      filepath=SIG_FNAME)
        s4 = Spectrum(name='s4',
                      filepath=PICO_FNAME)

    def test_spectrum_set_measurement(self):
        """A measurement Series passed to the constructor is stored
        unchanged on .measurement.

        BUG FIX: previously built the spectra but asserted nothing, so
        the test could never fail.
        """
        measurement1 = pd.Series([1, 2, 3, 4], index=pd.Index([1, 2, 3, 4], name='wavelength'),
                                 name='pct_reflect')
        measurement2 = pd.Series([3, 0, 1], index=pd.Index([2, 3, 4], name='wavelength'),
                                 name='pct_reflect')
        s1 = Spectrum(name='s1', measurement=measurement1)
        s2 = Spectrum(name='s2', measurement=measurement2)
        pdt.assert_series_equal(s1.measurement, measurement1)
        pdt.assert_series_equal(s2.measurement, measurement2)
def main():
    # Run the spectrum test suite through unittest's CLI.
    unittest.main()


if __name__ == "__main__":
    main()
43 |
--------------------------------------------------------------------------------
/specdal/tests/test_stitcher.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import numpy as np
4 | import pandas as pd
5 | import pandas.util.testing as pdt
6 | import unittest
7 |
8 | sys.path.insert(0, os.path.abspath("../../"))
9 | from specdal.spectrum import Spectrum
10 | from specdal.operator import stitch
11 |
class stitcherTests(unittest.TestCase):
    """Tests that stitching averages measurements at duplicated wavelengths."""

    def setUp(self):
        pass

    def test_operator_stitcher_mean(self):
        # Wavelength 3 appears twice; stitch() should average 300 and 400.
        raw = pd.Series([100, 200, 300, 400, 500],
                        index=pd.Index([1, 2, 3, 3, 4], name='wavelength'),
                        name='pct_reflect')
        expected = pd.Series([100, 200, 350, 500],
                             index=pd.Index([1, 2, 3, 4], name='wavelength'),
                             name='pct_reflect')
        pdt.assert_series_equal(stitch(raw), expected)

    def test_spectrum_stitcher_mean(self):
        # Same data wrapped in a Spectrum; stitch() mutates .measurement.
        spectrum = Spectrum(
            name='s1',
            measurement=pd.Series([100, 200, 300, 400, 500],
                                  index=pd.Index([1, 2, 3, 3, 4],
                                                 name='wavelength'),
                                  name='pct_reflect'))
        expected = pd.Series([100, 200, 350, 500],
                             index=pd.Index([1, 2, 3, 4], name='wavelength'),
                             name='pct_reflect')
        spectrum.stitch()
        pdt.assert_series_equal(spectrum.measurement, expected)
38 |
def main():
    # Entry point: run the stitcher test suite.
    unittest.main()


if __name__ == "__main__":
    main()
44 |
--------------------------------------------------------------------------------