├── .gitignore ├── LICENSE ├── README.md ├── deck.yaml ├── main.py ├── pydictoolkit ├── __init__.py ├── data_mods │ ├── __init__.py │ └── mods.py ├── deck │ ├── __init__.py │ └── data.py ├── dic │ ├── __init__.py │ ├── data.py │ └── measurements.py └── plot │ ├── __init__.py │ ├── plotter.py │ └── shiftedColorMap.py └── requirements.txt /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Ilyass T 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pydictoolkit 2 | 3 | Toolkit to further analyze 2D and 3D Digital Image Correlation results. The current work focuses on strain evolution at the surface of growing biological materials. 4 | 5 | # Main features 6 | 7 | - [x] Compute the difference between consecutive images (the "delta") for the available fields (displacement and strain fields) 8 | - [x] Ability to divide the AOI into equal rectangular areas of prescribed size and to plot heatmaps of the "delta" for any field 9 | - [x] Compute the minimal and maximal values of a field for a set of CSV results (good values to know if we want to have a single scale for all the provided CSV files) 10 | - [x] Contour plot for the displacement or strain fields with automatically scaled color bar 11 | - [x] Streamline plots of the `U` and `V` displacement field 12 | - [ ] Streamline plots of the first or second principal strain fields 13 | - [ ] Allow the user to be able to change the name of the spatial variables (`x` instead of `X` in the case of VIC2D since there is no `X (mm)` by default) 14 | - [ ] Increase the quality (dpi) of the plots, or even better turn it into a deck variable 15 | - [x] Add a scale (mm/px) to the `Report.csv` 16 | - [ ] Add a scale bar to all the plots 17 | - [ ] Make it possible to create a gif out of any kind of plots, not only heatmaps 18 | 19 | 20 | # Quickstart 21 | 22 | If you know what you're doing, then this section is for you. Otherwise, you should go to the `Getting Started` section. 23 | 24 | #### Linux: 25 | 26 | ``` 27 | sudo apt install python3-pip python3-venv 28 | git clone https://github.com/ilyasst/pydictoolkit.git 29 | cd pydictoolkit 30 | python3 -m venv .env 31 | source .env/bin/activate 32 | pip install -r requirements.txt 33 | python main.py -h 34 | ``` 35 | 36 | Basic usage: 37 | ``` 38 | python main.py -d "./deck.yaml" 39 | ``` 40 | 41 | # Getting started 42 | 43 | ## Installation 44 | 45 | The following pieces of software are required to run pydictoolkit: 46 | 47 | * Python 3.6 or higher 48 | * python3-pip 49 | * python3-venv 50 | 51 | The list of necessary python packages is provided in the `requirements.txt` file and can be installed using pip: 52 | 53 | ``` 54 | pip install -r requirements.txt 55 | ``` 56 | 57 | We recommend that you create a virtual environment before installing these packages using the python3-venv software: 58 | 59 | ``` 60 | python3 -m venv .env 61 | ``` 62 | 63 | You can then load your virtual environment using: 64 | ``` 65 | source .env/bin/activate 66 | ``` 67 | 68 | In order to exit it, use the `deactivate` command. 69 | 70 | ## Usage 71 | 72 | In order to use the code, you will need to provide: 73 | 74 | * A valid `deck.yaml` file (the content of this file is further detailed in the `Input description` section) 75 | * Your VIC3D CSV grid outputs (`.csv`) and stereo-images (`.tiff`) in the folder specified in the `deck.yaml` file 76 | 77 | You can then run pydictoolkit using: 78 | ``` 79 | python main.py -d "PATH_TO_DECK" 80 | ``` 81 | where `PATH_TO_DECK` is the path to your deck.yaml file. 
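For illustration, the data folder could look like the listing below before a run. The file names here are made up for this example; `DIC_reader` simply picks up every `.csv` file in the folder, and the plotting routines then look for a `.tif` stereo-image with the same base name next to each CSV file:

```
pydictoolkit/dummy_data/
├── specimen_0000.csv
├── specimen_0000.tif
├── specimen_0001.csv
└── specimen_0001.tif
```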
82 | 
83 | A short help/reminder can be accessed using:
84 | ```
85 | python main.py -h
86 | ```
87 | 
88 | # Input description
89 | 
90 | ## deck.yaml
91 | 
92 | Main structure of the `deck.yaml` file (this is the `deck.yaml` shipped at the root of the repository):
93 | 
94 | ```yaml
95 | Data:
96 |   Folder: ./pydictoolkit/dummy_data/
97 | 
98 | Plots:
99 |   Target Plot: e1
100 | 
101 |   Heatmaps:
102 |     Plot_it: false
103 |     Region:
104 |       i: 200
105 |       j: 200
106 |     Gif_it: false
107 | 
108 |   Contour Plots:
109 |     Linear:
110 |       Plot_it: false
111 |       Gif_it: false
112 |     Log:
113 |       Plot_it: true
114 |       Gif_it: true
115 | 
116 |   Quiver:
117 |     Plot_it: false
118 | 
119 |   Streamplots:
120 |     Plot_it: false
121 | 
122 |   Incremental Contour:
123 |     Plot_it: false
124 |     Target Plot: e1_delta
125 | ```
126 | 
127 | The remainder of this section provides detailed explanations for each section.
128 | 
129 | #### Data
130 | 
131 | ```yaml
132 | Data:
133 |   Folder: ./pydictoolkit/dummy_data/
134 | ```
135 | 
136 | The Data section contains a single value: the path of the folder that contains your VIC3D CSV grid data and stereo-images. It can be an absolute path, or a path relative to the main.py file.
137 | 
138 | 
139 | ## Your VIC3D grid data
140 | 
141 | A few screenshots will be added here to show exactly which CSV files are expected.
142 | 
143 | ## Examples
144 | 
145 | An example:
146 | 
147 | * Description of the example data and credit where it's due
148 | 
149 | ## Contribute?
150 | 
151 | Maybe you need something for your own project, or maybe you found a mistake or something that could be improved and feel like helping. In any case, we will be happy to hear from you: please open an Issue or a Pull Request (we welcome those!).
152 | 
153 | 
154 | # Disclaimer
155 | 
156 | This software is for educational and research purposes only. Use it at your own risk.
157 | 
--------------------------------------------------------------------------------
/deck.yaml:
--------------------------------------------------------------------------------
1 | Data:
2 |   Folder: ./pydictoolkit/dummy_data/
3 | 
4 | Plots:
5 |   Target Plot: e1
6 | 
7 |   Heatmaps:
8 |     Plot_it: false
9 |     Region:
10 |       i: 200
11 |       j: 200
12 |     Gif_it: false
13 | 
14 |   Contour Plots:
15 |     Linear:
16 |       Plot_it: false
17 |       Gif_it: false
18 |     Log:
19 |       Plot_it: true
20 |       Gif_it: true
21 | 
22 |   Quiver:
23 |     Plot_it: false
24 | 
25 |   Streamplots:
26 |     Plot_it: false
27 | 
28 |   Incremental Contour:
29 |     Plot_it: false
30 |     Target Plot: e1_delta
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | from pydictoolkit import *
2 | import argparse
3 | import sys
4 | 
5 | if __name__ == "__main__":
6 |     parser = argparse.ArgumentParser(description="python main.py -d './mydata/deck.yaml'")
7 | 
8 |     parser.add_argument("-d", "--mydeck",
9 |                         action="store",
10 |                         dest="deck",
11 |                         type=str,
12 |                         help="provide the path to your deck file (please see README.md)",
13 |                         default="./deck.yaml",
14 |                         required=True)
15 | 
16 |     args = parser.parse_args()
17 | 
18 |     if args.deck:
19 |         # Make sure the deck file can actually be opened before going any further.
20 |         try:
21 |             f = open(args.deck)
22 |         except IOError:
23 |             print("The provided path does not seem to exist.")
24 |             sys.exit(1)
25 |         else:
26 |             f.close()
27 | 
28 |     deck = Deck(args.deck)
29 | 
30 |     dic_data = DIC_reader(deck.data_folder)
31 |     dic_report = DIC_measurements(dic_data, deck)
32 | 
33 |     data_modes = DataMods(dic_data.dataframe, deck)
34 | 
35 |     plott = Plotter(
36 |         dic_data,
37 |         deck,
38 |         data_modes,
39 |         plot_deltas = False,
40 |     )
--------------------------------------------------------------------------------
/pydictoolkit/__init__.py:
--------------------------------------------------------------------------------
1 | from .deck import *
2 | from .dic import *
3 | from .plot import *
4 | from .data_mods import *
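For reference, the pipeline that `main.py` wires together can also be driven directly from Python. This is only a minimal sketch that mirrors `main.py`; the deck path below is just an example:

```python
from pydictoolkit import Deck, DIC_reader, DIC_measurements, DataMods, Plotter

deck = Deck("./deck.yaml")                       # parse and validate the YAML deck
dic_data = DIC_reader(deck.data_folder)          # load and clean the VIC3D CSV exports
report = DIC_measurements(dic_data, deck)        # write AOI, resolution and max-strain values to plots/Report.csv
data_modes = DataMods(dic_data.dataframe, deck)  # deltas, optional grids and global color-scale bounds
Plotter(dic_data, deck, data_modes, plot_deltas=False)  # produce the plots requested in the deck
```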
-------------------------------------------------------------------------------- /pydictoolkit/data_mods/__init__.py: -------------------------------------------------------------------------------- 1 | from .mods import * -------------------------------------------------------------------------------- /pydictoolkit/data_mods/mods.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | 4 | class DataMods(): 5 | def __init__(self, dfs, deck): 6 | self.compute_deltas(dfs) 7 | 8 | plot_heatmaps = deck.doc["Plots"]["Heatmaps"]["Plot_it"] 9 | if plot_heatmaps.lower() == "true": 10 | self.create_grids(dfs, deck) 11 | self.group_dfs(dfs, deck) 12 | 13 | self.compute_relative_errors(dfs) 14 | self.compute_shifted_cmap(dfs, deck) 15 | 16 | # Adds a grid to the data 17 | def create_grids(self, dfs, deck): 18 | grid_x = int(deck.heatmaps_sample_size["i"]) 19 | grid_y = int(deck.heatmaps_sample_size["j"]) 20 | for df in dfs: 21 | x = df["x"] 22 | y = df["y"] 23 | df['region_x']= x//grid_x 24 | df['region_y'] = y//grid_y 25 | 26 | 27 | # Computes the delta between consecutive images 28 | def compute_deltas(self, dfs): 29 | for index, df in enumerate(dfs): 30 | for column in df: 31 | if index == 0: 32 | pass 33 | else: 34 | try: 35 | df[column+"_delta"] = df[column]-dfs[0][column] #index-1 36 | except KeyError: 37 | pass 38 | 39 | def compute_relative_errors(self, dfs): 40 | for index, df in enumerate(dfs): 41 | if index == 0: 42 | pass 43 | else: 44 | for column in df: 45 | try: 46 | df[column+"_delta_relative"] = 100*(df[column+"_delta"].divide(df[column].max())) 47 | except KeyError: 48 | pass 49 | 50 | #group dataframes based on regions 51 | def group_dfs(self, dfs, deck): 52 | grouped = [] 53 | f = lambda x: x.mean() 54 | for index, df in enumerate(dfs): 55 | if index == 0: 56 | pass 57 | else: 58 | df_grouped = df.groupby(["region_x", "region_y"]).apply(f) 59 | grouped.append(df_grouped) 60 | 61 | heat_min = min([min(df[deck.plot_inccontour_target]) for df in grouped]) 62 | heat_max = max([max(df[deck.plot_inccontour_target]) for df in grouped]) 63 | self.scale_min = heat_min 64 | self.scale_max = heat_max 65 | self.grouped = grouped 66 | 67 | def compute_shifted_cmap(self, dfs, deck): 68 | vmax_0 = 0. 69 | vmin_0 = 0. 
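        # Scan every frame for the global minimum and maximum of the target field so that
        # all contour plots can share a single color scale (used as vmin/vmax by the plotter).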
70 | for df in dfs: 71 | if df[deck.doc["Plots"]['Target Plot']].max() > vmax_0: 72 | vmax_0 = df[deck.doc["Plots"]['Target Plot']].max() 73 | elif df[deck.doc["Plots"]['Target Plot']].min() < vmin_0: 74 | vmin_0 = df[deck.doc["Plots"]['Target Plot']].min() 75 | else: 76 | pass 77 | self.vmin_0 = vmin_0 78 | self.vmax_0 = vmax_0 79 | 80 | 81 | -------------------------------------------------------------------------------- /pydictoolkit/deck/__init__.py: -------------------------------------------------------------------------------- 1 | from .data import * -------------------------------------------------------------------------------- /pydictoolkit/deck/data.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | #@author: ilyass.tabiai@polymtl.ca 3 | # Heavily inspired from 4 | # https://raw.githubusercontent.com/lm2-poly/PeriPyDIC/master/peripydic/IO/deck.py 5 | import yaml, sys 6 | import os.path 7 | 8 | class Deck(): 9 | 10 | def __init__(self, inputhpath): 11 | if not os.path.exists(inputhpath): 12 | print("File " + inputhpath) 13 | sys.exit(1) 14 | else: 15 | with open(inputhpath,'r') as f: 16 | ## Container of the tags parsed from the yaml file 17 | self.doc = yaml.load(f, Loader=yaml.BaseLoader) 18 | 19 | if not "Data" in self.doc: 20 | print ("YamlTagError: Data tag is a mandatory tag") 21 | sys.exit(1) 22 | else: 23 | if not "Folder" in self.doc["Data"]: 24 | print ("YamlTagError: Folder within Data tag is a mandatory tag") 25 | sys.exit(1) 26 | else: 27 | self.data_folder = self.doc["Data"]["Folder"] 28 | 29 | if not "Plots" in self.doc: 30 | print("YamlTagError: Plots tag is a mandatory tag") 31 | sys.exit(1) 32 | else: 33 | if not "Target Plot" in self.doc["Plots"]: 34 | print("YamlTagError: Target Plot within Plots tag is a mandatory tag") 35 | sys.exit(1) 36 | else: 37 | self.targetplot = self.doc["Plots"]["Target Plot"] 38 | 39 | if not "Heatmaps" in self.doc["Plots"]: 40 | print("YamlTagError: Heatmaps within Plots tag is a mandatory tag") 41 | sys.exit(1) 42 | else: 43 | if not "Plot_it" in self.doc["Plots"]["Heatmaps"]: 44 | print("YamlTagError: Plot_it within Plots-Heatmaps tag is a mandatory tag") 45 | sys.exit(1) 46 | else: 47 | self.plot_heatmaps = self.doc["Plots"]["Heatmaps"]["Plot_it"] 48 | if self.plot_heatmaps == "true": 49 | if not "Region" in self.doc["Plots"]["Heatmaps"]: 50 | print("YamlTagError: Region within Plots-Heatmaps tag is a mandatory tag") 51 | sys.exit(1) 52 | else: 53 | self.heatmaps_sample_size = self.doc["Plots"]["Heatmaps"]["Region"] 54 | if not "Gif_it" in self.doc["Plots"]["Heatmaps"]: 55 | print("YamlTagWarning: Gif_it tag within Plots-Heatmaps tag was not provided and is not mandatory tag:") 56 | print("The default value `Gif_it = False` was chosen.") 57 | self.doc["Plots"]["Heatmaps"]["Gif_it"] = False 58 | 59 | if not "Contour Plots" in self.doc["Plots"]: 60 | print("YamlTagError: Contour Plots within Plots tag is a mandatory tag") 61 | sys.exit(1) 62 | else: 63 | if not "Linear" in self.doc["Plots"]["Contour Plots"]: 64 | print ("YamlTagError: Linear within Plots-Contour Plots tag is a mandatory tag") 65 | sys.exit(1) 66 | else: 67 | if not "Plot_it" in self.doc["Plots"]["Contour Plots"]["Linear"]: 68 | print ("YamlTagError: Plot_it within Plots-Contour Plots-Linear tag is a mandatory tag") 69 | sys.exit(1) 70 | if not "Gif_it" in self.doc["Plots"]["Contour Plots"]["Linear"]: 71 | print("YamlTagWarning: Plot_it within Plots-Contour Plots-Linear tag is a mandatory tag") 72 | 
print("The default value `Gif_it = False` was chosen.") 73 | self.doc["Plots"]["Contour Plots"]["Linear"]["Gif_it"] = False 74 | 75 | if not "Log" in self.doc["Plots"]["Contour Plots"]: 76 | print ("YamlTagError: Log within Plots-Contour Plots tag is a mandatory tag") 77 | sys.exit(1) 78 | else: 79 | if not "Plot_it" in self.doc["Plots"]["Contour Plots"]["Log"]: 80 | print ("YamlTagError: Plot_it within Plots-Contour Plots-Log tag is a mandatory tag") 81 | sys.exit(1) 82 | 83 | if not "Quiver" in self.doc["Plots"]: 84 | print ("YamlTagError: Quiver within Plots tag is a mandatory tag") 85 | sys.exit(1) 86 | else: 87 | if not "Plot_it" in self.doc["Plots"]["Quiver"]: 88 | print ("YamlTagError: Plot_it within Plots-Quiver tag is a mandatory tag") 89 | sys.exit(1) 90 | 91 | if not "Streamplots" in self.doc["Plots"]: 92 | print ("YamlTagError: Streamplots within Plots tag is a mandatory tag") 93 | sys.exit(1) 94 | else: 95 | if not "Plot_it" in self.doc["Plots"]["Streamplots"]: 96 | print ("YamlTagError: Plot_it within Plots-Streamplots tag is a mandatory tag") 97 | sys.exit(1) 98 | 99 | if not "Incremental Contour" in self.doc["Plots"]: 100 | print ("YamlTagWarning: Incremental Contours within Plots tag is a mandatory tag") 101 | sys.exit(1) 102 | else: 103 | if not "Plot_it" in self.doc["Plots"]["Incremental Contour"]: 104 | print ("YamlTagError: Streamplots within Plots tag is a mandatory tag") 105 | sys.exit(1) 106 | else: 107 | self.plot_inccontour = self.doc["Plots"]["Incremental Contour"]["Plot_it"] 108 | if not "Target Plot" in self.doc["Plots"]["Incremental Contour"]: 109 | print ("YamlTagError: Target Plot within Plots-Incremental Concours tag is a mandatory tag") 110 | sys.exit(1) 111 | else: 112 | self.plot_inccontour_target = self.doc["Plots"]["Incremental Contour"]["Target Plot"] -------------------------------------------------------------------------------- /pydictoolkit/dic/__init__.py: -------------------------------------------------------------------------------- 1 | from .data import * 2 | from .measurements import * -------------------------------------------------------------------------------- /pydictoolkit/dic/data.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | #@author: ilyass.tabiai@polymtl.ca 3 | import csv, os 4 | import glob 5 | import pandas as pd 6 | 7 | class DIC_reader(): 8 | 9 | def __init__(self, relpath): 10 | #cwd = os.getcwd() 11 | os.chdir(relpath) 12 | 13 | dic_paths = glob.glob('*.{}'.format("csv")) 14 | dic_paths.sort() 15 | self.dic_paths = dic_paths 16 | 17 | self.preprocess_csv() 18 | self.load_data() 19 | 20 | def load_data(self): 21 | dataframe = [] 22 | for csv_name in self.dic_paths: 23 | pd_data = pd.read_csv(csv_name) 24 | pd_data.columns = pd_data.columns.str.strip() 25 | print("File ", csv_name) 26 | print("Your keys are: ", pd_data.columns.str.strip()) 27 | dataframe.append( pd_data ) 28 | self.dataframe = dataframe 29 | 30 | def preprocess_csv(self): 31 | new_dic_paths = [] 32 | for csv_name in self.dic_paths: 33 | new_csv = [] 34 | if "_clean.csv" in csv_name: 35 | pass 36 | else: 37 | with open( csv_name ) as csv_file: 38 | csv_reader = csv.reader(csv_file, delimiter=',') 39 | line = 0 40 | for row in csv_reader: 41 | new_row = [] 42 | if line == 0: 43 | for header in row: 44 | new_row.append( header.strip('"').strip(" ").strip('"') ) 45 | new_csv.append( new_row ) 46 | line += 1 47 | else: 48 | new_csv.append( row ) 49 | line += 1 50 | 51 | new_path = 
csv_name[:-4]+"_clean.csv"
52 |                 new_dic_paths.append( new_path )
53 |                 with open( new_path, "w" ) as new_csv_file:
54 |                     wr = csv.writer(new_csv_file)
55 |                     wr.writerows(new_csv)
56 |         self.dic_paths = new_dic_paths
57 | 
--------------------------------------------------------------------------------
/pydictoolkit/dic/measurements.py:
--------------------------------------------------------------------------------
1 | import csv, os
2 | 
3 | class DIC_measurements:
4 |     def __init__(self, dfs, deck):
5 |         self.report = []
6 |         for index, df in enumerate(dfs.dataframe):
7 |             temp = {}
8 |             temp['filename'] = dfs.dic_paths[index]
9 |             temp['index'] = index
10 |             self.report.append(temp)
11 | 
12 |         for index, df in enumerate(dfs.dataframe):
13 |             dfs.dataframe[index] = df.astype('float64')
14 | 
15 |         self.compute_measurements(dfs.dataframe)
16 |         self.write_report(dfs.dataframe, deck)
17 | 
18 | 
19 |     def compute_measurements(self,dfs):
20 |         for index, df in enumerate(dfs):
21 |             # Keep only the points where the correlation converged (sigma == -1 flags invalid points)
22 |             encr = df['sigma'] != -1.0
23 |             df_encr = df[encr]
24 | 
25 |             # Area of the specimen's AOI: number of valid data points (px^2)
26 |             AOI = len(df_encr['x'])
27 | 
28 |             # Pixel size: length in mm / number of pixels
29 |             resolution_x = (df_encr['X'].max()-df_encr['X'].min()) / (df_encr['x'].max()-df_encr['x'].min())
30 | 
31 |             resolution_y = (df_encr['Y'].max()-df_encr['Y'].min()) / (df_encr['y'].max()-df_encr['y'].min())
32 | 
33 |             # Maximum of each strain field
34 |             max_exx = max(df_encr['exx'].values)
35 |             max_eyy = max(df_encr['eyy'].values)
36 |             max_e1 = max(df_encr['e1'].values)
37 |             max_e2 = max(df_encr['e2'].values)
38 | 
39 |             # Store the results in the report dictionary
40 |             self.report[index]['AOI_px2'] = AOI
41 |             self.report[index]['resolution_x'] = resolution_x
42 |             self.report[index]['resolution_y'] = resolution_y
43 |             self.report[index]['max_exx'] = max_exx
44 |             self.report[index]['max_eyy'] = max_eyy
45 |             self.report[index]['max_e1'] = max_e1
46 |             self.report[index]['max_e2'] = max_e2
47 | 
48 | 
49 |     def write_report(self, dfs, deck):
50 |         csv_columns = []
51 |         # Make sure the output folder exists before writing the report
52 |         plot_dir = "./plots/"
53 |         if not os.path.isdir(plot_dir):
54 |             os.makedirs(plot_dir)
55 |         csv_file = plot_dir + "Report.csv"
56 |         for key in self.report[0]:
57 |             csv_columns.append(key)
58 |         try:
59 |             with open(csv_file, 'w') as csvfile:
60 |                 writer = csv.DictWriter(csvfile, fieldnames=csv_columns)
61 |                 writer.writeheader()
62 |                 for data in self.report:
63 |                     writer.writerow(data)
64 |         except IOError:
65 |             print("I/O error while writing " + csv_file)
66 | 
67 | 
--------------------------------------------------------------------------------
/pydictoolkit/plot/__init__.py:
--------------------------------------------------------------------------------
1 | from .plotter import *
2 | 
--------------------------------------------------------------------------------
/pydictoolkit/plot/plotter.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import matplotlib
3 | from matplotlib import animation
4 | import seaborn as sns
5 | import numpy as np
6 | import cmocean
7 | import os
8 | from mpl_toolkits.axes_grid1 import AxesGrid
9 | from mpl_toolkits.axes_grid1 import make_axes_locatable
10 | import scipy
11 | import scipy.ndimage
12 | from scipy.stats import norm
13 | import matplotlib.image as mpimg
14 | 
15 | class Plotter():
16 | 
17 |     def __init__(self, dic_data, deck, data_modes,
18 |                  plot_deltas = False):
19 | 
20 |         self.zz = deck.targetplot
21 |         plot_contour_linear = deck.doc["Plots"]["Contour Plots"]["Linear"]["Plot_it"]
22 |         plot_contour_log = deck.doc["Plots"]["Contour Plots"]["Log"]["Plot_it"]
23 |         plot_quiver = 
deck.doc["Plots"]["Quiver"]["Plot_it"] 24 | plot_streamplots = deck.doc["Plots"]["Streamplots"]["Plot_it"] 25 | gif_heatmaps = deck.doc["Plots"]["Heatmaps"]["Gif_it"] 26 | gif_contourlin = deck.doc["Plots"]["Contour Plots"]["Linear"]["Gif_it"] 27 | gif_contourlog = deck.doc["Plots"]["Contour Plots"]["Log"]["Gif_it"] 28 | 29 | for self.index, dic_image in enumerate(dic_data.dataframe): 30 | index = self.index 31 | if plot_contour_linear.lower() == "true": 32 | self.create_contourplot_linear(dic_data.dic_paths[index], dic_image, deck, data_modes) 33 | if plot_contour_log.lower() == "true": 34 | self.create_contourplot_log(dic_data.dic_paths[index], dic_image, deck, data_modes) 35 | if plot_quiver.lower() == "true": 36 | self.create_quiver(dic_data.dic_paths[index], dic_image, deck) 37 | if plot_streamplots.lower() == "true": 38 | self.create_streamplot(dic_data.dic_paths[index], dic_image, deck) 39 | 40 | # Do we really need this ? 41 | self.plot_dataset(dic_data.dic_paths[index], dic_image, deck) 42 | if plot_deltas == True: 43 | if index == 0: 44 | pass 45 | else: 46 | self.plot_deltas(dic_data.dic_paths[index], dic_image, deck) 47 | 48 | if deck.plot_heatmaps.lower() == "true": 49 | for index2, gdf in enumerate(data_modes.grouped): 50 | if index == index2: 51 | self.build_deltaheatmaps(dic_data.dic_paths[index], gdf, deck, data_modes.scale_min, data_modes.scale_max) 52 | 53 | if gif_heatmaps == "true": 54 | self.create_heatmaps_gif(data_modes.grouped, deck, data_modes.scale_min, data_modes.scale_max) 55 | 56 | if gif_contourlin.lower() == "true": 57 | self.create_contourplotlin_gif(dic_data.dataframe, deck, data_modes, dic_data.dic_paths) 58 | 59 | if gif_contourlog.lower() == "true": 60 | self.create_contourplotlog_gif(dic_data.dataframe, deck, data_modes, dic_data.dic_paths) 61 | 62 | 63 | def filter_NaN_Matrix(self, U, sigVal): 64 | #Fonction pour limiter la propagation des NaNs dans le filtre gaussien lissant l'image 65 | V=U.copy() 66 | V[np.isnan(U)]=0 67 | VV=scipy.ndimage.gaussian_filter(V,sigma=sigVal) 68 | 69 | W=0*U.copy()+1 70 | W[np.isnan(U)]=0 71 | WW=scipy.ndimage.gaussian_filter(W,sigma=sigVal) 72 | 73 | np.seterr(divide='ignore', invalid='ignore') #enleve le pb de division /0 74 | Z=VV/WW 75 | return Z 76 | 77 | def create_contourplot_log(self, file_name, df, deck, data_modes): 78 | x = list(sorted(set( df["x"].values ))) 79 | y = list(sorted(set( df["y"].values ))) 80 | 81 | img_name = file_name[0 : len(file_name) -10] + '.tif' 82 | img = plt.imread(img_name) 83 | fig, ax = plt.subplots(dpi=300,) 84 | ax.imshow(img, alpha = 1, cmap = 'gray') 85 | 86 | df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan 87 | e1 = np.array(df[deck.doc["Plots"]['Target Plot']].values) 88 | e1 = e1.reshape(len(y), len(x)) 89 | 90 | levels = np.sort(np.append( np.append( -np.logspace(0.1, abs(data_modes.vmin_0),10) , np.linspace(-0.01,0.01,5) ), np.logspace(0.1,data_modes.vmax_0,15))) 91 | ax.contour(x, y, e1, colors = 'k', linewidths = 0.5, levels = levels) 92 | pcm = ax.pcolormesh(x,y,e1,norm=matplotlib.colors.SymLogNorm(linthresh=0.001, linscale=0.1, vmin=data_modes.vmin_0, vmax=data_modes.vmax_0), 93 | cmap='plasma') 94 | fig.colorbar(pcm, ax=ax, extend = 'both') 95 | plt.title(deck.doc["Plots"]['Target Plot']+", "+str(self.index)) 96 | 97 | plot_dir = "./plots/" 98 | check_folder = os.path.isdir(plot_dir) 99 | if not check_folder: 100 | os.makedirs(plot_dir) 101 | plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"-contourplot-log"+".png") 102 | plt.close() 
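    # Same overlay as create_contourplot_log, but on a linear color scale: the target
    # field is drawn as filled contours on top of the matching .tif stereo-image.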
103 | 104 | def create_contourplot_linear(self, file_name, df, deck, data_modes): 105 | x = list(sorted(set( df["x"].values ))) 106 | y = list(sorted(set( df["y"].values ))) 107 | 108 | img_name = file_name[0 : len(file_name) -10] + '.tif' 109 | img = plt.imread(img_name) 110 | fig, ax = plt.subplots(dpi=300,) 111 | ax.imshow(img, alpha = 1, cmap = 'gray') 112 | 113 | df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan 114 | e1 = np.array(df[deck.doc["Plots"]['Target Plot']].values) 115 | e1 = e1.reshape(len(y), len(x)) 116 | levels = np.linspace(data_modes.vmin_0, data_modes.vmax_0,10) 117 | cs = plt.contourf(x, y, e1, origin = 'lower', extend = 'both', cmap = 'plasma', alpha = 0.5) 118 | plt.contour(x, y, e1, levels = levels, colors = 'k', linewidths = 0.5) 119 | fig.colorbar(cs) 120 | plt.title(deck.doc["Plots"]['Target Plot']+", "+str(self.index)) 121 | 122 | plot_dir = "./plots/" 123 | check_folder = os.path.isdir(plot_dir) 124 | if not check_folder: 125 | os.makedirs(plot_dir) 126 | plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"-contourplot-linear"+".png") 127 | plt.close() 128 | 129 | def create_quiver(self, file_name, df, deck): 130 | x = list(sorted(set( df["x"].values ))) 131 | y = list(sorted(set( df["y"].values ))) 132 | 133 | df.loc[df["sigma"] == -1, "gamma" ] = np.nan 134 | self.teta_ = np.array(df["gamma"].values) 135 | 136 | teta_1 = np.cos(self.teta_) 137 | self.teta_1 = teta_1.reshape(len(y), len(x)) 138 | 139 | teta_2 = np.sin(self.teta_) 140 | self.teta_2 = teta_2.reshape(len(y), len(x)) 141 | 142 | contour_ = np.array(df[self.zz].values) 143 | self.contour_ = contour_.reshape((len(y), len(x))) 144 | 145 | img_name = file_name[0 : len(file_name) -10] + '.tif' 146 | img = plt.imread(img_name) 147 | fig, ax = plt.subplots(dpi=300) 148 | ax.imshow(img, cmap = plt.get_cmap('gray'), alpha = 1) 149 | 150 | skip1 = ( slice(None, None, 20)) 151 | skip2 = ( slice(None, None, 20), slice(None, None,20) ) 152 | 153 | tf1 = self.filter_NaN_Matrix(np.array(self.teta_1),7) 154 | tf2 = self.filter_NaN_Matrix(np.array(self.teta_2),7) 155 | contourf = self.filter_NaN_Matrix(np.array(self.contour_),7) 156 | 157 | plt.quiver(np.array(x[skip1]),np.array(y[skip1]),tf1[skip2], tf2[skip2], contourf[skip2], cmap='plasma', scale = 50) 158 | plt.colorbar() 159 | plt.title(deck.doc["Plots"]['Target Plot']+", "+str(self.index)) 160 | 161 | plot_dir = "./plots/" 162 | check_folder = os.path.isdir(plot_dir) 163 | if not check_folder: 164 | os.makedirs(plot_dir) 165 | plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"-quiver"+".png") 166 | plt.close() 167 | 168 | def create_streamplot(self, file_name, df, deck): 169 | x = list(sorted(set( df["x"].values ))) 170 | y = list(sorted(set( df["y"].values ))) 171 | 172 | img_name = file_name[0 : len(file_name) -10] + '.tif' 173 | img = plt.imread(img_name) 174 | 175 | fig, ax = plt.subplots(dpi=300) 176 | ax.imshow(img, cmap = plt.get_cmap('gray'), alpha = 1) 177 | 178 | tf1 = self.filter_NaN_Matrix(np.array(self.teta_1),7) 179 | tf2 = self.filter_NaN_Matrix(np.array(self.teta_2),7) 180 | contourf = self.filter_NaN_Matrix(np.array(self.contour_),7) 181 | 182 | fig = plt.streamplot(np.array(x), np.array(y), tf1, tf2, 183 | color=contourf, 184 | linewidth=1, 185 | cmap='plasma', 186 | density=1.3, 187 | arrowsize=0.5) 188 | 189 | plt.title(deck.doc["Plots"]['Target Plot']+", "+str(self.index)) 190 | plt.colorbar() 191 | plot_dir = "./plots/" 192 | check_folder = os.path.isdir(plot_dir) 193 | if not check_folder: 
194 | os.makedirs(plot_dir) 195 | plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"-stream"+".png") 196 | plt.close() 197 | 198 | def plot_dataset(self, file_name, df, deck): 199 | df = df.sort_index(axis=1, level='"x"', ascending=False) 200 | x = list(sorted(set( df["x"].values ))) 201 | y = list(sorted(set( df["y"].values ))) 202 | 203 | df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan 204 | zv = 100*(df[deck.doc["Plots"]['Target Plot']].values) 205 | zv = zv.reshape((len(y), len(x))) 206 | 207 | fig = plt.contour(x, y, zv, levels=8, linewidths=0.4, colors="black") 208 | cs = plt.contourf(x, y, zv, origin = 'lower', extend = 'both', cmap = 'plasma', alpha = 0.5) 209 | cbar = plt.colorbar(cs) 210 | cbar.ax.set_xlabel('Strain (%)') 211 | 212 | plt.title(deck.doc["Plots"]['Target Plot']) 213 | plt.clabel(fig, inline=0.1, fontsize=5) 214 | plt.legend() 215 | 216 | plot_dir = "./plots/" 217 | check_folder = os.path.isdir(plot_dir) 218 | if not check_folder: 219 | os.makedirs(plot_dir) 220 | plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-3]+"_contour.png") 221 | plt.close() 222 | 223 | def plot_deltas(self, file_name, df, deck): 224 | df = df.sort_index(axis=1, level='"x"', ascending=False) 225 | x = list(sorted(set( df["x"].values ))) 226 | y = list(sorted(set( df["y"].values ))) 227 | 228 | df.loc[df["sigma"] == -1, deck.plot_inccontour_target ] = np.nan 229 | zv = 100*(df[deck.plot_inccontour_target].values) 230 | fig = plt.contour(x, y, zv, levels=8, linewidths=0.4, colors="black") 231 | cs = plt.contourf(x, y, zv, origin = 'lower', extend = 'both', cmap = 'plasma', alpha = 0.5) 232 | cbar = plt.colorbar(cs) 233 | cbar.ax.set_xlabel('Strain (%)') 234 | 235 | plt.title(deck.plot_inccontour_target) 236 | plt.clabel(fig, inline=0.1, fontsize=5) 237 | plt.legend() 238 | 239 | plot_dir = "./plots/" 240 | check_folder = os.path.isdir(plot_dir) 241 | if not check_folder: 242 | os.makedirs(plot_dir) 243 | plt.savefig("./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"_deltas"+".png") 244 | plt.close() 245 | 246 | def build_deltaheatmaps(self, file_name, df, deck, vmin, vmax): 247 | ''' 248 | Plots a heatmap for each image with delta variations over the x and y splitting regions 249 | df = pandas data frame with set index, one column and target values. 
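        vmin, vmax = global color-scale bounds (data_modes.scale_min and data_modes.scale_max) shared by every heatmap.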
250 | ''' 251 | df = df.pivot('region_y', 'region_x', deck.target) 252 | #df = df.sort_index(ascending=False) 253 | 254 | fig, ax = plt.subplots(figsize=(9,6)) 255 | sns.set() 256 | # bug of matplotlib 3.1 forces to manually set ylim to avoid cut-off top and bottom 257 | # might remove this later 258 | sns.heatmap(df, linewidths= .5, vmin = float(vmin), vmax = float(vmax), annot = True, annot_kws={"size": 9}, cmap = cmocean.cm.curl, ax = ax) 259 | ax.set_ylim(len(df), 0) 260 | plot_dir = "./plots/" 261 | check_folder = os.path.isdir(plot_dir) 262 | if not check_folder: 263 | os.makedirs(plot_dir) 264 | fig.savefig( "./plots/"+self.zz.strip('"')+"-"+file_name[:-4]+"_heatmap"+".png") 265 | plt.close() 266 | 267 | def create_heatmaps_gif(self, dfs, deck, vmin, vmax): 268 | #set base plotting space 269 | fig = plt.figure(figsize=(9,6)) 270 | 271 | # create iterator 272 | data_frames_iterator = iter(dfs) 273 | 274 | # set up formatting of the gif later 275 | writer='matplotlib.animation.PillowWriter' 276 | #'imagemagick' 277 | 278 | def update_frame(i): 279 | plt.clf() 280 | heatmap_data = next(data_frames_iterator) 281 | heatmap_data = heatmap_data.pivot('region_y', 'region_x', deck.doc["Plots"]["Incremental Contour"]["Target Plot"]) 282 | ax = sns.heatmap(heatmap_data, 283 | linewidths= 0, 284 | vmin = float(vmin), 285 | vmax = float(vmax), 286 | annot = True, 287 | annot_kws={"size": 9}, 288 | cmap = "YlGnBu", 289 | ) 290 | #need to manually set y_lim to avoi cropping of top and bottom cells 291 | ax.set_ylim(heatmap_data.shape[0], 0) 292 | 293 | animation.FuncAnimation(fig, update_frame, frames=len(dfs)-1, interval=400).save('./plots/heatmaps.gif', writer = writer) 294 | 295 | 296 | def create_contourplotlin_gif(self, dfs, deck, data_modes, filenames): 297 | #set base plotting space 298 | fig, ax = plt.subplots(dpi=200, figsize=(12,10)) 299 | x = list(sorted(set( dfs[0]["x"].values ))) 300 | y = list(sorted(set( dfs[0]["y"].values ))) 301 | 302 | # create iterator 303 | data_frames_iterator = iter(dfs) 304 | 305 | # set up formatting of the gif later 306 | writer='matplotlib.animation.PillowWriter' 307 | 308 | def update_frame_log(i): 309 | plt.clf() 310 | 311 | img_name = filenames[i][0 : len(filenames[i]) -10] + '.tif' 312 | img = plt.imread(img_name) 313 | plt.imshow(img, alpha = 1, cmap = 'gray') 314 | 315 | df = next(data_frames_iterator) 316 | 317 | df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan 318 | e1 = np.array(df[deck.doc["Plots"]['Target Plot']].values) 319 | e1 = e1.reshape(len(y), len(x)) 320 | 321 | levels = np.sort(np.linspace(data_modes.vmin_0, data_modes.vmax_0,20)) 322 | cont = plt.pcolormesh(x,y,e1,vmin=data_modes.vmin_0, vmax=data_modes.vmax_0,cmap='plasma') 323 | plt.contour(x, y, e1, levels = levels, colors = 'k', linewidths = 0.5) 324 | plt.colorbar(cont) 325 | 326 | return cont 327 | 328 | animation.FuncAnimation(fig, update_frame_log, frames=len(dfs)-1, interval=600).save('./plots/contourplotlin.gif', writer = writer) 329 | 330 | 331 | def create_contourplotlog_gif(self, dfs, deck, data_modes, filenames): 332 | #set base plotting space 333 | fig, ax = plt.subplots(dpi=92, figsize=(12,10)) 334 | x = list(sorted(set( dfs[0]["x"].values ))) 335 | y = list(sorted(set( dfs[0]["y"].values ))) 336 | 337 | # create iterator 338 | data_frames_iterator = iter(dfs) 339 | 340 | # set up formatting of the gif later 341 | writer='matplotlib.animation.PillowWriter' 342 | 343 | def update_frame_log(i): 344 | plt.clf() 345 | 346 | img_name = filenames[i][0 : 
len(filenames[i]) -10] + '.tif'
347 |             img = plt.imread(img_name)
348 |             plt.imshow(img, alpha = 1, cmap = 'gray')
349 | 
350 |             df = next(data_frames_iterator)
351 | 
352 |             df.loc[df["sigma"] == -1, deck.doc["Plots"]['Target Plot'] ] = np.nan
353 |             e1 = np.array(df[deck.doc["Plots"]['Target Plot']].values)
354 |             e1 = e1.reshape(len(y), len(x))
355 | 
356 |             levels = np.sort(np.append( np.append( -np.logspace(0.1, abs(data_modes.vmin_0),10) , np.linspace(-0.01,0.01,5) ), np.logspace(0.1,data_modes.vmax_0,15)))
357 |             cont = plt.pcolormesh(x,y,e1,norm=matplotlib.colors.SymLogNorm(linthresh=0.001, linscale=0.1, vmin=data_modes.vmin_0, vmax=data_modes.vmax_0), vmin=data_modes.vmin_0, vmax=data_modes.vmax_0,cmap='plasma')
358 |             plt.contour(x, y, e1, levels = levels, colors = 'k', linewidths = 0.5)
359 |             plt.colorbar(cont)
360 | 
361 |             return cont
362 | 
363 |         animation.FuncAnimation(fig, update_frame_log, frames=len(dfs)-1, interval=600).save('./plots/contourplotlog.gif', writer = writer)
364 | 
--------------------------------------------------------------------------------
/pydictoolkit/plot/shiftedColorMap.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib
3 | import matplotlib.pyplot as plt
4 | from mpl_toolkits.axes_grid1 import AxesGrid
5 | 
6 | def shiftedMap(cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'):
7 |     cdict = {
8 |         'red': [],
9 |         'green': [],
10 |         'blue': [],
11 |         'alpha': []
12 |     }
13 | 
14 |     # regular index to compute the colors
15 |     reg_index = np.linspace(start, stop, 257)
16 | 
17 |     # shifted index to match the data
18 |     shift_index = np.hstack([
19 |         np.linspace(0.0, midpoint, 128, endpoint=False),
20 |         np.linspace(midpoint, 1.0, 129, endpoint=True)
21 |     ])
22 | 
23 |     for ri, si in zip(reg_index, shift_index):
24 |         r, g, b, a = cmap(ri)
25 | 
26 |         cdict['red'].append((si, r, r))
27 |         cdict['green'].append((si, g, g))
28 |         cdict['blue'].append((si, b, b))
29 |         cdict['alpha'].append((si, a, a))
30 | 
31 |     newcmap = matplotlib.colors.LinearSegmentedColormap(name, cdict)
32 |     plt.register_cmap(cmap=newcmap)
33 | 
34 |     return newcmap
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pyyaml
2 | numpy
3 | scipy
4 | pandas
5 | matplotlib
6 | seaborn
7 | Pillow
8 | cmocean
9 | 
--------------------------------------------------------------------------------