├── .github └── workflows │ └── main.yml ├── .gitignore ├── LICENSE ├── README.md ├── logo.icns ├── matlab └── plot_matlab.m ├── misclib ├── __init__.py ├── latex2utf.py └── misc.py ├── postproclib ├── OF_field_parser.py ├── OF_mesh_parser.py ├── VTKfields.py ├── VTKpostproc.py ├── VTKrawdata.py ├── __init__.py ├── allpostproc.py ├── build_vmd_polymer_psf.py ├── cfdfields.py ├── cfdpostproc.py ├── cfdrawdata.py ├── channelflowfields.py ├── channelflowpostproc.py ├── channelflowrawdata.py ├── cplfields.py ├── cplpostproc.py ├── exceptions.py ├── field.py ├── headerdata.py ├── lammpsfields.py ├── lammpspostproc.py ├── lammpsrawdata.py ├── mdfields.py ├── mdmacroprops.py ├── mdmols.py ├── mdpostproc.py ├── mdrawdata.py ├── openfoamfields.py ├── openfoampostproc.py ├── openfoamrawdata.py ├── postproc.py ├── pplexceptions.py ├── psiboilrawdata.py ├── rawdata.py ├── serial_cfdfields.py ├── serial_cfdpostproc.py ├── serial_cfdrawdata.py ├── visualiser │ ├── __init__.py │ ├── choosefield.py │ ├── data.csv │ ├── directory.py │ ├── logo.gif │ ├── mainframe.py │ ├── mathtext_wx.py │ ├── minimalscript.py │ ├── minispinctrl.py │ ├── plot.py │ ├── script.py │ ├── sliders.py │ └── visuals.py ├── vmd_reformat.f90 ├── vmd_reformat.py ├── vmd_tcl │ ├── .svn │ │ ├── all-wcprops │ │ ├── entries │ │ └── text-base │ │ │ ├── color_scale_bar_new_test.tcl.svn-base │ │ │ ├── custom_colorscale.tcl.svn-base │ │ │ ├── load_polymer.vmd.svn-base │ │ │ ├── plot_MD_field.vmd.svn-base │ │ │ └── read_file.tcl.svn-base │ ├── color_scale_bar_new_test.tcl │ ├── custom_colorscale.tcl │ ├── load_miepsf.vmd │ ├── load_polymer.vmd │ ├── plot_MD_field.vmd │ └── read_file.tcl ├── vmdfields.py └── writecolormap.py ├── pyDataView.py ├── pyDataView.spec ├── pyDataView_apple1.spec ├── pyDataView_apple2.spec ├── pyDataView_screenshot.png ├── pyDataview_final.pdf ├── requirements.txt ├── run_vmd.py └── setup.py /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Package application with Pyinstaller for Windows and pypi 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | 11 | build-pyPI: 12 | name: Build and publish to PyPI 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@master 17 | - name: Set up Python 3.9 18 | uses: actions/setup-python@v3 19 | with: 20 | python-version: "3.9" 21 | 22 | - name: Install setuptools 23 | run: 24 | python -m pip install --upgrade pip setuptools wheel 25 | 26 | - name: Build an sdist 27 | run: 28 | python setup.py sdist 29 | 30 | - name: Publish distribution to PyPI 31 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') 32 | uses: pypa/gh-action-pypi-publish@release/v1 33 | with: 34 | password: ${{ secrets.PYPI_API_TOKEN }} 35 | 36 | build-Windows-exe: 37 | 38 | runs-on: ubuntu-20.04 39 | 40 | steps: 41 | - uses: actions/checkout@v2 42 | - name: Set up Python 43 | uses: actions/setup-python@v4 44 | with: 45 | python-version: 3.9 46 | 47 | - name: Package Application for Windows 48 | uses: JackMcKew/pyinstaller-action-windows@main 49 | with: 50 | path: ./ 51 | 52 | - uses: actions/upload-artifact@v2 53 | with: 54 | name: pyDataView.exe 55 | path: dist/windows 56 | 57 | build-mac-linux: 58 | 59 | strategy: 60 | fail-fast: false 61 | matrix: 62 | #os: [macos-latest, macos-11, macos-10.15] 63 | os: [ubuntu-20.04, ubuntu-22.04]#, macos-11, ubuntu-18.04, ubuntu-22.04] 64 | python-version: ['3.9']#, '3.8', '3.9', '3.10', '3.11'] 65 | #exclude: 
66 | # - os: macos-latest 67 | # python-version: '3.8' 68 | 69 | runs-on: ${{ matrix.os }} 70 | 71 | steps: 72 | - uses: actions/checkout@v3 73 | - name: Set up Python 74 | uses: actions/setup-python@v4 75 | with: 76 | python-version: ${{ matrix.python-version }} 77 | - name: Display Python version 78 | run: python --version 79 | 80 | - name: Diagnostic report env 81 | run: | 82 | echo "PYTHON_VERSION is ${{ matrix.python-version }} on OS ${{ matrix.os }}" 83 | echo "Github variable RUNNER_OS=$RUNNER_OS" 84 | if [ "$RUNNER_OS" == "Linux" ]; then # report linux distro version 85 | lsb_release -a 86 | fi 87 | 88 | - name: Install dependencies 89 | run: | 90 | python -m pip install --upgrade pip 91 | pip install wheel 92 | pip install attrdict3 93 | 94 | if [ "$RUNNER_OS" == "Linux" ]; then 95 | sudo apt-get install libsdl2-mixer-2.0-0 libsdl2-image-2.0-0 libsdl2-2.0-0 96 | sudo apt-get install libgtk-3-dev 97 | pip install -U -f https://extras.wxpython.org/wxPython4/extras/linux/gtk3/${{ matrix.os }} wxPython 98 | 99 | elif [ "$RUNNER_OS" == "macOS" ]; then 100 | pip install wxpython 101 | 102 | else 103 | echo "$RUNNER_OS not supported" 104 | exit 1 105 | fi 106 | 107 | pip install numpy matplotlib scipy vispy 108 | pip install pyinstaller 109 | 110 | - name: Run Python and print version_info 111 | run: | 112 | python -c "import sys; print(sys.version_info)" 113 | uname -a 114 | 115 | #Checkout code here 116 | - uses: actions/checkout@v2 117 | 118 | - name: Build executable using Pyinstaller 119 | run: | 120 | if [ "$RUNNER_OS" == "Linux" ]; then 121 | pyinstaller pyDataView.spec 122 | ls -l dist/ 123 | elif [ "$RUNNER_OS" == "macOS" ]; then 124 | #It seems building in stages seems to work (locally at least) 125 | pyinstaller --windowed pyDataView.py 126 | #diff pyDataView.spec pyDataView_apple1.spec 127 | #Try rebuilding with spec file generated by windowed command above 128 | pyinstaller pyDataView.spec 129 | #pyinstaller -y pyDataView_apple1.spec 130 | #pyinstaller -y pyDataView_apple2.spec 131 | else 132 | echo "$RUNNER_OS not supported" 133 | exit 1 134 | fi 135 | 136 | 137 | - name: Save generated executable file as an artifact 138 | uses: actions/upload-artifact@v2 139 | with: 140 | # The act of downloading zips up the artifact and adds the .zip to the downloaded file 141 | name: pyDataView-${{ matrix.os }}-${{ matrix.python-version }} 142 | path: dist/ 143 | 144 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | 61 | #Ipython Notebook 62 | .ipynb_checkpoints 63 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pyDataView 2 | 3 | "A graphical user interface for people who hate graphical user interfaces" 4 | 5 | If you know how to read your data by writing Python code, pyDataView simply provides a way to quickly explore the data with a slider/file viewer. Once you see something you like, click Save Script to generate a starting point for further analysis. 6 | 7 | ## Introduction 8 | 9 | A data viewer GUI written in Python, wxPython and matplotlib. 10 | 11 | This is a lightweight interface for quick insight into scientific data. 12 | Data can be explored as a matplotlib line plot or contour using sliders to traverse the range of existing records and bins. 13 | For more detailed analysis, a figure can be saved, the data output as a CSV file or a minimal Python script generated. 14 | The emphasis of this project is the provision of a simple reader to get data into Python, a minimal GUI to get quick insight into that data, and the generation of Python scripts for more detailed analysis. 15 | 16 | ## Quickstart 17 | 18 | ### Clone the repository 19 | 20 | On Linux, you need Python 3, numpy, scipy, wxPython, matplotlib and vispy installed. You can then clone the repository and run pyDataView from the command line, 21 | 22 | python3 pyDataView.py -d ./path/to/dir 23 | 24 | choosing the path to point at the directory which contains the data. 25 | 26 | ### Install using pip 27 | 28 | A version is available from the Python [package manager](https://pypi.org/project/pyDataView/) with a command of the form, 29 | 30 | pip install pyDataView 31 | 32 | This should create an entry point so you can use the GUI with a command of the form 33 | 34 | pyDataView 35 | 36 | Any files which can be converted to fields are displayed on the left-hand side. 37 | 38 | ![alt tag](https://raw.githubusercontent.com/edwardsmith999/pyDataView/master/pyDataView_screenshot.png) 39 | 40 | You can also use the entire library of data reading functions in Python scripts, e.g. 41 | 42 | ```python 43 | import postproclib as ppl 44 | 45 | fdir = "/path/to/directory/with/data/" 46 | PPObj = ppl.All_PostProc(fdir) 47 | print(PPObj) 48 | ``` 49 | 50 | which will print any data found in the directory. The data formats can be output from molecular dynamics simulations such as [Flowmol](https://github.com/edwardsmith999/flowmol) (binary format MPI/Fortran) or [LAMMPS](https://www.lammps.org/) (ASCII format) as well as CFD codes such as [OpenFOAM](https://openfoam.org/) (uniform grids only) or [ChannelFlow](http://channelflow.org/) (h5 format).
The found data will be listed as a dictionary of plot types which can be plotted as follows, 51 | 52 | ```python 53 | #Get plotting object 54 | plotObj = PPObj.plotlist['mbins'] 55 | 56 | #Get profile 57 | x, y = plotObj.profile(axis=normal, 58 | startrec=startrec, 59 | endrec=endrec) 60 | 61 | #Plot a single component 62 | fig, ax = plt.subplots(1,1) 63 | ax.plot(x,y[:,component]) 64 | plt.show() 65 | ``` 66 | 67 | If the code/datatype is already supported, all available data should be displayed. 68 | `pyDataView` uses fields of five-dimensional data: three spatial, one temporal and one for the dimensionality of the data. 69 | 70 | As well as the raw data fields, derived fields are also supported, for example, dividing momentum by density to get velocity. 71 | 72 | ### Prebuilt Binaries 73 | If you use Windows, a pre-compiled executable built with PyInstaller is available: 74 | 75 | - Action Autobuild (latest version) [here](https://nightly.link/edwardsmith999/pyDataView/workflows/main/master/pyDataView.exe.zip) 76 | - Stable download [here](https://e.pcloud.link/publink/show?code=XZow81ZztSOsTwpmBbrvhihbwBVVYIf5vhy) (Built 01/12/23) 77 | 78 | 79 | ## Adding new readers 80 | 81 | One of the main aims of pyDataView is to make it easy to add new readers. 82 | 83 | To add new datatypes, the user must create a raw data reader in `newreader.py`, 84 | 85 | ```python 86 | 87 | from rawdata import RawData 88 | 89 | class SomeNewReader(RawData): 90 | 91 | def __init__(self,fdir,fname,dtype,nperbin): 92 | if (fdir[-1] != '/'): fdir += '/' 93 | self.fdir = fdir 94 | self.fname = fname 95 | self.dtype = dtype 96 | self.nperbin = nperbin 97 | self.filepath = self.fdir + self.fname + '/' 98 | self.header = self.read_header(fdir) #The user should write these 99 | self.grid = self.get_gridtopology() #The user should write these 100 | self.maxrec = self.get_maxrec() 101 | 102 | def read(self, startrec, endrec): 103 | 104 | # Read a 5D array [nx, ny, nz, nrecs, nperbin] 105 | # where nrecs=endrec-startrec+1 and nperbin is 1 for a scalar field, 106 | # 3 for a vector field, etc. 107 | 108 | return bindata 109 | 110 | ``` 111 | To use this raw data, a field datatype can then be added to `newfield.py`, 112 | 113 | ```python 114 | 115 | from field import Field 116 | from newreader import SomeNewReader 117 | 118 | class SomeNewVectorField(Field): 119 | 120 | dtype = 'd' 121 | nperbin = 3 122 | 123 | def __init__(self,fdir): 124 | Raw = SomeNewReader(fdir, self.fname, self.dtype, 125 | self.nperbin) 126 | Field.__init__(self,Raw) 127 | self.header = self.Raw.header 128 | self.axislabels = ['x','y','z'] 129 | 130 | ``` 131 | This field type can now be called in any Python script, allowing 5D data fields to be read, as well as data to be prepared for plots, including profiles, contours, FFTs, etc.
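For example, a minimal sketch of driving the new field class from your own script (the results directory, record range and axis index below are illustrative placeholders, and the `profile` call mirrors the earlier example),

```python
import matplotlib.pyplot as plt

from newfield import SomeNewVectorField

#Placeholder directory containing the new data format
field = SomeNewVectorField("/path/to/results/")

#Full 5D block of data with shape [nx, ny, nz, nrecs, nperbin]
data = field.read(startrec=0, endrec=10)

#1D profile along y (axis=1) for the same range of records
x, y = field.profile(axis=1, startrec=0, endrec=10)
plt.plot(x, y[:, 0])
plt.show()
```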
To add these new fields to the GUI, they should be added to `newpostproc.py`, 132 | 133 | ```python 134 | 135 | from postproc import PostProc 136 | from newfield import SomeNewVectorField 137 | 138 | class New_PostProc(PostProc): 139 | 140 | def __init__(self,resultsdir,**kwargs): 141 | self.resultsdir = resultsdir 142 | self.plotlist = {} 143 | 144 | possibles = {'New Field': SomeNewVectorField} 145 | 146 | 147 | for key, field in possibles.items(): 148 | try: 149 | self.plotlist[key] = field(self.resultsdir) 150 | except AssertionError: 151 | pass 152 | ``` 153 | which must be instantiated in `allpostproc.py`, allowing pyDataView to find and display the new field format. 154 | 155 | There are many examples of different data formats in the `postproclib` directory which can be used as templates. 156 | -------------------------------------------------------------------------------- /logo.icns: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edwardsmith999/pyDataView/87ef1325a7aecfced5eaf150821032010f3d655c/logo.icns -------------------------------------------------------------------------------- /matlab/plot_matlab.m: -------------------------------------------------------------------------------- 1 | clear variables classes 2 | close all 3 | cd("../") 4 | ppmod = py.importlib.import_module('postproclib'); 5 | %py.importlib.reload(ppmod); 6 | 7 | %MATLAB indexing 1=x, 2=y, 3=z 8 | normal = 3; 9 | naxis = 0:2; 10 | naxis(normal) = []; 11 | component = 1; 12 | 13 | startrec = 300; 14 | endrec = 300; 15 | 16 | %Some issues with windows backslashes, also make sure ends with a forward slash 17 | %fdir='\\wsl$\Ubuntu-18.04\home\es205\\codes\flowmol_et_al\flowmol\runs\results' 18 | %fdir = py.os.path.normpath("C:\Users\Ed\Documents\code\pyDataView-python3\pyDataView-python3\results/"); 19 | fdir = './results/'; 20 | fname = "rho"; 21 | 22 | %Create Postproc object and one for required datatype 23 | PPObj = ppmod.All_PostProc(py.str(fdir)); 24 | PObj = PPObj.plotlist{py.str(fname)}; 25 | 26 | %General low level read code 27 | ndarray = PObj.read(py.int(startrec), py.int(endrec)); 28 | data = np2mat(ndarray); 29 | 30 | %Plot as an image 31 | figure() 32 | imagesc(data(:,:,1,1,component)') 33 | colorbar() 34 | 35 | %Setup Meshgrid for contour plot 36 | g = PObj.grid; 37 | x = double(g{1}); 38 | y = double(g{2}); 39 | z = double(g{3}); 40 | [X,Y,Z] = meshgrid(x,y,z); 41 | 42 | %Plot contour against grid 43 | figure() 44 | contourf(X(:,:,1), Y(:,:,1), data(:,:,1,1,component)') 45 | colorbar() 46 | 47 | %Get Profile 48 | a = PObj.profile(py.int(normal-1), py.int(startrec),py.int(endrec)); 49 | x = a{1}; 50 | y = a{2}; 51 | plot(x,y) 52 | 53 | %Get contour 54 | a = PObj.contour(py.list({py.int(naxis(1)),py.int(naxis(2))}), ...
55 | py.int(startrec),py.int(endrec)); 56 | 57 | ax1 = np2mat(a{1}); 58 | ax2 = np2mat(a{2}); 59 | field = np2mat(a{3}); 60 | 61 | figure() 62 | [C,h] =contourf(ax1, ax2, field, 40); 63 | set(h,'LineColor','none'); 64 | colorbar() 65 | 66 | figure() 67 | surf(ax1, ax2, field) 68 | colorbar() 69 | 70 | %Example with bin limits 71 | bins = 5; 72 | binwidth = 0; %Average range above and below current bin 73 | bns = py.list({py.int(bins-binwidth), py.int(bins+binwidth+1)}); 74 | None = string(missing); 75 | if (normal == 1) 76 | binlimits = py.list({bns,None,None}); 77 | elseif (normal == 2) 78 | binlimits = py.list({None,bns,None}); 79 | elseif (normal == 3) 80 | binlimits = py.list({None,None,bns}); 81 | end 82 | a = PObj.contour(py.list({py.int(naxis(1)),py.int(naxis(2))}), ... 83 | py.int(startrec),py.int(endrec), ... 84 | pyargs('binlimits',binlimits, ... 85 | "missingrec","returnzeros")); 86 | ax1 = np2mat(a{1}); 87 | ax2 = np2mat(a{2}); 88 | field = np2mat(a{3}); 89 | figure() 90 | [C,h] =contourf(ax1, ax2, field, 40); 91 | set(h,'LineColor','none'); 92 | colorbar() 93 | 94 | % Solution from 95 | % mathworks.com/matlabcentral/answers/157347-convert-python-numpy-array-to-double 96 | function data = np2mat(nparray) 97 | ns = int32(py.array.array('i',nparray.shape)); 98 | %data = reshape(double(py.array.array('d', ... 99 | % py.numpy.nditer(nparray))), ns); 100 | data = reshape(double(py.array.array('d', ... 101 | py.numpy.nditer(nparray, pyargs('order', 'C')))), ns); 102 | data=reshape(data,fliplr(ns)); 103 | data=permute(data,[length(ns):-1:1]); 104 | end 105 | -------------------------------------------------------------------------------- /misclib/__init__.py: -------------------------------------------------------------------------------- 1 | from .misc import * 2 | 3 | -------------------------------------------------------------------------------- /misclib/misc.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # coding:utf-8 3 | # Routines which are not specific to MD/CFD or CPL code 4 | import os 5 | import numpy as np 6 | from matplotlib.colors import colorConverter 7 | from . import latex2utf 8 | from math import log10, floor 9 | import math as maths 10 | import re 11 | 12 | def tryint(s): 13 | try: 14 | return int(s) 15 | except: 16 | return s 17 | 18 | def alphanum_key(s): 19 | """ Turn a string into a list of string and number chunks. 20 | "z23a" -> ["z", 23, "a"] 21 | """ 22 | return [ tryint(c) for c in re.split('([0-9]+)', s) ] 23 | 24 | def sort_nicely(l): 25 | """ Sort the given list in the way that humans expect. 26 | """ 27 | l.sort(key=alphanum_key) 28 | 29 | class Chdir: 30 | """ 31 | Wrapper to move from current directory to new directory 32 | and return when using with 33 | 34 | Example usage: 35 | 36 | with Chdir('./../'): 37 | os.system('./a.out') 38 | """ 39 | def __init__( self, newPath ): 40 | self.savedPath = os.getcwd() 41 | self.newPath = newPath 42 | 43 | def __enter__( self ): 44 | os.chdir(self.newPath) 45 | 46 | def __exit__( self, etype, value, traceback): 47 | os.chdir( self.savedPath ) 48 | 49 | 50 | #Some simple functions to generate colours. 
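# For example, pastel('b') rescales pure blue and blends it towards white,
# returning an [r, g, b] list; get_colours(n) further down uses pastel() to
# build an n-colour palette for plotting.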
51 | def pastel(colour, weight=2.4): 52 | """ Convert colour into a nice pastel shade""" 53 | rgb = np.asarray(colorConverter.to_rgb(colour)) 54 | # scale colour 55 | maxc = max(rgb) 56 | if maxc < 1.0 and maxc > 0: 57 | # scale colour 58 | scale = 1.0 / maxc 59 | rgb = rgb * scale 60 | # now decrease saturation 61 | total = sum(rgb) 62 | slack = 0 63 | for x in rgb: 64 | slack += 1.0 - x 65 | 66 | # want to increase weight from total to weight 67 | # pick x s.t. slack * x == weight - total 68 | # x = (weight - total) / slack 69 | x = (weight - total) / slack 70 | 71 | rgb = [c + (x * (1.0-c)) for c in rgb] 72 | 73 | return rgb 74 | 75 | #Helper functions for matplotlib figures and axes to changed to dashed types 76 | def setAxLinesBW(ax): 77 | """ 78 | Take each Line2D in the axes, ax, and convert the line style to be 79 | suitable for black and white viewing. 80 | """ 81 | MARKERSIZE = 3 82 | 83 | COLORMAP = { 84 | 'b': {'marker': None, 'dash': (None,None)}, 85 | 'g': {'marker': None, 'dash': [5,5]}, 86 | 'r': {'marker': None, 'dash': [5,3,1,3]}, 87 | 'c': {'marker': None, 'dash': [1,3]}, 88 | 'm': {'marker': None, 'dash': [5,2,5,2,5,10]}, 89 | 'y': {'marker': None, 'dash': [5,3,1,2,1,10]}, 90 | 'k': {'marker': 'o', 'dash': (None,None)} #[1,2,1,10]} 91 | } 92 | 93 | for line in ax.get_lines() + ax.get_legend().get_lines(): 94 | origColor = line.get_color() 95 | line.set_color('black') 96 | line.set_dashes(COLORMAP[origColor]['dash']) 97 | line.set_marker(COLORMAP[origColor]['marker']) 98 | line.set_markersize(MARKERSIZE) 99 | 100 | def setFigLinesBW(fig): 101 | """ 102 | Take each axes in the figure, and for each line in the axes, make the 103 | line viewable in black and white. 104 | """ 105 | for ax in fig.get_axes(): 106 | setAxLinesBW(ax) 107 | 108 | 109 | def update_line(hl, new_data): 110 | hl.set_xdata(np.append(hl.get_xdata(), new_data)) 111 | hl.set_ydata(np.append(hl.get_ydata(), new_data)) 112 | plt.draw() 113 | 114 | def get_colours(n): 115 | """ Return n pastel colours. """ 116 | base = np.asarray([[1,0,0], [0,1,0], [0,0,1]]) 117 | 118 | if n <= 3: 119 | return base[0:n] 120 | 121 | # how many new colours to we need to insert between 122 | # red and green and between green and blue? 123 | needed = (((n - 3) + 1) / 2, (n - 3) / 2) 124 | 125 | colours = [] 126 | for start in (0, 1): 127 | for x in np.linspace(0, 1, needed[start]+2): 128 | colours.append((base[start] * (1.0 - x)) + 129 | (base[start+1] * x)) 130 | 131 | return [pastel(c) for c in colours[0:n]] 132 | 133 | def round_to_n(x,p): 134 | """ 135 | returns a string representation of x formatted with a precision of p 136 | 137 | Based on the webkit javascript implementation taken from here: 138 | https://code.google.com/p/webkit-mirror/source/browse/JavaScriptCore/kjs/number_object.cpp 139 | """ 140 | 141 | #No need to round an integer 142 | if isinstance(x,int): 143 | return x 144 | else: 145 | x = float(x) 146 | 147 | if x == 0.: 148 | return "0." + "0"*(p-1) 149 | 150 | out = [] 151 | 152 | if x < 0: 153 | out.append("-") 154 | x = -x 155 | 156 | e = int(maths.log10(x)) 157 | tens = maths.pow(10, e - p + 1) 158 | n = maths.floor(x/tens) 159 | 160 | if n < maths.pow(10, p - 1): 161 | e = e -1 162 | tens = maths.pow(10, e - p+1) 163 | n = maths.floor(x / tens) 164 | 165 | if abs((n + 1.) * tens - x) <= abs(n * tens -x): 166 | n = n + 1 167 | 168 | if n >= maths.pow(10,p): 169 | n = n / 10. 
170 | e = e + 1 171 | 172 | m = "%.*g" % (p, n) 173 | 174 | if e < -2 or e >= p: 175 | out.append(m[0]) 176 | if p > 1: 177 | out.append(".") 178 | out.extend(m[1:p]) 179 | out.append('e') 180 | if e > 0: 181 | out.append("+") 182 | out.append(str(e)) 183 | elif e == (p -1): 184 | out.append(m) 185 | elif e >= 0: 186 | out.append(m[:e+1]) 187 | if e+1 < len(m): 188 | out.append(".") 189 | out.extend(m[e+1:]) 190 | else: 191 | out.append("0.") 192 | out.extend(["0"]*-(e+1)) 193 | out.append(m) 194 | 195 | return "".join(out) 196 | 197 | 198 | def latextounicode(strings): 199 | 200 | if type(strings) is str: 201 | string = strings.encode('utf8') 202 | try: 203 | strings = strings.replace('rho','ρ') 204 | except UnicodeDecodeError: 205 | pass 206 | except SyntaxError: 207 | pass 208 | 209 | if type(strings) is str: 210 | try: 211 | strings = strings.replace('rho','ρ') 212 | except UnicodeDecodeError: 213 | pass 214 | except SyntaxError: 215 | pass 216 | elif type(strings) is list: 217 | for i, string in enumerate(strings): 218 | try: 219 | strings[i] = string.replace('rho','ρ') 220 | except UnicodeDecodeError: 221 | pass 222 | except SyntaxError: 223 | pass 224 | #latex2utf.latex2utf(string) 225 | 226 | 227 | return strings 228 | 229 | def unicodetolatex(strings): 230 | 231 | if type(strings) is str: 232 | #string = strings.encode('utf-8') 233 | strings = strings.replace('ρ','rho') 234 | if type(strings) is str: 235 | strings = strings.replace('ρ','rho') 236 | elif type(strings) is list: 237 | for i, string in enumerate(strings): 238 | #string = string.encode('utf-8') 239 | strings[i] = string.replace('ρ','rho') 240 | 241 | return strings 242 | 243 | -------------------------------------------------------------------------------- /postproclib/OF_field_parser.py: -------------------------------------------------------------------------------- 1 | """ 2 | field_parser.py 3 | parser for field data 4 | """ 5 | 6 | 7 | import os 8 | import struct 9 | import numpy as np 10 | 11 | 12 | def parse_field_all(fn): 13 | """ 14 | parse internal field, extract data to numpy.array 15 | :param fn: file name 16 | :return: numpy array of internal field and boundary 17 | """ 18 | if not os.path.exists(fn): 19 | print("Can not open file " + fn) 20 | return None 21 | with open(fn, "rb") as f: 22 | content = f.readlines() 23 | return parse_internal_field_content(content), parse_boundary_content(content) 24 | 25 | 26 | def parse_internal_field(fn): 27 | """ 28 | parse internal field, extract data to numpy.array 29 | :param fn: file name 30 | :return: numpy array of internal field 31 | """ 32 | if not os.path.exists(fn): 33 | print("Can not open file " + fn) 34 | return None 35 | with open(fn, "rb") as f: 36 | content = f.readlines() 37 | return parse_internal_field_content(content) 38 | 39 | 40 | def parse_internal_field_content(content): 41 | """ 42 | parse internal field from content 43 | :param content: contents of lines 44 | :return: numpy array of internal field 45 | """ 46 | is_binary = is_binary_format(content) 47 | for ln, lc in enumerate(content): 48 | if lc.startswith(b'internalField'): 49 | if b'nonuniform' in lc: 50 | return parse_data_nonuniform(content, ln, len(content), is_binary) 51 | elif b'uniform' in lc: 52 | return parse_data_uniform(content[ln]) 53 | break 54 | return None 55 | 56 | 57 | def parse_boundary_field(fn): 58 | """ 59 | parse internal field, extract data to numpy.array 60 | :param fn: file name 61 | :return: numpy array of boundary field 62 | """ 63 | if not os.path.exists(fn): 64 | 
print("Can not open file " + fn) 65 | return None 66 | with open(fn, "rb") as f: 67 | content = f.readlines() 68 | return parse_boundary_content(content) 69 | 70 | 71 | def parse_boundary_content(content): 72 | """ 73 | parse each boundary from boundaryField 74 | :param content: 75 | :return: 76 | """ 77 | data = {} 78 | is_binary = is_binary_format(content) 79 | bd = split_boundary_content(content) 80 | for boundary, (n1, n2) in list(bd.items()): 81 | pd = {} 82 | n = n1 83 | while True: 84 | lc = content[n] 85 | if b'nonuniform' in lc: 86 | v = parse_data_nonuniform(content, n, n2, is_binary) 87 | pd[lc.split()[0]] = v 88 | if not is_binary: 89 | n += len(v) + 4 90 | else: 91 | n += 3 92 | continue 93 | elif b'uniform' in lc: 94 | pd[lc.split()[0]] = parse_data_uniform(content[n]) 95 | n += 1 96 | if n > n2: 97 | break 98 | data[boundary] = pd 99 | return data 100 | 101 | 102 | def parse_data_uniform(line): 103 | """ 104 | parse uniform data from a line 105 | :param line: a line include uniform data, eg. "value uniform (0 0 0);" 106 | :return: data 107 | """ 108 | if b'(' in line: 109 | return np.array([float(x) for x in line.split(b'(')[1].split(b')')[0].split()]) 110 | return float(line.split(b'uniform')[1].split(b';')[0]) 111 | 112 | 113 | def parse_data_nonuniform(content, n, n2, is_binary): 114 | """ 115 | parse nonuniform data from lines 116 | :param content: data content 117 | :param n: line number 118 | :param n2: last line number 119 | :param is_binary: binary format or not 120 | :return: data 121 | """ 122 | num = int(content[n + 1]) 123 | if not is_binary: 124 | if b'scalar' in content[n]: 125 | data = np.array([float(x) for x in content[n + 3:n + 3 + num]]) 126 | else: 127 | data = np.array([ln[1:-2].split() for ln in content[n + 3:n + 3 + num]], dtype=float) 128 | else: 129 | nn = 1 130 | if b'vector' in content[n]: 131 | nn = 3 132 | elif b'symmTensor' in content[n]: 133 | nn = 6 134 | elif b'tensor' in content[n]: 135 | nn = 9 136 | buf = b''.join(content[n+2:n2+1]) 137 | vv = np.array(struct.unpack('{}d'.format(num*nn), 138 | buf[struct.calcsize('c'):num*nn*struct.calcsize('d')+struct.calcsize('c')])) 139 | if nn > 1: 140 | data = vv.reshape((num, nn)) 141 | else: 142 | data = vv 143 | return data 144 | 145 | 146 | def split_boundary_content(content): 147 | """ 148 | split each boundary from boundaryField 149 | :param content: 150 | :return: boundary and its content range 151 | """ 152 | bd = {} 153 | n = 0 154 | in_boundary_field = False 155 | in_patch_field = False 156 | current_path = '' 157 | while True: 158 | lc = content[n] 159 | if lc.startswith(b'boundaryField'): 160 | in_boundary_field = True 161 | if content[n+1].startswith(b'{'): 162 | n += 2 163 | continue 164 | elif content[n+1].strip() == b'' and content[n+2].startswith(b'{'): 165 | n += 3 166 | continue 167 | else: 168 | print('no { after boundaryField') 169 | break 170 | if in_boundary_field: 171 | if lc.rstrip() == b'}': 172 | break 173 | if in_patch_field: 174 | if lc.strip() == b'}': 175 | bd[current_path][1] = n-1 176 | in_patch_field = False 177 | current_path = '' 178 | n += 1 179 | continue 180 | if lc.strip() == b'': 181 | n += 1 182 | continue 183 | current_path = lc.strip() 184 | if content[n+1].strip() == b'{': 185 | n += 2 186 | elif content[n+1].strip() == b'' and content[n+2].strip() == b'{': 187 | n += 3 188 | else: 189 | print('no { after boundary patch') 190 | break 191 | in_patch_field = True 192 | bd[current_path] = [n,n] 193 | continue 194 | n += 1 195 | if n > len(content): 196 | if 
in_boundary_field: 197 | print('error, boundaryField not end with }') 198 | break 199 | 200 | return bd 201 | 202 | 203 | def is_binary_format(content, maxline=20): 204 | """ 205 | parse file header to judge the format is binary or not 206 | :param content: file content in line list 207 | :param maxline: maximum lines to parse 208 | :return: binary format or not 209 | """ 210 | for lc in content[:maxline]: 211 | if b'format' in lc: 212 | if b'binary' in lc: 213 | return True 214 | return False 215 | return False 216 | -------------------------------------------------------------------------------- /postproclib/VTKfields.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from .field import Field 4 | from .VTKrawdata import VTK_RawData 5 | 6 | class VTKField(Field): 7 | 8 | def __init__(self, fdir, fname): 9 | self.fname = fname 10 | Raw = VTK_RawData(fdir, self.fname) 11 | self.nperbin = Raw.nperbin 12 | Field.__init__(self, Raw) 13 | if Raw.nperbin == 1: 14 | self.labels = ['component'] 15 | elif Raw.nperbin == 3: 16 | self.labels = ['x', 'y', 'z'] 17 | self.axislabels = ['x', 'y', 'z'] 18 | self.plotfreq = Raw.plotfreq 19 | -------------------------------------------------------------------------------- /postproclib/VTKpostproc.py: -------------------------------------------------------------------------------- 1 | import os 2 | import glob 3 | 4 | from .VTKfields import * 5 | from .postproc import PostProc 6 | from .pplexceptions import NoResultsInDir 7 | 8 | class VTK_PostProc(PostProc): 9 | 10 | """ 11 | Post processing class for VTK files runs 12 | """ 13 | 14 | def __init__(self,resultsdir,**kwargs): 15 | self.resultsdir = resultsdir 16 | 17 | # Check directory exists before trying to instantiate object 18 | if (not os.path.isdir(self.resultsdir)): 19 | print(("Directory " + self.resultsdir + " not found")) 20 | raise IOError 21 | 22 | files = glob.glob(resultsdir+"/*.vtr") 23 | fnames = set([f.replace(resultsdir,"").replace("/","").split(".")[0] 24 | for f in files]) 25 | 26 | 27 | self.plotlist = {} 28 | for fname in fnames: 29 | print(self.resultsdir, fname) 30 | try: 31 | self.plotlist[fname] = VTKField(self.resultsdir, fname) 32 | except IOError: 33 | pass 34 | except ValueError: 35 | pass 36 | 37 | if (len(self.plotlist) == 0): 38 | raise NoResultsInDir 39 | -------------------------------------------------------------------------------- /postproclib/__init__.py: -------------------------------------------------------------------------------- 1 | # Only import the classes we want to be accessible to the user 2 | from .allpostproc import All_PostProc 3 | from .mdfields import * 4 | from .mdpostproc import * 5 | from .mdmols import * 6 | 7 | from .cfdfields import * 8 | from .cfdpostproc import * 9 | 10 | from .channelflowfields import * 11 | from .channelflowpostproc import * 12 | 13 | from .serial_cfdfields import * 14 | 15 | from .cplfields import * 16 | from .cplpostproc import * 17 | from .pplexceptions import * 18 | 19 | from .vmdfields import * 20 | from .build_vmd_polymer_psf import * 21 | 22 | from .openfoamfields import * 23 | from .openfoampostproc import * 24 | 25 | from .lammpsfields import * 26 | from .lammpspostproc import * 27 | 28 | from .VTKfields import * 29 | from .VTKpostproc import * 30 | -------------------------------------------------------------------------------- /postproclib/allpostproc.py: -------------------------------------------------------------------------------- 1 | import os 2 | 
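# All_PostProc below tries each code-specific PostProc class on the supplied
# directory, skips any that raise NoResultsInDir, and merges the plotlists of
# those that succeed (a coupled CPL run returns early with only its own fields).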
3 | from .mdpostproc import MD_PostProc 4 | from .lammpspostproc import LAMMPS_PostProc 5 | from .cfdpostproc import CFD_PostProc 6 | from .cplpostproc import CPL_PostProc 7 | from .channelflowpostproc import channelflow_PostProc 8 | from .openfoampostproc import OpenFOAM_PostProc 9 | from .serial_cfdpostproc import Serial_CFD_PostProc 10 | from .pplexceptions import NoResultsInDir 11 | try: 12 | from .VTKpostproc import VTK_PostProc 13 | vispyfound = True 14 | except ImportError: 15 | vispyfound = False 16 | 17 | class All_PostProc: 18 | 19 | def __init__(self, fdir): 20 | 21 | if not os.path.isdir(fdir): 22 | print(("Requested directory ", fdir, " does not exist.")) 23 | fdir = './' 24 | 25 | self.plotlist = {} 26 | 27 | try: 28 | CPL_PP = CPL_PostProc(fdir) 29 | self.plotlist.update(CPL_PP.plotlist) 30 | print(CPL_PP) 31 | print("Coupled case, only plotting coupled field") 32 | return 33 | except NoResultsInDir: 34 | pass 35 | 36 | try: 37 | MD_PP = MD_PostProc(fdir) 38 | self.plotlist.update(MD_PP.plotlist) 39 | print(MD_PP) 40 | except NoResultsInDir: 41 | pass 42 | 43 | try: 44 | LAMMPS_PP = LAMMPS_PostProc(fdir) 45 | self.plotlist.update(LAMMPS_PP.plotlist) 46 | print(LAMMPS_PP) 47 | except NoResultsInDir: 48 | pass 49 | 50 | try: 51 | CFD_PP = CFD_PostProc(fdir) 52 | self.plotlist.update(CFD_PP.plotlist) 53 | print(CFD_PP) 54 | except NoResultsInDir: 55 | pass 56 | 57 | try: 58 | CF_PP = channelflow_PostProc(fdir) 59 | self.plotlist.update(CF_PP.plotlist) 60 | print(CF_PP) 61 | except NoResultsInDir: 62 | pass 63 | 64 | try: 65 | SCFD_PP = Serial_CFD_PostProc(fdir) 66 | self.plotlist.update(SCFD_PP.plotlist) 67 | print(SCFD_PP) 68 | except NoResultsInDir: 69 | pass 70 | 71 | try: 72 | OF_PP = OpenFOAM_PostProc(fdir) 73 | self.plotlist.update(OF_PP.plotlist) 74 | print(OF_PP) 75 | except NoResultsInDir: 76 | pass 77 | 78 | if vispyfound: 79 | try: 80 | VTK_PP = VTK_PostProc(fdir) 81 | self.plotlist.update(VTK_PP.plotlist) 82 | print(VTK_PP) 83 | except NoResultsInDir: 84 | pass 85 | 86 | if (len(self.plotlist) == 0): 87 | raise NoResultsInDir 88 | 89 | 90 | -------------------------------------------------------------------------------- /postproclib/build_vmd_polymer_psf.py: -------------------------------------------------------------------------------- 1 | #! 
usr/bin/env python 2 | import os 3 | import glob 4 | import sys 5 | import numpy as np 6 | import subprocess as sp 7 | from operator import itemgetter 8 | 9 | def progress_bar(fraction): 10 | i = int(fraction*65) 11 | sys.stdout.write('\r') 12 | sys.stdout.write("[%-65s] %6.3f%%" % ('='*i, 100*fraction)) 13 | sys.stdout.flush() 14 | 15 | def write_bonds(pairs_list): 16 | nbonds = len(pairs_list) 17 | f = open('polymer_topol.bonds','w') 18 | f.write("\n{0:8d}".format(nbonds) + ' !NBOND: bonds\n') 19 | line = '' 20 | count = 0 21 | for pair in pairs_list: 22 | count += 1 23 | line += "{0:>8d}{1:>8d}".format(pair[0],pair[1]) 24 | if (count%4 == 0): 25 | f.write(line+'\n') 26 | line = ''; 27 | # If necessary write final line 28 | if (count%4 != 0): 29 | f.write(line+'\n') 30 | f.close() 31 | 32 | def concat_files(): 33 | sp.call('cat polymer_topol.header polymer_topol.bonds > polymer_topol.psf', 34 | shell=True) 35 | 36 | 37 | def read_monomers(fdir="./", filename='monomers'): 38 | 39 | # Concat list of all monomers, from all ranks, 40 | # store everything in RAM if poss 41 | data = [] 42 | rankfiles = glob.glob(fdir + filename + '_*') 43 | for rankfile in rankfiles: 44 | print('Getting info from file ' + str(rankfile) + ' of ' + 45 | str(len(rankfiles))) 46 | with open(rankfile,'r') as f: 47 | data = data + [list(map(int,line.split())) for line in f] 48 | 49 | # Sort the data into chains (second column is chainID) 50 | print('Sorting monomers into chains...') 51 | data.sort(key=itemgetter(1)) 52 | data = np.array(data) 53 | 54 | return data 55 | 56 | def build_psf(): 57 | 58 | data = read_monomers() 59 | 60 | if data.size == 0: 61 | return 62 | 63 | # Loop over chainIDs and determine bond pairs 64 | chainID = data[0,1] 65 | maxchainID = data[-1,1] 66 | pairs = [] # List of bond pairs 67 | count = 0 68 | 69 | if maxchainID == 0: 70 | print('No polymers in fluid -- exiting') 71 | return 72 | 73 | print('Finding bond pairs in all chains...') 74 | while True: 75 | 76 | # chain is a list of all monomers with the same chainID 77 | chain = data[np.where(data[:,1]==chainID)] 78 | # keep track of where we are in the data 79 | lastindex = np.where(data[:,1]==chainID)[0][-1] 80 | print(chainID, np.array(chain)[:,0], lastindex) 81 | if (chainID%100 == 0): 82 | progress_bar(float(chainID)/float(maxchainID)) 83 | 84 | if (chainID != 0): 85 | 86 | for monomer in chain: 87 | globID = monomer[0] 88 | scID = monomer[2] 89 | bflag = monomer[-4:] 90 | bstring = "{3:031b}{2:031b}{1:031b}{0:031b}".format( 91 | bflag[0],bflag[1],bflag[2],bflag[3])[::-1] 92 | barray = np.array(list(map(int,list(bstring)))) 93 | bscIDs = np.where(barray==1)[0] + 1 94 | try: 95 | bglobIDs = ([chain[np.where(chain[:,2]==b)][0][0] 96 | for b in bscIDs]) 97 | for ID in bglobIDs: 98 | pairs.append(sorted([globID,ID])) 99 | except: 100 | print('Failed to find all subchainIDs ' + str(bscIDs) + 101 | ' for chain ID ' + str(chainID)) 102 | #raise 103 | 104 | try: 105 | chainID = data[lastindex+1][1] 106 | except IndexError: 107 | break 108 | 109 | # Remove duplicate entries by converting to a set and then back to a list 110 | pairs_set = set(tuple(p) for p in pairs) 111 | pairs_list = list(map(list,pairs_set)) 112 | 113 | # Write the list to polymer_topol.bonds 114 | write_bonds(pairs_list) 115 | 116 | if __name__ == "__main__": 117 | build_psf() 118 | # Ask user if they want to concat 119 | print('Do you wish to concatenate polymer_topol.header and \n'+ 120 | 'polymer_topol.bonds to make polymer_topol.psf? 
(y/n)') 121 | ans = input() 122 | if (ans in ['y','Y','yes','Yes','YES']): 123 | concat_files() 124 | -------------------------------------------------------------------------------- /postproclib/cfdfields.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | import numpy as np 3 | 4 | from .field import Field 5 | from .cfdrawdata import CFD_RawData 6 | 7 | # ============================================================================ 8 | # CFDField base class 9 | 10 | class CFDField(Field): 11 | 12 | nhalos = [0, 1, 0] 13 | 14 | def __init__(self,fdir): 15 | Raw = CFD_RawData(fdir) 16 | Field.__init__(self,Raw) 17 | self.axislabels = ['x','y','z'] 18 | self.plotfreq = 1 19 | 20 | # ============================================================================ 21 | # CFDField derived classes, but calculated by the main code 22 | class CFD_vField(CFDField): 23 | 24 | nperbin = 3 25 | 26 | def __init__(self,fdir): 27 | CFDField.__init__(self,fdir) 28 | assert self.Raw.npercell > 0 29 | self.labels = ['u','v','w'] 30 | 31 | def read(self,startrec,endrec,binlimits=None,**kwargs): 32 | subdata = CFDField.read(self,startrec,endrec,binlimits=binlimits, 33 | **kwargs) 34 | v = subdata[:,:,:,:,0:3] 35 | return v 36 | 37 | class CFD_PField(CFDField): 38 | 39 | nperbin = 1 40 | 41 | def __init__(self,fdir): 42 | CFDField.__init__(self,fdir) 43 | assert self.Raw.npercell > 3 44 | self.labels = ['p'] 45 | 46 | def read(self,startrec,endrec,binlimits=None,**kwargs): 47 | subdata = CFDField.read(self,startrec,endrec,binlimits=binlimits, 48 | **kwargs) 49 | P = subdata[:,:,:,:,3:4] 50 | return P 51 | 52 | 53 | class CFD_StressField(CFDField): 54 | 55 | nperbin = 9 56 | def __init__(self,fdir): 57 | CFDField.__init__(self,fdir) 58 | assert self.Raw.npercell > 4 59 | x = self.axislabels[0]; y = self.axislabels[1]; z = self.axislabels[2] 60 | self.labels = [x+x,x+y,x+z, 61 | y+x,y+y,y+z, 62 | z+x,z+y,z+z] 63 | 64 | def read(self,startrec,endrec,binlimits=None,**kwargs): 65 | subdata = CFDField.read(self,startrec,endrec,binlimits=binlimits, 66 | **kwargs) 67 | P = subdata[:,:,:,:,4:] 68 | return P 69 | # ============================================================================= 70 | # Complex fields that require extra calculations. 71 | class CFD_complexField(CFDField): 72 | 73 | def inherit_parameters(self, subfieldobj): 74 | self.header = subfieldobj.Raw.header 75 | self.nperbin = subfieldobj.nperbin 76 | self.cpol_bins = False 77 | self.plotfreq = subfieldobj.plotfreq 78 | self.axislabels = subfieldobj.axislabels 79 | self.labels = subfieldobj.labels 80 | 81 | 82 | class CFD_mugradvField(CFD_complexField): 83 | 84 | nperbin = 9 85 | 86 | def __init__(self, fdir): 87 | self.vField = CFD_vField(fdir) 88 | CFD_complexField.__init__(self, fdir) 89 | x = self.axislabels[0]; y = self.axislabels[1]; z = self.axislabels[2] 90 | self.labels = [x+x,x+y,x+z, 91 | y+x,y+y,y+z, 92 | z+x,z+y,z+z] 93 | self.rho = None 94 | 95 | def set_rho(self, rho): 96 | self.rho = rho 97 | 98 | def read(self, startrec, endrec, binlimits=None, **kwargs): 99 | 100 | if (self.rho == None): 101 | print(('CFD_mugradvField requires rho, set by ' + 102 | 'CFD_mugradvField.set_rho(rho).')) 103 | 104 | vdata = self.vField.read(startrec, endrec, binlimits=binlimits, 105 | **kwargs) 106 | 107 | # The call to grad between >>> 108 | # should do the same as the lines between <<< 109 | # but I haven't changed it as I can't check over ssh... 
110 | 111 | # >>>>>>>>>>>>>>>>>>>> 112 | #gradv = self.grad(vdata) 113 | # >>>>>>>>>>>>>>>>>>>> 114 | 115 | # <<<<<<<<<<<<<<<<<<<< 116 | dx = self.vField.Raw.dx 117 | dy = self.vField.Raw.dy 118 | dz = self.vField.Raw.dz 119 | gradv = np.empty(list(vdata.shape[:-1]) + [9]) 120 | for rec in range(gradv.shape[-2]): 121 | for ixyz in range(3): 122 | for jxyz in range(3): 123 | c = 3*ixyz + jxyz 124 | gradv[:,:,:,rec,c] = ( 125 | np.gradient(vdata[:,:,:,rec,ixyz], dx, dy, dz)[jxyz] 126 | ) 127 | # <<<<<<<<<<<<<<<<<<<< 128 | 129 | 130 | nugradv = self.vField.Raw.nu*gradv 131 | try: 132 | mugradv = np.multiply(nugradv, self.rho) 133 | return mugradv 134 | except TypeError: 135 | print('Rho not set, returning nugradv') 136 | return nugradv 137 | 138 | class CFD_strainField(CFD_complexField,CFD_vField): 139 | 140 | def __init__(self,fdir,rectype='bins'): 141 | self.vField = CFD_vField(fdir) 142 | 143 | Field.__init__(self,self.vField.Raw) 144 | self.inherit_parameters(self.vField) 145 | self.labels = ["dudx","dudy","dudz", 146 | "dvdx","dvdy","dvdz", 147 | "dwdx","dwdy","dwdz"] 148 | self.nperbin = 9 149 | 150 | def read(self,startrec,endrec, binlimits=None,**kwargs): 151 | vdata = self.vField.read(startrec, endrec, 152 | binlimits=None) 153 | 154 | straindata = self.grad(vdata) 155 | 156 | if (binlimits): 157 | 158 | # Defaults 159 | lower = [0]*3 160 | upper = [i for i in straindata.shape] 161 | 162 | for axis in range(3): 163 | if (binlimits[axis] == None): 164 | continue 165 | else: 166 | lower[axis] = binlimits[axis][0] 167 | upper[axis] = binlimits[axis][1] 168 | 169 | straindata = straindata[lower[0]:upper[0], 170 | lower[1]:upper[1], 171 | lower[2]:upper[2], :, :] 172 | 173 | return straindata 174 | 175 | 176 | class CFD_vortField(CFD_complexField,CFD_vField): 177 | 178 | def __init__(self,fdir,rectype='bins'): 179 | self.vField = CFD_vField(fdir) 180 | self.strainField = CFD_strainField(fdir) 181 | 182 | Field.__init__(self,self.vField.Raw) 183 | self.inherit_parameters(self.strainField) 184 | self.labels = ["x","y","z"] 185 | self.nperbin = 3 186 | 187 | def read(self,startrec,endrec, binlimits=None,**kwargs): 188 | dudr = self.strainField.read(startrec, endrec, 189 | binlimits=None) 190 | 191 | vortdata = np.empty([dudr.shape[0],dudr.shape[1], 192 | dudr.shape[2],dudr.shape[3],self.nperbin]) 193 | vortdata[:,:,:,:,0] = ( dudr[:,:,:,:,7] 194 | -dudr[:,:,:,:,5]) 195 | vortdata[:,:,:,:,1] = ( dudr[:,:,:,:,2] 196 | -dudr[:,:,:,:,6]) 197 | vortdata[:,:,:,:,2] = ( dudr[:,:,:,:,3] 198 | -dudr[:,:,:,:,1]) 199 | 200 | if (binlimits): 201 | 202 | # Defaults 203 | lower = [0]*3 204 | upper = [i for i in vortdata.shape] 205 | 206 | for axis in range(3): 207 | if (binlimits[axis] == None): 208 | continue 209 | else: 210 | lower[axis] = binlimits[axis][0] 211 | upper[axis] = binlimits[axis][1] 212 | 213 | vortdata = vortdata[lower[0]:upper[0], 214 | lower[1]:upper[1], 215 | lower[2]:upper[2], :, :] 216 | 217 | return vortdata 218 | 219 | 220 | class CFD_dissipField(CFD_complexField,CFD_vField): 221 | 222 | def __init__(self,fdir,rectype='bins'): 223 | self.vField = CFD_vField(fdir) 224 | self.strainField = CFD_strainField(fdir) 225 | 226 | Field.__init__(self,self.vField.Raw) 227 | self.inherit_parameters(self.strainField) 228 | self.labels = ["mag"] 229 | self.nperbin = 1 230 | 231 | def read(self,startrec,endrec, binlimits=None,**kwargs): 232 | dudr = self.strainField.read(startrec, endrec, 233 | binlimits=None) 234 | 235 | vortdata = np.empty([dudr.shape[0],dudr.shape[1], 236 | 
dudr.shape[2],dudr.shape[3],self.nperbin]) 237 | vortdata[:,:,:,:,0] = ( np.power(dudr[:,:,:,:,0],2.) 238 | +np.power(dudr[:,:,:,:,1],2.) 239 | +np.power(dudr[:,:,:,:,2],2.)) 240 | 241 | 242 | if (binlimits): 243 | 244 | # Defaults 245 | lower = [0]*3 246 | upper = [i for i in vortdata.shape] 247 | 248 | for axis in range(3): 249 | if (binlimits[axis] == None): 250 | continue 251 | else: 252 | lower[axis] = binlimits[axis][0] 253 | upper[axis] = binlimits[axis][1] 254 | 255 | vortdata = vortdata[lower[0]:upper[0], 256 | lower[1]:upper[1], 257 | lower[2]:upper[2], :, :] 258 | 259 | return vortdata 260 | 261 | 262 | -------------------------------------------------------------------------------- /postproclib/cfdpostproc.py: -------------------------------------------------------------------------------- 1 | import os 2 | from .cfdfields import * 3 | from .postproc import PostProc 4 | from .pplexceptions import NoResultsInDir 5 | 6 | class CFD_PostProc(PostProc): 7 | 8 | """ 9 | Post processing class for CFD runs 10 | """ 11 | 12 | def __init__(self,resultsdir,**kwargs): 13 | self.resultsdir = resultsdir 14 | self.plotlist = {} 15 | 16 | # Check directory exists before instantiating object and check 17 | # which files associated with plots are in directory 18 | if (not os.path.isdir(self.resultsdir)): 19 | print(("Directory " + self.resultsdir + " not found")) 20 | raise IOError 21 | 22 | try: 23 | fobj = open(self.resultsdir + 'report','r') 24 | except IOError: 25 | raise NoResultsInDir 26 | 27 | possibles = {'CFD Velocity': CFD_vField, 28 | 'CFD Pressure': CFD_PField, 29 | 'CFD mugradv': CFD_mugradvField, 30 | 'CFD Stress': CFD_StressField, 31 | 'CFD Strain': CFD_strainField, 32 | 'CFD Vorticity': CFD_vortField, 33 | 'CFD Dissipation': CFD_dissipField} 34 | 35 | self.plotlist = {} 36 | for key, field in list(possibles.items()): 37 | try: 38 | self.plotlist[key] = field(self.resultsdir) 39 | except AssertionError: 40 | pass 41 | 42 | -------------------------------------------------------------------------------- /postproclib/cfdrawdata.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | import numpy as np 3 | import os 4 | 5 | from .rawdata import RawData 6 | from .pplexceptions import DataNotAvailable 7 | 8 | class CFD_RawData(RawData): 9 | 10 | def __init__(self,fdir): 11 | self.fdir = fdir 12 | self.grid = self.get_grid() 13 | self.subdomlist = self.get_subdomlist() 14 | self.npercell = self.get_npercell() 15 | self.maxrec = len(self.subdomlist)-1 # count from 0 16 | self.Re, self.nu = self.get_couette_params() 17 | self.header = None 18 | 19 | def get_couette_params(self): 20 | 21 | def extract_param(string): 22 | with open(self.fdir+'input','r') as fobj: 23 | param = float(fobj.read().split(string)[0].split()[-1]) 24 | return param 25 | 26 | Re = extract_param('Re') 27 | #Umax = extract_param('uwall_t') 28 | #L = 1.0 29 | #nu = Umax*L/Re 30 | nu = 1.0/Re 31 | return Re, nu 32 | 33 | def get_grid(self): 34 | try: 35 | fobj = open(self.fdir+'report','r') 36 | except IOError: 37 | raise DataNotAvailable 38 | report = fobj.readlines()[3:6] # Lines with info in 39 | for line in report: 40 | linepairs = line.split('|') 41 | for pair in linepairs: 42 | varname = pair.split()[0] 43 | varval = pair.split()[1] 44 | vars(self)[varname] = varval 45 | # Number of grid points in main code 46 | self.nx = int(self.nx) 47 | self.ny = int(self.ny) 48 | self.nz = int(self.nz) 49 | # Number of cell-centered values written to files 50 | # -1 for cell centers rather than grid points 51 | # -2 for not writing halos (except in y-direction) 52 | # Therefore -3 in x and z, -1 in y 53 | self.nrx = int(self.nx)-3 # number of subdom grid records in x 54 | self.nry = int(self.ny)-3+2 # +2 halos 55 | self.nrz = int(self.nz)-3 56 | # Domain lengths 57 | self.xL = float(self.xL) 58 | self.yL = float(self.yL) 59 | self.zL = float(self.zL) 60 | # Grid spacing 61 | self.dx = self.xL/float(self.nx-3) 62 | self.dy = self.yL/float(self.ny-3) 63 | self.dz = self.zL/float(self.nz-3) 64 | # Linspaces of cell centers, accounting for halos written in y 65 | gridx = np.linspace( self.dx/2., self.xL -self.dx/2., num=self.nrx) 66 | gridy = np.linspace(-self.dy/2., self.yL +self.dy/2., num=self.nry) 67 | gridz = np.linspace( self.dz/2., self.zL -self.dz/2., num=self.nrz) 68 | grid = [gridx,gridy,gridz] 69 | 70 | return grid 71 | 72 | def get_subdomlist(self): 73 | 74 | def get_int(name): 75 | string, integer = name.split('.') 76 | return int(integer) 77 | 78 | subdoms = [] 79 | for filename in os.listdir(self.fdir): 80 | if (filename.find('SubDom') != -1): 81 | subdoms.append(filename) 82 | 83 | if (len(subdoms) == 0): 84 | raise DataNotAvailable 85 | 86 | subdoms = sorted(subdoms,key=get_int) 87 | 88 | # CFD writes a record at the beginning, so remove it 89 | subdoms = subdoms[1:] 90 | return subdoms 91 | 92 | def get_npercell(self): 93 | dprealbytes = 8 # 8 for dp float 94 | ngridpoints = self.nrx * self.nry * self.nrz 95 | filepath = self.fdir + self.subdomlist[0] 96 | filesize = os.path.getsize(filepath) 97 | npercell = filesize / (dprealbytes*ngridpoints) 98 | return npercell 99 | 100 | def read(self,startrec,endrec,binlimits=None,verbose=False,**kwargs): 101 | 102 | nrecs = endrec - startrec + 1 103 | # Efficient memory allocation 104 | subdata = np.empty((self.nrx,self.nry,self.nrz,nrecs,self.npercell)) 105 | 106 | # Loop through files and insert data 107 | for plusrec in range(0,nrecs): 108 | 109 | fpath = self.fdir + self.get_subdomlist().pop(startrec+plusrec) 110 | with open(fpath,'rb') as fobj: 111 | data = np.fromfile(fobj,dtype='d') 112 | # zxy ordered in file 113 | try: 114 | 
data = np.reshape(data,[self.nrz,self.nrx,self.nry,self.npercell], 115 | order='F') 116 | except ValueError: 117 | print('Data in CFD file seems wrong -- maybe it includes halos? \n' 118 | 'Attempting to correct') 119 | if (data.shape[0] > self.nrz*self.nrx*self.nry*self.npercell): 120 | data = np.reshape(data,[self.nrz+1,self.nrx+1,self.nry,self.npercell], 121 | order='F') 122 | data = data[:-1,:-1,:,:] 123 | else: 124 | data = np.reshape(data,[self.nrz-1,self.nrx-1,self.nry,self.npercell], 125 | order='F') 126 | data = data[:-1,:-1,:,:] 127 | 128 | # change to xyz ordering 129 | data = np.transpose(data,(1,2,0,3)) 130 | # insert into array 131 | subdata[:,:,:,plusrec,:] = data 132 | 133 | # If bin limits are specified, return only those within range 134 | if (binlimits): 135 | 136 | if (verbose): 137 | print(('subdata.shape = {0:s}'.format(str(subdata.shape)))) 138 | print(('Extracting bins {0:s}'.format(str(binlimits)))) 139 | 140 | # Defaults 141 | lower = [0]*3 142 | upper = [i for i in subdata.shape] 143 | 144 | for axis in range(3): 145 | if (binlimits[axis] == None): 146 | continue 147 | else: 148 | lower[axis] = binlimits[axis][0] 149 | upper[axis] = binlimits[axis][1] 150 | 151 | subdata = subdata[lower[0]:upper[0], 152 | lower[1]:upper[1], 153 | lower[2]:upper[2], :, :] 154 | 155 | return subdata 156 | -------------------------------------------------------------------------------- /postproclib/channelflowfields.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | import numpy as np 3 | from .field import Field 4 | from .channelflowrawdata import Channelflow_RawData 5 | 6 | # ============================================================================ 7 | # CFDField base class 8 | 9 | class ChannelflowField(Field): 10 | 11 | def __init__(self,fdir): 12 | Raw = Channelflow_RawData(fdir) 13 | Field.__init__(self,Raw) 14 | self.axislabels = ['x','y','z'] 15 | 16 | # ============================================================================ 17 | # CFDField derived classes, but calculated by the main code 18 | class Channelflow_vField(ChannelflowField): 19 | 20 | nperbin = 3 21 | labels = ['u','v','w'] 22 | 23 | def read(self,startrec,endrec,**kwargs): 24 | 25 | v = ChannelflowField.read(self,startrec,endrec,**kwargs) 26 | return v 27 | 28 | # ============================================================================ 29 | # Complex fields that require extra calculations. 
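# These wrap Channelflow_vField and compute derived quantities (strain rate,
# uu products, vorticity and dissipation) from the velocity data at read time,
# rather than reading them from separate files.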
30 | 31 | class Channelflow_complexField(ChannelflowField): 32 | 33 | def inherit_parameters(self, subfieldobj): 34 | self.header = subfieldobj.Raw.header 35 | self.nperbin = subfieldobj.nperbin 36 | self.cpol_bins = False 37 | self.plotfreq = subfieldobj.Raw.plotfreq 38 | self.axislabels = subfieldobj.axislabels 39 | self.labels = subfieldobj.labels 40 | 41 | 42 | class Channelflow_strainField(Channelflow_complexField,Channelflow_vField): 43 | 44 | def __init__(self,fdir,rectype='bins'): 45 | self.vField = Channelflow_vField(fdir) 46 | 47 | Field.__init__(self,self.vField.Raw) 48 | self.inherit_parameters(self.vField) 49 | self.labels = ["dudx","dudy","dudz", 50 | "dvdx","dvdy","dvdz", 51 | "dwdx","dwdy","dwdz"] 52 | self.nperbin = 9 53 | 54 | def read(self,startrec,endrec, binlimits=None,**kwargs): 55 | vdata = self.vField.read(startrec, endrec, 56 | binlimits=None) 57 | 58 | grid = self.vField.Raw.grid 59 | x, y, z = grid 60 | dx = np.gradient(x) 61 | dy = np.gradient(y) 62 | dz = np.gradient(z) 63 | dX,dY,dZ = np.meshgrid(dx,dy,dz,indexing='ij') 64 | 65 | straindata = self.grad(vdata,dX,dY,dZ) 66 | 67 | # straindata = np.zeros((vdata.shape[0],vdata.shape[1],vdata.shape[2],vdata.shape[3],9)) 68 | # print(vdata.shape) 69 | # for i in range(1,vdata.shape[0]): 70 | # for j in range(1,vdata.shape[1]): 71 | # for k in range(1,vdata.shape[2]): 72 | # straindata[i,j,k,:,0] = (vdata[i+1,j,k,:,0]-vdata[i-1,j,k,:,0])/(2.*(x[i+1]-x[i-1])) 73 | # straindata[i,j,k,:,1] = (vdata[i+1,j,k,:,1]-vdata[i-1,j,k,:,1])/(2.*(x[i+1]-x[i-1])) 74 | # straindata[i,j,k,:,2] = (vdata[i+1,j,k,:,2]-vdata[i-1,j,k,:,2])/(2.*(x[i+1]-x[i-1])) 75 | 76 | # straindata[i,j,k,:,3] = (vdata[i,j+1,k,:,0]-vdata[i,j-1,k,:,0])/(2.*(y[j+1]-y[j-1])) 77 | # straindata[i,j,k,:,4] = (vdata[i,j+1,k,:,1]-vdata[i,j-1,k,:,1])/(2.*(y[j+1]-y[j-1])) 78 | # straindata[i,j,k,:,5] = (vdata[i,j+1,k,:,2]-vdata[i,j-1,k,:,2])/(2.*(y[j+1]-y[j-1])) 79 | 80 | # straindata[i,j,k,:,6] = (vdata[i,j,k+1,:,0]-vdata[i,j,k-1,:,0])/(2.*(z[k+1]-z[k-1])) 81 | # straindata[i,j,k,:,7] = (vdata[i,j,k+1,:,1]-vdata[i,j,k-1,:,1])/(2.*(z[k+1]-z[k-1])) 82 | # straindata[i,j,k,:,8] = (vdata[i,j,k+1,:,2]-vdata[i,j,k-1,:,2])/(2.*(z[k+1]-z[k-1])) 83 | 84 | # print(i,j,k,straindata[i,j,k,0,:]) 85 | 86 | if (binlimits): 87 | 88 | # Defaults 89 | lower = [0]*3 90 | upper = [i for i in straindata.shape] 91 | 92 | for axis in range(3): 93 | if (binlimits[axis] == None): 94 | continue 95 | else: 96 | lower[axis] = binlimits[axis][0] 97 | upper[axis] = binlimits[axis][1] 98 | 99 | straindata = straindata[lower[0]:upper[0], 100 | lower[1]:upper[1], 101 | lower[2]:upper[2], :, :] 102 | 103 | return straindata 104 | 105 | class Channelflow_uuField(Channelflow_complexField): 106 | 107 | def __init__(self, fdir): 108 | 109 | # Get mean velocity and density field 110 | self.fdir = fdir 111 | self.vField = Channelflow_vField(fdir) 112 | Field.__init__(self,self.vField.Raw) 113 | self.inherit_parameters(self.vField) 114 | self.labels = ['uu','uv','uw', 115 | 'vu','vv','vw', 116 | 'wu','wv','ww'] 117 | self.nperbin = 9 118 | 119 | def read(self,startrec,endrec,**kwargs): 120 | vdata = self.vField.read(startrec,endrec,**kwargs) 121 | 122 | # Find outer product of v*v and reshape to 1x9 rather than 3x3 123 | nrecs = endrec-startrec+1 124 | rhovvdata = np.einsum('abcdj,abcdk->abcdjk',vdata,vdata) 125 | vvshapelist = list(rhovvdata.shape) 126 | newshape = tuple(vvshapelist[0:4]+[self.nperbin]) 127 | rhovvdata = np.reshape(rhovvdata,newshape) 128 | 129 | return rhovvdata 130 | 131 | 132 | class 
Channelflow_vortField(Channelflow_complexField,Channelflow_vField): 133 | 134 | def __init__(self,fdir,rectype='bins'): 135 | self.vField = Channelflow_vField(fdir) 136 | self.strainField = Channelflow_strainField(fdir) 137 | 138 | Field.__init__(self,self.vField.Raw) 139 | self.inherit_parameters(self.strainField) 140 | self.labels = ["x","y","z"] 141 | self.nperbin = 3 142 | 143 | def read(self,startrec,endrec, binlimits=None,**kwargs): 144 | dudr = self.strainField.read(startrec, endrec, 145 | binlimits=None) 146 | 147 | vortdata = np.empty([dudr.shape[0],dudr.shape[1], 148 | dudr.shape[2],dudr.shape[3],self.nperbin]) 149 | vortdata[:,:,:,:,0] = ( dudr[:,:,:,:,7] 150 | -dudr[:,:,:,:,5]) 151 | vortdata[:,:,:,:,1] = ( dudr[:,:,:,:,2] 152 | -dudr[:,:,:,:,6]) 153 | vortdata[:,:,:,:,2] = ( dudr[:,:,:,:,3] 154 | -dudr[:,:,:,:,1]) 155 | 156 | if (binlimits): 157 | 158 | # Defaults 159 | lower = [0]*3 160 | upper = [i for i in vortdata.shape] 161 | 162 | for axis in range(3): 163 | if (binlimits[axis] == None): 164 | continue 165 | else: 166 | lower[axis] = binlimits[axis][0] 167 | upper[axis] = binlimits[axis][1] 168 | 169 | vortdata = vortdata[lower[0]:upper[0], 170 | lower[1]:upper[1], 171 | lower[2]:upper[2], :, :] 172 | 173 | return vortdata 174 | 175 | 176 | class Channelflow_dissipField(Channelflow_complexField,Channelflow_vField): 177 | 178 | def __init__(self,fdir,rectype='bins'): 179 | self.vField = Channelflow_vField(fdir) 180 | self.strainField = Channelflow_strainField(fdir) 181 | 182 | Field.__init__(self,self.vField.Raw) 183 | self.inherit_parameters(self.strainField) 184 | self.labels = ["mag"] 185 | self.nperbin = 1 186 | 187 | def read(self,startrec,endrec, binlimits=None,**kwargs): 188 | dudr = self.strainField.read(startrec, endrec, 189 | binlimits=None) 190 | 191 | dissipdata = np.empty([dudr.shape[0],dudr.shape[1], 192 | dudr.shape[2],dudr.shape[3],self.nperbin]) 193 | 194 | #From Viswanath 2006 D = \int_V |del u|^2 + |del v|^2 + |del w|^2 dV 195 | # dissipdata[:,:,:,:,0] = ( np.power(dudr[:,:,:,:,0] 196 | # + dudr[:,:,:,:,1] 197 | # + dudr[:,:,:,:,2],2) 198 | # + np.power(dudr[:,:,:,:,3] 199 | # + dudr[:,:,:,:,4] 200 | # + dudr[:,:,:,:,5],2) 201 | # + np.power(dudr[:,:,:,:,6] 202 | # + dudr[:,:,:,:,7] 203 | # + dudr[:,:,:,:,8],2)) 204 | 205 | dissipdata[:,:,:,:,0] = ( np.power(dudr[:,:,:,:,0],2.) + 206 | np.power(dudr[:,:,:,:,4],2.) + 207 | np.power(dudr[:,:,:,:,8],2.) + 208 | np.power(dudr[:,:,:,:,1],2.) + 209 | np.power(dudr[:,:,:,:,2],2.) + 210 | np.power(dudr[:,:,:,:,3],2.) + 211 | np.power(dudr[:,:,:,:,5],2.) + 212 | np.power(dudr[:,:,:,:,6],2.) + 213 | np.power(dudr[:,:,:,:,7],2.) 
) 214 | 215 | 216 | #print('dissip data = ',dudr[3,100,3,0,:],dissipdata[3,100,3,0,0]) 217 | 218 | if (binlimits): 219 | 220 | # Defaults 221 | lower = [0]*3 222 | upper = [i for i in dissipdata.shape] 223 | 224 | for axis in range(3): 225 | if (binlimits[axis] == None): 226 | continue 227 | else: 228 | lower[axis] = binlimits[axis][0] 229 | upper[axis] = binlimits[axis][1] 230 | 231 | dissipdata = dissipdata[lower[0]:upper[0], 232 | lower[1]:upper[1], 233 | lower[2]:upper[2], :, :] 234 | 235 | return dissipdata 236 | 237 | -------------------------------------------------------------------------------- /postproclib/channelflowpostproc.py: -------------------------------------------------------------------------------- 1 | import os 2 | import glob 3 | from .channelflowfields import * 4 | from .postproc import PostProc 5 | from .pplexceptions import NoResultsInDir 6 | 7 | class channelflow_PostProc(PostProc): 8 | 9 | """ 10 | Post processing class for channelflow runs 11 | """ 12 | 13 | def __init__(self,resultsdir,**kwargs): 14 | self.resultsdir = resultsdir 15 | self.plotlist = {} 16 | 17 | # Check directory exists before instantiating object and check 18 | # which files associated with plots are in directory 19 | if (not os.path.isdir(self.resultsdir)): 20 | print(("Directory " + self.resultsdir + " not found")) 21 | raise IOError 22 | 23 | possibles = {'channelflow Velocity': Channelflow_vField, 24 | 'Channelflow strain': Channelflow_strainField, 25 | 'Channelflow uu': Channelflow_uuField, 26 | 'Channelflow vorticity': Channelflow_vortField, 27 | 'Channelflow Dissipation': Channelflow_dissipField} 28 | 29 | if (not glob.glob(self.resultsdir+'*.h5')): 30 | raise NoResultsInDir 31 | 32 | self.plotlist = {} 33 | for key, field in list(possibles.items()): 34 | try: 35 | self.plotlist[key] = field(self.resultsdir) 36 | except AssertionError: 37 | pass 38 | 39 | if (len(self.plotlist) == 0): 40 | raise NoResultsInDir 41 | -------------------------------------------------------------------------------- /postproclib/cplpostproc.py: -------------------------------------------------------------------------------- 1 | import os 2 | from .cplfields import * 3 | from .postproc import PostProc 4 | from .pplexceptions import NoResultsInDir, DataNotAvailable 5 | from .mdpostproc import MD_PostProc 6 | from .cfdpostproc import CFD_PostProc 7 | from .serial_cfdpostproc import Serial_CFD_PostProc 8 | from .openfoampostproc import OpenFOAM_PostProc 9 | 10 | # Results directory paths for each code 11 | resultsdirs = { 12 | 'flowmol': 'flowmol/results', 13 | 'lammps': 'lammps/', 14 | 'serialcouette': 'couette_serial/results/', 15 | 'openfoam': 'openfoam/', 16 | 'transflow': 'couette_data/' 17 | } 18 | 19 | 20 | 21 | # Field classes that are associated with velocity for each code 22 | vfieldtypes = { 23 | 'flowmol': 24 | mdfields.MD_vField, 25 | 'lammps': 26 | lammpsfields.LAMMPS_vField, 27 | 'serialcouette': 28 | serial_cfdfields.Serial_CFD_vField, 29 | 'openfoam': 30 | openfoamfields.OpenFOAM_vField, 31 | 'transflow': 32 | cfdfields.CFD_vField 33 | } 34 | 35 | # Field classes that are associated with momentum for each code 36 | momfieldtypes = { 37 | 'flowmol': 38 | mdfields.MD_momField, 39 | 'serialcouette': 40 | serial_cfdfields.Serial_CFD_momField, 41 | 'lammps': 42 | lammpsfields.LAMMPS_momField, 43 | 'openfoam': 44 | openfoamfields.OpenFOAM_momField, 45 | 'transflow': 46 | None 47 | } 48 | 49 | # Field classes that are associated with stress for each code 50 | stressfieldtypes = { 51 | 'flowmol': 52 | 
mdfields.MD_stressField, 53 | 'lammps': 54 | None, 55 | 'serialcouette': 56 | serial_cfdfields.Serial_CFD_StressField, 57 | 'openfoam': 58 | openfoamfields.OpenFOAM_mugradvField, 59 | 'transflow': 60 | cfdfields.CFD_mugradvField 61 | } 62 | 63 | 64 | # CPL Field classes that could potentially be constructed 65 | possible_fields = { 66 | 'CPL Velocity': CPL_vField, 67 | 'CPL Momentum': CPL_momField, 68 | 'CPL Stress': CPL_stressField 69 | } 70 | # And their associated field class dictionary 71 | type_dicts = { 72 | 'CPL Velocity': vfieldtypes, 73 | 'CPL Momentum': momfieldtypes, 74 | 'CPL Stress': stressfieldtypes 75 | } 76 | 77 | 78 | # All possible pairings (Surely this should be done with itertools permute?) 79 | possible_pairs = [ 80 | {'MD':'flowmol', 'CFD':'serialcouette'}, 81 | {'MD':'flowmol', 'CFD':'openfoam'}, 82 | {'MD':'flowmol', 'CFD':'transflow'}, 83 | {'MD':'lammps', 'CFD':'openfoam'} 84 | ] 85 | 86 | class CPL_PostProc(PostProc): 87 | 88 | """ 89 | Post processing class for Coupled runs 90 | """ 91 | 92 | def __init__(self,resultsdir,**kwargs): 93 | self.resultsdir = resultsdir 94 | # Check directory exists before instantiating object and check 95 | # which files associated with plots are in directory 96 | if (not os.path.isdir(self.resultsdir)): 97 | print(("Directory " + self.resultsdir + " not found")) 98 | raise IOError 99 | 100 | self.plotlist = {} 101 | try: 102 | fobj = open(self.resultsdir + 'cpl/coupler_header','r') 103 | except IOError: 104 | raise NoResultsInDir 105 | 106 | for pair in possible_pairs: 107 | 108 | MDkey = pair['MD'] 109 | CFDkey = pair['CFD'] 110 | 111 | for CPLkey, CPLfieldtype in list(possible_fields.items()): 112 | 113 | print(('Attempting to construct ' + str(CPLfieldtype) 114 | + ' for ' + MDkey + ' and ' + CFDkey)) 115 | 116 | try: 117 | self.plotlist[CPLkey] = CPLfieldtype(self.resultsdir, 118 | MDFieldType=type_dicts[CPLkey][MDkey], 119 | CFDFieldType=type_dicts[CPLkey][CFDkey], 120 | mddir=resultsdirs[MDkey], 121 | cfddir=resultsdirs[CFDkey]) 122 | 123 | except AssertionError as e: 124 | print(e) 125 | pass 126 | except DataNotAvailable as e: 127 | print(e) 128 | pass 129 | except IOError as e: 130 | print(e) 131 | pass 132 | except TypeError as e: 133 | print(e) 134 | pass 135 | -------------------------------------------------------------------------------- /postproclib/exceptions.py: -------------------------------------------------------------------------------- 1 | 2 | class DataNotAvailable(Exception): 3 | pass 4 | 5 | 6 | class NoResultsInDir(Exception): 7 | pass 8 | -------------------------------------------------------------------------------- /postproclib/headerdata.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | import glob 3 | import os 4 | import re 5 | 6 | """ 7 | Automatically read and store variables in a header file formatted 8 | as follows: 9 | 10 | description ; variable_name{(array_element)} ; variable_value 11 | 12 | where the delimiter is a semicolon and elements of multi-dimensional 13 | Fortran arrays are written with curved brackets (). The array is then 14 | stored as individual variable with the same name and the array index 15 | as a suffix (e.g. domain(1:3) is stored as domain1, domain2 and domain3). 
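For example (hypothetical values), a header line such as "Domain extent in x ; domain(1) ; 68.0" would be stored as self.domain1 = '68.0'; all values are kept as strings, so callers convert with int() or float() as needed.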
16 | 17 | """ 18 | class HeaderData: 19 | 20 | def __init__(self,fobj): 21 | for line in fobj: 22 | varname=line.split(';')[1].strip().replace('(','').replace(')','') 23 | varval =line.split(';')[2].strip() 24 | vars(self)[varname] = varval 25 | 26 | class MDHeaderData(HeaderData): 27 | 28 | def __init__(self, fdir): 29 | if (fdir[-1] != '/'): fdir += '/' 30 | fobj = open(fdir+'simulation_header','r') 31 | HeaderData.__init__(self,fobj) 32 | 33 | class Serial_CFD_HeaderData(HeaderData): 34 | 35 | def __init__(self, fdir): 36 | if (fdir[-1] != '/'): fdir += '/' 37 | fobj = open(fdir+'continuum_header','r') 38 | HeaderData.__init__(self,fobj) 39 | 40 | class FEA_HeaderData(HeaderData): 41 | 42 | def __init__(self, fdir): 43 | if (fdir[-1] != '/'): fdir += '/' 44 | fobj = open(fdir+'continuum_header','r') 45 | HeaderData.__init__(self,fobj) 46 | 47 | class openfoam_HeaderData: 48 | 49 | def __init__(self, fdir, readfields=False): 50 | 51 | if (fdir[-1] != '/'): fdir += '/' 52 | self.fdir = fdir 53 | headerfiles = self.get_header_files() 54 | 55 | if readfields: 56 | fieldfiles = self.get_field_files() 57 | readfiles = headerfiles + fieldfiles 58 | else: 59 | readfiles = headerfiles 60 | 61 | headerDict = {} 62 | for filename in readfiles: 63 | if os.path.isfile(filename): 64 | with open(filename) as f: 65 | lines = self.lines_generator_strip(f) 66 | header = self.header_parser(lines) 67 | headerDict[filename.split("/")[-1]] = header 68 | self.headerDict = headerDict 69 | 70 | def get_header_files(self): 71 | #["blockMeshDict", "transportProperties", "controlDict", 72 | # "environmentalProperties", "decomposeParDict"] 73 | paths = [self.fdir + f for f in ['constant', 'system']] 74 | filenames = [] 75 | for path in paths: 76 | files = glob.glob(path + "/*") 77 | for filename in files: 78 | filenames.append(filename) 79 | 80 | filenames.append(self.fdir + "constant/polyMesh/blockMeshDict") 81 | 82 | return filenames 83 | 84 | def get_field_files(self): 85 | 86 | path = self.fdir + "0/" 87 | files = glob.glob(path + "/*") 88 | filenames = [] 89 | for filename in files: 90 | try: 91 | with open(filename) as f: 92 | for line in f: 93 | if "class" in line: 94 | fname = filename.split("/")[-1] 95 | if "volScalarField" in line: 96 | filenames.append(filename) 97 | elif "volVectorField": 98 | filenames.append(filename) 99 | elif "volSymmTensorField": 100 | filenames.append(filename) 101 | else: 102 | continue 103 | except IOError: 104 | pass 105 | 106 | return filenames 107 | 108 | def lines_generator(self, lines): 109 | for line in lines: 110 | if not line: 111 | continue 112 | yield line 113 | 114 | def lines_generator_strip(self, lines): 115 | for line in lines: 116 | line = line.strip() 117 | if not line: 118 | continue 119 | yield line 120 | 121 | def stringtolist(self, s): 122 | v = s.replace("(","").replace(")","").split() 123 | r = [] 124 | for i in v: 125 | try: 126 | r.append(int(i)) 127 | except ValueError: 128 | try: 129 | r.append(float(i)) 130 | except ValueError: 131 | r.append(i) 132 | return r 133 | 134 | def header_parser(self, lines): 135 | 136 | """ 137 | Recursive header parser which 138 | builds up a dictonary of input files 139 | """ 140 | ft = True 141 | Out = {} 142 | prevline = "" 143 | for line in lines: 144 | 145 | #Skip comments 146 | if line[0:2] == "/*": 147 | break_next = False 148 | for line in lines: 149 | if break_next: 150 | break 151 | if '\*' in line: 152 | break_next=True 153 | if "//" in line: 154 | continue 155 | 156 | #Split line into list 157 | split = 
line.split() 158 | 159 | #One elemnt means we will go down to another level of nesting 160 | if len(split) == 1: 161 | if (line == '{' or line == '('): 162 | try: 163 | Out[prevline] = self.header_parser(lines) 164 | except TypeError: 165 | continue 166 | elif line == ');': 167 | return Out 168 | elif line == '}': 169 | return Out 170 | else: 171 | #This skip here avoids field contents 172 | try: 173 | float(line) 174 | return Out 175 | except ValueError: 176 | Out[line] = None 177 | 178 | #If ends with a semi-colon then we define a value 179 | elif len(split) == 2: 180 | if line[-1] == ";": 181 | key, value = split 182 | Out[key] = value.strip(';') 183 | else: 184 | print(("Error, two values not a statement", line)) 185 | #Otherwise we have to parse as needed 186 | elif len(split) > 2: 187 | key = split[0] 188 | if ("[" in line): 189 | indx = line.find("]") 190 | afterunits = line[indx+1:].replace(";","") 191 | Out[key] = self.stringtolist(afterunits) 192 | elif ("(" in key): 193 | if ft: 194 | ft = False 195 | Out = [] 196 | Out.append(self.stringtolist(line)) 197 | else: 198 | #As we have a key, we assume multiple brackets on line 199 | indx = line.find(key) 200 | remainingline = line[indx+len(key):] 201 | rsplit = re.findall("\((.*?)\)", remainingline) 202 | #rsplit = remainingline.replace("(",")").split(")")[:-1] 203 | vals = [] 204 | for s in rsplit: 205 | vals.append(self.stringtolist(s)) 206 | Out[key] = vals 207 | 208 | if line[-1] == ");": 209 | return Out 210 | 211 | if line[-1] == "}": 212 | return Out 213 | 214 | prevline = line 215 | 216 | return Out 217 | 218 | 219 | #if __name__ == "__main__": 220 | # fdir = "/home/es205/codes/cpl_granlammmps/OpenFOAM-3.0.1_LAMMPS-dev/OpenFOAM-3.0.1_coupled/runs/Couette_Gran/openfoam" 221 | # filename = fdir+"/constant/polyMesh/blockMeshDict" 222 | # #filename = fdir+"/system/decomposeParDict" 223 | 224 | # with open(filename) as fobj: 225 | # lines = lines_generator(fobj) 226 | # header = header_parser(lines) 227 | -------------------------------------------------------------------------------- /postproclib/lammpsfields.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from .field import Field 4 | from .lammpsrawdata import LAMMPS_RawData 5 | 6 | class LAMMPSField(Field): 7 | 8 | def __init__(self, fdir, fname='3dgrid'): 9 | self.fname = fname 10 | Raw = LAMMPS_RawData(fdir, self.fname, self.readnames) 11 | self.nperbin = Raw.nperbin 12 | Field.__init__(self, Raw) 13 | self.axislabels = ['x', 'y', 'z'] 14 | self.plotfreq = Raw.plotfreq 15 | 16 | class LAMMPS_complexField(LAMMPSField): 17 | 18 | """ 19 | Complex fields that inherit LAMMPSField AND contain LAMMPSField 20 | objects require extra calculations. "Read" and "average_data" routines 21 | are commonly overridden. Parameters for the complex field are usually 22 | inherited from one of the sub-fields. 
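For instance, LAMMPS_vField below holds a LAMMPS_mField and a LAMMPS_pField, reads both in its overridden read() and divides momentum by mass to obtain velocity.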
23 | """ 24 | 25 | def inherit_parameters(self, subfieldobj): 26 | self.nperbin = subfieldobj.nperbin 27 | self.axislabels = subfieldobj.axislabels 28 | self.labels = subfieldobj.labels 29 | self.plotfreq = subfieldobj.plotfreq 30 | 31 | 32 | # ---------------------------------------------------------------------------- 33 | # Simple fields 34 | 35 | class LAMMPS_pField(LAMMPSField): 36 | readnames = ['vx', 'vy', 'vz'] 37 | labels = readnames 38 | 39 | 40 | class LAMMPS_mField(LAMMPSField): 41 | readnames = ['Ncount'] 42 | labels = readnames 43 | 44 | class LAMMPS_mSurfField(LAMMPSField): 45 | readnames = ['c_mSurf'] 46 | labels = readnames 47 | 48 | class LAMMPS_mWaterField(LAMMPSField): 49 | readnames = ['c_mWater'] 50 | labels = readnames 51 | 52 | class LAMMPS_TField(LAMMPSField): 53 | readnames = ['temp'] 54 | labels = readnames 55 | 56 | class LAMMPS_PressureField(LAMMPSField): 57 | readnames = ['c_Pressure[1]', 'c_Pressure[2]', 'c_Pressure[3]'] 58 | labels = readnames 59 | 60 | class LAMMPS_ShearStressField(LAMMPSField): 61 | readnames = ['c_Pressure[4]', 'c_Pressure[5]', 'c_Pressure[6]'] 62 | labels = readnames 63 | 64 | class LAMMPS_KineticEnergyField(LAMMPSField): 65 | readnames = ['c_myKE'] 66 | labels = readnames 67 | 68 | class LAMMPS_PotentialEnergyField(LAMMPSField): 69 | readnames = ['c_eng'] 70 | labels = readnames 71 | 72 | 73 | # ---------------------------------------------------------------------------- 74 | # Complex fields 75 | class LAMMPS_dField(LAMMPS_complexField): 76 | 77 | def __init__(self, fdir, fname='3dgrid'): 78 | self.nField = LAMMPS_mField(fdir, fname) 79 | Field.__init__(self, self.nField.Raw) 80 | self.inherit_parameters(self.nField) 81 | 82 | def read(self, startrec, endrec, binlimits=None, **kwargs): 83 | 84 | gridvolumes = self.nField.Raw.get_gridvolumes(binlimits=binlimits) 85 | gridvolumes = np.expand_dims(gridvolumes,axis=-1) 86 | 87 | # Read 4D time series from startrec to endrec 88 | ndata = self.nField.read(startrec, endrec, binlimits=binlimits) 89 | density = np.divide(ndata, gridvolumes) 90 | 91 | return density 92 | 93 | def averaged_data(self, startrec, endrec, avgaxes=(), binlimits=None, **kwargs): 94 | 95 | nrecs = endrec - startrec + 1 96 | gridvolumes = self.nField.Raw.get_gridvolumes(binlimits=binlimits) 97 | gridvolumes = np.expand_dims(gridvolumes,axis=-1) 98 | 99 | # Read 4D time series from startrec to endrec 100 | ndata = self.nField.read(startrec, endrec, binlimits=binlimits) 101 | #mdata = np.divide(mdata,float(self.plotfreq)) 102 | 103 | if (avgaxes != ()): 104 | ndata = np.sum(ndata,axis=avgaxes) 105 | # gridvolumes should only be length=1 in time & component axis 106 | gridvolumes = np.sum(gridvolumes,axis=avgaxes) 107 | 108 | density = np.divide(ndata,gridvolumes*nrecs) 109 | 110 | return density 111 | 112 | #Velocity field 113 | class LAMMPS_vField(LAMMPS_complexField): 114 | 115 | def __init__(self, fdir, fname='3dgrid'): 116 | self.mField = LAMMPS_mField(fdir, fname) 117 | self.pField = LAMMPS_pField(fdir, fname) 118 | Field.__init__(self, self.pField.Raw) 119 | self.inherit_parameters(self.pField) 120 | 121 | def read(self, startrec, endrec, binlimits=None, **kwargs): 122 | 123 | mdata = self.mField.read(startrec, endrec, binlimits=binlimits, **kwargs) 124 | pdata = self.pField.read(startrec, endrec, binlimits=binlimits, **kwargs) 125 | 126 | # Divide and patch any NaNs 127 | vdata = np.divide(pdata, mdata) 128 | vdata[np.isnan(vdata)] = 0.0 129 | 130 | return vdata 131 | 132 | def averaged_data(self, startrec, endrec, 
avgaxes=(), binlimits=None, **kwargs): 133 | 134 | mdata = self.mField.read(startrec, endrec, binlimits=binlimits, **kwargs) 135 | pdata = self.pField.read(startrec, endrec, binlimits=binlimits, **kwargs) 136 | 137 | if (avgaxes != ()): 138 | mdata = np.sum(mdata, axis=avgaxes) 139 | pdata = np.sum(pdata, axis=avgaxes) 140 | 141 | # Divide and patch any NaNs 142 | vdata = np.divide(pdata, mdata) 143 | vdata[np.isnan(vdata)] = 0.0 144 | 145 | return vdata 146 | 147 | 148 | 149 | # Momentum density field 150 | class LAMMPS_momField(LAMMPS_complexField): 151 | 152 | def __init__(self, fdir, fname='3dgrid'): 153 | self.pField = LAMMPS_pField(fdir, fname) 154 | Field.__init__(self,self.pField.Raw) 155 | self.inherit_parameters(self.pField) 156 | 157 | def read(self, startrec, endrec, binlimits=None, **kwargs): 158 | 159 | gridvolumes = self.pField.Raw.get_gridvolumes(binlimits=binlimits) 160 | gridvolumes = np.expand_dims(gridvolumes,axis=-1) 161 | 162 | # Read 4D time series from startrec to endrec 163 | pdata = self.pField.read(startrec, endrec, binlimits=binlimits, **kwargs) 164 | #pdata = np.divide(pdata,float(self.plotfreq)) 165 | 166 | momdensity = np.divide(pdata,gridvolumes) 167 | 168 | return momdensity 169 | 170 | def averaged_data(self, startrec, endrec, binlimits=None, avgaxes=(), **kwargs): 171 | 172 | nrecs = endrec - startrec + 1 173 | gridvolumes = self.pField.Raw.get_gridvolumes(binlimits=binlimits) 174 | gridvolumes = np.expand_dims(gridvolumes, axis=-1) 175 | 176 | # Read 4D time series from startrec to endrec 177 | pdata = self.pField.read(startrec, endrec, binlimits=binlimits, **kwargs) 178 | #pdata = np.divide(pdata,float(self.plotfreq)) 179 | 180 | if (avgaxes != ()): 181 | pdata = np.sum(pdata, axis=avgaxes) 182 | # gridvolumes should only be length=1 in time & component axis 183 | gridvolumes = np.sum(gridvolumes, axis=avgaxes) 184 | 185 | momdensity = np.divide(pdata, gridvolumes*nrecs) 186 | 187 | return momdensity 188 | 189 | 190 | 191 | # Total Energy field 192 | class LAMMPS_TotalEnergyField(LAMMPS_complexField): 193 | 194 | def __init__(self, fdir, fname='3dgrid'): 195 | self.KEField = LAMMPS_KineticEnergyField(fdir, fname) 196 | self.PEField = LAMMPS_PotentialEnergyField(fdir, fname) 197 | Field.__init__(self, self.KEField.Raw) 198 | self.inherit_parameters(self.KEField) 199 | 200 | def read(self, startrec, endrec, binlimits=None, **kwargs): 201 | 202 | KEdata = self.KEField.read(startrec, endrec, binlimits=binlimits, **kwargs) 203 | PEdata = self.PEField.read(startrec, endrec, binlimits=binlimits, **kwargs) 204 | Tdata = KEdata + PEdata 205 | 206 | return Tdata 207 | -------------------------------------------------------------------------------- /postproclib/lammpspostproc.py: -------------------------------------------------------------------------------- 1 | import os 2 | from .lammpsfields import * 3 | from .postproc import PostProc 4 | from .pplexceptions import NoResultsInDir 5 | 6 | class LAMMPS_PostProc(PostProc): 7 | 8 | """ 9 | Post processing class for LAMMPS runs 10 | """ 11 | 12 | def __init__(self,resultsdir, **kwargs): 13 | self.resultsdir = resultsdir 14 | 15 | # Check directory exists before trying to instantiate object 16 | if (not os.path.isdir(self.resultsdir)): 17 | print(("Directory " + self.resultsdir + " not found")) 18 | raise IOError 19 | 20 | possibles = {'vsum': LAMMPS_pField, 21 | 'nsum': LAMMPS_mField, 22 | 'mSurf': LAMMPS_mSurfField, 23 | 'mWater': LAMMPS_mWaterField, 24 | 'Density': LAMMPS_dField, 25 | 'Velocity': LAMMPS_vField, 
26 | 'Momentum': LAMMPS_momField, 27 | 'Temperature': LAMMPS_TField, 28 | 'Pressure': LAMMPS_PressureField, 29 | 'Shear Stess': LAMMPS_ShearStressField, 30 | 'Kinetic Energy': LAMMPS_KineticEnergyField, 31 | 'Potential Energy': LAMMPS_PotentialEnergyField, 32 | 'Total Energy': LAMMPS_TotalEnergyField 33 | } 34 | 35 | 36 | #Try to get fnames from log.lammps 37 | fname = "" 38 | logfile = self.resultsdir + "/log.lammps" 39 | if (os.path.isfile(logfile)): 40 | with open(logfile, "r") as f: 41 | n = "3dgrid" 42 | for l in f: 43 | if ("chunk/atom bin/3d") in l: 44 | if ("cfdbccompute" in l): 45 | continue 46 | nl = next(f) 47 | if "ave/chunk" in nl: 48 | #Take index not include word file 49 | indx = nl.find("file")+4 50 | if indx != -1: 51 | fname = nl[indx:].split("/")[-1].split()[0] 52 | else: 53 | print(("logfile ", logfile, " appears to be corrupted " + 54 | "so cannot determine output filename")) 55 | # n=l.split()[1] 56 | # if n in l and "ave/chunk" in l: 57 | # indx = l.find("file") 58 | # if indx != -1: 59 | # fname = l[indx:].split()[1] 60 | # else: 61 | # print(("logfile ", logfile, " appears to be corrupted " + 62 | # "so cannot determine output filename")) 63 | else: 64 | pass 65 | #print("logfile ", logfile, " not found") 66 | #raise NoResultsInDir 67 | 68 | if fname == "": 69 | print("fname not defined, trying 3dgrid") 70 | fname = "3dgrid" 71 | 72 | self.plotlist = {} 73 | if os.path.exists(self.resultsdir + fname): 74 | for key, field in list(possibles.items()): 75 | try: 76 | self.plotlist[key] = field(self.resultsdir, fname) 77 | except IOError: 78 | pass 79 | except ValueError: 80 | pass 81 | 82 | if (len(self.plotlist) == 0): 83 | raise NoResultsInDir 84 | -------------------------------------------------------------------------------- /postproclib/lammpsrawdata.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from .rawdata import RawData 4 | from .pplexceptions import DataNotAvailable 5 | 6 | class LAMMPS_RawData(RawData): 7 | 8 | def __init__(self, fdir, fname, readnames): 9 | 10 | if (fdir[-1] != '/'): fdir += '/' 11 | self.fdir = fdir 12 | self.fname = fname 13 | self.fobj = open(fdir + fname, 'rb') 14 | self.recoffsets = self.get_recoffsets() 15 | self.maxrec = len(self.recoffsets) - 1 16 | self.plotfreq = self.get_plotfreq() 17 | self.grid = self.get_grid() 18 | self.nbins = [len(self.grid[i]) for i in range(len(self.grid))] 19 | self.readindices = self.get_readindices(readnames) 20 | self.nperbin = len(self.readindices) 21 | 22 | def get_recoffsets(self): 23 | 24 | offsets = [] 25 | while True: 26 | 27 | tell = self.fobj.tell() 28 | line = self.fobj.readline() 29 | 30 | if (len(line.split()) == 3): 31 | offsets.append(tell) 32 | 33 | if (line == '' or line == b''): 34 | break 35 | 36 | return offsets 37 | 38 | def get_plotfreq(self): 39 | 40 | self.fobj.seek(self.recoffsets[0]) 41 | it, ngpoints, nsamples = self.fobj.readline().split() 42 | 43 | return int(it) 44 | 45 | def get_grid(self): 46 | 47 | def uniqueify(seq): 48 | """ 49 | "Remove repeated values, e.g. 
[1,2,2,3,4,3] => [1,2,3,4] 50 | """ 51 | seen = set() 52 | seen_add = seen.add 53 | return [ x for x in seq if not (x in seen or seen_add(x))] 54 | 55 | self.fobj.seek(self.recoffsets[0]) 56 | it, ngpoints, nsamples = self.fobj.readline().split() 57 | xlist = [] 58 | ylist = [] 59 | zlist = [] 60 | for point in range(int(ngpoints)): 61 | lineitems = self.fobj.readline().split() 62 | xlist.append(float(lineitems[1])) 63 | ylist.append(float(lineitems[2])) 64 | zlist.append(float(lineitems[3])) 65 | 66 | gridx = np.array(uniqueify(xlist)) 67 | gridy = np.array(uniqueify(ylist)) 68 | gridz = np.array(uniqueify(zlist)) 69 | 70 | self.domain = [gridx.max(), gridy.max(), gridz.max()] 71 | 72 | return [gridx, gridy, gridz] 73 | 74 | def get_readindices(self, readnames): 75 | 76 | self.fobj.seek(0) 77 | self.fobj.readline() 78 | self.fobj.readline() 79 | line = self.fobj.readline() 80 | if (b"Chunk Coord1 Coord2 Coord3" in line): 81 | readindices = [] 82 | linesplit = line.decode("ascii").split()[1:] # Ignore # character at beginning 83 | for name in readnames: 84 | readindices.append(linesplit.index(name)) 85 | else: 86 | print("Couldn't find Chunk coordinate info in "+self.fname) 87 | raise DataNotAvailable 88 | 89 | return readindices 90 | 91 | def read(self, startrec, endrec, binlimits=None, verbose=False, 92 | missingrec='raise'): 93 | 94 | # Store how many records are to be read 95 | nrecs = endrec - startrec + 1 96 | # Allocate enough memory in the C library to efficiently insert 97 | # into bindata 98 | recitems = np.product(self.nbins)*self.nperbin 99 | bindata = np.empty(nrecs*recitems) 100 | 101 | if (verbose): 102 | print(('Reading {0:s} recs {1:5d} to {2:5d}'.format( 103 | self.fname,startrec,endrec))) 104 | 105 | cnt = 0 106 | for plusrec in range(0, nrecs): 107 | 108 | # Go to the record, read how many lines 109 | self.fobj.seek(self.recoffsets[startrec + plusrec]) 110 | recdetails = self.fobj.readline().split() 111 | reclines = int(recdetails[1]) 112 | 113 | pos = plusrec*recitems 114 | # Loop over record's lines 115 | for plusline in range(reclines): 116 | lineitems = self.fobj.readline().split() 117 | for index in self.readindices: 118 | bindata[cnt] = float(lineitems[index]) 119 | cnt += 1 120 | 121 | bindata = np.reshape(bindata,[nrecs, 122 | self.nbins[0], 123 | self.nbins[1], 124 | self.nbins[2], 125 | self.nperbin]) 126 | bindata = np.transpose(bindata, (1,2,3,0,4)) 127 | 128 | # If bin limits are specified, return only those within range 129 | if (binlimits): 130 | 131 | if (verbose): 132 | print(('bindata.shape = {0:s}'.format(str(bindata.shape)))) 133 | print(('Extracting bins {0:s} from {1:s} '.format( 134 | str(binlimits),self.fname))) 135 | # Defaults 136 | lower = [0]*3 137 | upper = [i for i in bindata.shape] 138 | 139 | for axis in range(3): 140 | if (binlimits[axis] == None): 141 | continue 142 | else: 143 | lower[axis] = binlimits[axis][0] 144 | upper[axis] = binlimits[axis][1] 145 | 146 | bindata = bindata[lower[0]:upper[0], 147 | lower[1]:upper[1], 148 | lower[2]:upper[2], :, :] 149 | 150 | 151 | if (verbose): 152 | print(('new bindata.shape = {0:s}'.format(str(bindata.shape)))) 153 | 154 | 155 | return bindata 156 | 157 | def get_gridvolumes(self,binlimits=None): 158 | 159 | try: 160 | binspaces = self.grid 161 | except AttributeError: 162 | nbins, binspaces, dxyz = self.get_gridtopology() 163 | 164 | x, y, z = np.meshgrid(binspaces[0],binspaces[1],binspaces[2], 165 | indexing='ij') 166 | 167 | dx = binspaces[0][1] - binspaces[0][0] 168 | dy = binspaces[1][1] - 
binspaces[1][0] 169 | dz = binspaces[2][1] - binspaces[2][0] 170 | 171 | gridvolumes = np.ones(x.shape)*dx*dy*dz 172 | 173 | # If bin limits are specified, return only those within range 174 | if (binlimits): 175 | 176 | # Defaults 177 | lower = [0]*3 178 | upper = [i for i in gridvolumes.shape] 179 | 180 | for axis in range(3): 181 | if (binlimits[axis] == None): 182 | continue 183 | else: 184 | lower[axis] = binlimits[axis][0] 185 | upper[axis] = binlimits[axis][1] 186 | 187 | gridvolumes = gridvolumes[lower[0]:upper[0], 188 | lower[1]:upper[1], 189 | lower[2]:upper[2]] 190 | 191 | # Ensure gridvolumes is the right shape for subsequent 192 | # broadcasting with other fields 193 | gridvolumes = np.expand_dims(gridvolumes,-1) 194 | return gridvolumes 195 | -------------------------------------------------------------------------------- /postproclib/mdmacroprops.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import numpy as np 3 | import scipy as sp 4 | 5 | class MacroProps: 6 | 7 | def __init__(self,fdir): 8 | self.macro = np.genfromtxt(fdir+'/macroscopic_properties', 9 | delimiter=';', names=True) 10 | def get(self,string): 11 | return self.macro[string] 12 | 13 | def get_mean(self,string,start=1000): 14 | return np.mean(self.macro[string][start:]) 15 | 16 | def get_std(self,string,start=1000): 17 | return np.std(self.macro[string][start:]) 18 | 19 | def get_stderror(self,string,start=1000): 20 | s = np.std(self.macro[string][start:]) 21 | n = self.macro[string][start:].shape[0] 22 | return np.divide(s,np.sqrt(n)) 23 | 24 | if __name__ == "__main__": 25 | obj = MacroProps('./') 26 | prop = 'Pressure' 27 | print(('For ' + prop + ' mean = ' + str(obj.get_mean(prop)) + ' with standard deviation = ' 28 | + str(obj.get_std(prop)) + ' and standard error = ' + str(obj.get_stderror(prop)) )) 29 | -------------------------------------------------------------------------------- /postproclib/openfoampostproc.py: -------------------------------------------------------------------------------- 1 | import os 2 | from .openfoamfields import * 3 | from .postproc import PostProc 4 | from .pplexceptions import NoResultsInDir 5 | import glob 6 | 7 | def walklevel(some_dir, level=1): 8 | some_dir = some_dir.rstrip(os.path.sep) 9 | assert os.path.isdir(some_dir) 10 | num_sep = some_dir.count(os.path.sep) 11 | for root, dirs, files in os.walk(some_dir): 12 | yield root, dirs, files 13 | num_sep_this = root.count(os.path.sep) 14 | if num_sep + level <= num_sep_this: 15 | del dirs[:] 16 | 17 | class OpenFOAM_PostProc(PostProc): 18 | 19 | """ 20 | Post processing class for CFD runs 21 | """ 22 | 23 | def __init__(self, resultsdir, **kwargs): 24 | self.resultsdir = resultsdir 25 | self.plotlist = {} 26 | 27 | # Check directory exists before instantiating object and check 28 | # which files associated with plots are in directory 29 | if (not os.path.isdir(self.resultsdir)): 30 | print(("Directory " + self.resultsdir + " not found")) 31 | raise IOError 32 | 33 | # # Raise if no results in directory 34 | # try: 35 | # fobj = open(self.resultsdir + '0/U','r') 36 | # except IOError: 37 | # raise NoResultsInDir 38 | 39 | # possibles = {'U': OpenFOAM_vField, 40 | # 'P': OpenFOAM_PField, 41 | # 'eps': OpenFOAM_epsField, 42 | # 'F': OpenFOAM_FField} 43 | 44 | # for key, field in possibles.items(): 45 | # try: 46 | # self.plotlist[key] = field(self.resultsdir) 47 | # except AssertionError: 48 | # pass 49 | 50 | #We need to take the first record as lots of 
fields are not 51 | #defined in the initial condition.. 52 | parallel_run = False 53 | controlDictfound = False 54 | #possibles = [] 55 | writecontrol ='' 56 | #Use walklevel to prevent finding other openfoam results 57 | #which might be in the same directory 58 | for root, dirs, files in walklevel(self.resultsdir, 2): 59 | if ("controlDict" in files): 60 | controlDictfound = True 61 | with open(root+"/controlDict") as f: 62 | for line in f: 63 | try: 64 | if "writeControl" in line: 65 | writecontrol = (line.replace("\t"," ") 66 | .replace(";","") 67 | .replace("\n","") 68 | .split(" ")[-1]) 69 | if "writeInterval" in line: 70 | writeInterval = float(line.replace("\t"," ") 71 | .replace(";","") 72 | .replace("\n","") 73 | .split(" ")[-1]) 74 | 75 | if "deltaT" in line: 76 | deltaT = float(line.replace("\t"," ") 77 | .replace(";","") 78 | .replace("\n","") 79 | .split(" ")[-1]) 80 | except ValueError: 81 | print(("Convert failed in OpenFOAM_reader", line)) 82 | 83 | if "processor" in root and not parallel_run: 84 | #Check if at least two processor folders 85 | if (os.path.isdir(root+'/../processor0') and os.path.isdir(root+'/../processor1')): 86 | print("Assuming parallel run as processor0, processor1, etc found in " 87 | + self.resultsdir + ".\n") 88 | parallel_run = True 89 | else: 90 | # Check number of folders in processor0 folder as on older versions of OpenFOAM 91 | # this is filled in a one processor parallel run but not on later ones 92 | rmlist = ["constant", "system", "processor0"] 93 | try: 94 | rc = next(os.walk(root+'/../'))[1] 95 | pc = next(os.walk(root))[1] 96 | rootcontents = [i for i in rc if i not in rmlist] 97 | proccontents = [i for i in pc if i not in rmlist] 98 | if len(proccontents) > len(rootcontents): 99 | parallel_run = True 100 | print("Assuming parallel run as processor folder found in " 101 | + self.resultsdir + " with " + str(len(proccontents)) + " in.\n") 102 | except StopIteration: 103 | pass 104 | 105 | #Check if data files exist 106 | if not controlDictfound: 107 | raise NoResultsInDir 108 | 109 | if "timeStep" in writecontrol: 110 | writeInterval = writeInterval*deltaT 111 | elif "runTime" in writecontrol: 112 | writeInterval = writeInterval 113 | elif "adjustable" in writecontrol: 114 | writeInterval = writeInterval 115 | else: 116 | raise IOError("Writecontrol keyword not found in controlDict") 117 | 118 | 119 | print(("parallel_run = ", parallel_run, 120 | "writeInterval = ", writeInterval, 121 | "writecontrol = ", writecontrol)) 122 | 123 | #Look for file at first write interval 124 | if parallel_run: 125 | path = self.resultsdir + "processor0/" + str(writeInterval) + '/*' 126 | if not os.path.isdir(path.replace("*","")): 127 | path = self.resultsdir + "processor0/" + str(int(writeInterval)) + '/*' 128 | if not os.path.isdir(path.replace("*","")): 129 | path = self.resultsdir + "processor0/0/*" 130 | else: 131 | path = self.resultsdir + str(writeInterval) + '/*' 132 | if not os.path.isdir(path.replace("*","")): 133 | path = self.resultsdir + str(int(writeInterval)) + '/*' 134 | if not os.path.isdir(path.replace("*","")): 135 | print("Cannot find first record at ", path.replace("*",""), " Reverting to 0") 136 | path = self.resultsdir + "/0/*" 137 | 138 | #Try to parse any other files 139 | self.plotlist = {} 140 | files = glob.glob(path) 141 | 142 | for filename in files: 143 | try: 144 | #Handle if file is binary format 145 | with open(filename, encoding="utf8", errors='ignore') as f: 146 | for line in f: 147 | if "class" in line: 148 | fname = 
filename.split("/")[-1] 149 | if "volScalarField" in line: 150 | S = OpenFOAM_ScalarField(self.resultsdir, fname, parallel_run) 151 | elif "volVectorField" in line: 152 | S = OpenFOAM_VectorField(self.resultsdir, fname, parallel_run) 153 | elif "volSymmTensorField" in line: 154 | S = OpenFOAM_SymmTensorField(self.resultsdir, fname, parallel_run) 155 | elif "surfaceScalarField" in line: 156 | print((filename, "is a surfaceScalarField")) 157 | break 158 | else: 159 | continue 160 | self.plotlist.update({fname:S}) 161 | except IOError: 162 | print(("Error reading ", filename)) 163 | pass 164 | except IndexError: 165 | print(("Error reading ", filename)) 166 | pass 167 | except UnicodeDecodeError: 168 | print(("Error reading ", filename, " suspect binary format")) 169 | raise 170 | except: 171 | print(("Error reading ", filename)) 172 | raise 173 | 174 | -------------------------------------------------------------------------------- /postproclib/postproc.py: -------------------------------------------------------------------------------- 1 | class PostProc: 2 | 3 | def __str__(self): 4 | string = ('\nAvailable outputs in ' + self.resultsdir + ' include:\n\n') 5 | string += ('\t{0:^24s}\t{1:>10s}\n'.format('field', 'records')) 6 | #string += ('\t{0:^24s}\t{1:^10s}\n'.format('-'*24, '-'*10)) 7 | for key,field in sorted(self.plotlist.items()): 8 | line = '\t{0:<24s}=\t{1:>10f},\n'.format(key, field.maxrec) 9 | string += line 10 | return string 11 | -------------------------------------------------------------------------------- /postproclib/pplexceptions.py: -------------------------------------------------------------------------------- 1 | class DataNotAvailable(Exception): 2 | pass 3 | 4 | class NoResultsInDir(Exception): 5 | pass 6 | 7 | class DataMismatch(Exception): 8 | pass 9 | 10 | class ScriptMissing(Exception): 11 | pass 12 | 13 | class OutsideRecRange(Exception): 14 | pass 15 | -------------------------------------------------------------------------------- /postproclib/psiboilrawdata.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | import numpy as np 3 | import os 4 | 5 | from .rawdata import RawData 6 | from .pplexceptions import DataNotAvailable 7 | 8 | class PsiBoil_RawData(RawData): 9 | 10 | def __init__(self, fdir): 11 | self.fdir = fdir 12 | self.subdomlist = self.get_subdomlist() 13 | self.grid = self.get_grid() 14 | self.npercell = self.get_npercell() 15 | self.maxrec = len(self.subdomlist)-1 # count from 0 16 | self.header = None 17 | 18 | def get_grid(self): 19 | try: 20 | fobj = "PUTPSIBOILFILETYPEHERE" 21 | except IOError: 22 | raise DataNotAvailable 23 | 24 | #Read Header data first then remaining file 25 | with open(self.fdir+self.subdomlist[0],'r') as f: 26 | imin, imax, jmin, jmax, kmin, kmax, nx, ny, nz = np.fromfile(f,dtype="i4",count=9) 27 | data = np.fromfile(f,dtype="f8",count=(nx+ny+nz)) 28 | 29 | # Number of grid points in main code 30 | self.nx = nx 31 | self.ny = ny 32 | self.nz = nz 33 | 34 | # Number of cell-centered values written to files 35 | self.nrx = self.nx 36 | self.nry = self.ny 37 | self.nrz = self.nz 38 | 39 | #Grid data 40 | gridx = data[0:nx] 41 | gridy = data[nx:nx+ny] 42 | gridz = data[nx+ny:nx+ny+nz] 43 | grid = [gridx,gridy,gridz] 44 | 45 | # Domain lengths 46 | self.xL = np.max(gridx) 47 | self.yL = np.max(gridy) 48 | self.zL = np.max(gridz) 49 | 50 | # Grid spacing 51 | self.dx = np.diff(gridx) 52 | self.dy = np.diff(gridy) 53 | self.dz = np.diff(gridz) 54 | 55 | return grid 56 | 57 | def get_subdomlist(self): 58 | 59 | def get_int(name): 60 | string = name.replace('.bin','') 61 | integer = name.split('_')[-1] 62 | 63 | return int(integer) 64 | 65 | subdoms = [] 66 | for filename in os.listdir(self.fdir): 67 | if (filename.find('.bin') != -1): 68 | subdoms.append(filename) 69 | 70 | if (len(subdoms) == 0): 71 | raise DataNotAvailable 72 | 73 | return sorted(subdoms,key=get_int) 74 | 75 | def get_npercell(self): 76 | dprealbytes = 8 # 8 for dp float 77 | ngridpoints = self.nrx * self.nry * self.nrz 78 | filepath = self.fdir + self.subdomlist[0] 79 | filesize = os.path.getsize(filepath) 80 | npercell = filesize / (dprealbytes*ngridpoints) 81 | return npercell 82 | 83 | def read(self,startrec,endrec,binlimits=None,verbose=False,**kwargs): 84 | 85 | nrecs = endrec - startrec + 1 86 | # Efficient memory allocation 87 | subdata = np.empty((self.nrx,self.nry,self.nrz,nrecs,self.npercell)) 88 | 89 | # Loop through files and insert data 90 | for plusrec in range(0,nrecs): 91 | 92 | fpath = self.fdir + self.get_subdomlist().pop(startrec+plusrec) 93 | with open(fpath,'rb') as fobj: 94 | data = np.fromfile(fobj,dtype='d') 95 | # zxy ordered in file 96 | try: 97 | data = np.reshape(data,[self.nrz,self.nrx,self.nry,self.npercell], 98 | order='F') 99 | except ValueError: 100 | print('Data in CFD file seems wrong -- maybe it includes halos?' 
101 | 'Attempting to correct') 102 | if (data.shape[0] > self.nrz*self.nrx*self.nry*self.npercell): 103 | data = np.reshape(data,[self.nrz+1,self.nrx+1,self.nry,self.npercell], 104 | order='F') 105 | data = data[:-1,:-1,:,:] 106 | else: 107 | data = np.reshape(data,[self.nrz-1,self.nrx-1,self.nry,self.npercell], 108 | order='F') 109 | data = data[:-1,:-1,:,:] 110 | 111 | # change to xyz ordering 112 | data = np.transpose(data,(1,2,0,3)) 113 | # insert into array 114 | subdata[:,:,:,plusrec,:] = data 115 | 116 | # If bin limits are specified, return only those within range 117 | if (binlimits): 118 | 119 | if (verbose): 120 | print(('subdata.shape = {0:s}'.format(str(subdata.shape)))) 121 | print(('Extracting bins {0:s}'.format(str(binlimits)))) 122 | 123 | # Defaults 124 | lower = [0]*3 125 | upper = [i for i in subdata.shape] 126 | 127 | for axis in range(3): 128 | if (binlimits[axis] == None): 129 | continue 130 | else: 131 | lower[axis] = binlimits[axis][0] 132 | upper[axis] = binlimits[axis][1] 133 | 134 | subdata = subdata[lower[0]:upper[0], 135 | lower[1]:upper[1], 136 | lower[2]:upper[2], :, :] 137 | 138 | return subdata 139 | -------------------------------------------------------------------------------- /postproclib/serial_cfdfields.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | import numpy as np 3 | 4 | from .field import Field 5 | from .serial_cfdrawdata import Serial_CFD_RawData 6 | 7 | # ============================================================================ 8 | # CFDField base class 9 | 10 | class Serial_CFDField(Field): 11 | 12 | nhalos = [0, 1, 0] 13 | 14 | def __init__(self,fdir): 15 | Raw = Serial_CFD_RawData(fdir, self.fname, self.dtype, 16 | self.nperbin) 17 | Field.__init__(self,Raw) 18 | self.header = self.Raw.header 19 | self.axislabels = ['x','y','z'] 20 | 21 | # ============================================================================ 22 | # CFDField derived classes, but calculated by the main code 23 | class Serial_CFD_vField(Serial_CFDField): 24 | 25 | dtype = 'd' 26 | nperbin = 3 27 | 28 | def __init__(self,fdir,fname='continuum_vbins'): 29 | 30 | self.fname = fname 31 | Serial_CFDField.__init__(self,fdir) 32 | self.labels = self.axislabels 33 | self.nperbin = self.Raw.nperbin 34 | self.plotfreq = self.Raw.header.continuum_tplot 35 | assert self.Raw.nperbin > 0 36 | self.labels = ['u','v','w'] 37 | 38 | class Serial_CFD_momField(Serial_CFDField): 39 | 40 | dtype = 'd' 41 | nperbin = 3 42 | 43 | def __init__(self,fdir,fname='continuum_vbins'): 44 | 45 | self.fname = fname 46 | Serial_CFDField.__init__(self,fdir) 47 | self.labels = self.axislabels 48 | self.nperbin = self.Raw.nperbin 49 | self.plotfreq = self.Raw.header.continuum_tplot 50 | assert self.Raw.nperbin > 0 51 | self.labels = ["rhou","rhov","rhow"] 52 | 53 | def read(self,startrec,endrec,**kwargs): 54 | 55 | grid_data = Serial_CFDField.read(self,startrec,endrec,**kwargs) 56 | density = float(self.Raw.header.rho) 57 | grid_data = density*grid_data 58 | return grid_data 59 | 60 | class Serial_CFD_StressField(Serial_CFDField): 61 | 62 | dtype = 'd' 63 | nperbin = 4 64 | 65 | def __init__(self,fdir,fname='continuum_tau_xy'): 66 | 67 | if (fname in ("continuum_tau_xx", "continuum_tau_xy", 68 | "continuum_tau_yx", "continuum_tau_yy")): 69 | self.fname = fname 70 | Serial_CFDField.__init__(self,fdir) 71 | self.labels = ["right", "top", 72 | "left", "bottom"] 73 | Serial_CFDField.__init__(self,fdir) 74 | assert self.Raw.nperbin > 0 
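# Four values are stored per bin, one per cell face, matching the labels right/top/left/bottom set above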
75 | self.nperbin = self.Raw.nperbin 76 | self.plotfreq = self.Raw.header.continuum_tplot 77 | 78 | -------------------------------------------------------------------------------- /postproclib/serial_cfdpostproc.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | import sys 4 | import math as maths 5 | import glob 6 | #import collections 7 | 8 | 9 | from .serial_cfdfields import * 10 | from .headerdata import * 11 | from .postproc import PostProc 12 | from .pplexceptions import NoResultsInDir 13 | 14 | class Serial_CFD_PostProc(PostProc): 15 | 16 | """ 17 | Post processing class for Serial CFD runs 18 | """ 19 | 20 | def __init__(self,resultsdir,**kwargs): 21 | self.resultsdir = resultsdir 22 | self.plotlist = {} #collections.OrderedDict 23 | self.error = {} 24 | self.name = self.resultsdir.split('/')[-2] 25 | 26 | # Check directory exists before instantiating object and check 27 | # which files associated with plots are in directory 28 | self.potentialfiles = ( "continuum_vbins", "continuum_tau_xx", 29 | "continuum_tau_xy","continuum_tau_yx", 30 | "continuum_tau_yy") 31 | 32 | if (not os.path.isdir(self.resultsdir)): 33 | print(("Directory " + self.resultsdir + " not found")) 34 | raise IOError 35 | 36 | self.fields_present = [] 37 | for fname in self.potentialfiles: 38 | if (glob.glob(self.resultsdir+fname)): 39 | self.fields_present.append(fname) 40 | if (glob.glob(self.resultsdir+fname+'.*')): 41 | self.fields_present.append(fname.strip().split('.')[0]) 42 | 43 | self.fieldfiles1 = list(set(self.fields_present) & set(self.potentialfiles)) 44 | 45 | try: 46 | Header1 = Serial_CFD_HeaderData(self.resultsdir) 47 | except IOError: 48 | raise NoResultsInDir 49 | 50 | #Velocity 51 | if 'continuum_vbins' in (self.fieldfiles1): 52 | d1 = Serial_CFD_vField(self.resultsdir, **kwargs) 53 | self.plotlist.update({'u':d1}) 54 | 55 | #Stress 56 | if 'continuum_tau_xx' in (self.fieldfiles1): 57 | M1 = Serial_CFD_StressField(self.resultsdir, 58 | fname='continuum_tau_xx', 59 | **kwargs) 60 | self.plotlist.update({'CFD surface Tau_xx':M1}) 61 | 62 | if 'continuum_tau_xy' in (self.fieldfiles1): 63 | M1 = Serial_CFD_StressField(self.resultsdir, 64 | fname='continuum_tau_xy', **kwargs) 65 | self.plotlist.update({'CFD surface Tau_xy':M1}) 66 | 67 | if 'continuum_tau_yx' in (self.fieldfiles1): 68 | M1 = Serial_CFD_StressField(self.resultsdir, 69 | fname='continuum_tau_yx', **kwargs) 70 | self.plotlist.update({'CFD surface Tau_yx':M1}) 71 | 72 | if 'continuum_tau_yy' in (self.fieldfiles1): 73 | M1 = Serial_CFD_StressField(self.resultsdir, 74 | fname='continuum_tau_yy', **kwargs) 75 | self.plotlist.update({'CFD surface Tau_yy':M1}) 76 | 77 | if (len(self.plotlist) == 0): 78 | raise NoResultsInDir 79 | -------------------------------------------------------------------------------- /postproclib/serial_cfdrawdata.py: -------------------------------------------------------------------------------- 1 | #!
/usr/bin/env python 2 | import numpy as np 3 | import os 4 | import glob 5 | 6 | from .rawdata import RawData 7 | from .headerdata import Serial_CFD_HeaderData 8 | from .pplexceptions import DataNotAvailable 9 | 10 | class Serial_CFD_RawData(RawData): 11 | 12 | def __init__(self,fdir,fname,dtype,nperbin): 13 | if (fdir[-1] != '/'): fdir += '/' 14 | self.fdir = fdir 15 | self.fname = fname 16 | self.dtype = dtype 17 | self.nperbin = nperbin 18 | self.filepath = self.fdir + self.fname + '/' 19 | 20 | if (glob.glob(self.filepath)): 21 | self.separate_outfiles = False 22 | elif (glob.glob(self.filepath+'.*')): 23 | self.separate_outfiles = True 24 | else: 25 | self.separate_outfiles = False 26 | 27 | try: 28 | self.header = self.read_header(fdir) 29 | print((self.header)) 30 | except IOError: 31 | raise DataNotAvailable 32 | 33 | self.grid = self.get_gridtopology() 34 | self.maxrec = self.get_maxrec() 35 | 36 | def read_header(self,fdir): 37 | return Serial_CFD_HeaderData(fdir) 38 | 39 | def get_grid(self): 40 | print("Call to get_grid are depreciated, please use get_gridtopology instead") 41 | return self.get_gridtopology() 42 | 43 | def get_gridtopology(self): 44 | #Number of halos 45 | self.halox = 1 46 | self.haloy = 1 47 | self.haloz = 1 48 | # Number of grid points in main code 49 | self.nx = int(self.header.nx)+1 50 | self.ny = int(self.header.ny)+1 51 | self.nz = int(self.header.nz)+1 52 | # Number of cell-centered values written with halos 53 | self.nrx = int(self.nx)-1 #+2*self.halox 54 | self.nry = int(self.ny)-1+2*self.haloy 55 | self.nrz = 1 #int(self.nz)-1 #+2*self.haloz 56 | # Domain lengths 57 | self.xL = float(self.header.lx) 58 | self.yL = float(self.header.ly) 59 | self.zL = float(self.header.lz) 60 | self.xyzL = [self.xL,self.yL,self.zL] 61 | # Grid spacing 62 | self.dx = self.xL/float(self.nrx) 63 | self.dy = self.yL/float(self.nry-2*self.haloy) 64 | self.dz = self.zL/float(self.nrz) 65 | # Linspaces of cell centers, accounting for halos written in y 66 | gridx = np.linspace(+self.dx/2., self.xL -self.dx/2., num=self.nrx) 67 | # NOTE SHIFTED BY HALF A CELL SO IT MATCHES THE OVERLAPPED MD CASE 68 | gridy = np.linspace(-self.dy/2., self.yL +self.dy/2., num=self.nry)-self.dy 69 | print((self.yL,self.ny,self.dy,gridy)) 70 | gridz = np.linspace(+self.dz/2., self.zL -self.dz/2., num=self.nrz) 71 | grid = [gridx,gridy,gridz] 72 | return grid 73 | 74 | def get_maxrec(self): 75 | 76 | if (glob.glob(self.fdir+self.fname)): 77 | 78 | filesize = os.path.getsize(self.fdir+self.fname) 79 | if (self.dtype == 'i'): 80 | maxrec = filesize/(4*self.nperbin*self.nrx*self.nry*self.nrz) - 1 81 | elif (self.dtype == 'd'): 82 | maxrec = filesize/(8*self.nperbin*self.nrx*self.nry*self.nrz) - 1 83 | else: 84 | quit('Unrecognised dtype in MD_RawData.get_maxrec') 85 | 86 | elif (glob.glob(self.fdir+self.fname+'.*')): 87 | 88 | filelist = glob.glob(self.fdir+self.fname+'.*') 89 | sortedlist = sorted(filelist) 90 | maxrec = int(sortedlist[-1].split('.')[-1]) 91 | 92 | else: 93 | print(('Neither ' + self.fname + ' nor ' + self.fname + '.* exist.')) 94 | raise DataNotAvailable 95 | 96 | return maxrec 97 | 98 | def read(self, startrec, endrec, binlimits=None, verbose=False, missingrec='raise'): 99 | 100 | """ 101 | Required inputs: 102 | 103 | startrec - seek a specific record with this integer, count 104 | from 0. 105 | endrec - record at which to finish (integer) 106 | 107 | Return: 108 | 109 | bindata - 4D array of data in one record that was 110 | read from the binary data file. 
The size 111 | is (nbinsx, nbinsy, nbinsz, nperbin) or 112 | the equivalent in cylindrical polar. 113 | 114 | """ 115 | 116 | #return_zeros if data cannot be obtained 117 | return_zeros = False 118 | 119 | # Store how many records are to be read 120 | nrecs = endrec - startrec + 1 121 | # Allocate enough memory in the C library to efficiently insert 122 | # into bindata 123 | recitems = self.nrx*self.nry*self.nrz*self.nperbin 124 | bindata = np.empty(nrecs*recitems) 125 | 126 | # Check whether the records are written separately 127 | # If so 128 | if (self.separate_outfiles): 129 | 130 | # Loop through files and append data 131 | for plusrec in range(0,nrecs): 132 | 133 | filepath = self.fdir+self.fname+'.'+"%07d"%(startrec+plusrec) 134 | try: 135 | fobj = open(filepath,'rb') 136 | except: 137 | if missingrec is 'raise': 138 | quit('Unable to find file ' + filepath) 139 | else: 140 | print(('Unable to find file ' + filepath)) 141 | return_zeros = True 142 | 143 | istart = plusrec*recitems 144 | iend = istart + recitems 145 | if (verbose): 146 | print(('Reading {0:s} rec {1:5d}'.format( 147 | self.fname,startrec+plusrec))) 148 | if return_zeros: 149 | bindata = np.zeros([ self.nrx,self.nry,self.nrz, 150 | self.nperbin ,nrecs ]) 151 | else: 152 | bindata[istart:iend] = np.fromfile(fobj,dtype=self.dtype) 153 | fobj.close() 154 | 155 | # Else 156 | else: 157 | 158 | try: 159 | fobj = open(self.fdir+self.fname,'rb') 160 | except: 161 | if missingrec is 'raise': 162 | print(('Unable to find file ' + self.fname)) 163 | raise DataNotAvailable 164 | elif missingrec is 'returnzeros': 165 | print(('Unable to find file ' + self.fname)) 166 | return_zeros = True 167 | 168 | # Seek to correct point in the file 169 | if (self.dtype == 'i'): 170 | recbytes = 4*recitems 171 | elif (self.dtype == 'd'): 172 | recbytes = 8*recitems 173 | else: 174 | if missingrec is 'raise': 175 | print(('Unable to find file ' + self.fname)) 176 | raise DataNotAvailable 177 | elif missingrec is 'returnzeros': 178 | print(('Unable to find file ' + self.fname)) 179 | return_zeros = True 180 | 181 | seekbyte = startrec*recbytes 182 | fobj.seek(seekbyte) 183 | 184 | if (verbose): 185 | print(('Reading {0:s} recs {1:5d} to {2:5d}'.format( 186 | self.fname,startrec,endrec))) 187 | 188 | # Get data and reshape with fortran array ordering 189 | if return_zeros: 190 | bindata = np.zeros([ self.nrx,self.nry,self.nrz, 191 | self.nperbin ,nrecs ]) 192 | else: 193 | bindata = np.fromfile(fobj, dtype=self.dtype, 194 | count=nrecs*recitems) 195 | 196 | fobj.close() 197 | 198 | if (verbose): 199 | print(('Reshaping and transposing {0:s} '.format(self.fname))) 200 | 201 | # Reshape bindata 202 | bindata = np.reshape( bindata, 203 | [ self.nrx, 204 | self.nry, 205 | self.nrz, 206 | self.nperbin , 207 | nrecs ], 208 | order='F') 209 | bindata = np.transpose(bindata, (0,1,2,4,3)) 210 | 211 | # If bin limits are specified, return only those within range 212 | if (binlimits): 213 | 214 | if (verbose): 215 | print(('bindata.shape = {0:s}'.format(str(bindata.shape)))) 216 | print(('Extracting bins {0:s} from {1:s} '.format( 217 | str(binlimits),self.fname))) 218 | # Defaults 219 | lower = [0]*3 220 | upper = [i for i in bindata.shape] 221 | 222 | for axis in range(3): 223 | if (binlimits[axis] == None): 224 | continue 225 | else: 226 | lower[axis] = binlimits[axis][0] 227 | upper[axis] = binlimits[axis][1] 228 | 229 | bindata = bindata[lower[0]:upper[0], 230 | lower[1]:upper[1], 231 | lower[2]:upper[2], :, :] 232 | 233 | 234 | if (verbose): 235 | 
print(('new bindata.shape = {0:s}'.format(str(bindata.shape)))) 236 | 237 | return bindata 238 | 239 | -------------------------------------------------------------------------------- /postproclib/visualiser/__init__.py: -------------------------------------------------------------------------------- 1 | from .mainframe import MainFrame, MainPanel 2 | 3 | -------------------------------------------------------------------------------- /postproclib/visualiser/choosefield.py: -------------------------------------------------------------------------------- 1 | import wx 2 | import wx.lib.scrolledpanel as scrolled 3 | 4 | from misclib import latextounicode 5 | 6 | class PlotTypePanel(wx.Panel): 7 | 8 | def __init__(self,parent,**kwargs): 9 | wx.Panel.__init__(self,parent,**kwargs) 10 | choices = ['Profile','Contour','Molecules'] 11 | self.radiobox = wx.RadioBox(self,label='Plot Type', 12 | style=wx.RA_SPECIFY_COLS, 13 | choices=choices) 14 | vbox = wx.BoxSizer(wx.VERTICAL) 15 | vbox.Add(self.radiobox, 0, wx.EXPAND|wx.ALL, 10) 16 | self.SetSizer(vbox) 17 | 18 | class FieldTypePanel(scrolled.ScrolledPanel): 19 | 20 | def __init__(self,parent,**kwargs): 21 | scrolled.ScrolledPanel.__init__(self, parent,**kwargs) 22 | 23 | choices = latextounicode(sorted(parent.parent.PP.plotlist.keys())) 24 | self.fieldradiobox = wx.RadioBox(self,label='Field', 25 | style=wx.RA_SPECIFY_ROWS, 26 | choices=choices) 27 | vbox = wx.BoxSizer(wx.VERTICAL) 28 | vbox.Add(self.fieldradiobox, 0, wx.EXPAND|wx.ALL, 10) 29 | self.SetSizer(vbox) 30 | self.SetAutoLayout(1) 31 | self.SetupScrolling(scroll_x=False, scrollToTop=False) 32 | 33 | #Fix to prevent jump to top when select 34 | self.Bind(wx.EVT_CHILD_FOCUS, self.on_focus) 35 | 36 | def on_focus(self,event): 37 | pass 38 | 39 | class MolTypePanel(scrolled.ScrolledPanel): 40 | 41 | def __init__(self,parent,**kwargs): 42 | scrolled.ScrolledPanel.__init__(self, parent,**kwargs) 43 | 44 | #This will be moved to a top level MD data collector 45 | #import glob 46 | #choices = "final_state" + glob.glob("*.dcd") + glob.glob("*.xyz") 47 | #choices = ["final_state", "all_clusters.xyz", "vmd_out.dcd", "vmd_temp.dcd"] 48 | choices = sorted(parent.parent.MM.plotlist.keys()) 49 | self.molradiobox = wx.RadioBox(self,label='Molecule Files', 50 | style=wx.RA_SPECIFY_ROWS, 51 | choices=choices) 52 | vbox = wx.BoxSizer(wx.VERTICAL) 53 | vbox.Add(self.molradiobox, 0, wx.EXPAND|wx.ALL, 10) 54 | self.SetSizer(vbox) 55 | self.SetAutoLayout(1) 56 | self.SetupScrolling(scroll_x=False, scrollToTop=False) 57 | 58 | #Fix to prevent jump to top when select 59 | self.Bind(wx.EVT_CHILD_FOCUS, self.on_focus) 60 | 61 | def on_focus(self,event): 62 | pass 63 | 64 | 65 | class SaveFigurePanel(wx.Panel): 66 | 67 | def __init__(self,parent,**kwargs): 68 | wx.Panel.__init__(self,parent,**kwargs) 69 | self.savebutton = wx.Button(self,-1,"Save Fig") 70 | 71 | class SaveDataPanel(wx.Panel): 72 | 73 | def __init__(self,parent,**kwargs): 74 | wx.Panel.__init__(self,parent,**kwargs) 75 | self.savebutton = wx.Button(self,-1,"Save Data") 76 | 77 | class SaveScriptPanel(wx.Panel): 78 | 79 | def __init__(self,parent,**kwargs): 80 | wx.Panel.__init__(self,parent,**kwargs) 81 | self.savebutton = wx.Button(self,-1,"Save Script") 82 | 83 | 84 | class FieldComponentPanel(wx.Panel): 85 | 86 | def __init__(self,parent,**kwargs): 87 | wx.Panel.__init__(self,parent,**kwargs) 88 | 89 | self.componenttitle = wx.StaticText(self,-1,label='Component',size=(100,-1)) 90 | self.componentcombobox = wx.ComboBox(self, size=(10,-1), 
value='0') 91 | 92 | choices = ['0','1','2'] 93 | self.normaltitle = wx.StaticText(self,-1,label='Normal', size=(100,-1)) 94 | self.normalcombobox = wx.ComboBox(self, choices=choices, size=(10,-1), 95 | value='0') 96 | 97 | grid = wx.GridBagSizer(hgap=3) 98 | grid.Add(self.componenttitle, (0,0), flag=wx.EXPAND) 99 | grid.Add(self.componentcombobox, (1,0), flag=wx.EXPAND) 100 | grid.Add(self.normaltitle, (0,1), flag=wx.EXPAND) 101 | grid.Add(self.normalcombobox, (1,1), flag=wx.EXPAND) 102 | grid.AddGrowableCol(0) 103 | grid.AddGrowableCol(1) 104 | self.SetSizer(grid) 105 | 106 | 107 | class FieldChooserPanel(wx.Panel): 108 | 109 | def __init__(self, parent, **kwargs): 110 | wx.Panel.__init__(self, parent, **kwargs) 111 | self.parent = parent 112 | # Plot type chooser box 113 | self.plottype_p = PlotTypePanel(self) 114 | # Field type chooser box 115 | self.fieldtype_p = FieldTypePanel(self, size = (-1, 340)) 116 | self.moltype_p = MolTypePanel(self, size = (-1, 340)) 117 | self.moltype_p.Hide() 118 | # Component chooser combo box 119 | self.component_p = FieldComponentPanel(self) 120 | # Autoscale button 121 | self.autoscale_b = wx.CheckBox(self,-1,label='Autoscale') 122 | # Min and max values for autoscale 123 | self.minpspin = wx.TextCtrl(self,style=wx.TE_PROCESS_ENTER,size=(70,-1)) 124 | self.maxpspin = wx.TextCtrl(self,style=wx.TE_PROCESS_ENTER,size=(70,-1)) 125 | # Save buttons (figure, data, script) 126 | self.save_b = SaveFigurePanel(self) 127 | self.save_d = SaveDataPanel(self) 128 | self.save_s = SaveScriptPanel(self) 129 | 130 | # Sizer 131 | vbox = wx.BoxSizer(wx.VERTICAL) 132 | vbox.Add(self.plottype_p, 0,wx.EXPAND, 0) 133 | vbox.Add(self.fieldtype_p,0,wx.EXPAND, 0) 134 | vbox.Add(self.moltype_p,0,wx.EXPAND, 0) 135 | vbox.Add(self.component_p,0,wx.EXPAND, 0) 136 | hbox = wx.BoxSizer(wx.HORIZONTAL) 137 | vbox.Add(hbox,0,wx.EXPAND, 0) 138 | hbox.Add(self.autoscale_b,0,wx.EXPAND, 0) 139 | hbox.Add(self.minpspin,0,wx.EXPAND, 0) 140 | hbox.Add(self.maxpspin,0,wx.EXPAND, 0) 141 | 142 | hbox = wx.BoxSizer(wx.HORIZONTAL) 143 | vbox.Add(hbox,0,wx.EXPAND, 0) 144 | 145 | #label = wx.StaticText(self, 0, 'Save', (20, 20)) 146 | #hbox.Add(label, 0,wx.EXPAND, 0) 147 | hbox.Add(self.save_b, 0,wx.EXPAND, 0) 148 | hbox.Add(self.save_d, 0,wx.EXPAND, 0) 149 | hbox.Add(self.save_s, 0,wx.EXPAND, 0) 150 | self.SetSizer(vbox) 151 | -------------------------------------------------------------------------------- /postproclib/visualiser/data.csv: -------------------------------------------------------------------------------- 1 | 1.424979955563912659e+00,1.980694444444444535e+00 2 | 4.274939866691737755e+00,1.983759133185620138e+00 3 | 7.124899777819563518e+00,1.991980182387880216e+00 4 | 9.974859688947388392e+00,2.002747467016664729e+00 5 | 1.282481960007521415e+01,2.012642450280332618e+00 6 | 1.567477951120303992e+01,2.018523544907279810e+00 7 | 1.852473942233086390e+01,2.018523544907280254e+00 8 | 2.137469933345868967e+01,2.012642450280332618e+00 9 | 2.422465924458651543e+01,2.002747467016665173e+00 10 | 2.707461915571434119e+01,1.991980182387880216e+00 11 | 2.992457906684216695e+01,1.983759133185620138e+00 12 | 3.277453897796998916e+01,1.980694444444444535e+00 13 | -------------------------------------------------------------------------------- /postproclib/visualiser/directory.py: -------------------------------------------------------------------------------- 1 | import wx 2 | import os 3 | 4 | class DirectoryChooserPanel(wx.Panel): 5 | 6 | def __init__(self,parent,fdir,**kwargs): 7 | 8 | 
wx.Panel.__init__(self,parent,**kwargs) 9 | 10 | self.fdir = fdir 11 | statictxt = wx.StaticText(self,-1,label='File directory: ') 12 | self.textctrl = wx.TextCtrl(self,-1,self.fdir,style=wx.TE_PROCESS_ENTER) 13 | self.changebutton = wx.Button(self,-1,"...") 14 | 15 | hbox = wx.BoxSizer(wx.HORIZONTAL) 16 | hbox.Add(statictxt,0,wx.ALIGN_CENTER_VERTICAL| wx.LEFT,5) 17 | hbox.Add(self.textctrl,1,wx.EXPAND,0) 18 | hbox.Add(self.changebutton,0,wx.EXPAND) 19 | self.SetSizer(hbox) 20 | -------------------------------------------------------------------------------- /postproclib/visualiser/logo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edwardsmith999/pyDataView/87ef1325a7aecfced5eaf150821032010f3d655c/postproclib/visualiser/logo.gif -------------------------------------------------------------------------------- /postproclib/visualiser/mainframe.py: -------------------------------------------------------------------------------- 1 | # /usr/bin/env python 2 | import wx 3 | import os 4 | import sys 5 | import traceback 6 | 7 | from postproclib.visualiser import __path__ as pplvpath 8 | from postproclib.pplexceptions import NoResultsInDir 9 | 10 | from .visuals import VisualiserPanel 11 | from .directory import DirectoryChooserPanel 12 | 13 | def showMessageDlg(msg, title='Information', style=wx.OK|wx.ICON_INFORMATION): 14 | """""" 15 | dlg = wx.MessageDialog(parent=None, message=msg, 16 | caption=title, style=style) 17 | dlg.ShowModal() 18 | dlg.Destroy() 19 | 20 | class MainFrame(wx.Frame): 21 | 22 | def __init__(self, parent=None, fdir='./', title='pyDataViewer', 23 | size=(1200,800), **kwargs): 24 | 25 | wx.Frame.__init__(self,parent,title=title,size=size) 26 | try: 27 | # postproclib.visualiser.__path__ (pplvpath) is a list 28 | if os.path.isfile(pplvpath[0]+"/logo.gif"): 29 | _icon = wx.EmptyIcon() 30 | _icon.CopyFromBitmap(wx.Bitmap(pplvpath[0] 31 | +"/logo.gif", wx.BITMAP_TYPE_ANY)) 32 | self.SetIcon(_icon) 33 | except IOError: 34 | print('Couldn\'t load icon') 35 | 36 | panel = MainPanel(self, fdir) 37 | 38 | 39 | class MainPanel(wx.Panel): 40 | 41 | def __init__(self, parent, fdir, catch_noresults=True): 42 | super(MainPanel, self).__init__(parent) 43 | self.parent = parent 44 | self.dirchooser = DirectoryChooserPanel(self, fdir) 45 | 46 | self.vbox = wx.BoxSizer(wx.VERTICAL) 47 | self.vbox.Add(self.dirchooser, 0, wx.EXPAND, 0) 48 | self.SetSizer(self.vbox) 49 | 50 | self.visualiserpanel = None 51 | self.new_visualiserpanel(fdir, catch_noresults) 52 | self.fdir = fdir 53 | 54 | self.set_bindings() 55 | 56 | def set_bindings(self): 57 | 58 | self.Bind(wx.EVT_TEXT_ENTER, self.handle_chdir, 59 | self.dirchooser.textctrl) 60 | self.Bind(wx.EVT_BUTTON, self.fdir_dialogue, 61 | self.dirchooser.changebutton) 62 | 63 | def destroy_visualiserpanel(self): 64 | 65 | if (self.visualiserpanel != None): 66 | self.visualiserpanel.Destroy() 67 | self.visualiserpanel = None 68 | 69 | def new_visualiserpanel(self, fdir, catch_noresults=True): 70 | 71 | self.destroy_visualiserpanel() 72 | self.fdir = fdir 73 | try: 74 | newvp = VisualiserPanel(self, fdir) 75 | except IOError: 76 | raise 77 | #showMessageDlg('Invalid directory.') 78 | except NoResultsInDir: 79 | tb = traceback.format_exc() 80 | print(tb) 81 | if catch_noresults: 82 | showMessageDlg('No results in this directory.') 83 | 84 | else: 85 | self.visualiserpanel = newvp 86 | self.vbox.Add(self.visualiserpanel, 1, wx.EXPAND, 0) 87 | self.SetSizer(self.vbox) 88 | 
self.Layout() 89 | print(('New visualiser file directory: ' + fdir)) 90 | 91 | def handle_chdir(self, event): 92 | 93 | fdir = self.dirchooser.textctrl.GetValue() 94 | self.new_visualiserpanel(fdir) 95 | self.fdir = fdir 96 | 97 | def fdir_dialogue(self, event): 98 | 99 | fdir = "" # Use folder as a flag 100 | currentdir = self.dirchooser.textctrl.GetValue() 101 | dlg = wx.DirDialog(self, defaultPath = currentdir) 102 | if dlg.ShowModal() == wx.ID_OK: 103 | fdir = dlg.GetPath() + "/" 104 | dlg.SetPath(fdir) 105 | dlg.Destroy() # best to do this sooner than later 106 | 107 | if fdir: 108 | self.dirchooser.textctrl.SetValue(fdir) 109 | event = wx.PyCommandEvent(wx.EVT_TEXT_ENTER.typeId, 110 | self.dirchooser.textctrl.GetId()) 111 | self.GetEventHandler().ProcessEvent(event) 112 | self.fdir = fdir 113 | -------------------------------------------------------------------------------- /postproclib/visualiser/mathtext_wx.py: -------------------------------------------------------------------------------- 1 | """ 2 | Demonstrates how to convert mathtext to a wx.Bitmap for display in various 3 | controls on wxPython. 4 | """ 5 | 6 | import matplotlib 7 | matplotlib.use("WxAgg") 8 | from numpy import arange, sin, pi, cos, log 9 | from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas 10 | from matplotlib.backends.backend_wx import NavigationToolbar2Wx 11 | from matplotlib.figure import Figure 12 | 13 | import wx 14 | 15 | IS_GTK = 'wxGTK' in wx.PlatformInfo 16 | IS_WIN = 'wxMSW' in wx.PlatformInfo 17 | IS_MAC = 'wxMac' in wx.PlatformInfo 18 | 19 | ############################################################ 20 | # This is where the "magic" happens. 21 | from matplotlib.mathtext import MathTextParser 22 | import matplotlib.mathtext as mathtext 23 | 24 | # Optionally set font to Computer Modern to avoid common missing font errors 25 | matplotlib.rc('font', family='serif', serif='cm10') 26 | matplotlib.rc('text', usetex=True) 27 | matplotlib.rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"] 28 | 29 | 30 | mathtext_parser = MathTextParser("Bitmap") 31 | def mathtext_to_wxbitmap(s): 32 | ftimage, depth = mathtext_parser.parse(s, 150) 33 | return wx.BitmapFromBufferRGBA( 34 | ftimage.get_width(), ftimage.get_height(), 35 | ftimage.as_rgba_str()) 36 | ############################################################ 37 | 38 | functions = [ 39 | (r'$\sum_{i=1}^N \; \vec{f}_{ij}$' + r' $ \vec{r}_{ij} $' , lambda x: sin(2*pi*x)), 40 | (r'$\frac{4}{3}\pi x^3$' , lambda x: (4.0 / 3.0) * pi * x**3), 41 | (r'$\cos(2 \pi x)$' , lambda x: cos(2*pi*x)), 42 | (r'$\oint_S \rho \bf{u}$' + r'$ \cdot d\bf{S}$' , lambda x: log(x)) 43 | ] 44 | 45 | class CanvasFrame(wx.Frame): 46 | def __init__(self, parent, title): 47 | wx.Frame.__init__(self, parent, -1, title, size=(550, 350)) 48 | self.SetBackgroundColour(wx.NamedColour("WHITE")) 49 | 50 | self.figure = Figure() 51 | self.axes = self.figure.add_subplot(111) 52 | self.change_plot(0) 53 | 54 | self.canvas = FigureCanvas(self, -1, self.figure) 55 | 56 | self.sizer = wx.BoxSizer(wx.VERTICAL) 57 | self.add_buttonbar() 58 | self.sizer.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW) 59 | self.add_toolbar() # comment this out for no toolbar 60 | 61 | menuBar = wx.MenuBar() 62 | 63 | # File Menu 64 | menu = wx.Menu() 65 | menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample") 66 | menuBar.Append(menu, "&File") 67 | 68 | if IS_GTK or IS_WIN: 69 | # Equation Menu 70 | menu = wx.Menu() 71 | for i, (mt, func) in enumerate(functions): 72 | bm 
= mathtext_to_wxbitmap(mt) 73 | item = wx.MenuItem(menu, 1000 + i, "") 74 | item.SetBitmap(bm) 75 | menu.AppendItem(item) 76 | self.Bind(wx.EVT_MENU, self.OnChangePlot, item) 77 | menuBar.Append(menu, "&Functions") 78 | 79 | self.SetMenuBar(menuBar) 80 | 81 | self.SetSizer(self.sizer) 82 | self.Fit() 83 | 84 | def add_buttonbar(self): 85 | self.button_bar = wx.Panel(self) 86 | self.button_bar_sizer = wx.BoxSizer(wx.HORIZONTAL) 87 | self.sizer.Add(self.button_bar, 0, wx.LEFT | wx.TOP | wx.GROW) 88 | 89 | for i, (mt, func) in enumerate(functions): 90 | bm = mathtext_to_wxbitmap(mt) 91 | button = wx.BitmapButton(self.button_bar, 1000 + i, bm) 92 | self.button_bar_sizer.Add(button, 1, wx.GROW) 93 | self.Bind(wx.EVT_BUTTON, self.OnChangePlot, button) 94 | 95 | self.button_bar.SetSizer(self.button_bar_sizer) 96 | 97 | def add_toolbar(self): 98 | """Copied verbatim from embedding_wx2.py""" 99 | self.toolbar = NavigationToolbar2Wx(self.canvas) 100 | self.toolbar.Realize() 101 | if IS_MAC: 102 | self.SetToolBar(self.toolbar) 103 | else: 104 | tw, th = self.toolbar.GetSizeTuple() 105 | fw, fh = self.canvas.GetSizeTuple() 106 | self.toolbar.SetSize(wx.Size(fw, th)) 107 | self.sizer.Add(self.toolbar, 0, wx.LEFT | wx.EXPAND) 108 | self.toolbar.update() 109 | 110 | def OnPaint(self, event): 111 | self.canvas.draw() 112 | 113 | def OnChangePlot(self, event): 114 | self.change_plot(event.GetId() - 1000) 115 | 116 | def change_plot(self, plot_number): 117 | t = arange(1.0,3.0,0.01) 118 | s = functions[plot_number][1](t) 119 | self.axes.clear() 120 | self.axes.plot(t, s) 121 | self.Refresh() 122 | 123 | class MyApp(wx.App): 124 | def OnInit(self): 125 | frame = CanvasFrame(None, "wxPython mathtext demo app") 126 | self.SetTopWindow(frame) 127 | frame.Show(True) 128 | return True 129 | 130 | app = MyApp() 131 | app.MainLoop() 132 | 133 | -------------------------------------------------------------------------------- /postproclib/visualiser/minimalscript.py: -------------------------------------------------------------------------------- 1 | 2 | def minimalscript(scripttype, plottype, fdir, ppdir, fieldname, 3 | startrec, endrec, comp, norm, bins, binwidth): 4 | 5 | if scripttype.lower() == "python": 6 | 7 | script=r""" 8 | import matplotlib.pyplot as plt 9 | import numpy as np 10 | import sys 11 | 12 | ppdir = '{0}' 13 | sys.path.append(ppdir) 14 | import postproclib as ppl 15 | 16 | normal ={6} 17 | component={3} 18 | startrec={4} 19 | endrec={5} 20 | 21 | #Get Post Proc Object 22 | fdir = '{1}' 23 | PPObj = ppl.All_PostProc(fdir) 24 | print(PPObj) 25 | 26 | #Get plotting object 27 | plotObj = PPObj.plotlist['{2}'] 28 | """.format(ppdir, fdir, fieldname, str(comp), str(startrec), str(endrec), str(norm)) 29 | 30 | if plottype == "Profile": 31 | script += r""" 32 | #Get profile 33 | x, y = plotObj.profile(axis=normal, 34 | startrec=startrec, 35 | endrec=endrec) 36 | 37 | #Plot only normal component 38 | fig, ax = plt.subplots(1,1) 39 | ax.plot(x,y[:,component]) 40 | plt.show() 41 | """ 42 | 43 | elif plottype == "Contour": 44 | script += r""" 45 | #Get Contour 46 | naxes = [0,1,2] 47 | naxes.remove(normal) 48 | bins = {0} 49 | binwidth = {1} 50 | binlimits = [None]*3 51 | binlimits[normal] = (bins-binwidth, 52 | bins+binwidth+1) #Python +1 slicing 53 | 54 | ax1, ax2, data = plotObj.contour(axes=naxes, 55 | startrec=startrec, 56 | endrec=endrec, 57 | binlimits=binlimits, 58 | missingrec='returnzeros') 59 | 60 | fig, ax = plt.subplots(1,1) 61 | cmap = plt.cm.RdYlBu_r 62 | colormesh = ax.pcolormesh(ax1, ax2, 
data[:,:,component], 63 | cmap=cmap) 64 | plt.colorbar(colormesh) 65 | plt.axis('tight') 66 | plt.show() 67 | """.format(str(bins), str(binwidth)) 68 | 69 | elif scripttype.lower() == "matlab": 70 | 71 | script=r""" 72 | clear variables classes 73 | close all 74 | 75 | ppdir = '{0}' 76 | cd(ppdir) 77 | ppmod = py.importlib.import_module("postproclib"); 78 | 79 | normal ={6} 80 | component={3} 81 | startrec={4} 82 | endrec={5} 83 | naxis = 0:2; 84 | naxis(normal+1) = []; 85 | 86 | %Get Post Proc Object 87 | fdir = '{1}' 88 | PPObj = ppmod.All_PostProc(py.str(fdir)); 89 | 90 | %Get plotting object 91 | fname = '{2}' 92 | PObj = PPObj.plotlist{{py.str(fname)}}; 93 | 94 | """.format(ppdir, fdir, fieldname, str(comp), str(startrec), str(endrec), str(norm)) 95 | 96 | if plottype == "Profile": 97 | script += r""" 98 | %Get profile 99 | a = PObj.profile(py.int(normal), py.int(startrec),py.int(endrec)); 100 | x = a{1}; y = a{2}; 101 | plot(x,y) 102 | 103 | """ 104 | 105 | elif plottype == "Contour": 106 | script += r""" 107 | bins = {0} 108 | binwidth = {1} 109 | bns = py.list({{py.int(bins-binwidth), py.int(bins+binwidth+1)}}); 110 | None = string(missing); 111 | if (normal == 0) 112 | binlimits = py.list({{bns,None,None}}); 113 | elseif (normal == 1) 114 | binlimits = py.list({{None,bns,None}}); 115 | elseif (normal == 2) 116 | binlimits = py.list({{None,None,bns}}); 117 | end 118 | 119 | a = PObj.contour(py.list({{py.int(naxis(1)),py.int(naxis(2))}}), ... 120 | py.int(startrec),py.int(endrec), ... 121 | pyargs('binlimits',binlimits, ... 122 | "missingrec","returnzeros")); 123 | 124 | ax1 = np2mat(a{{1}}); 125 | ax2 = np2mat(a{{2}}); 126 | field = np2mat(a{{3}}); 127 | 128 | [C,h] =contourf(ax1, ax2, field(:,:,component+1), 40); 129 | set(h,'LineColor','none'); 130 | colorbar() 131 | """.format(str(bins), str(binwidth)) 132 | 133 | script += r""" 134 | function data = np2mat(nparray) 135 | ns = int32(py.array.array('i',nparray.shape)); 136 | data = reshape(double(py.array.array('d', ... 
137 | py.numpy.nditer(nparray, pyargs('order', 'C')))), ns); 138 | data=reshape(data,fliplr(ns)); 139 | data=permute(data,[length(ns):-1:1]); 140 | end 141 | """ 142 | else: 143 | raise ValueError("scripttype should be python or matlab") 144 | 145 | return script -------------------------------------------------------------------------------- /postproclib/visualiser/script.py: -------------------------------------------------------------------------------- 1 | 2 | import matplotlib.pyplot as plt 3 | import numpy as np 4 | import sys 5 | 6 | sys.path.append(resultdirectory) 7 | import postproclib as ppl 8 | 9 | #Get Post Proc Object 10 | PPObj = ppl.MD_PostProc(utlisdirectory) 11 | 12 | #Get plotting object 13 | plotObj = PPObj.plotlist[plottype] 14 | 15 | #Get profile 16 | x, y = plotObj.profile(axis=0, 17 | startrec=10, 18 | endrec=0) 19 | 20 | #Plot only normal component 21 | plt.plot(x,y[0]) 22 | plt.show() 23 | 24 | -------------------------------------------------------------------------------- /postproclib/visualiser/sliders.py: -------------------------------------------------------------------------------- 1 | import wx 2 | import numpy as np 3 | from .minispinctrl import MiniSpinCtrl 4 | 5 | class SliderPlusWidth(wx.Panel): 6 | 7 | def __init__(self,parent,slidername,**kwargs): 8 | 9 | wx.Panel.__init__(self,parent,**kwargs) 10 | sliderlabel = wx.StaticText(self,-1,label=slidername+':',size=(50,-1)) 11 | self.slidertext = wx.TextCtrl(self,-1,style=wx.TE_PROCESS_ENTER, 12 | size=(50,-1)) 13 | self.slider = JumpSlider(self) 14 | #self.slider = wx.Slider(self) 15 | spintext = wx.StaticText(self,-1,label="\u00B1",size=(10,-1)) 16 | #self.spin = wx.SpinCtrl(self,value='0',initial=0, size=(-1,-1)) 17 | #A spin control which avoids the massive GTK3 buttons 18 | self.spin = MiniSpinCtrl(self, initial=0, size=(30,-1)) 19 | 20 | hbox = wx.BoxSizer(wx.HORIZONTAL) 21 | hbox.Add(sliderlabel,0,wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 10) 22 | hbox.Add(self.slidertext,0,wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 10) 23 | hbox.Add(self.slider,1,wx.EXPAND,0) 24 | hbox.Add(spintext,0,wx.ALIGN_CENTER_VERTICAL | wx.LEFT, 10) 25 | hbox.Add(self.spin,0,wx.ALIGN_CENTER_VERTICAL | wx.RIGHT, 10) 26 | vbox = wx.BoxSizer(wx.VERTICAL) 27 | vbox.Add(hbox,1,wx.EXPAND,0) 28 | self.SetSizer(vbox) 29 | self.SetValue(0) 30 | 31 | def SetValue(self,pos): 32 | self.slidertext.SetValue(str(pos)) 33 | self.slider.SetValue(pos) 34 | 35 | def GetValue(self): 36 | return self.slider.GetValue() 37 | 38 | def SetMax(self,maximum): 39 | self.slider.SetMax(maximum) 40 | self.spin.SetRange(0,maximum/2) 41 | 42 | class RecordSliderPanel(wx.Panel): 43 | 44 | def __init__(self,parent,**kwargs): 45 | 46 | wx.Panel.__init__(self,parent,**kwargs) 47 | 48 | self.binslider = SliderPlusWidth(self, 'Bin') 49 | self.recslider = SliderPlusWidth(self, 'Rec') 50 | 51 | vbox = wx.BoxSizer(wx.VERTICAL) 52 | vbox.Add(self.binslider,1,wx.EXPAND,0) 53 | vbox.Add(self.recslider,1,wx.EXPAND,0) 54 | self.SetSizer(vbox) 55 | 56 | class JumpSlider(wx.Slider): 57 | """ 58 | Slider which jumps to location of click 59 | Mouse click is bound to slider so 60 | location is set by clicking somewhere. 
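    A minimal usage sketch (illustrative only; the wx.App/wx.Frame
    boilerplate is assumed, everything else comes from this file):
    JumpSlider calls self.parent.SetValue(result) on a click, so its
    parent must provide SetValue, as SliderPlusWidth above does, and it
    is normally created through SliderPlusWidth:

        app = wx.App(False)
        frame = wx.Frame(None)
        panel = SliderPlusWidth(frame, 'Rec')  # wraps a JumpSlider as panel.slider
        panel.SetMax(200)                      # a click mid-track now jumps to ~100
        frame.Show()
        app.MainLoop()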
61 | """ 62 | def __init__(self, parent, gap=12, *args, **kwargs): 63 | wx.Slider.__init__(self, parent, *args, **kwargs) 64 | self.gap = gap 65 | self.parent = self.GetParent() 66 | self.Bind(wx.EVT_LEFT_DOWN, self.OnClick) 67 | 68 | def linapp(self, x1, x2, y1, y2, x): 69 | return (float(x - x1) / (x2 - x1)) * (y2 - y1) + y1 70 | 71 | def post_slide_event(self,eventval): 72 | """ 73 | Updated positions triggers an 74 | event to let the parent know scroll 75 | position has been changed 76 | """ 77 | event = wx.PyCommandEvent(wx.EVT_COMMAND_SCROLL_CHANGED.typeId, self.GetId()) 78 | event.SetInt(eventval) 79 | wx.PostEvent(self.GetEventHandler(),event) 80 | 81 | def OnClick(self, e): 82 | click_min = self.gap 83 | click_max = self.GetSize()[0] - self.gap 84 | click_position = e.GetX() 85 | result_min = self.GetMin() 86 | result_max = self.GetMax() 87 | if click_position > click_min and click_position < click_max: 88 | result = self.linapp(click_min, click_max, 89 | result_min, result_max, 90 | click_position) 91 | elif click_position <= click_min: 92 | result = result_min 93 | else: 94 | result = result_max 95 | #Round to nearest integer using numpy 96 | result = int(np.round(result)) 97 | self.parent.SetValue(result) 98 | self.post_slide_event(result) 99 | e.Skip() 100 | 101 | -------------------------------------------------------------------------------- /postproclib/vmd_reformat.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python2.7 2 | 3 | # Gets number of molecules from header file and 4 | # calls fortran routine vmd_reformat.exe to convert 5 | # vmd.temp into readable files 6 | # Written by David Trevelyan 7 | # Edited by Edward Smith 25/11/14 8 | 9 | import os 10 | 11 | from .pplexceptions import ScriptMissing 12 | 13 | class Chdir: 14 | """ 15 | Wrapper to move from current directory to new directory 16 | and return when using with 17 | 18 | Example usage: 19 | 20 | with Chdir('./../'): 21 | os.system('./a.out') 22 | """ 23 | def __init__( self, newpath ): 24 | self.savedPath = os.getcwd() 25 | 26 | #Check for abs or relative path 27 | if (newpath[0] == "/"): 28 | self.newpath = newpath 29 | elif (newpath[0] != "."): 30 | self.newpath = './' + newpath 31 | else: 32 | self.newpath = newpath 33 | 34 | def __enter__( self ): 35 | os.chdir(self.newpath) 36 | 37 | def __exit__( self, etype, value, traceback): 38 | os.chdir(self.savedPath ) 39 | 40 | class VmdReformat: 41 | 42 | def __init__(self, fdir, fname, scriptdir): 43 | 44 | self.fdir = fdir 45 | self.fname = fname 46 | self.scriptdir = scriptdir 47 | self.Reformatted = False 48 | 49 | headerfile = self.fdir + 'simulation_header' 50 | 51 | # Extract np from header 52 | fobj = open(headerfile,'r') 53 | self.np = 0 54 | while self.np==0: 55 | line = fobj.readline().split(';') 56 | if (line[1].strip() == 'globalnp'): 57 | self.np = int(line[2].strip()) 58 | 59 | self.domain = [0, 0, 0] 60 | for ixyz in range(3): 61 | while self.domain[ixyz]==0: 62 | line = fobj.readline().split(';') 63 | if (line[1].strip() == 'globaldomain(' + str(ixyz+1) + ')'): 64 | self.domain[ixyz] = float(line[2].strip()) 65 | 66 | if not os.path.isfile(self.scriptdir + 'vmd_reformat.f90'): 67 | print('Error -- vmd_reformat.f90 is missing from ' + self.scriptdir) 68 | raise ScriptMissing 69 | 70 | def reformat(self): 71 | 72 | #Remove previous file 73 | try: 74 | os.remove(str(self.fdir) + str(self.fname).replace('temp','out')) 75 | except OSError: 76 | pass 77 | 78 | # Build and call VMD_reformat with 
np from header 79 | with Chdir(self.scriptdir): 80 | os.system('mpif90 -O3 -o vmd_reformat.exe vmd_reformat.f90') 81 | cmd = ('./vmd_reformat.exe ' + str(self.fdir) 82 | + ' ' + str(self.fname) 83 | + ' ' + str(self.np) 84 | + ' ' + str(self.domain[0]) 85 | + ' ' + str(self.domain[1]) 86 | + ' ' + str(self.domain[2])) 87 | print(cmd) 88 | os.system(cmd) 89 | 90 | #Check if sucessful 91 | try: 92 | with open(self.fdir + '/' + self.fname.replace('temp','out')): pass 93 | self.Reformatted = True 94 | except IOError: 95 | print('Reformat unsuccessful') 96 | raise 97 | 98 | def run_vmd(self): 99 | 100 | if self.Reformatted: 101 | try: 102 | with open('./' + self.fdir + '/' + self.fname.replace('temp','out')): pass 103 | os.system('vmd ' + self.fdir + '/' + self.fname.replace('temp','out')) 104 | except IOError: 105 | print('vmd_out.dcd file missing in run_vmd -- have you called reformat?') 106 | else: 107 | print('VmdReformat.reformat() must be called before VmdReformat.run_vmd()') 108 | 109 | if __name__ == "__main__": 110 | 111 | scriptdir = os.path.join(os.path.dirname(__file__)) 112 | filepath = scriptdir + '/../results/' 113 | filename = 'vmd_temp.dcd' 114 | 115 | VMDobj = VmdReformat(filepath, filename, scriptdir) 116 | VMDobj.reformat() 117 | VMDobj.run_vmd() 118 | 119 | 120 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/.svn/all-wcprops: -------------------------------------------------------------------------------- 1 | K 25 2 | svn:wc:ra_dav:version-url 3 | V 74 4 | /subversion/edward/!svn/ver/858/MDNS_repo/branch/utils/postproclib/vmd_tcl 5 | END 6 | custom_colorscale.tcl 7 | K 25 8 | svn:wc:ra_dav:version-url 9 | V 96 10 | /subversion/edward/!svn/ver/831/MDNS_repo/branch/utils/postproclib/vmd_tcl/custom_colorscale.tcl 11 | END 12 | color_scale_bar_new_test.tcl 13 | K 25 14 | svn:wc:ra_dav:version-url 15 | V 103 16 | /subversion/edward/!svn/ver/831/MDNS_repo/branch/utils/postproclib/vmd_tcl/color_scale_bar_new_test.tcl 17 | END 18 | plot_MD_field.vmd 19 | K 25 20 | svn:wc:ra_dav:version-url 21 | V 92 22 | /subversion/edward/!svn/ver/831/MDNS_repo/branch/utils/postproclib/vmd_tcl/plot_MD_field.vmd 23 | END 24 | read_file.tcl 25 | K 25 26 | svn:wc:ra_dav:version-url 27 | V 88 28 | /subversion/edward/!svn/ver/831/MDNS_repo/branch/utils/postproclib/vmd_tcl/read_file.tcl 29 | END 30 | load_polymer.vmd 31 | K 25 32 | svn:wc:ra_dav:version-url 33 | V 91 34 | /subversion/edward/!svn/ver/858/MDNS_repo/branch/utils/postproclib/vmd_tcl/load_polymer.vmd 35 | END 36 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/.svn/entries: -------------------------------------------------------------------------------- 1 | 10 2 | 3 | dir 4 | 864 5 | http://svn.ma.ic.ac.uk/subversion/edward/MDNS_repo/branch/utils/postproclib/vmd_tcl 6 | http://svn.ma.ic.ac.uk/subversion/edward 7 | 8 | 9 | 10 | 2015-02-24T10:36:28.037815Z 11 | 858 12 | edwards 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 1e92d776-e0e5-45cd-b670-8958d04d46c8 28 | 29 | custom_colorscale.tcl 30 | file 31 | 32 | 33 | 34 | 35 | 2014-12-04T11:05:30.374668Z 36 | a7b86e71b5d3371ce7fc75cf7c418f96 37 | 2014-12-01T18:50:34.008042Z 38 | 831 39 | edwards 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 2199 62 | 63 | color_scale_bar_new_test.tcl 64 | file 65 | 66 | 67 | 68 | 69 | 2014-12-04T11:05:30.374668Z 70 | fb3ad75b2c6932e89b9273b51d2b1c86 71 | 
2014-12-01T18:50:34.008042Z 72 | 831 73 | edwards 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 4816 96 | 97 | plot_MD_field.vmd 98 | file 99 | 100 | 101 | 102 | 103 | 2014-12-04T11:05:30.374668Z 104 | c2d0603e820e373e02bc197b9e4ce79e 105 | 2014-12-01T18:50:34.008042Z 106 | 831 107 | edwards 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 7051 130 | 131 | read_file.tcl 132 | file 133 | 134 | 135 | 136 | 137 | 2014-12-04T11:05:30.378668Z 138 | 076fb09d2be248c10ff5cd3cb0591ad4 139 | 2014-12-01T18:50:34.008042Z 140 | 831 141 | edwards 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 396 164 | 165 | load_polymer.vmd 166 | file 167 | 168 | 169 | 170 | 171 | 2015-02-19T18:56:36.645450Z 172 | aefea51f7bce790083b7dcc6cb427899 173 | 2015-02-24T10:36:28.037815Z 174 | 858 175 | edwards 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 2403 198 | 199 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/.svn/text-base/color_scale_bar_new_test.tcl.svn-base: -------------------------------------------------------------------------------- 1 | ## NAME: color_scale_bar 2 | ## 3 | ## SYNOPSIS: 4 | ## color_scale_bar draws a color bar on the screen to show all of the 5 | ## current colors (colorid 17~1040). It also shows labels beside the 6 | ## color bar to show the range of the mapped values. 7 | ## 8 | ## VERSION: 2.0 9 | ## Uses VMD version: VMD Version 1.7 or greater 10 | ## Ease of use: 2. need to understand some Tcl and a bit about how VMD 11 | ## works 12 | ## 13 | ## PROCEDURES: 14 | ## color_scale bar 15 | ## 16 | ## DESCRIPTION: 17 | ## To draw a color scale bar with length=1.5, width=0.25, the range of 18 | ## mapped values is 0~128, and you want 8 labels. 19 | ## color_scale_bar 1.5 0.25 0 128 8 20 | ## 21 | ## COMMENTS: The size of the bar also depends on the zoom scale. 
22 | ## 23 | ## AUTHOR: 24 | ## Wuwei Liang (gtg088c@prism.gatech.edu) 25 | ## 26 | ## New version 2 built on Wuwei Liang's code, by Dan Wright 27 | ## 28 | ## 29 | ## Last update v2.1 on Dan Wright's version 2 on Wuwei Liang's code 30 | ## by Pepe Romo 31 | ## 32 | ## CHANGES: 33 | ## * draws the bar in a new molecule 34 | ## * has defaults for all parameters so nothing has to be entered manually 35 | ## * functions moved into a seperate namespace 36 | ## * has a delete function (just deletes the seperate mol for now) 37 | ## * fixed position so it remains visible when the scene is rotated 38 | ## 39 | ## CHANGES 2: 40 | ## * Searches the Min and Max through all the FRAMES in each Mol 41 | ## 42 | ## USAGE: 43 | ## Run the following in the console window: 44 | ## 45 | ## 1) 'source color_scale_bar_new.tcl' 46 | ## 2) 'namespace import ::ColorBar::*' 47 | ## 3) run 'color_scale_bar' to create the bar with default parameters; 48 | ## run 'delete_color_scale_bar' to remove it from the display 49 | 50 | 51 | # This function draws a color bar to show the color scale 52 | # length = the length of the color bar 53 | # width = the width of the color bar 54 | # min = the minimum value to be mapped 55 | # max = the maximum mapped value 56 | # label_num = the number of labels to be displayed 57 | 58 | namespace eval ::ColorBar_v2:: { 59 | variable bar_mol 60 | namespace export color_scale_bar_v2 delete_color_scale_bar_v2 61 | } 62 | 63 | proc ::ColorBar_v2::color_scale_bar_v2 {{length 0.5} {width 0.05} {auto_scale 1} {fixed 1} {min 0} {max 100} {label_num 5} } { 64 | 65 | variable bar_mol 66 | 67 | display update off 68 | #display resetview 69 | 70 | # Create a seperate molid to draw in, so it's possible for the user to 71 | # delete the bar. 72 | # 73 | # So that the draw cmds will work right, must save top mol and set top 74 | # to our new created mol, then set it back later. 75 | set numframes [molinfo top get numframes] 76 | set old_top [molinfo top] 77 | set bar_mol [mol new] 78 | mol top $bar_mol 79 | 80 | # If a fixed bar was requested... 81 | if {$fixed == 1} { 82 | mol fix $bar_mol 83 | } 84 | 85 | # If auto_scale was requested, go through all the mols and find the min/max 86 | # scale ranges for setting the bar. 87 | if {$auto_scale == 1} { 88 | set min 999 89 | set max -99 90 | foreach m [molinfo list] { 91 | if {$m != $bar_mol} { 92 | for {set i 0} {$i<$numframes} {incr i 1} { 93 | molinfo $m set frame $i 94 | set minmax [split [mol scaleminmax $m 0]] 95 | set aux [molinfo $m get frame] 96 | #puts "mol $m frame $aux minmax $minmax i $i" 97 | if {$min > [lindex $minmax 0]} { 98 | set min [lindex $minmax 0] 99 | } 100 | if {$max < [lindex $minmax 1]} { 101 | set max [lindex $minmax 1] 102 | } 103 | } 104 | } 105 | } 106 | } 107 | #puts "Final MinMax --> $min $max" 108 | 109 | 110 | # We want to draw relative to the location of the top mol so that the bar 111 | # will always show up nicely. 
112 | set center [molinfo $old_top get center] 113 | set center [regsub -all {[{}]} $center ""] 114 | set center [split $center] 115 | 116 | #puts "[lindex $center 0]" 117 | 118 | # draw the color bar 119 | set start_y [expr [lindex $center 1] - (0.5 * $length)] 120 | #set start_y [expr (-0.5 * $length)-1.2] 121 | set use_x [expr 1+[lindex $center 0]-0.25] 122 | #set use_x -1.0 123 | set use_z [lindex $center 2] 124 | #set use_z 0 125 | set step [expr $length / 1024.0] 126 | 127 | #puts "x: $use_x y: $start_y z: $use_z" 128 | 129 | for {set colorid 17 } { $colorid <= 1040 } {incr colorid 1 } { 130 | draw color $colorid 131 | set cur_y [ expr $start_y + ($colorid - 17) * $step ] 132 | draw line "$use_x $cur_y $use_z" "[expr $use_x+$width] $cur_y $use_z" 133 | } 134 | 135 | # draw the labels 136 | set coord_x [expr (1.2*$width)+$use_x]; 137 | set step_size [expr $length / $label_num] 138 | set color_step [expr 1024.0/$label_num] 139 | set value_step [expr ($max - $min ) / double ($label_num)] 140 | 141 | for {set i 0} {$i <= $label_num } { incr i 1} { 142 | set cur_color_id black 143 | # set cur_color_id IS THE COLOR OF THE LABELS!! 144 | draw color $cur_color_id 145 | set coord_y [expr $start_y+$i * $step_size ] 146 | set cur_text [expr $min + $i * $value_step ] 147 | 148 | draw text " $coord_x $coord_y $use_z" [format %6.2f $cur_text] 149 | } 150 | 151 | 152 | # re-set top 153 | mol top $old_top 154 | display update on 155 | } 156 | 157 | proc ::ColorBar_v2::delete_color_scale_bar_v2 { } { 158 | variable bar_mol 159 | 160 | mol delete $bar_mol 161 | } 162 | 163 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/.svn/text-base/custom_colorscale.tcl.svn-base: -------------------------------------------------------------------------------- 1 | ## NAME: tricolor_scale 2 | ## 3 | ## SYNOPSIS: 4 | ## Specify a customer colour schemes for the 5 | ## tricolor_scale used to define gradients 6 | ## 7 | 8 | # load read file routine 9 | source ./read_file.tcl 10 | namespace import ::read_file::* 11 | 12 | 13 | namespace eval ::custom_colorscale:: { 14 | namespace export tricolor_scale cmapfile_color_scale 15 | } 16 | 17 | #Define custom colour scales 18 | proc lerpcolor { col1 col2 alpha } { 19 | set dc [vecsub $col2 $col1] 20 | set nc [vecadd $col1 [vecscale $dc $alpha]] 21 | return $nc 22 | } 23 | 24 | proc coltogs { col } { 25 | foreach {r g b} $col {} 26 | set gray [expr ($r + $g + $b) / 3.0] 27 | return [list $gray $gray $gray] 28 | } 29 | 30 | proc ::custom_colorscale::tricolor_scale {} { 31 | display update off 32 | set mincolorid [expr [colorinfo num] - 1] 33 | set maxcolorid [expr [colorinfo max] - 1] 34 | set colrange [expr $maxcolorid - $mincolorid] 35 | set colhalf [expr $colrange / 2] 36 | for {set i $mincolorid} {$i < $maxcolorid} {incr i} { 37 | set colpcnt [expr ($i - $mincolorid) / double($colrange)] 38 | 39 | set R {1.921568661928176880e-01 2.117647081613540649e-01 5.843137502670288086e-01 } 40 | set W {9.737793234621987537e-01 9.898500606950446645e-01 7.972318345012722185e-01 } 41 | set B {7.239523413363717630e-01 7.381776274281801054e-02 1.505574837974381908e-01 } 42 | if { $colpcnt < 0.5 } { 43 | set nc [lerpcolor $R $W [expr $colpcnt * 2.0]] 44 | } else { 45 | set nc [lerpcolor $W $B [expr ($colpcnt-0.5) * 2.0]] 46 | } 47 | 48 | foreach {r g b} $nc {} 49 | puts "index: $i $r $g $b -- $colpcnt" 50 | display update ui 51 | color change rgb $i $r $g $b 52 | } 53 | display update on 54 | } 55 | 56 | 57 | proc 
::custom_colorscale::cmapfile_color_scale {name} { 58 | display update off 59 | set mincolorid [expr [colorinfo num] - 1] 60 | set maxcolorid [expr [colorinfo max] - 1] 61 | set colrange [expr $maxcolorid - $mincolorid] 62 | set count 1024 63 | for {set i [expr $mincolorid]} {$i < [expr $maxcolorid]} {incr i} { 64 | set colpcnt [expr ($i - $mincolorid) / double($colrange)] 65 | set nc [read_file_in $name $count] 66 | foreach {r g b} $nc {} 67 | #puts "index: $count $i $r $g $b -- $colpcnt" 68 | display update ui 69 | color change rgb $i $r $g $b 70 | #Go through in reverse order (seems to prevents wrap around of colour) 71 | set count [expr $count-1] 72 | } 73 | display update on 74 | } 75 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/.svn/text-base/load_polymer.vmd.svn-base: -------------------------------------------------------------------------------- 1 | 2 | proc newrep {molid repname} { 3 | 4 | mol addrep $molid 5 | set repid [expr [molinfo $molid get numreps] - 1] 6 | set repname [mol repname $molid $repid] 7 | 8 | return $repid 9 | } 10 | 11 | proc delrep {molid repname} { 12 | 13 | set repid [mol repindex $molid $repname] 14 | mol delrep $repid $molid 15 | 16 | } 17 | 18 | #Default molecule colours 19 | set W_colour 15 20 | set EO_colour 0 21 | set S_colour 1 22 | set M_colour 3 23 | set D_colour 9 24 | set C_colour 5 25 | set P_colour 0 26 | set A_colour 15 27 | 28 | #Molecule sizes based on base scale and set using size ratios 29 | set scale 0.6 30 | set W_size [expr 0.8584*$scale] 31 | set M_size [expr 1.2398*$scale] 32 | set D_size [expr 1.0702*$scale] 33 | set EO_size [expr 0.8067*$scale] 34 | set CM_size [expr 0.7*$scale] 35 | set S_size [expr 1.1*$scale] 36 | set P_size [expr 1.1*$scale] 37 | set A_size [expr 1.*$scale] 38 | 39 | #LOAD POLYMER MOLECULES 40 | mol new {./polymer_topol.psf} type {psf} first 0 last -1 step 1 waitfor all 41 | mol addfile {./vmd_out.dcd} type {dcd} first 0 last -1 step 1 waitfor all top 42 | set bonds [newrep top bonds] 43 | mol modstyle $bonds top CPK 0.000000 0.400000 10.000000 10.000000 44 | 45 | mol selection type W 46 | set repW [newrep top repW] 47 | mol modstyle $repW top CPK $W_size 0.000000 10.000000 10.000000 48 | mol modmaterial $repW top Transparent 49 | 50 | mol selection type D 51 | set repD [newrep top repD] 52 | mol modstyle $repD top CPK $D_size 0.000000 10.000000 10.000000 53 | mol modmaterial $repD top AOChalky 54 | 55 | mol selection type M 56 | set repM [newrep top repM] 57 | mol modstyle $repM top CPK $M_size 0.000000 10.000000 10.000000 58 | mol modmaterial $repM top AOChalky 59 | 60 | mol selection type EO 61 | set repEO [newrep top repEO] 62 | mol modstyle $repEO top CPK $EO_size 0.000000 10.000000 10.000000 63 | mol modmaterial $repEO top AOChalky 64 | 65 | mol selection type CM 66 | set repCM [newrep top repCM] 67 | mol modstyle $repCM top CPK $CM_size 0.000000 10.000000 10.000000 68 | 69 | mol selection type S 70 | set repS [newrep top repS] 71 | mol modstyle $repS top CPK $S_size 0.000000 10.000000 10.000000 72 | mol modmaterial $repS top AOChalky 73 | 74 | mol selection type C 75 | set repPOLY [newrep top repPOLY] 76 | mol modstyle $repPOLY top CPK $P_size 0.000000 10.000000 10.000000 77 | mol modmaterial $repPOLY top AOChalky 78 | 79 | mol selection type N 80 | set repSOL [newrep top repSOL] 81 | mol modstyle $repSOL top CPK $A_size 0.000000 10.000000 10.000000 82 | mol modmaterial $repSOL top AOChalky 83 | 84 | #Set default color for types 85 | color Name W 
$W_colour 86 | color Name E $EO_colour 87 | color Name S $S_colour 88 | color Name M $M_colour 89 | color Name D $D_colour 90 | color Name P $P_colour 91 | color Name A $A_colour 92 | 93 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/.svn/text-base/plot_MD_field.vmd.svn-base: -------------------------------------------------------------------------------- 1 | # =============================================================# 2 | # VMD script to plot a coupled simulation using a combination # 3 | # of MD molecules and volumetric data calculated from CFD/MD # 4 | # averages and written in .dx format # 5 | # =============================================================# 6 | 7 | # load colorbar routine 8 | source ./color_scale_bar_new_test.tcl 9 | namespace import ::ColorBar_v2::* 10 | 11 | # load read file routine 12 | source ./read_file.tcl 13 | namespace import ::read_file::* 14 | 15 | # load custom colour scale 16 | source ./custom_colorscale.tcl 17 | namespace import ::custom_colorscale::* 18 | 19 | #Header VMD Header data written by python script to define time intervals 20 | proc read_header {} { 21 | 22 | set filename "./vol_data/vmd_header" 23 | 24 | set tplot [read_file_in $filename 1 ] 25 | set delta_t [read_file_in $filename 2 ] 26 | set Nave [read_file_in $filename 3 ] 27 | set skip [read_file_in $filename 4 ] 28 | 29 | return [list $tplot $delta_t $Nave $skip] 30 | } 31 | 32 | #Get VMD record that corresponds to current frame 33 | proc frame2vmdrecord {frame tplot} { 34 | 35 | #Check if MD step is inside interval 36 | set interval_no 1 37 | 38 | #Get start and end of current interval 39 | set interval_start [read_file_in "./vol_data/vmd_intervals" $interval_no ] 40 | set interval_end [read_file_in "./vol_data/vmd_intervals" [expr $interval_no+1 ] ] 41 | set interval_frame [expr ($interval_end - $interval_start)/$tplot] 42 | set prev_int_frame 0 43 | while {$frame >= $interval_frame} { 44 | #Get start and end of current interval 45 | #puts "In while loop = $interval_no $interval_start $interval_end $frame $interval_frame" 46 | set interval_no [expr $interval_no +2] 47 | set interval_start [read_file_in "./vol_data/vmd_intervals" $interval_no ] 48 | set interval_end [read_file_in "./vol_data/vmd_intervals" [expr $interval_no+1 ] ] 49 | set prev_int_frame [expr $interval_frame] 50 | set interval_frame [expr ($interval_end - $interval_start)/$tplot + $interval_frame] 51 | } 52 | 53 | set vmdrecord [expr ($interval_start/($tplot)+($frame-$prev_int_frame))] 54 | return $vmdrecord 55 | } 56 | 57 | #Get bin record corresponding to vmd record 58 | proc vmdrecord2binrec {vmdrec Nave} { 59 | 60 | set binrec [expr $vmdrec/$Nave] 61 | 62 | return $binrec 63 | } 64 | 65 | #Get simulation time corresponding to bin record 66 | proc get_simulation_time {frame tplot dt} { 67 | 68 | set rec [frame2vmdrecord $frame $tplot ] 69 | set time_text [format "%s %.4f" "Simulation Time = " [expr $rec*$dt]] 70 | return $time_text 71 | } 72 | 73 | #Set number of frames (average steps) for each volume 74 | set headervars [read_header]; puts "" 75 | set tplot [lindex $headervars 0]; puts "" 76 | set delta_t [lindex $headervars 1]; puts "" 77 | set Nave [lindex $headervars 2]; puts "" 78 | set skip [lindex $headervars 3]; puts "" 79 | 80 | # Display settings 81 | display projection Orthographic 82 | display nearclip set 0.000000 83 | display farclip set 10.000000 84 | display depthcue off 85 | #Specifiy Colorbar 86 | set colorfilename "./vol_data/cmap.dat"; 
puts "" 87 | cmapfile_color_scale $colorfilename 88 | #tricolor_scale 89 | #color scale method BWR 90 | color Display Background white 91 | 92 | # Load molecular data 93 | set updmol [mol new {../vmd_out.dcd} type dcd waitfor all]; puts "" 94 | set nframes [molinfo top get numframes]; puts "" 95 | 96 | 97 | set dx_records 0 98 | set Nvols 0 99 | for {set i 0} {$i<$nframes+1} {incr i} { 100 | if {[expr [frame2vmdrecord $i $tplot] % [expr $Nave/$skip]] == 0} { 101 | #set dx_records [expr $dx_records + 1] 102 | set binrec [vmdrecord2binrec [frame2vmdrecord $i $tplot] [expr $Nave/$skip]]; puts "" 103 | puts "For Frame = $i loading Bin = $binrec Time = [get_simulation_time $i $tplot $delta_t]" 104 | mol addfile ./vol_data/DATA${binrec}.dx type dx waitfor all; puts "" 105 | set Nvols [expr $Nvols+1] 106 | } 107 | 108 | 109 | } 110 | 111 | #Molecules representation (Note - must be rep 0) 112 | mol delrep 0 top; puts "" 113 | mol representation points 2.000000 114 | mol color Volume 1 115 | mol selection all 116 | mol material Diffuse 117 | mol modmaterial 0 top AOChalky 118 | mol addrep top 119 | 120 | #Display outputs 121 | display ambientocclusion on 122 | display shadows on 123 | light 0 off 124 | 125 | #MD Volumetric Slice 126 | mol color Volume 5 127 | #Slice of xy plane through central in z 128 | #mol representation VolumeSlice 0.500000 5.000000 2.000000 2.000000 129 | #Slice of xz plane at bottom of domain in y 130 | mol representation VolumeSlice 0.000000 3.000000 1.000000 2.000000 131 | mol selection all 132 | mol material Transparent 133 | mol addrep top 134 | 135 | # store name of the Volumetric Slice representation (id=1) for later use 136 | set updrep [mol repname top 1] 137 | 138 | #Get minimum and maximum point in domain 139 | set sel [atomselect top all]; puts "" 140 | set coords [lsort -real [$sel get x]]; puts "" 141 | set dminx [lindex $coords 0]; puts "" 142 | set dmaxx [lindex [lsort -real -decreasing $coords] 0]; puts "" 143 | set coords [lsort -real [$sel get y]]; puts "" 144 | set dminy [lindex $coords 0]' puts "" 145 | set dmaxy [lindex [lsort -real -decreasing $coords] 0]; puts "" 146 | set coords [lsort -real [$sel get z]]; puts "" 147 | set dminz [lindex $coords 0]; puts "" 148 | set dmaxz [lindex [lsort -real -decreasing $coords] 0]; puts "" 149 | 150 | 151 | # colorbar 152 | set cmin [read_file_in "./vol_data/colour_range" 1 ] 153 | set cmax [read_file_in "./vol_data/colour_range" 2 ] 154 | mol scaleminmax 0 1 $cmin $cmax 155 | color_scale_bar_v2 0.5 0.05 0 1 $cmin $cmax 5 156 | 157 | # use the volumetric data set for the isosurface corresponding to the frame divided by the variable Nave. 158 | # $updmol contains the id of the molecule and $updrep the (unique) name of 159 | # the isosurface representation 160 | proc update_iso {args} { 161 | global updmol 162 | global updrep 163 | global Nave 164 | global Nvols 165 | global skip 166 | global tplot 167 | global delta_t 168 | global dminy 169 | global dminx 170 | global cmin 171 | global cmax 172 | 173 | # get representation id and return if invalid 174 | set repid [mol repindex $updmol $updrep] 175 | if {$repid < 0} { return } 176 | 177 | # update representation but replace the data set 178 | # id with the current frame number. 
179 | set frame [molinfo $updmol get frame] 180 | lassign [molinfo $updmol get "{rep $repid}"] rep 181 | set vmdrec [frame2vmdrecord $frame $tplot] 182 | set binrec [vmdrecord2binrec $vmdrec [expr $Nave/$skip]] 183 | set volrec [expr $frame/[expr $Nave/$skip]] 184 | 185 | #Load new representation provided dx file data is available 186 | if {$volrec < $Nvols} { 187 | mol representation [lreplace $rep 2 2 $volrec] 188 | mol modrep $repid $updmol 189 | 190 | #Change volumetric data 191 | mol color Volume $volrec 192 | 193 | # Change molecular colour to MD volumetric data 194 | #puts "Bin record = $binrec , vmdrec = $vmdrec, volrec = $volrec Frame = $frame " 195 | mol modcolor 0 $updmol Volume $volrec 196 | mol scaleminmax 0 $updmol $cmin $cmax 197 | } else { 198 | puts "$volrec outside of range" 199 | } 200 | 201 | 202 | # Draw Text of current iteration 203 | draw delete all 204 | draw color black 205 | set time_text [get_simulation_time $frame $tplot $delta_t] 206 | draw text "$dminx [expr $dminy-5.0] 0.0" $time_text 207 | 208 | #puts "$interval_no $interval_start $MD_step $interval_end" 209 | 210 | color_scale_bar_v2 211 | 212 | # Debug prints 213 | #puts "repid=$repid,updmol=$updmol,frame=$frame,vol=[expr $frame/$Nave],rep=$rep" 214 | 215 | # Rotate 216 | #rotate y by 0.133333 217 | 218 | #Write frame to file 219 | #render TachyonInternal vmdscene{$frame}.tga 220 | 221 | } 222 | 223 | 224 | #Trace check for a change of VMD frame and calls update_iso function if changed 225 | trace variable vmd_frame($updmol) w update_iso 226 | animate goto 0 227 | 228 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/.svn/text-base/read_file.tcl.svn-base: -------------------------------------------------------------------------------- 1 | ## NAME: read_file 2 | ## 3 | ## SYNOPSIS: 4 | ## Read the file specified by the name passed into function 5 | ## 6 | 7 | namespace eval ::read_file:: { 8 | namespace export read_file_in 9 | } 10 | 11 | proc ::read_file::read_file_in {name req_line} { 12 | 13 | set i 0 14 | set f [open $name r] 15 | while {[gets $f line] >= 0} { 16 | #puts [string length $line] 17 | set i [expr $i+1] 18 | set out($i) $line 19 | #puts $out($i) 20 | } 21 | close $f 22 | return $out($req_line) 23 | } 24 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/color_scale_bar_new_test.tcl: -------------------------------------------------------------------------------- 1 | ## NAME: color_scale_bar 2 | ## 3 | ## SYNOPSIS: 4 | ## color_scale_bar draws a color bar on the screen to show all of the 5 | ## current colors (colorid 17~1040). It also shows labels beside the 6 | ## color bar to show the range of the mapped values. 7 | ## 8 | ## VERSION: 2.0 9 | ## Uses VMD version: VMD Version 1.7 or greater 10 | ## Ease of use: 2. need to understand some Tcl and a bit about how VMD 11 | ## works 12 | ## 13 | ## PROCEDURES: 14 | ## color_scale bar 15 | ## 16 | ## DESCRIPTION: 17 | ## To draw a color scale bar with length=1.5, width=0.25, the range of 18 | ## mapped values is 0~128, and you want 8 labels. 19 | ## color_scale_bar 1.5 0.25 0 128 8 20 | ## 21 | ## COMMENTS: The size of the bar also depends on the zoom scale. 
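##
## As a concrete example, the v2 proc defined below is invoked from
## plot_MD_field.vmd with auto-scaling switched off and an explicit range
## (arguments: length width auto_scale fixed min max label_num):
##
##     color_scale_bar_v2 0.5 0.05 0 1 $cmin $cmax 5
##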
22 | ## 23 | ## AUTHOR: 24 | ## Wuwei Liang (gtg088c@prism.gatech.edu) 25 | ## 26 | ## New version 2 built on Wuwei Liang's code, by Dan Wright 27 | ## 28 | ## 29 | ## Last update v2.1 on Dan Wright's version 2 on Wuwei Liang's code 30 | ## by Pepe Romo 31 | ## 32 | ## CHANGES: 33 | ## * draws the bar in a new molecule 34 | ## * has defaults for all parameters so nothing has to be entered manually 35 | ## * functions moved into a seperate namespace 36 | ## * has a delete function (just deletes the seperate mol for now) 37 | ## * fixed position so it remains visible when the scene is rotated 38 | ## 39 | ## CHANGES 2: 40 | ## * Searches the Min and Max through all the FRAMES in each Mol 41 | ## 42 | ## USAGE: 43 | ## Run the following in the console window: 44 | ## 45 | ## 1) 'source color_scale_bar_new.tcl' 46 | ## 2) 'namespace import ::ColorBar::*' 47 | ## 3) run 'color_scale_bar' to create the bar with default parameters; 48 | ## run 'delete_color_scale_bar' to remove it from the display 49 | 50 | 51 | # This function draws a color bar to show the color scale 52 | # length = the length of the color bar 53 | # width = the width of the color bar 54 | # min = the minimum value to be mapped 55 | # max = the maximum mapped value 56 | # label_num = the number of labels to be displayed 57 | 58 | namespace eval ::ColorBar_v2:: { 59 | variable bar_mol 60 | namespace export color_scale_bar_v2 delete_color_scale_bar_v2 61 | } 62 | 63 | proc ::ColorBar_v2::color_scale_bar_v2 {{length 0.5} {width 0.05} {auto_scale 1} {fixed 1} {min 0} {max 100} {label_num 5} } { 64 | 65 | variable bar_mol 66 | 67 | display update off 68 | #display resetview 69 | 70 | # Create a seperate molid to draw in, so it's possible for the user to 71 | # delete the bar. 72 | # 73 | # So that the draw cmds will work right, must save top mol and set top 74 | # to our new created mol, then set it back later. 75 | set numframes [molinfo top get numframes] 76 | set old_top [molinfo top] 77 | set bar_mol [mol new] 78 | mol top $bar_mol 79 | 80 | # If a fixed bar was requested... 81 | if {$fixed == 1} { 82 | mol fix $bar_mol 83 | } 84 | 85 | # If auto_scale was requested, go through all the mols and find the min/max 86 | # scale ranges for setting the bar. 87 | if {$auto_scale == 1} { 88 | set min 999 89 | set max -99 90 | foreach m [molinfo list] { 91 | if {$m != $bar_mol} { 92 | for {set i 0} {$i<$numframes} {incr i 1} { 93 | molinfo $m set frame $i 94 | set minmax [split [mol scaleminmax $m 0]] 95 | set aux [molinfo $m get frame] 96 | #puts "mol $m frame $aux minmax $minmax i $i" 97 | if {$min > [lindex $minmax 0]} { 98 | set min [lindex $minmax 0] 99 | } 100 | if {$max < [lindex $minmax 1]} { 101 | set max [lindex $minmax 1] 102 | } 103 | } 104 | } 105 | } 106 | } 107 | #puts "Final MinMax --> $min $max" 108 | 109 | 110 | # We want to draw relative to the location of the top mol so that the bar 111 | # will always show up nicely. 
112 | set center [molinfo $old_top get center] 113 | set center [regsub -all {[{}]} $center ""] 114 | set center [split $center] 115 | 116 | #puts "[lindex $center 0]" 117 | 118 | # draw the color bar 119 | set start_y [expr [lindex $center 1] - (0.5 * $length)] 120 | #set start_y [expr (-0.5 * $length)-1.2] 121 | set use_x [expr 1+[lindex $center 0]-0.25] 122 | #set use_x -1.0 123 | set use_z [lindex $center 2] 124 | #set use_z 0 125 | set step [expr $length / 1024.0] 126 | 127 | #puts "x: $use_x y: $start_y z: $use_z" 128 | 129 | for {set colorid 17 } { $colorid <= 1040 } {incr colorid 1 } { 130 | draw color $colorid 131 | set cur_y [ expr $start_y + ($colorid - 17) * $step ] 132 | draw line "$use_x $cur_y $use_z" "[expr $use_x+$width] $cur_y $use_z" 133 | } 134 | 135 | # draw the labels 136 | set coord_x [expr (1.2*$width)+$use_x]; 137 | set step_size [expr $length / $label_num] 138 | set color_step [expr 1024.0/$label_num] 139 | set value_step [expr ($max - $min ) / double ($label_num)] 140 | 141 | for {set i 0} {$i <= $label_num } { incr i 1} { 142 | set cur_color_id black 143 | # set cur_color_id IS THE COLOR OF THE LABELS!! 144 | draw color $cur_color_id 145 | set coord_y [expr $start_y+$i * $step_size ] 146 | set cur_text [expr $min + $i * $value_step ] 147 | 148 | draw text " $coord_x $coord_y $use_z" [format %6.2f $cur_text] 149 | } 150 | 151 | 152 | # re-set top 153 | mol top $old_top 154 | display update on 155 | } 156 | 157 | proc ::ColorBar_v2::delete_color_scale_bar_v2 { } { 158 | variable bar_mol 159 | 160 | mol delete $bar_mol 161 | } 162 | 163 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/custom_colorscale.tcl: -------------------------------------------------------------------------------- 1 | ## NAME: tricolor_scale 2 | ## 3 | ## SYNOPSIS: 4 | ## Specify a customer colour schemes for the 5 | ## tricolor_scale used to define gradients 6 | ## 7 | 8 | # load read file routine 9 | source ./read_file.tcl 10 | namespace import ::read_file::* 11 | 12 | 13 | namespace eval ::custom_colorscale:: { 14 | namespace export tricolor_scale cmapfile_color_scale 15 | } 16 | 17 | #Define custom colour scales 18 | proc lerpcolor { col1 col2 alpha } { 19 | set dc [vecsub $col2 $col1] 20 | set nc [vecadd $col1 [vecscale $dc $alpha]] 21 | return $nc 22 | } 23 | 24 | proc coltogs { col } { 25 | foreach {r g b} $col {} 26 | set gray [expr ($r + $g + $b) / 3.0] 27 | return [list $gray $gray $gray] 28 | } 29 | 30 | proc ::custom_colorscale::tricolor_scale {} { 31 | display update off 32 | set mincolorid [expr [colorinfo num] - 1] 33 | set maxcolorid [expr [colorinfo max] - 1] 34 | set colrange [expr $maxcolorid - $mincolorid] 35 | set colhalf [expr $colrange / 2] 36 | for {set i $mincolorid} {$i < $maxcolorid} {incr i} { 37 | set colpcnt [expr ($i - $mincolorid) / double($colrange)] 38 | 39 | set R {1.921568661928176880e-01 2.117647081613540649e-01 5.843137502670288086e-01 } 40 | set W {9.737793234621987537e-01 9.898500606950446645e-01 7.972318345012722185e-01 } 41 | set B {7.239523413363717630e-01 7.381776274281801054e-02 1.505574837974381908e-01 } 42 | if { $colpcnt < 0.5 } { 43 | set nc [lerpcolor $R $W [expr $colpcnt * 2.0]] 44 | } else { 45 | set nc [lerpcolor $W $B [expr ($colpcnt-0.5) * 2.0]] 46 | } 47 | 48 | foreach {r g b} $nc {} 49 | puts "index: $i $r $g $b -- $colpcnt" 50 | display update ui 51 | color change rgb $i $r $g $b 52 | } 53 | display update on 54 | } 55 | 56 | 57 | proc ::custom_colorscale::cmapfile_color_scale 
{name} { 58 | display update off 59 | set mincolorid [expr [colorinfo num] - 1] 60 | set maxcolorid [expr [colorinfo max] - 1] 61 | set colrange [expr $maxcolorid - $mincolorid] 62 | set count 1024 63 | for {set i [expr $mincolorid]} {$i < [expr $maxcolorid]} {incr i} { 64 | set colpcnt [expr ($i - $mincolorid) / double($colrange)] 65 | set nc [read_file_in $name $count] 66 | foreach {r g b} $nc {} 67 | #puts "index: $count $i $r $g $b -- $colpcnt" 68 | display update ui 69 | color change rgb $i $r $g $b 70 | #Go through in reverse order (seems to prevents wrap around of colour) 71 | set count [expr $count-1] 72 | } 73 | display update on 74 | } 75 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/load_miepsf.vmd: -------------------------------------------------------------------------------- 1 | 2 | proc newrep {molid repname} { 3 | 4 | mol addrep $molid 5 | set repid [expr [molinfo $molid get numreps] - 1] 6 | set repname [mol repname $molid $repid] 7 | 8 | return $repid 9 | } 10 | 11 | proc delrep {molid repname} { 12 | 13 | set repid [mol repindex $molid $repname] 14 | mol delrep $repid $molid 15 | 16 | } 17 | 18 | #Default molecule colours 19 | set W_colour 15 20 | set S_colour 1 21 | set A_colour 15 22 | set r_colour 15 23 | 24 | #Molecule sizes based on base scale and set using size ratios 25 | set scale 0.6 26 | set W_size [expr 0.8584*$scale] 27 | set S_size [expr 1.1*$scale] 28 | set A_size [expr 1.*$scale] 29 | set r_size [expr 1.*$scale] 30 | 31 | #LOAD POLYMER MOLECULES 32 | mol new {./vmd_out.psf} type {psf} first 0 last -1 step 1 waitfor all 33 | mol addfile {./vmd_out.dcd} type {dcd} first 0 last -1 step 1 waitfor all top 34 | 35 | mol selection type W 36 | set repW [newrep top repW] 37 | mol modstyle $repW top CPK $W_size 0.000000 10.000000 10.000000 38 | mol modmaterial $repW top Transparent 39 | 40 | mol selection type S 41 | set repS [newrep top repS] 42 | mol modstyle $repS top CPK $S_size 0.000000 10.000000 10.000000 43 | mol modmaterial $repS top AOChalky 44 | 45 | mol selection type A 46 | set repA [newrep top repA] 47 | mol modstyle $repA top CPK $A_size 0.000000 10.000000 10.000000 48 | mol modmaterial $repA top Transparent 49 | 50 | 51 | mol selection type r 52 | set repr [newrep top repr] 53 | mol modstyle $repr top CPK $r_size 0.000000 10.000000 10.000000 54 | mol modmaterial $repr top AOChalky 55 | 56 | #Set default color for types 57 | color Name W $W_colour 58 | color Name S $S_colour 59 | color Name A $A_colour 60 | color Name r $r_colour 61 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/load_polymer.vmd: -------------------------------------------------------------------------------- 1 | 2 | proc newrep {molid repname} { 3 | 4 | mol addrep $molid 5 | set repid [expr [molinfo $molid get numreps] - 1] 6 | set repname [mol repname $molid $repid] 7 | 8 | return $repid 9 | } 10 | 11 | proc delrep {molid repname} { 12 | 13 | set repid [mol repindex $molid $repname] 14 | mol delrep $repid $molid 15 | 16 | } 17 | 18 | #Default molecule colours 19 | set W_colour 15 20 | set EO_colour 0 21 | set S_colour 1 22 | set M_colour 3 23 | set D_colour 9 24 | set C_colour 5 25 | set P_colour 0 26 | set A_colour 15 27 | set EM_colour 2 28 | set OA_colour 4 29 | set T_colour 6 30 | set W1_colour 15 31 | 32 | #Molecule sizes based on base scale and set using size ratios 33 | set scale 0.6 34 | set W_size [expr 0.8584*$scale] 35 | set M_size [expr 1.2398*$scale] 36 | 
set D_size [expr 1.0702*$scale] 37 | set EO_size [expr 0.8067*$scale] 38 | set CM_size [expr 0.7*$scale] 39 | set S_size [expr 1.1*$scale] 40 | set P_size [expr 1.1*$scale] 41 | set A_size [expr 1.*$scale] 42 | set EM_size [expr 1.13*$scale] 43 | set OA_size [expr 1.08*$scale] 44 | set T_size [expr 1.324*$scale] 45 | set W1_size [expr 0.854*$scale] 46 | 47 | #LOAD POLYMER MOLECULES 48 | mol new {./polymer_topol.psf} type {psf} first 0 last -1 step 1 waitfor all 49 | mol addfile {./vmd_out.dcd} type {dcd} first 0 last -1 step 1 waitfor all top 50 | set bonds [newrep top bonds] 51 | mol modstyle $bonds top CPK 0.000000 0.400000 10.000000 10.000000 52 | 53 | mol selection type W 54 | set repW [newrep top repW] 55 | mol modstyle $repW top CPK $W_size 0.000000 10.000000 10.000000 56 | mol modmaterial $repW top Transparent 57 | 58 | mol selection type D 59 | set repD [newrep top repD] 60 | mol modstyle $repD top CPK $D_size 0.000000 10.000000 10.000000 61 | mol modmaterial $repD top AOChalky 62 | 63 | mol selection type M 64 | set repM [newrep top repM] 65 | mol modstyle $repM top CPK $M_size 0.000000 10.000000 10.000000 66 | mol modmaterial $repM top AOChalky 67 | 68 | mol selection type EO 69 | set repEO [newrep top repEO] 70 | mol modstyle $repEO top CPK $EO_size 0.000000 10.000000 10.000000 71 | mol modmaterial $repEO top AOChalky 72 | 73 | mol selection type CM 74 | set repCM [newrep top repCM] 75 | mol modstyle $repCM top CPK $CM_size 0.000000 10.000000 10.000000 76 | 77 | mol selection type S 78 | set repS [newrep top repS] 79 | mol modstyle $repS top CPK $S_size 0.000000 10.000000 10.000000 80 | mol modmaterial $repS top AOChalky 81 | 82 | mol selection type C 83 | set repPOLY [newrep top repPOLY] 84 | mol modstyle $repPOLY top CPK $P_size 0.000000 10.000000 10.000000 85 | mol modmaterial $repPOLY top AOChalky 86 | 87 | mol selection type N 88 | set repSOL [newrep top repSOL] 89 | mol modstyle $repSOL top CPK $A_size 0.000000 10.000000 10.000000 90 | mol modmaterial $repSOL top AOChalky 91 | 92 | mol selection type EM 93 | set repEM [newrep top repEM] 94 | mol modstyle $repEM top CPK $EM_size 0.000000 10.000000 10.000000 95 | mol modmaterial $repEM top AOChalky 96 | 97 | mol selection type OA 98 | set repOA [newrep top repOA] 99 | mol modstyle $repOA top CPK $OA_size 0.000000 10.000000 10.000000 100 | mol modmaterial $repOA top AOChalky 101 | 102 | mol selection type T 103 | set repT [newrep top repT] 104 | mol modstyle $repT top CPK $T_size 0.000000 10.000000 10.000000 105 | mol modmaterial $repT top AOChalky 106 | 107 | mol selection type W1 108 | set repW1 [newrep top repW1] 109 | mol modstyle $repW1 top CPK $W1_size 0.000000 10.000000 10.000000 110 | mol modmaterial $repW1 top AOChalky 111 | 112 | #Set default color for types 113 | color Name W $W_colour 114 | color Name E $EO_colour 115 | color Name S $S_colour 116 | color Name M $M_colour 117 | color Name D $D_colour 118 | color Name P $P_colour 119 | color Name A $A_colour 120 | color Name EM $EM_colour 121 | color Name OA $OA_colour 122 | color Name T $T_colour 123 | color Name W1 $W1_colour 124 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/plot_MD_field.vmd: -------------------------------------------------------------------------------- 1 | # =============================================================# 2 | # VMD script to plot a coupled simulation using a combination # 3 | # of MD molecules and volumetric data calculated from CFD/MD # 4 | # averages and written in .dx 
format # 5 | # =============================================================# 6 | 7 | # load colorbar routine 8 | source ./color_scale_bar_new_test.tcl 9 | namespace import ::ColorBar_v2::* 10 | 11 | # load read file routine 12 | source ./read_file.tcl 13 | namespace import ::read_file::* 14 | 15 | # load custom colour scale 16 | source ./custom_colorscale.tcl 17 | namespace import ::custom_colorscale::* 18 | 19 | #VMD header data written by python script to define time intervals 20 | proc read_header {} { 21 | 22 | set filename "./vol_data/vmd_header" 23 | 24 | set tplot [read_file_in $filename 1 ] 25 | set delta_t [read_file_in $filename 2 ] 26 | set Nave [read_file_in $filename 3 ] 27 | set skip [read_file_in $filename 4 ] 28 | 29 | return [list $tplot $delta_t $Nave $skip] 30 | } 31 | 32 | #Get VMD record that corresponds to current frame 33 | proc frame2vmdrecord {frame tplot} { 34 | 35 | #Check if MD step is inside interval 36 | set interval_no 1 37 | 38 | #Get start and end of current interval 39 | set interval_start [read_file_in "./vol_data/vmd_intervals" $interval_no ] 40 | set interval_end [read_file_in "./vol_data/vmd_intervals" [expr $interval_no+1 ] ] 41 | set interval_frame [expr ($interval_end - $interval_start)/$tplot] 42 | set prev_int_frame 0 43 | while {$frame >= $interval_frame} { 44 | #Get start and end of current interval 45 | #puts "In while loop = $interval_no $interval_start $interval_end $frame $interval_frame" 46 | set interval_no [expr $interval_no +2] 47 | set interval_start [read_file_in "./vol_data/vmd_intervals" $interval_no ] 48 | set interval_end [read_file_in "./vol_data/vmd_intervals" [expr $interval_no+1 ] ] 49 | set prev_int_frame [expr $interval_frame] 50 | set interval_frame [expr ($interval_end - $interval_start)/$tplot + $interval_frame] 51 | } 52 | 53 | set vmdrecord [expr ($interval_start/($tplot)+($frame-$prev_int_frame))] 54 | return $vmdrecord 55 | } 56 | 57 | #Get bin record corresponding to vmd record 58 | proc vmdrecord2binrec {vmdrec Nave} { 59 | 60 | set binrec [expr $vmdrec/$Nave] 61 | 62 | return $binrec 63 | } 64 | 65 | #Get simulation time corresponding to bin record 66 | proc get_simulation_time {frame tplot dt} { 67 | 68 | set rec [frame2vmdrecord $frame $tplot ] 69 | set time_text [format "%s %.4f" "Simulation Time = " [expr $rec*$dt]] 70 | return $time_text 71 | } 72 | 73 | #Set number of frames (average steps) for each volume 74 | set headervars [read_header]; puts "" 75 | set tplot [lindex $headervars 0]; puts "" 76 | set delta_t [lindex $headervars 1]; puts "" 77 | set Nave [lindex $headervars 2]; puts "" 78 | set skip [lindex $headervars 3]; puts "" 79 | 80 | # Display settings 81 | display projection Orthographic 82 | display nearclip set 0.000000 83 | display farclip set 10.000000 84 | display depthcue off 85 | #Specify Colorbar 86 | set colorfilename "./vol_data/cmap.dat"; puts "" 87 | cmapfile_color_scale $colorfilename 88 | #tricolor_scale 89 | #color scale method BWR 90 | color Display Background white 91 | 92 | # Load molecular data 93 | set updmol [mol new {../vmd_out.dcd} type dcd waitfor all]; puts "" 94 | set nframes [molinfo top get numframes]; puts "" 95 | 96 | 97 | set dx_records 0 98 | set Nvols 0 99 | for {set i 0} {$i<$nframes+1} {incr i} { 100 | if {[expr [frame2vmdrecord $i $tplot] % [expr $Nave/$skip]] == 0} { 101 | #set dx_records [expr $dx_records + 1] 102 | set binrec [vmdrecord2binrec [frame2vmdrecord $i $tplot] [expr $Nave/$skip]]; puts "" 103 | puts "For Frame = $i loading Bin = $binrec Time
= [get_simulation_time $i $tplot $delta_t]" 104 | mol addfile ./vol_data/DATA${binrec}.dx type dx waitfor all; puts "" 105 | set Nvols [expr $Nvols+1] 106 | } 107 | 108 | 109 | } 110 | 111 | #Molecules representation (Note - must be rep 0) 112 | mol delrep 0 top; puts "" 113 | mol representation points 2.000000 114 | mol color Volume 1 115 | mol selection all 116 | mol material Diffuse 117 | mol modmaterial 0 top AOChalky 118 | mol addrep top 119 | 120 | #Display outputs 121 | display ambientocclusion on 122 | display shadows on 123 | light 0 off 124 | 125 | #MD Volumetric Slice 126 | mol color Volume 5 127 | #Slice of xy plane through the centre in z 128 | #mol representation VolumeSlice 0.500000 5.000000 2.000000 2.000000 129 | #Slice of xz plane at bottom of domain in y 130 | mol representation VolumeSlice 0.000000 3.000000 1.000000 2.000000 131 | mol selection all 132 | mol material Transparent 133 | mol addrep top 134 | 135 | # store name of the Volumetric Slice representation (id=1) for later use 136 | set updrep [mol repname top 1] 137 | 138 | #Get minimum and maximum point in domain 139 | set sel [atomselect top all]; puts "" 140 | set coords [lsort -real [$sel get x]]; puts "" 141 | set dminx [lindex $coords 0]; puts "" 142 | set dmaxx [lindex [lsort -real -decreasing $coords] 0]; puts "" 143 | set coords [lsort -real [$sel get y]]; puts "" 144 | set dminy [lindex $coords 0]; puts "" 145 | set dmaxy [lindex [lsort -real -decreasing $coords] 0]; puts "" 146 | set coords [lsort -real [$sel get z]]; puts "" 147 | set dminz [lindex $coords 0]; puts "" 148 | set dmaxz [lindex [lsort -real -decreasing $coords] 0]; puts "" 149 | 150 | 151 | # colorbar 152 | set cmin [read_file_in "./vol_data/colour_range" 1 ] 153 | set cmax [read_file_in "./vol_data/colour_range" 2 ] 154 | mol scaleminmax 0 1 $cmin $cmax 155 | color_scale_bar_v2 0.5 0.05 0 1 $cmin $cmax 5 156 | 157 | # use the volumetric data set for the isosurface corresponding to the frame divided by the variable Nave. 158 | # $updmol contains the id of the molecule and $updrep the (unique) name of 159 | # the isosurface representation 160 | proc update_iso {args} { 161 | global updmol 162 | global updrep 163 | global Nave 164 | global Nvols 165 | global skip 166 | global tplot 167 | global delta_t 168 | global dminy 169 | global dminx 170 | global cmin 171 | global cmax 172 | 173 | # get representation id and return if invalid 174 | set repid [mol repindex $updmol $updrep] 175 | if {$repid < 0} { return } 176 | 177 | # update representation but replace the data set 178 | # id with the current frame number.
179 | set frame [molinfo $updmol get frame] 180 | lassign [molinfo $updmol get "{rep $repid}"] rep 181 | set vmdrec [frame2vmdrecord $frame $tplot] 182 | set binrec [vmdrecord2binrec $vmdrec [expr $Nave/$skip]] 183 | set volrec [expr $frame/[expr $Nave/$skip]] 184 | 185 | #Load new representation provided dx file data is available 186 | if {$volrec < $Nvols} { 187 | mol representation [lreplace $rep 2 2 $volrec] 188 | mol modrep $repid $updmol 189 | 190 | #Change volumetric data 191 | mol color Volume $volrec 192 | 193 | # Change molecular colour to MD volumetric data 194 | #puts "Bin record = $binrec , vmdrec = $vmdrec, volrec = $volrec Frame = $frame " 195 | mol modcolor 0 $updmol Volume $volrec 196 | mol scaleminmax 0 $updmol $cmin $cmax 197 | } else { 198 | puts "$volrec outside of range" 199 | } 200 | 201 | 202 | # Draw Text of current iteration 203 | draw delete all 204 | draw color black 205 | set time_text [get_simulation_time $frame $tplot $delta_t] 206 | draw text "$dminx [expr $dminy-5.0] 0.0" $time_text 207 | 208 | #puts "$interval_no $interval_start $MD_step $interval_end" 209 | 210 | color_scale_bar_v2 211 | 212 | # Debug prints 213 | #puts "repid=$repid,updmol=$updmol,frame=$frame,vol=[expr $frame/$Nave],rep=$rep" 214 | 215 | # Rotate 216 | #rotate y by 0.133333 217 | 218 | #Write frame to file 219 | #render TachyonInternal vmdscene{$frame}.tga 220 | 221 | } 222 | 223 | 224 | #Trace checks for a change of VMD frame and calls the update_iso function if changed 225 | trace variable vmd_frame($updmol) w update_iso 226 | animate goto 0 227 | 228 | -------------------------------------------------------------------------------- /postproclib/vmd_tcl/read_file.tcl: -------------------------------------------------------------------------------- 1 | ## NAME: read_file 2 | ## 3 | ## SYNOPSIS: 4 | ## Read the file specified by the name passed into the function 5 | ## 6 | 7 | namespace eval ::read_file:: { 8 | namespace export read_file_in 9 | } 10 | 11 | proc ::read_file::read_file_in {name req_line} { 12 | 13 | set i 0 14 | set f [open $name r] 15 | while {[gets $f line] >= 0} { 16 | #puts [string length $line] 17 | set i [expr $i+1] 18 | set out($i) $line 19 | #puts $out($i) 20 | } 21 | close $f 22 | return $out($req_line) 23 | } 24 | -------------------------------------------------------------------------------- /postproclib/writecolormap.py: -------------------------------------------------------------------------------- 1 | #Write the matplotlib colormap specified by name to a cmap.dat file 2 | 3 | import matplotlib.cm as cm 4 | import numpy as np 5 | 6 | class WriteColorMap(): 7 | 8 | def __init__(self,cmap,N): 9 | 10 | self.N = N 11 | cmapobj = cm.get_cmap(cmap, N) 12 | self.colormap = cmapobj(np.arange(N)) 13 | self.outfile = 'cmap.dat' 14 | 15 | def __str__(self): 16 | string = '' 17 | for i in self.colormap: 18 | for j in range(3): 19 | string += str(i[j]) + ' ' 20 | string+= '\n' 21 | return string 22 | 23 | def write(self,fdir='./'): 24 | 25 | f = open(fdir + self.outfile,'w') 26 | f.write(self.__str__()) 27 | f.close() 28 | 29 | 30 | if __name__ == "__main__": 31 | 32 | cmap_writer = WriteColorMap('RdYlBu_r',1024) 33 | cmap_writer.write() 34 | -------------------------------------------------------------------------------- /pyDataView.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf8 -*- 3 | # 4 | # ========================== pyDataViewer ========================== 5 | # 6 | # This program is free software:
you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see <http://www.gnu.org/licenses/>. 18 | 19 | 20 | import wx 21 | import sys 22 | import postproclib.visualiser as pplv 23 | import argparse 24 | 25 | def run_visualiser(parent_parser=argparse.ArgumentParser(add_help=False)): 26 | 27 | #Keyword arguments 28 | parser = argparse.ArgumentParser( 29 | description=""" 30 | Runs visualiser XXXX where XXXX is an 31 | increasingly more futuristic and exciting number""", 32 | parents=[parent_parser]) 33 | 34 | parser.add_argument('-d', '--fdir', dest='fdir', 35 | help='Directory containing results', 36 | default='./') 37 | 38 | args = vars(parser.parse_args()) 39 | 40 | app = wx.App() 41 | fr = pplv.MainFrame(None, fdir=args['fdir']) 42 | fr.Show() 43 | app.MainLoop() 44 | 45 | if __name__ == "__main__": 46 | 47 | run_visualiser() 48 | -------------------------------------------------------------------------------- /pyDataView.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | 3 | import sys 4 | from PyInstaller.compat import is_win, is_darwin, is_linux 5 | from PyInstaller.utils.hooks import collect_submodules 6 | import vispy.glsl 7 | import vispy.io 8 | 9 | block_cipher = None 10 | 11 | data_files = [ 12 | (os.path.dirname(vispy.glsl.__file__), os.path.join("vispy", "glsl")), 13 | (os.path.join(os.path.dirname(vispy.io.__file__), "_data"), os.path.join("vispy", "io", "_data")) 14 | ] 15 | 16 | if is_darwin: 17 | hidden_imports = [] 18 | else: 19 | hidden_imports = [ 20 | "vispy.ext._bundled.six", 21 | "vispy.app.backends._wx", 22 | ] 23 | 24 | a = Analysis(['pyDataView.py'], 25 | pathex=[], 26 | binaries=[], 27 | datas=data_files, 28 | hiddenimports=hidden_imports, 29 | hookspath=[], 30 | runtime_hooks=[], 31 | excludes=[], 32 | win_no_prefer_redirects=False, 33 | win_private_assemblies=False, 34 | cipher=block_cipher, 35 | noarchive=False) 36 | pyz = PYZ(a.pure, a.zipped_data, 37 | cipher=block_cipher) 38 | 39 | exe = EXE(pyz, 40 | a.scripts, 41 | a.binaries, 42 | a.zipfiles, 43 | a.datas, 44 | [], 45 | name='pyDataView', 46 | debug=False, 47 | bootloader_ignore_signals=False, 48 | strip=False, 49 | upx=True, 50 | upx_exclude=[], 51 | runtime_tmpdir=None, 52 | console=False, 53 | disable_windowed_traceback=False, 54 | target_arch=None, 55 | codesign_identity=None, 56 | entitlements_file=None) 57 | 58 | app = BUNDLE(exe, 59 | name='pyDataView.app', 60 | icon='logo.icns', 61 | info_plist={ 62 | 'NSHighResolutionCapable': 'True', 63 | 'NSRequiresAquaSystemAppearance': 'No' 64 | }, 65 | bundle_identifier=None) 66 | -------------------------------------------------------------------------------- /pyDataView_apple1.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | 3 | import vispy.io 4 | import os 5 | 6 | block_cipher = None 7 | 8 | data_files = [ 9 | (os.path.join(os.path.dirname(vispy.io.__file__), "_data"),
os.path.join("vispy", "io", "_data")) 10 | ] 11 | 12 | a = Analysis( ['pyDataView.py'], 13 | pathex=[], 14 | binaries=[], 15 | datas=data_files, 16 | hiddenimports=[], 17 | hookspath=[], 18 | runtime_hooks=[], 19 | excludes=[], 20 | win_no_prefer_redirects=False, 21 | win_private_assemblies=False, 22 | cipher=block_cipher, 23 | noarchive=False) 24 | 25 | pyz = PYZ(a.pure, a.zipped_data, 26 | cipher=block_cipher) 27 | 28 | exe = EXE( 29 | pyz, 30 | a.scripts, 31 | [], 32 | exclude_binaries=True, 33 | name='pyDataView', 34 | debug=False, 35 | bootloader_ignore_signals=False, 36 | strip=False, 37 | upx=True, 38 | console=True, 39 | disable_windowed_traceback=False, 40 | argv_emulation=False, 41 | target_arch=None, 42 | codesign_identity=None, 43 | entitlements_file=None, 44 | ) 45 | coll = COLLECT( 46 | exe, 47 | a.binaries, 48 | a.zipfiles, 49 | a.datas, 50 | strip=False, 51 | upx=True, 52 | upx_exclude=[], 53 | name='pyDataView', 54 | ) 55 | -------------------------------------------------------------------------------- /pyDataView_apple2.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | 3 | import vispy.glsl 4 | import vispy.io 5 | import os 6 | 7 | block_cipher = None 8 | 9 | data_files = [ 10 | (os.path.dirname(vispy.glsl.__file__), os.path.join("vispy", "glsl")), 11 | (os.path.join(os.path.dirname(vispy.io.__file__), "_data"), os.path.join("vispy", "io", "_data")) 12 | ] 13 | 14 | a = Analysis( ['pyDataView.py'], 15 | pathex=[], 16 | binaries=[], 17 | datas=data_files, 18 | hiddenimports=[], 19 | hookspath=[], 20 | runtime_hooks=[], 21 | excludes=[], 22 | win_no_prefer_redirects=False, 23 | win_private_assemblies=False, 24 | cipher=block_cipher, 25 | noarchive=False) 26 | 27 | pyz = PYZ(a.pure, a.zipped_data, 28 | cipher=block_cipher) 29 | 30 | exe = EXE( 31 | pyz, 32 | a.scripts, 33 | [], 34 | exclude_binaries=True, 35 | name='pyDataView', 36 | debug=False, 37 | bootloader_ignore_signals=False, 38 | strip=False, 39 | upx=True, 40 | console=True, 41 | disable_windowed_traceback=False, 42 | argv_emulation=False, 43 | target_arch=None, 44 | codesign_identity=None, 45 | entitlements_file=None, 46 | ) 47 | coll = COLLECT( 48 | exe, 49 | a.binaries, 50 | a.zipfiles, 51 | a.datas, 52 | strip=False, 53 | upx=True, 54 | upx_exclude=[], 55 | name='pyDataView', 56 | ) 57 | 58 | -------------------------------------------------------------------------------- /pyDataView_screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edwardsmith999/pyDataView/87ef1325a7aecfced5eaf150821032010f3d655c/pyDataView_screenshot.png -------------------------------------------------------------------------------- /pyDataview_final.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/edwardsmith999/pyDataView/87ef1325a7aecfced5eaf150821032010f3d655c/pyDataview_final.pdf -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | h5py==3.4 2 | matplotlib==3.1.2 3 | numpy==1.26 4 | scipy==1.8.0 5 | setuptools==65.5.1 6 | vispy==0.9.6 7 | wxPython==4.0.7.post2 8 | -------------------------------------------------------------------------------- /run_vmd.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import argparse 3 | 
import os 4 | 5 | import sys 6 | sys.path.insert(0, "../../pyDataView/") 7 | import postproclib as ppl 8 | from misclib import Chdir 9 | 10 | class MockHeader(object): 11 | 12 | def __init__(self): 13 | self.vmd_skip = 0 14 | self.vmd_start = 0 15 | self.vmd_end = 0 16 | self.Nsteps = 0 17 | self.tplot = 0 18 | self.delta_t = 0 19 | self.initialstep = 0 20 | 21 | class MockRaw(object): 22 | 23 | def __init__(self, fdir): 24 | self.fdir = fdir 25 | try: 26 | self.header = ppl.MDHeaderData(fdir) 27 | except IOError: 28 | self.header = MockHeader() 29 | 30 | class dummyField(ppl.Field): 31 | 32 | """ 33 | Dummy field object 34 | """ 35 | 36 | dtype = 'd' 37 | nperbin = 1 38 | fname = '' 39 | plotfreq = 1 40 | 41 | def __init__(self, fdir): 42 | self.Raw = MockRaw(fdir) 43 | 44 | 45 | def prepare_vmd_files(args): 46 | 47 | """ 48 | Copy tcl files for postprocessing 49 | and reformat vmd temp files 50 | """ 51 | 52 | fobj = dummyField(args['fdir']) 53 | vmdobj = ppl.VMDFields(fobj, args['fdir']) 54 | vmdobj.copy_tclfiles() #Create VMD vol_data folder and copy vmd driver scripts 55 | vmdobj.reformat() 56 | 57 | 58 | def run_vmd(parent_parser=argparse.ArgumentParser(add_help=False)): 59 | 60 | def print_fieldlist(): 61 | outstr = 'Type of field to overlay with vmd \n' 62 | try: 63 | ppObj = ppl.All_PostProc('../src/results/') 64 | outstr = outstr + str(ppObj) 65 | except: 66 | print(' \n') 67 | pass 68 | 69 | outstr = outstr + '\n N.B. Make sure to include quotes if there is a space in field name \n' 70 | return outstr 71 | 72 | #Keyword arguments 73 | parser = argparse.ArgumentParser(description='run_vmd vs. master jay -- Runs VMD with overlayed field', 74 | parents=[parent_parser]) 75 | 76 | try: 77 | argns, unknown = parser.parse_known_args() 78 | print('Using directory defined as ', argns.fdir) 79 | except AttributeError: 80 | parser.add_argument('-d','--fdir',dest='fdir', nargs='?', 81 | help='Directory with vmd file and field files', 82 | default=None) 83 | 84 | parser.add_argument('-f', '--field', dest='field', 85 | help=print_fieldlist(), default=None) 86 | parser.add_argument('-c', '--comp', dest='comp', 87 | help='Component name', default=None) 88 | parser.add_argument('-l', '--clims', dest='clims', 89 | help='Colour limits', default=None) 90 | parser.add_argument('-p', '--poly',help='Polymer flag', 91 | action='store_const', const=True) 92 | parser.add_argument('-m', '--mie',help='Mie types flag', 93 | action='store_const', const=True) 94 | args = vars(parser.parse_args()) 95 | 96 | #Static arguments 97 | if args['fdir'] == None: 98 | scriptdir = os.path.dirname(os.path.realpath(__file__)) 99 | args['fdir'] = scriptdir + '/../src/results/' 100 | if args['field'] == None: 101 | print("No field type specified -- using default value of no field") 102 | args['field'] = None 103 | component = 0 104 | if(len(sys.argv) < 2 or sys.argv[1] in ['--help', '-help', '-h']): 105 | ppObj = ppl.All_PostProc(args['fdir']) 106 | print("Available field types include") 107 | print(ppObj) 108 | sys.exit() 109 | if args['comp'] == None: 110 | print("No components direction specified, setting default = 0") 111 | args['comp'] = 0 112 | 113 | print(args['clims'], type(args['clims'])) 114 | if args['clims'] == None: 115 | print("No colour limits specified -- using defaults min/max") 116 | clims = None 117 | else: 118 | clims = [float(i) for i in args['clims'].replace("[","").replace("]","").split(",")] 119 | #Polymer case, no field at the moment 120 | if args['poly']: 121 | if args['field'] != None: 122 | 
print("Can't overlay field and polymers") 123 | prepare_vmd_files(args) 124 | 125 | #Open vmd 126 | with Chdir(args['fdir']): 127 | #Build vmd polymer file 128 | ppl.build_psf() 129 | ppl.concat_files() 130 | 131 | #Call polymer script 132 | command = "vmd -e ./vmd/load_polymer.vmd" 133 | os.system(command) 134 | sys.exit() 135 | 136 | if args['mie']: 137 | if args['field'] != None: 138 | print("Can't overlay field and mie molecules") 139 | prepare_vmd_files(args) 140 | 141 | with Chdir(args['fdir']): 142 | 143 | #Call mie script 144 | command = "vmd -e ./vmd/load_miepsf.vmd" 145 | os.system(command) 146 | sys.exit() 147 | 148 | #Plane field case 149 | if args['field'] == None: 150 | prepare_vmd_files(args) 151 | 152 | #Open vmd 153 | with Chdir(args['fdir']): 154 | command = "vmd " + "./vmd_out.dcd" 155 | os.system(command) 156 | 157 | #Overlayed field case 158 | else: 159 | try: 160 | ppObj = ppl.All_PostProc(args['fdir']) 161 | fobj = ppObj.plotlist[args['field']] 162 | except KeyError: 163 | print("Field not recognised -- available field types include:") 164 | print(ppObj) 165 | sys.exit() 166 | except: 167 | raise 168 | 169 | vmdobj = ppl.VMDFields(fobj, args['fdir']) 170 | vmdobj.copy_tclfiles() #Create VMD vol_data folder and copy vmd driver scripts 171 | vmdobj.reformat() 172 | vmdobj.write_vmd_header() 173 | vmdobj.write_vmd_intervals() 174 | vmdobj.write_dx_range(component=args['comp'], clims=clims) 175 | vmdobj.writecolormap('RdYlBu') 176 | 177 | #Open vmd 178 | with Chdir(args['fdir'] + './vmd/'): 179 | command = "vmd -e " + "./plot_MD_field.vmd" 180 | os.system(command) 181 | 182 | if __name__ == "__main__": 183 | run_vmd() 184 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup, find_packages 4 | 5 | # To use a consistent encoding 6 | from codecs import open 7 | from os import path 8 | 9 | from pathlib import Path 10 | this_directory = Path(__file__).parent 11 | long_description = (this_directory / "README.md").read_text() 12 | 13 | setup( name = "pyDataView", 14 | version = "2.0.2", 15 | author = ["Edward Smith"], 16 | author_email = "edward.smith@brunel.ac.uk", 17 | url = "https://github.com/edwardsmith999/pyDataView", 18 | classifiers=['Development Status :: 3 - Alpha', 19 | 'Programming Language :: Python :: 3.6'], 20 | packages=find_packages(exclude=['contrib', 'docs', 'tests']), 21 | keywords='visualisation scientific data', 22 | license = "GPL", 23 | install_requires=['numpy', 'scipy', 'matplotlib', 'wxpython', 'vispy', ], 24 | extras_require = {'Channelflow_plots': ["h5py"], 25 | 'cpl_plots':["scikit-image"]}, 26 | description = "Data Viewer GUI written in python, wxpython and matplotlib", 27 | long_description = long_description, 28 | long_description_content_type='text/markdown', 29 | entry_points={ 30 | 'console_scripts': [ 31 | 'pyDataView=pyDataView:main', 32 | ], 33 | }, 34 | ) 35 | --------------------------------------------------------------------------------