├── .DS_Store ├── CM_D03.pysav ├── docs ├── _build │ ├── html │ │ ├── objects.inv │ │ ├── _static │ │ │ ├── up.png │ │ │ ├── down.png │ │ │ ├── file.png │ │ │ ├── minus.png │ │ │ ├── plus.png │ │ │ ├── comment.png │ │ │ ├── ajax-loader.gif │ │ │ ├── up-pressed.png │ │ │ ├── comment-close.png │ │ │ ├── down-pressed.png │ │ │ ├── comment-bright.png │ │ │ ├── pygments.css │ │ │ ├── doctools.js │ │ │ ├── underscore.js │ │ │ ├── alabaster.css │ │ │ └── basic.css │ │ ├── .buildinfo │ │ ├── searchindex.js │ │ ├── genindex.html │ │ ├── search.html │ │ ├── _sources │ │ │ └── index.txt │ │ └── index.html │ └── doctrees │ │ ├── index.doctree │ │ └── environment.pickle ├── index.rst ├── Makefile └── conf.py ├── Table3_SMC.WD.dat ├── README.md ├── __init__.py ├── Table3_LMCavg.WD.dat ├── Table3_LMC2.WD.dat ├── .gitignore ├── test_profile.txt ├── Table1.WD.dat ├── LICENSE.txt ├── errors.py ├── parse_PAH.py ├── constants.py ├── test.py ├── minerals.py ├── dust.py ├── cmindex.py ├── cosmology.py ├── radprofile.py ├── halodict.py ├── WD01.py ├── analytic.py ├── halo.py ├── analyze_emcee.py ├── galhalo.py ├── model_halo.py └── sigma_scat.py /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/.DS_Store -------------------------------------------------------------------------------- /CM_D03.pysav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/CM_D03.pysav -------------------------------------------------------------------------------- /docs/_build/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/objects.inv -------------------------------------------------------------------------------- /docs/_build/html/_static/up.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/up.png -------------------------------------------------------------------------------- /docs/_build/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/doctrees/index.doctree -------------------------------------------------------------------------------- /docs/_build/html/_static/down.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/down.png -------------------------------------------------------------------------------- /docs/_build/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/file.png -------------------------------------------------------------------------------- /docs/_build/html/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/minus.png -------------------------------------------------------------------------------- /docs/_build/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/plus.png -------------------------------------------------------------------------------- /docs/_build/html/_static/comment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/comment.png 
-------------------------------------------------------------------------------- /docs/_build/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/doctrees/environment.pickle -------------------------------------------------------------------------------- /docs/_build/html/_static/ajax-loader.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/ajax-loader.gif -------------------------------------------------------------------------------- /docs/_build/html/_static/up-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/up-pressed.png -------------------------------------------------------------------------------- /docs/_build/html/_static/comment-close.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/comment-close.png -------------------------------------------------------------------------------- /docs/_build/html/_static/down-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/down-pressed.png -------------------------------------------------------------------------------- /docs/_build/html/_static/comment-bright.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JohannesBuchner/dust/master/docs/_build/html/_static/comment-bright.png -------------------------------------------------------------------------------- /docs/_build/html/.buildinfo: 
-------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 3 | config: e9f23f958205baff77c00d56e0173733 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /Table3_SMC.WD.dat: -------------------------------------------------------------------------------- 1 | # 2 | # Table 3 (SMC) from Weingartner & Draine (2001) 3 | # ApJ 548:296 4 | # 5 | # R_v 10^5 bc alpha_g beta_g a_tg a_cg C_g alpha_s beta_s a_ts C_s 6 | # 7 | -- 0.0 -2.79 1.12 0.0190 0.522 8.36e-14 -2.26 -3.46 0.216 3.16e-14 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dust 2 | For calculating dust scattering and extinction in the X-ray 3 | 4 | ![alt text](https://zenodo.org/badge/10830/eblur/dust.svg) 5 | 6 | ## Install instructions 7 | 8 | Be sure to add the cloned directory to your PYTHONPATH environment variable 9 | 10 | *bash* 11 | 12 | export PYTHONPATH=/path/to/python/libraries 13 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | #from WD01 import * 4 | from constants import * 5 | from galhalo import * 6 | from model_halo import * 7 | from cmindex import * 8 | from analytic import * 9 | from cosmology import * 10 | from halo import * 11 | from scatmodels import * 12 | from cmindex import * 13 | from dust import * 14 | from halodict import * 15 | from sigma_scat import * 16 | -------------------------------------------------------------------------------- /Table3_LMCavg.WD.dat: -------------------------------------------------------------------------------- 1 | # 2 | # Table 3 (LMC avg) from Weingartner & Draine (2001) 3 | # ApJ 548:296 4 | # 5 | # R_v 10^5 bc alpha_g beta_g a_tg a_cg C_g alpha_s beta_s a_ts C_s 6 | # 7 | -- 0.0 -2.91 0.895 0.578 1.21 7.12e-17 -2.45 0.125 0.191 1.84e-14 8 | -- 1.0 -2.99 2.46 0.0980 0.641 3.51e-15 -2.49 0.345 0.184 1.78e-14 9 | -- 2.0 4.43 0.0 0.00322 0.285 9.57e-24 -2.70 2.18 0.198 7.29e-15 10 | -------------------------------------------------------------------------------- /Table3_LMC2.WD.dat: -------------------------------------------------------------------------------- 1 | # 2 | # Table 3 (LMC2) from Weingartner & Draine (2001) 3 | # ApJ 548:296 4 | # 5 | # R_v 10^5 bc alpha_g beta_g a_tg a_cg C_g alpha_s beta_s a_ts C_s 6 | # 7 | -- 0.0 -2.94 5.22 0.373 0.349 9.92e-17 -2.34 -0.243 0.184 3.18e-14 8 | -- 0.5 -2.82 9.01 0.392 0.269 6.20e-17 -2.36 -0.113 0.182 3.03e-14 9 | -- 1.0 4.16 0.0 0.342 0.0493 3.05e-15 -2.44 0.254 0.188 2.24e-14 10 | 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled stuff # 2 | ############## 3 | *.pyc 4 | 5 | # Temp files # 6 | ########### 7 | *.*~ 8 | *# 9 | 10 | # ipynb support files # 11 | ###################### 12 | .ipynb_checkpoints* 13 | 14 | # Mercurial relics # 15 | ############### 16 | .hg* 17 | 18 | # OS generated files # 19 | ################## 20 
| .DS_Store 21 | .DS_Store? 22 | ._* 23 | .Spotlight-V100 24 | .Trashes 25 | Icon? 26 | ehthumbs.db 27 | Thumbs.db -------------------------------------------------------------------------------- /test_profile.txt: -------------------------------------------------------------------------------- 1 | # R[0] R[1] SUR_BRI[counts pix**-2] SUR_BRI_ERR[counts pix**-2] 2 | 1.0 2.0 1648.89413778 316.720470112 3 | 2.0 3.0 3337.83616237 309.567630556 4 | 3.0 4.0 2273.58748412 216.305703583 5 | 4.0 5.0 1258.64517223 144.999398968 6 | 5.0 6.29462705897 607.292785834 80.261109235 7 | 6.29462705897 7.92446596231 303.750901715 45.9392303862 8 | 7.92446596231 9.97631157484 190.246510813 29.4028097507 9 | 9.97631157484 12.5594321575 111.941845802 18.3309525496 10 | 12.5594321575 15.8113883008 60.9580131027 10.7794999304 11 | 15.8113883008 19.9053585277 38.8652104586 7.02682893118 12 | 19.9053585277 25.0593616814 25.6624633745 5.12177336029 13 | 25.0593616814 31.547867224 14.1278372446 3.00648881292 14 | 31.547867224 39.7164117362 8.45660871501 1.80109221266 15 | 39.7164117362 50.0 6.89080086994 1.24762149522 16 | 50.0 62.9462705897 4.69853684985 0.797904017389 17 | 62.9462705897 79.2446596231 2.922400941 0.515703596469 18 | 79.2446596231 99.7631157484 1.98515527697 0.345530471966 19 | 99.7631157484 125.594321575 1.30973713259 0.23768560596 20 | 125.594321575 158.113883008 0.791750382793 0.168537044506 21 | 158.113883008 199.053585277 0.653315475128 0.134128203423 22 | -------------------------------------------------------------------------------- /Table1.WD.dat: -------------------------------------------------------------------------------- 1 | # 2 | # Table 1 (Case A) from Weingartner & Draine (2001) 3 | # ApJ 548:296 4 | # 5 | # R_v 10^5 bc alpha_g beta_g a_tg a_cg C_g alpha_s beta_s a_ts C_s 6 | # 7 | 3.1 0.0 -2.25 -0.0648 0.00745 0.606 9.94e-11 -1.48 -9.34 0.172 1.02e-12 8 | 3.1 1.0 -2.17 -0.0382 0.00373 0.586 3.79e-10 -1.46 -10.3 0.174 1.09e-12 9 | 3.1 2.0 -2.04 -0.111 
0.00828 0.543 5.57e-11 -1.43 -11.7 0.173 1.27e-12 10 | 3.1 3.0 -1.91 -0.125 0.00837 0.499 4.15e-11 -1.41 -11.5 0.171 1.33e-12 11 | 3.1 4.0 -1.84 -0.132 0.00898 0.489 2.90e-11 -2.10 -0.114 0.169 1.26e-13 12 | 3.1 5.0 -1.72 -0.322 0.0254 0.438 3.20e-12 -2.10 -0.0407 0.166 1.27e-13 13 | 3.1 6.0 -1.54 -0.165 0.0107 0.428 9.99e-12 -2.21 0.300 0.164 1.00e-13 14 | 4.0 0.0 -2.26 -0.199 0.0241 0.861 5.47e-12 -2.03 0.668 0.189 5.20e-14 15 | 4.0 1.0 -2.16 -0.0862 0.00867 0.803 4.58e-11 -2.05 0.832 0.188 4.81e-14 16 | 4.0 2.0 -2.01 -0.0973 0.00811 0.696 3.96e-11 -2.06 0.995 0.185 4.70e-14 17 | 4.0 3.0 -1.83 -0.175 0.0117 0.604 1.42e-11 -2.08 1.29 0.184 4.26e-14 18 | 4.0 4.0 -1.64 -0.247 0.0152 0.536 5.83e-12 -2.09 1.58 0.183 3.94e-14 19 | 5.5 0.0 -2.35 -0.668 0.148 1.96 4.82e-14 -1.57 1.10 0.198 4.24e-14 20 | 5.5 1.0 -2.12 -0.670 0.0686 1.35 3.65e-13 -1.57 1.25 0.197 4.00e-14 21 | 5.5 2.0 -1.94 -0.853 0.0786 0.921 2.57e-13 -1.55 1.33 0.195 4.05e-14 22 | 5.5 3.0 -1.61 -0.722 0.0418 0.720 7.58e-13 -1.59 2.12 0.193 3.20e-14 -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Copyright (c) 2014, Lia Corrales 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are 7 | met: 8 | 9 | 1. Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright 13 | notice, this list of conditions and the following disclaimer in the 14 | documentation and/or other materials provided with the distribution. 
15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS 17 | IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 18 | TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 19 | PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 22 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 23 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 24 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 25 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 26 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /errors.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | 4 | def prop_add( xerr=0.0, yerr=0.0 ): 5 | return np.sqrt( xerr**2 + yerr**2 ) 6 | 7 | def prop_div( x, y, xerr=0.0, yerr=0.0 ): 8 | F = x / y 9 | return np.sqrt( xerr**2 + F**2 * yerr**2 ) / y 10 | 11 | def prop_mult( x, y, xerr=0.0, yerr=0.0 ): 12 | F = x * y 13 | return np.sqrt( (xerr/x)**2 + (yerr/y)**2 ) * F 14 | 15 | """ 16 | ## Quick test 17 | import matplotlib.pyplot as plt 18 | 19 | i = np.arange(10.0) + 1 20 | x = 2.0 * i 21 | y = 4.0 * i 22 | xerr = np.zeros( 10.0 ) + 5.0 23 | yerr = np.zeros( 10.0 ) + 2.5 24 | 25 | test_add = prop_add( xerr, yerr ) 26 | test_div = prop_div( x, y, xerr, yerr ) 27 | test_mult = prop_mult( x, y, xerr, yerr ) 28 | 29 | fig = plt.figure() 30 | plt.errorbar( i, x, yerr=xerr, ls='', color='r', lw=3, alpha=0.3 ) 31 | plt.errorbar( i, y, yerr=yerr, ls='', color='b', lw=3, alpha=0.3 ) 32 | plt.errorbar( i, x + y, yerr=test_add, ls='', color='0.3', lw=3, alpha=0.3 ) 33 | plt.errorbar( i, x / y, yerr=test_div, ls='', 
color='0.5', lw=3, alpha=0.3 ) 34 | plt.errorbar( i, x * y, yerr=test_mult, ls='', color='0.7', lw=3, alpha=0.3 ) 35 | plt.ylim(-1,1) 36 | fig.show() 37 | 38 | print 'Add, no yerr:', prop_add( xerr ) 39 | print 'Add, no xerr:', prop_add( yerr=yerr ) 40 | print 'Div, no yerr:', prop_div( x, y, xerr ) 41 | print 'Should be:', xerr / y 42 | print 'Div, no xerr:', prop_div( x, y, yerr=yerr ) 43 | print 'Mult, no yerr:', prop_mult( x, y, xerr ) 44 | print 'Should be:', y * xerr 45 | print 'Mult, no xerr:', prop_mult( x, y, yerr=yerr ) 46 | print 'Should be:', x * yerr 47 | """ -------------------------------------------------------------------------------- /docs/_build/html/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({envversion:46,filenames:["index"],objects:{},objnames:{},objtypes:{},terms:{"2003a":[],"2003b":[],"class":0,"static":0,abov:0,absorpt:0,advis:0,algorithm:0,all:0,also:0,ani:0,appropori:0,aris:0,astrosil:0,binari:0,bohren:0,both:0,busi:0,calcul:0,can:0,caus:0,clone:0,code:0,com:0,complex:0,condit:0,consequenti:0,constant:0,contact:0,content:[],context:0,contract:0,contributor:0,convert:0,copyright:0,corral:0,cosmolog:0,custom:0,damag:0,data:0,direct:0,directori:0,disclaim:0,distribut:0,doi:0,drain:0,dwek:0,edu:0,effici:0,even:0,event:0,exemplari:0,express:0,first:0,fit:0,follow:0,form:0,fortran:0,from:0,fulli:0,gan:0,git:0,github:0,good:0,gorenstein:0,grain:0,graphit:0,halo:0,have:0,hoffman:[],holder:0,howev:0,http:0,huffman:0,idl:0,imag:0,impli:0,incident:0,includ:0,index:0,indirect:0,infrar:0,intergalact:0,interrupt:0,interstellar:0,invok:0,issu:0,law:0,lia:0,liabil:0,liabl:0,librari:0,limit:0,link:[],list:0,loss:0,materi:0,mauch:0,merchant:0,met:0,mie:0,milki:0,mit:0,model:0,modif:0,modul:0,must:0,neglig:0,notic:0,number:0,object:0,optic:0,org:0,other:0,otherwis:0,out:0,paerel:0,page:0,particular:0,path:0,permit:0,physic:0,pleas:0,possibl:0,power:0,procur:0,profit:0,provid:0,publish:
0,purpos:0,python:0,rai:0,rayleigh:0,recommend:0,redistribut:0,refract:0,releas:0,repo:0,reproduc:0,reserv:0,retain:0,right:0,scatter:0,search:0,see:[],servic:0,set:0,shall:0,size:0,smith:0,softwar:0,sourc:0,space:0,special:0,strict:0,substitut:0,thei:0,theori:0,thi:0,tort:0,user:0,version:0,wai:0,warranti:0,websit:[],weingartn:0,whether:0,without:0,yet:0,you:0,your:0,zenodo:0},titles:["Welcome to documentation for eblur/dust"],titleterms:{contribut:0,document:0,dust:0,eblur:0,featur:0,indic:0,instal:0,licens:0,support:0,tabl:0,welcom:0}}) -------------------------------------------------------------------------------- /parse_PAH.py: -------------------------------------------------------------------------------- 1 | 2 | ## Created by Lia Corrales to parse PAH optical constant tables (PAHion_30, PAHneu_30) 3 | ## November 11, 2013 : lia@astro.columbia.edu 4 | 5 | import os 6 | import numpy as np 7 | 8 | def find_cmfile( name ): 9 | if os.path.exists(name): 10 | return name 11 | path_list = os.getenv("PYTHONPATH").split(':') 12 | for path in path_list: 13 | for root, dirs, files in os.walk(path+"/"): 14 | if name in files: 15 | return os.path.join(root, name) 16 | else: 17 | return "" 18 | 19 | ION_FILE = find_cmfile('PAHion_30') 20 | NEU_FILE = find_cmfile('PAHneu_30') 21 | 22 | def parse_PAH( option, ignore='#', flag='>', verbose=False ): 23 | 24 | if option == 'ion': filename = ION_FILE 25 | if option == 'neu': filename = NEU_FILE 26 | 27 | try : f = open( filename, 'r' ) 28 | except: 29 | print 'ERROR: file not found' 30 | return 31 | 32 | COLS = ['w(micron)', 'Q_ext', 'Q_abs', 'Q_sca', 'g=' ] 33 | result = {} 34 | 35 | end_of_file = False 36 | while not end_of_file: 37 | try: 38 | line = f.readline() 39 | 40 | # Ignore the ignore character 41 | if line[0] == ignore : pass 42 | 43 | # Characters flagged with '>' earn a dictionary entry with grain size 44 | elif line[0] == flag : 45 | gsize = np.float( line.split()[1] ) 46 | if verbose : print 'Reading data for 
grain size:', gsize 47 | result[ gsize ] = {} 48 | # Initialize dictionaries with lists 49 | for i in range( len(COLS) ) : result[gsize][COLS[i]] = [] 50 | 51 | # Sort the columns into the correct dictionary 52 | else: 53 | row_vals = line.split() 54 | for i in range( len(COLS) ) : 55 | result[ gsize ][ COLS[i] ].append( np.float( row_vals[i] ) ) 56 | except: 57 | if verbose : print line 58 | end_of_file = True 59 | 60 | f.close() 61 | 62 | return result 63 | 64 | #test_ion = parse_PAH('ion') 65 | #test_neu = parse_PAH('neu') 66 | -------------------------------------------------------------------------------- /docs/_build/html/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | Index — eblur/dust 1.0 documentation 11 | 12 | 13 | 14 | 15 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 44 | 45 |
46 |
47 |
48 |
49 | 50 | 51 |

Index

52 | 53 |
54 | 55 |
56 | 57 | 58 |
59 |
60 |
61 | 81 |
82 |
83 | 91 | 92 | 93 | 94 | 95 | 96 | -------------------------------------------------------------------------------- /constants.py: -------------------------------------------------------------------------------- 1 | 2 | import math 3 | import numpy as np 4 | import scipy as sp 5 | 6 | def h0(): 7 | return 75. #km/s/Mpc 8 | 9 | def rho_crit(): 10 | return np.float64(1.1e-29) 11 | 12 | def omega_d(): 13 | return 1e-5 14 | 15 | def omega_m(): 16 | return 0.3 17 | 18 | def omega_l(): 19 | return 0.7 20 | 21 | def intz(x, y): 22 | from scipy import integrate 23 | return sp.integrate.trapz(y,x) 24 | # Note that scipy calls integration in reverse order as I do 25 | 26 | def int(x, y): 27 | dx = x[1:] - x[:-1] 28 | dy = y[1:] - y[:-1] 29 | return np.sum( y[:-1]*dx + 0.5*dx*dy ) 30 | 31 | def c(): 32 | return 3.e10 # cm/s 33 | 34 | def h(): 35 | return np.float64( 4.136e-18 ) # keV s 36 | 37 | def re(): 38 | return 2.83e-13 # cm (electron radius) 39 | 40 | def mp(): 41 | return np.float64( 1.673e-24 ) # g (mass of proton) 42 | 43 | def micron2cm(): 44 | return 1e-6 * 100 45 | 46 | def pc2cm(): 47 | return 3.09e18 48 | 49 | def cperh0(): 50 | return c()*1.e-5 / h0() * (1.e6 * pc2cm() ) #cm 51 | 52 | def kev2lam(): 53 | return 1.240e-7 # cm keV 54 | 55 | def arcs2rad(): 56 | return 2.0*np.pi / 360. / 60. / 60. # rad/arcsec 57 | 58 | def arcm2rad(): 59 | return 2.0*np.pi / 360. / 60. # rad/arcmin 60 | 61 | 62 | #------- Save and restore functions, similar to IDL -------# 63 | # http://idl2python.blogspot.com/2010/10/save-and-restore-2.html 64 | # Updated April 20, 2012 to store objects 65 | # http://wiki.python.org/moin/UsingPickle 66 | 67 | def save( file, varnames, values): 68 | """ 69 | Usage: save('mydata.pysav', ['a','b','c'], [a,b,c] ) 70 | """ 71 | import cPickle 72 | f =open(file,"wb") 73 | super_var =dict( zip(varnames,values) ) 74 | cPickle.dump( super_var, f ) 75 | f.close 76 | 77 | def restore(file): 78 | """ 79 | Read data saved with save function. 
80 | Usage: data = restore('mydata.pysav') 81 | a = data['a'] 82 | b = data['b'] 83 | c = data['c'] 84 | """ 85 | import cPickle 86 | f=open(file,"rb") 87 | result = cPickle.load(f) 88 | f.close 89 | return result 90 | 91 | #------- Read ascii tables --------# 92 | # June 11, 2013 93 | # needed for computers that don't have access to asciidata (hotfoot) 94 | 95 | def read_table( filename, ncols, ignore='#' ): 96 | """ 97 | Read data saved in an ascii table 98 | Assumes data is separated by white space 99 | Assumes all the data are floats 100 | Ignores lines that start with the ignore character / sequence 101 | --------------- 102 | Usage : read_table( filename, ncols, ignore='#' ) 103 | Returns : A dictionary with the column index as keys and the column data as lists 104 | """ 105 | 106 | # Initialize result dictionary 107 | result = {} 108 | for i in range(ncols): 109 | result[i] = [] 110 | 111 | try : f = open( filename, 'r' ) 112 | except: 113 | print 'ERROR: file not found' 114 | return 115 | 116 | end_of_file = False 117 | while not end_of_file: 118 | try: 119 | temp = f.readline() 120 | if temp[0] == ignore : pass # Ignore the ignore character 121 | else: 122 | temp = temp.split() 123 | for i in range(ncols) : result[i].append( np.float(temp[i]) ) 124 | except: 125 | end_of_file = True 126 | 127 | f.close() 128 | return result 129 | 130 | -------------------------------------------------------------------------------- /docs/_build/html/search.html: -------------------------------------------------------------------------------- 1 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Search — eblur/dust 1.0 documentation 10 | 11 | 12 | 13 | 14 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 51 | 52 |
53 |
54 |
55 |
56 | 57 |

Search

58 |
59 | 60 |

61 | Please activate JavaScript to enable the search 62 | functionality. 63 |

64 |
65 |

66 | From here you can search these documents. Enter your search 67 | words into the box below and click "search". Note that the search 68 | function will automatically search for all of the words. Pages 69 | containing fewer words won't appear in the result list. 70 |

71 |
72 | 73 | 74 | 75 |
76 | 77 |
78 | 79 |
80 | 81 |
82 |
83 |
84 | 88 |
89 |
90 | 98 | 99 | 100 | 101 | 102 | 103 | -------------------------------------------------------------------------------- /test.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | print 'importing...' 5 | import sigma_scat as ss 6 | import dust 7 | import constants as c 8 | 9 | print 'setting input...' 10 | # column density 11 | NH = 1.e21 12 | # dust-to-gas ratio 13 | d2g = 0.009 14 | # use proton mass to get dust column mass 15 | dust_mass = NH * c.mp() * d2g 16 | # energy range to evaluate over (in keV) 17 | ERANGE = np.power( 10.0, np.arange(-0.6,1.0,0.005) ) 18 | MD = dust_mass * 10.0 # for N_H = 1.e22 19 | 20 | # define densities 21 | # g cm^-3; see Draine book 22 | RHO_SIL, RHO_GRA, RHO_AVG = 3.8, 2.2, 3.0 23 | 24 | print 'defining dust distributions...' 25 | 26 | # dust radius range to compute (in um) 27 | # up to 0.25 um 28 | MRN_RANGE = np.arange(0.005,0.25001,0.05) 29 | 30 | # larger range going up to 1.5 um 31 | BIG_RANGE = np.arange(0.005, 1.5, 0.05) 32 | MRN_RANGE = BIG_RANGE 33 | # define dust distribution: A powerlaw with index 3.5 34 | MRN_sil = dust.Dustdist( rad=MRN_RANGE, p=3.5, rho=RHO_SIL ) 35 | MRN_gra = dust.Dustdist( rad=MRN_RANGE, p=3.5, rho=RHO_GRA ) 36 | MRN_avg = dust.Dustdist( rad=MRN_RANGE, p=3.5, rho=RHO_AVG ) 37 | 38 | print 'Defining Kappaext and Dustspectrum...' 39 | 40 | print ' Mie scattering with Drude approximation' 41 | RGD_mrn = ss.Kappaext( E=ERANGE, dist=dust.Dustspectrum(rad=MRN_avg, md=dust_mass), scatm=ss.makeScatmodel('Mie','Drude') ) 42 | 43 | print ' Mie scattering for the small grain MRN distribution' 44 | Mie_mrn_sil = ss.Kappaext( E=ERANGE, dist=dust.Dustspectrum(rad=MRN_sil, md=dust_mass), scatm=ss.makeScatmodel('Mie','Silicate') ) 45 | Mie_mrn_gra = ss.Kappaext( E=ERANGE, dist=dust.Dustspectrum(rad=MRN_gra, md=dust_mass), scatm=ss.makeScatmodel('Mie','Graphite') ) 46 | 47 | print 'plotting...' 
48 | 49 | plt.plot( RGD_mrn.E, RGD_mrn.kappa * MD, '0.4', lw=2, label='Mie-Drude' ) 50 | plt.plot( Mie_mrn_sil.E, Mie_mrn_sil.kappa * MD, 'g', lw=2, label='Mie-Silicate' ) 51 | plt.plot( Mie_mrn_gra.E, Mie_mrn_gra.kappa * MD, 'b', lw=2, label='Mie-Graphite' ) 52 | 53 | plt.legend( loc='upper right', fontsize=12 ) 54 | 55 | plt.loglog() 56 | plt.xlim(0.3,10) 57 | plt.ylim(1.e-2,10.0) 58 | plt.xlabel( "Energy [keV]", size=15 ) 59 | plt.ylabel( r"Scattering Opacity [$\tau$ per $N_{\rm H}/10^{22}$]", size=15 ) 60 | 61 | plt.savefig('figure5a.pdf', format='pdf') 62 | plt.close() 63 | np.savetxt('figure5a.txt', np.transpose([ERANGE, RGD_mrn.kappa, Mie_mrn_sil.kappa, Mie_mrn_gra.kappa])) 64 | 65 | ################################################################################ 66 | 67 | print 'Defining Kappascat and Dustspectrum...' 68 | 69 | print ' Rayleigh-Gans plus Drude approximation' 70 | RGD_mrn = ss.Kappascat( E=ERANGE, dist=dust.Dustspectrum(rad=MRN_avg, md=dust_mass), scatm=ss.makeScatmodel('RG','Drude') ) 71 | 72 | print ' Mie scattering for the small grain MRN distribution' 73 | Mie_mrn_sil = ss.Kappascat( E=ERANGE, dist=dust.Dustspectrum(rad=MRN_sil, md=dust_mass), scatm=ss.makeScatmodel('Mie','Silicate') ) 74 | Mie_mrn_gra = ss.Kappascat( E=ERANGE, dist=dust.Dustspectrum(rad=MRN_gra, md=dust_mass), scatm=ss.makeScatmodel('Mie','Graphite') ) 75 | 76 | print 'plotting...' 
77 | 78 | 79 | plt.plot( RGD_mrn.E, RGD_mrn.kappa * MD, '0.4', lw=2, label='RG-Drude' ) 80 | plt.plot( Mie_mrn_sil.E, Mie_mrn_sil.kappa * MD, 'g', lw=2, label='Mie-Silicate' ) 81 | plt.plot( Mie_mrn_gra.E, Mie_mrn_gra.kappa * MD, 'b', lw=2, label='Mie-Graphite' ) 82 | 83 | plt.legend( loc='upper right', fontsize=12 ) 84 | 85 | 86 | plt.loglog() 87 | plt.xlim(0.3,10) 88 | plt.ylim(1.e-2,10.0) 89 | plt.xlabel( "Energy [keV]", size=15 ) 90 | plt.ylabel( r"Scattering Opacity [$\tau$ per $N_{\rm H}/10^{22}$]", size=15 ) 91 | 92 | plt.text( 0.5, 0.1, '$0.25\ \mu m$\ncut-off', size=12) 93 | plt.text( 1.2, 3.0, '$1.5\ \mu m$\ncut-off', size=12) 94 | plt.savefig('figure5b.pdf', format='pdf') 95 | plt.close() 96 | np.savetxt('figure5b.txt', np.transpose([ERANGE, RGD_mrn.kappa, Mie_mrn_sil.kappa, Mie_mrn_gra.kappa])) 97 | 98 | -------------------------------------------------------------------------------- /docs/_build/html/_static/pygments.css: -------------------------------------------------------------------------------- 1 | .highlight .hll { background-color: #ffffcc } 2 | .highlight { background: #eeffcc; } 3 | .highlight .c { color: #408090; font-style: italic } /* Comment */ 4 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 5 | .highlight .k { color: #007020; font-weight: bold } /* Keyword */ 6 | .highlight .o { color: #666666 } /* Operator */ 7 | .highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ 8 | .highlight .cp { color: #007020 } /* Comment.Preproc */ 9 | .highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ 10 | .highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ 11 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 12 | .highlight .ge { font-style: italic } /* Generic.Emph */ 13 | .highlight .gr { color: #FF0000 } /* Generic.Error */ 14 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 15 | .highlight .gi { color: #00A000 } /* 
Generic.Inserted */ 16 | .highlight .go { color: #333333 } /* Generic.Output */ 17 | .highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ 18 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 19 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 20 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 21 | .highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ 22 | .highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ 23 | .highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ 24 | .highlight .kp { color: #007020 } /* Keyword.Pseudo */ 25 | .highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ 26 | .highlight .kt { color: #902000 } /* Keyword.Type */ 27 | .highlight .m { color: #208050 } /* Literal.Number */ 28 | .highlight .s { color: #4070a0 } /* Literal.String */ 29 | .highlight .na { color: #4070a0 } /* Name.Attribute */ 30 | .highlight .nb { color: #007020 } /* Name.Builtin */ 31 | .highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ 32 | .highlight .no { color: #60add5 } /* Name.Constant */ 33 | .highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ 34 | .highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ 35 | .highlight .ne { color: #007020 } /* Name.Exception */ 36 | .highlight .nf { color: #06287e } /* Name.Function */ 37 | .highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ 38 | .highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ 39 | .highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ 40 | .highlight .nv { color: #bb60d5 } /* Name.Variable */ 41 | .highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ 42 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 43 | .highlight .mb { color: #208050 } /* Literal.Number.Bin */ 44 | .highlight .mf { color: #208050 } /* 
Literal.Number.Float */ 45 | .highlight .mh { color: #208050 } /* Literal.Number.Hex */ 46 | .highlight .mi { color: #208050 } /* Literal.Number.Integer */ 47 | .highlight .mo { color: #208050 } /* Literal.Number.Oct */ 48 | .highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ 49 | .highlight .sc { color: #4070a0 } /* Literal.String.Char */ 50 | .highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ 51 | .highlight .s2 { color: #4070a0 } /* Literal.String.Double */ 52 | .highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ 53 | .highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ 54 | .highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ 55 | .highlight .sx { color: #c65d09 } /* Literal.String.Other */ 56 | .highlight .sr { color: #235388 } /* Literal.String.Regex */ 57 | .highlight .s1 { color: #4070a0 } /* Literal.String.Single */ 58 | .highlight .ss { color: #517918 } /* Literal.String.Symbol */ 59 | .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ 60 | .highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ 61 | .highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ 62 | .highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ 63 | .highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. eblur/dust documentation master file, created by 2 | sphinx-quickstart on Thu Jan 28 13:40:11 2016. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 
5 | 6 | Welcome to documentation for *eblur/dust* 7 | ========================================= 8 | 9 | The *eblur/dust* set of python modules calculate scattering absorption 10 | and scattering efficiencies for dust from the infrared to the X-ray. 11 | This code can also be used to calculate dust scattering halo images in 12 | the X-ray, in both interstellar and intergalactic (cosmological) 13 | contexts. 14 | 15 | | **First published version of this code** (released with `Corrales & 16 | Paerels, 2015 17 | `_) 18 | | http://dx.doi.org/10.5281/zenodo.15991 19 | 20 | 21 | Features 22 | -------- 23 | 24 | A number of dust grain size distributions and optical constants are 25 | provided, but they can be fully customized by the user by invoking 26 | custom objects of the approporiate class. Provided dust models 27 | include: 28 | 29 | * A power law distribution of dust grain sizes 30 | * `Weingartner & Draine (2001) `_ 31 | grain size distributions for Milky Way dust 32 | * Optical constants (complex index of refraction) for 0.1 um sized 33 | `graphite and astrosilicate grains `_ 34 | 35 | * Rayleigh-Gans scattering physics 36 | 37 | * `Smith & Dwek (1998) `_ 38 | * `Mauche & Gorenstein (1986) `_ 39 | 40 | * Mie scattering physics using the algorithms of 41 | `Bohren & Huffman (1986) `_ 42 | 43 | * Converted from `fortran and IDL 44 | `_ 45 | to python 46 | 47 | 48 | Installation 49 | ------------ 50 | 51 | As of yet there is no static install version. I recommend cloning the 52 | github repo into a directory in your python path.:: 53 | 54 | cd /path/to/python/libraries/ 55 | git clone git@github.com:eblur/dust.git . 56 | 57 | 58 | Contribute 59 | ---------- 60 | 61 | **Source code:** github.com/eblur/dust 62 | 63 | 64 | Support 65 | ------- 66 | 67 | If you are having issues, please contact lia@space.mit.edu 68 | 69 | 70 | .. Contents: 71 | 72 | .. 
toctree:: 73 | :maxdepth: 2 74 | 75 | 76 | 77 | License 78 | ------- 79 | 80 | Copyright (c) 2014, Lia Corrales 81 | All rights reserved. 82 | 83 | Redistribution and use in source and binary forms, with or without 84 | modification, are permitted provided that the following conditions are 85 | met: 86 | 87 | 1. Redistributions of source code must retain the above copyright 88 | notice, this list of conditions and the following disclaimer. 89 | 90 | 2. Redistributions in binary form must reproduce the above copyright 91 | notice, this list of conditions and the following disclaimer in the 92 | documentation and/or other materials provided with the distribution. 93 | 94 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS 95 | IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 96 | TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 97 | PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 98 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 99 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 100 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 101 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 102 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 103 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 104 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 105 | 106 | 107 | 108 | Indices and tables 109 | ================== 110 | 111 | * :ref:`genindex` 112 | * :ref:`modindex` 113 | * :ref:`search` 114 | 115 | 116 | 117 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/index.txt: -------------------------------------------------------------------------------- 1 | .. eblur/dust documentation master file, created by 2 | sphinx-quickstart on Thu Jan 28 13:40:11 2016. 
3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to documentation for *eblur/dust* 7 | ========================================= 8 | 9 | The *eblur/dust* set of python modules calculate scattering absorption 10 | and scattering efficiencies for dust from the infrared to the X-ray. 11 | This code can also be used to calculate dust scattering halo images in 12 | the X-ray, in both interstellar and intergalactic (cosmological) 13 | contexts. 14 | 15 | | **First published version of this code** (released with `Corrales & 16 | Paerels, 2015 17 | `_) 18 | | http://dx.doi.org/10.5281/zenodo.15991 19 | 20 | 21 | Features 22 | -------- 23 | 24 | A number of dust grain size distributions and optical constants are 25 | provided, but they can be fully customized by the user by invoking 26 | custom objects of the approporiate class. Provided dust models 27 | include: 28 | 29 | * A power law distribution of dust grain sizes 30 | * `Weingartner & Draine (2001) `_ 31 | grain size distributions for Milky Way dust 32 | * Optical constants (complex index of refraction) for 0.1 um sized 33 | `graphite and astrosilicate grains `_ 34 | 35 | * Rayleigh-Gans scattering physics 36 | 37 | * `Smith & Dwek (1998) `_ 38 | * `Mauche & Gorenstein (1986) `_ 39 | 40 | * Mie scattering physics using the algorithms of 41 | `Bohren & Huffman (1986) `_ 42 | 43 | * Converted from `fortran and IDL 44 | `_ 45 | to python 46 | 47 | 48 | Installation 49 | ------------ 50 | 51 | As of yet there is no static install version. I recommend cloning the 52 | github repo into a directory in your python path.:: 53 | 54 | cd /path/to/python/libraries/ 55 | git clone git@github.com:eblur/dust.git . 56 | 57 | 58 | Contribute 59 | ---------- 60 | 61 | **Source code:** github.com/eblur/dust 62 | 63 | 64 | Support 65 | ------- 66 | 67 | If you are having issues, please contact lia@space.mit.edu 68 | 69 | 70 | .. Contents: 71 | 72 | .. 
toctree:: 73 | :maxdepth: 2 74 | 75 | 76 | 77 | License 78 | ------- 79 | 80 | Copyright (c) 2014, Lia Corrales 81 | All rights reserved. 82 | 83 | Redistribution and use in source and binary forms, with or without 84 | modification, are permitted provided that the following conditions are 85 | met: 86 | 87 | 1. Redistributions of source code must retain the above copyright 88 | notice, this list of conditions and the following disclaimer. 89 | 90 | 2. Redistributions in binary form must reproduce the above copyright 91 | notice, this list of conditions and the following disclaimer in the 92 | documentation and/or other materials provided with the distribution. 93 | 94 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS 95 | IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 96 | TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 97 | PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 98 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 99 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 100 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 101 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 102 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 103 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 104 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
105 | 106 | 107 | 108 | Indices and tables 109 | ================== 110 | 111 | * :ref:`genindex` 112 | * :ref:`modindex` 113 | * :ref:`search` 114 | 115 | 116 | 117 | -------------------------------------------------------------------------------- /minerals.py: -------------------------------------------------------------------------------- 1 | 2 | ## minerals.py -- Some tables for ISM abundances and depletion factors 3 | ## that are useful for calculating dust mass and dust-to-gas ratios 4 | ## 5 | ## 2016.01.22 - lia@space.mit.edu 6 | ##---------------------------------------------------------------- 7 | 8 | import numpy as np 9 | 10 | 11 | amu = {'H':1.008,'He':4.0026,'C':12.011,'N':14.007,'O':15.999,'Ne':20.1797, \ 12 | 'Na':22.989,'Mg':24.305,'Al':26.981,'Si':28.085,'P':30.973,'S':32.06, \ 13 | 'Cl':35.45,'Ar':39.948,'Ca':40.078,'Ti':47.867,'Cr':51.9961,'Mn':54.938, \ 14 | 'Fe':55.845,'Co':58.933,'Ni':58.6934} 15 | amu_g = 1.661e-24 # g 16 | mp = 1.673e-24 # g (proton mass) 17 | 18 | wilms = {'H':12.0, 'He':10.99, 'C':8.38, 'N':7.88, 'O':8.69, 'Ne':7.94, \ 19 | 'Na':6.16, 'Mg':7.40, 'Al':6.33, 'Si':7.27, 'P':5.42, 'S':7.09, \ 20 | 'Cl':5.12, 'Ar':6.41, 'Ca':6.20, 'Ti':4.81, 'Cr':5.51, 'Mn':5.34, \ 21 | 'Fe':7.43, 'Co':4.92, 'Ni':6.05} # 12 + log A_z 22 | 23 | # Fraction of elements still in gas form 24 | wilms_1mbeta = {'H':1.0, 'He':1.0, 'C':0.5, 'N':1.0, 'O':0.6, 'Ne':1.0, 'Na':0.25, \ 25 | 'Mg':0.2, 'Al':0.02, 'Si':0.1, 'P':0.6, 'S':0.6, 'Cl':0.5, 'Ar':1.0, \ 26 | 'Ca':0.003, 'Ti':0.002, 'Cr':0.03, 'Mn':0.07, 'Fe':0.3, 'Co':0.05, \ 27 | 'Ni':0.04} 28 | 29 | class Mineral(object): 30 | """ 31 | Mineral object 32 | ------------------- 33 | Use a dictionary to define the composition. 34 | e.g. 
def calc_mass_conversion(elem, mineral):
    """
    calc_mass_conversion( elem, mineral )
    Returns the number of atoms of `elem` per gram of a particular
    Mineral object [g^-1].
    Useful for converting mass column to a number density column for an element.
    """
    # isinstance instead of `type(x) == T`: accepts Mineral subclasses and
    # is the idiomatic Python type check.
    assert isinstance(mineral, Mineral)
    assert isinstance(elem, str)
    return mineral.composition[elem] / mineral.weight_g  # g^{-1}


def calc_element_column(NH, fmineral, atom, mineral, d2g=0.009):
    """
    Calculate the column density [cm^-2] of an element for a particular NH value,
    assuming a dust-to-gas ratio (d2g) and
    the fraction of dust in that particular mineral species (fmineral)
    --------------------------------------------------------------------
    calc_element_column( NH, fmineral, atom, mineral, d2g=0.009 )
    """
    dust_mass = NH * mp * d2g * fmineral  # g cm^{-2}
    print('Dust mass = %.3e g cm^-2' % (dust_mass))
    return calc_mass_conversion(atom, mineral) * dust_mass  # cm^{-2}
def adist(amin=0.1, amax=1.0, na=100):
    """ FUNCTION adist( amin=0.1, amax=1.0, na=100 )
    Returns np.array of na+1 evenly spaced grain sizes between amin and
    amax (microns), inclusive of both endpoints.

    Uses np.linspace rather than np.arange with a float step: arange's
    endpoint handling with non-integer steps is unreliable (rounding can
    change the array length and the final point can over- or under-shoot
    amax), whereas linspace always ends exactly at amax.
    """
    # int(na): some callers pass na as a float (e.g. na=100.)
    return np.linspace(amin, amax, int(na) + 1)
class Dustdist(object):  # power (p), rho, radius (a)
    r""" OBJECT dust.Dustdist
    ----------------------------------------
    __init__(self, p=3.5, rho=3.0, rad=None )
    ----------------------------------------
    p : scalar for power law dn/da \propto a^-p  [default 3.5]
    rho : scalar grain density [g cm^-3]
    a : np.array of grain sizes [microns]; defaults to adist()
    ----------------------------------------
    FUNCTIONS
    dnda ( md : mass density [g cm^-2 or g cm^-3] )
        returns : number density [cm^-3 um^-1]
    """
    # NOTE: the docstring previously advertised p=4.0 while the actual
    # default was 3.5; the docstring now matches the signature.
    def __init__(self, p=3.5, rho=3.0, rad=None):
        self.p = p
        self.rho = rho
        # `rad=adist()` as a default would be evaluated once at import
        # time and the same array object shared by every instance
        # (mutable-default pitfall); build a fresh default per instance.
        self.a = adist() if rad is None else rad

    def dnda(self, md=1.5e-5):
        """Power-law size distribution dn/da, normalized so that the
        total grain mass integrates to md."""
        adep = np.power(self.a, -self.p)  # um^-p
        # mass per unit grain size: (4/3) pi rho a^3 * a^-p
        dmda = adep * 4./3. * np.pi * self.rho * np.power(self.a * c.micron2cm(), 3)  # g um^-p
        const = md / c.intz(self.a, dmda)  # normalization  cm^-? um^p-1
        return const * adep  # cm^-? um^-1
def MRN(amin=0.005, amax=0.3, p=3.5, na=100, spectrum=True, **kwargs):
    r""" FUNCTION MRN( amin=0.005, amax=0.3, p=3.5, na=100, spectrum=True, **kwargs )
    ----------------------------------------
    INPUTS
    amin : [micron]
    amax : [micron]
    p : scalar for dust power law dn/da \propto a^-p
    na : int
    spectrum : boolean
    ----------------------------------------
    RETURNS
    if spectrum == True: dust.Dustspectrum (object)
    if spectrum == False: dust.Dustdist (object)
    """
    # BUG FIX: `na` was silently dropped in the spectrum branch, so any
    # caller-requested grid size was ignored and make_dust_spectrum's
    # default (na=100) was always used.  Pass it through explicitly.
    if spectrum:
        return make_dust_spectrum(amin=amin, amax=amax, na=na, p=p, **kwargs)
    return Dustdist(rad=adist(amin=amin, amax=amax, na=na), p=p, **kwargs)
class CmDrude(object):
    """ OBJECT cmindex.CmDrude
    ---------------------------------------
    __init__(self, rho=3.0)
    ---------------------------------------
    cmtype : 'Drude'
    rho : grain density [g cm^-3]
    ---------------------------------------
    FUNCTION
    rp(E) : real part of complex index of refraction [E in keV]
    ip(E) : imaginary part of complex index of refraction [always 0.0]
    """
    def __init__(self, rho=3.0):
        # Drude approximation: index of refraction from free-electron
        # density alone; no absorptive (imaginary) component.
        self.cmtype = 'Drude'
        self.rho = rho

    def rp(self, E):
        # m - 1 scales with density and with (wavelength/E)^2
        lam_term = np.power(c.kev2lam() / E, 2)
        mm1 = self.rho / (2.0 * c.mp()) * c.re() / (2.0 * np.pi) * lam_term
        return mm1 + 1

    def ip(self, E):
        # No imaginary part in the Drude model; mirror the input shape.
        return np.zeros(np.size(E)) if np.size(E) > 1 else 0.0
class CmSilicate(object):
    """ OBJECT cmindex.CmSilicate
    ---------------------------------------
    __init__( self )
    ---------------------------------------
    cmtype : 'Silicate'
    rp(E) : scipy.interp1d object
    ip(E) : scipy.interp1d object [E in keV]
    """
    def __init__(self):
        self.cmtype = 'Silicate'

        # Load the Draine (2003) silicate optical constants from the
        # python save file (located via the PYTHONPATH search helper).
        table = c.restore(find_cmfile('CM_D03.pysav'))

        # Convert the tabulated wavelength grid to energies [keV]
        energies = c.kev2lam() / c.micron2cm() / table['Sil_lam']

        # Real and imaginary parts as callable interpolators over E [keV]
        self.rp = interp1d(energies, table['Sil_re'])
        self.ip = interp1d(energies, table['Sil_im'])
def zvalues(zs=4.0, z0=0.0, nz=100):
    """ Creates an np.array of nz+1 evenly spaced z values between z0 and
    zs (inclusive) [unitless]
    zs=4.0, z0=0.0, nz=100

    Uses np.linspace rather than np.arange with a float step: arange's
    endpoint inclusion with non-integer steps is unreliable (rounding can
    change the array length), whereas linspace guarantees the grid always
    ends exactly at zs -- important because integrands evaluated on this
    grid are integrated up to the source redshift.
    """
    # int(nz): tolerate callers that pass nz as a float
    return np.linspace(z0, zs, int(nz) + 1)
| """ 53 | FUNCTION cosmdustspectrum( amin=0.1, amax=1.0, na=100., p=4.0, rho=3.0, cosm=Cosmology() ) 54 | ----------------------------- 55 | amin, amax : [microns] 56 | na : int (# of dust grain sizes to use) 57 | p : scalar for dust power law dn/da \propto a^-p 58 | rho : grain density [g cm^-3] 59 | cosm : Cosmology 60 | ----------------------------- 61 | RETURNS : dust.Dustspectrum 62 | """ 63 | return dust.Dustspectrum( rad = dust.Dustdist( rad=dust.adist( amin=amin, amax=amax, na=na ), p=p, rho=rho ), 64 | md = Cosmdens( cosm=cosm ).md ) 65 | 66 | def DChi( z, zp=0.0, cosm=Cosmology(), nz=100 ): 67 | """ 68 | Calculates co-moving radial distance [Gpc] from zp to z using dx = cdt/a 69 | z : float : redshift 70 | zp ; float (0) : starting redshift 71 | cosm : Cosmology 72 | nz : int (100) : number of z-values to use in calculation 73 | """ 74 | zvals = zvalues( zs=z, z0=zp, nz=nz ) 75 | integrand = c.cperh0() * ( c.h0()/cosm.h0 ) / np.sqrt( cosm.m * np.power(1+zvals,3) + cosm.l ) 76 | return c.intz( zvals, integrand ) / (1e9 * c.pc2cm() ) # Gpc, in comoving coordinates 77 | 78 | def DA( theta, z, cosm=Cosmology(), nz=100 ): 79 | """ 80 | Calculates the diameter distance [Gpc] for an object of angular size 81 | theta and redshift z using DA = theta(radians) * DChi / (1+z) 82 | theta : float : angular size [arcsec] 83 | z : float : redshift of object 84 | cosm : Cosmology 85 | nz : int (100) : number of z-values to use in DChi calculation 86 | """ 87 | dchi = DChi( z, cosm=cosm, nz=nz ) 88 | return theta * c.arcs2rad() * dchi / (1+z) 89 | 90 | 91 | def CosmTauX( z, E=1.0, dist=dust.Dustdist(), scatm=ss.Scatmodel(), cosm=Cosmology(), nz=100 ): 92 | """ 93 | FUNCTION CosmTauX( z, E=1.0, dist=dust.Dustdist(), scatm=ss.Scatmodel(), cosm=Cosmology(), nz=100 94 | --------------------------------- 95 | INPUT 96 | z : redshift of source 97 | E : scalar or np.array [keV] 98 | dist : dust.Dustdist or dust.Grain 99 | scatm : ss.Scatmodel 100 | cosm : cosm.Cosmology 101 | 
def CosmTauScreen( zg, E=1.0, dist=dust.Dustspectrum(), scatm=ss.Scatmodel() ):
    """
    FUNCTION CosmTauScreen( zg, E=1.0, dist=dust.Dustspectrum(), scatm=ss.Scatmodel() )
    ---------------------------------
    INPUT
    zg : redshift of screen
    E : scalar or np.array [keV]
    dist : dust.Dustdist or dust.Grain
    scatm : ss.Scatmodel
    ---------------------------------
    OUTPUT
    tauX : np.array [optical depth to X-ray scattering] for the screen
    : = kappa(Eg) * M_d
    """
    # Photon energy at the screen is blueshifted relative to the observer
    redshifted_E = E * (1+zg)
    kscat = ss.Kappascat( E=redshifted_E, scatm=scatm, dist=dist ).kappa
    # Optical depth = opacity times dust mass column of the screen
    return dist.md * kscat
ascii 5 | import errors as err 6 | 7 | ## November 30, 2014 : Removed dependence on matplotlib and asciidata 8 | 9 | ## April 1, 2013 : Added copy function to Profile object 10 | ## March 29, 2013 : Updated minus, plus, divide, multiply with error propagating routine (errors.py) 11 | ## March 2, 2013 : Updated Profile object with minus, plus, divide, multiply 12 | 13 | ## Part of radprofile.sh script 14 | ## Taken from CygX-3/6601/primary 15 | ## Plots a profile when used './radprofile.py rp_filename' 16 | ## where the '.txt' extension is missing from rp_filename 17 | 18 | import os # Needed for environment variables 19 | import sys 20 | sys.argv 21 | 22 | #---------------------------------------------- 23 | ## The Profile object 24 | 25 | class Profile(object): 26 | rleft = 0.0 27 | rright = 0.0 28 | surbri = 0.0 29 | surbri_err = 0.0 30 | 31 | @property 32 | def rmid( self ): 33 | return 0.5 * (self.rleft + self.rright) 34 | 35 | @property 36 | def area( self ): 37 | return np.pi * (self.rright**2 - self.rleft**2) # pix^2 38 | 39 | def __getslice__( self, i,j ): 40 | result = Profile() 41 | result.rleft = self.rleft[i:j] 42 | result.rright = self.rright[i:j] 43 | result.surbri = self.surbri[i:j] 44 | result.surbri_err = self.surbri_err[i:j] 45 | return result 46 | 47 | def __getitem__( self, ivals ): 48 | result = Profile() 49 | result.rleft = self.rleft[ivals] 50 | result.rright = self.rright[ivals] 51 | result.surbri = self.surbri[ivals] 52 | result.surbri_err = self.surbri_err[ivals] 53 | return result 54 | 55 | def minus( self, value, value_err=0 ): 56 | oldsb = self.surbri 57 | oldsb_err = self.surbri_err 58 | self.surbri = oldsb - value 59 | self.surbri_err = err.prop_add( oldsb_err, value_err ) 60 | #self.surbri_err = np.sqrt( oldsb_err**2 + value_err**2 ) 61 | return 62 | 63 | def plus( self, value, value_err=0 ): 64 | oldsb = self.surbri 65 | oldsb_err = self.surbri_err 66 | self.surbri = oldsb + value 67 | self.surbri_err = err.prop_add( oldsb_err, 
def get_profile_fits( filename, flux=False ):
    """
    Load a radial profile from a FITS table (extension 1 holds the data).
    filename : path to the FITS file
    flux : if True, read SUR_FLUX columns [phot/cm^2/s/pix^2];
           otherwise SUR_BRI [count/pix^2]
    Returns a Profile with rleft/rright taken from the two-column 'R' field.
    NOTE(review): the HDUList is never closed (same as before this
    rewrite); closing it could invalidate memory-mapped column data.
    """
    if flux:
        sb_col, err_col = 'SUR_FLUX', 'SUR_FLUX_ERR'
    else:
        sb_col, err_col = 'SUR_BRI', 'SUR_BRI_ERR'

    table = fits.open( filename )[1].data
    prof = Profile()
    prof.rleft = table['R'][:,0]
    prof.rright = table['R'][:,1]
    prof.surbri = table[sb_col]
    prof.surbri_err = table[err_col]
    return prof
def add_bkg( profile, bkg_counts, bkg_area ):
    """
    Add a flat background level to `profile` in place.
    (Pass a negative bkg_counts to subtract the background instead.)
    The Poisson error sqrt(counts)/area is combined in quadrature with
    the profile's existing surbri_err.
    """
    level = bkg_counts / bkg_area
    level_err = np.sqrt( bkg_counts ) / bkg_area
    profile.surbri = profile.surbri + level
    profile.surbri_err = np.sqrt( profile.surbri_err**2 + level_err**2 )
    return
#---------------------------------------------- 182 | 183 | try: 184 | datafile = sys.argv[1] 185 | profile = get_profile( datafile ) 186 | except: 187 | pass 188 | 189 | 190 | -------------------------------------------------------------------------------- /halodict.py: -------------------------------------------------------------------------------- 1 | 2 | import dust 3 | import sigma_scat as ss 4 | import galhalo as gh 5 | 6 | import numpy as np 7 | import constants as c 8 | import matplotlib.pyplot as plt 9 | 10 | from scipy.interpolate import interp1d 11 | 12 | from astropy.io import fits 13 | 14 | ## UPDATED July 10, 2013 : Rewrote ecf method in HaloDict object 15 | ## UPDATED June 11, 2013 : Make this file independent of asciidata 16 | 17 | ## July 17, 2012 : A library of objects and functions for simulating a 18 | ## full halo given an object's spectrum. 19 | ## See sim_cygx3.py for original code (and testing) 20 | 21 | #--------------------------------------------------------------- 22 | 23 | class HaloDict( object ): 24 | """ 25 | A dictionary of halos where each property can be looked up with energy as a key. 
26 | """ 27 | 28 | def __init__( self, energy, alpha=np.power(10.0, np.arange(0.0,3.01,0.05)), \ 29 | rad=dust.Grain(), scatm=ss.Scatmodel() ): 30 | 31 | self.alpha = alpha 32 | self.energy = energy 33 | self.index = dict( zip(energy,range(len(energy))) ) 34 | self.rad = rad 35 | self.scatm = scatm 36 | self.intensity = np.zeros( shape=( len(energy), len(alpha) ) ) 37 | 38 | # The following variables get defined when htype is set 39 | # See analytic.set_htype 40 | self.htype = None 41 | self.dist = None 42 | self.taux = None 43 | 44 | ## Issues with comparing flouts, try round 45 | ## http://stackoverflow.com/questions/23721230/float-values-as-dictionary-key 46 | def __getitem__( self, key, n=2 ): 47 | i = self.index[round(key,n)] 48 | return self.intensity[i,:] 49 | 50 | ## http://stackoverflow.com/questions/19151/build-a-basic-python-iterator 51 | def __iter__( self ): 52 | self.count = 0 53 | return self 54 | def next( self ): 55 | if self.count >= len( self.energy ): 56 | raise StopIteration 57 | else: 58 | self.count += 1 59 | return self.intensity[count,:] 60 | 61 | def __getslice__( self, i, j ): 62 | slice = np.where( np.logical_and( self.energy>=i, self.energy 3.5*ANGS2MICRON, a < a_t ) ) 181 | case2g = np.where( a >= a_t ) 182 | 183 | if np.size(case1g) != 0: 184 | Case_g[case1g] = 1.0 185 | if np.size(case2g) != 0: 186 | Case_g[case2g] = np.exp( -( (a[case2g]-a_t) / a_c )**3 ) 187 | 188 | if beta >= 0: 189 | F_g = 1 + beta * a / a_t 190 | if beta < 0: 191 | F_g = 1.0 / (1 - beta * a / a_t) 192 | 193 | Dist_WD01 = D + C/a_cm * (a/a_t)**alpha * F_g * Case_g #cm^-4 per n_H 194 | 195 | if type == 'Silicate': 196 | 197 | Case_s = np.zeros( NA ) 198 | case1s = np.where( np.logical_and( a > 3.5*ANGS2MICRON, a < a_t ) ) 199 | case2s = np.where( a >= a_t ) 200 | 201 | if np.size(case1s) != 0: 202 | Case_s[case1s] = 1.0 203 | if np.size(case2s) != 0: 204 | Case_s[case2s] = np.exp( -( (a[case2s]-a_t)/a_c )**3 ) 205 | 206 | F_s = np.zeros( NA ) 207 | if beta >= 0: 
208 | F_s = 1 + beta * a / a_t 209 | if beta < 0: 210 | F_s = 1. / (1 - beta * a / a_t) 211 | 212 | Dist_WD01 = C/a_cm * (a/a_t)**alpha * F_s * Case_s #cm^-4 per n_H 213 | 214 | ## Modify result Dustspectrum so we get a proper WD01 dist! 215 | 216 | mg = 4.0/3.0*np.pi*a_cm**3 * rho_d # mass of each dust grain 217 | Md = c.intz( a_cm, Dist_WD01 * mg ) 218 | 219 | result.nd = Dist_WD01 * c.micron2cm() # cm^-3 per um per n_H 220 | result.md = Md 221 | 222 | return result 223 | -------------------------------------------------------------------------------- /analytic.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | 5 | from scipy.special import erf 6 | from scipy.integrate import trapz 7 | 8 | ## REQUIRES Halo objects to work 9 | from halodict import HaloDict 10 | from halo import Halo 11 | 12 | import galhalo as GH 13 | import dust 14 | 15 | ## UPDATED June 11, 2013 : Make the halos calculation faster for halodicts? 
16 | 17 | ## UPDATED May 27, 2013 : Rewrote GammaInc function to be more robust 18 | ## UPDATED April 4, 2013 : To treat halos in same way as DiscreteISM 19 | ## and UniformISM functions in galhalo.py 20 | ## CREATED April 3, 2013 21 | 22 | #-------------------------------------------- 23 | # http://www.johndcook.com/gamma_python.html 24 | # 25 | # See ~/Academic/notebooks/test_functions.ipynb for testing 26 | # and 27 | # http://mathworld.wolfram.com/IncompleteGammaFunction.html for general info 28 | 29 | from scipy.special import gammaincc 30 | from scipy.special import gamma 31 | from scipy.special import expi 32 | 33 | def GammaInc( a, z ): 34 | if z.any() < 0: 35 | print 'ERROR: z must be >= 0' 36 | return 37 | if a == 0: 38 | return -expi(-z) 39 | 40 | elif a < 0: 41 | return ( GammaInc(a+1,z) - np.power(z,a) * np.exp(-z) ) / a 42 | else: 43 | return gammaincc(a,z) * gamma(a) 44 | 45 | def set_htype( halo, xg=None, NH=1.0e20, d2g=0.009 ): 46 | ''' 47 | Sets galactic ISM htype values for Halo object 48 | -------------------------------------------------------------- 49 | FUNCTION set_htype( halo, xg=None, NH=1.0e20, d2g=0.009 ) 50 | RETURN : empty 51 | -------------------------------------------------------------- 52 | halo : halo.Halo object 53 | xg : float [0-1] : Position of screen where 0 = point source, 1 = observer 54 | - if None, htype set to 'Uniform' 55 | - otherwise, hytype set to 'Screen' 56 | NH : float [cm^-2] : Hydrogen column density 57 | d2g : float : Dust-to-gas mass ratio 58 | ''' 59 | if halo.htype != None: 60 | print 'WARNING: Halo already has an htype. 
Overwriting now' 61 | 62 | if xg == None: 63 | halo.htype = GH.GalHalo( NH=NH, d2g=d2g, ismtype='Uniform' ) 64 | else: 65 | halo.htype = GH.GalHalo( xg=xg, NH=NH, d2g=d2g, ismtype='Screen' ) 66 | 67 | md = NH * GH.c.mp() * d2g 68 | halo.dist = dust.Dustspectrum( rad=halo.rad, md=md ) 69 | halo.taux = GH.ss.Kappascat( E=halo.energy, scatm=halo.scatm, dist=halo.dist ).kappa * halo.dist.md 70 | return 71 | 72 | #-------------------------------------------- 73 | ## Screen case ISM 74 | 75 | def G_p( halo ): 76 | ''' 77 | Returns integral_a0^a1 a^(4-p) da 78 | ''' 79 | a0 = halo.dist.a[0] 80 | a1 = halo.dist.a[-1] 81 | p = halo.rad.p 82 | if p == 5: 83 | return np.log( a1/a0 ) 84 | else: 85 | return 1.0/(5.0-p) * ( np.power(a1,5.0-p) - np.power(a0,5.0-p) ) 86 | 87 | def G_s( halo ): 88 | ''' 89 | Function used for evaluating halo from power law distribution of grain sizes (Screen case) 90 | ''' 91 | a0 = halo.dist.a[0] 92 | a1 = halo.dist.a[-1] 93 | p = halo.rad.p 94 | 95 | if type(halo) == Halo: 96 | energy, alpha = halo.energy, halo.alpha 97 | if type(halo) == HaloDict: 98 | energy, alpha = halo.superE, halo.superA 99 | 100 | charsig0 = 1.04 * 60.0 / energy 101 | pfrac = (7.0-p)/2.0 102 | const = alpha**2/(2.0*charsig0**2*halo.htype.xg**2) 103 | gamma1 = GammaInc( pfrac, const * a1**2 ) 104 | gamma0 = GammaInc( pfrac, const * a0**2 ) 105 | return -0.5 * np.power( const, -pfrac ) * ( gamma1 - gamma0 ) 106 | 107 | def screen_eq( halo, xg=0.5, verbose=False, **kwargs ): 108 | ''' 109 | Analytic function for a screen of dust particles 110 | from parameters set in halo (taux, a0, a1, p, xg) 111 | -------------------------------------------------------------- 112 | FUNCTION screen_eq( halo ) 113 | RETURNS : np.array [arcsec^-2] : I_h/F_a 114 | Also alters halo.intensity with the resulting valuesa 115 | -------------------------------------------------------------- 116 | halo : halo.Halo object 117 | ''' 118 | 119 | set_htype( halo, xg=xg, **kwargs ) 120 | hfrac, energy, 
alpha = np.array([]), np.array([]), np.array([]) 121 | 122 | if type(halo) == Halo: 123 | if verbose: print 'Using a Halo object' 124 | hfrac = halo.taux 125 | energy, alpha = halo.energy, halo.alpha 126 | 127 | if type(halo) == HaloDict: 128 | if verbose: print 'Using a halo dictionary' 129 | NE, NA = len(halo.energy), len(halo.alpha) 130 | hfrac = np.tile( halo.taux.reshape(NE,1), NA ) # NE x NA 131 | energy, alpha = halo.superE, halo.superA # NE x NA 132 | 133 | if type(halo.rad) == dust.Grain: 134 | if verbose: print 'Using a dust grain' 135 | charsig = 1.04 * 60. / halo.rad.a / energy #arcsec 136 | gterm = np.exp( -alpha**2 / (2 * charsig**2 * xg**2) ) 137 | result = hfrac * gterm / ( xg**2 * 2.0*np.pi*charsig**2 ) 138 | 139 | if type(halo.rad) == dust.Dustdist: 140 | if verbose: print 'Using a distribution of grain sizes' 141 | charsig0 = 1.04 * 60.0 / energy 142 | const = hfrac / ( 2.0*np.pi*charsig0**2 ) 143 | result = const / xg**2 * G_s(halo) / G_p(halo) 144 | 145 | halo.intensity = result 146 | return 147 | 148 | #-------------------------------------------- 149 | ## Uniform case ISM 150 | 151 | def G_u( halo ): 152 | ''' 153 | Function used for evaluating halo from power law distribution of grain sizes 154 | (Uniform case) 155 | ''' 156 | a0 = halo.rad.a[0] 157 | a1 = halo.rad.a[-1] 158 | p = halo.rad.p 159 | 160 | if type(halo) == Halo: 161 | energy, alpha = halo.energy, halo.alpha 162 | if type(halo) == HaloDict: 163 | energy, alpha = halo.superE, halo.superA 164 | 165 | power = 6.0 - halo.rad.p 166 | pfrac = (7.0-p) / 2.0 167 | charsig = 1.04 * 60.0 / energy 168 | const = alpha / charsig / np.sqrt(2.0) 169 | 170 | A1 = np.power(a1,power) * ( 1 - erf(const*a1) ) 171 | A0 = np.power(a0,power) * ( 1 - erf(const*a0) ) 172 | B1 = np.power(const,-power) * GammaInc( pfrac, const**2 * a1**2 ) / np.sqrt(np.pi) 173 | B0 = np.power(const,-power) * GammaInc( pfrac, const**2 * a0**2 ) / np.sqrt(np.pi) 174 | return ( (A1-B1) - (A0-B0) ) / power 175 | 176 | def 
uniform_eq( halo, verbose=False, **kwargs ): 177 | ''' 178 | Analytic function for a uniform distribution of dust particles 179 | from parameters set in halo (taux, a0, a1, p) 180 | -------------------------------------------------------------- 181 | FUNCTION uniform_eq( halo ) 182 | RETURNS : np.array [arcsec^-2] : I_h/F_a 183 | Also alter halo.intensity with the resulting valuesa 184 | -------------------------------------------------------------- 185 | halo : halo.Halo object 186 | ''' 187 | 188 | set_htype( halo, xg=None, **kwargs ) 189 | 190 | if type(halo) == Halo: 191 | if verbose: print 'Using a Halo object' 192 | hfrac = halo.taux 193 | energy, alpha = halo.energy, halo.alpha 194 | 195 | if type(halo) == HaloDict: 196 | if verbose: print 'Using a halo dictionary' 197 | NE, NA = len(halo.energy), len(halo.alpha) 198 | hfrac = np.tile( halo.taux.reshape(NE,1), NA ) # NE x NA 199 | energy, alpha = halo.superE, halo.superA # NE x NA 200 | 201 | if type(halo.rad) == dust.Grain: 202 | if verbose: print 'Using a dust grain' 203 | charsig = 1.04 * 60. / halo.rad.a / energy #arcsec 204 | eterm = 1 - erf( alpha / charsig / np.sqrt(2.) ) 205 | result = hfrac * eterm * np.sqrt(np.pi/2.0) / (2.0*np.pi*charsig*alpha) 206 | 207 | if type(halo.rad) == dust.Dustdist: 208 | if verbose: print 'Using a distribution of grain sizes' 209 | charsig = 1.04 * 60.0 / energy 210 | const = hfrac / ( alpha * charsig * np.sqrt(8.0*np.pi) ) 211 | result = const * G_u(halo) / G_p(halo) 212 | 213 | halo.intensity = result 214 | return 215 | 216 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " applehelp to make an Apple Help Book" 34 | @echo " devhelp to make HTML files and a Devhelp project" 35 | @echo " epub to make an epub" 36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 37 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 39 | @echo " text to make text files" 40 | @echo " man to make manual pages" 41 | @echo " 
texinfo to make Texinfo files" 42 | @echo " info to make Texinfo files and run them through makeinfo" 43 | @echo " gettext to make PO message catalogs" 44 | @echo " changes to make an overview of all changed/added/deprecated items" 45 | @echo " xml to make Docutils-native XML files" 46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 47 | @echo " linkcheck to check all external links for integrity" 48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 49 | @echo " coverage to run coverage check of the documentation (if enabled)" 50 | 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | html: 55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | dirhtml: 60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 61 | @echo 62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 63 | 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | pickle: 70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 71 | @echo 72 | @echo "Build finished; now you can process the pickle files." 73 | 74 | json: 75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 76 | @echo 77 | @echo "Build finished; now you can process the JSON files." 78 | 79 | htmlhelp: 80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 81 | @echo 82 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 83 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
84 | 85 | qthelp: 86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 87 | @echo 88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/eblurdust.qhcp" 91 | @echo "To view the help file:" 92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/eblurdust.qhc" 93 | 94 | applehelp: 95 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 96 | @echo 97 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 98 | @echo "N.B. You won't be able to view it unless you put it in" \ 99 | "~/Library/Documentation/Help or install it in your application" \ 100 | "bundle." 101 | 102 | devhelp: 103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 104 | @echo 105 | @echo "Build finished." 106 | @echo "To view the help file:" 107 | @echo "# mkdir -p $$HOME/.local/share/devhelp/eblurdust" 108 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/eblurdust" 109 | @echo "# devhelp" 110 | 111 | epub: 112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 113 | @echo 114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 115 | 116 | latex: 117 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 118 | @echo 119 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 120 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 121 | "(use \`make latexpdf' here to do that automatically)." 122 | 123 | latexpdf: 124 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 125 | @echo "Running LaTeX files through pdflatex..." 126 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 127 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 128 | 129 | latexpdfja: 130 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 131 | @echo "Running LaTeX files through platex and dvipdfmx..." 
132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 134 | 135 | text: 136 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 137 | @echo 138 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 139 | 140 | man: 141 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 142 | @echo 143 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 144 | 145 | texinfo: 146 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 147 | @echo 148 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 149 | @echo "Run \`make' in that directory to run these through makeinfo" \ 150 | "(use \`make info' here to do that automatically)." 151 | 152 | info: 153 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 154 | @echo "Running Texinfo files through makeinfo..." 155 | make -C $(BUILDDIR)/texinfo info 156 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 157 | 158 | gettext: 159 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 160 | @echo 161 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 162 | 163 | changes: 164 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 165 | @echo 166 | @echo "The overview file is in $(BUILDDIR)/changes." 167 | 168 | linkcheck: 169 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 170 | @echo 171 | @echo "Link check complete; look for any errors in the above output " \ 172 | "or in $(BUILDDIR)/linkcheck/output.txt." 173 | 174 | doctest: 175 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 176 | @echo "Testing of doctests in the sources finished, look at the " \ 177 | "results in $(BUILDDIR)/doctest/output.txt." 
178 | 179 | coverage: 180 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 181 | @echo "Testing of coverage in the sources finished, look at the " \ 182 | "results in $(BUILDDIR)/coverage/python.txt." 183 | 184 | xml: 185 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 186 | @echo 187 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 188 | 189 | pseudoxml: 190 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 191 | @echo 192 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 193 | -------------------------------------------------------------------------------- /halo.py: -------------------------------------------------------------------------------- 1 | ## May 16, 2012 : Added taux to halo objects 2 | 3 | import numpy as np 4 | import constants as c 5 | import dust 6 | import sigma_scat as ss 7 | import cosmology as cosmo 8 | from scipy.interpolate import interp1d 9 | 10 | class CosmHalo(object): 11 | """ 12 | OBJECT CosmHalo( zs=None, zg=None, cosm=None, igmtype=None ) 13 | ** An htype abstract class for storing halo properties 14 | zs : float : redshift of X-ray source 15 | zg : float : redshift of an IGM screen 16 | cosm : cosmo.Cosmology object 17 | igmtype : labels the type of IGM scattering calculation : 'Uniform' or 'Screen' 18 | """ 19 | def __init__( self, zs=None, zg=None, cosm=None, igmtype=None ): 20 | self.zs = zs 21 | self.zg = zg 22 | self.cosm = cosm 23 | self.igmtype = igmtype 24 | 25 | class Halo(object): 26 | """ 27 | OBJECT Halo( E0, alpha=ss.angles(), rad=dust.Grain(), scatm=ss.Scatmodel() ) 28 | htype : abstract class containing information about the halo calculation 29 | E0 : float : observed energy [keV] 30 | alpha : np.array : observed angle [arcsec] 31 | rad : dust.Grain OR dust.Dustdist : grain size distribution 32 | dist : dust.Dustspectrum : initially NONE, stored from calculation 33 | scatm : ss.Scatmodel : scattering model used 34 | intensity : 
np.array : fractional intensity [arcsec^-2] 35 | """ 36 | def __init__( self, E0, 37 | alpha = ss.angles(), 38 | rad = dust.Grain(), 39 | scatm = ss.Scatmodel() ): 40 | self.htype = None 41 | self.energy = E0 42 | self.alpha = alpha 43 | self.rad = rad # Two options: dust.Grain or dust.Dustdist 44 | self.dist = None # dust.Dustspectrum will be stored here when halo is calculated 45 | self.scatm = scatm 46 | self.intensity = np.zeros( np.size(alpha) ) 47 | self.taux = None 48 | 49 | def ecf( self, theta, nth=500 ): 50 | """ 51 | Returns the enclosed fraction for the halo surface brightness 52 | profile, via integral(theta,2pi*theta*halo)/tau. 53 | theta : float : Value for which to compute enclosed fraction (arcseconds) 54 | nth : int (500) : Number of angles to use in calculation 55 | """ 56 | if self.htype == None: 57 | print 'Error: Halo has not yet beein calculated.' 58 | return 59 | interpH = interp1d( self.alpha, self.intensity ) 60 | tharray = np.linspace( min(self.alpha), theta, nth ) 61 | try: 62 | return c.intz( tharray, interpH(tharray) * 2.0*np.pi*tharray ) / self.taux 63 | except: 64 | print 'Error: ECF calculation failed. Theta is likely out of bounds.' 
65 | return 66 | 67 | #----------------- Uniform IGM case -------------------------------- 68 | 69 | def UniformIGM( halo, zs=4.0, cosm=cosmo.Cosmology(), nz=500 ): 70 | """ 71 | FUNCTION UniformIGM( halo, zs=4.0, cosm=cosmo.Cosmology(), nz=500 ) 72 | MODIFIES halo.htype, halo.dist, halo.intensity, halo.taux 73 | -------------------------------------------------------------------- 74 | halo : Halo object 75 | zs : float : redshift of source 76 | cosm : cosmo.Cosmology 77 | nz : int : number of z-values to use in integration 78 | """ 79 | E0 = halo.energy 80 | alpha = halo.alpha 81 | scatm = halo.scatm 82 | 83 | halo.htype = CosmHalo( zs=zs, cosm=cosm, igmtype='Uniform' ) # Stores information about this halo calc 84 | halo.dist = dust.Dustspectrum( rad=halo.rad, md=cosmo.Cosmdens(cosm=cosm).md ) 85 | 86 | Dtot = cosmo.DChi( zs, cosm=cosm, nz=nz ) 87 | zpvals = cosmo.zvalues( zs=zs-zs/nz, z0=0, nz=nz ) 88 | 89 | DP = np.array([]) 90 | for zp in zpvals: 91 | DP = np.append( DP, cosmo.DChi( zs, zp=zp, cosm=cosm ) ) 92 | 93 | X = DP/Dtot 94 | 95 | c_H0_cm = c.cperh0() * (c.h0() / cosm.h0) #cm 96 | hfac = np.sqrt( cosm.m * np.power( 1+zpvals, 3) + cosm.l ) 97 | 98 | Evals = E0 * (1+zpvals) 99 | 100 | ## Single grain case 101 | 102 | if type( halo.rad ) == dust.Grain: 103 | 104 | intensity = np.array([]) 105 | 106 | f = 0.0 107 | cnt = 0.0 108 | na = np.size(alpha) 109 | for al in alpha: 110 | cnt += 1 111 | thscat = al / X # np.size(thscat) = nz 112 | dsig = ss.Diffscat( theta=thscat, a=halo.dist.a, E=Evals, scatm=scatm ).dsig 113 | itemp = c_H0_cm/hfac * np.power( (1+zpvals)/X, 2 ) * halo.dist.nd * dsig 114 | intensity = np.append( intensity, c.intz( zpvals, itemp ) ) 115 | 116 | ## Dust distribution case 117 | 118 | elif type( halo.rad ) == dust.Dustdist: 119 | 120 | avals = halo.dist.a 121 | intensity = np.array([]) 122 | 123 | for al in alpha: 124 | thscat = al / X # np.size(thscat) = nz 125 | 126 | iatemp = np.array([]) 127 | for aa in avals: 128 | dsig = 
ss.Diffscat( theta=thscat, a=aa, E=Evals, scatm=scatm ).dsig 129 | dtmp = c_H0_cm/hfac * np.power( (1+zpvals)/X, 2 ) * dsig 130 | iatemp = np.append( iatemp, c.intz( zpvals, dtmp ) ) 131 | 132 | intensity = np.append( intensity, c.intz( avals, halo.dist.nd * iatemp ) ) 133 | 134 | else: 135 | print '%% Must input type dust.Grain or dust.Dustdist' 136 | intensity = np.zeros( np.size(zpvals) ) 137 | 138 | #----- Finally, set the halo intensity -------- 139 | 140 | halo.intensity = intensity * np.power( c.arcs2rad(), 2 ) # arcsec^-2 141 | halo.taux = cosmo.CosmTauX( zs, E=halo.energy, dist=halo.rad, scatm=halo.scatm, cosm=halo.htype.cosm ) 142 | 143 | #----------------- Infinite Screen Case -------------------------- 144 | 145 | def ScreenIGM( halo, zs=2.0, zg=1.0, md=1.5e-5, cosm=cosmo.Cosmology() ): 146 | """ 147 | FUNCTION ScreenIGM( halo, zs=2.0, zg=1.0, md=1.5e-5, cosm=cosmo.Cosmology() ) 148 | MODIFIES halo.htype, halo.dist, halo.intensity, halo.taux 149 | -------------------------------------------------------------------- 150 | halo : Halo object 151 | zs : float : redshift of source 152 | zg : float : redshift of screen 153 | md : float : mass density of dust to use in screen [g cm^-2] 154 | cosm : cosmo.Cosmology 155 | """ 156 | if zg >= zs: 157 | print '%% STOP: zg must be < zs' 158 | 159 | E0 = halo.energy 160 | alpha = halo.alpha 161 | scatm = halo.scatm 162 | 163 | # Store information about this halo calculation 164 | halo.htype = CosmHalo( zs=zs, zg=zg, cosm=cosm, igmtype='Screen' ) 165 | halo.dist = dust.Dustspectrum( rad=halo.rad, md=md ) 166 | 167 | X = cosmo.DChi( zs, zp=zg, cosm=cosm ) / cosmo.DChi( zs, cosm=cosm ) # Single value 168 | thscat = alpha / X # Scattering angle required 169 | Eg = E0 * (1+zg) # Photon energy at the screen 170 | 171 | ## Single grain size case 172 | 173 | if type( halo.rad ) == dust.Grain: 174 | dsig = ss.Diffscat( theta=thscat, a=halo.dist.a, E=Eg, scatm=scatm ).dsig 175 | intensity = halo.dist.nd / np.power( X, 2 ) * 
dsig 176 | 177 | ## Distribution of grain sizes 178 | 179 | elif type( halo.rad ) == dust.Dustdist: 180 | 181 | avals = halo.dist.a 182 | 183 | dsig = np.zeros( shape=(np.size(avals), np.size(thscat)) ) 184 | for i in range( np.size(avals) ): 185 | dsig[i,:] = ss.Diffscat( theta=thscat, a=avals[i], E=Eg, scatm=scatm ).dsig 186 | 187 | intensity = np.array([]) 188 | for j in range( np.size(thscat) ): 189 | itemp = halo.dist.nd * dsig[:,j] / np.power(X,2) 190 | intensity = np.append( intensity, c.intz( avals, itemp ) ) 191 | 192 | else: 193 | print '%% Must input type dust.Grain or dust.Dustdist' 194 | intensity = np.zeros( np.size(zpvals) ) 195 | 196 | halo.intensity = intensity * np.power( c.arcs2rad(), 2 ) # arcsec^-2 197 | halo.taux = cosmo.CosmTauScreen( zg, E=halo.energy, dist=halo.dist, scatm=halo.scatm ) 198 | 199 | 200 | 201 | -------------------------------------------------------------------------------- /docs/_build/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 6 | * 7 | * :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 
9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | */ 33 | jQuery.urldecode = function(x) { 34 | return decodeURIComponent(x).replace(/\+/g, ' '); 35 | }; 36 | 37 | /** 38 | * small helper function to urlencode strings 39 | */ 40 | jQuery.urlencode = encodeURIComponent; 41 | 42 | /** 43 | * This function returns the parsed url parameters of the 44 | * current request. Multiple values per key are supported, 45 | * it will always return arrays of strings for the value parts. 46 | */ 47 | jQuery.getQueryParameters = function(s) { 48 | if (typeof s == 'undefined') 49 | s = document.location.search; 50 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 51 | var result = {}; 52 | for (var i = 0; i < parts.length; i++) { 53 | var tmp = parts[i].split('=', 2); 54 | var key = jQuery.urldecode(tmp[0]); 55 | var value = jQuery.urldecode(tmp[1]); 56 | if (key in result) 57 | result[key].push(value); 58 | else 59 | result[key] = [value]; 60 | } 61 | return result; 62 | }; 63 | 64 | /** 65 | * highlight a given string on a jquery object by wrapping it in 66 | * span elements with the given class name. 
67 | */ 68 | jQuery.fn.highlightText = function(text, className) { 69 | function highlight(node) { 70 | if (node.nodeType == 3) { 71 | var val = node.nodeValue; 72 | var pos = val.toLowerCase().indexOf(text); 73 | if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { 74 | var span = document.createElement("span"); 75 | span.className = className; 76 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 77 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 78 | document.createTextNode(val.substr(pos + text.length)), 79 | node.nextSibling)); 80 | node.nodeValue = val.substr(0, pos); 81 | } 82 | } 83 | else if (!jQuery(node).is("button, select, textarea")) { 84 | jQuery.each(node.childNodes, function() { 85 | highlight(this); 86 | }); 87 | } 88 | } 89 | return this.each(function() { 90 | highlight(this); 91 | }); 92 | }; 93 | 94 | /* 95 | * backward compatibility for jQuery.browser 96 | * This will be supported until firefox bug is fixed. 97 | */ 98 | if (!jQuery.browser) { 99 | jQuery.uaMatch = function(ua) { 100 | ua = ua.toLowerCase(); 101 | 102 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 103 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 104 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 105 | /(msie) ([\w.]+)/.exec(ua) || 106 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 107 | []; 108 | 109 | return { 110 | browser: match[ 1 ] || "", 111 | version: match[ 2 ] || "0" 112 | }; 113 | }; 114 | jQuery.browser = {}; 115 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 116 | } 117 | 118 | /** 119 | * Small JavaScript module for the documentation. 120 | */ 121 | var Documentation = { 122 | 123 | init : function() { 124 | this.fixFirefoxAnchorBug(); 125 | this.highlightSearchWords(); 126 | this.initIndexTable(); 127 | }, 128 | 129 | /** 130 | * i18n support 131 | */ 132 | TRANSLATIONS : {}, 133 | PLURAL_EXPR : function(n) { return n == 1 ? 
0 : 1; }, 134 | LOCALE : 'unknown', 135 | 136 | // gettext and ngettext don't access this so that the functions 137 | // can safely bound to a different name (_ = Documentation.gettext) 138 | gettext : function(string) { 139 | var translated = Documentation.TRANSLATIONS[string]; 140 | if (typeof translated == 'undefined') 141 | return string; 142 | return (typeof translated == 'string') ? translated : translated[0]; 143 | }, 144 | 145 | ngettext : function(singular, plural, n) { 146 | var translated = Documentation.TRANSLATIONS[singular]; 147 | if (typeof translated == 'undefined') 148 | return (n == 1) ? singular : plural; 149 | return translated[Documentation.PLURALEXPR(n)]; 150 | }, 151 | 152 | addTranslations : function(catalog) { 153 | for (var key in catalog.messages) 154 | this.TRANSLATIONS[key] = catalog.messages[key]; 155 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 156 | this.LOCALE = catalog.locale; 157 | }, 158 | 159 | /** 160 | * add context elements like header anchor links 161 | */ 162 | addContextElements : function() { 163 | $('div[id] > :header:first').each(function() { 164 | $('\u00B6'). 165 | attr('href', '#' + this.id). 166 | attr('title', _('Permalink to this headline')). 167 | appendTo(this); 168 | }); 169 | $('dt[id]').each(function() { 170 | $('\u00B6'). 171 | attr('href', '#' + this.id). 172 | attr('title', _('Permalink to this definition')). 173 | appendTo(this); 174 | }); 175 | }, 176 | 177 | /** 178 | * workaround a firefox stupidity 179 | * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 180 | */ 181 | fixFirefoxAnchorBug : function() { 182 | if (document.location.hash) 183 | window.setTimeout(function() { 184 | document.location.href += ''; 185 | }, 10); 186 | }, 187 | 188 | /** 189 | * highlight the search words provided in the url in the text 190 | */ 191 | highlightSearchWords : function() { 192 | var params = $.getQueryParameters(); 193 | var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; 194 | if (terms.length) { 195 | var body = $('div.body'); 196 | if (!body.length) { 197 | body = $('body'); 198 | } 199 | window.setTimeout(function() { 200 | $.each(terms, function() { 201 | body.highlightText(this.toLowerCase(), 'highlighted'); 202 | }); 203 | }, 10); 204 | $('') 206 | .appendTo($('#searchbox')); 207 | } 208 | }, 209 | 210 | /** 211 | * init the domain index toggle buttons 212 | */ 213 | initIndexTable : function() { 214 | var togglers = $('img.toggler').click(function() { 215 | var src = $(this).attr('src'); 216 | var idnum = $(this).attr('id').substr(7); 217 | $('tr.cg-' + idnum).toggle(); 218 | if (src.substr(-9) == 'minus.png') 219 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 220 | else 221 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 222 | }).css('display', ''); 223 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 224 | togglers.click(); 225 | } 226 | }, 227 | 228 | /** 229 | * helper function to hide the search marks again 230 | */ 231 | hideSearchWords : function() { 232 | $('#searchbox .highlight-link').fadeOut(300); 233 | $('span.highlighted').removeClass('highlighted'); 234 | }, 235 | 236 | /** 237 | * make the url absolute 238 | */ 239 | makeURL : function(relativeURL) { 240 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 241 | }, 242 | 243 | /** 244 | * get the current relative url 245 | */ 246 | getCurrentURL : function() { 247 | var path = document.location.pathname; 248 | var parts = path.split(/\//); 249 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 250 | if (this == '..') 251 | parts.pop(); 252 | }); 253 | var url = parts.join('/'); 254 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 255 | } 256 | }; 257 | 258 | // quick alias for translations 259 | _ = Documentation.gettext; 260 | 261 | $(document).ready(function() { 262 | Documentation.init(); 263 | }); 264 | 
-------------------------------------------------------------------------------- /docs/_build/html/index.html: -------------------------------------------------------------------------------- 1 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Welcome to documentation for eblur/dust — eblur/dust 1.0 documentation 10 | 11 | 12 | 13 | 14 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 43 | 44 |
45 |
46 |
47 |
48 | 49 |
50 |

Welcome to documentation for eblur/dust

51 |

The eblur/dust set of python modules calculate absorption 52 | and scattering efficiencies for dust from the infrared to the X-ray. 53 | This code can also be used to calculate dust scattering halo images in 54 | the X-ray, in both interstellar and intergalactic (cosmological) 55 | contexts.

56 |
57 |
First published version of this code (released with Corrales & 58 | Paerels, 2015)
59 | 60 |
61 |
62 |

Features

63 |

A number of dust grain size distributions and optical constants are 64 | provided, but they can be fully customized by the user by invoking 65 | custom objects of the appropriate class. Provided dust models 66 | include:

67 | 85 |
86 |
87 |

Installation

88 |

As of yet there is no static install version. I recommend cloning the 89 | github repo into a directory in your python path.:

90 |
cd /path/to/python/libraries/
 91 | git clone git@github.com:eblur/dust.git .
 92 | 
93 |
94 |
95 |
96 |

Contribute

97 |

Source code: github.com/eblur/dust

98 |
99 |
100 |

Support

101 |

If you are having issues, please contact lia@space.mit.edu

102 |
103 |
    104 |
105 |
106 |
107 |
108 |

License

109 |

Copyright (c) 2014, Lia Corrales 110 | All rights reserved.

111 |

Redistribution and use in source and binary forms, with or without 112 | modification, are permitted provided that the following conditions are 113 | met:

114 |

1. Redistributions of source code must retain the above copyright 115 | notice, this list of conditions and the following disclaimer.

116 |

2. Redistributions in binary form must reproduce the above copyright 117 | notice, this list of conditions and the following disclaimer in the 118 | documentation and/or other materials provided with the distribution.

119 |

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS 120 | IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 121 | TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 122 | PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 123 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 124 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 125 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 126 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 127 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 128 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 129 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

130 |
131 |
132 |
133 |

Indices and tables

134 | 139 |
140 | 141 | 142 |
143 |
144 |
145 | 182 |
183 |
184 | 195 | 196 | 197 | 198 | 199 | 200 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # eblur/dust documentation build configuration file, created by 4 | # sphinx-quickstart on Thu Jan 28 13:40:11 2016. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | import shlex 18 | 19 | # If extensions (or modules to document with autodoc) are in another directory, 20 | # add these directories to sys.path here. If the directory is relative to the 21 | # documentation root, use os.path.abspath to make it absolute, like shown here. 22 | #sys.path.insert(0, os.path.abspath('.')) 23 | 24 | # -- General configuration ------------------------------------------------ 25 | 26 | # If your documentation needs a minimal Sphinx version, state it here. 27 | #needs_sphinx = '1.0' 28 | 29 | # Add any Sphinx extension module names here, as strings. They can be 30 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 31 | # ones. 32 | extensions = [ 33 | 'sphinx.ext.autodoc', 34 | 'sphinx.ext.doctest', 35 | 'sphinx.ext.mathjax', 36 | ] 37 | 38 | # Add any paths that contain templates here, relative to this directory. 39 | templates_path = ['_templates'] 40 | 41 | # The suffix(es) of source filenames. 42 | # You can specify multiple suffix as a list of string: 43 | source_suffix = ['.rst', '.md'] 44 | #source_suffix = '.txt' 45 | 46 | # The encoding of source files. 47 | #source_encoding = 'utf-8-sig' 48 | 49 | # The master toctree document. 
50 | master_doc = 'index' 51 | 52 | # General information about the project. 53 | project = u'eblur/dust' 54 | copyright = u'2016, Lia Corrales' 55 | author = u'Lia Corrales' 56 | 57 | # The version info for the project you're documenting, acts as replacement for 58 | # |version| and |release|, also used in various other places throughout the 59 | # built documents. 60 | # 61 | # The short X.Y version. 62 | version = '1.0' 63 | # The full version, including alpha/beta/rc tags. 64 | release = '1.0' 65 | 66 | # The language for content autogenerated by Sphinx. Refer to documentation 67 | # for a list of supported languages. 68 | # 69 | # This is also used if you do content translation via gettext catalogs. 70 | # Usually you set "language" from the command line for these cases. 71 | language = None 72 | 73 | # There are two options for replacing |today|: either, you set today to some 74 | # non-false value, then it is used: 75 | #today = '' 76 | # Else, today_fmt is used as the format for a strftime call. 77 | #today_fmt = '%B %d, %Y' 78 | 79 | # List of patterns, relative to source directory, that match files and 80 | # directories to ignore when looking for source files. 81 | exclude_patterns = ['_build'] 82 | 83 | # The reST default role (used for this markup: `text`) to use for all 84 | # documents. 85 | #default_role = None 86 | 87 | # If true, '()' will be appended to :func: etc. cross-reference text. 88 | #add_function_parentheses = True 89 | 90 | # If true, the current module name will be prepended to all description 91 | # unit titles (such as .. function::). 92 | #add_module_names = True 93 | 94 | # If true, sectionauthor and moduleauthor directives will be shown in the 95 | # output. They are ignored by default. 96 | #show_authors = False 97 | 98 | # The name of the Pygments (syntax highlighting) style to use. 99 | pygments_style = 'sphinx' 100 | 101 | # A list of ignored prefixes for module index sorting. 
102 | #modindex_common_prefix = [] 103 | 104 | # If true, keep warnings as "system message" paragraphs in the built documents. 105 | #keep_warnings = False 106 | 107 | # If true, `todo` and `todoList` produce output, else they produce nothing. 108 | todo_include_todos = False 109 | 110 | 111 | # -- Options for HTML output ---------------------------------------------- 112 | 113 | # The theme to use for HTML and HTML Help pages. See the documentation for 114 | # a list of builtin themes. 115 | html_theme = 'alabaster' 116 | 117 | # Theme options are theme-specific and customize the look and feel of a theme 118 | # further. For a list of options available for each theme, see the 119 | # documentation. 120 | #html_theme_options = {} 121 | 122 | # Add any paths that contain custom themes here, relative to this directory. 123 | #html_theme_path = [] 124 | 125 | # The name for this set of Sphinx documents. If None, it defaults to 126 | # " v documentation". 127 | #html_title = None 128 | 129 | # A shorter title for the navigation bar. Default is the same as html_title. 130 | #html_short_title = None 131 | 132 | # The name of an image file (relative to this directory) to place at the top 133 | # of the sidebar. 134 | #html_logo = None 135 | 136 | # The name of an image file (within the static path) to use as favicon of the 137 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 138 | # pixels large. 139 | #html_favicon = None 140 | 141 | # Add any paths that contain custom static files (such as style sheets) here, 142 | # relative to this directory. They are copied after the builtin static files, 143 | # so a file named "default.css" will overwrite the builtin "default.css". 144 | html_static_path = ['_static'] 145 | 146 | # Add any extra paths that contain custom files (such as robots.txt or 147 | # .htaccess) here, relative to this directory. These files are copied 148 | # directly to the root of the documentation. 
149 | #html_extra_path = [] 150 | 151 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 152 | # using the given strftime format. 153 | #html_last_updated_fmt = '%b %d, %Y' 154 | 155 | # If true, SmartyPants will be used to convert quotes and dashes to 156 | # typographically correct entities. 157 | #html_use_smartypants = True 158 | 159 | # Custom sidebar templates, maps document names to template names. 160 | #html_sidebars = {} 161 | 162 | # Additional templates that should be rendered to pages, maps page names to 163 | # template names. 164 | #html_additional_pages = {} 165 | 166 | # If false, no module index is generated. 167 | #html_domain_indices = True 168 | 169 | # If false, no index is generated. 170 | #html_use_index = True 171 | 172 | # If true, the index is split into individual pages for each letter. 173 | #html_split_index = False 174 | 175 | # If true, links to the reST sources are added to the pages. 176 | #html_show_sourcelink = True 177 | 178 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 179 | #html_show_sphinx = True 180 | 181 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 182 | #html_show_copyright = True 183 | 184 | # If true, an OpenSearch description file will be output, and all pages will 185 | # contain a tag referring to it. The value of this option must be the 186 | # base URL from which the finished HTML is served. 187 | #html_use_opensearch = '' 188 | 189 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 190 | #html_file_suffix = None 191 | 192 | # Language to be used for generating the HTML full-text search index. 193 | # Sphinx supports the following languages: 194 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 195 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' 196 | #html_search_language = 'en' 197 | 198 | # A dictionary with options for the search language support, empty by default. 
199 | # Now only 'ja' uses this config value 200 | #html_search_options = {'type': 'default'} 201 | 202 | # The name of a javascript file (relative to the configuration directory) that 203 | # implements a search results scorer. If empty, the default will be used. 204 | #html_search_scorer = 'scorer.js' 205 | 206 | # Output file base name for HTML help builder. 207 | htmlhelp_basename = 'eblurdustdoc' 208 | 209 | # -- Options for LaTeX output --------------------------------------------- 210 | 211 | latex_elements = { 212 | # The paper size ('letterpaper' or 'a4paper'). 213 | #'papersize': 'letterpaper', 214 | 215 | # The font size ('10pt', '11pt' or '12pt'). 216 | #'pointsize': '10pt', 217 | 218 | # Additional stuff for the LaTeX preamble. 219 | #'preamble': '', 220 | 221 | # Latex figure (float) alignment 222 | #'figure_align': 'htbp', 223 | } 224 | 225 | # Grouping the document tree into LaTeX files. List of tuples 226 | # (source start file, target name, title, 227 | # author, documentclass [howto, manual, or own class]). 228 | latex_documents = [ 229 | (master_doc, 'eblurdust.tex', u'eblur/dust Documentation', 230 | u'Lia Corrales', 'manual'), 231 | ] 232 | 233 | # The name of an image file (relative to this directory) to place at the top of 234 | # the title page. 235 | #latex_logo = None 236 | 237 | # For "manual" documents, if this is true, then toplevel headings are parts, 238 | # not chapters. 239 | #latex_use_parts = False 240 | 241 | # If true, show page references after internal links. 242 | #latex_show_pagerefs = False 243 | 244 | # If true, show URL addresses after external links. 245 | #latex_show_urls = False 246 | 247 | # Documents to append as an appendix to all manuals. 248 | #latex_appendices = [] 249 | 250 | # If false, no module index is generated. 251 | #latex_domain_indices = True 252 | 253 | 254 | # -- Options for manual page output --------------------------------------- 255 | 256 | # One entry per manual page. 
List of tuples 257 | # (source start file, name, description, authors, manual section). 258 | man_pages = [ 259 | (master_doc, 'eblurdust', u'eblur/dust Documentation', 260 | [author], 1) 261 | ] 262 | 263 | # If true, show URL addresses after external links. 264 | #man_show_urls = False 265 | 266 | 267 | # -- Options for Texinfo output ------------------------------------------- 268 | 269 | # Grouping the document tree into Texinfo files. List of tuples 270 | # (source start file, target name, title, author, 271 | # dir menu entry, description, category) 272 | texinfo_documents = [ 273 | (master_doc, 'eblurdust', u'eblur/dust Documentation', 274 | author, 'eblurdust', 'One line description of project.', 275 | 'Miscellaneous'), 276 | ] 277 | 278 | # Documents to append as an appendix to all manuals. 279 | #texinfo_appendices = [] 280 | 281 | # If false, no module index is generated. 282 | #texinfo_domain_indices = True 283 | 284 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 285 | #texinfo_show_urls = 'footnote' 286 | 287 | # If true, do not generate a @detailmenu in the "Top" node's menu. 
#texinfo_no_detailmenu = False

--------------------------------------------------------------------------------
/analyze_emcee.py:
--------------------------------------------------------------------------------

## December 11, 2014 : Took out reliance on AEFF, which I initially took from PIMMS
## June 26, 2013 : Support code for emcee analysis

import numpy as np
import matplotlib.pyplot as plt

import radprofile as rp
import constants as c
import model_halo as MH
import dust
import sigma_scat as ss

import cPickle  # NOTE(review): Python 2 only; this module uses py2 print statements
from scipy.interpolate import interp1d

##-------- Supporting constants, Cyg X-3 obsid 6601 -------##

ALPHA = np.arange( 1.0, 200.0, 1.0 ) # observation angles [arcsec], 0.5 arcsec resolution
AMIN = 0.005  # minimum grain size [um] used by every simulator below

##-------- Supporting structure, from emcee_fit -------##

## Parse text files containing walker positions
def string_to_walker( string ):
    """Parse one '[p0 p1 ...]' line of walker output into a (1, ndim) np.array."""
    pos_string = string.strip().strip('[').strip(']')
    walker = np.array( [] )
    for param in pos_string.split():
        walker = np.append( walker, np.float(param) )
    return np.array( [walker] )

def read_pos( filename, acor=1 ):
    """
    Read a file of walker positions (one '[...]' line per walker) and stack
    them into a 2D array, keeping every acor-th line (autocorrelation thinning).
    """
    f = open( filename )

    first_line = f.readline()
    result = string_to_walker( first_line )

    end_of_file = False
    counter = 1
    while not( end_of_file ):
        try:
            next_line = f.readline()
            if (counter % acor) == 0:
                walker = string_to_walker( next_line )
                result = np.concatenate( (result,walker) )
            else: pass
            counter += 1
        except:
            # NOTE(review): readline() returns '' at EOF instead of raising;
            # termination actually relies on np.concatenate failing on the
            # empty parsed line.  The bare except also hides real parse errors.
            end_of_file = True

    f.close()
    return result

def read_prob( filename ):
    """Read one log-probability value per line into an np.array."""
    prob_data = open( filename, 'r' )
    prob = []

    end_of_file = False
    while not end_of_file:
        try:
            newprob = prob_data.readline().strip()
            prob.append( np.float(newprob) )
        except:
            # NOTE(review): loop ends via np.float('') raising ValueError at EOF.
            end_of_file = True

    prob = np.array( prob )
    prob_data.close()
    return prob

## Unpickle stuff
def eat_pickle( filename ):
    """Load and return the contents of a pickle file."""
    pickle_data = open( filename, 'rb' )
    data = cPickle.load( pickle_data )
    pickle_data.close()
    return data

## Simulate the halos
def uniform_halo( filename, params, alpha=ALPHA, **kwargs ):
    """
    Simulate a uniform-ISM halo from the spectrum in `filename`.
    params = (logNH, amax, p)
    """
    logNH, amax, p = params
    return MH.simulate_uniform( filename, \
        NH=np.power(10.0,logNH), a0=AMIN, a1=amax, p=p, alpha=alpha, **kwargs )

def screen_halo( filename, params, alpha=ALPHA, **kwargs ):
    """
    Simulate a single dust-screen halo from the spectrum in `filename`.
    params = (xg, logNH, amax, p)
    """
    xg, logNH, amax, p = params
    return MH.simulate_screen( filename, \
        xg=xg, NH=np.power(10.0,logNH), a0=AMIN, a1=amax, p=p, alpha=alpha, **kwargs)

def sum_interp( sb1, sb2 ):
    ## Takes interp objects and sums them to create another interp object
    ## Assumes same x values for both
    # NOTE(review): sb1.x.all() != sb2.x.all() only compares whether each
    # array is all-nonzero -- it does NOT verify the grids match
    # (np.array_equal(sb1.x, sb2.x) would).  Left as-is pending review.
    if sb1.x.all() != sb2.x.all():
        print 'Error: Interp objects must have same x-axis values'
        return
    else:
        return interp1d( sb1.x, sb1.y + sb2.y )

def multiscreen_halo( specfile, params, amin=AMIN, alpha=ALPHA, **kwargs ):
    """
    Simulate two dust-screen halos and sum their surface brightness profiles.
    params = (x1, x2, logNH1, logNH2, amax, p)
    """
    # NOTE(review): the amin keyword is accepted but never used; the module
    # constant AMIN is hard-coded in both calls below.
    x1, x2, logNH1, logNH2, amax, p = params
    s1 = MH.simulate_screen( specfile, xg=x1, NH=np.power(10.0,logNH1), \
        a0=AMIN, a1=amax, p=p, alpha=alpha, **kwargs )
    s2 = MH.simulate_screen( specfile, xg=x2, NH=np.power(10.0,logNH2), \
        a0=AMIN, a1=amax, p=p, alpha=alpha, **kwargs )
    return sum_interp( s1, s2 )

def uniscreen( specfile, params, alpha=ALPHA, **kwargs ):
    """
    Simulate a uniform ISM component plus one dust screen and sum them.
    params = (logNHu, logNHs, a_u, a_s, p_u, p_s, x_s)
    """
    logNHu, logNHs, a_u, a_s, p_u, p_s, x_s = params
    nhu = np.power( 10.0, logNHu )
    UU = MH.simulate_uniform( specfile, NH=nhu, \
        a0=AMIN, a1=a_u, p=p_u, alpha=alpha, **kwargs )
    nhs = np.power( 10.0, logNHs )
    SS = MH.simulate_screen( specfile, xg=x_s, NH=nhs, \
        a0=AMIN, a1=a_s, p=p_s, alpha=alpha, **kwargs )
    return sum_interp( UU, SS )

def red_chisq( xdata, ydata, sigma, model, nparams ):
    """Reduced chi-squared of model(xdata) against ydata with errors sigma."""
    chi = ( ydata - model(xdata) ) / sigma
    return np.sum(chi**2) / ( len(xdata) - nparams )

def chisq( xdata, ydata, sigma, model ):
    """Chi-squared of model(xdata) against ydata with errors sigma."""
    chi = ( ydata - model(xdata) ) / sigma
    return np.sum(chi**2)

##-------- Some basic plotting stuff -------##

def plot_chains( chainfile, title=None, unit=None, opt_values=None, **kwargs ):
    """
    Plot every walker's trace, one matplotlib figure per parameter dimension.
    chainfile : pickle holding an array of shape (nwalkers, nsteps, ndim)
    title, unit, opt_values : optional per-dimension labels / reference lines
    """
    chain = eat_pickle( chainfile )

    nwalkers, nsteps, ndim = chain.shape

    for d in range(ndim):
        plt.figure()
        for i in range(nwalkers):
            plt.plot( range(nsteps), chain[i,:,d], **kwargs )

        if title != None : plt.title( title[d] )
        if unit != None : plt.ylabel( unit[d] )
        if opt_values != None :
            plt.axhline( opt_values[d], lw=3, ls='--', color='r' )

    return

def plot_whist( walkers, nbins, title=None, unit=None, opt_values=None, \
    histtype='step', **kwargs ):
    """Histogram the walker values, one figure per parameter dimension."""
    ndim = len( walkers[0] )

    for d in range(ndim):
        plt.figure()
        plt.hist( walkers[:,d], nbins, histtype=histtype, **kwargs )
        if title != None : plt.title( title[d] )
        if unit != None : plt.xlabel( unit[d] )
        if opt_values != None :
            plt.axvline( opt_values[d], lw=3, ls='--', color='r' )

    return

def compare_walkers( w1, w2, nbins, wlabels=None, \
    title=None, unit=None, opt_values=None, histtype='stepfilled' ):
    """Overlay histograms of two walker sets, one figure per dimension."""
    ndim = len( w1[0] )
    if wlabels == None : wlabels = ['','']

    for d in range(ndim):
        plt.figure()
        plt.hist( w1[:,d], nbins, histtype='stepfilled', \
            color='k', alpha=0.3, label=wlabels[0] )
        plt.hist( w2[:,d], nbins, histtype='stepfilled', \
            color='b', alpha=0.3, label=wlabels[1] )
        if wlabels != None : plt.legend( loc='upper right', frameon=False )

        if title != None : plt.title( title[d] )
        if unit != None : plt.xlabel( unit[d] )
        if opt_values != None :
            plt.axvline( opt_values[d], lw=3, ls='--', color='k' )

    return

##--------- Grab items from sample ---------##

def sample_halos( sample, isample, mscreen=False, **kwargs ):
    """
    Simulate a halo for each walker indexed by isample.
    NOTE(review): these calls do not match the simulator signatures defined
    above -- uniform_halo/multiscreen_halo take (filename, params, ...), not
    the positional/keyword arguments passed here; as written these calls
    would raise TypeError.  Needs the original driver script to resolve
    the intended interface.
    """
    result = []
    if mscreen:
        for i in isample:
            x1, x2, logNH1, logNH2, amax, p = sample[i]
            NH1, NH2 = np.power(10.0,logNH1), np.power(10.0,logNH2)
            print 'x =', x1, x2, '\tNH =', NH1, NH2, '\tamax =', amax, '\tp =', p
            result.append( multiscreen_halo( x1, x2, NH1, NH2, amax=amax, p=p, **kwargs ) )
    else:
        for i in isample:
            logNH, amax, p = sample[i]
            NH = np.power(10.0,logNH)
            print 'NH =', NH, '\tamax =', amax, '\tp =', p
            result.append( uniform_halo( NH=NH, amax=amax, p=p, **kwargs ) )
    return result

def multiscreen_tau( sample, d2g=0.009, scatm=ss.makeScatmodel('RG','Drude') ):
    """
    Total scattering optical depth at 1 keV for each uniform+screen walker.
    sample : iterable of (logNHu, logNHs, a_u, a_s, p_u, p_s, x_s)
    d2g    : dust-to-gas mass ratio
    """
    # NOTE(review): the scatm default is evaluated once at import time and
    # shared across all calls.
    result = []
    for walker in sample:
        logNHu, logNHs, a_u, a_s, p_u, p_s, x_s = walker
        MDu, MDs = np.power(10.0,logNHu) * c.mp() * d2g, np.power(10.0,logNHs) * c.mp() * d2g
        da_u, da_s = (a_u-AMIN)/10.0, (a_s-AMIN)/10.0
        Udust = dust.Dustdist( rad=np.arange(AMIN,a_u+da_u,da_u), p=p_u )
        Sdust = dust.Dustdist( rad=np.arange(AMIN,a_s+da_s,da_s), p=p_s )
        Ukappa = ss.Kappascat( E=1.0, dist=dust.Dustspectrum( rad=Udust, md=MDu ), scatm=scatm ).kappa[0]
        Skappa = ss.Kappascat( E=1.0, dist=dust.Dustspectrum( rad=Sdust, md=MDs ), scatm=scatm ).kappa[0]
        result.append( Ukappa*MDu + Skappa*MDs )
    return np.array( result )

def sample_tau( sample, d2g=0.009, mscreen=False ):
    """Scattering optical depth at 1 keV for each walker in the sample."""
    result = []
    for walker in sample:
        if mscreen:
            x1, x2, logNH1, logNH2, amax, p = walker
            nhtot = np.power(10.0,logNH1) + np.power(10.0,logNH2)
            md = nhtot * c.mp() * d2g
        else:
            logNH, amax, p = walker
            md = np.power(10.0,logNH) * c.mp() * d2g
        da = (amax-AMIN)/100.0
        DD = dust.Dustdist( rad=np.arange(AMIN,amax+da,da), p=p )
        DS = dust.Dustspectrum( rad=DD, md=md )
        KK = ss.Kappascat( E=1.0, dist=DS ).kappa[0]
        result.append( KK * md )
    return np.array(result)

def sample_logMD( sample, d2g=0.009, replace=False, mscreen=False ):
    """
    log10 of the dust mass column for each walker.
    replace=True returns a copy of the sample with the logNH column(s)
    converted to log dust mass; otherwise the log masses are returned.
    """
    if mscreen:
        nhtot = np.power(10.0,sample[:,2]) + np.power(10.0,sample[:,3])
        logmd = np.log10( nhtot * c.mp() * d2g )
        if replace:
            result = np.copy( sample )
            result[:,2] = sample[:,2] + np.log10( c.mp()*d2g )
            result[:,3] = sample[:,3] + np.log10( c.mp()*d2g )
            return result
    else:
        logmd = sample[:,0] + np.log10( c.mp()*d2g )
        if replace :
            result = np.copy( sample )
            result[:,0] = logmd
            return result
    return logmd

def sample_extinction( sample, lam, isample, \
    NA=20, d2g=0.009, scatm=ss.makeScatmodel('RG','Drude') ):
    """
    Extinction curves (1.086 * tau) over wavelengths lam [cm] for the
    walkers indexed by isample.
    """
    energy = c.kev2lam() / lam # lam must be in cm to get keV
    logMD = sample_logMD( sample )
    MD = np.power( 10.0, logMD )

    result = []
    for i in isample:
        logNH, amax, p = sample[i]
        print 'logNH =', logNH, '\tamax =', amax, '\tp =', p
        da = (amax-AMIN)/np.float(NA)
        dist = dust.Dustdist( rad=np.arange(AMIN,amax+da,da), p=p )
        spec = dust.Dustspectrum( rad=dist, md=MD[i] )
        kappa = ss.Kappascat( E=energy, dist=spec, scatm=scatm ).kappa
        result.append( 1.086 * MD[i] * kappa )

    return result

def extinction_curves( ext_list, lam, V=0.5470 ):
    """Normalize each extinction curve by its value at the V band (lam in micron)."""
    # lam in micron this time
    A_V = []
    curve = []
    for ext in ext_list:
        A_lam = interp1d( lam, ext )
        A_V.append( A_lam(V) )
        curve.append( A_lam.y / A_lam(V) )
    return np.array(A_V), curve

--------------------------------------------------------------------------------
/galhalo.py:
--------------------------------------------------------------------------------

import numpy as np
import constants as c
import dust
import sigma_scat as ss
import cosmology as cosmo
from scipy.interpolate import interp1d
from halo import Halo

class GalHalo(object):
    """
    OBJECT GalHalo( NH=None, d2g=None, xg=None, ismtype=None )
    ** An htype abstract class for storing halo properties (see halo.py)
    NH : float : hydrogen column density [cm^-2]
    d2g : float : dust-to-gas mass ratio
    xg : float[0-1] : position of a dust screen
    ismtype : string : 'Uniform' or 'Screen'
    """
    def __init__( self, NH=None, d2g=None, xg=None, ismtype=None ):
        self.NH = NH
        self.d2g = d2g
        self.xg = xg
        self.ismtype = ismtype

def power_angles( lmin=0.0, lmax=3.5, dl=0.05 ):
    """
    FUNCTION power_angles( lmin=0.0, lmax=3.5, dl=0.05 ) : np.array [arcsec]
    Create an array of angles [arcsec] that are logarithmically spaced.
    -----------------------------------------
    lmin : float : log10( amin )
    lmax : float : log10( amax )
    dl : float : logarithmic spacing
    """
    return np.power( 10.0, np.arange(lmin,lmax,dl) )

class Ihalo(object):
    """
    A self-similar halo object [i(theta)], azimuthally symmetric, interpolatable
    ------------------------------------------------------------------
    theta : np.array : theta values used to derive the object [arcsec]
    itemp : np.array : values with respective theta [cm^2 arcsec^-2]
    rad : float : Grain size used to derive the object [um]
    ener : float : Photon energy used to derive the object [keV]
    scatm : ss.Scatmodel : Scattering model used to derive the object
    -------------------------------------------------------------------
    __init__( theta[np.array], rad[float], ener[float], scatm[ss.Scatmodel] )
    ihalo( theta ) : [cm^2 arcsec^-2]
    """
    # NOTE(review): the theta and scatm defaults below are evaluated once,
    # when the class body runs, and shared by every Ihalo() construction.
    def __init__( self, theta=power_angles(), \
        scatm=ss.Scatmodel(), \
        rad=0.1, ener=1.0, nx=1000 ):
        # Theta automatically sampled in log space.
        # If I don't do it this way, linear interpolation easily fails
        # for small angles (between theta[0] and theta[1]). Since
        # most of my plots are in log-space, it makes more sense to
        # sample logarithmically.

        if np.size(theta) < 2:
            print 'Error: Must give more than one theta value'
            # NOTE(review): there is no return here, so these None
            # assignments are immediately overwritten below and the error
            # case falls through into the integration.
            self.theta = None
            self.rad = None
            self.ener = None
            self.scatm = None

        self.theta = theta
        self.rad = rad
        self.ener = ener
        self.scatm = scatm

        dxi = 1.0 / np.float(nx)
        xi = np.arange( 0, 1.0, dxi ) + dxi
        itemp = np.array([])

        # Integrate the differential cross-section over the fractional
        # line-of-sight position xi for each observation angle.
        for th in self.theta:
            thscat = th / xi
            dsig = ss.Diffscat( theta=thscat, scatm=self.scatm, E=self.ener, a=self.rad ).dsig
            itemp = np.append( itemp, \
                c.intz( xi, dsig/(xi**2) ) )

        self.itemp = itemp * c.arcs2rad()**2


    def ihalo( self, theta ):
        """Interpolate the halo profile at theta [arcsec]; zero outside the grid."""
        # NOTE(review): self.theta == None is an elementwise comparison when
        # self.theta is an array, so this guard is only meaningful for the
        # scalar/None case.
        if self.theta == None:
            print 'Error: Empty ihalo object'
            return

        min_th = np.min(self.theta)
        max_th = np.max(self.theta)

        if np.min(theta) < min_th:
            print 'Note: Lower bounds of interpolation exceeded.'
        if np.max(theta) > max_th:
            print 'Note: Upper bounds of interpolation exceeded.'

        just_right = np.where( np.logical_and( theta >= min_th, theta <= max_th ) )

        halo_interp = interp1d( self.theta, self.itemp )
        result = np.zeros( np.size(theta) )
        result[just_right] = halo_interp( theta[just_right] )

        return result

## As of May 20, 2012 -- it compiles!

##------------------------------------------------------------------------------

def make_Ihalo_dict( rad=dust.adist(), ener=1.0, \
    theta=power_angles(),
    scatm=ss.Scatmodel(), nx=1000 ):
    """
    def make_Ihalo_dict( rad[np.array], ener[float], theta[np.array], scatm[ss.Scatmodel], nx[int] )
    RETURNS : A dictionary of Ihalo objects, with grain sizes as keys.
    """

    if np.size(ener) > 1:
        print 'Error: Can only choose one value for energy'
        return
    if np.size(theta) < 2:
        print 'Error: Must give more than one theta value'
        return
    if np.size(rad) == 1:
        print 'Error: Input "rad" must be an iterable object.'
        return

    keys = []
    halo_objs = []
    for aval in rad:
        keys.append( aval )
        halo_objs.append( Ihalo( theta=theta, rad=aval, ener=ener, scatm=scatm, nx=nx ) )

    return dict( zip(keys,halo_objs) )

#--------------- Galactic Halos --------------------

def path_diff( alpha, x ):
    """
    path_diff( alpha, x )
    -----------------------
    INPUT
    alpha : scalar : observation angle [arcsec]
    x : scalar or np.array : position of dust patch (source is at x=0, observer at x=1)
    -----------------------
    OUTPUT
    path difference associated with a particular alpha and x : alpha^2*(1-x)/(2x)
    """

    if np.size( alpha ) > 1:
        print 'Error: np.size(alpha) cannot be greater than one.'
        return
    if np.max( x ) > 1.0 or np.min( x ) < 0:
        print 'Error: x must be between 0 and 1'
        return

    alpha_rad = alpha * c.arcs2rad()

    return alpha_rad**2 * (1-x) / (2*x)

## May 16, 2012: Added e^-kappa_x \delta x to the integral
def UniformISM( halo, NH=1.0e20, d2g=0.009, nx=1000, usepathdiff=False ):
    """
    FUNCTION UniformISM( halo, NH=1.0e20, d2g=0.009, nx=1000, usepathdiff=False )
    MODIFIES halo.htype, halo.dist, halo.taux, halo.intensity
    ----------------------------------------------------------------------------
    halo : Halo object
    NH : float : column density [cm^-2]
    d2g : float : dust-to-gas mass ratio
    nx : int : number of values to use in integration
    usepathdiff : boolean : True = use extinction due to path difference e^(-tau*path_diff)
    """
    E0 = halo.energy
    alpha = halo.alpha
    scatm = halo.scatm
    md = NH * c.mp() * d2g

    halo.htype = GalHalo( NH=NH, d2g=d2g, ismtype='Uniform' )
    halo.dist = dust.Dustspectrum( rad=halo.rad, md=md )
    halo.taux = ss.Kappascat( E=halo.energy, scatm=halo.scatm, dist=halo.dist ).kappa * md

    dx = 1.0 / nx
    xvals = np.arange( 0.0, 1.0, dx ) + dx

    #--- Single grain case ---

    if type( halo.rad ) == dust.Grain:

        intensity = np.array([])
        for al in alpha:
            thscat = al / xvals # np.size(thscat) = nx
            dsig = ss.Diffscat( theta=thscat, a=halo.dist.a, E=E0, scatm=scatm ).dsig

            delta_tau = 0.0
            if usepathdiff:
                print 'Using path difference'
                delta_x = path_diff( al, xvals )
                delta_tau = halo.taux * delta_x
                print np.max( delta_x )

            itemp = np.power( xvals, -2.0 ) * dsig * halo.dist.nd * np.exp( -delta_tau )
            intensity = np.append( intensity, c.intz( xvals, itemp ) )

    #--- Dust distribution case ---

    elif type( halo.rad ) == dust.Dustdist:

        avals = halo.dist.a
        intensity = np.array([])

        for al in alpha:
            thscat = al / xvals # np.size(thscat) = nx
            iatemp = np.array([])
            for aa in avals:
                dsig = ss.Diffscat( theta=thscat, a=aa, E=E0, scatm=scatm ).dsig

                delta_tau = 0.0
                if usepathdiff:
                    print 'Using path difference'
                    delta_x = path_diff( al, xvals )
                    delta_tau = halo.taux * delta_x
                    print max( delta_x )

                dtemp = np.power( xvals, -2.0 ) * dsig * np.exp( -delta_tau )
                iatemp = np.append( iatemp, c.intz( xvals, dtemp ) )

            intensity = np.append( intensity, c.intz( avals, halo.dist.nd * iatemp ) )

    else:
        print '%% Must input type dust.Grain or dust.Dustdist'
        intensity = np.zeros( np.size(xvals) )

    # Set the halo intensity

    halo.intensity = intensity * np.power( c.arcs2rad(), 2 ) # arcsec^-2
    # halo.taux set at beginning of function so it could be called for later use

def DiscreteISM( halo, xg=0.5, NH=1.0e20, d2g=0.009 ):
    """
    FUNCTION DiscreteISM( halo, xg=0.5, NH=1.0e20, d2g=0.009 )
    MODIFIES halo.htype, halo.dist, halo.taux, halo.intensity
    ----------------------------------------------------------------------------
    halo : Halo object
    xg : float : distance FROM source / distance between source and observer
    NH : float : column density [cm^-2]
    d2g : float : dust-to-gas mass ratio
    """
    E0 = halo.energy
    alpha = halo.alpha
    scatm = halo.scatm
    md = NH * c.mp() * d2g

    halo.htype = GalHalo( xg=xg, NH=NH, d2g=d2g, ismtype='Screen' )
    halo.dist = dust.Dustspectrum( rad=halo.rad, md=md )

    thscat = alpha / xg

    if type(halo.rad) == dust.Grain:
        dsig = ss.Diffscat( theta=thscat, a=halo.dist.a, E=E0, scatm=scatm ).dsig
        intensity = np.power( xg, -2.0 ) * dsig * halo.dist.nd

    elif type(halo.rad) == dust.Dustdist:
        avals = halo.dist.a
        intensity = []
        # NOTE(review): iatemp is rebuilt from scratch for every alpha but
        # only column i is consumed, making this loop O(len(alpha)^2).
        for i in range( len(alpha) ):
            iatemp = np.zeros( shape=( len(avals),len(alpha) ) )
            for j in range( len(avals) ):
                dsig = ss.Diffscat( theta=thscat, a=avals[j], E=E0, scatm=scatm ).dsig
                iatemp[j,:] = np.power(xg,-2.0) * dsig
            intensity.append( c.intz( avals, iatemp[:,i] * halo.dist.nd ) )
        intensity = np.array( intensity )
    else:
        print '%% Must input type dust.Grain or dust.Dustdist'
        # NOTE(review): xvals is never defined in this function (it was
        # copied from UniformISM); this branch would raise NameError.
        intensity = np.zeros( np.size(xvals) )

    halo.intensity = intensity * np.power( c.arcs2rad(), 2 ) # arcsec^-2
    halo.taux = ss.Kappascat( E=halo.energy, scatm=halo.scatm, dist=halo.dist ).kappa * md


--------------------------------------------------------------------------------
/model_halo.py:
--------------------------------------------------------------------------------

#! /Library/Frameworks/EPD64.framework/Versions/Current/bin/python

import numpy as np

import galhalo as GH
import halodict as HD
import analytic as AH
import constants as c

from scipy.interpolate import interp1d
from scipy import logical_and
## Bizarre -- I had to import logical_and for some reason

## December 11, 2014 : Remove dependence on AEFF, which came from PIMMS
## Instead of simulating counts/pix^2, using raw flux/arcsec^2

## UPDATED June 11, 2013 : I want to make halo_lib
## independent of asciidata, radprofile, and aeff
## CAVEAT : Path to effective area data file needs to be specified

## UPDATED April 4, 2013 : To include analytic solutions for galactic ISM
## (RG Gans + Drude solution only)

## CREATED August 29, 2012 : Read spectrum from a text file with same
## format as that required for ChaRT simulation. (Columns are energy
## [keV] and flux [photons/s/cm**2], tab separated.) Then use this to
## simulate a halo.
I will set the parameters here, but in the future 28 | ## it would be nice to read them from a parameter file or using some 29 | ## other flexible input. 30 | 31 | #--------------------------------------------------------------------- 32 | ## Model parameters : As of Aug 29, these come from Predehl & Schmitt 1995 33 | ## April 4, 2013 : Updated from more recent data 34 | P = 3.5 35 | AMIN = 0.05 36 | AMAX = 0.3 37 | NH = 3.6e22 38 | 39 | ## Set up the model 40 | ## MAGIC NUMBERS are in ALLCAPS 41 | NA = 50 42 | da = ( np.log10(AMAX) - np.log10(AMIN) ) / NA 43 | avals = np.power( 10.0, np.arange( np.log10(AMIN), np.log10(AMAX)+da, da ) ) 44 | grains = GH.dust.Dustdist( p=P, rad=avals ) 45 | 46 | SCATM = GH.ss.Scatmodel() 47 | ALPHA = np.power( 10.0, np.arange(0.0,3.01,0.1) ) 48 | 49 | #--------------------------------------------------------------------- 50 | 51 | def screen( halodict, xg=0.5, NH=NH, d2g=0.009, verbose=False ): 52 | """ 53 | Performs numerical integration dust screen calculation with each halo in halodict 54 | ---------------------------------------------------- 55 | FUNCTION screen( halodict, xg=0.5, NH=NH, d2g=0.009, verbose=False ) 56 | RETURNS : empty 57 | ---------------------------------------------------- 58 | halodict : halodict.HaloDict object 59 | xg : float [0-1] : position of screen where 0 = point source, 1 = observer 60 | NH : float [cm^-2] : Hydrogen column 61 | d2g : float : Dust-to-gas mass ratio 62 | verbose : boolean : If true, print halo energy at each calculation step 63 | """ 64 | print 'Numerically integrating halo model for a dust screen at x =', xg 65 | AH.set_htype( halodict, xg=xg, NH=NH, d2g=d2g ) 66 | for i in range( halodict.len ): 67 | if verbose: print 'Calculating halo energy :', halodict.energy[i], ' keV' 68 | halo_temp = GH.Halo( halodict.energy[i], alpha=halodict.alpha, \ 69 | scatm=halodict.scatm, rad=halodict.rad ) 70 | GH.DiscreteISM( halo_temp, xg=xg, NH=NH, d2g=d2g ) 71 | halodict.intensity[i,:] = 
halo_temp.intensity 72 | return 73 | 74 | def uniform( halodict, NH=NH, d2g=0.009, verbose=False ): 75 | """ 76 | Performs numerical integration with uniform dust distribution 77 | calculation with each halo in halodict 78 | ---------------------------------------------------- 79 | FUNCTION uniform( halodict, NH=NH, d2g=0.009, verbose=False ) 80 | RETURNS : empty 81 | ---------------------------------------------------- 82 | halodict : halodict.HaloDict object 83 | NH : float [cm^-2] : Hydrogen column 84 | d2g : float : Dust-to-gas mass ratio 85 | verbose : boolean : If true, print halo energy at each calculation step 86 | """ 87 | print 'Numerically integrating halo model for uniform ISM' 88 | AH.set_htype( halodict, NH=NH, d2g=d2g ) 89 | for i in range( halodict.len ): 90 | if verbose: print 'Calculating halo energy:', halodict.energy[i], ' keV' 91 | halo_temp = GH.Halo( halodict.energy[i], alpha=halodict.alpha, \ 92 | scatm=halodict.scatm, rad=halodict.rad ) 93 | GH.UniformISM( halo_temp, NH=NH, d2g=d2g ) 94 | halodict.intensity[i,:] = halo_temp.intensity 95 | return 96 | 97 | #--------------------------------------------------------------------- 98 | ## Use the corrected flux [I_app = I_PS * exp(tau)] to find the total 99 | ## halo brightness 100 | 101 | def totalhalo( halodict, spectrum ): 102 | """ 103 | Alters halodict by running halodict.HaloDict.total_halo( corrflux ) 104 | ---------------------------------------------------- 105 | FUNCTION totalhalo( halodict, spectrum ) 106 | RETURNS : np.array : Corrected flux before scattering (F_a) 107 | assuming F_PS = F_a exp(-tau) 108 | ---------------------------------------------------- 109 | halodict : halodict.HaloDict object 110 | spectrum : flux for the energy values associated with halodict 111 | """ 112 | corrflux = spectrum * np.exp( halodict.taux ) 113 | halodict.total_halo( corrflux ) 114 | return corrflux 115 | 116 | #--------------------------------------------------------------------- 117 | 118 | def 
simulate_intensity( halodict, spectrum ): 119 | ''' 120 | Take a halo dictionary with a simulated halo, 121 | and simulate a Chandra surface brightness profile with it. 122 | ---------------------------------------------------- 123 | FUNCTION simulate_intensity( halodict, spectrum ) 124 | RETURNS : scipy.interpolate.interp1d object : x = arcsec, y = flux/arcsec^2 125 | ---------------------------------------------------- 126 | halodict : halodict.HaloDict object 127 | spectrum : flux for each energy value in halodict 128 | ''' 129 | arcsec2pix = 0.5 #arcsec/pix (Chandra) 130 | result = 0.0 131 | 132 | corr_flux = spectrum * np.exp( halodict.taux ) 133 | 134 | NE, NA = halodict.len, halodict.hsize 135 | halo_flux = np.tile( corr_flux.reshape(NE,1), NA ) * halodict.intensity 136 | # flux/arcsec^2 137 | 138 | result = np.sum( halo_flux, 0 ) 139 | return interp1d( halodict.alpha, result ) # arcsec, flux/arcsec^2 140 | 141 | def simulate_screen( specfile, a0=0.05, a1=None, p=3.5, \ 142 | NH=1.0e22, d2g=0.009, xg=0.5, rho=3.0, return_dict=False, \ 143 | alpha=ALPHA, scatm=SCATM, elim=None, na=50, v=False ): 144 | ''' 145 | Simulate a surface brightness profile from spectrum file 146 | for a screen of dust at xg, using 3-5 free parameters 147 | ---------------------------------------------------- 148 | FUNCTION simulate_screen( specfile, a0=0.1, a1=None, p=3.5, d2g=0.009, xg=0.5, \ 149 | alpha=ALPHA, dict=False, scatm=SCATM, elim=None, na=50 ) 150 | RETURNS : if dict == False : 151 | scipy.interpolate.interp1d object : x = pixels, y = counts/pix^2 152 | if dict == True : 153 | HaloDict object with full benefits of information 154 | ---------------------------------------------------- 155 | specfile : string : Name of spectrum file 156 | a0 : float [um] : Minimum (or single) grain size to use (0.05) 157 | a1 : float [um] : Maximum grain size for distribution (if None, single used) 158 | p : float : Power law index for grain size distribution 159 | NH : float [cm^-2] : 
Hyrdogen column (1.0e22) 160 | d2g : float : Dust-to-gas mass ratio (0.009) 161 | rho : float [g cm^-3] : mass density of a dust grain (3) 162 | dict : boolean (False) : if True, returns halodict instead of interp object 163 | xg : float [0-1] : Position of screen where 0 = point source, 1 = observer 164 | alpha : np.array [arcsec] : Angles for halo intensity values 165 | scatm : ss.Scatmodel() 166 | elim : tuple containing energy limits [keV] 167 | na : number of bins to use for grain size distribution 168 | ''' 169 | energy, flux = HD.get_spectrum( specfile ) 170 | if a1 == None: 171 | dust_dist = GH.dust.Grain( rad=a0, rho=rho ) 172 | else: 173 | dth = (a1-a0)/na 174 | dust_dist = GH.dust.Dustdist( p=p, rad=np.arange(a0,a1+dth,dth), rho=rho ) 175 | 176 | ii = range( len(energy) ) 177 | if elim != None: 178 | if v: print 'Limiting energy to values between', elim[0], 'and', elim[1], 'keV' 179 | ii = np.where( logical_and( energy>=elim[0], energy<=elim[1] ) )[0] 180 | 181 | halo_dict = HD.HaloDict( energy[ii], rad=dust_dist, scatm=scatm, alpha=alpha ) 182 | AH.screen_eq( halo_dict, xg=xg, NH=NH, d2g=d2g ) 183 | result = simulate_intensity( halo_dict, flux[ii] ) 184 | 185 | if return_dict : return halo_dict 186 | else : return result 187 | 188 | def simulate_uniform( specfile, a0=0.1, a1=None, p=3.5, \ 189 | NH=1.0e22, d2g=0.009, rho=3.0, return_dict=False, \ 190 | alpha=ALPHA, scatm=SCATM, elim=None, na=50, v=False ): 191 | ''' 192 | Simulate a surface brightness profile from spectrum file 193 | for a uniform distribution of dust, using 2-4 free parameters 194 | ---------------------------------------------------- 195 | FUNCTION simulate_screen( specfile, a0=0.1, a1=None, p=3.5, d2g=0.009, xg=0.5, \ 196 | alpha=ALPHA, dict=False, scatm=SCATM, elim=None, na=50 ) 197 | RETURNS : if dict == False : 198 | scipy.interpolate.interp1d object : x = pixels, y = counts/pix^2 199 | if dict == True : 200 | HaloDict object with full benefits of information 201 | 
---------------------------------------------------- 202 | specfile : string : Name of spectrum file 203 | a0 : float [um] : Minimum (or single) grain size to use 204 | a1 : float [um] : Maximum grain size for distribution (if None, single used) 205 | p : float : Power law index for grain size distribution 206 | NH : float [cm^-2] : Hyrdogen column (1.0e22) 207 | d2g : float : Dust-to-gas mass ratio (0.009) 208 | rho : float [g cm^-3] : mass density of a dust grain (3) 209 | dict : boolean (False) : if True, returns halodict instead of interp object 210 | alpha : np.array [arcsec] : Angles for halo intensity values 211 | aeff : intper1d object : x = energy [keV], y = effective area [cm^2] 212 | exposure : float [sec] : Observation exposure time 213 | scatm : ss.Scatmodel() 214 | elim : tuple containing energy limits [keV] 215 | na : number of bins to use for grain size distribution 216 | ''' 217 | energy, flux = HD.get_spectrum( specfile ) 218 | if a1 == None: 219 | dust_dist = GH.dust.Grain( rad=a0, rho=rho ) 220 | else: 221 | dth = (a1-a0)/na 222 | dust_dist = GH.dust.Dustdist( p=p, rad=np.arange(a0,a1+dth,dth), rho=rho ) 223 | 224 | ii = range( len(energy) ) 225 | if elim != None: 226 | if v: print 'Limiting energy to values between', elim[0], 'and', elim[1], 'keV' 227 | ii = np.where( logical_and( energy>=elim[0], energy<=elim[1] ) )[0] 228 | 229 | halo_dict = HD.HaloDict( energy[ii], rad=dust_dist, scatm=scatm, alpha=alpha ) 230 | AH.uniform_eq( halo_dict, NH=NH, d2g=d2g ) 231 | result = simulate_intensity( halo_dict, flux[ii] ) 232 | 233 | if return_dict : return halo_dict 234 | else : return result 235 | 236 | -------------------------------------------------------------------------------- /docs/_build/html/_static/underscore.js: -------------------------------------------------------------------------------- 1 | // Underscore.js 1.3.1 2 | // (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc. 3 | // Underscore is freely distributable under the MIT license. 
4 | // Portions of Underscore are inspired or borrowed from Prototype, 5 | // Oliver Steele's Functional, and John Resig's Micro-Templating. 6 | // For all details and documentation: 7 | // http://documentcloud.github.com/underscore 8 | (function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source== 9 | c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c, 10 | h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each= 11 | b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e2;a== 12 | null&&(a=[]);if(y&&a.reduce===y)return 
e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect= 13 | function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e= 14 | e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck= 15 | function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var 
e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;bd?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a, 17 | c,d){d||(d=b.identity);for(var e=0,f=a.length;e>1;d(a[g])=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e=0;d--)b=[a[d].apply(this,b)];return b[0]}}; 24 | b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments, 25 | 1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)}; 26 | b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return 
a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"}; 27 | b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e/g,">").replace(/"/g,""").replace(/'/g,"'").replace(/\//g,"/")};b.mixin=function(a){j(b.functions(a), 28 | function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+ 29 | u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]= 30 | function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var 
b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain= 31 | true;return this};m.prototype.value=function(){return this._wrapped}}).call(this); 32 | -------------------------------------------------------------------------------- /docs/_build/html/_static/alabaster.css: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | @import url("basic.css"); 22 | 23 | /* -- page layout ----------------------------------------------------------- */ 24 | 25 | body { 26 | font-family: 'goudy old style', 'minion pro', 'bell mt', Georgia, 'Hiragino Mincho Pro', serif; 27 | font-size: 17px; 28 | background-color: white; 29 | color: #000; 30 | margin: 0; 31 | padding: 0; 32 | } 33 | 34 | div.document { 35 | width: 940px; 36 | margin: 30px auto 0 auto; 37 | } 38 | 39 | div.documentwrapper { 40 | float: left; 41 | width: 100%; 42 | } 43 | 44 | div.bodywrapper { 45 | margin: 0 0 0 220px; 46 | } 47 | 48 | div.sphinxsidebar { 49 | width: 220px; 50 | } 51 | 52 | hr { 53 | border: 1px solid #B1B4B6; 54 | } 55 | 56 | div.body { 57 | background-color: #ffffff; 58 | color: #3E4349; 59 | padding: 0 30px 0 30px; 60 | } 61 | 62 | div.footer { 63 | width: 940px; 64 | margin: 20px auto 30px auto; 65 | font-size: 14px; 66 | color: #888; 67 | text-align: right; 68 | } 69 | 70 | div.footer a { 71 | color: #888; 72 | } 73 | 74 | div.related { 75 | display: none; 76 | } 77 | 78 | div.sphinxsidebar a { 79 | color: #444; 80 | text-decoration: none; 81 | border-bottom: 1px dotted #999; 82 | } 83 | 84 | div.sphinxsidebar a:hover { 85 | border-bottom: 1px solid #999; 86 | } 87 | 88 | div.sphinxsidebar { 89 | font-size: 14px; 90 | line-height: 1.5; 91 | } 92 | 93 | div.sphinxsidebarwrapper { 94 | padding: 18px 10px; 95 | } 96 | 97 | div.sphinxsidebarwrapper p.logo { 98 | padding: 0; 99 | margin: -10px 0 0 0px; 
100 | text-align: center; 101 | } 102 | 103 | div.sphinxsidebarwrapper h1.logo { 104 | margin-top: -10px; 105 | text-align: center; 106 | margin-bottom: 5px; 107 | text-align: left; 108 | } 109 | 110 | div.sphinxsidebarwrapper h1.logo-name { 111 | margin-top: 0px; 112 | } 113 | 114 | div.sphinxsidebarwrapper p.blurb { 115 | margin-top: 0; 116 | font-style: normal; 117 | } 118 | 119 | div.sphinxsidebar h3, 120 | div.sphinxsidebar h4 { 121 | font-family: 'Garamond', 'Georgia', serif; 122 | color: #444; 123 | font-size: 24px; 124 | font-weight: normal; 125 | margin: 0 0 5px 0; 126 | padding: 0; 127 | } 128 | 129 | div.sphinxsidebar h4 { 130 | font-size: 20px; 131 | } 132 | 133 | div.sphinxsidebar h3 a { 134 | color: #444; 135 | } 136 | 137 | div.sphinxsidebar p.logo a, 138 | div.sphinxsidebar h3 a, 139 | div.sphinxsidebar p.logo a:hover, 140 | div.sphinxsidebar h3 a:hover { 141 | border: none; 142 | } 143 | 144 | div.sphinxsidebar p { 145 | color: #555; 146 | margin: 10px 0; 147 | } 148 | 149 | div.sphinxsidebar ul { 150 | margin: 10px 0; 151 | padding: 0; 152 | color: #000; 153 | } 154 | 155 | div.sphinxsidebar ul li.toctree-l1 > a { 156 | font-size: 120%; 157 | } 158 | 159 | div.sphinxsidebar ul li.toctree-l2 > a { 160 | font-size: 110%; 161 | } 162 | 163 | div.sphinxsidebar input { 164 | border: 1px solid #CCC; 165 | font-family: 'goudy old style', 'minion pro', 'bell mt', Georgia, 'Hiragino Mincho Pro', serif; 166 | font-size: 1em; 167 | } 168 | 169 | div.sphinxsidebar hr { 170 | border: none; 171 | height: 1px; 172 | color: #999; 173 | background: #999; 174 | 175 | text-align: left; 176 | margin-left: 0; 177 | width: 50%; 178 | } 179 | 180 | /* -- body styles ----------------------------------------------------------- */ 181 | 182 | a { 183 | color: #004B6B; 184 | text-decoration: underline; 185 | } 186 | 187 | a:hover { 188 | color: #6D4100; 189 | text-decoration: underline; 190 | } 191 | 192 | div.body h1, 193 | div.body h2, 194 | div.body h3, 195 | div.body 
h4, 196 | div.body h5, 197 | div.body h6 { 198 | font-family: 'Garamond', 'Georgia', serif; 199 | font-weight: normal; 200 | margin: 30px 0px 10px 0px; 201 | padding: 0; 202 | } 203 | 204 | div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } 205 | div.body h2 { font-size: 180%; } 206 | div.body h3 { font-size: 150%; } 207 | div.body h4 { font-size: 130%; } 208 | div.body h5 { font-size: 100%; } 209 | div.body h6 { font-size: 100%; } 210 | 211 | a.headerlink { 212 | color: #DDD; 213 | padding: 0 4px; 214 | text-decoration: none; 215 | } 216 | 217 | a.headerlink:hover { 218 | color: #444; 219 | background: #EAEAEA; 220 | } 221 | 222 | div.body p, div.body dd, div.body li { 223 | line-height: 1.4em; 224 | } 225 | 226 | div.admonition { 227 | margin: 20px 0px; 228 | padding: 10px 30px; 229 | background-color: #FCC; 230 | border: 1px solid #FAA; 231 | } 232 | 233 | div.admonition tt.xref, div.admonition a tt { 234 | border-bottom: 1px solid #fafafa; 235 | } 236 | 237 | dd div.admonition { 238 | margin-left: -60px; 239 | padding-left: 60px; 240 | } 241 | 242 | div.admonition p.admonition-title { 243 | font-family: 'Garamond', 'Georgia', serif; 244 | font-weight: normal; 245 | font-size: 24px; 246 | margin: 0 0 10px 0; 247 | padding: 0; 248 | line-height: 1; 249 | } 250 | 251 | div.admonition p.last { 252 | margin-bottom: 0; 253 | } 254 | 255 | div.highlight { 256 | background-color: white; 257 | } 258 | 259 | dt:target, .highlight { 260 | background: #FAF3E8; 261 | } 262 | 263 | div.note { 264 | background-color: #EEE; 265 | border: 1px solid #CCC; 266 | } 267 | 268 | div.seealso { 269 | background-color: #EEE; 270 | border: 1px solid #CCC; 271 | } 272 | 273 | div.topic { 274 | background-color: #eee; 275 | } 276 | 277 | p.admonition-title { 278 | display: inline; 279 | } 280 | 281 | p.admonition-title:after { 282 | content: ":"; 283 | } 284 | 285 | pre, tt, code { 286 | font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', 
monospace; 287 | font-size: 0.9em; 288 | } 289 | 290 | img.screenshot { 291 | } 292 | 293 | tt.descname, tt.descclassname, code.descname, code.descclassname { 294 | font-size: 0.95em; 295 | } 296 | 297 | tt.descname, code.descname { 298 | padding-right: 0.08em; 299 | } 300 | 301 | img.screenshot { 302 | -moz-box-shadow: 2px 2px 4px #eee; 303 | -webkit-box-shadow: 2px 2px 4px #eee; 304 | box-shadow: 2px 2px 4px #eee; 305 | } 306 | 307 | table.docutils { 308 | border: 1px solid #888; 309 | -moz-box-shadow: 2px 2px 4px #eee; 310 | -webkit-box-shadow: 2px 2px 4px #eee; 311 | box-shadow: 2px 2px 4px #eee; 312 | } 313 | 314 | table.docutils td, table.docutils th { 315 | border: 1px solid #888; 316 | padding: 0.25em 0.7em; 317 | } 318 | 319 | table.field-list, table.footnote { 320 | border: none; 321 | -moz-box-shadow: none; 322 | -webkit-box-shadow: none; 323 | box-shadow: none; 324 | } 325 | 326 | table.footnote { 327 | margin: 15px 0; 328 | width: 100%; 329 | border: 1px solid #EEE; 330 | background: #FDFDFD; 331 | font-size: 0.9em; 332 | } 333 | 334 | table.footnote + table.footnote { 335 | margin-top: -15px; 336 | border-top: none; 337 | } 338 | 339 | table.field-list th { 340 | padding: 0 0.8em 0 0; 341 | } 342 | 343 | table.field-list td { 344 | padding: 0; 345 | } 346 | 347 | table.footnote td.label { 348 | width: 0px; 349 | padding: 0.3em 0 0.3em 0.5em; 350 | } 351 | 352 | table.footnote td { 353 | padding: 0.3em 0.5em; 354 | } 355 | 356 | dl { 357 | margin: 0; 358 | padding: 0; 359 | } 360 | 361 | dl dd { 362 | margin-left: 30px; 363 | } 364 | 365 | blockquote { 366 | margin: 0 0 0 30px; 367 | padding: 0; 368 | } 369 | 370 | ul, ol { 371 | margin: 10px 0 10px 30px; 372 | padding: 0; 373 | } 374 | 375 | pre { 376 | background: #EEE; 377 | padding: 7px 30px; 378 | margin: 15px 0px; 379 | line-height: 1.3em; 380 | } 381 | 382 | dl pre, blockquote pre, li pre { 383 | margin-left: -60px; 384 | padding-left: 60px; 385 | } 386 | 387 | dl dl pre { 388 | margin-left: 
-90px; 389 | padding-left: 90px; 390 | } 391 | 392 | tt, code { 393 | background-color: #ecf0f3; 394 | color: #222; 395 | /* padding: 1px 2px; */ 396 | } 397 | 398 | tt.xref, code.xref, a tt { 399 | background-color: #FBFBFB; 400 | border-bottom: 1px solid white; 401 | } 402 | 403 | a.reference { 404 | text-decoration: none; 405 | border-bottom: 1px dotted #004B6B; 406 | } 407 | 408 | a.reference:hover { 409 | border-bottom: 1px solid #6D4100; 410 | } 411 | 412 | a.footnote-reference { 413 | text-decoration: none; 414 | font-size: 0.7em; 415 | vertical-align: top; 416 | border-bottom: 1px dotted #004B6B; 417 | } 418 | 419 | a.footnote-reference:hover { 420 | border-bottom: 1px solid #6D4100; 421 | } 422 | 423 | a:hover tt, a:hover code { 424 | background: #EEE; 425 | } 426 | 427 | 428 | @media screen and (max-width: 870px) { 429 | 430 | div.sphinxsidebar { 431 | display: none; 432 | } 433 | 434 | div.document { 435 | width: 100%; 436 | 437 | } 438 | 439 | div.documentwrapper { 440 | margin-left: 0; 441 | margin-top: 0; 442 | margin-right: 0; 443 | margin-bottom: 0; 444 | } 445 | 446 | div.bodywrapper { 447 | margin-top: 0; 448 | margin-right: 0; 449 | margin-bottom: 0; 450 | margin-left: 0; 451 | } 452 | 453 | ul { 454 | margin-left: 0; 455 | } 456 | 457 | .document { 458 | width: auto; 459 | } 460 | 461 | .footer { 462 | width: auto; 463 | } 464 | 465 | .bodywrapper { 466 | margin: 0; 467 | } 468 | 469 | .footer { 470 | width: auto; 471 | } 472 | 473 | .github { 474 | display: none; 475 | } 476 | 477 | 478 | 479 | } 480 | 481 | 482 | 483 | @media screen and (max-width: 875px) { 484 | 485 | body { 486 | margin: 0; 487 | padding: 20px 30px; 488 | } 489 | 490 | div.documentwrapper { 491 | float: none; 492 | background: white; 493 | } 494 | 495 | div.sphinxsidebar { 496 | display: block; 497 | float: none; 498 | width: 102.5%; 499 | margin: 50px -30px -20px -30px; 500 | padding: 10px 20px; 501 | background: #333; 502 | color: #FFF; 503 | } 504 | 505 | 
div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, 506 | div.sphinxsidebar h3 a { 507 | color: white; 508 | } 509 | 510 | div.sphinxsidebar a { 511 | color: #AAA; 512 | } 513 | 514 | div.sphinxsidebar p.logo { 515 | display: none; 516 | } 517 | 518 | div.document { 519 | width: 100%; 520 | margin: 0; 521 | } 522 | 523 | div.related { 524 | display: block; 525 | margin: 0; 526 | padding: 10px 0 20px 0; 527 | } 528 | 529 | div.related ul, 530 | div.related ul li { 531 | margin: 0; 532 | padding: 0; 533 | } 534 | 535 | div.footer { 536 | display: none; 537 | } 538 | 539 | div.bodywrapper { 540 | margin: 0; 541 | } 542 | 543 | div.body { 544 | min-height: 0; 545 | padding: 0; 546 | } 547 | 548 | .rtd_doc_footer { 549 | display: none; 550 | } 551 | 552 | .document { 553 | width: auto; 554 | } 555 | 556 | .footer { 557 | width: auto; 558 | } 559 | 560 | .footer { 561 | width: auto; 562 | } 563 | 564 | .github { 565 | display: none; 566 | } 567 | } 568 | 569 | 570 | /* misc. */ 571 | 572 | .revsys-inline { 573 | display: none!important; 574 | } 575 | 576 | /* Make nested-list/multi-paragraph items look better in Releases changelog 577 | * pages. Without this, docutils' magical list fuckery causes inconsistent 578 | * formatting between different release sub-lists. 
579 | */ 580 | div#changelog > div.section > ul > li > p:only-child { 581 | margin-bottom: 0; 582 | } 583 | 584 | /* Hide fugly table cell borders in ..bibliography:: directive output */ 585 | table.docutils.citation, table.docutils.citation td, table.docutils.citation th { 586 | border: none; 587 | /* Below needed in some edge cases; if not applied, bottom shadows appear */ 588 | -moz-box-shadow: none; 589 | -webkit-box-shadow: none; 590 | box-shadow: none; 591 | } -------------------------------------------------------------------------------- /docs/_build/html/_static/basic.css: -------------------------------------------------------------------------------- 1 | /* 2 | * basic.css 3 | * ~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- basic theme. 6 | * 7 | * :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /* -- main layout ----------------------------------------------------------- */ 13 | 14 | div.clearer { 15 | clear: both; 16 | } 17 | 18 | /* -- relbar ---------------------------------------------------------------- */ 19 | 20 | div.related { 21 | width: 100%; 22 | font-size: 90%; 23 | } 24 | 25 | div.related h3 { 26 | display: none; 27 | } 28 | 29 | div.related ul { 30 | margin: 0; 31 | padding: 0 0 0 10px; 32 | list-style: none; 33 | } 34 | 35 | div.related li { 36 | display: inline; 37 | } 38 | 39 | div.related li.right { 40 | float: right; 41 | margin-right: 5px; 42 | } 43 | 44 | /* -- sidebar --------------------------------------------------------------- */ 45 | 46 | div.sphinxsidebarwrapper { 47 | padding: 10px 5px 0 10px; 48 | } 49 | 50 | div.sphinxsidebar { 51 | float: left; 52 | width: 230px; 53 | margin-left: -100%; 54 | font-size: 90%; 55 | } 56 | 57 | div.sphinxsidebar ul { 58 | list-style: none; 59 | } 60 | 61 | div.sphinxsidebar ul ul, 62 | div.sphinxsidebar ul.want-points { 63 | margin-left: 20px; 64 | list-style: square; 65 | } 66 | 67 | div.sphinxsidebar ul ul 
{ 68 | margin-top: 0; 69 | margin-bottom: 0; 70 | } 71 | 72 | div.sphinxsidebar form { 73 | margin-top: 10px; 74 | } 75 | 76 | div.sphinxsidebar input { 77 | border: 1px solid #98dbcc; 78 | font-family: sans-serif; 79 | font-size: 1em; 80 | } 81 | 82 | div.sphinxsidebar #searchbox input[type="text"] { 83 | width: 170px; 84 | } 85 | 86 | div.sphinxsidebar #searchbox input[type="submit"] { 87 | width: 30px; 88 | } 89 | 90 | img { 91 | border: 0; 92 | max-width: 100%; 93 | } 94 | 95 | /* -- search page ----------------------------------------------------------- */ 96 | 97 | ul.search { 98 | margin: 10px 0 0 20px; 99 | padding: 0; 100 | } 101 | 102 | ul.search li { 103 | padding: 5px 0 5px 20px; 104 | background-image: url(file.png); 105 | background-repeat: no-repeat; 106 | background-position: 0 7px; 107 | } 108 | 109 | ul.search li a { 110 | font-weight: bold; 111 | } 112 | 113 | ul.search li div.context { 114 | color: #888; 115 | margin: 2px 0 0 30px; 116 | text-align: left; 117 | } 118 | 119 | ul.keywordmatches li.goodmatch a { 120 | font-weight: bold; 121 | } 122 | 123 | /* -- index page ------------------------------------------------------------ */ 124 | 125 | table.contentstable { 126 | width: 90%; 127 | } 128 | 129 | table.contentstable p.biglink { 130 | line-height: 150%; 131 | } 132 | 133 | a.biglink { 134 | font-size: 1.3em; 135 | } 136 | 137 | span.linkdescr { 138 | font-style: italic; 139 | padding-top: 5px; 140 | font-size: 90%; 141 | } 142 | 143 | /* -- general index --------------------------------------------------------- */ 144 | 145 | table.indextable { 146 | width: 100%; 147 | } 148 | 149 | table.indextable td { 150 | text-align: left; 151 | vertical-align: top; 152 | } 153 | 154 | table.indextable dl, table.indextable dd { 155 | margin-top: 0; 156 | margin-bottom: 0; 157 | } 158 | 159 | table.indextable tr.pcap { 160 | height: 10px; 161 | } 162 | 163 | table.indextable tr.cap { 164 | margin-top: 10px; 165 | background-color: #f2f2f2; 166 | } 167 
| 168 | img.toggler { 169 | margin-right: 3px; 170 | margin-top: 3px; 171 | cursor: pointer; 172 | } 173 | 174 | div.modindex-jumpbox { 175 | border-top: 1px solid #ddd; 176 | border-bottom: 1px solid #ddd; 177 | margin: 1em 0 1em 0; 178 | padding: 0.4em; 179 | } 180 | 181 | div.genindex-jumpbox { 182 | border-top: 1px solid #ddd; 183 | border-bottom: 1px solid #ddd; 184 | margin: 1em 0 1em 0; 185 | padding: 0.4em; 186 | } 187 | 188 | /* -- general body styles --------------------------------------------------- */ 189 | 190 | a.headerlink { 191 | visibility: hidden; 192 | } 193 | 194 | h1:hover > a.headerlink, 195 | h2:hover > a.headerlink, 196 | h3:hover > a.headerlink, 197 | h4:hover > a.headerlink, 198 | h5:hover > a.headerlink, 199 | h6:hover > a.headerlink, 200 | dt:hover > a.headerlink, 201 | caption:hover > a.headerlink, 202 | p.caption:hover > a.headerlink, 203 | div.code-block-caption:hover > a.headerlink { 204 | visibility: visible; 205 | } 206 | 207 | div.body p.caption { 208 | text-align: inherit; 209 | } 210 | 211 | div.body td { 212 | text-align: left; 213 | } 214 | 215 | .field-list ul { 216 | padding-left: 1em; 217 | } 218 | 219 | .first { 220 | margin-top: 0 !important; 221 | } 222 | 223 | p.rubric { 224 | margin-top: 30px; 225 | font-weight: bold; 226 | } 227 | 228 | img.align-left, .figure.align-left, object.align-left { 229 | clear: left; 230 | float: left; 231 | margin-right: 1em; 232 | } 233 | 234 | img.align-right, .figure.align-right, object.align-right { 235 | clear: right; 236 | float: right; 237 | margin-left: 1em; 238 | } 239 | 240 | img.align-center, .figure.align-center, object.align-center { 241 | display: block; 242 | margin-left: auto; 243 | margin-right: auto; 244 | } 245 | 246 | .align-left { 247 | text-align: left; 248 | } 249 | 250 | .align-center { 251 | text-align: center; 252 | } 253 | 254 | .align-right { 255 | text-align: right; 256 | } 257 | 258 | /* -- sidebars 
-------------------------------------------------------------- */ 259 | 260 | div.sidebar { 261 | margin: 0 0 0.5em 1em; 262 | border: 1px solid #ddb; 263 | padding: 7px 7px 0 7px; 264 | background-color: #ffe; 265 | width: 40%; 266 | float: right; 267 | } 268 | 269 | p.sidebar-title { 270 | font-weight: bold; 271 | } 272 | 273 | /* -- topics ---------------------------------------------------------------- */ 274 | 275 | div.topic { 276 | border: 1px solid #ccc; 277 | padding: 7px 7px 0 7px; 278 | margin: 10px 0 10px 0; 279 | } 280 | 281 | p.topic-title { 282 | font-size: 1.1em; 283 | font-weight: bold; 284 | margin-top: 10px; 285 | } 286 | 287 | /* -- admonitions ----------------------------------------------------------- */ 288 | 289 | div.admonition { 290 | margin-top: 10px; 291 | margin-bottom: 10px; 292 | padding: 7px; 293 | } 294 | 295 | div.admonition dt { 296 | font-weight: bold; 297 | } 298 | 299 | div.admonition dl { 300 | margin-bottom: 0; 301 | } 302 | 303 | p.admonition-title { 304 | margin: 0px 10px 5px 0px; 305 | font-weight: bold; 306 | } 307 | 308 | div.body p.centered { 309 | text-align: center; 310 | margin-top: 25px; 311 | } 312 | 313 | /* -- tables ---------------------------------------------------------------- */ 314 | 315 | table.docutils { 316 | border: 0; 317 | border-collapse: collapse; 318 | } 319 | 320 | table caption span.caption-number { 321 | font-style: italic; 322 | } 323 | 324 | table caption span.caption-text { 325 | } 326 | 327 | table.docutils td, table.docutils th { 328 | padding: 1px 8px 1px 5px; 329 | border-top: 0; 330 | border-left: 0; 331 | border-right: 0; 332 | border-bottom: 1px solid #aaa; 333 | } 334 | 335 | table.field-list td, table.field-list th { 336 | border: 0 !important; 337 | } 338 | 339 | table.footnote td, table.footnote th { 340 | border: 0 !important; 341 | } 342 | 343 | th { 344 | text-align: left; 345 | padding-right: 5px; 346 | } 347 | 348 | table.citation { 349 | border-left: solid 1px gray; 350 | 
margin-left: 1px; 351 | } 352 | 353 | table.citation td { 354 | border-bottom: none; 355 | } 356 | 357 | /* -- figures --------------------------------------------------------------- */ 358 | 359 | div.figure { 360 | margin: 0.5em; 361 | padding: 0.5em; 362 | } 363 | 364 | div.figure p.caption { 365 | padding: 0.3em; 366 | } 367 | 368 | div.figure p.caption span.caption-number { 369 | font-style: italic; 370 | } 371 | 372 | div.figure p.caption span.caption-text { 373 | } 374 | 375 | 376 | /* -- other body styles ----------------------------------------------------- */ 377 | 378 | ol.arabic { 379 | list-style: decimal; 380 | } 381 | 382 | ol.loweralpha { 383 | list-style: lower-alpha; 384 | } 385 | 386 | ol.upperalpha { 387 | list-style: upper-alpha; 388 | } 389 | 390 | ol.lowerroman { 391 | list-style: lower-roman; 392 | } 393 | 394 | ol.upperroman { 395 | list-style: upper-roman; 396 | } 397 | 398 | dl { 399 | margin-bottom: 15px; 400 | } 401 | 402 | dd p { 403 | margin-top: 0px; 404 | } 405 | 406 | dd ul, dd table { 407 | margin-bottom: 10px; 408 | } 409 | 410 | dd { 411 | margin-top: 3px; 412 | margin-bottom: 10px; 413 | margin-left: 30px; 414 | } 415 | 416 | dt:target, .highlighted { 417 | background-color: #fbe54e; 418 | } 419 | 420 | dl.glossary dt { 421 | font-weight: bold; 422 | font-size: 1.1em; 423 | } 424 | 425 | .field-list ul { 426 | margin: 0; 427 | padding-left: 1em; 428 | } 429 | 430 | .field-list p { 431 | margin: 0; 432 | } 433 | 434 | .optional { 435 | font-size: 1.3em; 436 | } 437 | 438 | .sig-paren { 439 | font-size: larger; 440 | } 441 | 442 | .versionmodified { 443 | font-style: italic; 444 | } 445 | 446 | .system-message { 447 | background-color: #fda; 448 | padding: 5px; 449 | border: 3px solid red; 450 | } 451 | 452 | .footnote:target { 453 | background-color: #ffa; 454 | } 455 | 456 | .line-block { 457 | display: block; 458 | margin-top: 1em; 459 | margin-bottom: 1em; 460 | } 461 | 462 | .line-block .line-block { 463 | margin-top: 0; 464 
| margin-bottom: 0; 465 | margin-left: 1.5em; 466 | } 467 | 468 | .guilabel, .menuselection { 469 | font-family: sans-serif; 470 | } 471 | 472 | .accelerator { 473 | text-decoration: underline; 474 | } 475 | 476 | .classifier { 477 | font-style: oblique; 478 | } 479 | 480 | abbr, acronym { 481 | border-bottom: dotted 1px; 482 | cursor: help; 483 | } 484 | 485 | /* -- code displays --------------------------------------------------------- */ 486 | 487 | pre { 488 | overflow: auto; 489 | overflow-y: hidden; /* fixes display issues on Chrome browsers */ 490 | } 491 | 492 | td.linenos pre { 493 | padding: 5px 0px; 494 | border: 0; 495 | background-color: transparent; 496 | color: #aaa; 497 | } 498 | 499 | table.highlighttable { 500 | margin-left: 0.5em; 501 | } 502 | 503 | table.highlighttable td { 504 | padding: 0 0.5em 0 0.5em; 505 | } 506 | 507 | div.code-block-caption { 508 | padding: 2px 5px; 509 | font-size: small; 510 | } 511 | 512 | div.code-block-caption code { 513 | background-color: transparent; 514 | } 515 | 516 | div.code-block-caption + div > div.highlight > pre { 517 | margin-top: 0; 518 | } 519 | 520 | div.code-block-caption span.caption-number { 521 | padding: 0.1em 0.3em; 522 | font-style: italic; 523 | } 524 | 525 | div.code-block-caption span.caption-text { 526 | } 527 | 528 | div.literal-block-wrapper { 529 | padding: 1em 1em 0; 530 | } 531 | 532 | div.literal-block-wrapper div.highlight { 533 | margin: 0; 534 | } 535 | 536 | code.descname { 537 | background-color: transparent; 538 | font-weight: bold; 539 | font-size: 1.2em; 540 | } 541 | 542 | code.descclassname { 543 | background-color: transparent; 544 | } 545 | 546 | code.xref, a code { 547 | background-color: transparent; 548 | font-weight: bold; 549 | } 550 | 551 | h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { 552 | background-color: transparent; 553 | } 554 | 555 | .viewcode-link { 556 | float: right; 557 | } 558 | 559 | .viewcode-back { 560 | float: right; 561 | font-family: 
sans-serif; 562 | } 563 | 564 | div.viewcode-block:target { 565 | margin: -1px -10px; 566 | padding: 0 10px; 567 | } 568 | 569 | /* -- math display ---------------------------------------------------------- */ 570 | 571 | img.math { 572 | vertical-align: middle; 573 | } 574 | 575 | div.body div.math p { 576 | text-align: center; 577 | } 578 | 579 | span.eqno { 580 | float: right; 581 | } 582 | 583 | /* -- printout stylesheet --------------------------------------------------- */ 584 | 585 | @media print { 586 | div.document, 587 | div.documentwrapper, 588 | div.bodywrapper { 589 | margin: 0 !important; 590 | width: 100%; 591 | } 592 | 593 | div.sphinxsidebar, 594 | div.related, 595 | div.footer, 596 | #top-link { 597 | display: none; 598 | } 599 | } -------------------------------------------------------------------------------- /sigma_scat.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | #np.seterr(all='warn') 4 | import constants as c 5 | import dust 6 | import cmindex as cmi 7 | import scatmodels as sms 8 | from scipy.interpolate import interp1d 9 | 10 | #---------------------------------------------------------- 11 | # evals( emin=1.0, emax=2.0, de=0.1 ) : np.array [keV] 12 | # angles( thmin=5.0, thmax=100.0, dth=5.0 ) : np.array [arcsec] 13 | # 14 | 15 | def evals( emin=1.0, emax=2.0, de=0.1 ): 16 | """ 17 | FUNCTION evals( emin=1.0, emax=2.0, de=0.1 ) 18 | RETURNS : np.array 19 | Distribution of energies [keV] 20 | """ 21 | return np.arange( emin, emax+de, de ) 22 | 23 | def angles( thmin=5.0, thmax=100.0, dth=5.0 ): 24 | """ 25 | FUNCTION angles( thmin=5.0, thmax=100.0, dth=5.0 ) 26 | RETURNS : np.array 27 | Distribution of angles [arcsec] 28 | """ 29 | return np.arange( thmin, thmax+dth, dth ) 30 | 31 | #-------------- Tie scattering mechanism to an index of refraction ------------------ 32 | 33 | class Scatmodel(object): 34 | """ 35 | OBJECT Scatmodel( smodel=RGscat(), cmodel=cmi.CmDrude() 
) 36 | smodel : scattering model type object : RGscat(), Mie() 37 | cmodel : cmindex type object : CmDrude(), CmGraphite(), CmSilicate() 38 | stype : string : 'RGscat', 'Mie' 39 | cmtype : 'Drude', 'Silicate', 'Graphite' 40 | """ 41 | def __init__( self, smodel=sms.RGscat(), cmodel=cmi.CmDrude() ): 42 | self.smodel = smodel 43 | self.cmodel = cmodel 44 | self.stype = smodel.stype 45 | self.cmtype = cmodel.cmtype 46 | # cmtype choices : 'Drude' (requires rho term only) 47 | # 'Graphite' (Carbonaceous grains) 48 | # 'Silicate' (Astrosilicate) 49 | # --- Graphite and Silicate values come from Draine (2003) 50 | 51 | #-------------- Quickly make a common Scatmodel object --------------------------- 52 | 53 | def makeScatmodel( model_name, material_name ): 54 | """ 55 | FUNCTION makeScatmodel( model_name, material_name ) 56 | RETURNS Scatmodel object 57 | ---------------------------------------------------- 58 | model_name : string : 'RG' or 'Mie' 59 | material_name : string : 'Drude', 'Silicate', 'Graphite', 'SmallGraphite' 60 | """ 61 | 62 | if model_name == 'RG': 63 | sm = sms.RGscat() 64 | elif model_name == 'Mie': 65 | sm = sms.Mie() 66 | else: 67 | print 'Error: Model name not recognized' 68 | return 69 | 70 | if material_name == 'Drude': 71 | cm = cmi.CmDrude() 72 | elif material_name == 'Silicate': 73 | cm = cmi.CmSilicate() 74 | elif material_name == 'Graphite': 75 | cm = cmi.CmGraphite() 76 | elif material_name == 'SmallGraphite': # Small Graphite ~ 0.01 um 77 | cm = cmi.CmGraphite( size='small' ) 78 | else: 79 | print 'Error: CM name not recognized' 80 | return 81 | 82 | return Scatmodel( sm, cm ) 83 | 84 | 85 | #-------------- Various Types of Scattering Cross-sections ----------------------- 86 | 87 | class Diffscat(object): 88 | """ 89 | A differential scattering cross-section [cm^2 ster^-1] 90 | -------------------------------------------------------------- 91 | OBJECT Diffscat( scatm=Scatmodel(), theta=angles() [arcsec], E=1.0 [keV], a=1.0 [um] ) 92 | 
scatm : Scatmodel 93 | theta : np.array : arcsec 94 | E : scalar or np.array : Note, must match number of theta values if size > 1 95 | a : scalar : um 96 | dsig : np.array : cm^2 ster^-1 97 | """ 98 | def __init__( self, scatm=Scatmodel(), theta=angles(), E=1.0, a=1.0 ): 99 | self.scatm = scatm 100 | self.theta = theta 101 | self.E = E 102 | self.a = a 103 | 104 | cm = scatm.cmodel 105 | scat = scatm.smodel 106 | 107 | if cm.cmtype == 'Graphite': 108 | dsig_pe = scat.Diff( theta=theta, a=a, E=E, cm=cmi.CmGraphite(size=cm.size, orient='perp') ) 109 | dsig_pa = scat.Diff( theta=theta, a=a, E=E, cm=cmi.CmGraphite(size=cm.size, orient='para') ) 110 | self.dsig = ( dsig_pa + 2.0 * dsig_pe ) / 3.0 111 | else: 112 | self.dsig = scat.Diff( theta=theta, a=a, E=E, cm=cm ) 113 | 114 | class Sigmascat(object): 115 | """ 116 | Total scattering cross-section [cm^2] 117 | --------------------------------------------------------- 118 | OBJECT Sigmascat( scatm=Scatmodel(), E=1.0 [keV], a=1.0 [um] ) 119 | scatm : Scatmodel 120 | E : scalar or np.array : keV 121 | a : scalar : um 122 | qsca : scalar or np.array : unitless scattering efficiency 123 | sigma : scalar or np.array : cm^2 124 | """ 125 | def __init__( self, scatm=Scatmodel(), E=1.0, a=1.0 ): 126 | self.scatm = scatm 127 | self.E = E 128 | self.a = a 129 | 130 | cm = scatm.cmodel 131 | scat = scatm.smodel 132 | 133 | cgeo = np.pi * np.power( a*c.micron2cm(), 2 ) 134 | 135 | if cm.cmtype == 'Graphite': 136 | qsca_pe = scat.Qsca( a=a, E=E, cm=cmi.CmGraphite(size=cm.size, orient='perp') ) 137 | qsca_pa = scat.Qsca( a=a, E=E, cm=cmi.CmGraphite(size=cm.size, orient='para') ) 138 | self.qsca = ( qsca_pa + 2.0*qsca_pe ) / 3.0 139 | else: 140 | self.qsca = scat.Qsca( a=a, E=E, cm=cm ) 141 | 142 | self.sigma = self.qsca * cgeo 143 | 144 | class Sigmaext(object): 145 | """ 146 | Total EXTINCTION cross-section [cm^2] 147 | --------------------------------------------------------- 148 | OBJECT Sigmascat( scatm=Scatmodel(), E=1.0 
[keV], a=1.0 [um] ) 149 | scatm : Scatmodel 150 | E : scalar or np.array : keV 151 | a : scalar : um 152 | qext : scalar or np.array : unitless extinction efficiency 153 | sigma : scalar or np.array : cm^2 154 | """ 155 | def __init__( self, scatm=Scatmodel(), E=1.0, a=1.0 ): 156 | self.scatm = scatm 157 | self.E = E 158 | self.a = a 159 | 160 | if scatm.stype == 'RG': 161 | print 'Rayleigh-Gans cross-section not currently supported for Kappaext' 162 | self.sigma = None 163 | return 164 | 165 | cm = scatm.cmodel 166 | scat = scatm.smodel 167 | 168 | cgeo = np.pi * np.power( a*c.micron2cm(), 2 ) 169 | 170 | if cm.cmtype == 'Graphite': 171 | qext_pe = scat.Qext( a=a, E=E, cm=cmi.CmGraphite(size=cm.size, orient='perp') ) 172 | qext_pa = scat.Qext( a=a, E=E, cm=cmi.CmGraphite(size=cm.size, orient='para') ) 173 | self.qext = ( qext_pa + 2.0*qext_pe ) / 3.0 174 | else: 175 | self.qext = scat.Qext( a=a, E=E, cm=cm ) 176 | 177 | self.sigma = self.qext * cgeo 178 | 179 | class Kappascat(object): 180 | """ 181 | Opacity to scattering [g^-1 cm^2] 182 | OBJECT Kappascat( E=1.0 [keV], scatm=Scatmodel(), dist=dust.Dustspectrum() ) 183 | --------------------------------- 184 | scatm : Scatmodel 185 | E : scalar or np.array : keV 186 | dist : dust.Dustspectrum 187 | kappa : scalar or np.array : cm^2 g^-1, typically 188 | """ 189 | def __init__( self, E=1.0, scatm=Scatmodel(), dist=dust.Dustspectrum() ): 190 | self.scatm = scatm 191 | self.E = E 192 | self.dist = dist 193 | 194 | cm = scatm.cmodel 195 | scat = scatm.smodel 196 | 197 | cgeo = np.pi * np.power( dist.a * c.micron2cm(), 2 ) 198 | 199 | qsca = np.zeros( shape=( np.size(E),np.size(dist.a) ) ) 200 | qsca_pe = np.zeros( shape=( np.size(E),np.size(dist.a) ) ) 201 | qsca_pa = np.zeros( shape=( np.size(E),np.size(dist.a) ) ) 202 | 203 | # Test for graphite case 204 | if cm.cmtype == 'Graphite': 205 | cmGraphitePerp = cmi.CmGraphite(size=cm.size, orient='perp') 206 | cmGraphitePara = cmi.CmGraphite(size=cm.size, orient='para') 
207 | 208 | if np.size(dist.a) > 1: 209 | for i in range( np.size(dist.a) ): 210 | qsca_pe[:,i] = scat.Qsca( E, a=dist.a[i], cm=cmGraphitePerp ) 211 | qsca_pa[:,i] = scat.Qsca( E, a=dist.a[i], cm=cmGraphitePara ) 212 | else: 213 | qsca_pe = scat.Qsca( E, a=dist.a, cm=cmGraphitePerp ) 214 | qsca_pa = scat.Qsca( E, a=dist.a, cm=cmGraphitePara ) 215 | 216 | qsca = ( qsca_pa + 2.0 * qsca_pe ) / 3.0 217 | 218 | else: 219 | if np.size(dist.a) > 1: 220 | for i in range( np.size(dist.a) ): 221 | qsca[:,i] = scat.Qsca( E, a=dist.a[i], cm=cm ) 222 | else: 223 | qsca = scat.Qsca( E, a=dist.a, cm=cm ) 224 | 225 | if np.size(dist.a) == 1: 226 | kappa = dist.nd * qsca * cgeo / dist.md 227 | else: 228 | kappa = np.array([]) 229 | for j in range( np.size(E) ): 230 | kappa = np.append( kappa, \ 231 | c.intz( dist.a, dist.nd * qsca[j,:] * cgeo ) / dist.md ) 232 | 233 | self.kappa = kappa 234 | 235 | 236 | class Kappaext(object): 237 | """ 238 | Opacity to EXTINCTION [g^-1 cm^2] 239 | OBJECT Kappaext( E=1.0 [keV], scatm=Scatmodel(), dist=dust.Dustspectrum() ) 240 | --------------------------------- 241 | scatm : Scatmodel 242 | E : scalar or np.array : keV 243 | dist : dust.Dustspectrum 244 | kappa : scalar or np.array : cm^2 g^-1, typically 245 | """ 246 | def __init__( self, E=1.0, scatm=Scatmodel(), dist=dust.Dustspectrum() ): 247 | self.scatm = scatm 248 | self.E = E 249 | self.dist = dist 250 | 251 | if scatm.stype == 'RG': 252 | print 'Rayleigh-Gans cross-section not currently supported for Kappaext' 253 | self.kappa = None 254 | return 255 | 256 | cm = scatm.cmodel 257 | scat = scatm.smodel 258 | 259 | cgeo = np.pi * np.power( dist.a * c.micron2cm(), 2 ) 260 | 261 | qext = np.zeros( shape=( np.size(E),np.size(dist.a) ) ) 262 | qext_pe = np.zeros( shape=( np.size(E),np.size(dist.a) ) ) 263 | qext_pa = np.zeros( shape=( np.size(E),np.size(dist.a) ) ) 264 | 265 | # Test for graphite case 266 | if cm.cmtype == 'Graphite': 267 | cmGraphitePerp = cmi.CmGraphite(size=cm.size, 
orient='perp') 268 | cmGraphitePara = cmi.CmGraphite(size=cm.size, orient='para') 269 | 270 | if np.size(dist.a) > 1: 271 | for i in range( np.size(dist.a) ): 272 | qext_pe[:,i] = scat.Qext( E, a=dist.a[i], cm=cmGraphitePerp ) 273 | qext_pa[:,i] = scat.Qext( E, a=dist.a[i], cm=cmGraphitePara ) 274 | else: 275 | qext_pe = scat.Qext( E, a=dist.a, cm=cmGraphitePerp ) 276 | qext_pa = scat.Qext( E, a=dist.a, cm=cmGraphitePara ) 277 | 278 | qext = ( qext_pa + 2.0 * qext_pe ) / 3.0 279 | 280 | else: 281 | if np.size(dist.a) > 1: 282 | for i in range( np.size(dist.a) ): 283 | qext[:,i] = scat.Qext( E, a=dist.a[i], cm=cm ) 284 | else: 285 | qext = scat.Qext( E, a=dist.a, cm=cm ) 286 | 287 | if np.size(dist.a) == 1: 288 | kappa = dist.nd * qext * cgeo / dist.md 289 | else: 290 | kappa = np.array([]) 291 | for j in range( np.size(E) ): 292 | kappa = np.append( kappa, \ 293 | c.intz( dist.a, dist.nd * qext[j,:] * cgeo ) / dist.md ) 294 | 295 | self.kappa = kappa 296 | 297 | 298 | 299 | 300 | #-------------- Objects that can be used for interpolation later ----------------- 301 | 302 | class KappaSpec( object ): 303 | """ 304 | OBJECT Kappaspec( E=None, kappa=None, scatm=None, dspec=None ) 305 | E : np.array : keV 306 | scatm : Scatmodel 307 | dspec : dust.Dustspectrum 308 | kappa : scipy.interpolate.interp1d object with (E, kappa) as arguments 309 | """ 310 | def __init__(self, E=None, kappa=None, scatm=None, dspec=None ): 311 | self.E = E 312 | self.kappa = interp1d( E, kappa ) 313 | self.scatm = scatm 314 | self.dspec = dspec 315 | 316 | class SigmaSpec( object ): 317 | """ 318 | OBJECT Sigmaspec( E=None, sigma=None, scatm=None ) 319 | E : np.array : keV 320 | scatm : Scatmodel 321 | sigma : scipy.interpolate.interp1d object with (E, sigma) as arguments 322 | """ 323 | def __init__(self, E=None, sigma=None, scatm=None): 324 | self.E = E 325 | self.sigma = interp1d( E, sigma ) 326 | self.scatm = scatm 327 | 
--------------------------------------------------------------------------------