├── .github └── workflows │ └── ci.yml ├── .gitignore ├── CITATION.cff ├── LICENSE.md ├── README.md ├── config └── config.yaml ├── data └── README.md ├── environment.yaml └── workflow ├── Snakefile ├── envs ├── fermitools.yaml └── gammapy.yaml ├── report ├── caption-counts.rst ├── caption-edisp.rst ├── caption-exposure.rst ├── caption-psf.rst └── workflow.rst └── rules ├── gather.smk ├── gtbin.smk ├── gtdrm.smk ├── gtexpcube2.smk ├── gtltcube.smk ├── gtmktime.smk ├── gtpsf.smk ├── gtselect.smk ├── prepare-gp-dataset.smk ├── prepare-gp-datasets-yaml.smk ├── prepare-gp-model.smk ├── summarize-gp-datasets.smk └── summarize-gp-spectra.smk /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # GitHub Actions workflow for testing and continuous integration. 2 | # 3 | # This file performs testing using tox and tox.ini to define and configure the test environments. 4 | 5 | name: CI tests 6 | 7 | on: 8 | push: 9 | branches: 10 | - main # GitHub now defaults to 'main' as the name of the primary branch. Change this as needed. 11 | # tags: # run CI if specific tags are pushed 12 | pull_request: 13 | # branches: # only build on PRs against 'main' if you need to further limit when CI is run. 
14 | # - main 15 | 16 | jobs: 17 | # Github Actions supports ubuntu, windows, and macos virtual environments: 18 | # https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners 19 | ci_tests: 20 | name: CI tests 21 | runs-on: ubuntu-latest 22 | env: 23 | CACHE_NUMBER: 0 24 | GAMMAPY_DATA: data 25 | steps: 26 | - name: Checkout code 27 | uses: actions/checkout@v3 28 | with: 29 | fetch-depth: 0 30 | - name: Setup miniforge 31 | uses: conda-incubator/setup-miniconda@v3 32 | with: 33 | miniconda-version: latest 34 | activate-environment: snakemake-workflow-fermi-lat 35 | use-mamba: true 36 | - name: Cache conda environment 37 | uses: actions/cache@v2 38 | with: 39 | path: ${{ env.CONDA }}/envs 40 | key: 41 | conda-${{ runner.os }}--${{ runner.arch }}--${{hashFiles('environment.yaml') }}-${{ env.CACHE_NUMBER }} 42 | id: cache 43 | - name: Update environment 44 | run: 45 | conda env update -f environment.yaml 46 | if: steps.cache.outputs.cache-hit != 'true' 47 | - name: Download test files 48 | run: | 49 | cd data 50 | wget https://github.com/adonath/snakemake-workflow-fermi-lat-test-data/archive/refs/heads/main.zip 51 | unzip main.zip 52 | mv snakemake-workflow-fermi-lat-test-data-main/* . 
53 | rm -rf snakemake-workflow-fermi-lat-test-data-main main.zip 54 | - name: Run tests 55 | shell: bash -l {0} 56 | run: | 57 | snakemake -c2 -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | results/** 2 | resources/** 3 | logs/** 4 | data/** 5 | .snakemake 6 | .snakemake/** 7 | __pycache__ 8 | *.par 9 | report.html 10 | .ipynb_checkpoints -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | title: A Snakemake Workflow for Fermi-LAT data reduction for Gammapy 2 | repository-code: https://github.com/adonath/snakemake-workflow-fermi-lat 3 | version: v0.1 4 | date-released: 2023-10-17 5 | authors: 6 | - given-names: Axel 7 | family-names: Donath 8 | affiliation: Center for Astrophysics | Harvard & Smithsonian 9 | orcid: https://orcid.org/0000-0003-4568-7005 10 | cff-version: 1.2.0 11 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adonath/snakemake-workflow-fermi-lat/8b7c607af1180f9d352b7575bd5eddbfa497f44b/LICENSE.md -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Snakemake Workflow for Fermi-LAT Data Reduction 2 | 3 | This is an example snakemake workflow for data reduction of Fermi-LAT data. 4 | The workflow will run the standard `fermitools` for a given configuration 5 | and produce FITS files in a format that Gammapy can read. 6 | Thereby it will handle the reduction of counts, exposure and point 7 | spread function (PSF) for multiple PSF classes. 
If you would like to use this as a Snakemake module, you should add e.g. the following to your `Snakefile`:
datasets = Datasets.read("results/<config-name>/datasets/<config-name>-datasets-all.yaml")
models = Models.read("results/<config-name>/model/<config-name>-model.yaml")
39 | tmin: *tmin 40 | tmax: *tmax 41 | zmax: *zmax 42 | gtexpcube2: 43 | coordsys: *coordsys 44 | binsz: *binsz 45 | nxpix: *nxpix 46 | nypix: *nypix 47 | xref: *xref 48 | yref: *yref 49 | proj: *proj 50 | ebinalg: *ebinalg 51 | cmap: none 52 | emin: &emin_true 5 GeV 53 | emax: &emax_true 2 TeV 54 | enumbins: 16 55 | axisrot: *axisrot 56 | bincalc: EDGE 57 | gtpsf: 58 | thetamax: 10 59 | ntheta: 300 60 | ra: *ra 61 | dec: *dec 62 | emin: *emin_true 63 | emax: *emax_true 64 | nenergies: 17 65 | 66 | 67 | -------------------------------------------------------------------------------- /data/README.md: -------------------------------------------------------------------------------- 1 | # Raw data folder 2 | This folder should contain the raw data downloaded from https://fermi.gsfc.nasa.gov/cgi-bin/ssc/LAT/LATDataQuery.cgi -------------------------------------------------------------------------------- /environment.yaml: -------------------------------------------------------------------------------- 1 | # Conda environment for reducing fermi data 2 | name: snakemake-workflow-fermi-lat 3 | 4 | channels: 5 | - conda-forge 6 | - bioconda 7 | - fermi 8 | 9 | variables: 10 | PYTHONNOUSERSITE: "1" 11 | 12 | dependencies: 13 | - python=3.9 14 | - snakemake>=6.4.1 15 | - pydantic==1.10.13 16 | - scipy==1.11.3 17 | - astropy==5.3.4 18 | - gammapy==1.1 19 | - fermitools>=2.2.0 20 | -------------------------------------------------------------------------------- /workflow/Snakefile: -------------------------------------------------------------------------------- 1 | from snakemake.utils import min_version 2 | from astropy.coordinates import SkyCoord, Angle 3 | from astropy import units as u 4 | from astropy.time import Time 5 | from pydantic import BaseModel, validator, FilePath 6 | from enum import Enum 7 | from typing import List, Optional, Literal 8 | 9 | 10 | 11 | # set minimum snakemake version 12 | min_version("6.4.1") 13 | 14 | # setup config file 15 | configfile: 
"config/config.yaml" 16 | 17 | # TODO: this is work in progress. Add more event clases, config arguments etc. from fermitools 18 | TIME_REF_FERMI = Time("2001-01-01T00:00:00") 19 | 20 | FERMI_IRF_EVCLASS = { 21 | 128: "P8R3_SOURCE_V3", 22 | } 23 | 24 | EVENT_TYPES = [2 ** idx for idx in range(10)] 25 | EVENT_CLASSES = [2 ** idx for idx in range(3, 12)] 26 | 27 | class EventClassEnum(str, Enum): 28 | p8r3_sourceveto_v3 = "P8R3_SOURCEVETO_V3" 29 | p8r3_ultracleanveto_v3 = "P8R3_ULTRACLEANVETO_V3" 30 | p8r3_ultraclean_v3 = "P8R3_ULTRACLEAN_V3" 31 | p8r3_clean_v3 = "P8R3_CLEAN_V3" 32 | p8r3_source_v3 = "P8R3_SOURCE_V3" 33 | 34 | # P8R3_TRANSIENT010_V3 35 | # P8R3_TRANSIENT020_V3 36 | # P8R3_TRANSIENT010E_V3 37 | # P8R3_TRANSIENT020E_V3 38 | 39 | 40 | class EventTypeEnum(str, Enum): 41 | front = "front" 42 | back = "back" 43 | psf_0 = "psf0" 44 | psf_1 = "psf1" 45 | psf_2 = "psf2" 46 | psf_3 = "psf3" 47 | edisp_0 = "edisp0" 48 | edisp_1 = "edisp1" 49 | edisp_2 = "edisp2" 50 | edisp_3 = "edisp3" 51 | 52 | 53 | EVENT_TYPE_TO_INT = {key.value: value for key, value in zip(EventTypeEnum, EVENT_TYPES)} 54 | EVENT_CLASS_TO_INT = {key.value: value for key, value in zip(EventClassEnum, EVENT_CLASSES[::-1])} 55 | 56 | 57 | class BoolEnum(str, Enum): 58 | yes = "yes" 59 | no = "no" 60 | 61 | 62 | class CoordsysEnum(str, Enum): 63 | gal = "GAL" 64 | cel = "CEL" 65 | 66 | 67 | class EbinalgEnum(str, Enum): 68 | lin = "LIN" 69 | log = "LOG" 70 | 71 | 72 | class ProjEnum(str, Enum): 73 | ait = "AIT" 74 | car = "CAR" 75 | tab = "TAN" 76 | 77 | 78 | class AlgorithmEnum(str, Enum): 79 | ccube = "CCUBE" 80 | 81 | class BinCalcEnum(str, Enum): 82 | center = "CENTER" 83 | edges = "EDGE" 84 | 85 | 86 | class BaseConfig(BaseModel): 87 | """Gt base config""" 88 | mode: str = "h" 89 | chatter: int = 2 90 | clobber: Literal["yes", "no"] = "yes" 91 | debug: Literal["no", "yes"] = "no" 92 | 93 | class Config: 94 | validate_all = True 95 | validate_assignment = True 96 | extra = "forbid" 97 | 98 | 
def to_cmd_args(self): 99 | """To cmd args""" 100 | data = self.dict() 101 | data.pop("evtype", None) 102 | return " ".join([f"{key}={value}" for key, value in data.items()]) 103 | 104 | 105 | class GtSelectConfig(BaseConfig): 106 | ra: float = 0 107 | dec: float = 0 108 | rad: float = 3 109 | tmin: float = 239557417.0 110 | tmax: float = 697768627.0 111 | emin: float = 1000.0 112 | emax: float = 500000.0 113 | zmax: float = 105.0 114 | evclass: int = 128 115 | evtype: int = 2 116 | 117 | @validator("ra", "dec", "zmax", "rad", pre=True) 118 | def validate_angle(cls, value): 119 | """Validate angle""" 120 | return Angle(value, "deg").deg 121 | 122 | @validator("tmin", "tmax", pre=True) 123 | def validate_time(cls, value): 124 | """Validate time""" 125 | if isinstance(value, float): 126 | return value 127 | 128 | return (Time(value) - TIME_REF_FERMI).to_value("s") 129 | 130 | @validator("emin", "emax", pre=True) 131 | def validate_energy(cls, value): 132 | """Validate energy""" 133 | return u.Quantity(value, "MeV").to_value("MeV") 134 | 135 | @validator("evclass") 136 | def validate_event_class(cls, value): 137 | """Validate event class""" 138 | if not value in EVENT_CLASSES: 139 | raise ValueError(f"Not a valid event class {value}, choose from {EVENT_CLASSES}") 140 | 141 | return value 142 | 143 | @validator("evtype") 144 | def validate_event_type(cls, value): 145 | """Validate event type""" 146 | if not value in EVENT_TYPES: 147 | raise ValueError(f"Not a valid event type {value}, choose from {EVENT_TYPES}") 148 | 149 | return value 150 | 151 | @property 152 | def center_skydir(self): 153 | """Center sky dir""" 154 | return SkyCoord(self.ra, self.dec, unit="deg", frame="icrs") 155 | 156 | @property 157 | def width(self): 158 | """Width""" 159 | return Angle(self.rad, "deg") 160 | 161 | 162 | class GtMkTimeConfig(BaseConfig): 163 | filter: str = "(DATA_QUAL>0)&&(LAT_CONFIG==1)" 164 | roicut: BoolEnum = "yes" 165 | 166 | 167 | class GtBinConfig(BaseConfig): 168 | 
algorithm: AlgorithmEnum = "CCUBE" 169 | nxpix: int = 200 170 | nypix: int = 200 171 | xref: float = 0 172 | yref: float = 0 173 | proj: ProjEnum = "CAR" 174 | binsz: float = 0.02 175 | coordsys: CoordsysEnum = "GAL" 176 | ebinalg: EbinalgEnum = "LOG" 177 | enumbins: int = 1 178 | axisrot: float = 0 179 | emin: float = 1000.0 180 | emax: float = 500000.0 181 | 182 | class Config: 183 | use_enum_values = True 184 | 185 | @validator("emin", "emax", pre=True) 186 | def validate_energy(cls, value): 187 | """Validate energy""" 188 | return u.Quantity(value, "MeV").to_value("MeV") 189 | 190 | @validator("binsz", "axisrot", pre=True) 191 | def validate_angle(cls, value): 192 | """Validate angle""" 193 | return Angle(value, "deg").deg 194 | 195 | 196 | class GtLtCubeConfig(BaseConfig): 197 | dcostheta: float = 0.025 198 | binsz: float = 1. 199 | tmin: float = 239557417.0 200 | tmax: float = 697768627.0 201 | zmax: float = 105.0 202 | 203 | @validator("zmax", "binsz", pre=True) 204 | def validate_angle(cls, value): 205 | """Validate angle""" 206 | return Angle(value, "deg").deg 207 | 208 | @validator("tmin", "tmax", pre=True) 209 | def validate_time(cls, value): 210 | """Validate time""" 211 | if isinstance(value, float): 212 | return value 213 | 214 | return (Time(value) - TIME_REF_FERMI).to_value("s") 215 | 216 | 217 | class GtExpCube2Config(BaseConfig): 218 | coordsys: CoordsysEnum = "GAL" 219 | irfs: EventClassEnum = "P8R3_SOURCE_V3" 220 | nxpix: int = 200 221 | nypix: int = 200 222 | xref: float = 266.25888553 223 | yref: float = -1.21964555 224 | proj: ProjEnum = "CAR" 225 | binsz: float = 0.02 226 | ebinalg: EbinalgEnum = "LOG" 227 | cmap: str = "none" 228 | emin: float = 1000.0 229 | emax: float = 500000.0 230 | enumbins: int = 17 231 | axisrot: float = 0 232 | bincalc: BinCalcEnum = "EDGE" 233 | 234 | @validator("emin", "emax", pre=True) 235 | def validate_energy(cls, value): 236 | """Validate energy""" 237 | return u.Quantity(value, "MeV").to_value("MeV") 238 | 
class SnakeMakeFermiLATConfig(BaseConfig):
    """Top-level workflow configuration, mirroring config/config.yaml.

    Holds the data/result paths, the list of event types to reduce and the
    per-tool fermitools sub-configurations.
    """
    name: str = "my-config"
    # Spacecraft (SC) file; required, no default.
    scfile: str
    path_data: str = "data"
    path_results: str = "results"
    gal_diffuse: str = "gll_iem_v07.fits"
    catalog: str = "3fhl"
    # BUG FIX: default was ["FRONT", "BACK"], which fails validation against
    # the lowercase EventTypeEnum values ("front", "back") because BaseConfig
    # sets validate_all = True.
    event_types: List[EventTypeEnum] = ["front", "back"]
    # Optional path to a precomputed livetime cube; if set, gtltcube is skipped.
    ltcube: Optional[str] = None
    # BUG FIX: was `fermitools = FermiToolsConfig = FermiToolsConfig()`, which
    # rebinds the FermiToolsConfig class name and declares an untyped field.
    # Declare it as a properly annotated pydantic field instead.
    fermitools: FermiToolsConfig = FermiToolsConfig()
302 | 303 | gtselect = config_obj.fermitools.gtselect 304 | gtmktime = config_obj.fermitools.gtmktime 305 | gtbin = config_obj.fermitools.gtbin 306 | gtltcube = config_obj.fermitools.gtltcube 307 | gtexpcube2 = config_obj.fermitools.gtexpcube2 308 | gtpsf = config_obj.fermitools.gtpsf 309 | gtdrm = config_obj.fermitools.gtdrm 310 | 311 | report: "report/workflow.rst" 312 | 313 | # load rules 314 | include: "rules/gather.smk" 315 | include: "rules/gtselect.smk" 316 | include: "rules/gtmktime.smk" 317 | include: "rules/gtbin.smk" 318 | include: "rules/gtexpcube2.smk" 319 | include: "rules/gtltcube.smk" 320 | include: "rules/gtpsf.smk" 321 | include: "rules/gtdrm.smk" 322 | include: "rules/prepare-gp-dataset.smk" 323 | include: "rules/prepare-gp-datasets-yaml.smk" 324 | include: "rules/prepare-gp-model.smk" 325 | include: "rules/summarize-gp-datasets.smk" 326 | include: "rules/summarize-gp-spectra.smk" 327 | 328 | # all rule 329 | rule all: 330 | input: 331 | expand("{path_results}/{config_name}/datasets/{config_name}-{event_type}-dataset.fits", path_results=config_obj.path_results, config_name=config_obj.name, event_type=config_obj.event_types), 332 | expand("{path_results}/{config_name}/datasets/{config_name}-datasets-all.yaml", path_results=config_obj.path_results, config_name=config_obj.name), 333 | expand("{path_results}/{config_name}/model/{config_name}-model.yaml", path_results=config_obj.path_results, config_name=config_obj.name), 334 | expand("{path_results}/{config_name}/summary/images/{event_type}/{config_name}-{event_type}-counts-image.png", path_results=config_obj.path_results, config_name=config_obj.name, event_type=config_obj.event_types), 335 | expand("{path_results}/{config_name}/summary/images/{config_name}-counts-spectra.png", path_results=config_obj.path_results, config_name=config_obj.name), -------------------------------------------------------------------------------- /workflow/envs/fermitools.yaml: 
# Conda environment for the Gammapy-based post-processing rules
name: gammapy-v1.0

channels:
  - conda-forge

variables:
  PYTHONNOUSERSITE: "1"

dependencies:
  - python=3.9
  # BUG FIX: "gammapy-v1.0" is not a valid conda match specification (there is
  # no package of that name); the package is "gammapy", pinned to version 1.0.
  - gammapy=1.0
# Compute the detector response matrix (energy dispersion) per event type.
rule gtdrm:
    input:
        cmap="{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-counts.fits",
        expcube="{path_results}/{config_name}/{config_name}-ltcube.fits",
        bexpmap="{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-exposure.fits",
    output:
        "{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-edisp.fits"
    log:
        "{path_results}/{config_name}/logs/{event_type}/gtdrm.log"
    run:
        args = gtdrm.to_cmd_args()
        # BUG FIX: evtype was computed here but never passed on the command
        # line, so the DRM was not computed per event type (the config's
        # evtype is popped by to_cmd_args, matching gtselect/gtpsf which do
        # pass evtype explicitly).
        evtype = EVENT_TYPE_TO_INT[wildcards.event_type]
        shell("gtdrm cmap={input.cmap} outfile={output} expcube={input.expcube} bexpmap={input.bexpmap} evtype={evtype} " + args)
# Compute the binned exposure cube from the livetime cube, per event type.
rule gtexpcube2:
    input:
        "{path_results}/{config_name}/{config_name}-ltcube.fits"
    output:
        "{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-exposure.fits"
    log:
        "{path_results}/{config_name}/logs/{event_type}/gtexpcube2.log"
    run:
        args = gtexpcube2.to_cmd_args()
        # BUG FIX: the output is per event type, but evtype was never passed,
        # so every event type would get an identical exposure cube. Pass the
        # wildcard's event type like the gtselect/gtpsf rules do.
        evtype = EVENT_TYPE_TO_INT[wildcards.event_type]
        shell("gtexpcube2 infile={input} outfile={output} evtype={evtype} " + args)


# Compute (or reuse) the livetime cube for the whole observation.
rule gtltcube:
    input:
        expand("{path_results}/{config_name}/events/filtered/{config_name}-{event_type}-events-selected-filtered.fits", path_results=config["path_results"], config_name=config["name"], event_type=config_obj.event_types)
    output:
        "{path_results}/{config_name}/{config_name}-ltcube.fits"
    log:
        "{path_results}/{config_name}/logs/gtltcube.log"
    run:
        if config_obj.ltcube is not None:
            # BUG FIX: the precomputed ltcube was copied onto {input},
            # clobbering the filtered event files; copy it to {output}.
            shell("cp {config[ltcube]} {output}")
        else:
            args = gtltcube.to_cmd_args()
            shell("gtltcube evfile={input[0]} outfile={output} scfile={config[scfile]} " + args)
12 | #shell("cp {input} {output}") -------------------------------------------------------------------------------- /workflow/rules/gtpsf.smk: -------------------------------------------------------------------------------- 1 | rule gtpsf: 2 | input: 3 | "{path_results}/{config_name}/{config_name}-ltcube.fits" 4 | output: 5 | "{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-psf.fits" 6 | log: 7 | "{path_results}/{config_name}/logs/{event_type}/gtpsf.log" 8 | run: 9 | args = gtpsf.to_cmd_args() 10 | evtype = EVENT_TYPE_TO_INT[wildcards.event_type] 11 | shell("gtpsf expcube={input} outfile={output} evtype={evtype} " + args) 12 | -------------------------------------------------------------------------------- /workflow/rules/gtselect.smk: -------------------------------------------------------------------------------- 1 | rule gtselect: 2 | input: 3 | "{path_results}/events.txt" 4 | output: 5 | "{path_results}/{config_name}/events/{config_name}-{event_type}-events-selected.fits" 6 | log: 7 | "{path_results}/{config_name}/logs/{event_type}/gtselect.log" 8 | run: 9 | args = gtselect.to_cmd_args() 10 | evtype = EVENT_TYPE_TO_INT[wildcards.event_type] 11 | shell("gtselect infile={input} outfile={output} evtype={evtype} " + args) -------------------------------------------------------------------------------- /workflow/rules/prepare-gp-dataset.smk: -------------------------------------------------------------------------------- 1 | rule prepare_gp_dataset: 2 | input: 3 | "{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-counts.fits", 4 | "{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-exposure.fits", 5 | "{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-psf.fits", 6 | "{path_results}/{config_name}/maps/{event_type}/{config_name}-{event_type}-edisp.fits", 7 | output: 8 | "{path_results}/{config_name}/datasets/{config_name}-{event_type}-dataset.fits", 9 | log: 10 | 
"{path_results}/{config_name}/logs/{event_type}/prepare-gp-dataset.log" 11 | run: 12 | from gammapy.maps import Map, RegionGeom, MapAxis 13 | from gammapy.datasets import MapDataset 14 | from gammapy.irf import PSFMap, EDispKernelMap, EDispKernel 15 | from gammapy.irf.edisp.map import get_overlap_fraction 16 | import numpy as np 17 | 18 | from astropy.table import Table 19 | from regions import PointSkyRegion 20 | 21 | EDispKernel.default_unit = u.Unit("") 22 | 23 | SLICE_LOOKUP = { 24 | 0 : slice(None), 25 | 2 : slice(1, -1), 26 | } 27 | 28 | def read_drm_table(filename): 29 | table_drm = Table.read(filename, hdu="DRM") 30 | table_drm["ENERG_LO"].unit = "MeV" 31 | table_drm["ENERG_HI"].unit = "MeV" 32 | return table_drm 33 | 34 | 35 | def create_edisp_kernel(table_drm, energy_true_exposure): 36 | energy_true_drm = MapAxis.from_table(table_drm, format="ogip-arf") 37 | 38 | redistribute = get_overlap_fraction(energy_true_drm, energy_true_exposure) 39 | matrix_drm = np.stack(table_drm["MATRIX"].data) 40 | 41 | diff = energy_true_drm.nbin - table_drm.meta["DETCHANS"] 42 | energy_axis = energy_true_drm.slice(SLICE_LOOKUP[diff]).copy(name="energy") 43 | return EDispKernel( 44 | axes=[energy_true_exposure, energy_axis], 45 | data=redistribute @ matrix_drm, 46 | ) 47 | 48 | 49 | 50 | counts = Map.read(input[0]) 51 | counts.data = counts.data.astype("float32") 52 | 53 | # for some reason the WCS definitions are not aligned... 
54 | exposure = Map.read(input[1]) 55 | exposure.geom._wcs = counts.geom.wcs 56 | 57 | psf = PSFMap.read(input[2], format="gtpsf") 58 | 59 | # Add missing PSF meta data, see https://github.com/fermi-lat/Likelihood/issues/117 60 | center = SkyCoord(config_obj.fermitools.gtpsf.ra, config_obj.fermitools.gtpsf.dec, unit="deg") 61 | point_region = PointSkyRegion(center) 62 | geom = RegionGeom.from_regions(point_region) 63 | 64 | geom_psf = geom.to_cube(psf.psf_map.geom.axes) 65 | 66 | psf.psf_map._geom = geom_psf 67 | psf.exposure_map._geom = geom_psf.squash("rad") 68 | psf.exposure_map = psf.exposure_map.to_unit("m2 s") 69 | 70 | energy_axis_true = exposure.geom.axes["energy_true"] 71 | energy_axis = counts.geom.axes["energy"] 72 | 73 | table_drm = read_drm_table(input[3]) 74 | edisp_kernel = create_edisp_kernel(table_drm, energy_axis_true) 75 | 76 | edisp = EDispKernelMap.from_edisp_kernel(edisp_kernel, geom=geom) 77 | edisp.exposure_map = psf.exposure_map.rename_axes(["rad"], ["energy"]) 78 | 79 | mask_safe = counts.geom.boundary_mask(width="0.2 deg") 80 | 81 | row = {"TELESCOP": "Fermi-LAT"} 82 | meta_table = Table([row]) 83 | 84 | dataset = MapDataset( 85 | name=f"{wildcards.config_name}-{wildcards.event_type}", 86 | counts=counts, 87 | exposure=exposure, 88 | psf=psf, 89 | edisp=edisp, 90 | mask_safe=mask_safe, 91 | meta_table=meta_table, 92 | ) 93 | 94 | dataset.write(output[0]) 95 | -------------------------------------------------------------------------------- /workflow/rules/prepare-gp-datasets-yaml.smk: -------------------------------------------------------------------------------- 1 | rule prepare_gp_datasets_yaml: 2 | input: 3 | expand("{path_results}/{config_name}/datasets/{config_name}-{event_type}-dataset.fits", path_results=config["path_results"], config_name=config["name"], event_type=config_obj.event_types) 4 | output: 5 | "{path_results}/{config_name}/datasets/{config_name}-datasets-all.yaml" 6 | log: 7 | 
"{path_results}/{config_name}/logs/prepare-gp-datasets-yaml.log" 8 | run: 9 | 10 | from gammapy.utils.scripts import write_yaml 11 | from pathlib import Path 12 | from astropy.io import fits 13 | 14 | data = {} 15 | data["datasets"] = [] 16 | 17 | for filename in input: 18 | filename = Path(filename) 19 | header = fits.getheader(filename) 20 | entry = { 21 | "type": "MapDataset", 22 | "name": header["NAME"], 23 | "filename": filename.name, 24 | } 25 | data["datasets"].append(entry) 26 | 27 | write_yaml(data, filename=output[0]) -------------------------------------------------------------------------------- /workflow/rules/prepare-gp-model.smk: -------------------------------------------------------------------------------- 1 | rule prepare_gp_model: 2 | output: 3 | "{path_results}/{config_name}/model/{config_name}-model.yaml", 4 | "{path_results}/{config_name}/model/{config_name}-galactic-diffuse-model.fits", 5 | log: 6 | "{path_results}/{config_name}/logs/prepare-gp-model.log" 7 | run: 8 | 9 | from gammapy.maps import Map 10 | from gammapy.modeling.models import ( 11 | PowerLawNormSpectralModel, 12 | SkyModel, 13 | TemplateSpatialModel, 14 | create_fermi_isotropic_diffuse_model, 15 | Models, 16 | ) 17 | from gammapy.utils.scripts import make_path 18 | from gammapy.catalog import CATALOG_REGISTRY 19 | from astropy import units as u 20 | 21 | cutout_margin = 2 * u.deg 22 | 23 | path = make_path("$FERMI_DIR/refdata/fermi/galdiffuse") 24 | model = Map.read(path / config["gal_diffuse"]) 25 | 26 | cutout = model.cutout(gtselect.center_skydir, width=gtselect.width + cutout_margin) 27 | 28 | axis = cutout.geom.axes["energy_true"] 29 | idx_min, idx_max = axis.coord_to_idx([gtexpcube2.emin, gtexpcube2.emax] * u.MeV) 30 | cuotut = cutout.slice_by_idx({"energy": slice(idx_min, idx_max)}) 31 | cutout.write(output[1]) 32 | 33 | models = Models() 34 | 35 | for idx, event_type in enumerate(config_obj.event_types): 36 | filename = path / 
f"iso_{gtexpcube2.irfs}_{event_type.upper()}_v1.txt"
37 | 
38 |             diffuse_iso = create_fermi_isotropic_diffuse_model(
39 |                 filename=filename, interp_kwargs={"fill_value": None},
40 |             )
41 |             diffuse_iso._name = f"diffuse-iso-{event_type}"
42 |             diffuse_iso.datasets_names = [f"{config_obj.name}-{event_type}"]
43 |             models.append(diffuse_iso)
44 | 
45 | 
46 |         template_diffuse = TemplateSpatialModel.read(
47 |             filename=output[1], normalize=False
48 |         )
49 | 
50 |         diffuse_iem = SkyModel(
51 |             spectral_model=PowerLawNormSpectralModel(),
52 |             spatial_model=template_diffuse,
53 |             name="diffuse-iem",
54 |         )
55 |         models.append(diffuse_iem)
56 | 
57 |         catalog = CATALOG_REGISTRY.get_cls(config["catalog"])()
58 | 
59 |         geom_image = cutout.geom.to_image()
60 |         width = (cutout_margin / geom_image.pixel_scales).to_value("")
61 |         geom_image_pad = geom_image.pad(pad_width=width, axis_name=None)
62 |         selection = geom_image_pad.contains(catalog.positions)  # fixed: the padded geometry was computed but unused, so sources inside the margin were dropped — TODO confirm intended selection region
63 | 
64 |         for source in catalog[selection]:
65 |             models.append(source.sky_model())
66 | 
67 |         models.write(output[0], write_covariance=False)
68 | 
69 | 
--------------------------------------------------------------------------------
/workflow/rules/summarize-gp-datasets.smk:
--------------------------------------------------------------------------------
 1 | rule summarize_gp_dataset:
 2 |     input:
 3 |         "{path_results}/{config_name}/datasets/{config_name}-{event_type}-dataset.fits"
 4 |     output:
 5 |         report("{path_results}/{config_name}/summary/images/{event_type}/{config_name}-{event_type}-counts-image.png", caption="../report/caption-counts.rst", category="Counts", subcategory="{event_type}"),
 6 |         report("{path_results}/{config_name}/summary/images/{event_type}/{config_name}-{event_type}-counts-grid.png", caption="../report/caption-counts.rst", category="Counts", subcategory="{event_type}"),
 7 | 
 8 |         report("{path_results}/{config_name}/summary/images/{event_type}/{config_name}-{event_type}-exposure-image.png", caption="../report/caption-exposure.rst", category="Exposure", subcategory="{event_type}"),
 9 |         report("{path_results}/{config_name}/summary/images/{event_type}/{config_name}-{event_type}-exposure-grid.png", caption="../report/caption-exposure.rst", category="Exposure", subcategory="{event_type}"),
10 | 
11 |         report("{path_results}/{config_name}/summary/images/{event_type}/{config_name}-{event_type}-edisp-matrix.png", caption="../report/caption-edisp.rst", category="EDisp", subcategory="{event_type}"),
12 |     log:
13 |         "{path_results}/{config_name}/logs/{event_type}/prepare-gp-dataset.log"
14 |     run:
15 | 
16 |         import matplotlib
17 |         matplotlib.use('agg')
18 | 
19 |         import matplotlib.pyplot as plt
20 | 
21 |         from gammapy.datasets import MapDataset
22 |         import numpy as np
23 | 
24 |         dpi = 150
25 | 
26 |         dataset = MapDataset.read(input[0])
27 | 
28 |         fig = plt.figure()
29 |         dataset.counts.sum_over_axes().plot()
30 |         plt.savefig(output[0], dpi=dpi)
31 | 
32 |         dataset.counts.plot_grid()
33 |         plt.savefig(output[1], dpi=dpi)
34 | 
35 |         fig = plt.figure(figsize=(8, 5))
36 |         dataset.exposure.sum_over_axes().plot()
37 |         plt.savefig(output[2], dpi=dpi)
38 | 
39 |         dataset.exposure.plot_grid()
40 |         plt.savefig(output[3], dpi=dpi)
41 | 
42 |         fig = plt.figure(figsize=(8, 5))
43 |         kernel = dataset.edisp.get_edisp_kernel()
44 |         kernel.plot_matrix()
45 |         plt.savefig(output[4], dpi=dpi)
46 | 
47 | 
--------------------------------------------------------------------------------
/workflow/rules/summarize-gp-spectra.smk:
--------------------------------------------------------------------------------
 1 | rule summarize_gp_spectra:
 2 |     input:
 3 |         "{path_results}/{config_name}/datasets/{config_name}-datasets-all.yaml"
 4 |     output:
 5 |         report("{path_results}/{config_name}/summary/images/{config_name}-counts-spectra.png", caption="../report/caption-counts.rst", category="Counts"),
 6 |         report("{path_results}/{config_name}/summary/images/{config_name}-exposure-spectra.png", caption="../report/caption-counts.rst", category="Exposure"),
 7 | 
report("{path_results}/{config_name}/summary/images/{config_name}-psf-spectra.png", caption="../report/caption-counts.rst", category="PSF"),
 8 |     log:
 9 |         "{path_results}/{config_name}/logs/summarize-gp-spectra.log"
10 |     run:
11 |         import matplotlib
12 |         matplotlib.use('agg')
13 | 
14 |         import matplotlib.pyplot as plt
15 |         from gammapy.datasets import Datasets
16 |         import numpy as np
17 | 
18 |         dpi = 150
19 |         datasets = Datasets.read(input[0])
20 | 
21 |         fig = plt.figure(figsize=(8, 5))
22 | 
23 |         for dataset in datasets:
24 |             spectrum = dataset.counts.to_region_nd_map(func=np.sum)
25 |             spectrum.plot(label=dataset.name)
26 | 
27 |         plt.legend()
28 |         plt.savefig(output[0], dpi=dpi)
29 | 
30 |         fig = plt.figure(figsize=(8, 5))
31 | 
32 |         for dataset in datasets:
33 |             spectrum = dataset.exposure.to_region_nd_map(func=np.mean)
34 |             spectrum.plot(label=dataset.name)
35 | 
36 |         plt.legend()
37 |         plt.savefig(output[1], dpi=dpi)
38 | 
39 |         fig = plt.figure(figsize=(8, 5))
40 | 
41 |         for dataset in datasets:
42 |             dataset.psf.plot_containment_radius_vs_energy()  # fixed: return value was bound to an unused, misleadingly named "spectrum" variable
43 | 
44 |         plt.legend()  # NOTE(review): no labels are set in this loop, so the legend may be empty — confirm intent
45 |         plt.savefig(output[2], dpi=dpi)
--------------------------------------------------------------------------------