├── R
├── imports.R
├── data.R
├── reexports.R
├── model-evaluation.R
└── plotting.R
├── data
├── dt_group_codes.rda
└── stochastic_model_results.rda
├── manuscript
├── logo-ccby.pdf
├── logo-mdpi.pdf
├── figures
│ ├── gru.png
│ ├── nn2.png
│ ├── nn3.png
│ └── feedforward.png
├── logo-orcid.pdf
├── logo-updates.pdf
├── mybibfile.bib
├── mdpi.bst
├── manuscript.Rmd
├── journalnames.tex
└── mdpi.cls
├── NEWS.md
├── man
├── figures
│ ├── README-unnamed-chunk-2-1.png
│ ├── README-unnamed-chunk-3-1.png
│ ├── README-unnamed-chunk-5-1.png
│ └── README-unnamed-chunk-6-1.png
├── pipe.Rd
├── multi-assign.Rd
├── dt_compute_metrics.Rd
├── dt_group_codes.Rd
├── dt_tabulate_metrics.Rd
├── stochastic_model_results.Rd
└── dt_plot_predictions.Rd
├── .Rbuildignore
├── .gitignore
├── NAMESPACE
├── analysis
├── automl_results.csv
├── automl.R
├── model.R
├── main.R
├── data-prep.R
├── analysis-utils.R
└── automl-utils.R
├── deeptriangle.Rproj
├── data-raw
├── group-codes.R
└── stochastic-model-results.R
├── DESCRIPTION
├── .travis.yml
├── README.md
├── README.Rmd
└── LICENSE.md
/R/imports.R:
--------------------------------------------------------------------------------
1 | #' @import ggplot2 dplyr
2 | #' @importFrom rlang .data
3 | NULL
4 |
--------------------------------------------------------------------------------
/data/dt_group_codes.rda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/data/dt_group_codes.rda
--------------------------------------------------------------------------------
/manuscript/logo-ccby.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/manuscript/logo-ccby.pdf
--------------------------------------------------------------------------------
/manuscript/logo-mdpi.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/manuscript/logo-mdpi.pdf
--------------------------------------------------------------------------------
/manuscript/figures/gru.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/manuscript/figures/gru.png
--------------------------------------------------------------------------------
/manuscript/figures/nn2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/manuscript/figures/nn2.png
--------------------------------------------------------------------------------
/manuscript/figures/nn3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/manuscript/figures/nn3.png
--------------------------------------------------------------------------------
/manuscript/logo-orcid.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/manuscript/logo-orcid.pdf
--------------------------------------------------------------------------------
/manuscript/logo-updates.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/manuscript/logo-updates.pdf
--------------------------------------------------------------------------------
/NEWS.md:
--------------------------------------------------------------------------------
1 | # deeptriangle 0.2.0
2 |
3 | * Switched to a sequence-to-sequence architecture and refactored helper functions.
4 |
--------------------------------------------------------------------------------
/data/stochastic_model_results.rda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/data/stochastic_model_results.rda
--------------------------------------------------------------------------------
/manuscript/figures/feedforward.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/manuscript/figures/feedforward.png
--------------------------------------------------------------------------------
/man/figures/README-unnamed-chunk-2-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/man/figures/README-unnamed-chunk-2-1.png
--------------------------------------------------------------------------------
/man/figures/README-unnamed-chunk-3-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/man/figures/README-unnamed-chunk-3-1.png
--------------------------------------------------------------------------------
/man/figures/README-unnamed-chunk-5-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/man/figures/README-unnamed-chunk-5-1.png
--------------------------------------------------------------------------------
/man/figures/README-unnamed-chunk-6-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kasaai/deeptriangle/HEAD/man/figures/README-unnamed-chunk-6-1.png
--------------------------------------------------------------------------------
/.Rbuildignore:
--------------------------------------------------------------------------------
1 | ^.*\.Rproj$
2 | ^\.Rproj\.user$
3 | ^data-raw$
4 | ^README\.Rmd$
5 | ^LICENSE\.md$
6 | ^internal$
7 | ^README_cache$
8 | ^\.travis\.yml$
9 | ^analysis$
10 | ^output$
11 | ^manuscript$
12 | ^datasets$
13 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .Rproj.user
2 | .Rhistory
3 | .RData
4 | .Ruserdata
5 | .DS_Store
6 | internal
7 | output
8 | README_cache
9 | manuscript/manuscript_files
10 | manuscript/manuscript.pdf
11 | manuscript/manuscript.tex
12 | manuscript/dist
13 | datasets
14 |
--------------------------------------------------------------------------------
/R/data.R:
--------------------------------------------------------------------------------
 1 | #' Group codes
 2 | #'
 3 | #' Data frame of group codes taken from Meyers (2015).
 4 | "dt_group_codes"
 5 |
 6 | #' Stochastic model results
 7 | #'
 8 | #' Data frame of model results taken from Meyers (2015).
 9 | "stochastic_model_results"
10 |
--------------------------------------------------------------------------------
/NAMESPACE:
--------------------------------------------------------------------------------
1 | # Generated by roxygen2: do not edit by hand
2 |
3 | export("%<-%")
4 | export("%>%")
5 | export(dt_compute_metrics)
6 | export(dt_plot_predictions)
7 | export(dt_tabulate_metrics)
8 | import(dplyr)
9 | import(ggplot2)
10 | import(magrittr)
11 | import(zeallot)
12 | importFrom(rlang,.data)
13 |
--------------------------------------------------------------------------------
/man/pipe.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/reexports.R
3 | \name{\%>\%}
4 | \alias{\%>\%}
5 | \title{Pipe operator}
6 | \usage{
7 | lhs \%>\% rhs
8 | }
9 | \description{
10 | See \code{\link[magrittr]{\%>\%}} for more details.
11 | }
12 | \keyword{internal}
13 |
--------------------------------------------------------------------------------
/analysis/automl_results.csv:
--------------------------------------------------------------------------------
1 | lob,mape,rmspe,model
2 | commercial_auto,0.06834292938525398,0.09552811030426292,AutoML
3 | other_liability,0.14247242107549246,0.1805281728480518,AutoML
4 | private_passenger_auto,0.03617553188315723,0.0590698647334635,AutoML
5 | workers_compensation,0.06695041717464804,0.09889612669667673,AutoML
6 |
--------------------------------------------------------------------------------
/man/multi-assign.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/reexports.R
3 | \name{\%<-\%}
4 | \alias{\%<-\%}
5 | \title{Assign values to names}
6 | \usage{
7 | x \%<-\% value
8 | }
9 | \description{
10 | See \code{\link[zeallot]{\%<-\%}} for more details.
11 | }
12 | \keyword{internal}
13 |
--------------------------------------------------------------------------------
/man/dt_compute_metrics.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model-evaluation.R
3 | \name{dt_compute_metrics}
4 | \alias{dt_compute_metrics}
5 | \title{Compute performance metrics}
6 | \usage{
7 | dt_compute_metrics(predictions)
8 | }
9 | \arguments{
10 | \item{predictions}{Predictions data frame.}
11 | }
12 | \description{
13 | Compute performance metrics
14 | }
15 |
--------------------------------------------------------------------------------
/man/dt_group_codes.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/data.R
3 | \docType{data}
4 | \name{dt_group_codes}
5 | \alias{dt_group_codes}
6 | \title{Group codes}
7 | \format{An object of class \code{tbl_df} (inherits from \code{tbl}, \code{data.frame}) with 200 rows and 2 columns.}
8 | \usage{
9 | dt_group_codes
10 | }
11 | \description{
12 | Data frame of group codes taken from Meyers (2015).
13 | }
14 | \keyword{datasets}
15 |
--------------------------------------------------------------------------------
/deeptriangle.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: Default
4 | SaveWorkspace: Default
5 | AlwaysSaveHistory: Default
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
15 | AutoAppendNewline: Yes
16 | StripTrailingWhitespace: Yes
17 |
18 | BuildType: Package
19 | PackageUseDevtools: Yes
20 | PackageInstallArgs: --no-multiarch --with-keep.source
21 | PackageRoxygenize: rd,collate,namespace
22 |
--------------------------------------------------------------------------------
/R/reexports.R:
--------------------------------------------------------------------------------
 1 | # Re-exports: expose the magrittr pipe and zeallot multi-assign
 2 | # operators to users of this package.
 3 | #' Pipe operator
 4 | #'
 5 | #' See \code{\link[magrittr]{\%>\%}} for more details.
 6 | #'
 7 | #' @name %>%
 8 | #' @rdname pipe
 9 | #' @keywords internal
10 | #' @export
11 | #' @import magrittr
12 | #' @usage lhs \%>\% rhs
13 | NULL
14 |
15 | #' Assign values to names
16 | #'
17 | #' See \code{\link[zeallot]{\%<-\%}} for more details.
18 | #'
19 | #' @name %<-%
20 | #' @rdname multi-assign
21 | #' @keywords internal
22 | #' @export
23 | #' @import zeallot
24 | #' @usage x \%<-\% value
25 | NULL
26 |
--------------------------------------------------------------------------------
/man/dt_tabulate_metrics.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model-evaluation.R
3 | \name{dt_tabulate_metrics}
4 | \alias{dt_tabulate_metrics}
5 | \title{Tabulate performance metrics for all models}
6 | \usage{
7 | dt_tabulate_metrics(data, metric = c("mape", "rmspe"))
8 | }
9 | \arguments{
10 | \item{data}{Model results in tidy format.}
11 |
12 | \item{metric}{Performance metric.}
13 | }
14 | \description{
15 | Tabulate performance metrics for all models
16 | }
17 |
--------------------------------------------------------------------------------
/man/stochastic_model_results.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/data.R
3 | \docType{data}
4 | \name{stochastic_model_results}
5 | \alias{stochastic_model_results}
6 | \title{Stochastic model results}
7 | \format{An object of class \code{tbl_df} (inherits from \code{tbl}, \code{data.frame}) with 24 rows and 4 columns.}
8 | \usage{
9 | stochastic_model_results
10 | }
11 | \description{
12 | Data frame of model results taken from Meyers (2015)
13 | }
14 | \keyword{datasets}
15 |
--------------------------------------------------------------------------------
/data-raw/group-codes.R:
--------------------------------------------------------------------------------
 1 | # Build the `dt_group_codes` dataset: lob/group_code pairs taken from
 2 | # the Meyers (2015) monograph workbook.
 3 | library(deeptriangle)
 4 | temp_file <- tempfile(fileext = ".xlsx")
 5 | download.file("http://www.casact.org/pubs/monographs/meyers/Monograph_Tables_and_Scripts.xlsx",
 6 |               temp_file)
 7 | # Map the workbook's two-letter line abbreviations to package lob names.
 8 | dt_group_codes <- readxl::read_xlsx(temp_file, sheet = "Multi Mack Paid", range = "A5:F205") %>%
 9 |   dplyr::transmute(lob = dplyr::case_when(
10 |     Line == "CA" ~ "commercial_auto",
11 |     Line == "PA" ~ "private_passenger_auto",
12 |     Line == "WC" ~ "workers_compensation",
13 |     Line == "OL" ~ "other_liability"),
14 |     group_code = as.character(`Group Code`))
15 | usethis::use_data(dt_group_codes, overwrite = TRUE)
16 |
--------------------------------------------------------------------------------
/man/dt_plot_predictions.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/plotting.R
3 | \name{dt_plot_predictions}
4 | \alias{dt_plot_predictions}
5 | \title{Plot predictions}
6 | \usage{
7 | dt_plot_predictions(predictions, group_code, lob, type = c("paid_loss",
8 | "claims_outstanding"))
9 | }
10 | \arguments{
11 | \item{predictions}{Predictions data frame. Output of \code{dt_compute_predictions()}.}
12 |
13 | \item{group_code}{Company code to plot.}
14 |
15 | \item{lob}{LOB to plot}
16 |
17 | \item{type}{One of \code{"paid_loss"} and \code{"claims_outstanding"}.}
18 | }
19 | \description{
20 | Plot predictions
21 | }
22 |
--------------------------------------------------------------------------------
/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: deeptriangle
2 | Type: Package
3 | Title: DeepTriangle
4 | Version: 0.3.0
5 | Authors@R: person("Kevin", "Kuo", email = "kevin@kasa.ai",
6 | role = c("aut", "cre"), comment = c(ORCID = "0000-0001-7803-7901"))
7 | Description: Source code for the DeepTriangle paper.
8 | License: Apache License (>= 2.0)
9 | Encoding: UTF-8
10 | Depends:
11 | R (>= 3.2)
12 | Imports:
13 | keras,
14 | magrittr,
15 | zeallot,
16 | R6,
17 | dplyr,
18 | tidyr,
19 | rlang,
20 | ggplot2,
21 | purrr,
22 | ggfan
23 | Suggests:
24 | here,
25 | readr,
26 | cowplot,
27 | feather,
28 | piggyback,
29 | rmarkdown,
30 | tinytex,
31 | rticles,
32 | kableExtra,
33 | bookdown
34 | LazyData: true
35 | RoxygenNote: 6.1.1
36 | Remotes:
37 | ropensci/piggyback
38 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | # R for travis: see documentation at https://docs.travis-ci.com/user/languages/r
2 |
3 | language: R
4 | sudo: false
5 |
6 | cache:
7 | packages: yes
8 | directories:
9 | - $HOME/bin
10 | - $HOME/.TinyTeX
11 | - $TRAVIS_BUILD_DIR/datasets
12 |
13 | script:
14 | - R CMD build .
15 | - R CMD INSTALL .
16 | - Rscript -e 'if (tinytex::tinytex_root() == "") tinytex::install_tinytex()'
17 | - Rscript -e 'piggyback::pb_download(file = c("datasets/automl_results.csv", "datasets/predictions.tar.gz"))'
18 | - Rscript -e 'if (!file.exists("datasets/predictions.feather")) untar("datasets/predictions.tar.gz", exdir = "datasets/")'
19 | - Rscript -e 'dir.create("manuscript/dist")'
20 | - Rscript -e 'rmarkdown::render("manuscript/manuscript.Rmd", output_dir = "manuscript/dist/")'
21 |
22 | deploy:
23 | provider: pages
24 | skip_cleanup: true
25 | github_token: $GITHUB_TOKEN
26 | on:
27 | branch: master
28 | local_dir: manuscript/dist
29 |
--------------------------------------------------------------------------------
/analysis/automl.R:
--------------------------------------------------------------------------------
 1 | # Fit H2O AutoML baseline models on lagged incremental paid losses and
 2 | # write the resulting metrics to analysis/automl_results.csv.
 3 | library(deeptriangle)
 4 | library(tidyverse)
 5 | library(h2o)
 6 | source("analysis/automl-utils.R")
 7 |
 8 | automl_data <- automl_data_prep(insurance::schedule_p, dt_group_codes)
 9 |
10 | h2o.init()
11 |
12 | automl_results <- map_df(automl_data, function(d) {
13 |   # Add 9 lagged copies of incremental_paid within each company /
14 |   # accident-year series (predictors lag_incremental_paid_01..09).
15 |   df_with_lags <- d %>%
16 |     group_by(group_code, accident_year) %>%
17 |     arrange(group_code, accident_year, development_lag) %>%
18 |     mutate(!!!lags(incremental_paid, 9))
19 |
20 |   c(automl_training, automl_validation, automl_full_training) %<-%
21 |     automl_train_validation_split(df_with_lags)
22 |
23 |   automl_full_training_h2o <- as.h2o(automl_full_training)
24 |   response <- "incremental_paid"
25 |   predictors <- c("group_code", paste0("lag_incremental_paid_0", 1:9))
26 |   # Cap each AutoML run at 5 minutes; fixed seed for reproducibility.
27 |   automl_model <- h2o.automl(x = predictors, y = response,
28 |                              training_frame = automl_full_training_h2o,
29 |                              max_runtime_secs = 5*60, seed = 2018)
30 |
31 |   predictions_df <- automl_compute_predictions(automl_model, d)
32 |   automl_compute_metrics(predictions_df)
33 | }) %>%
34 |   bind_rows()
35 | write_csv(automl_results, "analysis/automl_results.csv")
36 | h2o.shutdown(FALSE)
37 |
--------------------------------------------------------------------------------
/data-raw/stochastic-model-results.R:
--------------------------------------------------------------------------------
 1 | # Download the Meyers (2015) monograph workbook and compute MAPE/RMSPE
 2 | # for each stochastic reserving model; saved as the package dataset
 3 | # `stochastic_model_results`.
 4 | library(tidyverse)
 5 | library(readxl)
 6 |
 7 | temp_file <- tempfile(fileext = ".xlsx")
 8 | download.file("http://www.casact.org/pubs/monographs/meyers/Monograph_Tables_and_Scripts.xlsx",
 9 |               temp_file)
10 |
11 | # Compute MAPE and RMSPE per line from a sheet containing a
12 | # "<Model> Estimate" column and an "Outcome" column.
13 | compute_metrics <- function(x) {
14 |   estimate_column <- colnames(x) %>%
15 |     grep("Estimate$", ., value = TRUE)
16 |   pred <- estimate_column %>%
17 |     rlang::sym()
18 |   # The model name is the column name with " Estimate" stripped off.
19 |   m <- estimate_column %>%
20 |     gsub(" Estimate", "", .)
21 |   actual <- "Outcome" %>%
22 |     rlang::sym()
23 |   x %>%
24 |     mutate(pct_error = (!!pred - !!actual) / !!actual) %>%
25 |     group_by(Line) %>%
26 |     summarize(mape = mean(abs(pct_error)),
27 |               rmspe = sqrt(mean(pct_error ^ 2))
28 |     ) %>%
29 |     mutate(model = m)
30 | }
31 |
32 | stochastic_models <- c("Multi ODP Paid", "Multi Mack Paid", "Multi CCL Paid",
33 |                        "Multi CIT Paid", "Multi LIT Paid", "Multi CSR Paid")
34 | stochastic_model_results <- stochastic_models %>%
35 |   map(~ read_xlsx(temp_file, sheet = .x, range = "A5:F205") %>%
36 |     compute_metrics) %>%
37 |   bind_rows() %>%
38 |   # Expand the two-letter line abbreviations used in the workbook.
39 |   mutate(lob = case_when(
40 |     Line == "CA" ~ "commercial_auto",
41 |     Line == "PA" ~ "private_passenger_auto",
42 |     Line == "WC" ~ "workers_compensation",
43 |     Line == "OL" ~ "other_liability")) %>%
44 |   select(-Line)
45 |
46 | usethis::use_data(stochastic_model_results)
47 |
--------------------------------------------------------------------------------
/analysis/model.R:
--------------------------------------------------------------------------------
 1 | # Build the DeepTriangle Keras model: a sequence-to-sequence network with
 2 | # a GRU encoder/decoder, a company-code embedding, and two per-timestep
 3 | # output heads (paid losses and case reserves).
 4 | dt_model <- function() {
 5 |   # Accident-year sequence input: 9 timesteps x 2 features.
 6 |   ay_seq_input <- layer_input(shape = list(9, 2), name = "ay_seq_input")
 7 |   company_code_input <- layer_input(shape = 1, name = "company_input")
 8 |   # Embed the company code (vocabulary 200, 49 dimensions) and repeat it
 9 |   # across the 9 decoder timesteps.
10 |   company_code_embedding <- company_code_input %>%
11 |     layer_embedding(200, 49, name = "company_code_embedding") %>%
12 |     layer_flatten()%>%
13 |     layer_repeat_vector(9)
14 |
15 |   # Encoder: mask timesteps padded with -99, then compress the sequence
16 |   # into a single 128-unit state.
17 |   encoded <- ay_seq_input %>%
18 |     layer_masking(-99) %>%
19 |     layer_gru(units = 128, dropout = 0.2, recurrent_dropout = 0.2)
20 |
21 |   # Decoder: expand the encoding back to 9 timesteps and append the
22 |   # company embedding at every step.
23 |   decoded <- encoded %>%
24 |     layer_repeat_vector(9) %>%
25 |     layer_gru(128, return_sequences = TRUE, dropout = 0.2, recurrent_dropout = 0.2) %>%
26 |     layer_lambda(f = function(x) layer_concatenate(list(x, company_code_embedding)))
27 |     #layer_lambda(f = function(x) layer_concatenate(list(x, k_repeat(company_code_embedding, 9))))
28 |
29 |   # Head predicting case reserves at each timestep.
30 |   case_reserves_output <- decoded %>%
31 |     time_distributed(layer_dense(units = 64, activation = "relu")) %>%
32 |     time_distributed(layer_dropout(rate = 0.2)) %>%
33 |     time_distributed(layer_dense(units = 1, activation = "relu"), name = "case_reserves_output")
34 |
35 |   # Head predicting paid losses at each timestep.
36 |   paid_output <- decoded %>%
37 |     time_distributed(layer_dense(units = 64, activation = "relu")) %>%
38 |     time_distributed(layer_dropout(rate = 0.2)) %>%
39 |     time_distributed(layer_dense(units = 1, activation = "relu"), name = "paid_output")
40 |
41 |   model <- keras_model(
42 |     inputs = c(ay_seq_input, company_code_input),
43 |     outputs = c(paid_output, case_reserves_output)
44 |   )
45 |
46 |   model
47 | }
48 |
--------------------------------------------------------------------------------
/R/model-evaluation.R:
--------------------------------------------------------------------------------
 1 | #' Compute performance metrics
 2 | #'
 3 | #' Computes mean absolute percentage error (mape) and root mean squared
 4 | #' percentage error (rmspe) of ultimate losses, by line of business.
 5 | #'
 6 | #' @param predictions Predictions data frame.
 7 | #' @export
 8 | dt_compute_metrics <- function(predictions) {
 9 |   predictions %>%
10 |     # Keep only the final development lag, where actual and predicted
11 |     # cumulative losses can be compared.
12 |     dplyr::filter(
13 |       .data$development_lag == 10,
14 |       .data$type %in% c("cumulative_paid_loss", "predicted_cumulative_loss")
15 |     ) %>%
16 |     # Ultimate per company/type/run, then average over runs.
17 |     dplyr::group_by(.data$lob, .data$group_code, .data$type, .data$run_id) %>%
18 |     dplyr::summarize(ultimate = sum(.data$value)) %>%
19 |     dplyr::group_by(.data$lob, .data$group_code, .data$type) %>%
20 |     dplyr::summarize(ultimate = mean(.data$ultimate)) %>%
21 |     # One column per type so predicted and actual compare rowwise.
22 |     tidyr::spread(.data$type, .data$ultimate) %>%
23 |     dplyr::mutate(
24 |       pct_error = (.data$predicted_cumulative_loss - .data$cumulative_paid_loss) /
25 |         .data$cumulative_paid_loss) %>%
26 |     dplyr::ungroup() %>%
27 |     # Aggregate percentage errors to one mape/rmspe pair per lob.
28 |     dplyr::group_by(.data$lob) %>%
29 |     dplyr::summarize(
30 |       mape = mean(abs(.data$pct_error)),
31 |       rmspe = sqrt(mean(.data$pct_error ^ 2))
32 |     ) %>%
33 |     dplyr::mutate(model = "DeepTriangle")
34 | }
35 |
28 | #' Tabulate performance metrics for all models
29 | #'
30 | #' Filters the tidy results to a single metric and reshapes them into a
31 | #' wide table with one column per model.
32 | #'
33 | #' @param data Model results in tidy format.
34 | #' @param metric Performance metric. One of \code{"mape"} and \code{"rmspe"}.
35 | #' @export
36 | dt_tabulate_metrics <- function(data, metric = c("mape", "rmspe")) {
37 |   metric <- rlang::arg_match(metric)
38 |   data %>%
39 |     # Bare `metric` refers to the column; `!!metric` injects the matched
40 |     # argument value.
41 |     dplyr::filter(metric == !!metric) %>%
42 |     dplyr::select(-.data$metric) %>%
43 |     tidyr::spread(.data$model, .data$value) %>%
44 |     dplyr::select(
45 |       .data$lob, .data$Mack, .data$ODP,
46 |       .data$CIT, .data$LIT, .data$AutoML, .data$DeepTriangle
47 |     )
48 | }
49 |
--------------------------------------------------------------------------------
/analysis/main.R:
--------------------------------------------------------------------------------
 1 | # Main experiment driver: trains 100 DeepTriangle model instances per
 2 | # line of business and collects their predictions.
 3 | library(recipes)
 4 | library(insurance)
 5 | library(tidyverse)
 6 | library(deeptriangle)
 7 | library(keras)
 8 | library(tensorflow)
 9 |
10 | source("analysis/analysis-utils.R")
11 | source("analysis/model.R")
12 | source("analysis/data-prep.R")
13 |
14 | lobs <- c("commercial_auto","other_liability", "private_passenger_auto", "workers_compensation")
15 |
16 | results <- map_df(lobs, function(lob) {
17 |   # Slice the prepared Keras data for this line of business.
18 |   data_lob <- data_keras %>%
19 |     filter(lob == !!lob)
20 |   full_training_data_keras <- data_lob %>%
21 |     filter(data_type == "full_training_data") %>%
22 |     pull(keras_data) %>%
23 |     flatten()
24 |   validation_data_keras <- data_lob %>%
25 |     filter(data_type == "validation_data") %>%
26 |     pull(keras_data) %>%
27 |     flatten()
28 |   test_data <- data_lob %>%
29 |     filter(data_type == "test_data")
30 |
31 |   map_df(1:100, function(run_id) {
32 |     # Periodically reset the Keras session to avoid accumulating graph state.
33 |     if (run_id %% 10 == 1) k_clear_session()
34 |     cat(sprintf("Training LOB %s run %s: ", lob, run_id))
35 |     start_time <- Sys.time()
36 |     model <- dt_model()
37 |     # One loss per output head, equally weighted; the -99 sentinel matches
38 |     # the masking value used in dt_model().
39 |     model %>%
40 |       compile(
41 |         optimizer = optimizer_adam(lr = 0.0005, amsgrad = TRUE),
42 |         loss = list(masked_mse(-99), masked_mse(-99)),
43 |         loss_weights = c(0.5, 0.5)
44 |       )
45 |
46 |     cb <- callback_early_stopping(min_delta = 0.001, patience = 200, mode = "min", restore_best_weights = TRUE)
47 |
48 |     history <- model %>%
49 |       fit(
50 |         x = full_training_data_keras$x,
51 |         y = full_training_data_keras$y,
52 |         validation_data = unname(validation_data_keras),
53 |         batch_size = 2250,
54 |         epochs = 1000,
55 |         callbacks = list(cb),
56 |         verbose = 0
57 |       )
58 |
59 |     training_time <- as.integer(Sys.time() - start_time)
60 |
61 |     cat(sprintf("training for %d epochs took %d seconds at %s", cb$stopped_epoch, training_time, Sys.time()), "\n")
62 |
63 |     predictions_table <- compute_predictions(model, test_data, lob) %>%
64 |       mutate(lob = !!lob, run_id = !!run_id)
65 |
66 |     tibble(
67 |       lob = lob,
68 |       run_id = run_id,
69 |       training_time = training_time,
70 |       predictions_table = list(predictions_table),
71 |       trained_epochs = cb$stopped_epoch
72 |     )
73 |   })
74 | })
75 |
76 | # Combine predictions from every run and line of business.
77 | predictions_table <- results %>%
78 |   pull(predictions_table) %>%
79 |   bind_rows()
80 |
--------------------------------------------------------------------------------
/R/plotting.R:
--------------------------------------------------------------------------------
 1 | # Variables used via non-standard evaluation; declared so R CMD check
 2 | # does not flag them as undefined globals.
 3 | utils::globalVariables(c("type_actual", "type_prediction", "..Interval.."))
 4 |
 5 | #' Plot predictions
 6 | #'
 7 | #' @param predictions Predictions data frame. Output of \code{dt_compute_predictions()}.
 8 | #' @param group_code Company code to plot.
 9 | #' @param lob LOB to plot.
10 | #' @param type One of \code{"paid_loss"} and \code{"claims_outstanding"}.
11 | #' @export
12 | dt_plot_predictions <- function(
13 |   predictions, group_code, lob, type = c("paid_loss", "claims_outstanding")) {
14 |   type <- rlang::arg_match(type)
15 |   y_lab <- if (identical(type, "paid_loss")) "Loss Ratio" else "Claims Outstanding"
16 |
17 |   # Map the requested plot type to the actual/predicted series names
18 |   # used in the predictions data frame.
19 |   c(type_actual, type_prediction) %<-% (
20 |     switch(type,
21 |       paid_loss = c("cumulative_paid_loss", "predicted_cumulative_loss"),
22 |       claims_outstanding = c("case_reserves_actual", "predicted_os")
23 |     ))
24 |   # Scale values by net earned premium so panels are comparable across
25 |   # companies.
26 |   predictions <- predictions %>%
27 |     dplyr::mutate(value = .data$value / .data$earned_premium_net)
28 |
29 |   predictions %>%
30 |     filter(
31 |       .data$group_code == !!group_code,
32 |       .data$type == type_prediction,
33 |       .data$lob == !!lob
34 |     ) %>%
35 |     mutate(run_id = as.character(.data$run_id)) %>%
36 |     ggplot(aes_(x = ~development_lag, y = ~value)) +
37 |     # Fan of predicted paths across runs, plus their mean.
38 |     ggfan::geom_interval(
39 |       aes(linetype = ..Interval..),
40 |       intervals = c(0.9)) +
41 |     stat_summary(fun.y = "mean", geom="line", alpha = 0.5, aes(linetype = "mean")) +
42 |     scale_linetype_manual(
43 |       "Predicted",
44 |       values = c("mean" = "solid", "0.9" = "dashed"),
45 |       label = c("95% interval", "mean")
46 |     ) +
47 |     facet_wrap(~accident_year, nrow = 2) +
48 |     guides(
49 |       shape = guide_legend(title = "Actual", order = 1)
50 |     ) +
51 |     # Overlay actual observations from a single run (run_id == 1).
52 |     geom_point(
53 |       mapping = ggplot2::aes_(x = ~development_lag, y = ~value, shape = ~obs_type),
54 |       data = predictions %>%
55 |         filter(
56 |           .data$group_code == !!group_code,
57 |           .data$type == type_actual,
58 |           .data$lob == !!lob
59 |         ) %>%
60 |         filter(.data$run_id == 1),
61 |       inherit.aes = FALSE
62 |     ) +
63 |     scale_shape_manual(values = c(1, 19)) +
64 |     scale_x_continuous(breaks = c(2, 4, 6, 8, 10)) +
65 |     scale_y_continuous(breaks = seq(0, 2, by = 0.2)) +
66 |     ylab(y_lab) +
67 |     labs(x = "Development Lag") +
68 |     theme_light()
69 | }
70 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
[![Travis build status](https://travis-ci.org/kasaai/deeptriangle.svg?branch=master)](https://travis-ci.org/kasaai/deeptriangle)
6 |
7 | # DeepTriangle
8 |
9 | This is the companion repository to the paper [*DeepTriangle: A Deep
10 | Learning Approach to Loss
11 | Reserving*](https://www.mdpi.com/2227-9091/7/3/97).
12 |
13 | ## Experiments
14 |
15 | To get started, either clone the repo and build the R package, or
16 | install with
17 |
18 | ``` r
19 | devtools::install_github("kasaai/deeptriangle")
20 | ```
21 |
22 | You will also need the [insurance](https://github.com/kasaai/insurance)
23 | package, which can be installed with
24 |
25 | ``` r
26 | devtools::install_github("kasaai/insurance")
27 | ```
28 |
29 | The experiments can be found in `analysis/main.R`. It is recommended
30 | that you use a GPU since many instances of the models are fit.
31 |
32 | For convenience, we provide a `predictions.feather` file in the release.
33 |
34 | ``` r
35 | predictions <- feather::read_feather("datasets/predictions.feather")
36 |
37 | model_results <- dt_compute_metrics(predictions) %>%
38 | bind_rows(stochastic_model_results) %>%
39 | bind_rows(read_csv("datasets/automl_results.csv")) %>%
40 | gather(metric, value, mape, rmspe)
41 |
42 | dt_tabulate_metrics(model_results, metric = "mape") %>%
43 | knitr::kable(booktabs = "T", digits = 3)
44 | ```
45 |
46 | | lob | Mack | ODP | CIT | LIT | AutoML | DeepTriangle |
47 | | :----------------------- | ----: | ----: | ----: | ----: | -----: | -----------: |
48 | | commercial\_auto | 0.060 | 0.217 | 0.052 | 0.052 | 0.068 | 0.043 |
49 | | other\_liability | 0.134 | 0.223 | 0.165 | 0.152 | 0.142 | 0.109 |
50 | | private\_passenger\_auto | 0.038 | 0.039 | 0.038 | 0.040 | 0.036 | 0.025 |
51 | | workers\_compensation | 0.053 | 0.105 | 0.054 | 0.054 | 0.067 | 0.046 |
52 |
53 | To create actual vs. predicted plots, use the `dt_plot_predictions()`
54 | function. Here are successful and unsuccessful examples of the model’s
55 | forecasting attempts.
56 |
57 | Company 1767 commercial auto.
58 |
59 |
60 |
61 | Company 337 workers’ compensation.
62 |
63 |
64 |
65 | ## Testing different architectures
66 |
67 | If you would like to try out different architectures or hyperparameters,
68 | you can do so by providing a function that returns a keras model. See
69 | the source code of `dt_model()` for a template.
70 |
71 | For more details on the **keras** R package, visit
72 | <https://keras.rstudio.com>.
73 |
--------------------------------------------------------------------------------
/analysis/data-prep.R:
--------------------------------------------------------------------------------
 1 | # Prepare the Schedule P data for the Keras models: build masked
 2 | # model-visible / actual feature pairs, assign train/validation/test
 3 | # buckets, and normalize by net earned premium.
 4 | library(deeptriangle)
 5 | library(tidyverse)
 6 | library(recipes)
 7 | library(insurance)
 8 |
 9 | data(schedule_p)
10 |
11 | data_with_features <- schedule_p %>%
12 |   right_join(dt_group_codes, by = c("lob", "group_code")) %>%
13 |   mutate(case_reserves = incurred_loss - cumulative_paid_loss) %>%
14 |   group_by(lob, group_code, accident_year) %>%
15 |   arrange(lob, group_code, accident_year, development_lag) %>%
16 |   # Keep full *_actual series, but blank out values after calendar year
17 |   # 1997 in the model-visible columns (the test bucket is > 1997).
18 |   mutate(
19 |     incremental_paid_actual = incremental_paid_loss,
20 |     incremental_paid = ifelse(
21 |       calendar_year <= 1997,
22 |       incremental_paid_actual, NA_real_
23 |     ),
24 |     cumulative_paid_actual = cumulative_paid_loss,
25 |     cumulative_paid = ifelse(
26 |       calendar_year <= 1997,
27 |       cumulative_paid_actual, NA_real_
28 |     ),
29 |     case_reserves_actual = case_reserves,
30 |     case_reserves = ifelse(
31 |       calendar_year <= 1997,
32 |       case_reserves_actual,
33 |       NA_real_
34 |     )
35 |   ) %>%
36 |   ungroup() %>%
37 |   # Assign train/validation/test buckets by calendar year.
38 |   mutate(
39 |     bucket = case_when(
40 |       calendar_year <= 1995 & development_lag > 1 ~ "train",
41 |       calendar_year > 1995 & calendar_year <= 1997 &
42 |         development_lag > 1 ~ "validation",
43 |       calendar_year > 1997 ~ "test"
44 |     )
45 |   ) %>%
46 |   # Express all loss amounts as ratios to net earned premium.
47 |   mutate(
48 |     incremental_paid = incremental_paid / earned_premium_net,
49 |     incremental_paid_actual = incremental_paid_actual / earned_premium_net,
50 |     cumulative_paid = cumulative_paid / earned_premium_net,
51 |     cumulative_paid_actual = cumulative_paid_actual / earned_premium_net,
52 |     case_reserves = case_reserves / earned_premium_net,
53 |     case_reserves_actual = case_reserves_actual / earned_premium_net
54 |   )
55 |
56 | # Recipe for indexing company code
57 | company_index_recipe <- recipe(~ group_code, data = data_with_features) %>%
58 |   step_integer(group_code, zero_based = TRUE) %>%
59 |   prep()
60 |
61 | # Assemble the Keras-ready datasets, nested by line of business.
62 | data_keras <- bind_rows(
63 |   validation_data = data_with_features %>%
64 |     filter(bucket %in% c("train", "validation") | development_lag == 1) %>%
65 |     mutate_series() %>%
66 |     filter(bucket == "validation") %>%
67 |     group_by(lob) %>%
68 |     nest() %>%
69 |     mutate(keras_data = map(data, ~ prep_keras_data(.x, company_index_recipe))),
70 |   full_training_data = data_with_features %>%
71 |     filter(bucket %in% c("train", "validation") | development_lag == 1) %>%
72 |     mutate_series() %>%
73 |     filter(bucket %in% c("train", "validation")) %>%
74 |     group_by(lob) %>%
75 |     nest() %>%
76 |     mutate(keras_data = map(data, ~ prep_keras_data(.x, company_index_recipe))),
77 |   test_data = data_with_features %>%
78 |     filter(calendar_year <= 1998) %>%
79 |     mutate_series() %>%
80 |     filter(bucket == "test", calendar_year == 1998) %>%
81 |     group_by(lob) %>%
82 |     nest() %>%
83 |     mutate(keras_data = map(data, ~ prep_keras_data(.x, company_index_recipe))),
84 |   .id = "data_type"
85 | )
86 |
87 |
--------------------------------------------------------------------------------
/README.Rmd:
--------------------------------------------------------------------------------
1 | ---
2 | output: github_document
3 | ---
4 |
5 |
6 |
7 | ```{r setup, include = FALSE}
8 | knitr::opts_chunk$set(
9 | collapse = TRUE,
10 | comment = "#>",
11 | fig.path = "man/figures/README-",
12 | out.width = "100%"
13 | )
14 | library(deeptriangle)
15 | library(tidyverse)
16 | ```
17 |
[![Travis build status](https://travis-ci.org/kasaai/deeptriangle.svg?branch=master)](https://travis-ci.org/kasaai/deeptriangle)
19 |
20 | # DeepTriangle
21 |
22 | This is the companion repository to the paper [*DeepTriangle: A Deep Learning Approach to Loss Reserving*](https://www.mdpi.com/2227-9091/7/3/97).
23 |
24 |
25 | ## Experiments
26 |
27 | To get started, either clone the repo and build the R package, or install with
28 |
29 | ``` r
30 | devtools::install_github("kasaai/deeptriangle")
31 | ```
32 |
33 | You will also need the [insurance](https://github.com/kasaai/insurance) package, which can be installed with
34 |
35 | ```r
36 | devtools::install_github("kasaai/insurance")
37 | ```
38 |
39 | The experiments can be found in `analysis/main.R`. It is recommended that you use a GPU since many instances of the models are fit.
40 |
41 | For convenience, we provide a `predictions.feather` file in the release.
42 |
43 | ```{r, message = FALSE}
44 | predictions <- feather::read_feather("datasets/predictions.feather")
45 |
46 | model_results <- dt_compute_metrics(predictions) %>%
47 | bind_rows(stochastic_model_results) %>%
48 | bind_rows(read_csv("datasets/automl_results.csv")) %>%
49 | gather(metric, value, mape, rmspe)
50 |
51 | dt_tabulate_metrics(model_results, metric = "mape") %>%
52 | knitr::kable(booktabs = "T", digits = 3)
53 | ```
54 |
55 | To create actual vs. predicted plots, use the `dt_plot_predictions()` function. Here are successful and unsuccessful examples of the model's forecasting attempts.
56 |
57 | Company 1767 commercial auto.
58 |
59 | ```{r, echo = FALSE, message = FALSE, out.width = "80%"}
60 | library(cowplot)
61 |
62 | p1 <- dt_plot_predictions(predictions, "1767", "commercial_auto", "paid_loss") + xlab("")
63 | p2 <- dt_plot_predictions(predictions, "1767", "commercial_auto", "claims_outstanding")
64 | p12 <- plot_grid(
65 | p1 + theme(legend.position = "none"),
66 | p2 + theme(legend.position = "none"),
67 | align = "v",
68 | ncol = 1
69 | )
70 | legend <- get_legend(p1)
71 | plot_grid(p12, legend, rel_widths = c(1, 0.2), nrow = 1)
72 | ```
73 |
74 | Company 337 workers' compensation.
75 |
76 | ```{r, echo = FALSE, message = FALSE, out.width = "80%"}
77 | library(cowplot)
78 |
79 | p1 <- dt_plot_predictions(predictions, "337", "workers_compensation", "paid_loss") + xlab("")
80 | p2 <- dt_plot_predictions(predictions, "337", "workers_compensation", "claims_outstanding")
81 | p12 <- plot_grid(
82 | p1 + theme(legend.position = "none"),
83 | p2 + theme(legend.position = "none"),
84 | align = "v",
85 | ncol = 1
86 | )
87 | legend <- get_legend(p1)
88 | plot_grid(p12, legend, rel_widths = c(1, 0.2), nrow = 1)
89 | ```
90 |
91 | ## Testing different architectures
92 |
93 | If you would like to try out different architectures or hyperparameters, you can do so by providing a function that returns a keras model. See the source code of `dt_model()` for a template.
94 |
95 | For more details on the **keras** R package, visit [https://keras.rstudio.com/](https://keras.rstudio.com/).
96 |
--------------------------------------------------------------------------------
/analysis/analysis-utils.R:
--------------------------------------------------------------------------------
#' Given a time series, return a list where each element is a vector
#' representing a window of the time series determined by the offsets.
#'
#' @param v Numeric vector holding the time series.
#' @param start_offset Offset (relative to each position) of the first
#'   element of the window; negative values look backwards.
#' @param end_offset Offset (relative to each position) of the last
#'   element of the window.
#' @param na_pad Value used both to replace NAs inside a window and to
#'   left-pad windows that would extend before the start of the series.
#' @param window_length Length each window is left-padded to. Defaults to
#'   the span implied by the offsets, which is 9 for every caller in this
#'   project and thus preserves the previously hard-coded pad length.
#' @return List with one padded window (numeric vector) per position of v.
make_series <- function(v, start_offset, end_offset, na_pad = -99,
                        window_length = end_offset - start_offset + 1) {
  # Left-pad a window with `na_pad` so it has `window_length` elements.
  prepad_mask <- function(w) {
    length_diff <- window_length - length(w)
    if (length_diff > 0) {
      c(rep(na_pad, length_diff), w)
    } else {
      w
    }
  }

  lapply(
    seq_along(v),
    function(x) {
      # Clamp at 0 so windows reaching before the series start are
      # truncated; R silently drops zero indices, so v[0:end] yields the
      # first `end` values (and an empty vector when end == 0).
      start <- max(0, x + start_offset)
      end <- max(0, x + end_offset)
      out <- v[start:end]
      prepad_mask(ifelse(is.na(out), na_pad, out))
    }
  )
}
25 |
#' Add lagged-input and target list-columns for each (lob, group_code,
#' accident_year) cell, with rows ordered by development lag so each
#' window aligns with its position in the development triangle.
#'
#' @param data Data frame containing incremental_paid and case_reserves.
#' @param timesteps Number of development periods per window.
#' @return `data` with paid_lags/case_lags (the previous `timesteps`
#'   values) and paid_target/case_target (the current value plus the next
#'   `timesteps - 1` values) list-columns, ungrouped.
mutate_series <- function(data, timesteps = 9) {
  data %>%
    dplyr::group_by(.data$lob, .data$group_code, .data$accident_year) %>%
    dplyr::arrange(.data$lob, .data$group_code, .data$accident_year, .data$development_lag) %>%
    mutate(
      paid_lags = make_series(incremental_paid, -timesteps, -1),
      case_lags = make_series(case_reserves, -timesteps, -1),
      paid_target = make_series(incremental_paid, 0, timesteps - 1),
      # Trailing comma removed: it only parsed because rlang tolerates an
      # empty argument in mutate()'s dots.
      case_target = make_series(case_reserves, 0, timesteps - 1)
    ) %>%
    ungroup()
}
38 |
# Convert a prepared data frame of lag/target list-columns into the
# list-of-arrays structure consumed by the Keras model: a sequence input,
# a company-index input, and the two response tensors.
prep_keras_data <- function(data, company_index_recipe) {
  # One integer company index per observation, as a matrix.
  company_codes <- bake(company_index_recipe, data) %>% as.matrix()

  # Stack the paid and case-reserve lag windows of each row into a
  # single (nrow(data), 9, 2) array.
  sequence_input <- data %>%
    select(.data$paid_lags, .data$case_lags) %>%
    purrr::transpose() %>%
    purrr::map(~ array(unlist(.x), dim = c(1, 9, 2))) %>%
    abind::abind(along = 1) %>%
    unname()

  # Flatten each list-column of length-9 target windows into a
  # (nrow(data), 9, 1) response array.
  paid_response <- array_reshape(
    flatten_dbl(pull(data, .data$paid_target)),
    c(nrow(data), 9, 1)
  )
  case_response <- array_reshape(
    flatten_dbl(pull(data, .data$case_target)),
    c(nrow(data), 9, 1)
  )

  list(
    x = list(
      ay_seq_input = sequence_input, company_input = company_codes
    ),
    y = list(
      paid_output = paid_response,
      case_reserves_output = case_response
    )
  )
}
69 |
# Build a Keras loss function that ignores timesteps whose true value
# equals `mask_value` (the padding value produced by make_series).
# Squared errors are zeroed at masked positions and the per-sample sum is
# divided by the count of unmasked timesteps along axis 2, giving a mean
# squared error over real observations only.
masked_mse <- function(mask_value) {
  function(y_true, y_pred) {
    # 1.0 where y_true is a real value, 0.0 where it is the mask.
    keep_value <- k_cast(k_not_equal(y_true, mask_value), k_floatx())
    sum_squared_error <- k_sum(
      k_square(keep_value * (y_true - y_pred)),
      axis = 2
    )
    # NOTE(review): if every timestep of a sample were masked this would
    # divide by zero — presumably impossible with the padded windows used
    # here; confirm against the data pipeline.
    sum_squared_error / k_sum(keep_value, axis = 2)
  }
}
80 |
# Reshape raw Keras predictions — a list of two (obs, 9, 1) arrays — into
# a tibble with one list-column of length-9 forecast paths per output
# (incremental paid loss and outstanding claims).
transform_preds <- function(preds) {
  # seq_len handles the zero-observation case correctly; the previous
  # `1:dim(...)[[1]]` would count down c(1, 0) when there were no rows.
  rows <- seq_len(dim(preds[[1]])[[1]])
  list(
    predicted_loss = map(rows, ~ preds[[1]][, , 1][.x, ]),
    predicted_os = map(rows, ~ preds[[2]][, , 1][.x, ])
  ) %>%
    as_tibble()
}
90 |
# Return the prepared Keras arrays (the `keras_data` list) for a single
# line of business from the nested data frame.
extract_keras_data <- function(data, lob) {
  matching_rows <- filter(data, lob == !!lob)
  flatten(pull(matching_rows, .data$keras_data))
}
97 |
# Return the unnested raw data rows for a single line of business from
# the nested data frame.
extract_data <- function(data, lob) {
  matching_rows <- filter(data, lob == !!lob)
  unnest(matching_rows, .data$data)
}
103 |
#' Forecast one line of business with a fitted model and join the
#' forecasts back onto the observed data in long format for metrics and
#' plotting.
#'
#' @param model Trained Keras model.
#' @param test_data Nested data frame of Keras-ready test inputs.
#' @param lob Line of business to forecast.
#' @param features Feature data frame used to backfill observed values and
#'   premium amounts. Defaults to the global `data_with_features` built in
#'   data-prep.R — previously this was a hidden global dependency; it is
#'   now an explicit, backward-compatible parameter.
#' @return Long data frame with `type`, `value`, and `obs_type` columns.
compute_predictions <- function(model, test_data, lob,
                                features = data_with_features) {
  predictions <- model %>%
    predict(extract_keras_data(test_data, lob)$x) %>%
    transform_preds()

  test_data %>%
    unnest(data) %>%
    select(group_code, accident_year, development_lag) %>%
    bind_cols(predictions) %>%
    unnest(predicted_loss, predicted_os) %>%
    group_by(group_code, accident_year) %>%
    # Each prediction row carries a length-9 forecast path; walk the
    # development lag forward along that path, discarding lags past 10.
    mutate(development_lag = development_lag + row_number() - 1) %>%
    filter(development_lag <= 10) %>%
    right_join(
      features %>%
        filter(lob == !!lob),
      by = c("group_code", "accident_year", "development_lag")
    ) %>%
    arrange(group_code, accident_year, development_lag) %>%
    mutate(
      # Cells without a forecast (the observed part of the triangle) keep
      # their actual values.
      predicted_os = if_else(
        is.na(predicted_os),
        case_reserves, predicted_os
      ),
      predicted_loss = if_else(
        is.na(predicted_loss),
        incremental_paid, predicted_loss
      )
    ) %>%
    mutate(
      # Undo the earned-premium normalization from data-prep.R while
      # accumulating incremental losses to a cumulative amount.
      predicted_cumulative_loss = cumsum(predicted_loss) * earned_premium_net,
      predicted_os = predicted_os * earned_premium_net,
      case_reserves_actual = case_reserves_actual * earned_premium_net
    ) %>%
    ungroup() %>%
    gather(
      "type", "value", predicted_cumulative_loss, predicted_os,
      cumulative_paid_loss, case_reserves_actual,
      na.rm = TRUE
    ) %>%
    mutate(
      # The final TRUE branch catches rows with NA calendar_year.
      obs_type = case_when(
        grepl("predicted", type) ~ "prediction",
        calendar_year <= 1997 ~ "observed",
        calendar_year > 1997 ~ "holdout",
        TRUE ~ "observed"
      )
    )
}
153 |
--------------------------------------------------------------------------------
/analysis/automl-utils.R:
--------------------------------------------------------------------------------
# Prepare Schedule P data for the AutoML benchmark: compute case
# reserves, mask values after the 1997 diagonal, assign
# train/validation/test buckets, normalize by net earned premium, and
# split into one data frame per line of business with group_code as a
# factor (as required by h2o).
#
# NOTE(review): dplyr::ungroup() appears twice (after the masking mutate
# and again after the normalization mutate); the second call is redundant
# but harmless.
automl_data_prep <- function(schedule_p_data, group_codes, train_validation_cutoff = 1995) {
  schedule_p_data %>%
    dplyr::right_join(group_codes, by = c("lob", "group_code")) %>%
    dplyr::mutate(case_reserves = .data$incurred_loss - .data$cumulative_paid_loss) %>%
    dplyr::group_by(.data$lob, .data$group_code, .data$accident_year) %>%
    dplyr::arrange(.data$lob, .data$group_code, .data$accident_year, .data$development_lag) %>%
    dplyr::mutate(
      # *_actual columns keep the truth; model-input columns are masked
      # to NA after calendar year 1997 (the holdout).
      incremental_paid_actual = .data$incremental_paid_loss,
      incremental_paid = ifelse(.data$calendar_year <= 1997,
        .data$incremental_paid_actual, NA_real_),
      case_reserves_actual = .data$case_reserves,
      case_reserves = ifelse(.data$calendar_year <= 1997,
        .data$case_reserves_actual, NA_real_)
    ) %>%
    dplyr::ungroup() %>%
    dplyr::mutate(
      # Rows with development_lag == 1 match no branch and get NA bucket.
      bucket = dplyr::case_when(
        .data$calendar_year <= !!train_validation_cutoff & .data$development_lag > 1 ~ "train",
        .data$calendar_year > !!train_validation_cutoff & .data$calendar_year <= 1997 &
          .data$development_lag > 1 ~ "validation",
        .data$calendar_year > 1997 ~ "test"
      )) %>%
    dplyr::mutate(
      # Normalize by premium so companies of different sizes compare.
      incremental_paid = .data$incremental_paid / .data$earned_premium_net,
      incremental_paid_actual = .data$incremental_paid_actual / .data$earned_premium_net,
      case_reserves = .data$case_reserves / .data$earned_premium_net,
      case_reserves_actual = .data$case_reserves_actual / .data$earned_premium_net
    ) %>%
    dplyr::ungroup() %>%
    # One data frame per line of business, with group_code as a factor.
    (function(x) split(x, x$lob)) %>%
    purrr::map(~ .x %>%
      dplyr::mutate(group_code = factor(.data$group_code))
    )
}
35 |
36 |
# Build a named list of quosures `lag_<var>_01` ... `lag_<var>_<n>`, each
# lagging `var` by the corresponding number of steps. Intended to be
# spliced into mutate() with `!!!` to create lag feature columns.
lags <- function(var, n = 10){
  quoted_var <- rlang::enquo(var)
  steps <- seq_len(n)
  lag_quos <- map(steps, ~ rlang::quo(lag(!!quoted_var, !!.x)))
  set_names(lag_quos, sprintf("lag_%s_%02d", rlang::quo_text(quoted_var), steps))
}
43 |
#' Split prepared data into the training, validation, and combined
#' (train + validation) subsets used by the AutoML workflow.
#'
#' @param data One line of business from automl_data_prep().
#' @return A list of three data frames. The elements are now named
#'   (training_data, validation_data, full_training_data); this is
#'   backward-compatible with positional `[[i]]` access and with
#'   zeallot-style destructuring, which assigns by position.
automl_train_validation_split <- function(data) {
  training_data <- dplyr::filter(
    data,
    .data$bucket == "train",
    .data$development_lag > 1
  )

  validation_data <- dplyr::filter(
    data,
    .data$bucket == "validation",
    .data$development_lag > 1
  )

  # No explicit development-lag filter is needed here: bucket assignment
  # already requires development_lag > 1 for train/validation rows.
  full_training_data <- dplyr::filter(
    data,
    .data$bucket %in% c("train", "validation")
  )

  list(
    training_data = training_data,
    validation_data = validation_data,
    full_training_data = full_training_data
  )
}
64 |
# Recursively forecast incremental paid losses one calendar year at a
# time (1998 through 2006). Each year's predictions are appended to the
# `results` accumulator (via `<<-` inside the walk() loop) so that later
# years' lag features are built from forecasted, not actual, values.
automl_predict <- function(model, data) {
  # Empty accumulator; group_code's factor levels must match the input so
  # the join below behaves.
  results <- dplyr::tibble(
    lob = character(),
    group_code = factor(levels = levels(data$group_code)),
    accident_year = integer(), development_lag = integer(),
    predicted_loss = numeric()
  )

  purrr::walk(1998:2006, function(cy) {
    df <- data %>%
      dplyr::filter(.data$calendar_year <= !!cy) %>%
      # Splice previously-forecast values over the masked incremental_paid
      # so the lag features computed below see them.
      dplyr::left_join(results,
        by = c("lob", "group_code", "accident_year",
               "development_lag")
      ) %>%
      dplyr::mutate(
        incremental_paid = ifelse(is.na(.data$predicted_loss),
                                  .data$incremental_paid,
                                  .data$predicted_loss)
      ) %>%
      dplyr::select(-.data$predicted_loss) %>%
      # Create lag_incremental_paid_01 ... _09 feature columns.
      mutate(!!!lags(incremental_paid, 9)) %>%
      dplyr::filter(.data$calendar_year == !!cy)
    df_h2o <- as.h2o(df)
    predictions <- model %>%
      stats::predict(df_h2o) %>%
      as_tibble() %>%
      rename(predicted_loss = predict)
    # Free the temporary h2o frame immediately.
    h2o.rm(df_h2o)
    # `<<-` updates the accumulator in the enclosing function scope.
    results <<- dplyr::bind_rows(results, dplyr::bind_cols(df, predictions)) %>%
      dplyr::select(.data$lob, .data$group_code, .data$accident_year,
                    .data$development_lag, .data$predicted_loss)
  })
  results
}
100 |
#' Join recursive AutoML forecasts back onto the input data, backfill
#' observed cells, accumulate to cumulative losses, and reshape to the
#' long format used for metrics and plotting.
#'
#' Fix: the final statement previously bound the result to an unused
#' local (`predictions_df <-`), which made the function's return value
#' invisible; the dead binding is removed so the result prints normally.
automl_compute_predictions <- function(model, data) {
  predictions <- automl_predict(model, data)

  data %>%
    dplyr::left_join(
      dplyr::select(predictions, .data$group_code, .data$accident_year,
                    .data$development_lag, .data$predicted_loss),
      by = c("group_code", "accident_year", "development_lag")
    ) %>%
    dplyr::group_by(.data$group_code, .data$accident_year) %>%
    dplyr::arrange(.data$group_code, .data$accident_year, .data$development_lag) %>%
    dplyr::mutate(
      # Cells without a forecast (the observed triangle) keep actuals.
      predicted_loss = ifelse(is.na(.data$predicted_loss),
                              .data$incremental_paid, .data$predicted_loss)
    ) %>%
    dplyr::mutate(
      # Undo premium normalization while accumulating to cumulative loss.
      predicted_cumulative_loss = cumsum(.data$predicted_loss) * .data$earned_premium_net
    ) %>%
    dplyr::ungroup() %>%
    dplyr::mutate(group_code = as.character(.data$group_code)) %>%
    tidyr::gather("type", "value", .data$predicted_cumulative_loss,
                  .data$cumulative_paid_loss,
                  na.rm = TRUE) %>%
    dplyr::mutate(obs_type = dplyr::case_when(
      grepl("predicted", .data$type) ~ "prediction",
      .data$calendar_year <= 1997 ~ "observed",
      .data$calendar_year > 1997 ~ "holdout",
      TRUE ~ "observed"
    ))
}
131 |
# Compute MAPE and RMSPE of the ultimate loss (development lag 10) by
# line of business for the AutoML predictions, in the same shape as
# dt_compute_metrics() output so the results can be row-bound together.
automl_compute_metrics <- function(predictions) {
  predictions %>%
    dplyr::filter(
      .data$development_lag == 10,
      .data$type %in% c("cumulative_paid_loss", "predicted_cumulative_loss")
    ) %>%
    # Ultimate = sum of lag-10 values per company and value type.
    dplyr::group_by(.data$lob, .data$group_code, .data$type) %>%
    dplyr::summarize(ultimate = sum(.data$value)) %>%
    tidyr::spread(.data$type, .data$ultimate) %>%
    dplyr::mutate(
      # Per-company percentage error of predicted vs actual ultimate.
      pct_error = (.data$predicted_cumulative_loss - .data$cumulative_paid_loss) /
        .data$cumulative_paid_loss) %>%
    dplyr::ungroup() %>%
    dplyr::group_by(.data$lob) %>%
    dplyr::summarize(
      mape = mean(abs(.data$pct_error)),
      rmspe = sqrt(mean(.data$pct_error ^ 2))
    ) %>%
    dplyr::mutate(model = "AutoML")
}
152 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Apache License
2 | ==============
3 |
4 | _Version 2.0, January 2004_
_&lt;<http://www.apache.org/licenses/>&gt;_
6 |
7 | ### Terms and Conditions for use, reproduction, and distribution
8 |
9 | #### 1. Definitions
10 |
11 | “License” shall mean the terms and conditions for use, reproduction, and
12 | distribution as defined by Sections 1 through 9 of this document.
13 |
14 | “Licensor” shall mean the copyright owner or entity authorized by the copyright
15 | owner that is granting the License.
16 |
17 | “Legal Entity” shall mean the union of the acting entity and all other entities
18 | that control, are controlled by, or are under common control with that entity.
19 | For the purposes of this definition, “control” means **(i)** the power, direct or
20 | indirect, to cause the direction or management of such entity, whether by
21 | contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the
22 | outstanding shares, or **(iii)** beneficial ownership of such entity.
23 |
24 | “You” (or “Your”) shall mean an individual or Legal Entity exercising
25 | permissions granted by this License.
26 |
27 | “Source” form shall mean the preferred form for making modifications, including
28 | but not limited to software source code, documentation source, and configuration
29 | files.
30 |
31 | “Object” form shall mean any form resulting from mechanical transformation or
32 | translation of a Source form, including but not limited to compiled object code,
33 | generated documentation, and conversions to other media types.
34 |
35 | “Work” shall mean the work of authorship, whether in Source or Object form, made
36 | available under the License, as indicated by a copyright notice that is included
37 | in or attached to the work (an example is provided in the Appendix below).
38 |
39 | “Derivative Works” shall mean any work, whether in Source or Object form, that
40 | is based on (or derived from) the Work and for which the editorial revisions,
41 | annotations, elaborations, or other modifications represent, as a whole, an
42 | original work of authorship. For the purposes of this License, Derivative Works
43 | shall not include works that remain separable from, or merely link (or bind by
44 | name) to the interfaces of, the Work and Derivative Works thereof.
45 |
46 | “Contribution” shall mean any work of authorship, including the original version
47 | of the Work and any modifications or additions to that Work or Derivative Works
48 | thereof, that is intentionally submitted to Licensor for inclusion in the Work
49 | by the copyright owner or by an individual or Legal Entity authorized to submit
50 | on behalf of the copyright owner. For the purposes of this definition,
51 | “submitted” means any form of electronic, verbal, or written communication sent
52 | to the Licensor or its representatives, including but not limited to
53 | communication on electronic mailing lists, source code control systems, and
54 | issue tracking systems that are managed by, or on behalf of, the Licensor for
55 | the purpose of discussing and improving the Work, but excluding communication
56 | that is conspicuously marked or otherwise designated in writing by the copyright
57 | owner as “Not a Contribution.”
58 |
59 | “Contributor” shall mean Licensor and any individual or Legal Entity on behalf
60 | of whom a Contribution has been received by Licensor and subsequently
61 | incorporated within the Work.
62 |
63 | #### 2. Grant of Copyright License
64 |
65 | Subject to the terms and conditions of this License, each Contributor hereby
66 | grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
67 | irrevocable copyright license to reproduce, prepare Derivative Works of,
68 | publicly display, publicly perform, sublicense, and distribute the Work and such
69 | Derivative Works in Source or Object form.
70 |
71 | #### 3. Grant of Patent License
72 |
73 | Subject to the terms and conditions of this License, each Contributor hereby
74 | grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
75 | irrevocable (except as stated in this section) patent license to make, have
76 | made, use, offer to sell, sell, import, and otherwise transfer the Work, where
77 | such license applies only to those patent claims licensable by such Contributor
78 | that are necessarily infringed by their Contribution(s) alone or by combination
79 | of their Contribution(s) with the Work to which such Contribution(s) was
80 | submitted. If You institute patent litigation against any entity (including a
81 | cross-claim or counterclaim in a lawsuit) alleging that the Work or a
82 | Contribution incorporated within the Work constitutes direct or contributory
83 | patent infringement, then any patent licenses granted to You under this License
84 | for that Work shall terminate as of the date such litigation is filed.
85 |
86 | #### 4. Redistribution
87 |
88 | You may reproduce and distribute copies of the Work or Derivative Works thereof
89 | in any medium, with or without modifications, and in Source or Object form,
90 | provided that You meet the following conditions:
91 |
92 | * **(a)** You must give any other recipients of the Work or Derivative Works a copy of
93 | this License; and
94 | * **(b)** You must cause any modified files to carry prominent notices stating that You
95 | changed the files; and
96 | * **(c)** You must retain, in the Source form of any Derivative Works that You distribute,
97 | all copyright, patent, trademark, and attribution notices from the Source form
98 | of the Work, excluding those notices that do not pertain to any part of the
99 | Derivative Works; and
100 | * **(d)** If the Work includes a “NOTICE” text file as part of its distribution, then any
101 | Derivative Works that You distribute must include a readable copy of the
102 | attribution notices contained within such NOTICE file, excluding those notices
103 | that do not pertain to any part of the Derivative Works, in at least one of the
104 | following places: within a NOTICE text file distributed as part of the
105 | Derivative Works; within the Source form or documentation, if provided along
106 | with the Derivative Works; or, within a display generated by the Derivative
107 | Works, if and wherever such third-party notices normally appear. The contents of
108 | the NOTICE file are for informational purposes only and do not modify the
109 | License. You may add Your own attribution notices within Derivative Works that
110 | You distribute, alongside or as an addendum to the NOTICE text from the Work,
111 | provided that such additional attribution notices cannot be construed as
112 | modifying the License.
113 |
114 | You may add Your own copyright statement to Your modifications and may provide
115 | additional or different license terms and conditions for use, reproduction, or
116 | distribution of Your modifications, or for any such Derivative Works as a whole,
117 | provided Your use, reproduction, and distribution of the Work otherwise complies
118 | with the conditions stated in this License.
119 |
120 | #### 5. Submission of Contributions
121 |
122 | Unless You explicitly state otherwise, any Contribution intentionally submitted
123 | for inclusion in the Work by You to the Licensor shall be under the terms and
124 | conditions of this License, without any additional terms or conditions.
125 | Notwithstanding the above, nothing herein shall supersede or modify the terms of
126 | any separate license agreement you may have executed with Licensor regarding
127 | such Contributions.
128 |
129 | #### 6. Trademarks
130 |
131 | This License does not grant permission to use the trade names, trademarks,
132 | service marks, or product names of the Licensor, except as required for
133 | reasonable and customary use in describing the origin of the Work and
134 | reproducing the content of the NOTICE file.
135 |
136 | #### 7. Disclaimer of Warranty
137 |
138 | Unless required by applicable law or agreed to in writing, Licensor provides the
139 | Work (and each Contributor provides its Contributions) on an “AS IS” BASIS,
140 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
141 | including, without limitation, any warranties or conditions of TITLE,
142 | NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
143 | solely responsible for determining the appropriateness of using or
144 | redistributing the Work and assume any risks associated with Your exercise of
145 | permissions under this License.
146 |
147 | #### 8. Limitation of Liability
148 |
149 | In no event and under no legal theory, whether in tort (including negligence),
150 | contract, or otherwise, unless required by applicable law (such as deliberate
151 | and grossly negligent acts) or agreed to in writing, shall any Contributor be
152 | liable to You for damages, including any direct, indirect, special, incidental,
153 | or consequential damages of any character arising as a result of this License or
154 | out of the use or inability to use the Work (including but not limited to
155 | damages for loss of goodwill, work stoppage, computer failure or malfunction, or
156 | any and all other commercial damages or losses), even if such Contributor has
157 | been advised of the possibility of such damages.
158 |
159 | #### 9. Accepting Warranty or Additional Liability
160 |
161 | While redistributing the Work or Derivative Works thereof, You may choose to
162 | offer, and charge a fee for, acceptance of support, warranty, indemnity, or
163 | other liability obligations and/or rights consistent with this License. However,
164 | in accepting such obligations, You may act only on Your own behalf and on Your
165 | sole responsibility, not on behalf of any other Contributor, and only if You
166 | agree to indemnify, defend, and hold each Contributor harmless for any liability
167 | incurred by, or claims asserted against, such Contributor by reason of your
168 | accepting any such warranty or additional liability.
169 |
170 | _END OF TERMS AND CONDITIONS_
171 |
172 | ### APPENDIX: How to apply the Apache License to your work
173 |
174 | To apply the Apache License to your work, attach the following boilerplate
175 | notice, with the fields enclosed by brackets `[]` replaced with your own
176 | identifying information. (Don't include the brackets!) The text should be
177 | enclosed in the appropriate comment syntax for the file format. We also
178 | recommend that a file or class name and description of purpose be included on
179 | the same “printed page” as the copyright notice for easier identification within
180 | third-party archives.
181 |
182 | Copyright 2018 Kevin Kuo
183 |
184 | Licensed under the Apache License, Version 2.0 (the "License");
185 | you may not use this file except in compliance with the License.
186 | You may obtain a copy of the License at
187 |
188 | http://www.apache.org/licenses/LICENSE-2.0
189 |
190 | Unless required by applicable law or agreed to in writing, software
191 | distributed under the License is distributed on an "AS IS" BASIS,
192 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
193 | See the License for the specific language governing permissions and
194 | limitations under the License.
195 |
--------------------------------------------------------------------------------
/manuscript/mybibfile.bib:
--------------------------------------------------------------------------------
1 | @misc{meyers2011loss,
2 | title={Loss reserving data pulled from {NAIC} Schedule P},
3 | author={Meyers, Glenn and Shi, Peng},
4 | year={2011},
5 | howpublished={\url{http://www.casact.org/research/index.cfm?fa=loss_reserves_data}}
6 | }
7 |
8 | @book{meyers2015stochastic,
9 | title={Stochastic loss reserving using Bayesian {MCMC} models},
10 | author={Meyers, Glenn},
11 | year={2015},
12 | publisher={Casualty Actuarial Society}
13 | }
14 |
15 | @article{lecun2015deep,
16 | title={Deep learning},
17 | author={LeCun, Yann and Bengio, Yoshua and Hinton, Geoffrey},
18 | journal={nature},
19 | volume={521},
20 | number={7553},
21 | pages={436},
22 | year={2015},
23 | publisher={Nature Publishing Group}
24 | }
25 |
26 | @misc{chollet2017kerasR,
27 | title={R Interface to Keras},
28 | author={Chollet, Fran\c{c}ois and Allaire, JJ and others},
29 | year={2017},
30 | publisher={GitHub},
31 | howpublished={\url{https://github.com/rstudio/keras}},
32 | }
33 |
34 | @article{quarg2004munich,
35 | title={Munich chain ladder},
36 | author={Quarg, Gerhard and Mack, Thomas},
37 | journal={Bl{\"a}tter der DGVFM},
38 | volume={26},
39 | number={4},
40 | pages={597--630},
41 | year={2004},
42 | publisher={Springer}
43 | }
44 |
45 | @article{miranda2012double,
46 | title={Double chain ladder},
47 | author={Miranda, Mar{\'\i}a Dolores Mart{\'\i}nez and Nielsen, Jens Perch and Verrall, Richard},
48 | journal={ASTIN Bulletin: The Journal of the IAA},
49 | volume={42},
50 | number={1},
51 | pages={59--76},
52 | year={2012},
53 | publisher={Cambridge University Press}
54 | }
55 |
56 | @article{avanzi2016stochastic,
57 | title={Stochastic loss reserving with dependence: A flexible multivariate Tweedie approach},
58 | author={Avanzi, Benjamin and Taylor, Greg and Vu, Phuong Anh and Wong, Bernard},
59 | journal={Insurance: Mathematics and Economics},
60 | volume={71},
61 | pages={63--78},
62 | year={2016},
63 | publisher={Elsevier}
64 | }
65 |
66 | @misc{tensorflow2015-whitepaper,
67 | title={ {TensorFlow}: Large-Scale Machine Learning on Heterogeneous Systems},
68 | url={https://www.tensorflow.org/},
69 | note={Software available from tensorflow.org},
70 | author={
71 | Mart\'{\i}n~Abadi and
72 | Ashish~Agarwal and
73 | Paul~Barham and
74 | Eugene~Brevdo and
75 | Zhifeng~Chen and
76 | Craig~Citro and
77 | Greg~S.~Corrado and
78 | Andy~Davis and
79 | Jeffrey~Dean and
80 | Matthieu~Devin and
81 | Sanjay~Ghemawat and
82 | Ian~Goodfellow and
83 | Andrew~Harp and
84 | Geoffrey~Irving and
85 | Michael~Isard and
86 | Yangqing Jia and
87 | Rafal~Jozefowicz and
88 | Lukasz~Kaiser and
89 | Manjunath~Kudlur and
90 | Josh~Levenberg and
91 | Dandelion~Man\'{e} and
92 | Rajat~Monga and
93 | Sherry~Moore and
94 | Derek~Murray and
95 | Chris~Olah and
96 | Mike~Schuster and
97 | Jonathon~Shlens and
98 | Benoit~Steiner and
99 | Ilya~Sutskever and
100 | Kunal~Talwar and
101 | Paul~Tucker and
102 | Vincent~Vanhoucke and
103 | Vijay~Vasudevan and
104 | Fernanda~Vi\'{e}gas and
105 | Oriol~Vinyals and
106 | Pete~Warden and
107 | Martin~Wattenberg and
108 | Martin~Wicke and
109 | Yuan~Yu and
110 | Xiaoqiang~Zheng},
111 | year={2015},
112 | }
113 |
114 | @inproceedings{collobert2008unified,
115 | title={A unified architecture for natural language processing: Deep neural networks with multitask learning},
116 | author={Collobert, Ronan and Weston, Jason},
117 | booktitle={Proceedings of the 25th international conference on Machine learning},
118 | pages={160--167},
119 | year={2008},
120 | organization={ACM}
121 | }
122 |
123 | @article{caruana1997multitask,
124 | title={Multitask learning},
125 | author={Caruana, Rich},
126 | journal={Machine learning},
127 | volume={28},
128 | number={1},
129 | pages={41--75},
130 | year={1997},
131 | publisher={Springer}
132 | }
133 |
134 | @book{goodfellow2016deep,
135 | title={Deep learning},
136 | author={Goodfellow, Ian and Bengio, Yoshua and Courville, Aaron},
137 | year={2016},
138 | publisher={MIT Press Cambridge}
139 | }
140 |
141 | @article{england2002stochastic,
142 | title={Stochastic claims reserving in general insurance},
143 | author={England, Peter D and Verrall, Richard J},
144 | journal={British Actuarial Journal},
145 | volume={8},
146 | number={3},
147 | pages={443--518},
148 | year={2002},
149 | publisher={Cambridge University Press}
150 | }
151 |
152 | @misc{chung2014empirical,
153 | title={Empirical Evaluation of Gated Recurrent Neural Networks on Sequence Modeling},
154 | author={Junyoung Chung and Caglar Gulcehre and KyungHyun Cho and Yoshua Bengio},
155 | year={2014},
156 | eprint={1412.3555},
157 | archivePrefix={arXiv},
158 | primaryClass={cs.NE}
159 | }
160 |
161 | @inproceedings{nair2010rectified,
162 | title={Rectified linear units improve restricted boltzmann machines},
163 | author={Nair, Vinod and Hinton, Geoffrey E},
164 | booktitle={Proceedings of the 27th international conference on machine learning (ICML-10)},
165 | pages={807--814},
166 | year={2010}
167 | }
168 |
169 | @inproceedings{
170 | j.2018on,
171 | title={On the Convergence of Adam and Beyond},
172 | author={Sashank J. Reddi and Satyen Kale and Sanjiv Kumar},
173 | booktitle={International Conference on Learning Representations},
174 | year={2018},
175 | url={https://openreview.net/forum?id=ryQu7f-RZ},
176 | }
177 |
178 | @article{mack1993distribution,
179 | title={Distribution-free calculation of the standard error of chain ladder reserve estimates},
180 | author={Mack, Thomas},
181 | journal={Astin bulletin},
182 | volume={23},
183 | number={2},
184 | pages={213--225},
185 | year={1993}
186 | }
187 |
188 | @book{chollet2018deep,
189 | title={Deep learning with R},
190 | author={Chollet, Francois and Allaire, JJ},
191 | year={2018},
192 | publisher={Manning Publications}
193 | }
194 |
195 | @incollection{jolliffe2011principal,
196 | title={Principal component analysis},
197 | author={Jolliffe, Ian},
198 | booktitle={International encyclopedia of statistical science},
199 | pages={1094--1096},
200 | year={2011},
201 | publisher={Springer}
202 | }
203 |
204 | @article{wuthrich2018machine,
205 | title={Machine learning in individual claims reserving},
206 | author={W{\"u}thrich, Mario V},
207 | journal={Scandinavian Actuarial Journal},
208 | pages={1--16},
209 | year={2018},
210 | publisher={Taylor \& Francis}
211 | }
212 |
213 | @Manual{h2o_R_package,
214 | title = {h2o: R Interface for H2O},
215 | author = {{The H2O.ai team}},
216 | year = {2018},
217 | note = {R package version 3.20.0.8},
218 | url = {http://www.h2o.ai},
219 | }
220 |
221 | @book{friedman2001elements,
222 | title={The elements of statistical learning},
223 | author={Friedman, Jerome and Hastie, Trevor and Tibshirani, Robert},
224 | volume={1},
225 | number={10},
226 | year={2001},
227 | publisher={Springer series in statistics New York, NY, USA:}
228 | }
229 |
230 | @article{gabrielli2018neural,
231 | title={Neural network embedding of the over-dispersed Poisson reserving model},
232 | author={Gabrielli, Andrea and Richman, Ronald and W{\"u}thrich, Mario V},
233 | journal={Available at SSRN},
234 | year={2018}
235 | }
236 |
237 | @article{gabrielli2019neural,
238 | title={A Neural Network Boosted Double Over-Dispersed Poisson Claims Reserving Model},
239 | author={Gabrielli, Andrea},
240 | journal={Available at SSRN 3365517},
241 | year={2019}
242 | }
243 |
244 | @article{wuthrich2018neural,
245 | title={Neural networks applied to chain--ladder reserving},
246 | author={W{\"u}thrich, Mario V},
247 | journal={European Actuarial Journal},
248 | volume={8},
249 | number={2},
250 | pages={407--436},
251 | year={2018},
252 | publisher={Springer}
253 | }
254 |
255 | @inproceedings{sutskever2014sequence,
256 | title={Sequence to sequence learning with neural networks},
257 | author={Sutskever, Ilya and Vinyals, Oriol and Le, Quoc V},
258 | booktitle={Advances in neural information processing systems},
259 | pages={3104--3112},
260 | year={2014}
261 | }
262 |
263 | @article{DBLP:journals/corr/SrivastavaMS15,
264 | author = {Nitish Srivastava and
265 | Elman Mansimov and
266 | Ruslan Salakhutdinov},
267 | title = {Unsupervised Learning of Video Representations using LSTMs},
268 | journal = {CoRR},
269 | volume = {abs/1502.04681},
270 | year = {2015},
271 | url = {http://arxiv.org/abs/1502.04681},
272 | archivePrefix = {arXiv},
273 | eprint = {1502.04681},
274 | timestamp = {Mon, 13 Aug 2018 16:47:05 +0200},
275 | biburl = {https://dblp.org/rec/bib/journals/corr/SrivastavaMS15},
276 | bibsource = {dblp computer science bibliography, https://dblp.org}
277 | }
278 |
279 | @article{richman2018neural,
280 | title={A neural network extension of the Lee-Carter model to multiple populations},
281 | author={Richman, Ronald and W{\"u}thrich, Mario V},
282 | journal={Available at SSRN 3270877},
283 | year={2018}
284 | }
285 |
286 | @article{Cheng_2016,
287 | title={Wide \& Deep Learning for Recommender Systems},
288 | ISBN={9781450347952},
289 | url={http://dx.doi.org/10.1145/2988450.2988454},
290 | DOI={10.1145/2988450.2988454},
291 | journal={Proceedings of the 1st Workshop on Deep Learning for Recommender Systems - DLRS 2016},
292 | publisher={ACM Press},
293 | author={Cheng, Heng-Tze and Ispir, Mustafa and Anil, Rohan and Haque, Zakaria and Hong, Lichan and Jain, Vihan and Liu, Xiaobing and Shah, Hemal and Koc, Levent and Harmsen, Jeremiah and et al.},
294 | year={2016}
295 | }
296 |
297 | @article{DBLP:journals/corr/GuoB16,
298 | author = {Cheng Guo and
299 | Felix Berkhahn},
300 | title = {Entity Embeddings of Categorical Variables},
301 | journal = {CoRR},
302 | volume = {abs/1604.06737},
303 | year = {2016},
304 | url = {http://arxiv.org/abs/1604.06737},
305 | archivePrefix = {arXiv},
306 | eprint = {1604.06737},
307 | timestamp = {Mon, 13 Aug 2018 16:49:04 +0200},
308 | biburl = {https://dblp.org/rec/bib/journals/corr/GuoB16},
309 | bibsource = {dblp computer science bibliography, https://dblp.org}
310 | }
311 |
312 | @incollection{NIPS2017_7219,
313 | title = {Simple and Scalable Predictive Uncertainty Estimation using Deep Ensembles},
314 | author = {Lakshminarayanan, Balaji and Pritzel, Alexander and Blundell, Charles},
315 | booktitle = {Advances in Neural Information Processing Systems 30},
316 | editor = {I. Guyon and U. V. Luxburg and S. Bengio and H. Wallach and R. Fergus and S. Vishwanathan and R. Garnett},
317 | pages = {6402--6413},
318 | year = {2017},
319 | publisher = {Curran Associates, Inc.},
320 | url = {http://papers.nips.cc/paper/7219-simple-and-scalable-predictive-uncertainty-estimation-using-deep-ensembles.pdf}
321 | }
322 |
323 | @article{gabrielli2018individual,
324 | title={An individual claims history simulation machine},
325 | author={Gabrielli, Andrea and W{\"u}thrich, Mario V},
326 | journal={Risks},
327 | volume={6},
328 | number={2},
329 | pages={29},
330 | year={2018},
331 | publisher={Multidisciplinary Digital Publishing Institute}
332 | }
333 |
334 | @article{wuthrich2019data,
335 | title={Data analytics for non-life insurance pricing},
336 | author={W{\"u}thrich, Mario V and Buser, Christoph},
337 | journal={Swiss Finance Institute Research Paper},
338 | number={16-68},
339 | year={2019}
340 | }
341 |
342 | @article{srivastava2014dropout,
343 | title={Dropout: a simple way to prevent neural networks from overfitting},
344 | author={Srivastava, Nitish and Hinton, Geoffrey and Krizhevsky, Alex and Sutskever, Ilya and Salakhutdinov, Ruslan},
345 | journal={The journal of machine learning research},
346 | volume={15},
347 | number={1},
348 | pages={1929--1958},
349 | year={2014},
350 | publisher={JMLR. org}
351 | }
352 |
353 | @article{martinek2019analysis,
354 | title={Analysis of Stochastic Reserving Models By Means of NAIC Claims Data},
355 | author={Martinek, L{\'a}szl{\'o}},
356 | journal={Risks},
357 | volume={7},
358 | number={2},
359 | pages={62},
360 | year={2019},
361 | publisher={Multidisciplinary Digital Publishing Institute}
362 | }
363 |
364 | @article{peremans2018robust,
365 | title={A robust general multivariate chain ladder method},
366 | author={Peremans, Kris and Van Aelst, Stefan and Verdonck, Tim},
367 | journal={Risks},
368 | volume={6},
369 | number={4},
370 | pages={108},
371 | year={2018},
372 | publisher={Multidisciplinary Digital Publishing Institute}
373 | }
374 |
375 | @article{chukhrova2017state,
376 | title={State space models and the Kalman-filter in stochastic claims reserving: forecasting, filtering and smoothing},
377 | author={Chukhrova, Nataliya and Johannssen, Arne},
378 | journal={Risks},
379 | volume={5},
380 | number={2},
381 | pages={30},
382 | year={2017},
383 | publisher={Multidisciplinary Digital Publishing Institute}
384 | }
385 |
--------------------------------------------------------------------------------
/manuscript/mdpi.bst:
--------------------------------------------------------------------------------
1 | %% Bibliography style for MDPI journals
2 |
3 | ENTRY
4 | { address
5 | archiveprefix %
6 | author
7 | booktitle
8 | chapter
9 | edition
10 | editor
11 | eprint %
12 | doi
13 | howpublished
14 | institution
15 | journal
16 | key
17 | month
18 | note
19 | number
20 | organization
21 | pages
22 | primaryclass %
23 | publisher
24 | school
25 | series
26 | title
27 | type
28 | volume
29 | year
30 | url
31 | urldate
32 | nationality
33 | }
34 | {}
35 | { label extra.label sort.label short.list }
36 |
37 | INTEGERS { output.state before.all mid.sentence after.sentence after.block after.item }
38 |
39 | FUNCTION {init.state.consts}
40 | { #0 'before.all :=
41 | #1 'mid.sentence :=
42 | #2 'after.sentence :=
43 | #3 'after.block :=
44 | #4 'after.item :=
45 | }
46 |
47 | STRINGS { s t }
48 |
49 | FUNCTION {output.nonnull}  % write the pending field using the separator implied by output.state
50 | { 's :=  % stash the new (non-empty) field in s
51 |   output.state mid.sentence =
52 |     { ", " * write$ }  % mid-sentence: join with ", "
53 |     { output.state after.block =
54 | 	{ add.period$ write$
55 | 	  newline$
56 | 	  "\newblock " write$  % block break: close sentence, start \newblock
57 | 	}
58 | 	{ output.state before.all =
59 | 	    'write$  % very first field of the entry: no separator
60 | 	    { output.state after.item =
61 | 	      {"; " * write$}  % item break: join with "; "
62 | 	      {add.period$ " " * write$}  % sentence break: period then space
63 | 	      if$}
64 | 	  if$
65 | 	}
66 | 	if$
67 | 	mid.sentence 'output.state :=  % following fields continue the sentence
68 |       }
69 |     if$
70 | s  % leave the new field on the stack as the next pending output
71 | }
72 |
73 | FUNCTION {output}  % emit a field only if it is non-empty
74 | { duplicate$ empty$
75 |     'pop$  % empty: discard silently
76 |     'output.nonnull
77 |   if$
78 | }
79 | 
80 | FUNCTION {output.check}  % like output, but warn when the named field t is missing
81 | { 't :=
82 |   duplicate$ empty$
83 |     { pop$ "empty " t * " in " * cite$ * warning$ }
84 |     'output.nonnull
85 |   if$
86 | }
87 | 
88 | FUNCTION {output.checkwoa}  % output.check variant without the missing-field warning
89 | { 't :=
90 |   duplicate$ empty$
91 |     { pop$ }
92 |     'output.nonnull
93 |   if$
94 | }
95 |
96 | FUNCTION {fin.entry}
97 | { add.period$
98 | write$
99 | newline$
100 | }
101 |
102 | FUNCTION {new.block}
103 | { output.state before.all =
104 | 'skip$
105 | { after.block 'output.state := }
106 | if$
107 | }
108 |
109 | FUNCTION {new.sentence}
110 | { output.state after.block =
111 | 'skip$
112 | { output.state before.all =
113 | 'skip$
114 | { after.sentence 'output.state := }
115 | if$
116 | }
117 | if$
118 | }
119 |
120 | FUNCTION {not}  % logical NOT via if$: nonzero -> 0, zero -> 1
121 | { { #0 }
122 |     { #1 }
123 |   if$
124 | }
125 | 
126 | FUNCTION {and}  % logical AND: keep second operand if first is true, else force 0
127 | { 'skip$
128 |     { pop$ #0 }
129 |   if$
130 | }
131 | 
132 | FUNCTION {or}  % logical OR: force 1 if first operand is true, else keep second
133 | { { pop$ #1 }
134 |     'skip$
135 |   if$
136 | }
137 |
138 | FUNCTION {new.block.checka}
139 | { empty$
140 | 'skip$
141 | 'new.block
142 | if$
143 | }
144 |
145 | FUNCTION {new.block.checkb}
146 | { empty$
147 | swap$ empty$
148 | and
149 | 'skip$
150 | 'new.block
151 | if$
152 | }
153 |
154 | FUNCTION {new.sentence.checka}
155 | { empty$
156 | 'skip$
157 | 'new.sentence
158 | if$
159 | }
160 |
161 | FUNCTION {new.sentence.checkb}
162 | { empty$
163 | swap$ empty$
164 | and
165 | 'skip$
166 | 'new.sentence
167 | if$
168 | }
169 |
170 | FUNCTION {field.or.null}
171 | { duplicate$ empty$
172 | { pop$ "" }
173 | 'skip$
174 | if$
175 | }
176 |
177 | FUNCTION {emphasize}
178 | { duplicate$ empty$
179 | { pop$ "" }
180 | { "{\em " swap$ * "}" * }
181 | if$
182 | }
183 |
184 | FUNCTION {embolden}
185 | { duplicate$ empty$
186 | { pop$ "" }
187 | { "{\bf " swap$ * "}" * }
188 | if$
189 | }
190 |
191 | FUNCTION {website}
192 | { duplicate$ empty$
193 | { pop$ "" }
194 | { "\url{" swap$ * "}" * }
195 | if$
196 | }
197 |
198 | INTEGERS { nameptr namesleft numnames }
199 |
200 | FUNCTION {format.names}  % format a name list as "von Last, Jr., F.; ..." (MDPI style, no "and")
201 | { 's :=
202 |   #1 'nameptr :=
203 |   s num.names$ 'numnames :=
204 |   numnames 'namesleft :=
205 |   { namesleft #0 > }
206 |   { s nameptr "{vv~}{ll}{, jj}{, f{.}}." format.name$ 't :=  % first names abbreviated to initials
207 |     nameptr #1 >
208 |       { namesleft #1 >
209 | 	  { "; " * t * }  % middle names: semicolon-separated
210 | 	  { numnames #2 >
211 | 	      { "" * }
212 | 	      'skip$
213 | 	    if$
214 | 	    t "others" =
215 | 	      { " et~al." * }  % literal "others" renders as et~al.
216 | 	      { "; " * t * }  % final name also joined with "; "
217 | 	    if$
218 | 	  }
219 | 	if$
220 |       }
221 |       't
222 |     if$
223 |     nameptr #1 + 'nameptr :=
224 |     namesleft #1 - 'namesleft :=
225 |   }
226 |   while$
227 | }
228 |
229 | FUNCTION {format.key}
230 | { empty$
231 | { key field.or.null }
232 | { "" }
233 | if$
234 | }
235 |
236 |
237 | FUNCTION {format.authors}
238 | { author empty$
239 | { "" }
240 | { author format.names }
241 | if$
242 | }
243 |
244 | FUNCTION {format.editors}
245 | { editor empty$
246 | { "" }
247 | { editor format.names
248 | editor num.names$ #1 >
249 | { ", Eds." * }
250 | { ", Ed." * }
251 | if$
252 | }
253 | if$
254 | }
255 |
256 |
257 |
258 |
259 | FUNCTION {format.title}
260 | { title empty$
261 | { "" }
262 | { title}
263 | if$
264 | }
265 |
266 | FUNCTION {format.number.patent}
267 | { number empty$
268 | { "" }
269 | { nationality empty$
270 | { number}
271 | { nationality " " * number *}
272 | if$
273 | }
274 | if$
275 | }
276 |
277 | FUNCTION {format.full.names}
278 | {'s :=
279 | #1 'nameptr :=
280 | s num.names$ 'numnames :=
281 | numnames 'namesleft :=
282 | { namesleft #0 > }
283 | { s nameptr
284 | "{vv~}{ll}" format.name$ 't :=
285 | nameptr #1 >
286 | {
287 | namesleft #1 >
288 | { ", " * t * }
289 | {
290 | numnames #2 >
291 | { "," * }
292 | 'skip$
293 | if$
294 | t "others" =
295 | { " et~al." * }
296 | { " and " * t * }
297 | if$
298 | }
299 | if$
300 | }
301 | 't
302 | if$
303 | nameptr #1 + 'nameptr :=
304 | namesleft #1 - 'namesleft :=
305 | }
306 | while$
307 | }
308 |
309 | FUNCTION {author.editor.full}
310 | { author empty$
311 | { editor empty$
312 | { "" }
313 | { editor format.full.names }
314 | if$
315 | }
316 | { author format.full.names }
317 | if$
318 | }
319 |
320 |
321 |
322 | FUNCTION {author.full}
323 | { author empty$
324 | { "" }
325 | { author format.full.names }
326 | if$
327 | }
328 |
329 | FUNCTION {editor.full}
330 | { editor empty$
331 | { "" }
332 | { editor format.full.names }
333 | if$
334 | }
335 |
336 | FUNCTION {make.full.names}
337 | { type$ "book" =
338 | type$ "inbook" =
339 | or
340 | 'author.editor.full
341 | { type$ "proceedings" =
342 | 'editor.full
343 | 'author.full
344 | if$
345 | }
346 | if$
347 | }
348 |
349 | FUNCTION {output.bibitem}
350 | { newline$
351 | "\bibitem[" write$
352 | label write$
353 | ")" make.full.names duplicate$ short.list =
354 | { pop$ }
355 | { * }
356 | if$
357 | "]{" * write$
358 | cite$ write$
359 | "}" write$
360 | newline$
361 | ""
362 | before.all 'output.state :=
363 | }
364 |
365 | FUNCTION {n.dashify}  % normalise page ranges: a lone "-" becomes "--"; existing dash runs kept
366 | { 't :=
367 |   ""
368 |   { t empty$ not }
369 |   { t #1 #1 substring$ "-" =
370 |       { t #1 #2 substring$ "--" = not
371 | 	  { "--" *  % single hyphen: widen to en-dash
372 | 	    t #2 global.max$ substring$ 't :=
373 | 	  }
374 | 	  { { t #1 #1 substring$ "-" = }
375 | 	    { "-" *  % copy an existing dash run unchanged
376 | 	      t #2 global.max$ substring$ 't :=
377 | 	    }
378 | 	    while$
379 | 	  }
380 | 	if$
381 |       }
382 |       { t #1 #1 substring$ *  % ordinary character: copy through
383 | 	t #2 global.max$ substring$ 't :=
384 |       }
385 |     if$
386 |   }
387 |   while$
388 | }
389 |
390 |
391 | FUNCTION {format.date}
392 | { year empty$
393 | { month empty$
394 | { "" }
395 | { "there's a month but no year in " cite$ * warning$
396 | month
397 | }
398 | if$
399 | }
400 | { " " year embolden * }
401 | if$
402 | }
403 |
404 | FUNCTION {format.bdate}
405 | { year empty$
406 | { month empty$
407 | { "" }
408 | { "there's a month but no year in " cite$ * warning$
409 | month
410 | }
411 | if$
412 | }
413 | { " " year * }
414 | if$
415 | }
416 |
417 | FUNCTION {format.pdate}
418 | { year empty$
419 | { month empty$
420 | { "" }
421 | { "there's a month but no year in " cite$ * warning$
422 | month
423 | }
424 | if$
425 | }
426 | { month empty$
427 | { " " year * }
428 | { " " month * ", " * year * }
429 | if$}
430 | if$
431 | }
432 |
433 | FUNCTION {format.btitle}
434 | { title emphasize
435 | }
436 |
437 | FUNCTION {tie.or.space.connect}
438 | { duplicate$ text.length$ #3 <
439 | { "~" }
440 | { " " }
441 | if$
442 | swap$ * *
443 | }
444 |
445 | FUNCTION {either.or.check}
446 | { empty$
447 | 'pop$
448 | { "can't use both " swap$ * " fields in " * cite$ * warning$ }
449 | if$
450 | }
451 |
452 | FUNCTION {format.bvolume}
453 | { volume empty$
454 | { "" }
455 | { "Vol." volume tie.or.space.connect
456 | series empty$
457 | 'skip$
458 | { ", " * series emphasize * }
459 | if$
460 | "volume and number" number either.or.check
461 | }
462 | if$
463 | }
464 |
465 | FUNCTION {format.number.series}
466 | { volume empty$
467 | { number empty$
468 | { series field.or.null }
469 | { output.state mid.sentence =
470 | { "number" }
471 | { "Number" }
472 | if$
473 | number tie.or.space.connect
474 | series empty$
475 | { "there's a number but no series in " cite$ * warning$ }
476 | { " in " * series * }
477 | if$
478 | }
479 | if$
480 | }
481 | { "" }
482 | if$
483 | }
484 |
485 | FUNCTION {format.edition}
486 | { edition empty$
487 | { "" }
488 | { output.state mid.sentence =
489 | { edition "l" change.case$ " ed." * }
490 | { edition "t" change.case$ " ed." * }
491 | if$
492 | }
493 | if$
494 | }
495 |
496 | INTEGERS { multiresult }  % scratch flag used by multi.page.check
497 | 
498 | FUNCTION {multi.page.check}  % true when pages looks like a range or list ("-", "," or "+")
499 | { 't :=
500 |   #0 'multiresult :=
501 |   { multiresult not
502 |     t empty$ not
503 |     and
504 |   }
505 |   { t #1 #1 substring$
506 |     duplicate$ "-" =
507 |     swap$ duplicate$ "," =
508 |     swap$ "+" =
509 |     or or
510 |       { #1 'multiresult := }
511 |       { t #2 global.max$ substring$ 't := }  % advance one character
512 |     if$
513 |   }
514 |   while$
515 |   multiresult
516 | }
517 |
518 | FUNCTION {format.pages}
519 | { pages empty$
520 | { "" }
521 | { pages multi.page.check
522 | { "pp." pages n.dashify tie.or.space.connect }
523 | { "p." pages tie.or.space.connect }
524 | if$
525 | }
526 | if$
527 | }
528 |
529 | FUNCTION {format.vol.num.pages}
530 | { volume emphasize field.or.null
531 | number empty$
532 | 'skip$
533 | {
534 | volume empty$
535 | { "there's a number but no volume in " cite$ * warning$ }
536 | 'skip$
537 | if$
538 | }
539 | if$
540 | pages empty$
541 | 'skip$
542 | { duplicate$ empty$
543 | { pop$ format.pages }
544 | { ",~" * pages n.dashify * }
545 | if$
546 | }
547 | if$
548 | }
549 |
550 | FUNCTION {format.chapter.pages}
551 | { chapter empty$
552 | 'format.pages
553 | { type empty$
554 | { "chapter" }
555 | { type "l" change.case$ }
556 | if$
557 | chapter tie.or.space.connect
558 | pages empty$
559 | 'skip$
560 | { ", " * format.pages * }
561 | if$
562 | }
563 | if$
564 | }
565 |
566 | FUNCTION {format.in.ed.booktitle}
567 | { booktitle empty$
568 | { "" }
569 | { editor empty$
570 | { edition empty$
571 | {"In " booktitle emphasize *}
572 | {"In " booktitle emphasize * ", " * edition * " ed." *}
573 | if$
574 | }
575 | { edition empty$
576 | {"In " booktitle emphasize * "; " * format.editors * }
577 | {"In " booktitle emphasize * ", " * edition * " ed." * "; " * format.editors * }
578 | if$
579 | }
580 | if$
581 | }
582 | if$
583 | }
584 |
585 | FUNCTION {format.in.ed.booktitle.proc}
586 | { booktitle empty$
587 | { "" }
588 | { editor empty$
589 | { edition empty$
590 | {" " booktitle *}
591 | {" " booktitle * ", " * edition * " ed." *}
592 | if$
593 | }
594 | { edition empty$
595 | {" " booktitle * "; " * format.editors * }
596 | {" " booktitle * ", " * edition * " ed." * "; " * format.editors * }
597 | if$
598 | }
599 | if$
600 | }
601 | if$
602 | }
603 |
604 | FUNCTION {format.publisher.and.address}
605 | { publisher empty$
606 | {""}
607 | { address empty$
608 | {publisher }
609 | {publisher ": " * address *}
610 | if$
611 | }
612 | if$
613 | }
614 |
615 |
616 |
617 | FUNCTION {empty.misc.check}
618 | { author empty$ title empty$ howpublished empty$
619 | month empty$ year empty$ note empty$
620 | and and and and and
621 | { "all relevant fields are empty in " cite$ * warning$ }
622 | 'skip$
623 | if$
624 | }
625 |
626 | FUNCTION {format.thesis.type}
627 | { type empty$
628 | 'skip$
629 | { pop$
630 | type "t" change.case$
631 | }
632 | if$
633 | }
634 |
635 | FUNCTION {format.tr.number}
636 | { type empty$
637 | { "Technical Report" }
638 | 'type
639 | if$
640 | number empty$
641 | { "t" change.case$ }
642 | { number tie.or.space.connect }
643 | if$
644 | }
645 |
646 | FUNCTION {format.article.crossref}
647 | { key empty$
648 | { journal empty$
649 | { "need key or journal for " cite$ * " to crossref " * crossref *
650 | warning$
651 | ""
652 | }
653 | { "In \emph{" journal * "}" * }
654 | if$
655 | }
656 | { "In " }
657 | if$
658 | " \citet{" * crossref * "}" *
659 | }
660 |
661 |
662 |
663 | FUNCTION {format.book.crossref}
664 | { volume empty$
665 | { "empty volume in " cite$ * "'s crossref of " * crossref * warning$
666 | "In "
667 | }
668 | { "Vol." volume tie.or.space.connect
669 | " of " *
670 | }
671 | if$
672 | editor empty$
673 | editor field.or.null author field.or.null =
674 | or
675 | { key empty$
676 | { series empty$
677 | { "need editor, key, or series for " cite$ * " to crossref " *
678 | crossref * warning$
679 | "" *
680 | }
681 | { "{\em " * series * "\/}" * }
682 | if$
683 | }
684 | { key * }
685 | if$
686 | }
687 | { "" * }
688 | if$
689 | " \cite{" * crossref * "}" *
690 | }
691 |
692 | FUNCTION {format.incoll.inproc.crossref}
693 | { editor empty$
694 | editor field.or.null author field.or.null =
695 | or
696 | { key empty$
697 | { booktitle empty$
698 | { "need editor, key, or booktitle for " cite$ * " to crossref " *
699 | crossref * warning$
700 | ""
701 | }
702 | { "In {\em " booktitle * "\/}" * }
703 | if$
704 | }
705 | { "In " key * }
706 | if$
707 | }
708 | { "In " * }
709 | if$
710 | " \cite{" * crossref * "}" *
711 | }
712 |
713 | FUNCTION {format.website}
714 | { url empty$
715 | { "" }
716 | { "" url website *
717 | urldate empty$
718 | {"there is url but no urldate in " cite$ * warning$}
719 | { ", accessed on " * urldate *}
720 | if$
721 | }
722 | if$
723 | }
724 |
725 |
726 | %% the following function is modified from kp.bst at http://arxiv.org/hypertex/bibstyles/
727 | FUNCTION {format.eprint}  % render "[prefix:class/id]" as a hyperlink when eprint is set
728 | {eprint empty$
729 |   { ""}
730 |   {primaryClass empty$
731 |   {" \href{http://xxx.lanl.gov/abs/" eprint * "}" * "{{\normalfont " * "[" * eprint * "]" * "}}" *}
732 |   {archivePrefix empty$
733 |   {" \href{http://xxx.lanl.gov/abs/" eprint * "}" * "{{\normalfont " * "[" * "arXiv:" * primaryClass * "/" * eprint * "]" * "}}" *}  % no prefix field: assumes arXiv
734 |   {" \href{http://xxx.lanl.gov/abs/" eprint * "}" * "{{\normalfont " * "[" * archivePrefix * ":" * primaryClass * "/" * eprint * "]" * "}}" *}
735 |   if$
736 |   }
737 |   if$
738 |   }
739 | if$
740 | }
741 |
742 |
743 | %% For printing DOI numbers (it is a hyperlink but printed in black)
744 | FUNCTION {format.doi}  % prefix form: emits \href{https://doi.org/<doi>} with no {text} group
745 | { doi empty$
746 |     { "" }
747 |     {"\href{https://doi.org/" doi * "}" * }  % NOTE(review): \href normally takes a second {text} argument; caller must append it -- verify at call sites (none visible here)
748 | if$
749 | }
750 | 
751 | FUNCTION {formatfull.doi}  % complete "doi:<doi>" hyperlink, printed black via \changeurlcolor
752 | { doi empty$
753 |     { "" }
754 |     {"doi:{\changeurlcolor{black}\href{https://doi.org/" doi * "}{\detokenize{" * doi * "}}}" * }
755 | if$
756 | }
757 |
758 |
759 |
760 | FUNCTION {article}
761 | { output.bibitem
762 | format.authors "author" output.check
763 | author format.key output
764 | new.block
765 | format.title "title" output.check
766 | new.block
767 | crossref missing$
768 | { journal emphasize "journal" output.check
769 | format.date * format.vol.num.pages "" * output
770 | }
771 | { format.article.crossref output.nonnull
772 | format.pages output
773 | }
774 | if$
775 | format.eprint output
776 | new.block
777 | note output
778 | formatfull.doi output
779 | fin.entry
780 | }
781 |
782 | FUNCTION {book}
783 | { output.bibitem
784 | author empty$
785 | { format.editors "author and editor" output.check }
786 | { format.authors output.nonnull
787 | crossref missing$
788 | { "author and editor" editor either.or.check }
789 | 'skip$
790 | if$
791 | }
792 | if$
793 | new.block
794 | format.btitle "title" output.check
795 | format.edition output
796 | after.item 'output.state :=
797 | crossref missing$
798 | { format.bvolume output
799 | format.number.series output
800 | format.publisher.and.address "publisher" output.check
801 | %%% address output
802 | }
803 | {
804 | format.book.crossref output.nonnull
805 | }
806 | if$
807 | format.bdate "year" output.check
808 | after.item 'output.state :=
809 | format.chapter.pages output
810 | format.eprint output
811 | new.block
812 | note output
813 | formatfull.doi output
814 | fin.entry
815 | }
816 |
817 | FUNCTION {booklet}
818 | { output.bibitem
819 | format.authors output
820 | new.block
821 | format.title "title" output.check
822 | howpublished address new.block.checkb
823 | howpublished output
824 | address output
825 | format.bdate output
826 | format.eprint output
827 | new.block
828 | note output
829 | formatfull.doi output
830 | fin.entry
831 | }
832 |
833 | FUNCTION {inbook}
834 | { output.bibitem
835 | author empty$
836 | { format.editors "author and editor" output.check }
837 | { format.authors output.nonnull
838 | crossref missing$
839 | { "author and editor" editor either.or.check }
840 | 'skip$
841 | if$
842 | }
843 | if$
844 | %%% new.block
845 | format.title "title" output.check
846 | new.block
847 | crossref missing$
848 | { format.in.ed.booktitle "booktitle" output.check
849 | after.item 'output.state :=
850 | format.number.series output
851 | %% new.sentence
852 | format.publisher.and.address "publisher" output.check
853 | format.bdate "year" output.check
854 | after.item 'output.state :=
855 | format.bvolume output
856 | format.chapter.pages "chapter and pages" output.check
857 |
858 | }
859 | { format.chapter.pages "chapter and pages" output.check
860 | new.block
861 | format.book.crossref output.nonnull
862 | format.bdate "year" output.check
863 | }
864 | if$
865 | format.eprint output
866 | new.block
867 | note output
868 | formatfull.doi output
869 | fin.entry
870 | }
871 |
872 | FUNCTION {incollection}
873 | { output.bibitem
874 | format.authors "author" output.check
875 | new.block
876 | format.title "title" output.check
877 | new.sentence
878 | crossref missing$
879 | { format.in.ed.booktitle "booktitle" output.check
880 | after.item 'output.state :=
881 | format.number.series output
882 | % new.sentence
883 | format.publisher.and.address "publisher" output.check
884 | format.bdate "year" output.check
885 | after.item 'output.state :=
886 | format.bvolume output
887 | format.chapter.pages output
888 | }
889 | { format.incoll.inproc.crossref output.nonnull
890 | format.chapter.pages output
891 | }
892 | if$
893 | format.eprint output
894 | new.block
895 | note output
896 | formatfull.doi output
897 | fin.entry
898 | }
899 |
900 | FUNCTION {inproceedings}
901 | { output.bibitem
902 | format.authors "author" output.check
903 | new.block
904 | format.title "title" output.check
905 | new.block
906 | crossref missing$
907 | { format.in.ed.booktitle.proc "booktitle" output.check
908 | address empty$
909 | { organization publisher new.sentence.checkb
910 | organization output
911 | publisher output
912 | format.bdate "year" output.check
913 | }
914 | { after.item 'output.state :=
915 | organization output
916 | format.publisher.and.address output.nonnull
917 | format.bdate "year" output.check
918 | after.item 'output.state :=
919 | }
920 | if$
921 | format.number.series output
922 | format.bvolume output
923 | format.pages output
924 | }
925 | { format.incoll.inproc.crossref output.nonnull
926 | format.pages output
927 | }
928 | if$
929 | format.eprint output
930 | new.block
931 | note output
932 | formatfull.doi output
933 | fin.entry
934 | }
935 |
936 | FUNCTION {conference} { inproceedings }
937 |
938 | FUNCTION {manual}
939 | { output.bibitem
940 | author empty$
941 | { organization empty$
942 | 'skip$
943 | { organization output.nonnull
944 | address output
945 | }
946 | if$
947 | }
948 | { format.authors output.nonnull }
949 | if$
950 | new.block
951 | format.btitle "title" output.check
952 | author empty$
953 | { organization empty$
954 | { address new.block.checka
955 | address output
956 | }
957 | 'skip$
958 | if$
959 | }
960 | { organization address new.block.checkb
961 | organization output
962 | address output
963 | }
964 | if$
965 | format.edition output
966 | format.bdate output
967 | format.eprint output
968 | new.block
969 | note output
970 | formatfull.doi output
971 | fin.entry
972 | }
973 |
974 | FUNCTION {mastersthesis}
975 | { output.bibitem
976 | format.authors "author" output.check
977 | new.block
978 | format.title "title" output.check
979 | new.block
980 | "Master's thesis" format.thesis.type output.nonnull
981 | school "school" output.check
982 | address output
983 | format.bdate "year" output.check
984 | format.eprint output
985 | new.block
986 | note output
987 | formatfull.doi output
988 | fin.entry
989 | }
990 |
991 | FUNCTION {misc}
992 | { output.bibitem
993 | format.authors output
994 | title howpublished new.block.checkb
995 | format.title output
996 | howpublished new.block.checka
997 | howpublished output
998 | format.bdate output
999 | format.eprint output
1000 | new.block
1001 | note output
1002 | formatfull.doi output
1003 | fin.entry
1004 | empty.misc.check
1005 | }
1006 |
1007 | FUNCTION {phdthesis}
1008 | { output.bibitem
1009 | format.authors "author" output.check
1010 | new.block
1011 | format.title "title" output.check
1012 | new.block
1013 | "PhD thesis" format.thesis.type output.nonnull
1014 | school "school" output.check
1015 | address output
1016 | format.bdate "year" output.check
1017 | format.eprint output
1018 | new.block
1019 | note output
1020 | formatfull.doi output
1021 | fin.entry
1022 | }
1023 |
1024 | FUNCTION {proceedings}
1025 | { output.bibitem
1026 | editor empty$
1027 | { organization output }
1028 | { format.editors output.nonnull }
1029 | if$
1030 | new.block
1031 | format.btitle "title" output.check
1032 | format.bvolume output
1033 | format.number.series output
1034 | address empty$
1035 | { editor empty$
1036 | { publisher new.sentence.checka }
1037 | { organization publisher new.sentence.checkb
1038 | organization output
1039 | }
1040 | if$
1041 | publisher output
1042 | format.bdate "year" output.check
1043 | }
1044 | { address output.nonnull
1045 | format.bdate "year" output.check
1046 | new.sentence
1047 | editor empty$
1048 | 'skip$
1049 | { organization output }
1050 | if$
1051 | publisher output
1052 | }
1053 | if$
1054 | format.eprint output
1055 | new.block
1056 | note output
1057 | formatfull.doi output
1058 | fin.entry
1059 | }
1060 |
1061 | FUNCTION {techreport}
1062 | { output.bibitem
1063 | format.authors "author" output.check
1064 | new.block
1065 | format.title "title" output.check
1066 | new.block
1067 | format.tr.number output.nonnull
1068 | institution "institution" output.check
1069 | address output
1070 | format.bdate "year" output.check
1071 | format.eprint output
1072 | new.block
1073 | note output
1074 | formatfull.doi output
1075 | fin.entry
1076 | }
1077 |
1078 | FUNCTION {unpublished}
1079 | { output.bibitem
1080 | format.authors "author" output.check
1081 | new.block
1082 | format.title "title" output.check
1083 | format.eprint output
1084 | new.block
1085 | note output
1086 | formatfull.doi output
1087 | fin.entry
1088 | }
1089 |
1090 | FUNCTION {www}
1091 | { output.bibitem
1092 | format.authors "author" output.checkwoa
1093 | new.block
1094 | format.title "title" output.check
1095 | new.block
1096 | format.website "url" output.check
1097 | format.eprint output
1098 | new.block
1099 | note output
1100 | formatfull.doi output
1101 | fin.entry
1102 | }
1103 |
1104 | FUNCTION {patent}
1105 | { output.bibitem
1106 | format.authors "author" output.check
1107 | new.block
1108 | format.title "title" output.check
1109 | new.block
1110 | format.number.patent "number" output.check
1111 | mid.sentence 'output.state :=
1112 | format.pdate "date" output.check
1113 | format.eprint output
1114 | new.block
1115 | note output
1116 | formatfull.doi output
1117 | fin.entry
1118 | }
1119 |
1120 | READ
1121 |
1122 | FUNCTION {sortify}  % canonicalise for sorting: strip TeX specials, then lowercase
1123 | { purify$
1124 |   "l" change.case$
1125 | }
1126 | 
1127 | 
1128 | INTEGERS { len }  % scratch word length for chop.word
1129 | 
1130 | FUNCTION {chop.word}  % if s starts with the given len-char word (e.g. "The "), drop that prefix
1131 | { 's :=
1132 |   'len :=
1133 |   s #1 len substring$ =
1134 |     { s len #1 + global.max$ substring$ }
1135 |     's
1136 |   if$
1137 | }
1138 |
1139 |
1140 | FUNCTION {format.lab.names}
1141 | { 's :=
1142 | s #1 "{vv~}{ll}" format.name$
1143 | s num.names$ duplicate$
1144 | #2 >
1145 | { pop$ " \em{et~al.}" * }
1146 | { #2 <
1147 | 'skip$
1148 | { s #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" =
1149 | { " \em{et~al.}" * }
1150 | { " and " * s #2 "{vv~}{ll}" format.name$ * }
1151 | if$
1152 | }
1153 | if$
1154 | }
1155 | if$
1156 | }
1157 |
1158 |
1159 | FUNCTION {author.key.label} % label source: author, else key field, else first 3 chars of the cite key
1160 | { author empty$
1161 | { key empty$
1162 | { cite$ #1 #3 substring$ } % neither author nor key: fall back to the citation-key prefix
1163 | 'key
1164 | if$
1165 | }
1166 | { author format.lab.names }
1167 | if$
1168 | }
1169 |
1170 | FUNCTION {author.editor.key.label} % like author.key.label but tries editor before key (used for books/inbooks)
1171 | { author empty$
1172 | { editor empty$
1173 | { key empty$
1174 | { cite$ #1 #3 substring$ }
1175 | 'key
1176 | if$
1177 | }
1178 | { editor format.lab.names }
1179 | if$
1180 | }
1181 | { author format.lab.names }
1182 | if$
1183 | }
1184 |
1185 | FUNCTION {author.key.organization.label} % label: author, else key, else organization (minus leading "The "), else cite key
1186 | { author empty$
1187 | { key empty$
1188 | { organization empty$
1189 | { cite$ #1 #3 substring$ }
1190 | { "The " #4 organization chop.word #3 text.prefix$ } % drop a leading "The ", keep a 3-char prefix
1191 | if$
1192 | }
1193 | 'key
1194 | if$
1195 | }
1196 | { author format.lab.names }
1197 | if$
1198 | }
1199 |
1200 | FUNCTION {editor.key.organization.label} % same fallback chain but starting from editor (used for proceedings)
1201 | { editor empty$
1202 | { key empty$
1203 | { organization empty$
1204 | { cite$ #1 #3 substring$ }
1205 | { "The " #4 organization chop.word #3 text.prefix$ }
1206 | if$
1207 | }
1208 | 'key
1209 | if$
1210 | }
1211 | { editor format.lab.names }
1212 | if$
1213 | }
1214 |
1215 | FUNCTION {calc.short.authors} % choose the label function by entry type; result stored in short.list
1216 | { type$ "book" =
1217 | type$ "inbook" =
1218 | or
1219 | 'author.editor.key.label % books may be cited by editor
1220 | { type$ "proceedings" =
1221 | 'editor.key.organization.label
1222 | { type$ "manual" =
1223 | 'author.key.organization.label
1224 | 'author.key.label % default for articles and everything else
1225 | if$
1226 | }
1227 | if$
1228 | }
1229 | if$
1230 | 'short.list :=
1231 | }
1232 |
1233 | FUNCTION {calc.label} % label := short.list "(" year; year dropped when empty or when the label is just the key
1234 | { calc.short.authors
1235 | short.list
1236 | "("
1237 | *
1238 | year duplicate$ empty$
1239 | short.list key field.or.null = or
1240 | { pop$ "" } % no usable year: append nothing
1241 | 'skip$
1242 | if$
1243 | *
1244 | 'label :=
1245 | }
1246 |
1247 | INTEGERS { seq.num } % running entry counter; makes sort keys unique and the sort stable
1248 |
1249 | FUNCTION {init.seq}
1250 | { #0 'seq.num :=}
1251 |
1252 | EXECUTE {init.seq}
1253 |
1254 | FUNCTION {int.to.fix} % int -> fixed-width zero-padded string so numbers sort lexicographically
1255 | { "000000000" swap$ int.to.str$ *
1256 | #-1 #10 substring$ % keep only the last 10 characters
1257 | }
1258 |
1259 |
1260 | FUNCTION {presort} % build sort.key$: sortified label + " " + sequence number (entry order breaks ties)
1261 | { calc.label
1262 | label sortify
1263 | " "
1264 | *
1265 | seq.num #1 + 'seq.num :=
1266 | seq.num int.to.fix
1267 | 'sort.label :=
1268 | sort.label *
1269 | #1 entry.max$ substring$ % truncate to BibTeX's maximum entry-string length
1270 | 'sort.key$ :=
1271 | }
1272 |
1273 | ITERATE {presort}
1274 |
1275 |
1276 | STRINGS { longest.label last.label next.extra } % state for the a/b/c label-disambiguation passes
1277 |
1278 | INTEGERS { longest.label.width last.extra.num number.label }
1279 |
1280 | FUNCTION {initialize.longest.label} % reset all disambiguation state before the passes run
1281 | { "" 'longest.label :=
1282 | #0 int.to.chr$ 'last.label := % sentinel that cannot equal any real label
1283 | "" 'next.extra :=
1284 | #0 'longest.label.width :=
1285 | #0 'last.extra.num :=
1286 | #0 'number.label :=
1287 | }
1288 |
1289 | FUNCTION {forward.pass} % entries are sorted: give repeated labels successive extra letters (b, c, ...)
1290 | { last.label label =
1291 | { last.extra.num #1 + 'last.extra.num := % same label as the previous entry: bump the letter
1292 | last.extra.num int.to.chr$ 'extra.label :=
1293 | }
1294 | { "a" chr.to.int$ 'last.extra.num := % new label: reset the counter (consumed only if a repeat follows)
1295 | "" 'extra.label :=
1296 | label 'last.label :=
1297 | }
1298 | if$
1299 | number.label #1 + 'number.label :=
1300 | }
1301 |
1302 | FUNCTION {reverse.pass} % walking backwards: the first entry of a duplicate run gets the "a" suffix
1303 | { next.extra "b" =
1304 | { "a" 'extra.label := }
1305 | 'skip$
1306 | if$
1307 | extra.label 'next.extra :=
1308 | extra.label
1309 | duplicate$ empty$
1310 | 'skip$
1311 | { "{\natexlab{" swap$ * "}}" * } % wrap the suffix so natbib can recognize/strip it
1312 | if$
1313 | 'extra.label :=
1314 | label extra.label * 'label := % final label = base label + (possibly empty) suffix
1315 | }
1316 |
1317 | EXECUTE {initialize.longest.label}
1318 |
1319 | ITERATE {forward.pass}
1320 |
1321 | REVERSE {reverse.pass}
1322 |
1323 | FUNCTION {begin.bib} % write the thebibliography preamble
1324 | { "\begin{thebibliography}{-------}"
1325 | write$ newline$
1326 | "\providecommand{\natexlab}[1]{#1}" % default \natexlab for documents not loading natbib
1327 | write$ newline$
1328 | }
1329 |
1330 | EXECUTE {begin.bib}
1331 |
1332 | EXECUTE {init.state.consts}
1333 |
1334 | ITERATE {call.type$} % emit each entry through its type's driver function (www, patent, ...)
1335 |
1336 | FUNCTION {end.bib} % close the thebibliography environment
1337 | { newline$
1338 | "\end{thebibliography}" write$ newline$
1339 | }
1340 |
1341 | EXECUTE {end.bib}
1342 |
1343 |
1344 |
--------------------------------------------------------------------------------
/manuscript/manuscript.Rmd:
--------------------------------------------------------------------------------
1 | ---
2 | title: "DeepTriangle: A Deep Learning Approach to Loss Reserving"
3 | author:
4 | - name: Kevin Kuo
5 | affil: 1, *
6 | orcid: 0000-0001-7803-7901
7 | affiliation:
8 | - num: 1
9 | address: Kasa AI
10 | email: kevin@kasa.ai
11 | # firstnote to eighthnote
12 | correspondence: |
13 | kevin@kasa.ai
14 | journal: risks
15 | type: article
16 | status: submit
17 | bibliography: mybibfile.bib
18 | abstract: |
19 | We propose a novel approach for loss reserving based on deep neural networks. The approach allows for joint modeling of paid losses and claims outstanding, and incorporation of heterogeneous inputs. We validate the models on loss reserving data across lines of business, and show that they improve on the predictive accuracy of existing stochastic methods. The models require minimal feature engineering and expert input, and can be automated to produce forecasts more frequently than manual workflows.
20 | acknowledgement: |
21 | We thank Sigrid Keydana, Ronald Richman, the anonymous reviewers, and the volunteers on the Casualty Actuarial Society Committee on Reserves (CASCOR) who helped to improve the paper through helpful comments and discussions.
22 | conflictsofinterest: |
23 | The author declares no conflict of interest.
24 | keywords: |
25 | loss reserving; machine learning; neural networks
26 | output:
27 | bookdown::pdf_book:
28 | base_format: rticles::mdpi_article
29 | ---
30 |
31 | ```{r setup, echo = FALSE, message = FALSE, warning=FALSE}
32 | library(dplyr)    # pipe and data-manipulation verbs used in the results chunk
33 | library(readr)    # read_csv() for the AutoML benchmark results
34 | library(tidyr)    # gather() to reshape metrics to long format
35 | library(ggplot2)  # plotting backend used by dt_plot_predictions()
36 | library(deeptriangle)  # companion package: data objects and dt_* helper functions
37 | ```
38 |
39 | # Introduction
40 |
41 | In the loss reserving exercise for property and casualty insurers, actuaries are concerned with forecasting future payments due to claims. Accurately estimating these payments is important from the perspectives of various stakeholders in the insurance industry. For the management of the insurer, the estimates of unpaid claims inform decisions in underwriting, pricing, and strategy. For the investors, loss reserves, and transactions related to them, are essential components in the balance sheet and income statement of the insurer. And, for the regulators, accurate loss reserves are needed to appropriately understand the financial soundness of the insurer.
42 |
43 | There can be time lags both for reporting of claims, where the insurer is not notified of a loss until long after it has occurred, and for final development of claims, where payments continue long after the loss has been reported. Also, the amounts of claims are uncertain before they have fully developed. These factors contribute to the difficulty of the loss reserving problem, for which extensive literature exists and active research is being done. We refer the reader to @england2002stochastic for a survey of the problem and existing techniques.
44 |
45 | Deep learning has garnered increasing interest in recent years due to successful applications in many fields [@lecun2015deep] and has recently made its way into the loss reserving literature. @wuthrich2018neural augments the traditional chain ladder method with neural networks to incorporate claims features, @gabrielli2018individual utilize neural networks to synthesize claims data, and @gabrielli2018neural and @gabrielli2019neural embed classical parametric loss reserving models into neural networks. More specifically, the development in @gabrielli2018neural and @gabrielli2019neural proposes initializing a neural network so that, before training, it corresponds exactly to a classical model, such as the over-dispersed Poisson model. The training iterations then adjust the weights of the neural network to minimize the prediction errors, which can be interpreted as a boosting procedure.
46 |
47 | In developing our framework, which we call DeepTriangle[^2], we also draw inspiration from the existing stochastic reserving literature. Works that propose utilizing data in addition to paid losses include @quarg2004munich, which uses incurred losses, and @miranda2012double, which incorporates claim count information. Moving beyond a single homogeneous portfolio, @avanzi2016stochastic considers the dependencies among lines of business within an insurer's portfolio, while @peremans2018robust proposes a robust general multivariate chain ladder approach to accommodate outliers. There is also a category of models, referred to as state space or adaptive models, that allow parameters to evolve recursively in time as more data is observed [@chukhrova2017state]. This iterative updating mechanism is similar in spirit to the continuous updating of neural network weights during model deployment.
48 |
49 | [^2]: A portmanteau of *deep learning* and *loss development triangle*.
50 |
51 | The approach that we develop differs from existing works in many ways, and has the following advantages. First, it enables joint modeling of paid losses and claims outstanding for multiple companies simultaneously in a single model. In fact, the architecture can also accommodate arbitrary additional inputs, such as claim count data and economic indicators, should they be available to the modeler. Second, it requires no manual input during model updates or forecasting, which means that predictions can be generated more frequently than traditional processes, and, in turn, allows management to react to changes in the portfolio sooner.
52 |
53 | The rest of the paper is organized as follows: Section \@ref(prelim) provides a brief overview of neural network terminology, Section \@ref(data-arch) discusses the dataset used and introduces the proposed neural network architecture, Section \@ref(exps) defines the performance metrics we use to benchmark our models and discuss the results, and Section \@ref(conclusion) concludes.
54 |
55 | # Neural Network Preliminaries {#prelim}
56 |
57 | For comprehensive treatments of neural network mechanics and implementation, we refer the reader to @goodfellow2016deep and @chollet2018deep. A more actuarially oriented discussion can be found in @wuthrich2019data. In order to establish common terminology used in this paper, we present a brief overview in this section.
58 |
59 | ```{r feedforward, out.width = "80%", fig.cap = "Feedforward neural network.", fig.align="center", echo = FALSE}
60 | knitr::include_graphics("figures/feedforward.png")  # static diagram, referenced as Figure \ref{fig:feedforward}
61 | ```
62 |
63 | We motivate the discussion by considering an example feedforward network with fully connected layers represented in Figure \ref{fig:feedforward}, where the goal is to predict an output $y$ from input $x$. The intermediate values, known as hidden layers and represented by $h_j^{[l]}$, try to transform the input data into representations that successively become more useful at predicting the output. The nodes in the figure are computed, for each layer $l = 1, \dots, L$, as
64 |
65 | \begin{equation}
66 | h_j^{[l]} = g^{[l]}(z_j^{[l]}),
67 | \end{equation}
68 |
69 | where
70 |
71 | \begin{equation}
72 | z_j^{[l]} = w_j^{[l]T}h^{[l-1]}+ b_j^{[l]},
73 | \end{equation}
74 |
75 | for $j = 1, \dots, n^{[l]}$. In these equations, a superscript $[l]$ denotes association with the layer $l$, a subscript $j$ denotes association with the $j$-th component of the layer, of which there are $n^{[l]}$. The $g^{[l]}$ ($l = 1, \dots, L$) are called activation functions, whose values $h^{[l]}$ are known as activations. The vectors $w_j^{[l]}$ and scalars $b_j^{[l]}$ are known as weights and biases, respectively, and together represent the parameters of the neural network, which are learned during training.
76 |
77 | For $l = 1$, we define the previous layer activations as the input, so that the calculation for the first hidden layer becomes
78 | \begin{equation}
79 | h_j^{[1]} = g^{[1]}(w_j^{[1]T}x + b_j^{[1]}).
80 | \end{equation}
81 | Also, for the output layer $l = L$, we compute the prediction
82 | \begin{equation}
83 | \hat{y} = h_j^{[L]} = g^{[L]}(w_{j}^{[L]T}h^{[L-1]} + b_j^{[L]}).
84 | \end{equation}
85 |
86 | We can then think of a neural network as a sequence of function compositions $f = f_L \circ f_{L-1} \circ \dots \circ f_1$ parameterized as $f(x; W^{[1]}, b^{[1]}, \dots, W^{[L]}, b^{[L]})$. Here, it should be mentioned that the $g^{[l]}$ ($l = 1, \dots, L$) are chosen to be nonlinear, except for possibly in the output layer. These nonlinearities are key to the success of neural networks, because otherwise we would have a trivial composition of linear models.
87 |
88 | Each neural network model is specified with a specific loss function, which is used to measure how close the model predictions are to the actual values. During model training, the parameters discussed above are iteratively updated in order to minimize the loss function. Each update of the parameters typically involves only a subset, or mini-batch, of the training data, and one complete pass through the training data, which includes many updates, is known as an epoch. Training a neural network often requires many passes through the data.
89 |
90 | # Data and Model Architecture {#data-arch}
91 |
92 | In this section, we discuss the dataset used for our experiments and the proposed model architecture.
93 |
94 | ## Data Source
95 |
96 | We use the National Association of Insurance Commissioners (NAIC) Schedule P triangles [@meyers2011loss]. The dataset corresponds to claims from accident years 1988-1997, with development experience of 10 years for each accident year. In Schedule P data, the data is aggregated into accident year-development year records. The procedure for constructing the dataset is detailed in @meyers2015stochastic.
97 |
98 | Following @meyers2015stochastic, we restrict ourselves to a subset of the data which covers four lines of business (commercial auto, private personal auto, workers' compensation, and other liability) and 50 companies in each line of business. This is done to facilitate comparison to existing results.
99 |
100 | We use the following variables from the dataset in our study: line of business, company code, accident year, development lag, incurred loss, cumulative paid loss, and net earned premium. Claims outstanding, for the purpose of this study, is derived as incurred loss less cumulative paid loss. The company code is a categorical variable that denotes which insurer the records are associated with.
101 |
102 | ## Training/Testing Setup
103 |
104 | Let indices $1 \leq i \leq I$ denote accident years and $1 \leq j \leq J$ denote development years under consideration. Also, let $\{P_{i,j}\}$ and $\{OS_{i,j}\}$ denote the *incremental* paid losses and the *total* claims outstanding, or case reserves, respectively.
105 |
106 | Then, at the end of calendar year $I$, we have access to the observed data
107 |
108 | \begin{equation}
109 | \{P_{i,j}: i = 1, \dots, I; j = 1, \dots, I - i + 1\}
110 | \end{equation}
111 |
112 | and
113 |
114 | \begin{equation}
115 | \{OS_{i,j}: i = 1, \dots, I; j = 1, \dots, I - i + 1\}.
116 | \end{equation}
117 |
118 | Assume that we are interested in development through the $I$th development year; in other words, we only forecast through the eldest maturity in the available data. The goal then is to obtain predictions for future values $\{\widehat{P}_{i,j}: i = 2, \dots, I; j = I - i + 2, \dots, I\}$ and $\{\widehat{OS}_{i,j}: i = 2, \dots, I; j = I - i + 2, \dots, I\}$. We can then determine ultimate losses (UL) for each accident year $i = 1, \dots, I$ by calculating
119 |
120 | \begin{equation}
121 | \widehat{UL}_i = \left(\sum_{j = 1}^{I - i + 1} P_{i,j}\right) + \left(\sum_{j = I - i + 2}^I \widehat{P}_{i,j}\right).
122 | \end{equation}
123 |
124 | In our case, data as of year end 1997 is used for training. We then evaluate predictive performance on the development year 10 cumulative paid losses.
125 |
126 | ## Response and Predictor Variables
127 |
128 | In DeepTriangle, each training sample is associated with an accident year-development year pair, which we refer to hereinafter as a *cell*. The response for the sample associated with accident year $i$ and development year $j$ is the sequence
129 |
130 | \begin{equation}
131 | (Y_{i,j},Y_{i,j+1},\dots,Y_{i,I - i + 1}),
132 | \end{equation}
133 | where each $Y_{i,j} = (P_{i,j} / NPE_{i}, OS_{i,j} / NPE_{i})$, and $NPE_{i}$ denotes the net earned premium for accident year $i$. Working with loss ratios makes training more tractable by normalizing values into a similar scale.
134 |
135 | The predictor for the sample contains two components. The first component is the observed history as of the end of the calendar year associated with the cell:
136 |
137 | \begin{equation}
138 | (Y_{i,1}, Y_{i,2}, \dots, Y_{i,j-1}).
139 | \end{equation}
140 | In other words, for each accident year and at each evaluation date for which we have data, we attempt to predict future development of the accident year's paid losses and claims outstanding based on the observed history as of that date. While we are ultimately interested in $P_{i,j}$, the paid losses, we include claims outstanding as an auxiliary output of the model. We elaborate on the reasoning behind this approach in the next section.
141 |
142 | The second component of the predictor is the company identifier associated with the experience. Because we include experience from multiple companies in each training iteration, we need a way to differentiate the data from different companies. We discuss handling of the company identifier in more detail in the next section.
143 |
144 | ## Model Architecture
145 |
146 | As shown in Figure \ref{fig:dt}, DeepTriangle is a multi-task network [@caruana1997multitask] utilizing a sequence-to-sequence architecture [@sutskever2014sequence; @DBLP:journals/corr/SrivastavaMS15] with two prediction goals: paid loss and claims outstanding. We construct one model for each line of business and each model is trained on data from multiple companies.
147 |
148 | ### Multi-Task Learning
149 |
150 | Since the two target quantities, paid loss and claims outstanding, are related, we expect to obtain better performance by jointly training than predicting each quantity independently. While @caruana1997multitask contains detailed discourse on the specific mechanisms of multi-task learning, we provide some heuristics on why it may improve predictions: by utilizing the response data for claims outstanding, we are effectively increasing the training data size since we are providing more signals to the learning algorithm; there may be hidden features, useful for predicting paid losses, that are more easily learned by trying to predict claims outstanding; also, by trying to predict claims outstanding during training, we are imposing a bias towards neural network weight configurations which perform that task well, which lessens the likelihood of arriving at a model that overfits to random noise.
151 |
152 | ### Sequential Input Processing
153 |
154 | ```{r dt, out.width = "100%", fig.cap = "DeepTriangle architecture. \\textit{Embed} denotes embedding layer, \\textit{GRU} denotes gated recurrent unit, \\textit{FC} denotes fully connected layer.", fig.align="center", echo = FALSE}
155 | knitr::include_graphics("figures/nn3.png")  # architecture diagram, referenced as Figure \ref{fig:dt}
156 | ```
157 |
158 | For handling the time series of paid losses and claims outstanding, we utilize gated recurrent units (GRU) [@chung2014empirical], which is a type of recurrent neural network (RNN) building block that is appropriate for sequential data. A graphical representation of a GRU is shown in Figure \ref{fig:gru}, and the associated equations are as follows[^3]:
159 |
160 | \begin{equation}
161 | \tilde{h}^{} = \tanh(W_h[\Gamma_r h^{}, x^{}] + b_h)
162 | \end{equation}
163 | \begin{equation}
164 | \Gamma_r^{} = \sigma(W_r[h^{}, x^{}] + b_r)
165 | \end{equation}
166 | \begin{equation}
167 | \Gamma_u^{} = \sigma(W_u[h^{},x^{}] + b_u)
168 | \end{equation}
169 | \begin{equation}
170 | h^{} = \Gamma_u^{} \tilde{h}^{} + (1 - \Gamma_u^{})h^{}.
171 | \end{equation}
172 |
173 | [^3]: Note the use of angle brackets to index position in a sequence rather than layers in a feedforward neural network as in Section \@ref(prelim).
174 |
175 | Here, $h^{}$ and $x^{}$ represent the activation and input values, respectively, at time $t$, and $\sigma$ denotes the logistic sigmoid function defined as
176 |
177 | \begin{equation}
178 | \sigma(x) = \frac{1}{1 + \exp(-x)}\label{eq:eq1}.
179 | \end{equation}
180 | $W_h$, $W_r$, $W_u$, $b_h$, $b_r$, and $b_u$ are the appropriately sized weight matrices and biases to be learned. Intuitively, the activations $h^{}$ provide a way for the network to maintain state and "remember" values from early values of the input sequence. The values $\tilde{h}^{}$ can be thought of as candidates to replace the current state, and $\Gamma_u^{}$ determines the weighting between the previous state and the candidate state. We remark that although the GRU (and RNN in general) may seem opaque at first, they contain sequential instructions for updating weights just like vanilla feedforward neural networks (and can in fact be interpreted as such [@goodfellow2016deep]).
181 |
182 | We first encode the sequential predictor with a GRU to obtain a summary encoding of the historical values. We then repeat the output $I-1$ times before passing them to a decoder GRU that outputs its hidden state for each time step. The factor $I-1$ is chosen here because for the $I$th accident year, we need to forecast $I-1$ timesteps into the future. For both the encoder and decoder GRU modules, we use 128 hidden units and a dropout rate of 0.2. Here, dropout refers to the regime where, during training, at each iteration, we randomly set the output of the hidden units to zero with a specified probability, in order to reduce overfitting [@srivastava2014dropout]. Intuitively, dropout accomplishes this by approximating an ensemble of sub-networks that can be constructed by removing some hidden units.
183 |
184 | ### Company Code Embeddings
185 |
186 | The company code input is first passed to an embedding layer. In this process, each company is mapped to a fixed length vector in $\mathbb{R}^k$, where $k$ is a hyperparameter. In our case, we choose $k = \text{number of levels} - 1 = 49$, as recommended in @DBLP:journals/corr/GuoB16. In other words, each company is represented by a vector in $\mathbb{R}^{49}$. This mapping mechanism is part of the neural network and hence is learned during the training of the network, instead of in a separate data preprocessing step, so the learned numerical representations are optimized for predicting the future paid losses. Companies that are similar in the context of our claims forecasting problem are mapped to vectors that are close to each other in terms of Euclidean distance. Intuitively, one can think of this representation as a proxy for characteristics of the companies, such as size of book and case reserving philosophy. Categorical embedding is a common technique in deep learning that has been successfully applied to recommendation systems [@Cheng_2016] and retail sales prediction [@DBLP:journals/corr/GuoB16]. In the actuarial science literature, @richman2018neural utilize embedding layers to capture characteristics of regions in mortality forecasting, while @gabrielli2018neural apply them to lines of business factors in loss reserving.
187 |
188 | ### Fully Connected Layers and Outputs
189 |
190 | Each timestep of the decoded sequence from the GRU decoder is then concatenated with the company embedding output. The concatenated values are then passed to two subnetworks of fully connected layers, each of which shares weights across the timesteps. The two subnetworks correspond to the paid loss and case outstanding predictions, respectively, and each consists of a hidden layer of 64 units with a dropout rate of 0.2, followed by an output layer of 1 unit to represent the paid loss or claims outstanding at a time step.
191 |
192 | ```{r gru, out.width = "70%", fig.cap = "Gated recurrent unit.", fig.align="center", echo = FALSE, fig.pos="h"}
193 | knitr::include_graphics("figures/gru.png")  # GRU diagram, referenced as Figure \ref{fig:gru}
194 | ```
195 |
196 | Rectified linear unit (ReLU) [@nair2010rectified], defined as
197 |
198 | \begin{equation}
199 | x \mapsto \max(0, x),
200 | \end{equation}
201 |
202 | is used as the activation function (which we denote by $g$ in Section \@ref(prelim)) for all fully connected layers, including both of the output layers. We remark that this choice of output activation implies we only predict nonnegative cash flows, i.e. no recoveries. This assumption is reasonable for the dataset we use in our experiments, but may be modified to accommodate other use cases.
203 |
204 | ## Deployment Considerations
205 |
206 | While one may not have access to the latest experience data of competitors, the company code predictor can be utilized to incorporate data from companies within a group insurer. During training, the relationships among the companies are inferred based on historical development behavior. This approach provides an automated and objective alternative to manually aggregating, or clustering, the data based on knowledge of the degree of homogeneity among the companies.
207 |
208 | If new companies join the portfolio, or if the companies and associated claims are reorganized, one would modify the embedding input size to accommodate the new codes, leaving the rest of the architecture unchanged, then refit the model. The network would then assign embedding vectors to the new companies.
209 |
210 | Since the model outputs predictions for each triangle cell, one can calculate the traditional age-to-age, or loss development, factors (LDF) using the model forecasts. Having a familiar output may enable easier integration of DeepTriangle into existing actuarial workflows.
211 |
212 | Insurers often have access to richer information than is available in regulatory filings, which underlies the experiments in this paper. For example, in addition to paid and incurred losses, one may include claim count triangles so that the model can also learn from, and predict, frequency information.
213 |
214 | # Experiments {#exps}
215 |
216 | We now describe the performance metrics for benchmarking the models and training details, then discuss the results.
217 |
218 | ## Evaluation Metrics
219 |
220 | We aim to produce scalar metrics to evaluate the performance of the model on each line of business. To this end, for each company and each line of business, we calculate the actual and predicted ultimate losses as of development year 10, for all accident years combined, then compute the root mean squared percentage error (RMSPE) and mean absolute percentage error (MAPE) over companies in each line of business. Percentage errors are used in order to have unit-free measures for comparing across companies with vastly different sizes of portfolios. Formally, if $\mathcal{C}_l$ is the set of companies in line of business $l$,
221 |
222 | \begin{equation}
223 | MAPE_l = \frac{1}{|\mathcal{C}_l|}\sum_{C\in\mathcal{C}_l}\left|\frac{\widehat{UL}_C - UL_C}{UL_C}\right|,
224 | \end{equation}
225 |
226 | and
227 |
228 | \begin{equation}
229 | RMSPE_l = \sqrt{\frac{1}{|\mathcal{C}_l|}\sum_{C\in\mathcal{C}_l}\left(\frac{\widehat{UL}_C - UL_C}{UL_C}\right)^2},
230 | \end{equation}
231 |
232 | where $\widehat{UL}_C$ and $UL_C$ are the predicted and actual cumulative ultimate losses, respectively, for company $C$.
233 |
234 | An alternative approach for evaluation could involve weighting the company results by the associated earned premium or using dollar amounts. However, due to the distribution of company sizes in the dataset, the weights would concentrate on a handful of companies. Hence, to obtain a more balanced evaluation, we choose to report the unweighted percentage-based measures outlined above. We note that the evaluation of reserving models is an ongoing area of research, and refer the reader to @martinek2019analysis for a recent analysis.
235 |
236 | ## Implementation and Training
237 |
238 | The loss function is computed as the average over the forecasted time steps of the mean squared error of the predictions. The losses for the outputs are then averaged to obtain the network loss. Formally, for the sample associated with cell $(i, j)$, we can write the per-sample loss as
239 |
240 | \begin{equation}
241 | \frac{1}{I-i+1-(j-1)}\sum_{k = j}^{I-i+1}\frac{(\widehat{P_{i,k}} - P_{i,k})^2 + (\widehat{OS_{i,k}} - OS_{i,k})^2}{2}.
242 | \end{equation}
243 |
244 | For optimization, we use the AMSGrad [@j.2018on] variant of adam with a learning rate of 0.0005. We train each neural network for a maximum of 1000 epochs with the following early stopping scheme: if the loss on the validation set does not improve over a 200-epoch window, we terminate training and revert back to the weights on the epoch with the lowest validation loss. The validation set used in the early stopping criterion is defined to be the subset of the training data that becomes available after calendar year 1995. For each line of business, we create an ensemble of 100 models, each trained with the same architecture but different random weight initialization. This is done to reduce the variance inherent in the randomness associated with neural networks.
245 |
246 | We implement DeepTriangle using the keras R package [@chollet2017kerasR] and TensorFlow [@tensorflow2015-whitepaper], which are open source software for developing neural network models. Code for producing the experiment results is available online.[^1]
247 |
248 | [^1]: https://github.com/kasaai/deeptriangle.
249 |
250 | ## Results and Discussion
251 |
252 | ```{r, echo = FALSE, results="asis", message = FALSE}
253 | data_dir <- file.path(here::here(), "datasets")  # outputs produced by the analysis scripts
254 | predictions <- feather::read_feather(file.path(data_dir, "predictions.feather"))
255 | automl_results <- read_csv(file.path(data_dir, "automl_results.csv"))
256 | model_results <- dt_compute_metrics(predictions) %>%  # per-company metrics for DeepTriangle
257 | bind_rows(stochastic_model_results) %>%  # benchmark results shipped with the package
258 | bind_rows(automl_results) %>%
259 | gather(metric, value, mape, rmspe)  # long format: one row per (lob, model, metric)
260 | mape_table <- dt_tabulate_metrics(model_results, "mape")
261 | rmspe_table <- dt_tabulate_metrics(model_results, "rmspe")
262 | bind_rows(mape_table, rmspe_table) %>%
263 | mutate(lob = case_when(  # human-readable line-of-business names for the table
264 | lob == "commercial_auto" ~ "Commercial Auto",
265 | lob == "other_liability" ~ "Other Liability",
266 | lob == "private_passenger_auto" ~ "Private Passenger Auto",
267 | lob == "workers_compensation" ~ "Workers' Compensation"
268 | )) %>%
269 | rename(DT = DeepTriangle,
270 | ML = AutoML,
271 | `Line of Business` = lob) %>%
272 | knitr::kable(
273 | format = "latex", booktabs = TRUE,  # booktabs is logical; "T" relied on implicit coercion
274 | digits = 3,
275 | caption = "\\label{tab:table1}Performance comparison of various models. DeepTriangle and AutoML are abbreviated as DT and ML, respectively."
276 | ) %>%
277 | kableExtra::column_spec(7, bold = TRUE) %>%  # bold the DeepTriangle column
278 | kableExtra::group_rows("MAPE", 1, 4) %>%
279 | kableExtra::group_rows("RMSPE", 5, 8)
280 | ```
281 |
282 | In Table \ref{tab:table1} we tabulate the out-of-time performance of DeepTriangle against other models: the Mack chain-ladder model [@mack1993distribution], the bootstrap ODP model [@england2002stochastic], an AutoML model, and a selection of Bayesian Markov chain Monte Carlo (MCMC) models from @meyers2015stochastic including the correlated incremental trend (CIT) and leveled incremental trend (LIT) models. For the stochastic models, we use the means of the predictive distributions as the point estimates to which we compare the actual outcomes. For DeepTriangle, we report the averaged predictions from the ensembles.
283 |
284 | The AutoML model is developed by automatically searching over a set of common machine learning techniques. In the implementation we use, it trains and cross-validates a random forest, an extremely-randomized forest, a random grid of gradient boosting machines, a random grid of deep feedforward neural networks, and stacked ensembles thereof [@h2o_R_package]. Details of these algorithms can be found in @friedman2001elements. Because the machine learning techniques produce scalar outputs, we use an iterative forecasting scheme where the prediction for a timestep is used in the predictor for the next timestep.
285 |
286 | We see that DeepTriangle improves on the performance of the popular chain ladder and ODP models, common machine learning models, and Bayesian stochastic models.
287 |
288 | In addition to aggregated results for all companies, we also investigate qualitatively the ability of DeepTriangle to learn development patterns of individual companies. Figures \ref{fig:fig3} and \ref{fig:fig4} show the paid loss development and claims outstanding development for the commercial auto line of Company 1767 and the workers' compensation line of Company 337, respectively. We see that the model captures the development patterns for Company 1767 reasonably well. However, it is unsuccessful in forecasting the deteriorating loss ratios for Company 337's workers' compensation book.
289 |
290 | We do not study uncertainty estimates in this paper nor interpret the forecasts as posterior predictive distributions; rather, they are included to reflect the stochastic nature of optimizing neural networks. We note that others have exploited randomness in weight initialization in producing predictive distributions [@NIPS2017_7219], and further research could study the applicability of these techniques to reserve variability.
291 |
292 | ```{r fig3, echo = FALSE, message = FALSE, warning = FALSE, fig.cap = "Development by accident year for Company 1767, commercial auto.", fig.align = "center", out.width = "100%", cache = FALSE}
293 | library(cowplot)
294 | paid_panel <- dt_plot_predictions(predictions, "1767", "commercial_auto", "paid_loss") + xlab("")
295 | os_panel <- dt_plot_predictions(predictions, "1767", "commercial_auto", "claims_outstanding")
296 | # Stack the legend-less panels vertically, then attach a single shared legend on the right.
297 | stacked <- plot_grid(
298 |   paid_panel + theme(legend.position = "none"),
299 |   os_panel + theme(legend.position = "none"),
300 |   align = "v",
301 |   ncol = 1
302 | )
303 | shared_legend <- get_legend(paid_panel)
304 | plot_grid(stacked, shared_legend, rel_widths = c(1, 0.2), nrow = 1)
305 | ```
306 |
307 | ```{r fig4, echo = FALSE, message = FALSE, warning = FALSE, fig.cap = "Development by accident year for Company 337, workers' compensation.", fig.align = "center", out.width = "100%", cache = FALSE}
308 | # Paid loss (top) and claims outstanding (bottom) panels; cowplot is loaded by the fig3 chunk.
309 | p1 <- dt_plot_predictions(predictions, "337", "workers_compensation", "paid_loss") + xlab("")
310 | p2 <- dt_plot_predictions(predictions, "337", "workers_compensation", "claims_outstanding")
311 | p12 <- plot_grid(
312 |   p1 + theme(legend.position = "none"),
313 |   p2 + theme(legend.position = "none"),
314 |   align = "v",
315 |   ncol = 1
316 | )
317 | legend <- get_legend(p1)
318 | plot_grid(p12, legend, rel_widths = c(1, 0.2), nrow = 1)
319 | ```
320 |
321 | # Conclusion {#conclusion}
322 |
323 | We introduce DeepTriangle, a deep learning framework for forecasting paid losses. Our models are able to attain performance comparable, by our metrics, to modern stochastic reserving techniques, without expert input. This means that one can automate model updating and report production at the desired frequency (although we note that, as with any automated machine learning system, a process involving expert review should be implemented). By utilizing neural networks, we can incorporate multiple heterogeneous inputs and train on multiple objectives simultaneously, and also allow customization of models based on available data. To summarize, this framework maintains accuracy while providing automatability and extensibility.
324 |
325 | We analyze an aggregated dataset with limited features in this paper because it is publicly available and well studied, but one can extend DeepTriangle to incorporate additional data, such as claim counts.
326 |
327 | Deep neural networks can be designed to extend recent efforts, such as @wuthrich2018machine, on applying machine learning to claims level reserving. They can also be designed to incorporate additional features that are not handled well by traditional machine learning algorithms, such as claims adjusters' notes from free text fields and images.
328 |
329 | While this study focuses on prediction of point estimates, future extensions may include outputting distributions in order to address reserve variability.
330 |
--------------------------------------------------------------------------------
/manuscript/journalnames.tex:
--------------------------------------------------------------------------------
1 | \DeclareOption{acoustics}{ \gdef\@journal{acoustics} \gdef\@journalshort{Acoustics} \gdef\@journalfull{Acoustics} \gdef\@doiabbr{acoustics} \gdef\@ISSN{2624-599X} }
2 | \DeclareOption{actuators}{ \gdef\@journal{actuators} \gdef\@journalshort{Actuators} \gdef\@journalfull{Actuators} \gdef\@doiabbr{act} \gdef\@ISSN{2076-0825} }
3 | \DeclareOption{addictions}{ \gdef\@journal{addictions} \gdef\@journalshort{Addictions} \gdef\@journalfull{Addictions} \gdef\@doiabbr{} \gdef\@ISSN{0006-0006} }
4 | \DeclareOption{admsci}{ \gdef\@journal{admsci} \gdef\@journalshort{Adm. Sci.} \gdef\@journalfull{Administrative Sciences} \gdef\@doiabbr{admsci} \gdef\@ISSN{2076-3387} }
5 | \DeclareOption{aerospace}{ \gdef\@journal{aerospace} \gdef\@journalshort{Aerospace} \gdef\@journalfull{Aerospace} \gdef\@doiabbr{aerospace} \gdef\@ISSN{2226-4310} }
6 | \DeclareOption{agriculture}{ \gdef\@journal{agriculture} \gdef\@journalshort{Agriculture} \gdef\@journalfull{Agriculture} \gdef\@doiabbr{agriculture} \gdef\@ISSN{2077-0472} }
7 | \DeclareOption{agriengineering}{ \gdef\@journal{agriengineering} \gdef\@journalshort{AgriEngineering} \gdef\@journalfull{AgriEngineering} \gdef\@doiabbr{agriengineering} \gdef\@ISSN{2624-7402} }
8 | \DeclareOption{agronomy}{ \gdef\@journal{agronomy} \gdef\@journalshort{Agronomy} \gdef\@journalfull{Agronomy} \gdef\@doiabbr{agronomy} \gdef\@ISSN{2073-4395} }
9 | \DeclareOption{algorithms}{ \gdef\@journal{algorithms} \gdef\@journalshort{Algorithms} \gdef\@journalfull{Algorithms} \gdef\@doiabbr{a} \gdef\@ISSN{1999-4893} }
10 | \DeclareOption{animals}{ \gdef\@journal{animals} \gdef\@journalshort{Animals} \gdef\@journalfull{Animals} \gdef\@doiabbr{ani} \gdef\@ISSN{2076-2615} }
11 | \DeclareOption{antibiotics}{ \gdef\@journal{antibiotics} \gdef\@journalshort{Antibiotics} \gdef\@journalfull{Antibiotics} \gdef\@doiabbr{antibiotics} \gdef\@ISSN{2079-6382} }
12 | \DeclareOption{antibodies}{ \gdef\@journal{antibodies} \gdef\@journalshort{Antibodies} \gdef\@journalfull{Antibodies} \gdef\@doiabbr{antib} \gdef\@ISSN{2073-4468} }
13 | \DeclareOption{antioxidants}{ \gdef\@journal{antioxidants} \gdef\@journalshort{Antioxidants} \gdef\@journalfull{Antioxidants} \gdef\@doiabbr{antiox} \gdef\@ISSN{2076-3921} }
14 | \DeclareOption{applsci}{ \gdef\@journal{applsci} \gdef\@journalshort{Appl. Sci.} \gdef\@journalfull{Applied Sciences} \gdef\@doiabbr{app} \gdef\@ISSN{2076-3417} }
15 | \DeclareOption{arts}{ \gdef\@journal{arts} \gdef\@journalshort{Arts} \gdef\@journalfull{Arts} \gdef\@doiabbr{arts} \gdef\@ISSN{2076-0752} }
16 | \DeclareOption{asc}{ \gdef\@journal{asc} \gdef\@journalshort{Autom. Syst. Control} \gdef\@journalfull{Automatic Systems and Control} \gdef\@doiabbr{} \gdef\@ISSN{} }
17 | \DeclareOption{asi}{ \gdef\@journal{asi} \gdef\@journalshort{Appl. Syst. Innov.} \gdef\@journalfull{Applied System Innovation} \gdef\@doiabbr{asi} \gdef\@ISSN{2571-5577} }
18 | \DeclareOption{atmosphere}{ \gdef\@journal{atmosphere} \gdef\@journalshort{Atmosphere} \gdef\@journalfull{Atmosphere} \gdef\@doiabbr{atmos} \gdef\@ISSN{2073-4433} }
19 | \DeclareOption{atoms}{ \gdef\@journal{atoms} \gdef\@journalshort{Atoms} \gdef\@journalfull{Atoms} \gdef\@doiabbr{atoms} \gdef\@ISSN{2218-2004} }
20 | \DeclareOption{axioms}{ \gdef\@journal{axioms} \gdef\@journalshort{Axioms} \gdef\@journalfull{Axioms} \gdef\@doiabbr{axioms} \gdef\@ISSN{2075-1680} }
21 | \DeclareOption{batteries}{ \gdef\@journal{batteries} \gdef\@journalshort{Batteries} \gdef\@journalfull{Batteries} \gdef\@doiabbr{batteries} \gdef\@ISSN{2313-0105} }
22 | \DeclareOption{bdcc}{ \gdef\@journal{bdcc} \gdef\@journalshort{Big Data Cogn. Comput.} \gdef\@journalfull{Big Data and Cognitive Computing} \gdef\@doiabbr{bdcc} \gdef\@ISSN{2504-2289} }
23 | \DeclareOption{behavsci}{ \gdef\@journal{behavsci} \gdef\@journalshort{Behav. Sci.} \gdef\@journalfull{Behavioral Sciences} \gdef\@doiabbr{bs} \gdef\@ISSN{2076-328X} }
24 | \DeclareOption{beverages}{ \gdef\@journal{beverages} \gdef\@journalshort{Beverages} \gdef\@journalfull{Beverages} \gdef\@doiabbr{beverages} \gdef\@ISSN{2306-5710} }
25 | \DeclareOption{bioengineering}{ \gdef\@journal{bioengineering} \gdef\@journalshort{Bioengineering} \gdef\@journalfull{Bioengineering} \gdef\@doiabbr{bioengineering} \gdef\@ISSN{2306-5354} }
26 | \DeclareOption{biology}{ \gdef\@journal{biology} \gdef\@journalshort{Biology} \gdef\@journalfull{Biology} \gdef\@doiabbr{biology} \gdef\@ISSN{2079-7737} }
27 | \DeclareOption{biomedicines}{ \gdef\@journal{biomedicines} \gdef\@journalshort{Biomedicines} \gdef\@journalfull{Biomedicines} \gdef\@doiabbr{biomedicines} \gdef\@ISSN{2227-9059} }
28 | \DeclareOption{biomimetics}{ \gdef\@journal{biomimetics} \gdef\@journalshort{Biomimetics} \gdef\@journalfull{Biomimetics} \gdef\@doiabbr{biomimetics} \gdef\@ISSN{2313-7673} }
29 | \DeclareOption{biomolecules}{ \gdef\@journal{biomolecules} \gdef\@journalshort{Biomolecules} \gdef\@journalfull{Biomolecules} \gdef\@doiabbr{biom} \gdef\@ISSN{2218-273X} }
30 | \DeclareOption{biosensors}{ \gdef\@journal{biosensors} \gdef\@journalshort{Biosensors} \gdef\@journalfull{Biosensors} \gdef\@doiabbr{bios} \gdef\@ISSN{2079-6374} }
31 | \DeclareOption{brainsci}{ \gdef\@journal{brainsci} \gdef\@journalshort{Brain Sci.} \gdef\@journalfull{Brain Sciences} \gdef\@doiabbr{brainsci} \gdef\@ISSN{2076-3425} }
32 | \DeclareOption{buildings}{ \gdef\@journal{buildings} \gdef\@journalshort{Buildings} \gdef\@journalfull{Buildings} \gdef\@doiabbr{buildings} \gdef\@ISSN{2075-5309} }
33 | \DeclareOption{cancers}{ \gdef\@journal{cancers} \gdef\@journalshort{Cancers} \gdef\@journalfull{Cancers} \gdef\@doiabbr{cancers} \gdef\@ISSN{2072-6694} }
34 | \DeclareOption{carbon}{ \gdef\@journal{carbon} \gdef\@journalshort{C} \gdef\@journalfull{C} \gdef\@doiabbr{c} \gdef\@ISSN{2311-5629} }
35 | \DeclareOption{catalysts}{ \gdef\@journal{catalysts} \gdef\@journalshort{Catalysts} \gdef\@journalfull{Catalysts} \gdef\@doiabbr{catal} \gdef\@ISSN{2073-4344} }
36 | \DeclareOption{cells}{ \gdef\@journal{cells} \gdef\@journalshort{Cells} \gdef\@journalfull{Cells} \gdef\@doiabbr{cells} \gdef\@ISSN{2073-4409} }
37 | \DeclareOption{ceramics}{ \gdef\@journal{ceramics} \gdef\@journalshort{Ceramics} \gdef\@journalfull{Ceramics} \gdef\@doiabbr{ceramics} \gdef\@ISSN{2571-6131} }
38 | \DeclareOption{challenges}{ \gdef\@journal{challenges} \gdef\@journalshort{Challenges} \gdef\@journalfull{Challenges} \gdef\@doiabbr{challe} \gdef\@ISSN{2078-1547} }
39 | \DeclareOption{chemengineering}{ \gdef\@journal{chemengineering} \gdef\@journalshort{ChemEngineering} \gdef\@journalfull{ChemEngineering} \gdef\@doiabbr{chemengineering} \gdef\@ISSN{2305-7084} }
40 | \DeclareOption{chemistry}{ \gdef\@journal{chemistry} \gdef\@journalshort{Chemistry} \gdef\@journalfull{Chemistry} \gdef\@doiabbr{chemistry} \gdef\@ISSN{2624-8549} }
41 | \DeclareOption{chemosensors}{ \gdef\@journal{chemosensors} \gdef\@journalshort{Chemosensors} \gdef\@journalfull{Chemosensors} \gdef\@doiabbr{chemosensors} \gdef\@ISSN{2227-9040} }
42 | \DeclareOption{children}{ \gdef\@journal{children} \gdef\@journalshort{Children} \gdef\@journalfull{Children} \gdef\@doiabbr{children} \gdef\@ISSN{2227-9067} }
43 | \DeclareOption{cleantechnol}{ \gdef\@journal{cleantechnol} \gdef\@journalshort{Clean Technol.} \gdef\@journalfull{Clean Technologies} \gdef\@doiabbr{cleantechnol} \gdef\@ISSN{2571-8797} }
44 | \DeclareOption{climate}{ \gdef\@journal{climate} \gdef\@journalshort{Climate} \gdef\@journalfull{Climate} \gdef\@doiabbr{cli} \gdef\@ISSN{2225-1154} }
45 | \DeclareOption{clockssleep}{ \gdef\@journal{clockssleep} \gdef\@journalshort{Clocks\&Sleep} \gdef\@journalfull{Clocks \& Sleep} \gdef\@doiabbr{clockssleep} \gdef\@ISSN{2624-5175} }
46 | \DeclareOption{cmd}{ \gdef\@journal{cmd} \gdef\@journalshort{Corros. Mater. Degrad.} \gdef\@journalfull{Corrosion and Materials Degradation} \gdef\@doiabbr{cmd} \gdef\@ISSN{2624-5558} }
47 | \DeclareOption{coatings}{ \gdef\@journal{coatings} \gdef\@journalshort{Coatings} \gdef\@journalfull{Coatings} \gdef\@doiabbr{coatings} \gdef\@ISSN{2079-6412} }
48 | \DeclareOption{colloids}{ \gdef\@journal{colloids} \gdef\@journalshort{Colloids Interfaces} \gdef\@journalfull{Colloids Interfaces} \gdef\@doiabbr{colloids} \gdef\@ISSN{2504-5377} }
49 | \DeclareOption{computation}{ \gdef\@journal{computation} \gdef\@journalshort{Computation} \gdef\@journalfull{Computation} \gdef\@doiabbr{computation} \gdef\@ISSN{2079-3197} }
50 | \DeclareOption{computers}{ \gdef\@journal{computers} \gdef\@journalshort{Computers} \gdef\@journalfull{Computers} \gdef\@doiabbr{computers} \gdef\@ISSN{2073-431X} }
51 | \DeclareOption{condensedmatter}{ \gdef\@journal{condensedmatter} \gdef\@journalshort{Condens. Matter} \gdef\@journalfull{Condensed Matter} \gdef\@doiabbr{condmat} \gdef\@ISSN{2410-3896} }
52 | \DeclareOption{cosmetics}{ \gdef\@journal{cosmetics} \gdef\@journalshort{Cosmetics} \gdef\@journalfull{Cosmetics} \gdef\@doiabbr{cosmetics} \gdef\@ISSN{2079-9284} }
53 | \DeclareOption{cryptography}{ \gdef\@journal{cryptography} \gdef\@journalshort{Cryptography} \gdef\@journalfull{Cryptography} \gdef\@doiabbr{cryptography} \gdef\@ISSN{2410-387X} }
54 | \DeclareOption{crystals}{ \gdef\@journal{crystals} \gdef\@journalshort{Crystals} \gdef\@journalfull{Crystals} \gdef\@doiabbr{cryst} \gdef\@ISSN{2073-4352} }
55 | \DeclareOption{dairy}{ \gdef\@journal{dairy} \gdef\@journalshort{Dairy} \gdef\@journalfull{Dairy} \gdef\@doiabbr{dairy} \gdef\@ISSN{2624-862X} }
56 | \DeclareOption{data}{ \gdef\@journal{data} \gdef\@journalshort{Data} \gdef\@journalfull{Data} \gdef\@doiabbr{data} \gdef\@ISSN{2306-5729} }
57 | \DeclareOption{dentistry}{ \gdef\@journal{dentistry} \gdef\@journalshort{Dent. J.} \gdef\@journalfull{Dentistry Journal} \gdef\@doiabbr{dj} \gdef\@ISSN{2304-6767} }
58 | \DeclareOption{designs}{ \gdef\@journal{designs} \gdef\@journalshort{Designs} \gdef\@journalfull{Designs} \gdef\@doiabbr{designs} \gdef\@ISSN{2411-9660} }
59 | \DeclareOption{diagnostics}{ \gdef\@journal{diagnostics} \gdef\@journalshort{Diagnostics} \gdef\@journalfull{Diagnostics} \gdef\@doiabbr{diagnostics} \gdef\@ISSN{2075-4418} }
60 | \DeclareOption{diseases}{ \gdef\@journal{diseases} \gdef\@journalshort{Diseases} \gdef\@journalfull{Diseases} \gdef\@doiabbr{diseases} \gdef\@ISSN{2079-9721} }
61 | \DeclareOption{diversity}{ \gdef\@journal{diversity} \gdef\@journalshort{Diversity} \gdef\@journalfull{Diversity} \gdef\@doiabbr{d} \gdef\@ISSN{1424-2818} }
62 | \DeclareOption{drones}{ \gdef\@journal{drones} \gdef\@journalshort{Drones} \gdef\@journalfull{Drones} \gdef\@doiabbr{drones} \gdef\@ISSN{2504-446X} }
63 | \DeclareOption{econometrics}{ \gdef\@journal{econometrics} \gdef\@journalshort{Econometrics} \gdef\@journalfull{Econometrics} \gdef\@doiabbr{econometrics} \gdef\@ISSN{2225-1146} }
64 | \DeclareOption{economies}{ \gdef\@journal{economies} \gdef\@journalshort{Economies} \gdef\@journalfull{Economies} \gdef\@doiabbr{economies} \gdef\@ISSN{2227-7099} }
65 | \DeclareOption{education}{ \gdef\@journal{education} \gdef\@journalshort{Educ. Sci.} \gdef\@journalfull{Education Sciences} \gdef\@doiabbr{educsci} \gdef\@ISSN{2227-7102} }
66 | \DeclareOption{electrochem}{ \gdef\@journal{electrochem} \gdef\@journalshort{Electrochem} \gdef\@journalfull{Electrochem} \gdef\@doiabbr{electrochem} \gdef\@ISSN{} }
67 | \DeclareOption{electronics}{ \gdef\@journal{electronics} \gdef\@journalshort{Electronics} \gdef\@journalfull{Electronics} \gdef\@doiabbr{electronics} \gdef\@ISSN{2079-9292} }
68 | \DeclareOption{energies}{ \gdef\@journal{energies} \gdef\@journalshort{Energies} \gdef\@journalfull{Energies} \gdef\@doiabbr{en} \gdef\@ISSN{1996-1073} }
69 | \DeclareOption{entropy}{ \gdef\@journal{entropy} \gdef\@journalshort{Entropy} \gdef\@journalfull{Entropy} \gdef\@doiabbr{e} \gdef\@ISSN{1099-4300} }
70 | \DeclareOption{environments}{ \gdef\@journal{environments} \gdef\@journalshort{Environments} \gdef\@journalfull{Environments} \gdef\@doiabbr{environments} \gdef\@ISSN{2076-3298} }
71 | \DeclareOption{epigenomes}{ \gdef\@journal{epigenomes} \gdef\@journalshort{Epigenomes} \gdef\@journalfull{Epigenomes} \gdef\@doiabbr{epigenomes} \gdef\@ISSN{2075-4655} }
72 | \DeclareOption{est}{ \gdef\@journal{est} \gdef\@journalshort{Electrochem. Sci. Technol.} \gdef\@journalfull{Electrochemical Science and Technology} \gdef\@doiabbr{} \gdef\@ISSN{} }
73 | \DeclareOption{fermentation}{ \gdef\@journal{fermentation} \gdef\@journalshort{Fermentation} \gdef\@journalfull{Fermentation} \gdef\@doiabbr{fermentation} \gdef\@ISSN{2311-5637} }
74 | \DeclareOption{fibers}{ \gdef\@journal{fibers} \gdef\@journalshort{Fibers} \gdef\@journalfull{Fibers} \gdef\@doiabbr{fib} \gdef\@ISSN{2079-6439} }
75 | \DeclareOption{fire}{ \gdef\@journal{fire} \gdef\@journalshort{Fire} \gdef\@journalfull{Fire} \gdef\@doiabbr{fire} \gdef\@ISSN{2571-6255} }
76 | \DeclareOption{fishes}{ \gdef\@journal{fishes} \gdef\@journalshort{Fishes} \gdef\@journalfull{Fishes} \gdef\@doiabbr{fishes} \gdef\@ISSN{2410-3888} }
77 | \DeclareOption{fluids}{ \gdef\@journal{fluids} \gdef\@journalshort{Fluids} \gdef\@journalfull{Fluids} \gdef\@doiabbr{fluids} \gdef\@ISSN{2311-5521} }
78 | \DeclareOption{foods}{ \gdef\@journal{foods} \gdef\@journalshort{Foods} \gdef\@journalfull{Foods} \gdef\@doiabbr{foods} \gdef\@ISSN{2304-8158} }
79 | \DeclareOption{forecasting}{ \gdef\@journal{forecasting} \gdef\@journalshort{Forecasting} \gdef\@journalfull{Forecasting} \gdef\@doiabbr{forecast} \gdef\@ISSN{2571-9394} }
80 | \DeclareOption{forests}{ \gdef\@journal{forests} \gdef\@journalshort{Forests} \gdef\@journalfull{Forests} \gdef\@doiabbr{f} \gdef\@ISSN{1999-4907} }
81 | \DeclareOption{fractalfract}{ \gdef\@journal{fractalfract} \gdef\@journalshort{Fractal Fract.} \gdef\@journalfull{Fractal and Fractional} \gdef\@doiabbr{fractalfract} \gdef\@ISSN{2504-3110} }
82 | \DeclareOption{futureinternet}{ \gdef\@journal{futureinternet} \gdef\@journalshort{Future Internet} \gdef\@journalfull{Future Internet} \gdef\@doiabbr{fi} \gdef\@ISSN{1999-5903} }
83 | \DeclareOption{futurephys}{ \gdef\@journal{futurephys} \gdef\@journalshort{Future Phys.} \gdef\@journalfull{Future Physics} \gdef\@doiabbr{futurephys} \gdef\@ISSN{2624-6503} }
84 | \DeclareOption{galaxies}{ \gdef\@journal{galaxies} \gdef\@journalshort{Galaxies} \gdef\@journalfull{Galaxies} \gdef\@doiabbr{galaxies} \gdef\@ISSN{2075-4434} }
85 | \DeclareOption{games}{ \gdef\@journal{games} \gdef\@journalshort{Games} \gdef\@journalfull{Games} \gdef\@doiabbr{g} \gdef\@ISSN{2073-4336} }
86 | \DeclareOption{gastrointestdisord}{ \gdef\@journal{gastrointestdisord} \gdef\@journalshort{Gastrointest. Disord.} \gdef\@journalfull{Gastrointestinal Disorders} \gdef\@doiabbr{gidisord} \gdef\@ISSN{2624-5647} }
87 | \DeclareOption{gels}{ \gdef\@journal{gels} \gdef\@journalshort{Gels} \gdef\@journalfull{Gels} \gdef\@doiabbr{gels} \gdef\@ISSN{2310-2861} }
88 | \DeclareOption{genealogy}{ \gdef\@journal{genealogy} \gdef\@journalshort{Genealogy} \gdef\@journalfull{Genealogy} \gdef\@doiabbr{genealogy} \gdef\@ISSN{2313-5778} }
89 | \DeclareOption{genes}{ \gdef\@journal{genes} \gdef\@journalshort{Genes} \gdef\@journalfull{Genes} \gdef\@doiabbr{genes} \gdef\@ISSN{2073-4425} }
90 | \DeclareOption{geohazards}{ \gdef\@journal{geohazards} \gdef\@journalshort{GeoHazards} \gdef\@journalfull{GeoHazards} \gdef\@doiabbr{geohazards} \gdef\@ISSN{2624-795X} }
91 | \DeclareOption{geosciences}{ \gdef\@journal{geosciences} \gdef\@journalshort{Geosciences} \gdef\@journalfull{Geosciences} \gdef\@doiabbr{geosciences} \gdef\@ISSN{2076-3263} }
92 | \DeclareOption{geriatrics}{ \gdef\@journal{geriatrics} \gdef\@journalshort{Geriatrics} \gdef\@journalfull{Geriatrics} \gdef\@doiabbr{geriatrics} \gdef\@ISSN{2308-3417} }
93 | \DeclareOption{hazardousmatters}{ \gdef\@journal{hazardousmatters} \gdef\@journalshort{Hazard. Matters} \gdef\@journalfull{Hazardous Matters} \gdef\@doiabbr{} \gdef\@ISSN{0014-0014} }
94 | \DeclareOption{healthcare}{ \gdef\@journal{healthcare} \gdef\@journalshort{Healthcare} \gdef\@journalfull{Healthcare} \gdef\@doiabbr{healthcare} \gdef\@ISSN{2227-9032} }
95 | \DeclareOption{heritage}{ \gdef\@journal{heritage} \gdef\@journalshort{Heritage} \gdef\@journalfull{Heritage} \gdef\@doiabbr{heritage} \gdef\@ISSN{2571-9408} }
96 | \DeclareOption{highthroughput}{ \gdef\@journal{highthroughput} \gdef\@journalshort{High-Throughput} \gdef\@journalfull{High-Throughput} \gdef\@doiabbr{ht} \gdef\@ISSN{2571-5135} }
97 | \DeclareOption{horticulturae}{ \gdef\@journal{horticulturae} \gdef\@journalshort{Horticulturae} \gdef\@journalfull{Horticulturae} \gdef\@doiabbr{horticulturae} \gdef\@ISSN{2311-7524} }
98 | \DeclareOption{humanities}{ \gdef\@journal{humanities} \gdef\@journalshort{Humanities} \gdef\@journalfull{Humanities} \gdef\@doiabbr{h} \gdef\@ISSN{2076-0787} }
99 | \DeclareOption{hydrology}{ \gdef\@journal{hydrology} \gdef\@journalshort{Hydrology} \gdef\@journalfull{Hydrology} \gdef\@doiabbr{hydrology} \gdef\@ISSN{2306-5338} }
100 | \DeclareOption{ijerph}{ \gdef\@journal{ijerph} \gdef\@journalshort{Int. J. Environ. Res. Public Health} \gdef\@journalfull{International Journal of Environmental Research and Public Health} \gdef\@doiabbr{ijerph} \gdef\@ISSN{1660-4601} }
101 | \DeclareOption{ijfs}{ \gdef\@journal{ijfs} \gdef\@journalshort{Int. J. Financial Stud.} \gdef\@journalfull{International Journal of Financial Studies} \gdef\@doiabbr{ijfs} \gdef\@ISSN{2227-7072} }
102 | \DeclareOption{ijgi}{ \gdef\@journal{ijgi} \gdef\@journalshort{ISPRS Int. J. Geo-Inf.} \gdef\@journalfull{ISPRS International Journal of Geo-Information} \gdef\@doiabbr{ijgi} \gdef\@ISSN{2220-9964} }
103 | \DeclareOption{ijms}{ \gdef\@journal{ijms} \gdef\@journalshort{Int. J. Mol. Sci.} \gdef\@journalfull{International Journal of Molecular Sciences} \gdef\@doiabbr{ijms} \gdef\@ISSN{1422-0067} }
104 | \DeclareOption{ijtpp}{ \gdef\@journal{ijtpp} \gdef\@journalshort{Int. J. Turbomach. Propuls. Power} \gdef\@journalfull{International Journal of Turbomachinery, Propulsion and Power} \gdef\@doiabbr{ijtpp} \gdef\@ISSN{2504-186X} }
105 | \DeclareOption{informatics}{ \gdef\@journal{informatics} \gdef\@journalshort{Informatics} \gdef\@journalfull{Informatics} \gdef\@doiabbr{informatics} \gdef\@ISSN{2227-9709} }
106 | \DeclareOption{information}{ \gdef\@journal{information} \gdef\@journalshort{Information} \gdef\@journalfull{Information} \gdef\@doiabbr{info} \gdef\@ISSN{2078-2489} }
107 | \DeclareOption{infrastructures}{ \gdef\@journal{infrastructures} \gdef\@journalshort{Infrastructures} \gdef\@journalfull{Infrastructures} \gdef\@doiabbr{infrastructures} \gdef\@ISSN{2412-3811} }
108 | \DeclareOption{inorganics}{ \gdef\@journal{inorganics} \gdef\@journalshort{Inorganics} \gdef\@journalfull{Inorganics} \gdef\@doiabbr{inorganics} \gdef\@ISSN{2304-6740} }
109 | \DeclareOption{insects}{ \gdef\@journal{insects} \gdef\@journalshort{Insects} \gdef\@journalfull{Insects} \gdef\@doiabbr{insects} \gdef\@ISSN{2075-4450} }
110 | \DeclareOption{instruments}{ \gdef\@journal{instruments} \gdef\@journalshort{Instruments} \gdef\@journalfull{Instruments} \gdef\@doiabbr{instruments} \gdef\@ISSN{2410-390X} }
111 | \DeclareOption{inventions}{ \gdef\@journal{inventions} \gdef\@journalshort{Inventions} \gdef\@journalfull{Inventions} \gdef\@doiabbr{inventions} \gdef\@ISSN{2411-5134} }
112 | \DeclareOption{iot}{ \gdef\@journal{iot} \gdef\@journalshort{IoT} \gdef\@journalfull{IoT} \gdef\@doiabbr{iot} \gdef\@ISSN{2624-831X} }
113 | \DeclareOption{j}{ \gdef\@journal{j} \gdef\@journalshort{J} \gdef\@journalfull{J} \gdef\@doiabbr{j} \gdef\@ISSN{2571-8800} }
114 | \DeclareOption{jcdd}{ \gdef\@journal{jcdd} \gdef\@journalshort{J. Cardiovasc. Dev. Dis.} \gdef\@journalfull{Journal of Cardiovascular Development and Disease} \gdef\@doiabbr{jcdd} \gdef\@ISSN{2308-3425} }
115 | \DeclareOption{jcm}{ \gdef\@journal{jcm} \gdef\@journalshort{J. Clin. Med.} \gdef\@journalfull{Journal of Clinical Medicine} \gdef\@doiabbr{jcm} \gdef\@ISSN{2077-0383} }
116 | \DeclareOption{jcp}{ \gdef\@journal{jcp} \gdef\@journalshort{J. Cybersecur. Priv.} \gdef\@journalfull{Journal of Cybersecurity and Privacy} \gdef\@doiabbr{jcp} \gdef\@ISSN{2624-800X} }
117 | \DeclareOption{jcs}{ \gdef\@journal{jcs} \gdef\@journalshort{J. Compos. Sci.} \gdef\@journalfull{Journal of Composites Science} \gdef\@doiabbr{jcs} \gdef\@ISSN{2504-477X} }
118 | \DeclareOption{jdb}{ \gdef\@journal{jdb} \gdef\@journalshort{J. Dev. Biol.} \gdef\@journalfull{Journal of Developmental Biology} \gdef\@doiabbr{jdb} \gdef\@ISSN{2221-3759} }
119 | \DeclareOption{jfb}{ \gdef\@journal{jfb} \gdef\@journalshort{J. Funct. Biomater.} \gdef\@journalfull{Journal of Functional Biomaterials} \gdef\@doiabbr{jfb} \gdef\@ISSN{2079-4983} }
120 | \DeclareOption{jfmk}{ \gdef\@journal{jfmk} \gdef\@journalshort{J. Funct. Morphol. Kinesiol.} \gdef\@journalfull{Journal of Functional Morphology and Kinesiology} \gdef\@doiabbr{jfmk} \gdef\@ISSN{2411-5142} }
121 | \DeclareOption{jimaging}{ \gdef\@journal{jimaging} \gdef\@journalshort{J. Imaging} \gdef\@journalfull{Journal of Imaging} \gdef\@doiabbr{jimaging} \gdef\@ISSN{2313-433X} }
122 | \DeclareOption{jintelligence}{ \gdef\@journal{jintelligence} \gdef\@journalshort{J. Intell.} \gdef\@journalfull{Journal of Intelligence} \gdef\@doiabbr{jintelligence} \gdef\@ISSN{2079-3200} }
123 | \DeclareOption{jlpea}{ \gdef\@journal{jlpea} \gdef\@journalshort{J. Low Power Electron. Appl.} \gdef\@journalfull{Journal of Low Power Electronics and Applications} \gdef\@doiabbr{jlpea} \gdef\@ISSN{2079-9268} }
124 | \DeclareOption{jmmp}{ \gdef\@journal{jmmp} \gdef\@journalshort{J. Manuf. Mater. Process.} \gdef\@journalfull{Journal of Manufacturing and Materials Processing} \gdef\@doiabbr{jmmp} \gdef\@ISSN{2504-4494} }
125 | \DeclareOption{jmse}{ \gdef\@journal{jmse} \gdef\@journalshort{J. Mar. Sci. Eng.} \gdef\@journalfull{Journal of Marine Science and Engineering} \gdef\@doiabbr{jmse} \gdef\@ISSN{2077-1312} }
126 | \DeclareOption{jnt}{ \gdef\@journal{jnt} \gdef\@journalshort{J. Nanotheranostics} \gdef\@journalfull{Journal of Nanotheranostics} \gdef\@doiabbr{jnt} \gdef\@ISSN{2624-845X} }
127 | \DeclareOption{jof}{ \gdef\@journal{jof} \gdef\@journalshort{J. Fungi} \gdef\@journalfull{Journal of Fungi} \gdef\@doiabbr{jof} \gdef\@ISSN{2309-608X} }
128 | \DeclareOption{joitmc}{ \gdef\@journal{joitmc} \gdef\@journalshort{J. Open Innov. Technol. Mark. Complex.} \gdef\@journalfull{Journal of Open Innovation: Technology, Market, and Complexity} \gdef\@doiabbr{joitmc} \gdef\@ISSN{2199-8531} }
129 | \DeclareOption{jpm}{ \gdef\@journal{jpm} \gdef\@journalshort{J. Pers. Med.} \gdef\@journalfull{Journal of Personalized Medicine} \gdef\@doiabbr{jpm} \gdef\@ISSN{2075-4426} }
130 | \DeclareOption{jrfm}{ \gdef\@journal{jrfm} \gdef\@journalshort{J. Risk Financial Manag.} \gdef\@journalfull{Journal of Risk and Financial Management} \gdef\@doiabbr{jrfm} \gdef\@ISSN{1911-8074} }
131 | \DeclareOption{jsan}{ \gdef\@journal{jsan} \gdef\@journalshort{J. Sens. Actuator Netw.} \gdef\@journalfull{Journal of Sensor and Actuator Networks} \gdef\@doiabbr{jsan} \gdef\@ISSN{2224-2708} }
132 | \DeclareOption{land}{ \gdef\@journal{land} \gdef\@journalshort{Land} \gdef\@journalfull{Land} \gdef\@doiabbr{land} \gdef\@ISSN{2073-445X} }
133 | \DeclareOption{languages}{ \gdef\@journal{languages} \gdef\@journalshort{Languages} \gdef\@journalfull{Languages} \gdef\@doiabbr{languages} \gdef\@ISSN{2226-471X} }
134 | \DeclareOption{laws}{ \gdef\@journal{laws} \gdef\@journalshort{Laws} \gdef\@journalfull{Laws} \gdef\@doiabbr{laws} \gdef\@ISSN{2075-471X} }
135 | \DeclareOption{life}{ \gdef\@journal{life} \gdef\@journalshort{Life} \gdef\@journalfull{Life} \gdef\@doiabbr{life} \gdef\@ISSN{2075-1729} }
136 | \DeclareOption{literature}{ \gdef\@journal{literature} \gdef\@journalshort{Literature} \gdef\@journalfull{Literature} \gdef\@doiabbr{} \gdef\@ISSN{2410-9789} }
137 | \DeclareOption{logistics}{ \gdef\@journal{logistics} \gdef\@journalshort{Logistics} \gdef\@journalfull{Logistics} \gdef\@doiabbr{logistics} \gdef\@ISSN{2305-6290} }
138 | \DeclareOption{lubricants}{ \gdef\@journal{lubricants} \gdef\@journalshort{Lubricants} \gdef\@journalfull{Lubricants} \gdef\@doiabbr{lubricants} \gdef\@ISSN{2075-4442} }
139 | \DeclareOption{machines}{ \gdef\@journal{machines} \gdef\@journalshort{Machines} \gdef\@journalfull{Machines} \gdef\@doiabbr{machines} \gdef\@ISSN{2075-1702} }
140 | \DeclareOption{magnetochemistry}{ \gdef\@journal{magnetochemistry} \gdef\@journalshort{Magnetochemistry} \gdef\@journalfull{Magnetochemistry} \gdef\@doiabbr{magnetochemistry} \gdef\@ISSN{2312-7481} }
141 | \DeclareOption{make}{ \gdef\@journal{make} \gdef\@journalshort{Mach. Learn. Knowl. Extr.} \gdef\@journalfull{Machine Learning and Knowledge Extraction} \gdef\@doiabbr{make} \gdef\@ISSN{2504-4990} }
142 | \DeclareOption{marinedrugs}{ \gdef\@journal{marinedrugs} \gdef\@journalshort{Mar. Drugs} \gdef\@journalfull{Marine Drugs} \gdef\@doiabbr{md} \gdef\@ISSN{1660-3397} }
143 | \DeclareOption{materials}{ \gdef\@journal{materials} \gdef\@journalshort{Materials} \gdef\@journalfull{Materials} \gdef\@doiabbr{ma} \gdef\@ISSN{1996-1944} }
144 | \DeclareOption{mathematics}{ \gdef\@journal{mathematics} \gdef\@journalshort{Mathematics} \gdef\@journalfull{Mathematics} \gdef\@doiabbr{math} \gdef\@ISSN{2227-7390} }
145 | \DeclareOption{mca}{ \gdef\@journal{mca} \gdef\@journalshort{Math. Comput. Appl.} \gdef\@journalfull{Mathematical and Computational Applications} \gdef\@doiabbr{mca} \gdef\@ISSN{2297-8747} }
146 | \DeclareOption{medicina}{ \gdef\@journal{medicina} \gdef\@journalshort{Medicina} \gdef\@journalfull{Medicina} \gdef\@doiabbr{medicina} \gdef\@ISSN{1010-660X} }
147 | \DeclareOption{medicines}{ \gdef\@journal{medicines} \gdef\@journalshort{Medicines} \gdef\@journalfull{Medicines} \gdef\@doiabbr{medicines} \gdef\@ISSN{2305-6320} }
148 | \DeclareOption{medsci}{ \gdef\@journal{medsci} \gdef\@journalshort{Med. Sci.} \gdef\@journalfull{Medical Sciences} \gdef\@doiabbr{medsci} \gdef\@ISSN{2076-3271} }
149 | \DeclareOption{membranes}{ \gdef\@journal{membranes} \gdef\@journalshort{Membranes} \gdef\@journalfull{Membranes} \gdef\@doiabbr{membranes} \gdef\@ISSN{2077-0375} }
150 | \DeclareOption{metabolites}{ \gdef\@journal{metabolites} \gdef\@journalshort{Metabolites} \gdef\@journalfull{Metabolites} \gdef\@doiabbr{metabo} \gdef\@ISSN{2218-1989} }
151 | \DeclareOption{metals}{ \gdef\@journal{metals} \gdef\@journalshort{Metals} \gdef\@journalfull{Metals} \gdef\@doiabbr{met} \gdef\@ISSN{2075-4701} }
152 | \DeclareOption{microarrays}{ \gdef\@journal{microarrays} \gdef\@journalshort{Microarrays} \gdef\@journalfull{Microarrays} \gdef\@doiabbr{} \gdef\@ISSN{2076-3905} }
153 | \DeclareOption{micromachines}{ \gdef\@journal{micromachines} \gdef\@journalshort{Micromachines} \gdef\@journalfull{Micromachines} \gdef\@doiabbr{mi} \gdef\@ISSN{2072-666X} }
154 | \DeclareOption{microorganisms}{ \gdef\@journal{microorganisms} \gdef\@journalshort{Microorganisms} \gdef\@journalfull{Microorganisms} \gdef\@doiabbr{microorganisms} \gdef\@ISSN{2076-2607} }
155 | \DeclareOption{minerals}{ \gdef\@journal{minerals} \gdef\@journalshort{Minerals} \gdef\@journalfull{Minerals} \gdef\@doiabbr{min} \gdef\@ISSN{2075-163X} }
156 | \DeclareOption{modelling}{ \gdef\@journal{modelling} \gdef\@journalshort{Modelling} \gdef\@journalfull{Modelling} \gdef\@doiabbr{} \gdef\@ISSN{0012-0012} }
157 | \DeclareOption{molbank}{ \gdef\@journal{molbank} \gdef\@journalshort{Molbank} \gdef\@journalfull{Molbank} \gdef\@doiabbr{M} \gdef\@ISSN{1422-8599} }
158 | \DeclareOption{molecules}{ \gdef\@journal{molecules} \gdef\@journalshort{Molecules} \gdef\@journalfull{Molecules} \gdef\@doiabbr{molecules} \gdef\@ISSN{1420-3049} }
159 | \DeclareOption{mps}{ \gdef\@journal{mps} \gdef\@journalshort{Methods Protoc.} \gdef\@journalfull{Methods and Protocols} \gdef\@doiabbr{mps} \gdef\@ISSN{2409-9279} }
160 | \DeclareOption{mti}{ \gdef\@journal{mti} \gdef\@journalshort{Multimodal Technol. Interact.} \gdef\@journalfull{Multimodal Technologies and Interaction} \gdef\@doiabbr{mti} \gdef\@ISSN{2414-4088} }
161 | \DeclareOption{nanomaterials}{ \gdef\@journal{nanomaterials} \gdef\@journalshort{Nanomaterials} \gdef\@journalfull{Nanomaterials} \gdef\@doiabbr{nano} \gdef\@ISSN{2079-4991} }
162 | \DeclareOption{ncrna}{ \gdef\@journal{ncrna} \gdef\@journalshort{Non-coding RNA} \gdef\@journalfull{Non-coding RNA} \gdef\@doiabbr{ncrna} \gdef\@ISSN{2311-553X} }
163 | \DeclareOption{ijns}{ \gdef\@journal{ijns} \gdef\@journalshort{Int. J. Neonatal Screen.} \gdef\@journalfull{International Journal of Neonatal Screening} \gdef\@doiabbr{ijns} \gdef\@ISSN{2409-515X} }
164 | \DeclareOption{neuroglia}{ \gdef\@journal{neuroglia} \gdef\@journalshort{Neuroglia} \gdef\@journalfull{Neuroglia} \gdef\@doiabbr{neuroglia} \gdef\@ISSN{2571-6980} }
165 | \DeclareOption{nitrogen}{ \gdef\@journal{nitrogen} \gdef\@journalshort{Nitrogen} \gdef\@journalfull{Nitrogen} \gdef\@doiabbr{nitrogen} \gdef\@ISSN{2504-3129} }
166 | \DeclareOption{notspecified}{ \gdef\@journal{notspecified} \gdef\@journalshort{Journal Not Specified} \gdef\@journalfull{Journal Not Specified} \gdef\@doiabbr{} \gdef\@ISSN{} }
167 | \DeclareOption{nutrients}{ \gdef\@journal{nutrients} \gdef\@journalshort{Nutrients} \gdef\@journalfull{Nutrients} \gdef\@doiabbr{nu} \gdef\@ISSN{2072-6643} }
168 | \DeclareOption{ohbm}{ \gdef\@journal{ohbm} \gdef\@journalshort{J. Otorhinolaryngol. Hear. Balance Med.} \gdef\@journalfull{Journal of Otorhinolaryngology, Hearing and Balance Medicine} \gdef\@doiabbr{ohbm} \gdef\@ISSN{2504-463X} }
169 | \DeclareOption{particles}{ \gdef\@journal{particles} \gdef\@journalshort{Particles} \gdef\@journalfull{Particles} \gdef\@doiabbr{particles} \gdef\@ISSN{2571-712X} }
170 | \DeclareOption{pathogens}{ \gdef\@journal{pathogens} \gdef\@journalshort{Pathogens} \gdef\@journalfull{Pathogens} \gdef\@doiabbr{pathogens} \gdef\@ISSN{2076-0817} }
171 | \DeclareOption{pharmaceuticals}{ \gdef\@journal{pharmaceuticals} \gdef\@journalshort{Pharmaceuticals} \gdef\@journalfull{Pharmaceuticals} \gdef\@doiabbr{ph} \gdef\@ISSN{1424-8247} }
172 | \DeclareOption{pharmaceutics}{ \gdef\@journal{pharmaceutics} \gdef\@journalshort{Pharmaceutics} \gdef\@journalfull{Pharmaceutics} \gdef\@doiabbr{pharmaceutics} \gdef\@ISSN{1999-4923} }
173 | \DeclareOption{pharmacy}{ \gdef\@journal{pharmacy} \gdef\@journalshort{Pharmacy} \gdef\@journalfull{Pharmacy} \gdef\@doiabbr{pharmacy} \gdef\@ISSN{2226-4787} }
174 | \DeclareOption{philosophies}{ \gdef\@journal{philosophies} \gdef\@journalshort{Philosophies} \gdef\@journalfull{Philosophies} \gdef\@doiabbr{philosophies} \gdef\@ISSN{2409-9287} }
175 | \DeclareOption{photonics}{ \gdef\@journal{photonics} \gdef\@journalshort{Photonics} \gdef\@journalfull{Photonics} \gdef\@doiabbr{photonics} \gdef\@ISSN{2304-6732} }
176 | \DeclareOption{physics}{ \gdef\@journal{physics} \gdef\@journalshort{Physics} \gdef\@journalfull{Physics} \gdef\@doiabbr{physics} \gdef\@ISSN{2624-8174} }
177 | \DeclareOption{plants}{ \gdef\@journal{plants} \gdef\@journalshort{Plants} \gdef\@journalfull{Plants} \gdef\@doiabbr{plants} \gdef\@ISSN{2223-7747} }
178 | \DeclareOption{plasma}{ \gdef\@journal{plasma} \gdef\@journalshort{Plasma} \gdef\@journalfull{Plasma} \gdef\@doiabbr{plasma} \gdef\@ISSN{2571-6182} }
179 | \DeclareOption{polymers}{ \gdef\@journal{polymers} \gdef\@journalshort{Polymers} \gdef\@journalfull{Polymers} \gdef\@doiabbr{polym} \gdef\@ISSN{2073-4360} }
180 | \DeclareOption{polysaccharides}{ \gdef\@journal{polysaccharides} \gdef\@journalshort{Polysaccharides} \gdef\@journalfull{Polysaccharides} \gdef\@doiabbr{} \gdef\@ISSN{} }
181 | \DeclareOption{preprints}{ \gdef\@journal{preprints} \gdef\@journalshort{Preprints} \gdef\@journalfull{Preprints} \gdef\@doiabbr{} \gdef\@ISSN{} }
182 | \DeclareOption{proceedings}{ \gdef\@journal{proceedings} \gdef\@journalshort{Proceedings} \gdef\@journalfull{Proceedings} \gdef\@doiabbr{proceedings} \gdef\@ISSN{2504-3900} }
183 | \DeclareOption{processes}{ \gdef\@journal{processes} \gdef\@journalshort{Processes} \gdef\@journalfull{Processes} \gdef\@doiabbr{pr} \gdef\@ISSN{2227-9717} }
184 | \DeclareOption{proteomes}{ \gdef\@journal{proteomes} \gdef\@journalshort{Proteomes} \gdef\@journalfull{Proteomes} \gdef\@doiabbr{proteomes} \gdef\@ISSN{2227-7382} }
185 | \DeclareOption{psych}{ \gdef\@journal{psych} \gdef\@journalshort{Psych} \gdef\@journalfull{Psych} \gdef\@doiabbr{psych} \gdef\@ISSN{2624-8611} }
186 | \DeclareOption{publications}{ \gdef\@journal{publications} \gdef\@journalshort{Publications} \gdef\@journalfull{Publications} \gdef\@doiabbr{publications} \gdef\@ISSN{2304-6775} }
187 | \DeclareOption{quantumrep}{ \gdef\@journal{quantumrep} \gdef\@journalshort{Quantum Rep.} \gdef\@journalfull{Quantum Reports} \gdef\@doiabbr{quantum} \gdef\@ISSN{2624-960X} }
188 | \DeclareOption{quaternary}{ \gdef\@journal{quaternary} \gdef\@journalshort{Quaternary} \gdef\@journalfull{Quaternary} \gdef\@doiabbr{quat} \gdef\@ISSN{2571-550X} }
189 | \DeclareOption{qubs}{ \gdef\@journal{qubs} \gdef\@journalshort{Quantum Beam Sci.} \gdef\@journalfull{Quantum Beam Science} \gdef\@doiabbr{qubs} \gdef\@ISSN{2412-382X} }
190 | \DeclareOption{reactions}{ \gdef\@journal{reactions} \gdef\@journalshort{Reactions} \gdef\@journalfull{Reactions} \gdef\@doiabbr{reactions} \gdef\@ISSN{2624-781X} }
191 | \DeclareOption{recycling}{ \gdef\@journal{recycling} \gdef\@journalshort{Recycling} \gdef\@journalfull{Recycling} \gdef\@doiabbr{recycling} \gdef\@ISSN{2313-4321} }
192 | \DeclareOption{religions}{ \gdef\@journal{religions} \gdef\@journalshort{Religions} \gdef\@journalfull{Religions} \gdef\@doiabbr{rel} \gdef\@ISSN{2077-1444} }
193 | \DeclareOption{remotesensing}{ \gdef\@journal{remotesensing} \gdef\@journalshort{Remote Sens.} \gdef\@journalfull{Remote Sensing} \gdef\@doiabbr{rs} \gdef\@ISSN{2072-4292} }
194 | \DeclareOption{reports}{ \gdef\@journal{reports} \gdef\@journalshort{Reports} \gdef\@journalfull{Reports} \gdef\@doiabbr{reports} \gdef\@ISSN{2571-841X} }
195 | \DeclareOption{resources}{ \gdef\@journal{resources} \gdef\@journalshort{Resources} \gdef\@journalfull{Resources} \gdef\@doiabbr{resources} \gdef\@ISSN{2079-9276} }
196 | \DeclareOption{risks}{ \gdef\@journal{risks} \gdef\@journalshort{Risks} \gdef\@journalfull{Risks} \gdef\@doiabbr{risks} \gdef\@ISSN{2227-9091} }
197 | \DeclareOption{robotics}{ \gdef\@journal{robotics} \gdef\@journalshort{Robotics} \gdef\@journalfull{Robotics} \gdef\@doiabbr{robotics} \gdef\@ISSN{2218-6581} }
198 | \DeclareOption{safety}{ \gdef\@journal{safety} \gdef\@journalshort{Safety} \gdef\@journalfull{Safety} \gdef\@doiabbr{safety} \gdef\@ISSN{2313-576X} }
199 | \DeclareOption{sci}{ \gdef\@journal{sci} \gdef\@journalshort{Sci} \gdef\@journalfull{Sci} \gdef\@doiabbr{sci} \gdef\@ISSN{2413-4155} }
200 | \DeclareOption{scipharm}{ \gdef\@journal{scipharm} \gdef\@journalshort{Sci. Pharm.} \gdef\@journalfull{Scientia Pharmaceutica} \gdef\@doiabbr{scipharm} \gdef\@ISSN{2218-0532} }
201 | \DeclareOption{sensors}{ \gdef\@journal{sensors} \gdef\@journalshort{Sensors} \gdef\@journalfull{Sensors} \gdef\@doiabbr{s} \gdef\@ISSN{1424-8220} }
202 | \DeclareOption{separations}{ \gdef\@journal{separations} \gdef\@journalshort{Separations} \gdef\@journalfull{Separations} \gdef\@doiabbr{separations} \gdef\@ISSN{2297-8739} }
203 | \DeclareOption{sexes}{ \gdef\@journal{sexes} \gdef\@journalshort{Sexes} \gdef\@journalfull{Sexes} \gdef\@doiabbr{} \gdef\@ISSN{2411-5118} }
204 | \DeclareOption{signals}{ \gdef\@journal{signals} \gdef\@journalshort{Signals} \gdef\@journalfull{Signals} \gdef\@doiabbr{signals} \gdef\@ISSN{2624-6120} }
205 | \DeclareOption{sinusitis}{ \gdef\@journal{sinusitis} \gdef\@journalshort{Sinusitis} \gdef\@journalfull{Sinusitis} \gdef\@doiabbr{sinusitis} \gdef\@ISSN{2309-107X} }
206 | \DeclareOption{smartcities}{ \gdef\@journal{smartcities} \gdef\@journalshort{Smart Cities} \gdef\@journalfull{Smart Cities} \gdef\@doiabbr{smartcities} \gdef\@ISSN{2624-6511} }
207 | \DeclareOption{sna}{ \gdef\@journal{sna} \gdef\@journalshort{Sinusitis Asthma} \gdef\@journalfull{Sinusitis and Asthma} \gdef\@doiabbr{sna} \gdef\@ISSN{2624-7003} }
208 | \DeclareOption{societies}{ \gdef\@journal{societies} \gdef\@journalshort{Societies} \gdef\@journalfull{Societies} \gdef\@doiabbr{soc} \gdef\@ISSN{2075-4698} }
209 | \DeclareOption{socsci}{ \gdef\@journal{socsci} \gdef\@journalshort{Soc. Sci.} \gdef\@journalfull{Social Sciences} \gdef\@doiabbr{socsci} \gdef\@ISSN{2076-0760} }
210 | \DeclareOption{soilsystems}{ \gdef\@journal{soilsystems} \gdef\@journalshort{Soil Syst.} \gdef\@journalfull{Soil Systems} \gdef\@doiabbr{soilsystems} \gdef\@ISSN{2571-8789} }
211 | \DeclareOption{sports}{ \gdef\@journal{sports} \gdef\@journalshort{Sports} \gdef\@journalfull{Sports} \gdef\@doiabbr{sports} \gdef\@ISSN{2075-4663} }
212 | \DeclareOption{standards}{ \gdef\@journal{standards} \gdef\@journalshort{Standards} \gdef\@journalfull{Standards} \gdef\@doiabbr{} \gdef\@ISSN{2305-6703} }
213 | \DeclareOption{stats}{ \gdef\@journal{stats} \gdef\@journalshort{Stats} \gdef\@journalfull{Stats} \gdef\@doiabbr{stats} \gdef\@ISSN{2571-905X} }
214 | \DeclareOption{surfaces}{ \gdef\@journal{surfaces} \gdef\@journalshort{Surfaces} \gdef\@journalfull{Surfaces} \gdef\@doiabbr{surfaces} \gdef\@ISSN{2571-9637} }
%% NOTE(review): ISSN 2017-2017 looks like a placeholder and \@doiabbr is empty --
%% verify against the journal registry before this option is used for a real DOI.
215 | \DeclareOption{surgeries}{ \gdef\@journal{surgeries} \gdef\@journalshort{Surgeries} \gdef\@journalfull{Surgeries} \gdef\@doiabbr{} \gdef\@ISSN{2017-2017} }
216 | \DeclareOption{sustainability}{ \gdef\@journal{sustainability} \gdef\@journalshort{Sustainability} \gdef\@journalfull{Sustainability} \gdef\@doiabbr{su} \gdef\@ISSN{2071-1050} }
217 | \DeclareOption{symmetry}{ \gdef\@journal{symmetry} \gdef\@journalshort{Symmetry} \gdef\@journalfull{Symmetry} \gdef\@doiabbr{sym} \gdef\@ISSN{2073-8994} }
218 | \DeclareOption{systems}{ \gdef\@journal{systems} \gdef\@journalshort{Systems} \gdef\@journalfull{Systems} \gdef\@doiabbr{systems} \gdef\@ISSN{2079-8954} }
219 | \DeclareOption{technologies}{ \gdef\@journal{technologies} \gdef\@journalshort{Technologies} \gdef\@journalfull{Technologies} \gdef\@doiabbr{technologies} \gdef\@ISSN{2227-7080} }
220 | \DeclareOption{test}{ \gdef\@journal{test} \gdef\@journalshort{Test} \gdef\@journalfull{Test} \gdef\@doiabbr{} \gdef\@ISSN{} }
221 | \DeclareOption{toxics}{ \gdef\@journal{toxics} \gdef\@journalshort{Toxics} \gdef\@journalfull{Toxics} \gdef\@doiabbr{toxics} \gdef\@ISSN{2305-6304} }
222 | \DeclareOption{toxins}{ \gdef\@journal{toxins} \gdef\@journalshort{Toxins} \gdef\@journalfull{Toxins} \gdef\@doiabbr{toxins} \gdef\@ISSN{2072-6651} }
223 | \DeclareOption{tropicalmed}{ \gdef\@journal{tropicalmed} \gdef\@journalshort{Trop. Med. Infect. Dis.} \gdef\@journalfull{Tropical Medicine and Infectious Disease} \gdef\@doiabbr{tropicalmed} \gdef\@ISSN{2414-6366} }
224 | \DeclareOption{universe}{ \gdef\@journal{universe} \gdef\@journalshort{Universe} \gdef\@journalfull{Universe} \gdef\@doiabbr{universe} \gdef\@ISSN{2218-1997} }
225 | \DeclareOption{urbansci}{ \gdef\@journal{urbansci} \gdef\@journalshort{Urban Sci.} \gdef\@journalfull{Urban Science} \gdef\@doiabbr{urbansci} \gdef\@ISSN{2413-8851} }
226 | \DeclareOption{vaccines}{ \gdef\@journal{vaccines} \gdef\@journalshort{Vaccines} \gdef\@journalfull{Vaccines} \gdef\@doiabbr{vaccines} \gdef\@ISSN{2076-393X} }
227 | \DeclareOption{vehicles}{ \gdef\@journal{vehicles} \gdef\@journalshort{Vehicles} \gdef\@journalfull{Vehicles} \gdef\@doiabbr{vehicles} \gdef\@ISSN{2624-8921} }
228 | \DeclareOption{vetsci}{ \gdef\@journal{vetsci} \gdef\@journalshort{Vet. Sci.} \gdef\@journalfull{Veterinary Sciences} \gdef\@doiabbr{vetsci} \gdef\@ISSN{2306-7381} }
229 | \DeclareOption{vibration}{ \gdef\@journal{vibration} \gdef\@journalshort{Vibration} \gdef\@journalfull{Vibration} \gdef\@doiabbr{vibration} \gdef\@ISSN{2571-631X} }
230 | \DeclareOption{viruses}{ \gdef\@journal{viruses} \gdef\@journalshort{Viruses} \gdef\@journalfull{Viruses} \gdef\@doiabbr{v} \gdef\@ISSN{1999-4915} }
231 | \DeclareOption{vision}{ \gdef\@journal{vision} \gdef\@journalshort{Vision} \gdef\@journalfull{Vision} \gdef\@doiabbr{vision} \gdef\@ISSN{2411-5150} }
232 | \DeclareOption{water}{ \gdef\@journal{water} \gdef\@journalshort{Water} \gdef\@journalfull{Water} \gdef\@doiabbr{w} \gdef\@ISSN{2073-4441} }
%% NOTE(review): ISSN 1234-4321 looks like a placeholder and \@doiabbr is empty --
%% verify against the journal registry before this option is used for a real DOI.
233 | \DeclareOption{wem}{ \gdef\@journal{wem} \gdef\@journalshort{Wildl. Ecol. Manag.} \gdef\@journalfull{Wildlife Ecology and Management} \gdef\@doiabbr{} \gdef\@ISSN{1234-4321} }
234 | \DeclareOption{wevj}{ \gdef\@journal{wevj} \gdef\@journalshort{World Electric Vehicle Journal} \gdef\@journalfull{World Electric Vehicle Journal} \gdef\@doiabbr{wevj} \gdef\@ISSN{2032-6653} }
--------------------------------------------------------------------------------
/manuscript/mdpi.cls:
--------------------------------------------------------------------------------
1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2 | %% %% MDPI class for LaTeX files 15.2.2019 b
3 | %% %% For any information please send an e-mail to:
4 | %% %% latex@mdpi.com
5 | %% %%
6 | %% %% Initial class provided by:
7 | %% %% Stefano Mariani
8 | %% %% Modified by:
9 | %% %% Dietrich Rordorf
10 | %% %% Peter Harremoes
11 | %% %% Zeno Schumacher
12 | %% %% Maddalena Giulini
13 | %% %% Andres Gartmann
14 | %% %% Dr. Janine Daum
15 | %% %% Versions:
16 | %% %% v1.0 before Dr. Janine Daum
17 | %% %% v2.0 when Dr. Janine Daum started (March 2013)
18 | %% %% v3.0 after layout change (September 2015)
19 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
20 |
21 | %% IDENTIFICATION
22 | \NeedsTeXFormat{LaTeX2e}
%% LaTeX2e expects the release date in the optional argument of \ProvidesClass
%% in YYYY/MM/DD form so that \documentclass[...]{mdpi}[<date>] version checks
%% compare correctly; the original "15/02/2019" (DD/MM/YYYY) breaks that check.
23 | \ProvidesClass{mdpi}[2019/02/15 MDPI paper class]
24 |
25 | %%%% Copyright and citebox
26 | \AtEndDocument{\par \cright}
27 |
28 | %% PRELIMINARY DECLARATIONS
29 | \LoadClass[10pt,a4paper]{article}
30 | \RequirePackage[T1]{fontenc}
31 | \RequirePackage[utf8]{inputenc}
32 | \RequirePackage{calc}
33 | \RequirePackage{indentfirst}
34 | \RequirePackage{fancyhdr}
35 | \RequirePackage{graphicx,epstopdf}
36 | \RequirePackage{lastpage}
37 | \RequirePackage{ifthen}
38 | \RequirePackage{lineno}
39 | \RequirePackage{float}
40 | \RequirePackage{amsmath}
41 | \RequirePackage{setspace}
42 | \RequirePackage{enumitem}
43 | \RequirePackage{mathpazo}
44 | \RequirePackage{booktabs} % For \toprule etc. in tables
45 | \RequirePackage[largestsep]{titlesec}
46 | \RequirePackage{etoolbox} % For \AtBeginDocument etc.
47 | \RequirePackage{tabto} % To use tab for alignment on first page
48 | \RequirePackage[table]{xcolor} % To provide color for soul (for english editing) and provide coloring for tables (author request)
49 | \RequirePackage{soul} % To highlight text
50 | \newcommand{\highlight}[1]{\colorbox{yellow}{#1}}
51 | \RequirePackage{multirow}
52 | \RequirePackage{microtype} % For command \textls[]{}
53 | \RequirePackage{tikz} % For \foreach used for Orcid icon
54 | \RequirePackage{totcount} % To enable extracting the value of the counter "page"
55 |
56 |
57 | %% OPTIONS
58 | %% To choose the journal
59 | % All journals (website name, full name, short name, DOI abbreviation, and ISSN) are defined in an extra file.
60 | % This is the same as for mdpi.cls.
61 | \input{journalnames}
62 | \DeclareOption{journal}{\ClassWarning{mdpi}{You used an invalid journal name or you have not specified the journal. The first option of the documentclass command specifies the journal. The word 'journal' should be replaced by one of the journal names specified in template.tex (in the comment 'Choose between the following MDPI journal').}}
63 |
64 | %% To choose the type of manuscript
65 | \DeclareOption{abstract}{\gdef\@arttype{Abstract}}
66 | \DeclareOption{addendum}{\gdef\@arttype{Addendum}}
67 | \DeclareOption{article}{\gdef\@arttype{Article}}
68 | \DeclareOption{benchmark}{\gdef\@arttype{Benchmark}}
69 | \DeclareOption{book}{\gdef\@arttype{Book}}
70 | \DeclareOption{bookreview}{\gdef\@arttype{Book Review}}
71 | \DeclareOption{briefreport}{\gdef\@arttype{Brief Report}}
72 | \DeclareOption{casereport}{\gdef\@arttype{Case Report}}
73 | \DeclareOption{changes}{\gdef\@arttype{Changes}}
74 | \DeclareOption{comment}{\gdef\@arttype{Comment}}
75 | \DeclareOption{commentary}{\gdef\@arttype{Commentary}}
76 | \DeclareOption{communication}{\gdef\@arttype{Communication}}
77 | \DeclareOption{conceptpaper}{\gdef\@arttype{Concept Paper}}
78 | \DeclareOption{conferenceproceedings}{\gdef\@arttype{Proceedings}}
79 | \DeclareOption{correction}{\gdef\@arttype{Correction}}
80 | \DeclareOption{conferencereport}{\gdef\@arttype{Conference Report}}
81 | \DeclareOption{expressionofconcern}{\gdef\@arttype{Expression of Concern}}
82 | \DeclareOption{extendedabstract}{\gdef\@arttype{Extended Abstract}}
83 | \DeclareOption{meetingreport}{\gdef\@arttype{Meeting Report}}
84 | \DeclareOption{creative}{\gdef\@arttype{Creative}}
85 | \DeclareOption{datadescriptor}{\gdef\@arttype{Data Descriptor}}
86 | \DeclareOption{discussion}{\gdef\@arttype{Discussion}}
87 | \DeclareOption{editorial}{\gdef\@arttype{Editorial}}
88 | \DeclareOption{essay}{\gdef\@arttype{Essay}}
89 | \DeclareOption{erratum}{\gdef\@arttype{Erratum}}
90 | \DeclareOption{hypothesis}{\gdef\@arttype{Hypothesis}}
91 | \DeclareOption{interestingimages}{\gdef\@arttype{Interesting Images}}
92 | \DeclareOption{letter}{\gdef\@arttype{Letter}}
93 | %% (removed duplicate \DeclareOption{meetingreport}; the identical declaration already appears earlier in this option list)
94 | \DeclareOption{newbookreceived}{\gdef\@arttype{New Book Received}}
95 | \DeclareOption{obituary}{\gdef\@arttype{Obituary}}
96 | \DeclareOption{opinion}{\gdef\@arttype{Opinion}}
97 | \DeclareOption{projectreport}{\gdef\@arttype{Project Report}}
98 | \DeclareOption{reply}{\gdef\@arttype{Reply}}
99 | \DeclareOption{retraction}{\gdef\@arttype{Retraction}}
100 | \DeclareOption{review}{\gdef\@arttype{Review}}
101 | \DeclareOption{perspective}{\gdef\@arttype{Perspective}}
102 | \DeclareOption{protocol}{\gdef\@arttype{Protocol}}
103 | \DeclareOption{shortnote}{\gdef\@arttype{Short Note}}
104 | \DeclareOption{supfile}{\gdef\@arttype{Supfile}}
105 | \DeclareOption{technicalnote}{\gdef\@arttype{Technical Note}}
106 | \DeclareOption{viewpoint}{\gdef\@arttype{Viewpoint}}
107 |
108 | %% To choose the status of the manuscript
109 | \DeclareOption{submit}{\gdef\@status{submit}}
110 | \DeclareOption{accept}{\gdef\@status{accept}}
111 |
112 | %% To choose whether there is one author or more than one
113 | \DeclareOption{oneauthor}{\gdef\@authornum{author}}
114 | \DeclareOption{moreauthors}{\gdef\@authornum{authors}}
115 |
116 | %% Add the chosen options to the class
117 | \DeclareOption*{\PassOptionsToClass{\CurrentOption}{article}}
118 |
119 | %% Defaults
120 | \ExecuteOptions{notspecified,10pt,a4paper,article,submit,oneauthor}
121 |
122 | %% Process options
123 | \ProcessOptions\relax
124 |
125 | %% MORE DECLARATIONS
126 | %%%% Maths environments
127 | \RequirePackage{amsthm}
128 | \newtheoremstyle{mdpi}% name
129 | {12pt}% space above
130 | {12pt}% space below
131 | {\itshape}% body font
132 | {}% indent amount 1
133 | {\bfseries}% theorem head font
134 | {.}% punctuation after theorem head
135 | {.5em}% space after theorem head
136 | {}% theorem head spec (can be left empty, meaning `normal')
137 |
138 | \renewcommand{\qed}{\unskip\nobreak\quad\qedsymbol} %% This places the symbol right after the text instead of placing it at the end on the line.
139 |
140 | \renewenvironment{proof}[1][\proofname]{\par %% \proofname allows to have "Proof of my theorem"
141 | \pushQED{\qed}%
142 | \normalfont \topsep6\p@\@plus6\p@\relax
143 | \trivlist
144 | \item[\hskip\labelsep
145 | \bfseries %% "Proof" is bold
146 | #1\@addpunct{.}]\ignorespaces %% Period instead of colon
147 | }{%
148 | \popQED\endtrivlist\@endpefalse
149 | }
150 |
151 | \theoremstyle{mdpi}
152 | \newcounter{theorem}
153 | \setcounter{theorem}{0}
154 | \newtheorem{Theorem}[theorem]{Theorem}
155 |
156 | \newcounter{lemma}
157 | \setcounter{lemma}{0}
158 | \newtheorem{Lemma}[lemma]{Lemma}
159 |
160 | \newcounter{corollary}
161 | \setcounter{corollary}{0}
162 | \newtheorem{Corollary}[corollary]{Corollary}
163 |
164 | \newcounter{proposition}
165 | \setcounter{proposition}{0}
166 | \newtheorem{Proposition}[proposition]{Proposition}
167 |
168 | \newcounter{characterization}
169 | \setcounter{characterization}{0}
170 | \newtheorem{Characterization}[characterization]{Characterization}
171 |
172 | \newcounter{property}
173 | \setcounter{property}{0}
174 | \newtheorem{Property}[property]{Property}
175 |
176 | \newcounter{problem}
177 | \setcounter{problem}{0}
178 | \newtheorem{Problem}[problem]{Problem}
179 |
180 | \newcounter{example}
181 | \setcounter{example}{0}
182 | \newtheorem{Example}[example]{Example}
183 |
184 | \newcounter{examplesanddefinitions}
185 | \setcounter{examplesanddefinitions}{0}
186 | \newtheorem{ExamplesandDefinitions}[examplesanddefinitions]{Examples and Definitions}
187 |
188 | \newcounter{remark}
189 | \setcounter{remark}{0}
190 | \newtheorem{Remark}[remark]{Remark}
191 |
192 | \newcounter{definition}
193 | \setcounter{definition}{0}
194 | \newtheorem{Definition}[definition]{Definition}
195 |
196 | \newcounter{hypothesis}
197 | \setcounter{hypothesis}{0}
198 | \newtheorem{Hypothesis}[hypothesis]{Hypothesis}
199 |
200 | \newcounter{notation}
201 | \setcounter{notation}{0}
202 | \newtheorem{Notation}[notation]{Notation}
203 |
204 | %%%% Hyphenation
205 | \RequirePackage[none]{hyphenat}
206 | \sloppy
207 |
208 | %%%% References
209 | \RequirePackage[sort&compress,sectionbib]{natbib} % option sectionbib is for optionally organizing references using sections (author request)
210 |
211 | \ifthenelse{\equal{\@journal}{admsci}
212 | \OR \equal{\@journal}{arts}
213 | \OR \equal{\@journal}{econometrics}
214 | \OR \equal{\@journal}{economies}
215 | \OR \equal{\@journal}{genealogy}
216 | \OR \equal{\@journal}{humanities}
217 | \OR \equal{\@journal}{ijfs}
218 | \OR \equal{\@journal}{jrfm}
219 | \OR \equal{\@journal}{languages}
220 | \OR \equal{\@journal}{laws}
221 | \OR \equal{\@journal}{religions}
222 | \OR \equal{\@journal}{risks}
223 | \OR \equal{\@journal}{socsci}}{%
224 | \bibliographystyle{chicago2}
225 | \bibpunct{(}{)}{;}{x}{}{}%
226 | }{%
227 | \bibliographystyle{mdpi}
228 | \bibpunct{[}{]}{,}{n}{}{}%
229 | }%
230 |
%% Override of natbib's internal \NAT@set@cites dispatcher, which wires up the
%% citation commands for numeric / superscript / author-year modes.
%% NOTE(review): in the plain numeric branch \NAT@space is \let to \relax where
%% stock natbib uses \NAT@spacechar -- presumably to suppress the space before
%% bracketed citation numbers; confirm against the natbib.sty version in use.
231 | \renewcommand\NAT@set@cites{%
232 | \ifNAT@numbers
233 | \ifNAT@super \let\@cite\NAT@citesuper
234 | \def\NAT@mbox##1{\unskip\nobreak\textsuperscript{##1}}%
235 | \let\citeyearpar=\citeyear
236 | \let\NAT@space\relax
237 | \def\NAT@super@kern{\kern\p@}%
238 | \else
239 | \let\NAT@mbox=\mbox
240 | \let\@cite\NAT@citenum
241 | \let\NAT@space\relax
242 | \let\NAT@super@kern\relax
243 | \fi
244 | \let\@citex\NAT@citexnum
245 | \let\@biblabel\NAT@biblabelnum
246 | \let\@bibsetup\NAT@bibsetnum
247 | \renewcommand\NAT@idxtxt{\NAT@name\NAT@spacechar\NAT@open\NAT@num\NAT@close}%
248 | \def\natexlab##1{}%
249 | \def\NAT@penalty{\penalty\@m}%
250 | \else
251 | \let\@cite\NAT@cite
252 | \let\@citex\NAT@citex
253 | \let\@biblabel\NAT@biblabel
254 | \let\@bibsetup\NAT@bibsetup
255 | \let\NAT@space\NAT@spacechar
256 | \let\NAT@penalty\@empty
257 | \renewcommand\NAT@idxtxt{\NAT@name\NAT@spacechar\NAT@open\NAT@date\NAT@close}%
258 | \def\natexlab##1{##1}%
259 | \fi}
260 |
261 | %%%%% Hyperlinks
262 | %% Define color for citations
263 | \definecolor{bluecite}{HTML}{0875b7}
264 |
265 | \ifthenelse{\equal{\@arttype}{Book}}{
266 | \RequirePackage[unicode=true,
267 | bookmarksopen={true},
268 | pdffitwindow=true,
269 | colorlinks=true,
270 | linkcolor=black,
271 | citecolor=black,
272 | urlcolor=black,
273 | hyperfootnotes=false,
274 | pdfstartview={FitH},
275 | pdfpagemode=UseNone]{hyperref}
276 | }{
277 | \RequirePackage[unicode=true,
278 | bookmarksopen={true},
279 | pdffitwindow=true,
280 | colorlinks=true,
281 | linkcolor=bluecite,
282 | citecolor=bluecite,
283 | urlcolor=bluecite,
284 | hyperfootnotes=false,
285 | pdfstartview={FitH},
286 | pdfpagemode= UseNone]{hyperref}
287 | }
288 |
289 | %% To have the possibility to change the urlcolor
290 | \newcommand{\changeurlcolor}[1]{\hypersetup{urlcolor=#1}}
291 |
292 | %% Metadata
293 | \newcommand{\org@maketitle}{}% LATEX-Check
294 | \let\org@maketitle\maketitle
295 | \def\maketitle{%
296 | \hypersetup{
297 | pdftitle={\@Title},
298 | pdfsubject={\@abstract},
299 | pdfkeywords={\@keyword},
300 | pdfauthor={\@AuthorNames}
301 | }%
302 | \org@maketitle
303 | }
304 |
305 | %%%% Footnotes
306 | \RequirePackage[hang]{footmisc}
307 | \setlength{\skip\footins}{1.2cm}
308 | \setlength{\footnotemargin}{5mm}
309 | \def\footnoterule{\kern-14\p@
310 | \hrule \@width 2in \kern 11.6\p@}
311 |
312 | %%%% URL
313 | \RequirePackage{url}
314 | \urlstyle{same}
315 | \g@addto@macro{\UrlBreaks}{\UrlOrds}
316 |
317 | %%%% Widows & orphans
318 | \clubpenalty=10000
319 | \widowpenalty=10000
320 | \displaywidowpenalty=10000
321 |
322 | %%%% Front matter
323 | \newcommand{\firstargument}{}
324 | \newcommand{\Title}[1]{\gdef\@Title{#1}}%
325 | \newcommand{\Author}[1]{\gdef\@Author{#1}}%
326 | \def\@AuthorNames{}
327 | \newcommand{\AuthorNames}[1]{\gdef\@AuthorNames{#1}}%
328 | \newcommand{\firstpage}[1]{\gdef\@firstpage{#1}}
329 | \newcommand{\doinum}[1]{\gdef\@doinum{#1}}
330 |
331 | % DOI number
%% \twodigits{n}: prints an integer 0-99 zero-padded to two digits
%% (used below to format the issue number inside the DOI).
332 | \newcommand\twodigits[1]{%
333 | \ifnum#1<10
334 | 0\number#1
335 | \else
336 | \number#1
337 | \fi
338 | }
339 |
%% \fourdigits{n}: prints an integer 0-9999 zero-padded to four digits
%% (used below to format the article number inside the DOI);
%% values of 10000 or more print the literal text "error".
340 | \newcommand\fourdigits[1]{%
341 | \ifnum#1<10 000\number#1
342 | \else
343 | \ifnum#1<100 00\number#1
344 | \else
345 | \ifnum#1<1000 0\number#1
346 | \else
347 | \ifnum#1<10000 \number#1
348 | \else
349 | error
350 | \fi
351 | \fi
352 | \fi
353 | \fi
354 | }
355 |
356 |
357 | \ifthenelse{\equal{\@journal}{molbank}}{
358 | \doinum{10.3390/\@articlenumber}
359 | }{
360 | \doinum{10.3390/\@doiabbr\@pubvolume\twodigits\@issuenum\fourdigits\@articlenumber}
361 | }
362 |
363 |
364 | \newcommand{\pubvolume}[1]{\gdef\@pubvolume{#1}}
365 | \newcommand{\pubyear}[1]{\gdef\@pubyear{#1}}
366 | \newcommand{\copyrightyear}[1]{\gdef\@copyrightyear{#1}}
367 | \newcommand{\address}[2][]{\renewcommand{\firstargument}{#1}\gdef\@address{#2}}
368 | \newcommand{\corresfirstargument}{}
369 | \def\@corres{}
370 | \newcommand{\corres}[2][]{\renewcommand{\corresfirstargument}{#1}\gdef\@corres{#2}}
371 | \def\@conference{}
372 | \newcommand{\conference}[1]{\gdef\@conference{#1}}%
373 | \def\@abstract{}
374 | \renewcommand{\abstract}[1]{\gdef\@abstract{#1}}
375 | \def\@externaleditor{}
376 | \newcommand{\externaleditor}[1]{\gdef\@externaleditor{#1}}
377 | \def\@LSID{}
378 | \newcommand{\LSID}[1]{\gdef\@LSID{#1}}
379 | \newcommand{\history}[1]{\gdef\@history{#1}}
380 | \def\@pacs{}
381 | \newcommand{\PACS}[1]{\gdef\@pacs{#1}}
382 | \def\@msc{}
383 | \newcommand{\MSC}[1]{\gdef\@msc{#1}}
384 | \def\@jel{}
385 | \newcommand{\JEL}[1]{\gdef\@jel{#1}}
386 | \def\@keyword{}
387 | \newcommand{\keyword}[1]{\gdef\@keyword{#1}}
388 | \def\@dataset{}
389 | \newcommand{\dataset}[1]{\gdef\@dataset{#1}}
390 | \def\@datasetlicense{}
391 | \newcommand{\datasetlicense}[1]{\gdef\@datasetlicense{#1}}
392 | \def\@featuredapplication{}
393 | \newcommand{\featuredapplication}[1]{\gdef\@featuredapplication{#1}}
394 | \def\@keycontribution{}
395 | \newcommand{\keycontribution}[1]{\gdef\@keycontribution{#1}}
396 |
397 |
398 | \def\@issuenum{}
399 | \newcommand{\issuenum}[1]{\gdef\@issuenum{#1}}
400 | \def\@updates{}
401 | \newcommand{\updates}[1]{\gdef\@updates{#1}}
402 |
403 | \def\@firstnote{}
404 | \newcommand{\firstnote}[1]{\gdef\@firstnote{#1}}
405 | \def\@secondnote{}
406 | \newcommand{\secondnote}[1]{\gdef\@secondnote{#1}}%
407 | \def\@thirdnote{}
408 | \newcommand{\thirdnote}[1]{\gdef\@thirdnote{#1}}%
409 | \def\@fourthnote{}
410 | \newcommand{\fourthnote}[1]{\gdef\@fourthnote{#1}}%
411 | \def\@fifthnote{}
412 | \newcommand{\fifthnote}[1]{\gdef\@fifthnote{#1}}%
413 | \def\@sixthnote{}
414 | \newcommand{\sixthnote}[1]{\gdef\@sixthnote{#1}}%
415 | \def\@seventhnote{}
416 | \newcommand{\seventhnote}[1]{\gdef\@seventhnote{#1}}%
417 | \def\@eighthnote{}
418 | \newcommand{\eighthnote}[1]{\gdef\@eighthnote{#1}}%
419 |
420 | \def\@simplesumm{}
421 | \newcommand{\simplesumm}[1]{\gdef\@simplesumm{#1}}
422 | \newcommand{\articlenumber}[1]{\gdef\@articlenumber{#1}}
423 |
424 | \def\@externalbibliography{}
425 | \newcommand{\externalbibliography}[1]{\gdef\@externalbibliography{#1}}
426 |
427 | \def\@reftitle{}
428 | \newcommand{\reftitle}[1]{\gdef\@reftitle{#1}}
429 |
430 | % For transition period to change back to continuous page numbers
431 | \def\@continuouspages{}
432 | \newcommand{\continuouspages}[1]{\gdef\@continuouspages{#1}}
433 |
434 |
435 | %% ORCID
436 | % Make Orcid icon
437 | \newcommand{\orcidicon}{\includegraphics[width=0.32cm]{logo-orcid.pdf}}
438 |
439 | % Define link and button for each author
440 | \foreach \x in {A, ..., Z}{%
441 | \expandafter\xdef\csname orcid\x\endcsname{\noexpand\href{https://orcid.org/\csname orcidauthor\x\endcsname}{\noexpand\orcidicon}}
442 | }
443 |
444 | %%%% Journal name for the header
445 | \newcommand{\journalname}{\@journalshort}
446 |
447 |
448 | \regtotcounter{page} % to enable extracting the value of the counter "page" using the totcount package
449 |
450 | %%%% Header and footer on first page
451 | %% The plain page style needs to be redefined because with \maketitle in the article class, LaTeX applies the plain page style automatically to the first page.
452 | \ifthenelse{\equal{\@journal}{preprints} %
453 | \OR \equal{\@arttype}{Book}}{%
454 | \fancypagestyle{plain}{%
455 | \fancyhf{}
456 | \ifthenelse{\equal{\@arttype}{Book}}{
457 | \fancyfoot[C]{\footnotesize\thepage}
458 | }{%
459 | }
460 | }
461 | }{%
462 | \ifthenelse{\equal{\@arttype}{Supfile}}{
463 | \fancypagestyle{plain}{
464 | \fancyhf{}
465 | \fancyhead[R]{
466 | \footnotesize %
467 | S\thepage{} of S\pageref*{LastPage}%
468 | }%
469 | \fancyhead[L]{
470 | \footnotesize %
471 | \ifthenelse{\equal{\@status}{submit}}{%
472 | Version {\@ \today} submitted to {\em\journalname}%
473 | }{%
474 | {\em \journalname} %
475 | {\bfseries \@pubyear}, %
476 | {\em \@pubvolume}, %
477 | \ifthenelse{\equal{\@continuouspages}{\@empty}}{%
478 | \@firstpage --\pageref*{LastPage}%
479 | }{%
480 | \@articlenumber%
481 | }%
482 | ; doi:{\changeurlcolor{black}%
483 | \href{http://dx.doi.org/\@doinum}%
484 | {\@doinum}}%
485 | }%
486 | }%
487 | }%
488 | }{
489 | \fancypagestyle{plain}{
490 | \fancyhf{}
491 | \fancyfoot[L]{
492 | \footnotesize%
493 | \ifthenelse{\equal{\@status}{submit}}{%
494 | Submitted to {\em\journalname}, %
495 | pages \thepage \ -- \color{black}{\pageref*{LastPage}}%
496 | }{
497 | {\em \journalname}\ %
498 | {\bfseries \@pubyear}, %
499 | {\em \@pubvolume}, %
500 | \ifthenelse{\equal{\@continuouspages}{\@empty}}{%
501 | \@articlenumber%
502 | }{%
503 | \@firstpage\ifnumcomp{\totvalue{page}-1}{=}{\@firstpage}{}{--\pageref*{LastPage}}%
504 | }%
505 | ; doi:{\changeurlcolor{black}%
506 | \href{http://dx.doi.org/\@doinum}%
507 | {\@doinum}}%
508 | }%
509 | }%
510 | \fancyfoot[R]{
511 | \footnotesize%
512 | {\changeurlcolor{black}%
513 | \href{http://www.mdpi.com/journal/\@journal}%
514 | {www.mdpi.com/journal/\@journal}}%
515 | }%
516 | \fancyhead{}
517 | \renewcommand{\headrulewidth}{0.0pt}%
518 | }
519 | }%
520 | }%
521 |
%%%% Maketitle part 1: Logo, Arttype, Title, Author
% Builds the title block: logo row (unless preprint/book), article type
% line, bold title, and bold author list. Supplementary files ("Supfile")
% get a reduced layout: "Supplementary Materials: <title>" plus authors,
% with no logos and no article-type line.
\renewcommand{\@maketitle}{
\begin{flushleft}
% Branch 1: supplementary-file layout.
\ifthenelse{\equal{\@arttype}{Supfile}}{%
\fontsize{18}{18}\selectfont
\raggedright
\noindent\textbf{Supplementary Materials: \@Title}%
\par
\vspace{12pt}
\fontsize{10}{10}\selectfont
\noindent\boldmath\bfseries{\@Author}
}{%
% Branch 2: regular article (books skip the pull-up space).
\ifthenelse{\equal{\@arttype}{Book}}{}{%
\vspace*{-1.75cm}
}
{%0 Logo row: journal logo left, MDPI logo right; submit mode shows only
% the MDPI logo; preprints and books show none.
\ifthenelse{\equal{\@journal}{preprints}
\OR \equal{\@arttype}{Book}}{}{%
\ifthenelse{\equal{\@status}{submit}}{%
\hfill \href{http://www.mdpi.com}{%
\includegraphics[height=1cm]{logo-mdpi.pdf}}\vspace{0.5cm}%
}{
\href{http://www.mdpi.com/journal/\@journal}{
\includegraphics[height=1.2cm]{\@journal-logo.eps}}%
\hfill
% Proceedings additionally carry a conference logo in the middle.
\ifthenelse{\equal{\@journal}{proceedings}}{
\href{http://www.mdpi.com/journal/\@journal}{
\includegraphics[height=1.2cm]{logo-conference.eps}
\hfill}
}{}
% scipharm uses its own variant of the MDPI logo.
\ifthenelse{\equal{\@journal}{scipharm}}{%
\href{http://www.mdpi.com}{\includegraphics[height=1cm]{logo-mdpi-scipharm.eps}}%
}{%
\href{http://www.mdpi.com}{\includegraphics[height=1cm]{logo-mdpi.pdf}}%
}%
}%
}%
\par
}%0
{%1 Article type line (italic), omitted for books.
\vspace{14pt}
\fontsize{10}{10}\selectfont
\ifthenelse{\equal{\@arttype}{Book}}{}{
\textit{\@arttype}%
}%
\par%
}%1
{%2 Title, 18pt bold.
\vspace{-1pt}
\fontsize{18}{18}\selectfont
\boldmath\bfseries{\@Title}
\par
\vspace{15pt}
}%2
{%3 Author list, bold.
\boldmath\bfseries{\@Author}
\par
\vspace{-4pt}
}%3
}
\end{flushleft}%
}
584 |
% Commands for hanging indent used by the front-matter note lists below:
% \dist is the label column width; \hang starts a paragraph whose lines
% after the first are indented by \dist, so the marker hangs on the left.
\newcommand{\dist}{1.7em}
\newcommand{\hang}{\hangafter=1\hangindent=\dist\noindent}
588 |
%%%% Maketitle part 2
% Front matter under the author list: affiliation addresses, the "*"
% correspondence line, an optional conference note, and up to eight
% numbered author notes. Each note's footnote symbol shifts one position
% down the sequence (dagger, ddagger, S, |, P, **, ...) when a conference
% note is present, since the conference note consumes the dagger.
% Books get only vertical space instead of this block.
\newcommand{\maketitlen}{
\ifthenelse{\equal{\@arttype}{Book}}{\vspace{12pt}}{
\begin{flushleft}
\begin{spacing}{1.35}
\leftskip0.2cm
\fontsize{9}{9}\selectfont
{%
% Affiliation addresses; hanging indent unless there is a single address.
\ifthenelse{\equal{\firstargument}{1}}{}{%
\hang}\@address
\par
}%
{%
% Correspondence line ("*"); suppressed for single-author works.
\ifthenelse{\equal{\@authornum}{author}}{}{%
\ifthenelse{\equal{\@corres}{\@empty}}{}{%
\hang\textbf{*} \tabto{\dist} \@corres}
\par
}
}%
{%
% Conference note (takes the dagger symbol when present).
\ifthenelse{\equal{\@conference}{\@empty}}{}{%
\hang$\dagger$ \tabto{\dist} This paper is an extended version of our paper published in\space \@conference.}
\par
}%
{%
\ifthenelse{\equal{\@firstnote}{\@empty}}{}{%
\hang\ifthenelse{\equal{\@conference}{\@empty}}{$\dagger$}{$\ddagger$} \tabto{\dist} \@firstnote}
\par
}%
{%
\ifthenelse{\equal{\@secondnote}{\@empty}}{}{%
\hang \ifthenelse{\equal{\@conference}{\@empty}}{$\ddagger$}{\S} \tabto{\dist} \@secondnote}
\par
}%
{%
\ifthenelse{\equal{\@thirdnote}{\@empty}}{}{%
\hang \ifthenelse{\equal{\@conference}{\@empty}}{\S}{$\|$} \tabto{\dist} \@thirdnote}
\par
}%
{%
\ifthenelse{\equal{\@fourthnote}{\@empty}}{}{%
\hang \ifthenelse{\equal{\@conference}{\@empty}}{$\|$}{\P} \tabto{\dist} \@fourthnote}
\par
}%
{%
\ifthenelse{\equal{\@fifthnote}{\@empty}}{}{%
\hang \ifthenelse{\equal{\@conference}{\@empty}}{\P}{**} \tabto{\dist} \@fifthnote}
\par
}%
{%
\ifthenelse{\equal{\@sixthnote}{\@empty}}{}{%
\hang \ifthenelse{\equal{\@conference}{\@empty}}{**}{$\dagger\dagger$} \tabto{\dist} \@sixthnote}
\par
}%
{%
\ifthenelse{\equal{\@seventhnote}{\@empty}}{}{%
\hang \ifthenelse{\equal{\@conference}{\@empty}}{$\dagger\dagger$}{$\ddagger\ddagger$} \tabto{\dist} \@seventhnote}
\par
}%
{%
\ifthenelse{\equal{\@eighthnote}{\@empty}}{}{%
\hang \ifthenelse{\equal{\@conference}{\@empty}}{$\ddagger\ddagger$}{***} \tabto{\dist} \@eighthnote}
\par
}%
\vspace{6pt}
% Editor / LSID / history (or "submitted") line. When the "updates" badge
% is requested, the same content is set in a .79-width parbox with the
% check-for-updates badge in a .19-width parbox on the right.
\ifthenelse{\equal{\@updates}{\@empty}}{
\ifthenelse{\equal{\@externaleditor}{\@empty}}{}{\@externaleditor}
\par
\ifthenelse{\equal{\@LSID}{\@empty}}{}{\@LSID}
\par
\ifthenelse{\equal{\@status}{submit}}{
Version {\@ \today} submitted to \journalname
}{
\mbox{\@history}
}
}{
\parbox[tb]{.79\textwidth}{
\ifthenelse{\equal{\@externaleditor}{\@empty}}{}{\@externaleditor}
\par
\ifthenelse{\equal{\@LSID}{\@empty}}{}{\@LSID}
\par
\ifthenelse{\equal{\@status}{submit}}{
Version {\@ \today} submitted to \journalname
}{
\mbox{\@history}
}
}
\parbox[b]{.19\textwidth}{
\hfill
\ifthenelse{\equal{\@updates}{\@empty}}{
}{
\href{http://www.mdpi.com/\@ISSN/\@pubvolume/\@issuenum/\@articlenumber?type=check_update&version=1}{\includegraphics[height=.6cm]{logo-updates.pdf}}%
}%
}%
}
\par
\vspace{-4pt}%
\end{spacing}
\end{flushleft}
}
}
690 |
%%%% Abstract, keywords, journal data, PACS, MSC, JEL
% Prints the abstract area. Every field is optional (empty fields are
% skipped): featured application (Applied Sciences), simple summary,
% abstract, dataset and dataset license (Data), keywords, key
% contribution (Toxins), and the PACS/MSC/JEL classification lines.
% Ends with a horizontal rule, omitted for books.
\newcommand{\abstractkeywords}{
\vspace{-8pt}
{% For journal Applied Sciences:
\ifthenelse{\equal{\@featuredapplication}{\@empty}}{}{
\begingroup
\leftskip0.2cm
\noindent\textbf{Featured Application:\space\@featuredapplication}
\vspace{12pt}
\par
\endgroup}
}%
{%10 Simple summary and abstract.
\begingroup
\leftskip0.2cm
\ifthenelse{\equal{\@simplesumm}{\@empty}}{}{
\noindent\textbf{Simple Summary:\space}\@simplesumm
\vspace{12pt}
\par
}
\ifthenelse{\equal{\@abstract}{\@empty}}{}{
\noindent\textbf{Abstract:\space}\@abstract
\vspace{12pt}
\par
}
\endgroup
}%10
{% For journal Data:
\ifthenelse{\equal{\@dataset}{\@empty}}{}{
\begingroup
\leftskip0.2cm
\noindent\textbf{Dataset:\space}\@dataset
\vspace{12pt}
\par
\endgroup}
}%
{%For journal Data:
\ifthenelse{\equal{\@datasetlicense}{\@empty}}{}{
\begingroup
\leftskip0.2cm
\noindent\textbf{Dataset License:\space}\@datasetlicense
\vspace{12pt}
\par
\endgroup}
}%
{%11 Keywords.
\begingroup
\leftskip0.2cm
\ifthenelse{\equal{\@keyword}{\@empty}}{}{
\noindent\textbf{Keywords:\space}\@keyword
\vspace{12pt}
\par
}
\endgroup
}%11
{%For journal Toxins:
\begingroup
\leftskip0.2cm
\ifthenelse{\equal{\@keycontribution}{\@empty}}{}{
\noindent\textbf{Key Contribution:\space}\@keycontribution
\vspace{12pt}
\par
}
\endgroup
}%11
{%12 PACS (physics classification scheme).
\ifthenelse{\equal{\@pacs}{\@empty}}{}{
\begingroup
\leftskip0.2cm
\noindent\textbf{PACS:\space}\@pacs
\vspace{12pt}
\par
\endgroup}
}%12
{%13 MSC (mathematics subject classification).
\ifthenelse{\equal{\@msc}{\@empty}}{}{
\begingroup
\leftskip0.2cm
\noindent\textbf{MSC:\space}\@msc
\vspace{12pt}
\par
\endgroup}
}%13
{%14 JEL (economics classification).
\ifthenelse{\equal{\@jel}{\@empty}}{}{
\begingroup
\leftskip0.2cm
\noindent\textbf{JEL Classification:\space}\@jel
\vspace{12pt}
\par
\endgroup}
}%14
\vspace{4pt}
\ifthenelse{\equal{\@arttype}{Book}}{}{\hrule}
\vspace{12pt}
}
787 |
788 |
%%%% Print maketitle and abstractkeywords
% Emit the title machinery automatically at \begin{document}, then
% disable it (\let...\relax) so a stray \maketitle in the document body
% is a no-op. Supplementary files print only the title; regular articles
% also print the front-matter notes and the abstract/keywords block.
% Line numbering is switched on for submitted manuscripts.
\ifthenelse{\equal{\@arttype}{Supfile}}{
\AfterEndPreamble{
\maketitle
\let\maketitle\relax
\ifthenelse{\equal{\@status}{submit}}{\linenumbers}{}
}%
}{
\AfterEndPreamble{
\maketitle
\let\maketitle\relax
\maketitlen
\let\maketitlen\relax
\ifthenelse{\equal{\@status}{submit}}{\linenumbers}{}
\abstractkeywords
}%
}
% Provide \mathbb via the AMS blackboard-bold symbol font.
\AtBeginDocument{
\DeclareSymbolFont{AMSb}{U}{msb}{m}{n}
\DeclareSymbolFontAlphabet{\mathbb}{AMSb}
}
810 |
%%%% Font size in Tables
% \tablesize{\footnotesize} (etc.) sets the font used inside the next
% tabular environments; when unset, tables default to \small. The tabular
% environment is wrapped (at end of preamble, after other packages have
% finished patching it) to apply the size automatically.
\AtEndPreamble{
\def\@tablesize{}
\newcommand{\tablesize}[1]{\gdef\@tablesize{#1}}
\let\oldtabular\tabular
\renewcommand{\tabular}{\ifthenelse{\equal{\@tablesize}{\@empty}}{\small}{\@tablesize}\oldtabular}
}
818 |
%%%% Section headings
% Heading styles (titlesec): bold sections, italic subsections, plain
% subsubsections and paragraphs, all 10pt ragged-right with identical
% 12pt-above / 6pt-below spacing.
\setcounter{secnumdepth}{4} %i.e., section numbering depth, which defaults to 3 in the article class. To get paragraphs numbered and counted, increase the default value of secnumdepth to 4

\titleformat {\section} [block] {\raggedright \fontsize{10}{10}\selectfont\bfseries} {\thesection.\space} {0pt} {}
\titlespacing {\section} {0pt} {12pt} {6pt}

\titleformat {\subsection} [block] {\raggedright \fontsize{10}{10}\selectfont\itshape} {\thesubsection.\space} {0pt} {}
\titlespacing {\subsection} {0pt} {12pt} {6pt}

\titleformat {\subsubsection} [block] {\raggedright \fontsize{10}{10}\selectfont} {\thesubsubsection.\space} {0pt} {}
\titlespacing {\subsubsection} {0pt} {12pt} {6pt}

\titleformat {\paragraph} [block] {\raggedright \fontsize{10}{10}\selectfont} {} {0pt} {}
\titlespacing {\paragraph} {0pt} {12pt} {6pt}
833 |
%%%% Special section title style for back matter
% Each command typesets one back-matter paragraph as a 9pt run-in
% "\textbf{Label:} text" block. The three conflicts-of-interest variants
% are aliases kept for backward compatibility and produce identical output.
\newcommand{\supplementary}[1]{
\par\vspace{6pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Supplementary Materials:} {#1}\par}}

\newcommand{\acknowledgments}[1]{
\vspace{6pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Acknowledgments:} {#1}\par}}

\newcommand{\authorcontributions}[1]{%
\vspace{6pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Author Contributions:} {#1}\par}}

\newcommand{\funding}[1]{
\vspace{6pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Funding:} {#1}\par}}

\newcommand{\conflictsofinterest}[1]{%
\vspace{6pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Conflicts of Interest:} {#1}\par}}

\newcommand{\conflictofinterest}[1]{% Backwards compatibility for book production
\vspace{6pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Conflicts of Interest:} {#1}\par}}

\newcommand{\conflictofinterests}[1]{% Backwards compatibility for book production
\vspace{6pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Conflicts of Interest:} {#1}\par}}

\newcommand{\sampleavailability}[1]{%
\vspace{12pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Sample Availability:} {#1}\par}}

\newcommand{\reviewreports}[1]{%
\vspace{12pt}\noindent{\fontsize{9}{9}\selectfont\textbf{Review Reports:} {#1}\par}}

\newcommand{\abbreviations}[1]{%
\vspace{12pt}\noindent{\selectfont\textbf{Abbreviations}\par\vspace{6pt}\noindent {\fontsize{9}{9}\selectfont #1}\par}}
864 |
%%%%% Defines the appendix
% \appendixtitles{yes|no} controls whether a period follows the appendix
% letter in headings; \appendixsections is the matching document hook.
% \appendix resets the sectioning counters, switches section numbering to
% letters (A, B, ...), restyles headings with an "Appendix" prefix, and
% renumbers every float and theorem-like environment with an "A" prefix.
\def\@appendixtitles{}
\newcommand{\appendixtitles}[1]{\gdef\@appendixtitles{#1}}

\def\@appendixsections{}
\newcommand{\appendixsections}[1]{\gdef\@appendixsections{#1}}

\renewcommand{\appendix}{%
\setcounter{section}{0}%
\setcounter{subsection}{0}%
\setcounter{subsubsection}{0}%
%
\gdef\thesection{\@Alph\c@section}%
\gdef\thesubsection{\@Alph\c@section.\@arabic\c@subsection}%
%
% Heading formats mirror the main-text ones but prefix "Appendix".
\titleformat {\section} [block] {\raggedright \fontsize{10}{10}\selectfont\bfseries} {%
\ifthenelse{\equal{\@appendixtitles}{yes}}{%
\appendixname~\thesection.%
}{%
\appendixname~\thesection~%
}
} {0pt} {}
\titlespacing {\section} {0pt} {12pt} {6pt}
%
\titleformat {\subsection} [block] {\raggedright \fontsize{10}{10}\selectfont\itshape} {%
\ifthenelse{\equal{\@appendixtitles}{yes}}{%
\appendixname~\thesubsection.%
}{%
\appendixname~\thesubsection%
}
} {0pt} {}
\titlespacing {\subsection} {0pt} {12pt} {6pt}
%
\titleformat {\subsubsection} [block] {\raggedright \fontsize{10}{10}\selectfont} {%
\ifthenelse{\equal{\@appendixtitles}{yes}}{%
\appendixname~\thesubsubsection.%
}{%
\appendixname~\thesubsubsection%
}
} {0pt} {}
\titlespacing {\subsubsection} {0pt} {12pt} {6pt}
%
\gdef\theHsection{\@Alph\c@section.}% for hyperref
\gdef\theHsubsection{\@Alph\c@section.\@arabic\c@subsection}% for hyperref
% Run an optional per-class/per-document hook if one is defined.
\csname appendixmore\endcsname
% Renumber floats and theorem-like environments as A1, A2, ...
\renewcommand{\thefigure}{A\arabic{figure}}
\setcounter{figure}{0}
\renewcommand{\thetable}{A\arabic{table}}
\setcounter{table}{0}
\renewcommand{\thescheme}{A\arabic{scheme}}
\setcounter{scheme}{0}
\renewcommand{\thechart}{A\arabic{chart}}
\setcounter{chart}{0}
\renewcommand{\theboxenv}{A\arabic{boxenv}}
\setcounter{boxenv}{0}
\renewcommand{\theequation}{A\arabic{equation}}
\setcounter{equation}{0}
\renewcommand{\thetheorem}{A\arabic{theorem}}
\setcounter{theorem}{0}
\renewcommand{\thelemma}{A\arabic{lemma}}
\setcounter{lemma}{0}
\renewcommand{\thecorollary}{A\arabic{corollary}}
\setcounter{corollary}{0}
\renewcommand{\theproposition}{A\arabic{proposition}}
\setcounter{proposition}{0}
\renewcommand{\thecharacterization}{A\arabic{characterization}}
\setcounter{characterization}{0}
\renewcommand{\theproperty}{A\arabic{property}}
\setcounter{property}{0}
\renewcommand{\theproblem}{A\arabic{problem}}
\setcounter{problem}{0}
\renewcommand{\theexample}{A\arabic{example}}
\setcounter{example}{0}
\renewcommand{\theexamplesanddefinitions}{A\arabic{examplesanddefinitions}}
\setcounter{examplesanddefinitions}{0}
\renewcommand{\theremark}{A\arabic{remark}}
\setcounter{remark}{0}
\renewcommand{\thedefinition}{A\arabic{definition}}
\setcounter{definition}{0}
\renewcommand{\thehypothesis}{A\arabic{hypothesis}}
\setcounter{hypothesis}{0}
\renewcommand{\thenotation}{A\arabic{notation}}
\setcounter{notation}{0}
}
949 |
%%%% Layout
% Page geometry: books use a 170x244mm trim with uniform 2.05cm margins;
% articles use A4-class margins (2.7cm sides, 1.8cm top, 1.5cm bottom).
% Header and footer are included in the text-height computation.
\ifthenelse{\equal{\@arttype}{Book}}{%%
\RequirePackage[left=2.05cm,
right=2.05cm,
top=2.05cm,
bottom=2.05cm,
paperwidth=170mm,
paperheight=244mm,
includehead,
includefoot]{geometry}
}{
\RequirePackage[left=2.7cm,
right=2.7cm,
top=1.8cm,
bottom=1.5cm,
includehead,
includefoot]{geometry}
}

% Body leading and paragraph indent for the whole document.
\linespread{1.13}
\setlength{\parindent}{0.75cm}
971 |
%%%% Figures and tables
% Extra float types (listing, Box, chart, scheme) via the newfloat
% package, plus a uniform caption style: bold small label, period
% separator, small justified text. Captions sit below figures/schemes/
% charts and above tables/listings/boxes.
\RequirePackage{newfloat}
\DeclareFloatingEnvironment[]{listing}
\DeclareFloatingEnvironment[name=Box]{boxenv}
\DeclareFloatingEnvironment[]{chart}
\DeclareFloatingEnvironment[]{scheme}

\RequirePackage{caption}
\captionsetup[figure]{position=bottom, margin=0.75cm, labelfont={bf, small, stretch=1.17}, labelsep=period, textfont={small, stretch=1.17}, aboveskip=6pt, belowskip=-6pt, justification=justified}

\captionsetup[scheme]{position=bottom, margin=0.75cm, labelfont={bf, small, stretch=1.17}, labelsep=period, textfont={small, stretch=1.17}, aboveskip=6pt, belowskip=-6pt, justification=justified}

\captionsetup[listing]{position=top, margin=0.75cm, labelfont={bf, small, stretch=1.17}, labelsep=period, textfont={small, stretch=1.17}, aboveskip=6pt, justification=justified}

\captionsetup[chart]{position=bottom, margin=0.75cm, labelfont={bf, small, stretch=1.17}, labelsep=period, textfont={small, stretch=1.17}, aboveskip=6pt, belowskip=-6pt, justification=justified}

\captionsetup[table]{position=top, margin=0.75cm, labelfont={bf, small, stretch=1.17}, labelsep=period, textfont={small, stretch=1.17}, aboveskip=6pt, justification=justified}

\captionsetup[boxenv]{position=top, margin=0.75cm, labelfont={bf, small, stretch=1.17}, labelsep=period, textfont={small, stretch=1.17}, aboveskip=6pt,justification=justified}
991 |
992 |
993 |
%% For table footnotes
% \justifyorcenter{text}: measure the text in a box; if it is wider than
% the current line width, typeset it as a normal (justified) paragraph,
% otherwise center it on one line.
\newsavebox{\@justcentbox}
\newcommand{\justifyorcenter}[1]{
\sbox \@justcentbox{#1}
\ifdim \wd \@justcentbox >\hsize #1
\else \centerline{#1} \fi
}
1001 |
%%%% Bullet lists
% Tighten list spacing: \wideitemsep is half the default \itemsep minus
% 7pt, and \item is wrapped to install it before each entry.
\newlength{\wideitemsep}
\setlength{\wideitemsep}{.5\itemsep}
\addtolength{\wideitemsep}{-7pt}
\let\olditem\item
\renewcommand{\item}{\setlength{\itemsep}{\wideitemsep}\olditem}
1008 |
%%%% Quote environment
% Patch (etoolbox \patchcmd) the quote environment to indent 0.75cm on
% the left; empty success/failure branches mean a failed patch is silent.
\patchcmd{\quote}{\rightmargin}{\leftmargin 0.75cm \rightmargin}{}{}
1011 |
%%%% Supplementary file
% In supplementary files, figures and tables are numbered S1, S2, ...
\ifthenelse{\equal{\@arttype}{Supfile}}{
\renewcommand{\thefigure}{S\arabic{figure}}%
\renewcommand{\thetable}{S\arabic{table}}%
}{}%

%% Link to supplementary material: www.mdpi.com/ISSN-number/volume-number/issue-number/article-number
\newcommand{\linksupplementary}[1]{\url{http://www.mdpi.com/\@ISSN/\@pubvolume/\@issuenum/\@articlenumber/#1}}
1020 |
%%%% Header and footer (all pages except the first)
% fancyhdr setup. Left head: journal citation line (or "Version ...
% submitted" in submit mode); empty for preprints and books. Right head:
% "page of total" ("S"-prefixed for supplementary files; total omitted in
% continuous-pagination mode). Center foot: page number, books only.
\renewcommand\headrule{} %% set line (from fancyhdr) in header to nothing
\pagestyle{fancy}
\lhead{
\ifthenelse{\equal{\@journal}{preprints}%
\OR \equal{\@arttype}{Book}}{%
}{%
\fontsize{8}{8}\selectfont%
\ifthenelse{\equal{\@status}{submit}}{%
Version {\@ \today} submitted to {\em \journalname}%
}{%
\ifthenelse{\equal{\@arttype}{Supfile}}{%
{\em \journalname} {\bfseries \@pubyear}, {\em \@pubvolume} %
%\ifthenelse{\equal{\@articlenumber}{}}{%
%\@firstpage --\pageref*{LastPage}%
%}{\@articlenumber}%
; doi:{\changeurlcolor{black}%
\href{http://dx.doi.org/\@doinum}%
{\@doinum}}%
}{%
{\em\journalname\ }{\bfseries\@pubyear}, {\em \@pubvolume}%
\ifthenelse{\equal{\@continuouspages}{\@empty}}{%
, \@articlenumber%
}{%
}%
}%
}%
}%
}

\rhead{%
\ifthenelse{\equal{\@arttype}{Book}}{}{%
\ifthenelse{\equal{\@arttype}{Supfile}}{%
\fontsize{8}{8}\selectfont S\thepage{} of S\pageref*{LastPage}%
}{%
\ifthenelse{\equal{\@continuouspages}{\@empty}}{%
\fontsize{8}{8}\selectfont\thepage{} of \pageref*{LastPage}%
}{%
\fontsize{8}{8}\selectfont\thepage%{} of \pageref*{LastPage}%
}%
}%
}%
}

\cfoot{
\ifthenelse{\equal{\@arttype}{Book}}{%
\fontsize{8}{8}\selectfont\thepage
}{%
}
}
1071 |
1072 |
%%%% Bibliography
% Reference list: 9pt with wider leading, numeric labels "1." flushed
% left. The social-science/economics journals listed below use an
% author-date .bst and therefore need a negative \itemindent (smaller in
% magnitude when an external bibliography, i.e. BibTeX, is used); all
% other journals keep zero item indent.
\renewcommand\bibname{References} % Backwards compatibility for book production
\renewcommand\@biblabel[1]{#1.\hfill}
\def\thebibliography#1{
\linespread{1.44}
\section*{\@reftitle}
\addcontentsline{toc}{section}{References}
\fontsize{9}{9}\selectfont
\list{{\arabic{enumi}}}{\def\makelabel##1{\hss{##1}}
\topsep=0\p@
\parsep=5\p@
\partopsep=0\p@
\itemsep=0\p@
\labelsep=1.5mm
\ifthenelse{\equal{\@journal}{admsci}
\OR \equal{\@journal}{arts}
\OR \equal{\@journal}{econometrics}
\OR \equal{\@journal}{economies}
\OR \equal{\@journal}{genealogy}
\OR \equal{\@journal}{humanities}
\OR \equal{\@journal}{ijfs}
\OR \equal{\@journal}{jrfm}
\OR \equal{\@journal}{languages}
\OR \equal{\@journal}{laws}
\OR \equal{\@journal}{religions}
\OR \equal{\@journal}{risks}
\OR \equal{\@journal}{socsci}}{%
\ifthenelse{\equal{\@externalbibliography}{\@empty}}{%
\itemindent=-7.7mm
}{%
\itemindent=-3.3mm}%
}{%
\itemindent=0\p@}
\settowidth\labelwidth{\footnotesize[#1]}%
\leftmargin\labelwidth
\advance\leftmargin\labelsep
%\advance\leftmargin -\itemindent
\usecounter{enumi}}
%\def\newblock{\ }
%\sloppy\clubpenalty4000\widowpenalty4000
%\sfcode`\.=1000\relax
}
\let\endthebibliography=\endlist
1116 |
%%%% Copyright info
% \cright prints the copyright/license note at the end of an article.
% Nothing is printed for supplementary files or preprints. Submitted
% manuscripts get a text-only note; published articles get the CC license
% logo in a left minipage next to the note. The ijtpp journal uses
% CC BY-NC-ND; every other journal uses CC BY.
% Fix: all four \href license-link TARGETS previously ended in a spurious
% "." (the sentence period had been swallowed into the URL argument),
% so clicking the link led to a wrong address; the displayed text was
% already correct. The trailing period is now outside the \href target.
\newcommand{\cright}{%
\ifthenelse{\equal{\@arttype}{Supfile} \OR \equal{\@journal}{preprints}}{%
}{%
\vspace{12pt}
\noindent
\linespread{1.44}
\fontsize{9}{9}\selectfont
\ifthenelse{\equal{\@status}{submit}}{
\noindent \copyright{} {\@ \the\year} by the \@authornum. %
Submitted to {\em \journalname} for %
possible open access publication %
under the terms and conditions of the Creative Commons Attribution %
\ifthenelse{\equal{\@journal}{ijtpp}}{NonCommercial NoDerivatives (CC BY-NC-ND)}{(CC BY)} %
license %
\ifthenelse{\equal{\@journal}{ijtpp}}{
(\changeurlcolor{black}%
\href{https://creativecommons.org/licenses/by-nc-nd/4.0/}%
{https://creativecommons.org/licenses/by-nc-nd/4.0/}).%
}{%
(\changeurlcolor{black}%
\href{http://creativecommons.org/licenses/by/4.0/}%
{http://creativecommons.org/licenses/by/4.0/}).}
}{
\begin{minipage}{.2\textwidth}
\hspace{-1.2mm}%
\vspace{2mm}%
\href{http://creativecommons.org/}{%
\ifthenelse{\equal{\@journal}{ijtpp}}{%
\includegraphics[width=0.94\textwidth]{logo-ccby-nc-nd.eps}%
}{%
\includegraphics[width=0.94\textwidth]{logo-ccby.pdf}
}
}
\end{minipage}%
\begin{minipage}{.79\textwidth}
\copyright \ {\@copyrightyear} by the \@authornum. %
Licensee MDPI, Basel, Switzerland. %
This article is an open access article %
distributed under the terms and conditions %
of the Creative Commons Attribution %
\ifthenelse{\equal{\@journal}{ijtpp}}{NonCommercial NoDerivatives (CC BY-NC-ND)}{(CC BY)} %
license %
\ifthenelse{\equal{\@journal}{ijtpp}}{
(\changeurlcolor{black}%
\href{https://creativecommons.org/licenses/by-nc-nd/4.0/}%
{https://creativecommons.org/licenses/by-nc-nd/4.0/}).%
}{%
(\changeurlcolor{black}%
\href{http://creativecommons.org/licenses/by/4.0/}%
{http://creativecommons.org/licenses/by/4.0/}).}
\end{minipage}
}
}
}
1172 |
1173 |
1174 | \endinput
--------------------------------------------------------------------------------