├── .travis.yml ├── tests ├── resources │ ├── user.txt │ ├── user2.txt │ ├── user_backfill.txt │ ├── items.txt │ ├── newevents.csv │ ├── newevents_wt.csv │ ├── userpred_jac3_nobackfill.csv │ ├── userpred_jac3_backfill.csv │ ├── wt_count3_weighted_events.csv │ ├── wt_count3_unweighted_events.csv │ ├── wt_count3_userid_only.csv │ ├── userpred_count3_userid_only.csv │ ├── userpred_count3_userid_plus_events.csv │ ├── wt_count3_userid_plus_weighted_events.csv │ ├── wt_count3_userid_plus_unweighted_events.csv │ ├── userpred_jac3_userid_only.csv │ ├── userpred_jac3_userid_plus_events.csv │ ├── userpred_lift3_userid_only.csv │ ├── userpred_lift3_userid_plus_events.csv │ ├── userpred_2users_count3_userid_only.csv │ ├── userpred_2users_count3_userid_plus_events.csv │ ├── itempred_count3.csv │ ├── user_aff.csv │ ├── wt_aff.csv │ ├── itempred_jac3.csv │ ├── itempred_lift3.csv │ ├── sim_count1.csv │ └── sim_count3.csv ├── testthat.R └── testthat │ ├── test01_sar.R │ ├── test03_sarpred_wt.R │ ├── test04_cold_items.R │ ├── test02_sarpred.R │ └── test05_azure_service.R ├── LICENSE ├── data ├── ms_usage.RData └── ms_catalog.RData ├── src ├── Makevars ├── Makevars.win ├── similarity_rescale.cpp ├── make_similarity_matrix_sp.cpp ├── RcppExports.cpp └── userpred.cpp ├── .Rbuildignore ├── R ├── az_uris.R ├── set_sar_threads.R ├── sar_pkg.R ├── RcppExports.R ├── ms_data.R ├── cold_items_nullmodel.R ├── cold_items_model.R ├── az_service.R ├── onload.R ├── predict_sar.R ├── sar.R ├── az_rec_endp.R └── az_rec_model.R ├── CONTRIBUTING.md ├── SAR.Rproj ├── NEWS.md ├── NAMESPACE ├── man ├── ms_catalog.Rd ├── ms_usage.Rd ├── item_predict.Rd ├── get_rec_service.Rd ├── delete_rec_service.Rd ├── az_rec_service.Rd ├── create_rec_service.Rd ├── rec_model.Rd ├── user_predict.Rd ├── sar.Rd └── rec_endpoint.Rd ├── .gitignore ├── LICENSE.md ├── SAR.sln ├── README.md ├── DESCRIPTION ├── SAR.rxproj └── .gitattributes /.travis.yml: -------------------------------------------------------------------------------- 1 | language: r 2 | cache: packages 3 | -------------------------------------------------------------------------------- /tests/resources/user.txt: -------------------------------------------------------------------------------- 1 | 0003000098E85347 2 | -------------------------------------------------------------------------------- /tests/resources/user2.txt: -------------------------------------------------------------------------------- 1 | 00030000B72150D8 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | YEAR: 2018 2 | COPYRIGHT HOLDER: Microsoft 3 | -------------------------------------------------------------------------------- /tests/resources/user_backfill.txt: -------------------------------------------------------------------------------- 1 | 00010DC74C33EC42 2 | -------------------------------------------------------------------------------- /tests/testthat.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(SAR) 3 | 4 | test_check("SAR") 5 | -------------------------------------------------------------------------------- /data/ms_usage.RData: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hongooi73/SAR/HEAD/data/ms_usage.RData -------------------------------------------------------------------------------- /data/ms_catalog.RData: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/hongooi73/SAR/HEAD/data/ms_catalog.RData -------------------------------------------------------------------------------- /tests/resources/items.txt: -------------------------------------------------------------------------------- 1 | DQF-00358 2 | DQF-00248 3 | DAF-00396 4 | DHF-01055 5 | DHF-01159 6 | -------------------------------------------------------------------------------- /src/Makevars: -------------------------------------------------------------------------------- 1 | PKG_CXXFLAGS += -DARMA_64BIT_WORD 2 | 3 | PKG_LIBS += $(shell ${R_HOME}/bin/Rscript -e "RcppParallel::RcppParallelLibs()") 4 | -------------------------------------------------------------------------------- /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^misc$ 2 | ^\.vs$ 3 | ^\.vscode$ 4 | \.sln$ 5 | \.Rproj$ 6 | \.Rxproj$ 7 | ^\.Rproj\.user$ 8 | .travis.yml 9 | CONTRIBUTING.md 10 | ^LICENSE\.md$ 11 | -------------------------------------------------------------------------------- /src/Makevars.win: -------------------------------------------------------------------------------- 1 | PKG_CXXFLAGS += -DARMA_64BIT_WORD -DRCPP_PARALLEL_USE_TBB=1 2 | 3 | PKG_LIBS += $(shell "${R_HOME}/bin${R_ARCH_BIN}/Rscript.exe" \ 4 | -e "RcppParallel::RcppParallelLibs()") 5 | -------------------------------------------------------------------------------- /R/az_uris.R: -------------------------------------------------------------------------------- 1 | sar_template <- "https://raw.githubusercontent.com/Microsoft/Product-Recommendations/master/saw/recommendationswebapp/core/arm/resources.json" 2 | 3 | sar_dll <- "https://github.com/Microsoft/Product-Recommendations/raw/master/saw/recommendationswebapp/assets/Recommendations.WebApp.zip" 4 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 4 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) 5 | or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
6 | -------------------------------------------------------------------------------- /tests/resources/newevents.csv: -------------------------------------------------------------------------------- 1 | "user","item","time" 2 | "0003000098E85347","DQF-00358",2015-06-09 19:39:37 3 | "0003000098E85347","DAF-00448",2015-03-06 09:08:24 4 | "0003000098E85347","DQF-00248",2015-03-06 09:08:24 5 | "0003000098E85347","DAF-00416",2014-12-26 20:22:18 6 | "0003000098E85347","DQF-00248",2014-12-26 20:22:18 7 | "0003000098E85347","DQF-00248",2014-12-21 07:40:39 8 | -------------------------------------------------------------------------------- /SAR.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | BuildType: Package 16 | PackageUseDevtools: Yes 17 | PackageInstallArgs: --no-multiarch --with-keep.source 18 | -------------------------------------------------------------------------------- /tests/resources/newevents_wt.csv: -------------------------------------------------------------------------------- 1 | "user","item","time","weight" 2 | "0003000098E85347","DQF-00358",2015-06-09 19:39:37,1 3 | "0003000098E85347","DAF-00448",2015-03-06 09:08:24,2 4 | "0003000098E85347","DQF-00248",2015-03-06 09:08:24,3 5 | "0003000098E85347","DAF-00416",2014-12-26 20:22:18,1 6 | "0003000098E85347","DQF-00248",2014-12-26 20:22:18,2 7 | "0003000098E85347","DQF-00248",2014-12-21 07:40:39,2 8 | -------------------------------------------------------------------------------- /R/set_sar_threads.R: -------------------------------------------------------------------------------- 1 | #' @param n_threads For `set_sar_threads`, the number of threads to use. Defaults to half the number of logical cores. 2 | #' 3 | #' @rdname user_predict 4 | #' @export 5 | set_sar_threads <- function(n_threads) 6 | { 7 | if(missing(n_threads)) 8 | n_threads <- max(1, parallel::detectCores()/2) 9 | RcppParallel::setThreadOptions(numThreads=as.integer(n_threads)) 10 | } 11 | -------------------------------------------------------------------------------- /tests/resources/userpred_jac3_nobackfill.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "00010DC74C33EC42","DHF-00905","DHF-01080","DHF-00894","DHF-00890","DHF-00907",NA,NA,NA,NA,NA,0.0624944380567152,0.0609551661833971,0.0446388843262251,0.041182970700969,0.00841761818723102,NA,NA,NA,NA,NA 3 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | # SAR 1.0.4 2 | 3 | * Fix web links for CRAN 4 | 5 | # SAR 1.0.3 6 | 7 | * Update maintainer email address. 8 | 9 | # SAR 1.0.2 10 | 11 | * Compatibility update for rlang/tibble changes. 12 | 13 | # SAR 1.0.1 14 | 15 | * Allow resource group/subscription methods to work without SAR package on search path. 16 | * Fix a bug in cold item prediction. 
17 | 18 | # SAR 1.0.0 19 | 20 | * Initial release to CRAN 21 | -------------------------------------------------------------------------------- /R/sar_pkg.R: -------------------------------------------------------------------------------- 1 | #' @useDynLib SAR, .registration=TRUE 2 | NULL 3 | 4 | #' @importFrom Rcpp sourceCpp 5 | #' @importFrom R6 R6Class 6 | #' @importFrom dplyr %>% 7 | #' @importFrom RcppParallel RcppParallelLibs 8 | #' @importFrom stats cor predict reformulate xtabs 9 | NULL 10 | 11 | #' @import Matrix 12 | #' @import AzureRMR 13 | #' @import AzureStor 14 | NULL 15 | 16 | globalVariables(c("self", "warm_item", "cold_item"), "SAR") 17 | -------------------------------------------------------------------------------- /tests/resources/userpred_jac3_backfill.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "00010DC74C33EC42","DHF-00905","DHF-01080","DHF-00894","DHF-00890","DHF-00907","DQF-00248","DAF-00349","DAF-00288","DHF-01530","DAF-00448",0.0624944380567152,0.0609551661833971,0.0446388843262251,0.041182970700969,0.00841761818723102,0,0,0,0,0 3 | -------------------------------------------------------------------------------- /tests/resources/wt_count3_weighted_events.csv: -------------------------------------------------------------------------------- 1 | "rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "DAF-00349","DAF-00288","DAF-00419","DAF-00442","DAF-00482","DAF-00460","DAF-00351","DAF-00350","DAF-00450","DAF-00375",107.101637846919,98.64560862846,45.1751752528954,40.9458854195165,35.2041550308531,31.7440668251966,28.5922659203972,27.9308249401065,26.3342536786592,20.2015865726899 3 | -------------------------------------------------------------------------------- /tests/resources/wt_count3_unweighted_events.csv: -------------------------------------------------------------------------------- 1 | "rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "DAF-00288","DAF-00349","DAF-00419","DAF-00442","DAF-00482","DQF-00362","DHF-01530","DAF-00460","DAF-00450","DAF-00351",40.9687094135932,40.3776008451017,19.5500294128966,18.1075606253771,13.2477515437679,12.6735881236403,12.498989109107,12.0359004016026,10.9184200846472,10.911856233531 3 | -------------------------------------------------------------------------------- /tests/resources/wt_count3_userid_only.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DAF-00288","DAF-00349","DAF-00419","DAF-00442","DAF-00482","DQF-00362","DHF-01530","DAF-00460","DAF-00450","DAF-00351",40.9687094135932,40.3776008451017,19.5500294128966,18.1075606253771,13.2477515437679,12.6735881236403,12.498989109107,12.0359004016026,10.9184200846472,10.911856233531 3 | -------------------------------------------------------------------------------- /tests/resources/userpred_count3_userid_only.csv: -------------------------------------------------------------------------------- 1 | 
"user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DAF-00288","DAF-00349","DAF-00419","DAF-00442","DAF-00482","DQF-00362","DHF-01530","DAF-00460","DAF-00450","DAF-00351",40.9687094135932,40.3776008451017,19.5500294128966,18.1075606253771,13.2477515437679,12.6735881236403,12.498989109107,12.0359004016026,10.9184200846472,10.911856233531 3 | -------------------------------------------------------------------------------- /tests/resources/userpred_count3_userid_plus_events.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DAF-00288","DAF-00349","DAF-00419","DAF-00442","DAF-00482","DQF-00362","DHF-01530","DAF-00460","DAF-00450","DAF-00351",81.9374188271865,80.7552016902034,39.1000588257932,36.2151212507543,26.4955030875358,25.3471762472807,24.997978218214,24.0718008032053,21.8368401692945,21.8237124670619 3 | -------------------------------------------------------------------------------- /tests/resources/wt_count3_userid_plus_weighted_events.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DAF-00349","DAF-00288","DAF-00419","DAF-00442","DAF-00482","DAF-00460","DAF-00351","DAF-00350","DAF-00450","DAF-00375",147.47923869202,139.614318042053,64.7252046657919,59.0534460448936,48.451906574621,43.7799672267992,39.5041221539281,38.5119606834921,37.2526737633064,27.755960470453 3 | -------------------------------------------------------------------------------- /tests/resources/wt_count3_userid_plus_unweighted_events.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DAF-00288","DAF-00349","DAF-00419","DAF-00442","DAF-00482","DQF-00362","DHF-01530","DAF-00460","DAF-00450","DAF-00351",81.9374188271865,80.7552016902034,39.1000588257932,36.2151212507543,26.4955030875358,25.3471762472807,24.997978218214,24.0718008032053,21.8368401692945,21.8237124670619 3 | -------------------------------------------------------------------------------- /tests/resources/userpred_jac3_userid_only.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DQF-00362","DAF-00288","DHF-01530","DAF-00419","DAF-00442","DAF-00349","DHF-01159","DAF-00450","DAF-00518","DAF-00517",0.0616356962940849,0.0491800141372911,0.0424748656596161,0.0400987182354746,0.0384722850970896,0.0383977233290053,0.0325116739164845,0.0247482247681169,0.0243245801685614,0.0224889035141628 3 | -------------------------------------------------------------------------------- /tests/resources/userpred_jac3_userid_plus_events.csv: -------------------------------------------------------------------------------- 1 | 
"user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DQF-00362","DAF-00288","DHF-01530","DAF-00419","DAF-00442","DAF-00349","DHF-01159","DAF-00450","DAF-00518","DAF-00517",0.12327139258817,0.0983600282745821,0.0849497313192323,0.0801974364709493,0.0769445701941792,0.0767954466580107,0.065023347832969,0.0494964495362337,0.0486491603371228,0.0449778070283257 3 | -------------------------------------------------------------------------------- /tests/resources/userpred_lift3_userid_only.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DQF-00362","DHF-01530","DAF-00419","DHF-01159","DAF-00442","DAF-00518","DAF-00288","DAF-00517","DHF-01055","DAF-00450",0.00134901624374238,0.000846949418595508,0.000724970469906136,0.000721326480867887,0.000668550637477438,0.000600300548947123,0.000452986654840929,0.00045202168250828,0.000418032236938886,0.000347720319534188 3 | -------------------------------------------------------------------------------- /tests/resources/userpred_lift3_userid_plus_events.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DQF-00362","DHF-01530","DAF-00419","DHF-01159","DAF-00442","DAF-00518","DAF-00288","DAF-00517","DHF-01055","DAF-00450",0.00269803248748477,0.00169389883719102,0.00144994093981227,0.00144265296173577,0.00133710127495488,0.00120060109789425,0.000905973309681858,0.00090404336501656,0.000836064473877772,0.000695440639068376 3 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | S3method(print,sar) 4 | S3method(sar,data.frame) 5 | S3method(sar,default) 6 | export(az_rec_service) 7 | export(item_predict) 8 | export(rec_endpoint) 9 | export(rec_model) 10 | export(sar) 11 | export(set_sar_threads) 12 | export(user_predict) 13 | import(AzureRMR) 14 | import(AzureStor) 15 | import(Matrix) 16 | importFrom(R6,R6Class) 17 | importFrom(Rcpp,sourceCpp) 18 | importFrom(RcppParallel,RcppParallelLibs) 19 | importFrom(dplyr,"%>%") 20 | importFrom(stats,cor) 21 | importFrom(stats,predict) 22 | importFrom(stats,reformulate) 23 | importFrom(stats,xtabs) 24 | useDynLib(SAR, .registration=TRUE) 25 | -------------------------------------------------------------------------------- /R/RcppExports.R: -------------------------------------------------------------------------------- 1 | # Generated by using Rcpp::compileAttributes() -> do not edit by hand 2 | # Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 3 | 4 | make_similarity_matrix_sp <- function(n_items, groups, items) { 5 | .Call(`_SAR_make_similarity_matrix_sp`, n_items, groups, items) 6 | } 7 | 8 | rescale_to_jaccard <- function(mat) { 9 | .Call(`_SAR_rescale_to_jaccard`, mat) 10 | } 11 | 12 | rescale_to_lift <- function(mat) { 13 | .Call(`_SAR_rescale_to_lift`, mat) 14 | } 15 | 16 | user_predict_ranking <- function(aff, sim, n_recs, 
include_seed_items, backfill, pop_items) { 17 | .Call(`_SAR_user_predict_ranking`, aff, sim, n_recs, include_seed_items, backfill, pop_items) 18 | } 19 | 20 | -------------------------------------------------------------------------------- /man/ms_catalog.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ms_data.R 3 | \docType{data} 4 | \name{ms_catalog} 5 | \alias{ms_catalog} 6 | \title{Sample catalog dataset} 7 | \format{ 8 | A data frame with 101 rows and 3 columns. 9 | \describe{ 10 | \item{item}{The item ID, corresponding to the items in the \link{ms_usage} dataset.} 11 | \item{name}{A short description of the item.} 12 | \item{category}{The item category.} 13 | } 14 | } 15 | \source{ 16 | Microsoft. 17 | } 18 | \usage{ 19 | ms_catalog 20 | } 21 | \description{ 22 | Dataset of item descriptions from the Microsoft online store. 23 | } 24 | \seealso{ 25 | \link{ms_usage} 26 | } 27 | \keyword{datasets} 28 | -------------------------------------------------------------------------------- /man/ms_usage.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ms_data.R 3 | \docType{data} 4 | \name{ms_usage} 5 | \alias{ms_usage} 6 | \title{Sample usage dataset} 7 | \format{ 8 | A data frame with 118383 rows and 3 columns. 9 | \describe{ 10 | \item{user}{The user ID.} 11 | \item{item}{The item ID, corresponding to the items in the \link{ms_catalog} dataset.} 12 | \item{time}{The date and time of the transaction, in POSIXct format.} 13 | } 14 | } 15 | \source{ 16 | Microsoft. 17 | } 18 | \usage{ 19 | ms_usage 20 | } 21 | \description{ 22 | Dataset of anonymised transaction records from the Microsoft online store. 
23 | } 24 | \seealso{ 25 | \link{ms_catalog} 26 | } 27 | \keyword{datasets} 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # History files 2 | .Rhistory 3 | .Rapp.history 4 | 5 | # Session Data files 6 | .RData 7 | 8 | # Example code in package build process 9 | *-Ex.R 10 | 11 | # Output files from R CMD build 12 | /*.tar.gz 13 | 14 | # Output files from R CMD check 15 | /*.Rcheck/ 16 | 17 | # RStudio files 18 | .Rproj.user/ 19 | 20 | # produced vignettes 21 | vignettes/*.html 22 | vignettes/*.pdf 23 | 24 | # OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3 25 | .httr-oauth 26 | 27 | # knitr and R markdown default cache directories 28 | /*_cache/ 29 | /cache/ 30 | 31 | # Temporary files created by R markdown 32 | *.utf8.md 33 | *.knit.md 34 | 35 | # scratch dir 36 | misc 37 | 38 | # C++ stuff 39 | Debug 40 | *.o 41 | *.dll 42 | 43 | .vs/ 44 | .vscode/ 45 | -------------------------------------------------------------------------------- /man/item_predict.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/predict_sar.R 3 | \name{item_predict} 4 | \alias{item_predict} 5 | \title{Get item-to-item recommendations from a SAR model} 6 | \usage{ 7 | item_predict(object, items, k = 10) 8 | } 9 | \arguments{ 10 | \item{object}{A SAR model object.} 11 | 12 | \item{items}{A vector of item IDs.} 13 | 14 | \item{k}{The number of recommendations to obtain.} 15 | } 16 | \value{ 17 | A data frame containing one row per item ID supplied. 18 | } 19 | \description{ 20 | Get item-to-item recommendations from a SAR model 21 | } 22 | \examples{ 23 | 24 | data(ms_usage) 25 | mod <- sar(ms_usage) 26 | 27 | # item recomendations for a set of item IDs 28 | items <- unique(ms_usage$item)[1:5] 29 | item_predict(mod, items=items) 30 | 31 | } 32 | -------------------------------------------------------------------------------- /tests/resources/userpred_2users_count3_userid_only.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "0003000098E85347","DAF-00288","DAF-00349","DAF-00419","DAF-00442","DAF-00482","DQF-00362","DHF-01530","DAF-00460","DAF-00450","DAF-00351",40.9687094135932,40.3776008451017,19.5500294128966,18.1075606253771,13.2477515437679,12.6735881236403,12.498989109107,12.0359004016026,10.9184200846472,10.911856233531 3 | "00030000B72150D8","DAF-00349","DAF-00448","DAF-00416","DAF-00442","DAF-00482","DAF-00460","DAF-00351","DAF-00350","DHF-01406","DAF-00375",292.578922748547,155.687374965106,142.686750172649,96.469956568227,96.1602510829953,86.2048446867075,75.1343369430736,74.7627062886335,57.4111813855929,56.6009705435601 4 | -------------------------------------------------------------------------------- /tests/resources/userpred_2users_count3_userid_plus_events.csv: -------------------------------------------------------------------------------- 1 | "user","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | 
"0003000098E85347","DAF-00288","DAF-00349","DAF-00419","DAF-00442","DAF-00482","DQF-00362","DHF-01530","DAF-00460","DAF-00450","DAF-00351",81.9374188271865,80.7552016902034,39.1000588257932,36.2151212507543,26.4955030875358,25.3471762472807,24.997978218214,24.0718008032053,21.8368401692945,21.8237124670619 3 | "00030000B72150D8","DAF-00349","DAF-00448","DAF-00416","DAF-00442","DAF-00482","DAF-00460","DAF-00351","DAF-00350","DHF-01406","DAF-00375",585.157845497094,311.374749930212,285.373500345298,192.939913136454,192.320502165991,172.409689373415,150.268673886147,149.525412577267,114.822362771186,113.20194108712 4 | -------------------------------------------------------------------------------- /src/similarity_rescale.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | 4 | // [[Rcpp::export]] 5 | arma::sp_mat rescale_to_jaccard(arma::sp_mat& mat) 6 | { 7 | const arma::vec diag(mat.diag()); 8 | 9 | for (arma::sp_mat::iterator it = mat.begin(); it != mat.end(); ++it) 10 | { 11 | size_t row = it.row(); 12 | size_t col = it.col(); 13 | *it = *it / (diag[row] + diag[col] - *it); 14 | } 15 | return mat; 16 | } 17 | 18 | 19 | // [[Rcpp::export]] 20 | arma::sp_mat rescale_to_lift(arma::sp_mat& mat) 21 | { 22 | const arma::vec diag(mat.diag()); 23 | 24 | for (arma::sp_mat::iterator it = mat.begin(); it != mat.end(); ++it) 25 | { 26 | size_t row = it.row(); 27 | size_t col = it.col(); 28 | *it = *it / (diag[row] * diag[col]); 29 | } 30 | return mat; 31 | } 32 | 33 | -------------------------------------------------------------------------------- /R/ms_data.R: -------------------------------------------------------------------------------- 1 | #' Sample usage dataset 2 | #' 3 | #' Dataset of anonymised transaction records from the Microsoft online store. 4 | #' @format A data frame with 118383 rows and 3 columns. 5 | #' \describe{ 6 | #' \item{user}{The user ID.} 7 | #' \item{item}{The item ID, corresponding to the items in the [ms_catalog] dataset.} 8 | #' \item{time}{The date and time of the transaction, in POSIXct format.} 9 | #' } 10 | #' @source Microsoft. 11 | #' @seealso 12 | #' [ms_catalog] 13 | "ms_usage" 14 | 15 | 16 | #' Sample catalog dataset 17 | #' 18 | #' Dataset of item descriptions from the Microsoft online store. 19 | #' @format A data frame with 101 rows and 3 columns. 20 | #' \describe{ 21 | #' \item{item}{The item ID, corresponding to the items in the [ms_usage] dataset.} 22 | #' \item{name}{A short description of the item.} 23 | #' \item{category}{The item category.} 24 | #' } 25 | #' @source Microsoft. 
26 | #' @seealso 27 | #' [ms_usage] 28 | "ms_catalog" 29 | -------------------------------------------------------------------------------- /tests/resources/itempred_count3.csv: -------------------------------------------------------------------------------- 1 | "item","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "DQF-00358","DQF-00248","DQF-00362","DHF-01530","DAF-00288","DHF-01159","DAF-00517","DAF-00419","DAF-00442","DAF-00518","DAF-00416",24,12,11,6,6,5,4,4,4,3 3 | "DQF-00248","DAF-00349","DAF-00288","DAF-00448","DAF-00416","DAF-00419","DAF-00442","DAF-00482","DAF-00460","DAF-00350","DAF-00351",242,210,130,119,94,80,80,71,62,62 4 | "DAF-00396","DQF-00248","DAF-00288","DAF-00420","DAF-00448","DAF-00450","DAF-00351","DAF-00442","DAF-00349","DAF-00280","DAF-00399",28,14,7,7,6,5,5,4,3,3 5 | "DHF-01055","DHF-01530","DHF-01159","DAF-00517","DQF-00362","DAF-00518","DHF-01056","DAF-00516","DAF-00255","DAF-00512","DHF-00826",83,72,58,49,34,11,10,8,7,7 6 | "DHF-01159","DHF-01055","DHF-01530","DAF-00517","DQF-00362","DAF-00518","DAF-00255","DQF-00248","DDF-00122","DHF-01056","DAF-00512",72,65,58,45,24,12,12,9,9,8 7 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | # MIT License 2 | 3 | Copyright (c) 2018 Microsoft 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /SAR.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 15 4 | VisualStudioVersion = 15.0.27406.0 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{DA7A21FA-8162-4350-AD77-A8D1B671F3ED}") = "SAR", "SAR.rxproj", "{891F8108-6B8F-42E4-B31A-A5EEB76382C2}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {891F8108-6B8F-42E4-B31A-A5EEB76382C2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {891F8108-6B8F-42E4-B31A-A5EEB76382C2}.Debug|Any CPU.Build.0 = Debug|Any CPU 16 | {891F8108-6B8F-42E4-B31A-A5EEB76382C2}.Release|Any CPU.ActiveCfg = Release|Any CPU 17 | {891F8108-6B8F-42E4-B31A-A5EEB76382C2}.Release|Any CPU.Build.0 = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(SolutionProperties) = preSolution 20 | HideSolutionNode = FALSE 21 | EndGlobalSection 22 | GlobalSection(ExtensibilityGlobals) = postSolution 23 | SolutionGuid = {9537C9DB-2A06-4A47-B63D-040E8014649C} 24 | EndGlobalSection 25 | EndGlobal 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SAR: Smart Adaptive Recommendations 2 | 3 | [SAR](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md) is a practical, rating-free collaborative filtering algorithm for recommendations. It produces explainable results, and is usable on a wide range of problems. 4 | 5 | This package provides the following: 6 | 7 | - An R interface to the Azure [Product Recommendations](https://github.com/Microsoft/Product-Recommendations) service, a cloud implementation of SAR. It includes the ability to deploy the backend via the [AzureRMR](https://github.com/Azure/AzureRMR) package, as well as a client frontend. 8 | 9 | - A standalone R implementation of SAR, for ease of experimentation and familiarisation. The core algorithm is written in C++ and makes use of multithreading and sparse matrices for speed and efficiency. 10 | 11 | ## More information 12 | 13 | [A detailed description of SAR](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md) 14 | 15 | Other SAR implementations: 16 | 17 | - Python: [Recommenders-Team/recommenders](https://github.com/recommenders-team/recommenders) 18 | - Spark: [Microsoft/SynapseML](https://github.com/microsoft/SynapseML) 19 | 20 | -------------------------------------------------------------------------------- /man/get_rec_service.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/onload.R 3 | \name{get_rec_service} 4 | \alias{get_rec_service} 5 | \title{Get existing Azure recommender service} 6 | \description{ 7 | Method for the \link[AzureRMR:az_resource_group]{AzureRMR::az_resource_group} and \link[AzureRMR:az_subscription]{AzureRMR::az_subscription} classes. 8 | } 9 | \section{Usage}{ 10 | 11 | 12 | \if{html}{\out{
<div class="sourceCode">}}\preformatted{get_rec_service(name, data_container = "inputdata") 13 | }\if{html}{\out{</div>
}} 14 | } 15 | 16 | \section{Arguments}{ 17 | 18 | \itemize{ 19 | \item \code{name}: The name of the recommender service. 20 | \item \code{data_container}: The name of the blob container within the storage account to use for storing datasets. 21 | } 22 | } 23 | 24 | \section{Value}{ 25 | 26 | An object of class \code{az_rec_service} representing the deployed recommender service. 27 | } 28 | 29 | \examples{ 30 | \dontrun{ 31 | 32 | rg <- AzureRMR::az_rm$ 33 | new(tenant="myaadtenant.onmicrosoft.com", app="app_id", password="password")$ 34 | get_subscription("subscription_id")$ 35 | get_resource_group("rgname") 36 | 37 | # get a recommender service 38 | rg$get_rec_service("myrec") 39 | 40 | } 41 | } 42 | \seealso{ 43 | \link{create_rec_service}, \link{delete_rec_service} 44 | } 45 | -------------------------------------------------------------------------------- /man/delete_rec_service.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/onload.R 3 | \name{delete_rec_service} 4 | \alias{delete_rec_service} 5 | \title{Delete an Azure recommender service} 6 | \description{ 7 | Method for the \link[AzureRMR:az_resource_group]{AzureRMR::az_resource_group} and \link[AzureRMR:az_subscription]{AzureRMR::az_subscription} classes. 8 | } 9 | \section{Usage}{ 10 | 11 | 12 | \if{html}{\out{
<div class="sourceCode">}}\preformatted{delete_rec_service(name, confirm = TRUE, free_resources = TRUE) 13 | }\if{html}{\out{</div>
}} 14 | } 15 | 16 | \section{Arguments}{ 17 | 18 | \itemize{ 19 | \item \code{name}: The name of the recommender service. 20 | \item \code{confirm}: Whether to ask for confirmation before deleting. 21 | \item \code{free_resources}: Whether to delete the individual resources as well as the recommender template. 22 | } 23 | } 24 | 25 | \section{Value}{ 26 | 27 | NULL on successful deletion. 28 | } 29 | 30 | \examples{ 31 | \dontrun{ 32 | 33 | rg <- AzureRMR::az_rm$ 34 | new(tenant="myaadtenant.onmicrosoft.com", app="app_id", password="password")$ 35 | get_subscription("subscription_id")$ 36 | get_resource_group("rgname") 37 | 38 | # delete a recommender service 39 | rg$delete_rec_service("myrec") 40 | 41 | } 42 | } 43 | \seealso{ 44 | \link{create_rec_service}, \link{delete_rec_service} 45 | } 46 | -------------------------------------------------------------------------------- /tests/resources/user_aff.csv: -------------------------------------------------------------------------------- 1 | "","DAF-00255","DAF-00280","DAF-00281","DAF-00283","DAF-00284","DAF-00288","DAF-00349","DAF-00350","DAF-00351","DAF-00367","DAF-00375","DAF-00385","DAF-00396","DAF-00399","DAF-00416","DAF-00419","DAF-00420","DAF-00437","DAF-00442","DAF-00443","DAF-00444","DAF-00448","DAF-00449","DAF-00450","DAF-00451","DAF-00460","DAF-00462","DAF-00464","DAF-00465","DAF-00482","DAF-00488","DAF-00491","DAF-00498","DAF-00499","DAF-00502","DAF-00503","DAF-00504","DAF-00512","DAF-00516","DAF-00517","DAF-00518","DC2-00001","DCF-00085","DCF-00086","DCF-00087","DCF-00104","DCF-00173","DCF-00197","DCF-00198","DCF-00199","DCF-00203","DCF-00204","DCF-00205","DCF-00206","DCF-00252","DCF-00253","DCF-00254","DDF-00078","DDF-00122","DHF-00533","DHF-00826","DHF-00847","DHF-00881","DHF-00890","DHF-00894","DHF-00904","DHF-00905","DHF-00907","DHF-00927","DHF-01029","DHF-01030","DHF-01031","DHF-01037","DHF-01038","DHF-01055","DHF-01056","DHF-01080","DHF-01159","DHF-01242","DHF-01331","DHF-01332","DHF-01333","DHF-01334","DHF-01406","DHF-01436","DHF-01437","DHF-01438","DHF-01439","DHF-01440","DHF-01441","DHF-01444","DHF-01512","DHF-01528","DHF-01529","DHF-01530","DHF-01540","DHF-01550","DQF-00248","DQF-00358","DQF-00362","DR5-00001" 2 | "1",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0221122254449968,0,0,0,0,0,0,0.110240163381778,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151812861826336,1,0,0 3 | -------------------------------------------------------------------------------- /tests/resources/wt_aff.csv: -------------------------------------------------------------------------------- 1 | 
"","DAF-00255","DAF-00280","DAF-00281","DAF-00283","DAF-00284","DAF-00288","DAF-00349","DAF-00350","DAF-00351","DAF-00367","DAF-00375","DAF-00385","DAF-00396","DAF-00399","DAF-00416","DAF-00419","DAF-00420","DAF-00437","DAF-00442","DAF-00443","DAF-00444","DAF-00448","DAF-00449","DAF-00450","DAF-00451","DAF-00460","DAF-00462","DAF-00464","DAF-00465","DAF-00482","DAF-00488","DAF-00491","DAF-00498","DAF-00499","DAF-00502","DAF-00503","DAF-00504","DAF-00512","DAF-00516","DAF-00517","DAF-00518","DC2-00001","DCF-00085","DCF-00086","DCF-00087","DCF-00104","DCF-00173","DCF-00197","DCF-00198","DCF-00199","DCF-00203","DCF-00204","DCF-00205","DCF-00206","DCF-00252","DCF-00253","DCF-00254","DDF-00078","DDF-00122","DHF-00533","DHF-00826","DHF-00847","DHF-00881","DHF-00890","DHF-00894","DHF-00904","DHF-00905","DHF-00907","DHF-00927","DHF-01029","DHF-01030","DHF-01031","DHF-01037","DHF-01038","DHF-01055","DHF-01056","DHF-01080","DHF-01159","DHF-01242","DHF-01331","DHF-01332","DHF-01333","DHF-01334","DHF-01406","DHF-01436","DHF-01437","DHF-01438","DHF-01439","DHF-01440","DHF-01441","DHF-01444","DHF-01512","DHF-01528","DHF-01529","DHF-01530","DHF-01540","DHF-01550","DQF-00248","DQF-00358","DQF-00362","DR5-00001" 2 | "0003000098E85347",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0221122254449968,0,0,0,0,0,0,0.110240163381778,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151812861826336,1,0,0 3 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: SAR 2 | Title: Smart Adaptive Recommendations 3 | Version: 1.0.4 4 | Authors@R: c( 5 | person("Hong", "Ooi", , "hongooi73@gmail.com", role = c("aut", "cre")), 6 | person("Microsoft Product Recommendations team", role = "ctb", comment="source for MS sample datasets"), 7 | person("Microsoft", role="cph") 8 | ) 9 | Description: 'Smart Adaptive Recommendations' (SAR) is the name of a fast, scalable, adaptive algorithm for personalized recommendations based on user transactions and item descriptions. It produces easily explainable/interpretable recommendations and handles "cold item" and "semi-cold user" scenarios. This package provides two implementations of 'SAR': a standalone implementation, and an interface to a web service in Microsoft's 'Azure' cloud: . The former allows fast and easy experimentation, and the latter provides robust scalability and extra features for production use. 
10 | URL: https://github.com/hongooi73/SAR 11 | BugReports: https://github.com/hongooi73/SAR/issues 12 | License: MIT + file LICENSE 13 | Depends: 14 | R (>= 3.3) 15 | Imports: 16 | AzureRMR, 17 | AzureStor, 18 | dplyr (>= 0.7), 19 | httr, 20 | jsonlite, 21 | Matrix, 22 | R6, 23 | parallel, 24 | Rcpp (>= 0.12), 25 | RcppParallel 26 | Suggests: 27 | testthat 28 | LinkingTo: Rcpp, 29 | RcppArmadillo, 30 | RcppParallel 31 | SystemRequirements: GNU make 32 | LazyData: true 33 | Roxygen: list(markdown=TRUE, r6=FALSE) 34 | RoxygenNote: 7.3.1 35 | -------------------------------------------------------------------------------- /tests/resources/itempred_jac3.csv: -------------------------------------------------------------------------------- 1 | "item","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "DQF-00358","DQF-00362","DHF-01530","DHF-01159","DAF-00518","DAF-00419","DQF-00248","DAF-00442","DAF-00517","DHF-01055","DAF-00288",0.0609137055837563,0.041044776119403,0.0306122448979592,0.0238095238095238,0.0227272727272727,0.021680216802168,0.0212765957446809,0.0203252032520325,0.0168539325842697,0.0159151193633952 3 | "DQF-00248","DAF-00349","DAF-00288","DAF-00448","DAF-00416","DAF-00419","DAF-00482","DAF-00442","DAF-00460","DAF-00350","DAF-00351",0.20578231292517,0.177364864864865,0.114537444933921,0.107789855072464,0.0856882406563355,0.0723327305605787,0.0712377560106857,0.0644283121597096,0.0560578661844485,0.0557553956834532 4 | "DAF-00396","DAF-00420","DAF-00450","DAF-00399","DAF-00280","DAF-00288","DAF-00451","DAF-00351","DAF-00449","DAF-00448","DAF-00491",0.0693069306930693,0.0487804878048781,0.0476190476190476,0.0428571428571429,0.0396600566572238,0.0375,0.0352112676056338,0.032967032967033,0.0303030303030303,0.0303030303030303 5 | "DHF-01055","DHF-01159","DHF-01530","DAF-00517","DQF-00362","DAF-00518","DHF-01056","DAF-00516","DAF-00255","DAF-00512","DHF-00826",0.37696335078534,0.32295719844358,0.228346456692913,0.221719457013575,0.170854271356784,0.0846153846153846,0.0657894736842105,0.0555555555555556,0.0526315789473684,0.0492957746478873 6 | "DHF-01159","DHF-01055","DHF-01530","DAF-00517","DQF-00362","DAF-00518","DAF-00255","DHF-01056","DAF-00512","DDF-00122","DHF-00826",0.37696335078534,0.219594594594595,0.210909090909091,0.182926829268293,0.104347826086957,0.0745341614906832,0.0588235294117647,0.0522875816993464,0.0434782608695652,0.0365853658536585 7 | -------------------------------------------------------------------------------- /R/cold_items_nullmodel.R: -------------------------------------------------------------------------------- 1 | cold_wts <- function(sim_matrix, catalog_vars, items) 2 | { 3 | warm_items <- rownames(sim_matrix)[diag(sim_matrix) > 0] 4 | 5 | df <- expand.grid(item1=warm_items, item2=warm_items, stringsAsFactors=FALSE) 6 | sim <- as.numeric(sim_matrix[warm_items, warm_items]) 7 | 8 | vars <- sapply(catalog_vars, function(x) 9 | { 10 | x1 <- x[match(df$item1, items)] 11 | x2 <- x[match(df$item2, items)] 12 | max(0, cor(x1 == x2, sim, use="complete.obs", method="pearson")) 13 | }) 14 | 15 | vars / sum(vars) 16 | } 17 | 18 | 19 | get_cold_similarity_nullmodel <- function(sim_matrix, catalog_formula, catalog_data, cold_to_cold=FALSE) 20 | { 21 | # assume column 1 is item ID 22 | items <- catalog_data[[1]] 23 | 24 | vars <- all.vars(catalog_formula[[length(catalog_formula)]]) 25 | wts <- cold_wts(sim_matrix, catalog_data[vars], items) 26 | 27 | 
all_items <- rownames(sim_matrix) 28 | cold_items <- all_items[diag(sim_matrix) == 0] 29 | warm_items <- setdiff(all_items, cold_items) 30 | 31 | df <- if(!cold_to_cold) 32 | expand.grid(warm_item=warm_items, cold_item=cold_items, stringsAsFactors=FALSE) 33 | else expand.grid(warm_item=all_items, cold_item=cold_items, stringsAsFactors=FALSE) %>% 34 | dplyr::filter(warm_item != cold_item) 35 | 36 | vars <- mapply(function(x, wt) 37 | { 38 | x1 <- x[match(df$warm_item, items)] 39 | x2 <- x[match(df$cold_item, items)] 40 | (x1 == x2) * wt 41 | }, catalog_data[vars], wts, SIMPLIFY=FALSE) 42 | 43 | wt <- rowSums(dplyr::bind_cols(vars)) 44 | 45 | # rescale weights so that cold similarities are always less than warm-warm similarities 46 | wt <- wt * min(sim_matrix@x) 47 | 48 | dplyr::bind_cols(df, wt=wt) 49 | } 50 | 51 | -------------------------------------------------------------------------------- /src/make_similarity_matrix_sp.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | using namespace Rcpp; 4 | 5 | 6 | // [[Rcpp::export]] 7 | arma::sp_mat make_similarity_matrix_sp(int n_items, List groups, IntegerVector items) 8 | { 9 | int n_users = groups.length(); 10 | 11 | std::vector rowind; 12 | arma::uvec colptr(n_users + 1); 13 | 14 | rowind.reserve(n_users); 15 | colptr[0] = 0; 16 | 17 | for (int g = 0; g < n_users; g++) 18 | { 19 | IntegerVector user = groups[g]; 20 | std::vector user_items(user.length(), 0); 21 | 22 | for (int i = 0; i < user.length(); i++) 23 | { 24 | user_items[i] = items[user[i]] - 1; // -1 converts to 0-based indexing for C++ 25 | } 26 | 27 | // remove duplicated items for this user 28 | std::sort(user_items.begin(), user_items.end()); 29 | std::vector::iterator last = std::unique(user_items.begin(), user_items.end()); 30 | user_items.erase(last, user_items.end()); 31 | 32 | // append this user's values to the matrix vectors 33 | rowind.insert(rowind.end(), user_items.begin(), user_items.end()); 34 | colptr[g + 1] = colptr[g] + user_items.size(); 35 | } 36 | 37 | // vals is a vector of 1's, allocate it all in one hit 38 | arma::vec vals(rowind.size(), arma::fill::ones); 39 | 40 | // compressed sparse column representation of the item x user indicator matrix 41 | // take advantage of the fact that each user vector encodes the row-positions of the seen items 42 | arma::sp_mat item_user_mat(arma::conv_to::from(rowind), colptr, vals, n_items, n_users); 43 | 44 | // create result as sparse matrix multiply of item-user matrix with its transpose 45 | return item_user_mat * item_user_mat.t(); 46 | } 47 | 48 | 49 | -------------------------------------------------------------------------------- /tests/resources/itempred_lift3.csv: -------------------------------------------------------------------------------- 1 | "item","rec1","rec2","rec3","rec4","rec5","rec6","rec7","rec8","rec9","rec10","score1","score2","score3","score4","score5","score6","score7","score8","score9","score10" 2 | "DQF-00358","DQF-00362","DHF-01530","DHF-01159","DAF-00518","DAF-00419","DAF-00442","DAF-00517","DHF-01055","DQF-00248","DAF-00416",0.00134228187919463,0.000837138508371385,0.000704225352112676,0.000595238095238095,0.000555555555555556,0.000505050505050505,0.00043630017452007,0.000413223140495868,0.000373482726423903,0.000328947368421053 3 | 
"DQF-00248","DAF-00464","DAF-00437","DAF-00419","DAF-00416","DAF-00491","DAF-00280","DHF-00847","DAF-00499","DAF-00502","DAF-00462",0.000752152712937027,0.000746965452847806,0.000731403672580143,0.000730994152046784,0.000724427702115329,0.000708329308734988,0.000707353648530119,0.000700280112044818,0.000694294811941871,0.000684718331777155 4 | "DAF-00396","DAF-00399","DAF-00420","DAF-00280","DAF-00451","DAF-00450","DAF-00449","DAF-00491","DAF-00351","DAF-00288","DAF-00442",0.00309917355371901,0.00248579545454545,0.00235109717868339,0.00174825174825175,0.00160427807486631,0.00136363636363636,0.00117554858934169,0.00110326566637246,0.000985083028426682,0.000860881542699725 5 | "DHF-01055","DHF-01056","DHF-01159","DHF-01530","DAF-00512","DQF-00362","DAF-00517","DAF-00518","DAF-00255","DHF-00826","DAF-00516",0.00454545454545455,0.00419043184728204,0.00313219366768557,0.00304480208786429,0.00271784347440235,0.00250962745013197,0.00250885478158205,0.0021327645961077,0.00206611570247934,0.00201572263656521 6 | "DHF-01159","DHF-01055","DHF-01056","DAF-00512","DAF-00255","DAF-00517","DQF-00362","DHF-01530","DAF-00518","DHF-00826","DHF-01529",0.00419043184728204,0.00316901408450704,0.00296515937731653,0.00272603362108133,0.0021384853624364,0.00212685509027318,0.00209016657019744,0.00150905432595573,0.00150905432595573,0.00117370892018779 7 | -------------------------------------------------------------------------------- /tests/testthat/test01_sar.R: -------------------------------------------------------------------------------- 1 | context("SAR basic") 2 | 3 | datapath <- "../resources" 4 | data(ms_usage, package="SAR", envir=environment()) 5 | 6 | sim_count1 <- as.matrix(read.csv(file.path(datapath, "sim_count1.csv"), row.names=1, check.names=FALSE)) 7 | sim_count3 <- as.matrix(read.csv(file.path(datapath, "sim_count3.csv"), row.names=1, check.names=FALSE)) 8 | 9 | sim_jac1 <- as.matrix(read.csv(file.path(datapath, "sim_jac1.csv"), row.names=1, check.names=FALSE)) 10 | sim_jac3 <- as.matrix(read.csv(file.path(datapath, "sim_jac3.csv"), row.names=1, check.names=FALSE)) 11 | 12 | sim_lift1 <- as.matrix(read.csv(file.path(datapath, "sim_lift1.csv"), row.names=1, check.names=FALSE)) 13 | sim_lift3 <- as.matrix(read.csv(file.path(datapath, "sim_lift3.csv"), row.names=1, check.names=FALSE)) 14 | 15 | 16 | test_that("SAR model fit works", 17 | { 18 | count1 <- sar(ms_usage, support_threshold=1, similarity="count") 19 | expect_s3_class(count1, "sar") 20 | expect_equal(as.matrix(count1$sim_mat), sim_count1) 21 | 22 | count3 <- sar(ms_usage, support_threshold=3, similarity="count") 23 | expect_s3_class(count3, "sar") 24 | expect_equal(as.matrix(count3$sim_mat), sim_count3) 25 | 26 | jac1 <- sar(ms_usage, support_threshold=1, similarity="jaccard") 27 | expect_s3_class(jac1, "sar") 28 | expect_equal(as.matrix(jac1$sim_mat), sim_jac1) 29 | 30 | jac3 <- sar(ms_usage, support_threshold=3, similarity="jaccard") 31 | expect_s3_class(jac3, "sar") 32 | expect_equal(as.matrix(jac3$sim_mat), sim_jac3) 33 | 34 | lift1 <- sar(ms_usage, support_threshold=1, similarity="lift") 35 | expect_s3_class(lift1, "sar") 36 | expect_equal(as.matrix(lift1$sim_mat), sim_lift1) 37 | 38 | lift3 <- sar(ms_usage, support_threshold=3, similarity="lift") 39 | expect_s3_class(lift3, "sar") 40 | expect_equal(as.matrix(lift3$sim_mat), sim_lift3) 41 | }) 42 | 43 | 44 | -------------------------------------------------------------------------------- /SAR.rxproj: -------------------------------------------------------------------------------- 1 
| 2 | 3 | 4 | 891f8108-6b8f-42e4-b31a-a5eeb76382c2 5 | 6 | 7 | 15.0 8 | Debug 9 | AnyCPU 10 | 11 | 12 | Script.R 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /tests/testthat/test03_sarpred_wt.R: -------------------------------------------------------------------------------- 1 | context("SAR weighted prediction") 2 | 3 | datapath <- "../resources" 4 | data(ms_usage, package="SAR", envir=environment()) 5 | 6 | i <- readLines(file.path(datapath, "items.txt")) 7 | u <- readLines(file.path(datapath, "user.txt")) 8 | newevents <- read.csv(file.path(datapath, "newevents.csv"), colClasses="character") 9 | newevents_wt <- read.csv(file.path(datapath, "newevents_wt.csv"), colClasses="character") 10 | 11 | newevents$time <- as.POSIXct(newevents$time, tz="UTC", format="%Y-%m-%d %H:%M:%S") 12 | newevents_wt$time <- as.POSIXct(newevents_wt$time, tz="UTC", format="%Y-%m-%d %H:%M:%S") 13 | newevents_wt$weight <- as.numeric(newevents_wt$weight) 14 | 15 | 16 | test_that("Weighted prediction works", 17 | { 18 | count_wt <- sar(ms_usage, support_threshold=3, similarity="count") 19 | 20 | pred1 <- read.csv(file.path(datapath, "wt_count3_userid_only.csv"), stringsAsFactors=FALSE, 21 | colClasses=c(rep("character", 11), rep("numeric", 10))) 22 | expect_equal(user_predict(count_wt, u, k=10), pred1) 23 | expect_equal(user_predict(count_wt, u), pred1) 24 | 25 | pred2 <- read.csv(file.path(datapath, "wt_count3_userid_plus_unweighted_events.csv"), stringsAsFactors=FALSE, 26 | colClasses=c(rep("character", 11), rep("numeric", 10))) 27 | expect_equal(user_predict(count_wt, newevents, k=10), pred2) 28 | expect_equal(user_predict(count_wt, newevents), pred2) 29 | 30 | pred3 <- read.csv(file.path(datapath, "wt_count3_unweighted_events.csv"), stringsAsFactors=FALSE, 31 | colClasses=c(rep("character", 10), rep("numeric", 10))) 32 | expect_equal(user_predict(count_wt, newevents[-1], k=10), pred3) 33 | expect_equal(user_predict(count_wt, newevents[-1]), pred3) 34 | 35 | pred4 <- read.csv(file.path(datapath, "wt_count3_userid_plus_weighted_events.csv"), stringsAsFactors=FALSE, 36 | colClasses=c(rep("character", 11), rep("numeric", 10))) 37 | expect_equal(user_predict(count_wt, newevents_wt, k=10), pred4) 38 | expect_equal(user_predict(count_wt, newevents_wt), pred4) 39 | 40 | pred5 <- read.csv(file.path(datapath, "wt_count3_weighted_events.csv"), stringsAsFactors=FALSE, 41 | colClasses=c(rep("character", 10), rep("numeric", 10))) 42 | expect_equal(user_predict(count_wt, newevents_wt[-1], k=10), pred5) 43 | expect_equal(user_predict(count_wt, newevents_wt[-1]), pred5) 44 | }) 45 | 46 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Set default behavior to automatically normalize line endings. 3 | ############################################################################### 4 | * text=auto 5 | 6 | ############################################################################### 7 | # Set default behavior for command prompt diff. 8 | # 9 | # This is need for earlier builds of msysgit that does not have it on by 10 | # default for csharp files. 
11 | # Note: This is only used by command line 12 | ############################################################################### 13 | #*.cs diff=csharp 14 | 15 | ############################################################################### 16 | # Set the merge driver for project and solution files 17 | # 18 | # Merging from the command prompt will add diff markers to the files if there 19 | # are conflicts (Merging from VS is not affected by the settings below, in VS 20 | # the diff markers are never inserted). Diff markers may cause the following 21 | # file extensions to fail to load in VS. An alternative would be to treat 22 | # these files as binary and thus will always conflict and require user 23 | # intervention with every merge. To do so, just uncomment the entries below 24 | ############################################################################### 25 | #*.sln merge=binary 26 | #*.csproj merge=binary 27 | #*.vbproj merge=binary 28 | #*.vcxproj merge=binary 29 | #*.vcproj merge=binary 30 | #*.dbproj merge=binary 31 | #*.fsproj merge=binary 32 | #*.lsproj merge=binary 33 | #*.wixproj merge=binary 34 | #*.modelproj merge=binary 35 | #*.sqlproj merge=binary 36 | #*.wwaproj merge=binary 37 | 38 | ############################################################################### 39 | # behavior for image files 40 | # 41 | # image files are treated as binary by default. 42 | ############################################################################### 43 | #*.jpg binary 44 | #*.png binary 45 | #*.gif binary 46 | 47 | ############################################################################### 48 | # diff behavior for common document formats 49 | # 50 | # Convert binary document formats to text before diffing them. This feature 51 | # is only available from the command line. Turn it on by uncommenting the 52 | # entries below. 53 | ############################################################################### 54 | #*.doc diff=astextplain 55 | #*.DOC diff=astextplain 56 | #*.docx diff=astextplain 57 | #*.DOCX diff=astextplain 58 | #*.dot diff=astextplain 59 | #*.DOT diff=astextplain 60 | #*.pdf diff=astextplain 61 | #*.PDF diff=astextplain 62 | #*.rtf diff=astextplain 63 | #*.RTF diff=astextplain 64 | -------------------------------------------------------------------------------- /man/az_rec_service.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/az_service.R 3 | \docType{class} 4 | \name{az_rec_service} 5 | \alias{az_rec_service} 6 | \title{Azure product recommendations service class} 7 | \format{ 8 | An R6 object of class \code{az_rec_service}, inheriting from \code{AzureRMR::az_template}. 9 | } 10 | \description{ 11 | Class representing an Azure product recommendations service. 12 | } 13 | \section{Methods}{ 14 | 15 | \itemize{ 16 | \item \code{new(token, subscription, resource_group, name, ...)}: Initialize a recommendations service object. See 'Initialization' for more details. 17 | \item \code{start()}: Start the service. 18 | \item \code{stop()}: Stop the service. 19 | \item \code{get_rec_endpoint()}: Return an object representing the client endpoint for the service. 20 | \item \code{set_data_container(data_container="inputdata")}: sets the name of the blob container to use for storing datasets. 21 | \item \code{delete(confirm=TRUE)}: Delete the service, after checking for confirmation. 
22 | } 23 | } 24 | 25 | \section{Initialization}{ 26 | 27 | Generally, the easiest way to initialize a new recommendations service object is via the \code{create_rec_service} or \code{get_rec_service} methods of the \link[AzureRMR:az_subscription]{AzureRMR::az_subscription} or \link[AzureRMR:az_resource_group]{AzureRMR::az_resource_group} classes. 28 | 29 | To create a new recommendations service, supply the following additional arguments to \code{new()}: 30 | \itemize{ 31 | \item \code{hosting_plan}: The name of the hosting plan (essentially the size of the virtual machine on which to run the service). See below for the plans that are available. 32 | \item \code{storage_type}: The type of storage account to use. Can be \code{"Standard_LRS"} or \code{"Standard_GRS"}. 33 | \item \code{insights_location}: The location for the application insights service. Defaults to \code{"East US"}. 34 | \item \code{data_container}: The default blob storage container to use for saving input datasets. Defaults to \code{"inputdata"}. 35 | \item \code{wait}: Whether to wait until the service has finished provisioning. Defaults to TRUE. 36 | } 37 | } 38 | 39 | \examples{ 40 | \dontrun{ 41 | 42 | # recommended way of retrieving a resource: via a resource group object 43 | svc <- resgroup$get_rec_service("myrec") 44 | 45 | # start the service backend 46 | svc$start() 47 | 48 | # get the service endpoint 49 | rec_endp <- svc$get_rec_endpoint() 50 | 51 | } 52 | } 53 | \seealso{ 54 | \link{rec_endpoint}, for the client interface to the recommendations service 55 | 56 | \href{https://azure.microsoft.com/en-us/pricing/details/app-service/windows/}{List of Azure hosting plans} 57 | 58 | \href{https://github.com/Microsoft/Product-Recommendations/blob/master/deploy/README.md}{Deployment instructions} at the Product Recommendations API repo on GitHub 59 | } 60 | -------------------------------------------------------------------------------- /tests/testthat/test04_cold_items.R: -------------------------------------------------------------------------------- 1 | context("Cold items modelling") 2 | 3 | datapath <- "../resources" 4 | data(ms_usage, package="SAR", envir=environment()) 5 | data(ms_catalog, package="SAR", envir=environment()) 6 | 7 | # make some more variables 8 | ms_catalog$ms <- grepl("microsoft", ms_catalog$name, ignore.case=TRUE) 9 | ms_catalog$surf <- grepl("surface", ms_catalog$name, ignore.case=TRUE) 10 | 11 | f <- reformulate(names(ms_catalog)[-(1:2)]) 12 | 13 | 14 | test_that("Cold item modelling works", 15 | { 16 | mod0 <- sar(ms_usage, support_threshold=25) 17 | mod1 <- sar(ms_usage, support_threshold=25, catalog_data=ms_catalog, catalog_formula=f, cold_item_model=NULL) 18 | mod2 <- sar(ms_usage, support_threshold=25, catalog_data=ms_catalog, catalog_formula=f, cold_item_model=NULL, 19 | cold_to_cold=TRUE) 20 | 21 | mod3 <- sar(ms_usage, support_threshold=25, catalog_data=ms_catalog, catalog_formula=f, cold_item_model="lm") 22 | mod4 <- sar(ms_usage, support_threshold=25, catalog_data=ms_catalog, catalog_formula=f, cold_item_model="lm", 23 | cold_to_cold=TRUE) 24 | 25 | sim0 <- as.matrix(mod0$sim_mat) 26 | sim1 <- as.matrix(mod1$sim_mat) 27 | sim2 <- as.matrix(mod2$sim_mat) 28 | sim3 <- as.matrix(mod3$sim_mat) 29 | sim4 <- as.matrix(mod4$sim_mat) 30 | 31 | # identify elements that will be modified by cold item models 32 | warm <- diag(sim0) > 0 33 | cold <- !warm 34 | indices <- matrix(seq_along(sim0), nrow(sim0), ncol(sim0)) 35 | cold_cold <- indices[cold, cold] 36 | indices_13 <- c(indices[warm, 
cold], indices[cold, warm]) 37 | indices_24 <- c(setdiff(cold_cold, diag(cold_cold)), indices_13) 38 | 39 | sim1diff <- sim1[indices_13] 40 | sim2diff <- sim2[indices_24] 41 | sim3diff <- sim3[indices_13] 42 | sim4diff <- sim4[indices_24] 43 | 44 | # check that new matrices are still symmetrical 45 | expect_true(isSymmetric(sim1)) 46 | expect_true(isSymmetric(sim2)) 47 | expect_true(isSymmetric(sim3)) 48 | expect_true(isSymmetric(sim4)) 49 | 50 | # check that only cold entries are modified 51 | expect_true(all(sim0[indices_13] == 0)) 52 | expect_true(all(sim0[indices_24] == 0)) 53 | 54 | expect_identical(sim0[-indices_13], sim1[-indices_13]) 55 | expect_identical(sim0[-indices_24], sim2[-indices_24]) 56 | expect_identical(sim0[-indices_13], sim3[-indices_13]) 57 | expect_identical(sim0[-indices_24], sim4[-indices_24]) 58 | 59 | expect_true(all(sim1diff >= 0)) 60 | expect_true(all(sim2diff >= 0)) 61 | expect_true(all(sim3diff >= 0)) 62 | expect_true(all(sim4diff >= 0)) 63 | 64 | # check that new cold entries < warm entries (with fuzz) 65 | min_warm <- min(sim0[sim0 > 0]) 66 | expect_true(all(sim1diff - min_warm < 1e-15)) 67 | expect_true(all(sim2diff - min_warm < 1e-15)) 68 | expect_true(all(sim3diff - min_warm < 1e-15)) 69 | expect_true(all(sim4diff - min_warm < 1e-15)) 70 | }) 71 | -------------------------------------------------------------------------------- /man/create_rec_service.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/onload.R 3 | \name{create_rec_service} 4 | \alias{create_rec_service} 5 | \title{Create Azure recommender service} 6 | \description{ 7 | Method for the \link[AzureRMR:az_resource_group]{AzureRMR::az_resource_group} and \link[AzureRMR:az_subscription]{AzureRMR::az_subscription} classes. 8 | } 9 | \section{Usage}{ 10 | 11 | 12 | \if{html}{\out{
}}\preformatted{## R6 method for class 'az_subscription' 13 | create_rec_service(name, location, hosting_plan, storage_type = c("Standard_LRS", "Standard_GRS"), 14 | insights_location = c("East US", "North Europe", "West Europe", "South Central US"), 15 | data_container = "inputdata", ..., wait = TRUE) 16 | 17 | ## R6 method for class 'az_resource_group' 18 | create_rec_service(name, hosting_plan, storage_type = c("Standard_LRS", "Standard_GRS"), 19 | insights_location = c("East US", "North Europe", "West Europe", "South Central US"), 20 | data_container = "inputdata", ..., wait = TRUE) 21 | }\if{html}{\out{
}} 22 | } 23 | 24 | \section{Arguments}{ 25 | 26 | \itemize{ 27 | \item \code{name}: The name of the recommender service. 28 | \item \code{location}: For the subscription method, the location/region for the service. For the resource group method, this is taken from the location of the resource group. 29 | \item \code{storage_type}: The replication strategy for the storage account for the service. 30 | \item \code{insights_location}: Location for the application insights service giving you details on the webapp usage. 31 | \item \code{data_container}: The name of the blob container within the storage account to use for storing datasets. 32 | \item \code{wait}: Whether to wait until the service has finished provisioning. 33 | \item \code{...} : Other named arguments to pass to the \link[AzureRMR:az_template]{AzureRMR::az_template} initialization function. 34 | } 35 | } 36 | 37 | \section{Details}{ 38 | 39 | This method deploys a new recommender service. The individual resources created are an Azure webapp, a storage account, and an application insights service for monitoring. Within the storage account, a blob container is created with name given by the \code{data_container} argument for storing input datasets. 40 | 41 | For the az_subscription method, a resource group is also created to hold the resources. The name of the resource group will be the same as the name of the service. 42 | } 43 | 44 | \section{Value}{ 45 | 46 | An object of class \code{az_rec_service} representing the deployed recommender service. 47 | } 48 | 49 | \examples{ 50 | \dontrun{ 51 | 52 | rg <- AzureRMR::az_rm$ 53 | new(tenant="myaadtenant.onmicrosoft.com", app="app_id", password="password")$ 54 | get_subscription("subscription_id")$ 55 | get_resource_group("rgname") 56 | 57 | # create a new recommender service 58 | rg$create_rec_service("myrec", hosting_plan="S2") 59 | 60 | } 61 | } 62 | \seealso{ 63 | \link{get_rec_service}, \link{delete_rec_service}. 64 | 65 | The architecture for the web service is documented \href{https://github.com/Microsoft/Product-Recommendations/blob/master/doc/architecture.md}{here}, and the specific template deployed by this method is \href{https://raw.githubusercontent.com/Microsoft/Product-Recommendations/master/saw/recommendationswebapp/core/arm/resources.json}{here}. 66 | } 67 | -------------------------------------------------------------------------------- /man/rec_model.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/az_rec_model.R 3 | \docType{class} 4 | \name{rec_model} 5 | \alias{rec_model} 6 | \title{Azure product recommendations model class} 7 | \format{ 8 | An R6 object of class \code{rec_model}. 9 | } 10 | \description{ 11 | Class representing an individual product recommendations (SAR) model. 12 | } 13 | \section{Methods}{ 14 | 15 | \itemize{ 16 | \item \code{new(...)}: Initialize a model object. See 'Initialization' for more details. 17 | \item \code{delete(confirm=TRUE)}: Delete the model. 18 | \item \code{user_predict(userdata, k=10)}: Get personalised recommendations from the model. See 'Recommendations' for more details. 19 | \item \code{item_predict(item, k=10)}: Get item-to-item recommendations from the model. See 'Recommendations' for more details. 20 | \item \code{get_model_url()}: Get the individual service URL for this model. 
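A brief sketch of the model-management methods listed above (the endpoint object and model description are illustrative):

rec_model <- rec_endp$get_model("model1")   # retrieve an existing model by its description
rec_model$get_model_url()                   # individual service URL for scoring against this model
rec_model$delete(confirm=TRUE)              # remove the model, with a confirmation prompt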
21 | } 22 | } 23 | 24 | \section{Initialization}{ 25 | 26 | Generally, the easiest way to initialize a new model object is via the \code{get_model()} and \code{train_model()} methods of the \code{rec_endpoint} class, which will handle all the gory details. 27 | } 28 | 29 | \section{Recommendations}{ 30 | 31 | These arguments are used for obtaining personalised and item-to-item recommendations. 32 | \itemize{ 33 | \item \code{userdata}: The input data on users for which to obtain personalised recommendations. This can be: 34 | \enumerate{ 35 | \item A character vector of user IDs. In this case, personalised recommendations will be computed based on the transactions in the training data, \emph{ignoring} any transaction event IDs or weights. 36 | \item A data frame containing transaction item IDs, event types and/or weights, plus timestamps. In this case, all the transactions are assumed to be for a single (new) user. If the event types/weights are absent, all transactions are assigned equal weight. 37 | \item A data frame containing user IDs and transaction details as in (2). In this case, the recommendations are based on both the training data for the given user(s), plus the new transaction details. 38 | } 39 | \item \code{item}: A vector of item IDs for which to obtain item-to-item recommendations. 40 | \item \code{k}: The number of recommendations to return. Defaults to 10. 41 | } 42 | 43 | Both the \code{user_predict()} and \code{item_predict()} methods return a data frame with the top-K recommendations and scores. 44 | } 45 | 46 | \examples{ 47 | \dontrun{ 48 | 49 | # get a recommender endpoint and previously-trained model 50 | rec_endp <- rec_endpoint$new("myrecusacvjwpk4raost", admin_key="key1", rec_key="key2") 51 | rec_model <- rec_endp$get_model("model1") 52 | 53 | data(ms_usage) 54 | 55 | # item recommendations for a set of user IDs 56 | users <- unique(ms_usage$user)[1:5] 57 | rec_model$user_predict(users) 58 | 59 | # item recommendations for a set of user IDs and transactions (assumed to be new) 60 | user_df <- subset(ms_usage, user \%in\% users) 61 | rec_model$user_predict(user_df) 62 | 63 | # item recomendations for a set of item IDs 64 | items <- unique(ms_usage$item)[1:5] 65 | rec_model$item_predict(items) 66 | 67 | } 68 | } 69 | \seealso{ 70 | \link{az_rec_service} for the service backend, \link{rec_endpoint} for the client endpoint 71 | 72 | \href{https://github.com/Microsoft/Product-Recommendations/blob/master/doc/api-reference.md}{API reference} and \href{https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md}{SAR model description} at the Product Recommendations API repo on GitHub 73 | } 74 | -------------------------------------------------------------------------------- /src/RcppExports.cpp: -------------------------------------------------------------------------------- 1 | // Generated by using Rcpp::compileAttributes() -> do not edit by hand 2 | // Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 3 | 4 | #include 5 | #include 6 | 7 | using namespace Rcpp; 8 | 9 | #ifdef RCPP_USE_GLOBAL_ROSTREAM 10 | Rcpp::Rostream& Rcpp::Rcout = Rcpp::Rcpp_cout_get(); 11 | Rcpp::Rostream& Rcpp::Rcerr = Rcpp::Rcpp_cerr_get(); 12 | #endif 13 | 14 | // make_similarity_matrix_sp 15 | arma::sp_mat make_similarity_matrix_sp(int n_items, List groups, IntegerVector items); 16 | RcppExport SEXP _SAR_make_similarity_matrix_sp(SEXP n_itemsSEXP, SEXP groupsSEXP, SEXP itemsSEXP) { 17 | BEGIN_RCPP 18 | Rcpp::RObject rcpp_result_gen; 19 | Rcpp::RNGScope rcpp_rngScope_gen; 
20 | Rcpp::traits::input_parameter< int >::type n_items(n_itemsSEXP); 21 | Rcpp::traits::input_parameter< List >::type groups(groupsSEXP); 22 | Rcpp::traits::input_parameter< IntegerVector >::type items(itemsSEXP); 23 | rcpp_result_gen = Rcpp::wrap(make_similarity_matrix_sp(n_items, groups, items)); 24 | return rcpp_result_gen; 25 | END_RCPP 26 | } 27 | // rescale_to_jaccard 28 | arma::sp_mat rescale_to_jaccard(arma::sp_mat& mat); 29 | RcppExport SEXP _SAR_rescale_to_jaccard(SEXP matSEXP) { 30 | BEGIN_RCPP 31 | Rcpp::RObject rcpp_result_gen; 32 | Rcpp::RNGScope rcpp_rngScope_gen; 33 | Rcpp::traits::input_parameter< arma::sp_mat& >::type mat(matSEXP); 34 | rcpp_result_gen = Rcpp::wrap(rescale_to_jaccard(mat)); 35 | return rcpp_result_gen; 36 | END_RCPP 37 | } 38 | // rescale_to_lift 39 | arma::sp_mat rescale_to_lift(arma::sp_mat& mat); 40 | RcppExport SEXP _SAR_rescale_to_lift(SEXP matSEXP) { 41 | BEGIN_RCPP 42 | Rcpp::RObject rcpp_result_gen; 43 | Rcpp::RNGScope rcpp_rngScope_gen; 44 | Rcpp::traits::input_parameter< arma::sp_mat& >::type mat(matSEXP); 45 | rcpp_result_gen = Rcpp::wrap(rescale_to_lift(mat)); 46 | return rcpp_result_gen; 47 | END_RCPP 48 | } 49 | // user_predict_ranking 50 | List user_predict_ranking(arma::sp_mat& aff, arma::sp_mat& sim, const int n_recs, const bool include_seed_items, const bool backfill, const IntegerVector& pop_items); 51 | RcppExport SEXP _SAR_user_predict_ranking(SEXP affSEXP, SEXP simSEXP, SEXP n_recsSEXP, SEXP include_seed_itemsSEXP, SEXP backfillSEXP, SEXP pop_itemsSEXP) { 52 | BEGIN_RCPP 53 | Rcpp::RObject rcpp_result_gen; 54 | Rcpp::RNGScope rcpp_rngScope_gen; 55 | Rcpp::traits::input_parameter< arma::sp_mat& >::type aff(affSEXP); 56 | Rcpp::traits::input_parameter< arma::sp_mat& >::type sim(simSEXP); 57 | Rcpp::traits::input_parameter< const int >::type n_recs(n_recsSEXP); 58 | Rcpp::traits::input_parameter< const bool >::type include_seed_items(include_seed_itemsSEXP); 59 | Rcpp::traits::input_parameter< const bool >::type backfill(backfillSEXP); 60 | Rcpp::traits::input_parameter< const IntegerVector& >::type pop_items(pop_itemsSEXP); 61 | rcpp_result_gen = Rcpp::wrap(user_predict_ranking(aff, sim, n_recs, include_seed_items, backfill, pop_items)); 62 | return rcpp_result_gen; 63 | END_RCPP 64 | } 65 | 66 | static const R_CallMethodDef CallEntries[] = { 67 | {"_SAR_make_similarity_matrix_sp", (DL_FUNC) &_SAR_make_similarity_matrix_sp, 3}, 68 | {"_SAR_rescale_to_jaccard", (DL_FUNC) &_SAR_rescale_to_jaccard, 1}, 69 | {"_SAR_rescale_to_lift", (DL_FUNC) &_SAR_rescale_to_lift, 1}, 70 | {"_SAR_user_predict_ranking", (DL_FUNC) &_SAR_user_predict_ranking, 6}, 71 | {NULL, NULL, 0} 72 | }; 73 | 74 | RcppExport void R_init_SAR(DllInfo *dll) { 75 | R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); 76 | R_useDynamicSymbols(dll, FALSE); 77 | } 78 | -------------------------------------------------------------------------------- /R/cold_items_model.R: -------------------------------------------------------------------------------- 1 | # construct a model frame for the feature regression model: used for training and prediction 2 | feature_model_frame <- function(item1, item2, catalog_formula, catalog_data) 3 | { 4 | items <- catalog_data[[1]] 5 | 6 | vars <- all.vars(catalog_formula[[length(catalog_formula)]]) 7 | vars <- lapply(catalog_data[vars], function(x) 8 | { 9 | x1 <- x[match(item1, items)] 10 | x2 <- x[match(item2, items)] 11 | x1 == x2 12 | }) 13 | dplyr::bind_cols(item1=item1, item2=item2, vars) 14 | } 15 | 16 | 17 | # fit the regression model 
for predicting similarities for cold items, using feature data 18 | get_cold_similarity_model <- function(sim_matrix, catalog_formula, catalog_data, cold_to_cold, 19 | cold_item_model, similarity, ...) 20 | { 21 | # create training dataset: all warm-warm item pairs + random selection of cold-warm item pairs 22 | pairs <- cold_model_sample(sim_matrix) 23 | 24 | # logit transform only makes sense if similarity is jaccard 25 | y <- as.numeric(sim_matrix[pairs]) 26 | if(similarity == "jaccard") 27 | y <- logit(y) 28 | 29 | all_items <- rownames(sim_matrix) 30 | catalog_formula <- update(catalog_formula, y ~ .) 31 | 32 | model <- cold_item_model(formula=catalog_formula, 33 | data=dplyr::bind_cols(y=y, 34 | feature_model_frame(all_items[pairs[, 1]], all_items[pairs[, 2]], catalog_formula, catalog_data)), ...) 35 | 36 | cold_items <- all_items[diag(sim_matrix) == 0] 37 | warm_items <- setdiff(all_items, cold_items) 38 | 39 | df <- if(!cold_to_cold) 40 | expand.grid(warm_item=warm_items, cold_item=cold_items, stringsAsFactors=FALSE) 41 | else expand.grid(warm_item=all_items, cold_item=cold_items, stringsAsFactors=FALSE) %>% 42 | dplyr::filter(warm_item != cold_item) 43 | 44 | cold_pred <- predict(model, 45 | feature_model_frame(df$warm_item, df$cold_item, catalog_formula, catalog_data)) 46 | 47 | # ??? 48 | #if(inherits(model, "lm")) 49 | #{ 50 | #b <- model$coefficients[1] 51 | #cold_pred[cold_pred > b] <- pmax(0, cold_pred[cold_pred > b]) 52 | #} 53 | # presumably really trying to do: 54 | # assume we should back-transform for jaccard (?) 55 | if(similarity == "jaccard") 56 | cold_pred <- expit(cold_pred) 57 | 58 | # rescale weights so that cold similarities are always less than warm-warm similarities 59 | df$wt <- cold_pred * min(sim_matrix@x) / max(cold_pred) 60 | 61 | df 62 | } 63 | 64 | 65 | logit <- function(x) 66 | { 67 | x <- pmax(1e-5, pmin(x, 1 - 1e-5)) 68 | log(x / (1 - x)) 69 | } 70 | 71 | 72 | expit <- function(x) 73 | { 74 | 1/(1 + exp(-x)) 75 | } 76 | 77 | 78 | # create training dataset: all warm-warm item pairs + random selection of cold-warm item pairs 79 | cold_model_sample <- function(sim_matrix) 80 | { 81 | pairs <- which(sim_matrix != 0, arr.ind=TRUE) 82 | pairs <- pairs[pairs[, 1] > pairs[, 2], ] # lower triangular portion only 83 | 84 | warm <- which(diag(sim_matrix) != 0) 85 | n <- nrow(sim_matrix) 86 | n_warm <- length(warm) 87 | p_warm <- nrow(pairs) / (n_warm * (n_warm - 1) / 2) # proportion of warm pairs 88 | 89 | if(p_warm >= 0.5) 90 | { 91 | # get everything 92 | pairs <- expand.grid(row=warm, col=warm) %>% 93 | dplyr::filter(row > col) %>% 94 | as.matrix 95 | } 96 | else 97 | { 98 | # TODO: make more scalable 99 | cold_pairs <- which(sim_matrix == 0, arr.ind=TRUE) 100 | cold_pairs <- cold_pairs[cold_pairs[, 1] > cold_pairs[, 2], ] 101 | cold_pairs <- cold_pairs[sample(nrow(cold_pairs), n_warm), ] # sample of same size as #warm pairs 102 | pairs <- rbind(pairs, cold_pairs) 103 | } 104 | pairs 105 | } 106 | 107 | -------------------------------------------------------------------------------- /man/user_predict.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/predict_sar.R, R/set_sar_threads.R 3 | \name{user_predict} 4 | \alias{user_predict} 5 | \alias{set_sar_threads} 6 | \title{Get personalised recommendations from a SAR model} 7 | \usage{ 8 | user_predict( 9 | object, 10 | userdata = NULL, 11 | k = 10, 12 | include_seed_items = FALSE, 13 | 
backfill = FALSE, 14 | reftime 15 | ) 16 | 17 | set_sar_threads(n_threads) 18 | } 19 | \arguments{ 20 | \item{object}{A SAR model object.} 21 | 22 | \item{userdata}{A vector of user IDs, or a data frame containing user IDs and/or transactions. See below for the various ways to supply user information for predicting, and how they affect the results.} 23 | 24 | \item{k}{The number of recommendations to obtain.} 25 | 26 | \item{include_seed_items}{Whether items a user has already seen should be considered for recommendations.} 27 | 28 | \item{backfill}{Whether to backfill recommendations with popular items.} 29 | 30 | \item{reftime}{The reference time for discounting timestamps. If not supplied, defaults to the latest date in the training data and any new transactions supplied.} 31 | 32 | \item{n_threads}{For \code{set_sar_threads}, the number of threads to use. Defaults to half the number of logical cores.} 33 | } 34 | \value{ 35 | For \code{user_predict}, a data frame containing one row per user ID supplied (or if no IDs are supplied, exactly one row). 36 | } 37 | \description{ 38 | Get personalised recommendations from a SAR model 39 | } 40 | \details{ 41 | The SAR model can produce personalised recommendations for a user, given a history of their transactions. This history can be based on either the original training data, or new events, based on the contents of \code{userdata} argument: 42 | \enumerate{ 43 | \item A character vector of user IDs. In this case, personalised recommendations will be computed based on the transactions in the training data, \emph{ignoring} any transaction event IDs or weights. 44 | \item A data frame containing transaction item IDs, event types and/or weights, plus timestamps. In this case, all the transactions are assumed to be for a single (new) user. If the event types/weights are absent, all transactions are assigned equal weight. 45 | \item A data frame containing user IDs and transaction details as in (2). In this case, the recommendations are based on both the training data for the given user(s), plus the new transaction details. 46 | } 47 | 48 | In SAR, the first step in obtaining personalised recommendations is to compute a user-to-item affinity matrix \eqn{A}. This is essentially a weighted crosstabulation with one row per unique user ID and one column per item ID. The cells in the crosstab are given by the formula 49 | \deqn{sum(wt * 2^(-(t0 - time) / half_life))} 50 | where \code{wt} is obtained from the \code{weight} and \code{event} columns in the data. 51 | 52 | The product of this matrix with the item similarity matrix \eqn{S} then gives a matrix of recommendation scores. The recommendation scores are sorted, any items that the user has previously seen are optionally removed, and the top-N items are returned as the recommendations. 53 | 54 | The latter step is the most computationally expensive part of the algorithm. SAR can execute this in multithreaded fashion, with the default number of threads being half the number of (logical) cores. Use the \code{set_sar_threads} function to set the number of threads to use. 
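To make the scoring step concrete, here is a minimal sketch (purely illustrative, not the package's internal code) of one row of the affinity matrix and the resulting recommendation scores; the item names, transaction ages, weights and similarity values are invented for the example:

half_life <- 30
age <- c(item1=0, item2=15, item3=60)   # t0 - time, in days
wt  <- c(item1=1, item2=1, item3=1)     # event weights (all equal here)
aff <- wt * 2^(-age / half_life)        # one row of the user-to-item affinity matrix A

# invented item-to-item similarity matrix S
S <- matrix(c(1.0, 0.4, 0.1,
              0.4, 1.0, 0.3,
              0.1, 0.3, 1.0),
            nrow=3, dimnames=list(names(age), names(age)))

scores <- drop(aff %*% S)               # recommendation scores for this user
sort(scores, decreasing=TRUE)           # the top-k of these (minus seen items) are returned

In the package itself the affinity and similarity matrices are stored in sparse form and the multiplication runs in parallel C++ code, but the arithmetic is the same.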
55 | } 56 | \examples{ 57 | 58 | data(ms_usage) 59 | mod <- sar(ms_usage) 60 | 61 | # item recommendations given a vector of user IDs 62 | users <- unique(ms_usage$user)[1:5] 63 | user_predict(mod, userdata=users) 64 | 65 | # item recommendations given a set of user IDs and transactions (assumed to be new) 66 | user_df <- subset(ms_usage, user \%in\% users) 67 | user_predict(mod, userdata=user_df) 68 | 69 | # item recomendations for a set of item IDs 70 | items <- unique(ms_usage$item)[1:5] 71 | item_predict(mod, items=items) 72 | 73 | # setting the number of threads to use when computing recommendations 74 | set_sar_threads(2) 75 | 76 | } 77 | \seealso{ 78 | \href{https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md#making-recommendations}{Making recommendations} at the \href{https://github.com/Microsoft/Product-Recommendations}{Product Recommendations API repo} on GitHub 79 | } 80 | -------------------------------------------------------------------------------- /R/az_service.R: -------------------------------------------------------------------------------- 1 | #' Azure product recommendations service class 2 | #' 3 | #' Class representing an Azure product recommendations service. 4 | #' 5 | #' @docType class 6 | #' @section Methods: 7 | #' - `new(token, subscription, resource_group, name, ...)`: Initialize a recommendations service object. See 'Initialization' for more details. 8 | #' - `start()`: Start the service. 9 | #' - `stop()`: Stop the service. 10 | #' - `get_rec_endpoint()`: Return an object representing the client endpoint for the service. 11 | #' - `set_data_container(data_container="inputdata")`: sets the name of the blob container to use for storing datasets. 12 | #' - `delete(confirm=TRUE)`: Delete the service, after checking for confirmation. 13 | #' 14 | #' @section Initialization: 15 | #' Generally, the easiest way to initialize a new recommendations service object is via the `create_rec_service` or `get_rec_service` methods of the [AzureRMR::az_subscription] or [AzureRMR::az_resource_group] classes. 16 | #' 17 | #' To create a new recommendations service, supply the following additional arguments to `new()`: 18 | #' - `hosting_plan`: The name of the hosting plan (essentially the size of the virtual machine on which to run the service). See below for the plans that are available. 19 | #' - `storage_type`: The type of storage account to use. Can be `"Standard_LRS"` or `"Standard_GRS"`. 20 | #' - `insights_location`: The location for the application insights service. Defaults to `"East US"`. 21 | #' - `data_container`: The default blob storage container to use for saving input datasets. Defaults to `"inputdata"`. 22 | #' - `wait`: Whether to wait until the service has finished provisioning. Defaults to TRUE. 
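#'
#' As a rough sketch (all argument values here are illustrative), a creation call supplying
#' these arguments via a resource group object might look like:
#'
#' ```
#' svc <- resgroup$create_rec_service("myrec",
#'     hosting_plan="S2", storage_type="Standard_LRS",
#'     insights_location="East US", data_container="inputdata", wait=TRUE)
#' ```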
23 | #' 24 | #' @seealso 25 | #' [rec_endpoint], for the client interface to the recommendations service 26 | #' 27 | #' [List of Azure hosting plans](https://azure.microsoft.com/en-us/pricing/details/app-service/windows/) 28 | #' 29 | #' [Deployment instructions](https://github.com/Microsoft/Product-Recommendations/blob/master/deploy/README.md) at the Product Recommendations API repo on GitHub 30 | #' 31 | #' @examples 32 | #' \dontrun{ 33 | #' 34 | #' # recommended way of retrieving a resource: via a resource group object 35 | #' svc <- resgroup$get_rec_service("myrec") 36 | #' 37 | #' # start the service backend 38 | #' svc$start() 39 | #' 40 | #' # get the service endpoint 41 | #' rec_endp <- svc$get_rec_endpoint() 42 | #' 43 | #' } 44 | #' @format An R6 object of class `az_rec_service`, inheriting from `AzureRMR::az_template`. 45 | #' @export 46 | az_rec_service <- R6Class("az_rec_service", inherit=AzureRMR::az_template, 47 | 48 | public=list( 49 | url=NULL, 50 | admin_key=NULL, 51 | rec_key=NULL, 52 | storage_key=NULL, 53 | data_container=NULL, 54 | 55 | initialize=function(token, subscription, resource_group, name, ...) 56 | { 57 | super$initialize(token, subscription, resource_group, name, ...) 58 | 59 | # get data members 60 | outputs <- self$properties$outputs 61 | self$url <- outputs$websiteUrl$value 62 | self$admin_key <- outputs$adminPrimaryKey$value 63 | self$rec_key <- outputs$recommendPrimaryKey$value 64 | self$storage_key <- sub("^AccountKey=", "", 65 | strsplit(outputs$storageConnectionString$value, ";")[[1]][3]) 66 | 67 | # get the storage account and webapp 68 | outputs <- unlist(self$properties$outputResources) 69 | st_id <- grep("Microsoft.Storage/storageAccounts/.+$", outputs, ignore.case=TRUE, value=TRUE)[1] 70 | private$storage <- az_storage$new(self$token, self$subscription, id=st_id) 71 | 72 | app_id <- grep("Microsoft.Web/sites/.+$", outputs, ignore.case=TRUE, value=TRUE)[1] 73 | private$app <- az_resource$new(self$token, self$subscription, id=app_id) 74 | }, 75 | 76 | start=function() 77 | { 78 | private$app$do_operation(http_verb="POST", "start") 79 | invisible(NULL) 80 | }, 81 | 82 | stop=function() 83 | { 84 | private$app$do_operation(http_verb="POST", "stop") 85 | invisible(NULL) 86 | }, 87 | 88 | get_rec_endpoint=function(key=self$storage_key, sas=NULL) 89 | { 90 | stor_endp <- private$storage$get_blob_endpoint(key=self$storage_key, sas=sas) 91 | rec_endpoint$new(self$url, self$admin_key, self$rec_key, 92 | storage_endpoint=stor_endp, data_container=self$data_container) 93 | }, 94 | 95 | set_data_container=function(data_container="inputdata") 96 | { 97 | stor_endp <- private$storage$get_blob_endpoint(key=self$storage_key) 98 | conts <- names(list_blob_containers(stor_endp)) 99 | if(!(data_container %in% conts)) 100 | create_blob_container(stor_endp, data_container, public_access="none") 101 | self$data_container <- data_container 102 | }, 103 | 104 | print=function(...) 
105 | { 106 | cat("\n", sep="") 107 | cat(AzureRMR::format_public_fields(self, 108 | exclude=c("subscription", "resource_group", "name", "id", "properties"))) 109 | cat(AzureRMR::format_public_methods(self)) 110 | invisible(NULL) 111 | } 112 | ), 113 | 114 | private=list( 115 | storage=NULL, 116 | app=NULL, 117 | 118 | # override default method: 119 | # - this template lists hosting plan before site, must reorder to allow for dependency 120 | # - must also explicitly NOT delete empty plan as part of deleting app, or Azure gets confused 121 | # - do not delete resources which represent functionality provided by other resources 122 | free_resources=function() 123 | { 124 | resources <- self$properties$outputResources[c(1, 2, 4, 3)] 125 | for(i in seq_along(resources)) 126 | { 127 | id <- resources[[i]]$id 128 | 129 | # supply deployed_properties arg to prevent querying host for resource info 130 | try(az_resource$ 131 | new(self$token, self$subscription, id=id, deployed_properties=list(NULL))$ 132 | delete(confirm=FALSE, wait=TRUE)) 133 | } 134 | } 135 | )) 136 | 137 | -------------------------------------------------------------------------------- /man/sar.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/sar.R 3 | \name{sar} 4 | \alias{sar} 5 | \alias{sar.data.frame} 6 | \alias{sar.default} 7 | \alias{print.sar} 8 | \title{Fit a SAR model} 9 | \usage{ 10 | sar(...) 11 | 12 | \method{sar}{data.frame}( 13 | x, 14 | user = "user", 15 | item = "item", 16 | time = "time", 17 | event = "event", 18 | weight = "weight", 19 | ... 20 | ) 21 | 22 | \method{sar}{default}( 23 | user, 24 | item, 25 | time, 26 | event = NULL, 27 | weight = NULL, 28 | support_threshold = 1, 29 | allowed_items = NULL, 30 | allowed_events = c(Click = 1, RecommendationClick = 2, AddShopCart = 3, RemoveShopCart 31 | = -1, Purchase = 4), 32 | by_user = TRUE, 33 | similarity = c("jaccard", "lift", "count"), 34 | half_life = 30, 35 | catalog_data = NULL, 36 | catalog_formula = item ~ ., 37 | cold_to_cold = FALSE, 38 | cold_item_model = NULL, 39 | ... 40 | ) 41 | 42 | \method{print}{sar}(x, ...) 43 | } 44 | \arguments{ 45 | \item{...}{For \code{sar()}, further arguments to pass to the cold-items feature model.} 46 | 47 | \item{x}{A data frame. For the \code{print} method, a SAR model object.} 48 | 49 | \item{user, item, time, event, weight}{For the default method, vectors to use as the user IDs, item IDs, timestamps, event types, and transaction weights for SAR. For the \code{data.frame} method, the names of the columns in the data frame \code{x} to use for these variables.} 50 | 51 | \item{support_threshold}{The SAR support threshold. Items that do not occur at least this many times in the data will be considered "cold".} 52 | 53 | \item{allowed_items}{A character or factor vector of allowed item IDs to use in the SAR model. If supplied, this will be used to categorise the item IDs in the data.} 54 | 55 | \item{allowed_events}{The allowed values for \code{events}, if that argument is supplied. Other values will be discarded.} 56 | 57 | \item{by_user}{Should the analysis be by user ID, or by user ID and timestamp? 
Defaults to userID only.} 58 | 59 | \item{similarity}{Similarity metric to use; defaults to Jaccard.} 60 | 61 | \item{half_life}{The decay period to use when weighting transactions by age.} 62 | 63 | \item{catalog_data}{A dataset to use for building the cold-items feature model.} 64 | 65 | \item{catalog_formula}{A formula for the feature model used to compute similarities for cold items.} 66 | 67 | \item{cold_to_cold}{Whether the cold-items feature model should include the cold items themselves in the training data, or only warm items.} 68 | 69 | \item{cold_item_model}{The type of model to use for cold item features.} 70 | } 71 | \value{ 72 | An S3 object representing the SAR model. This is essentially the item-to-item similarity matrix in sparse format, along with the original transaction data used to fit the model. 73 | } 74 | \description{ 75 | Fit a SAR model 76 | } 77 | \details{ 78 | Smart Adaptive Recommendations (SAR) is a fast, scalable, adaptive algorithm for personalized recommendations based on user transaction history and item descriptions. It produces easily explainable/interpretable recommendations and handles "cold item" and "semi-cold user" scenarios. 79 | 80 | Central to how SAR works is an item-to-item \emph{co-occurrence matrix}, which is based on how many times two items occur for the same users. For example, if a given user buys items \eqn{i_1} and \eqn{i_2}, then the cell \eqn{(i_1, i_2)} is incremented by 1. From this, an item \emph{similarity matrix} can be obtained by rescaling the co-occurrences according to a given metric. Options for the metric include Jaccard (the default), lift, and counts (which means no rescaling). 81 | 82 | Note that the similarity matrix in SAR thus only includes information on which users transacted which items. It does not include any other information such as item ratings or features, which may be used by other recommender algorithms. 83 | 84 | #' The SAR implementation in R should be usable on datasets with up to a few million rows and several thousand items. The main constraint is the size of the similarity matrix, which in turn depends (quadratically) on the number of unique items. The implementation has been successfully tested on the MovieLens 20M dataset, which contains about 138,000 users and 27,000 items. For larger datasets, it is recommended to use the \link[=az_rec_service]{Azure web service API}. 85 | } 86 | \section{Cold items}{ 87 | 88 | 89 | SAR has the ability to handle cold items, meaning those which have not been seen by any user, or which have only been seen by a number of users less than \code{support_threshold}. This is done by using item features to predict similarities. The method used for this is set by the \code{cold_items_model} argument: 90 | \itemize{ 91 | \item If this is \code{NULL} (the default), a manual algorithm is used that correlates each feature in turn with similarity, and produces a predicted similarity based on which features two items have in common. 92 | \item If this is the name of a modelling function, such as \code{"lm"} or \code{"randomForest"}, a model of that type is fit on the features and used to predict similarity. In particular, use \code{"lm"} to get a model that is (approximately) equivalent to that used by the Azure web service API. 93 | } 94 | 95 | The data frame and features used for cold items are given by the \code{catalog_data} and \code{catalog_formula} arguments. \code{catalog_data} should be a data frame whose first column is item ID. 
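Putting these pieces together, a sketch of a cold-items fit (mirroring the package's own tests; the derived indicator features are purely illustrative):

data(ms_usage)
data(ms_catalog)

# derive simple indicator features from the item names
ms_catalog$ms   <- grepl("microsoft", ms_catalog$name, ignore.case=TRUE)
ms_catalog$surf <- grepl("surface", ms_catalog$name, ignore.case=TRUE)

# one-sided formula using all catalog columns except the item ID and name
f <- reformulate(names(ms_catalog)[-(1:2)])

# fit SAR with a linear model for the cold-item similarities
mod_cold <- sar(ms_usage, support_threshold=25,
                catalog_data=ms_catalog, catalog_formula=f, cold_item_model="lm")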
\code{catalog_formula} should be a one-sided formula (no LHS). 96 | 97 | This feature is currently experimental, and subject to change. 98 | } 99 | 100 | \examples{ 101 | 102 | data(ms_usage) 103 | 104 | ## all of these fit the same model: 105 | 106 | # fit a SAR model from a series of vectors 107 | mod1 <- sar(user=ms_usage$user, item=ms_usage$item, time=ms_usage$time) 108 | 109 | # fit a model from a data frame, naming the variables to use 110 | mod2 <- sar(ms_usage, user="user", item="item", time="time") 111 | 112 | # fit a model from a data frame, using default variable names 113 | mod3 <- sar(ms_usage) 114 | 115 | } 116 | \seealso{ 117 | \href{https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md}{Description of SAR} at the \href{https://github.com/Microsoft/Product-Recommendations}{Product Recommendations API repo} on GitHub 118 | } 119 | -------------------------------------------------------------------------------- /man/rec_endpoint.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/az_rec_endp.R 3 | \docType{class} 4 | \name{rec_endpoint} 5 | \alias{rec_endpoint} 6 | \title{Azure product recommendations endpoint class} 7 | \format{ 8 | An R6 object of class \code{rec_endpoint}. 9 | } 10 | \description{ 11 | Class representing the client endpoint to the product recommendations service. 12 | } 13 | \section{Methods}{ 14 | 15 | \itemize{ 16 | \item \code{new(...)}: Initialize a client endpoint object. See 'Initialization' for more details. 17 | \item \code{train_model(...)}: Train a new product recommendations model; return an object of class \code{rec_model}. See \code{Training} for more details. 18 | \item \code{get_model(description, id)}: Get an existing product recommendations model from either its description or ID; return an object of class \code{rec_model}. 19 | \item \code{delete_model(description, id)}: Delete the specified model. 20 | \item \code{upload_data(data, destfile)}: Upload a data frame to the endpoint, as a CSV file. By default, the name of the uploaded file will be the name of the data frame with a ".csv" extension. 21 | \item \code{upload_csv(srcfile, destfile)}: Upload a CSV file to the endpoint. By default, the name of the uploaded file will be the same as the source file. 22 | \item \code{sync_model_list()}: Update the stored list of models for this service. 23 | \item \code{get_swagger_url()}: Get the Swagger URL for this service. 24 | \item \code{get_service_url()}: Get the service URL, which is used to train models and obtain recommendations. 25 | } 26 | } 27 | 28 | \section{Initialization}{ 29 | 30 | The following arguments are used to initialize a new client endpoint: 31 | \itemize{ 32 | \item \code{name}: The name of the endpoint; see below. Alternatively, this can also be the full URL of the endpoint. 33 | \item \code{admin_key}: The administration key for the endpoint. Use this to retrieve, train, and delete models. 34 | \item \code{rec_key}: The recommender key for the endpoint. Use this to get recommendations. 35 | \item \code{service_host}: The hostname for the endpoint. For the public Azure cloud, this is \code{azurewebsites.net}. 36 | \item \code{storage_key}: The access key for the storage account associated with the service. 37 | \item \code{storage_sas}: A shared access signature (SAS) for the storage account associated with the service. 
You must provide either \code{storage_key} or \code{storage_sas} if you want to upload new datasets to the backend. 38 | \item \code{storage_host}: The hostname for the storage account. For the public Azure cloud, this is \code{core.windows.net}. 39 | \item \code{storage_endpoint}: The storage account endpoint for the service. By default, uses the account that was created at service creation. 40 | \item \code{data_container}: The default blob container for input datasets. Defaults to \code{"inputdata"}. 41 | } 42 | 43 | Note that the name of the client endpoint for a product recommendations service is \emph{not} the name that was supplied when deploying the service. Instead, it is a randomly generated unique string that starts with the service name. For example, if you deployed a service called "myrec", the name of the endpoint is "myrecusacvjwpk4raost". 44 | } 45 | 46 | \section{Training}{ 47 | 48 | To train a new model, supply the following arguments to the \code{train_model} method: 49 | \itemize{ 50 | \item \code{description}: A character string describing the model. 51 | \item \code{usage_data}: The training dataset. This is required. 52 | \item \code{catalog_data}: An optional dataset giving features for each item. Only used for imputing cold items. 53 | \item \code{eval_data}: An optional dataset to use for evaluating model performance. 54 | \item \code{support_threshold}: The minimum support for an item to be considered warm. 55 | \item \code{cooccurrence}: How to measure cooccurrence: either user ID, or user-by-time. 56 | \item \code{similarity}: The similarity metric to use; defaults to "Jaccard". 57 | \item \code{cold_items}: Whether recommendations should include cold items. 58 | \item \code{cold_to_cold}: Whether similarities between cold items should be computed. 59 | \item \code{user_affinity}: Whether event type and time should be considered. 60 | \item \code{include_seed_items}: Whether seed items (those already seen by a user) should be allowed as recommendations. 61 | \item \code{half_life}: The time decay parameter for computing user-item affinities. 62 | \item \code{user_to_items}: Whether user ID is used when computing personalised recommendations. 63 | \item \code{wait}: Whether to wait until the model has finished training. 64 | \item \code{container}: The container where the input datasets are stored. Defaults to the input container for the endpoint, usually \code{"inputdata"}. 65 | } 66 | 67 | For detailed information on these arguments see the \href{https://github.com/Microsoft/Product-Recommendations/blob/master/doc/api-reference.md#train-a-new-model}{API reference}. 
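As a rough sketch (argument names are taken from the list above; the values are illustrative, and a fuller worked example follows below), a training call that exercises several of these options might look like:

rec_model <- rec_endp$train_model("model2", usage="ms_usage.csv",
    support_threshold=10, similarity="Jaccard",
    cold_items=TRUE, user_affinity=TRUE, user_to_items=TRUE,
    include_seed_items=FALSE, half_life=30,
    wait=TRUE, container="inputdata")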
68 | } 69 | 70 | \examples{ 71 | \dontrun{ 72 | 73 | # creating a recommendations service endpoint from an Azure resource 74 | svc <- resgroup$get_rec_service("myrec") 75 | rec_endp <- svc$get_rec_endpoint() 76 | 77 | # creating the endpoint from scratch -- must supply admin, recommender and storage keys 78 | rec_endp <- rec_endpoint$new("myrecusacvjwpk4raost", 79 | admin_key="key1", rec_key="key2", storage_key="key3") 80 | 81 | # upload the Microsoft store data 82 | data(ms_usage) 83 | rec_endp$upload_data(ms_usage) 84 | 85 | # train a recommender 86 | rec_model <- rec_endp$train_model("model1", usage="ms_usage.csv", support_threshold=10, 87 | similarity="Jaccard", user_affinity=TRUE, user_to_items=TRUE, 88 | backfill=TRUE, include_seed_items=FALSE) 89 | 90 | # list of trained models 91 | rec_endp$sync_model_list() 92 | 93 | # delete the trained model (will ask for confirmation) 94 | rec_endp$delete_model("model1") 95 | 96 | } 97 | } 98 | \seealso{ 99 | \link{az_rec_service} for the service itself, \link{rec_model} for an individual recommmendations model 100 | 101 | \href{https://github.com/Microsoft/Product-Recommendations/blob/master/doc/api-reference.md}{API reference} and \href{https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md}{SAR model description} at the Product Recommendations API repo on GitHub 102 | } 103 | -------------------------------------------------------------------------------- /src/userpred.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | using namespace Rcpp; 5 | 6 | 7 | struct Rec 8 | { 9 | int item; // recommended item: use item index no. rather than text label, to avoid copying chars around 10 | double score; // recommended score 11 | 12 | Rec(int item, double score) : item(item), score(score) {} 13 | 14 | static bool score_comp(const Rec& a, const Rec& b) 15 | { 16 | return a.score > b.score; 17 | } 18 | 19 | static bool item_comp(const Rec& a, const Rec& b) 20 | { 21 | return a.item > b.item; 22 | } 23 | }; 24 | 25 | 26 | struct Rank_scores : public RcppParallel::Worker 27 | { 28 | // inputs 29 | const arma::sp_mat& aff; 30 | const arma::sp_mat& sim; 31 | const int n_recs; 32 | const bool include_seed_items; 33 | const bool backfill; 34 | std::vector popular_items; 35 | 36 | // outputs 37 | RcppParallel::RMatrix rec_scores; 38 | RcppParallel::RMatrix rec_items; 39 | 40 | // derived 41 | const int n_users; 42 | const int n_items; 43 | 44 | Rank_scores(const arma::sp_mat& aff, const arma::sp_mat& sim, 45 | const int n_recs, const bool include_seed_items, const bool backfill, const IntegerVector& pop_items, 46 | NumericMatrix& rec_scores, IntegerMatrix& rec_items) : 47 | 48 | aff(aff), sim(sim), n_recs(n_recs), 49 | include_seed_items(include_seed_items), backfill(backfill), 50 | rec_scores(rec_scores), rec_items(rec_items), 51 | n_users(aff.n_cols), n_items(aff.n_rows) 52 | { 53 | int n_pop = pop_items.length(); 54 | popular_items.reserve(n_pop); 55 | for (int i = 0; i < n_pop; i++) 56 | { 57 | popular_items.emplace_back(pop_items[i], n_pop - i); // score here must be sorted in decreasing order 58 | } 59 | // pre-sort by item no. 
for input to std::set_difference later 60 | std::sort(popular_items.begin(), popular_items.end(), Rec::item_comp); 61 | } 62 | 63 | void operator()(size_t begin, size_t end) 64 | { 65 | // create dense matrices from sparse inputs: important for speed of accessing elements 66 | const arma::mat aff_dens(aff.cols(begin, end - 1)); 67 | 68 | // do NOT use above matrix in this constructor: sparse * dense is slow 69 | const arma::mat scores(sim * aff.cols(begin, end - 1)); 70 | 71 | const size_t chunksize = end - begin; 72 | for (size_t offset = 0; offset < chunksize; offset++) 73 | { 74 | const size_t i = begin + offset; 75 | const arma::vec score_i(scores.col(offset)); 76 | const arma::vec aff_i(aff_dens.col(offset)); 77 | 78 | std::vector user_rec; 79 | user_rec.reserve(n_items); 80 | 81 | if (!include_seed_items) 82 | { 83 | // only keep recs that correspond to zero item affinities 84 | for (int j = 0; j < n_items; j++) 85 | { 86 | if (aff_i[j] == 0) 87 | { 88 | user_rec.emplace_back(j, score_i[j]); 89 | } 90 | } 91 | // make sure we don't return garbage: must have at least n_recs items 92 | for (int j = user_rec.size(); j < n_recs; j++) 93 | { 94 | user_rec.emplace_back(0, 0.0); 95 | } 96 | } 97 | else 98 | { 99 | for (int j = 0; j < n_items; j++) 100 | { 101 | user_rec.emplace_back(i, score_i[j]); 102 | } 103 | } 104 | 105 | std::sort(user_rec.begin(), user_rec.end(), Rec::score_comp); 106 | 107 | if (backfill && user_rec[n_recs - 1].score == 0) 108 | { 109 | backfill_recs(user_rec, aff_i); 110 | } 111 | 112 | for (int j = 0; j < n_recs; j++) 113 | { 114 | rec_scores(i, j) = user_rec[j].score; 115 | rec_items(i, j) = user_rec[j].item + 1; // convert to 1-based indexing for R 116 | } 117 | } 118 | } 119 | 120 | // check for zero scores, replace with popular items 121 | // must exclude items that have already been recommended, optionally also items with nonzero affinity 122 | // if we are in here, we have at least one zero score in the top K 123 | void backfill_recs(std::vector& recs, const arma::vec& user_aff) 124 | { 125 | std::vector seen_items, unseen_popular_items; 126 | 127 | int firstzero = 0; 128 | seen_items.reserve(include_seed_items ? n_recs : n_items); 129 | for (; firstzero < n_recs && recs[firstzero].score != 0; firstzero++) 130 | { 131 | seen_items.push_back(recs[firstzero]); 132 | } 133 | 134 | // if required, expand the set of ineligible items to include those for which user has nonzero affinity 135 | if (!include_seed_items) 136 | { 137 | for (int i = 0; i < n_items; i++) 138 | { 139 | if (user_aff[i] > 0) 140 | { 141 | seen_items.emplace_back(i, user_aff[i]); 142 | } 143 | } 144 | } 145 | 146 | // std::set_difference requires inputs to be sorted by comparison criterion (item no.) 
147 | std::sort(seen_items.begin(), seen_items.end(), Rec::item_comp); 148 | 149 | std::set_difference(popular_items.begin(), popular_items.end(), 150 | seen_items.begin(), seen_items.end(), 151 | std::inserter(unseen_popular_items, unseen_popular_items.begin()), 152 | Rec::item_comp); 153 | 154 | std::sort(unseen_popular_items.begin(), unseen_popular_items.end(), Rec::score_comp); 155 | 156 | for (size_t i = firstzero, j = 0; i < recs.size() && j < unseen_popular_items.size(); i++, j++) 157 | { 158 | recs[i].item = unseen_popular_items[j].item; 159 | } 160 | } 161 | }; 162 | 163 | 164 | // [[Rcpp::export]] 165 | List user_predict_ranking(arma::sp_mat& aff, arma::sp_mat& sim, 166 | const int n_recs, const bool include_seed_items, const bool backfill, const IntegerVector& pop_items) 167 | { 168 | const int n_users = aff.n_cols; 169 | const int chunksize = 100; 170 | 171 | NumericMatrix rec_scores(n_users, n_recs); 172 | IntegerMatrix rec_items(n_users, n_recs); 173 | 174 | Rank_scores rank_scores(aff, sim, n_recs, include_seed_items, backfill, pop_items, rec_scores, rec_items); 175 | RcppParallel::parallelFor(0, n_users, rank_scores, chunksize); 176 | 177 | return List::create(rec_scores, rec_items); 178 | } 179 | 180 | -------------------------------------------------------------------------------- /tests/testthat/test02_sarpred.R: -------------------------------------------------------------------------------- 1 | context("SAR prediction") 2 | 3 | datapath <- "../resources" 4 | data(ms_usage, package="SAR", envir=environment()) 5 | 6 | i <- readLines(file.path(datapath, "items.txt")) 7 | u <- readLines(file.path(datapath, "user.txt")) 8 | u2 <- readLines(file.path(datapath, "user2.txt")) 9 | dfu <- subset(ms_usage, user == u) 10 | 11 | 12 | test_that("Prediction for cooccurrence works", 13 | { 14 | count3 <- sar(ms_usage, support_threshold=3, similarity="count") 15 | 16 | ipred3 <- read.csv(file.path(datapath, "itempred_count3.csv"), stringsAsFactors=FALSE, 17 | colClasses=c(rep("character", 11), rep("numeric", 10))) 18 | expect_equal(item_predict(count3, i, k=10), ipred3) 19 | expect_equal(item_predict(count3, i), ipred3) 20 | 21 | upred3 <- read.csv(file.path(datapath, "userpred_count3_userid_only.csv"), stringsAsFactors=FALSE, 22 | colClasses=c(rep("character", 11), rep("numeric", 10))) 23 | expect_equal(user_predict(count3, u, k=10), upred3) 24 | expect_equal(user_predict(count3, u), upred3) 25 | 26 | upred3 <- read.csv(file.path(datapath, "userpred_count3_userid_plus_events.csv"), stringsAsFactors=FALSE, 27 | colClasses=c(rep("character", 11), rep("numeric", 10))) 28 | expect_equal(user_predict(count3, dfu, k=10), upred3) 29 | expect_equal(user_predict(count3, dfu), upred3) 30 | }) 31 | 32 | test_that("Prediction for jaccard works", 33 | { 34 | jac3 <- sar(ms_usage, support_threshold=3, similarity="jac") 35 | 36 | ipred3 <- read.csv(file.path(datapath, "itempred_jac3.csv"), stringsAsFactors=FALSE, 37 | colClasses=c(rep("character", 11), rep("numeric", 10))) 38 | expect_equal(item_predict(jac3, i, k=10), ipred3) 39 | expect_equal(item_predict(jac3, i), ipred3) 40 | 41 | upred3 <- read.csv(file.path(datapath, "userpred_jac3_userid_only.csv"), stringsAsFactors=FALSE, 42 | colClasses=c(rep("character", 11), rep("numeric", 10))) 43 | expect_equal(user_predict(jac3, u, k=10), upred3) 44 | expect_equal(user_predict(jac3, u), upred3) 45 | 46 | upred3 <- read.csv(file.path(datapath, "userpred_jac3_userid_plus_events.csv"), stringsAsFactors=FALSE, 47 | colClasses=c(rep("character", 11), 
rep("numeric", 10))) 48 | expect_equal(user_predict(jac3, dfu, k=10), upred3) 49 | expect_equal(user_predict(jac3, dfu), upred3) 50 | }) 51 | 52 | test_that("Prediction for lift works", 53 | { 54 | lift3 <- sar(ms_usage, support_threshold=3, similarity="lift") 55 | 56 | ipred3 <- read.csv(file.path(datapath, "itempred_lift3.csv"), stringsAsFactors=FALSE, 57 | colClasses=c(rep("character", 11), rep("numeric", 10))) 58 | expect_equal(item_predict(lift3, i, k=10), ipred3) 59 | expect_equal(item_predict(lift3, i), ipred3) 60 | 61 | upred3 <- read.csv(file.path(datapath, "userpred_lift3_userid_only.csv"), stringsAsFactors=FALSE, 62 | colClasses=c(rep("character", 11), rep("numeric", 10))) 63 | expect_equal(user_predict(lift3, u, k=10), upred3) 64 | expect_equal(user_predict(lift3, u), upred3) 65 | 66 | upred3 <- read.csv(file.path(datapath, "userpred_lift3_userid_plus_events.csv"), stringsAsFactors=FALSE, 67 | colClasses=c(rep("character", 11), rep("numeric", 10))) 68 | expect_equal(user_predict(lift3, dfu, k=10), upred3) 69 | expect_equal(user_predict(lift3, dfu), upred3) 70 | }) 71 | 72 | 73 | test_that("Prediction with new/overlapping user IDs works", 74 | { 75 | mod <- sar(ms_usage, support_threshold=3, similarity="count") 76 | 77 | ipred3 <- read.csv(file.path(datapath, "itempred_count3.csv"), stringsAsFactors=FALSE, 78 | colClasses=c(rep("character", 11), rep("numeric", 10))) 79 | 80 | # new user ID: predicting with transactions should work, without transactions should fail 81 | dfu_new <- dfu 82 | dfu_new$user <- "xxxx" 83 | 84 | pred <- user_predict(mod, dfu_new) 85 | expect_s3_class(pred, "data.frame") 86 | expect_equal(nrow(pred), 1) 87 | 88 | expect_error(user_predict(mod, dfu_new[1])) 89 | 90 | expect_s3_class(user_predict(mod, dfu_new[-1]), "data.frame") 91 | 92 | # overlapping user IDs: predicting with transactions should work, without transactions should warn and drop 93 | dfu_ov <- rbind(dfu, dfu_new) 94 | 95 | pred <- user_predict(mod, dfu_ov) 96 | expect_s3_class(pred, "data.frame") 97 | expect_equal(nrow(pred), 2) 98 | 99 | expect_warning(pred <- user_predict(mod, dfu_ov[1])) 100 | expect_s3_class(pred, "data.frame") 101 | expect_equal(nrow(pred), 1) 102 | 103 | expect_s3_class(user_predict(mod, dfu_ov[-1]), "data.frame") 104 | }) 105 | 106 | 107 | test_that("Prediction with multiple user IDs ordered correctly", 108 | { 109 | mod <- sar(ms_usage, support_threshold=3, similarity="count") 110 | 111 | uu2 <- c(u, u2) 112 | 113 | expected <- read.csv(file.path(datapath, "userpred_2users_count3_userid_only.csv"), 114 | stringsAsFactors=FALSE, 115 | colClasses=c(rep("character", 11), rep("numeric", 10))) 116 | pred <- user_predict(mod, uu2) 117 | expect_equal(pred, expected) 118 | 119 | pred2 <- user_predict(mod, uu2[2:1]) 120 | expected2 <- expected[2:1,] 121 | row.names(expected2) <- NULL 122 | expect_equal(pred2, expected2) 123 | 124 | pred3 <- user_predict(mod, uu2[c(1, 2, 1, 2, 1, 2)]) 125 | expect_equal(pred3, expected) 126 | 127 | expected <- read.csv(file.path(datapath, "userpred_2users_count3_userid_plus_events.csv"), 128 | stringsAsFactors=FALSE, 129 | colClasses=c(rep("character", 11), rep("numeric", 10))) 130 | dfuu2 <- rbind(subset(ms_usage, user == u), subset(ms_usage, user == u2)) 131 | pred <- user_predict(mod, dfuu2) 132 | expect_equal(pred, expected) 133 | 134 | dfu2u <- rbind(subset(ms_usage, user == u2), subset(ms_usage, user == u)) 135 | pred2 <- user_predict(mod, dfu2u) 136 | expected2 <- expected[2:1,] 137 | row.names(expected2) <- NULL 138 | 
expect_equal(pred2, expected2) 139 | }) 140 | 141 | 142 | test_that("Prediction with new item IDs fails", 143 | { 144 | mod <- sar(ms_usage) 145 | 146 | dfu_new <- dfu 147 | dfu_new$item[1] <- "xxxx" 148 | expect_error(user_predict(mod, dfu_new)) 149 | }) 150 | 151 | 152 | test_that("Backfilled and non-backfilled prediction work", 153 | { 154 | u_bf <- readLines(file.path(datapath, "user_backfill.txt")) 155 | 156 | jac3 <- sar(ms_usage, support_threshold=3, similarity="jaccard") 157 | 158 | upred_nbf <- read.csv(file.path(datapath, "userpred_jac3_nobackfill.csv"), stringsAsFactors=FALSE, 159 | colClasses=c(rep("character", 11), rep("numeric", 10))) 160 | expect_equal(user_predict(jac3, u_bf, backfill=FALSE), upred_nbf) 161 | 162 | upred_bf <- read.csv(file.path(datapath, "userpred_jac3_backfill.csv"), stringsAsFactors=FALSE, 163 | colClasses=c(rep("character", 11), rep("numeric", 10))) 164 | expect_equal(user_predict(jac3, u_bf, backfill=TRUE), upred_bf) 165 | }) 166 | 167 | -------------------------------------------------------------------------------- /tests/testthat/test05_azure_service.R: -------------------------------------------------------------------------------- 1 | context("Azure recommender service backend") 2 | 3 | 4 | # backend test --- 5 | 6 | tenant <- Sys.getenv("AZ_TEST_TENANT_ID") 7 | app <- Sys.getenv("AZ_TEST_APP_ID") 8 | password <- Sys.getenv("AZ_TEST_PASSWORD") 9 | subscription <- Sys.getenv("AZ_TEST_SUBSCRIPTION") 10 | 11 | if(tenant == "" || app == "" || password == "" || subscription == "") 12 | skip("Resource Manager credentials must be set prior to test") 13 | 14 | az <- AzureRMR::az_rm$new(tenant=tenant, app=app, password=password) 15 | sub1 <- az$get_subscription(subscription) 16 | 17 | test_that("Azure recommender service backend works", 18 | { 19 | # generate random resource group name 20 | randgrp <- paste(sample(letters, 6, replace=TRUE), collapse="") 21 | 22 | expect_is(sub1$create_rec_service(randgrp, hosting_plan="S2", location="australiasoutheast"), "az_rec_service") 23 | 24 | rec_svc2 <- sub1$get_rec_service(randgrp) 25 | expect_is(rec_svc2, "az_rec_service") 26 | 27 | expect_true(!is_empty(rec_svc2$url) && !is_empty(rec_svc2$admin_key) && !is_empty(rec_svc2$rec_key)) 28 | expect_true(rec_svc2$url != "" && rec_svc2$admin_key != "" && rec_svc2$rec_key != "") 29 | 30 | Sys.setenv(AZ_REC_RESGRP=randgrp, 31 | AZ_REC_SERVICE=rec_svc2$url, 32 | AZ_REC_ADMIN_KEY=rec_svc2$admin_key, 33 | AZ_REC_REC_KEY=rec_svc2$rec_key, 34 | AZ_REC_STORAGE_KEY=rec_svc2$storage_key) 35 | }) 36 | 37 | 38 | 39 | # client test --- 40 | 41 | # check that credentials are available 42 | svcname <- Sys.getenv("AZ_REC_SERVICE") 43 | admin_key <- Sys.getenv("AZ_REC_ADMIN_KEY") 44 | rec_key <- Sys.getenv("AZ_REC_REC_KEY") 45 | storage_key <- Sys.getenv("AZ_REC_STORAGE_KEY") 46 | 47 | expect_true(svcname != "" && admin_key != "" && rec_key != "") 48 | 49 | datapath <- "../resources" 50 | data(ms_usage, package="SAR", envir=environment()) 51 | 52 | i <- readLines(file.path(datapath, "items.txt")) 53 | u <- readLines(file.path(datapath, "user.txt")) 54 | u2 <- readLines(file.path(datapath, "user2.txt")) 55 | dfu <- subset(ms_usage, user == u) 56 | 57 | check_preds <- function(df1, df2, threshold_ratio) 58 | { 59 | s1 <- grep("score", names(df1)) 60 | m1 <- as.matrix(df1[s1]) 61 | 62 | s2 <- grep("score", names(df2)) 63 | m2 <- as.matrix(df2[s2]) 64 | 65 | na1 <- which(is.na(m1)) 66 | na2 <- which(is.na(m2)) 67 | 68 | r <- na.omit(as.numeric(abs(m1 / m2 - 1))) 69 | 70 | # use tolerance to 
handle Azure fiddling with reference date 71 | expect_true(all(r < threshold_ratio) && diff(range(r)) < 1e-5 && identical(na1, na2)) 72 | } 73 | 74 | 75 | test_that("Azure recommender client works", 76 | { 77 | endp <- rec_endpoint$new(svcname, admin_key, rec_key, storage_key=storage_key) 78 | expect_is(endp, "rec_endpoint") 79 | 80 | mods <- endp$models 81 | expect_s3_class(mods, "data.frame") 82 | if("test_count" %in% mods$description) 83 | endp$delete_model("test_count", confirm=FALSE) 84 | expect_false("test_count" %in% endp$models$description) 85 | 86 | # recreate csv file for Azure service 87 | demoUsage <- ms_usage 88 | demoUsage$time <- strftime(demoUsage$time, format="%Y/%m/%dT%H:%M:%S", tz="UTC") 89 | endp$upload_data(demoUsage) 90 | 91 | test_count <- endp$train_model("test_count", usage="demoUsage.csv", support_threshold=3, 92 | similarity="Cooccurrence", user_affinity=TRUE, user_to_items=TRUE, backfill=TRUE, 93 | include_seed_items=FALSE) 94 | expect_is(test_count, "rec_model") 95 | 96 | expect_identical(test_count$status, "Completed") 97 | 98 | ipred0 <- read.csv(file.path(datapath, "itempred_count3.csv"), stringsAsFactors=FALSE, 99 | colClasses=c(rep("character", 11), rep("numeric", 10))) 100 | ipred <- test_count$item_predict(item=i) 101 | expect_s3_class(ipred, "data.frame") 102 | check_preds(ipred0, ipred, 0.05) # item prediction scores depend on time of prediction as well as time of fit (!) 103 | 104 | upred0 <- read.csv(file.path(datapath, "userpred_count3_userid_only.csv"), stringsAsFactors=FALSE, 105 | colClasses=c(rep("character", 11), rep("numeric", 10))) 106 | upred <- test_count$user_predict(userdata=u) 107 | expect_s3_class(upred, "data.frame") 108 | check_preds(upred0, upred, 0.01) 109 | 110 | upred0 <- read.csv(file.path(datapath, "userpred_count3_userid_plus_events.csv"), stringsAsFactors=FALSE, 111 | colClasses=c(rep("character", 11), rep("numeric", 10))) 112 | upred <- test_count$user_predict(userdata=dfu) 113 | expect_s3_class(upred, "data.frame") 114 | check_preds(upred0, upred, 0.01) 115 | }) 116 | 117 | 118 | test_that("Azure recommender client works with multiple user IDs", 119 | { 120 | endp <- rec_endpoint$new(svcname, admin_key, rec_key, storage_key=storage_key) 121 | expect_is(endp, "rec_endpoint") 122 | 123 | test_count <- endp$get_model("test_count") 124 | expect_is(test_count, "rec_model") 125 | 126 | uu2 <- c(u, u2) 127 | 128 | expected <- read.csv(file.path(datapath, "userpred_2users_count3_userid_only.csv"), 129 | stringsAsFactors=FALSE, 130 | colClasses=c(rep("character", 11), rep("numeric", 10))) 131 | pred <- test_count$user_predict(uu2) 132 | check_preds(expected, pred, 0.01) 133 | 134 | pred2 <- test_count$user_predict(uu2[2:1]) 135 | expected2 <- expected[2:1,] 136 | row.names(expected2) <- NULL 137 | check_preds(expected2, pred2, 0.01) 138 | 139 | pred3 <- test_count$user_predict(uu2[c(1, 2, 1, 2, 1, 2)]) 140 | check_preds(expected, pred3, 0.01) 141 | 142 | expected <- read.csv(file.path(datapath, "userpred_2users_count3_userid_plus_events.csv"), 143 | stringsAsFactors=FALSE, 144 | colClasses=c(rep("character", 11), rep("numeric", 10))) 145 | dfuu2 <- rbind(subset(ms_usage, user == u), subset(ms_usage, user == u2)) 146 | pred <- test_count$user_predict(dfuu2) 147 | check_preds(expected, pred, 0.01) 148 | 149 | dfu2u <- rbind(subset(ms_usage, user == u2), subset(ms_usage, user == u)) 150 | pred2 <- test_count$user_predict(dfu2u) 151 | expected2 <- expected[2:1,] 152 | row.names(expected2) <- NULL 153 | check_preds(expected2, pred2, 0.01) 
154 | 155 | # delete test model once we're done 156 | endp$delete_model("test_count", confirm=FALSE) 157 | expect_false("test_count" %in% endp$models$description) 158 | }) 159 | 160 | 161 | test_that("Azure recommender backfill works", 162 | { 163 | endp <- rec_endpoint$new(svcname, admin_key, rec_key, storage_key=storage_key) 164 | expect_is(endp, "rec_endpoint") 165 | 166 | mods <- endp$models 167 | expect_s3_class(mods, "data.frame") 168 | 169 | if("test_nobf" %in% mods$description) 170 | endp$delete_model("test_nobf", confirm=FALSE) 171 | expect_false("test_nobf" %in% endp$models$description) 172 | 173 | if("test_bf" %in% mods$description) 174 | endp$delete_model("test_bf", confirm=FALSE) 175 | expect_false("test_bf" %in% endp$models$description) 176 | 177 | test_bf <- endp$train_model("test_bf", usage="demoUsage.csv", support_threshold=3, 178 | similarity="Jaccard", user_affinity=TRUE, user_to_items=TRUE, backfill=TRUE, 179 | include_seed_items=FALSE) 180 | expect_identical(test_bf$status, "Completed") 181 | 182 | test_nobf <- endp$train_model("test_nobf", usage="demoUsage.csv", support_threshold=3, 183 | similarity="Jaccard", user_affinity=TRUE, user_to_items=TRUE, backfill=FALSE, 184 | include_seed_items=FALSE) 185 | expect_identical(test_nobf$status, "Completed") 186 | 187 | u_bf <- readLines(file.path(datapath, "user_backfill.txt")) 188 | 189 | upred0_bf <- read.csv(file.path(datapath, "userpred_jac3_backfill.csv"), stringsAsFactors=FALSE, 190 | colClasses=c(rep("character", 11), rep("numeric", 10))) 191 | 192 | upred_bf <- test_bf$user_predict(userdata=u_bf) 193 | expect_s3_class(upred_bf, "data.frame") 194 | check_preds(upred0_bf, upred_bf, 0.01) 195 | 196 | upred0_nbf <- read.csv(file.path(datapath, "userpred_jac3_nobackfill.csv"), stringsAsFactors=FALSE, 197 | colClasses=c(rep("character", 11), rep("numeric", 10))) 198 | 199 | upred_nbf <- test_nobf$user_predict(userdata=u_bf) 200 | expect_s3_class(upred_nbf, "data.frame") 201 | check_preds(upred0_nbf, upred_nbf, 0.01) 202 | 203 | # delete test models once we're done 204 | endp$delete_model("test_bf", confirm=FALSE) 205 | expect_false("test_bf" %in% endp$models$description) 206 | 207 | endp$delete_model("test_nobf", confirm=FALSE) 208 | expect_false("test_nobf" %in% endp$models$description) 209 | }) 210 | 211 | 212 | 213 | # backend and credential delete --- 214 | 215 | sub1$delete_resource_group(Sys.getenv("AZ_REC_RESGRP"), confirm=FALSE) 216 | 217 | Sys.unsetenv("AZ_REC_RESGRP") 218 | Sys.unsetenv("AZ_REC_SERVICE") 219 | Sys.unsetenv("AZ_REC_ADMIN_KEY") 220 | Sys.unsetenv("AZ_REC_REC_KEY") 221 | Sys.unsetenv("AZ_REC_STORAGE_KEY") 222 | 223 | -------------------------------------------------------------------------------- /R/onload.R: -------------------------------------------------------------------------------- 1 | #' Create Azure recommender service 2 | #' 3 | #' Method for the [AzureRMR::az_resource_group] and [AzureRMR::az_subscription] classes.
4 | #' 5 | #' @rdname create_rec_service 6 | #' @name create_rec_service 7 | #' @aliases create_rec_service 8 | #' @section Usage: 9 | #' ``` 10 | #' ## R6 method for class 'az_subscription' 11 | #' create_rec_service(name, location, hosting_plan, storage_type = c("Standard_LRS", "Standard_GRS"), 12 | #' insights_location = c("East US", "North Europe", "West Europe", "South Central US"), 13 | #' data_container = "inputdata", ..., wait = TRUE) 14 | #' 15 | #' ## R6 method for class 'az_resource_group' 16 | #' create_rec_service(name, hosting_plan, storage_type = c("Standard_LRS", "Standard_GRS"), 17 | #' insights_location = c("East US", "North Europe", "West Europe", "South Central US"), 18 | #' data_container = "inputdata", ..., wait = TRUE) 19 | #' ``` 20 | #' @section Arguments: 21 | #' - `name`: The name of the recommender service. 22 | #' - `location`: For the subscription method, the location/region for the service. For the resource group method, this is taken from the location of the resource group. 23 | #' - `storage_type`: The replication strategy for the storage account for the service. 24 | #' - `insights_location`: Location for the application insights service giving you details on the webapp usage. 25 | #' - `data_container`: The name of the blob container within the storage account to use for storing datasets. 26 | #' - `wait`: Whether to wait until the service has finished provisioning. 27 | #' - `...`: Other named arguments to pass to the [AzureRMR::az_template] initialization function. 28 | #' 29 | #' @section Details: 30 | #' This method deploys a new recommender service. The individual resources created are an Azure webapp, a storage account, and an application insights service for monitoring. Within the storage account, a blob container is created with name given by the `data_container` argument for storing input datasets. 31 | #' 32 | #' For the az_subscription method, a resource group is also created to hold the resources. The name of the resource group will be the same as the name of the service. 33 | #' 34 | #' @section Value: 35 | #' An object of class `az_rec_service` representing the deployed recommender service. 36 | #' 37 | #' @seealso 38 | #' [get_rec_service], [delete_rec_service]. 39 | #' 40 | #' The architecture for the web service is documented [here](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/architecture.md), and the specific template deployed by this method is [here](https://raw.githubusercontent.com/Microsoft/Product-Recommendations/master/saw/recommendationswebapp/core/arm/resources.json). 41 | #' 42 | #' @examples 43 | #' \dontrun{ 44 | #' 45 | #' rg <- AzureRMR::az_rm$ 46 | #' new(tenant="myaadtenant.onmicrosoft.com", app="app_id", password="password")$ 47 | #' get_subscription("subscription_id")$ 48 | #' get_resource_group("rgname") 49 | #' 50 | #' # create a new recommender service 51 | #' rg$create_rec_service("myrec", hosting_plan="S2") 52 | #' 53 | #' } 54 | NULL 55 | 56 | 57 | #' Get existing Azure recommender service 58 | #' 59 | #' Method for the [AzureRMR::az_resource_group] and [AzureRMR::az_subscription] classes. 60 | #' 61 | #' @rdname get_rec_service 62 | #' @name get_rec_service 63 | #' @aliases get_rec_service 64 | #' @section Usage: 65 | #' ``` 66 | #' get_rec_service(name, data_container = "inputdata") 67 | #' ``` 68 | #' @section Arguments: 69 | #' - `name`: The name of the recommender service. 70 | #' - `data_container`: The name of the blob container within the storage account to use for storing datasets.
71 | #' 72 | #' @section Value: 73 | #' An object of class `az_rec_service` representing the deployed recommender service. 74 | #' 75 | #' @seealso 76 | #' [create_rec_service], [delete_rec_service] 77 | #' 78 | #' @examples 79 | #' \dontrun{ 80 | #' 81 | #' rg <- AzureRMR::az_rm$ 82 | #' new(tenant="myaadtenant.onmicrosoft.com", app="app_id", password="password")$ 83 | #' get_subscription("subscription_id")$ 84 | #' get_resource_group("rgname") 85 | #' 86 | #' # get a recommender service 87 | #' rg$get_rec_service("myrec") 88 | #' 89 | #' } 90 | NULL 91 | 92 | 93 | #' Delete an Azure recommender service 94 | #' 95 | #' Method for the [AzureRMR::az_resource_group] and [AzureRMR::az_subscription] classes. 96 | #' 97 | #' @rdname delete_rec_service 98 | #' @name delete_rec_service 99 | #' @aliases delete_rec_service 100 | #' @section Usage: 101 | #' ``` 102 | #' delete_rec_service(name, confirm = TRUE, free_resources = TRUE) 103 | #' ``` 104 | #' @section Arguments: 105 | #' - `name`: The name of the recommender service. 106 | #' - `confirm`: Whether to ask for confirmation before deleting. 107 | #' - `free_resources`: Whether to delete the individual resources as well as the recommender template. 108 | #' 109 | #' @section Value: 110 | #' NULL on successful deletion. 111 | #' 112 | #' @seealso 113 | #' [create_rec_service], [delete_rec_service] 114 | #' 115 | #' @examples 116 | #' \dontrun{ 117 | #' 118 | #' rg <- AzureRMR::az_rm$ 119 | #' new(tenant="myaadtenant.onmicrosoft.com", app="app_id", password="password")$ 120 | #' get_subscription("subscription_id")$ 121 | #' get_resource_group("rgname") 122 | #' 123 | #' # delete a recommender service 124 | #' rg$delete_rec_service("myrec") 125 | #' 126 | #' } 127 | NULL 128 | 129 | 130 | .onLoad <- function(libname, pkgname) 131 | { 132 | set_sar_threads() 133 | add_sar_methods() 134 | } 135 | 136 | 137 | # add class methods to resource group 138 | add_sar_methods <- function() 139 | { 140 | az_resource_group$set("public", "create_rec_service", overwrite=TRUE, 141 | function(name, hosting_plan="S2", 142 | storage_type=c("Standard_LRS", "Standard_GRS"), 143 | insights_location=c("East US", "North Europe", "West Europe", "South Central US"), 144 | data_container="inputdata", 145 | ..., wait=TRUE) 146 | { 147 | storage_type <- match.arg(storage_type) 148 | insights_location <- match.arg(insights_location) 149 | 150 | parameters <- list(accountType=storage_type, 151 | hostingPlanSku=hosting_plan, 152 | appInsightsLocation=insights_location, 153 | deployPackageUri=sar_dll) 154 | 155 | res <- SAR::az_rec_service$new(self$token, self$subscription, self$name, 156 | name=name, 157 | template=sar_template, parameters=parameters, 158 | ..., wait=wait) 159 | 160 | res$set_data_container(data_container) 161 | res 162 | }) 163 | 164 | az_resource_group$set("public", "get_rec_service", overwrite=TRUE, 165 | function(name, data_container="inputdata") 166 | { 167 | res <- SAR::az_rec_service$new(self$token, self$subscription, self$name, name) 168 | if(!is_empty(data_container)) 169 | res$set_data_container(data_container) 170 | res 171 | }) 172 | 173 | az_resource_group$set("public", "delete_rec_service", overwrite=TRUE, 174 | function(name, confirm=TRUE, free_resources=TRUE) 175 | { 176 | self$get_rec_service(name, NULL)$delete(confirm=confirm, free_resources=free_resources) 177 | }) 178 | 179 | ## add class methods to subscription 180 | 181 | az_subscription$set("public", "create_rec_service", overwrite=TRUE, 182 | function(name, location, 
resource_group=name, hosting_plan, 183 | storage_type=c("Standard_LRS", "Standard_GRS"), 184 | insights_location=c("East US", "North Europe", "West Europe", "South Central US"), 185 | data_container="inputdata", 186 | ..., wait=TRUE) 187 | { 188 | if(!is_resource_group(resource_group)) 189 | { 190 | rgnames <- names(self$list_resource_groups()) 191 | if(resource_group %in% rgnames) 192 | { 193 | resource_group <- self$get_resource_group(resource_group) 194 | mode <- "Incremental" 195 | } 196 | else 197 | { 198 | message("Creating resource group '", resource_group, "'") 199 | resource_group <- self$create_resource_group(resource_group, location=location) 200 | mode <- "Complete" 201 | } 202 | } 203 | else mode <- "Incremental" # if passed a resource group object, assume it already exists in Azure 204 | 205 | res <- try(resource_group$create_rec_service(name=name, hosting_plan=hosting_plan, 206 | storage_type=storage_type, 207 | insights_location=insights_location, 208 | data_container=data_container, 209 | ..., wait=wait, mode=mode)) 210 | 211 | if(inherits(res, "try-error") && mode == "Complete") 212 | { 213 | resource_group$delete(confirm=FALSE) 214 | stop("Unable to create recommendation service") 215 | } 216 | res 217 | }) 218 | 219 | az_subscription$set("public", "get_rec_service", overwrite=TRUE, 220 | function(name, resource_group=name, data_container="inputdata") 221 | { 222 | if(!is_resource_group(resource_group)) 223 | resource_group <- self$get_resource_group(resource_group) 224 | 225 | resource_group$get_rec_service(name, data_container) 226 | }) 227 | 228 | az_subscription$set("public", "delete_rec_service", overwrite=TRUE, 229 | function(name, confirm=TRUE, free_resources=TRUE, resource_group=name) 230 | { 231 | self$get_rec_service(name, resource_group)$delete(confirm=confirm, free_resources=free_resources) 232 | }) 233 | } 234 | -------------------------------------------------------------------------------- /R/predict_sar.R: -------------------------------------------------------------------------------- 1 | #' Get personalised recommendations from a SAR model 2 | #' 3 | #' @param object A SAR model object. 4 | #' @param userdata A vector of user IDs, or a data frame containing user IDs and/or transactions. See below for the various ways to supply user information for predicting, and how they affect the results. 5 | #' @param k The number of recommendations to obtain. 6 | #' @param include_seed_items Whether items a user has already seen should be considered for recommendations. 7 | #' @param backfill Whether to backfill recommendations with popular items. 8 | #' @param reftime The reference time for discounting timestamps. If not supplied, defaults to the latest date in the training data and any new transactions supplied. 9 | #' @details 10 | #' The SAR model can produce personalised recommendations for a user, given a history of their transactions. This history can be based on either the original training data, or new events, based on the contents of `userdata` argument: 11 | #' 1. A character vector of user IDs. In this case, personalised recommendations will be computed based on the transactions in the training data, _ignoring_ any transaction event IDs or weights. 12 | #' 2. A data frame containing transaction item IDs, event types and/or weights, plus timestamps. In this case, all the transactions are assumed to be for a single (new) user. If the event types/weights are absent, all transactions are assigned equal weight. 13 | #' 3. 
A data frame containing user IDs and transaction details as in (2). In this case, the recommendations are based on both the training data for the given user(s), plus the new transaction details. 14 | #' 15 | #' In SAR, the first step in obtaining personalised recommendations is to compute a user-to-item affinity matrix \eqn{A}. This is essentially a weighted crosstabulation with one row per unique user ID and one column per item ID. The cells in the crosstab are given by the formula 16 | #' \deqn{sum(wt * 2^(-(t0 - time) / half_life))} 17 | #' where `wt` is obtained from the `weight` and `event` columns in the data. 18 | #' 19 | #' The product of this matrix with the item similarity matrix \eqn{S} then gives a matrix of recommendation scores. The recommendation scores are sorted, any items that the user has previously seen are optionally removed, and the top-N items are returned as the recommendations. 20 | #' 21 | #' The latter step is the most computationally expensive part of the algorithm. SAR can execute this in multithreaded fashion, with the default number of threads being half the number of (logical) cores. Use the `set_sar_threads` function to set the number of threads to use. 22 | #' 23 | #' @seealso 24 | #' [Making recommendations](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md#making-recommendations) at the [Product Recommendations API repo](https://github.com/Microsoft/Product-Recommendations) on GitHub 25 | #' 26 | #' @return 27 | #' For `user_predict`, a data frame containing one row per user ID supplied (or if no IDs are supplied, exactly one row). 28 | #' 29 | #' @examples 30 | #' 31 | #' data(ms_usage) 32 | #' mod <- sar(ms_usage) 33 | #' 34 | #' # item recommendations given a vector of user IDs 35 | #' users <- unique(ms_usage$user)[1:5] 36 | #' user_predict(mod, userdata=users) 37 | #' 38 | #' # item recommendations given a set of user IDs and transactions (assumed to be new) 39 | #' user_df <- subset(ms_usage, user %in% users) 40 | #' user_predict(mod, userdata=user_df) 41 | #' 42 | #' # item recomendations for a set of item IDs 43 | #' items <- unique(ms_usage$item)[1:5] 44 | #' item_predict(mod, items=items) 45 | #' 46 | #' # setting the number of threads to use when computing recommendations 47 | #' set_sar_threads(2) 48 | #' 49 | #' @export 50 | user_predict <- function(object, userdata=NULL, k=10, include_seed_items=FALSE, backfill=FALSE, reftime) 51 | { 52 | user_col <- object$col_ids["user"] 53 | item_col <- object$col_ids["item"] 54 | time_col <- object$col_ids["time"] 55 | event_col <- object$col_ids["event"] 56 | weight_col <- object$col_ids["weight"] 57 | 58 | if(is.null(userdata)) 59 | stop("Must provide new transaction events or users") 60 | 61 | if(is.data.frame(userdata)) 62 | { 63 | user <- as.character(userdata[[user_col]]) 64 | item <- userdata[[item_col]] 65 | time <- userdata[[time_col]] 66 | event <- userdata[[event_col]] 67 | weight <- userdata[[weight_col]] 68 | } 69 | else 70 | { 71 | user <- as.character(userdata) 72 | item <- time <- event <- weight <- NULL 73 | } 74 | 75 | t0 <- if(!missing(reftime)) 76 | max(object$time, time, reftime) 77 | else max(object$time, time) # if time not supplied, reduces to max(object$time) 78 | unique_users <- unique(user) 79 | 80 | # if userids supplied, compute affinity matrix from training data for these users 81 | trn_aff <- if(length(user) > 0) 82 | { 83 | keep <- which(object$user %in% unique_users) 84 | 85 | wt <- calc_wt(NULL, NULL, object$allowed_events) # events/weights not 
used for training data! 86 | make_affinity(object$user[keep], object$item[keep], object$time[keep], wt, t0, 87 | object$half_life, object$allowed_items) 88 | } 89 | else 0 90 | 91 | # if new transaction events supplied, compute affinity matrix from these events 92 | new_aff <- if(length(item) > 0) 93 | { 94 | item <- factor(item, levels=levels(object$item)) 95 | if(any(is.na(item))) 96 | stop("New item IDs detected") 97 | wt <- calc_wt(event, weight, object$allowed_events) 98 | make_affinity(user, item, time, wt, t0, object$half_life, object$allowed_items) 99 | } 100 | else 0 101 | 102 | # combine old and new affinity matrices, taking into account userids 103 | if(length(user) > 0) 104 | { 105 | tc <- colnames(trn_aff) 106 | nc <- colnames(new_aff) 107 | overlap <- intersect(tc, nc) 108 | 109 | # if both trn_aff and new_aff exist, there must be overlapping columns 110 | if(length(overlap) > 0) 111 | aff <- cbind( 112 | trn_aff[, base::setdiff(tc, overlap), drop=FALSE], 113 | trn_aff[, overlap, drop=FALSE] + new_aff[, overlap, drop=FALSE], 114 | new_aff[, base::setdiff(nc, overlap), drop=FALSE]) 115 | else if(!is.null(nrow(trn_aff)) && nrow(trn_aff) > 0) 116 | aff <- trn_aff 117 | else if(!is.null(nrow(new_aff)) && nrow(new_aff) > 0) 118 | aff <- new_aff 119 | else stop("Bad affinity matrix calculation") 120 | } 121 | else aff <- new_aff 122 | 123 | # detect case where new user ID supplied but no transactions 124 | if(length(user) > 0 && length(item) == 0 && ncol(aff) != length(unique_users)) 125 | warning("New user IDs detected without any transactions; these will be dropped") 126 | 127 | recs <- user_predict_ranking(aff, object$sim_mat, k, include_seed_items, backfill, object$pop_items) 128 | 129 | # if we don't fill in zero-score recs, NA them out 130 | if(!backfill) 131 | { 132 | zeros <- recs[[1]] == 0 133 | recs[[2]][zeros] <- NA_integer_ 134 | recs[[1]][zeros] <- NA_real_ 135 | } 136 | 137 | recs[[2]][] <- rownames(object$sim_mat)[recs[[2]]] 138 | colnames(recs[[1]]) <- paste0("score", seq_len(k)) 139 | colnames(recs[[2]]) <- paste0("rec", seq_len(k)) 140 | 141 | if(length(user) == 0) 142 | cbind.data.frame(recs[[2]], recs[[1]], stringsAsFactors=FALSE) 143 | else 144 | { 145 | out <- cbind.data.frame(user=colnames(aff), recs[[2]], recs[[1]], stringsAsFactors=FALSE) 146 | out <- out[match(unique(user), out$user, nomatch=0), ] 147 | row.names(out) <- NULL 148 | out 149 | } 150 | } 151 | 152 | 153 | #' Get item-to-item recommendations from a SAR model 154 | #' 155 | #' @param object A SAR model object. 156 | #' @param items A vector of item IDs. 157 | #' @param k The number of recommendations to obtain. 158 | #' @return 159 | #' A data frame containing one row per item ID supplied. 
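#' The recommendations and their scores are returned in columns `rec1, ..., reck` and `score1, ..., scorek`, where the number of recommendations is controlled by the `k` argument.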
160 | #' 161 | #' @examples 162 | #' 163 | #' data(ms_usage) 164 | #' mod <- sar(ms_usage) 165 | #' 166 | #' # item recomendations for a set of item IDs 167 | #' items <- unique(ms_usage$item)[1:5] 168 | #' item_predict(mod, items=items) 169 | #' 170 | #' @export 171 | item_predict <- function(object, items, k=10) 172 | { 173 | if(is.data.frame(items)) 174 | items <- as.character(items$items) 175 | else items <- as.character(items) 176 | item_sim <- object$sim_mat[items, , drop=FALSE] 177 | 178 | ord <- apply(item_sim, 1, function(x) 179 | { 180 | order(x, decreasing=TRUE)[seq_len(k) + 1] # assuming largest elem will be on the diagonal 181 | }) 182 | 183 | recs <- matrix(rownames(object$sim_mat)[ord], ncol=k, byrow=TRUE) 184 | 185 | scores <- t(sapply(seq_len(ncol(ord)), function(x) 186 | { 187 | item_sim[x, ord[, x]] 188 | })) 189 | 190 | recs <- cbind.data.frame(recs, scores, stringsAsFactors=FALSE) 191 | names(recs) <- c(paste0("rec", seq_len(k)), paste0("score", seq_len(k))) 192 | cbind(item=items, recs, stringsAsFactors=FALSE) 193 | } 194 | 195 | 196 | make_affinity <- function(user, item, time, wt, t0=max(time), half_life, allowed_items=NULL) 197 | { 198 | # handle POSIXct datetimes; assume data is in days otherwise 199 | if(inherits(time, "POSIXct")) 200 | half_life <- half_life * 24 * 3600 201 | else if(!inherits(time, c("Date", "numeric"))) 202 | stop("time variable must be numeric, POSIXct or Date") 203 | 204 | # quit early if no data supplied 205 | if(length(item) < 1) 206 | return(0) 207 | 208 | time <- as.numeric(time) 209 | t0 <- as.numeric(t0) 210 | if(length(wt) == 0) 211 | wt <- rep(1, length(time)) 212 | if(half_life > 0) 213 | wt <- wt*2^((time - t0) / half_life) 214 | 215 | # use sparse=TRUE to work around dimension problems with user, item large 216 | # outputs item-user matrix, not user-item matrix for speed later on 217 | if(length(unique(user)) >= 1) 218 | return(xtabs(wt ~ item + user, sparse=TRUE)) 219 | else 220 | { 221 | out <- xtabs(wt ~ item) 222 | Matrix::Matrix(unclass(out), ncol=1, dimnames=list(dimnames(out)[[1]], NULL), sparse=TRUE) 223 | } 224 | } 225 | 226 | 227 | calc_wt <- function(event=NULL, weight=NULL, 228 | allowed_events=c(Click=1, RecommendationClick=2, AddShopCart=3, RemoveShopCart=-1, Purchase=4)) 229 | { 230 | if(is.null(event) && is.null(weight)) 231 | numeric(0) 232 | else if(!is.null(weight)) 233 | weight 234 | else 235 | { 236 | stopifnot(all(event %in% names(allowed_events))) 237 | allowed_events[event] 238 | } 239 | } 240 | -------------------------------------------------------------------------------- /R/sar.R: -------------------------------------------------------------------------------- 1 | #' Fit a SAR model 2 | #' 3 | #' @param x A data frame. For the `print` method, a SAR model object. 4 | #' @param user,item,time,event,weight For the default method, vectors to use as the user IDs, item IDs, timestamps, event types, and transaction weights for SAR. For the `data.frame` method, the names of the columns in the data frame `x` to use for these variables. 5 | #' @param support_threshold The SAR support threshold. Items that do not occur at least this many times in the data will be considered "cold". 6 | #' @param allowed_items A character or factor vector of allowed item IDs to use in the SAR model. If supplied, this will be used to categorise the item IDs in the data. 7 | #' @param by_user Should the analysis be by user ID, or by user ID and timestamp? Defaults to userID only. 
8 | #' @param similarity Similarity metric to use; defaults to Jaccard. 9 | #' @param half_life The decay period to use when weighting transactions by age. 10 | #' @param allowed_events The allowed values for `event`, if that argument is supplied. Other values will be discarded. 11 | #' @param catalog_data A dataset to use for building the cold-items feature model. 12 | #' @param catalog_formula A formula for the feature model used to compute similarities for cold items. 13 | #' @param cold_to_cold Whether the cold-items feature model should include the cold items themselves in the training data, or only warm items. 14 | #' @param cold_item_model The type of model to use for cold item features. 15 | #' @param ... For `sar()`, further arguments to pass to the cold-items feature model. 16 | #' @details 17 | #' Smart Adaptive Recommendations (SAR) is a fast, scalable, adaptive algorithm for personalized recommendations based on user transaction history and item descriptions. It produces easily explainable/interpretable recommendations and handles "cold item" and "semi-cold user" scenarios. 18 | #' 19 | #' Central to how SAR works is an item-to-item _co-occurrence matrix_, which is based on how many times two items occur for the same users. For example, if a given user buys items \eqn{i_1} and \eqn{i_2}, then the cell \eqn{(i_1, i_2)} is incremented by 1. From this, an item _similarity matrix_ can be obtained by rescaling the co-occurrences according to a given metric. Options for the metric include Jaccard (the default), lift, and counts (which means no rescaling). 20 | #' 21 | #' Note that the similarity matrix in SAR thus only includes information on which users transacted which items. It does not include any other information such as item ratings or features, which may be used by other recommender algorithms. 22 | #' 23 | #' The SAR implementation in R should be usable on datasets with up to a few million rows and several thousand items. The main constraint is the size of the similarity matrix, which in turn depends (quadratically) on the number of unique items. The implementation has been successfully tested on the MovieLens 20M dataset, which contains about 138,000 users and 27,000 items. For larger datasets, it is recommended to use the [Azure web service API][az_rec_service]. 24 | #' 25 | #' @section Cold items: 26 | #' 27 | #' SAR has the ability to handle cold items, meaning those which have not been seen by any user, or which have only been seen by fewer than `support_threshold` users. This is done by using item features to predict similarities. The method used for this is set by the `cold_item_model` argument: 28 | #' 29 | #' * If this is `NULL` (the default), a manual algorithm is used that correlates each feature in turn with similarity, and produces a predicted similarity based on which features two items have in common. 30 | #' * If this is the name of a modelling function, such as `"lm"` or `"randomForest"`, a model of that type is fit on the features and used to predict similarity. In particular, use `"lm"` to get a model that is (approximately) equivalent to that used by the Azure web service API. 31 | #' 32 | #' The data frame and features used for cold items are given by the `catalog_data` and `catalog_formula` arguments. `catalog_data` should be a data frame whose first column is item ID. `catalog_formula` should be a one-sided formula (no LHS). 33 | #' 34 | #' This feature is currently experimental, and subject to change.
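#'
#' As an illustrative sketch of the interface (this assumes that the first column of the bundled `ms_catalog` dataset is the item ID, as required for `catalog_data`), a cold-items feature model approximating the Azure web service could be fitted with:
#' ```
#' data(ms_usage, ms_catalog)
#' mod_cold <- sar(ms_usage, catalog_data=ms_catalog, cold_item_model="lm")
#' ```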
35 | #' 36 | #' @return 37 | #' An S3 object representing the SAR model. This is essentially the item-to-item similarity matrix in sparse format, along with the original transaction data used to fit the model. 38 | #' 39 | #' @seealso 40 | #' [Description of SAR](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md) at the [Product Recommendations API repo](https://github.com/Microsoft/Product-Recommendations) on GitHub 41 | #' 42 | #' @examples 43 | #' 44 | #' data(ms_usage) 45 | #' 46 | #' ## all of these fit the same model: 47 | #' 48 | #' # fit a SAR model from a series of vectors 49 | #' mod1 <- sar(user=ms_usage$user, item=ms_usage$item, time=ms_usage$time) 50 | #' 51 | #' # fit a model from a data frame, naming the variables to use 52 | #' mod2 <- sar(ms_usage, user="user", item="item", time="time") 53 | #' 54 | #' # fit a model from a data frame, using default variable names 55 | #' mod3 <- sar(ms_usage) 56 | #' 57 | #' @rdname sar 58 | #' @export 59 | sar <- function(...) 60 | { 61 | UseMethod("sar") 62 | } 63 | 64 | 65 | #' @rdname sar 66 | #' @export 67 | sar.data.frame <- function(x, user="user", item="item", time="time", event="event", weight="weight", ...) 68 | { 69 | model <- sar.default(user=x[[user]], item=x[[item]], time=x[[time]], event=x[[event]], weight=x[[weight]], ...) 70 | model$col_ids <- c(user=user, item=item, time=time, event=event, weight=weight) 71 | class(model) <- c("sar.data.frame", class(model)) 72 | model 73 | } 74 | 75 | 76 | #' @rdname sar 77 | #' @export 78 | sar.default <- function(user, item, time, event=NULL, weight=NULL, support_threshold=1, allowed_items=NULL, 79 | allowed_events=c(Click=1, RecommendationClick=2, AddShopCart=3, RemoveShopCart=-1, Purchase=4), 80 | by_user=TRUE, similarity=c("jaccard", "lift", "count"), half_life=30, 81 | catalog_data=NULL, catalog_formula=item ~ ., cold_to_cold=FALSE, cold_item_model=NULL, ...) 82 | { 83 | if(missing(user) || is.null(user)) 84 | stop("must supply column of user IDs") 85 | if(missing(item) || is.null(item)) 86 | stop("must supply column of item IDs") 87 | if(missing(time) || is.null(time)) 88 | stop("must supply column of event timestamps") 89 | 90 | if(!is.null(weight) || !is.null(event)) 91 | message("Event types and weights are not used in training a SAR model") 92 | 93 | similarity <- match.arg(similarity) 94 | 95 | item <- if(is.null(allowed_items)) 96 | as.factor(item) 97 | else factor(item, levels=sort(allowed_items)) 98 | 99 | sim_mat <- make_similarity(user, item, time, support_threshold, by_user, similarity) 100 | pop_items <- attr(sim_mat, "pop_items") 101 | attr(sim_mat, "pop_items") <- NULL 102 | 103 | if(!is.null(catalog_data)) 104 | sim_mat <- get_cold_similarity(cold_item_model, sim_mat, catalog_formula, catalog_data, cold_to_cold, 105 | similarity, ...) 106 | 107 | out <- list(sim_mat=sim_mat, pop_items=pop_items, 108 | user=as.character(user), item=item, time=time, event=event, weight=weight, # save the data 109 | allowed_items=unique(allowed_items), allowed_events=allowed_events, 110 | by_user=by_user, support_threshold=support_threshold, 111 | half_life=half_life, similarity=similarity) 112 | class(out) <- "sar" 113 | out 114 | } 115 | 116 | 117 | #' @rdname sar 118 | #' @method print sar 119 | #' @export 120 | print.sar <- function(x, ...)
121 | { 122 | cat("SAR model\n") 123 | cat("Support threshold:", x$support_threshold, "\n") 124 | cat("Co-occurrence unit:", if(x$by_user) "user\n" else "user/time\n") 125 | cat("Similarity function:", x$similarity, "\n") 126 | cat("Decay period in days:", x$half_life, "\n") 127 | cat("Item count:", nrow(x$sim_mat), "\n") 128 | cat("User count:", nrow(x$aff_mat), "\n") 129 | if(!is.null(x$col_ids)) 130 | { 131 | cat("Column names:\n") 132 | print(x$col_ids) 133 | } 134 | invisible(x) 135 | } 136 | 137 | 138 | make_similarity <- function(user, item, time, support_threshold, by_user, similarity) 139 | { 140 | dat <- dplyr::tibble(user, item, time) 141 | 142 | grps <- if(by_user) 143 | dplyr::quo(user) 144 | else c(dplyr::quo(user), dplyr::quo(time)) 145 | 146 | # call out to C++ to compute actual matrix: 2 order of magnitude speedup 147 | sim_matrix <- make_similarity_matrix_sp(nlevels(item), 148 | group_list(dplyr::group_by(dat, !!grps)), 149 | item) 150 | 151 | # record popular items -- used for backfilling 152 | pop_items <- seq_len(nlevels(item))[order(diag(sim_matrix), decreasing=TRUE)] - 1 153 | 154 | # set all elements below support threshold to zero 155 | sim_matrix@x[sim_matrix@x < support_threshold] <- 0 156 | sim_matrix <- Matrix::drop0(sim_matrix) 157 | 158 | if(similarity == "lift") 159 | sim_matrix <- rescale_to_lift(sim_matrix) 160 | else if(similarity == "jaccard") 161 | sim_matrix <- rescale_to_jaccard(sim_matrix) 162 | 163 | dimnames(sim_matrix) <- list(levels(dat$item), levels(dat$item)) 164 | 165 | attr(sim_matrix, "pop_items") <- pop_items 166 | sim_matrix 167 | } 168 | 169 | 170 | get_cold_similarity <- function(cold_item_model=NULL, sim_matrix, catalog_formula, catalog_data, cold_to_cold, 171 | similarity, ...) 172 | { 173 | # handle ~. formula correctly, assuming column 1 of catalog data is item ID 174 | if(any(all.vars(catalog_formula[[length(catalog_formula)]]) == ".")) 175 | { 176 | vars <- names(catalog_data[-1]) 177 | catalog_formula <- reformulate(vars) 178 | } 179 | 180 | cold_df <- if(is.null(cold_item_model)) 181 | get_cold_similarity_nullmodel(sim_matrix, catalog_formula, catalog_data, cold_to_cold) 182 | else 183 | { 184 | if(is.character(cold_item_model)) 185 | cold_item_model <- get(cold_item_model, mode="function") 186 | else if(!is.function(cold_item_model)) 187 | stop("Must supply function or function name as string for cold item model") 188 | 189 | get_cold_similarity_model(sim_matrix, catalog_formula, catalog_data, cold_to_cold, cold_item_model, similarity) 190 | } 191 | 192 | wi <- match(cold_df$warm_item, rownames(sim_matrix)) 193 | ci <- match(cold_df$cold_item, rownames(sim_matrix)) 194 | sim_matrix[cbind(ci, wi)] <- cold_df$wt # fill in cold rows 195 | sim_matrix[cbind(wi, ci)] <- cold_df$wt # fill in cold columns 196 | 197 | sim_matrix 198 | } 199 | 200 | 201 | # grouping internal structure changes for dplyr 0.8 202 | group_list <- function(grpdf) 203 | { 204 | if(utils::packageVersion("dplyr") < package_version("0.8.0")) 205 | attr(grpdf, "indices") 206 | else 207 | { 208 | group_rows <- get("group_rows", getNamespace("dplyr")) 209 | lst <- group_rows(grpdf) 210 | lapply(lst, function(x) x - 1) 211 | } 212 | } 213 | -------------------------------------------------------------------------------- /R/az_rec_endp.R: -------------------------------------------------------------------------------- 1 | #' Azure product recommendations endpoint class 2 | #' 3 | #' Class representing the client endpoint to the product recommendations service. 
4 | #' 5 | #' @docType class 6 | #' @section Methods: 7 | #' - `new(...)`: Initialize a client endpoint object. See 'Initialization' for more details. 8 | #' - `train_model(...)`: Train a new product recommendations model; return an object of class `rec_model`. See `Training` for more details. 9 | #' - `get_model(description, id)`: Get an existing product recommendations model from either its description or ID; return an object of class `rec_model`. 10 | #' - `delete_model(description, id)`: Delete the specified model. 11 | #' - `upload_data(data, destfile)`: Upload a data frame to the endpoint, as a CSV file. By default, the name of the uploaded file will be the name of the data frame with a ".csv" extension. 12 | #' - `upload_csv(srcfile, destfile)`: Upload a CSV file to the endpoint. By default, the name of the uploaded file will be the same as the source file. 13 | #' - `sync_model_list()`: Update the stored list of models for this service. 14 | #' - `get_swagger_url()`: Get the Swagger URL for this service. 15 | #' - `get_service_url()`: Get the service URL, which is used to train models and obtain recommendations. 16 | #' 17 | #' @section Initialization: 18 | #' The following arguments are used to initialize a new client endpoint: 19 | #' - `name`: The name of the endpoint; see below. Alternatively, this can also be the full URL of the endpoint. 20 | #' - `admin_key`: The administration key for the endpoint. Use this to retrieve, train, and delete models. 21 | #' - `rec_key`: The recommender key for the endpoint. Use this to get recommendations. 22 | #' - `service_host`: The hostname for the endpoint. For the public Azure cloud, this is `azurewebsites.net`. 23 | #' - `storage_key`: The access key for the storage account associated with the service. 24 | #' - `storage_sas`: A shared access signature (SAS) for the storage account associated with the service. You must provide either `storage_key` or `storage_sas` if you want to upload new datasets to the backend. 25 | #' - `storage_host`: The hostname for the storage account. For the public Azure cloud, this is `core.windows.net`. 26 | #' - `storage_endpoint`: The storage account endpoint for the service. By default, uses the account that was created at service creation. 27 | #' - `data_container`: The default blob container for input datasets. Defaults to `"inputdata"`. 28 | #' 29 | #' Note that the name of the client endpoint for a product recommendations service is _not_ the name that was supplied when deploying the service. Instead, it is a randomly generated unique string that starts with the service name. For example, if you deployed a service called "myrec", the name of the endpoint is "myrecusacvjwpk4raost". 30 | #' 31 | #' @section Training: 32 | #' To train a new model, supply the following arguments to the `train_model` method: 33 | #' - `description`: A character string describing the model. 34 | #' - `usage_data`: The training dataset. This is required. 35 | #' - `catalog_data`: An optional dataset giving features for each item. Only used for imputing cold items. 36 | #' - `eval_data`: An optional dataset to use for evaluating model performance. 37 | #' - `support_threshold`: The minimum support for an item to be considered warm. 38 | #' - `cooccurrence`: How to measure cooccurrence: either user ID, or user-by-time. 39 | #' - `similarity`: The similarity metric to use; defaults to "Jaccard". 40 | #' - `cold_items`: Whether recommendations should include cold items. 
41 | #' - `cold_to_cold`: Whether similarities between cold items should be computed. 42 | #' - `user_affinity`: Whether event type and time should be considered. 43 | #' - `backfill`: Whether to backfill recommendations with popular items. 44 | #' - `include_seed_items`: Whether seed items (those already seen by a user) should be allowed as recommendations. 45 | #' - `half_life`: The time decay parameter for computing user-item affinities. 46 | #' - `user_to_items`: Whether user ID is used when computing personalised recommendations. 47 | #' - `wait`: Whether to wait until the model has finished training. 48 | #' - `container`: The container where the input datasets are stored. Defaults to the input container for the endpoint, usually `"inputdata"`. 49 | #' 50 | #' For detailed information on these arguments see the [API reference](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/api-reference.md#train-a-new-model). 51 | #' 52 | #' @seealso 53 | #' [az_rec_service] for the service itself, [rec_model] for an individual recommendations model 54 | #' 55 | #' [API reference](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/api-reference.md) and [SAR model description](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md) at the Product Recommendations API repo on GitHub 56 | #' 57 | #' @examples 58 | #' \dontrun{ 59 | #' 60 | #' # creating a recommendations service endpoint from an Azure resource 61 | #' svc <- resgroup$get_rec_service("myrec") 62 | #' rec_endp <- svc$get_rec_endpoint() 63 | #' 64 | #' # creating the endpoint from scratch -- must supply admin, recommender and storage keys 65 | #' rec_endp <- rec_endpoint$new("myrecusacvjwpk4raost", 66 | #' admin_key="key1", rec_key="key2", storage_key="key3") 67 | #' 68 | #' # upload the Microsoft store data 69 | #' data(ms_usage) 70 | #' rec_endp$upload_data(ms_usage) 71 | #' 72 | #' # train a recommender 73 | #' rec_model <- rec_endp$train_model("model1", usage="ms_usage.csv", support_threshold=10, 74 | #' similarity="Jaccard", user_affinity=TRUE, user_to_items=TRUE, 75 | #' backfill=TRUE, include_seed_items=FALSE) 76 | #' 77 | #' # list of trained models 78 | #' rec_endp$sync_model_list() 79 | #' 80 | #' # delete the trained model (will ask for confirmation) 81 | #' rec_endp$delete_model("model1") 82 | #' 83 | #' } 84 | #' @format An R6 object of class `rec_endpoint`.
85 | #' @export 86 | rec_endpoint <- R6Class("rec_endpoint", 87 | 88 | public=list( 89 | url=NULL, 90 | admin_key=NULL, 91 | rec_key=NULL, 92 | name=NULL, 93 | storage=NULL, 94 | models=NULL, 95 | data_container=NULL, 96 | 97 | initialize=function(name, admin_key, rec_key, service_host="azurewebsites.net", 98 | storage_key=NULL, storage_sas=NULL, storage_host="core.windows.net", 99 | storage_endpoint=NULL, data_container="inputdata") 100 | { 101 | if(is_url(name)) 102 | { 103 | url <- name 104 | name <- sub("(^[^.]+)\\..+$", "\\1", httr::parse_url(name)$host) 105 | } 106 | else url <- paste0("https://", name, ".", service_host) 107 | 108 | self$url <- url 109 | self$admin_key <- admin_key 110 | self$rec_key <- rec_key 111 | self$name <- name 112 | 113 | if(is.null(storage_endpoint)) 114 | { 115 | storage_host <- paste0("st.blob.", storage_host) 116 | stor_endp <- sub("ws\\..+$", storage_host, self$url) 117 | storage_endpoint <- storage_endpoint(stor_endp, key=storage_key, sas=storage_sas) 118 | } 119 | else stopifnot(inherits(storage_endpoint, "blob_endpoint")) 120 | self$storage <- storage_endpoint 121 | self$data_container <- data_container 122 | 123 | self$sync_model_list() 124 | invisible(NULL) 125 | }, 126 | 127 | # store the list of model descriptions and IDs for convenience 128 | sync_model_list=function() 129 | { 130 | res <- httr::GET(self$get_service_url(), httr::add_headers("x-api-key"=self$admin_key)) 131 | httr::stop_for_status(res) 132 | self$models <- as.data.frame(dplyr::bind_rows(httr::content(res))) 133 | self$models 134 | }, 135 | 136 | get_swagger_url=function() 137 | { 138 | sprintf("%s/swagger", self$url) 139 | }, 140 | 141 | get_service_url=function() 142 | { 143 | sprintf("%s/api/models", self$url) 144 | }, 145 | 146 | get_model=function(description, id) 147 | { 148 | if(missing(id)) 149 | id <- private$get_id_by_desc(description) 150 | rec_model$new(self$url, self$admin_key, self$rec_key, id=id) 151 | }, 152 | 153 | train_model=function(description, 154 | usage_data=NULL, catalog_data=NULL, eval_data=NULL, 155 | support_threshold=NULL, cooccurrence=NULL, similarity=NULL, 156 | cold_items=NULL, cold_to_cold=NULL, user_affinity=NULL, backfill=NULL, include_seed_items=NULL, 157 | half_life=NULL, user_to_items=NULL, wait=TRUE, container=self$data_container) 158 | { 159 | if(description %in% self$models$description) 160 | stop("Model already exists with description '", description, "'", call.=FALSE) 161 | 162 | # don't use funky match.call magic to avoid NSE hassles 163 | parms <- list( 164 | description=description, 165 | blobContainerName=container, 166 | usageRelativePath=usage_data, 167 | catalogFileRelativePath=catalog_data, 168 | evaluationUsageRelativePath=eval_data, 169 | supportThreshold=support_threshold, 170 | cooccurrenceUnit=cooccurrence, 171 | similarityFunction=similarity, 172 | enableColdItemPlacement=cold_items, 173 | enableColdToColdRecommendations=cold_to_cold, 174 | enableUserAffinity=user_affinity, 175 | enableBackfilling=backfill, 176 | allowSeedItemsInRecommendations=include_seed_items, 177 | decayPeriodInDays=half_life, 178 | enableUserToItemRecommendations=user_to_items) 179 | 180 | res <- rec_model$new(self$url, self$admin_key, self$rec_key, parms=parms, wait=wait) 181 | self$sync_model_list() 182 | res 183 | }, 184 | 185 | delete_model=function(description, id, confirm=TRUE) 186 | { 187 | self$get_model(description, id)$delete(confirm=confirm) 188 | invisible(self$sync_model_list()) 189 | }, 190 | 191 | upload_data=function(data, destfile, 
container=self$data_container) 192 | { 193 | if(missing(destfile)) 194 | destfile <- paste0(as.character(substitute(data)), ".csv") 195 | f <- tempfile(fileext=".csv") 196 | on.exit(file.remove(f)) 197 | write.table(data, f, row.names=FALSE, col.names=FALSE, sep=",") 198 | self$upload_csv(f, destfile, container) 199 | }, 200 | 201 | upload_csv=function(srcfile, destfile=basename(srcfile), container=self$data_container) 202 | { 203 | self$storage %>% blob_container(container) %>% upload_blob(srcfile, destfile) 204 | }, 205 | 206 | list_data=function(container=self$data_container) 207 | { 208 | self$storage %>% blob_container(container) %>% list_blobs() 209 | }, 210 | 211 | delete_data=function(data, container=self$data_container, confirm=TRUE) 212 | { 213 | self$storage %>% blob_container(container) %>% delete_blob(data, confirm=confirm) 214 | }, 215 | 216 | print=function(...) 217 | { 218 | cat("Product recommendations service endpoint\n") 219 | cat("Service URL:", self$get_service_url(), "\n") 220 | cat("Swagger URL:", self$get_swagger_url(), "\n") 221 | cat("Admin key:", if(is_empty(self$admin_key)) "\n" else "\n") 222 | cat("Recommender key:", if(is_empty(self$rec_key)) "\n" else "\n") 223 | cat("---\n") 224 | cat("Models:\n") 225 | print(self$models) 226 | cat("---\n") 227 | print(self$storage) 228 | cat("\n---\n") 229 | cat(AzureRMR::format_public_methods(self)) 230 | invisible(NULL) 231 | } 232 | ), 233 | 234 | private=list( 235 | 236 | get_id_by_desc=function(description) 237 | { 238 | match <- which(description == self$models$description) 239 | if(length(match) == 0) 240 | stop("No model found with description '", description, "'", call.=FALSE) 241 | else if(length(match) > 1) 242 | stop("More than one model with description '", description, "'", call.=FALSE) 243 | self$models$id[match] 244 | }, 245 | 246 | get_desc_by_id=function(id) 247 | { 248 | match <- which(id == self$models$id) 249 | if(length(match) == 0) 250 | stop("No model found with ID '", id, "'", call.=FALSE) 251 | else if(length(match) > 1) 252 | stop("More than one model with ID '", id, "'", call.=FALSE) 253 | self$models$description[match] 254 | } 255 | )) 256 | 257 | 258 | -------------------------------------------------------------------------------- /R/az_rec_model.R: -------------------------------------------------------------------------------- 1 | #' Azure product recommendations model class 2 | #' 3 | #' Class representing an individual product recommendations (SAR) model. 4 | #' 5 | #' @docType class 6 | #' @section Methods: 7 | #' - `new(...)`: Initialize a model object. See 'Initialization' for more details. 8 | #' - `delete(confirm=TRUE)`: Delete the model. 9 | #' - `user_predict(userdata, k=10)`: Get personalised recommendations from the model. See 'Recommendations' for more details. 10 | #' - `item_predict(item, k=10)`: Get item-to-item recommendations from the model. See 'Recommendations' for more details. 11 | #' - `get_model_url()`: Get the individual service URL for this model. 12 | #' 13 | #' @section Initialization: 14 | #' Generally, the easiest way to initialize a new model object is via the `get_model()` and `train_model()` methods of the `rec_endpoint` class, which will handle all the gory details. 15 | #' 16 | #' @section Recommendations: 17 | #' These arguments are used for obtaining personalised and item-to-item recommendations. 18 | #' - `userdata`: The input data on users for which to obtain personalised recommendations. This can be: 19 | #' 1. A character vector of user IDs. 
In this case, personalised recommendations will be computed based on the transactions in the training data, _ignoring_ any transaction event IDs or weights. 20 | #' 2. A data frame containing transaction item IDs, event types and/or weights, plus timestamps. In this case, all the transactions are assumed to be for a single (new) user. If the event types/weights are absent, all transactions are assigned equal weight. 21 | #' 3. A data frame containing user IDs and transaction details as in (2). In this case, the recommendations are based on both the training data for the given user(s), plus the new transaction details. 22 | #' - `item`: A vector of item IDs for which to obtain item-to-item recommendations. 23 | #' - `k`: The number of recommendations to return. Defaults to 10. 24 | #' 25 | #' Both the `user_predict()` and `item_predict()` methods return a data frame with the top-K recommendations and scores. 26 | #' 27 | #' @seealso 28 | #' [az_rec_service] for the service backend, [rec_endpoint] for the client endpoint 29 | #' 30 | #' [API reference](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/api-reference.md) and [SAR model description](https://github.com/Microsoft/Product-Recommendations/blob/master/doc/sar.md) at the Product Recommendations API repo on GitHub 31 | #' 32 | #' @examples 33 | #' \dontrun{ 34 | #' 35 | #' # get a recommender endpoint and previously-trained model 36 | #' rec_endp <- rec_endpoint$new("myrecusacvjwpk4raost", admin_key="key1", rec_key="key2") 37 | #' rec_model <- rec_endp$get_model("model1") 38 | #' 39 | #' data(ms_usage) 40 | #' 41 | #' # item recommendations for a set of user IDs 42 | #' users <- unique(ms_usage$user)[1:5] 43 | #' rec_model$user_predict(users) 44 | #' 45 | #' # item recommendations for a set of user IDs and transactions (assumed to be new) 46 | #' user_df <- subset(ms_usage, user %in% users) 47 | #' rec_model$user_predict(user_df) 48 | #' 49 | #' # item recomendations for a set of item IDs 50 | #' items <- unique(ms_usage$item)[1:5] 51 | #' rec_model$item_predict(items) 52 | #' 53 | #' } 54 | #' @format An R6 object of class `rec_model`. 55 | #' @export 56 | rec_model <- R6Class("rec_model", 57 | 58 | public=list( 59 | service_url=NULL, 60 | admin_key=NULL, 61 | rec_key=NULL, 62 | id=NULL, 63 | description=NULL, 64 | creation_time=NULL, 65 | status=NULL, 66 | status_message=NULL, 67 | parameters=NULL, 68 | stats=NULL, 69 | 70 | initialize=function(service_url, admin_key, rec_key, id, ..., parms=list(...), wait=TRUE) 71 | { 72 | self$service_url <- service_url 73 | self$admin_key <- admin_key 74 | self$rec_key <- rec_key 75 | 76 | if(is_empty(parms)) 77 | { 78 | self$id <- id 79 | parms <- private$get_model() 80 | self$description <- parms$description 81 | } 82 | else 83 | { 84 | self$description <- parms$description 85 | parms <- private$train_model(parms, wait=wait) 86 | } 87 | 88 | self$creation_time <- as.POSIXct(parms$creationTime, format="%Y-%m-%dT%H:%M:%OS", tz="GMT") 89 | self$status <- parms$modelStatus 90 | self$status_message <- parms$modelStatusMessage 91 | self$parameters <- parms$parameters 92 | self$stats <- parms$statistics 93 | }, 94 | 95 | delete=function(confirm=TRUE) 96 | { 97 | if(confirm && interactive()) 98 | { 99 | yn <- readline(paste0("Do you really want to delete model '", self$description, "'? 
(y/N) ")) 100 | if(tolower(substr(yn, 1, 1)) != "y") 101 | return(invisible(NULL)) 102 | } 103 | message("Deleting model '", self$description, "'") 104 | private$model_op(http_verb="DELETE") 105 | }, 106 | 107 | user_predict=function(userdata=NULL, k=10) 108 | { 109 | # assume userdata in fixed format 110 | if(is.data.frame(userdata)) 111 | { 112 | users <- as.character(userdata$user) 113 | user_col <- which(names(userdata) == "user") 114 | } 115 | else users <- as.character(userdata) 116 | 117 | userid_provided <- length(users) > 0 118 | if(!userid_provided && !is.data.frame(userdata)) 119 | stop("Must provide user IDs or transaction events to get recommendations for", call.=FALSE) 120 | 121 | users <- unique(users) 122 | n_users <- max(1, length(users)) 123 | result <- lapply(seq_len(n_users), function(i) 124 | { 125 | # wrangle any provided dataset into format the API can accept 126 | if(is.data.frame(userdata) && "item" %in% names(userdata)) 127 | { 128 | if(userid_provided) 129 | data_i <- userdata[userdata$user == users[i], - user_col, drop=FALSE] 130 | else data_i <- userdata 131 | 132 | # rename to match API conventions 133 | names(data_i)[names(data_i) == "item"] <- "itemId" 134 | names(data_i)[names(data_i) == "time"] <- "timestamp" 135 | names(data_i)[names(data_i) == "event"] <- "eventType" 136 | } 137 | else data_i <- NULL 138 | 139 | options <- list(recommendationCount=k) 140 | if(userid_provided) 141 | options <- c(options, userId=users[i]) 142 | 143 | private$model_op("recommend", body=data_i, encode="json", options=options, 144 | key=self$rec_key, 145 | http_verb="POST") 146 | }) 147 | 148 | # pad out number of recommendations for each user with NAs, if we are short 149 | result <- lapply(result, function(row) 150 | { 151 | df <- dplyr::bind_cols(row) 152 | nc <- ncol(df) 153 | if(nc < 2 * k) 154 | df[(nc + 1):(2 * k)] <- list(NA_character_, NA_real_) 155 | df 156 | }) %>% dplyr::bind_rows() 157 | names(result) <- paste0(c("rec", "score"), rep(seq_len(k), each=2)) 158 | 159 | # reorder columns to match standalone predict 160 | perm <- c(matrix(seq_len(k * 2), ncol=2, byrow=TRUE)) 161 | result <- result[perm] 162 | 163 | if(userid_provided) 164 | result <- dplyr::bind_cols(user=users, result) 165 | as.data.frame(result) 166 | }, 167 | 168 | item_predict=function(item=NULL, k=10) 169 | { 170 | if(is.null(item)) 171 | stop("Must provide item IDs to get recommendations for", call.=FALSE) 172 | if(is.data.frame(item)) 173 | item <- item$item 174 | 175 | item <- unique(item) 176 | n_items <- length(item) 177 | result <- lapply(seq_len(n_items), function(i) 178 | { 179 | options <- list(itemId=item[i], recommendationCount=k) 180 | private$model_op("recommend", options=options, key=self$rec_key) 181 | }) 182 | 183 | # pad out number of recommendations for each user with NAs, if we are short 184 | result <- lapply(result, function(row) 185 | { 186 | df <- dplyr::bind_cols(row) 187 | nc <- ncol(df) 188 | if(nc < 2 * k) 189 | df[(nc + 1):(2 * k)] <- list(NA_character_, NA_real_) 190 | df 191 | }) %>% dplyr::bind_rows() 192 | names(result) <- paste0(c("rec", "score"), rep(seq_len(k), each=2)) 193 | 194 | result <- dplyr::bind_cols(item=item, result) 195 | 196 | # reorder columns to match standalone predict 197 | perm <- c(matrix(seq_len(k * 2), ncol=2, byrow=TRUE)) + 1 198 | as.data.frame(result[c(1, perm)]) 199 | }, 200 | 201 | get_model_url=function() 202 | { 203 | paste0(self$service_url, "/api/models/", self$id) 204 | }, 205 | 206 | print=function(...) 
207 | { 208 | cat("Description:", self$description, "\n") 209 | cat("Endpoint:", self$get_model_url(), "\n") 210 | cat("Creation time:", format(self$creation_time, usetz=TRUE), "\n") 211 | cat("Status:", self$status, "\n") 212 | 213 | parms <- self$parameters 214 | class(parms) <- "simple.list" 215 | cat("\nModel training parameters:\n") 216 | print(parms, ...) 217 | 218 | if(!is.null(self$stats)) 219 | { 220 | stats <- self$stats 221 | stats <- list("Training duration"=stats$trainingDuration, 222 | "Total duration"=stats$totalDuration, 223 | "Included events"=stats$usageEventsParsing$successfulLinesCount, 224 | "Total events"=stats$usageEventsParsing$totalLinesCount, 225 | "Item count"=stats$numberOfUsageItems, 226 | "User count"=stats$numberOfUsers 227 | ) 228 | class(stats) <- "simple.list" 229 | cat("\nTraining statistics:\n") 230 | print(stats) 231 | 232 | ev <- self$stats$evaluation 233 | if(!is.null(ev)) 234 | { 235 | evalstats <- list("Evaluation duration"=ev$duration, 236 | "Total evaluation events"=ev$usageEventsParsing$totalLinesCount, 237 | "Included evaluation events"=ev$usageEventsParsing$successfulLinesCount) 238 | class(evalstats) <- "simple.list" 239 | cat("\nEvaluation statistics:\n") 240 | print(evalstats) 241 | 242 | divstats <- list("Total items recommended"=ev$metrics$diversityMetrics$totalItemsRecommended, 243 | "Unique items recommended"=ev$metrics$diversityMetrics$uniqueItemsRecommended, 244 | "Unique items in training set"=ev$metrics$diversityMetrics$uniqueItemsInTrainSet) 245 | class(divstats) <- "simple.list" 246 | cat("\nDiversity metrics:\n") 247 | print(divstats) 248 | cat("\n") 249 | div <- as.data.frame(dplyr::bind_rows(ev$metrics$diversityMetrics$percentileBuckets)) 250 | print(div) 251 | 252 | cat("\nPrecision metrics:\n") 253 | prec <- as.data.frame(dplyr::bind_rows(ev$metrics$precisionMetrics)) 254 | print(prec) 255 | } 256 | } 257 | invisible(NULL) 258 | } 259 | ), 260 | 261 | private=list( 262 | 263 | get_model=function() 264 | { 265 | private$model_op() 266 | }, 267 | 268 | train_model=function(parms, wait) 269 | { 270 | fit_args <- parms[!sapply(parms, is.null)] 271 | res <- private$model_op(body=fit_args, encode="json", http_verb="POST") 272 | self$id <- res$id 273 | 274 | if(wait) 275 | { 276 | for(i in 1:1000) 277 | { 278 | message(".", appendLF=FALSE) 279 | status <- res$modelStatus 280 | if(status == "Completed") 281 | break 282 | Sys.sleep(5) 283 | res <- private$model_op() 284 | } 285 | if(status != "Completed") 286 | warning("\nTimed out waiting for model training to complete") 287 | else message("\nTraining complete") 288 | } 289 | res 290 | }, 291 | 292 | model_op=function(op="", ..., options=list(), headers=list(), 293 | key=self$admin_key, 294 | http_verb=c("GET", "PUT", "POST", "DELETE", "HEAD")) 295 | { 296 | url <- httr::parse_url(self$get_model_url()) 297 | url$path <- paste0(url$path, "/", op) 298 | url$query <- options 299 | headers <- httr::add_headers("x-api-key"=key, .headers=unlist(headers)) 300 | 301 | # call recommender service backend 302 | verb <- get(match.arg(http_verb), getNamespace("httr")) 303 | 304 | cont <- verb(url, ..., headers) 305 | httr::stop_for_status(cont) 306 | httr::content(cont) 307 | } 308 | )) 309 | 310 | 311 | 312 | -------------------------------------------------------------------------------- /tests/resources/sim_count1.csv: -------------------------------------------------------------------------------- 1 | 
"","DAF-00255","DAF-00280","DAF-00281","DAF-00283","DAF-00284","DAF-00288","DAF-00349","DAF-00350","DAF-00351","DAF-00367","DAF-00375","DAF-00385","DAF-00396","DAF-00399","DAF-00416","DAF-00419","DAF-00420","DAF-00437","DAF-00442","DAF-00443","DAF-00444","DAF-00448","DAF-00449","DAF-00450","DAF-00451","DAF-00460","DAF-00462","DAF-00464","DAF-00465","DAF-00482","DAF-00488","DAF-00491","DAF-00498","DAF-00499","DAF-00502","DAF-00503","DAF-00504","DAF-00512","DAF-00516","DAF-00517","DAF-00518","DC2-00001","DCF-00085","DCF-00086","DCF-00087","DCF-00104","DCF-00173","DCF-00197","DCF-00198","DCF-00199","DCF-00203","DCF-00204","DCF-00205","DCF-00206","DCF-00252","DCF-00253","DCF-00254","DDF-00078","DDF-00122","DHF-00533","DHF-00826","DHF-00847","DHF-00881","DHF-00890","DHF-00894","DHF-00904","DHF-00905","DHF-00907","DHF-00927","DHF-01029","DHF-01030","DHF-01031","DHF-01037","DHF-01038","DHF-01055","DHF-01056","DHF-01080","DHF-01159","DHF-01242","DHF-01331","DHF-01332","DHF-01333","DHF-01334","DHF-01406","DHF-01436","DHF-01437","DHF-01438","DHF-01439","DHF-01440","DHF-01441","DHF-01444","DHF-01512","DHF-01528","DHF-01529","DHF-01530","DHF-01540","DHF-01550","DQF-00248","DQF-00358","DQF-00362","DR5-00001" 2 | "DAF-00255",31,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,14,9,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,1,6,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,8,1,0,12,0,0,0,0,1,0,0,1,0,2,0,0,0,0,0,2,15,0,0,1,0,16,0 3 | "DAF-00280",0,29,5,4,5,6,5,4,2,0,1,1,3,4,3,1,0,1,1,3,4,0,0,2,0,1,1,1,0,4,1,1,1,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,22,0,0,0 4 | "DAF-00281",0,5,55,6,6,13,12,5,4,2,1,0,2,1,3,3,0,0,4,5,8,6,0,1,2,1,2,1,0,3,2,2,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,0,0,0,0,0,0,2,1,1,1,0,0,0,0,1,0,1,0,0,2,1,0,1,0,0,0,0,2,3,0,0,0,1,39,0,0,0 5 | "DAF-00283",0,4,6,43,5,11,14,7,4,0,1,1,2,4,2,1,1,0,3,3,4,3,2,2,0,0,0,0,1,6,0,3,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,25,0,0,0 6 | "DAF-00284",0,5,6,5,26,8,7,4,0,1,0,0,0,0,2,2,0,0,2,2,4,0,0,1,0,0,0,0,0,4,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,0,0 7 | "DAF-00288",0,6,13,11,8,323,82,24,26,3,12,2,14,3,20,23,20,5,26,8,10,24,11,18,4,22,2,6,2,23,12,18,1,2,5,4,5,1,2,4,4,1,2,2,1,1,1,3,3,2,2,0,0,1,0,1,0,1,1,0,1,3,0,0,0,0,0,0,0,1,4,1,1,4,1,0,0,6,2,0,2,0,0,7,1,1,0,1,2,1,0,3,5,0,3,2,3,210,6,0,1 8 | "DAF-00349",0,5,12,14,7,82,347,58,35,5,24,4,4,6,15,19,6,3,35,8,9,30,2,5,2,19,1,5,0,46,10,15,5,4,10,6,5,0,0,0,0,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,4,0,0,0,2,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,1,3,0,0,0,0,4,3,0,0,2,0,0,1,1,2,0,0,1,1,242,1,0,1 9 | "DAF-00350",0,4,5,7,4,24,58,97,24,0,4,2,2,3,3,2,2,0,10,3,4,10,0,2,3,5,2,0,1,16,4,4,0,0,1,0,2,0,0,0,0,1,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,62,1,0,0 10 | "DAF-00351",0,2,4,4,0,26,35,24,103,0,4,2,5,5,3,4,5,1,15,7,6,13,8,6,5,11,2,0,1,20,5,3,2,6,2,0,4,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,0,0,0,1,0,0,0,0,1,0,0,1,0,62,2,0,0 11 | "DAF-00367",0,0,2,0,1,3,5,0,0,26,2,0,2,0,4,2,2,1,6,0,0,2,1,0,0,4,1,1,0,0,3,1,1,0,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,0,0 12 | 
"DAF-00375",1,1,1,1,0,12,24,4,4,2,71,3,0,1,4,5,1,1,4,1,2,3,1,0,0,14,1,2,0,4,1,0,1,0,0,2,1,0,1,1,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,2,0,1,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,47,0,2,0 13 | "DAF-00385",0,1,0,1,0,2,4,2,2,0,3,23,1,2,2,1,1,0,3,1,0,2,1,2,1,2,0,0,2,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,13,0,0,0 14 | "DAF-00396",0,3,2,2,0,14,4,2,5,2,0,1,44,3,1,3,7,0,5,1,2,7,3,6,3,3,0,0,2,3,0,3,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,2,0,0,2,0,0,0,0,0,0,0,0,0,2,1,0,0,0,0,2,0,0,0,1,0,0,0,0,0,0,0,0,0,28,1,0,0 15 | "DAF-00399",0,4,1,4,0,3,6,3,5,0,1,2,3,22,3,0,1,0,4,2,5,1,0,1,0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,13,1,0,0 16 | "DAF-00416",0,3,3,2,2,20,15,3,3,4,4,2,1,3,152,13,2,1,4,0,2,26,4,6,2,7,2,1,1,5,3,5,2,1,3,0,3,0,0,7,1,1,2,1,1,0,0,0,0,0,0,0,0,1,1,2,0,0,2,0,2,4,0,0,0,0,0,0,0,0,3,0,0,1,4,0,0,5,7,0,0,0,0,12,1,0,2,0,0,0,0,1,1,1,6,0,3,119,3,3,0 17 | "DAF-00419",1,1,3,1,2,23,19,2,4,2,5,1,3,0,13,120,7,1,5,1,3,9,2,3,2,8,1,2,1,3,1,6,0,0,2,3,3,0,0,0,2,1,1,0,0,1,0,1,1,1,1,1,1,2,0,0,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,2,3,0,0,0,0,4,0,0,0,1,1,0,0,1,1,0,0,0,0,94,4,1,0 18 | "DAF-00420",0,0,0,1,0,20,6,2,5,2,1,1,7,1,2,7,64,2,9,0,0,4,2,4,1,5,1,0,1,5,0,4,1,0,1,0,1,0,0,1,0,0,1,0,0,1,0,1,1,1,1,0,1,1,2,0,0,0,0,1,0,2,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,4,1,0,1,1,1,1,1,0,1,1,1,0,0,0,0,0,1,0,35,0,0,0 19 | "DAF-00437",0,1,0,0,0,5,3,0,1,1,1,0,0,0,1,1,2,25,1,1,0,3,0,0,1,3,0,0,0,2,3,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,3,0,0,0,0,1,1,1,0,0,0,0,0,0,20,2,0,0 20 | "DAF-00442",0,1,4,3,2,26,35,10,15,6,4,3,5,4,4,5,9,1,132,1,2,17,1,11,3,11,0,0,0,8,7,4,2,2,5,2,3,0,0,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,2,0,1,1,0,0,0,3,1,0,1,0,0,4,0,2,0,0,1,1,0,2,3,0,0,0,2,80,4,1,0 21 | "DAF-00443",0,3,5,3,2,8,8,3,7,0,1,1,1,2,0,1,0,1,1,27,5,1,2,1,1,1,1,0,0,6,0,0,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,1,0,0 22 | "DAF-00444",0,4,8,4,4,10,9,4,6,0,2,0,2,5,2,3,0,0,2,5,37,2,1,2,2,2,1,1,0,6,0,1,0,2,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,24,0,0,0 23 | "DAF-00448",0,0,6,3,0,24,30,10,13,2,3,2,7,1,26,9,4,3,17,1,2,194,21,40,12,10,0,1,1,9,9,2,4,0,1,5,2,0,0,1,0,0,2,0,0,1,0,0,0,0,0,0,0,1,0,0,0,3,0,0,0,2,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,2,0,0,0,0,2,2,1,0,1,2,1,1,0,2,0,0,0,1,130,3,0,0 24 | "DAF-00449",0,0,0,2,0,11,2,0,8,1,1,1,3,0,4,2,2,0,1,2,1,21,50,14,5,5,0,2,1,0,1,1,0,1,1,0,1,0,0,1,0,0,2,2,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,1,0,4,0,0,0,0,0,0,0,0,0,0,1,0,0,26,1,0,0 25 | "DAF-00450",0,2,1,2,1,18,5,2,6,0,0,2,6,1,6,3,4,0,11,1,2,40,14,85,7,6,0,1,2,6,1,1,0,1,1,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,1,0,0,0,0,0,0,2,0,0,6,0,1,1,1,0,0,2,2,0,0,0,0,1,0,0,0,0,1,0,0,1,1,0,0,0,1,42,2,0,0 26 | "DAF-00451",0,0,2,0,0,4,2,3,5,0,0,1,3,0,2,2,1,1,3,1,2,12,5,7,39,1,2,2,4,2,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,1,0,27,1,0,0 27 | 
"DAF-00460",0,1,1,0,0,22,19,5,11,4,14,2,3,0,7,8,5,3,11,1,2,10,5,6,1,102,1,7,8,6,4,2,0,0,0,1,0,0,0,1,1,0,1,0,0,0,1,0,0,0,1,0,0,1,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,71,1,0,0 28 | "DAF-00462",0,1,2,0,0,2,1,2,2,1,1,0,0,1,2,1,1,0,0,1,1,0,0,0,2,1,15,3,4,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0 29 | "DAF-00464",0,1,1,0,0,6,5,0,0,1,2,0,0,0,1,2,0,0,0,0,1,1,2,1,2,7,3,36,6,1,0,1,0,0,2,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,1,1,0,0,29,2,0,0 30 | "DAF-00465",0,0,0,1,0,2,0,1,1,0,0,2,2,0,1,1,1,0,0,0,0,1,1,2,4,8,4,6,23,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,14,0,0,0 31 | "DAF-00482",1,4,3,6,4,23,46,16,20,0,4,0,3,1,5,3,5,2,8,6,6,9,0,6,2,6,0,1,0,115,1,3,1,0,3,2,1,0,0,2,0,0,0,0,0,0,1,2,2,2,0,0,0,1,0,0,0,2,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,2,0,1,0,0,0,0,0,0,1,0,0,0,2,2,0,2,0,0,80,1,1,0 32 | "DAF-00488",0,1,2,0,0,12,10,4,5,3,1,0,0,0,3,1,0,3,7,0,0,9,1,1,1,4,1,0,0,1,50,2,3,3,1,2,1,0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,0,0,0,6,0,1,0,0,1,1,1,0,0,0,1,0,0,32,0,0,1 33 | "DAF-00491",0,1,2,3,0,18,15,4,3,1,0,1,3,1,5,6,4,1,4,0,1,2,1,1,0,2,0,1,0,3,2,58,0,0,0,2,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,2,1,2,0,1,0,0,0,0,0,1,2,0,2,0,0,45,0,2,0 34 | "DAF-00498",0,1,0,0,0,1,5,0,2,1,1,0,0,0,2,0,1,1,2,0,0,4,0,0,0,0,0,0,0,1,3,0,26,5,0,1,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,19,0,0,0 35 | "DAF-00499",0,0,0,0,0,2,4,0,6,0,0,0,0,0,1,0,0,0,2,0,2,0,1,1,0,0,0,0,0,0,3,0,5,20,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,15,1,0,0 36 | "DAF-00502",0,0,0,1,1,5,10,1,2,1,0,0,1,0,3,2,1,0,5,2,0,1,1,1,0,0,1,2,0,3,1,0,0,0,39,6,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,1,1,0,1,1,0,0,0,0,1,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,1,29,0,0,0 37 | "DAF-00503",0,0,0,0,0,4,6,0,0,1,2,1,1,0,0,3,0,0,2,0,0,5,0,1,3,1,0,1,0,2,2,2,1,0,6,33,4,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,23,0,0,0 38 | "DAF-00504",0,2,0,0,0,5,5,2,4,1,1,0,0,1,3,3,1,0,3,2,1,2,1,1,0,0,1,0,0,1,1,0,0,1,6,4,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1,3,0,0,0,0,0,0,0,1,2,0,0,0,2,13,0,1,0 39 | "DAF-00512",2,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,19,2,2,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,7,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,9,0,0,0,1,7,2 40 | "DAF-00516",0,0,0,0,0,2,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,41,20,24,0,1,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,10,1,0,5,0,0,1,0,0,0,0,0,0,0,0,0,0,3,6,0,28,0,0,2,0,13,1 41 | "DAF-00517",14,0,0,0,0,4,0,0,1,0,1,0,0,0,7,0,1,0,2,0,0,1,1,0,0,1,0,1,0,2,1,0,2,0,0,0,0,2,20,191,50,0,2,2,0,2,3,1,2,1,2,0,4,2,1,0,0,0,4,1,5,0,0,0,1,0,0,0,1,0,2,1,1,0,58,8,1,58,0,2,3,4,1,1,1,0,1,1,0,0,0,4,15,8,134,0,1,14,5,77,1 42 | 
"DAF-00518",9,0,0,0,0,4,0,0,0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,1,2,0,0,0,0,4,24,50,112,0,1,0,1,1,0,2,1,1,2,0,2,0,0,0,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0,1,1,1,34,3,1,24,1,1,4,1,3,1,0,0,0,0,0,0,0,0,3,3,74,0,1,4,4,53,2 43 | "DC2-00001",0,0,0,0,0,1,1,1,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,1,0,0 44 | "DCF-00085",0,1,1,1,1,2,0,1,1,0,0,0,0,0,2,1,1,0,1,0,1,2,2,2,0,1,0,0,0,0,0,1,0,0,0,1,0,0,1,2,1,0,51,27,23,1,0,2,3,0,6,3,5,8,3,1,0,0,2,1,0,0,2,0,0,0,0,0,0,0,2,0,0,0,2,1,0,1,1,1,2,1,0,1,1,0,1,1,1,0,0,3,4,0,0,0,3,4,2,1,0 45 | "DCF-00086",0,0,0,0,0,2,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,2,0,0,27,43,26,1,0,2,2,2,6,2,5,3,1,0,0,0,5,2,0,0,0,0,0,0,0,0,0,0,2,0,0,1,3,1,0,2,0,0,1,1,1,0,3,0,2,0,1,0,0,1,3,0,1,2,3,2,1,1,0 46 | "DCF-00087",0,1,1,1,1,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,23,26,33,0,0,3,2,2,6,2,3,3,1,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,3,1,0,1,0,0,2,1,0,0,2,0,0,0,0,0,0,3,4,0,0,0,0,1,0,2,0 47 | "DCF-00104",2,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,1,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,1,2,1,0,1,1,0,66,4,1,1,1,0,0,1,1,0,14,0,0,3,0,1,0,12,0,0,0,0,0,5,0,4,2,2,1,0,0,0,0,0,0,3,0,0,1,25,21,8,12,3,2,3,2,4,1,2,0,2,7,1,0,0 48 | "DCF-00173",0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,4,30,0,0,0,1,1,0,1,0,6,1,0,1,0,0,1,2,0,0,0,0,0,2,0,3,2,2,0,1,0,0,0,0,1,2,0,0,0,5,4,6,11,0,0,0,1,2,0,1,0,2,7,0,1,0 49 | "DCF-00197",0,0,0,0,0,3,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,2,0,2,2,3,1,0,38,26,26,7,5,5,6,0,2,0,0,1,8,0,0,1,0,0,0,0,0,0,1,1,1,1,0,1,1,0,0,0,5,9,5,7,0,0,1,1,0,0,0,0,2,2,0,3,1,0,1,0,2,1 50 | "DCF-00198",0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,0,1,2,1,0,3,2,2,1,0,26,43,27,5,4,6,5,0,1,0,0,1,6,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,11,13,7,8,0,1,1,0,0,0,0,0,4,4,0,1,1,0,2,0,1,4 51 | "DCF-00199",0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,1,0,0,2,2,1,0,26,27,36,7,5,9,5,0,0,0,0,1,6,0,0,0,0,0,0,0,0,0,1,2,1,1,0,0,0,0,0,0,7,10,5,6,0,0,1,0,0,0,0,0,2,2,0,1,1,0,0,0,0,3 52 | "DCF-00203",0,1,1,1,1,2,0,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,2,2,0,6,6,6,0,1,7,5,7,37,16,18,13,0,1,0,0,0,1,0,0,1,0,0,0,0,0,1,2,0,2,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,2,4,1,4,1,2,1,0,2,1 53 | "DCF-00204",0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,2,2,0,1,5,4,5,16,25,15,11,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,2,0,1,1,0,0,0,0,0,0,2,0,1,1,0,0,0,1,2 54 | "DCF-00205",0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,4,2,0,5,5,3,1,0,5,6,9,18,15,35,12,2,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,0,2,0,0,0,0,1,0,0,4,4,0,2,1,1,0,0,0,0,0,0,2,4,1,4,0,0,1,0,1,2 55 | "DCF-00206",0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,1,0,0,0,1,0,0,1,1,0,1,0,0,0,0,0,0,0,2,0,0,8,3,3,1,1,6,5,5,13,11,12,29,0,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,1,2,0,0,1,0,0,0,0,0,1,2,0,2,0,0,1,0,1,2 56 | "DCF-00252",0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,3,1,1,0,0,0,0,0,0,0,2,0,31,0,0,0,3,0,0,0,0,0,0,0,0,1,0,0,4,1,1,1,0,0,0,0,0,0,0,0,0,0,6,5,4,4,0,1,0,0,0,0,0,0,2,1,0,0,1 57 | 
"DCF-00253",1,0,0,0,0,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,14,6,2,1,0,1,1,0,0,0,85,24,0,2,1,1,0,17,0,0,0,0,0,11,2,4,1,1,3,0,0,0,1,1,0,2,2,1,0,44,23,18,22,0,0,1,2,3,1,0,0,0,3,0,0,2 58 | "DCF-00254",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,24,29,0,1,0,0,0,6,0,0,0,0,0,7,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,15,7,8,10,0,0,1,1,1,0,0,0,2,0,0,0,0 59 | "DDF-00078",1,0,1,0,0,1,4,0,0,0,2,0,1,0,0,1,0,1,0,0,2,3,0,2,0,2,1,0,0,2,1,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0 60 | "DDF-00122",6,0,0,0,0,1,0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,1,0,2,5,4,3,1,1,1,1,0,0,1,1,3,2,1,0,74,2,2,0,0,1,0,0,2,0,2,1,10,2,5,2,4,0,0,9,0,2,2,0,3,0,0,2,1,2,0,0,0,5,5,0,5,1,7,0,1,2,2 61 | "DHF-00533",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,2,0,0,0,8,6,6,1,2,1,2,0,1,0,1,2,18,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,3,2,5,5,0,0,0,0,1,0,0,0,0,0,0,2,0,0,1,0,1,2 62 | "DHF-00826",2,0,0,0,0,1,0,0,0,0,1,1,0,0,2,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,2,1,5,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,2,0,28,2,0,0,0,0,0,0,0,0,0,0,0,0,7,4,0,6,1,0,0,0,0,0,0,1,0,1,0,1,0,2,3,4,8,0,0,4,2,5,0 63 | "DHF-00847",0,1,2,0,1,3,2,1,1,0,1,0,1,0,4,1,2,1,3,0,0,2,1,0,1,0,0,1,0,1,2,1,4,1,2,2,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,2,33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,1,0,0,0,25,1,0,0 64 | "DHF-00881",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,12,2,1,0,0,1,0,0,0,0,17,6,0,0,0,0,0,30,0,0,0,0,0,11,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,8,4,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0 65 | "DHF-00890",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,18,7,13,13,4,0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 66 | "DHF-00894",0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,22,15,15,5,0,0,0,0,0,0,0,0,12,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0 67 | "DHF-00904",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,15,26,21,3,0,0,0,0,0,0,0,0,18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 68 | "DHF-00905",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,13,15,21,28,2,0,0,0,0,0,0,0,0,17,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0 69 | "DHF-00907",0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,2,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,4,5,3,2,12,0,0,2,0,0,2,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0 70 | "DHF-00927",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,5,2,0,0,0,1,0,0,0,0,11,7,0,2,0,0,0,11,0,0,0,0,0,24,1,3,1,1,1,0,0,0,0,0,0,0,0,0,0,3,2,3,3,0,0,1,0,0,0,0,0,3,2,0,2,0 71 | "DHF-01029",0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,41,33,23,19,31,0,0,0,0,0,0,1,1,1,0,0,0,0,1,0,0,0,1,1,0,0,0,13,3,0,0,0 72 | 
"DHF-01030",0,0,2,1,0,4,2,1,0,0,0,0,0,0,3,1,0,0,2,0,0,3,1,6,0,1,0,1,0,0,0,0,0,0,1,0,1,0,0,2,0,0,2,2,1,4,3,1,2,2,0,0,0,0,4,4,1,0,10,2,0,0,1,0,0,0,0,2,3,33,141,29,58,76,2,0,0,1,0,0,2,1,1,1,2,2,3,5,0,0,0,4,5,0,1,0,61,6,2,3,2 73 | "DHF-01031",0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,2,2,1,0,1,2,0,2,0,1,1,0,0,2,0,0,0,0,0,0,0,0,0,1,23,29,43,25,30,0,0,0,1,0,0,0,0,0,0,3,0,1,2,0,0,0,1,2,0,2,0,10,1,0,0,0 74 | "DHF-01037",0,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,0,2,2,1,0,1,0,0,0,0,1,1,0,0,5,0,0,0,0,0,0,0,0,0,1,19,58,25,75,52,0,0,0,0,0,0,1,0,1,0,1,1,1,3,0,0,0,1,4,0,0,0,40,0,1,0,1 75 | "DHF-01038",0,0,1,1,0,4,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,1,3,0,0,2,2,0,0,1,0,0,0,0,2,1,31,76,30,52,93,0,0,0,0,0,0,1,1,1,0,1,0,0,2,0,0,0,2,4,0,1,0,50,2,0,0,0 76 | "DHF-01055",8,0,0,0,0,1,0,0,0,0,1,0,0,0,4,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,7,10,58,34,0,2,3,3,0,1,1,0,0,0,0,0,0,0,0,0,0,4,0,7,0,0,0,0,0,0,0,0,0,2,0,0,0,121,11,0,72,1,0,0,1,1,1,0,0,2,0,0,1,0,1,2,3,83,0,1,1,3,49,2 77 | "DHF-01056",1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,8,3,0,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,11,20,0,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,0,0,2,2,3,0 78 | "DHF-01080",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,7,12,18,17,3,0,0,0,0,0,0,0,0,21,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 79 | "DHF-01159",12,0,0,0,0,6,1,0,0,0,1,1,2,0,5,2,0,1,3,1,0,0,0,2,0,1,0,0,0,1,0,1,0,0,0,0,0,8,5,58,24,0,1,2,1,0,0,0,0,0,0,0,0,0,0,1,0,1,9,0,6,1,0,0,1,0,0,0,0,0,1,1,0,0,72,9,0,142,1,1,2,2,1,3,0,1,1,0,0,1,0,4,7,3,65,0,1,12,6,45,1 80 | "DHF-01242",0,0,1,0,0,2,3,2,0,5,3,0,1,0,7,3,4,1,1,0,1,2,1,2,0,2,0,0,0,2,3,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,30,0,0,0,0,4,0,0,1,0,0,0,0,1,1,0,1,0,0,16,1,1,0 81 | "DHF-01331",0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,2,1,0,1,0,0,0,1,5,11,7,0,2,4,2,0,0,0,0,2,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,30,17,13,13,0,0,1,0,0,0,0,0,2,2,0,0,0,0,0,0,1,3 82 | "DHF-01332",0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,1,3,4,0,2,1,2,3,2,9,13,10,2,1,4,3,0,2,0,0,2,2,0,0,0,0,0,0,0,0,0,1,2,0,1,1,0,0,0,2,0,17,51,16,17,0,3,1,0,2,0,0,0,2,5,0,3,0,1,4,0,2,7 83 | "DHF-01333",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,1,0,0,0,4,1,0,1,1,1,0,0,5,7,5,0,0,0,1,0,2,0,0,0,5,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,2,0,13,16,33,11,0,0,0,1,1,1,0,0,2,3,0,2,0,0,3,0,1,3 84 | "DHF-01334",1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,3,0,0,1,0,0,0,7,8,6,0,2,2,2,0,1,0,0,3,5,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,0,0,1,0,13,17,11,30,0,1,0,0,0,0,0,0,1,4,0,0,0,0,2,0,2,6 85 | "DHF-01406",0,1,2,0,0,7,4,0,2,3,2,0,2,0,12,4,1,3,4,0,1,2,4,1,2,2,1,2,2,0,6,2,8,3,5,0,3,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,3,4,0,0,0,0,71,0,0,0,0,2,1,1,0,1,0,0,0,1,48,0,0,0 86 | "DHF-01436",0,0,1,0,0,1,3,0,0,0,0,0,0,0,1,0,1,0,0,0,1,2,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,3,2,25,5,0,1,0,0,1,1,0,6,44,15,0,0,0,0,0,8,0,0,0,1,0,3,0,2,3,1,1,0,0,1,0,0,0,3,0,1,0,91,28,26,19,3,1,2,4,6,0,0,0,0,5,1,0,1 87 | 
"DHF-01437",1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,21,4,1,1,1,0,1,0,1,5,23,7,0,2,0,1,0,4,0,0,0,0,0,2,0,2,0,1,0,0,0,0,1,0,1,1,0,0,0,28,60,13,18,1,1,1,1,1,1,0,0,1,5,0,0,1 88 | "DHF-01438",0,0,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,2,0,8,6,1,0,0,0,0,0,0,4,18,8,0,1,0,0,0,1,0,0,0,1,0,3,0,3,1,1,0,2,0,1,1,1,0,0,1,0,0,26,13,48,14,1,0,1,1,1,0,0,0,3,3,0,1,0 89 | "DHF-01439",2,0,0,0,0,1,2,0,1,0,0,0,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,12,11,0,0,0,1,0,0,0,4,22,10,0,2,1,1,0,1,0,0,0,0,0,3,1,5,2,3,2,0,0,0,0,0,0,2,1,0,0,19,18,14,50,0,0,0,0,0,1,1,0,1,5,0,0,0 90 | "DHF-01440",0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,1,1,1,0,1,2,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,3,1,1,0,19,8,4,3,3,0,0,1,0,7,1,0,0 91 | "DHF-01441",0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,1,0,0,8,13,3,0,0,0,0,1,0,2,1,0,0 92 | "DHF-01444",0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,1,1,0,4,3,9,0,0,0,0,1,0,3,0,0,0 93 | "DHF-01512",0,0,2,2,0,3,1,0,0,0,0,0,0,0,1,1,0,0,2,0,0,0,0,1,0,0,0,0,0,2,0,1,0,0,0,0,1,0,3,4,0,0,3,1,3,2,1,2,4,2,2,0,2,1,0,2,1,0,5,0,2,0,0,0,0,0,0,0,0,1,4,1,1,2,1,0,0,4,1,2,2,2,1,0,4,1,1,0,3,0,0,121,121,0,0,2,4,9,2,2,3 94 | "DHF-01528",0,0,3,2,0,5,2,0,1,0,0,0,0,1,1,1,0,0,3,0,0,2,0,1,0,0,0,0,0,2,0,2,0,0,0,0,2,0,6,15,3,0,4,3,4,4,2,2,4,2,4,2,4,2,0,3,1,0,5,0,3,0,0,0,0,0,0,0,0,1,5,2,4,4,2,0,0,7,1,2,5,3,4,1,6,1,1,0,3,0,0,121,158,1,8,3,5,17,2,4,7 95 | "DHF-01529",2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,4,0,8,3,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,4,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,3,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,18,9,0,0,2,0,2,0 96 | "DHF-01530",15,0,0,0,0,3,0,0,0,0,1,0,0,0,6,0,0,0,0,0,0,0,1,0,0,0,0,1,0,2,1,2,0,0,0,0,0,9,28,134,74,0,0,1,0,2,1,3,1,1,4,1,4,2,0,0,0,0,5,2,8,0,0,0,1,0,0,0,0,0,1,2,0,1,83,12,0,65,1,0,3,2,0,0,0,0,0,1,0,0,0,0,8,9,219,0,1,9,11,104,0 97 | "DHF-01540",0,0,0,0,0,2,1,0,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,0,1,1,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,2,3,0,0,23,0,2,2,0,0 98 | "DHF-01550",0,0,1,1,0,3,1,0,0,0,0,0,0,0,3,0,0,0,2,0,0,1,0,1,0,1,0,0,0,0,0,0,0,0,1,0,2,0,0,1,1,0,3,3,0,2,2,0,0,0,2,0,0,0,2,0,2,0,7,0,0,0,0,0,0,0,0,1,3,13,61,10,40,50,1,0,0,1,0,0,1,0,0,1,0,1,3,1,0,0,0,4,5,0,1,0,84,2,2,2,0 99 | "DQF-00248",1,22,39,25,16,210,242,62,62,16,47,13,28,13,119,94,35,20,80,16,24,130,26,42,27,71,11,29,14,80,32,45,19,15,29,23,13,0,2,14,4,4,4,2,1,7,7,1,2,0,1,0,1,1,1,3,0,10,0,1,4,25,0,0,0,0,0,0,2,3,6,1,0,2,1,2,0,12,16,0,4,3,2,48,5,5,3,5,7,2,3,9,17,2,9,2,2,1071,24,4,3 100 | "DQF-00358",0,0,0,0,0,6,1,1,2,0,0,0,1,1,3,4,0,2,4,1,0,3,1,2,1,1,0,2,0,1,0,0,0,1,0,0,0,1,0,5,4,1,2,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,1,0,0,0,0,0,0,0,0,2,0,1,0,3,2,0,6,1,0,0,0,0,0,1,0,0,0,1,1,0,2,2,0,11,2,2,24,60,12,1 101 | "DQF-00362",16,0,0,0,0,0,0,0,0,0,2,0,0,0,3,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,0,0,1,7,13,77,53,0,1,1,2,0,1,2,1,0,2,1,1,1,0,0,0,0,2,1,5,0,0,0,0,0,0,0,2,0,3,0,0,0,49,3,0,45,1,1,2,1,2,0,0,0,1,0,0,0,0,2,4,2,104,0,2,4,12,149,0 102 | 
"DR5-00001",0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,1,1,2,0,0,0,0,0,0,1,4,3,1,2,2,2,1,2,0,0,2,2,0,0,0,0,0,0,0,0,0,0,2,0,1,0,2,0,0,1,0,3,7,3,6,0,1,1,0,0,0,0,0,3,7,0,0,0,0,3,1,0,29 103 | -------------------------------------------------------------------------------- /tests/resources/sim_count3.csv: -------------------------------------------------------------------------------- 1 | "","DAF-00255","DAF-00280","DAF-00281","DAF-00283","DAF-00284","DAF-00288","DAF-00349","DAF-00350","DAF-00351","DAF-00367","DAF-00375","DAF-00385","DAF-00396","DAF-00399","DAF-00416","DAF-00419","DAF-00420","DAF-00437","DAF-00442","DAF-00443","DAF-00444","DAF-00448","DAF-00449","DAF-00450","DAF-00451","DAF-00460","DAF-00462","DAF-00464","DAF-00465","DAF-00482","DAF-00488","DAF-00491","DAF-00498","DAF-00499","DAF-00502","DAF-00503","DAF-00504","DAF-00512","DAF-00516","DAF-00517","DAF-00518","DC2-00001","DCF-00085","DCF-00086","DCF-00087","DCF-00104","DCF-00173","DCF-00197","DCF-00198","DCF-00199","DCF-00203","DCF-00204","DCF-00205","DCF-00206","DCF-00252","DCF-00253","DCF-00254","DDF-00078","DDF-00122","DHF-00533","DHF-00826","DHF-00847","DHF-00881","DHF-00890","DHF-00894","DHF-00904","DHF-00905","DHF-00907","DHF-00927","DHF-01029","DHF-01030","DHF-01031","DHF-01037","DHF-01038","DHF-01055","DHF-01056","DHF-01080","DHF-01159","DHF-01242","DHF-01331","DHF-01332","DHF-01333","DHF-01334","DHF-01406","DHF-01436","DHF-01437","DHF-01438","DHF-01439","DHF-01440","DHF-01441","DHF-01444","DHF-01512","DHF-01528","DHF-01529","DHF-01530","DHF-01540","DHF-01550","DQF-00248","DQF-00358","DQF-00362","DR5-00001" 2 | "DAF-00255",31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,0,0,0,0,16,0 3 | "DAF-00280",0,29,5,4,5,6,5,4,0,0,0,0,3,4,3,0,0,0,0,3,4,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,22,0,0,0 4 | "DAF-00281",0,5,55,6,6,13,12,5,4,0,0,0,0,0,3,3,0,0,4,5,8,6,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,39,0,0,0 5 | "DAF-00283",0,4,6,43,5,11,14,7,4,0,0,0,0,4,0,0,0,0,3,3,4,3,0,0,0,0,0,0,0,6,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0 6 | "DAF-00284",0,5,6,5,26,8,7,4,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,0,0 7 | "DAF-00288",0,6,13,11,8,323,82,24,26,3,12,0,14,3,20,23,20,5,26,8,10,24,11,18,4,22,0,6,0,23,12,18,0,0,5,4,5,0,0,4,4,0,0,0,0,0,0,3,3,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,4,0,0,4,0,0,0,6,0,0,0,0,0,7,0,0,0,0,0,0,0,3,5,0,3,0,3,210,6,0,0 8 | "DAF-00349",0,5,12,14,7,82,347,58,35,5,24,4,4,6,15,19,6,3,35,8,9,30,0,5,0,19,0,5,0,46,10,15,5,4,10,6,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,4,3,0,0,0,0,0,0,0,0,0,0,0,0,242,0,0,0 9 | "DAF-00350",0,4,5,7,4,24,58,97,24,0,4,0,0,3,3,0,0,0,10,3,4,10,0,0,3,5,0,0,0,16,4,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,62,0,0,0 10 | 
"DAF-00351",0,0,4,4,0,26,35,24,103,0,4,0,5,5,3,4,5,0,15,7,6,13,8,6,5,11,0,0,0,20,5,3,0,6,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,62,0,0,0 11 | "DAF-00367",0,0,0,0,0,3,5,0,0,26,0,0,0,0,4,0,0,0,6,0,0,0,0,0,0,4,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,0,0 12 | "DAF-00375",0,0,0,0,0,12,24,4,4,0,71,3,0,0,4,5,0,0,4,0,0,3,0,0,0,14,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47,0,0,0 13 | "DAF-00385",0,0,0,0,0,0,4,0,0,0,3,23,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,0,0,0 14 | "DAF-00396",0,3,0,0,0,14,4,0,5,0,0,0,44,3,0,3,7,0,5,0,0,7,3,6,3,3,0,0,0,3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28,0,0,0 15 | "DAF-00399",0,4,0,4,0,3,6,3,5,0,0,0,3,22,3,0,0,0,4,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,0,0,0 16 | "DAF-00416",0,3,3,0,0,20,15,3,3,4,4,0,0,3,152,13,0,0,4,0,0,26,4,6,0,7,0,0,0,5,3,5,0,0,3,0,3,0,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,3,0,0,0,4,0,0,5,7,0,0,0,0,12,0,0,0,0,0,0,0,0,0,0,6,0,3,119,3,3,0 17 | "DAF-00419",0,0,3,0,0,23,19,0,4,0,5,0,3,0,13,120,7,0,5,0,3,9,0,3,0,8,0,0,0,3,0,6,0,0,0,3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,94,4,0,0 18 | "DAF-00420",0,0,0,0,0,20,6,0,5,0,0,0,7,0,0,7,64,0,9,0,0,4,0,4,0,5,0,0,0,5,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,35,0,0,0 19 | "DAF-00437",0,0,0,0,0,5,3,0,0,0,0,0,0,0,0,0,0,25,0,0,0,3,0,0,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0 20 | "DAF-00442",0,0,4,3,0,26,35,10,15,6,4,3,5,4,4,5,9,0,132,0,0,17,0,11,3,11,0,0,0,8,7,4,0,0,5,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,4,0,0,0,0,0,0,0,0,3,0,0,0,0,80,4,0,0 21 | "DAF-00443",0,3,5,3,0,8,8,3,7,0,0,0,0,0,0,0,0,0,0,27,5,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,0,0 22 | "DAF-00444",0,4,8,4,4,10,9,4,6,0,0,0,0,5,0,3,0,0,0,5,37,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,0,0,0 23 | "DAF-00448",0,0,6,3,0,24,30,10,13,0,3,0,7,0,26,9,4,3,17,0,0,194,21,40,12,10,0,0,0,9,9,0,4,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130,3,0,0 24 | "DAF-00449",0,0,0,0,0,11,0,0,8,0,0,0,3,0,4,0,0,0,0,0,0,21,50,14,5,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,26,0,0,0 25 | 
"DAF-00450",0,0,0,0,0,18,5,0,6,0,0,0,6,0,6,3,4,0,11,0,0,40,14,85,7,6,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,42,0,0,0 26 | "DAF-00451",0,0,0,0,0,4,0,3,5,0,0,0,3,0,0,0,0,0,3,0,0,12,5,7,39,0,0,0,4,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27,0,0,0 27 | "DAF-00460",0,0,0,0,0,22,19,5,11,4,14,0,3,0,7,8,5,3,11,0,0,10,5,6,0,102,0,7,8,6,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,71,0,0,0 28 | "DAF-00462",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,3,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0 29 | "DAF-00464",0,0,0,0,0,6,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,3,36,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,29,0,0,0 30 | "DAF-00465",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,8,4,6,23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,0,0,0 31 | "DAF-00482",0,4,3,6,4,23,46,16,20,0,4,0,3,0,5,3,5,0,8,6,6,9,0,6,0,6,0,0,0,115,0,3,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,80,0,0,0 32 | "DAF-00488",0,0,0,0,0,12,10,4,5,3,0,0,0,0,3,0,0,3,7,0,0,9,0,0,0,4,0,0,0,0,50,0,3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0 33 | "DAF-00491",0,0,0,3,0,18,15,4,3,0,0,0,3,0,5,6,4,0,4,0,0,0,0,0,0,0,0,0,0,3,0,58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,45,0,0,0 34 | "DAF-00498",0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,3,0,26,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,19,0,0,0 35 | "DAF-00499",0,0,0,0,0,0,4,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,5,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,15,0,0,0 36 | "DAF-00502",0,0,0,0,0,5,10,0,0,0,0,0,0,0,3,0,0,0,5,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,39,6,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,29,0,0,0 37 | "DAF-00503",0,0,0,0,0,4,6,0,0,0,0,0,0,0,0,3,0,0,0,0,0,5,0,0,3,0,0,0,0,0,0,0,0,0,6,33,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,0,0,0 38 | "DAF-00504",0,0,0,0,0,5,5,0,4,0,0,0,0,0,3,3,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,4,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,13,0,0,0 39 | "DAF-00512",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,19,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,9,0,0,0,0,7,0 40 | 
"DAF-00516",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41,20,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,3,6,0,28,0,0,0,0,13,0 41 | "DAF-00517",14,0,0,0,0,4,0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,191,50,0,0,0,0,0,3,0,0,0,0,0,4,0,0,0,0,0,4,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,58,8,0,58,0,0,3,4,0,0,0,0,0,0,0,0,0,4,15,8,134,0,0,14,5,77,0 42 | "DAF-00518",9,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,24,50,112,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,34,3,0,24,0,0,4,0,3,0,0,0,0,0,0,0,0,0,3,3,74,0,0,4,4,53,0 43 | "DC2-00001",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0 44 | "DCF-00085",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,51,27,23,0,0,0,3,0,6,3,5,8,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,0,0,3,4,0,0,0 45 | "DCF-00086",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27,43,26,0,0,0,0,0,6,0,5,3,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0 46 | "DCF-00087",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,26,33,0,0,3,0,0,6,0,3,3,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,0,0,0,0,0,0,0 47 | "DCF-00104",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,66,4,0,0,0,0,0,0,0,0,14,0,0,3,0,0,0,12,0,0,0,0,0,5,0,4,0,0,0,0,0,0,0,0,0,3,0,0,0,25,21,8,12,3,0,3,0,4,0,0,0,0,7,0,0,0 48 | "DCF-00173",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,4,30,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,5,4,6,11,0,0,0,0,0,0,0,0,0,7,0,0,0 49 | "DCF-00197",0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,38,26,26,7,5,5,6,0,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,9,5,7,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0 50 | "DCF-00198",0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,26,43,27,5,4,6,5,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,13,7,8,0,0,0,0,0,0,0,0,4,4,0,0,0,0,0,0,0,4 51 | "DCF-00199",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,26,27,36,7,5,9,5,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,10,5,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3 52 | "DCF-00203",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,6,6,0,0,7,5,7,37,16,18,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,4,0,0,0,0,0,0 53 | "DCF-00204",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,5,4,5,16,25,15,11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 54 | "DCF-00205",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,5,5,3,0,0,5,6,9,18,15,35,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,4,0,0,0,0,0,0,0,0,0,0,0,4,0,4,0,0,0,0,0,0 55 | 
"DCF-00206",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,3,3,0,0,6,5,5,13,11,12,29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 56 | "DCF-00252",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,31,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,6,5,4,4,0,0,0,0,0,0,0,0,0,0,0,0,0 57 | "DCF-00253",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,6,0,0,0,0,0,0,0,0,85,24,0,0,0,0,0,17,0,0,0,0,0,11,0,4,0,0,3,0,0,0,0,0,0,0,0,0,0,44,23,18,22,0,0,0,0,3,0,0,0,0,3,0,0,0 58 | "DCF-00254",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,24,29,0,0,0,0,0,6,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15,7,8,10,0,0,0,0,0,0,0,0,0,0,0,0,0 59 | "DDF-00078",0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0 60 | "DDF-00122",6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,5,4,3,0,0,0,0,0,0,0,0,3,0,0,0,74,0,0,0,0,0,0,0,0,0,0,0,10,0,5,0,4,0,0,9,0,0,0,0,3,0,0,0,0,0,0,0,0,5,5,0,5,0,7,0,0,0,0 61 | "DHF-00533",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,6,6,0,0,0,0,0,0,0,0,0,18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,5,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 62 | "DHF-00826",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28,0,0,0,0,0,0,0,0,0,0,0,0,0,7,4,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,8,0,0,4,0,5,0 63 | "DHF-00847",0,0,0,0,0,3,0,0,0,0,0,0,0,0,4,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0 64 | "DHF-00881",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,0,0,0,0,0,0,0,0,0,17,6,0,0,0,0,0,30,0,0,0,0,0,11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 65 | "DHF-00890",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18,7,13,13,4,0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 66 | "DHF-00894",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,22,15,15,5,0,0,0,0,0,0,0,0,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 67 | "DHF-00904",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,15,26,21,3,0,0,0,0,0,0,0,0,18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 68 | "DHF-00905",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,15,21,28,0,0,0,0,0,0,0,0,0,17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 69 | "DHF-00907",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,5,3,0,12,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 70 | "DHF-00927",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,11,7,0,0,0,0,0,11,0,0,0,0,0,24,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,3,3,0,0,0,0,0,0,0,0,3,0,0,0,0 71 | 
"DHF-01029",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41,33,23,19,31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,3,0,0,0 72 | "DHF-01030",0,0,0,0,0,4,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,3,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,3,0,0,0,0,0,0,0,4,4,0,0,10,0,0,0,0,0,0,0,0,0,3,33,141,29,58,76,0,0,0,0,0,0,0,0,0,0,0,0,3,5,0,0,0,4,5,0,0,0,61,6,0,3,0 73 | "DHF-01031",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23,29,43,25,30,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0 74 | "DHF-01037",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,19,58,25,75,52,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,4,0,0,0,40,0,0,0,0 75 | "DHF-01038",0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,31,76,30,52,93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,50,0,0,0,0 76 | "DHF-01055",8,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,10,58,34,0,0,3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,7,0,0,0,0,0,0,0,0,0,0,0,0,0,121,11,0,72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,83,0,0,0,3,49,0 77 | "DHF-01056",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,11,20,0,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,0,0,0,0,3,0 78 | "DHF-01080",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,12,18,17,3,0,0,0,0,0,0,0,0,21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 79 | "DHF-01159",12,0,0,0,0,6,0,0,0,0,0,0,0,0,5,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,5,58,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,72,9,0,142,0,0,0,0,0,3,0,0,0,0,0,0,0,4,7,3,65,0,0,12,6,45,0 80 | "DHF-01242",0,0,0,0,0,0,3,0,0,5,3,0,0,0,7,3,4,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,30,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,0,0 81 | "DHF-01331",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,11,7,0,0,4,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,30,17,13,13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3 82 | "DHF-01332",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,0,0,0,3,0,9,13,10,0,0,4,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17,51,16,17,0,3,0,0,0,0,0,0,0,5,0,3,0,0,4,0,0,7 83 | "DHF-01333",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,5,7,5,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,16,33,11,0,0,0,0,0,0,0,0,0,3,0,0,0,0,3,0,0,3 84 | "DHF-01334",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,7,8,6,0,0,0,0,0,0,0,0,3,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,17,11,30,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,6 85 | "DHF-01406",0,0,0,0,0,7,4,0,0,3,0,0,0,0,12,4,0,3,4,0,0,0,4,0,0,0,0,0,0,0,6,0,8,3,5,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,0,0,0,71,0,0,0,0,0,0,0,0,0,0,0,0,0,48,0,0,0 86 | 
"DHF-01436",0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,25,5,0,0,0,0,0,0,0,6,44,15,0,0,0,0,0,8,0,0,0,0,0,3,0,0,3,0,0,0,0,0,0,0,0,3,0,0,0,91,28,26,19,3,0,0,4,6,0,0,0,0,5,0,0,0 87 | "DHF-01437",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21,4,0,0,0,0,0,0,0,5,23,7,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28,60,13,18,0,0,0,0,0,0,0,0,0,5,0,0,0 88 | "DHF-01438",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,6,0,0,0,0,0,0,0,4,18,8,0,0,0,0,0,0,0,0,0,0,0,3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,26,13,48,14,0,0,0,0,0,0,0,0,3,3,0,0,0 89 | "DHF-01439",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,11,0,0,0,0,0,0,0,4,22,10,0,0,0,0,0,0,0,0,0,0,0,3,0,5,0,3,0,0,0,0,0,0,0,0,0,0,0,19,18,14,50,0,0,0,0,0,0,0,0,0,5,0,0,0 90 | "DHF-01440",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,19,8,4,3,3,0,0,0,0,7,0,0,0 91 | "DHF-01441",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,13,3,0,0,0,0,0,0,0,0,0,0 92 | "DHF-01444",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,3,9,0,0,0,0,0,0,3,0,0,0 93 | "DHF-01512",0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,0,3,0,3,0,0,0,4,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,4,0,0,0,0,0,0,4,0,0,0,3,0,0,121,121,0,0,0,4,9,0,0,3 94 | "DHF-01528",0,0,3,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,15,3,0,4,3,4,4,0,0,4,0,4,0,4,0,0,3,0,0,5,0,3,0,0,0,0,0,0,0,0,0,5,0,4,4,0,0,0,7,0,0,5,3,4,0,6,0,0,0,3,0,0,121,158,0,8,3,5,17,0,4,7 95 | "DHF-01529",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,8,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18,9,0,0,0,0,0,0 96 | "DHF-01530",15,0,0,0,0,3,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,28,134,74,0,0,0,0,0,0,3,0,0,4,0,4,0,0,0,0,0,5,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,83,12,0,65,0,0,3,0,0,0,0,0,0,0,0,0,0,0,8,9,219,0,0,9,11,104,0 97 | "DHF-01540",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,23,0,0,0,0,0 98 | "DHF-01550",0,0,0,0,0,3,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,0,0,0,0,0,0,0,0,0,3,13,61,10,40,50,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,4,5,0,0,0,84,0,0,0,0 99 | "DQF-00248",0,22,39,25,16,210,242,62,62,16,47,13,28,13,119,94,35,20,80,16,24,130,26,42,27,71,11,29,14,80,32,45,19,15,29,23,13,0,0,14,4,4,4,0,0,7,7,0,0,0,0,0,0,0,0,3,0,10,0,0,4,25,0,0,0,0,0,0,0,3,6,0,0,0,0,0,0,12,16,0,4,3,0,48,5,5,3,5,7,0,3,9,17,0,9,0,0,1071,24,4,3 100 | "DQF-00358",0,0,0,0,0,6,0,0,0,0,0,0,0,0,3,4,0,0,4,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11,0,0,24,60,12,0 101 | 
"DQF-00362",16,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,13,77,53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,3,0,0,0,49,3,0,45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,104,0,0,4,12,149,0 102 | "DR5-00001",0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,7,3,6,0,0,0,0,0,0,0,0,3,7,0,0,0,0,3,0,0,29 103 | --------------------------------------------------------------------------------