├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── beetle ├── LICENSE ├── README.md ├── beetle.cabal ├── changelog.md ├── src │ └── Beetle │ │ ├── Base.hs │ │ ├── Datasets │ │ └── Streaming.hs │ │ └── UK │ │ └── PropertyData.hs └── stack.yaml ├── dampf ├── .dampfcfg.yaml.example ├── LICENSE ├── Main.hs ├── README.md ├── changelog.md ├── dampf.cabal ├── dampf.yaml.example ├── example │ ├── .dockerignore │ ├── pyping │ │ ├── Dockerfile │ │ ├── dampf.yaml │ │ └── serve.py │ └── todo-hs │ │ ├── Dockerfile │ │ ├── LICENSE │ │ ├── Main.hs │ │ ├── dampf.yaml │ │ ├── lib │ │ └── Todo │ │ │ ├── Items.hs │ │ │ ├── Serve.hs │ │ │ └── Test.hs │ │ ├── migrations │ │ └── 20170911151904_todo_table.sql │ │ ├── stack.yaml │ │ ├── todo.cabal │ │ └── www │ │ └── js │ │ └── blob.js ├── lib │ ├── Dampf.hs │ └── Dampf │ │ ├── AppFile │ │ ├── Pretty.hs │ │ └── Types.hs │ │ ├── Browse.hs │ │ ├── ConfigFile │ │ ├── Pretty.hs │ │ └── Types.hs │ │ ├── Docker.hs │ │ ├── Docker │ │ ├── Args.hs │ │ ├── Args │ │ │ ├── Class.hs │ │ │ ├── Network.hs │ │ │ └── Run.hs │ │ ├── Free.hs │ │ └── Types.hs │ │ ├── Internal │ │ ├── Env.hs │ │ └── Yaml.hs │ │ ├── Monitor.hs │ │ ├── Nginx.hs │ │ ├── Nginx │ │ ├── Config.hs │ │ ├── Test.hs │ │ └── Types.hs │ │ ├── Postgres.hs │ │ ├── Postgres │ │ ├── Connect.hs │ │ ├── Migrate.hs │ │ └── Setup.hs │ │ ├── Provision.hs │ │ ├── Test.hs │ │ └── Types.hs └── stack.yaml ├── dashdo-examples ├── LICENSE ├── README.md ├── dashdo-examples.cabal ├── exe │ ├── GapminderScatterplot.hs │ ├── IrisKMeans.hs │ ├── StatView.hs │ └── TestDashdo.hs ├── lib │ └── Dashdo │ │ └── Examples │ │ ├── GapminderScatterplot.hs │ │ ├── IrisKMeans.hs │ │ ├── StatView.hs │ │ └── TestDashdo.hs └── stack.yaml ├── dashdo ├── LICENSE ├── README.md ├── Setup.hs ├── dashdo.cabal ├── public │ └── js │ │ ├── dashdo.js │ │ └── runners │ │ ├── base.js │ │ └── rdashdo.js ├── src │ ├── Dashdo.hs │ └── Dashdo │ │ ├── Elements.hs │ │ ├── Files.hs │ │ ├── FlexibleInput.hs │ │ ├── Rdash.hs │ │ ├── Serve.hs │ │ └── Types.hs └── stack.yaml ├── docker-minidebhs ├── Dockerfile └── README.md ├── echarts-hs ├── .gitignore ├── LICENSE ├── README.md ├── TestEcharts.hs ├── changelog.md ├── echarts.cabal ├── echartstest.html ├── src │ └── Graphics │ │ └── Echarts.hs └── stack.yaml ├── fuml-svm ├── LICENSE ├── TestSVM.hs ├── fuml-svm.cabal ├── lib │ └── Fuml │ │ └── Svm.hs └── stack.yaml ├── fuml ├── LICENSE ├── README.md ├── Setup.hs ├── TestFuml.hs ├── fuml.cabal ├── lib │ └── Fuml │ │ ├── Base.hs │ │ ├── Base │ │ ├── KNN.hs │ │ ├── LinearRegression.hs │ │ ├── Logistic.hs │ │ └── PCA.hs │ │ ├── Core.hs │ │ ├── Optimisation.hs │ │ ├── Optimisation │ │ ├── BFGS.hs │ │ ├── NelderMead.hs │ │ └── SGD.hs │ │ ├── Supervised.hs │ │ ├── Supervised │ │ ├── Accuracy.hs │ │ └── KNN.hs │ │ └── Unsupervised.hs └── stack.yaml ├── inliterate ├── InlitPreProc.hs ├── LICENSE ├── README.md ├── Setup.hs ├── TestInliterate.hs ├── TestInliteratePreProc.hs ├── changelog.md ├── inliterate.cabal ├── lib │ ├── Inliterate.hs │ └── Inliterate │ │ ├── Import.hs │ │ └── Inspect.hs └── stack.yaml ├── lucid-extras ├── LICENSE ├── README.md ├── Setup.hs ├── changelog.md ├── lib │ └── Lucid │ │ ├── Bootstrap3.hs │ │ ├── DataTables.hs │ │ ├── Leaflet.hs │ │ ├── PreEscaped.hs │ │ ├── Rdash.hs │ │ ├── Tables.hs │ │ └── VegaLite.hs ├── lucid-extras.cabal ├── site-gen │ ├── DevelMain.hs │ └── Main.hs └── stack.yaml ├── parfoldl ├── LICENSE ├── README.md ├── Setup.hs ├── TestParFoldl.hs ├── changelog.md ├── licenses │ └── foldl.txt ├── parfoldl.cabal ├── src │ └── 
Control │ │ └── Parallel │ │ └── Foldl.hs └── stack.yaml ├── plotlyhs ├── LICENSE ├── README.md ├── Setup.hs ├── TestPlotly.hs ├── changelog.md ├── contour.html ├── docs │ └── index.html ├── gendoc │ ├── GenDoc.hs │ ├── GenDocInlit.hs │ ├── LICENSE │ ├── plotly-gendoc.cabal │ ├── stack.yaml │ └── test.hs ├── plotlyhs.cabal ├── src │ └── Graphics │ │ ├── Plotly.hs │ │ └── Plotly │ │ ├── Base.hs │ │ ├── Blaze.hs │ │ ├── GoG.hs │ │ ├── Histogram.hs │ │ ├── Lucid.hs │ │ ├── Simple.hs │ │ └── Utils.hs ├── stack.yaml └── tests │ └── Contour.hs ├── postgresql-simple-expr ├── LICENSE ├── lib │ └── Database │ │ └── PostgreSQL │ │ └── Simple │ │ ├── Connect.hs │ │ ├── Expr.hs │ │ └── FakeRows.hs ├── postgresql-simple-expr.cabal ├── stack.yaml └── tests │ ├── Common.hs │ ├── FakeRowsSpec.hs │ ├── KeySpec.hs │ └── Spec.hs ├── stack.yaml ├── stanhs ├── LICENSE ├── README.md ├── changelog.md ├── lib │ └── Stan │ │ ├── AST.hs │ │ ├── AST │ │ └── Pretty.hs │ │ ├── Data.hs │ │ ├── Run.hs │ │ ├── Schools.hs │ │ └── Simulate.hs ├── stack.yaml ├── stanhs.cabal └── test-stan.hs └── youido ├── LICENSE ├── README.md ├── examples └── Example.hs ├── form-repeat.js ├── lib ├── Youido.hs └── Youido │ ├── Authentication.hs │ ├── Dashdo.hs │ ├── Serve.hs │ └── Types.hs ├── stack.yaml └── youido.cabal /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | dist-* 3 | cabal-dev 4 | *.o 5 | *.hi 6 | *.chi 7 | *.chs.h 8 | *.dyn_o 9 | *.dyn_hi 10 | .hpc 11 | .hsenv 12 | .cabal-sandbox/ 13 | cabal.sandbox.config 14 | *.prof 15 | *.aux 16 | *.hp 17 | *.eventlog 18 | .stack-work/ 19 | cabal.project.local 20 | *~ 21 | .vscode/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 diffusionkinetics 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | DiffusionKinetics open source projects 2 | ===== 3 | [![Build Status](https://secure.travis-ci.org/diffusionkinetics/open.svg)](http://travis-ci.org/diffusionkinetics/open) 4 | 5 | This is a mono-repo containing a series of projects, mostly related to 6 | data science, machine learning and statistics in Haskell. 
7 | 8 | * [dampf](https://github.com/diffusionkinetics/open/tree/master/dampf) - Declarative DevOps for busy developers 9 | * [plotlyhs](https://github.com/diffusionkinetics/open/tree/master/plotlyhs) - Plotting using plotly.js 10 | * [parfoldl](https://github.com/diffusionkinetics/open/tree/master/parfoldl) - Parallel folds 11 | * [dashdo](https://github.com/diffusionkinetics/open/tree/master/dashdo) - Dashboards (Shiny for Haskell) 12 | * [fuml](https://github.com/diffusionkinetics/open/tree/master/fuml) - Functional machine learning 13 | * [inliterate](https://github.com/diffusionkinetics/open/tree/master/inliterate) - Dynamically generated reports 14 | * [stanhs](https://github.com/diffusionkinetics/open/tree/master/stanhs) - Probabilistic programming (interface to [Stan](http://mc-stan.org)) 15 | 16 | 17 | ## News 18 | 19 | Oct 16, 2018 : Development of `datasets` continues within `DataHaskell/dh-core` -------------------------------------------------------------------------------- /beetle/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /beetle/README.md: -------------------------------------------------------------------------------- 1 | Beetle 2 | ====== 3 | 4 | Beetle is an ETL framework for Haskell. for the moment its only functionality is to 5 | cache api calls. 
-------------------------------------------------------------------------------- /beetle/beetle.cabal: -------------------------------------------------------------------------------- 1 | Name: beetle 2 | Version: 0.1 3 | Synopsis: ETL with various data sources 4 | Description: ETL with some UK commercially relevant data sources 5 | 6 | License: MIT 7 | License-file: LICENSE 8 | Author: Tom Nielsen 9 | Maintainer: Tom Nielsen 10 | build-type: Simple 11 | Cabal-Version: >= 1.10 12 | homepage: https://github.com/diffusionkinetics/open/beetle 13 | bug-reports: https://github.com/diffusionkinetics/open/issues 14 | category: Data 15 | Tested-With: GHC == 7.10.2, GHC == 7.10.3, GHC == 8.0.1 16 | extra-source-files: 17 | changelog.md 18 | 19 | source-repository head 20 | type: git 21 | location: https://github.com/diffusionkinetics/open 22 | 23 | Library 24 | ghc-options: -Wall 25 | hs-source-dirs: src 26 | other-extensions: TemplateHaskell 27 | default-language: Haskell2010 28 | 29 | Exposed-modules: 30 | Beetle.Base 31 | Beetle.Datasets.Streaming 32 | Beetle.UK.PropertyData 33 | Build-depends: 34 | base >= 4.6 && < 5 35 | , aeson 36 | , attoparsec >= 0.13 37 | , bytestring 38 | , cassava 39 | , hashable 40 | , microlens 41 | , text 42 | , time 43 | , vector 44 | , wreq 45 | , higher-leveldb 46 | , datasets 47 | , streaming 48 | , streaming-bytestring 49 | , streaming-cassava 50 | , cassava 51 | , mtl 52 | -------------------------------------------------------------------------------- /beetle/changelog.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/beetle/changelog.md -------------------------------------------------------------------------------- /beetle/src/Beetle/Base.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric, FlexibleInstances, OverloadedStrings #-} 2 | 3 | module Beetle.Base where 4 | 5 | import Data.Aeson 6 | import qualified Network.Wreq as Wreq 7 | import Lens.Micro ((^.)) 8 | import qualified Data.ByteString as BS 9 | import qualified Data.ByteString.Char8 as BS8 10 | import qualified Data.ByteString.Lazy as BSL 11 | import Database.LevelDB.Higher 12 | import Control.Monad.IO.Class 13 | import System.IO 14 | import Data.Monoid 15 | 16 | data Key a = Key { theKey :: String, unKey :: a } 17 | 18 | class ToURL a where 19 | toURL :: a -> String 20 | 21 | callAPI :: (ToURL a, FromJSON b, MonadKV m) => a -> m (Either String b) 22 | callAPI x = do 23 | let url = toURL x 24 | mv <- getBS (BS8.pack url) 25 | let getIt = do 26 | --liftIO $ hPutStrLn stderr $ "calling api "++url 27 | rsp <- liftIO (Wreq.get url) 28 | let jbs = rsp ^. 
Wreq.responseBody 29 | --liftIO $ BS.hPutStrLn stderr $ BSL.toStrict $ jbs 30 | putBS (BS8.pack url) (BSL.toStrict $ jbs) 31 | case eitherDecode' jbs of 32 | Right v -> return $ Right v 33 | Left err -> return $ Left $ "decode: "++err++"\nFull result: \n" 34 | ++BS8.unpack (BSL.toStrict jbs) 35 | ++"\nURL: "++url 36 | case mv of 37 | Just respBS -> do 38 | --liftIO $ hPutStrLn stderr $ "got cached value" 39 | case decode $ BSL.fromStrict respBS of 40 | Nothing -> getIt 41 | Just v -> return $ Right v 42 | Nothing -> getIt 43 | 44 | class MonadIO m => MonadKV m where 45 | getBS :: BS.ByteString -> m (Maybe BS.ByteString) 46 | putBS :: BS.ByteString -> BS.ByteString -> m () 47 | rmKey :: BS.ByteString -> m () 48 | 49 | instance MonadKV IO where 50 | getBS _ = return Nothing 51 | putBS _ _ = return () 52 | rmKey _ = return () 53 | 54 | instance MonadKV (LevelDBT IO) where 55 | getBS k = get k 56 | putBS k v = put k v 57 | rmKey k = delete k -------------------------------------------------------------------------------- /beetle/src/Beetle/Datasets/Streaming.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE MultiParamTypeClasses #-} 2 | 3 | module Beetle.Datasets.Streaming where 4 | 5 | import qualified Streaming.Prelude as S 6 | import Streaming.Prelude (Stream, Of) 7 | import qualified Data.ByteString.Streaming as SBS 8 | import Streaming.Cassava 9 | import Numeric.Datasets 10 | import Control.Monad.IO.Class 11 | import Data.Maybe 12 | import System.IO.Error (userError) 13 | import Control.Monad.Error.Class (throwError) 14 | 15 | unCsvException :: CsvParseException -> String 16 | unCsvException (CsvParseException s) = s 17 | 18 | streamDataset :: Dataset a -> Stream (Of (Either String a)) IO () 19 | streamDataset ds = do 20 | dir <- liftIO $ tempDirForDataset ds 21 | lbs <- liftIO $ fmap (fromMaybe id $ preProcess ds) $ getFileFromSource dir $ source ds 22 | readStreamDataset (readAs ds) $ SBS.fromLazy lbs 23 | 24 | readStreamDataset :: ReadAs a -> SBS.ByteString IO () -> Stream (Of (Either String a)) IO () 25 | readStreamDataset (CSVRecord hhdr opts) sbs 26 | = fmap (const ()) $ S.map (either (Left . 
unCsvException) Right) $ decodeWithErrors opts hhdr sbs 27 | readStreamDataset _ _ = throwError $ userError "readStreamDataset: only CSVRecord implemented " 28 | 29 | foldDataset :: Dataset a -> b -> (b -> Either String a -> IO b) -> IO b 30 | foldDataset ds x0 accf = do 31 | let s = streamDataset ds 32 | S.foldM_ accf (return x0) (return) s 33 | 34 | mapDataset_ :: Dataset a -> (Either String a -> IO ()) -> IO () 35 | mapDataset_ ds f = foldDataset ds () (\() x -> f x) -------------------------------------------------------------------------------- /beetle/src/Beetle/UK/PropertyData.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric, FlexibleInstances #-} 2 | 3 | module Beetle.UK.PropertyData where 4 | 5 | import Data.Aeson 6 | import Data.Aeson.Types 7 | import GHC.Generics 8 | import Beetle.Base 9 | 10 | type Postcode = String 11 | 12 | data Prices = Prices Postcode Int 13 | data Demographics = Demographics Postcode 14 | 15 | instance ToURL (Key Prices) where 16 | toURL (Key k (Prices pc bedrms)) 17 | = concat [ "http://api.propertydata.co.uk/prices?key=", 18 | k, "&postcode=",pc,"&bedrooms=", show bedrms] 19 | 20 | data Wrapper a = Wrapper 21 | { wstatus :: String 22 | , wdata :: a 23 | } deriving Generic 24 | 25 | instance FromJSON a => FromJSON (Wrapper a) where 26 | parseJSON = genericParseJSON defaultOptions {fieldLabelModifier = drop 1 } 27 | 28 | data PriceResp = PriceResp 29 | { paverage :: Double 30 | } deriving Generic 31 | 32 | instance FromJSON PriceResp where 33 | parseJSON = genericParseJSON defaultOptions {fieldLabelModifier = drop 1 } 34 | 35 | 36 | getPrices :: MonadKV m => Key Prices -> m (Either String PriceResp) 37 | getPrices k = do 38 | fmap (fmap wdata) $ callAPI k -------------------------------------------------------------------------------- /beetle/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-9.21 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 
40 | - ../datasets 41 | - ../../streaming-cassava 42 | 43 | # Dependency packages to be pulled from upstream that are not in the resolver 44 | # (e.g., acme-missiles-0.3) 45 | extra-deps: 46 | - higher-leveldb-0.4.0.1 47 | 48 | # Override default flag values for local packages and extra-deps 49 | flags: {} 50 | 51 | # Extra package databases containing global packages 52 | extra-package-dbs: [] 53 | 54 | # Control whether we use the GHC we find on the path 55 | # system-ghc: true 56 | # 57 | # Require a specific version of stack, using version ranges 58 | # require-stack-version: -any # Default 59 | # require-stack-version: ">=1.1" 60 | # 61 | # Override the architecture used by stack, especially useful on Windows 62 | # arch: i386 63 | # arch: x86_64 64 | # 65 | # Extra directories used by stack for building 66 | # extra-include-dirs: [/path/to/dir] 67 | # extra-lib-dirs: [/path/to/dir] 68 | # 69 | # Allow a newer minor version of GHC than the snapshot specifies 70 | # compiler-check: newer-minor -------------------------------------------------------------------------------- /dampf/.dampfcfg.yaml.example: -------------------------------------------------------------------------------- 1 | # a file such as the below should me placed in $HOME/.dampfcfg.yaml 2 | 3 | postgres: 4 | host: localhost 5 | port: 5432 6 | 7 | users: 8 | postgres: pgpassword 9 | user: userspassword 10 | 11 | -------------------------------------------------------------------------------- /dampf/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /dampf/changelog.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/dampf/changelog.md -------------------------------------------------------------------------------- /dampf/dampf.cabal: -------------------------------------------------------------------------------- 1 | name: dampf 2 | version: 0.1.0 3 | synopsis: Declarative DevOps for busy developers 4 | description: Declarative DevOps for busy developers 5 | author: Tom Nielsen 6 | maintainer: tanielsen@gmail.com 7 | homepage: https://github.com/diffusionkinetics/open/dampf 8 | bug-reports: https://github.com/diffusionkinetics/open/issues 9 | license: MIT 10 | license-file: LICENSE 11 | 12 | build-type: Simple 13 | category: Control, Statistics 14 | cabal-version: >= 1.8 15 | tested-with: GHC == 7.8.4, GHC == 7.10.2, GHC == 7.10.3, GHC == 8.0.1 16 | 17 | extra-source-files: changelog.md 18 | 19 | 20 | library 21 | hs-source-dirs: lib 22 | 23 | exposed-modules: 24 | Dampf 25 | , Dampf.Docker 26 | , Dampf.Docker.Free 27 | , Dampf.Docker.Types 28 | , Dampf.Test 29 | , Dampf.Monitor 30 | , Dampf.Browse 31 | , Dampf.Nginx 32 | , Dampf.Nginx.Config 33 | , Dampf.Nginx.Types 34 | , Dampf.Nginx.Test 35 | , Dampf.Postgres 36 | , Dampf.Postgres.Migrate 37 | , Dampf.Postgres.Connect 38 | , Dampf.Postgres.Setup 39 | , Dampf.Types 40 | , Dampf.AppFile.Pretty 41 | , Dampf.AppFile.Types 42 | , Dampf.ConfigFile.Pretty 43 | , Dampf.ConfigFile.Types 44 | , Dampf.Docker.Args 45 | , Dampf.Docker.Args.Class 46 | , Dampf.Docker.Args.Run 47 | , Dampf.Docker.Args.Network 48 | , Dampf.Provision 49 | 50 | other-modules: 51 | Dampf.Internal.Env 52 | , Dampf.Internal.Yaml 53 | 54 | ghc-options: 55 | -Wall 56 | -fwarn-incomplete-patterns 57 | -fwarn-missing-signatures 58 | -fwarn-overlapping-patterns 59 | -fwarn-tabs 60 | -fwarn-warnings-deprecations 61 | 62 | build-depends: 63 | base >= 4.7 && < 5 64 | , aeson 65 | , wreq 66 | , attoparsec 67 | , containers 68 | , directory 69 | , exceptions 70 | , filepath 71 | , free 72 | , lens 73 | , mtl 74 | , postgresql-simple 75 | , pretty 76 | , process 77 | , text 78 | , time 79 | , typed-process 80 | , unix 81 | , unordered-containers 82 | , vector 83 | , yaml 84 | , regex-posix 85 | , regex-compat 86 | , bytestring 87 | , split 88 | , random 89 | , transformers 90 | , scientific 91 | , shelly 92 | , wreq 93 | 94 | executable dampf 95 | main-is: Main.hs 96 | build-depends: 97 | base >=4.7 && <5 98 | , dampf 99 | , lens 100 | , optparse-applicative 101 | , optparse-generic 102 | , text 103 | -------------------------------------------------------------------------------- /dampf/dampf.yaml.example: -------------------------------------------------------------------------------- 1 | image myimage: 2 | dockerFile: . 
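#
# Each top-level stanza below pairs a resource kind with a name: containers
# reference an image by the name declared above, a postgresdb declares a
# database together with its migrations directory, user and extensions,
# domains either proxy to a container:port or serve a static directory, and
# tests run a container command ("run ...") or an HTTP check
# ("get <url> =~ <pattern>").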
3 | 4 | container mycontainer1: 5 | image: myimage 6 | expose: [3002] 7 | 8 | container myadmincontainer: 9 | image: myimage 10 | expose: [3102] 11 | command: foobar serve 12 | 13 | postgresdb mydb: 14 | migrations: migrations/ 15 | user: myuser 16 | extensions: ["hstore"] 17 | 18 | domain api.mydomain.com: 19 | proxyContainer: mycontainer1:3002 20 | letsEncrypt: true 21 | 22 | domain platform.mydomain.com: 23 | proxyContainer: myadmincontainer:3102 24 | letsEncrypt: true 25 | 26 | domain mydomain.com: 27 | static: www/ 28 | letsEncrypt: true 29 | 30 | test domaintest: 31 | when: [Frequently] 32 | units: 33 | - run myimage foobar test 34 | - get http://mydomain.com =~ mydomain 35 | - get https://mydomain.com =~ mydomain 36 | -------------------------------------------------------------------------------- /dampf/example/.dockerignore: -------------------------------------------------------------------------------- 1 | dist 2 | .stack-work 3 | .git 4 | Dockerfile 5 | deploy/ 6 | dampf.yaml 7 | -------------------------------------------------------------------------------- /dampf/example/pyping/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.6.3-slim 2 | 3 | COPY serve.py /root 4 | 5 | EXPOSE 8080 6 | 7 | CMD ["/root/serve.py"] -------------------------------------------------------------------------------- /dampf/example/pyping/dampf.yaml: -------------------------------------------------------------------------------- 1 | image pyping: 2 | dockerFile: . 3 | 4 | container pyping: 5 | image: pyping 6 | expose: [8080] 7 | 8 | domain foo.com: 9 | proxyContainer: pyping:8080 10 | letsEncrypt: true 11 | 12 | test ping: 13 | when: [AtDeploy] 14 | units: 15 | - get http://foo.com =~ ping 16 | -------------------------------------------------------------------------------- /dampf/example/pyping/serve.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # From https://daanlenaerts.com/blog/2015/06/03/create-a-simple-http-server-with-python-3/ 4 | 5 | from http.server import BaseHTTPRequestHandler, HTTPServer 6 | 7 | # HTTPRequestHandler class 8 | class testHTTPServer_RequestHandler(BaseHTTPRequestHandler): 9 | 10 | # GET 11 | def do_GET(self): 12 | # Send response status code 13 | self.send_response(200) 14 | 15 | # Send headers 16 | self.send_header('Content-type','text/html') 17 | self.end_headers() 18 | 19 | # Send message back to client 20 | message = "ping" 21 | # Write content as utf-8 data 22 | self.wfile.write(bytes(message, "utf8")) 23 | return 24 | 25 | def run(): 26 | print('starting server...') 27 | 28 | # Server settings 29 | # Choose port 8080, for port 80, which is normally used for a http server, you need root access 30 | server_address = ('', 8080) 31 | httpd = HTTPServer(server_address, testHTTPServer_RequestHandler) 32 | print('running server...') 33 | httpd.serve_forever() 34 | 35 | 36 | run() -------------------------------------------------------------------------------- /dampf/example/todo-hs/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM filopodia/minidebhs:lts-8.5 2 | 3 | RUN cd /tmp && stack install -j2 lucid postgresql-simple \ 4 | typed-process scotty-cookie microlens-platform wreq blaze-html 5 | 6 | COPY stack.yaml /src-boot/stack.yaml 7 | COPY todo.cabal /src-boot/ 8 | 9 | RUN cd /src-boot && stack build -j 4 --dependencies-only 10 | 11 | COPY . 
/opt/todo 12 | 13 | WORKDIR /opt/todo 14 | 15 | RUN stack install -j2 todo:exe:todo \ 16 | && rm -rf /opt/todo/ 17 | 18 | EXPOSE 3001 19 | 20 | WORKDIR /root 21 | 22 | ENV PATH "$PATH:/root/.local/bin" 23 | 24 | CMD ["/root/.local/bin/todo", "serve"] -------------------------------------------------------------------------------- /dampf/example/todo-hs/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /dampf/example/todo-hs/Main.hs: -------------------------------------------------------------------------------- 1 | import Todo.Serve 2 | import Todo.Test 3 | 4 | import System.Environment 5 | 6 | main = getArgs >>= dispatch 7 | 8 | dispatch ["serve"] = serve 9 | dispatch ("test":args) = withArgs args todoTest -------------------------------------------------------------------------------- /dampf/example/todo-hs/dampf.yaml: -------------------------------------------------------------------------------- 1 | image todo: 2 | dockerFile: . 
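#
# How this example fits together: the container runs "todo serve" (a scotty
# app listening on port 3166, see lib/Todo/Serve.hs) against the "todo"
# database, whose schema comes from the SQL files under migrations/; the
# domain proxies todo.diffusionkinetics.com to that port; and the "pingpong"
# test runs "todo test" inside the image, which asserts that GET /ping
# returns "pong".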
3 | 4 | container todo: 5 | image: todo 6 | expose: [3166] 7 | command: todo serve 8 | useDatabase: todo 9 | 10 | postgresdb todo: 11 | migrations: migrations/ 12 | user: tomn 13 | extensions: [] 14 | 15 | domain todo.diffusionkinetics.com: 16 | proxyContainer: todo:3166 17 | letsEncrypt: true 18 | static: www/ 19 | 20 | test pingpong: 21 | when: [AtDeploy] 22 | units: 23 | - run todo todo test 24 | -------------------------------------------------------------------------------- /dampf/example/todo-hs/lib/Todo/Items.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, TypeFamilies, ExtendedDefaultRules, TemplateHaskell, DeriveGeneric #-} 2 | 3 | module Todo.Items where 4 | 5 | import Database.PostgreSQL.Simple 6 | import Database.PostgreSQL.Simple.Expr 7 | import GHC.Generics 8 | import Prelude hiding (id) 9 | import Data.Aeson 10 | 11 | data Todo = Todo 12 | { id :: Serial Int 13 | , title :: String 14 | , done :: Bool 15 | } deriving (Show, Generic) 16 | 17 | instance FromRow Todo 18 | instance ToRow Todo 19 | instance HasFieldNames Todo 20 | instance HasTable Todo where 21 | tableName _ = "todos" 22 | instance HasKey Todo where 23 | type Key Todo = Serial Int 24 | getKey = id 25 | getKeyFieldNames _ = ["id"] 26 | 27 | instance ToJSON Todo -------------------------------------------------------------------------------- /dampf/example/todo-hs/lib/Todo/Serve.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExtendedDefaultRules, TemplateHaskell #-} 2 | 3 | module Todo.Serve where 4 | 5 | import Todo.Items 6 | import Web.Scotty 7 | import Database.PostgreSQL.Simple.Connect 8 | import Database.PostgreSQL.Simple.Expr 9 | import Database.PostgreSQL.Simple 10 | import Data.String 11 | import Control.Monad.IO.Class 12 | import Data.ByteString.Lazy.Char8 (unpack) 13 | 14 | serve :: IO () 15 | serve = do 16 | conn <- createConn =<< configFromEnv 17 | scotty 3166 $ do 18 | get "/ping" $ text "pong" 19 | post "/add" $ do 20 | bd <- body 21 | Serial k <- liftIO $ insert conn $ Todo 0 (unpack bd) False 22 | text $ fromString $ show k 23 | post "/done/:id" $ do 24 | tid <- param "id" 25 | _ <- liftIO $ execute conn "update todos set done = true where id = ?" 
(Only (tid::Int)) 26 | text "OK" 27 | get "/list" $ do 28 | todos <- liftIO $ selectFrom conn "todos" () 29 | json (todos::[Todo]) 30 | -------------------------------------------------------------------------------- /dampf/example/todo-hs/lib/Todo/Test.hs: -------------------------------------------------------------------------------- 1 | module Todo.Test where 2 | 3 | import Test.HTTP 4 | 5 | todoTest :: IO () 6 | todoTest = defaultMain $ httpTestCase "pingpong" "http://todo.diffusionkinetics.com" $ do 7 | pong <- get "/ping" 8 | assert "pongs" $ pong == "pong" 9 | -------------------------------------------------------------------------------- /dampf/example/todo-hs/migrations/20170911151904_todo_table.sql: -------------------------------------------------------------------------------- 1 | create table todos ( 2 | id serial primary key, 3 | title text not null, 4 | done boolean not null 5 | ); -------------------------------------------------------------------------------- /dampf/example/todo-hs/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-8.5 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 
40 | - location: 41 | git: https://github.com/diffusionkinetics/open.git 42 | commit: c5807af17c9c89d12666232013f06d0fb71f2991 43 | extra-dep: true 44 | subdirs: 45 | - postgresql-simple-expr 46 | 47 | # Dependency packages to be pulled from upstream that are not in the resolver 48 | # (e.g., acme-missiles-0.3) 49 | extra-deps: 50 | - http-test-0.2.5 51 | 52 | 53 | # Override default flag values for local packages and extra-deps 54 | flags: {} 55 | 56 | # Extra package databases containing global packages 57 | extra-package-dbs: [] 58 | 59 | # Control whether we use the GHC we find on the path 60 | # system-ghc: true 61 | # 62 | # Require a specific version of stack, using version ranges 63 | # require-stack-version: -any # Default 64 | # require-stack-version: ">=1.1" 65 | # 66 | # Override the architecture used by stack, especially useful on Windows 67 | # arch: i386 68 | # arch: x86_64 69 | # 70 | # Extra directories used by stack for building 71 | # extra-include-dirs: [/path/to/dir] 72 | # extra-lib-dirs: [/path/to/dir] 73 | # 74 | # Allow a newer minor version of GHC than the snapshot specifies 75 | # compiler-check: newer-minor 76 | -------------------------------------------------------------------------------- /dampf/example/todo-hs/todo.cabal: -------------------------------------------------------------------------------- 1 | Name: todo 2 | Version: 0.1.0.0 3 | Synopsis: 4 | Description: 5 | License: MIT 6 | License-file: LICENSE 7 | Author: Tom Nielsen 8 | Maintainer: Tom Nielsen 9 | build-type: Simple 10 | Cabal-Version: >= 1.8 11 | homepage: https://github.com/diffusionkinetics/open/dampf/example 12 | bug-reports: https://github.com/diffusionkinetics/open/issues 13 | category: Web 14 | Tested-With: GHC == 8.0.1 15 | 16 | Library 17 | ghc-options: -Wall 18 | hs-source-dirs: lib 19 | Exposed-modules: 20 | Todo.Serve 21 | Todo.Test 22 | Todo.Items 23 | Build-depends: 24 | base >= 4.6 && < 5 25 | , text 26 | , aeson 27 | , scotty 28 | , postgresql-simple 29 | , postgresql-simple-expr 30 | , scotty 31 | , bytestring 32 | , http-test 33 | 34 | executable todo 35 | main-is: Main.hs 36 | Build-depends: 37 | base >= 4.6 && < 5 38 | , todo 39 | -------------------------------------------------------------------------------- /dampf/example/todo-hs/www/js/blob.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/dampf/example/todo-hs/www/js/blob.js -------------------------------------------------------------------------------- /dampf/lib/Dampf.hs: -------------------------------------------------------------------------------- 1 | module Dampf where 2 | 3 | import Control.Lens 4 | import Control.Monad.Catch (MonadThrow) 5 | import Control.Monad.IO.Class (MonadIO, liftIO) 6 | import GHC.Conc 7 | 8 | import Dampf.Docker 9 | import Dampf.Nginx 10 | import Dampf.Postgres 11 | import Dampf.Types 12 | import Dampf.Monitor 13 | 14 | dump :: (MonadIO m) => DampfT m () 15 | dump = do 16 | a <- view app 17 | c <- view config 18 | 19 | liftIO $ do 20 | putStrLn $ pShowDampfApp a 21 | putStrLn $ pShowDampfConfig c 22 | 23 | 24 | goBuild :: (MonadIO m, MonadThrow m) => DampfT m () 25 | goBuild = do 26 | setupDB 27 | buildDocker 28 | 29 | 30 | goDeploy :: (MonadIO m, MonadThrow m) => DampfT m () 31 | goDeploy = do 32 | goBuild 33 | runMigrations Nothing 34 | deployDocker 35 | deployDomains 36 | liftIO $ threadDelay 1000000 37 | runMonitor [] 38 | 
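-- Taken together: 'goBuild' is 'setupDB' followed by 'buildDocker' (one image
-- build per entry in the app file), and 'goDeploy' is 'goBuild' followed by
-- 'runMigrations Nothing', 'deployDocker' (each container is stopped, removed
-- and re-run), 'deployDomains', a one-second 'threadDelay', and finally
-- 'runMonitor []'.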
-------------------------------------------------------------------------------- /dampf/lib/Dampf/Browse.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, DeriveGeneric #-} 2 | module Dampf.Browse where 3 | 4 | import Dampf.Test 5 | import Dampf.Types 6 | import Dampf.Docker.Free 7 | import Dampf.Docker.Types 8 | import Dampf.Docker.Args.Run 9 | 10 | import Data.Text (Text) 11 | import qualified Data.Map.Strict as Map 12 | 13 | import GHC.Generics 14 | import Control.Lens 15 | 16 | import Control.Monad (void) 17 | import Control.Monad.IO.Class (MonadIO) 18 | import Control.Monad.Catch (MonadThrow) 19 | 20 | type ContainerName = Text 21 | type Args = RunArgs -> RunArgs 22 | 23 | data Browser = Browser ContainerName ContainerSpec Args 24 | 25 | chrome_vnc :: Browser 26 | chrome_vnc = Browser "dampf-chrome-vnc-server" spec id where 27 | spec = ContainerSpec 28 | "siomiz/chrome" 29 | Nothing 30 | Nothing 31 | Nothing 32 | 33 | chrome_x11 :: Browser 34 | chrome_x11 = Browser "dampf-chrome-x11" spec args where 35 | spec = ContainerSpec 36 | "jess/chrome" 37 | Nothing 38 | Nothing 39 | Nothing 40 | args = set privileged True 41 | . set envs (Map.fromList [("DISPLAY","unix:0")]) 42 | . set volumes [("/tmp/.X11-unix/","/tmp/.X11-unix")] 43 | 44 | data Backend = VNC | X11 deriving (Show, Read, Eq, Generic) 45 | 46 | browse :: (MonadIO m, MonadThrow m) => Backend -> DampfT m () 47 | browse b = 48 | let go (Browser name' spec args) = do 49 | (hosts, argsTweak, container_names, netName) <- fakeHostsArgs 50 | void . runDockerT $ runWith (args . argsTweak) name' spec 51 | cleanUp netName (name' : container_names) 52 | in case b of 53 | VNC -> go chrome_vnc 54 | X11 -> go chrome_x11 55 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/ConfigFile/Pretty.hs: -------------------------------------------------------------------------------- 1 | module Dampf.ConfigFile.Pretty 2 | ( pShowDampfConfig 3 | ) where 4 | 5 | import Control.Lens 6 | import qualified Data.Map.Strict as Map 7 | import qualified Data.Text as T 8 | import Text.PrettyPrint 9 | 10 | import Dampf.ConfigFile.Types 11 | 12 | 13 | pShowDampfConfig :: DampfConfig -> String 14 | pShowDampfConfig = render . hang (text "Config File:") 4 . pprDampfConfig 15 | 16 | 17 | pprDampfConfig :: DampfConfig -> Doc 18 | pprDampfConfig cfg = vcat 19 | [ pprDatabaseServer d 20 | ] 21 | where 22 | d = cfg ^. postgres 23 | 24 | 25 | pprLiveCertificate :: Maybe FilePath -> Doc 26 | pprLiveCertificate (Just l) = vcat 27 | [ text "Live Certificate:" 28 | , text "" 29 | , nest 4 (text l) 30 | , text "" 31 | ] 32 | 33 | pprLiveCertificate Nothing = empty 34 | 35 | 36 | pprDatabaseServer :: Maybe PostgresConfig -> Doc 37 | pprDatabaseServer (Just s) = vcat 38 | [ text "Database Server:" 39 | , text "" 40 | , nest 4 (pprPostgresConfig s) 41 | , text "" 42 | ] 43 | 44 | pprDatabaseServer Nothing = empty 45 | 46 | 47 | pprPostgresConfig :: PostgresConfig -> Doc 48 | pprPostgresConfig cfg = vcat 49 | [ text "host:" <+> text h 50 | , text "port:" <+> int p 51 | , text "users:" 52 | , nest 4 (pprMap u) 53 | ] 54 | where 55 | h = cfg ^. host . to T.unpack 56 | p = cfg ^. port 57 | u = cfg ^. users . 
to Map.toList 58 | 59 | 60 | pprMap :: (Show a) => [(a, a)] -> Doc 61 | pprMap kvs = vcat 62 | $ fmap (\(k, v) -> text (show k) <> colon <+> text (show v)) kvs 63 | 64 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/ConfigFile/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | 6 | module Dampf.ConfigFile.Types 7 | ( -- * Configuration Types 8 | DampfProfiles(..) 9 | , HasDampfProfiles(..) 10 | , DampfConfig(..) 11 | , HasDampfConfig(..) 12 | , PostgresConfig(..) 13 | , HasPostgresConfig(..) 14 | ) where 15 | 16 | import Control.Lens 17 | import Control.Monad (forM) 18 | import Data.Aeson 19 | import qualified Data.HashMap.Lazy as HM 20 | import Data.Map.Strict (Map) 21 | import qualified Data.Map.Strict as Map 22 | import Data.Text (Text) 23 | import qualified Data.Text as T 24 | import GHC.Generics 25 | 26 | import Dampf.Internal.Yaml 27 | 28 | 29 | -- Configuration Types 30 | 31 | data PostgresConfig = PostgresConfig 32 | { _host :: Text 33 | , _port :: Int 34 | , _users :: Map Text Text 35 | } deriving (Eq, Show, Generic) 36 | 37 | makeClassy ''PostgresConfig 38 | 39 | 40 | instance FromJSON PostgresConfig where 41 | parseJSON = gDecode 42 | 43 | 44 | data DampfConfig = DC 45 | { _postgres :: Maybe PostgresConfig 46 | } deriving (Eq, Show, Generic) 47 | 48 | makeClassy ''DampfConfig 49 | 50 | 51 | instance FromJSON DampfConfig where 52 | parseJSON = gDecode 53 | 54 | 55 | data DampfProfiles = DP 56 | { _profiles :: Map Text DampfConfig 57 | } deriving (Eq, Show, Generic) 58 | 59 | makeClassy ''DampfProfiles 60 | 61 | 62 | instance FromJSON DampfProfiles where 63 | parseJSON = withObject "Configuration File" $ \o -> 64 | fmap (DP . Map.fromList) $ forM (HM.toList o) $ \(k, v) -> 65 | case T.words k of 66 | ["profile", name] -> (,) <$> return name <*> parseJSON v 67 | _ -> fail "Invalid profile specification" 68 | 69 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Docker.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module Dampf.Docker 3 | ( -- * Actions 4 | buildDocker 5 | , deployDocker 6 | , runDocker 7 | ) where 8 | 9 | import Control.Lens 10 | import Control.Monad (void) 11 | import Control.Monad.Catch (MonadThrow) 12 | import Control.Monad.IO.Class (MonadIO) 13 | import Data.Text (Text) 14 | import Data.Monoid 15 | import Data.Map.Strict (keys) 16 | 17 | import Dampf.Docker.Free 18 | import Dampf.Docker.Types 19 | import Dampf.Types 20 | 21 | 22 | -- TODO: Rename this buildImages? 23 | buildDocker :: (MonadIO m, MonadThrow m) => DampfT m () 24 | buildDocker = do 25 | is <- view (app . images) 26 | runDockerT . iforM_ is $ \n spec -> 27 | build n (spec ^. dockerFile) 28 | 29 | 30 | -- TODO: Rename this deployContainers? 31 | deployDocker :: (MonadIO m, MonadThrow m) => DampfT m () 32 | deployDocker = do 33 | cs <- view (app . containers) 34 | runDockerT . iforM_ cs $ \n spec -> do 35 | stop n 36 | rm n 37 | run True n spec 38 | 39 | runDocker :: (MonadIO m, MonadThrow m) => Text -> Maybe Text -> DampfT m () 40 | runDocker imgNm mCmd = do 41 | dbs <- view (app . databases) 42 | let firstDb = safeHead $ keys dbs 43 | runDockerT $ void . 
run False ("run"<>imgNm) $ ContainerSpec imgNm Nothing mCmd firstDb 44 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Docker/Args.hs: -------------------------------------------------------------------------------- 1 | module Dampf.Docker.Args 2 | ( ToArgs(..) 3 | , RunArgs 4 | , HasRunArgs(..) 5 | , mkRunArgs 6 | , defCreateArg 7 | , defConnectArg 8 | , ConnectArgs 9 | , CreateArgs 10 | , unDaemonize 11 | ) where 12 | 13 | import Dampf.Docker.Args.Class 14 | import Dampf.Docker.Args.Run 15 | import Dampf.Docker.Args.Network 16 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Docker/Args/Class.hs: -------------------------------------------------------------------------------- 1 | module Dampf.Docker.Args.Class where 2 | 3 | import Data.Text (Text) 4 | import qualified Data.Text as T 5 | 6 | class ToArgs a where 7 | toArgs :: a -> [String] 8 | 9 | flagArg' :: String -> [String] 10 | flagArg' n | null n = [] 11 | | otherwise = ["--" ++ n] 12 | 13 | flagArg :: (Show a) => a -> [String] 14 | flagArg v = if null s then [] else [s] 15 | where 16 | s = show v 17 | 18 | namedArg :: (Show a) => String -> a -> [String] 19 | namedArg n v = ["--" ++ n ++ "=" ++ show v] 20 | 21 | namedTextArg :: String -> Text -> [String] 22 | namedTextArg n v = ["--" ++ n ++ "=" ++ T.unpack v] 23 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Docker/Args/Network.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell #-} 2 | module Dampf.Docker.Args.Network where 3 | 4 | import Dampf.Docker.Args.Class 5 | import Data.Text (Text) 6 | import Control.Lens 7 | import Data.Monoid ((<>)) 8 | 9 | import qualified Data.Text as T 10 | 11 | data Driver = Bridge 12 | 13 | instance Show Driver where 14 | show Bridge = "bridge" 15 | 16 | data CreateArgs = CreateArgs { 17 | _driver :: Driver 18 | , _createNet :: Text 19 | } 20 | 21 | data ConnectArgs = ConnectArgs { 22 | _alias :: Maybe Text 23 | , _ip :: Maybe Text 24 | , _link :: Maybe [Text] 25 | , _connectToNet :: Text 26 | , _containerName :: Text 27 | } 28 | 29 | defCreateArg :: Text -> CreateArgs 30 | defCreateArg = CreateArgs Bridge 31 | 32 | defConnectArg :: Text -> Text -> ConnectArgs 33 | defConnectArg net container = ConnectArgs Nothing Nothing Nothing net container 34 | 35 | makeClassy ''ConnectArgs 36 | makeClassy ''CreateArgs 37 | 38 | instance ToArgs CreateArgs where 39 | toArgs s = ["network", "create"] 40 | <> s ^. driver . to (namedArg "driver") 41 | <> [s ^. createNet . to T.unpack] 42 | 43 | 44 | instance ToArgs ConnectArgs where 45 | toArgs s = ["network", "connect"] 46 | <> s ^. alias . _Just . to (namedTextArg "alias") 47 | <> s ^. ip . _Just . to (namedTextArg "ip") 48 | <> ["--link"] <> toListOf (link . _Just . traverse . to T.unpack) s 49 | <> [s ^. connectToNet . to T.unpack] 50 | <> [s ^. containerName . 
to T.unpack] 51 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Internal/Env.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE LambdaCase #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | 4 | module Dampf.Internal.Env 5 | ( resolveEnvVars 6 | ) where 7 | 8 | import Control.Monad (forM) 9 | import Data.HashMap.Strict (HashMap) 10 | import qualified Data.HashMap.Strict as HM 11 | import Data.Text (Text) 12 | import qualified Data.Text as T 13 | import Data.Vector (Vector) 14 | import Data.Yaml 15 | import System.Environment 16 | 17 | 18 | -- Resolving Environment Variables 19 | 20 | resolveEnvVars :: Value -> IO Value 21 | resolveEnvVars (Object m) = Object <$> resolveObject m 22 | resolveEnvVars (Array vs) = Array <$> resolveArray vs 23 | resolveEnvVars (String t) = toJSON <$> resolveString t 24 | resolveEnvVars x = return x 25 | 26 | 27 | resolveObject :: HashMap Text Value -> IO (HashMap Text Value) 28 | resolveObject m = fmap HM.fromList . forM kvs $ \(k, v) -> do 29 | rk <- resolveString k 30 | rv <- resolveEnvVars v 31 | 32 | return (rk, rv) 33 | where 34 | kvs = HM.toList m 35 | 36 | 37 | resolveArray :: Vector Value -> IO (Vector Value) 38 | resolveArray = mapM resolveEnvVars 39 | 40 | 41 | resolveString :: Text -> IO Text 42 | resolveString t = case T.stripPrefix "$" t of 43 | Just e -> let e' = T.unpack e in 44 | (lookup e' <$> getEnvironment) >>= \case 45 | Just v -> return (T.pack v) 46 | Nothing -> error $ "Variable not in environment " ++ e' 47 | 48 | Nothing -> return t 49 | 50 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Internal/Yaml.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | 4 | module Dampf.Internal.Yaml 5 | ( gDecode 6 | , options 7 | , catchYamlException 8 | ) where 9 | 10 | import Control.Monad.Catch 11 | import Control.Monad.IO.Class 12 | import Data.Aeson.Types 13 | import Data.Yaml 14 | import GHC.Generics 15 | 16 | 17 | gDecode :: (Generic a, GFromJSON Zero (Rep a)) => Value -> Parser a 18 | gDecode = genericParseJSON options 19 | 20 | 21 | options :: Options 22 | options = defaultOptions 23 | { fieldLabelModifier = drop 1 24 | , sumEncoding = UntaggedValue 25 | } 26 | 27 | 28 | catchYamlException :: (MonadIO m, MonadThrow m, Exception e) 29 | => (String -> e) -> ParseException -> m a 30 | catchYamlException f e = throwM $ f e' 31 | where 32 | e' = prettyPrintParseException e 33 | 34 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Nginx/Config.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Dampf.Nginx.Config where 4 | 5 | import Control.Lens 6 | import Control.Monad.IO.Class (MonadIO) 7 | import Data.Text (Text) 8 | import qualified Data.Text as T 9 | import System.FilePath 10 | 11 | import Dampf.Nginx.Types 12 | import Dampf.Types 13 | import Data.Maybe (fromMaybe) 14 | 15 | 16 | domainConfig :: (MonadIO m) => Text -> DomainSpec -> DampfT m Text 17 | domainConfig name spec = T.pack . 
pShowServer <$> domainToServer name spec 18 | 19 | 20 | domainToServer :: (MonadIO m) => Text -> DomainSpec -> DampfT m Server 21 | domainToServer name spec 22 | | isSSL = do 23 | decls <- (http ++) <$> sslDecls name spec 24 | return (Server decls) 25 | 26 | | otherwise = return (Server http) 27 | where 28 | isSSL = spec ^. letsEncrypt . non False 29 | http = httpDecls name spec 30 | 31 | 32 | domainToLocation :: Text -> DomainSpec -> [(Text, Text)] 33 | domainToLocation name spec = 34 | maybe [] staticAttrs s 35 | ++ cdnAttrs cdn 36 | ++ maybe [] proxyAttrs p 37 | where 38 | s :: Maybe Text 39 | s = const name <$> spec ^. static 40 | p = spec ^. proxyContainer 41 | cdn = spec ^. isCDN 42 | 43 | 44 | sslDecls :: (MonadIO m) => Text -> DomainSpec -> DampfT m [ServerDecl] 45 | sslDecls name spec = do 46 | return $ f $ "/etc/letsencrypt/live/"++ T.unpack name 47 | where 48 | f live = 49 | [ Listen 443 ["ssl"] 50 | , SSLCertificate $ live "fullchain.pem" 51 | , SSLCertificateKey $ live "privkey.pem" 52 | , SSLTrustedCertificate $ live "chain.pem" 53 | ] 54 | 55 | httpDecls :: Text -> DomainSpec -> [ServerDecl] 56 | httpDecls name spec = 57 | [ Listen 80 [] 58 | , if (fromMaybe False $ _nowww spec) 59 | then ServerName [name] 60 | else ServerName [name, "www." `T.append` name] 61 | , Location "/" $ domainToLocation name spec 62 | ] 63 | 64 | cdnAttrs :: Maybe Bool -> [(Text, Text)] 65 | cdnAttrs (Just True) = 66 | [ ("gzip_static","on") 67 | , ("expires","max") 68 | , ("log_not_found","off") 69 | , ("access_log","off") 70 | , ("add_header","Cache-Control public") 71 | , ("add_header","'Access-Control-Allow-Origin' '*'") 72 | , ("add_header","'Access-Control-Allow-Methods' 'GET, OPTIONS'") 73 | ] 74 | cdnAttrs _ = [] 75 | 76 | 77 | staticAttrs :: Text -> [(Text, Text)] 78 | staticAttrs x = 79 | [ ("root", "/var/www/" `T.append` x) 80 | , ("index", "index.html") 81 | ] 82 | 83 | 84 | proxyAttrs :: Text -> [(Text, Text)] 85 | proxyAttrs x = 86 | [ ("proxy_pass", "http://127.0.0.1:" `T.append` p) 87 | , ("proxy_set_header", "Host $host") 88 | , ("proxy_set_header", "X-Real-IP $remote_addr") 89 | , ("proxy_set_header", "X-Forwarded-For $proxy_add_x_forwarded_for") 90 | , ("proxy_set_header", "X-Forwarded-Proto $scheme") 91 | ] 92 | where 93 | p = last $ T.splitOn ":" x 94 | 95 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Nginx/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module Dampf.Nginx.Types 3 | ( Server(..) 4 | , ServerDecl(..) 5 | , pShowServer 6 | , pShowFakeServer 7 | ) where 8 | 9 | import Data.Text (Text) 10 | import qualified Data.Text as T 11 | import Text.PrettyPrint 12 | 13 | 14 | newtype Server = Server [ServerDecl] 15 | 16 | addDecl :: ServerDecl -> Server -> Server 17 | addDecl d (Server ds) = Server (d : ds) 18 | 19 | data ServerDecl 20 | = Listen Int [String] 21 | | ServerName [Text] 22 | | Location Text [(Text, Text)] 23 | | Include FilePath 24 | | SSLCertificate FilePath 25 | | SSLCertificateKey FilePath 26 | | SSLTrustedCertificate FilePath 27 | 28 | 29 | pShowServer :: Server -> String 30 | pShowServer = render . pprServer 31 | 32 | pShowFakeServer :: Server -> String 33 | pShowFakeServer = render . 
addMoreThings 34 | where addMoreThings doc = 35 | text "events" <+> lbrace 36 | $+$ nest 4 (text "worker_connections 512;") 37 | $+$ rbrace 38 | $+$ text "http" <+> lbrace 39 | $+$ nest 4 (pprServer doc) 40 | $+$ rbrace 41 | 42 | 43 | pprServer :: Server -> Doc 44 | pprServer (Server ds) = 45 | text "server" <+> lbrace 46 | $+$ nest 4 (vcat $ fmap pprServerDecl ds) 47 | $+$ rbrace 48 | 49 | pprServerDecl :: ServerDecl -> Doc 50 | pprServerDecl (Listen p ss) = text "listen" 51 | <+> int p <+> vcat (fmap text ss) <> semi 52 | 53 | pprServerDecl (ServerName ns) = text "server_name" 54 | <+> hsep (fmap (text . T.unpack) ns) <> semi 55 | 56 | pprServerDecl (Location p kvs) = text "location" 57 | <+> text (T.unpack p) <+> lbrace 58 | $+$ nest 4 (vcat (fmap ppMap kvs)) 59 | $+$ rbrace 60 | 61 | pprServerDecl (Include p) = text "include" 62 | <+> text p <> semi 63 | 64 | pprServerDecl (SSLCertificate p) = text "ssl_certificate" 65 | <+> text p <> semi 66 | 67 | pprServerDecl (SSLTrustedCertificate p) = text "ssl_trusted_certificate" 68 | <+> text p <> semi 69 | 70 | 71 | pprServerDecl (SSLCertificateKey p) = text "ssl_certificate_key" 72 | <+> text p <> semi 73 | 74 | ppMap :: (Text, Text) -> Doc 75 | ppMap (k, v) = text (T.unpack k) <+> text (T.unpack v) <> semi 76 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Postgres/Connect.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE LambdaCase #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE ScopedTypeVariables #-} 4 | 5 | module Dampf.Postgres.Connect where 6 | 7 | import Control.Lens 8 | import Control.Monad.Catch (MonadThrow, throwM) 9 | import Control.Monad.IO.Class (MonadIO, liftIO) 10 | import Data.Text (Text) 11 | import qualified Data.Text as T 12 | import Database.PostgreSQL.Simple 13 | 14 | import Dampf.Types 15 | 16 | 17 | lookupPassword :: (HasPostgresConfig c) => Text -> c -> String 18 | lookupPassword name cfg = case cfg ^. users . at name of 19 | Nothing -> error $ "no password for user "++ T.unpack name ++ " in .dampf.cfg" 20 | Just pw -> T.unpack pw 21 | 22 | 23 | createSuperUserConn :: (MonadIO m, MonadThrow m) 24 | => Text -> DampfT m Connection 25 | createSuperUserConn name = createConn name spec 26 | where 27 | spec = DatabaseSpec Nothing "postgres" [] 28 | 29 | createSuperUserPostgresConn :: (MonadIO m, MonadThrow m) 30 | => DampfT m Connection 31 | createSuperUserPostgresConn = createConn "postgres" spec 32 | where 33 | spec = DatabaseSpec Nothing "postgres" [] 34 | 35 | createConn :: (MonadIO m, MonadThrow m) 36 | => Text -> DatabaseSpec -> DampfT m Connection 37 | createConn name spec = view (config . postgres) >>= \case 38 | Just s -> liftIO $ connect ConnectInfo 39 | { connectHost = s ^. host . to T.unpack 40 | , connectPort = s ^. port . to fromIntegral 41 | , connectUser = spec ^. user . to T.unpack 42 | , connectPassword = lookupPassword (spec ^. user) s 43 | , connectDatabase = T.unpack name 44 | } 45 | 46 | Nothing -> throwM NoDatabaseServer 47 | 48 | 49 | destroyConn :: (MonadIO m) => Connection -> DampfT m () 50 | destroyConn = liftIO . 
close 51 | 52 | -------------------------------------------------------------------------------- /dampf/lib/Dampf/Postgres/Migrate.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Dampf.Postgres.Migrate where 4 | 5 | import Control.Arrow ((&&&)) 6 | import Control.Lens 7 | import Control.Monad (forM_, unless, void, when) 8 | import Control.Monad.Catch (MonadThrow, throwM) 9 | import Control.Monad.IO.Class (MonadIO, liftIO) 10 | import Data.Char (isDigit) 11 | import Data.List (isSuffixOf, sort) 12 | import Data.String (fromString) 13 | import Data.Text (Text) 14 | import Data.Time 15 | import Database.PostgreSQL.Simple 16 | import System.Directory 17 | import System.FilePath 18 | 19 | import Dampf.Postgres.Connect 20 | import Dampf.Types 21 | 22 | 23 | newMigration :: String -> DatabaseSpec -> IO () 24 | newMigration mig spec = case spec ^. migrations of 25 | Just m -> do 26 | ts <- formatTime defaultTimeLocale "%Y%m%d%H%M%S" <$> getCurrentTime 27 | let f = m </> ts ++ "_" ++ mig ++ ".sql" 28 | 29 | writeFile f "" 30 | putStrLn f 31 | 32 | Nothing -> throwM NoMigrations 33 | 34 | 35 | migrate :: (MonadIO m, MonadThrow m) => Text -> DatabaseSpec -> DampfT m () 36 | migrate name spec = case spec ^. migrations of 37 | Just m -> do 38 | ms <- view (config . postgres) 39 | exists <- liftIO $ doesDirectoryExist m 40 | 41 | case ms of 42 | Just _ -> when exists $ do 43 | migs <- liftIO $ getMigrations m 44 | 45 | unless (null migs) $ do 46 | conn <- createConn name spec 47 | done <- liftIO $ getAppliedMigrations conn 48 | 49 | let migs' = filter ((`notElem` done) . fst) migs 50 | 51 | liftIO . forM_ migs' $ \(t, f) -> do 52 | content <- readFile f 53 | putStrLn $ "Applying migration: " ++ t 54 | 55 | let qStr = content 56 | ++ "; INSERT INTO migrations (timestamp) VALUES ('" 57 | ++ t 58 | ++ "')" 59 | 60 | execute_ conn $ fromString qStr 61 | 62 | Nothing -> throwM NoDatabaseServer 63 | 64 | Nothing -> return () 65 | 66 | 67 | getAppliedMigrations :: Connection -> IO [String] 68 | getAppliedMigrations conn = do 69 | void $ execute_ conn 70 | "CREATE TABLE IF NOT EXISTS migrations (timestamp varchar(15) PRIMARY KEY)" 71 | 72 | rows <- query_ conn "SELECT timestamp FROM migrations" 73 | return (fmap head rows) 74 | 75 | 76 | getMigrations :: FilePath -> IO [(String, FilePath)] 77 | getMigrations d = 78 | fmap (takeWhile isDigit &&& (d </>)) . sort . filter (isSuffixOf ".sql") 79 | <$> getDirectoryContents d 80 | 81 | -------------------------------------------------------------------------------- /dampf/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-9.21 19 | 20 | # User packages to be built. 
21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 40 | 41 | # Dependency packages to be pulled from upstream that are not in the resolver 42 | # (e.g., acme-missiles-0.3) 43 | extra-deps: [] 44 | 45 | # Override default flag values for local packages and extra-deps 46 | flags: {} 47 | 48 | # Extra package databases containing global packages 49 | extra-package-dbs: [] 50 | 51 | # Control whether we use the GHC we find on the path 52 | # system-ghc: true 53 | # 54 | # Require a specific version of stack, using version ranges 55 | # require-stack-version: -any # Default 56 | # require-stack-version: ">=1.1" 57 | # 58 | # Override the architecture used by stack, especially useful on Windows 59 | # arch: i386 60 | # arch: x86_64 61 | # 62 | # Extra directories used by stack for building 63 | # extra-include-dirs: [/path/to/dir] 64 | # extra-lib-dirs: [/path/to/dir] 65 | # 66 | -------------------------------------------------------------------------------- /dashdo-examples/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /dashdo-examples/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/dashdo-examples/README.md -------------------------------------------------------------------------------- /dashdo-examples/dashdo-examples.cabal: -------------------------------------------------------------------------------- 1 | Name: dashdo-examples 2 | Version: 0.1.0 3 | Synopsis: Shiny for Haskell 4 | Description: 5 | Web-based interactive applets for data analysis 6 | 7 | 8 | License: MIT 9 | License-file: LICENSE 10 | Author: Tom Nielsen 11 | Maintainer: tanielsen@gmail.com 12 | build-type: Simple 13 | category: Statistics, Web 14 | Cabal-Version: >= 1.10 15 | 16 | library 17 | hs-source-dirs: lib 18 | exposed-modules: 19 | Dashdo.Examples.GapminderScatterplot 20 | Dashdo.Examples.IrisKMeans 21 | Dashdo.Examples.StatView 22 | Dashdo.Examples.TestDashdo 23 | build-depends: 24 | base >= 4 && <= 5 25 | , plotlyhs 26 | , datasets 27 | , lucid 28 | , lucid-extras 29 | , aeson 30 | , text 31 | , dashdo 32 | , microlens-platform 33 | , fuml 34 | , vector 35 | , mtl 36 | , statgrab 37 | , unix 38 | 39 | executable test-dashdo 40 | main-is: TestDashdo.hs 41 | default-language: Haskell2010 42 | hs-source-dirs: exe 43 | build-depends: base >=4.6 && <5 44 | , dashdo-examples 45 | 46 | executable iris-kmeans-dashdo 47 | main-is: IrisKMeans.hs 48 | default-language: Haskell2010 49 | hs-source-dirs: exe 50 | build-depends: base >=4.6 && <5 51 | , dashdo-examples 52 | 53 | executable statview-dashdo 54 | main-is: StatView.hs 55 | default-language: Haskell2010 56 | hs-source-dirs: exe 57 | build-depends: base >=4.6 && <5 58 | , dashdo-examples 59 | 60 | executable gapminder-dashdo 61 | main-is: GapminderScatterplot.hs 62 | default-language: Haskell2010 63 | hs-source-dirs: exe 64 | build-depends: base >=4.6 && <5 65 | , dashdo-examples -------------------------------------------------------------------------------- /dashdo-examples/exe/GapminderScatterplot.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Dashdo.Examples.GapminderScatterplot 4 | 5 | main = gapMinderScatterPlot -------------------------------------------------------------------------------- /dashdo-examples/exe/IrisKMeans.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Dashdo.Examples.IrisKMeans 4 | 5 | main = irisKMeans 6 | -------------------------------------------------------------------------------- /dashdo-examples/exe/StatView.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Dashdo.Examples.StatView 4 | 5 | main = statView -------------------------------------------------------------------------------- /dashdo-examples/exe/TestDashdo.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Dashdo.Examples.TestDashdo 4 | 5 | main = testDashdo -------------------------------------------------------------------------------- /dashdo-examples/lib/Dashdo/Examples/IrisKMeans.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExistentialQuantification, ExtendedDefaultRules, FlexibleContexts, 
TemplateHaskell #-} 2 | module Dashdo.Examples.IrisKMeans where 3 | 4 | import Numeric.Datasets.Iris 5 | 6 | import Dashdo 7 | import Dashdo.Types 8 | import Dashdo.Serve 9 | import Dashdo.Elements hiding (numInput) 10 | import Dashdo.FlexibleInput hiding (textInput) 11 | import Control.Monad 12 | import Control.Monad.State.Strict 13 | import Lucid 14 | import Lucid.Bootstrap3 15 | import Lucid.Bootstrap 16 | import Data.Monoid ((<>)) 17 | import Data.Text (Text, unpack, pack) 18 | import Lens.Micro.Platform 19 | import Fuml.Core 20 | import qualified Data.Vector.Storable as VS 21 | 22 | import Graphics.Plotly hiding (xaxis, yaxis) 23 | import Graphics.Plotly.Lucid 24 | import Control.Monad.Identity 25 | 26 | data IKM = IKM 27 | { _nclusters :: Int 28 | , _xaxis :: Tag (Int, Iris -> Double) 29 | , _yaxis :: Tag (Int, Iris -> Double) 30 | } 31 | 32 | makeLenses ''IKM 33 | 34 | axes = [tagOpt "sepal length" (0, sepalLength), 35 | tagOpt "sepal width" (1, sepalWidth), 36 | tagOpt "petal length" (2, petalLength), 37 | tagOpt "petal width" (3, petalWidth)] 38 | 39 | ikm0 = IKM 3 (snd $ axes!!0) (snd $ axes!!1) 40 | 41 | irisData 42 | = (const () 43 | ~~ [sepalLength, sepalWidth, petalLength, petalWidth]) 44 | iris 45 | 46 | irisKMeans = runDashdoIO $ Dashdo ikm0 dashdo 47 | 48 | dashdo :: SHtml IO IKM () 49 | dashdo = wrap plotlyCDN $ do 50 | ikm <- getValue 51 | --let ctrs :: [VS.Vector Double] 52 | -- ctrs = model $ runIdentity $ runSupervisor (kmeans $ ikm ^. nclusters) Nothing irisData 53 | 54 | h2_ "Iris k-means clustering" 55 | row_ $ do 56 | mkCol [(MD,3)] $ div_ [class_ "well"] $ do 57 | "X Variable" 58 | br_ [] 59 | xaxis <<~ select axes 60 | br_ [] 61 | "y Variable" 62 | br_ [] 63 | yaxis <<~ select axes 64 | br_ [] 65 | "Cluster count" 66 | br_ [] 67 | nclusters <<~ numInput & minVal ?~ 0 & step ?~ 1 68 | 69 | mkCol [(MD,9)] $ do 70 | let trace :: Trace 71 | trace = points (aes & x .~ (ikm ^. xaxis . tagVal . _2) 72 | & y .~ (ikm ^. yaxis . tagVal . _2) 73 | & color ?~ (getCol. irisClass) ) 74 | iris 75 | {-traceCtrs 76 | = points (aes & x .~ (VS.! (ikm ^. xaxis . tagVal . _1)) 77 | & y .~ (VS.! (ikm ^. yaxis . tagVal . _1))) 78 | ctrs -} 79 | toHtml $ plotly "foo" [trace {- ,traceCtrs-}] 80 | & layout . 
margin ?~ thinMargins 81 | 82 | getCol :: IrisClass -> RGB Double 83 | getCol Setosa = RGB 1 0 0 84 | getCol Versicolor = RGB 0 1 0 85 | getCol Virginica = RGB 0 0 1 -------------------------------------------------------------------------------- /dashdo-examples/lib/Dashdo/Examples/TestDashdo.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExistentialQuantification, ExtendedDefaultRules, FlexibleContexts, TemplateHaskell #-} 2 | module Dashdo.Examples.TestDashdo where 3 | 4 | import Numeric.Datasets.Iris 5 | 6 | import Dashdo 7 | import Dashdo.Types 8 | import Dashdo.Serve 9 | import Dashdo.Elements 10 | import Dashdo.FlexibleInput 11 | import Control.Monad 12 | import Control.Monad.State.Strict 13 | import Lucid 14 | import Data.Monoid ((<>)) 15 | import Data.Text (Text, unpack, pack) 16 | import Lens.Micro.Platform 17 | 18 | import Graphics.Plotly (plotly, layout, title, Trace) 19 | import Graphics.Plotly.Lucid 20 | import Graphics.Plotly.GoG 21 | import Graphics.Plotly.Histogram (histogram) 22 | 23 | data Example = Example 24 | { _pname :: Text 25 | , _isMale :: Bool 26 | , _xaxis :: Tag (Iris -> Double) 27 | , _yaxis :: Tag (Iris -> Double) 28 | } 29 | 30 | makeLenses ''Example 31 | 32 | testDashdo = runDashdoIO $ Dashdo initv (example iris) 33 | 34 | test :: SHtml IO Bool () 35 | test = do 36 | b <- getValue 37 | "The person is male: " 38 | if b then "yes" else "no" 39 | 40 | hello :: SHtml IO Text () 41 | hello = do 42 | id <<~ textInput 43 | br_ [] 44 | txt <- getValue 45 | "Hello, " <> (toHtml txt) <> "!" 46 | 47 | example :: [Iris] -> SHtml IO Example () 48 | example irisd = wrap plotlyCDN $ do 49 | nm <- getValue 50 | let trace :: Trace 51 | trace = points (aes & x .~ (nm ^. xaxis . tagVal) 52 | & y .~ (nm ^. yaxis . tagVal)) irisd 53 | -- & marker ?~ (defMarker & markercolor ?~ catColors (map irisClass irisd)) 54 | 55 | h2_ "Testing Dashdo" 56 | 57 | isMale <<~ select [("Male", True),("Female", False)] 58 | br_ [] 59 | 60 | "Name input #1:" 61 | pname <<~ textInput 62 | br_ [] 63 | 64 | "Name input #2:" 65 | pname <<~ textInput 66 | br_ [] 67 | 68 | "Name input #3:" 69 | pname <<~ textInput 70 | br_ [] 71 | 72 | "Greetings using (#>):" 73 | pname #> hello 74 | br_ [] 75 | 76 | isMale #> test 77 | br_ [] 78 | 79 | xaxis <<~ select axes 80 | yaxis <<~ select axes 81 | toHtml $ plotly "foo" [trace] & layout . title ?~ "my plot" 82 | 83 | axes = [tagOpt "sepal length" sepalLength, 84 | tagOpt "sepal width" sepalWidth, 85 | tagOpt "petal length" petalLength, 86 | tagOpt "petal width" petalWidth] 87 | 88 | initv = Example "Simon" True (snd $ axes!!0) (snd $ axes!!1) 89 | 90 | {-hbarData :: [(Text, Double)] 91 | hbarData = [("Simon", 14.5), ("Joe", 18.9), ("Dorothy", 16.2)] 92 | 93 | hbarsTrace = bars & y ?~ map fst hbarData & x ?~ map snd hbarData & orientation ?~ Horizontal -} 94 | -------------------------------------------------------------------------------- /dashdo-examples/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 
8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-8.5 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 40 | - '../../open/plotlyhs/' 41 | - '../../open/dashdo/' 42 | - '../../open/lucid-extras/' 43 | - '../../open/fuml/' 44 | - '../../open/parfoldl/' 45 | - '../../open/datasets' 46 | 47 | #- location: 48 | # git: git@github.com:glutamate/plotlyhs 49 | # commit: 1739b231193107297c38021dd8bf1a7c3cb2e3f8 50 | 51 | 52 | # Dependency packages to be pulled from upstream that are not in the resolver 53 | # (e.g., acme-missiles-0.3) 54 | extra-deps: 55 | - kmeans-vector-0.3.2 56 | - probable-0.1.2 57 | - statgrab-0.1.3 58 | 59 | 60 | # Override default flag values for local packages and extra-deps 61 | flags: {} 62 | 63 | # Extra package databases containing global packages 64 | extra-package-dbs: [] 65 | 66 | # Control whether we use the GHC we find on the path 67 | # system-ghc: true 68 | # 69 | # Require a specific version of stack, using version ranges 70 | # require-stack-version: -any # Default 71 | # require-stack-version: ">=1.1" 72 | # 73 | # Override the architecture used by stack, especially useful on Windows 74 | # arch: i386 75 | # arch: x86_64 76 | # 77 | # Extra directories used by stack for building 78 | # extra-include-dirs: [/path/to/dir] 79 | # extra-lib-dirs: [/path/to/dir] 80 | # 81 | # Allow a newer minor version of GHC than the snapshot specifies 82 | # compiler-check: newer-minor -------------------------------------------------------------------------------- /dashdo/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /dashdo/README.md: -------------------------------------------------------------------------------- 1 | ## Dashdo 2 | 3 | A framework for writing web-based analytics dashboards in Haskell, inspired by [Shiny](https://shiny.rstudio.com/). 4 | 5 | Dashdo dashboards are defined over a Haskell type which specifies the state of the dashboard. Form elements are tied to 6 | lenses into the state value, and the displayed content is calculated from the state. Dashboards are defined 7 | as a single record type that can be interpreted in multiple different ways. 8 | 9 | ```haskell 10 | {- 11 | +------------------- the state of the dashboard 12 | | 13 | | +-------- the state is augmented 14 | | | with information fetched in `IO` 15 | | | 16 | v v -} 17 | data Dashdo t = forall b. Dashdo 18 | { initial :: t -- the initial state 19 | , fetch :: t -> IO b -- fetch more data 20 | , render :: t -> b -> SHtml t () -- render the dashboard 21 | } {- /|\ 22 | | 23 | +-------- generate HTML, and keep track 24 | of how form elements bind 25 | lenses into state 26 | -} 27 | ``` 28 | 29 | `SHtml` is a Lucid monad transformer and you can use all the usual Lucid combinators. 30 | 31 | Run your dashdo dashboard with `Dashdo.Serve.runDashdo`. 32 | 33 | ### Examples 34 | 35 | For a simple example, we will just ask for the user's name. We need a simple state containing that name. We could just 36 | use a `Text` value as the state and the identity lens, but a record is a more realistic starting point for when you 37 | want to add functionality. 38 | 39 | ```haskell 40 | data Example = Example 41 | { _name :: Text 42 | } 43 | 44 | makeLenses ''Example 45 | ``` 46 | 47 | We then need to show the dashboard as a function of the state, together with the form element (here an `<input>`) 48 | to change the state. 49 | 50 | ```haskell 51 | myRender :: Example -> SHtml Example () 52 | myRender s = do 53 | h2_ "Testing Dashdo" 54 | textInput name 55 | br_ [] 56 | "Hello " <> (toHtml $ s ^. name) 57 | ``` 58 | 59 | Since we are not making use of the functionality to fetch extra information in the `IO` monad, we can use 60 | the `pureDashdo` function to generate our `Dashdo`. 61 | 62 | ```haskell 63 | main = runDashdo $ pureDashdo "George" myRender 64 | ``` 65 | 66 | For more examples, see the [dashdo-examples](https://github.com/diffusionkinetics/open/tree/master/dashdo-examples) directory. 67 | 68 | ### To re-run on file changes 69 | 70 | `git ls-files | entr -r stack build --exec test-dashdo` 71 | 72 | or 73 | 74 | `find . -name '*.hs' ! 
-path '*stack-work*' | entr -r stack build --exec test-dashdo` -------------------------------------------------------------------------------- /dashdo/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /dashdo/dashdo.cabal: -------------------------------------------------------------------------------- 1 | Name: dashdo 2 | Version: 0.1.0 3 | Synopsis: Shiny for Haskell 4 | Description: 5 | Web-based interactive applets for data analysis 6 | 7 | 8 | License: MIT 9 | License-file: LICENSE 10 | Author: Tom Nielsen 11 | Maintainer: tanielsen@gmail.com 12 | build-type: Simple 13 | category: Statistics, Web 14 | Cabal-Version: >= 1.10 15 | extra-source-files: 16 | public/js/dashdo.js 17 | public/js/runners/*.js 18 | 19 | Library 20 | ghc-options: -Wall -fno-warn-type-defaults 21 | hs-source-dirs: src 22 | default-language: Haskell2010 23 | 24 | 25 | Exposed-modules: 26 | Dashdo 27 | , Dashdo.Serve 28 | , Dashdo.Types 29 | , Dashdo.Elements 30 | , Dashdo.Rdash 31 | , Dashdo.FlexibleInput 32 | , Dashdo.Files 33 | 34 | Build-depends: 35 | base >= 4.6 && <5 36 | , scotty 37 | , http-types 38 | , lucid 39 | , lucid-extras 40 | , text 41 | , cassava 42 | , mtl 43 | , microlens 44 | , microlens-th 45 | , microlens-mtl 46 | , uuid 47 | , bytestring 48 | , random 49 | , file-embed >=0.0.9 50 | , hashable 51 | , wai-extra 52 | , template-haskell 53 | , aeson 54 | , plotlyhs 55 | , deepseq 56 | , safe-exceptions 57 | , containers 58 | -------------------------------------------------------------------------------- /dashdo/public/js/runners/base.js: -------------------------------------------------------------------------------- 1 | (function($) { 2 | $(document).ready(function() { 3 | $('#dashdoform').dashdo() 4 | }) 5 | })(jQuery) -------------------------------------------------------------------------------- /dashdo/public/js/runners/rdashdo.js: -------------------------------------------------------------------------------- 1 | (function($) { 2 | $(document).ready(function() { 3 | $('#dashdo-form').dashdo({ 4 | containerSelector: '#dashdo-main', 5 | switcherElements: $('.dashdo-link'), 6 | }) 7 | }) 8 | })(jQuery) -------------------------------------------------------------------------------- /dashdo/src/Dashdo.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExistentialQuantification, ExtendedDefaultRules, FlexibleContexts, Rank2Types, TemplateHaskell, ScopedTypeVariables #-} 2 | 3 | module Dashdo where 4 | 5 | import qualified Data.Text.Lazy as TL 6 | import Dashdo.Types 7 | import Data.Monoid ((<>)) 8 | import Data.Text (Text) 9 | 10 | 11 | dashdoGenOut :: Monad m 12 | => Dashdo m a 13 | -> a 14 | -> [(TL.Text, TL.Text)] 15 | -> m (TL.Text, FormFields a, [(Text, a -> m ActionResult)] ) 16 | dashdoGenOut (Dashdo _ r) x pars = runSHtml x r pars 17 | 18 | 19 | parseForm :: a -> FormFields a -> [(TL.Text, TL.Text)] -> a 20 | parseForm x [] _ = x 21 | parseForm x ((fnm,f):nfs) pars = -- x=initial d (accumulator) 22 | let fldName = TL.fromStrict fnm -- fn 23 | newx = case lookup fldName pars of -- looking for fn in params [(TL.Text, TL.Text)] 24 | Just lt -> f x (TL.toStrict lt) -- apply function corresponding to `n` in list of FormFields - number-function pairs 25 | Nothing -> case filter ((== fldName <> "[]") . 
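{- A small worked example (hypothetical values) of what this fold does. With a
   single registered field whose setter overwrites a Text state,

       fields = [("f0", \_ txt -> txt)] :: FormFields Text

       parseForm "old" fields [("f0", "Alice")]              == "Alice"
       parseForm "old" fields [("f1", "Bob")]                == "old"   -- unknown name: state unchanged
       parseForm ""    fields [("f0[]", "a"), ("f0[]", "b")] == "b"     -- multi-valued input, folded left to right
   -}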
fst) pars of -- if nothing found, try looking for fn[] 26 | [] -> x 27 | listParams -> foldl f x (map (TL.toStrict . snd) listParams) 28 | in parseForm newx nfs pars 29 | {-parseForm x _ [] = x 30 | parseForm x ffs pars@((k,v):parsTail) = 31 | let 32 | currentKeyIsList = "[]" `isSuffixOf` (TL.unpack k)x 33 | 34 | lookupKey = 35 | if not currentKeyIsList 36 | then TL.toStrict k 37 | else TL.toStrict $ TL.dropEnd 2 k 38 | 39 | newx = 40 | case lookup lookupKey ffs of -- TODO: will not find fn[] 41 | Just f -> 42 | if not currentKeyIsList 43 | then 44 | f x $ TL.toStrict v 45 | else 46 | foldl f x (map (TL.toStrict . snd) $ filter ((==k) . fst) pars) 47 | Nothing -> x 48 | 49 | newparsTail = 50 | if not currentKeyIsList 51 | then parsTail 52 | else filter (not . (==k) . fst) parsTail -- we have used that key, will not use anymore 53 | 54 | in parseForm newx ffs newparsTail -} 55 | -------------------------------------------------------------------------------- /dashdo/src/Dashdo/Files.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, TemplateHaskell #-} 2 | module Dashdo.Files where 3 | 4 | import Data.FileEmbed 5 | import qualified Data.ByteString.Lazy as BLS 6 | import qualified Data.Text as T 7 | import Lucid 8 | import Lucid.PreEscaped 9 | 10 | 11 | dashdoCDN, dashdoCDNbase, dashdoCDNrdash :: Monad m => HtmlT m () 12 | dashdoCDN = scriptSrc "https://cdn.diffusionkinetics.com/dashdo/0.1/dashdo.js" 13 | dashdoCDNbase = scriptSrc "https://cdn.diffusionkinetics.com/dashdo/0.1/base.js" 14 | dashdoCDNrdash = scriptSrc "https://cdn.diffusionkinetics.com/dashdo/0.1/rdash.js" 15 | 16 | dashdoJS :: BLS.ByteString 17 | dashdoJS = BLS.fromStrict $(embedFile "public/js/dashdo.js") 18 | 19 | dashdoJSrunnerBase :: BLS.ByteString 20 | dashdoJSrunnerBase = BLS.fromStrict $(embedFile "public/js/runners/base.js") 21 | 22 | dashdoJSrunnerRdash :: BLS.ByteString 23 | dashdoJSrunnerRdash = BLS.fromStrict $(embedFile "public/js/runners/rdashdo.js") 24 | 25 | runnersEmbedded :: [(T.Text, BLS.ByteString)] 26 | runnersEmbedded = 27 | [ ("base.js", dashdoJSrunnerBase) 28 | , ("rdashdo.js", dashdoJSrunnerRdash) 29 | ] 30 | -------------------------------------------------------------------------------- /dashdo/src/Dashdo/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExistentialQuantification, ExtendedDefaultRules, FlexibleContexts, Rank2Types, TemplateHaskell, ScopedTypeVariables #-} 2 | 3 | module Dashdo.Types where 4 | 5 | import Lucid 6 | import Data.Text (Text, pack) 7 | import qualified Data.Text.Lazy as TL 8 | import Control.Monad.State.Strict 9 | import Lens.Micro 10 | import Data.Monoid ((<>)) 11 | import Lens.Micro.TH 12 | import Lens.Micro.Mtl 13 | 14 | type FormField t = (Text, t -> Text -> t) 15 | type FormFields t = [FormField t] 16 | type FieldName = Text 17 | 18 | data ActionResult = Reset | DoNothing | Goto String 19 | 20 | data DD m t = DD 21 | { _freshSupply :: [FieldName] 22 | , _value :: t 23 | , _rawParams :: [(TL.Text, TL.Text)] 24 | , _formFields :: FormFields t 25 | , _actions :: [(Text, t -> m ActionResult)] 26 | } 27 | 28 | makeLenses ''DD 29 | 30 | type SHtml m t = HtmlT (StateT (DD m t) m) 31 | 32 | data RDashdo m = forall t. 
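{- A sketch of how a widget is expected to use this state (the `textInput`
   shown here is an assumption, loosely mirroring Dashdo.Elements, not a
   definition from this module): it pops a fresh field name, registers a setter
   for `parseForm`, and emits an element carrying that name.

       textInput :: Monad m => Lens' t Text -> SHtml m t ()
       textInput l = do
           (val, n) <- freshAndValue
           putFormField (n, lensSetter l)
           input_ [type_ "text", name_ n, value_ (val ^. l)]
   -}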
RDashdo 33 | { rdFid :: String 34 | , rdTitle :: Text 35 | , rdDashdo :: Dashdo m t } 36 | 37 | data Dashdo m t = Dashdo 38 | { initial :: t 39 | , render :: SHtml m t () } 40 | 41 | runSHtml :: Monad m 42 | => t 43 | -> SHtml m t () 44 | -> [(TL.Text, TL.Text)] 45 | -> m (TL.Text, FormFields t, [(Text, t -> m ActionResult)]) 46 | runSHtml val shtml pars = do 47 | let stT = renderTextT shtml 48 | iniFldNams = map (("f"<>) . pack . show) [(0::Int)..] 49 | (t, (DD _ _ _ ffs acts)) <- runStateT stT (DD iniFldNams val pars [] []) 50 | return (t, ffs, acts) 51 | 52 | fresh :: Monad m => SHtml m a FieldName 53 | fresh = do 54 | head <$> (freshSupply <<%= tail) 55 | 56 | freshAndValue :: Monad m => SHtml m a (a, FieldName) 57 | freshAndValue = (,) <$> getValue <*> fresh 58 | 59 | named :: Monad m => FieldName -> SHtml m t a -> SHtml m t a 60 | named nm mx = do 61 | freshSupply %= (nm:) 62 | mx 63 | 64 | getValue :: Monad m => SHtml m a a 65 | getValue = use value 66 | 67 | putFormField :: Monad m => FormField t -> SHtml m t () 68 | putFormField ff = do 69 | formFields %= (ff:) 70 | 71 | putAction :: Monad m => (Text, t -> m ActionResult) -> SHtml m t () 72 | putAction ff = do 73 | actions %= (ff:) 74 | 75 | lensSetter :: ASetter' s a -> (s -> a -> s) 76 | lensSetter l x y = x & l .~ y 77 | 78 | lensPusher :: ASetter' s [a] -> (s -> a -> s) 79 | lensPusher l x y = x & l %~ ((:) y) -------------------------------------------------------------------------------- /dashdo/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-9.21 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 
40 | - '../../open/plotlyhs/' 41 | - '../../open/lucid-extras/' 42 | #- location: 43 | # git: git@github.com:glutamate/plotlyhs 44 | # commit: 1739b231193107297c38021dd8bf1a7c3cb2e3f8 45 | 46 | 47 | # Dependency packages to be pulled from upstream that are not in the resolver 48 | # (e.g., acme-missiles-0.3) 49 | extra-deps: [] 50 | 51 | # Override default flag values for local packages and extra-deps 52 | flags: {} 53 | 54 | # Extra package databases containing global packages 55 | extra-package-dbs: [] 56 | 57 | # Control whether we use the GHC we find on the path 58 | # system-ghc: true 59 | # 60 | # Require a specific version of stack, using version ranges 61 | # require-stack-version: -any # Default 62 | # require-stack-version: ">=1.1" 63 | # 64 | # Override the architecture used by stack, especially useful on Windows 65 | # arch: i386 66 | # arch: x86_64 67 | # 68 | # Extra directories used by stack for building 69 | # extra-include-dirs: [/path/to/dir] 70 | # extra-lib-dirs: [/path/to/dir] 71 | # 72 | # Allow a newer minor version of GHC than the snapshot specifies 73 | # compiler-check: newer-minor -------------------------------------------------------------------------------- /docker-minidebhs/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM bitnami/minideb:jessie 2 | 3 | RUN apt-get update \ 4 | && apt-get -y install wget build-essential libffi-dev libgmp-dev xz-utils zlib1g-dev libpq-dev \ 5 | git gnupg libgsl0-dev liblapack-dev libatlas-base-dev pkg-config \ 6 | && wget -qO- https://get.haskellstack.org/ | sh && rm -rf /var/lib/apt/lists/* 7 | 8 | RUN cd /tmp && echo "resolver: lts-8.5" > stack.yaml && echo "packages: []" >> stack.yaml \ 9 | && cat stack.yaml \ 10 | && stack setup \ 11 | && stack install lens scotty hmatrix uuid random-fu haskell-src-exts 12 | -------------------------------------------------------------------------------- /docker-minidebhs/README.md: -------------------------------------------------------------------------------- 1 | To build & push 2 | 3 | ``` 4 | docker build -t filopodia/minidebhs:lts-8.5 . 5 | docker push filopodia/minidebhs:lts-8.5 6 | ``` -------------------------------------------------------------------------------- /echarts-hs/.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | dist-* 3 | cabal-dev 4 | *.o 5 | *.hi 6 | *.chi 7 | *.chs.h 8 | *.dyn_o 9 | *.dyn_hi 10 | .hpc 11 | .hsenv 12 | .cabal-sandbox/ 13 | cabal.sandbox.config 14 | *.prof 15 | *.aux 16 | *.hp 17 | *.eventlog 18 | .stack-work/ 19 | cabal.project.local 20 | .HTF/ 21 | echartstest.html 22 | -------------------------------------------------------------------------------- /echarts-hs/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 DiffusionKinetics 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /echarts-hs/README.md: -------------------------------------------------------------------------------- 1 | # echarts-hs 2 | -------------------------------------------------------------------------------- /echarts-hs/changelog.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/echarts-hs/changelog.md -------------------------------------------------------------------------------- /echarts-hs/echarts.cabal: -------------------------------------------------------------------------------- 1 | Name: echarts 2 | Version: 0.2 3 | Synopsis: Haskell bindings to Echarts.js 4 | Description: 5 | Generate web-based plots with the Echarts.js library. 6 | 7 | 8 | License: MIT 9 | License-file: LICENSE 10 | Author: Tom Nielsen, Jason Chan 11 | Maintainer: tanielsen@gmail.com 12 | build-type: Simple 13 | Cabal-Version: >= 1.10 14 | homepage: https://github.com/diffusionkinetics/open/echarts 15 | bug-reports: https://github.com/diffusionkinetics/open/echarts/issues 16 | category: Graphics, Charts 17 | Tested-With: GHC == 7.8.4, GHC == 7.10.2, GHC == 7.10.3, GHC == 8.0.1 18 | 19 | 20 | extra-source-files: 21 | changelog.md 22 | 23 | 24 | Library 25 | ghc-options: -Wall -fno-warn-type-defaults 26 | hs-source-dirs: src 27 | default-language: Haskell2010 28 | 29 | 30 | Exposed-modules: 31 | Graphics.Echarts 32 | 33 | Build-depends: 34 | base >= 4.6 && <5 35 | , aeson 36 | , lucid 37 | , lucid-extras 38 | , blaze-html 39 | , blaze-markup 40 | , text 41 | , bytestring 42 | , microlens-th 43 | , microlens 44 | 45 | Test-suite test-echarts 46 | type: exitcode-stdio-1.0 47 | default-language: Haskell2010 48 | 49 | main-is: TestEcharts.hs 50 | build-depends: base >=4.6 && <5 51 | , lucid 52 | , lucid-extras 53 | , echarts 54 | , text 55 | , microlens 56 | , datasets 57 | , bytestring 58 | , aeson 59 | -------------------------------------------------------------------------------- /echarts-hs/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-8.5 19 | 20 | # User packages to be built. 
21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 40 | - ../lucid-extras 41 | - ../datasets 42 | 43 | # Dependency packages to be pulled from upstream that are not in the resolver 44 | # (e.g., acme-missiles-0.3) 45 | extra-deps: [] 46 | 47 | # Override default flag values for local packages and extra-deps 48 | flags: {} 49 | 50 | # Extra package databases containing global packages 51 | extra-package-dbs: [] 52 | 53 | # Control whether we use the GHC we find on the path 54 | # system-ghc: true 55 | # 56 | # Require a specific version of stack, using version ranges 57 | # require-stack-version: -any # Default 58 | # require-stack-version: ">=1.1" 59 | # 60 | # Override the architecture used by stack, especially useful on Windows 61 | # arch: i386 62 | # arch: x86_64 63 | # 64 | # Extra directories used by stack for building 65 | # extra-include-dirs: [/path/to/dir] 66 | # extra-lib-dirs: [/path/to/dir] 67 | # 68 | # Allow a newer minor version of GHC than the snapshot specifies 69 | # compiler-check: newer-minor 70 | -------------------------------------------------------------------------------- /fuml-svm/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License (MIT) 2 | 3 | Copyright (c) 2017 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /fuml-svm/TestSVM.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Numeric.Datasets.Iris 4 | import Numeric.Datasets.OldFaithful 5 | import Fuml.Svm 6 | import Fuml.Core 7 | import Fuml.Supervised.Accuracy 8 | 9 | import Data.Function (on) 10 | import Data.Tuple 11 | import AI.SVM.Simple 12 | import Control.Arrow 13 | import Data.Random 14 | import qualified Data.Vector.Storable as V 15 | 16 | toVector = swap . fmap V.fromList 17 | 18 | toList :: Iris -> (Int, [Double]) 19 | toList (Iris a b c d cl) = (toNum cl, [a,b,c,d]) where 20 | toNum Virginica = 0 21 | toNum Setosa = -1 22 | toNum Versicolor = 1 23 | 24 | oflist :: OldFaithful -> (V.Vector Double, Double) 25 | oflist (OF a b c) = (V.fromList [fromIntegral a, b], c) 26 | 27 | directSvmSimple :: IO () 28 | directSvmSimple = do 29 | (test, train) <- fmap (splitAt 20) . sample . shuffle . fmap toList $ iris 30 | 31 | let ((Result c gamma acc), cl) = chehLin train 32 | 33 | count :: Eq a => [(a,a)] -> Double 34 | count xs = on (/) (realToFrac . length) (filter (uncurry (==)) xs) xs 35 | 36 | print "test accuracy" 37 | print . count $ fmap (classify cl) <$> test 38 | print "training accuracy" 39 | print acc 40 | 41 | testClassifier :: IO () 42 | testClassifier = do 43 | (test, train) <- fmap (splitAt 50) . sample . shuffle . fmap (toVector . toList) $ iris 44 | 45 | 46 | let oneclass = runSupervisor' (oneClass 0.01 Linear) $ fmap (const ()) <$> filter ((== 1) . snd) train 47 | let simple = runSupervisor' rbfSimple train 48 | let basic = runSupervisor' (svc (C 8) (Polynomial 3 3 3)) train 49 | 50 | let go a | a == 1 = In | otherwise = Out 51 | print "oneclass" 52 | print $ accuracy (fmap go <$> test) oneclass 53 | 54 | print "simple accuracy" 55 | print $ accuracy test simple 56 | 57 | print "basic accuracy" 58 | print $ accuracy test basic 59 | 60 | testRegression :: IO () 61 | testRegression = do 62 | (test, train) <- fmap (splitAt 20) . sample . shuffle . 
fmap oflist $ oldFaithful 63 | 64 | let regr = runSupervisor' (svr (Epsilon 0.1 0.1) (Sigmoid 3e3 3e3)) train 65 | 66 | print $ rmse test regr 67 | 68 | {-main = testRegression-} 69 | {-main = testClassifier-} 70 | main = do 71 | testRegression 72 | testClassifier 73 | -------------------------------------------------------------------------------- /fuml-svm/fuml-svm.cabal: -------------------------------------------------------------------------------- 1 | name: fuml-svm 2 | version: 0.1.0.0 3 | synopsis: Support vector machines using fuml 4 | description: Support vector machines using fuml 5 | homepage: https://github.com/diffusionkinetics/open/tree/master/fuml-svm 6 | license: MIT 7 | license-file: LICENSE 8 | author: Tom Nielsen 9 | maintainer: tomn@diffusionkinetics.com 10 | copyright: Tom Nielsen 11 | category: Statistics 12 | build-type: Simple 13 | -- extra-source-files: 14 | cabal-version: >=1.10 15 | 16 | source-repository head 17 | type: git 18 | location: https://github.com/diffusionkinetics/open 19 | 20 | library 21 | hs-source-dirs: lib 22 | ghc-options: -fwarn-unused-imports -fno-warn-unused-do-bind -fno-warn-type-defaults 23 | default-language: Haskell2010 24 | exposed-modules: Fuml.Svm 25 | build-depends: base >= 4.6 && < 5 26 | , svm-simple 27 | , fuml 28 | , vector 29 | , hmatrix 30 | , random-fu 31 | , data-default 32 | , kmeans-vector 33 | , microlens 34 | , mtl 35 | , random-fu 36 | , deepseq 37 | 38 | Test-Suite test-svm 39 | type: exitcode-stdio-1.0 40 | main-is: TestSVM.hs 41 | default-language: Haskell2010 42 | build-depends: base >=4.6 && <5 43 | , fuml 44 | , datasets 45 | , vector 46 | , mtl 47 | , random-fu 48 | , svm-simple 49 | , fuml-svm 50 | , microlens 51 | -------------------------------------------------------------------------------- /fuml-svm/lib/Fuml/Svm.hs: -------------------------------------------------------------------------------- 1 | module Fuml.Svm 2 | ( ClassifierType(..) 3 | , Kernel(..) 4 | , OneClassResult(..) 5 | , RegressorType(..) 6 | , rbfSimple 7 | , svc 8 | , svcWtd 9 | , svr 10 | , oneClass 11 | ) where 12 | 13 | import AI.SVM.Simple 14 | import Data.List (maximumBy) 15 | import Data.Function (on) 16 | import Control.Monad.Identity (Identity) 17 | import Control.DeepSeq (NFData) 18 | import Fuml.Core 19 | import qualified Data.Vector.Storable as VS 20 | 21 | rbfSimple :: (Ord o, NFData o) => Supervisor Identity o (Double, SVMClassifier o) o 22 | rbfSimple = Supervisor . const $ predWith chehLin classify 23 | 24 | svc :: Ord o => 25 | ClassifierType 26 | -> Kernel 27 | -> Supervisor Identity o (Double, SVMClassifier o) o 28 | svc cltype kernel = Supervisor . const $ predWith (trainClassifier cltype kernel) classify 29 | 30 | svcWtd :: Ord o => 31 | ClassifierType 32 | -> Kernel 33 | -> Supervisor Identity (Weighted o) (Double, SVMClassifier o) o 34 | svcWtd cltype kernel = Supervisor $ \_ theData -> 35 | let f = factor theData 36 | 37 | separate :: (Weighted o, VS.Vector Double) -> ((o, Double), (o, VS.Vector Double)) 38 | separate (Weighted w o, v) = ((o, w), (o, v)) 39 | 40 | wtdCl = snd $ trainWtdClassifier cltype kernel weights trainingSet 41 | (weights, trainingSet) = unzip . map separate . vScale f $ theData 42 | 43 | in return $ Predict (f, wtdCl) (classify wtdCl . VS.map (*f)) 44 | 45 | predWith train interpret dataset = return $ Predict (f, svm) (interpret svm . VS.map (*f)) 46 | where f = factor dataset 47 | scaled = vScale f dataset 48 | svm = snd . 
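{- A usage sketch borrowed from TestSVM.hs in this repository (`rows` is
   hypothetical labelled data, e.g. encoded iris measurements):

       rows :: [(VS.Vector Double, Int)]

       let p = runSupervisor' (svc (C 8) (Polynomial 3 3 3)) rows
       predict p (VS.fromList [5.1, 3.5, 1.4, 0.2])

   The shared plumbing lives in `predWith`: it rescales every feature vector by
   `factor` before training and applies the same scaling inside the returned
   prediction function, so callers pass unscaled vectors. -}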
train $ scaled 49 | 50 | factor :: [(VS.Vector Double, o)] -> Double 51 | factor = recip . VS.maximum . fst . maximumBy (compare `on` VS.maximum . fst) 52 | 53 | vScale = map . scaleSwap where 54 | scaleSwap :: Double -> (VS.Vector Double, o) -> (o, VS.Vector Double) 55 | scaleSwap factor = (\(v,o) -> (o, VS.map (* factor) v)) 56 | 57 | oneClass :: 58 | Double -- 'nu' parameter 59 | -> Kernel 60 | -> Supervisor Identity () (Double, SVMOneClass) OneClassResult 61 | oneClass nu kernel = Supervisor . const $ 62 | predWith (trainOneClass nu kernel . map snd) inSet 63 | 64 | svr :: 65 | RegressorType 66 | -> Kernel 67 | -> Supervisor Identity Double (Double, SVMRegressor) Double 68 | svr rtype kernel = Supervisor . const $ 69 | predWith (trainRegressor rtype kernel) predictRegression 70 | -------------------------------------------------------------------------------- /fuml-svm/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by stack init 2 | # For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration/ 3 | 4 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 5 | resolver: lts-8.5 6 | 7 | # Local packages, usually specified by relative directory name 8 | packages: 9 | - '.' 10 | - ../datasets 11 | - ../fuml 12 | - location: 13 | git: https://github.com/glutamate/random-fu.git 14 | commit: 4274fc99b22610951853c03cf4bf23bab906a8c7 15 | extra-dep: true 16 | subdirs: 17 | - random-fu 18 | - location: 19 | git: https://github.com/aleator/Simple-SVM 20 | commit: 591b86e645e04ac8e7b73246e9a6b573840fb251 21 | extra-dep: true 22 | 23 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 24 | extra-deps: 25 | - kmeans-vector-0.3.2 26 | - probable-0.1.2 27 | - bindings-svm-0.2.1 28 | 29 | # Override default flag values for local packages and extra-deps 30 | flags: {} 31 | 32 | # Extra package databases containing global packages 33 | extra-package-dbs: [] 34 | 35 | # Control whether we use the GHC we find on the path 36 | # system-ghc: true 37 | 38 | # Require a specific version of stack, using version ranges 39 | # require-stack-version: -any # Default 40 | # require-stack-version: >= 1.0.0 41 | 42 | # Override the architecture used by stack, especially useful on Windows 43 | # arch: i386 44 | # arch: x86_64 45 | 46 | # Extra directories used by stack for building 47 | # extra-include-dirs: [/path/to/dir] 48 | # extra-lib-dirs: [/path/to/dir] 49 | 50 | # Allow a newer minor version of GHC than the snapshot specifies 51 | # compiler-check: newer-minor 52 | -------------------------------------------------------------------------------- /fuml/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License (MIT) 2 | 3 | Copyright (c) 2017 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /fuml/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /fuml/TestFuml.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Fuml.Supervised 4 | import Fuml.Unsupervised 5 | import Fuml.Core 6 | import Fuml.Base 7 | import Fuml.Base.Logistic 8 | import Fuml.Optimisation 9 | import Fuml.Optimisation.SGD 10 | import Numeric.Datasets.Iris 11 | import qualified Data.Vector.Storable as VS 12 | import Control.Monad 13 | import Control.Monad.Identity 14 | import Data.Random 15 | 16 | irisData = map f iris where 17 | f i = ( irisVec i 18 | , irisClass i) 19 | 20 | irisDataBin = map f iris where 21 | f i = ( irisVec i 22 | , irisClass i == Setosa) 23 | 24 | irisVec i = VS.fromList [sepalLength i, sepalWidth i, petalLength i, petalWidth i] 25 | 26 | main = do 27 | let l = runIdentity $ runSupervisor (oneVsRest (logisticBfgs 0.001)) Nothing irisData 28 | print $ model l 29 | let i1 = head iris 30 | i2 = last iris 31 | print (i1, softMax $ predict l (irisVec i1)) 32 | print (i2, softMax $ predict l (irisVec i2)) 33 | 34 | --let clusters = cluster iris irisVec (kmeans 3) 35 | --forM_ clusters $ \is -> print $ map irisClass is 36 | --testGradients 37 | testSGD 38 | putStrLn "hello world" 39 | 40 | sgdOpts = SGDOpts 0.01 5 200 41 | 42 | testSGD = do 43 | let Predict (betaBfgs, _) _ = runIdentity $ runSupervisor (logisticBfgs 0.01) Nothing irisDataBin 44 | print betaBfgs 45 | Predict (SGDModel betaSgd losses) _ 46 | <- sample $ runSupervisor (sgdSupervisor sgdOpts $ logisticCost 0.01) Nothing irisDataBin 47 | print losses 48 | print betaSgd 49 | 50 | testGradients :: IO () 51 | testGradients = do 52 | let Predict (beta, _) _ = runIdentity $ runSupervisor (logisticBfgs 0.01) Nothing irisDataBin 53 | --beta = VS.fromList [2.6636743970209067,7.813238177371347,-11.428852736947137,-5.1733429446846255] 54 | (ptv, outc) = head irisDataBin 55 | (ptv1, outc1) = last irisDataBin 56 | (ptvnan, outcnan) = irisDataBin!!14 57 | beta0 = VS.map (const (0::Double)) beta 58 | beta12 = VS.map (/(2::Double)) beta 59 | delta = 0.5 60 | f = logLikelihood delta irisDataBin 61 | f1 v = logLikelihood1 v ptv outc 62 | symgradf1 v = gradLogLikelihood1 v ptv outc 63 | symgradf v = gradLogLikelihood delta irisDataBin v 64 | --nanv = VS.fromList [3.146947641220465,9.257333905281397,-13.535856317260674,-6.124467708046397] 65 | -- print2 "ll betanan" $ logLikelihood1 nanv ptvnan outcnan 66 | -- print2 "ll betanan" $ logit $ VS.sum $ VS.zipWith (*) nanv ptvnan 67 | -- print2 "nan data pt" $ irisDataBin!!14 68 | print2 "ll beta12" $ f beta12 69 | print2 "beta" $ beta 70 | -- print2 "ll betahat" $ f beta 71 | print2 "fdgrad beta12" $ fdgrad f beta12 72 | print2 "symgradf beta12" $ symgradf beta12 73 | --print2 "f1 
betahat" $ f1 beta 74 | --print2 "f1 beta0" $ f1 beta0 75 | --print2 "fdgradll betahat" $ fdgrad f beta 76 | -- print2 "fdgradf1 beta0" $ fdgrad f1 beta0 77 | -- print2 "symgradf1 beta0" $ symgradf1 beta0 78 | return () 79 | 80 | print2 s x = putStrLn $ s ++ ": "++show x 81 | -------------------------------------------------------------------------------- /fuml/fuml.cabal: -------------------------------------------------------------------------------- 1 | name: fuml 2 | version: 0.1.0.0 3 | synopsis: Functional Machine Learning 4 | description: Functional Machine Learning using combinators for classification, 5 | regression, clustering and dimensionality reduction. 6 | homepage: https://github.com/diffusionkinetics/open/tree/master/fuml 7 | license: MIT 8 | license-file: LICENSE 9 | author: Tom Nielsen 10 | maintainer: tomn@diffusionkinetics.com 11 | copyright: Tom Nielsen 12 | category: Statistics 13 | build-type: Simple 14 | -- extra-source-files: 15 | cabal-version: >=1.10 16 | 17 | source-repository head 18 | type: git 19 | location: https://github.com/diffusionkinetics/open 20 | 21 | library 22 | hs-source-dirs: lib 23 | ghc-options: -fwarn-unused-imports -fno-warn-unused-do-bind -fno-warn-type-defaults 24 | default-language: Haskell2010 25 | exposed-modules: 26 | Fuml.Base 27 | , Fuml.Base.LinearRegression 28 | , Fuml.Base.Logistic 29 | , Fuml.Base.PCA 30 | , Fuml.Base.KNN 31 | , Fuml.Core 32 | , Fuml.Supervised 33 | , Fuml.Supervised.Accuracy 34 | , Fuml.Supervised.KNN 35 | , Fuml.Unsupervised 36 | , Fuml.Optimisation 37 | , Fuml.Optimisation.NelderMead 38 | , Fuml.Optimisation.BFGS 39 | , Fuml.Optimisation.SGD 40 | build-depends: base >= 4.6 && < 5 41 | , vector 42 | , hmatrix 43 | , random-fu 44 | , data-default 45 | , microlens 46 | , mtl 47 | , random-fu 48 | , parfoldl 49 | 50 | Test-Suite test-fuml 51 | type: exitcode-stdio-1.0 52 | main-is: TestFuml.hs 53 | default-language: Haskell2010 54 | build-depends: base >=4.6 && <5 55 | , fuml 56 | , datasets 57 | , vector 58 | , mtl 59 | , random-fu 60 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Base.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts #-} 2 | 3 | 4 | module Fuml.Base where 5 | 6 | import qualified Data.Vector.Generic as V 7 | 8 | fdgrad :: V.Vector v Double => (v Double -> Double) -> v Double -> v Double 9 | fdgrad f xv = V.imap g xv where 10 | g ix x = let h = if abs x > 1e-7 11 | then abs (x) * 2e-5 12 | else 1e-10 13 | plus = xv V.// [(ix, x+h)] 14 | minus = xv V.// [(ix, x-h)] 15 | in (f plus - f minus)/(2*h) 16 | 17 | softMax :: [(a, Double)] -> [(a, Double)] 18 | softMax axs = let d = sum $ map (exp . snd) axs 19 | f (a,x) = (a, exp x / d) 20 | in map f axs 21 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Base/KNN.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ScopedTypeVariables, TypeFamilies #-} 2 | 3 | module Fuml.Base.KNN where 4 | 5 | import qualified Data.Vector.Storable as VS 6 | import Numeric.LinearAlgebra 7 | import Data.List (nub, sortBy) 8 | import Data.Ord (comparing) 9 | 10 | euclideanDistance :: Vector Double -> Vector Double -> Double 11 | euclideanDistance v1 v2 = sqrt $ VS.sum $ VS.map (^2) $ VS.zipWith (-) v1 v2 12 | 13 | weightedBoolVote :: [(Double, Bool)] -> Double 14 | weightedBoolVote distBools = 15 | let wtrue = sum $ map (recip . 
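{- A tiny worked example (made-up distances) for the voting helpers in this
   module, where each pair is (distance to neighbour, neighbour's label):

       majorityVote         [(0.5, "a"), (1.0, "b"), (2.0, "a")] == "a"   -- two votes against one
       weightedMajorityVote [(0.1, "b"), (1.0, "a"), (2.0, "a")] == "b"   -- 1/0.1 outweighs 1/1 + 1/2

   `weightedBoolVote` turns the same inverse-distance weighting into a
   probability by passing the two weight totals through a softmax. -}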
fst) $ filter snd distBools 16 | wfalse = sum $ map (recip . fst) $ filter (not . snd) distBools 17 | in exp wtrue / (exp wtrue + exp wfalse) 18 | 19 | majorityVote :: Eq a => [(Double, a)] -> a 20 | majorityVote distXs = let classes = nub $ map snd distXs 21 | occurences c = (c,negate $ length $ filter ((==c) . snd) distXs) 22 | in fst $ head $ sortBy (comparing snd) $ map occurences classes 23 | 24 | weightedMajorityVote :: Eq a => [(Double, a)] -> a 25 | weightedMajorityVote distXs 26 | = let classes = nub $ map snd distXs 27 | weight c = (c,negate $ sum $ map (recip . fst) $ filter ((==c) . snd) distXs) 28 | in fst $ head $ sortBy (comparing snd) $ map weight classes 29 | 30 | avgVote :: [(Double, Double)] -> Double 31 | avgVote distXs = let n = realToFrac $ length distXs 32 | in (sum $ map snd distXs) / n 33 | 34 | weightedAvgVote :: [(Double, Double)] -> Double 35 | weightedAvgVote distXs 36 | = let wsum = sum $ map (recip . fst) distXs 37 | in (sum $ map (uncurry (*)) distXs) / wsum 38 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Base/LinearRegression.hs: -------------------------------------------------------------------------------- 1 | module Fuml.Base.LinearRegression where 2 | 3 | import Numeric.LinearAlgebra 4 | 5 | -- | Ordinary least squares 6 | ols :: [(Vector Double, Double)] -> Vector Double 7 | ols xys = 8 | let x = fromRows $ map fst xys 9 | y = col $ map snd xys 10 | betaMat = inv (tr' x <> x) <> tr' x <> y 11 | in tr' betaMat ! 0 12 | 13 | -- | Weighted ordinary least squares 14 | wols :: Vector Double -> [(Vector Double, Double)] -> Vector Double 15 | wols wvs xys = 16 | let w = diag wvs 17 | x = fromRows $ map fst xys 18 | y = col $ map snd xys 19 | betaMat = inv (tr' x <> w <> x) <> tr' x <> w <> y 20 | in tr' betaMat ! 0 21 | 22 | -- | Ridge regression 23 | ridge :: Matrix Double -> [(Vector Double, Double)] -> Vector Double 24 | ridge gamma xys = 25 | let x = fromRows $ map fst xys 26 | y = col $ map snd xys 27 | betaMat = inv (tr' x <> x + tr' gamma <> gamma) <> tr' x <> y 28 | in tr' betaMat ! 
0 29 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Base/Logistic.hs: -------------------------------------------------------------------------------- 1 | module Fuml.Base.Logistic where 2 | 3 | import Numeric.LinearAlgebra 4 | import qualified Data.Vector.Storable as VS 5 | import Fuml.Optimisation.BFGS 6 | import Data.List (foldl1') 7 | 8 | -- maybe follow this: https://idontgetoutmuch.wordpress.com/2013/04/30/logistic-regression-and-automated-differentiation-3/ 9 | 10 | logit :: Floating a => a -> a 11 | logit x = 1 / (1 + exp (negate x)) 12 | 13 | logLikelihood1 :: Vector Double -> Vector Double -> Bool -> Double 14 | logLikelihood1 theta x y = ind y * log (logit z) + 15 | (1 - ind y) * log (1 - logit z) 16 | where 17 | z = VS.sum $ VS.zipWith (*) theta x 18 | 19 | -- negative log likelihood 20 | logLikelihood :: Double -> [(Vector Double, Bool)] -> Vector Double -> Double 21 | logLikelihood delta theData theta = negate $ (a - delta*b)/l where 22 | l = fromIntegral $ length theData 23 | a = sum $ map (uncurry $ logLikelihood1 theta) theData 24 | b = (/2) $ VS.sum $ VS.map (^2) theta 25 | 26 | 27 | --https://www.cs.ox.ac.uk/people/nando.defreitas/machinelearning/lecture6.pdf 28 | gradLogLikelihood1 :: Vector Double -> Vector Double -> Bool -> Vector Double 29 | gradLogLikelihood1 theta x y = VS.map (*(ind y - lz)) x 30 | -- (1 - ind y) * log (1 - logit z) 31 | where 32 | z = VS.sum $ VS.zipWith (*) theta x 33 | lz = logit z 34 | 35 | gradLogLikelihood :: Double -> [(Vector Double, Bool)] -> Vector Double -> Vector Double 36 | gradLogLikelihood delta theData theta = 37 | let vs = map (uncurry $ gradLogLikelihood1 theta) theData 38 | vsum = foldl1' vadd vs 39 | l = fromIntegral $ length theData 40 | in VS.map (negate . (/l)) vsum `vadd` VS.map ((/l) . 
(*delta)) theta 41 | 42 | 43 | logisticRegression :: [(Vector Double, Bool)] -> Vector Double 44 | logisticRegression theData = 45 | let start = VS.map (const 0) $ fst $ head theData 46 | inisbox = VS.map (const (0.1:: Double)) $ fst $ head theData 47 | hessInit = ident $ size start 48 | in --fst $ minimizeV NMSimplex 1e-4 200 inisbox (logLikelihood theData) start 49 | 50 | --fst $ minimizeVD ConjugateFR 1e-10 500 0.01 0.01 (logLikelihood theData) (gradLogLikelihood theData) start -- inisbox (logLikelihood theData) start 51 | 52 | case bfgsWith myBOpts (logLikelihood 0 theData) (gradLogLikelihood 0 theData) start hessInit of 53 | Left s -> error s 54 | Right (p, h) -> p 55 | --fst $ minimizeV NMSimplex 1e-3 100 inisbox (logLikelihood theData) start 56 | 57 | myBOpts = BFGSOpts 1e-7 2e-5 200 58 | 59 | ind :: Bool -> Double 60 | ind True = 1 61 | ind False = 0 62 | 63 | vadd :: Vector Double -> Vector Double -> Vector Double 64 | vadd = VS.zipWith (+) 65 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Base/PCA.hs: -------------------------------------------------------------------------------- 1 | module Fuml.Base.PCA where 2 | 3 | import Numeric.LinearAlgebra 4 | import Data.List (foldl') 5 | 6 | sumWith :: Num a => (Vector Double -> a) -> [Vector Double] -> a 7 | sumWith f (v:vs) = foldl' (\acc v -> acc + f v) (f v) vs 8 | 9 | meanVs :: [Vector Double] -> Vector Double 10 | meanVs vecs = scale (recip $ realToFrac $ length vecs) $ sumWith id vecs 11 | 12 | covVs :: Vector Double -> [Vector Double] -> Herm Double 13 | covVs xmn xs 14 | = let k = length xs 15 | f xi = outerSame $ xi - xmn 16 | in sym $ scale (recip $ realToFrac k - 1 ) $ sumWith f xs 17 | 18 | outerSame v = outer v v 19 | 20 | --following https://github.com/albertoruiz/hmatrix/blob/master/examples/pca2.hs 21 | type Stat = (Vector Double, [Double], Matrix Double) 22 | 23 | statVs :: [Vector Double] -> Stat 24 | statVs vs = (m, toList s, tr' v) where 25 | m = meanVs vs 26 | (s,v) = eigSH (covVs m vs) 27 | 28 | pcaN :: Int -> Stat -> (Vector Double -> Vector Double , Vector Double -> Vector Double ) 29 | pcaN n (m,s,v) = (encode,decode) 30 | where 31 | encode x = vp #> (x - m) 32 | decode x = x <# vp + m 33 | vp = takeRows n v 34 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Core.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ScopedTypeVariables, TypeFamilies, TupleSections #-} 2 | 3 | module Fuml.Core where 4 | 5 | import qualified Data.Vector.Storable as VS 6 | import Numeric.LinearAlgebra 7 | import Data.List (nub) 8 | import Lens.Micro 9 | import Control.Monad.Identity 10 | 11 | data Weighted o = Weighted Double o 12 | 13 | -- |The result of running a predictive model 14 | data Predict p a = Predict 15 | { model :: p -- ^ the internal state of the model 16 | , predict :: Vector Double -> a -- ^ the prediction function 17 | } 18 | 19 | instance Functor (Predict p) where 20 | fmap f (Predict m predf) = Predict m (f . 
predf) 21 | 22 | newtype Supervisor m o p a 23 | = Supervisor { runSupervisor :: Maybe p 24 | -> [(Vector Double, o)] 25 | -> m (Predict p a) } 26 | 27 | instance Functor m => Functor (Supervisor m o p) where 28 | fmap f (Supervisor sf) = Supervisor $ \mp d -> fmap (fmap f) $ sf mp d 29 | 30 | -- | Helper function for running simple Supervisors in the Identity monad 31 | runSupervisor' :: Supervisor Identity o p a -- ^ the 'Supervisor' value 32 | -> [(Vector Double, o)] -- ^ the dataset 33 | -> Predict p a -- ^ the 'Predict' value 34 | runSupervisor' (Supervisor sf) = runIdentity . sf Nothing 35 | 36 | oneVsRest :: (Monad m, Eq a, Functor m) => Supervisor m Bool p b -> Supervisor m a [(a,p)] [(a,b)] 37 | oneVsRest subsuper = Supervisor $ \_ theData -> do 38 | let classes = nub $ map snd theData 39 | boolize c (v, c1) = (v, c == c1) 40 | train c = fmap (c,) $ runSupervisor subsuper Nothing $ map (boolize c) theData 41 | models <- mapM train classes 42 | return $ Predict (map (over _2 model) models) $ \v -> map (\(c,pr) -> (c, predict pr v)) models 43 | 44 | 45 | (~~) :: (a -> o) -> [a -> Double] -> [a] -> [(Vector Double, o)] 46 | fy ~~ fxs = let f w = (VS.fromList (map ($w) fxs) , fy w) 47 | in map f 48 | 49 | --prepare :: (a -> o) -> [a -> Double] -> [a] -> [(Vector Double, o)] 50 | --prepare fo fxs d = fo ~~ fxs $ d 51 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Optimisation.hs: -------------------------------------------------------------------------------- 1 | module Fuml.Optimisation where 2 | 3 | import Fuml.Core 4 | import Fuml.Optimisation.BFGS 5 | import Fuml.Optimisation.SGD 6 | import Numeric.LinearAlgebra 7 | import qualified Data.Vector.Storable as VS 8 | import Fuml.Base.Logistic 9 | import Data.Maybe (fromMaybe) 10 | import Control.Monad.Identity 11 | import Data.Random 12 | 13 | type BfgsModel = (Vector Double, Matrix Double) 14 | 15 | data CostGradientModel a b = CostGradientModel 16 | { costFunction :: ([(Vector Double, a)] -> Vector Double -> Double) 17 | , costGradient :: ([(Vector Double, a)] -> Vector Double -> Vector Double) 18 | , generatePrediction :: Vector Double -> Vector Double -> b 19 | } 20 | 21 | bfgsSupervisor :: BFGSOpts 22 | -> CostGradientModel a b 23 | -> Supervisor Identity a BfgsModel b 24 | bfgsSupervisor bopts (CostGradientModel ll gradll predf) = Supervisor $ \mp theData -> 25 | let start = fromMaybe (VS.map (const 0) $ fst $ head theData) $ fmap fst mp 26 | hessInit = fromMaybe (ident $ size start) $ fmap snd mp 27 | in case bfgsWith bopts (ll theData) (gradll theData) start hessInit of 28 | Left s -> error s 29 | Right (p, h) -> return $ Predict (p, h) (predf p) 30 | 31 | defBfgs = BFGSOpts 1e-7 1e-7 200 32 | 33 | data SGDModel = SGDModel 34 | { sgdFit :: Vector Double 35 | , lossCurve :: [Double] 36 | } 37 | 38 | sgdSupervisor :: SGDOpts 39 | -> CostGradientModel a b 40 | -> Supervisor RVar a SGDModel b 41 | sgdSupervisor opts (CostGradientModel ll gradll predf) = Supervisor $ \mp theData -> 42 | let start = fromMaybe (VS.map (const 0) $ fst $ head theData) $ fmap sgdFit mp 43 | mkPredict sgdModel = Predict sgdModel (predf (sgdFit sgdModel)) 44 | go 0 p losses = return $ SGDModel p $ reverse losses 45 | go n p losses = do 46 | nextp <- sgdEpoch opts gradll theData p 47 | let loss = ll theData p 48 | go (n-1) nextp (loss:losses) 49 | in fmap mkPredict $ go (epochs opts) start [] 50 | 51 | logisticCost :: Double -> CostGradientModel Bool Double 52 | logisticCost delta = CostGradientModel 
(logLikelihood delta) (gradLogLikelihood delta) (\beta v -> logit $ v `dot` beta) 53 | 54 | logisticBfgs :: Double -> Supervisor Identity Bool BfgsModel Double 55 | logisticBfgs delta = bfgsSupervisor defBfgs (logisticCost delta) 56 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Optimisation/SGD.hs: -------------------------------------------------------------------------------- 1 | module Fuml.Optimisation.SGD where 2 | 3 | import Numeric.LinearAlgebra 4 | import Data.Random 5 | 6 | data SGDOpts = SGDOpts 7 | { learning_rate :: Double 8 | , batch_size :: Int 9 | , epochs :: Int 10 | } 11 | 12 | sgdBatch :: SGDOpts -> (Vector Double -> Vector Double) -> Vector Double -> Vector Double 13 | sgdBatch opts grad p = p - scale (learning_rate opts) (grad p) 14 | 15 | sgdEpoch :: SGDOpts -> ([a] -> Vector Double -> Vector Double) -> [a] -> Vector Double -> RVar (Vector Double) 16 | sgdEpoch opts grad pts' p' = do 17 | pts <- shuffle pts' 18 | return $ go p' $ inGroupsOf (batch_size opts) pts 19 | where go p [] = p 20 | go p (batch:bs) = go (sgdBatch opts (grad batch) p) bs 21 | 22 | inGroupsOf :: Int -> [a] -> [[a]] 23 | inGroupsOf _ [] = [] 24 | inGroupsOf n xs = let (these,those) = splitAt n xs 25 | in these : inGroupsOf n those 26 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Supervised.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ScopedTypeVariables, TypeFamilies #-} 2 | 3 | module Fuml.Supervised where 4 | 5 | import qualified Fuml.Base.LinearRegression as LR 6 | import Fuml.Core 7 | import Numeric.LinearAlgebra 8 | import Control.Monad.Identity 9 | 10 | newtype Beta = Beta { unBeta :: Vector Double } 11 | 12 | ols :: Supervisor Identity Double Beta Double 13 | ols = Supervisor $ \_ theData -> 14 | let beta = LR.ols theData 15 | in return $ linPredict beta 16 | 17 | type Weight = Double 18 | 19 | olsWeighted :: Supervisor Identity (Double, Weight) Beta Double 20 | olsWeighted 21 | = Supervisor $ \_ theDataW -> 22 | let wvs = fromList $ map (\(_,(_,w)) -> w) theDataW 23 | theData = map (\(xs,(y,_)) -> (xs,y)) theDataW 24 | beta = LR.wols wvs theData 25 | in return $ linPredict beta 26 | 27 | ridge :: Double -> Supervisor Identity Double Beta Double 28 | ridge gammac = Supervisor $ \_ xys -> 29 | let npars = size $ fst $ head xys 30 | gamma = diag $ konst gammac npars 31 | beta = LR.ridge gamma xys 32 | in return $ linPredict beta 33 | 34 | linPredict :: Vector Double -> Predict Beta Double 35 | linPredict beta = Predict (Beta beta) (`dot` beta) 36 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Supervised/Accuracy.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ScopedTypeVariables, TypeFamilies, TupleSections #-} 2 | 3 | module Fuml.Supervised.Accuracy where 4 | 5 | import Fuml.Core 6 | import Numeric.LinearAlgebra 7 | import Data.List (partition, transpose) 8 | import Control.Parallel.Foldl hiding (sum) 9 | 10 | accuracy :: (Eq o) => [(Vector Double, o)] -> Predict p o -> Double 11 | accuracy theData model = 12 | let isCorrect (v,y) = predict model v == y 13 | corrects = map isCorrect theData 14 | in realToFrac (length $ filter id corrects) / realToFrac (length corrects) 15 | 16 | rmse :: [(Vector Double, Double)] -> Predict p Double -> Double 17 | rmse theData model = 18 | let getDev (v,y) = let d = y - predict model v in d*d 19 | 
devs = map getDev theData 20 | in sqrt $ sum devs / realToFrac (length devs) 21 | 22 | varExplained :: [(Vector Double, Double)] -> Predict p Double -> Double 23 | varExplained d p = 24 | let allVar = fold (premap snd variance) d 25 | predOutcomes = map (predict p . fst) d 26 | predVar = fold variance predOutcomes 27 | in predVar/allVar 28 | 29 | splitData :: Int -> Int -> [a] -> ([a], [a]) 30 | splitData nfolds fold xs 31 | = let xfs = zip xs (cycle $ [0..(nfolds-1)]) 32 | (zins, zouts) = partition ((==fold) . snd) xfs 33 | in (map fst zins,map fst zouts) 34 | 35 | crossValidate :: Monad m 36 | => Int 37 | -> [(Vector Double, o)] 38 | -> ( [(Vector Double, o)] -> Predict p o1 -> Double ) 39 | -> Supervisor m o p o1 40 | -> Maybe Int 41 | -> m Double 42 | crossValidate nfolds theData metric super (Just fold) = 43 | let (testD, trainD) = splitData nfolds fold theData 44 | fit = runSupervisor super Nothing trainD 45 | in fmap (metric testD) fit 46 | crossValidate nfolds theData metric super Nothing = 47 | let folds = [0..(nfolds-1)] 48 | accs = mapM (crossValidate nfolds theData metric super . Just) folds 49 | in fmap avg accs 50 | 51 | gridSearch :: Monad m 52 | => Int 53 | -> [(Vector Double, o)] 54 | -> ( [(Vector Double, o)] -> Predict p o1 -> Double ) 55 | -> (hyper -> Supervisor m o p o1) 56 | -> [hyper] 57 | -> Maybe Int 58 | -> m [(hyper,Double)] 59 | gridSearch nfolds theData metric hsuper hypers (Just fold) = 60 | let (testD, trainD) = splitData nfolds fold theData 61 | getAcc h = fmap ((h,) . metric testD) $ runSupervisor (hsuper h) Nothing $ trainD 62 | in mapM getAcc hypers 63 | gridSearch nfolds theData metric hsuper hypers Nothing = do 64 | let folds = [0..(nfolds-1)] 65 | accs <- mapM (gridSearch nfolds theData metric hsuper hypers . Just) folds 66 | let f accs = (fst $ head accs, avg $ map snd accs) 67 | return $ map f $ transpose accs 68 | 69 | avg xs = sum xs / realToFrac (length xs) 70 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Supervised/KNN.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ScopedTypeVariables, TypeFamilies #-} 2 | 3 | module Fuml.Supervised.KNN where 4 | 5 | import Numeric.LinearAlgebra 6 | import Data.List (sortBy) 7 | import Data.Ord (comparing) 8 | import Fuml.Core 9 | import Fuml.Base.KNN 10 | import Control.Monad.Identity 11 | 12 | 13 | data KNN a = KNN { 14 | knnPoints :: [(Vector Double, a)] 15 | } 16 | 17 | generalKNN :: Int -> (Vector Double -> Vector Double -> Double) -> ([(Double, a)] -> b) -> Supervisor Identity a (KNN a) b 18 | generalKNN k dist vote = Supervisor $ \_ theData -> 19 | return $ Predict (KNN theData) $ \v -> 20 | let ptDist (v1, y) = (dist v v1,y) 21 | nns = take k $ sortBy (comparing fst) $ map ptDist $ theData 22 | in vote nns 23 | 24 | binaryKNN :: Int -> Supervisor Identity Bool (KNN Bool) Double 25 | binaryKNN k = generalKNN k euclideanDistance weightedBoolVote 26 | 27 | catKNN :: Eq a => Int -> Supervisor Identity a (KNN a) a 28 | catKNN k = generalKNN k euclideanDistance weightedMajorityVote 29 | 30 | regressKNN :: Int -> Supervisor Identity Double (KNN Double) Double 31 | regressKNN k = generalKNN k euclideanDistance weightedAvgVote 32 | -------------------------------------------------------------------------------- /fuml/lib/Fuml/Unsupervised.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ScopedTypeVariables, TypeFamilies, TupleSections, FlexibleContexts #-} 
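{- A minimal usage sketch for the unsupervised interface defined below (the
   feature vectors @vecs :: [Vector Double]@ are hypothetical; 'runSupervisor',
   'predict' and 'runIdentity' come from Fuml.Core and Control.Monad.Identity,
   both imported by this module):

   > let reducer   = runIdentity $ runSupervisor (pca 2) Nothing (with () vecs)
   >     projected = map (predict reducer) vecs  -- each point mapped onto the
   >                                             -- first two principal components

   'with' attaches the dummy @()@ label that an 'Unsupervisor' expects.
-}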
2 | 3 | module Fuml.Unsupervised where 4 | 5 | import Fuml.Core 6 | import qualified Fuml.Base.PCA as PCA 7 | import Fuml.Base.KNN (euclideanDistance) 8 | import Numeric.LinearAlgebra 9 | import qualified Data.Vector.Storable as VS 10 | import qualified Data.Vector.Unboxed as VU 11 | import qualified Data.Vector as V 12 | import Data.List (foldl1', sortBy, nub) 13 | import Data.Ord (comparing) 14 | import Control.Monad.Identity 15 | 16 | 17 | type Unsupervisor m p a = Supervisor m () p a 18 | 19 | with :: b -> [a] -> [(a,b)] 20 | with y = map (,y) 21 | 22 | data PCA = PCA { pcaComponents :: Int, pcaStat :: PCA.Stat } 23 | 24 | pca :: Int -> Unsupervisor Identity PCA.Stat (Vector Double) 25 | pca ncomp = Supervisor $ \_ theData -> 26 | let stat@(m,_,v) = PCA.statVs $ map fst theData 27 | in return $ Predict stat $ \x -> takeRows ncomp v #> (x - m) 28 | 29 | findNearestCentroidIx :: [Vector Double] -> Vector Double -> Int 30 | findNearestCentroidIx ctrs x = 31 | let vdixs = zip [0..] $ map (\c -> euclideanDistance c x) ctrs 32 | in fst $ head $ sortBy (comparing snd) vdixs 33 | 34 | centroid :: [Vector Double] -> Vector Double 35 | centroid vs = 36 | let vadd = VS.zipWith (+) 37 | n = realToFrac $ length vs 38 | in VS.map (/n) $ foldl1' vadd vs 39 | 40 | {-kmeans :: Int -> Unsupervisor Identity [Vector Double] Int 41 | kmeans nclus 42 | = Supervisor $ \_ theData -> 43 | let clus = KM.kmeans (VU.convert) KM.euclidSq nclus (map fst theData) 44 | ctrs = map (centroid . KM.elements) $ V.toList clus 45 | pr v = findNearestCentroidIx ctrs v 46 | in return $ Predict ctrs pr 47 | 48 | cluster :: Eq b => [a] -> (a-> Vector Double) -> Unsupervisor Identity p b -> [[a]] 49 | cluster xs f unsup = 50 | let p = runIdentity $ runSupervisor unsup Nothing $ with () $ map (f) xs 51 | withClus x = (x,predict p $ f x) 52 | withCluss = map withClus xs 53 | cluss = nub $ map snd withCluss 54 | getElems clus = map fst $ filter ((==clus) . snd) withCluss 55 | in map getElems cluss -} 56 | -------------------------------------------------------------------------------- /fuml/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by stack init 2 | # For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration/ 3 | 4 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 5 | resolver: lts-8.5 6 | 7 | # Local packages, usually specified by relative directory name 8 | packages: 9 | - '.' 
10 | - ../datasets 11 | - ../parfoldl 12 | - location: 13 | git: https://github.com/glutamate/random-fu.git 14 | commit: 4274fc99b22610951853c03cf4bf23bab906a8c7 15 | extra-dep: true 16 | subdirs: 17 | - random-fu 18 | 19 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 20 | extra-deps: [] 21 | 22 | # Override default flag values for local packages and extra-deps 23 | flags: {} 24 | 25 | # Extra package databases containing global packages 26 | extra-package-dbs: [] 27 | 28 | # Control whether we use the GHC we find on the path 29 | # system-ghc: true 30 | 31 | # Require a specific version of stack, using version ranges 32 | # require-stack-version: -any # Default 33 | # require-stack-version: >= 1.0.0 34 | 35 | # Override the architecture used by stack, especially useful on Windows 36 | # arch: i386 37 | # arch: x86_64 38 | 39 | # Extra directories used by stack for building 40 | # extra-include-dirs: [/path/to/dir] 41 | # extra-lib-dirs: [/path/to/dir] 42 | 43 | # Allow a newer minor version of GHC than the snapshot specifies 44 | # compiler-check: newer-minor 45 | -------------------------------------------------------------------------------- /inliterate/InlitPreProc.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Inliterate 4 | import Inliterate.Inspect 5 | import System.Environment 6 | import qualified Data.Text as T 7 | import qualified Data.Text.IO as T 8 | 9 | 10 | main = do 11 | _:inFile:outFile:_ <- getArgs 12 | d <- readDoc inFile 13 | T.writeFile outFile $ genHaskell d 14 | -------------------------------------------------------------------------------- /inliterate/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /inliterate/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /inliterate/TestInliterate.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Inliterate 4 | import Inliterate.Inspect 5 | import System.Environment 6 | import qualified Data.Text.IO as T 7 | 8 | main = do 9 | putStr "\n" 10 | dumpDoc "TestInliteratePreProc.hs" 11 | d <- readDoc "TestInliteratePreProc.hs" 12 | 13 | T.putStrLn $ genHaskell d 14 | -------------------------------------------------------------------------------- /inliterate/TestInliteratePreProc.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -F -pgmF inlitpp #-} 2 | 3 | # Test document 4 | 5 | Hello from my test document! 6 | 7 | ```haskell hide top 8 | import Data.Time 9 | ``` 10 | 11 | ```haskell do 12 | 13 | now <- getCurrentTime 14 | 15 | ``` 16 | 17 | ```haskell do 18 | 19 | let later :: UTCTime 20 | later = addUTCTime 100 now 21 | 22 | ``` 23 | 24 | ```haskell top 25 | 26 | f :: Int -> Int 27 | f x = x - 7 28 | 29 | ``` 30 | 31 | ```haskell eval 32 | 33 | f (2+2) 34 | 35 | ``` 36 | 37 | ```haskell eval 38 | 39 | [100000::Int .. ] 40 | 41 | ``` 42 | 43 | and lets have another question 44 | 45 | ```haskell eval 46 | 47 | (now, later) 48 | 49 | ``` 50 | 51 | Goodbye! 52 | -------------------------------------------------------------------------------- /inliterate/changelog.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/inliterate/changelog.md -------------------------------------------------------------------------------- /inliterate/inliterate.cabal: -------------------------------------------------------------------------------- 1 | Name: inliterate 2 | Version: 0.1.0 3 | Synopsis: Interactive literate programming 4 | Description: 5 | Evaluate markdown code blocks to show the results of running the code. 
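-- As a minimal sketch of an input document (assuming the block classes
-- exercised by TestInliteratePreProc.hs above, e.g. @hide top@, @do@ and
-- @eval@), a source file beginning with
--
--   {-# OPTIONS_GHC -F -pgmF inlitpp #-}
--
--   # A tiny report
--
--   ```haskell eval
--   2 + 2
--   ```
--
-- is rendered as an HTML page in which the @eval@ block also shows the
-- result of evaluating @2 + 2@.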
6 | 7 | 8 | License: MIT 9 | License-file: LICENSE 10 | Author: Tom Nielsen 11 | Maintainer: tomn@diffusionkinetics.com 12 | build-type: Simple 13 | Cabal-Version: >= 1.8 14 | homepage: https://github.com/diffusionkinetics/open/inliterate 15 | bug-reports: https://github.com/diffusionkinetics/open/issues 16 | category: Statistics 17 | Tested-With: GHC == 7.8.4, GHC == 7.10.2, GHC == 7.10.3, GHC == 8.0.1 18 | 19 | 20 | extra-source-files: 21 | changelog.md 22 | README.md 23 | 24 | 25 | Library 26 | ghc-options: -Wall -fno-warn-type-defaults 27 | hs-source-dirs: lib 28 | 29 | Exposed-modules: 30 | Inliterate 31 | , Inliterate.Import 32 | , Inliterate.Inspect 33 | 34 | Build-depends: 35 | base >= 4.6 && <5 36 | , text 37 | , containers 38 | , cheapskate 39 | , blaze-html 40 | , time 41 | , plotlyhs 42 | , lucid 43 | , lucid-extras 44 | , haskell-src-exts 45 | , svg-builder 46 | 47 | 48 | Executable inlitpp 49 | main-is: InlitPreProc.hs 50 | build-depends: base >=4.6 && <5 51 | , inliterate 52 | , text 53 | 54 | Test-suite test-inliterate 55 | type: exitcode-stdio-1.0 56 | main-is: TestInliterate.hs 57 | build-depends: base >=4.6 && <5 58 | , inliterate 59 | , text 60 | 61 | Test-suite test-inliterate-pp 62 | type: exitcode-stdio-1.0 63 | buildable: False 64 | main-is: TestInliteratePreProc.hs 65 | build-depends: base >=4.6 && <5 66 | , inliterate 67 | , text 68 | , time 69 | -------------------------------------------------------------------------------- /inliterate/lib/Inliterate/Inspect.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Inliterate.Inspect where 4 | 5 | import Cheapskate 6 | import Cheapskate.Html 7 | import qualified Data.Text as T 8 | import qualified Data.Text.Lazy as TL 9 | import qualified Data.Text.IO as T 10 | import Data.Foldable (toList) 11 | import Text.Blaze.Html.Renderer.Text (renderHtml) 12 | 13 | 14 | 15 | readDoc :: FilePath -> IO Doc 16 | readDoc fp = do 17 | t <- T.readFile fp 18 | return $ markdown def t 19 | 20 | 21 | inspect :: FilePath -> IO (T.Text, T.Text) 22 | inspect fp = do 23 | d <- readDoc fp 24 | 25 | return $ getTitleFirstP d 26 | 27 | getTitleFirstP :: Doc -> (T.Text, T.Text) 28 | getTitleFirstP (Doc _ sblocks) = go allBlocks where 29 | allBlocks = toList sblocks 30 | go (Header _ inls : rest ) = (T.concat $ map inlineToText $ toList inls, goPara rest) 31 | go (_ : blocks ) = go blocks 32 | go [] = ("No Title", goPara allBlocks) 33 | goPara (Para inls : _ ) = TL.toStrict $ renderHtml $ renderInlines def inls 34 | goPara (_:rest) = goPara rest 35 | goPara [] = "" 36 | 37 | --strip formatting 38 | inlineToText :: Inline -> T.Text 39 | inlineToText (Str s) = s 40 | inlineToText Space = " " 41 | inlineToText SoftBreak = " " -- ?? 42 | inlineToText LineBreak = " " 43 | inlineToText (Emph inls) = T.concat $ map inlineToText $ toList inls 44 | inlineToText (Strong inls) = T.concat $ map inlineToText $ toList inls 45 | inlineToText n = error $ "inlineToText: unsupported type "++show n 46 | -------------------------------------------------------------------------------- /inliterate/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 
4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-8.5 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 40 | - ../plotlyhs 41 | - ../lucid-extras 42 | 43 | 44 | # Dependency packages to be pulled from upstream that are not in the resolver 45 | # (e.g., acme-missiles-0.3) 46 | extra-deps: [] 47 | 48 | # Override default flag values for local packages and extra-deps 49 | flags: {} 50 | 51 | # Extra package databases containing global packages 52 | extra-package-dbs: [] 53 | 54 | # Control whether we use the GHC we find on the path 55 | # system-ghc: true 56 | # 57 | # Require a specific version of stack, using version ranges 58 | # require-stack-version: -any # Default 59 | # require-stack-version: ">=1.1" 60 | # 61 | # Override the architecture used by stack, especially useful on Windows 62 | # arch: i386 63 | # arch: x86_64 64 | # 65 | # Extra directories used by stack for building 66 | # extra-include-dirs: [/path/to/dir] 67 | # extra-lib-dirs: [/path/to/dir] 68 | # -------------------------------------------------------------------------------- /lucid-extras/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 diffusionkinetics 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /lucid-extras/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/lucid-extras/README.md -------------------------------------------------------------------------------- /lucid-extras/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /lucid-extras/changelog.md: -------------------------------------------------------------------------------- 1 | 0.2 2 | * Add functions for generating vega and vega-lite enabled pages (https://vega.github.io) 3 | -------------------------------------------------------------------------------- /lucid-extras/lib/Lucid/DataTables.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, StandaloneDeriving #-} 2 | 3 | module Lucid.DataTables where 4 | 5 | import qualified Data.Aeson as Aeson 6 | import qualified Data.ByteString.Lazy as BSL 7 | import Lucid 8 | import qualified Data.Text as T 9 | import qualified Data.Text.Encoding as T 10 | import Lucid.PreEscaped 11 | import Data.Monoid 12 | 13 | dataTablesCDN :: Monad m => HtmlT m () 14 | dataTablesCDN 15 | = scriptSrc "https://cdn.datatables.net/1.10.16/js/jquery.dataTables.min.js" 16 | 17 | -- | Additional properties to pass to the table. 18 | -- See https://datatables.net/reference/option/ for values 19 | -- we might wish to use here. 20 | activateDataTable :: Monad m => T.Text 21 | -> Maybe (Aeson.Value) 22 | -> HtmlT m () 23 | activateDataTable elm props = script_ $ 24 | "$(document).ready(function(){ $('"<> elm<>"').DataTable("<> propStr <> "); })" 25 | where 26 | propStr = case props of 27 | Just val -> T.decodeUtf8 . BSL.toStrict . Aeson.encode $ val 28 | Nothing -> "" 29 | 30 | dataTablesCssCDN :: Monad m => HtmlT m () 31 | dataTablesCssCDN = 32 | link_ [rel_ "stylesheet", 33 | href_ "https://cdn.datatables.net/1.10.16/css/jquery.dataTables.min.css"] 34 | -------------------------------------------------------------------------------- /lucid-extras/lib/Lucid/Leaflet.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, DeriveGeneric #-} 2 | {-| 3 | Module : Lucid.Leaflet 4 | Description : LeafletJS bindings 5 | Copyright : (c) Tom Nielsen, Marco Zocca, 2019 6 | License : GPL-3 7 | Maintainer : ocramz fripost org 8 | Stability : experimental 9 | Portability : POSIX 10 | 11 | Bindings to the LeafletJS map API. 12 | 13 | See https://leafletjs.com/ for usage details 14 | -} 15 | module Lucid.Leaflet ( 16 | -- * CDN declarations 17 | leafletCDN, leafletCssCDN, 18 | -- * Utilities 19 | leafletMap, osmTileLayer, 20 | -- * Types 21 | LMap(..), LMapElement(..), TileLayerProperties(..) 
22 | ) where 23 | 24 | import qualified Data.Aeson as Aeson 25 | import qualified Data.ByteString.Lazy as BSL 26 | import Lucid 27 | import qualified Data.Text as T 28 | import qualified Data.Text.Encoding as T 29 | import Lucid.PreEscaped 30 | import Data.Monoid 31 | import GHC.Generics 32 | 33 | 34 | 35 | -- | Statement for embedding the LeafletJS javascript blob. 36 | leafletCDN :: Monad m => HtmlT m () 37 | leafletCDN 38 | = scriptSrc "https://unpkg.com/leaflet@1.2.0/dist/leaflet.js" 39 | 40 | data LMap = LMap T.Text | SetView (Double, Double) Double LMap deriving (Eq, Show) 41 | 42 | data LMapElement = TileLayer T.Text TileLayerProperties 43 | | Marker (Double, Double) 44 | | BindPopup T.Text LMapElement deriving (Eq, Show) 45 | 46 | newtype TileLayerProperties = TileLayerProperties { attribution :: T.Text } deriving (Eq, Show, Generic) 47 | instance Aeson.ToJSON TileLayerProperties 48 | 49 | mapElementToJS :: LMapElement -> T.Text 50 | mapElementToJS e' = "\n" <> f e' <> ".addTo(lmap);" where 51 | tshow = T.pack . show 52 | f (Marker (x, y)) = "L.marker(["<> tshow x<>", "<> tshow y<>"])" 53 | f (BindPopup t e) = f e <> ".bindPopup('"<>t<>"')" 54 | f (TileLayer url ps) = "L.tileLayer('"<>url<>"',"<>g ps<>")" 55 | g = T.decodeUtf8 . BSL.toStrict . Aeson.encode 56 | 57 | 58 | -- | OpenStreetMap tile layer 59 | osmTileLayer :: LMapElement 60 | osmTileLayer 61 | = TileLayer "http://{s}.tile.osm.org/{z}/{x}/{y}.png" 62 | $ TileLayerProperties "© OpenStreetMap contributors" 63 | 64 | -- | Statement for embedding the LeafletJS CSS stylesheet. 65 | leafletCssCDN :: Monad m => HtmlT m () 66 | leafletCssCDN = 67 | link_ [rel_ "stylesheet", 68 | href_ "https://unpkg.com/leaflet@1.2.0/dist/leaflet.css"] 69 | 70 | -- | @\<script\>@ section that declares a LeafletJS map 71 | leafletMap :: Monad m => LMap -> [LMapElement] -> HtmlT m () 72 | leafletMap mp elms = script_ $ writeMap mp <> writeElems elms where 73 | writeMap m = "\nvar lmap = " <> writeMap' m <> ";" 74 | writeMap' (LMap e) = "L.map('"<>e<>"')" 75 | writeMap' (SetView (x,y) z m) = writeMap' m <> ".setView(["<> tshow x<>", "<> tshow y<>"], "<> tshow z<>")" 76 | writeElems = T.unlines . map mapElementToJS 77 | tshow = T.pack . 
show 78 | 79 | -------------------------------------------------------------------------------- /lucid-extras/lib/Lucid/PreEscaped.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, StandaloneDeriving #-} 2 | 3 | module Lucid.PreEscaped where 4 | 5 | import Lucid 6 | import Lucid.Base 7 | import qualified Data.Text as T 8 | import qualified Blaze.ByteString.Builder as Blaze 9 | import qualified Blaze.ByteString.Builder.Html.Utf8 as Blaze 10 | import Data.Monoid ((<>)) 11 | import qualified Data.ByteString.Lazy as LBS 12 | 13 | 14 | preEscaped :: Monad m => T.Text -> HtmlT m () 15 | preEscaped name = 16 | HtmlT (return (\_ -> Blaze.fromText name, ())) 17 | 18 | preEscapedByteString :: Monad m => LBS.ByteString -> HtmlT m () 19 | preEscapedByteString name = 20 | HtmlT (return (\_ -> Blaze.fromLazyByteString name, ())) 21 | 22 | 23 | 24 | scriptSrc :: Monad m => T.Text -> HtmlT m () 25 | scriptSrc url = 26 | HtmlT (return (\_ -> Blaze.fromText ("<script src=\"" <> url <> "\"></script>"), ())) 27 | -------------------------------------------------------------------------------- /lucid-extras/lib/Lucid/VegaLite.hs: -------------------------------------------------------------------------------- 1 | {-# language OverloadedStrings, ExtendedDefaultRules #-} 2 | {-| 3 | Module : Lucid.VegaLite 4 | Description : Vega-lite bindings 5 | Copyright : (c) Tom Nielsen, Marco Zocca, 2019 6 | License : GPL-3 7 | Maintainer : ocramz fripost org 8 | Stability : experimental 9 | Portability : POSIX 10 | 11 | Bindings to the vega-lite visualization and infographics API. 12 | 13 | See https://vega.github.io/ for usage details 14 | -} 15 | module Lucid.VegaLite ( 16 | -- * Standalone 17 | mkVegaHtml 18 | -- * Utilities 19 | , vegaEmbedHead, vegaEmbedBodyScript) where 20 | 21 | import Lucid 22 | import Lucid.PreEscaped (scriptSrc) 23 | 24 | import qualified Data.Aeson as A 25 | 26 | -- import qualified Data.Text as T 27 | import qualified Data.Text.Encoding as T (decodeUtf8) 28 | 29 | import qualified Data.ByteString.Lazy as LBS 30 | 31 | import Data.Monoid 32 | 33 | vegaCDN, vegaLiteCDN, vegaEmbedCDN :: Monad m => HtmlT m () 34 | vegaCDN = scriptSrc "https://cdn.jsdelivr.net/npm/vega@3" 35 | vegaLiteCDN = scriptSrc "https://cdn.jsdelivr.net/npm/vega-lite@2.5.0" 36 | vegaEmbedCDN = scriptSrc "https://cdn.jsdelivr.net/npm/vega-embed@3" 37 | 38 | -- | Construct a standalone HTML page that can render a vega-lite plot. The plot will be rendered by the vega-embed library. 39 | -- 40 | -- NB: the 'A.Value' parameter must contain a vega-lite JSON payload 41 | mkVegaHtml :: A.Value -> Html () 42 | mkVegaHtml vl = doctypehtml_ $ html_ $ do 43 | meta_ [charset_ "UTF-8"] 44 | head_ vegaEmbedHead 45 | with div_ [id_ "vis"] "" 46 | body_ $ vegaEmbedBodyScript vl 47 | 48 | -- | The statements for downloading the vega javascript blobs from the CDN. Must be in the document \<head\> 49 | vegaEmbedHead :: Html () 50 | vegaEmbedHead = do 51 | vegaCDN 52 | vegaLiteCDN 53 | vegaEmbedCDN 54 | 55 | -- | The statement for embedding the vega JSON payload and initializing vega-embed. Must be in the \<body\> block and referenced by a \<div\>
\ 56 | vegaEmbedBodyScript :: A.Value -> Html () 57 | vegaEmbedBodyScript vl = 58 | script_ $ T.decodeUtf8 $ LBS.toStrict ("const spec =" <> A.encode vl <> "; vegaEmbed('#vis', spec).then(result => console.log(result)).catch(console.warn);" :: LBS.ByteString) 59 | -------------------------------------------------------------------------------- /lucid-extras/lucid-extras.cabal: -------------------------------------------------------------------------------- 1 | Name: lucid-extras 2 | Version: 0.2.2 3 | Synopsis: Generate more HTML with Lucid - Bootstrap, Rdash, Vega-Lite, Leaflet JS, Email. 4 | Description: Generate more HTML with Lucid - Bootstrap, Rdash, Vega-Lite, Leaflet JS, Email. Provides functions to generate rich web page elements for infographics, geographical maps, and more. 5 | License: MIT 6 | License-file: LICENSE 7 | Author: Tom Nielsen 8 | Maintainer: Marco Zocca 9 | build-type: Simple 10 | Cabal-Version: >= 1.10 11 | homepage: https://github.com/diffusionkinetics/open/lucid-extras 12 | bug-reports: https://github.com/diffusionkinetics/open/issues 13 | category: Web, Graphics 14 | Tested-With: GHC == 7.10.2, GHC == 7.10.3, GHC == 8.0.1, GHC == 8.0.2 15 | extra-source-files: 16 | changelog.md 17 | 18 | source-repository head 19 | type: git 20 | location: https://github.com/diffusionkinetics/open 21 | 22 | Library 23 | ghc-options: -Wall 24 | hs-source-dirs: lib 25 | default-language: Haskell2010 26 | 27 | Exposed-modules: 28 | Lucid.Bootstrap3 29 | , Lucid.PreEscaped 30 | , Lucid.DataTables 31 | , Lucid.Rdash 32 | , Lucid.Leaflet 33 | , Lucid.Tables 34 | , Lucid.VegaLite 35 | Build-depends: 36 | base >= 4.6 && < 5 37 | , aeson 38 | , lucid 39 | , text 40 | , blaze-builder 41 | , bytestring 42 | 43 | Test-suite site-gen 44 | type: exitcode-stdio-1.0 45 | ghc-options: -Wall 46 | hs-source-dirs: site-gen 47 | main-is: Main.hs 48 | default-language: Haskell2010 49 | other-modules: DevelMain 50 | Build-Depends: base >= 4.6 && < 5 51 | , directory >= 1.2 52 | , lucid-extras 53 | , lucid -------------------------------------------------------------------------------- /lucid-extras/site-gen/DevelMain.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module DevelMain where 4 | 5 | import System.Directory 6 | 7 | import Lucid.Rdash 8 | import Lucid 9 | 10 | html :: Html () 11 | html = indexPage 12 | 13 | update :: IO () 14 | update = do 15 | createDirectoryIfMissing True "sample-site" 16 | renderToFile "sample-site/index.html" html 17 | -------------------------------------------------------------------------------- /lucid-extras/site-gen/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import DevelMain 4 | 5 | main :: IO () 6 | main = update 7 | -------------------------------------------------------------------------------- /lucid-extras/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. 
For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-9.21 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 40 | 41 | # Dependency packages to be pulled from upstream that are not in the resolver 42 | # (e.g., acme-missiles-0.3) 43 | extra-deps: [] 44 | 45 | # Override default flag values for local packages and extra-deps 46 | flags: {} 47 | 48 | # Extra package databases containing global packages 49 | extra-package-dbs: [] 50 | 51 | # Control whether we use the GHC we find on the path 52 | # system-ghc: true 53 | # 54 | # Require a specific version of stack, using version ranges 55 | # require-stack-version: -any # Default 56 | # require-stack-version: ">=1.1" 57 | # 58 | # Override the architecture used by stack, especially useful on Windows 59 | # arch: i386 60 | # arch: x86_64 61 | # 62 | # Extra directories used by stack for building 63 | # extra-include-dirs: [/path/to/dir] 64 | # extra-lib-dirs: [/path/to/dir] 65 | # 66 | # Allow a newer minor version of GHC than the snapshot specifies 67 | # compiler-check: newer-minor -------------------------------------------------------------------------------- /parfoldl/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /parfoldl/README.md: -------------------------------------------------------------------------------- 1 | # pfoldl 2 | Parallel Folds 3 | -------------------------------------------------------------------------------- /parfoldl/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /parfoldl/TestParFoldl.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE StandaloneDeriving #-} 2 | 3 | import Control.Parallel.Foldl 4 | import Control.Applicative 5 | import Numeric.Datasets.Iris 6 | import Numeric.Datasets.BostonHousing 7 | import Numeric.Datasets 8 | import qualified Data.Map.Strict as Map 9 | 10 | irisApply :: Fold Double Double -> Fold Iris Iris 11 | irisApply f = Iris <$> premap sepalLength f 12 | <*> premap sepalWidth f 13 | <*> premap petalLength f 14 | <*> premap petalWidth f 15 | <*> premap irisClass mode 16 | 17 | main :: IO () 18 | main = do print $ ("iris average seplen", fold (premap sepalLength average) iris) 19 | print $ ("iris variance seplen", fold (premap sepalLength variance) iris) 20 | print $ ("iris twopvar seplen", twoPassVariance $ map sepalLength iris) 21 | print $ fold (irisApply average) iris 22 | let byClass = Map.toList $ fold (groupBy irisClass $ irisApply average) iris 23 | mapM_ print byClass 24 | bh <- getDataset bostonHousing 25 | print $ length bh 26 | print $ fold (premap tax average) bh 27 | print $ fold (premap tax variance) bh 28 | print $ twoPassVariance $ map tax bh 29 | let manyNums = [1..10000] 30 | print $ twoPassVariance manyNums 31 | print $ fold variance manyNums 32 | 33 | 34 | {- 35 | 36 | scikit-learn 37 | 38 | >>> iris.data[:,0].var(ddof=0) 39 | 0.6811222222222223 40 | >>> iris.data[:,0].var(ddof=1) 41 | 0.68569351230425069 42 | 43 | >>> boston.data[:,9].var(ddof=0) 44 | 28348.62359980628 45 | >>> boston.data[:,9].var(ddof=1) 46 | 28404.759488122731 47 | 48 | -} 49 | -------------------------------------------------------------------------------- /parfoldl/changelog.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/parfoldl/changelog.md -------------------------------------------------------------------------------- /parfoldl/licenses/foldl.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2013 Gabriel Gonzalez 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | * Redistributions of source code must retain the above copyright notice, 7 | this list of conditions and the following disclaimer. 8 | * Redistributions in binary form must reproduce the above copyright notice, 9 | this list of conditions and the following disclaimer in the documentation 10 | and/or other materials provided with the distribution. 11 | * Neither the name of Gabriel Gonzalez nor the names of other contributors 12 | may be used to endorse or promote products derived from this software 13 | without specific prior written permission. 
14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 19 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 22 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /parfoldl/parfoldl.cabal: -------------------------------------------------------------------------------- 1 | Name: parfoldl 2 | Version: 0.1.0 3 | Synopsis: Parallel folds 4 | Description: 5 | similar to foldl, with an extra 'combine' step, along with statistical folds 6 | 7 | 8 | License: MIT 9 | License-file: LICENSE 10 | Author: Tom Nielsen 11 | Maintainer: tanielsen@gmail.com 12 | build-type: Simple 13 | Cabal-Version: >= 1.10 14 | homepage: https://github.com/diffusionkinetics/open/parfoldl 15 | bug-reports: https://github.com/diffusionkinetics/open/issues 16 | category: Control, Statistics 17 | Tested-With: GHC == 7.8.4, GHC == 7.10.2, GHC == 7.10.3, GHC == 8.0.1 18 | 19 | 20 | extra-source-files: 21 | changelog.md 22 | 23 | 24 | Library 25 | ghc-options: -Wall -fno-warn-type-defaults 26 | hs-source-dirs: src 27 | default-language: Haskell2010 28 | 29 | Exposed-modules: 30 | Control.Parallel.Foldl 31 | 32 | Build-depends: 33 | base >= 4.6 && <5 34 | , vector 35 | , profunctors 36 | , containers 37 | , monad-par 38 | , foldl 39 | 40 | Test-suite test-parfoldl 41 | type: exitcode-stdio-1.0 42 | default-language: Haskell2010 43 | 44 | main-is: TestParFoldl.hs 45 | build-depends: base >=4.6 && <5 46 | , datasets >= 0.2.3 47 | , parfoldl 48 | , containers -------------------------------------------------------------------------------- /parfoldl/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-9.21 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 
22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 40 | - ../datasets 41 | 42 | # Dependency packages to be pulled from upstream that are not in the resolver 43 | # (e.g., acme-missiles-0.3) 44 | extra-deps: [] 45 | 46 | # Override default flag values for local packages and extra-deps 47 | flags: {} 48 | 49 | # Extra package databases containing global packages 50 | extra-package-dbs: [] 51 | 52 | # Control whether we use the GHC we find on the path 53 | # system-ghc: true 54 | # 55 | # Require a specific version of stack, using version ranges 56 | # require-stack-version: -any # Default 57 | # require-stack-version: ">=1.1" 58 | # 59 | # Override the architecture used by stack, especially useful on Windows 60 | # arch: i386 61 | # arch: x86_64 62 | # 63 | # Extra directories used by stack for building 64 | # extra-include-dirs: [/path/to/dir] 65 | # extra-lib-dirs: [/path/to/dir] 66 | # -------------------------------------------------------------------------------- /plotlyhs/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /plotlyhs/README.md: -------------------------------------------------------------------------------- 1 | Plotlyhs: Haskell bindings for Plotly.js 2 | ===== 3 | 4 | [![Hackage](https://img.shields.io/hackage/v/plotlyhs.svg)](https://hackage.haskell.org/package/plotlyhs) [![Build Status](https://secure.travis-ci.org/glutamate/plotlyhs.svg)](http://travis-ci.org/glutamate/plotlyhs) 5 | 6 | This is a library for generating JSON value to use with the Plotly.js 7 | library. The interface directly reflects the structure of the 8 | Plotly.js library and is therefore quite low-level. 
Lenses are used 9 | throughout to set `Maybe` fields in records to provide both data and configuration options. 10 | 11 | This library does *not* attempt to communicate with the Plotly API in 12 | any other way. All generated plots can be hosted on stand-alone web 13 | pages. 14 | 15 | ## Example 16 | 17 | For more, [see the examples page](https://glutamate.github.io/plotlyhs/) 18 | 19 | ```haskell 20 | {-# LANGUAGE OverloadedStrings #-} 21 | 22 | import Lucid 23 | import Lucid.Html5 24 | import Graphics.Plotly 25 | import Graphics.Plotly.Lucid 26 | import Lens.Micro 27 | 28 | import qualified Data.Text.Lazy as T 29 | import qualified Data.Text.Lazy.IO as T 30 | 31 | main = 32 | T.writeFile "test.html" $ renderText $ doctypehtml_ $ do 33 | head_ $ do meta_ [charset_ "utf-8"] 34 | plotlyCDN 35 | body_ $ toHtml $ plotly "myDiv" [myTrace] 36 | 37 | myTrace = scatter & x ?~ [1,2,3,4] 38 | & y ?~ [500,3000,700,200] 39 | ``` 40 | -------------------------------------------------------------------------------- /plotlyhs/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /plotlyhs/TestPlotly.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | import Lucid 4 | import Lucid.Html5 5 | import Graphics.Plotly 6 | import Graphics.Plotly.Lucid 7 | import Lens.Micro 8 | 9 | import qualified Data.Text.Lazy as T 10 | import qualified Data.Text.Lazy.IO as T 11 | 12 | main = 13 | T.writeFile "test.html" $ renderText $ doctypehtml_ $ do 14 | head_ $ do meta_ [charset_ "utf-8"] 15 | plotlyCDN 16 | body_ $ toHtml $ plotly "myDiv" [myTrace] 17 | 18 | myTrace = scatter & x ?~ [1,2,3,4] 19 | & y ?~ [500,3000,700,200] 20 | 21 | -------------------------------------------------------------------------------- /plotlyhs/changelog.md: -------------------------------------------------------------------------------- 1 | 0.1.1 2 | 3 | * Simple module to help build traces 4 | 5 | * remove test-plotly executable 6 | 7 | 8 | 0.1 9 | 10 | * Initial release 11 | -------------------------------------------------------------------------------- /plotlyhs/contour.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |
9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /plotlyhs/gendoc/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /plotlyhs/gendoc/plotly-gendoc.cabal: -------------------------------------------------------------------------------- 1 | Name: plotly-gendoc 2 | Version: 0.1.0 3 | Synopsis: Generate docs for Haskell bindings to Plotly.js 4 | Description: 5 | Generate docs for generating web-based plots 6 | 7 | 8 | License: MIT 9 | License-file: LICENSE 10 | Author: Tom Nielsen 11 | Maintainer: tanielsen@gmail.com 12 | build-type: Simple 13 | Cabal-Version: >= 1.8 14 | 15 | executable plotly-gendoc 16 | main-is: GenDocInlit.hs 17 | build-depends: base >=4.6 && <5 18 | , plotlyhs 19 | , lucid 20 | , aeson 21 | , text 22 | , microlens 23 | , plotlyhs 24 | , inliterate 25 | , datasets 26 | , neat-interpolation 27 | 28 | executable test-plotly-gendoc 29 | main-is: test.hs 30 | buildable: False 31 | build-depends: base >=4.6 && <5 32 | , plotlyhs 33 | , lucid 34 | , aeson 35 | , text 36 | , microlens 37 | , plotlyhs 38 | , inliterate 39 | , datasets 40 | , neat-interpolation 41 | -------------------------------------------------------------------------------- /plotlyhs/gendoc/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-8.5 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 
22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | 40 | - '.' 41 | - '../' 42 | - '../../datasets' 43 | - '../../inliterate' 44 | - '../../lucid-extras' 45 | 46 | # Dependency packages to be pulled from upstream that are not in the resolver 47 | # (e.g., acme-missiles-0.3) 48 | extra-deps: 49 | 50 | # Override default flag values for local packages and extra-deps 51 | flags: {} 52 | 53 | # Extra package databases containing global packages 54 | extra-package-dbs: [] 55 | 56 | # Control whether we use the GHC we find on the path 57 | # system-ghc: true 58 | # 59 | # Require a specific version of stack, using version ranges 60 | # require-stack-version: -any # Default 61 | # require-stack-version: ">=1.1" 62 | # 63 | # Override the architecture used by stack, especially useful on Windows 64 | # arch: i386 65 | # arch: x86_64 66 | # 67 | # Extra directories used by stack for building 68 | # extra-include-dirs: [/path/to/dir] 69 | # extra-lib-dirs: [/path/to/dir] 70 | # 71 | # Allow a newer minor version of GHC than the snapshot specifies 72 | # compiler-check: newer-minor 73 | -------------------------------------------------------------------------------- /plotlyhs/gendoc/test.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | import Lucid 4 | import Lucid.Html5 5 | import Graphics.Plotly 6 | import Graphics.Plotly.Lucid 7 | import Lens.Micro 8 | 9 | import qualified Data.Text.Lazy as T 10 | import qualified Data.Text.Lazy.IO as T 11 | 12 | main = 13 | T.writeFile "test.html" $ renderText $ doctypehtml_ $ do 14 | head_ $ do meta_ [charset_ "utf-8"] 15 | plotlyCDN 16 | body_ $ toHtml $ plotly "myDiv" [myTrace] 17 | 18 | pointsData :: [(Double, Double)] 19 | pointsData = zip [1,2,3,4] [500,3000,700,200] 20 | 21 | myTrace 22 | = line (aes & x .~ fst 23 | & y .~ snd) pointsData -------------------------------------------------------------------------------- /plotlyhs/plotlyhs.cabal: -------------------------------------------------------------------------------- 1 | Name: plotlyhs 2 | Version: 0.2.2 3 | Synopsis: Haskell bindings to Plotly.js 4 | Description: 5 | Generate web-based plots with the Plotly.js library. 
6 | For examples, see <https://glutamate.github.io/plotlyhs/> 7 | 8 | 9 | License: MIT 10 | License-file: LICENSE 11 | Author: Tom Nielsen 12 | Maintainer: tanielsen@gmail.com 13 | build-type: Simple 14 | Cabal-Version: >= 1.10 15 | homepage: https://github.com/diffusionkinetics/open/plotlyhs 16 | bug-reports: https://github.com/diffusionkinetics/open/issues 17 | category: Graphics, Charts 18 | Tested-With: GHC == 7.8.4, GHC == 7.10.2, GHC == 7.10.3, GHC == 8.0.1 19 | 20 | 21 | extra-source-files: 22 | changelog.md 23 | 24 | 25 | Library 26 | ghc-options: -Wall -fno-warn-type-defaults 27 | hs-source-dirs: src 28 | default-language: Haskell2010 29 | 30 | 31 | Exposed-modules: 32 | Graphics.Plotly 33 | , Graphics.Plotly.Utils 34 | , Graphics.Plotly.Lucid 35 | , Graphics.Plotly.GoG 36 | , Graphics.Plotly.Base 37 | , Graphics.Plotly.Blaze 38 | , Graphics.Plotly.Histogram 39 | , Graphics.Plotly.Simple 40 | 41 | Build-depends: 42 | base >= 4.6 && <5 43 | , aeson 44 | , lucid 45 | , blaze-html 46 | , blaze-markup 47 | , text 48 | , time 49 | , bytestring 50 | , microlens-th 51 | , microlens 52 | -------------------------------------------------------------------------------- /plotlyhs/src/Graphics/Plotly.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric, OverloadedStrings,FlexibleInstances, TemplateHaskell #-} 2 | 3 | {-| 4 | 5 | Re-exports the Simple interface, the grammar of graphics 6 | interface and parts of the base interface. 7 | 8 | -} 9 | 10 | module Graphics.Plotly ( 11 | module Base, 12 | module Simple, 13 | module GoG 14 | ) where 15 | 16 | import Graphics.Plotly.Base as Base 17 | hiding (x,y, z, _x, _y, _z, _size, _line, size, line) 18 | import Graphics.Plotly.Simple as Simple 19 | import Graphics.Plotly.GoG as GoG 20 | 21 | -------------------------------------------------------------------------------- /plotlyhs/src/Graphics/Plotly/Blaze.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExtendedDefaultRules #-} 2 | {-# OPTIONS_GHC -fno-warn-orphans #-} 3 | 4 | {-| 5 | 6 | Plot traces to html using blaze-html 7 | 8 | Example code: 9 | 10 | @ 11 | plotHtml :: Html () 12 | plotHtml = toHtml $ plotly "myDiv" [trace] & layout . title ?~ "my plot" 13 | & layout . width ?~ 300 14 | 15 | @ 16 | 17 | where `trace` is a value of type `Trace` 18 | 19 | -} 20 | module Graphics.Plotly.Blaze where 21 | 22 | import Text.Blaze 23 | import qualified Text.Blaze.Html5 as H 24 | import qualified Text.Blaze.Html5.Attributes as A 25 | import Graphics.Plotly.Base 26 | import Data.Monoid ((<>)) 27 | import Data.Text.Encoding (decodeUtf8) 28 | import Data.ByteString.Lazy (toStrict) 29 | import Data.Aeson 30 | 31 | -- |`script` tag to go in the header to import the plotly.js javascript from the official CDN 32 | plotlyCDN :: H.Html 33 | plotlyCDN = H.script ! A.src "https://cdn.plot.ly/plotly-latest.min.js" $ "" 34 | 35 | -- |Activate a plot defined by a `Plotly` value 36 | plotlyJS :: Plotly -> H.Html 37 | plotlyJS (Plotly divNm trs lay) = 38 | let trJSON = decodeUtf8 $ toStrict $ encode trs 39 | layoutJSON = decodeUtf8 $ toStrict $ encode lay 40 | in H.script $ H.toHtml ("Plotly.newPlot('"<>divNm<>"', "<>trJSON<>","<>layoutJSON<>", {displayModeBar: false});") 41 | 42 | -- |Create a div for a Plotly value 43 | plotlyDiv :: Plotly -> H.Html 44 | plotlyDiv (Plotly divNm _ _) = 45 | H.div ! 
A.id (toValue divNm) $ "" 46 | 47 | 48 | instance ToMarkup Plotly where 49 | toMarkup pl = plotlyDiv pl >> plotlyJS pl 50 | -------------------------------------------------------------------------------- /plotlyhs/src/Graphics/Plotly/Histogram.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | 3 | Simple histograms 4 | 5 | -} 6 | 7 | module Graphics.Plotly.Histogram where 8 | 9 | import Graphics.Plotly.Base hiding (sort) 10 | import Data.List (sort, group) 11 | import Lens.Micro 12 | import Data.Aeson (toJSON) 13 | import Data.Text (Text) 14 | 15 | -- | build a histogram with a given binsize 16 | histogram :: Int -- ^ number of bins 17 | -> [Double] -- ^ the individual observations 18 | -> Trace 19 | histogram nbins pts = 20 | let (lo, hi) = (minimum pts, maximum pts) 21 | binSize = (hi - lo) / realToFrac nbins 22 | binToX :: Int -> Double 23 | binToX binN = realToFrac binN * binSize + lo 24 | binMap :: [(Int, Int)] 25 | binMap = getBinMap lo binSize pts 26 | in bars & x ?~ map (toJSON . binToX . fst) binMap & y ?~ map (toJSON . snd) binMap 27 | 28 | 29 | histMany :: Int -> [(Text, [Double])] -> [Trace] 30 | histMany nbins hdata = 31 | let allPts = concat $ map snd hdata 32 | (lo, hi) = (minimum allPts, maximum allPts) 33 | binSize = (hi - lo) / realToFrac nbins 34 | binToX :: Int -> Double 35 | binToX binN = realToFrac binN * binSize + lo 36 | getTrace (nm,pts) = 37 | let binMap = getBinMap lo binSize pts 38 | in bars & x ?~ map (toJSON . binToX . fst) binMap 39 | & y ?~ map (toJSON . snd) binMap 40 | & name ?~ nm 41 | in map getTrace hdata 42 | 43 | goFill :: [(Int,Int)] -> [(Int,Int)] 44 | goFill (car@(bin1,_):cdr@((bin2,_):_)) 45 | | bin2 == bin1 + 1 = car : goFill cdr 46 | | otherwise = car : goFill ((bin1+1,0):cdr) 47 | goFill l = l 48 | 49 | getBinMap :: Double -> Double -> [Double] -> [(Int, Int)] 50 | getBinMap lo binSize pts = 51 | let binf :: Double -> Int 52 | binf xv = floor $ (xv - lo) / binSize 53 | bins = group $ sort $ map binf pts 54 | binMap :: [(Int, Int)] 55 | binMap = goFill $ map (\is -> (head is, length is)) bins 56 | in binMap 57 | -------------------------------------------------------------------------------- /plotlyhs/src/Graphics/Plotly/Lucid.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ExtendedDefaultRules #-} 2 | {-# OPTIONS_GHC -fno-warn-orphans #-} 3 | 4 | {-| 5 | 6 | Plot traces to html using lucid 7 | 8 | Example code: 9 | 10 | @ 11 | plotHtml :: Html () 12 | plotHtml = toHtml $ plotly "myDiv" [trace] & layout . title ?~ "my plot" 13 | & layout . 
width ?~ 300 14 | 15 | @ 16 | 17 | where `trace` is a value of type `Trace` 18 | 19 | -} 20 | module Graphics.Plotly.Lucid where 21 | 22 | import Lucid 23 | import Graphics.Plotly.Base 24 | import Data.Monoid ((<>)) 25 | import Data.Text.Encoding (decodeUtf8) 26 | import Data.ByteString.Lazy (toStrict) 27 | import Data.Aeson 28 | 29 | -- |`script` tag to go in the header to import the plotly.js javascript from the official CDN 30 | plotlyCDN :: Monad m => HtmlT m () 31 | plotlyCDN = script_ [src_ "https://cdn.plot.ly/plotly-latest.min.js"] $ toHtml (""::String) 32 | 33 | -- |Activate a plot defined by a `Plotly` value 34 | plotlyJS :: Monad m => Plotly -> HtmlT m () 35 | plotlyJS (Plotly divNm trs lay) = 36 | let trJSON = decodeUtf8 $ toStrict $ encode trs 37 | layoutJSON = decodeUtf8 $ toStrict $ encode lay 38 | in script_ ("Plotly.newPlot('"<>divNm<>"', "<>trJSON<>","<>layoutJSON<>", {displayModeBar: false});") 39 | 40 | -- |Create a div for a Plotly value 41 | plotlyDiv :: Monad m => Plotly -> HtmlT m () 42 | plotlyDiv (Plotly divNm _ _) = 43 | div_ [id_ divNm] 44 | "" 45 | 46 | instance ToHtml Plotly where 47 | toHtml pl = plotlyDiv pl >> plotlyJS pl 48 | toHtmlRaw = toHtml 49 | -------------------------------------------------------------------------------- /plotlyhs/src/Graphics/Plotly/Simple.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Functions to build Traces from standard data. Generated traces can still be 3 | customized with lenses. 4 | -} 5 | module Graphics.Plotly.Simple where 6 | 7 | import Data.Aeson 8 | import Data.Text (Text) 9 | import Lens.Micro 10 | 11 | import Graphics.Plotly.Base 12 | 13 | 14 | -- |Generate a scatterplot from pairs 15 | scatterPlot :: (ToJSON a, ToJSON b) => [(a, b)] -> Trace 16 | scatterPlot xys = scatter 17 | & x ?~ fmap (toJSON . fst) xys 18 | & y ?~ fmap (toJSON . snd) xys 19 | & mode ?~ [Markers] 20 | 21 | 22 | -- |Generate a line plot from pairs 23 | linePlot :: (ToJSON a, ToJSON b) => [(a, b)] -> Trace 24 | linePlot xys = scatter 25 | & x ?~ fmap (toJSON . fst) xys 26 | & y ?~ fmap (toJSON . snd) xys 27 | & mode ?~ [Lines] 28 | 29 | 30 | -- |Generate a horizontal bar chart from pairs of text and value. 31 | hbarChart :: [(Text, Double)] -> Trace 32 | hbarChart tvs = bars 33 | & y ?~ fmap (toJSON . fst) tvs 34 | & x ?~ fmap (toJSON . snd) tvs 35 | & orientation ?~ Horizontal 36 | 37 | 38 | -- |Generate a vertical bar chart from pairs of text and value. 39 | vbarChart :: [(Text, Double)] -> Trace 40 | vbarChart tvs = bars 41 | & x ?~ fmap (toJSON . fst) tvs 42 | & y ?~ fmap (toJSON . snd) tvs 43 | & orientation ?~ Vertical 44 | 45 | 46 | -- |Generate a fan plot with a given width in standard deviations and 47 | -- (x,(y,sd)) data 48 | fanPlot :: Double -> [(Double, (Double, Double))] -> Trace 49 | fanPlot sdCount tmnsds = scatter 50 | & x ?~ fmap toJSON xs 51 | & y ?~ fmap toJSON ys 52 | & fill ?~ ToZeroY 53 | where 54 | xs = fmap fst tmnsds ++ reverse (fmap fst tmnsds) 55 | ys = fmap ((\(m, sd) -> m + sdCount * sd) . snd) tmnsds 56 | ++ reverse (fmap ((\(m, sd) -> m - sdCount * sd) . 
snd) tmnsds) 57 | 58 | -------------------------------------------------------------------------------- /plotlyhs/src/Graphics/Plotly/Utils.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | 3 | Helper functions for defining valid JSON instances 4 | 5 | -} 6 | 7 | module Graphics.Plotly.Utils where 8 | 9 | import Data.List (stripPrefix) 10 | import Data.Aeson.Types 11 | 12 | unLens :: String -> String 13 | unLens ('_':s) = s 14 | unLens s = s 15 | 16 | dropInitial :: String -> String -> String 17 | dropInitial s s' = case stripPrefix s s' of 18 | Nothing -> s' 19 | Just s'' -> s'' 20 | 21 | rename :: String -> String -> String -> String 22 | rename froms tos s | s == froms = tos 23 | | otherwise = s 24 | 25 | jsonOptions :: Options 26 | jsonOptions = defaultOptions {omitNothingFields = True, 27 | fieldLabelModifier = unLens } 28 | -------------------------------------------------------------------------------- /plotlyhs/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-8.5 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 
40 | 41 | # Dependency packages to be pulled from upstream that are not in the resolver 42 | # (e.g., acme-missiles-0.3) 43 | extra-deps: [] 44 | 45 | # Override default flag values for local packages and extra-deps 46 | flags: {} 47 | 48 | # Extra package databases containing global packages 49 | extra-package-dbs: [] 50 | 51 | # Control whether we use the GHC we find on the path 52 | # system-ghc: true 53 | # 54 | # Require a specific version of stack, using version ranges 55 | # require-stack-version: -any # Default 56 | # require-stack-version: ">=1.1" 57 | # 58 | # Override the architecture used by stack, especially useful on Windows 59 | # arch: i386 60 | # arch: x86_64 61 | # 62 | # Extra directories used by stack for building 63 | # extra-include-dirs: [/path/to/dir] 64 | # extra-lib-dirs: [/path/to/dir] 65 | # 66 | # Allow a newer minor version of GHC than the snapshot specifies 67 | # compiler-check: newer-minor 68 | 69 | # https://github.com/gibiansky/IHaskell/issues/636#issuecomment-228571480 70 | 71 | apply-ghc-options: everything 72 | ghc-options: 73 | "*": -opta-Wa,-mrelax-relocations=no -------------------------------------------------------------------------------- /plotlyhs/tests/Contour.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, FlexibleContexts #-} 2 | 3 | import Lucid 4 | import Lucid.Html5 5 | import Graphics.Plotly 6 | import Graphics.Plotly.Lucid 7 | import Graphics.Plotly.GoG 8 | import Lens.Micro 9 | 10 | import qualified Data.Text.Lazy as T 11 | import qualified Data.Text.Lazy.IO as T 12 | 13 | -- We'd like to generate 14 | -- 15 | -- 24 | 25 | zss :: [[Double]] 26 | zss = [[10, 10.625, 12.5, 15.625, 20], 27 | [5.625, 6.25, 8.125, 11.25, 15.625], 28 | [2.5, 3.125, 5.0, 8.125, 12.5], 29 | [0.625, 1.25, 3.125, 6.25, 10.625], 30 | [0, 0.625, 2.5, 5.625, 10]] 31 | 32 | xs, ys :: [Double] 33 | xs = [-9, -6, -5 , -3, -1] 34 | ys = [0, 1, 4, 5, 7] 35 | 36 | bar = hcontour (aes & x.~ fst & y .~ (fst. snd) & z .~ (snd . snd)) 37 | (zip xs (zip ys zss)) 38 | 39 | main = 40 | T.writeFile "contour.html" $ renderText $ doctypehtml_ $ do 41 | head_ $ do meta_ [charset_ "utf-8"] 42 | plotlyCDN 43 | body_ $ toHtml $ plotly "myContour" [bar] 44 | 45 | -------------------------------------------------------------------------------- /postgresql-simple-expr/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License (MIT) 2 | 3 | Copyright (c) 2017 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /postgresql-simple-expr/lib/Database/PostgreSQL/Simple/Connect.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ScopedTypeVariables, DeriveGeneric, TypeApplications #-} 2 | 3 | module Database.PostgreSQL.Simple.Connect where 4 | 5 | import System.Exit (die) 6 | import GHC.Generics 7 | import GHC.Conc 8 | 9 | import Data.Aeson 10 | import qualified Data.ByteString.Lazy as BSL 11 | import Control.Exception 12 | import System.Environment 13 | 14 | import Database.PostgreSQL.Simple 15 | import Data.Pool 16 | import Data.Time.Clock 17 | 18 | data DatabaseConfig = DatabaseConfig 19 | { user :: String 20 | , password :: String 21 | , host :: String 22 | , port :: Integer 23 | , dbname :: String 24 | , numstripes :: Int 25 | , keepOpenTime :: NominalDiffTime 26 | , resPerStripe :: Int 27 | } deriving (Show, Eq, Generic) 28 | 29 | instance FromJSON DatabaseConfig 30 | 31 | configFromEnv :: IO DatabaseConfig 32 | configFromEnv = DatabaseConfig 33 | <$> getEnv "PGUSER" 34 | <*> getEnv "PGPASSWORD" 35 | <*> getEnv "PGHOST" 36 | <*> (read <$> getEnv "PGPORT") 37 | <*> getEnv "PGDATABASE" 38 | <*> (maybe 2 read <$> (lookupEnv "PGPOOL_NUM_STRIPES")) 39 | <*> (maybe (24*60*60) (fromIntegral . read @Int) <$> 40 | (lookupEnv "PGPOOL_KEEP_OPEN_TIME")) 41 | <*> (maybe 20 read <$> (lookupEnv "PGPOOL_RES_PER_STRIPES")) 42 | 43 | 44 | createConn :: DatabaseConfig -> IO Connection 45 | createConn config = do 46 | catch (createConn' config) 47 | (\(e::SomeException) -> do putStrLn $ "Failed to connect to the database, retrying in 10s ... ("++show e++")" 48 | threadDelay $ 10 * 1000 * 1000 49 | createConn' config) 50 | 51 | dbCfgToConnectInfo :: DatabaseConfig -> ConnectInfo 52 | dbCfgToConnectInfo config = ConnectInfo 53 | { connectHost = host config 54 | , connectUser = user config 55 | , connectPassword = password config 56 | , connectDatabase = dbname config 57 | , connectPort = fromInteger $ port config 58 | } 59 | 60 | createConn' :: DatabaseConfig -> IO Connection 61 | createConn' = connect . 
dbCfgToConnectInfo 62 | 63 | createConnPool :: DatabaseConfig -> IO (Pool Connection) 64 | createConnPool cfg = createPool (createConn' cfg) close 65 | (numstripes cfg) (keepOpenTime cfg) (resPerStripe cfg) 66 | 67 | readJSON :: FromJSON a => FilePath -> IO a 68 | readJSON path = do 69 | configJson <- BSL.readFile path 70 | case eitherDecode configJson of 71 | Right config -> return config 72 | Left err -> do 73 | die $ "Can't read the config file: " ++ err 74 | -------------------------------------------------------------------------------- /postgresql-simple-expr/postgresql-simple-expr.cabal: -------------------------------------------------------------------------------- 1 | name: postgresql-simple-expr 2 | version: 0.1.0.0 3 | synopsis: Simple composable queries for postgresql-simple 4 | description: Avoid some boilerplate in some postgresql-simple queries 5 | homepage: https://github.com/diffusionkinetics/open/tree/master/postgresql-simple-expr 6 | license: MIT 7 | license-file: LICENSE 8 | author: Tom Nielsen 9 | maintainer: tanielsen@gmail.com 10 | copyright: Tom Nielsen 11 | category: Statistics 12 | build-type: Simple 13 | -- extra-source-files: 14 | cabal-version: >=1.10 15 | 16 | source-repository head 17 | type: git 18 | location: https://github.com/diffusionkinetics/open 19 | 20 | library 21 | hs-source-dirs: lib 22 | ghc-options: -fwarn-unused-imports -fno-warn-unused-do-bind -fno-warn-type-defaults 23 | default-language: Haskell2010 24 | exposed-modules: 25 | Database.PostgreSQL.Simple.Expr 26 | Database.PostgreSQL.Simple.Connect 27 | Database.PostgreSQL.Simple.FakeRows 28 | build-depends: base >= 4.6 && < 5 29 | , postgresql-simple 30 | , aeson 31 | , bytestring 32 | , text 33 | , mtl 34 | , resource-pool 35 | , time 36 | , safe-exceptions 37 | , fake 38 | , containers 39 | 40 | test-suite tests 41 | type: exitcode-stdio-1.0 42 | hs-source-dirs: tests 43 | main-is: Spec.hs 44 | other-modules: Common 45 | , KeySpec 46 | , FakeRowsSpec 47 | default-language: Haskell2010 48 | ghc-options: -threaded -rtsopts "-with-rtsopts=-N" 49 | build-depends: base 50 | , aeson 51 | , hspec 52 | , mtl 53 | , postgresql-simple 54 | , postgresql-simple-expr 55 | , text 56 | , fake 57 | -------------------------------------------------------------------------------- /postgresql-simple-expr/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by stack init 2 | # For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration/ 3 | 4 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 5 | resolver: lts-9.21 6 | 7 | # Local packages, usually specified by relative directory name 8 | packages: 9 | - '.' 
10 | 11 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 12 | extra-deps: 13 | - fake-0.1.1 14 | 15 | # Override default flag values for local packages and extra-deps 16 | flags: {} 17 | 18 | # Extra package databases containing global packages 19 | extra-package-dbs: [] 20 | 21 | # Control whether we use the GHC we find on the path 22 | # system-ghc: true 23 | 24 | # Require a specific version of stack, using version ranges 25 | # require-stack-version: -any # Default 26 | # require-stack-version: >= 1.0.0 27 | 28 | # Override the architecture used by stack, especially useful on Windows 29 | # arch: i386 30 | # arch: x86_64 31 | 32 | # Extra directories used by stack for building 33 | # extra-include-dirs: [/path/to/dir] 34 | # extra-lib-dirs: [/path/to/dir] 35 | 36 | # Allow a newer minor version of GHC than the snapshot specifies 37 | # compiler-check: newer-minor 38 | -------------------------------------------------------------------------------- /postgresql-simple-expr/tests/Common.hs: -------------------------------------------------------------------------------- 1 | module Common where 2 | 3 | import Database.PostgreSQL.Simple 4 | import Database.PostgreSQL.Simple.Connect 5 | import Control.Monad.Reader 6 | 7 | rr :: Connection -> ReaderT Connection IO a -> IO a 8 | rr conn r = runReaderT r conn 9 | 10 | setupPG :: IO Connection 11 | setupPG = do 12 | cfg <- configFromEnv 13 | createConn' cfg 14 | -------------------------------------------------------------------------------- /postgresql-simple-expr/tests/Spec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TypeFamilies #-} 2 | {-# LANGUAGE OverloadedStrings, DeriveGeneric, DeriveAnyClass #-} 3 | 4 | module Main where 5 | 6 | import Database.PostgreSQL.Simple 7 | import Database.PostgreSQL.Simple.Connect 8 | import Database.PostgreSQL.Simple.Expr 9 | import Test.Hspec 10 | 11 | import Common 12 | import KeySpec 13 | import FakeRowsSpec 14 | 15 | basicSpec :: SpecWith Connection 16 | basicSpec = 17 | describe "Expr.executeC" $ do 18 | it "should be able to create a table and destroy it" $ \c -> do 19 | rr c $ executeC "CREATE TABLE pgspec(id integer PRIMARY KEY, x text)" () 20 | rr c $ executeC "INSERT INTO pgspec(id,x) values(1,'one')" () 21 | [n] <- rr c $ queryC "select count(*) from pgspec" () 22 | rr c $ executeC "DROP TABLE pgspec" () 23 | (n :: Only Int) `shouldBe` Only 1 24 | 25 | main :: IO () 26 | main = hspec $ beforeAll setupPG $ afterAll close $ do 27 | basicSpec 28 | keySpec 29 | fakeRowsSpec 30 | -------------------------------------------------------------------------------- /stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-9.21 2 | packages: 3 | - ./plotlyhs 4 | - ./lucid-extras 5 | - ./parfoldl 6 | - ./dashdo 7 | - ./fuml 8 | - ./inliterate 9 | 10 | 11 | extra-deps: 12 | - kmeans-vector-0.3.2 13 | - probable-0.1.2 14 | -------------------------------------------------------------------------------- /stanhs/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /stanhs/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/stanhs/README.md -------------------------------------------------------------------------------- /stanhs/changelog.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/diffusionkinetics/open/597ef520bb42c4747576032be2f7e3046ca66178/stanhs/changelog.md -------------------------------------------------------------------------------- /stanhs/lib/Stan/AST.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass, DeriveGeneric, FlexibleInstances #-} 2 | 3 | module Stan.AST where 4 | 5 | import Data.Hashable 6 | import GHC.Generics (Generic) 7 | import Data.String 8 | 9 | type Var = String 10 | 11 | data Stan = Data [Decl] 12 | | TransformedData [Decl] 13 | | Parameters [Decl] 14 | | TransformedParameters [Decl] 15 | | Model [Decl] 16 | | GeneratedQuantities [Decl] 17 | deriving (Eq, Show, Generic, Hashable) 18 | 19 | data Decl = Type ::: (Var,[Expr]) 20 | | (Var,[Expr]) := Expr 21 | | (Var,[Expr]) :~ (String, [Expr]) 22 | | For Var Expr Expr [Decl] 23 | | Print String [Expr] 24 | deriving (Eq, Show, Generic, Hashable) 25 | 26 | data Type = Real 27 | | Int 28 | | Bounded (Maybe Expr) 29 | (Maybe Expr) 30 | Type 31 | deriving (Eq, Show, Generic, Hashable) 32 | 33 | data Expr = LitInt Int 34 | | LitFloat Float 35 | | BinOp String Expr Expr 36 | | Ix Expr [Expr] 37 | | Apply String [Expr] 38 | | Var Var 39 | deriving (Eq, Show, Generic, Hashable) 40 | 41 | infixl 1 := 42 | infixl 1 ::: 43 | class Indexable a where 44 | (!) :: a -> [Expr] -> a 45 | 46 | instance Indexable Expr where 47 | (!) = Ix 48 | 49 | instance Indexable (Var,[Expr]) where 50 | (v,exprs) ! es = (v,exprs++es) 51 | 52 | instance Num Expr where 53 | e1 + e2 = BinOp "+" e1 e2 54 | e1 - e2 = BinOp "-" e1 e2 55 | e1 * e2 = BinOp "*" e1 e2 56 | negate e = Apply "-" [e] 57 | abs e = Apply "abs" [e] 58 | signum _ = error "stan: signum?" 
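  -- These Num/Fractional/Floating instances let plain numeric literals and
  -- arithmetic build Stan 'Expr' nodes directly, as in
  -- "mu" + "tau" * "eta"!["j"] and normal (0,1) in Stan.Schools; integer
  -- literals such as 0 and 1 arrive here through fromInteger.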
59 | fromInteger x = LitInt (fromInteger x) 60 | 61 | instance Fractional Expr where 62 | e1 / e2 = BinOp "/" e1 e2 63 | fromRational x = LitFloat $ fromRational x 64 | 65 | instance Floating Expr where 66 | pi = LitFloat pi 67 | exp e = Apply "exp" [e] 68 | log e = Apply "log" [e] 69 | sqrt e = Apply "sqrt" [e] 70 | sin e = Apply "sin" [e] 71 | cos e = Apply "cos" [e] 72 | tan e = Apply "tan" [e] 73 | asin e = Apply "asin" [e] 74 | acos e = Apply "acos" [e] 75 | atan e = Apply "atan" [e] 76 | asinh e = Apply "asinh" [e] 77 | acosh e = Apply "acosh" [e] 78 | atanh e = Apply "atanh" [e] 79 | sinh e = Apply "sinh" [e] 80 | cosh e = Apply "cosh" [e] 81 | tanh e = Apply "tanh" [e] 82 | 83 | instance IsString Expr where 84 | fromString s = Var s 85 | 86 | instance IsString (Var,[Expr]) where 87 | fromString s = (s, []) 88 | 89 | normal :: (Expr , Expr) -> (String, [Expr]) 90 | normal (mn, sd) = ("normal", [mn,sd]) 91 | 92 | gamma :: (Expr , Expr) -> (String, [Expr]) 93 | gamma (a, b) = ("gamma", [a,b]) 94 | 95 | exponential :: Expr -> (String, [Expr]) 96 | exponential mu = ("exponential", [mu]) 97 | 98 | dot :: Expr -> Expr -> Expr 99 | dot e1 e2 = Apply "dot_product" [e1,e2] 100 | 101 | lower :: Expr -> Type -> Type 102 | lower lo ty = Bounded (Just lo) Nothing ty -------------------------------------------------------------------------------- /stanhs/lib/Stan/Schools.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Stan.Schools where 4 | 5 | import Stan.AST 6 | import Stan.Data 7 | import Data.Monoid ((<>)) 8 | 9 | 10 | schools :: [Stan] 11 | schools = [ 12 | Data [ lower 0 Int ::: "J" 13 | , Real ::: "y"!["J"] 14 | , lower 0 Real ::: "sigma"!["J"] 15 | ], 16 | Parameters [ Real ::: "mu" 17 | , lower 0 Real ::: "tau" 18 | , Real ::: "eta"!["J"] 19 | ], 20 | TransformedParameters [ Real ::: "theta"!["J"] 21 | , For "j" 1 "J" [ 22 | "theta"!["j"] := "mu" + "tau" * "eta"!["j"] 23 | ] 24 | 25 | ], 26 | Model [ "eta" :~ normal (0,1) 27 | , "y" :~ normal ("theta","sigma") 28 | ] 29 | ] 30 | 31 | j :: Int 32 | j = 8 33 | 34 | y :: [Double] 35 | y = [28, 8, -3, 7, -1, 1, 18, 12] 36 | 37 | sigma :: [Double] 38 | sigma = [15, 10, 16, 11, 9, 11, 10, 18] 39 | 40 | schoolData :: StanData 41 | schoolData = "J" <~ j <> 42 | "y" <~ y <> 43 | "sigma" <~ sigma 44 | -------------------------------------------------------------------------------- /stanhs/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./cstom-snapshot.yaml" 18 | resolver: lts-9.21 19 | 20 | # User packages to be built. 21 | # Various formats can be used as shown in the example below. 
22 | # 23 | # packages: 24 | # - some-directory 25 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 26 | # - location: 27 | # git: https://github.com/commercialhaskell/stack.git 28 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 29 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 30 | # extra-dep: true 31 | # subdirs: 32 | # - auto-update 33 | # - wai 34 | # 35 | # A package marked 'extra-dep: true' will only be built if demanded by a 36 | # non-dependency (i.e. a user package), and its test suites and benchmarks 37 | # will not be run. This is useful for tweaking upstream packages. 38 | packages: 39 | - '.' 40 | 41 | # Dependency packages to be pulled from upstream that are not in the resolver 42 | # (e.g., acme-missiles-0.3) 43 | extra-deps: [] 44 | 45 | # Override default flag values for local packages and extra-deps 46 | flags: {} 47 | 48 | # Extra package databases containing global packages 49 | extra-package-dbs: [] 50 | 51 | # Control whether we use the GHC we find on the path 52 | # system-ghc: true 53 | # 54 | # Require a specific version of stack, using version ranges 55 | # require-stack-version: -any # Default 56 | # require-stack-version: ">=1.1" 57 | # 58 | # Override the architecture used by stack, especially useful on Windows 59 | # arch: i386 60 | # arch: x86_64 61 | # 62 | # Extra directories used by stack for building 63 | # extra-include-dirs: [/path/to/dir] 64 | # extra-lib-dirs: [/path/to/dir] 65 | # -------------------------------------------------------------------------------- /stanhs/stanhs.cabal: -------------------------------------------------------------------------------- 1 | Name: stanhs 2 | Version: 0.1.0 3 | Synopsis: Bindings to Stan (Bayesian inference with MCMC) 4 | Description: 5 | Bindings to Stan, a language for Bayesian inference with MCMC and other methods 6 | 7 | License: MIT 8 | License-file: LICENSE 9 | Author: Tom Nielsen 10 | Maintainer: tanielsen@gmail.com 11 | build-type: Simple 12 | Cabal-Version: >= 1.8 13 | homepage: https://github.com/diffusionkinetics/open/stanhs 14 | bug-reports: https://github.com/diffusionkinetics/open/issues 15 | category: Control, Statistics 16 | Tested-With: GHC == 7.8.4, GHC == 7.10.2, GHC == 7.10.3, GHC == 8.0.1 17 | 18 | 19 | extra-source-files: 20 | changelog.md 21 | 22 | 23 | Library 24 | ghc-options: -Wall -fno-warn-type-defaults -fno-warn-orphans 25 | hs-source-dirs: lib 26 | 27 | Exposed-modules: 28 | Stan.AST 29 | , Stan.AST.Pretty 30 | , Stan.Data 31 | , Stan.Schools 32 | , Stan.Run 33 | , Stan.Simulate 34 | 35 | Build-depends: 36 | base >= 4.6 && <5 37 | , pretty 38 | , directory 39 | , hashable 40 | , filepath 41 | , process 42 | , containers 43 | , random 44 | , vector 45 | , mtl 46 | , random-fu 47 | , random-source 48 | 49 | Test-Suite test-stan 50 | type: exitcode-stdio-1.0 51 | main-is: test-stan.hs 52 | build-depends: base 53 | , stanhs -------------------------------------------------------------------------------- /stanhs/test-stan.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Stan.AST 4 | import Stan.AST.Pretty 5 | import Stan.Run 6 | import Stan.Schools 7 | 8 | myExpr :: Expr 9 | myExpr = sin $ Ix (Var "xs") [2+1*3] 10 | 11 | myExpr1 :: Expr 12 | myExpr1 = 2*(1+3) 13 | 14 | myModel :: Stan 15 | myModel = Model [ 16 | Real ::: ("foo",[]), 17 | ("foo", []) := myExpr 18 | ] 19 | 20 | main :: IO () 21 | main = do 22 | putStrLn "" 23 | -- putStrLn $ pp 
myExpr 24 | -- putStrLn $ pp myModel 25 | putStrLn $ ppStans schools 26 | res <- runStan schools schoolData sample {numSamples = 1000} 27 | putStrLn $ take 400 $ show res 28 | res1 <- runStan schools schoolData optimize 29 | putStrLn $ take 400 $ show res1 30 | return () 31 | -------------------------------------------------------------------------------- /youido/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License (MIT) 2 | 3 | Copyright (c) 2017 Tom Nielsen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /youido/README.md: -------------------------------------------------------------------------------- 1 | Youido 2 | ============= 3 | 4 | Youido is a high-level framework for developing Haskell web applications that satisfy a number of highly restrictive assumptions: 5 | 6 | 1. You are building enterprise-style, not consumer-facing, applications. 7 | 2. You don't care about SEO ranking or what the URL looks like. 8 | 3. Your functionality is guarded by a user login. 9 | 4. All rendering is done server-side. 10 | 11 | Within these assumptions, youido attempts to combine what we see as the best aspects of various Haskell 12 | web frameworks. Request handling is performed by pattern matching on algebraic data types, as in Yesod, 13 | but these datatypes need not be combined into a single type, which allows handlers to be composed and reused. 14 | 15 | The main API is given in [Youido.Types](https://github.com/diffusionkinetics/open/blob/master/youido/lib/Youido/Types.hs), 16 | and an [example](https://github.com/diffusionkinetics/open/blob/master/youido/examples/Example.hs) is given. 17 | 18 | The principal mechanism of request handling is pattern matching on types that implement the `FromRequest` typeclass. 19 | 20 | Dashboards defined with [dashdo](https://github.com/diffusionkinetics/open/tree/master/dashdo) can 21 | also be connected to a youido application. This is illustrated in the example. 22 | -------------------------------------------------------------------------------- /youido/form-repeat.js: -------------------------------------------------------------------------------- 1 | var youidoItemClass = 'youido_multi_item'; 2 | var youidoDummyItem = 'youido_dummy_item'; 3 | 4 | function youidoReplaceIndex(currentPath, pathRegexp, idx) { 5 | return currentPath.replace(pathRegexp, '$1.' 
+ idx); 6 | } 7 | 8 | function youidoUpdatePaths($items, fieldName, fieldPath) { 9 | // regex matches e.g. form.fieldName.0 and form.fieldName.-1 10 | var regex = new RegExp('^(' + fieldPath + ')\\.-?\\d+'); 11 | var attrs = ['for', 'id', 'name']; 12 | $items.each(function (idx) { 13 | for (var j=0; j < attrs.length; j++) { 14 | $(this).find("*[" + attrs[j] + "^='" + fieldPath + ".']") 15 | .attr(attrs[j], function(i,old) { 16 | return youidoReplaceIndex(old, regex, idx); 17 | }); 18 | } 19 | }); 20 | } 21 | 22 | function youidoGetFieldPath(itemsDiv) { 23 | var indices = $(itemsDiv).children("input[id$='.indices']")[0]; 24 | if (!!indices) { 25 | return indices.id.replace(/\.indices$/, ''); 26 | } else return null; 27 | } 28 | 29 | function youidoUpdateIndices(fieldPath, newLength) { 30 | var newVal = ''; 31 | for (var i=0; i < newLength; i++) { 32 | newVal = newVal + i; 33 | if (i < newLength - 1) { 34 | newVal = newVal + ','; 35 | } 36 | } 37 | var indices = document.getElementById(fieldPath + '.indices'); 38 | indices.setAttribute('value', newVal); 39 | } 40 | 41 | function youidoUpdate($items, fieldName, fieldPath) { 42 | var dummySel = "[id='" + fieldPath + '.' + youidoDummyItem + "']"; 43 | var $itemsNoDummy = $items.not(dummySel); 44 | youidoUpdatePaths($itemsNoDummy, fieldName, fieldPath); 45 | youidoUpdateIndices(fieldPath, $itemsNoDummy.length); 46 | } 47 | 48 | function youidoAddItem(itemsDiv, fieldName) { 49 | var fieldPath = youidoGetFieldPath(itemsDiv); 50 | var dummyId = fieldPath + '.' + youidoDummyItem; 51 | var dummy = document.getElementById(dummyId); 52 | var newItem = dummy.cloneNode(true); 53 | newItem.setAttribute('style', 'display: inherit'); 54 | newItem.setAttribute('id', newItem.getAttribute('id').replace(dummyId, '')); 55 | var $items = $(itemsDiv).children('div.' + youidoItemClass); 56 | $items[$items.length - 1].after(newItem); 57 | $items.push(newItem); 58 | youidoUpdate($items, fieldName, fieldPath); 59 | } 60 | 61 | function youidoRemoveItem(item, fieldName) { 62 | var itemsDiv = item.parentNode; 63 | var fieldPath = youidoGetFieldPath(itemsDiv); 64 | itemsDiv.removeChild(item); 65 | youidoUpdate($(itemsDiv).children('div.' 
+ youidoItemClass), fieldName, fieldPath); 66 | } 67 | -------------------------------------------------------------------------------- /youido/lib/Youido.hs: -------------------------------------------------------------------------------- 1 | module Youido where 2 | 3 | -------------------------------------------------------------------------------- /youido/lib/Youido/Authentication.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings, ScopedTypeVariables, TupleSections, 2 | DeriveGeneric, ExtendedDefaultRules, FlexibleContexts#-} 3 | 4 | module Youido.Authentication where 5 | 6 | import Web.Scotty 7 | import Web.Scotty.Cookie 8 | import Control.Concurrent.STM 9 | import qualified Data.IntMap 10 | import Control.Monad.IO.Class 11 | import Data.Text (Text, unpack, pack) 12 | 13 | import System.Random 14 | import Text.Read (readMaybe) 15 | 16 | import Data.ByteString(ByteString) 17 | 18 | -------------------------------------------------------------------------- 19 | --- SERVING 20 | -------------------------------------------------------------------------- 21 | 22 | 23 | newSession :: TVar (Data.IntMap.IntMap a) -> a -> ActionM () 24 | newSession tv x = do 25 | n <- liftIO $ randomRIO (0,99999999999) 26 | liftIO $ atomically $ modifyTVar' tv (Data.IntMap.insert n x) 27 | setSimpleCookie "youisess" (pack $ show n) 28 | return () 29 | 30 | lookupSession :: TVar (Data.IntMap.IntMap a) -> ActionM (Maybe (Int, a)) 31 | lookupSession tv = do 32 | mi <- (>>=readMaybe) . fmap unpack <$> getCookie "youisess" 33 | case mi of 34 | Nothing -> return Nothing 35 | Just i -> do 36 | mp <- liftIO $ readTVarIO tv 37 | return $ fmap (i,) $ Data.IntMap.lookup i mp 38 | 39 | 40 | 41 | deleteSession :: TVar (Data.IntMap.IntMap a) -> Int -> ActionM () 42 | deleteSession tv n = do 43 | liftIO $ atomically $ modifyTVar' tv (Data.IntMap.delete n) 44 | deleteCookie "youisess" 45 | return () 46 | 47 | -------------------------------------------------------------------------------- /youido/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by stack init 2 | # For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration/ 3 | 4 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 5 | resolver: lts-9.21 6 | 7 | # Local packages, usually specified by relative directory name 8 | packages: 9 | - '.' 
10 | - '../lucid-extras' 11 | - '../dashdo' 12 | - '../datasets' 13 | 14 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 15 | extra-deps: 16 | - plotlyhs-0.2 17 | - scotty-cookie-0.1.0.3 18 | - digestive-functors-0.8.3.0 19 | - digestive-functors-lucid-0.0.0.5 20 | 21 | # Override default flag values for local packages and extra-deps 22 | flags: {} 23 | 24 | # Extra package databases containing global packages 25 | extra-package-dbs: [] 26 | 27 | # Control whether we use the GHC we find on the path 28 | # system-ghc: true 29 | 30 | # Require a specific version of stack, using version ranges 31 | # require-stack-version: -any # Default 32 | # require-stack-version: >= 1.0.0 33 | 34 | # Override the architecture used by stack, especially useful on Windows 35 | # arch: i386 36 | # arch: x86_64 37 | 38 | # Extra directories used by stack for building 39 | # extra-include-dirs: [/path/to/dir] 40 | # extra-lib-dirs: [/path/to/dir] 41 | 42 | # Allow a newer minor version of GHC than the snapshot specifies 43 | # compiler-check: newer-minor 44 | -------------------------------------------------------------------------------- /youido/youido.cabal: -------------------------------------------------------------------------------- 1 | name: youido 2 | version: 0.1.0.0 3 | synopsis: Web application constructor kit 4 | description: Web application constructor kit. 5 | homepage: https://github.com/diffusionkinetics/open/tree/master/youido 6 | license: MIT 7 | license-file: LICENSE 8 | author: Tom Nielsen 9 | maintainer: tomn@diffusionkinetics.com 10 | copyright: Tom Nielsen 11 | category: Web 12 | build-type: Simple 13 | extra-source-files: form-repeat.js 14 | cabal-version: >=1.10 15 | 16 | source-repository head 17 | type: git 18 | location: https://github.com/diffusionkinetics/open 19 | 20 | Flag example 21 | Description: Build an example executable 22 | Default: False 23 | 24 | library 25 | hs-source-dirs: lib 26 | ghc-options: -fwarn-unused-imports -fno-warn-unused-do-bind -fno-warn-type-defaults 27 | default-language: Haskell2010 28 | exposed-modules: Youido 29 | Youido.Types 30 | Youido.Serve 31 | Youido.Dashdo 32 | Youido.Authentication 33 | build-depends: base >= 4.6 && < 5 34 | , text 35 | , lucid 36 | , lucid-extras 37 | , microlens 38 | , mtl 39 | , random-fu 40 | , scotty 41 | , postgresql-simple 42 | , wai-middleware-static 43 | , wai-extra 44 | , wai 45 | , aeson 46 | , bytestring 47 | , http-types 48 | , dashdo 49 | , void 50 | , microlens-platform 51 | , stm 52 | , containers 53 | , random 54 | , scotty-cookie 55 | , bcrypt 56 | , split 57 | , parsec 58 | , digestive-functors 59 | , digestive-functors-lucid 60 | , transformers 61 | 62 | Executable youido-example 63 | main-is: examples/Example.hs 64 | if flag(example) 65 | Buildable: True 66 | else 67 | Buildable: False 68 | 69 | build-depends: base >=4.6 && <5 70 | , youido 71 | , lucid 72 | , lucid-extras 73 | , dashdo 74 | , mtl 75 | , wai 76 | , microlens-platform 77 | , text 78 | , datasets 79 | , digestive-functors 80 | , digestive-functors-lucid 81 | , stm 82 | --------------------------------------------------------------------------------