├── .github └── FUNDING.yml ├── .gitignore ├── .replit ├── APACHE_LICENSE-2.0.txt ├── Blackjack ├── .gitignore ├── Blackjack.cabal ├── Card.hs ├── LICENSE ├── Main.hs ├── README.md ├── RandomizedList.hs ├── Table.hs └── stack.yaml ├── BraveSearch ├── BraveSearch.hs ├── Main.hs ├── README.md └── brave-search-client.cabal ├── ClientServer ├── Client.hs ├── ClientServer.cabal ├── LICENSE ├── README.md ├── Server.hs └── stack.yaml ├── CommandLineApp ├── .gitignore ├── CommandLine1.hs ├── CommandLine2.hs ├── CommandLineApp.cabal ├── GameLoop1.hs ├── GameLoop2.hs ├── LICENSE ├── README.md ├── ReadTextFile.hs ├── ReadTextFileErrorHandling.hs └── stack.yaml ├── Database-postgres ├── .gitignore ├── LICENSE ├── README.md ├── TestDatabase.cabal ├── TestPostgres1.hs └── stack.yaml ├── Database-sqlite ├── .gitignore ├── LICENSE ├── README.md ├── TestDatabase.cabal ├── TestSqLite1.hs └── stack.yaml ├── FastTag ├── FastTag.cabal ├── LICENSE ├── LexiconData.hs ├── README.md ├── fasttag.hs └── stack.yaml ├── HybridHaskellPythonCorefAnaphoraResolution ├── .gitignore ├── ChangeLog.md ├── HybridHaskellPythonCorefAnaphoraResolution.cabal ├── README.md ├── Setup.hs ├── app │ └── Main.hs ├── python_coreference_anaphora_resolution_server │ ├── Makefile │ ├── README.md │ ├── bin │ │ └── corefserver │ ├── corefserver │ │ ├── coref_server.py │ │ └── test │ │ │ └── README.md │ ├── setup.py │ └── test_client.py ├── src │ └── CorefWebClient.hs ├── stack.yaml └── test │ └── Spec.hs ├── HybridHaskellPythonNlp ├── .gitignore ├── ChangeLog.md ├── LICENSE ├── README.md ├── Setup.hs ├── app │ └── Main.hs ├── python_spacy_nlp_server │ ├── Makefile │ ├── README.md │ ├── bin │ │ └── spacynlpserver │ ├── setup.py │ ├── spacynlpserver │ │ ├── server_spacy.py │ │ └── test │ │ │ └── README.md │ └── test_client.py ├── src │ └── NlpWebClient.hs ├── stack.yaml └── test │ └── Spec.hs ├── ImPure ├── .gitignore ├── CommonWords.hs ├── DoLetExample.hs ├── DoLetExample2.hs ├── DoLetExample3.hs ├── FmapExample.hs ├── ImPure.cabal ├── README.md ├── stack.yaml ├── text1.txt └── text2.txt ├── Makefile ├── NlpTool ├── .gitignore ├── NlpTool.cabal ├── README.md ├── WebApp.hs ├── app │ └── NlpTool.hs ├── src │ └── nlp │ │ ├── Categorize.hs │ │ ├── Entities.hs │ │ ├── NlpUtils.hs │ │ ├── Sentence.hs │ │ ├── Stemmer.hs │ │ ├── Summarize.hs │ │ └── data │ │ ├── BroadcastNetworkNamesDbPedia.hs │ │ ├── Category1Gram.hs │ │ ├── Category2Gram.hs │ │ ├── CityNamesDbpedia.hs │ │ ├── CompanyNamesDbpedia.hs │ │ ├── CountryNamesDbpedia.hs │ │ ├── PeopleDbPedia.hs │ │ ├── PoliticalPartyNamesDbPedia.hs │ │ ├── TradeUnionNamesDbPedia.hs │ │ └── UniversityNamesDbPedia.hs └── stack.yaml ├── OpenAiApiClient ├── .gitignore ├── GenText.hs ├── OpenAiApiClient.cabal ├── README.md └── stack.yaml ├── Pure ├── .gitignore ├── Cases.hs ├── ChainedCalls.hs ├── Conditionals.hs ├── Guards.hs ├── IfThenElses.hs ├── LICENSE ├── LetAndWhere.hs ├── MapExamples.hs ├── MyColors.hs ├── NoIO.hs ├── Pure.cabal ├── README.md ├── Simple.hs ├── myfunc1.hs └── stack.yaml ├── README.md ├── SparqlClient ├── .gitignore ├── HttpSparqlClient.hs ├── HttpSparqlJsonClient.hs ├── JsonTest.hs ├── README.md ├── RobsExample.hs ├── SparqlClient.cabal ├── TestSparqlClient.hs └── stack.yaml ├── StateMonad ├── README.md ├── State1.hs ├── stack.yaml └── statemonad.cabal ├── TestingHaskell ├── README.md ├── Setup.hs ├── TestingHaskell.cabal ├── app │ └── Main.hs ├── src │ └── MyColors.hs ├── stack.yaml └── test │ └── Spec.hs ├── TextProcessing ├── CleanText.hs ├── README.md ├── TestAESON.hs ├── TestCSV.hs 
├── TestTextJSON.hs ├── TextProcessing.cabal ├── stack.yaml └── test.csv ├── Timers ├── .gitignore ├── README.md ├── TimerTest.hs ├── Timers.cabal └── stack.yaml ├── WebScraping ├── README.md ├── TagSoupTest.hs ├── WebScraping.cabal └── stack.yaml ├── debugging ├── LICENSE ├── README.md ├── TraceTimerTest.hs ├── debugging.cabal └── stack.yaml ├── gemini_commandline ├── Main.hs ├── README.md ├── cabal.project.freeze └── gemini.cabal ├── haskell.svg ├── knowledge_graph_creator_pure ├── .gitignore ├── KGCreator.cabal ├── LICENSE ├── Makefile ├── NEO4J_NOTES.md ├── README.md ├── Setup.hs ├── TBD_FIX.md ├── app │ └── Main.hs ├── dev │ ├── Dev.hs │ └── DevSummarize.hs ├── python_utils │ └── sparql_file_query_example.py ├── src │ ├── fileutils │ │ ├── BlackBoard.hs │ │ ├── DirUtils.hs │ │ └── FileUtils.hs │ ├── sw │ │ ├── GenNeo4jCypher.hs │ │ └── GenTriples.hs │ ├── toplevel │ │ └── Apis.hs │ └── webclients │ │ ├── ClassificationWebClient.hs │ │ ├── CorefWebClient.hs │ │ └── NlpWebClient.hs ├── stack.yaml ├── stack.yaml.lock ├── test │ └── Spec.hs └── test_data │ ├── test1.meta │ ├── test1.txt │ ├── test2.meta │ ├── test2.txt │ ├── test3.meta │ └── test3.txt ├── ollama_commandline ├── Main.hs ├── README.md ├── cabal.project.freeze └── ollama-client.cabal ├── replit.nix └── webchat ├── README.md ├── cabal.project.freeze ├── gemini-chat.cabal └── main.hs /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: mark-watson # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | markwatson: # Replace with a single Patreon username 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | test.db 2 | .idea 3 | *.iml 4 | .stack-work 5 | *~ 6 | _* 7 | .DS_Store 8 | NLPdb.hsproj 9 | .vscode 10 | stack.yaml.lock 11 | out 12 | dist-newstyle 13 | *.dyn_hi 14 | *.dyn_o 15 | 16 | -------------------------------------------------------------------------------- /.replit: -------------------------------------------------------------------------------- 1 | modules = ["haskell-ghc9.4", "python-3.12"] 2 | run = "ls" 3 | 4 | [nix] 5 | channel = "stable-24_05" 6 | 7 | [deployment] 8 | run = ["sh", "-c", "ls"] 9 | -------------------------------------------------------------------------------- /Blackjack/.gitignore: -------------------------------------------------------------------------------- 1 | .stack-work 2 | -------------------------------------------------------------------------------- /Blackjack/Blackjack.cabal: -------------------------------------------------------------------------------- 1 | name: Blackjack 2 | version: 0.1.0.0 3 | synopsis: Simple project template from stack 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable Blackjack 16 | hs-source-dirs: . 
17 | main-is: Main.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.8.2.0, containers, random, lens, array, MonadRandom 20 | other-modules: Card, Table, RandomizedList 21 | 22 | -------------------------------------------------------------------------------- /Blackjack/Card.hs: -------------------------------------------------------------------------------- 1 | module Card (Card, Rank, Suit, orderedCardDeck, cardValue) where 2 | 3 | import Data.Maybe (fromMaybe) 4 | import Data.List (elemIndex) 5 | import Data.Map (fromList, lookup, keys) 6 | 7 | data Card = Card { rank :: Rank 8 | , suit :: Suit } 9 | deriving (Eq, Show) 10 | 11 | data Suit = Hearts | Diamonds | Clubs | Spades 12 | deriving (Eq, Show, Enum, Ord) 13 | 14 | data Rank = Two | Three | Four 15 | | Five | Six | Seven | Eight 16 | | Nine | Ten | Jack | Queen | King | Ace 17 | deriving (Eq, Show, Enum, Ord) 18 | 19 | rankMap = fromList [(Two,2), (Three,3), (Four,4), (Five,5), 20 | (Six,6), (Seven,7), (Eight,8), (Nine,9), 21 | (Ten,10), (Jack,10), (Queen,10), 22 | (King,10), (Ace,11)] 23 | 24 | orderedCardDeck :: [Card] 25 | orderedCardDeck = [Card rank suit | rank <- keys rankMap, 26 | suit <- [Hearts .. Clubs]] 27 | 28 | cardValue :: Card -> Int 29 | cardValue aCard = 30 | case (Data.Map.lookup (rank aCard) rankMap) of 31 | Just n -> n 32 | Nothing -> 0 -- should never happen 33 | 34 | -------------------------------------------------------------------------------- /Blackjack/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017, Mark L. Watson 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 7 | 8 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | 10 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 11 | 12 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
13 | -------------------------------------------------------------------------------- /Blackjack/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Card -- pure code 4 | import Table -- pure code 5 | import RandomizedList -- impure code 6 | 7 | printTable :: Table -> IO () 8 | printTable aTable = 9 | putStrLn $ showTable aTable 10 | 11 | randomDeck = 12 | randomizedList orderedCardDeck 13 | 14 | gameLoop :: Table -> Int -> IO b 15 | gameLoop aTable numberOfPlayers = do 16 | printTable aTable 17 | cardDeck <- randomDeck 18 | if (handOver aTable) then 19 | do 20 | putStrLn "\nHand over. State of table at the end of the game:\n" 21 | printTable aTable 22 | putStrLn "\nNewly dealt hand:\n" 23 | gameLoop (initialDeal cardDeck (scoreHands aTable) numberOfPlayers) numberOfPlayers 24 | else 25 | do 26 | putStrLn "Enter command: h)it or set bet to 10, 20, 30; any other key to stay:" 27 | command <- getLine 28 | if elem command ["10", "20", "30"] then gameLoop (setPlayerBet (read command) aTable) numberOfPlayers 29 | else 30 | if command == "h" then gameLoop (dealCards aTable [0 .. numberOfPlayers]) numberOfPlayers 31 | else gameLoop (setPlayerPasses (dealCards aTable [1 .. numberOfPlayers])) numberOfPlayers 32 | -- player stays (no new cards) 33 | 34 | main :: IO b 35 | main = do 36 | print "Start a game of Blackjack. Besides yourself, how many other players do you want at the table?" 37 | s <- getLine 38 | let num = (read s :: Int) + 1 -- player indices: 0)user, 1)dealer, and > 1 are the other players 39 | cardDeck <- randomDeck 40 | let aTable = initialDeal cardDeck (createNewTable num) num 41 | gameLoop aTable num 42 | -------------------------------------------------------------------------------- /Blackjack/README.md: -------------------------------------------------------------------------------- 1 | # Simple implementation of Blackjack game 2 | 3 | The are a few limitations in this implementation: 4 | 5 | - Aces always count as 11 points (instead of 11 or 1) 6 | - The command line user interface does not hide "down cards" beecause it is intended to show internal state of the game 7 | 8 | 9 | ## Running game 10 | 11 | stack build --exec Blackjack 12 | 13 | Notes: 14 | 15 | - Start by entering the number of players (besides the game user) at the table (a good value is 1) 16 | - In the main game loop, you can enter one of: 17 | -- 10, 20, 30: change the user's bet by typing a number 18 | -- h: hit (user gets dealt a card) and the dealer and other player hit if they have < 17 19 | -- blank line: user passes and other players and dealer keep hitting until that have > 16 or they bust (have > 21) 20 | 21 | After a user pass, just start a new game by hitting 'h', etc. 22 | 23 | ## Maintain state of game without using a State Monad 24 | 25 | Solution, I used my code for the "Game Loop" pattern from my book [Haskell Tutorial and Cookbook](https://leanpub.com/haskell-cookbook). 26 | 27 | ### Player Chips 28 | 29 | Starting value of 10, can be changed during the game. 30 | 31 | ## Data Transformations 32 | 33 | I maintain a read-only value for a Table value. Many functions take a Table and return a modified Table. 
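
As a minimal sketch of this pure-transformation style (using a simplified stand-in record here, not the actual `Table` type defined in `Table.hs`), a bet update is just a function from an old value to a new value:

```haskell
-- Simplified stand-in for the real Table type; for illustration only.
data SimpleTable = SimpleTable { playerBet :: Int, playerChips :: Int }
  deriving (Show)

-- Pure update: nothing is mutated, a new SimpleTable value is returned.
setBet :: Int -> SimpleTable -> SimpleTable
setBet amount t = t { playerBet = amount }

-- Updates compose as ordinary functions (applied right to left).
placeAndPay :: Int -> SimpleTable -> SimpleTable
placeAndPay amount =
  setBet amount . (\t -> t { playerChips = playerChips t - amount })

-- ghci> placeAndPay 10 (SimpleTable 0 100)
-- SimpleTable {playerBet = 10, playerChips = 90}
```

The game loop in `Main.hs` threads the current `Table` through calls such as `setPlayerBet` and `dealCards` in the same way, so no mutable state or State monad is needed.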
34 | 35 | 36 | ## Interactive development 37 | 38 | -- After 'stack ghci' the following is useful during development: 39 | 40 | -- let cardDeck = [Card {rank = Eight, suit = Clubs},Card {rank = Three, suit = Hearts},Card {rank = Ace, suit = Hearts},Card {rank = Six, suit = Hearts},Card {rank = Ace, suit = Clubs},Card {rank = Jack, suit = Diamonds},Card {rank = Nine, suit = Clubs},Card {rank = Two, suit = Clubs},Card {rank = Ten, suit = Hearts},Card {rank = Jack, suit = Clubs},Card {rank = Five, suit = Clubs},Card {rank = Four, suit = Diamonds},Card {rank = Queen, suit = Clubs},Card {rank = King, suit = Diamonds},Card {rank = Ace, suit = Diamonds},Card {rank = Nine, suit = Diamonds},Card {rank = Eight, suit = Hearts},Card {rank = Ten, suit = Diamonds},Card {rank = King, suit = Hearts},Card {rank = Queen, suit = Diamonds},Card {rank = Four, suit = Hearts},Card {rank = Seven, suit = Hearts},Card {rank = King, suit = Clubs},Card {rank = Ten, suit = Clubs},Card {rank = Jack, suit = Hearts},Card {rank = Three, suit = Clubs},Card {rank = Two, suit = Hearts},Card {rank = Seven, suit = Diamonds},Card {rank = Nine, suit = Hearts},Card {rank = Eight, suit = Diamonds},Card {rank = Three, suit = Diamonds},Card {rank = Four, suit = Clubs},Card {rank = Queen, suit = Hearts},Card {rank = Seven, suit = Clubs},Card {rank = Two, suit = Diamonds},Card {rank = Six, suit = Clubs},Card {rank = Five, suit = Diamonds},Card {rank = Six, suit = Diamonds},Card {rank = Five, suit = Hearts}] 41 | -- let tt = setCardDeck cardDeck (createNewTable 2) 42 | -- let tt2 = dealCardToUser tt 1 43 | -- setPlayerBet 99 tt 44 | -- changeChipStack 2 88 tt 45 | -- _chipStacks tt 46 | -------------------------------------------------------------------------------- /Blackjack/RandomizedList.hs: -------------------------------------------------------------------------------- 1 | module RandomizedList (randomizedList) where 2 | 3 | 4 | import System.Random 5 | import Data.Array.IO 6 | import Control.Monad 7 | 8 | -- NOTE: the following is copied from https://wiki.haskell.org/Random_shuffle: 9 | 10 | randomizedList :: [a] -> IO [a] 11 | randomizedList xs = do 12 | ar <- newArray n xs 13 | forM [1..n] $ \i -> do 14 | j <- randomRIO (i,n) 15 | vi <- readArray ar i 16 | vj <- readArray ar j 17 | writeArray ar j vi 18 | return vj 19 | where 20 | n = length xs 21 | newArray :: Int -> [a] -> IO (IOArray Int a) 22 | newArray n xs = newListArray (1,n) xs 23 | 24 | -------------------------------------------------------------------------------- /Blackjack/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | resolver: lts-22.26 4 | 5 | allow-newer: false 6 | 7 | # Local packages, usually specified by relative directory name 8 | packages: 9 | - '.' 
10 | 11 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 12 | extra-deps: [] 13 | 14 | # Override default flag values for local packages and extra-deps 15 | flags: {} 16 | 17 | # Control whether we use the GHC we find on the path 18 | # system-ghc: true 19 | 20 | # Require a specific version of stack, using version ranges 21 | # require-stack-version: -any # Default 22 | # require-stack-version: >= 0.1.4.0 23 | 24 | # Override the architecture used by stack, especially useful on Windows 25 | # arch: i386 26 | # arch: x86_64 27 | 28 | # Extra directories used by stack for building 29 | # extra-include-dirs: [/path/to/dir] 30 | # extra-lib-dirs: [/path/to/dir] 31 | -------------------------------------------------------------------------------- /BraveSearch/BraveSearch.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE RecordWildCards #-} 3 | 4 | module BraveSearch 5 | ( getSearchSuggestions 6 | ) where 7 | 8 | import Network.HTTP.Simple 9 | import Data.Text.Encoding (encodeUtf8) 10 | import Data.Aeson 11 | import qualified Data.Text as T 12 | import Control.Exception (try) 13 | import Network.HTTP.Client (HttpException) 14 | import qualified Data.ByteString.Char8 as BS 15 | -- removed unused import Data.ByteString.Lazy.Char8 16 | 17 | data SearchResponse = SearchResponse 18 | { query :: QueryInfo 19 | , web :: WebResults 20 | } deriving (Show) 21 | 22 | data QueryInfo = QueryInfo 23 | { original :: T.Text 24 | } deriving (Show) 25 | 26 | data WebResults = WebResults 27 | { results :: [WebResult] 28 | } deriving (Show) 29 | 30 | data WebResult = WebResult 31 | { type_ :: T.Text 32 | , index :: Maybe Int 33 | , all :: Maybe Bool 34 | , title :: Maybe T.Text 35 | , url :: Maybe T.Text 36 | , description :: Maybe T.Text 37 | } deriving (Show) 38 | 39 | instance FromJSON SearchResponse where 40 | parseJSON = withObject "SearchResponse" $ \v -> SearchResponse 41 | <$> v .: "query" 42 | <*> v .: "web" 43 | 44 | instance FromJSON QueryInfo where 45 | parseJSON = withObject "QueryInfo" $ \v -> QueryInfo 46 | <$> v .: "original" 47 | 48 | instance FromJSON WebResults where 49 | parseJSON = withObject "WebResults" $ \v -> WebResults 50 | <$> v .: "results" 51 | 52 | instance FromJSON WebResult where 53 | parseJSON = withObject "WebResult" $ \v -> WebResult 54 | <$> v .: "type" 55 | <*> v .:? "index" 56 | <*> v .:? "all" 57 | <*> v .:? "title" 58 | <*> v .:? "url" 59 | <*> v .:? "description" 60 | 61 | -- | Perform a Brave Search with the given API key (as raw bytes) and text query. 62 | getSearchSuggestions :: BS.ByteString -> T.Text -> IO (Either T.Text [T.Text]) 63 | getSearchSuggestions apiKey query = do 64 | -- Build base request 65 | let baseUrl = "https://api.search.brave.com/res/v1/web/search" 66 | request0 <- parseRequest baseUrl 67 | -- Add query parameters (URL-encoded) and headers 68 | let request1 = setRequestQueryString 69 | [ ("q", Just $ encodeUtf8 query) 70 | , ("country", Just "US") 71 | , ("count", Just "5") 72 | ] 73 | request0 74 | request = setRequestHeader "Accept" ["application/json"] 75 | $ setRequestHeader "X-Subscription-Token" [apiKey] 76 | $ request1 77 | 78 | result <- try $ httpLBS request 79 | 80 | case result of 81 | Left e -> return . Left $ T.pack $ "Network error: " ++ show (e :: HttpException) 82 | Right response -> 83 | let status = getResponseStatusCode response 84 | in if status /= 200 85 | then return . 
Left $ T.pack $ "HTTP error: " ++ show status 86 | else case eitherDecode (getResponseBody response) of 87 | Left err -> return . Left $ T.pack $ "JSON parsing error: " ++ err 88 | Right SearchResponse{..} -> 89 | let originalQuery = original query 90 | webResults = results web 91 | suggestions = ("Original Query: " <> originalQuery) 92 | : map formatResult webResults 93 | in return $ Right suggestions 94 | 95 | formatResult :: WebResult -> T.Text 96 | formatResult WebResult{..} = 97 | let titleText = maybe "N/A" ("Title: " <>) title 98 | urlText = maybe "N/A" ("URL: " <>) url 99 | descText = maybe "N/A" ("Description: " <>) (fmap (T.take 100) description) 100 | in T.intercalate " | " [titleText, urlText, descText] 101 | -------------------------------------------------------------------------------- /BraveSearch/Main.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Main where 4 | 5 | import BraveSearch (getSearchSuggestions) 6 | import qualified Data.ByteString.Char8 as BS 7 | import System.Environment (getEnv) 8 | import qualified Data.Text as T 9 | import qualified Data.Text.IO as TIO 10 | 11 | main :: IO () 12 | main = do 13 | -- Get the API key from the environment variable 14 | -- Read API key from environment and convert to ByteString 15 | apiKeyRaw <- getEnv "BRAVE_SEARCH_API_KEY" 16 | let apiKey = BS.pack apiKeyRaw 17 | 18 | -- Prompt the user for a search query 19 | TIO.putStrLn "Enter a search query:" 20 | query <- TIO.getLine 21 | 22 | -- Call the function to get search suggestions 23 | result <- getSearchSuggestions apiKey query 24 | 25 | case result of 26 | Left err -> TIO.putStrLn $ "Error: " <> err 27 | Right suggestions -> do 28 | TIO.putStrLn "Search suggestions:" 29 | mapM_ (TIO.putStrLn . ("- " <>)) suggestions 30 | -------------------------------------------------------------------------------- /BraveSearch/README.md: -------------------------------------------------------------------------------- 1 | # Brave Search APIs 2 | 3 | You need to sign up for a free or paid for account on the [Brave search page](https://brave.com/search/api/) and set an environment variable to your assigned API key: 4 | 5 | ```{line-numbers: false} 6 | export BRAVE_SEARCH_API_KEY = BSAgQ-Nc5..... 
7 | ``` 8 | 9 | ## Build and run 10 | 11 | cabal build 12 | cabal run 13 | 14 | -------------------------------------------------------------------------------- /BraveSearch/brave-search-client.cabal: -------------------------------------------------------------------------------- 1 | name: brave-search-client 2 | version: 0.1.0.0 3 | build-type: Simple 4 | cabal-version: >=1.10 5 | 6 | executable brave-search-client 7 | main-is: Main.hs 8 | other-modules: BraveSearch 9 | build-depends: base >=4.7 && <5 10 | , http-conduit 11 | , aeson 12 | , text 13 | , bytestring 14 | , http-client 15 | default-language: Haskell2010 -------------------------------------------------------------------------------- /ClientServer/Client.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Main where 4 | 5 | import Control.Monad 6 | import qualified Network.Simple.TCP as T 7 | 8 | main = do 9 | T.connect "127.0.0.1" "3000" $ \(connectionSocket, remoteAddr) -> do 10 | putStrLn $ "Connection established to " ++ show remoteAddr 11 | T.send connectionSocket "test123" 12 | response <- T.recv connectionSocket 100 13 | case response of 14 | Just s -> putStrLn $ "Response: " ++ show s 15 | Nothing -> putStrLn "No response from server" 16 | 17 | -------------------------------------------------------------------------------- /ClientServer/ClientServer.cabal: -------------------------------------------------------------------------------- 1 | name: ClientServer 2 | version: 0.1.0.0 3 | synopsis: Simple project template from stack 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable Server 16 | hs-source-dirs: . 17 | main-is: Server.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, bytestring, network-simple 20 | 21 | executable Client 22 | hs-source-dirs: . 23 | main-is: Client.hs 24 | default-language: Haskell2010 25 | build-depends: base >= 4.7 && < 5, bytestring, network-simple 26 | -------------------------------------------------------------------------------- /ClientServer/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Mark Watson (c) 2016 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Mark Watson nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 
19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /ClientServer/README.md: -------------------------------------------------------------------------------- 1 | # running example code 2 | 3 | stack build 4 | 5 | Then in two terminal windows: 6 | 7 | stack exec Server 8 | stack exec Client -------------------------------------------------------------------------------- /ClientServer/Server.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Main where 4 | 5 | -- example derived from an network-simple library example 6 | 7 | import Control.Monad 8 | import qualified Data.ByteString.Char8 as B 9 | import qualified Network.Simple.TCP as T 10 | 11 | reverseStringLoop sock = do 12 | -- get a byte string wrapped as a MonadIO: 13 | mbs <- T.recv sock 4096 14 | case mbs of 15 | Just bs -> T.send sock (B.reverse bs) >> reverseStringLoop sock 16 | Nothing -> return () 17 | 18 | main :: IO () 19 | main = T.withSocketsDo $ do -- derived from library example 20 | T.listen "*" "3000" $ \(lsock, laddr) -> do 21 | putStrLn $ "Listening at " ++ show laddr 22 | forever . T.acceptFork lsock $ \(sock, addr) -> do 23 | putStrLn $ "Connection from " ++ show addr 24 | reverseStringLoop sock 25 | 26 | -------------------------------------------------------------------------------- /ClientServer/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | resolver: lts-22.26 4 | 5 | allow-newer: false 6 | 7 | # Local packages, usually specified by relative directory name 8 | packages: 9 | - '.' 
10 | 11 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 12 | extra-deps: [] 13 | 14 | # Override default flag values for local packages and extra-deps 15 | flags: {} 16 | 17 | # Control whether we use the GHC we find on the path 18 | # system-ghc: true 19 | 20 | # Require a specific version of stack, using version ranges 21 | # require-stack-version: -any # Default 22 | # require-stack-version: >= 0.1.4.0 23 | 24 | # Override the architecture used by stack, especially useful on Windows 25 | # arch: i386 26 | # arch: x86_64 27 | 28 | # Extra directories used by stack for building 29 | # extra-include-dirs: [/path/to/dir] 30 | # extra-lib-dirs: [/path/to/dir] 31 | -------------------------------------------------------------------------------- /CommandLineApp/.gitignore: -------------------------------------------------------------------------------- 1 | temp.txt 2 | -------------------------------------------------------------------------------- /CommandLineApp/CommandLine1.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import System.IO 4 | import Data.Char (toUpper) 5 | 6 | main = do 7 | putStrLn "Enter a line of text for test 1:" 8 | s <- getLine 9 | putStrLn $ "As upper case:\t" ++ (map toUpper s) 10 | main 11 | 12 | -------------------------------------------------------------------------------- /CommandLineApp/CommandLine2.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import System.IO 4 | import Data.Char (toUpper) 5 | 6 | main = do 7 | putStrLn "Enter a line of text for test2:" 8 | s <- getLine 9 | putStrLn $ "As upper case:\t" ++ (map toUpper s) 10 | appendFile "temp.txt" $ s ++ "\n" 11 | main 12 | 13 | -------------------------------------------------------------------------------- /CommandLineApp/CommandLineApp.cabal: -------------------------------------------------------------------------------- 1 | name: CommandLineApp 2 | version: 0.1.0.0 3 | synopsis: Simple project template from stack 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable CommandLine1 16 | hs-source-dirs: . 17 | main-is: CommandLine1.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5 20 | 21 | executable CommandLine2 22 | hs-source-dirs: . 23 | main-is: CommandLine2.hs 24 | default-language: Haskell2010 25 | build-depends: base >= 4.7 && < 5 26 | 27 | executable ReadTextFile 28 | hs-source-dirs: . 29 | main-is: ReadTextFile.hs 30 | default-language: Haskell2010 31 | build-depends: base >= 4.7 && < 5 32 | 33 | executable ReadTextFileErrorHandling 34 | hs-source-dirs: . 35 | main-is: ReadTextFile.hs 36 | default-language: Haskell2010 37 | build-depends: base >= 4.7 && < 5, mtl 38 | 39 | executable GameLoop1 40 | hs-source-dirs: . 41 | main-is: GameLoop1.hs 42 | default-language: Haskell2010 43 | build-depends: base >= 4.7 && < 5, time 44 | 45 | executable GameLoop2 46 | hs-source-dirs: . 
47 | main-is: GameLoop2.hs 48 | default-language: Haskell2010 49 | build-depends: base >= 4.7 && < 5, random 50 | -------------------------------------------------------------------------------- /CommandLineApp/GameLoop1.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Data.Time.Clock.POSIX 4 | 5 | data GameState = GameState { numberToGuess::Integer, numTries::Integer} 6 | deriving (Show) 7 | 8 | gameLoop :: GameState -> IO GameState 9 | gameLoop gs = do 10 | print $ numberToGuess gs 11 | putStrLn "Enter a number:" 12 | s <- getLine 13 | let num = read s :: Integer 14 | if num == numberToGuess gs then 15 | return gs 16 | else gameLoop $ GameState (numberToGuess gs) ((numTries gs) + 1) 17 | 18 | main = do 19 | pTime <- round `fmap` getPOSIXTime 20 | let gameState = GameState (pTime `mod` 5) 1 21 | print "Guess a number between 1 and 4" 22 | gameLoop gameState 23 | 24 | -------------------------------------------------------------------------------- /CommandLineApp/GameLoop2.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import System.Random 4 | 5 | data GameState = GameState { numberToGuess::Integer, numTries::Integer} 6 | deriving (Show) 7 | 8 | gameLoop :: GameState -> IO GameState 9 | gameLoop gs = do 10 | print $ numberToGuess gs 11 | putStrLn "Enter a number:" 12 | s <- getLine 13 | let num = read s :: Integer 14 | if num == numberToGuess gs then 15 | return gs 16 | else gameLoop $ GameState (numberToGuess gs) ((numTries gs) + 1) 17 | 18 | main = do 19 | pTime <- randomRIO(1,4) 20 | let gameState = GameState pTime 1 21 | print "Guess a number between 1 and 4" 22 | gameLoop gameState 23 | -------------------------------------------------------------------------------- /CommandLineApp/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Mark Watson (c) 2016 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Mark Watson nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-------------------------------------------------------------------------------- /CommandLineApp/README.md: -------------------------------------------------------------------------------- 1 | # Building and running the program 2 | 3 | ~~~~~~~~ 4 | stack build --exec CommandLine1 5 | ~~~~~~~~ 6 | 7 | -------------------------------------------------------------------------------- /CommandLineApp/ReadTextFile.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import System.IO 4 | import Control.Monad 5 | 6 | main = do 7 | entireFileAsString <- readFile "temp.txt" 8 | print entireFileAsString 9 | let allWords = words entireFileAsString 10 | print allWords -------------------------------------------------------------------------------- /CommandLineApp/ReadTextFileErrorHandling.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import System.IO 4 | import Control.Exception 5 | 6 | -- catchAny from Michael Snoyman's aticle: 7 | -- (https://www.schoolofhaskell.com/user/snoyberg/general-haskell/exceptions/catching-all-exceptions: 8 | catchAny :: IO a -> (SomeException -> IO a) -> IO a 9 | catchAny = Control.Exception.catch 10 | 11 | safeFileReader :: FilePath -> IO String 12 | safeFileReader fPath = do 13 | entireFileAsString <- catchAny (readFile fPath) $ \error -> do 14 | putStrLn $ "Error: " ++ show error 15 | return "" 16 | return entireFileAsString 17 | 18 | main :: IO () 19 | main = do 20 | fContents <- safeFileReader "temp.txt" 21 | print fContents 22 | print $ words fContents 23 | -------------------------------------------------------------------------------- /CommandLineApp/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.26 5 | 6 | allow-newer: false 7 | 8 | # Local packages, usually specified by relative directory name 9 | packages: 10 | - '.' 11 | 12 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 13 | extra-deps: [] 14 | 15 | # Override default flag values for local packages and extra-deps 16 | flags: {} 17 | 18 | # Control whether we use the GHC we find on the path 19 | # system-ghc: true 20 | 21 | # Require a specific version of stack, using version ranges 22 | # require-stack-version: -any # Default 23 | # require-stack-version: >= 0.1.4.0 24 | 25 | # Override the architecture used by stack, especially useful on Windows 26 | # arch: i386 27 | # arch: x86_64 28 | 29 | # Extra directories used by stack for building 30 | # extra-include-dirs: [/path/to/dir] 31 | # extra-lib-dirs: [/path/to/dir] 32 | -------------------------------------------------------------------------------- /Database-postgres/.gitignore: -------------------------------------------------------------------------------- 1 | test.db 2 | -------------------------------------------------------------------------------- /Database-postgres/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Mark Watson (c) 2015 2 | 3 | All rights reserved. 
4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Mark Watson nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /Database-postgres/README.md: -------------------------------------------------------------------------------- 1 | # Database Examples 2 | 3 | ## Test database: 4 | 5 | markw=# create database haskell; 6 | CREATE DATABASE 7 | markw=# \c haskell 8 | You are now connected to database "haskell" as user "markw". 9 | 10 | create table customers (id int, name text, email text); 11 | CREATE TABLE 12 | haskell=# insert into customers values (1, 'Acme Cement', 'info@acmecement.com'); 13 | INSERT 0 1 14 | haskell=# insert into customers values (2, 'Biff Home Sales', 'info@biff.com'); 15 | INSERT 0 1 16 | haskell=# insert into customers values (3, 'My Pens', 'info@mypens.com'); 17 | 18 | markw=# \c haskell 19 | You are now connected to database "haskell" as user "markw". 
20 | haskell=# \d 21 | List of relations 22 | Schema | Name | Type | Owner 23 | --------+-----------+-------+------- 24 | public | customers | table | markw 25 | public | links | table | markw 26 | public | products | table | markw 27 | (3 rows) 28 | 29 | haskell=# select * from customers; 30 | id | name | email 31 | ----+-----------------+--------------------- 32 | 1 | Acme Cement | info@acmecement.com 33 | 2 | Biff Home Sales | info@biff.com 34 | 3 | My Pens | info@mypens.com 35 | (3 rows) 36 | 37 | haskell=# select * from products; 38 | id | name | cost 39 | ----+---------------+------ 40 | 1 | Cement bag | 2.5 41 | 2 | Cheap Pen | 1.5 42 | 3 | Expensive Pen | 14.5 43 | (3 rows) 44 | 45 | haskell=# select * from links; 46 | id | customer_id | productid 47 | ----+-------------+----------- 48 | 1 | 1 | 1 49 | 2 | 3 | 2 50 | 3 | 3 | 3 51 | (3 rows) 52 | 53 | 54 | Then build and run: 55 | 56 | 57 | ```````` 58 | stack build --exec TestPostgres1 59 | ```````` 60 | 61 | -------------------------------------------------------------------------------- /Database-postgres/TestDatabase.cabal: -------------------------------------------------------------------------------- 1 | name: TestDatabase 2 | version: 0.1.0.0 3 | synopsis: Simple project template from stack 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable TestPostgres1 16 | hs-source-dirs: . 17 | main-is: TestPostgres1.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, containers, split, MissingH, postgresql-simple, HDBC 20 | -------------------------------------------------------------------------------- /Database-postgres/TestPostgres1.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Main where 4 | 5 | import Database.PostgreSQL.Simple 6 | 7 | main :: IO () 8 | main = do 9 | conn <- connect defaultConnectInfo { connectDatabase = "haskell", connectUser = "markw", connectPassword = "test1"} 10 | -- start by getting table names in database: 11 | do 12 | r <- query_ conn "SELECT name FROM customers" :: IO [(Only String)] 13 | print "names and emails in table 'customers' in database haskell:" 14 | mapM_ (print . 
fromOnly) r 15 | 16 | -- add a row to table 'test' and then print out the rows in table 'test': 17 | do 18 | let rows :: [(Int, String, String)] 19 | rows = [(4, "Mary Smith", "marys@acme.com")] 20 | executeMany conn "INSERT INTO customers (id, name, email) VALUES (?,?,?)" rows 21 | r2 <- query_ conn "SELECT * from customers" :: IO [(Int, String, String)] 22 | print "number of rows in table 'customers':" 23 | print (length r2) 24 | print "rows in table 'customers':" 25 | mapM_ print r2 26 | 27 | close conn 28 | -------------------------------------------------------------------------------- /Database-postgres/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.26 5 | 6 | allow-newer: true 7 | 8 | # Local packages, usually specified by relative directory name 9 | packages: 10 | - '.' 11 | 12 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 13 | extra-deps: [] 14 | 15 | # Override default flag values for local packages and extra-deps 16 | flags: {} 17 | 18 | # Control whether we use the GHC we find on the path 19 | # system-ghc: true 20 | 21 | # Require a specific version of stack, using version ranges 22 | # require-stack-version: -any # Default 23 | # require-stack-version: >= 0.1.4.0 24 | 25 | # Override the architecture used by stack, especially useful on Windows 26 | # arch: i386 27 | # arch: x86_64 28 | 29 | # Extra directories used by stack for building 30 | # extra-include-dirs: [/path/to/dir] 31 | # extra-lib-dirs: [/path/to/dir] 32 | -------------------------------------------------------------------------------- /Database-sqlite/.gitignore: -------------------------------------------------------------------------------- 1 | test.db 2 | -------------------------------------------------------------------------------- /Database-sqlite/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Mark Watson (c) 2015 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Mark Watson nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /Database-sqlite/README.md: -------------------------------------------------------------------------------- 1 | # Database Examples 2 | 3 | ## Run first sqlite example 4 | 5 | Create sqlite database: 6 | ```````` 7 | sqlite3 test.db "create table test (id integer primary key, str text);" 8 | ```````` 9 | 10 | Then build and run: 11 | 12 | 13 | ```````` 14 | stack build --exec TestSqLite1 15 | ```````` 16 | 17 | ## Run using Replit.com, Nix, Cabal 18 | 19 | sqlite3 test.db "create table test (id integer primary key, str text);" 20 | cabal build 21 | cabal run -------------------------------------------------------------------------------- /Database-sqlite/TestDatabase.cabal: -------------------------------------------------------------------------------- 1 | name: TestDatabase 2 | version: 0.1.0.0 3 | synopsis: SqLite example 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable TestSqLite1 16 | hs-source-dirs: . 17 | main-is: TestSqLite1.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, containers, split, MissingH, sqlite-simple 20 | -------------------------------------------------------------------------------- /Database-sqlite/TestSqLite1.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Main where 4 | 5 | import Database.SQLite.Simple 6 | 7 | {- 8 | Create sqlite database: 9 | sqlite3 test.db "create table test (id integer primary key, str text);" 10 | 11 | This example is derived from the example at github.com/nurpax/sqlite-simple 12 | -} 13 | 14 | main :: IO () 15 | main = do 16 | conn <- open "test.db" 17 | -- start by getting table names in database: 18 | do 19 | r <- query_ conn "SELECT name FROM sqlite_master WHERE type='table'" :: IO [Only String] 20 | print "Table names in database test.db:" 21 | mapM_ (print . fromOnly) r 22 | 23 | -- get the metadata for table test in test.db: 24 | do 25 | r <- query_ conn "SELECT sql FROM sqlite_master WHERE type='table' and name='test'" :: IO [Only String] 26 | print "SQL to create table 'test' in database test.db:" 27 | mapM_ (print . 
fromOnly) r 28 | 29 | -- add a row to table 'test' and then print out the rows in table 'test': 30 | do 31 | execute conn "INSERT INTO test (str) VALUES (?)" 32 | (Only ("test string 2" :: String)) 33 | r2 <- query_ conn "SELECT * from test" :: IO [(Int, String)] 34 | print "number of rows in table 'test':" 35 | print (length r2) 36 | print "rows in table 'test':" 37 | mapM_ print r2 38 | 39 | close conn 40 | -------------------------------------------------------------------------------- /Database-sqlite/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.26 5 | 6 | allow-newer: false 7 | 8 | # Local packages, usually specified by relative directory name 9 | packages: 10 | - '.' 11 | 12 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 13 | extra-deps: 14 | - MissingH-1.6.0.1 15 | - random-1.2.1.2 16 | - sqlite-simple-0.4.19.0 17 | - direct-sqlite-2.3.29 18 | 19 | # Override default flag values for local packages and extra-deps 20 | flags: {} 21 | 22 | # Control whether we use the GHC we find on the path 23 | # system-ghc: true 24 | 25 | # Require a specific version of stack, using version ranges 26 | # require-stack-version: -any # Default 27 | # require-stack-version: >= 0.1.4.0 28 | 29 | # Override the architecture used by stack, especially useful on Windows 30 | # arch: i386 31 | # arch: x86_64 32 | 33 | # Extra directories used by stack for building 34 | # extra-include-dirs: [/path/to/dir] 35 | # extra-lib-dirs: [/path/to/dir] 36 | -------------------------------------------------------------------------------- /FastTag/FastTag.cabal: -------------------------------------------------------------------------------- 1 | name: FastTag 2 | version: 0.1.0.0 3 | synopsis: Simple 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable fasttag 16 | hs-source-dirs: . 17 | main-is: fasttag.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, containers, strings 20 | other-modules: LexiconData 21 | -------------------------------------------------------------------------------- /FastTag/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Mark Watson (c) 2016 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 
15 | 16 | * Neither the name of Mark Watson nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /FastTag/README.md: -------------------------------------------------------------------------------- 1 | # Haskell version of FastTag 2 | 3 | Copyright 2016 Mark Watson (http://markwatson.com) 4 | 5 | Licensed under LGPL3 or Apache 2 licenses: take your pick. 6 | 7 | The lexicon file is derived from Eric Brill's data and converted to Haskell source code that creates a map used to look up words. 8 | 9 | ACKNOWLEDGMENTS: 10 | ---------------- 11 | 12 | - Eric Brill for his lexicon and trained rule set: http://www.cs.jhu.edu/~brill/ 13 | 14 | - Medpost team for their tagging lexicon: http://mmtx.nlm.nih.gov/MedPost_SKR.shtml 15 | 16 | - Brant Chee for bug reports and bug fixes to the Java version of FastTag 17 | 18 | ## Running code 19 | 20 | stack build 21 | stack exec fasttag 22 | 23 | TAG DEFINITIONS: 24 | ---------------- 25 | 26 |
 27 | 	
 28 | CC Coord Conjuncn           and,but,or
 29 | NN Noun, sing. or mass      dog
 30 | CD Cardinal number          one,two
 31 | NNS Noun, plural            dogs
 32 | DT Determiner               the,some
 33 | NNP Proper noun, sing.      Edinburgh
 34 | EX Existential there        there
 35 | NNPS Proper noun, plural    Smiths
 36 | FW Foreign Word             mon dieu
 37 | PDT Predeterminer           all, both
 38 | IN Preposition              of,in,by
 39 | POS Possessive ending       's
 40 | JJ Adjective                big
 41 | PP Personal pronoun         I,you,she
 42 | JJR Adj., comparative       bigger
 43 | PP$ Possessive pronoun      my,one's
 44 | JJS Adj., superlative       biggest
 45 | RB Adverb                   quickly
 46 | LS List item marker         1,One
 47 | RBR Adverb, comparative     faster
 48 | MD Modal                    can,should
 49 | RBS Adverb, superlative     fastest
 50 | RP Particle                 up,off
 51 | WP$ Possessive-Wh           whose
 52 | SYM Symbol                  +,%,&
 53 | WRB Wh-adverb               how,where
 54 | TO "to"                     to
 55 | $ Dollar sign               $
 56 | UH Interjection             oh, oops
 57 | # Pound sign                #
 58 | VB verb, base form          eat
 59 | " quote                     "
 60 | VBD verb, past tense        ate
 61 | VBG verb, gerund            eating
 62 | ( Left paren                (
 63 | VBN verb, past part         eaten
 64 | ) Right paren               )
 65 | VBP Verb, present           eat
 66 | , Comma                     ,
 67 | VBZ Verb, present           eats
 68 | . Sent-final punct          . ! ?
 69 | WDT Wh-determiner           which,that
 70 | : Mid-sent punct.           : ; --
 71 | WP Wh pronoun               who,what
 72 | 
 73 | 
 74 | MEDPOST TAG DEFINITIONS:
 75 | 
 76 | CC coordinating conjunction
 77 | CS subordinating conjunction
 78 | CSN comparative conjunction (than)
 79 | CST complementizer (that)
 80 | DB predeterminer
 81 | DD determiner
 82 | EX existential there
 83 | GE genitive marker 's
 84 | II preposition
 85 | JJ adjective
 86 | JJR comparative adjective
 87 | JJT superlative adjective
 88 | MC number or numeric
 89 | NN noun
 90 | NNP proper noun
 91 | NNS plural noun
 92 | PN pronoun
 93 | PND determiner as pronoun
 94 | PNG genitive pronoun
 95 | PNR relative pronoun
 96 | RR adverb 
 97 | RRR comparative adverb
 98 | RRT superlative adverb
 99 | SYM symbol
100 | TO infinitive marker to
101 | VM modal
102 | VBB base be, am, are
103 | VBD past was, were
104 | VBG participle being
105 | VBI infinitive be
106 | VBN participle been
107 | VBZ 3rd pers. sing. is
108 | VDB base do
109 | VDD past did
110 | VDG participle doing
111 | VDI infinitive do
112 | VDN participle done
113 | VDZ 3rd pers. sing. does
114 | VHB base have
115 | VHD past had
116 | VHG participle having
117 | VHI infinitive have
118 | VHN participle had
119 | VHZ 3rd pers. sing. has
120 | VVB base form lexical verb
121 | VVD past tense
122 | VVG present part.
123 | VVI infinitive lexical verb
124 | VVN past part.
125 | VVZ 3rd pers. sing.
126 | VVNJ prenominal past part.
127 | VVGJ prenominal present part.
128 | VVGN nominal gerund
129 | ( left parenthesis
130 | ) right parenthesis
131 | , comma
132 | . end-of-sentence period
133 | : dashes, colons
134 |  '' right quote
135 | 
136 | 
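## Example usage

The tagger can also be tried interactively after `stack build`. The session below is only a sketch: the exact tags returned depend on the lexicon generated in `LexiconData.hs` and the transformation rules in `fasttag.hs`, so the output shown is an assumption rather than captured output.

    stack ghci
    ghci> tag ["the", "dog", "ran", "quickly"]
    ["DT","NN","VBD","RB"]
    ghci> zip ["the", "dog", "ran", "quickly"] (tag ["the", "dog", "ran", "quickly"])
    [("the","DT"),("dog","NN"),("ran","VBD"),("quickly","RB")]

`tag` returns one tag per input token, drawn from the Penn Treebank style tags listed above.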
137 | -------------------------------------------------------------------------------- /FastTag/fasttag.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import qualified Data.Map as M 4 | import Data.Strings (strEndsWith, strStartsWith) 5 | import Data.List (isInfixOf) 6 | 7 | import LexiconData (lexicon) 8 | 9 | bigram :: [a] -> [[a]] 10 | bigram [] = [] 11 | bigram [_] = [] 12 | bigram xs = take 2 xs : bigram (tail xs) 13 | 14 | containsString word substring = isInfixOf substring word 15 | 16 | 17 | fixTags twogramList = 18 | map 19 | -- in the following inner function, [last,current] might be bound, 20 | -- for example, to [["dog","NN"],["ran","VBD"]] 21 | (\[last, current] -> 22 | -- rule 1: DT, {VBD | VBP} --> DT, NN 23 | if last !! 1 == "DT" && (current !! 1 == "VBD" || 24 | current !! 1 == "VB" || 25 | current !! 1 == "VBP") 26 | then "NN" 27 | else 28 | -- rule 2: convert a noun to a number (CD) if "." appears in the word 29 | if (current !! 1) !! 0 == 'N' && containsString (current !! 0) "." 30 | then "CD" 31 | else 32 | -- rule 3: convert a noun to a past participle if 33 | -- words.get(i) ends with "ed" 34 | if (current !! 1) !! 0 == 'N' && strEndsWith (current !! 0) "ed" 35 | then "VBN" 36 | else 37 | -- rule 4: convert any type to adverb if it ends in "ly" 38 | if strEndsWith (current !! 0) "ly" 39 | then "RB" 40 | else 41 | -- rule 5: convert a common noun (NN or NNS) to an 42 | -- adjective if it ends with "al" 43 | if strStartsWith (current !! 1) "NN" && 44 | strEndsWith (current !! 1) "al" 45 | then "JJ" 46 | else 47 | -- rule 6: convert a noun to a verb if the preceeding 48 | -- word is "would" 49 | if strStartsWith (current !! 1) "NN" && 50 | (last !! 0) == "would" -- should be case insensitive 51 | then "VB" 52 | else 53 | -- rule 7: if a word has been categorized as a 54 | -- common noun and it ends with "s", 55 | -- then set its type to plural common noun (NNS) 56 | if strStartsWith (current !! 1) "NN" && strEndsWith (current !! 0) "s" 57 | then "NNS" 58 | else 59 | -- rule 8: convert a common noun to a present 60 | -- participle verb (i.e., a gerand) 61 | if strStartsWith (current !! 1) "NN" && 62 | strEndsWith (current !! 0) "ing" 63 | then "VBG" 64 | else (current !! 1)) 65 | twogramList 66 | 67 | substitute tks = bigram $ map tagHelper tks 68 | 69 | tagHelper token = 70 | let tags = M.findWithDefault [] token lexicon in 71 | if tags == [] then [token, "NN"] else [token, tags] 72 | 73 | tag tokens = fixTags $ substitute ([""] ++ tokens) 74 | 75 | 76 | main = do 77 | let tokens = ["the", "dog", "ran", "around", "the", "tree", "while", 78 | "the", "cat", "snaked", "around", "the", "trunk", 79 | "while", "banking", "to", "the", "left"] 80 | print $ tag tokens 81 | print $ zip tokens $ tag tokens 82 | 83 | -------------------------------------------------------------------------------- /FastTag/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.26 5 | 6 | allow-newer: false 7 | 8 | # Local packages, usually specified by relative directory name 9 | packages: 10 | - '.' 
11 | 12 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 13 | extra-deps: 14 | - strings-1.1 15 | 16 | 17 | # Override default flag values for local packages and extra-deps 18 | flags: {} 19 | 20 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/.gitignore: -------------------------------------------------------------------------------- 1 | .stack-work/ 2 | HybridHaskellPythonDeepLearning.cabal 3 | *~ 4 | 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | .idea 10 | .vscode 11 | 12 | .DS_Store 13 | 14 | *~ 15 | 16 | # C extensions 17 | *.so 18 | 19 | # Distribution / packaging 20 | .Python 21 | env/ 22 | build/ 23 | develop-eggs/ 24 | dist/ 25 | downloads/ 26 | eggs/ 27 | .eggs/ 28 | lib/ 29 | lib64/ 30 | parts/ 31 | sdist/ 32 | var/ 33 | wheels/ 34 | 35 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/ChangeLog.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mark-watson/haskell_tutorial_cookbook_examples/718d33078b76a496e13a9e618eab206eb92f0760/HybridHaskellPythonCorefAnaphoraResolution/ChangeLog.md -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/HybridHaskellPythonCorefAnaphoraResolution.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 1.12 2 | 3 | -- This file has been generated from package.yaml by hpack version 0.31.2. 4 | -- 5 | -- see: https://github.com/sol/hpack 6 | -- 7 | -- hash: f2bdd513eef0a939561c376956abebf1d1a11cf8ad2b963fa362ed0438ff30cd 8 | 9 | name: HybridHaskellPythonCorefAnaphoraResolution 10 | version: 0.1.0.0 11 | description: Please see the README.md 12 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 13 | author: Mark Watson 14 | copyright: 2016-2020 Mark Watson. All rights reserved. 15 | license: Apache-2.0 16 | license-file: ../APACHE_LICENSE-2.0.txt 17 | build-type: Simple 18 | extra-source-files: 19 | README.md 20 | ChangeLog.md 21 | 22 | source-repository head 23 | type: git 24 | location: https://github.com/githubuser/HybridHaskellPythonCorefAnaphoraResolution 25 | 26 | library 27 | exposed-modules: 28 | CorefWebClient 29 | other-modules: 30 | Paths_HybridHaskellPythonCorefAnaphoraResolution 31 | hs-source-dirs: 32 | src 33 | build-depends: 34 | base >=4.7 && <5, wreq, lens, bytestring, uri-encode >= 1.5.0.5 35 | default-language: Haskell2010 36 | 37 | executable HybridHaskellPythonCorefAnaphoraResolution-exe 38 | main-is: Main.hs 39 | other-modules: 40 | Paths_HybridHaskellPythonCorefAnaphoraResolution 41 | hs-source-dirs: 42 | app 43 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 44 | build-depends: 45 | HybridHaskellPythonCorefAnaphoraResolution, wreq, lens, bytestring 46 | , base >=4.7 && <5, uri-encode >= 1.5.0.5 47 | default-language: Haskell2010 48 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/README.md: -------------------------------------------------------------------------------- 1 | # HybridHaskellPythonCorefAnaphoraResolution 2 | 3 | This project uses a the BERT model with the spaCy NP library. 
4 | 5 | To run the Python server that the Haskell code in this project calls, 'cd python_coreference_anaphora_resolution_server' 6 | and follow the instructions in the README.md file. 7 | 8 | ## Running the Haskell client 9 | 10 | stack build --fast --exec HybridHaskellPythonCorefAnaphoraResolution-exe -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/app/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import CorefWebClient 4 | 5 | main :: IO () 6 | main = do 7 | putStrLn "Enter text (all on one line)" 8 | s <- getLine 9 | response <- corefClient s 10 | putStr "response from coreference server:\t" 11 | putStrLn $ show response 12 | main -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/python_coreference_anaphora_resolution_server/Makefile: -------------------------------------------------------------------------------- 1 | run: 2 | nohup python server_coref.py > /tmp/coref_service.txt & 3 | 4 | stop: 5 | pkill -f server_coref 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/python_coreference_anaphora_resolution_server/README.md: -------------------------------------------------------------------------------- 1 | # Installation: 2 | 3 | pip install spacy==2.1.0 # see discusion in book of either using older version of spaCy or building neuralcoref from source 4 | pip install neuralcoref 5 | pip install -U spacy 6 | python -m spacy download en 7 | pip install falcon 8 | 9 | python setup.py install 10 | 11 | 12 | This installs the executable **corefserver** on your path so you can run the server from any 13 | directory using: 14 | 15 | corefserver 16 | 17 | ## If you get an error 18 | 19 | ValueError: spacy.strings.StringStore size changed, may indicate binary incompatibility. Expected 112 from C header, got 88 from PyObject 20 | 21 | Then you can either downgrade spaCy to 2.1.0, or build neuralcoref from source if you want to use a more recent version of spaCy. 22 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/python_coreference_anaphora_resolution_server/bin/corefserver: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """Coreference resolution server example. 4 | A simple server serving the coreference system. 
5 | 6 | This file is copied and modified from an example 7 | program from https://github.com/huggingface/neuralcoref 8 | 9 | """ 10 | from __future__ import unicode_literals 11 | from __future__ import print_function 12 | 13 | from corefserver.coref_server import AllResource 14 | from wsgiref.simple_server import make_server 15 | import falcon 16 | 17 | if __name__ == '__main__': 18 | RESSOURCE = AllResource() 19 | APP = falcon.API() 20 | APP.add_route('/', RESSOURCE) 21 | HTTPD = make_server('0.0.0.0', 8000, APP) 22 | HTTPD.serve_forever() 23 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/python_coreference_anaphora_resolution_server/corefserver/coref_server.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """Coreference resolution server example. 4 | A simple server serving the coreference system. 5 | 6 | This file is copied and modified from an example 7 | program from https://github.com/huggingface/neuralcoref 8 | 9 | """ 10 | from __future__ import unicode_literals 11 | from __future__ import print_function 12 | 13 | import json 14 | from wsgiref.simple_server import make_server 15 | import falcon 16 | import spacy 17 | 18 | unicode_ = str # Python 3 19 | 20 | 21 | class AllResource(object): 22 | def __init__(self): 23 | #self.nlp = spacy.load('en_coref_sm') 24 | # MLW: 25 | self.nlp = spacy.load('en') 26 | import neuralcoref 27 | neuralcoref.add_to_pipe(self.nlp) 28 | print("Server loaded") 29 | self.response = None 30 | 31 | def on_get(self, req, resp): 32 | self.response = {} 33 | 34 | text_param = req.get_param("text") 35 | no_detail = req.get_param("no_detail") 36 | if text_param is not None: 37 | 38 | text = ",".join(text_param) if isinstance(text_param, list) else text_param 39 | text = unicode_(text) 40 | text = text.replace("%20", " ").replace("%3B", ";").replace("%2C", ",").replace("%3A", ":").replace("%24","$").replace("%2C",",") 41 | print("** text=", text) 42 | doc = self.nlp(text) 43 | #print("** doc=", doc) 44 | if no_detail != None: 45 | self.response = doc._.coref_resolved 46 | resp.body = self.response 47 | resp.content_type = 'application/text' 48 | else: 49 | if doc._.has_coref: 50 | mentions = [{'start': mention.start_char, 51 | 'end': mention.end_char, 52 | 'text': mention.text, 53 | 'resolved': cluster.main.text 54 | } 55 | for cluster in doc._.coref_clusters 56 | for mention in cluster.mentions] 57 | clusters = list(list(span.text for span in cluster) 58 | for cluster in doc._.coref_clusters) 59 | resolved = doc._.coref_resolved 60 | self.response['mentions'] = mentions 61 | self.response['clusters'] = clusters 62 | self.response['resolved'] = resolved 63 | 64 | resp.body = json.dumps(self.response) 65 | resp.content_type = 'application/json' 66 | resp.append_header('Access-Control-Allow-Origin', "*") 67 | resp.status = falcon.HTTP_200 68 | 69 | if __name__ == '__main__': 70 | RESSOURCE = AllResource() 71 | APP = falcon.API() 72 | APP.add_route('/', RESSOURCE) 73 | HTTPD = make_server('0.0.0.0', 8000, APP) 74 | HTTPD.serve_forever() 75 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/python_coreference_anaphora_resolution_server/corefserver/test/README.md: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mark-watson/haskell_tutorial_cookbook_examples/718d33078b76a496e13a9e618eab206eb92f0760/HybridHaskellPythonCorefAnaphoraResolution/python_coreference_anaphora_resolution_server/corefserver/test/README.md -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/python_coreference_anaphora_resolution_server/setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | 3 | setup(name='coref_server', 4 | version='0.11', 5 | packages=['corefserver', 'corefserver.test'], 6 | license='Apache 2', 7 | scripts=['bin/corefserver'], 8 | long_description=open('README.md').read()) 9 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/python_coreference_anaphora_resolution_server/test_client.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | from urllib.request import Request, urlopen 4 | import urllib 5 | 6 | base_uri = 'http://127.0.0.1:8000?text=' 7 | 8 | 9 | def coref(text, no_detail=False): 10 | def get_raw_data_from_web(a_uri): 11 | req = Request(a_uri, headers={'User-Agent': 'PythonBook/1.0'}) 12 | http_response = urlopen(req) 13 | data = http_response.read() 14 | return data 15 | 16 | encoded_text = urllib.parse.quote(text, safe='') 17 | if no_detail: 18 | z = '&no_detail=1' 19 | else: 20 | z = '' 21 | raw_data = get_raw_data_from_web(base_uri + encoded_text + z) 22 | return raw_data.decode("UTF8") 23 | 24 | 25 | print(coref('My sister has a dog named Sam. She loves him')) 26 | print(coref('My sister has a dog named Sam. She loves him', no_detail=True)) 27 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/src/CorefWebClient.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | -- reference: http://www.serpentine.com/wreq/tutorial.html 4 | module CorefWebClient 5 | ( corefClient 6 | ) where 7 | 8 | import Control.Lens 9 | import Data.ByteString.Lazy.Char8 (unpack) 10 | import Data.Maybe (fromJust) 11 | import Network.URI.Encode (encode) 12 | import Network.Wreq 13 | 14 | base_url = "http://127.0.0.1:8000?text=" 15 | 16 | corefClient :: [Char] -> IO [Char] 17 | corefClient query = do 18 | putStrLn $ "\n\n*** Processing " ++ (encode query) 19 | r <- get $ base_url ++ (encode query) ++ "&no_detail=1" 20 | putStrLn $ "status code: " ++ (show (r ^. responseStatus . statusCode)) 21 | putStrLn $ "content type: " ++ (show (r ^? responseHeader "Content-Type")) 22 | putStrLn $ "response body: " ++ (unpack (fromJust (r ^? responseBody))) 23 | return $ unpack (fromJust (r ^? responseBody)) 24 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # https://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 
8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | resolver: lts-22.26 12 | 13 | allow-newer: true 14 | 15 | # User packages to be built. 16 | # Various formats can be used as shown in the example below. 17 | # 18 | # packages: 19 | # - some-directory 20 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 21 | # subdirs: 22 | # - auto-update 23 | # - wai 24 | packages: 25 | - . 26 | # Dependency packages to be pulled from upstream that are not in the resolver. 27 | # These entries can reference officially published versions as well as 28 | # forks / in-progress versions pinned to a git hash. For example: 29 | # 30 | # extra-deps: 31 | # - acme-missiles-0.3 32 | # - git: https://github.com/commercialhaskell/stack.git 33 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 34 | # 35 | # extra-deps: [] 36 | 37 | # Override default flag values for local packages and extra-deps 38 | # flags: {} 39 | 40 | # Extra package databases containing global packages 41 | # extra-package-dbs: [] 42 | 43 | # Control whether we use the GHC we find on the path 44 | # system-ghc: true 45 | # 46 | # Require a specific version of stack, using version ranges 47 | # require-stack-version: -any # Default 48 | # require-stack-version: ">=2.1" 49 | # 50 | # Override the architecture used by stack, especially useful on Windows 51 | # arch: i386 52 | # arch: x86_64 53 | # 54 | # Extra directories used by stack for building 55 | # extra-include-dirs: [/path/to/dir] 56 | # extra-lib-dirs: [/path/to/dir] 57 | # 58 | # Allow a newer minor version of GHC than the snapshot specifies 59 | # compiler-check: newer-minor 60 | -------------------------------------------------------------------------------- /HybridHaskellPythonCorefAnaphoraResolution/test/Spec.hs: -------------------------------------------------------------------------------- 1 | main :: IO () 2 | main = putStrLn "Test suite not yet implemented" 3 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/.gitignore: -------------------------------------------------------------------------------- 1 | .stack-work/ 2 | HybridHaskellPythonNlp.cabal 3 | *~ 4 | 5 | 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | .idea 11 | .vscode 12 | 13 | .DS_Store 14 | 15 | *~ 16 | 17 | # C extensions 18 | *.so 19 | 20 | # Distribution / packaging 21 | .Python 22 | env/ 23 | build/ 24 | develop-eggs/ 25 | dist/ 26 | downloads/ 27 | eggs/ 28 | .eggs/ 29 | lib/ 30 | lib64/ 31 | parts/ 32 | sdist/ 33 | var/ 34 | wheels/ 35 | 36 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/ChangeLog.md: -------------------------------------------------------------------------------- 1 | # Changelog for HybridHaskellPythonNlp 2 | 3 | ## Unreleased changes 4 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Mark Watson (c) 2019 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 
10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Author name here nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/README.md: -------------------------------------------------------------------------------- 1 | # HybridHaskellPythonNlp 2 | 3 | ## Start the Python Server 4 | 5 | I assume that it is installed (see the book for directions) or see the readme file in the subdirectory python_spacy_nlp_server 6 | 7 | 8 | ## Running the Haskell client 9 | 10 | stack build --fast --exec HybridHaskellPythonNlp-exe 11 | 12 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/app/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import NlpWebClient 4 | 5 | main :: IO () 6 | main = do 7 | putStrLn "Enter text (all on one line)" 8 | s <- getLine 9 | response <- (nlpClient s) :: IO NlpResponse 10 | putStr "response from NLP server:\n" 11 | putStrLn $ show response 12 | main -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/python_spacy_nlp_server/Makefile: -------------------------------------------------------------------------------- 1 | run: 2 | nohup python server_spacy.py > /tmp/AISspacy.txt & 3 | 4 | stop: 5 | pkill -f server_spacy 6 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/python_spacy_nlp_server/README.md: -------------------------------------------------------------------------------- 1 | # Installation: 2 | 3 | pip install -U spacy 4 | python -m spacy download en 5 | pip install falcon 6 | 7 | python setup.py install 8 | 9 | bin/spacynlpserver 10 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/python_spacy_nlp_server/bin/spacynlpserver: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """Coreference resolution server example. 
4 | A simple server serving the coreference system. 5 | 6 | This file is copied and modified from an example 7 | program from https://github.com/huggingface/neuralcoref 8 | 9 | """ 10 | from __future__ import unicode_literals 11 | from __future__ import print_function 12 | 13 | from spacynlpserver.server_spacy import AllResource 14 | 15 | import json 16 | from wsgiref.simple_server import make_server 17 | import falcon 18 | 19 | if __name__ == '__main__': 20 | RESSOURCE = AllResource() 21 | APP = falcon.API() 22 | APP.add_route('/', RESSOURCE) 23 | HTTPD = make_server('0.0.0.0', 8008, APP) 24 | HTTPD.serve_forever() 25 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/python_spacy_nlp_server/setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | 3 | setup(name='spacy_nlp_server', 4 | version='0.11', 5 | packages=['spacynlpserver', 'spacynlpserver.test'], 6 | license='Apache 2', 7 | scripts=['bin/spacynlpserver'], 8 | long_description=open('README.md').read()) 9 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/python_spacy_nlp_server/spacynlpserver/server_spacy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """Coreference resolution server example. 4 | A simple server serving the coreference system. 5 | 6 | This file is copied and modified from an example 7 | program from https://github.com/huggingface/neuralcoref 8 | 9 | """ 10 | from __future__ import unicode_literals 11 | from __future__ import print_function 12 | 13 | import json 14 | from wsgiref.simple_server import make_server 15 | import falcon 16 | import spacy 17 | 18 | unicode_ = str # Python 3 19 | 20 | class AllResource(object): 21 | def __init__(self): 22 | self.nlp = spacy.load('en') 23 | print("Server loaded") 24 | self.response = None 25 | 26 | def on_get(self, req, resp): 27 | self.response = {} 28 | 29 | text_param = req.get_param("text") 30 | if text_param is not None: 31 | 32 | text = ",".join(text_param) if isinstance(text_param, list) else text_param 33 | text = unicode_(text) 34 | text = text.replace("%20", " ").replace("%3B", ";").replace("%2C", ",").replace("%3A", ":").replace("%24","$").replace("%2C",",") 35 | print("** text=", text) 36 | doc = self.nlp(text) 37 | #print("** doc=", doc) 38 | self.response['entities'] = [ent.text + "/" + ent.label_ for ent in doc.ents] 39 | self.response['tokens'] = [token.text for token in doc] 40 | 41 | resp.body = json.dumps(self.response) 42 | resp.content_type = 'application/json' 43 | resp.append_header('Access-Control-Allow-Origin', "*") 44 | resp.status = falcon.HTTP_200 45 | 46 | if __name__ == '__main__': 47 | RESSOURCE = AllResource() 48 | APP = falcon.API() 49 | APP.add_route('/', RESSOURCE) 50 | HTTPD = make_server('0.0.0.0', 8008, APP) 51 | HTTPD.serve_forever() 52 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/python_spacy_nlp_server/spacynlpserver/test/README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mark-watson/haskell_tutorial_cookbook_examples/718d33078b76a496e13a9e618eab206eb92f0760/HybridHaskellPythonNlp/python_spacy_nlp_server/spacynlpserver/test/README.md -------------------------------------------------------------------------------- 
/HybridHaskellPythonNlp/python_spacy_nlp_server/test_client.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | from urllib.request import Request, urlopen 4 | import urllib 5 | import json 6 | 7 | base_uri = 'http://127.0.0.1:8008?text=' 8 | 9 | 10 | def spacy_client(text, no_detail=False): 11 | def get_raw_data_from_web(a_uri): 12 | req = Request(a_uri, headers={'User-Agent': 'PythonBook/1.0'}) 13 | http_response = urlopen(req) 14 | data = http_response.read() 15 | return data 16 | 17 | encoded_text = urllib.parse.quote(text) 18 | raw_data = get_raw_data_from_web(base_uri + encoded_text) 19 | return json.loads(raw_data.decode("UTF8")) 20 | 21 | 22 | print(spacy_client('Bill Clinton went to the Pepsi factory in Mexico.')) 23 | print(spacy_client('The Mark Jones ran. Sam Bell ran faster.')) 24 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/src/NlpWebClient.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE DeriveDataTypeable #-} 3 | 4 | -- reference: http://www.serpentine.com/wreq/tutorial.html 5 | module NlpWebClient 6 | ( nlpClient, NlpResponse 7 | ) where 8 | 9 | import Control.Lens 10 | import Data.ByteString.Lazy.Char8 (unpack) 11 | import Data.Maybe (fromJust) 12 | import Network.URI.Encode as E -- encode is also in Data.Aeson 13 | import Network.Wreq 14 | 15 | import Text.JSON.Generic 16 | 17 | data NlpResponse = NlpResponse {entities::[String], tokens::[String]} deriving (Show, Data, Typeable) 18 | 19 | base_url = "http://127.0.0.1:8008?text=" 20 | 21 | nlpClient :: [Char] -> IO NlpResponse 22 | nlpClient query = do 23 | putStrLn $ "\n\n*** Processing " ++ query 24 | r <- get $ base_url ++ (E.encode query) ++ "&no_detail=1" 25 | --putStrLn $ "status code: " ++ (show (r ^. responseStatus . statusCode)) 26 | --putStrLn $ "content type: " ++ (show (r ^? responseHeader "Content-Type")) 27 | --putStrLn $ "response body: " ++ (unpack (fromJust (r ^? responseBody))) 28 | let ret = (decodeJSON (unpack (fromJust (r ^? responseBody)))) :: NlpResponse 29 | return ret 30 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # https://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | resolver: lts-22.26 12 | 13 | allow-newer: true 14 | 15 | # User packages to be built. 16 | # Various formats can be used as shown in the example below. 17 | # 18 | # packages: 19 | # - some-directory 20 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 21 | # subdirs: 22 | # - auto-update 23 | # - wai 24 | packages: 25 | - . 26 | extra-deps: 27 | - json-0.10 28 | # Dependency packages to be pulled from upstream that are not in the resolver. 29 | # These entries can reference officially published versions as well as 30 | # forks / in-progress versions pinned to a git hash. 
For example: 31 | # 32 | # extra-deps: 33 | # - acme-missiles-0.3 34 | # - git: https://github.com/commercialhaskell/stack.git 35 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 36 | # 37 | # extra-deps: [] 38 | 39 | # Override default flag values for local packages and extra-deps 40 | # flags: {} 41 | 42 | # Extra package databases containing global packages 43 | # extra-package-dbs: [] 44 | 45 | # Control whether we use the GHC we find on the path 46 | # system-ghc: true 47 | # 48 | # Require a specific version of stack, using version ranges 49 | # require-stack-version: -any # Default 50 | # require-stack-version: ">=2.1" 51 | # 52 | # Override the architecture used by stack, especially useful on Windows 53 | # arch: i386 54 | # arch: x86_64 55 | # 56 | # Extra directories used by stack for building 57 | # extra-include-dirs: [/path/to/dir] 58 | # extra-lib-dirs: [/path/to/dir] 59 | # 60 | # Allow a newer minor version of GHC than the snapshot specifies 61 | # compiler-check: newer-minor 62 | -------------------------------------------------------------------------------- /HybridHaskellPythonNlp/test/Spec.hs: -------------------------------------------------------------------------------- 1 | main :: IO () 2 | main = putStrLn "Test suite not yet implemented" 3 | -------------------------------------------------------------------------------- /ImPure/.gitignore: -------------------------------------------------------------------------------- 1 | temp.txt 2 | -------------------------------------------------------------------------------- /ImPure/CommonWords.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Data.Set (fromList, toList, intersection) 4 | import Data.Char (toLower) 5 | 6 | fileToWords fileName = do 7 | fileText <- readFile fileName 8 | return $ (fromList . 
words) (map toLower fileText) 9 | 10 | commonWords file1 file2 = do 11 | words1 <- fileToWords file1 12 | words2 <- fileToWords file2 13 | return $ toList $ intersection words1 words2 14 | 15 | commonWords2 file1 file2 = 16 | fileToWords file1 >>= \f1 -> 17 | fileToWords file2 >>= \f2 -> 18 | return $ toList $ intersection f1 f2 19 | 20 | commonWords3 file1 file2 = 21 | (\f1 f2 -> toList $ intersection f1 f2) 22 | <$> fileToWords file1 23 | <*> fileToWords file2 24 | 25 | main = do 26 | cw <- commonWords "text1.txt" "text2.txt" 27 | print cw 28 | cw2 <- commonWords2 "text1.txt" "text2.txt" 29 | print cw2 30 | cw3 <- commonWords3 "text1.txt" "text2.txt" 31 | print cw3 32 | 33 | -------------------------------------------------------------------------------- /ImPure/DoLetExample.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | example1 = do -- good style 4 | putStrLn "Enter an integer number:" 5 | s <- getLine 6 | let number = (read s :: Int) + 2 7 | putStrLn $ "Number plus 2 = " ++ (show number) 8 | 9 | example2 = do -- avoid using "in" inside a do statement 10 | putStrLn "Enter an integer number:" 11 | s <- getLine 12 | let number = (read s :: Int) + 2 in 13 | putStrLn $ "Number plus 2 = " ++ (show number) 14 | 15 | example3 = do -- avoid using "in" inside a do statement 16 | putStrLn "Enter an integer number:" 17 | s <- getLine 18 | let number = (read s :: Int) + 2 in 19 | do 20 | putStrLn "Result is:" 21 | putStrLn $ "Number plus 2 = " ++ (show number) 22 | 23 | main = do 24 | example1 25 | example2 26 | example3 -------------------------------------------------------------------------------- /ImPure/DoLetExample2.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | example1 = do 4 | putStrLn "Enter an integer number:" 5 | s <- getLine 6 | let number = (read s :: Int) + 2 7 | putStrLn $ "Number plus 2 = " ++ (show number) 8 | 9 | example2 = do 10 | putStrLn "Enter an integer number:" 11 | s <- getLine 12 | let number = (read s :: Int) + 2 in 13 | putStrLn $ "Number plus 2 = " ++ (show number) 14 | 15 | main = example1 >> example2 >> example1 -------------------------------------------------------------------------------- /ImPure/DoLetExample3.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | example3 = putStrLn "Enter an integer number:" >> getLine 4 | 5 | example4 mv = do 6 | let number = (read mv :: Int) + 2 7 | putStrLn $ "Number plus 2 = " ++ (show number) 8 | 9 | main = example3 >>= example4 10 | -------------------------------------------------------------------------------- /ImPure/FmapExample.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | fileToWords fileName = do 4 | fileText <- readFile fileName 5 | return $ words fileText 6 | 7 | main = do 8 | words1 <- fileToWords "text1.txt" 9 | print $ reverse words1 10 | words2 <- fmap reverse $ fileToWords "text1.txt" 11 | print words2 12 | 13 | -------------------------------------------------------------------------------- /ImPure/ImPure.cabal: -------------------------------------------------------------------------------- 1 | name: ImPure 2 | version: 0.1.0.0 3 | synopsis: ImPure 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark 
Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: 1.22.4.0 14 | 15 | executable DoLetExample 16 | hs-source-dirs: . 17 | main-is: DoLetExample.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5 20 | 21 | executable DoLetExample2 22 | hs-source-dirs: . 23 | main-is: DoLetExample2.hs 24 | default-language: Haskell2010 25 | build-depends: base >= 4.7 && < 5 26 | 27 | executable DoLetExample3 28 | hs-source-dirs: . 29 | main-is: DoLetExample3.hs 30 | default-language: Haskell2010 31 | build-depends: base >= 4.7 && < 5 32 | 33 | executable FmapExample 34 | hs-source-dirs: . 35 | main-is: FmapExample.hs 36 | default-language: Haskell2010 37 | build-depends: base >= 4.7 && < 5 38 | 39 | executable CommonWords 40 | hs-source-dirs: . 41 | main-is: CommonWords.hs 42 | default-language: Haskell2010 43 | build-depends: base >= 4.7 && < 5, containers 44 | 45 | 46 | -------------------------------------------------------------------------------- /ImPure/README.md: -------------------------------------------------------------------------------- 1 | # Running the program snippets from the Pure Haskell Tutorial Chapter 2 | 3 | You can run the examples interactively: 4 | 5 | ~~~~~~~~ 6 | stack ghci 7 | ~~~~~~~~ 8 | 9 | or build and run them: 10 | 11 | ~~~~~~~~ 12 | stack build 13 | stack exec CommonWords 14 | stack exec DoLetExample 15 | stack exec DoLetExample2 16 | stack exec DoLetExample3 17 | stack exec FmapExample 18 | ~~~~~~~~ 19 | 20 | 21 | These simple examples are some of the code used in the tutorial chapter on "impure" Haskell. 22 | 23 | 24 | -------------------------------------------------------------------------------- /ImPure/stack.yaml: -------------------------------------------------------------------------------- 1 | # Read the resolvr version number from ~/.stack/config.yaml 2 | 3 | resolver: lts-22.26 4 | 5 | allow-newer: false 6 | 7 | # Local packages, usually specified by relative directory name 8 | packages: 9 | - '.' 10 | 11 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 12 | extra-deps: [] 13 | 14 | # Override default flag values for local packages and extra-deps 15 | flags: {} 16 | 17 | # Control whether we use the GHC we find on the path 18 | # system-ghc: true 19 | 20 | # Require a specific version of stack, using version ranges 21 | # require-stack-version: -any # Default 22 | # require-stack-version: >= 0.1.4.0 23 | 24 | # Override the architecture used by stack, especially useful on Windows 25 | # arch: i386 26 | # arch: x86_64 27 | 28 | # Extra directories used by stack for building 29 | # extra-include-dirs: [/path/to/dir] 30 | # extra-lib-dirs: [/path/to/dir] 31 | -------------------------------------------------------------------------------- /ImPure/text1.txt: -------------------------------------------------------------------------------- 1 | The dog chased the cat down the street. 2 | 3 | -------------------------------------------------------------------------------- /ImPure/text2.txt: -------------------------------------------------------------------------------- 1 | The dog and cat were friends. 
2 | 3 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | build: 2 | cd Blackjack; stack build 3 | cd ClientServer; stack build 4 | cd CommandLineApp; stack build 5 | echo "skipping building Database-postgres" 6 | cd Database-sqlite; stack build 7 | cd debugging; stack build 8 | cd ImPure; stack build 9 | cd NlpTool; stack build 10 | cd OpenAiApiClient; stack build 11 | cd knowledge_graph_creator_pure; stack build 12 | cd Pure; stack build 13 | cd SparqlClient; stack build 14 | cd StateMonad; stack build 15 | cd TextProcessing; stack build 16 | cd Timers; stack build 17 | cd WebScraping; cabal build 18 | echo "There may be problems compiling the getenerated LexData source files in LexiconData.hs:" 19 | cd FastTag; stack build 20 | 21 | clean: 22 | rm -r -f */.stack-work */dist-newstyle */*.dyn* 23 | 24 | update_stack_resolver_macOs: 25 | sed -i '' 's/^resolver: .*/resolver: lts-22.26/' */stack.yaml 26 | 27 | update_stack_resolver_linux: 28 | sed -i 's/^resolver: .*/resolver: lts-22.26/' */stack.yaml 29 | 30 | -------------------------------------------------------------------------------- /NlpTool/.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | cabal-dev 3 | .cabal-sandbox 4 | *.o 5 | *.hi 6 | *.chi 7 | *.chs.h 8 | .virtualenv 9 | .hsenv 10 | .cabal-sandbox/ 11 | cabal.sandbox.config 12 | *.aes 13 | .stack-work 14 | -------------------------------------------------------------------------------- /NlpTool/NlpTool.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 1.12 2 | 3 | name: NlpTool 4 | version: 0.1.0.0 5 | description: Please see the README.md 6 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 7 | author: Mark Watson 8 | maintainer: mark@markwatson.com 9 | copyright: 2019 Mark Watson 10 | license: Apache-2.0 11 | license-file: ../APACHE_LICENSE-2.0.txt 12 | build-type: Simple 13 | extra-source-files: 14 | README.md 15 | 16 | source-repository head 17 | type: git 18 | location: https://github.com/githubuser/NlpTool 19 | 20 | library 21 | exposed-modules: 22 | Categorize 23 | Entities 24 | Sentence 25 | Summarize 26 | NlpUtils 27 | Stemmer 28 | BroadcastNetworkNamesDbPedia 29 | Category1Gram 30 | Category2Gram 31 | CityNamesDbpedia 32 | CompanyNamesDbpedia 33 | CountryNamesDbpedia 34 | PeopleDbPedia 35 | PoliticalPartyNamesDbPedia 36 | TradeUnionNamesDbPedia 37 | UniversityNamesDbPedia 38 | other-modules: 39 | Paths_NlpTool 40 | hs-source-dirs: 41 | src 42 | src/nlp 43 | src/nlp/data 44 | build-depends: 45 | base >=4.7 && <5, wreq, lens, bytestring 46 | , hxt, json, uri-encode >= 1.5.0.5, MissingH 47 | , aeson >= 1.4.2.0, containers, split, text 48 | , directory, yaml, aeson, string-conversions 49 | , MissingH, json >= 0.9.3 50 | default-language: Haskell2010 51 | 52 | executable NlpTool-exe 53 | main-is: NlpTool.hs 54 | other-modules: 55 | Paths_NlpTool 56 | hs-source-dirs: 57 | app 58 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 59 | build-depends: 60 | NlpTool 61 | , base >=4.7 && <5 62 | , MissingH 63 | , directory, json >= 0.9.3 64 | default-language: Haskell2010 65 | -------------------------------------------------------------------------------- /NlpTool/README.md: -------------------------------------------------------------------------------- 1 | kbnlp.hs 2 | ======== 3 | 4 | Mark Watson's Haskell NLP 
Experiments 5 | 6 | Copyright 2014 by Mark Watson. All rights reserved. 7 | 8 | License: AGPL version 3 license. (Note: I own the copyright of all of the code in this project; contact me if you need a commercial license.) 9 | 10 | This project also includes some experiments using Haskell for SPARQL clients. 11 | 12 | I am currently working on using, when possible, DBPedia URIs as identifiers for entities detected in text. As is much of this project, this is a work in progress. 13 | 14 | 15 | ## Credits 16 | 17 | I use the Haskell stemmer written by Dmitry Antonyuk and the sentence splitting code written by Eric Kow. 18 | 19 | Thanks!! 20 | 21 | ## Generated code 22 | 23 | There are a fair number of Haskell "source" files that were generated by Ruby scripts that are not included in this project. 24 | 25 | These files (e.g., CityNamesDbpedia.hs, Category1Gram.hs, Category2Gram.hs, etc.) mostly use Data.Map.fromList to create in-memory maps for lingusitic and other data. These files are not particularly interesting. The more interesting code is found in the top level files Summarize.hs, Entities.hs, etc. 26 | 27 | ## Building the NlpTool-exe executable: 28 | 29 | stack build --fast --exec NlpTool-exe 30 | 31 | 32 | ## Running the NLP examples Interactively - with stack 33 | 34 | Install stack: https://github.com/commercialhaskell/stack/wiki/Downloads 35 | 36 | stack init 37 | 38 | stack build 39 | 40 | stack ghci 41 | 42 | stack exec NlpTool 43 | 44 | Inside stack ghci: 45 | 46 | stack ghci 47 | 48 | ~~~~~~~~ 49 | :l Categorize.hs 50 | main 51 | 52 | :l Summarize.hs 53 | main 54 | ~~~~~~~~ 55 | 56 | etc. 57 | -------------------------------------------------------------------------------- /NlpTool/app/NlpTool.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | -- Copyright 2014 by Mark Watson. All rights reserved. 4 | -- The software and data in this project can be used under the terms of the GPL version 3 license or APache 2 license. 5 | 6 | -- experiments with creating a command line tool for categorization and entity detection 7 | 8 | import System.IO 9 | import Text.JSON (showJSON, encode) 10 | 11 | import NlpUtils 12 | 13 | import Categorize 14 | import Entities 15 | import Summarize 16 | 17 | main = do 18 | putStrLn "Enter text (all on one line)" 19 | s <- getLine 20 | let cats = bestCategories (splitWords s); 21 | bestCat = if not (null cats) then fst (head cats) else ""; 22 | sum = summarizeS s; 23 | spwkc = splitWordsKeepCase s; 24 | people = encode $ showJSON $ peopleNames spwkc; 25 | countries = encode $ showJSON $ countryNames spwkc; 26 | companies = encode $ showJSON $ companyNames spwkc; 27 | result = encode $ showJSON [bestCat, sum]; 28 | result2 = encode $ showJSON [people, countries, companies] in 29 | do 30 | putStr "category:\t" 31 | putStrLn bestCat 32 | putStr "summary:\t" 33 | putStrLn sum 34 | putStr "people:\t" 35 | putStrLn people 36 | putStr "companies:\t" 37 | putStrLn companies 38 | putStr "countries:\t" 39 | putStrLn countries 40 | main 41 | -------------------------------------------------------------------------------- /NlpTool/src/nlp/Categorize.hs: -------------------------------------------------------------------------------- 1 | -- Copyright 2014-2016 by Mark Watson. All rights reserved. 2 | -- The software and data in this project can be used under the terms of the AGPL version 3. 
3 | module Categorize 4 | ( bestCategories 5 | , splitWords 6 | , bigram 7 | ) where 8 | 9 | import Data.List (sortBy) 10 | import qualified Data.Map as M 11 | 12 | import Category1Gram (onegrams) 13 | import Category2Gram (twograms) 14 | 15 | import Sentence (segment) 16 | 17 | import Stemmer (stem) 18 | 19 | import NlpUtils (bigram, bigram_s, splitWords) 20 | 21 | catnames1 = map fst onegrams 22 | 23 | catnames2 = map fst twograms 24 | 25 | stemWordsInString :: String -> [Char] 26 | stemWordsInString s = init $ concatMap ((++ " ") . stem) (splitWords s) -- why discard last item in list? 27 | 28 | stemScoredWordList = map (\(str, score) -> (stemWordsInString str, score)) 29 | 30 | stemHelper = 31 | map 32 | (\(category, swl) -> 33 | (category, M.fromList (stemScoredWordList (M.toList swl)))) 34 | 35 | stem2 :: [([Char], M.Map [Char] Double)] 36 | stem2 = stemHelper twograms 37 | 38 | stem1 :: [([Char], M.Map [Char] Double)] 39 | stem1 = stemHelper onegrams 40 | 41 | scoreCat wrds amap = sum $ map (\x -> M.findWithDefault 0.0 x amap) wrds 42 | 43 | score wrds amap = 44 | filter (\(a, b) -> b > 0.9) $ 45 | zip [0 ..] $ map (\(s, m) -> scoreCat wrds m) amap 46 | 47 | cmpScore (a1, b1) (a2, b2) = compare b2 b1 48 | 49 | bestCategoriesHelper wrds ngramMap categoryNames = 50 | let tg = bigram_s wrds 51 | in map (\(a, b) -> (categoryNames !! a, b)) $ 52 | sortBy cmpScore $ score wrds ngramMap 53 | 54 | bestCategories1 wrds = take 3 $ bestCategoriesHelper wrds onegrams catnames1 55 | 56 | bestCategories2 :: [[Char]] -> [([Char], Double)] 57 | bestCategories2 wrds = 58 | take 3 $ bestCategoriesHelper (bigram_s wrds) twograms catnames2 59 | 60 | bestCategories1stem :: [[Char]] -> [([Char], Double)] 61 | bestCategories1stem wrds = take 3 $ bestCategoriesHelper wrds stem1 catnames1 62 | 63 | bestCategories2stem :: [[Char]] -> [([Char], Double)] 64 | bestCategories2stem wrds = 65 | take 3 $ bestCategoriesHelper (bigram_s wrds) stem2 catnames2 66 | 67 | bestCategories :: [String] -> [(String, Double)] 68 | bestCategories wrds = map do_normalization_to_probabilities non_normalized 69 | where 70 | sum1 = 71 | M.unionWith 72 | (+) 73 | (M.fromList $ bestCategories1 wrds) 74 | (M.fromList $ bestCategories2 wrds) 75 | sum2 = 76 | M.unionWith 77 | (+) 78 | (M.fromList $ bestCategories1stem wrds) 79 | (M.fromList $ bestCategories2stem wrds) 80 | non_normalized = sortBy cmpScore $ M.toList $ M.unionWith (+) sum1 sum2 81 | total_scores = foldl (+) 0 $ map snd non_normalized 82 | do_normalization_to_probabilities (name, value) = 83 | (name, value / total_scores) 84 | -------------------------------------------------------------------------------- /NlpTool/src/nlp/Entities.hs: -------------------------------------------------------------------------------- 1 | -- Copyright 2014 by Mark Watson. All rights reserved. The software and data in this project can be used under the terms of either the GPL version 3 license or the Apache 2 license. 2 | -- Identify entities (people, places, companies, etc.) 
in text and 3 | -- return entities and URIs to further information in DBPedia/WikiPedia 4 | module Entities 5 | ( companyNames 6 | , peopleNames 7 | , countryNames 8 | , cityNames 9 | , broadcastNetworkNames 10 | , politicalPartyNames 11 | , tradeUnionNames 12 | , universityNames 13 | ) where 14 | 15 | import Data.Char (toLower) 16 | import Data.List (intersect, intersperse, sort) 17 | import qualified Data.Map as M 18 | import Data.Maybe (isJust) 19 | import Data.Set (empty) 20 | import qualified Data.Set as S 21 | 22 | import NlpUtils 23 | ( bigram 24 | , bigram_s 25 | , removeDuplicates 26 | , splitWords 27 | , splitWordsKeepCase 28 | , trigram 29 | , trigram_s 30 | ) 31 | 32 | import CityNamesDbpedia (cityMap) 33 | import CompanyNamesDbpedia (companyMap) 34 | import CountryNamesDbpedia (countryMap) 35 | import PeopleDbPedia (peopleMap) 36 | 37 | import BroadcastNetworkNamesDbPedia (broadcastNetworkMap) 38 | import PoliticalPartyNamesDbPedia (politicalPartyMap) 39 | import TradeUnionNamesDbPedia (tradeUnionMap) 40 | import UniversityNamesDbPedia (universityMap) 41 | 42 | isSubsetOf :: (Foldable t1, Foldable t2, Eq a) => t1 a -> t2 a -> Bool 43 | xs `isSubsetOf` ys = all (`elem` ys) xs 44 | 45 | namesHelper ngrams dbPediaMap = 46 | filter 47 | (\x -> 48 | case x of 49 | (_, Just x) -> True 50 | _ -> False) $ 51 | map 52 | (\ngram -> 53 | ( ngram 54 | , let v = M.lookup ngram dbPediaMap 55 | in if isJust v 56 | then return (ngram, v) 57 | else Nothing)) 58 | ngrams 59 | 60 | helperNames1W = namesHelper 61 | 62 | helperNames2W wrds = namesHelper (bigram_s wrds) 63 | 64 | helperNames3W wrds = namesHelper (trigram_s wrds) 65 | 66 | entityHelper entityTypeMap wrds = 67 | let cns = 68 | removeDuplicates $ 69 | sort $ 70 | helperNames1W wrds entityTypeMap ++ 71 | helperNames2W wrds entityTypeMap ++ helperNames3W wrds entityTypeMap 72 | in map (\(s, Just (a, Just b)) -> (a, b)) cns 73 | 74 | companyNames wrds = entityHelper companyMap wrds 75 | 76 | countryNames wrds = entityHelper countryMap wrds 77 | 78 | peopleNames wrds = entityHelper peopleMap wrds 79 | 80 | cityNames wrds = entityHelper cityMap wrds 81 | 82 | broadcastNetworkNames wrds = entityHelper broadcastNetworkMap wrds 83 | 84 | politicalPartyNames wrds = entityHelper politicalPartyMap wrds 85 | 86 | tradeUnionNames wrds = entityHelper tradeUnionMap wrds 87 | 88 | universityNames wrds = entityHelper universityMap wrds 89 | 90 | main :: IO () 91 | main = do 92 | let s = 93 | "As read in the San Francisco Chronicle, the company is owned by John Smith, Bill Clinton, Betty Sanders, and Dr. Ben Jones. Ben Jones and Mr. John Smith are childhood friends who grew up in Brazil, Canada, Buenos Aires, and the British Virgin Islands. Apple Computer relased a new version of OS X yesterday. Brazil Brazil Brazil. John Smith bought stock in ConocoPhillips, Heinz, Hasbro, and General Motors, Fox Sports Radio. I listen to B J Cole. Awami National Party is a political party. ALAEA is a trade union. She went to Brandeis University." 
94 | print $ peopleNames $ splitWordsKeepCase s 95 | print $ countryNames $ splitWordsKeepCase s 96 | print $ companyNames $ splitWordsKeepCase s 97 | print $ cityNames $ splitWordsKeepCase s 98 | print $ broadcastNetworkNames $ splitWordsKeepCase s 99 | print $ politicalPartyNames $ splitWordsKeepCase s 100 | print $ tradeUnionNames $ splitWordsKeepCase s 101 | print $ universityNames $ splitWordsKeepCase s 102 | -------------------------------------------------------------------------------- /NlpTool/src/nlp/NlpUtils.hs: -------------------------------------------------------------------------------- 1 | -- Copyright 2014 by Mark Watson. All rights reserved. The software and data in this project can be used under the terms of the GPL version 3 license. 2 | module NlpUtils 3 | ( splitWords 4 | , bigram 5 | , bigram_s 6 | , splitWordsKeepCase 7 | , trigram 8 | , trigram_s 9 | , removeDuplicates 10 | , cleanText 11 | ) where 12 | 13 | import Data.Char (toLower) 14 | import Data.Set (fromList, toList) 15 | import Data.String.Utils (replace) 16 | 17 | cleanText s = replace "“" "\"" $ replace "”" "\"" $ replace "’" "'" s 18 | 19 | splitWords :: String -> [String] 20 | splitWords = 21 | words . 22 | map 23 | (\c -> 24 | if c `elem` ".,;:!\n\t\"" 25 | then ' ' 26 | else toLower c) 27 | 28 | bigram :: [a] -> [[a]] 29 | bigram [] = [] 30 | bigram [_] = [] 31 | bigram xs = take 2 xs : bigram (tail xs) 32 | 33 | bigram_s xs = [(head a) ++ " " ++ (head a) | a <- bigram xs] 34 | 35 | splitWordsKeepCase :: String -> [String] 36 | splitWordsKeepCase = 37 | words . 38 | map 39 | (\c -> 40 | if c `elem` ".,;:!\n\t\"" 41 | then ' ' 42 | else c) 43 | 44 | trigram :: [a] -> [[a]] 45 | trigram [] = [] 46 | trigram [_] = [] 47 | trigram [_, _] = [] 48 | trigram xs = take 3 xs : trigram (tail xs) 49 | 50 | trigram_s xs = 51 | [(head a) ++ " " ++ (a !! 1) ++ " " ++ (a !! 2) | a <- trigram xs] 52 | 53 | removeDuplicates :: Ord a => [a] -> [a] 54 | removeDuplicates = toList . fromList 55 | -------------------------------------------------------------------------------- /NlpTool/src/nlp/Stemmer.hs: -------------------------------------------------------------------------------- 1 | module Stemmer 2 | ( stem 3 | ) where 4 | 5 | import Control.Monad 6 | import Data.List 7 | import Data.Maybe 8 | import System.IO 9 | 10 | -- written by Dmitry Antonyuk - believed to be in public domain 11 | isConsonant str i 12 | | c `elem` "aeiou" = False 13 | | c == 'y' = i == 0 || isVowel str (i - 1) 14 | | otherwise = True 15 | where 16 | c = str !! i 17 | 18 | isVowel = (not .) . isConsonant 19 | 20 | byIndex fun str = fun str [0 .. length str - 1] 21 | 22 | measure = 23 | length . 24 | filter not . init . (True :) . map head . group . byIndex (map . isConsonant) 25 | 26 | containsVowel = byIndex (any . isVowel) 27 | 28 | endsWithDouble = startsWithDouble . 
reverse 29 | where 30 | startsWithDouble l 31 | | length l < 2 = False 32 | | otherwise = 33 | let (x:y:_) = l 34 | in x == y && x `notElem` "aeiou" 35 | 36 | cvc word 37 | | length word < 3 = False 38 | | otherwise = 39 | isConsonant word lastIndex && 40 | isVowel word (lastIndex - 1) && 41 | isConsonant word (lastIndex - 2) && last word `notElem` "wxy" 42 | where 43 | lastIndex = length word - 1 44 | 45 | statefulReplace predicate str end replacement 46 | | end `isSuffixOf` str = Just replaced 47 | | otherwise = Nothing 48 | where 49 | part = take (length str - length end) str 50 | replaced 51 | | predicate part = Right (part ++ replacement) 52 | | otherwise = Left str 53 | 54 | replaceEnd predicate str end replacement = do 55 | result <- statefulReplace predicate str end replacement 56 | return (either id id result) 57 | 58 | findStem f word pairs = msum $ map (uncurry (replaceEnd f word)) pairs 59 | 60 | measureGT = flip ((>) . measure) 61 | 62 | step1a word = fromMaybe word result 63 | where 64 | result = 65 | findStem 66 | (const True) 67 | word 68 | [("sses", "ss"), ("ies", "i"), ("ss", "ss"), ("s", "")] 69 | 70 | beforeStep1b word = fromMaybe (Left word) result 71 | where 72 | cond23 x = do 73 | v <- x 74 | either (const Nothing) (return . Right) v 75 | cond1 x = do 76 | v <- x 77 | return (Left v) 78 | result = 79 | cond1 (replaceEnd (measureGT 0) word "eed" "ee") `mplus` 80 | cond23 (statefulReplace containsVowel word "ed" "") `mplus` 81 | cond23 (statefulReplace containsVowel word "ing" "") 82 | 83 | afterStep1b word = fromMaybe word result 84 | where 85 | double = 86 | endsWithDouble word && not (any ((`isSuffixOf` word) . return) "lsz") 87 | mEq1AndCvc = measure word == 1 && cvc word 88 | iif cond val = 89 | if cond 90 | then Just val 91 | else Nothing 92 | result = 93 | findStem (const True) word [("at", "ate"), ("bl", "ble"), ("iz", "ize")] `mplus` 94 | iif double (init word) `mplus` 95 | iif mEq1AndCvc (word ++ "e") 96 | 97 | step1b = either id afterStep1b . beforeStep1b 98 | 99 | step1c word = fromMaybe word result 100 | where 101 | result = replaceEnd containsVowel word "y" "i" 102 | 103 | step1 = step1c . step1b . step1a 104 | 105 | step2 word = fromMaybe word result 106 | where 107 | result = 108 | findStem 109 | (measureGT 0) 110 | word 111 | [ ("ational", "ate") 112 | , ("tional", "tion") 113 | , ("enci", "ence") 114 | , ("anci", "ance") 115 | , ("izer", "ize") 116 | , ("bli", "ble") 117 | , ("alli", "al") 118 | , ("entli", "ent") 119 | , ("eli", "e") 120 | , ("ousli", "ous") 121 | , ("ization", "ize") 122 | , ("ation", "ate") 123 | , ("ator", "ate") 124 | , ("alism", "al") 125 | , ("iveness", "ive") 126 | , ("fulness", "ful") 127 | , ("ousness", "ous") 128 | , ("aliti", "al") 129 | , ("iviti", "ive") 130 | , ("biliti", "ble") 131 | , ("logi", "log") 132 | ] 133 | 134 | step3 word = fromMaybe word result 135 | where 136 | result = 137 | findStem 138 | (measureGT 0) 139 | word 140 | [ ("icate", "ic") 141 | , ("ative", "") 142 | , ("alize", "al") 143 | , ("iciti", "ic") 144 | , ("ical", "ic") 145 | , ("ful", "") 146 | , ("ness", "") 147 | ] 148 | 149 | step4 word = fromMaybe word result 150 | where 151 | gt1andST str = (measureGT 1) str && any ((`isSuffixOf` str) . return) "st" 152 | findGT1 = findStem (measureGT 1) word . 
map (flip (,) "") 153 | result = 154 | (findGT1 155 | [ "al" 156 | , "ance" 157 | , "ence" 158 | , "er" 159 | , "ic" 160 | , "able" 161 | , "ible" 162 | , "ant" 163 | , "ement" 164 | , "ment" 165 | , "ent" 166 | ]) `mplus` 167 | (findStem gt1andST word [("ion", "")]) `mplus` 168 | (findGT1 ["ou", "ism", "ate", "iti", "ous", "ive", "ize"]) 169 | 170 | step5a word = fromMaybe word result 171 | where 172 | test str = (measureGT 1 str) || ((measure str == 1) && (not $ cvc str)) 173 | result = replaceEnd test word "e" "" 174 | 175 | step5b word = fromMaybe word result 176 | where 177 | cond s = last s == 'l' && measureGT 1 s 178 | result = replaceEnd cond word "l" "" 179 | 180 | step5 = step5b . step5a 181 | 182 | allSteps = step5 . step4 . step3 . step2 . step1 183 | 184 | stem s 185 | | length s < 3 = s 186 | | otherwise = allSteps s 187 | 188 | main :: IO () 189 | main 190 | --content <- readFile "input.txt" 191 | --writeFile "out.txt" $ unlines $ map stem $ lines content 192 | = do 193 | let content = 194 | "The sport of hocky is about 100 years old by ahdi dates. American Football is a newer sport. Programming is fun. Congress passed a new budget that might help the economy. The President signed the tax bill." 195 | in print $ map stem $ words content 196 | -------------------------------------------------------------------------------- /NlpTool/src/nlp/Summarize.hs: -------------------------------------------------------------------------------- 1 | -- Copyright 2014 by Mark Watson. All rights reserved. 2 | -- The software and data in this project can be used under the terms of the AGPL version 3 license or Apache 2 license. 3 | module Summarize 4 | ( summarize 5 | , summarizeS 6 | ) where 7 | 8 | import Categorize (bestCategories) 9 | import Data.List.Utils (replace) 10 | import qualified Data.Map as M 11 | import Data.Maybe (fromMaybe) 12 | import NlpUtils (bigram_s, cleanText, splitWords) 13 | import Sentence (segment) 14 | 15 | import Category1Gram (onegrams) 16 | import Category2Gram (twograms) 17 | 18 | scoreSentenceHelper words scoreMap -- just use 1grams for now 19 | = sum $ map (\word -> M.findWithDefault 0.0 word scoreMap) words 20 | 21 | safeLookup key alist = fromMaybe 0 $ lookup key alist 22 | 23 | scoreSentenceByBestCategories words catDataMaps bestCategories = 24 | map 25 | (\(category, aMap) -> 26 | ( category 27 | , safeLookup category bestCategories * scoreSentenceHelper words aMap)) 28 | catDataMaps 29 | 30 | scoreForSentence words catDataMaps bestCategories = 31 | sum $ map snd $ scoreSentenceByBestCategories words catDataMaps bestCategories 32 | 33 | summarize s = 34 | let words = splitWords $ cleanText s 35 | bestCats = bestCategories words 36 | sentences = segment s 37 | result1grams = 38 | map 39 | (\sentence -> 40 | ( sentence 41 | , scoreForSentence (splitWords sentence) onegrams bestCats)) 42 | sentences 43 | result2grams = 44 | map 45 | (\sentence -> 46 | ( sentence 47 | , scoreForSentence 48 | (bigram_s (splitWords sentence)) 49 | twograms 50 | bestCats)) 51 | sentences 52 | mergedResults = 53 | filter (\(s, v) -> v > 0.05) $ 54 | M.toList $ 55 | M.unionWith (+) (M.fromList result1grams) (M.fromList result1grams) 56 | c400 = filter (\(sentence, score) -> score > 10) mergedResults 57 | c300 = filter (\(sentence, score) -> score > 7) mergedResults 58 | c280 = filter (\(sentence, score) -> score > 4) mergedResults 59 | c250 = filter (\(sentence, score) -> score > 3) mergedResults 60 | c200 = filter (\(sentence, score) -> score > 2) mergedResults 61 | c100 = 
filter (\(sentence, score) -> score > 1) mergedResults 62 | c050 = filter (\(sentence, score) -> score > 0.5) mergedResults 63 | c000 = mergedResults 64 | in if (not (null c400)) && (length c400) > 1 && (length c400) < 3 65 | then c400 66 | else if (not (null c300)) && (length c300) > 1 && (length c300) < 3 67 | then c300 68 | else if not (null c280) && (length c280) > 1 && (length c280) < 3 69 | then c280 70 | else if not (null c250) && 71 | (length c250) > 1 && (length c250) < 3 72 | then c250 73 | else if not (null c200) && 74 | (length c200) > 1 && (length c200) < 3 75 | then c200 76 | else if not (null c100) && 77 | (length c100) > 1 && 78 | (length c100) < 3 79 | then c100 80 | else if not (null c050) && 81 | (length c050) > 1 && 82 | (length c050) < 3 83 | then c050 84 | else if (length mergedResults) < 85 | 3 86 | then c000 87 | else [ head 88 | mergedResults 89 | , (head . tail) 90 | mergedResults 91 | ] 92 | 93 | summarizeS s = 94 | let a = 95 | replace "\"" "'" $ 96 | replace "\n" " " $ concatMap (\x -> fst x ++ " ") $ summarize s 97 | in init $ 98 | if not (null a) 99 | then a 100 | else safeFirst $ segment s 101 | where 102 | safeFirst x 103 | | length x > 1 = head x ++ x !! 1 104 | | not (null x) = head x 105 | | otherwise = " " 106 | 107 | main = do 108 | let s = 109 | "Plunging European stocks, wobbly bonds and grave concerns about the health of Portuguese lender Banco Espirito Santo SA made last week feel like a rerun of the euro crisis, but most investors say it was no more than a blip for a resurgent region. Banco Espirito Santo has been in investors’ sights since December, when The Wall Street Journal first reported on accounting irregularities at the complex firm. Nerves frayed on Thursday when Banco Espirito Santo's parent company said it wouldn't be able to meet some short-term debt obligations." 110 | print $ summarize s 111 | print $ summarizeS s 112 | -------------------------------------------------------------------------------- /NlpTool/src/nlp/data/UniversityNamesDbPedia.hs: -------------------------------------------------------------------------------- 1 | -- Copyright 2014 by Mark Watson. All rights reserved. The software and data in this project can be used under the terms of the GPL version 3 license. 2 | 3 | module UniversityNamesDbPedia (universityMap) where 4 | 5 | import qualified Data.Map as M 6 | 7 | universityMap = M.fromList [("ISAE formation SUPAERO", ""), ("Telecom Bretagne", ""), ("Akita International University", ""), ("Al-Azhar University", ""), ("American University", ""), ("Babson College", ""), ("Berea College", ""), ("Bocconi University", ""), ("Boston College", "")] 8 | 9 | -- SHORT LIST!! After dev, get old long version from kbnlp.hs project 10 | 11 | -------------------------------------------------------------------------------- /NlpTool/stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-22.26 2 | 3 | allow-newer: false 4 | 5 | packages: 6 | - . 7 | # Dependency packages to be pulled from upstream that are not in the resolver 8 | # using the same syntax as the packages field. 
9 | # (e.g., acme-missiles-0.3) 10 | extra-deps: 11 | - MissingH-1.6.0.1 12 | - random-1.2.1.2 13 | - json-0.11 14 | 15 | -------------------------------------------------------------------------------- /OpenAiApiClient/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .codegpt -------------------------------------------------------------------------------- /OpenAiApiClient/GenText.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | import OpenAI.Client 3 | 4 | import Network.HTTP.Client 5 | import Network.HTTP.Client.TLS 6 | import System.Environment (getEnv) 7 | import qualified Data.Text as T 8 | import Data.Maybe (fromMaybe) 9 | import Data.Text (splitOn) 10 | 11 | -- example derived from the openai-client library documentation 12 | 13 | completionRequestToString :: String -> IO String 14 | completionRequestToString prompt = do 15 | manager <- newManager tlsManagerSettings 16 | apiKey <- T.pack <$> getEnv "OPENAI_KEY" 17 | let client = makeOpenAIClient apiKey manager 4 18 | let request = ChatCompletionRequest 19 | { chcrModel = ModelId "gpt-4o" 20 | , chcrMessages = 21 | [ ChatMessage 22 | { chmContent = Just (T.pack prompt) 23 | , chmRole = "user" 24 | , chmFunctionCall = Nothing 25 | , chmName = Nothing 26 | } 27 | ] 28 | , chcrFunctions = Nothing 29 | , chcrTemperature = Nothing 30 | , chcrTopP = Nothing 31 | , chcrN = Nothing 32 | , chcrStream = Nothing 33 | , chcrStop = Nothing 34 | , chcrMaxTokens = Nothing 35 | , chcrPresencePenalty = Nothing 36 | , chcrFrequencyPenalty = Nothing 37 | , chcrLogitBias = Nothing 38 | , chcrUser = Nothing 39 | } 40 | result <- completeChat client request 41 | case result of 42 | Left failure -> return (show failure) 43 | Right success -> 44 | case chrChoices success of 45 | (ChatChoice {chchMessage = ChatMessage {chmContent = content}} : _) -> 46 | return $ fromMaybe "No content" $ T.unpack <$> content 47 | _ -> return "No choices returned" 48 | 49 | -- find place names 50 | findPlaces :: String -> IO [String] 51 | findPlaces text = do 52 | let prompt = "Extract only the place names separated by commas from the following text:\n\n" ++ text 53 | response <- completionRequestToString prompt 54 | -- Convert Text to String using T.unpack before filtering 55 | let places = filter (not . null) $ map T.unpack $ splitOn "," (T.pack response) 56 | -- Strip leading and trailing whitespace from each place name 57 | return $ map (T.unpack . T.strip . T.pack) places 58 | 59 | findPeople :: String -> IO [String] 60 | findPeople text = do 61 | let prompt = "Extract only the person names separated by commas from the following text:\n\n" ++ text 62 | response <- completionRequestToString prompt 63 | let people = filter (not . null) $ map T.unpack $ splitOn "," (T.pack response) 64 | return $ map (T.unpack . T.strip . T.pack) people 65 | 66 | main :: IO () 67 | main = do 68 | response <- completionRequestToString "Write a hello world program in Haskell" 69 | putStrLn response 70 | 71 | places <- findPlaces "I visited London, Paris, and New York last year." 72 | print places 73 | 74 | people <- findPeople "John Smith met with Sarah Johnson and Michael Brown at the conference." 
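  -- Note: completionRequestToString reads the OPENAI_KEY environment variable
  -- (via getEnv above), so running this example requires a valid OpenAI API key.
  -- findPlaces and findPeople simply split the model's reply on commas, so the
  -- exact lists printed below depend on the model following the prompt format.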
75 | print people -------------------------------------------------------------------------------- /OpenAiApiClient/OpenAiApiClient.cabal: -------------------------------------------------------------------------------- 1 | name: OpenAiApiClient 2 | version: 0.1.0.0 3 | synopsis: Simple project template from stack 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark watson and the author of OpenAI Client library Alexander Thiemann 9 | copyright: 2024 Mark Watson 10 | category: Web 11 | build-type: Simple 12 | cabal-version: >=1.10 13 | 14 | executable GenText 15 | hs-source-dirs: . 16 | main-is: GenText.hs 17 | default-language: Haskell2010 18 | build-depends: base >= 4.7 && < 5, mtl >= 2.2.2, text, http-client >= 0.7.13.1, openai-hs, http-client-tls 19 | 20 | -------------------------------------------------------------------------------- /OpenAiApiClient/README.md: -------------------------------------------------------------------------------- 1 | # OpenAI APIs 2 | 3 | I am using the library written by Alexander Thiemann at: 4 | 5 | https://github.com/agrafix/openai-hs/tree/main/openai-hs 6 | 7 | If I just print: 8 | 9 | ``` 10 | case result of 11 | Left failure -> print failure 12 | Right success -> print $ chrChoices success 13 | ``` 14 | 15 | then the output looks like this: 16 | 17 | ``` 18 | [ChatChoice {chchIndex = 0, chchMessage = ChatMessage {chmContent = Just "Certainly! Here is a simple "Hello, World!" program in Haskell:\n\nhaskell\nmain :: IO ()\nmain = putStrLn \"Hello, World!\"\n\n\nTo run this program, follow these steps:\n\n1. Save the code in a file with a .hs extension, for example, HelloWorld.hs.\n2. Open a terminal and navigate to the directory where you saved the file.\n3. Compile the program using the Glasgow Haskell Compiler (GHC) by running:\n sh\n ghc HelloWorld.hs\n \n4. This will produce an executable file named HelloWorld (or HelloWorld.exe on Windows).\n5. Run the executable by typing:\n sh\n ./HelloWorld\n \n\nYou should see the output:\n\nHello, World!\n", chmRole = "assistant", chmFunctionCall = Nothing, chmName = Nothing}, chchFinishReason = Just "stop"}] 19 | ``` 20 | 21 | If I print: 22 | 23 | ``` 24 | Right success -> -- print $ chrChoices success 25 | case chrChoices success of 26 | (ChatChoice {chchMessage = ChatMessage {chmContent = content}}:_) -> 27 | putStrLn $ fromMaybe "No content" $ T.unpack <$> content 28 | _ -> putStrLn "No choices returned" 29 | ``` 30 | 31 | then the putput looks like this: 32 | 33 | ``` 34 | Certainly! Here's a simple "Hello, World!" program in Haskell: 35 | 36 | ```haskell 37 | main :: IO () 38 | main = putStrLn "Hello, World!" 39 | ``` 40 | 41 | To run this program: 42 | 43 | 1. Save the code in a file, for example, `HelloWorld.hs`. 44 | 2. Open a terminal (command prompt). 45 | 3. Navigate to the directory where you saved the file. 46 | 4. Compile the program using GHC (Glasgow Haskell Compiler): 47 | 48 | ```sh 49 | ghc --make HelloWorld.hs 50 | ``` 51 | 52 | 5. Run the compiled program: 53 | 54 | ```sh 55 | ./HelloWorld 56 | ``` 57 | 58 | You should see the output: 59 | 60 | ``` 61 | Hello, World! 62 | ``` 63 | 64 | Alternatively, you can run the Haskell code without compiling by using an interpreter like GHCi (the interactive environment for Haskell): 65 | 66 | 1. Open a terminal. 67 | 2. Start GHCi by typing `ghci`. 68 | 3. 
Load your file with the following command: 69 | 70 | ```sh 71 | :load HelloWorld.hs 72 | ``` 73 | 74 | 4. Run the `main` function: 75 | 76 | ```sh 77 | main 78 | ``` 79 | 80 | This should also produce the output "Hello, World!" in the terminal. 81 | ``` 82 | 83 | 84 | ## Run using Replit.com, Nix, Cabal 85 | 86 | cabal build 87 | cabal run 88 | 89 | Note: I had to manually install: cabal install cpphs 90 | -------------------------------------------------------------------------------- /OpenAiApiClient/stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-22.26 2 | 3 | allow-newer: false 4 | 5 | # flags: 6 | extra-package-dbs: [] 7 | packages: 8 | - '.' 9 | extra-deps: 10 | - http-client-0.7.17 11 | - openai-servant-0.3.0.1 12 | - openai-hs-0.3.0.1 13 | - http-client-tls-0.3.6.3 14 | 15 | -------------------------------------------------------------------------------- /Pure/.gitignore: -------------------------------------------------------------------------------- 1 | temp.txt 2 | -------------------------------------------------------------------------------- /Pure/Cases.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | numberOpinion n = 4 | case n of 5 | 0 -> "Too low" 6 | 1 -> "just right" 7 | _ -> "OK, that is a number" 8 | 9 | main = do 10 | print $ numberOpinion 0 11 | print $ numberOpinion 1 12 | print $ numberOpinion 2 13 | 14 | -------------------------------------------------------------------------------- /Pure/ChainedCalls.hs: -------------------------------------------------------------------------------- 1 | module ChainedCalls where 2 | 3 | doubleOddElements = 4 | map (\x -> if x `mod` 2 == 0 then x else 2 * x) 5 | 6 | times10Elements = map (* 10) 7 | 8 | main = do 9 | print $ doubleOddElements [0,1,2,3,4,5,6,7,8] 10 | let aList = [0,1,2,3,4,5] 11 | let newList = times10Elements $ doubleOddElements aList 12 | print newList 13 | let newList2 = (times10Elements . 
doubleOddElements) aList 14 | print newList2 15 | -------------------------------------------------------------------------------- /Pure/Conditionals.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | head' (x:_) = x 4 | tail' (_:xs) = xs 5 | 6 | doubleList [] = [] 7 | doubleList (x:xs) = (* 2) x : doubleList xs 8 | 9 | bumpList n [] = [] 10 | bumpList n (x:xs) = n * x : bumpList n xs 11 | 12 | map' f [] = [] 13 | map' f (x:xs) = (f x) : map' f xs 14 | 15 | main = do 16 | print $ head' ["bird","dog","cat"] 17 | print $ tail' [0,1,2,3,4,5] 18 | print $ doubleList [0..5] 19 | print $ bumpList 3 [0..5] 20 | print $ map' (* 7) [0..5] 21 | print $ map' (+ 1.1) [0..5] 22 | print $ map' (\x -> (x + 1) * 2) [0..5] 23 | 24 | 25 | -------------------------------------------------------------------------------- /Pure/Guards.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Data.Maybe 4 | import System.Random -- uses random library (see Pure.cabal file) 5 | 6 | spaceship n 7 | | n < 0 = -1 8 | | n == 0 = 0 9 | | otherwise = 1 10 | 11 | randomMaybeValue n 12 | | n `mod` 2 == 0 = Just n 13 | | otherwise = Nothing 14 | 15 | main = do 16 | print $ spaceship (-100) 17 | print $ spaceship 0 18 | print $ spaceship 17 19 | print $ randomMaybeValue 1 20 | print $ randomMaybeValue 2 21 | -------------------------------------------------------------------------------- /Pure/IfThenElses.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | ageToString age = 4 | if age < 21 then "minor" else "adult" 5 | 6 | main = do 7 | print $ ageToString 15 8 | print $ ageToString 37 9 | -------------------------------------------------------------------------------- /Pure/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Mark Watson (c) 2016 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Mark Watson nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /Pure/LetAndWhere.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | funnySummation w x y z = 4 | let bob = w + x 5 | sally = y + z 6 | in bob + sally 7 | 8 | testLetComprehension = 9 | [(a,b) | a <- [0..5], let b = 10 * a] 10 | 11 | testWhereBlocks a = 12 | z * q 13 | where 14 | z = a + 2 15 | q = 2 16 | 17 | functionWithWhere n = 18 | (n + 1) * tenn 19 | where 20 | tenn = 10 * n 21 | 22 | main = do 23 | print $ funnySummation 1 2 3 4 24 | let n = "Rigby" 25 | print n 26 | print testLetComprehension 27 | print $ testWhereBlocks 11 28 | print $ functionWithWhere 1 29 | 30 | 31 | -------------------------------------------------------------------------------- /Pure/MapExamples.hs: -------------------------------------------------------------------------------- 1 | module MapExamples where 2 | 3 | import qualified Data.Map as M -- from library containers 4 | 5 | 6 | aTestMap = M.fromList [("height", 120), ("weight", 15)] 7 | 8 | getNumericValue key aMap = 9 | case M.lookup key aMap of 10 | Nothing -> -1 11 | Just value -> value 12 | 13 | main = do 14 | print $ getNumericValue "height" aTestMap 15 | print $ getNumericValue "age" aTestMap 16 | 17 | -------------------------------------------------------------------------------- /Pure/MyColors.hs: -------------------------------------------------------------------------------- 1 | data MyColors = Orange | Red | Blue | Green | Silver 2 | deriving (Show, Eq) 3 | 4 | instance Ord MyColors where 5 | compare c1 c2 = compare (show c1) (show c2) 6 | -------------------------------------------------------------------------------- /Pure/NoIO.hs: -------------------------------------------------------------------------------- 1 | module NoIO where 2 | 3 | main :: Integer 4 | main = do 5 | let i = 1 in 6 | 2 * i -------------------------------------------------------------------------------- /Pure/Pure.cabal: -------------------------------------------------------------------------------- 1 | name: Pure 2 | version: 0.1.0.0 3 | synopsis: Simple 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable Simple 16 | hs-source-dirs: . 
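  -- Only Simple.hs is wired up as an executable here; the other example files
  -- in this directory are intended to be loaded interactively with ghci (see README.md).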
17 | main-is: Simple.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, random, containers 20 | 21 | -------------------------------------------------------------------------------- /Pure/README.md: -------------------------------------------------------------------------------- 1 | # Running the program snippets from the Pure Haskell Tutorial Chapter 2 | 3 | ~~~~~~~~ 4 | stack ghci 5 | ~~~~~~~~ 6 | 7 | These simple examples are some of the code used in the very long introductory tutorial chapter on using "pure" Haskell. 8 | 9 | ## Running with Replit.com, Nix, Cabal: 10 | 11 | cabal update 12 | cabal build 13 | cabal run 14 | -------------------------------------------------------------------------------- /Pure/Simple.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | sum2 x y = x + y 4 | 5 | main = do 6 | putStrLn ("1 + 2 = " ++ show (sum2 1 2)) 7 | 8 | -------------------------------------------------------------------------------- /Pure/myfunc1.hs: -------------------------------------------------------------------------------- 1 | myfunc1 :: [a] -> [a] -> [a] 2 | myfunc1 x y = x ++ y 3 | -------------------------------------------------------------------------------- /Pure/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.26 5 | 6 | allow-newer: false 7 | 8 | # Local packages, usually specified by relative directory name 9 | packages: 10 | - '.' 11 | 12 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 13 | extra-deps: [] 14 | 15 | # Override default flag values for local packages and extra-deps 16 | flags: {} 17 | 18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Examples for "Haskell Tutorial and Cookbook" by Mark Watson 2 | 3 | ![Haskell Logo](haskell.svg) 4 | 5 | This repo contains the examples for my Haskell book that I try to update with new material about once a year: 6 | 7 | [Haskell Tutorial and Cookbook](https://leanpub.com/haskell-cookbook) 8 | 9 | If you would like to support my work please consider purchasing my books on [Leanpub](https://leanpub.com/u/markwatson) and star my git repositories that you find useful on [GitHub](https://github.com/mark-watson?tab=repositories&q=&type=public). You can also interact with me on social media on [Mastodon](https://mastodon.social/@mark_watson) and [Twitter](https://twitter.com/mark_l_watson). 10 | 11 | ## I am starting to use this repo for all personal Haskell projects. 12 | 13 | In the future, if you see a file named **NOT_YET_IN_BOOK.md** in any subdirectory, then please don't expect (yet) that this example is in the book. Currently everything in this repo is in the second edition of my book. 14 | 15 | ## Updates 16 | 17 | Updated 7/28/2023 for stack resolver: lts-lts-21.4 18 | 19 | Note: to build sqlite example I had to: 20 | 21 | Set 'allow-newer: true' in /home/markw/.stack/config.yaml to ignore all version constraints 22 | 23 | ## Tool Installation 24 | 25 | See Appendix A in my book, or simply install **stack** and **hlint**. 
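
For example, on Linux or macOS you can usually install **stack** with the upstream installer script and then build **hlint** with it (adjust for your platform):

    curl -sSL https://get.haskellstack.org/ | sh
    stack install hlint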
26 | 27 | Note: 6/18/2020: install directions in Appendix A to optionally install **cabal-install** (with latest GHC 8.8.3) produces an error running: 28 | 29 | stack install cabal-install 30 | 31 | Installing **cabal-install** is optional. Installing **stack** using the directions in Appendix A which refers you to th official documentation at [http://docs.haskellstack.org/en/stable/README.html](http://docs.haskellstack.org/en/stable/README.html is sufficient for running the exampes in my book. 32 | 33 | ## Notes for running examples using Replit.com repls (September 8, 2024) 34 | 35 | By default stack is not installed. 36 | Accept the default's for Haskell after creating a new Replit repl by cloning this GitHub project. 37 | 38 | Then update cabal package index: 39 | 40 | cd Pure 41 | cabal update 42 | 43 | It will ask you which cabal sysem to install. I chose the command line version. 44 | 45 | cabal build 46 | cabal repl 47 | ghci> :l Guards.hs 48 | ghci> main 49 | -------------------------------------------------------------------------------- /SparqlClient/.gitignore: -------------------------------------------------------------------------------- 1 | sparql-client 2 | 3 | -------------------------------------------------------------------------------- /SparqlClient/HttpSparqlClient.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Main where 4 | 5 | import Network.HTTP.Conduit (simpleHttp) 6 | import Network.HTTP.Base (urlEncode) 7 | import Text.XML.HXT.Core 8 | import Text.HandsomeSoup 9 | import qualified Data.ByteString.Lazy.Char8 as B 10 | --import qualified Data.Text as T 11 | 12 | prefixUrl :: [Char] 13 | prefixUrl = "http://dbpedia.org/sparql/?query=" 14 | 15 | buildQuery :: String -> [Char] 16 | buildQuery sparqlString = prefixUrl ++ urlEncode sparqlString 17 | 18 | main :: IO () 19 | main = do 20 | let query = buildQuery "select * where { ?o . FILTER langMatches(lang(?o), \"EN\")} LIMIT 100" 21 | res <- simpleHttp query 22 | let doc = readString [] (B.unpack res) 23 | putStrLn "\nAbstracts:\n" 24 | abstracts <- runX $ doc >>> css "binding" >>> (getAttrValue "name" &&& (deep getText)) 25 | print abstracts 26 | -------------------------------------------------------------------------------- /SparqlClient/HttpSparqlJsonClient.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE DeriveDataTypeable #-} 3 | 4 | module Main where 5 | 6 | import Text.JSON.Generic 7 | 8 | -- DOES NOT WORK, just experimenting: 9 | 10 | import Network.HTTP.Conduit (simpleHttp) 11 | import Network.HTTP.Base (urlEncode) 12 | --import Text.XML.HXT.Core 13 | import Text.HandsomeSoup 14 | import qualified Data.ByteString.Lazy.Char8 as B 15 | --import qualified Data.Text as T 16 | 17 | prefixUrl :: [Char] 18 | prefixUrl = "http://dbpedia.org/sparql/?query=" 19 | 20 | buildQuery :: String -> [Char] 21 | buildQuery sparqlString = prefixUrl ++ urlEncode sparqlString ++ "&format=json" 22 | 23 | main :: IO () 24 | main = do 25 | let query = buildQuery "select * where { ?o . 
FILTER langMatches(lang(?o), \"EN\")} LIMIT 100" 26 | res <- simpleHttp query 27 | --let doc = readJSString (B.unpack res) 28 | --putStrLn (show doc) 29 | putStrLn "\nAbstracts:\n" 30 | -- abstracts <- runX $ doc >>> css "binding" >>> (getAttrValue "name" &&& (deep getText)) 31 | -- print abstracts 32 | -------------------------------------------------------------------------------- /SparqlClient/JsonTest.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | import Data.Aeson (Value) 3 | import qualified Data.ByteString.Char8 as S8 4 | import qualified Data.Yaml as Yaml 5 | import Network.HTTP.Simple 6 | 7 | import Data.Typeable(typeOf) -- for debugging 8 | import Control.Monad.Trans(lift) -- experiment only 9 | 10 | main :: IO () 11 | main = do 12 | 13 | response <- httpJSON "http://dbpedia.org/sparql/?query=select * where {?s ?p ?o} limit 2" 14 | print $ typeOf response 15 | -- rb <- lift $ (getResponseBody response) -- :: Value) 16 | putStrLn $ "The status code was: " ++ 17 | show (getResponseStatusCode response) 18 | print $ getResponseHeader "Content-Type" response 19 | S8.putStrLn $ Yaml.encode (getResponseBody response :: Value) 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /SparqlClient/README.md: -------------------------------------------------------------------------------- 1 | # Run example 2 | 3 | stack ghci 4 | *Main> :l HttpSparqlClient 5 | *Main> main 6 | *Main> :l RobsExample 7 | *Main> main 8 | 9 | Note: the URI for the demo RDF file for RobsExample is no longer valid so this example will not run as-is. 10 | 11 | # Using RDF4H 12 | 13 | [RDF4H documentation](https://robstewart57.github.io/rdf4h/) 14 | 15 | [RDF4H github repository](https://github.com/robstewart57/rdf4h) 16 | 17 | -------------------------------------------------------------------------------- /SparqlClient/RobsExample.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | -- Example from https://github.com/robstewart57/rdf4h/blob/master/examples/ESWC.hs 4 | 5 | module Main where 6 | 7 | import Data.RDF 8 | import qualified Data.Text as T 9 | 10 | eswcCommitteeURI, heldByProp :: T.Text 11 | eswcCommitteeURI = "http://data.semanticweb.org/conference/eswc/2015/program-committee-member" 12 | heldByProp = "swc:heldBy" 13 | 14 | -- | returns a list of full names of people who served as 15 | -- members on the ESWC 2015 conference programme committee. 16 | eswcCommitteeMembers :: RDF TList -> [T.Text] 17 | eswcCommitteeMembers graph = 18 | let triples = query graph (Just (unode eswcCommitteeURI)) (Just (unode heldByProp)) Nothing 19 | memberURIs = map objectOf triples 20 | in map 21 | (\memberURI -> 22 | let (LNode (PlainL firstName)) = 23 | objectOf $ head $ query graph (Just memberURI) (Just (unode "foaf:firstName")) Nothing 24 | (LNode (PlainL lastName)) = 25 | objectOf $ head $ query graph (Just memberURI) (Just (unode "foaf:lastName")) Nothing 26 | in (T.append firstName (T.append (T.pack " ") lastName))) 27 | memberURIs 28 | 29 | main :: IO () 30 | main = do 31 | result <- parseURL 32 | (XmlParser Nothing Nothing) 33 | "http://data.semanticweb.org/dumps/conferences/eswc-2015-complete.rdf" 34 | case result of 35 | Left (ParseFailure err) -> error ("Unable to parse RDF content from that URL: " ++ err) 36 | Right rdfGraph -> do 37 | let eswcMemberNames = eswcCommitteeMembers rdfGraph 38 | mapM_ (putStrLn . 
T.unpack) eswcMemberNames 39 | -------------------------------------------------------------------------------- /SparqlClient/SparqlClient.cabal: -------------------------------------------------------------------------------- 1 | name: SparqlClient 2 | version: 0.1.0.0 3 | synopsis: Simple project template from stack 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark watson 9 | copyright: 2016 Mark Watson 10 | category: Web 11 | build-type: Simple 12 | cabal-version: >=1.10 13 | 14 | executable TestSparqlClient 15 | hs-source-dirs: . 16 | main-is: TestSparqlClient.hs 17 | default-language: Haskell2010 18 | build-depends: base >= 4.7 && < 5, hsparql, mtl >= 2.2.2, rdf4h >= 3.0.0, text 19 | 20 | executable RobsExample 21 | hs-source-dirs: . 22 | main-is: RobsExample.hs 23 | default-language: Haskell2010 24 | build-depends: HTTP >= 4, base >= 4.7 && < 5, hsparql, mtl >= 2.2.2, rdf4h >= 3.0.0, text 25 | 26 | executable HttpSparqlClient 27 | hs-source-dirs: . 28 | main-is: HttpSparqlClient.hs 29 | default-language: Haskell2010 30 | build-depends: HTTP >= 4, HandsomeSoup, base >= 4.7 && < 5, bytestring, containers, http-conduit, hxt, mtl >= 2.2.2, text 31 | 32 | executable HttpSparqlJsonClient 33 | hs-source-dirs: . 34 | main-is: HttpSparqlJsonClient.hs 35 | default-language: Haskell2010 36 | build-depends: HTTP >= 4, HandsomeSoup, aeson, base >= 4.7 && < 5, bytestring, containers, http-conduit, hxt, json >= 0.10, mtl >= 2.2.2, text, yaml 37 | 38 | executable JsonTest 39 | hs-source-dirs: . 40 | main-is: JsonTest.hs 41 | default-language: Haskell2010 42 | build-depends: HTTP >= 4, HandsomeSoup, aeson, base >= 4.7 && < 5, bytestring, containers, http-conduit, hxt, json >= 0.10, mtl >= 2.2.2, text, yaml 43 | 44 | -------------------------------------------------------------------------------- /SparqlClient/TestSparqlClient.hs: -------------------------------------------------------------------------------- 1 | -- simple experiments with the excellent HSparql library 2 | 3 | {-# LANGUAGE OverloadedStrings #-} 4 | 5 | module Main where 6 | 7 | import Database.HSparql.Connection (BindingValue(Bound)) 8 | 9 | import Data.RDF hiding (triple) 10 | import Database.HSparql.QueryGenerator 11 | import Database.HSparql.Connection (selectQuery) 12 | 13 | webBrowserSelect :: Query SelectQuery 14 | webBrowserSelect = do 15 | resource <- prefix "dbprop" (iriRef "http://dbpedia.org/resource/") 16 | dbpprop <- prefix "dbpedia" (iriRef "http://dbpedia.org/property/") 17 | foaf <- prefix "foaf" (iriRef "http://xmlns.com/foaf/0.1/") 18 | x <- var 19 | name <- var 20 | triple x (dbpprop .:. "genre") (resource .:. "Web_browser") 21 | triple x (foaf .:. "name") name 22 | 23 | return SelectQuery { queryVars = [name] } 24 | 25 | companyAbstractSelect :: Query SelectQuery 26 | companyAbstractSelect = do 27 | resource <- prefix "dbprop" (iriRef "http://dbpedia.org/resource/") 28 | ontology <- prefix "ontology" (iriRef "http://dbpedia.org/ontology/") 29 | o <- var 30 | triple (resource .:. "Edinburgh_University_Press") (ontology .:. "abstract") o 31 | return SelectQuery { queryVars = [o] } 32 | 33 | companyTypeSelect :: Query SelectQuery 34 | companyTypeSelect = do 35 | resource <- prefix "dbprop" (iriRef "http://dbpedia.org/resource/") 36 | ontology <- prefix "ontology" (iriRef "http://dbpedia.org/ontology/") 37 | o <- var 38 | triple (resource .:. 
"Edinburgh_University_Press") (ontology .:. "type") o 39 | return SelectQuery { queryVars = [o] } 40 | 41 | main :: IO () 42 | main = do 43 | sq1 <- selectQuery "http://dbpedia.org/sparql" companyAbstractSelect 44 | --putStrLn "\nRaw results of company abstract SPARQL query:\n" 45 | --print sq1 46 | putStrLn "\nWeb browser names extracted from the company abstract query results:\n" 47 | case sq1 of 48 | Just a -> print $ map (\[Bound (LNode (PlainLL s _))] -> s) a 49 | Nothing -> putStrLn "nothing" 50 | sq2 <- selectQuery "http://dbpedia.org/sparql" companyTypeSelect 51 | --putStrLn "\nRaw results of company type SPARQL query:\n" 52 | --print sq2 53 | putStrLn "\nWeb browser names extracted from the company type query results:\n" 54 | case sq2 of 55 | Just a -> print $ map (\[Bound (UNode s)] -> s) a 56 | Nothing -> putStrLn "nothing" 57 | sq3 <- selectQuery "http://dbpedia.org/sparql" webBrowserSelect 58 | --putStrLn "\nRaw results of SPARQL query:\n" 59 | --print sq3 60 | putStrLn "\nWeb browser names extracted from the query results:\n" 61 | case sq3 of 62 | Just a -> print $ map (\[Bound (LNode (PlainLL s _))] -> s) a 63 | Nothing -> putStrLn "nothing" 64 | -------------------------------------------------------------------------------- /SparqlClient/stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-22.26 2 | 3 | allow-newer: false 4 | 5 | # flags: 6 | extra-package-dbs: [] 7 | packages: 8 | - '.' 9 | #- location: 10 | # git: https://github.com/robstewart57/rdf4h.git 11 | # commit: 046ea63f3c8f7636251b80695bee627306b8b2d2 12 | #- location: 13 | # git: https://github.com/robstewart57/hsparql.git 14 | # commit: 7f6e1952e44c5e6edf952d3493e2263ef689385b 15 | extra-deps: 16 | - MissingH-1.6.0.1 17 | - random-1.2.1.2 18 | - hgal-2.0.0.3 19 | - rdf4h-3.0.1 20 | - hsparql-0.2.9 21 | 22 | -------------------------------------------------------------------------------- /StateMonad/README.md: -------------------------------------------------------------------------------- 1 | # To run example: 2 | 3 | stack build 4 | stack exec State1 5 | -------------------------------------------------------------------------------- /StateMonad/State1.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Control.Monad.State 4 | 5 | incrementState :: State Int Int 6 | incrementState = do 7 | n <- get 8 | put (n + 1) 9 | return n 10 | 11 | -- same state monad without using a 'do' expression: 12 | incrementState2 :: State Int Int 13 | incrementState2 = get >>= \a -> 14 | put (a + 1) >>= \b -> 15 | return a 16 | 17 | bumpVals (a,b) = (a+1, b+2) 18 | 19 | main = do 20 | print $ runState incrementState 1 -- (1,2) == (return value, final state) 21 | print $ runState incrementState2 1 -- (1,2) == (return value, final state) 22 | print $ runState (mapState bumpVals incrementState) 1 -- (2,4) 23 | print $ evalState incrementState 1 -- 1 == return value 24 | print $ execState incrementState 1 -- 2 == final state 25 | 26 | -------------------------------------------------------------------------------- /StateMonad/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.26 5 | 6 | allow-newer: false 7 | 8 | # Local 
packages, usually specified by relative directory name 9 | packages: 10 | - '.' 11 | 12 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 13 | extra-deps: [] 14 | 15 | # Override default flag values for local packages and extra-deps 16 | flags: {} 17 | -------------------------------------------------------------------------------- /StateMonad/statemonad.cabal: -------------------------------------------------------------------------------- 1 | name: statemonad 2 | version: 0.1.0.0 3 | synopsis: state monad examples 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable State1 16 | hs-source-dirs: . 17 | main-is: State1.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, mtl 20 | -------------------------------------------------------------------------------- /TestingHaskell/README.md: -------------------------------------------------------------------------------- 1 | # build and run tests: 2 | 3 | stack test 4 | 5 | Note: one of the three tests should fail. 6 | -------------------------------------------------------------------------------- /TestingHaskell/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /TestingHaskell/TestingHaskell.cabal: -------------------------------------------------------------------------------- 1 | name: TestingHaskell 2 | version: 0.1.0.0 3 | synopsis: testing experiments 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | copyright: 2016 Mark Watson 10 | category: Testing 11 | build-type: Simple 12 | -- extra-source-files: 13 | cabal-version: >=1.10 14 | 15 | library 16 | hs-source-dirs: src 17 | exposed-modules: MyColors 18 | build-depends: base >= 4.7 && < 5 19 | default-language: Haskell2010 20 | 21 | executable TestingHaskell-exe 22 | hs-source-dirs: app 23 | main-is: Main.hs 24 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 25 | build-depends: base 26 | , TestingHaskell 27 | default-language: Haskell2010 28 | 29 | test-suite TestingHaskell-test 30 | type: exitcode-stdio-1.0 31 | hs-source-dirs: test, src 32 | main-is: Spec.hs 33 | build-depends: base 34 | , TestingHaskell 35 | , hspec 36 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 37 | default-language: Haskell2010 38 | 39 | source-repository head 40 | type: git 41 | location: https://github.com/githubuser/TestingHaskell 42 | -------------------------------------------------------------------------------- /TestingHaskell/app/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import MyColors 4 | 5 | main :: IO () 6 | main = do 7 | print $ Red < Green 8 | 9 | -------------------------------------------------------------------------------- /TestingHaskell/src/MyColors.hs: -------------------------------------------------------------------------------- 1 | module MyColors where 2 
| 3 | data MyColors = Orange | Red | Blue | Green | Silver 4 | deriving (Show, Eq) 5 | 6 | instance Ord MyColors where 7 | compare c1 c2 = compare (show c1) (show c2) 8 | -------------------------------------------------------------------------------- /TestingHaskell/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # http://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | # 11 | # resolver: lts-3.5 12 | # resolver: nightly-2015-09-21 13 | # resolver: ghc-7.10.2 14 | # resolver: ghcjs-0.1.0_ghc-7.10.2 15 | # resolver: 16 | # name: custom-snapshot 17 | # location: "./custom-snapshot.yaml" 18 | resolver: lts-22.26 19 | 20 | allow-newer: true 21 | 22 | # User packages to be built. 23 | # Various formats can be used as shown in the example below. 24 | # 25 | # packages: 26 | # - some-directory 27 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 28 | # - location: 29 | # git: https://github.com/commercialhaskell/stack.git 30 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 31 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 32 | # extra-dep: true 33 | # subdirs: 34 | # - auto-update 35 | # - wai 36 | # 37 | # A package marked 'extra-dep: true' will only be built if demanded by a 38 | # non-dependency (i.e. a user package), and its test suites and benchmarks 39 | # will not be run. This is useful for tweaking upstream packages. 40 | packages: 41 | - '.' 
42 | # Dependency packages to be pulled from upstream that are not in the resolver 43 | # (e.g., acme-missiles-0.3) 44 | extra-deps: [] 45 | 46 | # Override default flag values for local packages and extra-deps 47 | flags: {} 48 | 49 | # Extra package databases containing global packages 50 | extra-package-dbs: [] 51 | 52 | # Control whether we use the GHC we find on the path 53 | # system-ghc: true 54 | # 55 | # Require a specific version of stack, using version ranges 56 | # require-stack-version: -any # Default 57 | # require-stack-version: ">=1.1" 58 | # 59 | # Override the architecture used by stack, especially useful on Windows 60 | # arch: i386 61 | # arch: x86_64 62 | # 63 | # Extra directories used by stack for building 64 | # extra-include-dirs: [/path/to/dir] 65 | # extra-lib-dirs: [/path/to/dir] 66 | # 67 | # Allow a newer minor version of GHC than the snapshot specifies 68 | # compiler-check: newer-minor 69 | -------------------------------------------------------------------------------- /TestingHaskell/test/Spec.hs: -------------------------------------------------------------------------------- 1 | import Test.Hspec 2 | 3 | import MyColors 4 | 5 | main :: IO () 6 | main = hspec spec 7 | 8 | spec :: Spec 9 | spec = do 10 | describe "head" $ do 11 | it "test removing first list element" $ do 12 | head [1,2,3,4] `shouldBe` 1 13 | head ["the", "dog", "ran"] `shouldBe` "dog" -- should fail 14 | describe "MyColors tests" $ do 15 | it "test custom 'compare' function descending test" $ do 16 | MyColors.Green < MyColors.Red `shouldBe` True 17 | it "test custom 'compare' function ascending test" $ do 18 | Red > Silver `shouldBe` False 19 | -------------------------------------------------------------------------------- /TextProcessing/CleanText.hs: -------------------------------------------------------------------------------- 1 | -- {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Main where 4 | 5 | import Data.List.Split (splitOn) 6 | import Data.List (intercalate) 7 | import Data.Char as C 8 | import Data.List.Utils (replace) 9 | 10 | noiseCharacters = ['[', ']', '{', '}', '\n', '\t', '&', '^', 11 | '@', '%', '$', '#', ','] 12 | 13 | substituteNoiseCharacters :: [Char] -> [Char] 14 | substituteNoiseCharacters = 15 | map (\x -> if elem x noiseCharacters then ' ' else x) 16 | 17 | cleanText s = 18 | intercalate 19 | " " $ 20 | filter 21 | (\x -> length x > 0) $ 22 | splitOn " " $ substituteNoiseCharacters $ 23 | (replace "." " . " 24 | (replace "," " , " 25 | (replace ";" " ; " s))) 26 | 27 | stopWords = ["a", "the", "that", "of", "an", "and"] 28 | 29 | toLower' :: [Char] -> [Char] 30 | toLower' s = map (\x -> if isLower x then x else (C.toLower x)) s 31 | 32 | removeStopWords :: String -> [Char] 33 | removeStopWords s = 34 | intercalate 35 | " " $ 36 | filter 37 | (\x -> notElem (toLower' x) stopWords) $ 38 | words s 39 | 40 | main = do 41 | let ct = cleanText "The[]@] cat, and all the dogs, escaped&^. They were caught." 
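  -- cleanText pads '.', ',' and ';' with spaces, maps the noise characters
  -- (brackets, '@', '&', '^', ',', etc.) to spaces, and collapses the extra
  -- whitespace, so ct should be roughly:
  --   "The cat and all the dogs escaped . They were caught ."
  -- (the commas disappear because ',' is also listed in noiseCharacters).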
42 | print ct 43 | let nn = removeStopWords ct 44 | print nn 45 | -------------------------------------------------------------------------------- /TextProcessing/README.md: -------------------------------------------------------------------------------- 1 | # Run example 2 | 3 | stack build --exec CleanText 4 | stack build --exec TestAESON 5 | stack build --exec TestCSV 6 | stack build --exec TestTextJSON 7 | 8 | -------------------------------------------------------------------------------- /TextProcessing/TestAESON.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | 3 | module Main where 4 | 5 | import Data.Aeson 6 | import GHC.Generics 7 | import Data.Maybe 8 | 9 | data Person = Person {name::String, email::String } deriving (Show, Generic) 10 | 11 | -- nice trick from School Of Haskell tutorial on Aeson: 12 | instance FromJSON Person -- DeriveGeneric language setting allows 13 | instance ToJSON Person -- automatic generation of instance of 14 | -- types deriving Generic. 15 | 16 | main = do 17 | let a = encode $ Person "Sam" "sam@a.com" 18 | print a 19 | let (Just d) = (decode a :: Maybe Person) 20 | print d 21 | print $ name d 22 | print $ email d 23 | 24 | -------------------------------------------------------------------------------- /TextProcessing/TestCSV.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Text.CSV (parseCSVFromFile, CSV) 4 | import Data.Either.Unwrap (fromRight) 5 | 6 | readCsvFile :: FilePath -> IO CSV 7 | readCsvFile fname = do 8 | c <- parseCSVFromFile fname 9 | return $ fromRight c 10 | 11 | main = do 12 | c <- readCsvFile "test.csv" 13 | print c 14 | print $ map head c 15 | let header:rows = c 16 | print header 17 | print rows 18 | 19 | -------------------------------------------------------------------------------- /TextProcessing/TestTextJSON.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable #-} 2 | 3 | module Main where 4 | 5 | import Text.JSON.Generic 6 | 7 | data Person = Person {name::String, email::String } deriving (Show, Data, Typeable) 8 | 9 | main = do 10 | let a = encodeJSON $ Person "Sam" "sam@a.com" 11 | print a 12 | --let d = (decodeJSON a :: Person) 13 | let d = (decodeJSON a) 14 | print d 15 | print $ name d 16 | print $ email d 17 | 18 | -------------------------------------------------------------------------------- /TextProcessing/TextProcessing.cabal: -------------------------------------------------------------------------------- 1 | name: TextProcessing 2 | version: 0.1.0.0 3 | synopsis: text processing examples 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark watson 9 | copyright: 2016 Mark Watson 10 | category: TextProcessing 11 | build-type: Simple 12 | cabal-version: >=1.10 13 | 14 | executable TestCSV 15 | hs-source-dirs: . 16 | main-is: TestCSV.hs 17 | default-language: Haskell2010 18 | build-depends: base >= 4.7 && < 5, csv, either-unwrap >= 1.1 19 | 20 | executable TestTextJSON 21 | hs-source-dirs: . 22 | main-is: TestTextJSON.hs 23 | default-language: Haskell2010 24 | build-depends: base >= 4.7 && < 5, json >= 0.10 25 | 26 | executable TestAESON 27 | hs-source-dirs: . 
28 | main-is: TestAESON.hs 29 | default-language: Haskell2010 30 | build-depends: base >= 4.7 && < 5, aeson 31 | 32 | 33 | executable CleanText 34 | hs-source-dirs: . 35 | main-is: CleanText.hs 36 | default-language: Haskell2010 37 | build-depends: base >= 4.7 && < 5, split, text, MissingH 38 | -------------------------------------------------------------------------------- /TextProcessing/stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-22.26 2 | allow-newer: false 3 | 4 | # flags: 5 | extra-package-dbs: [] 6 | packages: 7 | - '.' 8 | extra-deps: 9 | - MissingH-1.6.0.1 10 | - random-1.2.1.2 11 | - either-unwrap-1.1 12 | - json-0.11 13 | 14 | -------------------------------------------------------------------------------- /TextProcessing/test.csv: -------------------------------------------------------------------------------- 1 | name, email, age 2 | John Smith, jsmith@acmetools.com, 41 3 | June Jones, jj@acmetools.com, 38 -------------------------------------------------------------------------------- /Timers/.gitignore: -------------------------------------------------------------------------------- 1 | temp.txt 2 | -------------------------------------------------------------------------------- /Timers/README.md: -------------------------------------------------------------------------------- 1 | # Building and running the TimerTest program 2 | 3 | ~~~~~~~~ 4 | stack build --exec TimerTest 5 | ~~~~~~~~ 6 | 7 | -------------------------------------------------------------------------------- /Timers/TimerTest.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Data.Time.Clock.POSIX -- for getPOSIXTime 4 | import System.TimeIt -- for timeIt 5 | import System.Timeout -- for timeout 6 | 7 | anyCalculationWillDo n = 8 | take n $ sieve [2..] 9 | where 10 | sieve (x:xs) = 11 | x:sieve [y | y <- xs, rem y x > 0] 12 | 13 | main = do 14 | startingTime <- getPOSIXTime 15 | print startingTime 16 | print $ last $ take 20000001 [0..] 17 | endingTime <- getPOSIXTime 18 | print endingTime 19 | print (endingTime - startingTime) 20 | 21 | timeIt $ print $ last $ anyCalculationWillDo 2000 22 | let somePrimes = anyCalculationWillDo 3333 in 23 | timeIt $ print $ last somePrimes 24 | 25 | -- 100000 microseconds timeout tests: 26 | timeout 100000 $ print "simple print statement did not timeout" 27 | timeout 100000 $ print $ last $ anyCalculationWillDo 4 28 | timeout 100000 $ print $ last $ anyCalculationWillDo 40 29 | timeout 100000 $ print $ last $ anyCalculationWillDo 400 30 | timeout 100000 $ print $ last $ anyCalculationWillDo 4000 31 | timeout 100000 $ print $ last $ anyCalculationWillDo 40000 32 | print $ anyCalculationWillDo 5 33 | 34 | 35 | -------------------------------------------------------------------------------- /Timers/Timers.cabal: -------------------------------------------------------------------------------- 1 | name: Timers 2 | version: 0.1.0.0 3 | synopsis: experiments with timers 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2016 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable TimerTest 16 | hs-source-dirs: . 
17 | main-is: TimerTest.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, time, timeit 20 | 21 | -------------------------------------------------------------------------------- /Timers/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.26 5 | 6 | allow-newer: false 7 | 8 | # Local packages, usually specified by relative directory name 9 | packages: 10 | - '.' 11 | 12 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 13 | extra-deps: [] 14 | 15 | # Override default flag values for local packages and extra-deps 16 | flags: {} 17 | 18 | -------------------------------------------------------------------------------- /WebScraping/README.md: -------------------------------------------------------------------------------- 1 | # HTTP Client Example 2 | 3 | ## Run examples using stack 4 | 5 | ```````` 6 | stack build --exec TagSoupTest 7 | ```````` 8 | 9 | ## Run examples using cabal: 10 | 11 | ```````` 12 | cabal build 13 | cabal run TagSoupTest 14 | ```````` 15 | 16 | -------------------------------------------------------------------------------- /WebScraping/TagSoupTest.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | import Network.HTTP.Simple 4 | import Text.HTML.TagSoup 5 | import Data.Text (Text) 6 | import qualified Data.Text as T 7 | import qualified Data.Text.IO as TIO 8 | import qualified Data.ByteString.Lazy.Char8 as BL8 9 | import Data.Maybe (mapMaybe) 10 | 11 | main :: IO () 12 | main = do 13 | -- Fetch the HTML content 14 | response <- httpLBS "https://markwatson.com/" 15 | let body = BL8.unpack $ getResponseBody response 16 | tags = parseTags body 17 | 18 | -- Extract and print headers 19 | let headers = getResponseHeaders response 20 | putStrLn "Headers:" 21 | mapM_ print headers 22 | 23 | -- Extract and print all text content 24 | let texts = extractTexts tags 25 | putStrLn "\nText Content:" 26 | TIO.putStrLn texts 27 | 28 | -- Extract and print all links 29 | let links = extractLinks tags 30 | putStrLn "\nLinks:" 31 | mapM_ TIO.putStrLn links 32 | 33 | -- Function to extract all text content from tags 34 | extractTexts :: [Tag String] -> Text 35 | extractTexts = T.unwords . map (T.strip . T.pack) . filter (not . null) . mapMaybe maybeTagText 36 | 37 | -- Function to extract all links from tags 38 | extractLinks :: [Tag String] -> [Text] 39 | extractLinks = map (T.pack . fromAttrib "href") . filter isATag 40 | where 41 | isATag (TagOpen "a" _) = True 42 | isATag _ = False -------------------------------------------------------------------------------- /WebScraping/WebScraping.cabal: -------------------------------------------------------------------------------- 1 | name: WebScraping 2 | version: 0.1.0.0 3 | synopsis: web scraping examples 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2015 Mark Watson. 
All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable TagSoupTest 16 | hs-source-dirs: . 17 | main-is: TagSoupTest.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, wreq, lens, bytestring, 20 | json >= 0.10, text >=2.1, 21 | http-conduit >= 2.3.8.3, tagsoup >= 0.14.8 22 | -------------------------------------------------------------------------------- /WebScraping/stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-22.26 2 | allow-newer: false 3 | 4 | flags: {} 5 | packages: 6 | - . 7 | extra-deps: 8 | - json-0.11 9 | -------------------------------------------------------------------------------- /debugging/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Mark Watson (c) 2015 2 | 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of Mark Watson nor the names of other 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /debugging/README.md: -------------------------------------------------------------------------------- 1 | # Debug-only tracing 2 | 3 | ## Run example 4 | 5 | ```````` 6 | stack build --exec TraceTimerTest 7 | ```````` 8 | -------------------------------------------------------------------------------- /debugging/TraceTimerTest.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import Debug.Trace (trace, traceShow) -- for debugging only! 4 | 5 | anyCalculationWillDo n = 6 | trace 7 | ("+++ anyCalculationWillDo: " ++ show n) $ 8 | anyCalculationWillDo' n 9 | 10 | anyCalculationWillDo' n = 11 | take n $ trace (" -- sieve n:" ++ (show n)) $ sieve [2..] 
12 | where 13 | sieve (x:xs) = 14 | traceShow (" -- inside sieve recursion") $ 15 | x:sieve [y | y <- xs, rem y x > 0] 16 | 17 | main = do 18 | print $ anyCalculationWillDo 5 19 | 20 | 21 | -------------------------------------------------------------------------------- /debugging/debugging.cabal: -------------------------------------------------------------------------------- 1 | name: debugging 2 | version: 0.1.0.0 3 | synopsis: debugging 4 | description: Please see README.md 5 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 6 | license: Apache-2.0 7 | license-file: ../APACHE_LICENSE-2.0.txt 8 | author: Mark Watson 9 | maintainer: markw@markwatson.com 10 | copyright: Copyright 2018 Mark Watson. All rights reserved 11 | category: dev 12 | build-type: Simple 13 | cabal-version: >=1.22.4.0 14 | 15 | executable TraceTimerTest 16 | hs-source-dirs: . 17 | main-is: TraceTimerTest.hs 18 | default-language: Haskell2010 19 | build-depends: base >= 4.7 && < 5, time, timeit 20 | 21 | -------------------------------------------------------------------------------- /debugging/stack.yaml: -------------------------------------------------------------------------------- 1 | # For more information, see: https://github.com/commercialhaskell/stack/blob/master/doc/yaml_configuration.md 2 | 3 | # Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2) 4 | resolver: lts-22.26 5 | 6 | allow-newer: false 7 | 8 | # Local packages, usually specified by relative directory name 9 | packages: 10 | - '.' 11 | 12 | # Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3) 13 | extra-deps: 14 | - random-1.2.1.2 15 | 16 | 17 | # Override default flag values for local packages and extra-deps 18 | flags: {} 19 | 20 | # Control whether we use the GHC we find on the path 21 | # system-ghc: true 22 | 23 | # Require a specific version of stack, using version ranges 24 | # require-stack-version: -any # Default 25 | # require-stack-version: >= 0.1.4.0 26 | 27 | # Override the architecture used by stack, especially useful on Windows 28 | # arch: i386 29 | # arch: x86_64 30 | 31 | # Extra directories used by stack for building 32 | # extra-include-dirs: [/path/to/dir] 33 | # extra-lib-dirs: [/path/to/dir] 34 | -------------------------------------------------------------------------------- /gemini_commandline/README.md: -------------------------------------------------------------------------------- 1 | # Command Line Utility To Use the Google Gemini APIs 2 | 3 | This example is similar to the example in ../webchat but here we build a command line application, not a web application to use the Google Gemini LLM APIs. 4 | 5 | Example: 6 | 7 | ``` 8 | $ gemini "what is the square of pi?" 9 | Response: 10 | 11 | The square of pi (π) is π multiplied by itself: π². Since π is approximately 3.14159, π² is approximately 9.8696. 12 | ``` 13 | 14 | The executable file **gemini** is on my path because I copied the executable file to my personal bin directory: 15 | 16 | ``` 17 | $ cabal build 18 | $ find . -name gemini 19 | ... output not shown 20 | $ cp ./dist-newstyle/build/aarch64-osx/ghc-9.4.8/gemini-0.1.0.0/x/gemini/build/gemini/gemini ~/bin 21 | ``` 22 | 23 | If you don’t want to permanently install this example on your laptop, then just run it with cabal: 24 | 25 | ``` 26 | $ cabal run gemini "what is 11 + 23?" 
27 | Response: 28 | 29 | 11 + 23 = 34 30 | ``` 31 | -------------------------------------------------------------------------------- /gemini_commandline/gemini.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 2.4 2 | name: gemini 3 | version: 0.1.0.0 4 | -- synopsis: 5 | -- description: 6 | license: MIT 7 | license-file: LICENSE 8 | author: Bard 9 | maintainer: Bard 10 | -- copyright: 11 | -- category: 12 | build-type: Simple 13 | extra-source-files: CHANGELOG.md 14 | 15 | executable gemini 16 | main-is: Main.hs 17 | -- other-modules: 18 | -- other-extensions: 19 | build-depends: base >= 4.7 && < 5 20 | , aeson 21 | , bytestring 22 | , text 23 | , http-client 24 | , http-client-tls 25 | , http-types 26 | , directory 27 | , vector 28 | if os(darwin) 29 | ghc-options: 30 | ld-options: 31 | 32 | -- Language extensions used in the code 33 | default-extensions: 34 | OverloadedStrings 35 | DeriveGeneric 36 | LambdaCase 37 | DeriveAnyClass 38 | 39 | program-default-options 40 | hsc2hs: --with-hsc2hs=/opt/homebrew/bin/hsc2hs 41 | 42 | -- Compiler flags 43 | ghc-options: -Wall -O2 44 | 45 | 46 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/.gitignore: -------------------------------------------------------------------------------- 1 | .stack-work/ 2 | .DS_Store 3 | *.n3 4 | *.rdf 5 | *.cypher 6 | *~ 7 | .vscode 8 | 9 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/KGCreator.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 1.12 2 | 3 | name: KGCreator 4 | version: 0.1.0.0 5 | description: Please see the README on GitHub at 6 | homepage: https://github.com/mark-watson/haskell_tutorial_cookbook_examples 7 | author: Mark Watson 8 | maintainer: mark@markwatson.com 9 | copyright: 2019 Mark Watson 10 | license: Apache-2.0 11 | license-file: ../APACHE_LICENSE-2.0.txt 12 | build-type: Simple 13 | extra-source-files: 14 | README.md 15 | 16 | source-repository head 17 | type: git 18 | location: https://github.com/githubuser/KGCreator 19 | 20 | library 21 | exposed-modules: 22 | CorefWebClient 23 | NlpWebClient 24 | ClassificationWebClient 25 | DirUtils 26 | FileUtils 27 | BlackBoard 28 | GenTriples 29 | GenNeo4jCypher 30 | Apis 31 | Categorize 32 | NlpUtils 33 | Summarize 34 | Entities 35 | other-modules: 36 | Paths_KGCreator 37 | BroadcastNetworkNamesDbPedia 38 | Category1Gram 39 | Category2Gram 40 | CityNamesDbpedia 41 | CompanyNamesDbpedia 42 | CountryNamesDbpedia 43 | PeopleDbPedia 44 | PoliticalPartyNamesDbPedia 45 | Sentence 46 | Stemmer 47 | TradeUnionNamesDbPedia 48 | UniversityNamesDbPedia 49 | 50 | hs-source-dirs: 51 | src 52 | src/webclients 53 | src/fileutils 54 | src/sw 55 | src/toplevel 56 | ../NlpTool/src/nlp 57 | ../NlpTool/src/nlp/data 58 | 59 | build-depends: 60 | base >=4.7 && <5, wreq, lens, bytestring 61 | , hxt, json, uri-encode >= 1.5.0.5, MissingH 62 | , aeson >= 1.4.2.0, containers, split >= 0.2.3.3, text 63 | , directory, yaml, aeson, string-conversions 64 | , MissingH, sqlite-simple 65 | default-language: Haskell2010 66 | 67 | executable KGCreator-exe 68 | main-is: Main.hs 69 | other-modules: 70 | Paths_KGCreator 71 | hs-source-dirs: 72 | app 73 | ../NlpTool/src/nlp 74 | ../NlpTool/src/nlp/data 75 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 76 | build-depends: 77 | KGCreator 78 | , base >=4.7 && <5 79 | , MissingH 80 | , directory, 
containers >= 0.6.0.1, split >= 0.2.3.3 81 | default-language: Haskell2010 82 | 83 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/Makefile: -------------------------------------------------------------------------------- 1 | all: gendata rdf cypher 2 | 3 | gendata: 4 | stack build --fast --exec Dev-exe 5 | 6 | rdf: 7 | echo "Removing duplicate RDF statements" 8 | awk '!visited[$$0]++' out.n3 > output.n3 9 | rm -f out.n3 10 | 11 | cypher: 12 | echo "Removing duplicate Cypher statements" 13 | awk '!visited[$$0]++' out.cypher > output.cypher 14 | rm -f out.cypher 15 | 16 | clean: 17 | rm -f *~ */*~ 18 | 19 | run: 20 | ../python-intelligent-systems/spacy_nlp_web_service/bin/spacynlpserver > /tmp/AISspacynlp.txt & 21 | ../python-intelligent-systems/coref_anaphora_resolution_web_service/bin/corefserver > /tmp/AISspacycoref.txt & 22 | 23 | stop: 24 | pkill -f corefserver 25 | pkill -f spacynlpserver 26 | 27 | tidy: 28 | cd src/fileutils; stylish-haskell -i *.hs; hindent *.hs 29 | cd src/nlp; stylish-haskell -i *.hs; hindent *.hs 30 | cd src/sw; stylish-haskell -i *.hs; hindent *.hs 31 | cd src/webclients; stylish-haskell -i *.hs; hindent *.hs 32 | cd src/toplevel; stylish-haskell -i *.hs; hindent *.hs 33 | cd test; stylish-haskell -i *.hs; hindent *.hs 34 | cd app; stylish-haskell -i *.hs; hindent *.hs 35 | cd dev; stylish-haskell -i *.hs; hindent *.hs 36 | 37 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/NEO4J_NOTES.md: -------------------------------------------------------------------------------- 1 | # login password 2 | 3 | account: neo4j 4 | password: myself 5 | 6 | # remove local database to start over 7 | 8 | cd ~/bin/neo4j 9 | rm -rf data/databases/graph.db 10 | 11 | # run web interface 12 | 13 | cd ~/bin/neo4j 14 | ./bin/neo4j console 15 | 16 | use http://localhost:7474/ and click Neo4J graphic in upper left to get to tutorials and console 17 | 18 | # Loading Data 19 | 20 | Sample data files: https://neo4j.com/developer/example-data/ 21 | 22 | not so useful 23 | 24 | # Loading RDF data 25 | 26 | Neo4J plugin: https://github.com/jbarrasa/neosemantics 27 | 28 | 29 | # good article on converting RDF data for Neo4J 30 | 31 | https://jbarrasa.com/2016/06/07/importing-rdf-data-into-neo4j/ 32 | 33 | Rule1: Subjects of triples are mapped to nodes in Neo4j. A node in Neo4j representing an RDF resource will be labeled :Resource and have a property uri with the resource’s URI. 34 | 35 | (S,P,O) => (:Resource {uri:S})... 36 | 37 | Rule2a: Predicates of triples are mapped to node properties in Neo4j if the object of the triple is a literal 38 | 39 | (S,P,O) && isLiteral(O) => (:Resource {uri:S, P:O}) 40 | 41 | Rule 2b: Predicates of triples are mapped to relationships in Neo4j if the object of the triple is a resource 42 | 43 | (S,P,O) && !isLiteral(O) => (:Resource {uri:S})-[:P]->(:Resource {uri:O}) 44 | 45 | Example, convert this (and a few more): 46 | 47 | 48 | 49 | . 50 | 51 | 52 | 53 | . 54 | 55 | to (NOTE! 
not the same data, I wanted a shared node): 56 | 57 | CREATE (z902:News {name:"z902", uri:"https://newsshop.com/june/z902.html"}) 58 | CREATE (weather:News {name:"weather", uri:"http://knowledgebooks.com/schema/topic/weather"}) 59 | CREATE (canada:News {name:"Canada", uri:"http://dbpedia.org/resource/Canada"}) 60 | CREATE (z902)-[:Category]->(weather) 61 | CREATE (z902)-[:ContainsCountryDbPediaLink]->(canada) 62 | CREATE (a1023:News {name:"a1023", uri:"https://newsshop.com/may/a1023.html"}) 63 | CREATE (a1023)-[:ContainsCountryDbPediaLink]->(canada) 64 | 65 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/README.md: -------------------------------------------------------------------------------- 1 | # KGCreator 2 | 3 | Licensed under the AGPL version 3. See https://markwatson.com/opensource/ for an alternative commercial license ($50). 4 | 5 | ## CorefWebClient Client Example 6 | 7 | ### Run examples 8 | 9 | Main program with fast build option: 10 | 11 | ~~~~~~~~ 12 | stack build --fast --exec KGCreator-exe 13 | stack build --fast --exec "KGCreator-exe test_data outtest" 14 | ~~~~~~~~ 15 | 16 | Main program, recompile everything: 17 | 18 | ~~~~~~~~ 19 | stack build --exec KGCreator-exe 20 | ~~~~~~~~ 21 | 22 | ## for Makefile tidy target: 23 | 24 | stack build hindent stylish-haskell 25 | 26 | ## Problems with Haskero VSCode plugin? 27 | 28 | try: 29 | 30 | stack build intero 31 | 32 | in this project's directory. This installs intero locally. System side 33 | installation does not seem to work with VSCode and Haskero. 34 | 35 | Set key binding to add function type: setup > key bindings, search for haskero add type: 36 | 37 | add: 38 | 39 | { 40 | "key": "ctrl+k t", 41 | "command": "haskero.insertType", 42 | "when": "editorTextFocus" 43 | } 44 | 45 | 46 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | 3 | main = defaultMain 4 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/TBD_FIX.md: -------------------------------------------------------------------------------- 1 | # check for "New" in sentence, then don't take the next word as a lookup item in WikiPedia data 2 | 3 | example: "England" found as a link, but sentence contained "New England" 4 | 5 | # augment WikiPedia map data to inlude: 6 | 7 | - US state names 8 | - as a one-off, add "New England" 9 | 10 | # change categries like "news_economy" to "" 11 | 12 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/app/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import System.Environment (getArgs) 4 | import Apis (processFilesToRdf, processFilesToNeo4j) 5 | 6 | main :: IO () 7 | main 8 | -- TBD: add command line argument processing 9 | = do 10 | args <- getArgs 11 | case args of 12 | [] -> error "must supply an input directory containing text and meta files" 13 | [_] -> error "in addition to an input directory, also specify a root file name for the generated RDF and Cypher files" 14 | [inputDir, outputFileRoot] -> do 15 | processFilesToRdf inputDir $ outputFileRoot ++ ".n3" 16 | processFilesToNeo4j inputDir $ outputFileRoot ++ ".cypher" 17 | _ -> error "too many arguments" 18 | 
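-- Usage note (taken from the project README above; "outtest" is the example
-- output file root used there):
--   stack build --fast --exec "KGCreator-exe test_data outtest"
-- writes the generated RDF triples to outtest.n3 and the equivalent Neo4j
-- Cypher statements to outtest.cypher, via processFilesToRdf and
-- processFilesToNeo4j from the Apis module.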
-------------------------------------------------------------------------------- /knowledge_graph_creator_pure/dev/Dev.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE FlexibleContexts #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE ScopedTypeVariables #-} 4 | 5 | module Main where 6 | 7 | -- this file is just for testing/dev 8 | import Apis (processFilesToNeo4j, processFilesToRdf) 9 | import BlackBoard 10 | import Categorize 11 | import ClassificationWebClient 12 | import CorefWebClient 13 | import DirUtils 14 | import Entities 15 | import FileUtils 16 | import GenNeo4jCypher 17 | import GenTriples 18 | import NlpUtils 19 | import NlpWebClient 20 | 21 | import Control.Monad (forM, mapM) 22 | import Data.String.Utils (replace) 23 | import System.Directory (getDirectoryContents) 24 | 25 | -- for debug: 26 | import Data.Typeable (typeOf) 27 | 28 | import Database.SQLite.Simple 29 | 30 | testWebServices :: IO () 31 | testWebServices 32 | -- JSON from Coref server (written in Python) 33 | = do 34 | s1 <- corefClient "My sister has a dog. She loves him" 35 | putStrLn $ s1 36 | s2 <- nlpClient "John Smith went to Mexico." 37 | putStrLn $ s2 38 | 39 | testNlp = do 40 | let s3 = 41 | "The sport of hocky is about 100 years old by ahdi dates. American Football is a newer sport. Programming is fun. Congress passed a new budget that might help the economy. The frontier initially was a value path. The ai research of john mccarthy." 42 | putStrLn "\nbest categories:\n" 43 | print $ bestCategories (splitWords s3) 44 | let s4 = 45 | "As read in the San Francisco Chronicle, the company is owned by John Smith, Bill Clinton, Betty Sanders, and Dr. Ben Jones. Ben Jones and Mr. John Smith are childhood friends who grew up in Brazil, Canada, Buenos Aires, and the British Virgin Islands. Apple Computer relased a new version of OS X yesterday. Brazil Brazil Brazil. John Smith bought stock in ConocoPhillips, Heinz, Hasbro, and General Motors, Fox Sports Radio. I listen to B J Cole. Awami National Party is a political party. ALAEA is a trade union. She went to Brandeis University." 46 | putStrLn "\nentity tests:\n" 47 | print $ peopleNames $ splitWordsKeepCase s4 48 | print $ countryNames $ splitWordsKeepCase s4 49 | print $ companyNames $ splitWordsKeepCase s4 50 | print $ cityNames $ splitWordsKeepCase s4 51 | print $ broadcastNetworkNames $ splitWordsKeepCase s4 52 | print $ politicalPartyNames $ splitWordsKeepCase s4 53 | print $ tradeUnionNames $ splitWordsKeepCase s4 54 | print $ universityNames $ splitWordsKeepCase s4 55 | putStrLn "\ndirectory iteration test:\n" 56 | iterateOverDir "." putStrLn 57 | some_words <- filePathToWordTokens "test_data/test1.txt" 58 | print some_words 59 | some_json <- readMetaFile "test_data/test1.meta" 60 | print $ show some_json 61 | print $ uri some_json 62 | putStrLn "\n" 63 | triples_as_string <- 64 | textToTriples "test_data/test1.txt" "test_data/test1.meta" 65 | putStrLn triples_as_string 66 | 67 | main :: IO () 68 | main = do 69 | putStrLn "KGcreator: files -> RDF triples and Cypher Neo4j data" 70 | --testWebServices 71 | --testNlp 72 | processFilesToRdf "test_data" "out.n3" 73 | putStrLn "Before processFilesToNeo4j...." 74 | processFilesToNeo4j "test_data" "out.cypher" 75 | putStrLn ".... 
after processFilesToNeo4j" 76 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/dev/DevSummarize.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | -- this file is just for testing/dev 4 | import Categorize 5 | import FileUtils (filePathToString) 6 | import NlpUtils 7 | import Summarize 8 | 9 | import Control.Monad (forM, mapM) 10 | import Data.String.Utils (replace) 11 | import System.Directory (getDirectoryContents) 12 | 13 | -- for debug: 14 | import Data.Typeable (typeOf) 15 | 16 | testNlp = do 17 | s1 <- filePathToString "test_data/test1.txt" 18 | putStrLn "\nSummaries:\n" 19 | print $ summarize s1 20 | print $ summarizeS s1 21 | putStrLn "\nEvidence: categories for this text:" 22 | let words = splitWords $ cleanText s1 23 | bestCats = bestCategories words 24 | print bestCats 25 | putStrLn "\n" 26 | s2 <- filePathToString "test_data/test2.txt" 27 | print $ summarize s2 28 | print $ summarizeS s2 29 | putStrLn "\nEvidence: categories for this text:" 30 | let words = splitWords $ cleanText s2 31 | bestCats = bestCategories words 32 | print bestCats 33 | putStrLn "\n" 34 | s3 <- filePathToString "test_data/test3.txt" 35 | print $ summarize s3 36 | print $ summarizeS s3 37 | putStrLn "\nEvidence: categories for this text:" 38 | let words = splitWords $ cleanText s3 39 | bestCats = bestCategories words 40 | print bestCats 41 | 42 | main :: IO () 43 | main = do 44 | putStrLn "DevSummarize:" 45 | testNlp 46 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/python_utils/sparql_file_query_example.py: -------------------------------------------------------------------------------- 1 | import rdflib.graph as g 2 | from pprint import pprint 3 | 4 | graph = g.Graph() 5 | graph.parse('output.n3', format='n3') 6 | 7 | results = graph.query(""" 8 | SELECT ?s ?p ?o 9 | WHERE { 10 | ?s ?p ?o. 11 | } 12 | ORDER BY (?p) 13 | """) 14 | 15 | for result in results: 16 | pprint(result) 17 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/src/fileutils/BlackBoard.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module BlackBoard 4 | ( blackboard_init 5 | , blackboard_write 6 | , blackboard_check_key 7 | ) where 8 | 9 | import Data.ByteString.Lazy.Char8 () 10 | import Data.Monoid ((<>), mappend, mempty) 11 | import Database.SQLite.Simple 12 | 13 | import qualified Database.SQLite.Simple as SQL 14 | 15 | -- for debug: 16 | import Data.Typeable (typeOf) 17 | 18 | blackboard_init = do 19 | conn <- SQL.open "temp.db" 20 | putStrLn "Creating table blackboard" 21 | execute_ conn "drop table if exists blackboard;" 22 | execute_ conn "create table blackboard (key text);" 23 | close conn 24 | 25 | blackboard_write key = do 26 | conn <- SQL.open "temp.db" 27 | execute conn "INSERT INTO blackboard (key) VALUES (?)" (Only (key :: String)) 28 | close conn 29 | 30 | blackboard_check_key key = do 31 | conn <- SQL.open "temp.db" 32 | r <- 33 | query conn "SELECT key FROM blackboard WHERE key = ?" 
(Only (key :: String)) :: IO [Only String] 34 | close conn 35 | return ((length r) > 0) 36 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/src/fileutils/DirUtils.hs: -------------------------------------------------------------------------------- 1 | module DirUtils 2 | ( iterateOverDir 3 | , iterateOverDirWithFilePattern 4 | ) where 5 | 6 | import Control.Monad (forM, mapM) 7 | import System.Directory (getCurrentDirectory, getDirectoryContents) 8 | 9 | iterateOverDir :: FilePath -> ([Char] -> IO b) -> IO () 10 | iterateOverDir dir_path fn = do 11 | files <- getDirectoryContents dir_path :: IO [FilePath] 12 | print files 13 | let filtered_files = filter (/= "..") $ filter (/= ".") files 14 | mapM_ fn filtered_files 15 | 16 | iterateOverDirWithFilePattern dirPath filterFn fn = do 17 | files <- getDirectoryContents dirPath :: IO [FilePath] 18 | print files 19 | let filtered_files = filter filterFn files 20 | putStrLn "filtered_files:" 21 | print filtered_files 22 | let full_paths = [dirPath ++ "/" ++ fn | fn <- filtered_files] 23 | putStrLn "full_paths:" 24 | print full_paths 25 | mapM_ fn full_paths 26 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/src/fileutils/FileUtils.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveDataTypeable #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | 4 | module FileUtils 5 | ( filePathToWordTokens 6 | , filePathToString 7 | , readMetaFile 8 | , uri 9 | , MyMeta 10 | , isTextFile 11 | , isMetaFile 12 | ) where 13 | 14 | import Control.Monad 15 | import Data.List 16 | import System.IO 17 | import Data.Char (toLower) 18 | 19 | import Database.SQLite.Simple 20 | 21 | import Text.JSON.Generic 22 | 23 | import NlpUtils (splitWordsKeepCase) 24 | 25 | filePathToWordTokens :: FilePath -> IO [String] 26 | filePathToWordTokens file_path = do 27 | handle <- openFile file_path ReadMode 28 | contents <- hGetContents handle 29 | let some_words = splitWordsKeepCase contents 30 | return some_words 31 | 32 | filePathToString file_path = do 33 | handle <- openFile file_path ReadMode 34 | contents <- hGetContents handle 35 | return contents 36 | 37 | data MyMeta = 38 | MyMeta 39 | { uri :: String, 40 | similar_docs :: [String] 41 | } 42 | deriving (Show, Data, Typeable) 43 | 44 | readMetaFile :: [Char] -> IO MyMeta 45 | readMetaFile file_path = do 46 | putStr $ concat ["++ readMetaFile ", show file_path, "\n"] 47 | handle <- openFile file_path ReadMode 48 | s <- (hGetContents handle) 49 | let meta = (decodeJSON s :: MyMeta) 50 | return meta 51 | -- unused for now, but may be used later: 52 | 53 | findSubstring pat str = findIndex (isPrefixOf pat) (tails str) 54 | 55 | isTextFile file_name = (findSubstring ".txt" file_name) /= Nothing 56 | 57 | isMetaFile file_name = (findSubstring ".meta" file_name) /= Nothing 58 | 59 | -- utilities from vmchale's hgis project: 60 | 61 | stripFileExtension = reverse . drop 1 . dropWhile (/= '.') . reverse 62 | 63 | getFileExtension = fmap toLower . reverse . takeWhile (/= '.') . 
reverse 64 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/src/sw/GenNeo4jCypher.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module GenNeo4jCypher 4 | ( textToCypher 5 | , neo4j_category_node_defs 6 | ) where 7 | 8 | import Categorize (bestCategories) 9 | import Data.List (isInfixOf) 10 | import Data.Char (toLower) 11 | import Data.String.Utils (replace) 12 | import Entities 13 | ( broadcastNetworkNames 14 | , cityNames 15 | , companyNames 16 | , countryNames 17 | , peopleNames 18 | , politicalPartyNames 19 | , tradeUnionNames 20 | , universityNames 21 | ) 22 | import FileUtils 23 | ( MyMeta 24 | , filePathToString 25 | , filePathToWordTokens 26 | , readMetaFile 27 | , uri 28 | ) 29 | import GenTriples (category_to_uri_map) 30 | import Summarize (summarize, summarizeS) 31 | 32 | import qualified Data.Map as M 33 | import Data.Maybe (fromMaybe) 34 | 35 | neo4j_category_node_defs :: [Char] 36 | neo4j_category_node_defs = 37 | replace 38 | "/" 39 | "_" 40 | $ concat 41 | [ "CREATE (" ++ c ++ ":CategoryType {name:\"" ++ c ++ "\"})\n" 42 | | c <- M.keys category_to_uri_map 43 | ] 44 | 45 | uri_from_category :: p -> p 46 | uri_from_category s = s -- might want the full version from GenTriples 47 | 48 | repl :: Char -> Char 49 | repl '-' = '_' 50 | repl '/' = '_' 51 | repl '.' = '_' 52 | repl c = c 53 | 54 | filterChars :: [Char] -> [Char] 55 | filterChars = filter (\c -> c /= '?' && c /= '=' && c /= '<' && c /= '>') 56 | 57 | create_neo4j_node :: [Char] -> ([Char], [Char]) 58 | create_neo4j_node uri = 59 | let name = 60 | (map repl (filterChars 61 | (replace "https://" "" (replace "http://" "" uri)))) ++ 62 | "_" ++ 63 | (map toLower node_type) 64 | node_type = 65 | if isInfixOf "dbpedia" uri 66 | then "DbPedia" 67 | else "News" 68 | new_node = 69 | "CREATE (" ++ 70 | name ++ ":" ++ 71 | node_type ++ " {name:\"" ++ (replace " " "_" name) ++ 72 | "\", uri:\"" ++ uri ++ "\"})\n" 73 | in (name, new_node) 74 | 75 | create_neo4j_link :: [Char] -> [Char] -> [Char] -> [Char] 76 | create_neo4j_link node1 linkName node2 = 77 | "CREATE (" ++ node1 ++ ")-[:" ++ linkName ++ "]->(" ++ node2 ++ ")\n" 78 | 79 | create_summary_node :: [Char] -> [Char] -> [Char] 80 | create_summary_node uri summary = 81 | let name = 82 | "summary_of_" ++ 83 | (map repl $ 84 | filterChars (replace "https://" "" (replace "http://" "" uri))) 85 | s1 = "CREATE (" ++ name ++ ":Summary {name:\"" ++ name ++ "\", uri:\"" 86 | s2 = uri ++ "\", summary:\"" ++ summary ++ "\"})\n" 87 | in s1 ++ s2 88 | 89 | create_entity_node :: ([Char], [Char]) -> [Char] 90 | create_entity_node entity_pair = 91 | "CREATE (" ++ (replace " " "_" (fst entity_pair)) ++ 92 | ":Entity {name:\"" ++ (fst entity_pair) ++ "\", uri:\"" ++ 93 | (snd entity_pair) ++ "\"})\n" 94 | 95 | create_contains_entity :: [Char] -> [Char] -> ([Char], [Char]) -> [Char] 96 | create_contains_entity relation_name source_uri entity_pair = 97 | let new_person_node = create_entity_node entity_pair 98 | new_link = create_neo4j_link source_uri 99 | relation_name 100 | (replace " " "_" (fst entity_pair)) 101 | in 102 | (new_person_node ++ new_link) 103 | 104 | entity_node_helper :: [Char] -> [Char] -> [([Char], [Char])] -> [Char] 105 | entity_node_helper relation_name node_name entity_list = 106 | concat [create_contains_entity 107 | relation_name node_name entity | entity <- entity_list] 108 | 109 | textToCypher :: FilePath -> [Char] -> IO 
[Char] 110 | textToCypher file_path meta_file_path = do 111 | let prelude_nodes = neo4j_category_node_defs 112 | putStrLn "+++++++++++++++++ prelude node defs:" 113 | print prelude_nodes 114 | word_tokens <- filePathToWordTokens file_path 115 | contents <- filePathToString file_path 116 | putStrLn $ "** contents:\n" ++ contents ++ "\n" 117 | meta_data <- readMetaFile meta_file_path 118 | putStrLn "++ meta_data:" 119 | print meta_data 120 | let people = peopleNames word_tokens 121 | let companies = companyNames word_tokens 122 | putStrLn "^^^^ companies:" 123 | print companies 124 | let countries = countryNames word_tokens 125 | let cities = cityNames word_tokens 126 | let broadcast_networks = broadcastNetworkNames word_tokens 127 | let political_parties = politicalPartyNames word_tokens 128 | let trade_unions = tradeUnionNames word_tokens 129 | let universities = universityNames word_tokens 130 | let a_summary = summarizeS contents 131 | let the_categories = bestCategories word_tokens 132 | let filtered_categories = 133 | map (uri_from_category . fst) $ 134 | filter (\(name, value) -> value > 0.3) the_categories 135 | putStrLn "\nfiltered_categories:" 136 | print filtered_categories 137 | let (node1_name, node1) = create_neo4j_node (uri meta_data) 138 | let summary1 = create_summary_node (uri meta_data) a_summary 139 | let category1 = 140 | concat 141 | [ create_neo4j_link node1_name "Category" cat 142 | | cat <- filtered_categories 143 | ] 144 | let pp = entity_node_helper "ContainsPersonDbPediaLink" node1_name people 145 | let cmpny = entity_node_helper "ContainsCompanyDbPediaLink" node1_name companies 146 | let cntry = entity_node_helper "ContainsCountryDbPediaLink" node1_name countries 147 | let citys = entity_node_helper "ContainsCityDbPediaLink" node1_name cities 148 | let bnet = entity_node_helper "ContainsBroadcastNetworkDbPediaLink" 149 | node1_name broadcast_networks 150 | let ppart = entity_node_helper "ContainsPoliticalPartyDbPediaLink" 151 | node1_name political_parties 152 | let tunion = entity_node_helper "ContainsTradeUnionDbPediaLink" 153 | node1_name trade_unions 154 | let uni = entity_node_helper "ContainsUniversityDbPediaLink" 155 | node1_name universities 156 | return $ concat [node1, summary1, category1, pp, cmpny, cntry, citys, bnet, 157 | ppart, tunion, uni] 158 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/src/toplevel/Apis.hs: -------------------------------------------------------------------------------- 1 | module Apis 2 | ( processFilesToRdf 3 | , processFilesToNeo4j 4 | ) where 5 | 6 | import FileUtils 7 | import GenNeo4jCypher 8 | import GenTriples (textToTriples) 9 | 10 | import qualified Database.SQLite.Simple as SQL 11 | 12 | import Control.Monad (mapM) 13 | import Data.String.Utils (replace) 14 | import System.Directory (getDirectoryContents) 15 | 16 | import Data.Typeable (typeOf) 17 | 18 | processFilesToRdf :: FilePath -> FilePath -> IO () 19 | processFilesToRdf dirPath outputRdfFilePath = do 20 | files <- getDirectoryContents dirPath :: IO [FilePath] 21 | let filtered_files = filter isTextFile files 22 | let full_paths = [dirPath ++ "/" ++ fn | fn <- filtered_files] 23 | putStrLn "full_paths:" 24 | print full_paths 25 | let r = 26 | [textToTriples fp1 (replace ".txt" ".meta" fp1) 27 | | 28 | fp1 <- full_paths] :: [IO [Char]] 29 | tripleL <- 30 | mapM (\fp -> textToTriples fp (replace ".txt" ".meta" fp)) full_paths 31 | let tripleS = concat tripleL 32 | putStrLn tripleS 33 | writeFile 
outputRdfFilePath tripleS 34 | 35 | processFilesToNeo4j :: FilePath -> FilePath -> IO () 36 | processFilesToNeo4j dirPath outputRdfFilePath = do 37 | files <- getDirectoryContents dirPath :: IO [FilePath] 38 | let filtered_files = filter isTextFile files 39 | let full_paths = [dirPath ++ "/" ++ fn | fn <- filtered_files] 40 | putStrLn "full_paths:" 41 | print full_paths 42 | let prelude_node_defs = neo4j_category_node_defs 43 | putStrLn 44 | ("+++++ type of prelude_node_defs is: " ++ 45 | (show (typeOf prelude_node_defs))) 46 | print prelude_node_defs 47 | cypher_dataL <- 48 | mapM (\fp -> textToCypher fp (replace ".txt" ".meta" fp)) full_paths 49 | let cypher_dataS = concat cypher_dataL 50 | putStrLn cypher_dataS 51 | writeFile outputRdfFilePath $ prelude_node_defs ++ cypher_dataS 52 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/src/webclients/ClassificationWebClient.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | -- reference: http://www.serpentine.com/wreq/tutorial.html 4 | module ClassificationWebClient 5 | ( classification_client 6 | ) where 7 | 8 | import Control.Lens 9 | import Data.ByteString.Lazy.Char8 (unpack) 10 | import Data.Maybe (fromJust) 11 | import Network.URI.Encode (encode) 12 | import Network.Wreq 13 | 14 | base_url = "http://127.0.0.1:8015?text=" -- check - Python code not implemented yet 15 | 16 | classification_client :: [Char] -> IO [Char] 17 | classification_client query = do 18 | let empty = "" 19 | return empty 20 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/src/webclients/CorefWebClient.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | -- reference: http://www.serpentine.com/wreq/tutorial.html 4 | module CorefWebClient 5 | ( corefClient 6 | ) where 7 | 8 | import Control.Lens 9 | import Data.ByteString.Lazy.Char8 (unpack) 10 | import Data.Maybe (fromJust) 11 | import Network.URI.Encode (encode) 12 | import Network.Wreq 13 | 14 | base_url = "http://127.0.0.1:8000?text=" 15 | 16 | corefClient :: [Char] -> IO [Char] 17 | corefClient query = do 18 | putStrLn $ "\n\n*** Processing " ++ (encode query) 19 | r <- get $ base_url ++ (encode query) ++ "&no_detail=1" 20 | putStrLn $ "status code: " ++ (show (r ^. responseStatus . statusCode)) 21 | putStrLn $ "content type: " ++ (show (r ^? responseHeader "Content-Type")) 22 | putStrLn $ "response body: " ++ (unpack (fromJust (r ^? responseBody))) 23 | return $ unpack (fromJust (r ^? responseBody)) 24 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/src/webclients/NlpWebClient.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | -- reference: http://www.serpentine.com/wreq/tutorial.html 4 | module NlpWebClient 5 | ( nlpClient 6 | ) where 7 | 8 | import Control.Lens 9 | import Data.ByteString.Lazy.Char8 (unpack) 10 | import Data.Maybe (fromJust) 11 | import Network.URI.Encode (encode) 12 | import Network.Wreq 13 | 14 | base_url = "http://127.0.0.1:8008?text=" 15 | 16 | nlpClient :: [Char] -> IO [Char] 17 | nlpClient query = do 18 | putStrLn $ "\n\n*** Processing " ++ query 19 | r <- get $ base_url ++ (encode query) ++ "&no_detail=1" 20 | putStrLn $ "status code: " ++ (show (r ^. 
responseStatus . statusCode)) 21 | putStrLn $ "content type: " ++ (show (r ^? responseHeader "Content-Type")) 22 | putStrLn $ "response body: " ++ (unpack (fromJust (r ^? responseBody))) 23 | return $ unpack (fromJust (r ^? responseBody)) 24 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/stack.yaml: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by 'stack init' 2 | # 3 | # Some commonly used options have been documented as comments in this file. 4 | # For advanced use and comprehensive documentation of the format, please see: 5 | # https://docs.haskellstack.org/en/stable/yaml_configuration/ 6 | 7 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 8 | # A snapshot resolver dictates the compiler version and the set of packages 9 | # to be used for project dependencies. For example: 10 | 11 | resolver: lts-22.26 12 | 13 | allow-newer: false 14 | 15 | 16 | # User packages to be built. 17 | # Various formats can be used as shown in the example below. 18 | # 19 | # packages: 20 | # - some-directory 21 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 22 | # - location: 23 | # git: https://github.com/commercialhaskell/stack.git 24 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 25 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 26 | # subdirs: 27 | # - auto-update 28 | # - wai 29 | packages: 30 | - . 31 | extra-deps: 32 | - json-0.11 33 | - sqlite-simple-0.4.19.0 34 | - direct-sqlite-2.3.29 35 | 36 | #extra_deps: 37 | #- ../NlpTool-0.1.0.0 38 | 39 | # Override default flag values for local packages and extra-deps 40 | # flags: {} 41 | 42 | # Extra package databases containing global packages 43 | # extra-package-dbs: [] 44 | 45 | # Control whether we use the GHC we find on the path 46 | # system-ghc: true 47 | # 48 | # Require a specific version of stack, using version ranges 49 | # require-stack-version: -any # Default 50 | # require-stack-version: ">=1.9" 51 | # 52 | # Override the architecture used by stack, especially useful on Windows 53 | # arch: i386 54 | # arch: x86_64 55 | # 56 | # Extra directories used by stack for building 57 | # extra-include-dirs: [/path/to/dir] 58 | # extra-lib-dirs: [/path/to/dir] 59 | # 60 | # Allow a newer minor version of GHC than the snapshot specifies 61 | # compiler-check: newer-minor 62 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/stack.yaml.lock: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by Stack. 2 | # You should not edit this file by hand. 
3 | # For more information, please see the documentation at: 4 | # https://docs.haskellstack.org/en/stable/lock_files 5 | 6 | packages: 7 | - completed: 8 | hackage: json-0.11@sha256:3afa37628415992fe648da6f002672f5a0119aa5d49022bf928d10a927c29318,3250 9 | pantry-tree: 10 | sha256: 71526412887c0e9b5fe732f629fd1771e6fcce132ebe7bb1a95d6c509372c902 11 | size: 3190 12 | original: 13 | hackage: json-0.11 14 | - completed: 15 | hackage: sqlite-simple-0.4.19.0@sha256:2996a0cef2ac99925ffb51cbf99d0a3fc129d67f3ece2294f14edb277d7b32d5,3153 16 | pantry-tree: 17 | sha256: 328b696862be96e4eb3769a3ef2ebf2ebf82fb85ae71c114826b81e85dc876c5 18 | size: 1930 19 | original: 20 | hackage: sqlite-simple-0.4.19.0 21 | - completed: 22 | hackage: direct-sqlite-2.3.29@sha256:6ff3969a6eae383c8a9ab093abfee7f7b0ed76dab045c984a1497b7e1d71279d,4180 23 | pantry-tree: 24 | sha256: 794455a2e32dc749ff32d8907bc51d810f79b963d5e21c42bc8555bc34bbd625 25 | size: 770 26 | original: 27 | hackage: direct-sqlite-2.3.29 28 | snapshots: 29 | - completed: 30 | sha256: 8e7996960d864443a66eb4105338bbdd6830377b9f6f99cd5527ef73c10c01e7 31 | size: 719128 32 | url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/22/26.yaml 33 | original: lts-22.26 34 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/test/Spec.hs: -------------------------------------------------------------------------------- 1 | import ClassificationWebClient 2 | import CorefWebClient 3 | import NlpWebClient 4 | 5 | main :: IO () 6 | main 7 | -- JSON from Coref server (written in Python) 8 | = do 9 | s <- corefClient "My sister has a dog. She loves him" 10 | putStrLn $ s 11 | s2 <- nlpClient "John Smith went to Mexico." 12 | putStrLn $ s2 13 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/test_data/test1.meta: -------------------------------------------------------------------------------- 1 | {"uri": "", "similar_docs":["", ""]} 2 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/test_data/test1.txt: -------------------------------------------------------------------------------- 1 | Plunging European stocks, wobbly bonds and grave concerns about 2 | the health of Portuguese lender Banco Espirito Santo SA made last 3 | week feel like a rerun of the euro crisis, but most investors say 4 | it was no more than a blip for a resurgent region. Banco Espirito 5 | Santo has been in investors’ sights since December, when The Wall 6 | Street Journal first reported on accounting irregularities at the 7 | complex firm. Nerves frayed on Thursday when Banco Espirito Santo's 8 | parent company said it wouldn't be able to meet some short-term debt 9 | obligations. B J Cole gave a concert at IBM headquarters in Canada and then in France. 10 | I heard him on the Australian Broadcasting Corporation being 11 | critical of Australian Broadcasting Corporation. 12 | Story was written by Frank Munoz a member of the Australian Writers Guild 13 | as taught at the American University. 
14 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/test_data/test2.meta: -------------------------------------------------------------------------------- 1 | {"uri": "", "similar_docs":[]} 2 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/test_data/test2.txt: -------------------------------------------------------------------------------- 1 | The Wall Street Journal covered the stock market crash in Mexico and Canada. 2 | Stock prices are increasing. The weather was bad yesterday. 3 | -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/test_data/test3.meta: -------------------------------------------------------------------------------- 1 | {"uri": "", "similar_docs":[]} -------------------------------------------------------------------------------- /knowledge_graph_creator_pure/test_data/test3.txt: -------------------------------------------------------------------------------- 1 | Over the weekend, 41 tornadoes were reported across eight states as severe storms again target the heartland. 2 | Violent, potentially life-threatening tornadoes may strike western Texas and Oklahoma as the threat of severe storms stretches all the way into Kansas. 3 | Six states also are under flood alerts, with flash flooding a major threat from Texas all the way up to North Dakota. 4 | Some areas in Texas, Oklahoma and Kansas could see half a foot of rain. 5 | Part of the system that delivered severe weather to the central U.S. over the weekend is moving into the Northeast today, producing strong to severe storms -- damaging winds, hail or isolated tornadoes can't be ruled out. 6 | The biggest threats will be in from the Hudson Valley toward Albany, New York, and into New England. 7 | Severe weather is forecast to continue on Tuesday, with the western storm moving east into the Midwest and parts of the mid-Mississippi Valley. 8 | The biggest threat tomorrow will be damaging winds, large hail and a few tornadoes -- with the tornado threat largest in the morning. -------------------------------------------------------------------------------- /ollama_commandline/Main.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DuplicateRecordFields #-} 2 | {-# LANGUAGE OverloadedRecordDot #-} 3 | {-# LANGUAGE DeriveGeneric #-} -- Added DeriveGeneric for clarity, though Generic is imported 4 | 5 | import Control.Monad (when) -- Import when 6 | import System.Environment (getArgs) 7 | import qualified Data.Aeson as Aeson 8 | import Data.Aeson (FromJSON, ToJSON) 9 | import GHC.Generics (Generic) -- Explicitly import Generic 10 | import Network.HTTP.Client 11 | ( newManager 12 | , httpLbs 13 | , parseRequest 14 | , Request(..) 15 | , RequestBody(..) 
16 | , responseBody 17 | , responseStatus 18 | , defaultManagerSettings 19 | , Manager -- Import Manager type 20 | ) 21 | import Network.HTTP.Types.Status (statusIsSuccessful) -- Import statusIsSuccessful 22 | 23 | -- Data types for Ollama interaction 24 | data OllamaRequest = OllamaRequest 25 | { model :: String 26 | , prompt :: String 27 | , stream :: Bool 28 | } deriving (Show, Generic, ToJSON) -- Derive Generic and ToJSON 29 | 30 | data OllamaResponse = OllamaResponse 31 | { model :: String 32 | , created_at :: String 33 | , response :: String -- This matches the actual field name in the JSON 34 | , done :: Bool 35 | , done_reason :: Maybe String -- done_reason might be null/missing in some responses, using Maybe is safer 36 | } deriving (Show, Generic, FromJSON) -- Derive Generic and FromJSON 37 | 38 | -- Function to call the Ollama API 39 | callOllama :: Manager -> String -> String -> IO (Either String OllamaResponse) 40 | callOllama manager modelName userPrompt = do 41 | -- Note: parseRequest throws exceptions on invalid URLs, which is acceptable here. 42 | initialRequest <- parseRequest "http://localhost:11434/api/generate" 43 | 44 | let ollamaRequestBody = OllamaRequest 45 | { model = modelName 46 | , prompt = userPrompt 47 | , stream = False -- Keeping stream as False for a single response 48 | } 49 | 50 | let request = initialRequest 51 | { requestHeaders = [("Content-Type", "application/json")] 52 | , method = "POST" 53 | , requestBody = RequestBodyLBS $ Aeson.encode ollamaRequestBody 54 | } 55 | 56 | -- httpLbs also throws exceptions on network errors, which `main` handles implicitly 57 | httpResponse <- httpLbs request manager 58 | 59 | let status = responseStatus httpResponse 60 | body = responseBody httpResponse 61 | 62 | if statusIsSuccessful status -- Use statusIsSuccessful for clarity 63 | then do 64 | let maybeOllamaResponse = Aeson.decode body :: Maybe OllamaResponse 65 | case maybeOllamaResponse of 66 | Just ollamaResponse -> return $ Right ollamaResponse 67 | Nothing -> return $ Left $ "Error: Failed to parse JSON response. Body: " ++ show body 68 | else do 69 | return $ Left $ "Error: HTTP request failed with status " ++ show status ++ ". Body: " ++ show body 70 | 71 | main :: IO () 72 | main = do 73 | args <- getArgs 74 | case args of 75 | [] -> putStrLn "Usage: [model_name]" 76 | (promptArg:modelArgs) -> do 77 | let modelName = case modelArgs of 78 | (m:_) -> m 79 | [] -> "llama3.2:latest" -- Default model 80 | 81 | manager <- newManager defaultManagerSettings 82 | 83 | putStrLn $ "Sending prompt '" ++ promptArg ++ "' to model '" ++ modelName ++ "'..." 84 | 85 | result <- callOllama manager modelName promptArg 86 | 87 | case result of 88 | Right ollamaResponse -> do 89 | -- No need for liftIO here, putStrLn is already IO 90 | putStrLn "\n--- Response ---" 91 | putStrLn ollamaResponse.response 92 | when (ollamaResponse.done_reason /= Nothing) $ -- Check if done_reason is present 93 | putStrLn $ "\nDone reason: " ++ show ollamaResponse.done_reason -- Show the reason if present 94 | Left err -> do 95 | -- No need for liftIO here either 96 | putStrLn $ "API Error: " ++ err -------------------------------------------------------------------------------- /ollama_commandline/README.md: -------------------------------------------------------------------------------- 1 | # Command line tool to access local Ollama LLM server 2 | 3 | Run example: 4 | 5 | cabal run ollama-client "how much is 4 + 11 + 13?" 
6 | 7 | or: 8 | 9 | cabal run ollama-client "write Python script to print out 11th and 12th prime numbers" 10 | 11 | or: 12 | 13 | cabal run ollama-client "Write a Haskell hello world program" 14 | -------------------------------------------------------------------------------- /ollama_commandline/cabal.project.freeze: -------------------------------------------------------------------------------- 1 | active-repositories: hackage.haskell.org:merge 2 | constraints: any.OneTuple ==0.4.2, 3 | any.QuickCheck ==2.15.0.1, 4 | QuickCheck -old-random +templatehaskell, 5 | any.StateVar ==1.2.2, 6 | any.aeson ==2.2.3.0, 7 | aeson +ordered-keymap, 8 | any.ansi-terminal ==1.1.2, 9 | ansi-terminal -example, 10 | any.ansi-terminal-types ==1.1, 11 | any.appar ==0.1.8, 12 | any.array ==0.5.4.0, 13 | any.assoc ==1.1.1, 14 | assoc -tagged, 15 | any.async ==2.2.5, 16 | async -bench, 17 | any.base ==4.17.2.1, 18 | any.base-orphans ==0.9.2, 19 | any.base64-bytestring ==1.2.1.0, 20 | any.bifunctors ==5.6.2, 21 | bifunctors +tagged, 22 | any.binary ==0.8.9.1, 23 | any.blaze-builder ==0.4.2.3, 24 | any.byteorder ==1.0.4, 25 | any.bytestring ==0.11.5.3, 26 | any.case-insensitive ==1.2.1.0, 27 | any.character-ps ==0.1, 28 | any.colour ==2.3.6, 29 | any.comonad ==5.0.8, 30 | comonad +containers +distributive +indexed-traversable, 31 | any.containers ==0.6.7, 32 | any.contravariant ==1.5.5, 33 | contravariant +semigroups +statevar +tagged, 34 | any.cookie ==0.5.0, 35 | any.data-default ==0.8.0.0, 36 | any.data-default-class ==0.2.0.0, 37 | any.data-fix ==0.3.4, 38 | any.deepseq ==1.4.8.0, 39 | any.directory ==1.3.7.1, 40 | any.distributive ==0.6.2.1, 41 | distributive +semigroups +tagged, 42 | any.dlist ==1.0, 43 | dlist -werror, 44 | any.exceptions ==0.10.5, 45 | any.filepath ==1.4.2.2, 46 | any.foldable1-classes-compat ==0.1, 47 | foldable1-classes-compat +tagged, 48 | any.generically ==0.1.1, 49 | any.ghc-bignum ==1.3, 50 | any.ghc-boot-th ==9.4.8, 51 | any.ghc-prim ==0.9.1, 52 | any.hashable ==1.4.7.0, 53 | hashable -arch-native +integer-gmp -random-initial-seed, 54 | any.hsc2hs ==0.68.10, 55 | hsc2hs -in-ghc-tree, 56 | any.http-client ==0.7.17, 57 | http-client +network-uri, 58 | any.http-types ==0.12.4, 59 | any.indexed-traversable ==0.1.4, 60 | any.indexed-traversable-instances ==0.1.2, 61 | any.integer-conversion ==0.1.1, 62 | any.integer-logarithms ==1.0.3.1, 63 | integer-logarithms -check-bounds +integer-gmp, 64 | any.iproute ==1.7.15, 65 | any.mime-types ==0.1.2.0, 66 | any.mtl ==2.2.2, 67 | any.network ==3.2.7.0, 68 | network -devel, 69 | any.network-uri ==2.6.4.2, 70 | any.optparse-applicative ==0.18.1.0, 71 | optparse-applicative +process, 72 | any.os-string ==2.0.7, 73 | any.parsec ==3.1.16.1, 74 | any.pretty ==1.1.3.6, 75 | any.prettyprinter ==1.7.1, 76 | prettyprinter -buildreadme +text, 77 | any.prettyprinter-ansi-terminal ==1.1.3, 78 | any.primitive ==0.9.0.0, 79 | any.process ==1.6.18.0, 80 | any.random ==1.2.1.2, 81 | any.rts ==1.0.2, 82 | any.scientific ==0.3.8.0, 83 | scientific -integer-simple, 84 | any.semialign ==1.3.1, 85 | semialign +semigroupoids, 86 | any.semigroupoids ==6.0.1, 87 | semigroupoids +comonad +containers +contravariant +distributive +tagged +unordered-containers, 88 | any.splitmix ==0.1.0.5, 89 | splitmix -optimised-mixer, 90 | any.stm ==2.5.1.0, 91 | any.streaming-commons ==0.2.2.6, 92 | streaming-commons -use-bytestring-builder, 93 | any.strict ==0.5.1, 94 | any.tagged ==0.8.8, 95 | tagged +deepseq +transformers, 96 | any.tasty ==1.5.2, 97 | tasty +unix, 98 | 
any.template-haskell ==2.19.0.0, 99 | any.text ==2.0.2, 100 | any.text-iso8601 ==0.1.1, 101 | any.text-short ==0.1.6, 102 | text-short -asserts, 103 | any.th-abstraction ==0.7.0.0, 104 | any.th-compat ==0.1.5, 105 | any.these ==1.2.1, 106 | any.time ==1.12.2, 107 | any.time-compat ==1.9.7, 108 | any.transformers ==0.5.6.2, 109 | any.transformers-compat ==0.7.2, 110 | transformers-compat -five +five-three -four +generic-deriving +mtl -three -two, 111 | any.unix ==2.7.3, 112 | any.unordered-containers ==0.2.20, 113 | unordered-containers -debug, 114 | any.uuid-types ==1.0.6, 115 | any.vector ==0.13.2.0, 116 | vector +boundschecks -internalchecks -unsafechecks -wall, 117 | any.vector-stream ==0.1.0.1, 118 | any.witherable ==0.5, 119 | any.zlib ==0.7.1.0, 120 | zlib -bundled-c-zlib +non-blocking-ffi +pkg-config 121 | index-state: hackage.haskell.org 2024-12-02T14:17:24Z 122 | -------------------------------------------------------------------------------- /ollama_commandline/ollama-client.cabal: -------------------------------------------------------------------------------- 1 | name: ollama-client 2 | version: 0.1.0.0 3 | synopsis: A simple client for Ollama API 4 | description: A Haskell client to interact with local Ollama API 5 | license: MIT 6 | license-file: LICENSE 7 | author: Your Name 8 | maintainer: your.email@example.com 9 | category: AI 10 | build-type: Simple 11 | cabal-version: >=1.10 12 | 13 | executable ollama-client 14 |   main-is: Main.hs 15 | build-depends: 16 | base >= 4.7 && < 5, 17 | aeson, 18 | http-client, 19 | text, 20 | http-types, 21 | vector, 22 | optparse-applicative 23 | if os(darwin) 24 | ghc-options: 25 | ld-options: 26 | 27 | -- Language extensions used in the code 28 | default-extensions: 29 | OverloadedStrings 30 | DeriveGeneric 31 | LambdaCase 32 | DeriveAnyClass 33 | 34 | program-default-options 35 | hsc2hs: --with-hsc2hs=/opt/homebrew/bin/hsc2hs 36 | 37 | -- Compiler flags 38 | ghc-options: -Wall -O2 39 | -------------------------------------------------------------------------------- /replit.nix: -------------------------------------------------------------------------------- 1 | {pkgs}: { 2 | deps = [ 3 | pkgs.haskellPackages.cpphs 4 | pkgs.sqlite-interactive 5 | pkgs.vim 6 | pkgs.cabal-install 7 | pkgs.stack 8 | pkgs.zlib.dev 9 | ]; 10 | } 11 | -------------------------------------------------------------------------------- /webchat/README.md: -------------------------------------------------------------------------------- 1 | # A Web Application For Using the Google Gemini APIs 2 | 3 | Build and run using: 4 | 5 | cabal run 6 | 7 | Open a browser of: 8 | 9 | http://localhost:3000 10 | -------------------------------------------------------------------------------- /webchat/gemini-chat.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 2.4 2 | name: gemini-chat 3 | version: 0.1.0.0 4 | executable gemini-chat 5 | main-is: Main.hs 6 | 7 | -- Dependencies 8 | build-depends: base >= 4.7 && < 5 9 | , scotty 10 | , http-client >= 0.7 11 | , http-client-tls 12 | , http-types 13 | , aeson 14 | , text 15 | , vector 16 | , bytestring 17 | if os(darwin) 18 | ghc-options: 19 | ld-options: 20 | 21 | -- Language extensions used in the code 22 | default-extensions: 23 | OverloadedStrings 24 | DeriveGeneric 25 | LambdaCase 26 | DeriveAnyClass 27 | 28 | program-default-options 29 | hsc2hs: --with-hsc2hs=/opt/homebrew/bin/hsc2hs 30 | 31 | -- Compiler flags 32 | ghc-options: -Wall -O2 33 | 34 | 
--------------------------------------------------------------------------------