├── Setup.hs ├── test ├── Spec.hs ├── Data │ └── Aeson │ │ ├── TestUtil.hs │ │ ├── EmbeddedSpec.hs │ │ └── AlternativeSpec.hs └── AWSLambda │ └── Events │ ├── KinesisEventSpec.hs │ ├── S3EventSpec.hs │ ├── APIGatewaySpec.hs │ ├── SQSEventSpec.hs │ └── SNSEventSpec.hs ├── integration-test ├── expected │ ├── offline_output.txt │ ├── subdir_output.json │ ├── output.json │ ├── output_modified.json │ ├── output_js.json │ ├── local_output_js.txt │ ├── logs.txt │ ├── local_output.txt │ ├── error_output.txt │ ├── local_error_output.txt │ └── multi_output.txt ├── skeleton │ ├── package.json │ ├── stack.yaml │ ├── subdir │ │ ├── stack.yaml │ │ ├── package.yaml │ │ └── Main.hs │ ├── jsfunc │ │ └── handler.js │ ├── package.yaml │ ├── ApiGateway.hs │ ├── serverless.yml │ └── Main.hs ├── extra-deps.lts-14 ├── tests.sh └── run.sh ├── example-project ├── Setup.hs ├── serverless.yml ├── package.json ├── app │ └── Main.hs ├── apigw-app │ └── Main.hs ├── package.yaml ├── LICENSE └── serverless-haskell-example.cabal ├── .eslintignore ├── .gitignore ├── .eslintrc.js ├── tsconfig.json ├── stack-14.yaml ├── update-lts ├── latest-lts ├── stack.yaml ├── config.ts ├── src ├── AWSLambda │ ├── Events │ │ ├── MessageAttribute.hs │ │ ├── Records.hs │ │ ├── KinesisEvent.hs │ │ ├── SQSEvent.hs │ │ ├── SNSEvent.hs │ │ ├── S3Event.hs │ │ └── APIGateway.hs │ ├── Orphans.hs │ ├── Events.hs │ └── Handler.hs ├── Data │ └── Aeson │ │ ├── Alternative.hs │ │ ├── TextValue.hs │ │ └── Embedded.hs └── AWSLambda.hs ├── .github └── workflows │ ├── deploy.yaml │ └── main.yaml ├── package.json ├── version.ts ├── deploy ├── LICENSE ├── ld.ts ├── stack.yaml.lock ├── updateAWSEnvironment.ts ├── bumpversion ├── serverless-haskell.cabal ├── AWSEnvironment.ts ├── ChangeLog.md ├── README.md ├── serverless-haskell.hsfiles └── index.ts /Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | 
-------------------------------------------------------------------------------- /test/Spec.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -F -pgmF hspec-discover #-} 2 | -------------------------------------------------------------------------------- /integration-test/expected/offline_output.txt: -------------------------------------------------------------------------------- 1 | Hello, integration 2 | -------------------------------------------------------------------------------- /example-project/Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /integration-test/expected/subdir_output.json: -------------------------------------------------------------------------------- 1 | [ 2 | "subdir result" 3 | ] 4 | -------------------------------------------------------------------------------- /integration-test/expected/output.json: -------------------------------------------------------------------------------- 1 | [ 2 | 11, 3 | 22, 4 | 33 5 | ] 6 | -------------------------------------------------------------------------------- /integration-test/expected/output_modified.json: -------------------------------------------------------------------------------- 1 | [ 2 | 11, 3 | 22, 4 | 44 5 | ] 6 | -------------------------------------------------------------------------------- /integration-test/expected/output_js.json: -------------------------------------------------------------------------------- 1 | { 2 | "result": "Hello from JavaScript" 3 | } 4 | -------------------------------------------------------------------------------- /integration-test/skeleton/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "NAME", 3 | "version": "1.0.0" 4 | } 5 | 
-------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | dist 2 | integration-test/run 3 | integration-test/skeleton/jsfunc 4 | node_modules 5 | -------------------------------------------------------------------------------- /integration-test/expected/local_output_js.txt: -------------------------------------------------------------------------------- 1 | { 2 | "result": "Hello from JavaScript" 3 | } 4 | -------------------------------------------------------------------------------- /integration-test/expected/logs.txt: -------------------------------------------------------------------------------- 1 | This should go to logs 2 | Array [Number 4.0,Number 5.0,Number 6.0] 3 | -------------------------------------------------------------------------------- /integration-test/skeleton/stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: RESOLVER 2 | 3 | extra-deps: 4 | - DIST 5 | EXTRA_DEPS 6 | -------------------------------------------------------------------------------- /integration-test/skeleton/subdir/stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: RESOLVER 2 | 3 | extra-deps: 4 | - DIST 5 | EXTRA_DEPS 6 | -------------------------------------------------------------------------------- /integration-test/expected/local_output.txt: -------------------------------------------------------------------------------- 1 | This should go to logs 2 | Array [Number 4.0,Number 5.0,Number 6.0] 3 | [11,22,33] 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .serverless 3 | .stack-work 4 | /example-project/package-lock.json 5 | /integration-test/run 6 | /dist 7 | /*.tgz 8 | 
-------------------------------------------------------------------------------- /integration-test/skeleton/jsfunc/handler.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports.main = (event, context, callback) => { 4 | callback(null, { 5 | result: "Hello from JavaScript" 6 | }); 7 | }; 8 | -------------------------------------------------------------------------------- /integration-test/expected/error_output.txt: -------------------------------------------------------------------------------- 1 | { 2 | "errorType": "ErrorCall", 3 | "errorMessage": "Magic error\nCallStack (from HasCallStack):\n error, called at Main.hs:25:30 in main:Main" 4 | } 5 | 6 | -------------------------------------------------------------------------------- /integration-test/expected/local_error_output.txt: -------------------------------------------------------------------------------- 1 | This should go to logs 2 | Object (fromList [("error",Number 1.0)]) 3 | {"errorType":"ErrorCall","errorMessage":"Magic error\nCallStack (from HasCallStack):\n error, called at Main.hs:25:30 in main:Main"} 4 | 5 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | parser: '@typescript-eslint/parser', 4 | plugins: [ 5 | '@typescript-eslint', 6 | ], 7 | extends: [ 8 | 'eslint:recommended', 9 | 'plugin:@typescript-eslint/eslint-recommended', 10 | 'plugin:@typescript-eslint/recommended', 11 | ], 12 | }; 13 | -------------------------------------------------------------------------------- /integration-test/skeleton/subdir/package.yaml: -------------------------------------------------------------------------------- 1 | name: subtest 2 | version: 0.0.0 3 | 4 | dependencies: 5 | - base >= 4.7 && < 5 6 | - aeson 7 | - serverless-haskell 8 | - text 9 | 10 | executables: 11 | 
main: 12 | main: Main.hs 13 | ghc-options: 14 | - -threaded 15 | - -rtsopts 16 | - -with-rtsopts=-N 17 | -------------------------------------------------------------------------------- /integration-test/extra-deps.lts-14: -------------------------------------------------------------------------------- 1 | - amazonka-core-1.6.1@sha256:9bc59ce403c6eeba3b3eaf3f10e5f0b6a33b6edbbf8f6de0dd6f4c67b86fa698,5135 2 | - amazonka-kinesis-1.6.1@sha256:439479d6d7f4a731a2f1feeb3a4c653d266c11d655cd4085608e157eb524c51b,3998 3 | - amazonka-s3-1.6.1@sha256:9d07240fca59ad5197fb614ce3051e701e4951e6d4625a2dab4a9c17a1900194,6317 4 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "module": "commonjs", 5 | "esModuleInterop": true, 6 | 7 | "declaration": true, 8 | "outDir": "dist", 9 | 10 | "strict": true, 11 | "alwaysStrict": true, 12 | 13 | "forceConsistentCasingInFileNames": true 14 | }, 15 | "include": [ 16 | "**/*" 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /integration-test/skeleton/subdir/Main.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Main module for a subproject in the integration test. 
3 | -} 4 | {-# LANGUAGE OverloadedStrings #-} 5 | 6 | import qualified Data.Aeson as Aeson 7 | import Data.Text (Text) 8 | 9 | import AWSLambda 10 | 11 | main :: IO () 12 | main = lambdaMain handler 13 | 14 | handler :: Aeson.Value -> IO [Text] 15 | handler _ = do 16 | pure ["subdir result"] 17 | -------------------------------------------------------------------------------- /example-project/serverless.yml: -------------------------------------------------------------------------------- 1 | service: serverless-haskell-example 2 | 3 | provider: 4 | name: aws 5 | runtime: haskell 6 | region: ap-southeast-2 7 | 8 | functions: 9 | hello: 10 | handler: serverless-haskell-example.example-exe 11 | apigw: 12 | handler: serverless-haskell-example.api-exe 13 | events: 14 | - http: 15 | path: hello/{name} 16 | method: get 17 | 18 | plugins: 19 | - serverless-haskell 20 | -------------------------------------------------------------------------------- /stack-14.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-14.27 2 | packages: 3 | - . 4 | - example-project 5 | nix: 6 | packages: 7 | - zlib.dev 8 | - zlib.out 9 | 10 | extra-deps: 11 | - amazonka-core-1.6.1@sha256:9bc59ce403c6eeba3b3eaf3f10e5f0b6a33b6edbbf8f6de0dd6f4c67b86fa698,5135 12 | - amazonka-kinesis-1.6.1@sha256:439479d6d7f4a731a2f1feeb3a4c653d266c11d655cd4085608e157eb524c51b,3998 13 | - amazonka-s3-1.6.1@sha256:9d07240fca59ad5197fb614ce3051e701e4951e6d4625a2dab4a9c17a1900194,6317 14 | -------------------------------------------------------------------------------- /example-project/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "serverless-haskell-example", 3 | "version": "1.0.0", 4 | "main": "index.js", 5 | "directories": { 6 | "test": "test" 7 | }, 8 | "dependencies": { 9 | "serverless": "^1.41.1", 10 | "serverless-haskell": "file:.." 
11 | }, 12 | "devDependencies": {}, 13 | "scripts": { 14 | "test": "echo \"Error: no test specified\" && exit 1" 15 | }, 16 | "author": "", 17 | "license": "MIT", 18 | "description": "" 19 | } 20 | -------------------------------------------------------------------------------- /update-lts: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Update stack.yaml and serverless-haskell.hsfiles to the latest lts 4 | 5 | set -e 6 | 7 | sedi() { 8 | if [ "$(uname)" = "Linux" ] 9 | then 10 | sed -i "$@" 11 | else 12 | sed -i '' "$@" 13 | fi 14 | } 15 | 16 | latest_lts=$(./latest-lts lts) 17 | 18 | sedi -E 's/^resolver: lts-[0-9]+\.[0-9]+/resolver: '$latest_lts'/g' stack.yaml 19 | sedi -E 's/^resolver: lts-[0-9]+\.[0-9]+/resolver: '$latest_lts'/g' serverless-haskell.hsfiles 20 | -------------------------------------------------------------------------------- /example-project/app/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import AWSLambda.Handler 4 | import AWSLambda.Events.S3Event 5 | 6 | import Data.Aeson.Alternative 7 | 8 | main :: IO () 9 | main = lambdaMain $ handlerIntArray `alternative` handlerS3 10 | 11 | handlerIntArray :: [Int] -> IO String 12 | handlerIntArray values = do 13 | putStrLn "This should go to logs" 14 | pure $ "Highest value is " ++ show (maximum values) 15 | 16 | handlerS3 :: S3Event -> IO String 17 | handlerS3 _ = pure "S3 event received" 18 | -------------------------------------------------------------------------------- /test/Data/Aeson/TestUtil.hs: -------------------------------------------------------------------------------- 1 | module Data.Aeson.TestUtil where 2 | 3 | import Data.Aeson 4 | import Data.ByteString.Lazy (ByteString) 5 | 6 | import Test.Hspec 7 | 8 | testEncodeDecode :: (FromJSON a, ToJSON a, Show a, Eq a) => String -> ByteString -> a -> Spec 9 | testEncodeDecode description bytestring value = 
describe description $ do 10 | it "decodes" $ 11 | decode bytestring `shouldBe` Just value 12 | it "encodes" $ 13 | decode (encode value) `shouldBe` Just value 14 | -------------------------------------------------------------------------------- /integration-test/skeleton/package.yaml: -------------------------------------------------------------------------------- 1 | name: shtest 2 | version: 0.0.0 3 | 4 | dependencies: 5 | - base >= 4.7 && < 5 6 | - aeson 7 | - serverless-haskell 8 | 9 | executables: 10 | main: 11 | main: Main.hs 12 | ghc-options: 13 | - -threaded 14 | - -rtsopts 15 | - -with-rtsopts=-N 16 | 17 | apigw: 18 | main: ApiGateway.hs 19 | ghc-options: 20 | - -threaded 21 | - -rtsopts 22 | - -with-rtsopts=-N 23 | dependencies: 24 | - lens 25 | - text 26 | - unordered-containers 27 | -------------------------------------------------------------------------------- /integration-test/expected/multi_output.txt: -------------------------------------------------------------------------------- 1 | [ 2 | 11, 3 | 22, 4 | 33 5 | ] 6 | [ 7 | 11, 8 | 22, 9 | 33 10 | ] 11 | [ 12 | 11, 13 | 22, 14 | 33 15 | ] 16 | [ 17 | 11, 18 | 22, 19 | 33 20 | ] 21 | [ 22 | 11, 23 | 22, 24 | 33 25 | ] 26 | [ 27 | 11, 28 | 22, 29 | 33 30 | ] 31 | [ 32 | 11, 33 | 22, 34 | 33 35 | ] 36 | [ 37 | 11, 38 | 22, 39 | 33 40 | ] 41 | [ 42 | 11, 43 | 22, 44 | 33 45 | ] 46 | [ 47 | 11, 48 | 22, 49 | 33 50 | ] 51 | -------------------------------------------------------------------------------- /latest-lts: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Find the latest resolver in the given LTS series 3 | 4 | set -e 5 | 6 | RESOLVER_SERIES=$1 7 | 8 | SNAPSHOT_JSON=$(mktemp) 9 | trap 'rm -rf $SNAPSHOT_JSON' EXIT 10 | for URL in https://www.stackage.org/download/snapshots.json https://mirrors.tuna.tsinghua.edu.cn/stackage/snapshots.json 11 | do 12 | if curl -o $SNAPSHOT_JSON --max-time 5 --retry 5 $URL 13 | then 14 | cat $SNAPSHOT_JSON | jq 
-r '."'$RESOLVER_SERIES'"' 15 | exit 0 16 | else 17 | echo "Failed to download LTS information from $URL." >&2 18 | fi 19 | done 20 | echo "No URL could be downloaded." >&2 21 | exit 1 22 | -------------------------------------------------------------------------------- /test/Data/Aeson/EmbeddedSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module Data.Aeson.EmbeddedSpec where 3 | 4 | import Data.Aeson 5 | import Data.Aeson.Embedded 6 | import Data.Aeson.TestUtil 7 | 8 | import Test.Hspec 9 | 10 | spec :: Spec 11 | spec = describe "Embedded" $ do 12 | testEncodeDecode "array" "\"[1,2,3]\"" $ Embedded [1::Int, 2, 3] 13 | testEncodeDecode "string" "\"\\\"foo\\\"\"" $ Embedded ("foo"::String) 14 | testEncodeDecode "object" "\"{\\\"a\\\":42,\\\"b\\\":true}\"" $ Embedded $ object ["a" .= (42::Int), "b" .= True] 15 | testEncodeDecode "in field" "{\"a\":\"[1,2,3]\"}" $ object ["a" .= Embedded [1::Int, 2, 3]] 16 | -------------------------------------------------------------------------------- /integration-test/skeleton/ApiGateway.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Main where 4 | 5 | import AWSLambda.Events.APIGateway 6 | import Control.Lens 7 | import qualified Data.HashMap.Strict as HashMap 8 | import Data.Semigroup 9 | import Data.Text (Text) 10 | 11 | main :: IO () 12 | main = apiGatewayMain hello 13 | 14 | hello :: APIGatewayProxyRequest Text -> IO (APIGatewayProxyResponse Text) 15 | hello request = do 16 | putStrLn "This should go to logs" 17 | case HashMap.lookup "name" (request ^. 
agprqPathParameters) of 18 | Just name -> return $ responseOK & responseBody ?~ "Hello, " <> name <> "\n" 19 | Nothing -> return responseNotFound 20 | -------------------------------------------------------------------------------- /stack.yaml: -------------------------------------------------------------------------------- 1 | resolver: lts-20.26 # ghc 9.2.8 2 | packages: 3 | - . 4 | - example-project 5 | nix: 6 | packages: 7 | - zlib.dev 8 | - zlib.out 9 | # Work around https://github.com/commercialhaskell/stack/issues/5290 10 | save-hackage-creds: false 11 | extra-deps: 12 | - crypton-0.33@sha256:5e92f29b9b7104d91fcdda1dec9400c9ad1f1791c231cc41ceebd783fb517dee,18202 13 | - amazonka-core-2.0@sha256:d9f0533c272ac92bd7b18699077038b6b51b3552e91b65743af4ce646286b4f8,4383 14 | - amazonka-kinesis-2.0@sha256:bd669fb35bc22da589a787a391e64a4b095f8725566e3b96ce38600adf76d933,5086 15 | - amazonka-s3-2.0@sha256:e3143e11ab9e57ee0dfd9d1dd95a44c5b4a7d34af78c8f5b2e6c00afad118a02,13853 16 | -------------------------------------------------------------------------------- /example-project/apigw-app/Main.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | module Main where 3 | 4 | import AWSLambda.Events.APIGateway 5 | import Control.Lens 6 | import qualified Data.HashMap.Strict as HashMap 7 | import Data.Semigroup 8 | import Data.Text (Text) 9 | 10 | main :: IO () 11 | main = apiGatewayMain hello 12 | 13 | hello :: APIGatewayProxyRequest Text -> IO (APIGatewayProxyResponse Text) 14 | hello request = do 15 | putStrLn "This should go to logs" 16 | case HashMap.lookup "name" (request ^. 
agprqPathParameters) of 17 | Just name -> return $ responseOK & responseBody ?~ "Hello, " <> name 18 | Nothing -> return responseNotFound 19 | -------------------------------------------------------------------------------- /config.ts: -------------------------------------------------------------------------------- 1 | // Constants 2 | 3 | export type Runtime = string; 4 | 5 | // Runtime handled by this plugin 6 | export const HASKELL_RUNTIME: Runtime = 'haskell'; 7 | 8 | // Runtime used by the wrapper 9 | export const BASE_RUNTIME: Runtime = 'provided.al2'; 10 | 11 | // Docker image used as reference 12 | // https://aws.amazon.com/blogs/aws/new-for-aws-lambda-container-image-support/ 13 | // https://hub.docker.com/r/amazon/aws-lambda-provided 14 | export const DOCKER_IMAGE = 'amazon/aws-lambda-provided:al2'; 15 | 16 | // Docker image used for the builds. This needs to have all dependencies for 17 | // running GHC and Stack, but can't have glibc later what AWS provides. 18 | export const BUILD_DOCKER_IMAGE = 'haskell:stretch'; 19 | -------------------------------------------------------------------------------- /src/AWSLambda/Events/MessageAttribute.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell #-} 2 | 3 | {-| 4 | Module: AWSLambda.Events.MessageAttribute 5 | Description: Types for SQS and SNS message attributes 6 | -} 7 | module AWSLambda.Events.MessageAttribute where 8 | 9 | import Control.Lens (makeLenses) 10 | import Data.Aeson.Casing (aesonPrefix, pascalCase) 11 | import Data.Aeson.TH (deriveFromJSON) 12 | import Data.Text (Text) 13 | 14 | data MessageAttribute = MessageAttribute 15 | { _maType :: !Text 16 | , _maValue :: !Text 17 | } deriving (Eq, Show) 18 | 19 | $(deriveFromJSON (aesonPrefix pascalCase) ''MessageAttribute) 20 | $(makeLenses ''MessageAttribute) 21 | -------------------------------------------------------------------------------- /src/AWSLambda/Orphans.hs: 
-------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE StandaloneDeriving #-} 5 | {-# OPTIONS_GHC -fno-warn-orphans #-} 6 | module AWSLambda.Orphans where 7 | 8 | import Data.Aeson 9 | import Amazonka.Data.Text (fromText) 10 | import qualified Amazonka.S3 as S3 11 | 12 | #if !MIN_VERSION_amazonka_core(1,6,0) 13 | deriving instance FromJSON S3.BucketName 14 | #endif 15 | 16 | deriving instance FromJSON S3.ObjectKey 17 | 18 | deriving instance FromJSON S3.ObjectVersionId 19 | 20 | instance FromJSON S3.ETag where 21 | parseJSON = withText "ETag" $ either fail return . fromText 22 | -------------------------------------------------------------------------------- /integration-test/skeleton/serverless.yml: -------------------------------------------------------------------------------- 1 | service: NAME 2 | 3 | configValidationMode: error 4 | 5 | provider: 6 | name: aws 7 | runtime: haskell 8 | lambdaHashingVersion: 20201221 9 | apiGateway: 10 | shouldStartNameWithService: true 11 | 12 | functions: 13 | main: 14 | handler: shtest.main 15 | 16 | apigw: 17 | handler: shtest.apigw 18 | events: 19 | - http: 20 | path: hello/{name} 21 | method: get 22 | 23 | subdir: 24 | handler: subdir/subtest.main 25 | 26 | jsfunc: 27 | runtime: nodejs12.x 28 | handler: jsfunc/handler.main 29 | 30 | plugins: 31 | - serverless-haskell 32 | - serverless-offline 33 | 34 | custom: 35 | haskell: 36 | docker: ${strToBool(${env:FORCE_DOCKER, "true"})} 37 | -------------------------------------------------------------------------------- /test/Data/Aeson/AlternativeSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Data.Aeson.AlternativeSpec where 4 | 5 | import Data.Aeson 6 | import Data.Aeson.Alternative 7 | 8 | import Test.Hspec 9 | 10 | spec :: Spec 11 
| spec = 12 | describe "AlternativeJSON" $ do 13 | let handler1 :: [Int] -> Int 14 | handler1 = sum 15 | let handler2 :: Bool -> Int 16 | handler2 = fromEnum 17 | let handler = handler1 `alternative` handler2 18 | it "parses the first alternative" $ 19 | (handler <$> decode "[1, 2, 3]") `shouldBe` (Just 6) 20 | it "parses the second alternative" $ 21 | (handler <$> decode "true") `shouldBe` (Just 1) 22 | it "fails to parse anything else" $ 23 | (handler <$> decode "{}") `shouldBe` Nothing 24 | -------------------------------------------------------------------------------- /integration-test/skeleton/Main.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Main module for the integration test. 3 | -} 4 | {-# LANGUAGE OverloadedStrings #-} 5 | 6 | import Data.Aeson ((.=)) 7 | import qualified Data.Aeson as Aeson 8 | 9 | import AWSLambda 10 | 11 | import Control.Monad (when) 12 | 13 | import System.Environment 14 | 15 | main :: IO () 16 | main = lambdaMain handler 17 | 18 | handler :: Aeson.Value -> IO [Int] 19 | handler evt = do 20 | -- Test logs going through 21 | putStrLn "This should go to logs" 22 | -- Test passed event 23 | print evt 24 | -- Throw error on a magic input value 25 | when (evt == errorEvent) $ error "Magic error" 26 | -- Test return value 27 | pure [11, 22, 33] 28 | 29 | errorEvent :: Aeson.Value 30 | errorEvent = Aeson.object ["error" .= (1 :: Int)] 31 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Deploy 2 | 3 | on: 4 | push: 5 | tags: 6 | - v[0-9]+.[0-9]+.[0-9]+ 7 | 8 | jobs: 9 | deploy: 10 | 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v2 16 | - name: Set up Stack 17 | run: stack upgrade || curl -sSL https://get.haskellstack.org/ | sh -s - -f 18 | - name: Set up Node.js 19 | uses: 
actions/setup-node@v1 20 | with: 21 | node-version: 12.x 22 | - name: Set up cache 23 | uses: actions/cache@v1 24 | with: 25 | key: deploy 26 | path: ~/.npm 27 | - name: Deploy 28 | run: ./deploy 29 | env: 30 | HACKAGE_USERNAME: ${{ secrets.HACKAGE_USERNAME }} 31 | HACKAGE_PASSWORD: ${{ secrets.HACKAGE_PASSWORD }} 32 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 33 | -------------------------------------------------------------------------------- /src/Data/Aeson/Alternative.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : Data.Aeson.Alternative 3 | Stability : experimental 4 | 5 | Utilities for decoding JSON into one of the possible types and handling the 6 | resulting sum type. 7 | -} 8 | module Data.Aeson.Alternative 9 | ( AlternativeJSON 10 | , alternative 11 | ) where 12 | 13 | import Control.Applicative 14 | 15 | import Data.Aeson 16 | 17 | -- | One of the two values that has been parsed from JSON 18 | data AlternativeJSON a b 19 | = FirstJSON a 20 | | SecondJSON b 21 | deriving (Eq, Ord, Show) 22 | 23 | instance (FromJSON a, FromJSON b) => FromJSON (AlternativeJSON a b) where 24 | parseJSON v = FirstJSON <$> parseJSON v <|> SecondJSON <$> parseJSON v 25 | 26 | -- | Handle either of the two types that have been parsed from JSON 27 | alternative :: (a -> r) -> (b -> r) -> AlternativeJSON a b -> r 28 | alternative f _ (FirstJSON a) = f a 29 | alternative _ g (SecondJSON b) = g b 30 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "serverless-haskell", 3 | "version": "0.12.6", 4 | "description": "Deploy Haskell binaries onto AWS Lambda", 5 | "main": "dist/index.js", 6 | "files": [ 7 | "dist" 8 | ], 9 | "scripts": { 10 | "test": "eslint . 
--ext .js,.ts --max-warnings 0", 11 | "prepare": "tsc", 12 | "updateAWSEnvironment": "ts-node updateAWSEnvironment.ts" 13 | }, 14 | "repository": { 15 | "type": "git", 16 | "url": "https://github.com/seek-oss/serverless-haskell.git" 17 | }, 18 | "author": "SEEK", 19 | "license": "MIT", 20 | "dependencies": { 21 | "fs-extra": "^5.0.0", 22 | "serverless": "^2.40.0" 23 | }, 24 | "devDependencies": { 25 | "@types/fs-extra": "^8.1.0", 26 | "@types/node": "^13.9.0", 27 | "@types/serverless": "^1.78.26", 28 | "@typescript-eslint/eslint-plugin": "^2.26.0", 29 | "@typescript-eslint/parser": "^2.26.0", 30 | "eslint": "^6.8.0", 31 | "ts-node": "^8.6.2", 32 | "typescript": "^3.8.3" 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /version.ts: -------------------------------------------------------------------------------- 1 | export type Version = number[]; 2 | 3 | export function parse(str: string): Version { 4 | return str.split('.').map(n => +n); 5 | } 6 | 7 | export function format(version: Version): string { 8 | return version.join('.'); 9 | } 10 | 11 | // Compares two arrays as versions and returns: 12 | // -1 if x < y 13 | // 0 if x == y 14 | // 1 if x > y 15 | export function compare(x: Version, y: Version): number { 16 | for (let i = 0; i < x.length && i < y.length; i++) { 17 | if (x[i] < y[i]) { 18 | return -1; 19 | } 20 | if (x[i] > y[i]) { 21 | return 1; 22 | } 23 | } 24 | if (x.length < y.length) { 25 | return -1; 26 | } 27 | if (x.length > y.length) { 28 | return 1; 29 | } 30 | return 0; 31 | } 32 | 33 | export function greater(x: Version, y: Version): boolean { 34 | return compare(x, y) > 0; 35 | } 36 | 37 | export function max(x: Version, y: Version): Version { 38 | return greater(x, y) ? 
x : y; 39 | } 40 | -------------------------------------------------------------------------------- /deploy: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Script for deploying the package via Travis CI 3 | # Use --dry-run for testing without uploading anything 4 | 5 | set -euo pipefail 6 | 7 | DRY_RUN= 8 | while [ $# -gt 0 ] 9 | do 10 | case "$1" in 11 | --dry-run) 12 | DRY_RUN=true 13 | shift 14 | ;; 15 | *) 16 | shift 17 | ;; 18 | esac 19 | done 20 | 21 | function report_fail() { 22 | echo $1 >&2 23 | exit 1 24 | } 25 | 26 | # Test releasing 27 | stack sdist || report_fail "Failed to package for Hackage." 28 | npm install 29 | npm publish --dry-run || report_fail "Failed to package for NPM." 30 | 31 | if [ -z "$DRY_RUN" ] 32 | then 33 | # Release to Hackage via Stack 34 | 35 | stack upload . || report_fail "Failed to upload to Hackage." 36 | 37 | # Release to NPM 38 | 39 | NPMRC=$HOME/.npmrc 40 | if [ ! -f "$NPMRC" ] 41 | then 42 | echo '//registry.npmjs.org/:_authToken='"$NPM_TOKEN" > "$NPMRC" 43 | chmod go-rwx "$NPMRC" 44 | fi 45 | npm publish || report_fail "Failed to upload to NPM." 
46 | fi 47 | -------------------------------------------------------------------------------- /example-project/package.yaml: -------------------------------------------------------------------------------- 1 | name: serverless-haskell-example 2 | category: AWS, Cloud, Network 3 | maintainer: akotlyarov@seek.com.au 4 | version: 1.0.0 5 | github: seek-oss/serverless-haskell 6 | license: MIT 7 | synopsis: Deploying Haskell code onto AWS Lambda using Serverless 8 | description: Example package to demonstrate deployment of Haskell code onto AWS Lambda using Serverless 9 | 10 | dependencies: 11 | - base >= 4.7 && < 5 12 | 13 | executables: 14 | example-exe: 15 | main: Main.hs 16 | source-dirs: app 17 | ghc-options: 18 | - -threaded 19 | - -rtsopts 20 | - -with-rtsopts=-N 21 | dependencies: 22 | - serverless-haskell 23 | 24 | api-exe: 25 | main: Main.hs 26 | source-dirs: apigw-app 27 | ghc-options: 28 | - -threaded 29 | - -rtsopts 30 | - -with-rtsopts=-N 31 | dependencies: 32 | - aeson 33 | - lens 34 | - serverless-haskell 35 | - text 36 | - unordered-containers 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 SEEK 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /example-project/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 SEEK 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/Data/Aeson/TextValue.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE RecordWildCards #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | 6 | {-| 7 | Module: Data.Aeson.TextValue 8 | Description: Type for things that can be embedded in a JSON string 9 | 10 | Provides @FromJSON@ and @ToJSON@ instances for anything that 11 | has @FromText@ and @ToText@ instances, e.g. @TextValue Text@, 12 | @(FromJSON a, ToJSON a) => TextValue (Embedded a)@, 13 | @TextValue Base64@ 14 | -} 15 | module Data.Aeson.TextValue where 16 | 17 | import Control.Lens.TH 18 | import Data.Aeson 19 | import Data.String 20 | import Amazonka.Data.Text (FromText (..), ToText (..), fromText) 21 | 22 | 23 | newtype TextValue a = TextValue { _unTextValue :: a } deriving (Eq, Show, IsString) 24 | 25 | instance FromText a => FromJSON (TextValue a) where 26 | parseJSON = withText "TextValue" $ fmap TextValue . either fail pure . fromText 27 | 28 | instance ToText a => ToJSON (TextValue a) where 29 | toJSON = toJSON . toText . _unTextValue 30 | toEncoding = toEncoding . toText . 
_unTextValue 31 | 32 | $(makeLenses ''TextValue) 33 | -------------------------------------------------------------------------------- /ld.ts: -------------------------------------------------------------------------------- 1 | // Parse output of ldd and ldconfig 2 | 3 | import * as version from './version'; 4 | 5 | export type Paths = { [path: string]: string }; 6 | 7 | // Parse output of ldd or ldconfig and return a map of library names to paths 8 | export function parseLdOutput(output: string): Paths { 9 | const libraryList = output.split('\n').filter(ln => ln.includes('=>')); 10 | 11 | const result: Paths = {}; 12 | libraryList.forEach(s => { 13 | const [name, , libPath] = s.trim().split(' '); 14 | result[name] = libPath; 15 | }); 16 | 17 | return result; 18 | } 19 | 20 | // Parse output of objdump -T and return minimum glibc version required 21 | export function parseObjdumpOutput(output: string): version.Version | null { 22 | const glibcPattern = /\bGLIBC_([0-9.]+)\b/g; 23 | 24 | let maxVersion = null; 25 | let match; 26 | do { 27 | match = glibcPattern.exec(output); 28 | if (match) { 29 | const thisVersion = version.parse(match[1]); 30 | if (!maxVersion || version.greater(thisVersion, maxVersion)) { 31 | maxVersion = thisVersion; 32 | } 33 | } 34 | } while (match); 35 | return maxVersion; 36 | } 37 | -------------------------------------------------------------------------------- /src/AWSLambda/Events/Records.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveFoldable #-} 2 | {-# LANGUAGE DeriveFunctor #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | 6 | module AWSLambda.Events.Records where 7 | 8 | import Control.Exception.Safe (MonadCatch) 9 | import Control.Lens.TH (makeLenses) 10 | import Control.Monad.IO.Class 11 | import Data.Aeson (FromJSON(..), withObject, (.:)) 12 | import Data.Foldable (traverse_) 13 | 14 | import AWSLambda.Handler (lambdaMain) 15 | 16 | 
newtype RecordsEvent a = RecordsEvent { _reRecords :: [a] } deriving (Eq, Show, Functor, Foldable) 17 | 18 | instance FromJSON a => FromJSON (RecordsEvent a) where 19 | parseJSON = withObject "RecordsEvent" $ \o -> RecordsEvent <$> o .: "Records" 20 | 21 | $(makeLenses ''RecordsEvent) 22 | 23 | -- | Traverse all the records in a Lambda event 24 | traverseRecords :: Applicative m => (a -> m ()) -> RecordsEvent a -> m () 25 | traverseRecords = traverse_ 26 | 27 | -- | A specialised version of the 'lambdaMain' entry-point 28 | -- for handling individual records in a Lambda event 29 | recordsMain :: (FromJSON a, MonadCatch m, MonadIO m) => (a -> m ()) -> m () 30 | recordsMain = lambdaMain . traverseRecords 31 | -------------------------------------------------------------------------------- /src/Data/Aeson/Embedded.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE RecordWildCards #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | 6 | {-| 7 | Module: Data.Aeson.Embedded 8 | Description: Type for a JSON value embedded within a JSON string value 9 | -} 10 | module Data.Aeson.Embedded where 11 | 12 | import Control.Lens.TH 13 | import Data.Aeson 14 | import qualified Data.ByteString.Lazy as LBS 15 | import Data.Text.Encoding (decodeUtf8, encodeUtf8) 16 | import Amazonka.Data.Text (FromText (..), ToText (..), fromText) 17 | 18 | -- | Type for a JSON value embedded within a JSON string value 19 | newtype Embedded a = Embedded { _unEmbed :: a } deriving (Eq, Show) 20 | 21 | instance FromJSON a => 22 | FromText (Embedded a) where 23 | fromText txt = 24 | fmap Embedded . eitherDecodeStrict $ encodeUtf8 txt 25 | 26 | instance FromJSON a => 27 | FromJSON (Embedded a) where 28 | parseJSON v = either fail pure . fromText =<< parseJSON v 29 | 30 | instance ToJSON a => ToText (Embedded a) where 31 | toText = decodeUtf8 . LBS.toStrict . encode . 
_unEmbed 32 | 33 | instance ToJSON a => ToJSON (Embedded a) where 34 | toJSON = toJSON . toText 35 | toEncoding = toEncoding . toText 36 | 37 | $(makeLenses ''Embedded) 38 | -------------------------------------------------------------------------------- /example-project/serverless-haskell-example.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 1.12 2 | 3 | -- This file has been generated from package.yaml by hpack version 0.35.0. 4 | -- 5 | -- see: https://github.com/sol/hpack 6 | 7 | name: serverless-haskell-example 8 | version: 1.0.0 9 | synopsis: Deploying Haskell code onto AWS Lambda using Serverless 10 | description: Example package to demonstrate deployment of Haskell code onto AWS Lambda using Serverless 11 | category: AWS, Cloud, Network 12 | homepage: https://github.com/seek-oss/serverless-haskell#readme 13 | bug-reports: https://github.com/seek-oss/serverless-haskell/issues 14 | maintainer: akotlyarov@seek.com.au 15 | license: MIT 16 | license-file: LICENSE 17 | build-type: Simple 18 | 19 | source-repository head 20 | type: git 21 | location: https://github.com/seek-oss/serverless-haskell 22 | 23 | executable api-exe 24 | main-is: Main.hs 25 | other-modules: 26 | Paths_serverless_haskell_example 27 | hs-source-dirs: 28 | apigw-app 29 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 30 | build-depends: 31 | aeson 32 | , base >=4.7 && <5 33 | , lens 34 | , serverless-haskell 35 | , text 36 | , unordered-containers 37 | default-language: Haskell2010 38 | 39 | executable example-exe 40 | main-is: Main.hs 41 | other-modules: 42 | Paths_serverless_haskell_example 43 | hs-source-dirs: 44 | app 45 | ghc-options: -threaded -rtsopts -with-rtsopts=-N 46 | build-depends: 47 | base >=4.7 && <5 48 | , serverless-haskell 49 | default-language: Haskell2010 50 | -------------------------------------------------------------------------------- /stack.yaml.lock: 
-------------------------------------------------------------------------------- 1 | # This file was autogenerated by Stack. 2 | # You should not edit this file by hand. 3 | # For more information, please see the documentation at: 4 | # https://docs.haskellstack.org/en/stable/lock_files 5 | 6 | packages: 7 | - completed: 8 | hackage: crypton-0.33@sha256:5e92f29b9b7104d91fcdda1dec9400c9ad1f1791c231cc41ceebd783fb517dee,18202 9 | pantry-tree: 10 | sha256: 38809499d7f9775ef45cd29ab5c3dc9b283a813f34c1cdc56681b24f8cf8bb4f 11 | size: 23148 12 | original: 13 | hackage: crypton-0.33@sha256:5e92f29b9b7104d91fcdda1dec9400c9ad1f1791c231cc41ceebd783fb517dee,18202 14 | - completed: 15 | hackage: amazonka-core-2.0@sha256:d9f0533c272ac92bd7b18699077038b6b51b3552e91b65743af4ce646286b4f8,4383 16 | pantry-tree: 17 | sha256: 46e7e4de910b08ee2df98db9cda2becf388ce49510024018289a46c43e175ee0 18 | size: 3222 19 | original: 20 | hackage: amazonka-core-2.0@sha256:d9f0533c272ac92bd7b18699077038b6b51b3552e91b65743af4ce646286b4f8,4383 21 | - completed: 22 | hackage: amazonka-kinesis-2.0@sha256:bd669fb35bc22da589a787a391e64a4b095f8725566e3b96ce38600adf76d933,5086 23 | pantry-tree: 24 | sha256: 89a2e5347a459a17189304fc1e38ad2d9453605989aa0e29cf3bf0c68ccfcf57 25 | size: 9733 26 | original: 27 | hackage: amazonka-kinesis-2.0@sha256:bd669fb35bc22da589a787a391e64a4b095f8725566e3b96ce38600adf76d933,5086 28 | - completed: 29 | hackage: amazonka-s3-2.0@sha256:e3143e11ab9e57ee0dfd9d1dd95a44c5b4a7d34af78c8f5b2e6c00afad118a02,13853 30 | pantry-tree: 31 | sha256: 49f801400582ef0ca290312243b55e6e3c540c0c94c1a3110735bc400386fefb 32 | size: 38690 33 | original: 34 | hackage: amazonka-s3-2.0@sha256:e3143e11ab9e57ee0dfd9d1dd95a44c5b4a7d34af78c8f5b2e6c00afad118a02,13853 35 | snapshots: 36 | - completed: 37 | sha256: 5a59b2a405b3aba3c00188453be172b85893cab8ebc352b1ef58b0eae5d248a2 38 | size: 650475 39 | url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/20/26.yaml 40 | 
original: lts-20.26 41 | -------------------------------------------------------------------------------- /src/AWSLambda/Events/KinesisEvent.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE RecordWildCards #-} 3 | {-# LANGUAGE TemplateHaskell #-} 4 | 5 | {-| 6 | Module: AWSLambda.Events.KinesisEvent 7 | Description: Types for Kinesis Lambda events 8 | 9 | Based on https://github.com/aws/aws-lambda-dotnet/tree/master/Libraries/src/Amazon.Lambda.KinesisEvents 10 | -} 11 | module AWSLambda.Events.KinesisEvent where 12 | 13 | import Control.Lens.TH 14 | import Data.Aeson (FromJSON (..), withObject, (.:)) 15 | import Data.Aeson.Casing (aesonDrop, camelCase) 16 | import Data.Aeson.TH (deriveFromJSON) 17 | import Data.Text (Text) 18 | import Amazonka.Data.Base64 (Base64 (..)) 19 | import qualified Amazonka.Kinesis.Types as Kinesis 20 | import qualified Amazonka.Types as AWS 21 | 22 | import AWSLambda.Events.Records 23 | 24 | data KinesisRecord = KinesisRecord 25 | { _krRecord :: !Kinesis.Record 26 | , _krKinesisSchemaVersion :: !Text 27 | } deriving (Eq, Show) 28 | 29 | instance FromJSON KinesisRecord where 30 | parseJSON = 31 | withObject "KinesisRecord" $ 32 | \o -> do 33 | _krKinesisSchemaVersion <- o .: "kinesisSchemaVersion" 34 | dataBase64 <- o .: "data" 35 | _krRecord <- 36 | Kinesis.newRecord <$> (o .: "sequenceNumber") <*> pure (unBase64 dataBase64) <*> 37 | (o .: "partitionKey") 38 | return KinesisRecord {..} 39 | $(makeLenses ''KinesisRecord) 40 | 41 | data KinesisEventRecord = KinesisEventRecord 42 | { _kerKinesis :: !KinesisRecord 43 | , _kerEventSource :: !Text 44 | , _kerEventID :: !Text 45 | , _kerInvokeIdentityArn :: !Text 46 | , _kerEventVersion :: !Text 47 | , _kerEventName :: !Text 48 | , _kerEventSourceARN :: !Text 49 | , _kerAwsRegion :: !AWS.Region 50 | } deriving (Eq, Show) 51 | $(deriveFromJSON (aesonDrop 4 camelCase) ''KinesisEventRecord) 52 | $(makeLenses 
''KinesisEventRecord) 53 | 54 | type KinesisEvent = RecordsEvent KinesisEventRecord 55 | -------------------------------------------------------------------------------- /src/AWSLambda/Events.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | {-# LANGUAGE TemplateHaskell #-} 3 | 4 | module AWSLambda.Events 5 | ( module AWSLambda.Events.APIGateway 6 | , module AWSLambda.Events.KinesisEvent 7 | , module AWSLambda.Events.MessageAttribute 8 | , module AWSLambda.Events.Records 9 | , module AWSLambda.Events.S3Event 10 | , module AWSLambda.Events.SNSEvent 11 | , module AWSLambda.Events.SQSEvent 12 | , traverseSnsInSqs 13 | , snsInSqsMain 14 | , traverseS3InSnsInSqs 15 | , s3InSnsInSqsMain 16 | ) where 17 | 18 | import Control.Exception.Safe (MonadCatch) 19 | import Control.Monad.IO.Class 20 | import Data.Aeson (FromJSON (..)) 21 | 22 | import AWSLambda.Events.APIGateway 23 | import AWSLambda.Events.KinesisEvent 24 | import AWSLambda.Events.MessageAttribute 25 | import AWSLambda.Events.Records 26 | import AWSLambda.Events.S3Event 27 | import AWSLambda.Events.SNSEvent 28 | import AWSLambda.Events.SQSEvent 29 | import AWSLambda.Handler (lambdaMain) 30 | import Data.Aeson.Embedded (Embedded) 31 | 32 | -- | Traverse all the SNS messages embedded in an SQS event 33 | traverseSnsInSqs :: (FromJSON a, Applicative m) => (a -> m ()) -> SQSEvent (Embedded (SNSMessage (Embedded a))) -> m () 34 | traverseSnsInSqs = traverseSqs . traverseSnsMessage 35 | 36 | -- | A specialised version of the 'lambdaMain' entry-point 37 | -- for handling individual SNS messages embedded in an SQS event 38 | snsInSqsMain :: (FromJSON a, MonadCatch m, MonadIO m) => (a -> m ()) -> m () 39 | snsInSqsMain = lambdaMain . 
traverseSnsInSqs 40 | 41 | -- | Traverse S3 events embedded within SNS messages within an SQS event 42 | traverseS3InSnsInSqs :: (Applicative m) => (S3EventNotification -> m ()) -> SQSEvent (Embedded (SNSMessage (Embedded S3Event))) -> m () 43 | traverseS3InSnsInSqs = traverseSnsInSqs . traverseRecords 44 | 45 | -- | A specialised version of the 'lambdaMain' entry-point 46 | -- for handling individual S3 event notifications embedded in 47 | -- SNS messages embedded in an SQS event 48 | s3InSnsInSqsMain :: (MonadCatch m, MonadIO m) => (S3EventNotification -> m ()) -> m () 49 | s3InSnsInSqsMain = lambdaMain . traverseS3InSnsInSqs 50 | -------------------------------------------------------------------------------- /updateAWSEnvironment.ts: -------------------------------------------------------------------------------- 1 | // Update the list of libraries included in the AWS Lambda environment 2 | 3 | import { spawnSync } from 'child_process'; 4 | import { writeFileSync } from 'fs-extra'; 5 | import * as path from 'path'; 6 | 7 | import * as config from './config'; 8 | import * as ld from './ld'; 9 | import * as version from './version'; 10 | 11 | const EXPORT_FILE_NAME = path.resolve(__dirname, 'AWSEnvironment.ts'); 12 | 13 | function commandOutput(cmd: string, args: string[]): string { 14 | const result = spawnSync( 15 | 'docker', 16 | [ 17 | 'run', 18 | '--rm', 19 | '--entrypoint', cmd, 20 | config.DOCKER_IMAGE, 21 | ...args, 22 | ] 23 | ); 24 | 25 | if (result.error || result.status && result.status > 0) { 26 | const stderr = result.stderr.toString().trim(); 27 | throw new Error(`Error calling ${cmd} in the AWS image: ${stderr}`); 28 | } 29 | 30 | return result.stdout.toString().trim(); 31 | } 32 | 33 | function getLibraries(): string[] { 34 | const lddOutput = commandOutput('/usr/sbin/ldconfig', [ 35 | '-c', 'new', 36 | '-p' 37 | ]); 38 | 39 | const libraries = Object.keys(ld.parseLdOutput(lddOutput)); 40 | 41 | return libraries; 42 | } 43 | 44 | function 
getGlibcVersion(): version.Version { 45 | const lddOutput = commandOutput('/usr/bin/ldd', ['--version']); 46 | 47 | const glibcVersionMatch = lddOutput.match(/((\d+.)+\d+)/); 48 | if (!glibcVersionMatch) { 49 | throw new Error(`Unexpected output from ldd: ${lddOutput}`); 50 | } 51 | const glibcVersion = version.parse(glibcVersionMatch[0]); 52 | return glibcVersion; 53 | } 54 | 55 | function main(): void { 56 | spawnSync('docker', ['pull', config.DOCKER_IMAGE]); 57 | 58 | let content = "// This file is autogenerated.\n" + 59 | "// Please use 'npm run updateAWSEnvironment' to update.\n"; 60 | 61 | function addExport(name: string, value: unknown): string { 62 | return "export const " + name + " = " + 63 | JSON.stringify(value, null, 4) + ";\n"; 64 | } 65 | 66 | content += addExport("libraries", getLibraries()); 67 | content += addExport("glibcVersion", getGlibcVersion()); 68 | 69 | writeFileSync(EXPORT_FILE_NAME, content); 70 | } 71 | 72 | main(); 73 | -------------------------------------------------------------------------------- /test/AWSLambda/Events/KinesisEventSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE QuasiQuotes #-} 3 | 4 | module AWSLambda.Events.KinesisEventSpec where 5 | 6 | import AWSLambda.Events.KinesisEvent 7 | import AWSLambda.Events.Records 8 | 9 | import Data.Aeson 10 | import Data.ByteString.Lazy (ByteString) 11 | 12 | import qualified Amazonka.Kinesis.Types as Kinesis 13 | import Amazonka.Types (Region (..)) 14 | 15 | import Text.RawString.QQ 16 | 17 | import Test.Hspec 18 | 19 | spec :: Spec 20 | spec = 21 | describe "KinesisEvent" $ 22 | it "parses sample event" $ 23 | decode sampleKinesisJSON `shouldBe` Just sampleKinesisEvent 24 | 25 | sampleKinesisJSON :: ByteString 26 | sampleKinesisJSON = [r| 27 | { 28 | "Records": [ 29 | { 30 | "kinesis": { 31 | "partitionKey": "partitionKey-3", 32 | "kinesisSchemaVersion": "1.0", 33 | "data": 
"SGVsbG8sIHRoaXMgaXMgYSB0ZXN0IDEyMy4=", 34 | "sequenceNumber": "49545115243490985018280067714973144582180062593244200961" 35 | }, 36 | "eventSource": "aws:kinesis", 37 | "eventID": "shardId-000000000000:49545115243490985018280067714973144582180062593244200961", 38 | "invokeIdentityArn": "arn:aws:iam::account-id:role/testLEBRole", 39 | "eventVersion": "1.0", 40 | "eventName": "aws:kinesis:record", 41 | "eventSourceARN": "arn:aws:kinesis:us-west-2:35667example:stream/examplestream", 42 | "awsRegion": "us-west-2" 43 | } 44 | ] 45 | } 46 | |] 47 | 48 | sampleKinesisEvent :: KinesisEvent 49 | sampleKinesisEvent = 50 | RecordsEvent 51 | [ KinesisEventRecord 52 | { _kerKinesis = 53 | KinesisRecord 54 | { _krRecord = 55 | Kinesis.newRecord 56 | "49545115243490985018280067714973144582180062593244200961" 57 | "Hello, this is a test 123." 58 | "partitionKey-3" 59 | , _krKinesisSchemaVersion = "1.0" 60 | } 61 | , _kerEventSource = "aws:kinesis" 62 | , _kerEventID = 63 | "shardId-000000000000:49545115243490985018280067714973144582180062593244200961" 64 | , _kerInvokeIdentityArn = "arn:aws:iam::account-id:role/testLEBRole" 65 | , _kerEventVersion = "1.0" 66 | , _kerEventName = "aws:kinesis:record" 67 | , _kerEventSourceARN = 68 | "arn:aws:kinesis:us-west-2:35667example:stream/examplestream" 69 | , _kerAwsRegion = Oregon 70 | } 71 | ] 72 | -------------------------------------------------------------------------------- /integration-test/tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Utility functions for tests 3 | 4 | # Test running utilities 5 | TESTS=0 6 | FAILED=0 7 | 8 | RED='\033[0;31m' 9 | GREEN='\033[0;32m' 10 | NC='\033[0m' 11 | 12 | # Verify that a command succeeds 13 | assert_success() { 14 | MESSAGE="$1" 15 | shift 16 | 17 | ((++TESTS)) 18 | 19 | if "$@" 20 | then 21 | echo -e "${GREEN}$MESSAGE: success${NC}" 22 | else 23 | echo -e "${RED}${MESSAGE}: fail${NC}" 24 | ((++FAILED)) 25 | if [ "$FAILFAST" = 
"true" ] 26 | then 27 | echo -e "${RED}Aborting further tests.${NC}" 28 | exit $FAILED 29 | fi 30 | fi 31 | } 32 | 33 | # Directory with the expected outputs 34 | EXPECTED=$(cd $(dirname $0)/expected; echo $PWD) 35 | 36 | # Remove volatile bits from the output 37 | sanitise() { 38 | grep -v 'Serverless: ' | \ 39 | grep -v RequestId | \ 40 | grep -v '^[[:space:]]*$' | \ 41 | sed '/Error ----/{c\ 42 | 43 | q 44 | }' 45 | } 46 | 47 | # Test that output of the command, save for volatile bits, is as expected 48 | assert_output() { 49 | MESSAGE="$1" 50 | shift 51 | FILE="$1" 52 | shift 53 | "$@" > $FILE 54 | # Trim volatile content 55 | cat $FILE | sanitise > stable-$FILE 56 | if diff -q $EXPECTED/$FILE stable-$FILE 57 | then 58 | assert_success "$MESSAGE" true 59 | else 60 | echo -e "${RED}Unexpected output from '$*':${NC}" 61 | cat $FILE 62 | echo -e "${RED}Difference:${NC}" 63 | diff $EXPECTED/$FILE stable-$FILE || true 64 | assert_success "$MESSAGE" false 65 | fi 66 | } 67 | 68 | # Test that output of the command contains expected text 69 | assert_contains_output() { 70 | MESSAGE="$1" 71 | shift 72 | OUTPUT="$1" 73 | shift 74 | OUTFILE=$(mktemp) 75 | "$@" > $OUTFILE 76 | # Trim volatile content 77 | cat $OUTFILE | sanitise > $OUTFILE-stable 78 | if grep -q --fixed-strings "$OUTPUT" $OUTFILE-stable 79 | then 80 | rm $OUTFILE $OUTFILE-stable 81 | assert_success "$MESSAGE" true 82 | else 83 | echo -e "${RED}Unexpected output from '$*':${NC}" 84 | cat $OUTFILE 85 | echo -e "${RED}Expected:${NC}" 86 | echo $OUTPUT 87 | rm $OUTFILE $OUTFILE-stable 88 | assert_success "$MESSAGE" false 89 | fi 90 | } 91 | 92 | # End testing and indicate the error code 93 | end_tests() { 94 | if ((FAILED > 0)) 95 | then 96 | echo -e "${RED}Run ${TESTS} tests, ${FAILED} failed.${NC}" 97 | exit $FAILED 98 | else 99 | echo -e "${GREEN}${TESTS} tests passed.${NC}" 100 | exit 0 101 | fi 102 | } 103 | -------------------------------------------------------------------------------- 
/src/AWSLambda/Events/SQSEvent.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE RecordWildCards #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | 6 | {-| 7 | Module: AWSLambda.Events.SQSEvent 8 | Description: Types for SQS Lambda events 9 | -} 10 | module AWSLambda.Events.SQSEvent where 11 | 12 | import Control.Exception.Safe (MonadCatch) 13 | import Control.Lens 14 | import Control.Monad.IO.Class 15 | import Data.Aeson (FromJSON(..), genericParseJSON) 16 | import Data.Aeson.Casing (aesonPrefix, camelCase) 17 | import Data.Aeson.Embedded 18 | import Data.Aeson.TextValue 19 | import Data.ByteString (ByteString) 20 | import Data.HashMap.Strict (HashMap) 21 | import Data.Text (Text) 22 | import GHC.Generics (Generic) 23 | import Amazonka.Data.Base64 24 | import Amazonka.Data.Text (FromText) 25 | import qualified Amazonka.Types as AWS 26 | 27 | import AWSLambda.Events.MessageAttribute 28 | import AWSLambda.Events.Records 29 | import AWSLambda.Handler (lambdaMain) 30 | 31 | data SQSMessage body = SQSMessage 32 | { _sqsmMessageId :: !Text 33 | , _sqsmReceiptHandle :: !Text 34 | , _sqsmBody :: !(TextValue body) 35 | , _sqsmAttributes :: !(HashMap Text Text) 36 | , _sqsmMessageAttributes :: !(HashMap Text MessageAttribute) 37 | , _sqsmMd5OfBody :: !Text 38 | , _sqsmEventSource :: !Text 39 | , _sqsmEventSourceARN :: !Text 40 | , _sqsmAwsRegion :: !AWS.Region 41 | } deriving (Show, Eq, Generic) 42 | 43 | instance FromText message => FromJSON (SQSMessage message) where 44 | parseJSON = genericParseJSON $ aesonPrefix camelCase 45 | 46 | $(makeLenses ''SQSMessage) 47 | 48 | type SQSEvent body = RecordsEvent (SQSMessage body) 49 | 50 | -- | A Traversal to get messages from an SQSEvent 51 | sqsMessages :: Traversal (SQSEvent message) (SQSEvent message') message message' 52 | sqsMessages = reRecords . traverse . sqsmBody . 
unTextValue 53 | 54 | -- | A Traversal to get embedded JSON values from an SQSEvent 55 | sqsEmbedded :: Traversal (SQSEvent (Embedded v)) (SQSEvent (Embedded v')) v v' 56 | sqsEmbedded = sqsMessages . unEmbed 57 | 58 | sqsBinary :: Traversal' (SQSEvent Base64) ByteString 59 | sqsBinary = sqsMessages . _Base64 60 | 61 | -- | Traverse all the messages in an SQS event 62 | traverseSqs :: (FromJSON a, Applicative m) => (a -> m ()) -> SQSEvent (Embedded a) -> m () 63 | traverseSqs act = traverseRecords $ \record -> 64 | act $ record ^. sqsmBody . unTextValue . unEmbed 65 | 66 | -- | A specialised version of the 'lambdaMain' entry-point 67 | -- for handling individual SQS messages 68 | sqsMain :: (FromJSON a, MonadCatch m, MonadIO m) => (a -> m ()) -> m () 69 | sqsMain = lambdaMain . traverseSqs 70 | -------------------------------------------------------------------------------- /bumpversion: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Bump the package version and release it 3 | 4 | set -e 5 | 6 | # Set defaults and parse arguments 7 | 8 | PART=patch 9 | DRY_RUN= 10 | NO_DIRTY_CHECK= 11 | 12 | while test $# -gt 0 13 | do 14 | case "$1" in 15 | patch) 16 | PART=patch 17 | shift 18 | ;; 19 | 20 | minor) 21 | PART=minor 22 | shift 23 | ;; 24 | 25 | major) 26 | PART=major 27 | shift 28 | ;; 29 | 30 | --dry-run) 31 | DRY_RUN=true 32 | shift 33 | ;; 34 | 35 | --no-dirty-check) 36 | NO_DIRTY_CHECK=true 37 | shift 38 | ;; 39 | 40 | *) 41 | echo "Usage: $0 [major|minor|patch] [--dry-run] [--no-dirty-check]" >&2 42 | exit 1 43 | ;; 44 | esac 45 | done 46 | 47 | sedi() { 48 | if [ "$(uname)" = "Linux" ] 49 | then 50 | sed -i "$@" 51 | else 52 | sed -i '' "$@" 53 | fi 54 | } 55 | 56 | # check if git is dirty 57 | if [ -z $NO_DIRTY_CHECK ] 58 | then 59 | if !(git diff-index --quiet HEAD --) 60 | then 61 | echo "Index is dirty, commit the changes first." 
>&2 62 | exit 1 63 | fi 64 | if [ "$(git symbolic-ref --short HEAD)" != "master" ] 65 | then 66 | echo "Versions should only be tagged from master." >&2 67 | exit 1 68 | fi 69 | fi 70 | 71 | # find current version 72 | OLD_VERSION=$(cat package.yaml | grep -E '^version' | sed -E 's/.+"(.+)"/\1/') 73 | 74 | # find new version 75 | case "$PART" in 76 | patch) 77 | NEW_VERSION=$(echo $OLD_VERSION | awk -F . '{print $1 "." $2 "." ($3 + 1)}') 78 | ;; 79 | 80 | minor) 81 | NEW_VERSION=$(echo $OLD_VERSION | awk -F . '{print $1 "." ($2 + 1) "." 0}') 82 | ;; 83 | 84 | major) 85 | NEW_VERSION=$(echo $OLD_VERSION | awk -F . '{print ($1 + 1) "." 0 "." 0}') 86 | ;; 87 | 88 | *) 89 | echo "Invalid PART value." >&2 90 | exit 1 91 | ;; 92 | esac 93 | 94 | # update changelog 95 | sedi -E '/^## Unreleased changes/{G;a\ 96 | ## '$NEW_VERSION' 97 | }' ChangeLog.md 98 | 99 | # bump version 100 | sedi -E 's/(version: +)"'$OLD_VERSION'"/\1"'$NEW_VERSION'"/g' package.yaml 101 | sedi -E 's/^ "version": "'$OLD_VERSION'"/ "version": "'$NEW_VERSION'"/g' package.json 102 | sedi -E 's/^ "version": "'$OLD_VERSION'"/ "version": "'$NEW_VERSION'"/g' package-lock.json 103 | sedi -E 's/^ "serverless-haskell": "\^'$OLD_VERSION'"/ "serverless-haskell": "^'$NEW_VERSION'"/g' serverless-haskell.hsfiles 104 | sedi -E 's/^- serverless-haskell-'$OLD_VERSION'/- serverless-haskell-'$NEW_VERSION'/g' serverless-haskell.hsfiles 105 | 106 | 107 | if [ -z $DRY_RUN ] 108 | then 109 | # commit and tag 110 | MESSAGE="Bump version: $OLD_VERSION → $NEW_VERSION" 111 | git commit -a -m "$MESSAGE" 112 | git tag -a -m "$MESSAGE" v$NEW_VERSION 113 | 114 | # push the tag then branch 115 | git push --tags 116 | git push 117 | fi 118 | -------------------------------------------------------------------------------- /serverless-haskell.cabal: -------------------------------------------------------------------------------- 1 | cabal-version: 1.12 2 | 3 | -- This file has been generated from package.yaml by hpack version 0.34.4. 
4 | -- 5 | -- see: https://github.com/sol/hpack 6 | 7 | name: serverless-haskell 8 | version: 0.12.6 9 | synopsis: Deploying Haskell code onto AWS Lambda using Serverless 10 | description: Utilities to help process the events from AWS Lambda when deployed with the serverless-haskell plugin. 11 | category: AWS, Cloud, Network 12 | homepage: https://github.com/seek-oss/serverless-haskell#readme 13 | bug-reports: https://github.com/seek-oss/serverless-haskell/issues 14 | maintainer: akotlyarov@seek.com.au 15 | license: MIT 16 | license-file: LICENSE 17 | build-type: Simple 18 | extra-source-files: 19 | README.md 20 | 21 | source-repository head 22 | type: git 23 | location: https://github.com/seek-oss/serverless-haskell 24 | 25 | library 26 | exposed-modules: 27 | AWSLambda 28 | AWSLambda.Events 29 | AWSLambda.Events.APIGateway 30 | AWSLambda.Events.KinesisEvent 31 | AWSLambda.Events.MessageAttribute 32 | AWSLambda.Events.Records 33 | AWSLambda.Events.S3Event 34 | AWSLambda.Events.SNSEvent 35 | AWSLambda.Events.SQSEvent 36 | AWSLambda.Handler 37 | AWSLambda.Orphans 38 | Data.Aeson.Alternative 39 | Data.Aeson.Embedded 40 | Data.Aeson.TextValue 41 | other-modules: 42 | Paths_serverless_haskell 43 | hs-source-dirs: 44 | src 45 | ghc-options: -Wall 46 | build-depends: 47 | aeson >=2.0 && <2.2 48 | , aeson-casing 49 | , amazonka-core ==2.0.* 50 | , amazonka-kinesis ==2.0.* 51 | , amazonka-s3 ==2.0.* 52 | , base >=4.7 && <5 53 | , bytestring 54 | , case-insensitive 55 | , containers 56 | , http-client 57 | , http-types 58 | , iproute 59 | , lens 60 | , safe-exceptions 61 | , text 62 | , time 63 | , unix 64 | , unordered-containers 65 | default-language: Haskell2010 66 | 67 | test-suite tests 68 | type: exitcode-stdio-1.0 69 | main-is: Spec.hs 70 | other-modules: 71 | AWSLambda.Events.APIGatewaySpec 72 | AWSLambda.Events.KinesisEventSpec 73 | AWSLambda.Events.S3EventSpec 74 | AWSLambda.Events.SNSEventSpec 75 | AWSLambda.Events.SQSEventSpec 76 | Data.Aeson.AlternativeSpec 77 
| Data.Aeson.EmbeddedSpec 78 | Data.Aeson.TestUtil 79 | Paths_serverless_haskell 80 | hs-source-dirs: 81 | test 82 | ghc-options: -Wall 83 | build-depends: 84 | aeson >=2.0 && <2.2 85 | , aeson-casing 86 | , amazonka-core ==2.0.* 87 | , amazonka-kinesis ==2.0.* 88 | , amazonka-s3 ==2.0.* 89 | , base >=4.7 && <5 90 | , bytestring 91 | , case-insensitive 92 | , containers 93 | , hspec 94 | , hspec-discover 95 | , http-client 96 | , http-types 97 | , iproute 98 | , lens 99 | , raw-strings-qq 100 | , safe-exceptions 101 | , serverless-haskell 102 | , text 103 | , time 104 | , transformers 105 | , unix 106 | , unordered-containers 107 | default-language: Haskell2010 108 | -------------------------------------------------------------------------------- /.github/workflows/main.yaml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | schedule: 9 | - cron: "0 0 * * 5" 10 | 11 | jobs: 12 | build: 13 | 14 | runs-on: ubuntu-latest 15 | 16 | strategy: 17 | matrix: 18 | resolver: 19 | - lts-17 20 | - lts-16 21 | - lts-15 22 | - lts-14 23 | - lts-13 24 | - lts-12 25 | - lts-11 26 | - lts-10 27 | include: 28 | - resolver: lts-14 29 | extra-args: "--stack-yaml stack-14.yaml" 30 | 31 | steps: 32 | - name: Checkout 33 | uses: actions/checkout@v2 34 | - name: Set up Stack 35 | run: stack upgrade || curl -sSL https://get.haskellstack.org/ | sh -s - -f 36 | - name: Set up Node.js 37 | uses: actions/setup-node@v1 38 | with: 39 | node-version: 12.x 40 | - name: Free disk space 41 | run: | 42 | sudo swapoff -a 43 | sudo rm -f /swapfile 44 | sudo apt clean 45 | docker rmi $(docker image ls -aq) 46 | df -h 47 | - name: Set up cache 48 | uses: actions/cache@v1 49 | with: 50 | key: ${{ matrix.resolver }} 51 | path: ~/.stack 52 | - name: Find latest LTS release 53 | id: resolver 54 | run: echo "::set-output name=resolver::$(./latest-lts ${{ matrix.resolver 
}})" 55 | - name: Install dependencies 56 | run: | 57 | sudo apt-get install -y jq pwgen 58 | stack --no-terminal --install-ghc --resolver ${{ steps.resolver.outputs.resolver }} ${{ matrix.extra-args }} --docker --docker-image haskell:stretch test --bench --only-dependencies 59 | - name: Test 60 | run: | 61 | stack --no-terminal --resolver ${{ steps.resolver.outputs.resolver }} ${{ matrix.extra-args }} --docker --docker-image haskell:stretch test --bench --no-run-benchmarks --haddock --no-haddock-deps 62 | - name: Integration test 63 | run: | 64 | RESOLVER_SERIES=${{ matrix.resolver }} ./integration-test/run.sh --dry-run 65 | 66 | node-build: 67 | 68 | runs-on: ubuntu-latest 69 | 70 | steps: 71 | - name: Checkout 72 | uses: actions/checkout@v2 73 | - name: Set up Node.js 74 | uses: actions/setup-node@v1 75 | with: 76 | node-version: 12.x 77 | - name: Set up cache 78 | uses: actions/cache@v1 79 | with: 80 | key: node-build 81 | path: ~/.npm 82 | - name: Install dependencies 83 | run: npm install 84 | - name: Test 85 | run: npm test 86 | 87 | deploy-dry-run: 88 | 89 | runs-on: ubuntu-latest 90 | 91 | steps: 92 | - name: Checkout 93 | uses: actions/checkout@v2 94 | - name: Set up Stack 95 | run: stack upgrade || curl -sSL https://get.haskellstack.org/ | sh -s - -f 96 | - name: Set up Node.js 97 | uses: actions/setup-node@v1 98 | with: 99 | node-version: 12.x 100 | - name: Set up cache 101 | uses: actions/cache@v1 102 | with: 103 | key: deploy-dry-run 104 | path: ~/.npm 105 | - name: Deploy (dry run) 106 | run: ./deploy --dry-run 107 | -------------------------------------------------------------------------------- /AWSEnvironment.ts: -------------------------------------------------------------------------------- 1 | // This file is autogenerated. 2 | // Please use 'npm run updateAWSEnvironment' to update. 
3 | export const libraries = [ 4 | "p11-kit-trust.so", 5 | "libz.so.1", 6 | "libxml2.so.2", 7 | "libverto.so.1", 8 | "libuuid.so.1", 9 | "libutil.so.1", 10 | "libunistring.so.0", 11 | "libtinfo.so.6", 12 | "libtic.so.6", 13 | "libthread_db.so.1", 14 | "libtasn1.so.6", 15 | "libstdc++.so.6", 16 | "libssl3.so", 17 | "libssl.so.10", 18 | "libssh2.so.1", 19 | "libsqlite3.so.0", 20 | "libsoftokn3.so", 21 | "libsmime3.so", 22 | "libslapi-2.4.so.2", 23 | "libsepol.so.1", 24 | "libselinux.so.1", 25 | "libsasl2.so.3", 26 | "librt.so.1", 27 | "librpmsign.so.1", 28 | "librpmio.so.3", 29 | "librpmbuild.so.3", 30 | "librpm.so.3", 31 | "libresolv.so.2", 32 | "libreadline.so.6", 33 | "libp11-kit.so.0", 34 | "libpython2.7.so.1.0", 35 | "libpthread.so.0", 36 | "libpth.so.20", 37 | "libpopt.so.0", 38 | "libplds4.so", 39 | "libplc4.so", 40 | "libpcre32.so.0", 41 | "libpcre16.so.0", 42 | "libpcreposix.so.0", 43 | "libpcrecpp.so.0", 44 | "libpcre.so.1", 45 | "libpcprofile.so", 46 | "libpanelw.so.6", 47 | "libpanel.so.6", 48 | "libnss3.so", 49 | "libnssutil3.so", 50 | "libnsssysinit.so", 51 | "libnsspem.so", 52 | "libnssdbm3.so", 53 | "libnss_files.so.2", 54 | "libnss_dns.so.2", 55 | "libnss_compat.so.2", 56 | "libnspr4.so", 57 | "libnsl.so.1", 58 | "libnghttp2.so.14", 59 | "libncursesw.so.6", 60 | "libncurses.so.6", 61 | "libmvec.so.1", 62 | "libmount.so.1", 63 | "libmetalink.so.3", 64 | "libmenuw.so.6", 65 | "libmenu.so.6", 66 | "libmemusage.so", 67 | "libmagic.so.1", 68 | "libm.so.6", 69 | "liblzma.so.5", 70 | "liblua-5.1.so", 71 | "libldap_r-2.4.so.2", 72 | "libldap-2.4.so.2", 73 | "liblber-2.4.so.2", 74 | "libk5crypto.so.3", 75 | "libkrb5support.so.0", 76 | "libkrb5.so.3", 77 | "libkrad.so.0", 78 | "libkeyutils.so.1", 79 | "libkdb5.so.8", 80 | "libidn2.so.0", 81 | "libicuuc.so.50", 82 | "libicutu.so.50", 83 | "libicutest.so.50", 84 | "libiculx.so.50", 85 | "libicule.so.50", 86 | "libicui18n.so.50", 87 | "libicuio.so.50", 88 | "libicudata.so.50", 89 | "libhistory.so.6", 90 | 
"libgthread-2.0.so.0", 91 | "libgssrpc.so.4", 92 | "libgssapi_krb5.so.2", 93 | "libgpgme.so.11", 94 | "libgpgme-pthread.so.11", 95 | "libgpg-error.so.0", 96 | "libgobject-2.0.so.0", 97 | "libgmpxx.so.4", 98 | "libgmp.so.10", 99 | "libgmodule-2.0.so.0", 100 | "libglib-2.0.so.0", 101 | "libgio-2.0.so.0", 102 | "libgdbm_compat.so.4", 103 | "libgdbm.so.4", 104 | "libgcrypt.so.11", 105 | "libgcc_s.so.1", 106 | "libfreebl3.so", 107 | "libfreeblpriv3.so", 108 | "libformw.so.6", 109 | "libform.so.6", 110 | "libffi.so.6", 111 | "libexpat.so.1", 112 | "libelf.so.1", 113 | "libdl.so.2", 114 | "libdb-5.3.so", 115 | "libcurl.so.4", 116 | "libcrypto.so.10", 117 | "libcrypt.so.1", 118 | "libcom_err.so.2", 119 | "libcidn.so.1", 120 | "libcap.so.2", 121 | "libc.so.6", 122 | "libbz2.so.1", 123 | "libblkid.so.1", 124 | "libattr.so.1", 125 | "libassuan.so.0", 126 | "libanl.so.1", 127 | "libacl.so.1", 128 | "libSegFault.so", 129 | "libBrokenLocale.so.1", 130 | "ld-linux-x86-64.so.2" 131 | ]; 132 | export const glibcVersion = [ 133 | 2, 134 | 26 135 | ]; 136 | -------------------------------------------------------------------------------- /src/AWSLambda.hs: -------------------------------------------------------------------------------- 1 | {-| 2 | Module : AWSLambda 3 | Stability : experimental 4 | Portability : POSIX 5 | 6 | Tools for running Haskell on AWS Lambda using Serverless. 7 | 8 | = Usage 9 | 10 | To deploy a Haskell function on AWS Lambda: 11 | 12 | * Initialise a Serverless project in the same directory as your Stack-enabled 13 | package and install the @serverless-haskell@ plugin: 14 | 15 | > npm init -y 16 | > npm install --save serverless serverless-haskell@x.y.z 17 | 18 | The version of the NPM package to install must match the version of the 19 | Haskell package. 
20 | 21 | * Create @serverless.yml@ with the following contents: 22 | 23 | > service: myservice 24 | > 25 | > provider: 26 | > name: aws 27 | > runtime: haskell 28 | > 29 | > functions: 30 | > myfunc: 31 | > handler: mypackage.mypackage-exe 32 | > # Here, mypackage is the Haskell package name and mypackage-exe is the 33 | > # executable name as defined in the Cabal file 34 | > 35 | > plugins: 36 | > - serverless-haskell 37 | 38 | * Write your @main@ function using 'AWSLambda.lambdaMain'. 39 | 40 | * Add @aeson@ and @serverless-haskell@ to @package.yaml@: 41 | 42 | > dependencies: 43 | > - base >= 4.7 && < 5 44 | > - aeson 45 | > - serverless-haskell 46 | 47 | * Build and test locally using @sls invoke local@: 48 | 49 | The @serverless-haskell@ plugin will build the package using Stack. Note that 50 | the first build can take a long time. Consider adding @export SLS_DEBUG=*@ so 51 | you can see what is happening. 52 | 53 | > export SLS_DEBUG=* 54 | > sls invoke local -f myfunc 55 | 56 | * Use @sls deploy@ to deploy the executable to AWS Lambda. 57 | 58 | The @serverless-haskell@ plugin will build the package using Stack and upload 59 | it to AWS together with a JavaScript wrapper to pass the input and output 60 | from/to AWS Lambda. 61 | 62 | > export SLS_DEBUG=* 63 | > sls deploy 64 | 65 | You can test the function and see the invocation results with @sls invoke 66 | -f myfunc@. 67 | 68 | = API Gateway 69 | 70 | This plugin supports handling API Gateway requests. Declare the HTTP events 71 | normally in @serverless.yml@ and use 'AWSLambda.Events.APIGateway' in the 72 | handler to process them. 73 | 74 | can be used 75 | for local testing of API Gateway requests. You must use @--useDocker@ flag so 76 | that the native Haskell runtime works correctly. 77 | 78 | = Additional features 79 | 80 | Configuration options are passed to the plugin under @haskell@ key in @custom@ 81 | section of @serverless.yml@. 
82 | 83 | * To add flags to @stack build@, specify them as an array under 84 | @stackBuildArgs@: 85 | 86 | > custom: 87 | > haskell: 88 | > stackBuildArgs: 89 | > - --pedantic 90 | > - --allow-different-user 91 | 92 | * Dependent system libraries not present in the AWS Lambda environment will be 93 | automatically uploaded along with the executable. Note that while statically 94 | linking the executable via Cabal options is possible, it might still require 95 | the corresponding glibc version on the AWS environment. 96 | 97 | * Stack's Docker image will be used to match the AWS Lambda Linux environment. 98 | To disable this, set @docker@ key to @false@, but beware that the resulting 99 | binary might not have the required libraries to run on Lambda. 100 | 101 | > custom: 102 | > haskell: 103 | > docker: false 104 | 105 | * By default, @stack build@ command is invoked to build all the project's 106 | executables. To only build the ones used in the handlers, set @buildAll@ key to 107 | @false@. Note that at least Stack 1.9.3 has better caching behavior when 108 | building the whole project, as it doesn't need to reconfigure the build for the 109 | individual ones every time. 
110 | 111 | > custom: 112 | > haskell: 113 | > buildAll: false 114 | 115 | -} 116 | module AWSLambda 117 | ( Handler.lambdaMain 118 | , module AWSLambda.Events 119 | ) where 120 | 121 | import qualified AWSLambda.Handler as Handler 122 | 123 | import AWSLambda.Events 124 | -------------------------------------------------------------------------------- /src/AWSLambda/Events/SNSEvent.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE RecordWildCards #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | 6 | {-| 7 | Module: AWSLambda.Events.SNSEvent 8 | Description: Types for SNS Lambda events 9 | 10 | Based on https://github.com/aws/aws-lambda-dotnet/tree/master/Libraries/src/Amazon.Lambda.SNSEvents 11 | -} 12 | module AWSLambda.Events.SNSEvent where 13 | 14 | import Control.Applicative ((<|>)) 15 | import Control.Exception.Safe (MonadCatch) 16 | import Control.Lens 17 | import Control.Monad.IO.Class 18 | import Data.Aeson 19 | (FromJSON(..), genericParseJSON, withObject, (.!=), (.:), (.:?)) 20 | import Data.Aeson.Casing (aesonDrop, pascalCase) 21 | import Data.Aeson.Embedded 22 | import Data.Aeson.TextValue 23 | import Data.ByteString (ByteString) 24 | import Data.HashMap.Strict (HashMap) 25 | import Data.Text (Text) 26 | import Data.Time.Clock (UTCTime) 27 | import GHC.Generics (Generic) 28 | import Amazonka.Data.Base64 29 | import Amazonka.Data.Text (FromText) 30 | 31 | import AWSLambda.Events.MessageAttribute 32 | import AWSLambda.Events.Records 33 | import AWSLambda.Handler (lambdaMain) 34 | 35 | data SNSMessage message = SNSMessage 36 | { _smMessage :: !(TextValue message ) 37 | , _smMessageAttributes :: !(HashMap Text MessageAttribute) 38 | , _smMessageId :: !Text 39 | , _smSignature :: !Text 40 | , _smSignatureVersion :: !Text 41 | , _smSigningCertUrl :: !Text 42 | , _smSubject :: !Text 43 | , _smTimestamp :: !UTCTime 44 | , _smTopicArn :: !Text 
45 | , _smType :: !Text 46 | , _smUnsubscribeUrl :: !Text 47 | } deriving (Eq, Show, Generic) 48 | 49 | -- When a lambda is triggered directly off of an SNS topic, 50 | -- the SNS message contains message attributes and the URI 51 | -- fields are cased as `SigningCertUrl` and `UnsubscribeUrl`. 52 | -- When an SNS message is embedded in an SQS event, 53 | -- the SNS message changes in two ways; `MessageAttributes` 54 | -- is not present and the casing for the URI fields becomes 55 | -- `SigningCertURL` and `UnsubscribeURL`. 56 | -- For these reasons we must hand-roll the `FromJSON` instance. 57 | instance FromText message => FromJSON (SNSMessage message) where 58 | parseJSON = withObject "SNSMessage'" $ \o -> 59 | SNSMessage 60 | <$> o .: "Message" 61 | <*> o .:? "MessageAttributes" .!= mempty 62 | <*> o .: "MessageId" 63 | <*> o .: "Signature" 64 | <*> o .: "SignatureVersion" 65 | <*> do o .: "SigningCertUrl" <|> o .: "SigningCertURL" 66 | <*> o .: "Subject" 67 | <*> o .: "Timestamp" 68 | <*> o .: "TopicArn" 69 | <*> o .: "Type" 70 | <*> do o .: "UnsubscribeUrl" <|> o .: "UnsubscribeURL" 71 | 72 | $(makeLenses ''SNSMessage) 73 | 74 | data SNSRecord message = SNSRecord 75 | { _srEventVersion :: !Text 76 | , _srEventSubscriptionArn :: !Text 77 | , _srEventSource :: !Text 78 | , _srSns :: !(SNSMessage message) 79 | } deriving (Eq, Show, Generic) 80 | 81 | instance FromText message => FromJSON (SNSRecord message) where 82 | parseJSON = genericParseJSON $ aesonDrop 3 pascalCase 83 | 84 | $(makeLenses ''SNSRecord) 85 | 86 | -- | SNSEvent. 87 | -- The 'message' type is parameterised. To treat it as a text value 88 | -- use @SNSEvent Text@. 89 | -- To extract an embedded event object use the 'Embedded' type. 90 | -- E.g. @SNSEvent (Embedded S3Event)@ will treat the message 91 | -- as an embedded S3Event. 
92 | -- To extract embedded Base64 encoded binary use 93 | -- @SNSEvent Base64@ 94 | type SNSEvent message = RecordsEvent (SNSRecord message) 95 | 96 | -- | A Traversal to get messages from an SNSEvent 97 | messages :: Traversal (SNSEvent message) (SNSEvent message') message message' 98 | messages = reRecords . traverse . srSns . smMessage . unTextValue 99 | 100 | -- | A Traversal to get embedded JSON values from an SNSEvent 101 | embedded :: Traversal (SNSEvent (Embedded v)) (SNSEvent (Embedded v')) v v' 102 | embedded = messages . unEmbed 103 | 104 | binary :: Traversal' (SNSEvent Base64) ByteString 105 | binary = messages . _Base64 106 | 107 | -- | Traverse an SNS message 108 | traverseSnsMessage :: (FromJSON a, Applicative m) => (a -> m ()) -> SNSMessage (Embedded a) -> m () 109 | traverseSnsMessage act message = 110 | act $ message ^. smMessage . unTextValue . unEmbed 111 | 112 | -- | Traverse all the messages in an SNS event 113 | traverseSns :: (FromJSON a, Applicative m) => (a -> m ()) -> SNSEvent (Embedded a) -> m () 114 | traverseSns act = traverseRecords $ \record -> 115 | act $ record ^. srSns . smMessage . unTextValue . unEmbed 116 | 117 | -- | A specialed version of the 'lambdaMain' entry-point 118 | -- for handling individual SNS messages 119 | snsMain :: (FromJSON a, MonadCatch m, MonadIO m) => (a -> m ()) -> m () 120 | snsMain = lambdaMain . 
traverseSns 121 | -------------------------------------------------------------------------------- /src/AWSLambda/Events/S3Event.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE RecordWildCards #-} 3 | {-# LANGUAGE TemplateHaskell #-} 4 | {-# LANGUAGE LambdaCase #-} 5 | 6 | {-| 7 | Module: AWSLambda.Events.S3Event 8 | Description: Types for S3 Lambda events 9 | 10 | Based on https://github.com/aws/aws-lambda-dotnet/tree/master/Libraries/src/Amazon.Lambda.S3Events 11 | -} 12 | module AWSLambda.Events.S3Event where 13 | 14 | import Control.Lens.TH 15 | import Control.Monad (guard) 16 | import Data.Aeson (FromJSON (..), withObject, (.:)) 17 | import Data.Aeson.Casing (aesonDrop, camelCase) 18 | import Data.Aeson.TH (deriveFromJSON) 19 | import Data.Text (Text) 20 | import Data.Time.Clock (UTCTime) 21 | import Amazonka.S3 (BucketName, ETag, Event(..), ObjectKey, ObjectVersionId) 22 | import qualified Amazonka.Types as AWS 23 | 24 | import AWSLambda.Events.Records 25 | import AWSLambda.Orphans () 26 | 27 | newtype UserIdentityEntity = UserIdentityEntity 28 | { _uiePrincipalId :: Text 29 | } deriving (Eq, Show) 30 | 31 | $(deriveFromJSON (aesonDrop 4 camelCase) ''UserIdentityEntity) 32 | $(makeLenses ''UserIdentityEntity) 33 | 34 | data S3BucketEntity = S3BucketEntity 35 | { _sbeArn :: !Text 36 | , _sbeName :: !BucketName 37 | , _sbeOwnerIdentity :: !UserIdentityEntity 38 | } deriving (Eq, Show) 39 | 40 | $(deriveFromJSON (aesonDrop 4 camelCase) ''S3BucketEntity) 41 | $(makeLenses ''S3BucketEntity) 42 | 43 | data S3ObjectEntity = S3ObjectEntity 44 | { _soeETag :: !(Maybe ETag) 45 | , _soeKey :: !ObjectKey 46 | , _soeSize :: !(Maybe Integer) 47 | , _soeSequencer :: !Text 48 | , _soeVersionId :: !(Maybe ObjectVersionId) 49 | } deriving (Eq, Show) 50 | 51 | $(deriveFromJSON (aesonDrop 4 camelCase) ''S3ObjectEntity) 52 | $(makeLenses ''S3ObjectEntity) 53 | 54 | newtype 
RequestParametersEntity = RequestParametersEntity 55 | { _rpeSourceIPAddress :: Text 56 | } deriving (Eq, Show) 57 | 58 | $(deriveFromJSON (aesonDrop 4 camelCase) ''RequestParametersEntity) 59 | $(makeLenses ''RequestParametersEntity) 60 | 61 | data ResponseElementsEntity = ResponseElementsEntity 62 | { _reeXAmzId2 :: !Text 63 | , _reeXAmzRequestId :: !Text 64 | } deriving (Eq, Show) 65 | 66 | instance FromJSON ResponseElementsEntity where 67 | parseJSON = 68 | withObject "ResponseElementsEntity" $ 69 | \o -> 70 | ResponseElementsEntity <$> o .: "x-amz-id-2" <*> o .: "x-amz-request-id" 71 | $(makeLenses ''ResponseElementsEntity) 72 | 73 | data S3Entity = S3Entity 74 | { _seBucket :: !S3BucketEntity 75 | , _seConfigurationId :: !Text 76 | , _seObject :: !S3ObjectEntity 77 | , _seS3SchemaVersion :: !Text 78 | } deriving (Eq, Show) 79 | 80 | $(deriveFromJSON (aesonDrop 3 camelCase) ''S3Entity) 81 | $(makeLenses ''S3Entity) 82 | 83 | data S3EventNotification = S3EventNotification 84 | { _senAwsRegion :: !AWS.Region 85 | , _senEventName :: !Event 86 | , _senEventSource :: !Text 87 | , _senEventTime :: !UTCTime 88 | , _senEventVersion :: !Text 89 | , _senRequestParameters :: !RequestParametersEntity 90 | , _senResponseElements :: !ResponseElementsEntity 91 | , _senS3 :: !S3Entity 92 | , _senUserIdentity :: !UserIdentityEntity 93 | } deriving (Eq, Show) 94 | 95 | instance FromJSON S3EventNotification where 96 | parseJSON = withObject "S3EventNotification" $ \o -> do 97 | _senEventSource <- o .: "eventSource" 98 | guard $ _senEventSource == "aws:s3" 99 | _senAwsRegion <- o .: "awsRegion" 100 | _senEventName <- o .: "eventName" 101 | _senEventTime <- o .: "eventTime" 102 | _senEventVersion <- o .: "eventVersion" 103 | _senRequestParameters <- o .: "requestParameters" 104 | _senResponseElements <- o .: "responseElements" 105 | _senS3 <- o .: "s3" 106 | _senUserIdentity <- o .: "userIdentity" 107 | return S3EventNotification {..} 108 | $(makeLenses ''S3EventNotification) 109 
| 110 | type S3Event = RecordsEvent S3EventNotification 111 | 112 | -- | Is the event an object creation event 113 | isCreateEvent :: S3EventNotification -> Bool 114 | isCreateEvent e = case _senEventName e of 115 | Event_S3_ObjectCreated_CompleteMultipartUpload -> True 116 | Event_S3_ObjectCreated_Copy -> True 117 | Event_S3_ObjectCreated_Post -> True 118 | Event_S3_ObjectCreated_Put -> True 119 | Event_S3_ObjectCreated__ -> True 120 | Event_S3_ObjectRemoved_Delete -> False 121 | Event_S3_ObjectRemoved_DeleteMarkerCreated -> False 122 | Event_S3_ObjectRemoved__ -> False 123 | Event_S3_ObjectRestore_Completed -> False 124 | Event_S3_ObjectRestore_Post -> False 125 | Event_S3_ObjectRestore__ -> False 126 | Event_S3_ReducedRedundancyLostObject -> False 127 | Event_S3_Replication_OperationFailedReplication -> False 128 | Event_S3_Replication_OperationMissedThreshold -> False 129 | Event_S3_Replication_OperationNotTracked -> False 130 | Event_S3_Replication_OperationReplicatedAfterThreshold -> False 131 | Event_S3_Replication__ -> False 132 | Event' _ -> False 133 | 134 | 135 | -- | Is the event an object removal event 136 | isRemoveEvent :: S3EventNotification -> Bool 137 | isRemoveEvent e = case _senEventName e of 138 | Event_S3_ObjectCreated_CompleteMultipartUpload -> False 139 | Event_S3_ObjectCreated_Copy -> False 140 | Event_S3_ObjectCreated_Post -> False 141 | Event_S3_ObjectCreated_Put -> False 142 | Event_S3_ObjectCreated__ -> False 143 | Event_S3_ObjectRemoved_Delete -> True 144 | Event_S3_ObjectRemoved_DeleteMarkerCreated -> True 145 | Event_S3_ObjectRemoved__ -> True 146 | Event_S3_ObjectRestore_Completed -> False 147 | Event_S3_ObjectRestore_Post -> False 148 | Event_S3_ObjectRestore__ -> False 149 | Event_S3_ReducedRedundancyLostObject -> False 150 | Event_S3_Replication_OperationFailedReplication -> False 151 | Event_S3_Replication_OperationMissedThreshold -> False 152 | Event_S3_Replication_OperationNotTracked -> False 153 | 
Event_S3_Replication_OperationReplicatedAfterThreshold -> False 154 | Event_S3_Replication__ -> False 155 | Event' _ -> False 156 | -------------------------------------------------------------------------------- /src/AWSLambda/Handler.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | {-| 4 | Module : AWSLambda.Handler 5 | Stability : experimental 6 | Portability : POSIX 7 | 8 | Entry point for AWS Lambda handlers deployed with @serverless-haskell@ plugin. 9 | -} 10 | module AWSLambda.Handler 11 | ( lambdaMain 12 | , lambdaMainRaw 13 | ) where 14 | 15 | import Control.Exception.Safe (MonadCatch, SomeException(..), displayException, tryAny) 16 | import Control.Monad (forever, void) 17 | import Control.Monad.IO.Class 18 | 19 | import Data.Aeson ((.=)) 20 | import qualified Data.Aeson as Aeson 21 | 22 | import Data.Typeable (typeOf) 23 | 24 | import qualified Data.ByteString as ByteString 25 | import qualified Data.ByteString.Char8 as Char8 26 | import qualified Data.ByteString.Lazy as LBS 27 | 28 | import qualified Data.Text.Encoding as Text 29 | import qualified Data.Text.IO as Text 30 | 31 | import GHC.IO.Handle (BufferMode(..), hSetBuffering) 32 | 33 | import Network.HTTP.Client 34 | import Network.HTTP.Types (HeaderName) 35 | 36 | import System.Environment (lookupEnv) 37 | import System.IO (stdout) 38 | 39 | -- | Process incoming events from @serverless-haskell@ using a provided 40 | -- function. 41 | -- 42 | -- The handler receives the input event given to the AWS Lambda function, and 43 | -- its return value is returned from the function. 
44 | -- 45 | -- This is intended to be used as @main@, for example: 46 | -- 47 | -- > import qualified Data.Aeson as Aeson 48 | -- > 49 | -- > import AWSLambda 50 | -- > 51 | -- > main = lambdaMain handler 52 | -- > 53 | -- > handler :: Aeson.Value -> IO [Int] 54 | -- > handler evt = do 55 | -- > putStrLn "This should go to logs" 56 | -- > print evt 57 | -- > pure [1, 2, 3] 58 | -- 59 | -- The handler function can receive arbitrary JSON values from custom 60 | -- invocations, or one of the events from the "AWSLambda.Events" module, such as 61 | -- 'AWSLambda.Events.S3Event': 62 | -- 63 | -- > import AWSLambda.Events.S3Event 64 | -- > 65 | -- > handler :: S3Event -> IO () 66 | -- > handler evt = do 67 | -- > print $ records evt 68 | -- 69 | -- If the Lambda function needs to process several types of events, use 70 | -- 'Data.Aeson.Alternative' to combine several handlers: 71 | -- 72 | -- > import AWSLambda 73 | -- > import AWSLambda.Events.S3Event 74 | -- > import Data.Aeson 75 | -- > import Data.Aeson.Alternative 76 | -- > 77 | -- > main = lambdaMain $ handlerS3 `alternative` handlerCustom 78 | -- > 79 | -- > handlerS3 :: S3Event -> IO () 80 | -- > handlerS3 = _ 81 | -- > 82 | -- > handlerCustom :: Value -> IO () 83 | -- > handlerCustom = _ 84 | -- 85 | -- When run outside the AWS Lambda environment, the input is read as JSON from 86 | -- the command line, and the result of the execution is printed, also as JSON, 87 | -- to the standard output. 88 | lambdaMain :: 89 | (Aeson.FromJSON event, Aeson.ToJSON res, MonadCatch m, MonadIO m) 90 | => (event -> m res) -- ^ Function to process the event 91 | -> m () 92 | lambdaMain act = 93 | lambdaMainRaw $ \input -> do 94 | case Aeson.eitherDecode input of 95 | Left err -> error err 96 | Right event -> do 97 | result <- act event 98 | pure $ Aeson.encode result 99 | 100 | -- | Process the incoming requests (using the AWS Lambda runtime interface or from the standard input). 
101 | -- Also set line buffering on standard output for AWS Lambda so the logs are output in a timely manner. 102 | -- This function provides a lower level interface than 'lambdaMain' for users who don't want to use 103 | -- Aeson for encoding and decoding JSON. 104 | lambdaMainRaw :: (MonadCatch m, MonadIO m) => (LBS.ByteString -> m LBS.ByteString) -> m () 105 | lambdaMainRaw act = do 106 | lambdaApiAddress <- liftIO $ lookupEnv lambdaApiAddressEnv 107 | case lambdaApiAddress of 108 | Just address -> do 109 | liftIO $ hSetBuffering stdout LineBuffering 110 | manager <- liftIO $ newManager defaultManagerSettings 111 | forever $ do 112 | invocation <- liftIO $ httpLbs (invocationRequest address) manager 113 | let input = responseBody invocation 114 | let requestId = responseRequestId invocation 115 | resultOrError <- tryAny $ act input 116 | case resultOrError of 117 | Right result -> liftIO $ void $ httpNoBody (resultRequest address requestId result) manager 118 | Left exception -> do 119 | putStrLnLBS $ Aeson.encode $ exceptionJSON exception 120 | liftIO $ void $ httpNoBody (errorRequest address requestId exception) manager 121 | Nothing -> do 122 | input <- liftIO $ LBS.fromStrict <$> ByteString.getLine 123 | result <- act input 124 | putStrLnLBS result 125 | 126 | putStrLnLBS :: MonadIO m => LBS.ByteString -> m () 127 | putStrLnLBS = liftIO . Text.putStrLn . Text.decodeUtf8 . 
LBS.toStrict 128 | 129 | lambdaApiAddressEnv :: String 130 | lambdaApiAddressEnv = "AWS_LAMBDA_RUNTIME_API" 131 | 132 | lambdaRequest :: String -> String -> Request 133 | lambdaRequest apiAddress rqPath = parseRequest_ $ "http://" ++ apiAddress ++ "/2018-06-01" ++ rqPath 134 | 135 | invocationRequest :: String -> Request 136 | invocationRequest apiAddress = (lambdaRequest apiAddress "/runtime/invocation/next") { responseTimeout = responseTimeoutNone } 137 | 138 | resultRequest :: String -> String -> LBS.ByteString -> Request 139 | resultRequest apiAddress requestId result = (lambdaRequest apiAddress $ "/runtime/invocation/" ++ requestId ++ "/response") { method = "POST", requestBody = RequestBodyLBS result } 140 | 141 | errorRequest :: String -> String -> SomeException -> Request 142 | errorRequest apiAddress requestId exception = (lambdaRequest apiAddress $ "/runtime/invocation/" ++ requestId ++ "/error") { method = "POST", requestBody = RequestBodyLBS body } 143 | where 144 | body = Aeson.encode $ exceptionJSON exception 145 | 146 | exceptionJSON :: SomeException -> Aeson.Value 147 | exceptionJSON exception = Aeson.object [ "errorMessage" .= displayException exception, "errorType" .= exceptionType exception] 148 | 149 | exceptionType :: SomeException -> String 150 | exceptionType (SomeException e) = show (typeOf e) 151 | 152 | requestIdHeader :: HeaderName 153 | requestIdHeader = "Lambda-Runtime-Aws-Request-Id" 154 | 155 | responseRequestId :: Response a -> String 156 | responseRequestId = Char8.unpack . snd . head . filter (uncurry $ \h _ -> h == requestIdHeader) . 
responseHeaders 157 | -------------------------------------------------------------------------------- /test/AWSLambda/Events/S3EventSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE QuasiQuotes #-} 3 | 4 | module AWSLambda.Events.S3EventSpec where 5 | 6 | import AWSLambda.Events.Records 7 | import AWSLambda.Events.S3Event 8 | 9 | import Data.Aeson 10 | import Data.ByteString.Lazy (ByteString) 11 | import Data.Time.Calendar 12 | import Data.Time.Clock 13 | 14 | import Amazonka.S3 15 | 16 | import Text.RawString.QQ 17 | 18 | import Test.Hspec 19 | 20 | spec :: Spec 21 | spec = 22 | describe "S3Event" $ do 23 | it "parses sample Put event" $ 24 | decode sampleS3PutJSON `shouldBe` Just sampleS3PutEvent 25 | it "parses sample Delete event" $ 26 | decode sampleS3DeleteJSON `shouldBe` Just sampleS3DeleteEvent 27 | 28 | sampleS3PutJSON :: ByteString 29 | sampleS3PutJSON = [r| 30 | { 31 | "Records": [ 32 | { 33 | "eventVersion": "2.0", 34 | "eventTime": "1970-01-01T00:00:00.000Z", 35 | "requestParameters": { 36 | "sourceIPAddress": "127.0.0.1" 37 | }, 38 | "s3": { 39 | "configurationId": "testConfigRule", 40 | "object": { 41 | "eTag": "0123456789abcdef0123456789abcdef", 42 | "sequencer": "0A1B2C3D4E5F678901", 43 | "key": "HappyFace.jpg", 44 | "size": 1024 45 | }, 46 | "bucket": { 47 | "arn": "bucketarn", 48 | "name": "sourcebucket", 49 | "ownerIdentity": { 50 | "principalId": "EXAMPLE" 51 | } 52 | }, 53 | "s3SchemaVersion": "1.0" 54 | }, 55 | "responseElements": { 56 | "x-amz-id-2": "EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH", 57 | "x-amz-request-id": "EXAMPLE123456789" 58 | }, 59 | "awsRegion": "us-east-1", 60 | "eventName": "s3:ObjectCreated:Put", 61 | "userIdentity": { 62 | "principalId": "EXAMPLE" 63 | }, 64 | "eventSource": "aws:s3" 65 | } 66 | ] 67 | } 68 | |] 69 | 70 | sampleS3PutEvent :: S3Event 71 | sampleS3PutEvent = 72 | RecordsEvent 73 | { 
_reRecords = 74 | [ S3EventNotification 75 | { _senAwsRegion = NorthVirginia 76 | , _senEventName = Event_S3_ObjectCreated_Put 77 | , _senEventSource = "aws:s3" 78 | , _senEventTime = UTCTime (fromGregorian 1970 1 1) (secondsToDiffTime 0) 79 | , _senEventVersion = "2.0" 80 | , _senRequestParameters = 81 | RequestParametersEntity 82 | { _rpeSourceIPAddress = "127.0.0.1" 83 | } 84 | , _senResponseElements = 85 | ResponseElementsEntity 86 | { _reeXAmzId2 = 87 | "EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH" 88 | , _reeXAmzRequestId = "EXAMPLE123456789" 89 | } 90 | , _senS3 = 91 | S3Entity 92 | { _seBucket = 93 | S3BucketEntity 94 | { _sbeArn = "bucketarn" 95 | , _sbeName = BucketName "sourcebucket" 96 | , _sbeOwnerIdentity = 97 | UserIdentityEntity 98 | { _uiePrincipalId = "EXAMPLE" 99 | } 100 | } 101 | , _seConfigurationId = "testConfigRule" 102 | , _seObject = 103 | S3ObjectEntity 104 | { _soeETag = Just (ETag "0123456789abcdef0123456789abcdef") 105 | , _soeKey = ObjectKey "HappyFace.jpg" 106 | , _soeSize = Just 1024 107 | , _soeSequencer = "0A1B2C3D4E5F678901" 108 | , _soeVersionId = Nothing 109 | } 110 | , _seS3SchemaVersion = "1.0" 111 | } 112 | , _senUserIdentity = 113 | UserIdentityEntity 114 | { _uiePrincipalId = "EXAMPLE" 115 | } 116 | } 117 | ] 118 | } 119 | 120 | sampleS3DeleteJSON :: ByteString 121 | sampleS3DeleteJSON = [r| 122 | { 123 | "Records": [ 124 | { 125 | "eventVersion": "2.0", 126 | "eventTime": "1970-01-01T00:00:00.000Z", 127 | "requestParameters": { 128 | "sourceIPAddress": "127.0.0.1" 129 | }, 130 | "s3": { 131 | "configurationId": "testConfigRule", 132 | "object": { 133 | "sequencer": "0A1B2C3D4E5F678901", 134 | "key": "HappyFace.jpg" 135 | }, 136 | "bucket": { 137 | "arn": "bucketarn", 138 | "name": "sourcebucket", 139 | "ownerIdentity": { 140 | "principalId": "EXAMPLE" 141 | } 142 | }, 143 | "s3SchemaVersion": "1.0" 144 | }, 145 | "responseElements": { 146 | "x-amz-id-2": 
"EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH", 147 | "x-amz-request-id": "EXAMPLE123456789" 148 | }, 149 | "awsRegion": "us-east-1", 150 | "eventName": "s3:ObjectRemoved:Delete", 151 | "userIdentity": { 152 | "principalId": "EXAMPLE" 153 | }, 154 | "eventSource": "aws:s3" 155 | } 156 | ] 157 | } 158 | |] 159 | 160 | sampleS3DeleteEvent :: S3Event 161 | sampleS3DeleteEvent = 162 | RecordsEvent 163 | { _reRecords = 164 | [ S3EventNotification 165 | { _senAwsRegion = NorthVirginia 166 | , _senEventName = Event_S3_ObjectRemoved_Delete 167 | , _senEventSource = "aws:s3" 168 | , _senEventTime = UTCTime (fromGregorian 1970 1 1) (secondsToDiffTime 0) 169 | , _senEventVersion = "2.0" 170 | , _senRequestParameters = 171 | RequestParametersEntity 172 | { _rpeSourceIPAddress = "127.0.0.1" 173 | } 174 | , _senResponseElements = 175 | ResponseElementsEntity 176 | { _reeXAmzId2 = 177 | "EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH" 178 | , _reeXAmzRequestId = "EXAMPLE123456789" 179 | } 180 | , _senS3 = 181 | S3Entity 182 | { _seBucket = 183 | S3BucketEntity 184 | { _sbeArn = "bucketarn" 185 | , _sbeName = BucketName "sourcebucket" 186 | , _sbeOwnerIdentity = 187 | UserIdentityEntity 188 | { _uiePrincipalId = "EXAMPLE" 189 | } 190 | } 191 | , _seConfigurationId = "testConfigRule" 192 | , _seObject = 193 | S3ObjectEntity 194 | { _soeETag = Nothing 195 | , _soeKey = ObjectKey "HappyFace.jpg" 196 | , _soeSize = Nothing 197 | , _soeSequencer = "0A1B2C3D4E5F678901" 198 | , _soeVersionId = Nothing 199 | } 200 | , _seS3SchemaVersion = "1.0" 201 | } 202 | , _senUserIdentity = 203 | UserIdentityEntity 204 | { _uiePrincipalId = "EXAMPLE" 205 | } 206 | } 207 | ] 208 | } 209 | -------------------------------------------------------------------------------- /integration-test/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Test packaging a function, deploying it to AWS and running it. 
With --dry-run, 3 | # only packaging is tested. With --no-docker, Docker isn't used for packaging. 4 | 5 | set -euo pipefail 6 | 7 | DRY_RUN= 8 | REUSE_DIR= 9 | FAILFAST= 10 | while [ $# -gt 0 ] 11 | do 12 | case "$1" in 13 | --dry-run) 14 | DRY_RUN=true 15 | shift 16 | ;; 17 | --no-clean-dir) 18 | REUSE_DIR=true 19 | shift 20 | ;; 21 | --failfast) 22 | FAILFAST=true 23 | shift 24 | ;; 25 | *) 26 | shift 27 | ;; 28 | esac 29 | done 30 | 31 | for DEPENDENCY in curl jq npm pwgen stack 32 | do 33 | command -v $DEPENDENCY >/dev/null || \ 34 | (echo "$DEPENDENCY is required for the test." >&2; exit 1) 35 | done 36 | if command -v pkgconf >/dev/null 37 | then 38 | PKGCONF=pkgconf 39 | elif command -v pkg-config >/dev/null 40 | then 41 | PKGCONF=pkg-config 42 | else 43 | echo "pkg-config is required for the test." >&2 44 | exit 1 45 | fi 46 | 47 | # Directory of the integration test 48 | HERE=$(cd $(dirname $0); echo $PWD) 49 | 50 | . $HERE/tests.sh 51 | 52 | # Root directory of the repository 53 | DIST=$(cd $HERE/..; echo $PWD) 54 | 55 | # Directory with the test project skeleton 56 | SKELETON=$(cd $HERE/skeleton; echo $PWD) 57 | 58 | # Stackage resolver series to use 59 | : "${RESOLVER_SERIES:=$(cat $DIST/stack.yaml | grep resolver | sed -E 's/resolver: (lts-[0-9]+)\..+/\1/')}" 60 | 61 | SLS_OFFLINE_PID= 62 | function kill_sls_offline () { 63 | if [ -n "$SLS_OFFLINE_PID" ] && kill -0 $SLS_OFFLINE_PID 64 | then 65 | kill $SLS_OFFLINE_PID || true 66 | SLS_OFFLINE_PID= 67 | fi 68 | } 69 | function cleanup () { 70 | kill_sls_offline 71 | if [ -z "$DRY_RUN" ] 72 | then 73 | sls remove --no-color || true 74 | fi 75 | if [ -z "$REUSE_DIR" ] 76 | then 77 | rm -rf $DIR 78 | fi 79 | } 80 | trap cleanup exit 81 | 82 | if [ -n "$REUSE_DIR" ] 83 | then 84 | DIR=$HERE/run 85 | mkdir -p $DIR 86 | echo "Testing in $DIR" 87 | 88 | NAME=s-h-test 89 | else 90 | # Temporary directory to create a project in 91 | DIR=$(mktemp -d) 92 | echo "Testing in $DIR" 93 | 94 | NAME=s-h-test-$(pwgen 10 
-0 -A) 95 | fi 96 | cd $DIR 97 | 98 | # Make sure test directory is accessible by Docker containers 99 | chmod +rx $DIR 100 | umask u=rwx,g=rx,o=rx 101 | 102 | RESOLVER=$($DIST/latest-lts $RESOLVER_SERIES) 103 | echo "Using resolver: $RESOLVER" 104 | 105 | # Extra dependencies to use for the resolver 106 | EXTRA_DEPS=$HERE/extra-deps.$RESOLVER_SERIES 107 | if ! [ -f $EXTRA_DEPS ] 108 | then 109 | EXTRA_DEPS=/dev/null 110 | fi 111 | 112 | # Copy the test files over, replacing the values 113 | skeleton() { 114 | mkdir -p $(dirname $1) 115 | sed "s!NAME!$NAME!g 116 | s!DIST!$DIST!g 117 | s!RESOLVER!$RESOLVER!g 118 | /EXTRA_DEPS/{ 119 | r$EXTRA_DEPS 120 | d 121 | }" < $SKELETON/$1 > $1 122 | } 123 | for FILE in $(find $SKELETON -type f | grep -v /\\. | sed "s!$SKELETON/!!") 124 | do 125 | skeleton $FILE 126 | done 127 | 128 | export PATH=$(npm bin):$PATH 129 | 130 | # Install Serverless and serverless-offline 131 | npm install serverless 132 | npm install serverless-offline 133 | 134 | # Compile and install the plugin 135 | pushd $DIST >/dev/null 136 | npm install 137 | find . 
-maxdepth 1 -type f -name "serverless-haskell-*.tgz" -delete 138 | npm pack 139 | popd >/dev/null 140 | npm install $DIST/serverless-haskell-*.tgz 141 | 142 | # Disable deprecation messages so the output is stable 143 | export SLS_DEPRECATION_DISABLE='*' 144 | 145 | # Just package the service first 146 | assert_success "sls package" sls package 147 | 148 | # Test local invocation 149 | assert_contains_output "sls invoke local: logs" "This should go to logs" \ 150 | sls invoke local --function main --data '[4, 5, 6]' 151 | assert_contains_output "sls invoke local: echo argument" 'Array [Number 4.0,Number 5.0,Number 6.0]' \ 152 | sls invoke local --function main --data '[4, 5, 6]' 153 | assert_contains_output "sls invoke local: result" "[11,22,33]" \ 154 | sls invoke local --function main --data '[4, 5, 6]' 155 | 156 | # Test local invocation that errors 157 | assert_contains_output "sls invoke local (error)" \ 158 | '{"errorType":"ErrorCall","errorMessage":"Magic error\nCallStack (from HasCallStack):\n error, called at Main.hs:25:30 in main:Main"}' \ 159 | sh -c 'sls invoke local --function main --data '"'"'{"error":1}'"'"' || true' 160 | 161 | # Test local invocation of a JavaScript function 162 | assert_output "sls invoke local (JavaScript)" local_output_js.txt \ 163 | sls invoke local --function jsfunc --data '{}' 164 | 165 | # Test serverless-offline 166 | sls offline start --useDocker & 167 | SLS_OFFLINE_PID=$! 
168 | until curl http://localhost:3002/ >/dev/null 2>&1 169 | do 170 | sleep 1 171 | done 172 | assert_output "sls offline" offline_output.txt \ 173 | curl -s http://localhost:3000/dev/hello/integration 174 | 175 | kill_sls_offline 176 | 177 | if [ -n "$DRY_RUN" ] 178 | then 179 | # All done (locally) 180 | : 181 | else 182 | # Deploy to AWS 183 | sls deploy 184 | 185 | # Run the function and verify the results 186 | assert_output "sls invoke" output.json \ 187 | sls invoke --function main --data '[4, 5, 6]' 188 | 189 | # Wait for the logs to be propagated and verify them 190 | sleep 20 191 | assert_output "sls logs" logs.txt \ 192 | sls logs --function main 193 | 194 | # Test an invocation that errors 195 | assert_output "sls invoke" error_output.txt \ 196 | sh -c 'sls invoke --function main --data '"'"'{"error":1}'"'"' || true' 197 | 198 | # Test for error present in logs 199 | sleep 20 200 | assert_contains_output "sls logs (error)" \ 201 | '{"errorType":"ErrorCall","errorMessage":"Magic error\nCallStack (from HasCallStack):\n error, called at Main.hs:25:30 in main:Main"}' \ 202 | sls logs --function main 203 | 204 | # Run the function a few times in repetition 205 | assert_output "sls invoke (multiple)" multi_output.txt \ 206 | bash -c "for i in {1..10}; do sls invoke --function main --data []; done" 207 | 208 | # Run the function from the subdirectory and verify the result 209 | assert_output "sls invoke (subdirectory)" subdir_output.json \ 210 | sls invoke --function subdir --data '{}' 211 | 212 | # Run the JavaScript function and verify the results 213 | assert_output "sls invoke (JavaScript)" output_js.json \ 214 | sls invoke --function jsfunc --data '[4, 5, 6]' 215 | 216 | # Update a function 217 | sed 's/33/44/g' Main.hs > Main_modified.hs && mv Main_modified.hs Main.hs 218 | sls deploy function --function main 219 | 220 | # Verify the updated result 221 | assert_output "sls invoke (after sls deploy function)" output_modified.json \ 222 | sls invoke 
--function main --data '[4, 5, 6]' 223 | fi 224 | 225 | end_tests 226 | -------------------------------------------------------------------------------- /ChangeLog.md: -------------------------------------------------------------------------------- 1 | # Changelog for Serverless Haskell 2 | 3 | For the changes in v0.6.x, see this file on the corresponding branch. 4 | 5 | ## Unreleased changes 6 | 7 | * Update dependencies (#167) 8 | * Do not crash on Docker image functions (#167) 9 | 10 | ## 0.12.6 11 | 12 | * Don't rely on JSON key ordering in tests (#165) 13 | * Support LTS 17 (#163) 14 | * Use `haskell` Docker image for building (#163) 15 | 16 | ## 0.12.5 17 | 18 | * Use official AWS base image (#160) 19 | 20 | ## 0.12.4 21 | 22 | * Add Serverless validation schema (http://slss.io/configuration-validation) 23 | (#153) 24 | 25 | ## 0.12.3 26 | 27 | * If an error occurs during a request, it is logged to standard output (and 28 | therefore CloudWatch) in addition to being returned to the client (#152). 29 | 30 | ## 0.12.2 31 | 32 | * Explicitly use Amazon Linux 2 runtime (#150). 33 | 34 | ## 0.12.1 35 | 36 | * Fix for traversing SNS messages embedded in SQS events. 37 | 38 | ## 0.12.0 39 | 40 | * Remove `LambdaEvent` type and unimplemented event types. 41 | * Add functions `isCreatedEvent` and `isRemoveEvent` for S3 events. 42 | * Generalise the monad type for `lambdaMain`. 43 | * Add specialised main functions to make working with SQS, SNS and S3 events 44 | easier. 45 | 46 | ## 0.11.3 47 | 48 | * Fix deployment yet again. 49 | 50 | ## 0.11.2 51 | 52 | * Fix deployment (#137). 53 | 54 | ## 0.11.1 55 | 56 | * Fix deployment (#136). 57 | 58 | ## 0.11.0 59 | 60 | * Change to a native Haskell runtime (#130). The Haskell process is now 61 | receiving the events from AWS and sending the results back without the 62 | JavaScript wrapper. 63 | * Several Lambda invocations can reuse the same Haskell process instead of 64 | a new one being created each time. 
65 | * Passing arguments to the executable is not supported anymore. 66 | * Local invocation (`sls invoke local` and `sls offline`) use the Docker build 67 | by default. 68 | * `--useDocker` flag is required when using Serverless Offline. 69 | 70 | ## 0.10.5 71 | 72 | * Fix uploading the package to NPM (#132). 73 | 74 | ## 0.10.4 75 | 76 | * Fix uploading the package to NPM (#131). 77 | 78 | ## 0.10.3 79 | 80 | * Fix tests of serverless-offline functionality (#128). 81 | * Use TypeScript for the wrapper (#126). 82 | 83 | ## 0.10.2 84 | 85 | * Speed up checking required glibc version, avoiding potentially very long 86 | deployment times (#124). 87 | * Fix running integration tests on macOS (#125). 88 | 89 | ## 0.10.1 90 | 91 | * Support LTS 15. 92 | 93 | ## 0.10.0 94 | 95 | * Adds support for SQS events, including those with embedded SNS and S3 messages. 96 | 97 | ## 0.9.4 98 | 99 | * Update Node.js runtime to 12.x. 100 | 101 | ## 0.9.3 102 | 103 | * More robust deployment process. 104 | 105 | ## 0.9.2 106 | 107 | * Support LTS 14. 108 | * Check glibc version of the resulting executable to avoid errors when running 109 | (#114). 110 | * Use LTS 13 Docker image for building the binary to avoid depending on glibc 111 | version not present on AWS. 112 | 113 | ## 0.9.1 114 | 115 | * Release a version following the proper release process. 116 | 117 | ## 0.9.0 118 | 119 | * Use `ObjectVersionId` from `amazonka-s3` in S3 events. 120 | 121 | ## 0.8.11 122 | 123 | * Update the list of system libraries available on AWS Lambda (#108). 124 | 125 | ## 0.8.10 126 | 127 | * Fix deployment to Hackage using new Stack. 128 | 129 | ## 0.8.9 130 | 131 | * Close the listening socket to fix local invocations hanging (#103). 132 | 133 | ## 0.8.8 134 | 135 | * Switch AWS Lambda runtime to NodeJS 10.x. 136 | 137 | ## 0.8.7 138 | 139 | * Build the whole project instead of individual handlers by default for better 140 | caching behavior. 
141 | 142 | ## 0.8.6 143 | 144 | * Ensure Serverless variable substitutions properly affect the `docker` value 145 | in the configuration. 146 | 147 | ## 0.8.5 148 | 149 | * Explicitly support LTS 13 150 | 151 | ## 0.8.4 152 | 153 | * Change the communication method between the JavaScript wrapper and the Haskell 154 | process to TCP. This fixes errors like "Resource vanished: broken pipe" when 155 | running the Haskell executable standalone. 156 | * Do not crash when `ldd` is not found when locally invoking a function on macOS. 157 | * Fix error message about no Haskell functions being found when invoking a 158 | function locally. 159 | 160 | ## 0.8.3 161 | 162 | * It is now an error if the plugin is enabled but there are no functions with 163 | Haskell runtime. 164 | 165 | ## 0.8.2 166 | 167 | * Add authorizer parsing to proxy lambda request context 168 | * Use `runtime: haskell` to distinguish Haskell functions. This makes it 169 | possible to use Haskell and other language functions in a single project. 170 | 171 | ## v0.7.5 172 | 173 | * Add support for `sls deploy function`. 174 | 175 | ## v0.7.4 176 | 177 | * Update serverless dependency to work with new NodeJS. 178 | 179 | ## v0.7.3 180 | 181 | * Documentation fixes. 182 | 183 | ## v0.7.2 184 | 185 | * Documentation fixes. 186 | 187 | ## v0.7.1 188 | 189 | * Better display of Stack errors when building. 190 | * Support deploying static executables. 191 | 192 | ## v0.7.0 193 | 194 | * Default to building with Docker. 195 | * Support LTS 12. 196 | 197 | ## v0.6.1 198 | 199 | * Support `serverless-offline` and `serverless invoke local`. 200 | 201 | ## v0.6.0 202 | 203 | * Automatically add necessary system library dependencies. 204 | 205 | ## v0.5.3 206 | 207 | * Remove workaround for `amazonka` not being in Stackage LTS 11. 208 | 209 | ## v0.5.2 210 | 211 | * Support Stackage LTS 11. 212 | 213 | ## v0.5.1 214 | 215 | * Improve checking Haskell package version. 
216 | 217 | ## v0.5.0 218 | 219 | * Support projects in subdirectories. 220 | * Use `http-types` where applicable. 221 | 222 | ## v0.4.3 223 | 224 | * Documentation fixes. 225 | * Do not try to build with Nix. 226 | 227 | ## v0.4.2 228 | 229 | * Documentation fixes. 230 | * Add API Gateway types. 231 | * Check that the JS package version corresponds to the Haskell one. 232 | 233 | ## v0.4.1 234 | 235 | * Improve Node version compatibility when packaging. 236 | * Documentation fixes. 237 | 238 | ## v0.4.0 239 | 240 | * Move JSON-related modules into their own namespace. 241 | * Option to force using a Docker image when building. 242 | 243 | ## v0.3.1 244 | 245 | * Speed up deployment by disabling JavaScript dependency checks. 246 | 247 | ## v0.3.0 248 | 249 | * Fix JavaScript wrapper syntax. 250 | 251 | ## v0.2.1 252 | 253 | * Documentation fixes. 254 | 255 | ## v0.2.0 256 | 257 | * Documentation updates. 258 | * Allow including library dependencies. 259 | * Allow specifying arguments to the executable. 260 | 261 | ## v0.1.0 262 | 263 | * Improve SNS and Lambda event types. 264 | 265 | ## v0.0.6 266 | 267 | * Trim dependencies for releasing on Stackage. 268 | 269 | ## v0.0.4 270 | 271 | * Release fixes. 272 | 273 | ## v0.0.3 274 | 275 | * Release fixes. 276 | 277 | ## v0.0.2 278 | 279 | * Release fixes. 280 | 281 | ## v0.0.1 282 | 283 | * Initial release. 
284 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Serverless Haskell 2 | 3 | ![Build status](https://github.com/seek-oss/serverless-haskell/workflows/Build/badge.svg) 4 | [![Hackage](https://img.shields.io/hackage/v/serverless-haskell.svg)](https://hackage.haskell.org/package/serverless-haskell) 5 | [![Stackage LTS](https://www.stackage.org/package/serverless-haskell/badge/lts)](https://www.stackage.org/lts/package/serverless-haskell) 6 | [![Hackage dependencies](https://img.shields.io/hackage-deps/v/serverless-haskell.svg)](https://packdeps.haskellers.com/feed?needle=serverless-haskell) 7 | [![npm](https://img.shields.io/npm/v/serverless-haskell.svg)](https://www.npmjs.com/package/serverless-haskell) 8 | 9 | Deploying Haskell code onto [AWS Lambda] as native runtime using [Serverless]. 10 | 11 | ## Prerequisites 12 | 13 | * AWS account 14 | * [Stack] 15 | * [NPM] 16 | * [Docker] 17 | 18 | ## Usage 19 | 20 | There are two ways to start, either via the stack template, or directly modifying a project. You may want to use the manual approach as the template specifies a specific stack resolver as it needs to hardcode the `stack.yaml` file. 21 | 22 | In either case, you will want to have [Serverless] installed, eg. `npm install -g serverless`. 23 | 24 | ### Using the stack template 25 | 26 | * Create a [Stack] package for your code: 27 | 28 | ```shell 29 | stack new mypackage https://raw.githubusercontent.com/seek-oss/serverless-haskell/master/serverless-haskell.hsfiles 30 | ``` 31 | 32 | * Update the resolver in the `stack.yaml` file. This is hardcoded as the resolver number is not known at template interpolation time. You should pick either the latest resolver, or one you have used before and have thus prebuilt many of the core packages for. 
33 | 34 | * Install the dependencies and build the project: 35 | 36 | ```shell 37 | cd mypackage 38 | npm install 39 | stack build 40 | sls invoke local -f mypackage-func 41 | ``` 42 | 43 | This should invoke serverless locally and display output once everything has built. 44 | 45 | ### Manually 46 | 47 | * Create a [Stack] package for your code: 48 | 49 | ```shell 50 | stack new mypackage 51 | ``` 52 | 53 | LTS 10-17 are supported, older versions are likely to work too but untested. 54 | 55 | * Initialise a Serverless project inside the Stack package directory and install 56 | the `serverless-haskell` plugin: 57 | 58 | ```shell 59 | cd mypackage 60 | npm init -y 61 | npm install --save serverless serverless-haskell@x.y.z 62 | ``` 63 | 64 | The version of the NPM package to install must match the version of the 65 | Haskell package. 66 | 67 | * Create `serverless.yml` with the following contents: 68 | 69 | ```yaml 70 | service: myservice 71 | 72 | provider: 73 | name: aws 74 | runtime: haskell 75 | 76 | functions: 77 | myfunc: 78 | handler: mypackage.mypackage-exe 79 | # Here, mypackage is the Haskell package name and mypackage-exe is the 80 | # executable name as defined in the Cabal file. The handler field may be 81 | # prefixed with a path of the form `dir1/.../dirn`, relative to 82 | # `serverless.yml`, which points to the location where the Haskell 83 | # package `mypackage` is defined. This prefix is not needed when the 84 | # Stack project is defined at the same level as `serverless.yml`. 
85 | 86 | plugins: 87 | - serverless-haskell 88 | ``` 89 | 90 | * Write your `main` function: 91 | 92 | ```haskell 93 | import qualified Data.Aeson as Aeson 94 | 95 | import AWSLambda 96 | 97 | main = lambdaMain handler 98 | 99 | handler :: Aeson.Value -> IO [Int] 100 | handler evt = do 101 | putStrLn "This should go to logs" 102 | print evt 103 | pure [1, 2, 3] 104 | ``` 105 | 106 | * Add `aeson` and `serverless-haskell` to `package.yaml`: 107 | 108 | ```yaml 109 | dependencies: 110 | - base >= 4.7 && < 5 111 | - aeson 112 | - serverless-haskell 113 | ``` 114 | 115 | * Build and test locally using `sls invoke local`: 116 | 117 | The `serverless-haskell` plugin will build the package using Stack. Note that 118 | the first build can take a long time. Consider adding `export SLS_DEBUG=*` so 119 | you can see what is happening. 120 | 121 | ``` 122 | export SLS_DEBUG=* 123 | sls invoke local -f myfunc 124 | ``` 125 | 126 | * Use `sls deploy` to deploy the executable to AWS Lambda. 127 | 128 | The `serverless-haskell` plugin will build the package using Stack, then upload 129 | it to AWS together with a JavaScript wrapper to pass the input and output 130 | from/to AWS Lambda. 131 | 132 | ``` 133 | export SLS_DEBUG=* 134 | sls deploy 135 | ``` 136 | You can test the function and see the invocation results with: 137 | 138 | ``` 139 | sls invoke -f myfunc 140 | ``` 141 | 142 | 143 | ### API Gateway 144 | 145 | This plugin supports handling API Gateway requests. Declare the HTTP events 146 | normally in `serverless.yml` and use 147 | [AWSLambda.Events.APIGateway](https://hackage.haskell.org/package/serverless-haskell/docs/AWSLambda-Events-APIGateway.html) 148 | in the handler to process them. 149 | 150 | [Serverless Offline] can be used for local testing of API Gateway requests. You 151 | must use `--useDocker` flag so that the native Haskell runtime works correctly. 
152 | 153 | When using [Serverless Offline], make sure that the project directory is 154 | world-readable, otherwise the started Docker container will be unable to access 155 | the handlers and all invocations will return HTTP status 502. 156 | 157 | ### Notes 158 | 159 | * Only AWS Lambda is supported at the moment. Other cloud providers would 160 | require different JavaScript wrappers to be implemented. 161 | 162 | See 163 | [AWSLambda](https://hackage.haskell.org/package/serverless-haskell/docs/AWSLambda.html) 164 | for documentation, including additional options to control the deployment. 165 | 166 | ## Development 167 | 168 | `master` branch is the stable version. It is normally released to Hackage once 169 | new changes are merged via Git tags. 170 | 171 | The package is also maintained in Stackage LTS, provided the dependencies are 172 | not blocking it. 173 | 174 | ### Testing 175 | 176 | * Haskell code is tested with Stack: `stack test`. 177 | * TypeScript code is linted with `eslint`. 178 | 179 | ### Integration tests 180 | 181 | Integration test verifies that the project can build and deploy a complete 182 | function to AWS, and it runs with expected functionality. 183 | 184 | Integration test is only automatically run up to deployment due to the need for 185 | an AWS account. To run manually: 186 | 187 | * Ensure you have the required dependencies: 188 | - `curl` 189 | - [jq] 190 | - [NPM] 191 | - [`pkg-config`][pkg-config] 192 | - `pwgen` 193 | - [Stack] 194 | * Get an AWS account and add the access credentials into your shell environment. 195 | * Run `./integration-test/run.sh`. The exit code indicates success. 196 | * To verify just the packaging, without deployment, run 197 | `./integration-test/run.sh --dry-run`. 198 | * By default, the integration test is run with the LTS specified in 199 | `stack.yaml`. To specify a different series, use `RESOLVER_SERIES=lts-14`. 
200 | * To avoid creating a temporary directory for every run, specify 201 | `--no-clean-dir`. This can speed up repeated test runs, but does not guarantee 202 | the same results as a clean test. 203 | 204 | ### Releasing 205 | 206 | * Ensure you are on the `master` branch. 207 | * Ensure that all the changes are reflected in the changelog. 208 | * Run the integration tests. 209 | * Run `./bumpversion major|minor|patch`. This will increment the version number, 210 | update the changelog, create and push the Git tag and the branch. 211 | * If you have released an LTS version, merge the version branch into `master`, 212 | taking care of the conflicts around version numbers and changelog, and release 213 | the latest version as well. 214 | 215 | [AWS Lambda]: https://aws.amazon.com/lambda/ 216 | [Docker]: https://www.docker.com/ 217 | [jq]: https://stedolan.github.io/jq/ 218 | [NPM]: https://www.npmjs.com/ 219 | [pkg-config]: https://www.freedesktop.org/wiki/Software/pkg-config/ 220 | [Serverless]: https://serverless.com/framework/ 221 | [Serverless Offline]: https://github.com/dherault/serverless-offline 222 | [Stack]: https://haskellstack.org 223 | -------------------------------------------------------------------------------- /serverless-haskell.hsfiles: -------------------------------------------------------------------------------- 1 | {-# START_FILE package.yaml #-} 2 | name: {{name}} 3 | version: 0.1.0 4 | github: "{{github-username}}{{^github-username}}githubuser{{/github-username}}/{{name}}" 5 | license: BSD3 6 | author: "{{author-name}}{{^author-name}}Author name here{{/author-name}}" 7 | maintainer: "{{author-email}}{{^author-email}}example@example.com{{/author-email}}" 8 | copyright: "{{copyright}}{{^copyright}}{{year}}{{^year}}2018{{/year}} {{author-name}}{{^author-name}}Author name here{{/author-name}}{{/copyright}}" 9 | 10 | extra-source-files: 11 | - README.md 12 | - ChangeLog.md 13 | 14 | # Metadata used when publishing your package 15 | # synopsis: 
Short description of your package 16 | # category: {{category}}{{^category}}Web{{/category}} 17 | 18 | # To avoid duplicated efforts in documentation and dealing with the 19 | # complications of embedding Haddock markup inside cabal files, it is 20 | # common to point users to the README.md file. 21 | description: Please see the README on GitHub at 22 | 23 | dependencies: 24 | - base >= 4.7 && < 5 25 | - serverless-haskell 26 | - aeson 27 | 28 | library: 29 | source-dirs: src 30 | 31 | executables: 32 | {{name}}-exe: 33 | main: Main.hs 34 | source-dirs: app 35 | ghc-options: 36 | - -threaded 37 | - -rtsopts 38 | - -with-rtsopts=-N 39 | dependencies: 40 | - {{name}} 41 | 42 | tests: 43 | {{name}}-test: 44 | main: Spec.hs 45 | source-dirs: test 46 | ghc-options: 47 | - -threaded 48 | - -rtsopts 49 | - -with-rtsopts=-N 50 | dependencies: 51 | - {{name}} 52 | 53 | {-# START_FILE Setup.hs #-} 54 | import Distribution.Simple 55 | main = defaultMain 56 | 57 | {-# START_FILE test/Spec.hs #-} 58 | main :: IO () 59 | main = putStrLn "Test suite not yet implemented" 60 | 61 | {-# START_FILE src/Lib.hs #-} 62 | module Lib 63 | ( someFunc 64 | ) where 65 | 66 | someFunc :: IO () 67 | someFunc = putStrLn "Serverless is running your lambda function!" 68 | 69 | {-# START_FILE app/Main.hs #-} 70 | module Main where 71 | 72 | import Lib 73 | import qualified Data.Aeson as Aeson 74 | 75 | import AWSLambda 76 | 77 | main = lambdaMain handler 78 | 79 | handler :: Aeson.Value -> IO [Int] 80 | handler evt = do 81 | putStrLn "This should go to logs" 82 | someFunc 83 | print evt 84 | pure [1, 2, 3] 85 | 86 | {-# START_FILE README.md #-} 87 | # {{name}} 88 | 89 | {-# START_FILE ChangeLog.md #-} 90 | # Changelog for {{name}} 91 | 92 | ## Unreleased changes 93 | 94 | {-# START_FILE LICENSE #-} 95 | Copyright {{author-name}}{{^author-name}}Author name here{{/author-name}} (c) {{year}}{{^year}}2018{{/year}} 96 | 97 | All rights reserved. 
98 | 99 | Redistribution and use in source and binary forms, with or without 100 | modification, are permitted provided that the following conditions are met: 101 | 102 | * Redistributions of source code must retain the above copyright 103 | notice, this list of conditions and the following disclaimer. 104 | 105 | * Redistributions in binary form must reproduce the above 106 | copyright notice, this list of conditions and the following 107 | disclaimer in the documentation and/or other materials provided 108 | with the distribution. 109 | 110 | * Neither the name of {{author-name}}{{^author-name}}Author name here{{/author-name}} nor the names of other 111 | contributors may be used to endorse or promote products derived 112 | from this software without specific prior written permission. 113 | 114 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 115 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 116 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 117 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 118 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 119 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 120 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 121 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 122 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 123 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 124 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
125 | 126 | {-# START_FILE .gitignore #-} 127 | .stack-work/ 128 | node_modules/ 129 | .serverless/ 130 | {{name}}.cabal 131 | 132 | {-# START_FILE serverless.yml #-} 133 | service: {{name}} 134 | 135 | provider: 136 | name: aws 137 | runtime: haskell 138 | 139 | functions: 140 | {{name}}-func: 141 | handler: {{name}}.{{name}}-exe 142 | # Here, mypackage is the Haskell package name and myfunc is the executable 143 | # name as defined in the Cabal file. The handler field may be prefixed 144 | # with a path of the form `dir1/.../dirn`, relative to `serverless.yml`, 145 | # which points to the location where the Haskell package `mypackage` is 146 | # defined. This prefix is not needed when the Stack project is defined at 147 | # the same level as `serverless.yml`. 148 | 149 | plugins: 150 | - serverless-haskell 151 | 152 | {-# START_FILE package.json #-} 153 | { 154 | "name": "{{name}}", 155 | "version": "0.1.0", 156 | "description": "", 157 | "main": "index.js", 158 | "directories": { 159 | "test": "test" 160 | }, 161 | "scripts": { 162 | "test": "echo \"Error: no test specified\" && exit 1" 163 | }, 164 | "author": "", 165 | "license": "ISC", 166 | "dependencies": { 167 | "serverless": "^1.41.1", 168 | "serverless-haskell": "^0.12.6" 169 | } 170 | } 171 | 172 | {-# START_FILE stack.yaml #-} 173 | # This file was automatically generated by 'stack init' 174 | # 175 | # Some commonly used options have been documented as comments in this file. 176 | # For advanced use and comprehensive documentation of the format, please see: 177 | # https://docs.haskellstack.org/en/stable/yaml_configuration/ 178 | 179 | # Resolver to choose a 'specific' stackage snapshot or a compiler version. 180 | # A snapshot resolver dictates the compiler version and the set of packages 181 | # to be used for project dependencies. 
For example: 182 | # 183 | # resolver: lts-3.5 184 | # resolver: nightly-2015-09-21 185 | # resolver: ghc-7.10.2 186 | # resolver: ghcjs-0.1.0_ghc-7.10.2 187 | # 188 | # The location of a snapshot can be provided as a file or url. Stack assumes 189 | # a snapshot provided as a file might change, whereas a url resource does not. 190 | # 191 | # resolver: ./custom-snapshot.yaml 192 | # resolver: https://example.com/snapshots/2018-01-01.yaml 193 | resolver: lts-17.7 194 | 195 | # User packages to be built. 196 | # Various formats can be used as shown in the example below. 197 | # 198 | # packages: 199 | # - some-directory 200 | # - https://example.com/foo/bar/baz-0.0.2.tar.gz 201 | # - location: 202 | # git: https://github.com/commercialhaskell/stack.git 203 | # commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a 204 | # - location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a 205 | # subdirs: 206 | # - auto-update 207 | # - wai 208 | packages: 209 | - . 210 | # Dependency packages to be pulled from upstream that are not in the resolver 211 | # using the same syntax as the packages field. 
212 | # (e.g., acme-missiles-0.3) 213 | extra-deps: 214 | - serverless-haskell-0.12.6 215 | 216 | # Override default flag values for local packages and extra-deps 217 | # flags: {} 218 | 219 | # Extra package databases containing global packages 220 | # extra-package-dbs: [] 221 | 222 | # Control whether we use the GHC we find on the path 223 | # system-ghc: true 224 | # 225 | # Require a specific version of stack, using version ranges 226 | # require-stack-version: -any # Default 227 | # require-stack-version: ">=1.7" 228 | # 229 | # Override the architecture used by stack, especially useful on Windows 230 | # arch: i386 231 | # arch: x86_64 232 | # 233 | # Extra directories used by stack for building 234 | # extra-include-dirs: [/path/to/dir] 235 | # extra-lib-dirs: [/path/to/dir] 236 | # 237 | # Allow a newer minor version of GHC than the snapshot specifies 238 | # compiler-check: newer-minor 239 | -------------------------------------------------------------------------------- /test/AWSLambda/Events/APIGatewaySpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE QuasiQuotes #-} 3 | 4 | module AWSLambda.Events.APIGatewaySpec where 5 | 6 | import AWSLambda.Events.APIGateway 7 | 8 | import Control.Lens 9 | import Data.Aeson 10 | import qualified Data.Aeson.KeyMap as KeyMap 11 | import Data.ByteString.Lazy (ByteString) 12 | import qualified Data.HashMap.Strict as HashMap 13 | import Data.IP 14 | import Data.Text (Text) 15 | 16 | import Text.RawString.QQ 17 | 18 | import Test.Hspec 19 | 20 | spec :: Spec 21 | spec = do 22 | describe "APIGatewayProxyRequest" $ 23 | it "parses sample GET request" $ 24 | eitherDecode sampleGetRequestJSON `shouldBe` Right sampleGetRequest 25 | describe "APIGatewayProxyResponse" $ 26 | it "parses sample text event" $ 27 | eitherDecode sampleOKResponseJSON `shouldBe` Right sampleOKResponse 28 | 29 | sampleGetRequestJSON :: ByteString 30 | 
sampleGetRequestJSON = [r| 31 | { 32 | "path": "/test/hello", 33 | "headers": { 34 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 35 | "Accept-Encoding": "gzip, deflate, lzma, sdch, br", 36 | "Accept-Language": "en-US,en;q=0.8", 37 | "CloudFront-Forwarded-Proto": "https", 38 | "CloudFront-Is-Desktop-Viewer": "true", 39 | "CloudFront-Is-Mobile-Viewer": "false", 40 | "CloudFront-Is-SmartTV-Viewer": "false", 41 | "CloudFront-Is-Tablet-Viewer": "false", 42 | "CloudFront-Viewer-Country": "US", 43 | "Host": "wt6mne2s9k.execute-api.us-west-2.amazonaws.com", 44 | "Upgrade-Insecure-Requests": "1", 45 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48", 46 | "Via": "1.1 fb7cca60f0ecd82ce07790c9c5eef16c.cloudfront.net (CloudFront)", 47 | "X-Amz-Cf-Id": "nBsWBOrSHMgnaROZJK1wGCZ9PcRcSpq_oSXZNQwQ10OTZL4cimZo3g==", 48 | "X-Forwarded-For": "192.168.100.1, 192.168.1.1", 49 | "X-Forwarded-Port": "443", 50 | "X-Forwarded-Proto": "https" 51 | }, 52 | "pathParameters": { 53 | "proxy": "hello" 54 | }, 55 | "requestContext": { 56 | "accountId": "123456789012", 57 | "resourceId": "us4z18", 58 | "stage": "test", 59 | "requestId": "41b45ea3-70b5-11e6-b7bd-69b5aaebc7d9", 60 | "protocol": "HTTP/1.1", 61 | "identity": { 62 | "cognitoIdentityPoolId": "", 63 | "accountId": "", 64 | "cognitoIdentityId": "", 65 | "caller": "", 66 | "apiKey": "", 67 | "sourceIp": "192.168.100.1", 68 | "cognitoAuthenticationType": "", 69 | "cognitoAuthenticationProvider": "", 70 | "userArn": "", 71 | "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48", 72 | "user": "" 73 | }, 74 | "resourcePath": "/{proxy+}", 75 | "httpMethod": "GET", 76 | "apiId": "wt6mne2s9k", 77 | "authorizer": { 78 | "principalId": "test-principalId", 79 | "claims": { 80 | "email": "test@example.com", 
81 | "email_verified": true 82 | }, 83 | "custom_context": 10 84 | } 85 | }, 86 | "resource": "/{proxy+}", 87 | "httpMethod": "GET", 88 | "queryStringParameters": { 89 | "name": "me" 90 | }, 91 | "stageVariables": { 92 | "stageVarName": "stageVarValue" 93 | } 94 | } 95 | |] 96 | 97 | sampleGetRequest :: APIGatewayProxyRequest Text 98 | sampleGetRequest = 99 | APIGatewayProxyRequest 100 | { _agprqResource = "/{proxy+}" 101 | , _agprqPath = "/test/hello" 102 | , _agprqHttpMethod = "GET" 103 | , _agprqHeaders = 104 | [ ("X-Forwarded-Proto", "https") 105 | , ("CloudFront-Is-Desktop-Viewer", "true") 106 | , ( "Accept" 107 | , "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8") 108 | , ( "X-Amz-Cf-Id" 109 | , "nBsWBOrSHMgnaROZJK1wGCZ9PcRcSpq_oSXZNQwQ10OTZL4cimZo3g==") 110 | , ("Accept-Encoding", "gzip, deflate, lzma, sdch, br") 111 | , ("CloudFront-Forwarded-Proto", "https") 112 | , ("Accept-Language", "en-US,en;q=0.8") 113 | , ("CloudFront-Is-Tablet-Viewer", "false") 114 | , ("Upgrade-Insecure-Requests", "1") 115 | , ("CloudFront-Viewer-Country", "US") 116 | , ( "User-Agent" 117 | , "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48") 118 | , ("CloudFront-Is-Mobile-Viewer", "false") 119 | , ("Host", "wt6mne2s9k.execute-api.us-west-2.amazonaws.com") 120 | , ("X-Forwarded-Port", "443") 121 | , ("CloudFront-Is-SmartTV-Viewer", "false") 122 | , ( "Via" 123 | , "1.1 fb7cca60f0ecd82ce07790c9c5eef16c.cloudfront.net (CloudFront)") 124 | , ("X-Forwarded-For", "192.168.100.1, 192.168.1.1") 125 | ] 126 | , _agprqQueryStringParameters = [("name", Just "me")] 127 | , _agprqPathParameters = HashMap.fromList [("proxy", "hello")] 128 | , _agprqStageVariables = HashMap.fromList [("stageVarName", "stageVarValue")] 129 | , _agprqRequestContext = 130 | ProxyRequestContext 131 | { _prcPath = Nothing 132 | , _prcAccountId = "123456789012" 133 | , _prcResourceId = "us4z18" 134 | , 
_prcStage = "test" 135 | , _prcRequestId = "41b45ea3-70b5-11e6-b7bd-69b5aaebc7d9" 136 | , _prcIdentity = 137 | RequestIdentity 138 | { _riCognitoIdentityPoolId = Just "" 139 | , _riAccountId = Just "" 140 | , _riCognitoIdentityId = Just "" 141 | , _riCaller = Just "" 142 | , _riApiKey = Just "" 143 | , _riSourceIp = Just $ IPv4 $ toIPv4 [192, 168, 100, 1] 144 | , _riCognitoAuthenticationType = Just "" 145 | , _riCognitoAuthenticationProvider = Just "" 146 | , _riUserArn = Just "" 147 | , _riUserAgent = Just 148 | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48" 149 | , _riUser = Just "" 150 | } 151 | , _prcResourcePath = "/{proxy+}" 152 | , _prcHttpMethod = "GET" 153 | , _prcApiId = "wt6mne2s9k" 154 | , _prcProtocol = "HTTP/1.1" 155 | , _prcAuthorizer = 156 | Just Authorizer 157 | { _aPrincipalId = Just "test-principalId" 158 | , _aClaims = KeyMap.fromList [("email", toJSON ("test@example.com" :: Text)), ("email_verified", toJSON True)] 159 | , _aContext = KeyMap.fromList [("custom_context", toJSON (10 :: Int))] 160 | } 161 | 162 | } 163 | , _agprqBody = Nothing 164 | } 165 | 166 | sampleOKResponseJSON :: ByteString 167 | sampleOKResponseJSON = [r| 168 | { 169 | "statusCode": 200, 170 | "headers": { 171 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 172 | "Accept-Encoding": "gzip, deflate, lzma, sdch, br", 173 | "Accept-Language": "en-US,en;q=0.8", 174 | "CloudFront-Forwarded-Proto": "https", 175 | "CloudFront-Is-Desktop-Viewer": "true", 176 | "CloudFront-Is-Mobile-Viewer": "false", 177 | "CloudFront-Is-SmartTV-Viewer": "false", 178 | "CloudFront-Is-Tablet-Viewer": "false", 179 | "CloudFront-Viewer-Country": "US", 180 | "Host": "wt6mne2s9k.execute-api.us-west-2.amazonaws.com", 181 | "Upgrade-Insecure-Requests": "1", 182 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48", 183 | "Via": "1.1 fb7cca60f0ecd82ce07790c9c5eef16c.cloudfront.net (CloudFront)", 184 | "X-Amz-Cf-Id": "nBsWBOrSHMgnaROZJK1wGCZ9PcRcSpq_oSXZNQwQ10OTZL4cimZo3g==", 185 | "X-Forwarded-For": "192.168.100.1, 192.168.1.1", 186 | "X-Forwarded-Port": "443", 187 | "X-Forwarded-Proto": "https" 188 | }, 189 | "body": "Hello World" 190 | } 191 | |] 192 | 193 | sampleOKResponse :: APIGatewayProxyResponse Text 194 | sampleOKResponse = 195 | responseOK 196 | & responseBody ?~ "Hello World" 197 | & agprsHeaders .~ 198 | [ ("X-Forwarded-Proto", "https") 199 | , ("CloudFront-Is-Desktop-Viewer", "true") 200 | , ( "Accept" 201 | , "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8") 202 | , ("X-Amz-Cf-Id", "nBsWBOrSHMgnaROZJK1wGCZ9PcRcSpq_oSXZNQwQ10OTZL4cimZo3g==") 203 | , ("Accept-Encoding", "gzip, deflate, lzma, sdch, br") 204 | , ("CloudFront-Forwarded-Proto", "https") 205 | , ("Accept-Language", "en-US,en;q=0.8") 206 | , ("CloudFront-Is-Tablet-Viewer", "false") 207 | , ("Upgrade-Insecure-Requests", "1") 208 | , ("CloudFront-Viewer-Country", "US") 209 | , ( "User-Agent" 210 | , "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48") 211 | , ("CloudFront-Is-Mobile-Viewer", "false") 212 | , ("Host", "wt6mne2s9k.execute-api.us-west-2.amazonaws.com") 213 | , ("X-Forwarded-Port", "443") 214 | , ("CloudFront-Is-SmartTV-Viewer", "false") 215 | , ("Via", "1.1 fb7cca60f0ecd82ce07790c9c5eef16c.cloudfront.net (CloudFront)") 216 | , ("X-Forwarded-For", "192.168.100.1, 192.168.1.1") 217 | ] 218 | -------------------------------------------------------------------------------- /test/AWSLambda/Events/SQSEventSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedLists #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE QuasiQuotes #-} 4 | 5 | module 
AWSLambda.Events.SQSEventSpec where 6 | 7 | import AWSLambda.Events 8 | 9 | import Control.Monad.Trans.Writer 10 | import Data.Aeson 11 | import Data.Aeson.Embedded 12 | import Data.Aeson.TextValue 13 | import Data.ByteString.Lazy (ByteString) 14 | import Data.Text (Text) 15 | import Data.Time.Calendar 16 | import Data.Time.Clock 17 | import Amazonka.S3 as S3 18 | 19 | import Text.RawString.QQ 20 | 21 | import Test.Hspec 22 | 23 | spec :: Spec 24 | spec = describe "Handler" $ do 25 | it "parses sample text event" $ 26 | decode sampleSQSJSON `shouldBe` Just sampleSQSEvent 27 | it "parses sample embedded S3 -> SNS -> SQS event" $ 28 | eitherDecode sampleS3SNSSQSJSON `shouldBe` Right sampleS3SNSSQSEvent 29 | it "traverses sample embedded S3 -> SNS -> SQS event" $ 30 | (execWriter $ 31 | traverseS3InSnsInSqs (tell . (: []) . _sbeName . _seBucket . _senS3) sampleS3SNSSQSEvent) 32 | `shouldBe` ["my-bucket"] 33 | 34 | sampleSQSJSON :: ByteString 35 | sampleSQSJSON = [r| 36 | { 37 | "Records": [ 38 | { 39 | "messageId": "b792b6ba-b444-48c5-9cd8-29b8ad373eae", 40 | "receiptHandle": "ReceiptHandle", 41 | "body": "Hello from SQS!", 42 | "attributes": { 43 | "ApproximateReceiveCount": "1", 44 | "SentTimestamp": "1572575717896", 45 | "SenderId": "AIDAIY4XCTD3OFZN5ED42", 46 | "ApproximateFirstReceiveTimestamp": "1572575717898" 47 | }, 48 | "messageAttributes": { 49 | "Test": { 50 | "Type": "String", 51 | "Value": "TestString" 52 | }, 53 | "TestBinary": { 54 | "Type": "Binary", 55 | "Value": "TestBinary" 56 | } 57 | }, 58 | "md5OfBody": "aca137746cb7d6a8e3eda3d1ce09b0c5", 59 | "eventSource": "aws:sqs", 60 | "eventSourceARN": "arn:aws:sqs:ap-southeast-2:1234567890:my-queue", 61 | "awsRegion": "ap-southeast-2" 62 | } 63 | ] 64 | } 65 | |] 66 | 67 | sampleSQSEvent :: SQSEvent Text 68 | sampleSQSEvent = 69 | RecordsEvent 70 | [ SQSMessage 71 | { _sqsmMessageId = "b792b6ba-b444-48c5-9cd8-29b8ad373eae" 72 | , _sqsmReceiptHandle = "ReceiptHandle" 73 | , _sqsmBody = "Hello from SQS!" 
74 | , _sqsmAttributes = 75 | [ ("ApproximateReceiveCount", "1") 76 | , ("SentTimestamp", "1572575717896") 77 | , ("SenderId", "AIDAIY4XCTD3OFZN5ED42") 78 | , ("ApproximateFirstReceiveTimestamp", "1572575717898") 79 | ] 80 | , _sqsmMessageAttributes = 81 | [ ( "Test" 82 | , MessageAttribute 83 | { _maType = "String" 84 | , _maValue = "TestString" 85 | }) 86 | , ( "TestBinary" 87 | , MessageAttribute 88 | { _maType = "Binary" 89 | , _maValue = "TestBinary" 90 | }) 91 | ] 92 | , _sqsmMd5OfBody = "aca137746cb7d6a8e3eda3d1ce09b0c5" 93 | , _sqsmEventSource = "aws:sqs" 94 | , _sqsmEventSourceARN = "arn:aws:sqs:ap-southeast-2:1234567890:my-queue" 95 | , _sqsmAwsRegion = Sydney 96 | } 97 | ] 98 | 99 | sampleS3SNSSQSJSON :: ByteString 100 | sampleS3SNSSQSJSON = [r| 101 | { 102 | "Records": [ 103 | { 104 | "messageId": "b792b6ba-b444-48c5-9cd8-29b8ad373eae", 105 | "receiptHandle": "ReceiptHandle", 106 | "body": "{\n \"Type\" : \"Notification\",\n \"MessageId\" : \"MessageId\",\n \"TopicArn\" : \"arn:aws:sns:ap-southeast-2:11111111111111:my-topic\",\n \"Subject\" : \"Amazon S3 Notification\",\n \"Message\" : 
\"{\\\"Records\\\":[{\\\"eventVersion\\\":\\\"2.1\\\",\\\"eventSource\\\":\\\"aws:s3\\\",\\\"awsRegion\\\":\\\"ap-southeast-2\\\",\\\"eventTime\\\":\\\"2019-11-01T00:00:00.00Z\\\",\\\"eventName\\\":\\\"s3:ObjectCreated:Put\\\",\\\"userIdentity\\\":{\\\"principalId\\\":\\\"AWS:AHJD568HF4356HJJ:bob\\\"},\\\"requestParameters\\\":{\\\"sourceIPAddress\\\":\\\"787.39.11.220\\\"},\\\"responseElements\\\":{\\\"x-amz-request-id\\\":\\\"GDJS6765sJSHSS\\\",\\\"x-amz-id-2\\\":\\\"ID2\\\"},\\\"s3\\\":{\\\"s3SchemaVersion\\\":\\\"1.0\\\",\\\"configurationId\\\":\\\"ConfigurationId\\\",\\\"bucket\\\":{\\\"name\\\":\\\"my-bucket\\\",\\\"ownerIdentity\\\":{\\\"principalId\\\":\\\"ASKD794UDYDH\\\"},\\\"arn\\\":\\\"arn:aws:s3:::my-bucket\\\"},\\\"object\\\":{\\\"key\\\":\\\"my-key\\\",\\\"size\\\":13315,\\\"eTag\\\":\\\"1231234fabf233124124\\\",\\\"versionId\\\":\\\"735hjf893ufb8fhuf\\\",\\\"sequencer\\\":\\\"HUJKFDHJD8656567HGGSGJKD\\\"}}}]}\",\n \"Timestamp\" : \"2019-11-01T00:00:00Z\",\n \"SignatureVersion\" : \"1\",\n \"Signature\" : \"Signature\",\n \"SigningCertURL\" : \"https://sns.ap-southeast-2.amazonaws.com/SimpleNotificationService-my-cert.pem\",\n \"UnsubscribeURL\" : \"https://sns.ap-southeast-2.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:ap-southeast-2:11111111111111:my-topic:unsub\"\n}", 107 | "attributes": { 108 | "ApproximateReceiveCount": "1", 109 | "SentTimestamp": "1572575717896", 110 | "SenderId": "AIDAIY4XCTD3OFZN5ED42", 111 | "ApproximateFirstReceiveTimestamp": "1572575717898" 112 | }, 113 | "messageAttributes": { 114 | "Test": { 115 | "Type": "String", 116 | "Value": "TestString" 117 | }, 118 | "TestBinary": { 119 | "Type": "Binary", 120 | "Value": "TestBinary" 121 | } 122 | }, 123 | "md5OfBody": "aca137746cb7d6a8e3eda3d1ce09b0c5", 124 | "eventSource": "aws:sqs", 125 | "eventSourceARN": "arn:aws:sqs:ap-southeast-2:1234567890:my-queue", 126 | "awsRegion": "ap-southeast-2" 127 | } 128 | ] 129 | } 130 | |] 131 | 132 | sampleS3SNSSQSEvent :: 
SQSEvent (Embedded (SNSMessage (Embedded S3Event))) 133 | sampleS3SNSSQSEvent = 134 | RecordsEvent 135 | [ SQSMessage 136 | { _sqsmMessageId = "b792b6ba-b444-48c5-9cd8-29b8ad373eae" 137 | , _sqsmReceiptHandle = "ReceiptHandle" 138 | , _sqsmBody = 139 | TextValue $ Embedded $ SNSMessage 140 | { _smMessage = TextValue $ Embedded $ RecordsEvent 141 | [ S3EventNotification 142 | { _senAwsRegion = Sydney 143 | , _senEventName = Event_S3_ObjectCreated_Put 144 | , _senEventSource = "aws:s3" 145 | , _senEventTime = UTCTime (fromGregorian 2019 11 1) 0 146 | , _senEventVersion = "2.1" 147 | , _senRequestParameters = RequestParametersEntity "787.39.11.220" 148 | , _senResponseElements = ResponseElementsEntity 149 | { _reeXAmzId2 = "ID2" 150 | , _reeXAmzRequestId = "GDJS6765sJSHSS" } 151 | , _senS3 = S3Entity 152 | { _seBucket = S3BucketEntity 153 | { _sbeArn = "arn:aws:s3:::my-bucket" 154 | , _sbeName = BucketName "my-bucket" 155 | , _sbeOwnerIdentity = UserIdentityEntity "ASKD794UDYDH" } 156 | , _seConfigurationId = "ConfigurationId" 157 | , _seObject = S3ObjectEntity 158 | { _soeETag = Just (ETag "1231234fabf233124124") 159 | , _soeKey = ObjectKey "my-key" 160 | , _soeSize = Just 13315 161 | , _soeSequencer = "HUJKFDHJD8656567HGGSGJKD" 162 | , _soeVersionId = Just "735hjf893ufb8fhuf" } 163 | , _seS3SchemaVersion = "1.0" 164 | } 165 | , _senUserIdentity = UserIdentityEntity "AWS:AHJD568HF4356HJJ:bob" 166 | } 167 | ] 168 | , _smMessageAttributes = mempty 169 | , _smMessageId = "MessageId" 170 | , _smSignature = "Signature" 171 | , _smSignatureVersion = "1" 172 | , _smSigningCertUrl = "https://sns.ap-southeast-2.amazonaws.com/SimpleNotificationService-my-cert.pem" 173 | , _smSubject = "Amazon S3 Notification" 174 | , _smTimestamp = UTCTime (fromGregorian 2019 11 1) 0 175 | , _smTopicArn = "arn:aws:sns:ap-southeast-2:11111111111111:my-topic" 176 | , _smType = "Notification" 177 | , _smUnsubscribeUrl = 
"https://sns.ap-southeast-2.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:ap-southeast-2:11111111111111:my-topic:unsub" 178 | } 179 | , _sqsmAttributes = 180 | [ ("ApproximateReceiveCount", "1") 181 | , ("SentTimestamp", "1572575717896") 182 | , ("SenderId", "AIDAIY4XCTD3OFZN5ED42") 183 | , ("ApproximateFirstReceiveTimestamp", "1572575717898") 184 | ] 185 | , _sqsmMessageAttributes = 186 | [ ( "Test" 187 | , MessageAttribute 188 | { _maType = "String" 189 | , _maValue = "TestString" 190 | }) 191 | , ( "TestBinary" 192 | , MessageAttribute 193 | { _maType = "Binary" 194 | , _maValue = "TestBinary" 195 | }) 196 | ] 197 | , _sqsmMd5OfBody = "aca137746cb7d6a8e3eda3d1ce09b0c5" 198 | , _sqsmEventSource = "aws:sqs" 199 | , _sqsmEventSourceARN = "arn:aws:sqs:ap-southeast-2:1234567890:my-queue" 200 | , _sqsmAwsRegion = Sydney 201 | } 202 | ] 203 | -------------------------------------------------------------------------------- /test/AWSLambda/Events/SNSEventSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE QuasiQuotes #-} 3 | 4 | module AWSLambda.Events.SNSEventSpec where 5 | 6 | import AWSLambda.Events.MessageAttribute 7 | import AWSLambda.Events.Records 8 | import AWSLambda.Events.S3Event 9 | import AWSLambda.Events.SNSEvent 10 | 11 | import Data.Aeson 12 | import Data.Aeson.Embedded 13 | import Data.Aeson.TextValue 14 | import Data.ByteString.Lazy (ByteString) 15 | import qualified Data.HashMap.Strict as HashMap 16 | import Data.Text (Text) 17 | import Data.Time.Calendar 18 | import Data.Time.Clock 19 | import Amazonka.S3 as S3 20 | 21 | import Text.RawString.QQ 22 | 23 | import Test.Hspec 24 | 25 | spec :: Spec 26 | spec = 27 | describe "SNSEvent" $ do 28 | it "parses sample text event" $ 29 | decode sampleSNSJSON `shouldBe` Just sampleSNSEvent 30 | it "parses sample embedded S3 event" $ 31 | decode sampleSNSS3JSON `shouldBe` Just sampleSNSS3Event 32 | 33 | 
sampleSNSJSON :: ByteString 34 | sampleSNSJSON = [r| 35 | { 36 | "Records": [ 37 | { 38 | "EventVersion": "1.0", 39 | "EventSubscriptionArn": "eventsubscriptionarn", 40 | "EventSource": "aws:sns", 41 | "Sns": { 42 | "SignatureVersion": "1", 43 | "Timestamp": "1970-01-01T00:00:00.000Z", 44 | "Signature": "EXAMPLE", 45 | "SigningCertUrl": "EXAMPLE", 46 | "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", 47 | "Message": "Hello from SNS!", 48 | "MessageAttributes": { 49 | "Test": { 50 | "Type": "String", 51 | "Value": "TestString" 52 | }, 53 | "TestBinary": { 54 | "Type": "Binary", 55 | "Value": "TestBinary" 56 | } 57 | }, 58 | "Type": "Notification", 59 | "UnsubscribeUrl": "EXAMPLE", 60 | "TopicArn": "topicarn", 61 | "Subject": "TestInvoke" 62 | } 63 | } 64 | ] 65 | } 66 | |] 67 | 68 | sampleSNSEvent :: SNSEvent Text 69 | sampleSNSEvent = 70 | RecordsEvent 71 | [ SNSRecord 72 | { _srEventVersion = "1.0" 73 | , _srEventSubscriptionArn = "eventsubscriptionarn" 74 | , _srEventSource = "aws:sns" 75 | , _srSns = 76 | SNSMessage 77 | { _smMessage = "Hello from SNS!" 
78 | , _smMessageAttributes = 79 | HashMap.fromList 80 | [ ( "Test" 81 | , MessageAttribute 82 | { _maType = "String" 83 | , _maValue = "TestString" 84 | }) 85 | , ( "TestBinary" 86 | , MessageAttribute 87 | { _maType = "Binary" 88 | , _maValue = "TestBinary" 89 | }) 90 | ] 91 | , _smMessageId = "95df01b4-ee98-5cb9-9903-4c221d41eb5e" 92 | , _smSignature = "EXAMPLE" 93 | , _smSignatureVersion = "1" 94 | , _smSigningCertUrl = "EXAMPLE" 95 | , _smSubject = "TestInvoke" 96 | , _smTimestamp = UTCTime (fromGregorian 1970 1 1) (secondsToDiffTime 0) 97 | , _smTopicArn = "topicarn" 98 | , _smType = "Notification" 99 | , _smUnsubscribeUrl = "EXAMPLE" 100 | } 101 | } 102 | ] 103 | 104 | sampleSNSS3JSON :: ByteString 105 | sampleSNSS3JSON = [r| 106 | {"Records": 107 | [ 108 | {"EventSource":"aws:sns", 109 | "EventVersion":"1.0", 110 | "EventSubscriptionArn":"arn:aws:sns:ap-southeast-2:012345678901:SomeSNSEvent:23e2d254-e8bb-4db5-92ce-d917e5aad090", 111 | "Sns":{ 112 | "Type":"Notification", 113 | "MessageId":"89f6fe8b-a751-5dcd-8e0c-afbd75420455", 114 | "TopicArn":"arn:aws:sns:ap-southeast-2:012345678901:SomeSNSEvent", 115 | "Subject":"Amazon S3 Notification", 116 | "Message": 
"{\"Records\":[{\"eventVersion\":\"2.0\",\"eventSource\":\"aws:s3\",\"awsRegion\":\"ap-southeast-2\",\"eventTime\":\"2017-03-06T02:56:19.713Z\",\"eventName\":\"s3:ObjectCreated:Put\",\"userIdentity\":{\"principalId\":\"AWS:DFLKSDFLKJ987SDFLJJDJ:some-principal-id\"},\"requestParameters\":{\"sourceIPAddress\":\"192.168.0.1\"},\"responseElements\":{\"x-amz-request-id\":\"324098EDFLK0894F\",\"x-amz-id-2\":\"xsdSDF/pgAl401Fz3UIATJ5/didfljDSFDSFsdfkjsdfl8JdsfLSDF89ldsf7SDF898jsdfljiA=\"},\"s3\":{\"s3SchemaVersion\":\"1.0\",\"configurationId\":\"SomeS3Event:Created\",\"bucket\":{\"name\":\"some-bucket\",\"ownerIdentity\":{\"principalId\":\"A3O1SDFLKJIJXU\"},\"arn\":\"arn:aws:s3:::some-bucket\"},\"object\":{\"key\":\"path/to/some/object\",\"size\":53598442,\"eTag\":\"6b1f72b9e81e4d6fcd3e0c808e8477f8\",\"sequencer\":\"0058BCCFD25C798E7B\"}}}]}", 117 | "Timestamp":"2017-03-06T02:56:19.834Z", 118 | "SignatureVersion":"1", 119 | "Signature":"aybEgnTjKzSbC2puHxho7SUnYOje4SjBoCyt0Q13bMWyp7M64+EU6jzi7P01+gSIuBFyYPsHreSmyqGMRSxbFuzn7rG5JcVGN0901U3CRXdk42eh03je8evRvs/Oa7TJlhpCTEDDOScalCWbIH0RthYONQpPR01nEgaNKj3e8YVJqyRQV+4RbU3YWJOj+Spyi4u1hOC9PLUv4BH7U80nbhbOe9EwgX0zpeNU1WBRbEpqPoACm+7/uB0w79qFBKjB/Q7OWc1kASUZV9q8bz03yceoQeVvza0QGhPsnSXi49sn1mLWQOFS4KvgbJIC/Qk7H036ShrDioP6pP+UEg6kow==", 120 | "SigningCertUrl":"https://sns.ap-southeast-2.amazonaws.com/SimpleNotificationService-b95095beb82e8f6a046b3aafc7f4149a.pem", 121 | "UnsubscribeUrl":"https://sns.ap-southeast-2.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:ap-southeast-2:012345678901:SomeSNSEvent:23e2d254-e8bb-4db5-92ce-d917e5aad090", 122 | "MessageAttributes":{} 123 | } 124 | } 125 | ] 126 | } 127 | |] 128 | 129 | sampleSNSS3Event :: SNSEvent (Embedded S3Event) 130 | sampleSNSS3Event = 131 | RecordsEvent 132 | { _reRecords = 133 | [ SNSRecord 134 | { _srEventVersion = "1.0" 135 | , _srEventSubscriptionArn = 136 | "arn:aws:sns:ap-southeast-2:012345678901:SomeSNSEvent:23e2d254-e8bb-4db5-92ce-d917e5aad090" 137 | , 
_srEventSource = "aws:sns" 138 | , _srSns = 139 | SNSMessage 140 | { _smMessage = 141 | TextValue 142 | { _unTextValue = 143 | Embedded 144 | { _unEmbed = 145 | RecordsEvent 146 | { _reRecords = 147 | [ S3EventNotification 148 | { _senAwsRegion = Sydney 149 | , _senEventName = Event_S3_ObjectCreated_Put 150 | , _senEventSource = "aws:s3" 151 | , _senEventTime = 152 | UTCTime 153 | (fromGregorian 2017 3 6) 154 | (picosecondsToDiffTime 10579713000000000) 155 | , _senEventVersion = "2.0" 156 | , _senRequestParameters = 157 | RequestParametersEntity 158 | { _rpeSourceIPAddress = "192.168.0.1" 159 | } 160 | , _senResponseElements = 161 | ResponseElementsEntity 162 | { _reeXAmzId2 = 163 | "xsdSDF/pgAl401Fz3UIATJ5/didfljDSFDSFsdfkjsdfl8JdsfLSDF89ldsf7SDF898jsdfljiA=" 164 | , _reeXAmzRequestId = "324098EDFLK0894F" 165 | } 166 | , _senS3 = 167 | S3Entity 168 | { _seBucket = 169 | S3BucketEntity 170 | { _sbeArn = "arn:aws:s3:::some-bucket" 171 | , _sbeName = BucketName "some-bucket" 172 | , _sbeOwnerIdentity = 173 | UserIdentityEntity 174 | { _uiePrincipalId = "A3O1SDFLKJIJXU" 175 | } 176 | } 177 | , _seConfigurationId = "SomeS3Event:Created" 178 | , _seObject = 179 | S3ObjectEntity 180 | { _soeETag = 181 | Just (ETag "6b1f72b9e81e4d6fcd3e0c808e8477f8") 182 | , _soeKey = ObjectKey "path/to/some/object" 183 | , _soeSize = Just 53598442 184 | , _soeSequencer = "0058BCCFD25C798E7B" 185 | , _soeVersionId = Nothing 186 | } 187 | , _seS3SchemaVersion = "1.0" 188 | } 189 | , _senUserIdentity = 190 | UserIdentityEntity 191 | { _uiePrincipalId = 192 | "AWS:DFLKSDFLKJ987SDFLJJDJ:some-principal-id" 193 | } 194 | } 195 | ] 196 | } 197 | } 198 | } 199 | , _smMessageAttributes = HashMap.fromList [] 200 | , _smMessageId = "89f6fe8b-a751-5dcd-8e0c-afbd75420455" 201 | , _smSignature = 202 | 
"aybEgnTjKzSbC2puHxho7SUnYOje4SjBoCyt0Q13bMWyp7M64+EU6jzi7P01+gSIuBFyYPsHreSmyqGMRSxbFuzn7rG5JcVGN0901U3CRXdk42eh03je8evRvs/Oa7TJlhpCTEDDOScalCWbIH0RthYONQpPR01nEgaNKj3e8YVJqyRQV+4RbU3YWJOj+Spyi4u1hOC9PLUv4BH7U80nbhbOe9EwgX0zpeNU1WBRbEpqPoACm+7/uB0w79qFBKjB/Q7OWc1kASUZV9q8bz03yceoQeVvza0QGhPsnSXi49sn1mLWQOFS4KvgbJIC/Qk7H036ShrDioP6pP+UEg6kow==" 203 | , _smSignatureVersion = "1" 204 | , _smSigningCertUrl = 205 | "https://sns.ap-southeast-2.amazonaws.com/SimpleNotificationService-b95095beb82e8f6a046b3aafc7f4149a.pem" 206 | , _smSubject = "Amazon S3 Notification" 207 | , _smTimestamp = 208 | UTCTime 209 | (fromGregorian 2017 3 6) 210 | (picosecondsToDiffTime 10579834000000000) 211 | , _smTopicArn = "arn:aws:sns:ap-southeast-2:012345678901:SomeSNSEvent" 212 | , _smType = "Notification" 213 | , _smUnsubscribeUrl = 214 | "https://sns.ap-southeast-2.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:ap-southeast-2:012345678901:SomeSNSEvent:23e2d254-e8bb-4db5-92ce-d917e5aad090" 215 | } 216 | } 217 | ] 218 | } 219 | -------------------------------------------------------------------------------- /src/AWSLambda/Events/APIGateway.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | {-# LANGUAGE OverloadedStrings #-} 3 | {-# LANGUAGE RecordWildCards #-} 4 | {-# LANGUAGE TemplateHaskell #-} 5 | 6 | {-| 7 | Module: AWSLambda.Events.APIGateway 8 | Description: Types for APIGateway Lambda requests and responses 9 | 10 | Based on https://github.com/aws/aws-lambda-dotnet/tree/master/Libraries/src/Amazon.Lambda.APIGatewayEvents 11 | 12 | To enable processing of API Gateway events, use the @events@ key in 13 | @serverless.yml@ as usual: 14 | 15 | > functions: 16 | > myapifunc: 17 | > handler: mypackage.mypackage-exe 18 | > events: 19 | > - http: 20 | > path: hello/{name} 21 | > method: get 22 | 23 | Then use 'apiGatewayMain' in the handler to process the requests. 
24 | -} 25 | module AWSLambda.Events.APIGateway where 26 | 27 | import Control.Lens hiding ((.=)) 28 | import Data.Aeson 29 | import Data.Aeson.Casing (aesonDrop, camelCase) 30 | import Data.Aeson.TH (deriveFromJSON) 31 | import Data.Aeson.Embedded 32 | import Data.Aeson.TextValue 33 | import Data.Aeson.Types (Parser) 34 | import qualified Data.Aeson.KeyMap as KeyMap 35 | import Data.ByteString (ByteString) 36 | import qualified Data.CaseInsensitive as CI 37 | import Data.Function (on) 38 | import Data.HashMap.Strict (HashMap) 39 | import qualified Data.HashMap.Strict as HashMap 40 | import Data.IP 41 | import qualified Data.Set as Set 42 | import qualified Data.Text as Text 43 | import Data.Text.Encoding (decodeUtf8, encodeUtf8) 44 | import GHC.Generics (Generic) 45 | import Amazonka.Data.Base64 46 | import Amazonka.Data.Text 47 | import qualified Network.HTTP.Types as HTTP 48 | import Text.Read 49 | 50 | import AWSLambda.Handler (lambdaMain) 51 | 52 | type Method = Text 53 | -- type HeaderName = CI Text 54 | type HeaderName = Text --- XXX should be CI Text 55 | type HeaderValue = Text 56 | type QueryParamName = Text 57 | type QueryParamValue = Text 58 | type PathParamName = Text 59 | type PathParamValue = Text 60 | type StageVarName = Text 61 | type StageVarValue = Text 62 | 63 | data RequestIdentity = RequestIdentity 64 | { _riCognitoIdentityPoolId :: !(Maybe Text) 65 | , _riAccountId :: !(Maybe Text) 66 | , _riCognitoIdentityId :: !(Maybe Text) 67 | , _riCaller :: !(Maybe Text) 68 | , _riApiKey :: !(Maybe Text) 69 | , _riSourceIp :: !(Maybe IP) 70 | , _riCognitoAuthenticationType :: !(Maybe Text) 71 | , _riCognitoAuthenticationProvider :: !(Maybe Text) 72 | , _riUserArn :: !(Maybe Text) 73 | , _riUserAgent :: !(Maybe Text) 74 | , _riUser :: !(Maybe Text) 75 | } deriving (Eq, Show) 76 | 77 | readParse :: Read a => String -> Text -> Parser a 78 | readParse msg str = 79 | case readMaybe (Text.unpack str) of 80 | Just result -> pure result 81 | Nothing -> fail $ 
"Failed to parse an " ++ msg 82 | 83 | instance FromJSON RequestIdentity where 84 | parseJSON = 85 | withObject "RequestIdentity" $ \o -> 86 | RequestIdentity <$> o .:? "cognitoIdentityPoolId" <*> o .:? "accountId" <*> 87 | o .:? "cognitoIdentityId" <*> 88 | o .:? "caller" <*> 89 | o .:? "apiKey" <*> 90 | (o .:? "sourceIp" >>= traverse (readParse "IP address")) <*> 91 | o .:? "cognitoAuthenticationType" <*> 92 | o .:? "cognitoAuthenticationProvider" <*> 93 | o .:? "userArn" <*> 94 | o .:? "userAgent" <*> 95 | o .:? "user" 96 | $(makeLenses ''RequestIdentity) 97 | 98 | data Authorizer = Authorizer 99 | { _aPrincipalId :: !(Maybe Text) 100 | , _aClaims :: !Object 101 | , _aContext :: !Object 102 | } deriving (Eq, Show) 103 | instance FromJSON Authorizer where 104 | parseJSON = withObject "Authorizer" $ \o -> 105 | Authorizer 106 | <$> o .:? "principalId" 107 | <*> o .:? "claims" .!= mempty 108 | <*> (pure $ KeyMap.delete "principalId" $ KeyMap.delete "claims" o) 109 | 110 | $(makeLenses ''Authorizer) 111 | 112 | data ProxyRequestContext = ProxyRequestContext 113 | { _prcPath :: !(Maybe Text) 114 | , _prcAccountId :: !Text 115 | , _prcResourceId :: !Text 116 | , _prcStage :: !Text 117 | , _prcRequestId :: !Text 118 | , _prcIdentity :: !RequestIdentity 119 | , _prcResourcePath :: !Text 120 | , _prcHttpMethod :: !Text 121 | , _prcApiId :: !Text 122 | , _prcProtocol :: !Text 123 | , _prcAuthorizer :: !(Maybe Authorizer) 124 | } deriving (Eq, Show) 125 | $(deriveFromJSON (aesonDrop 4 camelCase) ''ProxyRequestContext) 126 | $(makeLenses ''ProxyRequestContext) 127 | 128 | data APIGatewayProxyRequest body = APIGatewayProxyRequest 129 | { _agprqResource :: !Text 130 | , _agprqPath :: !ByteString 131 | , _agprqHttpMethod :: !HTTP.Method 132 | , _agprqHeaders :: !HTTP.RequestHeaders 133 | , _agprqQueryStringParameters :: !HTTP.Query 134 | , _agprqPathParameters :: !(HashMap PathParamName PathParamValue) 135 | , _agprqStageVariables :: !(HashMap StageVarName StageVarValue) 136 | 
, _agprqRequestContext :: !ProxyRequestContext 137 | , _agprqBody :: !(Maybe (TextValue body)) 138 | } deriving (Show, Generic) 139 | 140 | instance Eq body => Eq (APIGatewayProxyRequest body) where 141 | (==) = 142 | (==) `on` \rq -> 143 | ( _agprqResource rq 144 | , _agprqPath rq 145 | , _agprqHttpMethod rq 146 | , Set.fromList (_agprqHeaders rq) -- header order doesn't matter 147 | , _agprqQueryStringParameters rq 148 | , _agprqPathParameters rq 149 | , _agprqStageVariables rq 150 | , _agprqRequestContext rq 151 | , _agprqBody rq) 152 | 153 | instance FromText body => FromJSON (APIGatewayProxyRequest body) where 154 | parseJSON = withObject "APIGatewayProxyRequest" $ \o -> 155 | APIGatewayProxyRequest 156 | <$> o .: "resource" 157 | <*> (encodeUtf8 <$> o .: "path") 158 | <*> (encodeUtf8 <$> o .: "httpMethod") 159 | <*> (fmap fromAWSHeaders <$> o .:? "headers") .!= mempty 160 | <*> (fmap fromAWSQuery <$> o .:? "queryStringParameters") .!= mempty 161 | <*> o .:? "pathParameters" .!= HashMap.empty 162 | <*> o .:? "stageVariables" .!= HashMap.empty 163 | <*> o .: "requestContext" 164 | <*> o .:? "body" 165 | where 166 | -- Explicit type signatures so that we don't accidentally tell Aeson 167 | -- to try to parse the wrong sort of structure 168 | fromAWSHeaders :: HashMap HeaderName HeaderValue -> HTTP.RequestHeaders 169 | fromAWSHeaders = fmap toHeader . HashMap.toList 170 | where 171 | toHeader = bimap (CI.mk . encodeUtf8) encodeUtf8 172 | fromAWSQuery :: HashMap QueryParamName QueryParamValue -> HTTP.Query 173 | fromAWSQuery = fmap toQueryItem . HashMap.toList 174 | where 175 | toQueryItem = bimap encodeUtf8 (\x -> if Text.null x then Nothing else Just . encodeUtf8 $ x) 176 | 177 | $(makeLenses ''APIGatewayProxyRequest) 178 | 179 | -- | Get the request body, if there is one 180 | requestBody :: Getter (APIGatewayProxyRequest body) (Maybe body) 181 | requestBody = agprqBody . 
mapping unTextValue 182 | 183 | -- | Get the embedded request body, if there is one 184 | requestBodyEmbedded :: Getter (APIGatewayProxyRequest (Embedded v)) (Maybe v) 185 | requestBodyEmbedded = requestBody . mapping unEmbed 186 | 187 | -- | Get the binary (decoded Base64) request body, if there is one 188 | requestBodyBinary :: Getter (APIGatewayProxyRequest Base64) (Maybe ByteString) 189 | requestBodyBinary = requestBody . mapping _Base64 190 | 191 | data APIGatewayProxyResponse body = APIGatewayProxyResponse 192 | { _agprsStatusCode :: !Int 193 | , _agprsHeaders :: !HTTP.ResponseHeaders 194 | , _agprsBody :: !(Maybe (TextValue body)) 195 | } deriving (Show, Generic) 196 | 197 | instance (Eq body) => Eq (APIGatewayProxyResponse body) where 198 | -- header order doesn't matter 199 | (==) = (==) `on` \r -> (_agprsStatusCode r, Set.fromList (_agprsHeaders r), _agprsBody r) 200 | 201 | instance ToText body => ToJSON (APIGatewayProxyResponse body) where 202 | toJSON APIGatewayProxyResponse {..} = 203 | object 204 | [ "statusCode" .= _agprsStatusCode 205 | , "headers" .= toAWSHeaders _agprsHeaders 206 | , "body" .= _agprsBody 207 | ] 208 | where 209 | toAWSHeaders :: HTTP.ResponseHeaders -> HashMap HeaderName HeaderValue 210 | toAWSHeaders = HashMap.fromList . fmap (bimap (decodeUtf8 . CI.original) decodeUtf8) 211 | 212 | instance FromText body => FromJSON (APIGatewayProxyResponse body) where 213 | parseJSON = 214 | withObject "APIGatewayProxyResponse" $ \o -> 215 | APIGatewayProxyResponse <$> o .: "statusCode" <*> 216 | (fromAWSHeaders <$> o .: "headers") <*> 217 | o .:? "body" 218 | -- Explicit type signatures so that we don't accidentally tell Aeson 219 | -- to try to parse the wrong sort of structure 220 | where 221 | fromAWSHeaders :: HashMap HeaderName HeaderValue -> HTTP.RequestHeaders 222 | fromAWSHeaders = fmap toHeader . HashMap.toList 223 | where 224 | toHeader = bimap (CI.mk . 
encodeUtf8) encodeUtf8 225 | 226 | $(makeLenses ''APIGatewayProxyResponse) 227 | 228 | response :: Int -> APIGatewayProxyResponse body 229 | response statusCode = APIGatewayProxyResponse statusCode mempty Nothing 230 | 231 | responseOK :: APIGatewayProxyResponse body 232 | responseOK = response 200 233 | 234 | responseNotFound :: APIGatewayProxyResponse body 235 | responseNotFound = response 404 236 | 237 | responseBadRequest :: APIGatewayProxyResponse body 238 | responseBadRequest = response 400 239 | 240 | responseBody :: Setter' (APIGatewayProxyResponse body) (Maybe body) 241 | responseBody = agprsBody . at () . mapping unTextValue 242 | 243 | responseBodyEmbedded :: Setter' (APIGatewayProxyResponse (Embedded body)) (Maybe body) 244 | responseBodyEmbedded = responseBody . mapping unEmbed 245 | 246 | responseBodyBinary :: Setter' (APIGatewayProxyResponse Base64) (Maybe ByteString) 247 | responseBodyBinary = responseBody . mapping _Base64 248 | 249 | {-| Process incoming events from @serverless-haskell@ using a provided function. 250 | 251 | This is a specialisation of 'lambdaMain' for API Gateway. 252 | 253 | The handler receives the input event given to the AWS Lambda function, and 254 | its return value is returned from the function. 255 | 256 | This is intended to be used as @main@, for example: 257 | 258 | > import AWSLambda.Events.APIGateway 259 | > import Control.Lens 260 | > import Data.Aeson 261 | > import Data.Aeson.Embedded 262 | > 263 | > main = apiGatewayMain handler 264 | > 265 | > handler :: APIGatewayProxyRequest (Embedded Value) -> IO (APIGatewayProxyResponse (Embedded [Int])) 266 | > handler request = do 267 | > putStrLn "This should go to logs" 268 | > print $ request ^. requestBody 269 | > pure $ responseOK & responseBodyEmbedded ?~ [1, 2, 3] 270 | 271 | The type parameters @reqBody@ and @resBody@ represent the types of request and response body, respectively. 
272 | The @FromText@ and @ToText@ contraints are required because these values come from string fields 273 | in the request and response JSON objects. 274 | To get direct access to the body string, use @Text@ as the parameter type. 275 | To treat the body as a stringified embedded JSON value, use @Embedded a@, where @a@ has the 276 | appropriate @FromJSON@ or @ToJSON@ instances. 277 | To treat the body as base 64 encoded binary use @Base64@. 278 | -} 279 | apiGatewayMain 280 | :: (FromText reqBody, ToText resBody) 281 | => (APIGatewayProxyRequest reqBody -> IO (APIGatewayProxyResponse resBody)) -- ^ Function to process the event 282 | -> IO () 283 | apiGatewayMain = lambdaMain 284 | -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import { spawnSync, SpawnSyncOptions, SpawnSyncReturns } from 'child_process'; 4 | import { chmodSync, copySync, removeSync, writeFileSync } from 'fs-extra'; 5 | import * as path from 'path'; 6 | import Serverless, { FunctionDefinition, FunctionDefinitionHandler } from 'serverless'; 7 | import Service from 'serverless/classes/Service'; 8 | 9 | import * as AWSEnvironment from './AWSEnvironment'; 10 | import * as config from './config'; 11 | import * as ld from './ld'; 12 | import * as version from './version'; 13 | 14 | const PACKAGE_NAME = 'serverless-haskell'; 15 | 16 | const ADDITIONAL_EXCLUDE = [ 17 | '**/.stack-work/**', 18 | 'node_modules/**', 19 | ]; 20 | 21 | // Dependent libraries not to suggest adding 22 | const IGNORE_LIBRARIES = [ 23 | 'linux-vdso.so.1', 24 | '/lib64/ld-linux-x86-64.so.2', 25 | ].concat(AWSEnvironment.libraries); 26 | 27 | const BOOTSTRAP = '#!/bin/sh\nexec ${_HANDLER}'; 28 | 29 | const NO_OUTPUT_CAPTURE: SpawnSyncOptions = {stdio: ['ignore', process.stdout, process.stderr]}; 30 | const OUTPUT_CAPTURE: SpawnSyncOptions = {maxBuffer: 1024 * 1024 * 100}; 31 | 32 | 
// Plugin options as given under custom.haskell in serverless.yml.
type Custom = {
    stackBuildArgs: string[];
    docker: boolean;
    buildAll: boolean;
};

// Minimal JSON Schema subset used to describe the plugin configuration.
type Schema = {
    type: 'object';
    properties: {
        [key: string]: Schema;
    };
} | {
    type: 'array';
    items: Schema;
} | {
    type: 'string';
} | {
    type: 'boolean';
};

// Schema for the custom.haskell configuration section.
const customSchema: Schema = {
    type: 'object',
    properties: {
        haskell: {
            type: 'object',
            properties: {
                stackBuildArgs: {
                    type: 'array',
                    items: {
                        type: 'string',
                    },
                },
                docker: {
                    type: 'boolean',
                },
                buildAll: {
                    type: 'boolean',
                },
            },
        },
    },
};

// FIXME: Service is missing 'package' property in @types/serverless
type ServiceEx = Service & {
    package: {
        exclude: string[];
        excludeDevDependencies?: boolean;
    };
}

// FIXME: Schema is not implemented in @types/serverless
type ConfigSchemaHandler = {
    schema: {
        definitions: {
            [key: string]: {
                enum?: string[];
            };
        };
    };
    defineCustomProperties(properties: Schema): void;
}

type ServerlessEx = Serverless & {
    configSchemaHandler: ConfigSchemaHandler;
}

/**
 * Type guard: is this function defined via a handler string (as opposed to,
 * e.g., a container image)?
 */
function isHandler(func: FunctionDefinition): func is FunctionDefinitionHandler {
    return !!Object.prototype.hasOwnProperty.call(func, 'handler');
}

// Command-line options relevant to this plugin.
type Options = {
    function?: string;
};

/**
 * Error thrown when a subprocess fails; carries the spawnSync result so
 * callers can inspect captured output.
 */
class ProcessError extends Error {
    result: SpawnSyncReturns;
    constructor(message: string, result: SpawnSyncReturns) {
        super(message);
        this.result = result;
        // Restore the prototype chain, which extending Error breaks when
        // TypeScript targets ES5.
        Object.setPrototypeOf(this, new.target.prototype);
    }
}

/**
 * Serverless plugin that builds Haskell handlers with Stack and packages
 * them (plus their shared-library dependencies) for the AWS Lambda custom
 * runtime.
 */
class ServerlessPlugin {
    serverless: ServerlessEx;
    service: ServiceEx;
    options: Options;
    hooks: { [hook: string]: (options: {}) => void };
    servicePath: string;
    // Files created during packaging, to be removed afterwards.
    additionalFiles: string[];

    constructor(serverless: ServerlessEx, options: Options) {
        this.serverless = serverless;
        this.service = serverless.service as ServiceEx;
        this.options = options;

        if (this.serverless.service.provider.name !== "aws") {
            throw new Error("Only AWS provider is supported.");
        }

        this.hooks = {
            'before:package:createDeploymentArtifacts': this.buildHandlers.bind(this),
            'after:package:createDeploymentArtifacts': this.cleanupHandlers.bind(this),

            // deploy function
            'before:deploy:function:packageFunction': this.buildHandlers.bind(this),
            'after:deploy:function:packageFunction': this.cleanupHandlers.bind(this),

            // invoke local
            'before:invoke:local:invoke': this.buildHandlers.bind(this),
            'after:invoke:local:invoke': this.cleanupHandlers.bind(this),

            // serverless-offline
            'before:offline:start:init': this.buildHandlers.bind(this),
            'after:offline:start:end': this.cleanupHandlers.bind(this),
        };

        this.servicePath = this.serverless.config.servicePath || '';

        // By default, Serverless examines node_modules to figure out which
        // packages there are from dependencies versus devDependencies of a
        // package. While there will always be a node_modules due to Serverless
        // and this plugin being installed, it will be excluded anyway.
        // Therefore, the filtering can be disabled to speed up the process.
        this.service.package.excludeDevDependencies = false;

        // Customize Serverless schema
        const configSchemaHandler = this.serverless.configSchemaHandler;

        // Add new possible runtime to the schema
        configSchemaHandler.schema.definitions.awsLambdaRuntime.enum?.push("haskell");

        // Add plugin options
        configSchemaHandler.defineCustomProperties(customSchema);

        this.additionalFiles = [];
    }

    /** Plugin configuration from serverless.yml, with defaults applied. */
    custom(): Custom {
        return Object.assign(
            {
                stackBuildArgs: [],
                docker: true,
                buildAll: true,
            },
            this.serverless.service.custom &&
                this.serverless.service.custom.haskell ||
                {}
        );
    }

    /**
     * Run a Stack command, optionally inside Docker and against a specific
     * stack.yaml. Throws ProcessError when the command fails.
     *
     * @param directory - project subdirectory ('' or 'dir/', with trailing slash)
     * @param args - arguments to pass to stack
     * @param options - captureOutput: pipe output instead of inheriting stdio
     */
    runStack(directory: string, args: string[], options: {captureOutput?: boolean} = {}): SpawnSyncReturns {
        const envArgs = [];
        if (this.custom().docker) {
            envArgs.push('--docker');
            envArgs.push('--docker-image', config.BUILD_DOCKER_IMAGE);
            envArgs.push('--no-nix');
        }

        if (directory) {
            envArgs.push('--stack-yaml', `${directory}stack.yaml`);
        }

        const stackArgs = [
            ...envArgs,
            ...this.custom().stackBuildArgs,
            ...args,
        ];

        const result = spawnSync(
            'stack',
            stackArgs,
            options.captureOutput ? OUTPUT_CAPTURE : NO_OUTPUT_CAPTURE
        );

        if (result.error || result.status) {
            const reasons = [];
            if (result.error) {
                // spawn itself failed (e.g. stack not installed)
                reasons.push(result.error);
            }
            if (result.status) {
                reasons.push(`exit code: ${result.status}`);
            }
            const stderr = result.stderr?.toString().trim();
            if (stderr) {
                reasons.push(stderr);
            }
            const message = `Error when running Stack: ${reasons.join('; ')}\n` +
                `Stack command: stack ${stackArgs.join(" ")}`;
            throw new ProcessError(message, result);
        }

        return result;
    }

    /** Run a Stack command and return its trimmed standard output. */
    runStackOutput(directory: string, args: string[]): string {
        const result = this.runStack(directory, args, {captureOutput: true});
        return result.stdout.toString().trim();
    }

    /**
     * Determine the shared libraries an executable depends on, by running
     * ldd in the build environment. Returns an empty map for static
     * executables and on macOS without Docker (where the result would not
     * translate to the AWS environment anyway).
     */
    dependentLibraries(directory: string, executablePath: string): ld.Paths {
        try {
            const lddOutput = this.runStackOutput(
                directory,
                [
                    'exec',
                    'ldd',
                    executablePath,
                ]
            );
            return ld.parseLdOutput(lddOutput);
        }
        catch (error) {
            // runStack only ever throws ProcessError into this try block
            if (error instanceof ProcessError &&
                error.result.stdout &&
                error.result.stdout.includes("not a dynamic executable")) {
                // Static executables have no dependencies
                return {};
            } else if (process.platform === 'darwin' && !this.custom().docker) {
                // Even if ldd was available on macOS, the dependencies won't
                // translate
                return {};
            } else {
                throw error;
            }
        }
    }

    /**
     * Determine the glibc version required by an executable, by parsing
     * objdump output from the build environment.
     */
    glibcVersion(directory: string, executablePath: string): version.Version | null {
        const objdumpOutput = this.runStackOutput(
            directory,
            [
                'exec',
                'objdump',
                '--',
                '-T',
                executablePath,
            ]
        );
        return ld.parseObjdumpOutput(objdumpOutput);
    }

    /**
     * Ensure the serverless-haskell versions installed via Stack and via NPM
     * match, since the wire protocol between them must agree. Throws when
     * either package is missing or the versions differ.
     */
    assertServerlessPackageVersionsMatch(directory: string): void {
        // Check that the Haskell package version corresponds to our own
        const stackDependencies = this.runStackOutput(
            directory,
            [
                'ls',
                'dependencies',
            ]
        ).split("\n");
        const haskellPackageVersions = stackDependencies.filter(dep => dep.startsWith(`${PACKAGE_NAME} `));
        if (haskellPackageVersions.length === 0) {
            this.serverless.cli.log(`Could not find ${PACKAGE_NAME} in stack's dependencies. Make sure you are using LTS 12 (or newer), or add ${PACKAGE_NAME} as an extra-dep in your stack.yaml, and reference it in package.yaml or the Cabal file.`);
            throw new Error("Package not found.");
        }
        const haskellPackageVersion = haskellPackageVersions[0].split(' ')[1];

        const npmListResult = spawnSync(
            'npm',
            [
                'list',
                PACKAGE_NAME,
                '--json',
            ]
        );
        // Guard the lookup so a missing NPM dependency produces a clear
        // diagnostic instead of a TypeError.
        const npmDependency = JSON.parse(npmListResult.stdout).dependencies?.[PACKAGE_NAME];
        if (!npmDependency) {
            this.serverless.cli.log(`Could not find ${PACKAGE_NAME} in NPM dependencies. Make sure ${PACKAGE_NAME} is installed via NPM.`);
            throw new Error("Package not found.");
        }
        const javascriptPackageVersion = npmDependency.version;

        if (haskellPackageVersion !== javascriptPackageVersion) {
            this.serverless.cli.log(`Package version mismatch: serverless-haskell installed from NPM: ${javascriptPackageVersion}, installed from Stack: ${haskellPackageVersion}. Versions must be in sync to work correctly. 
Please install matching versions of serverless-haskell from NPM and Stack by either pinning your NPM version to match stack, or adding an extra-dep in your stack.yaml to match the NPM version.`);
            throw new Error("Package version mismatch.");
        }
    }

    /** Write the custom-runtime bootstrap script into the service directory. */
    writeBootstrap(): void {
        const bootstrapPath = path.resolve(this.servicePath, 'bootstrap');
        writeFileSync(bootstrapPath, BOOTSTRAP);
        chmodSync(bootstrapPath, 0o755);
        this.additionalFiles.push(bootstrapPath);
    }

    // Which functions are being deployed now - all (default) or only one of
    // them ('deploy function')
    deployedFunctions(): string[] {
        if (this.options.function) {
            return [this.options.function];
        } else {
            return this.serverless.service.getAllFunctions();
        }
    }

    /**
     * Build every deployed Haskell function with Stack, copy the executables
     * and their extra shared libraries into the package, and write the
     * bootstrap script.
     */
    buildHandlers(): void {
        const service = this.service;

        if (!this.custom().docker) {
            // Warn when Docker is disabled
            this.serverless.cli.log(
                "Warning: not using Docker to build. " +
                    "The resulting binary might not match the AWS environment.");
        }

        // Exclude Haskell artifacts from uploading
        service.package.exclude = [
            ...service.package.exclude || [],
            ...ADDITIONAL_EXCLUDE,
        ];

        // Keep track of which extra libraries were copied
        const libraries: { [name: string]: boolean } = {};

        let haskellFunctionsFound = false;

        this.deployedFunctions().forEach(funcName => {
            const func = service.getFunction(funcName);

            // Only process Haskell functions
            if (!isHandler(func)) {
                return;
            }
            const runtime = func.runtime || service.provider.runtime;
            if (runtime !== config.HASKELL_RUNTIME) {
                return;
            }
            haskellFunctionsFound = true;
            func.runtime = config.BASE_RUNTIME;

            // 'dir1/dir2/packageName.executableName' ->
            //     ['dir1/dir2/', 'packageName', 'executableName']
            const handlerPattern = /(.*\/)?([^./]*)\.(.*)/;
            const matches = handlerPattern.exec(func.handler);

            if (!matches) {
                throw new Error(`handler ${func.handler} was not of the form 'packageName.executableName' or 'dir1/dir2/packageName.executableName'.`);
            }

            const [, directory, packageName, executableName] = matches;

            // Ensure package versions match
            this.assertServerlessPackageVersionsMatch(directory);

            // Ensure the executable is built
            this.serverless.cli.log(`Building handler ${funcName} with Stack...`);
            const buildCommand = this.custom().buildAll ?
                ['build'] :
                ['build', `${packageName}:exe:${executableName}`];

            this.runStack(directory, buildCommand);

            // Copy the executable to the destination directory
            const stackInstallRoot = this.runStackOutput(
                directory,
                [
                    'path',
                    '--local-install-root',
                ]
            );
            const targetDirectory = directory ? directory : "./";
            const executablePath = path.resolve(stackInstallRoot, 'bin', executableName);
            const targetPath = path.resolve(this.servicePath, targetDirectory, executableName);
            copySync(executablePath, targetPath);
            this.additionalFiles.push(targetPath);
            func.handler = targetDirectory + executableName;

            // Check glibc version
            const glibcVersion = this.glibcVersion(directory, executablePath);
            if (glibcVersion && version.greater(glibcVersion, AWSEnvironment.glibcVersion)) {
                this.serverless.cli.log(
                    "Warning: glibc version required by the executable (" + version.format(glibcVersion) + ") is " +
                        "higher than the one in AWS environment (" + version.format(AWSEnvironment.glibcVersion) + ").");
                throw new Error("glibc version mismatch.");
            }

            // Copy libraries not present on AWS Lambda environment
            const executableLibraries = this.dependentLibraries(directory, executablePath);

            for (const name in executableLibraries) {
                if (!libraries[name] && !IGNORE_LIBRARIES.includes(name)) {
                    const libPath = executableLibraries[name];
                    const libTargetPath = path.resolve(this.servicePath, name);
                    // Copy via 'stack exec' so the library comes from the
                    // build (Docker) environment, not the host
                    this.runStack(
                        directory,
                        [
                            'exec',
                            'cp',
                            libPath,
                            libTargetPath,
                        ]);
                    this.additionalFiles.push(libTargetPath);
                    libraries[name] = true;
                }
            }
        });

        if (!this.options.function && !haskellFunctionsFound) {
            throw new Error(
                `Error: no Haskell functions found. ` +
                    `Use 'runtime: ${config.HASKELL_RUNTIME}' in global or ` +
                    `function configuration to use this plugin.`
            );
        }

        this.writeBootstrap();

        // Ensure the runtime is set to a sane value for other plugins
        if (service.provider.runtime === config.HASKELL_RUNTIME) {
            service.provider.runtime = config.BASE_RUNTIME;
        }
    }

    /** Remove every file created during packaging. */
    cleanupHandlers(): void {
        this.additionalFiles.forEach(fileName => removeSync(fileName));
    }
}

module.exports = ServerlessPlugin;
--------------------------------------------------------------------------------