├── .github ├── FUNDING.yml └── workflows │ └── build.yml ├── .gitignore ├── CONTRIBUTING.md ├── CONTRIBUTORS ├── LICENSE ├── README.md ├── ROADMAP.md ├── build_dev_bin.sh ├── builder └── src │ ├── AbsoluteSrcDir.hs │ ├── BackgroundWriter.hs │ ├── Build.hs │ ├── Deps │ ├── Diff.hs │ ├── Package.hs │ └── Solver.hs │ ├── Directories.hs │ ├── File.hs │ ├── Generate.hs │ ├── Git.hs │ ├── Gren │ ├── Details.hs │ ├── Outline.hs │ ├── Platform.hs │ └── PossibleFilePath.hs │ ├── Reporting.hs │ └── Reporting │ ├── Exit.hs │ ├── Exit │ └── Help.hs │ └── Task.hs ├── cli.js ├── compiler └── src │ ├── AST │ ├── Canonical.hs │ ├── Optimized.hs │ ├── Source.hs │ ├── SourceComments.hs │ └── Utils │ │ ├── Binop.hs │ │ └── Type.hs │ ├── Canonicalize │ ├── Effects.hs │ ├── Environment.hs │ ├── Environment │ │ ├── Dups.hs │ │ ├── Foreign.hs │ │ └── Local.hs │ ├── Expression.hs │ ├── Module.hs │ ├── Pattern.hs │ └── Type.hs │ ├── Compile.hs │ ├── Data │ ├── Bag.hs │ ├── Index.hs │ ├── Map │ │ └── Utils.hs │ ├── Name.hs │ ├── NonEmptyList.hs │ ├── OneOrMore.hs │ └── Utf8.hs │ ├── Generate │ ├── Html.hs │ ├── JavaScript.hs │ ├── JavaScript │ │ ├── Builder.hs │ │ ├── Expression.hs │ │ ├── Functions.hs │ │ └── Name.hs │ ├── Mode.hs │ ├── Node.hs │ ├── SourceMap.hs │ └── VLQ.hs │ ├── Gren │ ├── Compiler │ │ ├── Imports.hs │ │ ├── Type.hs │ │ └── Type │ │ │ └── Extract.hs │ ├── Constraint.hs │ ├── Docs.hs │ ├── Float.hs │ ├── Int.hs │ ├── Interface.hs │ ├── Kernel.hs │ ├── Licenses.hs │ ├── Magnitude.hs │ ├── ModuleName.hs │ ├── Package.hs │ ├── String.hs │ └── Version.hs │ ├── Json │ ├── Decode.hs │ ├── Encode.hs │ └── String.hs │ ├── Nitpick │ ├── Debug.hs │ └── PatternMatches.hs │ ├── Optimize │ ├── Case.hs │ ├── DecisionTree.hs │ ├── Expression.hs │ ├── Module.hs │ ├── Names.hs │ └── Port.hs │ ├── Parse │ ├── Declaration.hs │ ├── Expression.hs │ ├── Keyword.hs │ ├── Module.hs │ ├── Number.hs │ ├── Pattern.hs │ ├── Primitives.hs │ ├── Space.hs │ ├── String.hs │ ├── Symbol.hs │ ├── Type.hs │ └── Variable.hs │ ├── Reporting │ ├── Annotation.hs │ ├── Doc.hs │ ├── Error.hs │ ├── Error │ │ ├── Canonicalize.hs │ │ ├── Docs.hs │ │ ├── Import.hs │ │ ├── Json.hs │ │ ├── Main.hs │ │ ├── Pattern.hs │ │ ├── Syntax.hs │ │ └── Type.hs │ ├── Render │ │ ├── Code.hs │ │ ├── Type.hs │ │ └── Type │ │ │ └── Localizer.hs │ ├── Report.hs │ ├── Result.hs │ ├── Suggest.hs │ └── Warning.hs │ └── Type │ ├── Constrain │ ├── Expression.hs │ ├── Module.hs │ └── Pattern.hs │ ├── Error.hs │ ├── Instantiate.hs │ ├── Occurs.hs │ ├── Solve.hs │ ├── Type.hs │ ├── Unify.hs │ └── UnionFind.hs ├── devbox.json ├── devbox.lock ├── docs ├── hacking_on_core_packages.md └── kernel_code.md ├── gren.cabal ├── gren.json ├── hints ├── bad-recursion.md ├── comparing-custom-types.md ├── comparing-records.md ├── implicit-casts.md ├── import-cycles.md ├── imports.md ├── infinite-type.md ├── init.md ├── missing-patterns.md ├── optimize.md ├── port-modules.md ├── recursive-alias.md ├── repl.md ├── shadowing.md └── type-annotations.md ├── index.js ├── package-lock.json ├── package.json ├── src └── Main.gren ├── terminal ├── Docs.hs ├── Init.hs ├── Main.hs ├── Make.hs ├── Package │ ├── Bump.hs │ ├── Diff.hs │ ├── Install.hs │ ├── Outdated.hs │ ├── Uninstall.hs │ └── Validate.hs └── Repl.hs └── tests ├── Generate └── VLQSpec.hs ├── Helpers ├── Instances.hs └── Parse.hs ├── Parse ├── AliasSpec.hs ├── DeclSpec.hs ├── MultilineStringSpec.hs ├── RecordUpdateSpec.hs ├── SpaceSpec.hs └── UnderscorePatternSpec.hs └── Spec.hs /.github/FUNDING.yml: 
-------------------------------------------------------------------------------- 1 | ko_fi: gren 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | *~ 3 | 4 | # Haskell build stuff 5 | dist 6 | dist-newstyle 7 | cabal-dev 8 | gren 9 | cabal.project.local 10 | 11 | # node build stuff 12 | node_modules 13 | 14 | # Gren build stuff 15 | .gren 16 | compiler.js 17 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Before starting to work on a feature or bug fix for Gren, it might be a good idea 4 | to check if the change you're intending to make is a good fit for the project. 5 | 6 | We use the `Help Wanted` tag on GitHub issues to indicate that a PR would be 7 | welcome. If you cannot find an issue for the change you're intending to make, 8 | head over to the [Discord server](https://discord.gg/Chb9YB9Vmh). 9 | 10 | We're a friendly bunch, so don't be afraid to drop in to say hi or ask questions! 11 | 12 | We also like to talk things through before committing to a change, so it's a good 13 | idea to bring it up on Discord before making a PR. 14 | 15 | [forum-channel-what]: https://support.discord.com/hc/en-us/articles/6208479917079-Forum-Channels-FAQ#h_01G69FJQWTWN88HFEHK7Z6X79N 16 | 17 | To do so: 18 | 19 | 1. Join the Discord server 20 | 2. Find the server's left-hand sidebar 21 | 3. Scroll down to the `Development` category 22 | 4. Choose one of the following [forum channels][forum-channel-what] beneath it: 23 | 24 | - [`#compiler`](https://discord.com/channels/1250584603085766677/1250591099681247332) 25 | - [`#language`](https://discord.com/channels/1250584603085766677/1250591320335188099) 26 | - [`#core-packages`](https://discord.com/channels/1250584603085766677/1250591260159377490) 27 | - [`#www`](https://discord.com/channels/1250584603085766677/1250592392646492283) 28 | 29 | 5. Make your post: 30 | 31 | - Click in the textbox labelled with 'Search or create a post...' 32 | - Enter a title 33 | - Follow any additional steps as prompted 34 | 35 | All PRs will be considered, but going through the above process significantly 36 | improves your chances of a merge! 37 | 38 | ## Back-up / Historic Archives 39 | 40 | There's also a [Zulip](https://gren.zulipchat.com) with older 41 | `#language-design` or `#api-design` streams. 42 | 43 | This may act as a fallback for the time being, but it may be 44 | sunset since Discord seems preferred. 45 | -------------------------------------------------------------------------------- /CONTRIBUTORS: -------------------------------------------------------------------------------- 1 | Robin Heggelund Hansen (robinheghan) 2 | Julian Antonielli (jjant) 3 | Aaron VonderHaar (avh4) 4 | lue (lue-bird) 5 | Allan Clark (allanderek) 6 | Gaute Berge (gauteab) 7 | Dimitri B.
(BendingBender) 8 | pushfoo 9 | mbartlett21 10 | Patrick Bollinger (pjbollinger) 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Original work Copyright 2012-2021 Evan Czaplicki 2 | Modified work Copyright 2021-present The Gren CONTRIBUTORS 3 | 4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 7 | 8 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | 10 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 11 | 12 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 13 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Gren 2 | 3 | Compiler for Gren, a pure functional programming language that is easy to learn, but powerful in use. 4 | 5 | There are easier ways to install the compiler than compiling the source, so you might want to read the [setup instructions](https://gren-lang.org/install). 6 | 7 | ## Build from source 8 | 9 | This project uses [devbox](https://www.jetify.com/devbox) for managing dependencies required to build the project. If you don't want to use devbox, 10 | you can find a list of the required dependencies and the commands for building the compiler in `devbox.json`. 11 | 12 | Since Gren 0.4 the compiler is implemented in two parts. The Gren-part of the compiler lives in `src`, and once built it acts 13 | as a frontend to the Haskell-part of the compiler. 14 | 15 | The end goal is for the entire compiler to be written in Gren. 16 | 17 | To build the compiler: 18 | 19 | 1. Use `devbox run prepare-deps` to set up the required dependencies. The first time you run this it might take a while. 20 | 2. Build the compiler with `devbox run build`. This will create a `cli.js` file and a `gren` executable in the project root directory. 21 | 22 | You can now execute the Gren-part of the compiler with `node ./cli.js` or just `./cli.js`. This either requires `node` to be installed, 23 | or that you've entered the development shell by using `devbox shell`. The Gren-part of the compiler will by default download a pre-built 24 | binary of the Haskell-part from GitHub.
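Putting the build steps together, a typical first session might look like the following sketch (illustrative only; it assumes devbox is installed and that the commands are run from the repository root):

    # one-time setup of the build dependencies (can take a while on the first run)
    devbox run prepare-deps

    # build the compiler; produces ./cli.js and the ./gren executable
    devbox run build

    # run the Gren-part of the compiler via node
    node ./cli.js
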
If you want to run the compiler with the Haskell-part you've just built, set the path 25 | to the Haskell-binary in a `GREN_BIN` environment variable, like: `GREN_BIN=$PWD/gren node ./cli.js`. 26 | 27 | You might also want to study the scripts defined in `devbox.json`. You can execute one of these scripts by running 28 | `devbox run <script_name>`. 29 | -------------------------------------------------------------------------------- /ROADMAP.md: -------------------------------------------------------------------------------- 1 | # Gren Roadmap 2 | 3 | Here is a plan for what future releases of Gren will focus on, on the road to 1.0. 4 | 5 | This is to be considered a living document. Releases may arrive late due to changes in the real lives of the contributors involved. We may also learn new things along the way that change what we wish to focus on. 6 | 7 | Also, keep in mind that this only focuses on the big picture. Major features like parametric modules and WebAssembly are likely to cause an avalanche of other changes as well. 8 | 9 | ## Releases 10 | 11 | - June 2025 - Rewrite dependency management in Gren 12 | - December 2025 - Rewrite parser in Gren. Parametric modules. Operators as syntax sugar. 13 | -------------------------------------------------------------------------------- /build_dev_bin.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | cabal build -f dev 5 | cp `cabal list-bin .` . 6 | -------------------------------------------------------------------------------- /builder/src/AbsoluteSrcDir.hs: -------------------------------------------------------------------------------- 1 | module AbsoluteSrcDir 2 | ( AbsoluteSrcDir (..), 3 | fromFilePath, 4 | addRelative, 5 | toFilePath, 6 | ) 7 | where 8 | 9 | import System.Directory qualified as Dir 10 | import System.FilePath ((</>)) 11 | 12 | newtype AbsoluteSrcDir 13 | = AbsoluteSrcDir FilePath 14 | 15 | toFilePath :: AbsoluteSrcDir -> FilePath 16 | toFilePath (AbsoluteSrcDir path) = path 17 | 18 | fromFilePath :: FilePath -> IO AbsoluteSrcDir 19 | fromFilePath srcDir = 20 | AbsoluteSrcDir 21 | <$> Dir.canonicalizePath srcDir 22 | 23 | addRelative :: AbsoluteSrcDir -> FilePath -> FilePath 24 | addRelative (AbsoluteSrcDir srcDir) path = 25 | srcDir </> path 26 | -------------------------------------------------------------------------------- /builder/src/BackgroundWriter.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns #-} 2 | 3 | module BackgroundWriter 4 | ( Scope, 5 | withScope, 6 | writeBinary, 7 | ) 8 | where 9 | 10 | import Control.Concurrent (forkIO) 11 | import Control.Concurrent.MVar (MVar, newEmptyMVar, newMVar, putMVar, takeMVar) 12 | import Data.Binary qualified as Binary 13 | import Data.Foldable (traverse_) 14 | import File qualified 15 | 16 | -- BACKGROUND WRITER 17 | 18 | newtype Scope 19 | = Scope (MVar [MVar ()]) 20 | 21 | withScope :: (Scope -> IO a) -> IO a 22 | withScope callback = 23 | do 24 | workList <- newMVar [] 25 | result <- callback (Scope workList) 26 | mvars <- takeMVar workList 27 | traverse_ takeMVar mvars 28 | return result 29 | 30 | writeBinary :: (Binary.Binary a) => Scope -> FilePath -> a -> IO () 31 | writeBinary (Scope workList) path value = 32 | do 33 | mvar <- newEmptyMVar 34 | _ <- forkIO (File.writeBinary path value >> putMVar mvar ()) 35 | oldWork <- takeMVar workList 36 | let !newWork = mvar : oldWork 37 | putMVar workList newWork 38 |
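The BackgroundWriter module above lets callers fork binary writes onto background threads while guaranteeing that every write has finished before the surrounding scope is closed. A minimal usage sketch (illustrative only, not part of the repository; the paths and values are made up, and `File.writeBinary` is the helper the module itself imports):

import BackgroundWriter qualified

-- Forks two cache writes; 'withScope' blocks until both background
-- threads have signalled completion before it returns.
demo :: IO ()
demo =
  BackgroundWriter.withScope $ \scope ->
    do
      BackgroundWriter.writeBinary scope "cache/details.dat" (42 :: Int)
      BackgroundWriter.writeBinary scope "cache/names.dat" ("gren" :: String)
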
-------------------------------------------------------------------------------- /builder/src/Deps/Package.hs: -------------------------------------------------------------------------------- 1 | module Deps.Package 2 | ( getVersions, 3 | -- 4 | LatestCompatibleVersionError (..), 5 | latestCompatibleVersion, 6 | latestCompatibleVersionForPackages, 7 | -- 8 | bumpPossibilities, 9 | isPackageInCache, 10 | installPackageVersion, 11 | ) 12 | where 13 | 14 | import Data.List qualified as List 15 | import Data.Map qualified as Map 16 | import Directories qualified as Dirs 17 | import Git qualified 18 | import Gren.Constraint qualified as C 19 | import Gren.Magnitude qualified as M 20 | import Gren.Outline qualified as Outline 21 | import Gren.Package qualified as Pkg 22 | import Gren.Version qualified as V 23 | import System.Directory qualified as Dir 24 | 25 | -- GET VERSIONS 26 | 27 | getVersions :: Pkg.Name -> IO (Either Git.Error (V.Version, [V.Version])) 28 | getVersions name = 29 | Git.tags (Git.githubUrl name) 30 | 31 | -- GET LATEST COMPATIBLE VERSION 32 | 33 | data LatestCompatibleVersionError 34 | = NoCompatiblePackage 35 | | GitError Git.Error 36 | 37 | latestCompatibleVersion :: 38 | Dirs.PackageCache -> 39 | Pkg.Name -> 40 | IO (Either LatestCompatibleVersionError V.Version) 41 | latestCompatibleVersion cache name = do 42 | versionsResult <- getVersions name 43 | case versionsResult of 44 | Right (first, rest) -> 45 | let versionsHighToLow = List.reverse $ List.sort (first : rest) 46 | in do 47 | potentiallyCompatibleVersion <- getCompatibleVersion cache name versionsHighToLow 48 | case potentiallyCompatibleVersion of 49 | Nothing -> 50 | return $ Left NoCompatiblePackage 51 | Just v -> 52 | return $ Right v 53 | Left gitError -> 54 | return $ Left $ GitError gitError 55 | 56 | getCompatibleVersion :: Dirs.PackageCache -> Pkg.Name -> [V.Version] -> IO (Maybe V.Version) 57 | getCompatibleVersion cache name versions = 58 | case versions of 59 | [] -> 60 | return Nothing 61 | vsn : rest -> do 62 | potentialInstallationError <- installPackageVersion cache name vsn 63 | case potentialInstallationError of 64 | Left _ -> 65 | getCompatibleVersion cache name rest 66 | Right () -> do 67 | let pkgPath = Dirs.package cache name vsn 68 | potentialOutline <- Outline.read pkgPath 69 | case potentialOutline of 70 | Right (Outline.Pkg outline) -> 71 | if C.goodGren (Outline._pkg_gren_version outline) 72 | then return $ Just vsn 73 | else getCompatibleVersion cache name rest 74 | _ -> 75 | getCompatibleVersion cache name rest 76 | 77 | -- LATEST COMPATIBLE VERSION FOR PACKAGES 78 | 79 | latestCompatibleVersionForPackages :: 80 | Dirs.PackageCache -> 81 | [Pkg.Name] -> 82 | IO (Either LatestCompatibleVersionError (Map.Map Pkg.Name C.Constraint)) 83 | latestCompatibleVersionForPackages cache pkgs = 84 | latestCompatibleVersionForPackagesHelp cache pkgs Map.empty 85 | 86 | latestCompatibleVersionForPackagesHelp :: 87 | Dirs.PackageCache -> 88 | [Pkg.Name] -> 89 | Map.Map Pkg.Name C.Constraint -> 90 | IO (Either LatestCompatibleVersionError (Map.Map Pkg.Name C.Constraint)) 91 | latestCompatibleVersionForPackagesHelp cache pkgs result = 92 | case pkgs of 93 | [] -> return $ Right result 94 | pkg : rest -> do 95 | possibleVersion <- latestCompatibleVersion cache pkg 96 | case possibleVersion of 97 | Left err -> 98 | return $ Left err 99 | Right vsn -> 100 | let newResult = Map.insert pkg (C.untilNextMajor vsn) result 101 | in latestCompatibleVersionForPackagesHelp cache rest newResult 102 | 103 | -- GET 
POSSIBILITIES 104 | 105 | bumpPossibilities :: (V.Version, [V.Version]) -> [(V.Version, V.Version, M.Magnitude)] 106 | bumpPossibilities (latest, previous) = 107 | let allVersions = reverse (latest : previous) 108 | minorPoints = map last (List.groupBy sameMajor allVersions) 109 | patchPoints = map last (List.groupBy sameMinor allVersions) 110 | in (latest, V.bumpMajor latest, M.MAJOR) 111 | : map (\v -> (v, V.bumpMinor v, M.MINOR)) minorPoints 112 | ++ map (\v -> (v, V.bumpPatch v, M.PATCH)) patchPoints 113 | 114 | sameMajor :: V.Version -> V.Version -> Bool 115 | sameMajor (V.Version major1 _ _) (V.Version major2 _ _) = 116 | major1 == major2 117 | 118 | sameMinor :: V.Version -> V.Version -> Bool 119 | sameMinor (V.Version major1 minor1 _) (V.Version major2 minor2 _) = 120 | major1 == major2 && minor1 == minor2 121 | 122 | -- INSTALL PACKAGE VERSION 123 | 124 | isPackageInCache :: Dirs.PackageCache -> Pkg.Name -> V.Version -> IO Bool 125 | isPackageInCache cache pkg vsn = do 126 | let versionedPkgPath = Dirs.package cache pkg vsn 127 | Dir.doesDirectoryExist versionedPkgPath 128 | 129 | installPackageVersion :: Dirs.PackageCache -> Pkg.Name -> V.Version -> IO (Either Git.Error ()) 130 | installPackageVersion cache pkg vsn = do 131 | let versionedPkgPath = Dirs.package cache pkg vsn 132 | versionedPkgExists <- Dir.doesDirectoryExist versionedPkgPath 133 | if versionedPkgExists 134 | then return $ Right () 135 | else do 136 | Git.clone (Git.githubUrl pkg) vsn versionedPkgPath 137 | -------------------------------------------------------------------------------- /builder/src/Directories.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wall #-} 2 | 3 | module Directories 4 | ( details, 5 | interfaces, 6 | objects, 7 | greni, 8 | greno, 9 | findRoot, 10 | withRootLock, 11 | withRegistryLock, 12 | PackageCache, 13 | getPackageCache, 14 | package, 15 | getReplCache, 16 | getGrenHome, 17 | ) 18 | where 19 | 20 | import Gren.ModuleName qualified as ModuleName 21 | import Gren.Package qualified as Pkg 22 | import Gren.Version qualified as V 23 | import System.Directory qualified as Dir 24 | import System.Environment qualified as Env 25 | import System.FileLock qualified as Lock 26 | import System.FilePath ((<.>), (</>)) 27 | import System.FilePath qualified as FP 28 | 29 | -- PATHS 30 | 31 | projectCache :: FilePath -> FilePath 32 | projectCache root = 33 | root </> ".gren" </> compilerVersion 34 | 35 | details :: FilePath -> FilePath 36 | details root = 37 | projectCache root </> "d.dat" 38 | 39 | interfaces :: FilePath -> FilePath 40 | interfaces root = 41 | projectCache root </> "i.dat" 42 | 43 | objects :: FilePath -> FilePath 44 | objects root = 45 | projectCache root </> "o.dat" 46 | 47 | compilerVersion :: FilePath 48 | compilerVersion = 49 | V.toChars V.compiler 50 | 51 | -- GRENI and GRENO 52 | 53 | greni :: FilePath -> ModuleName.Raw -> FilePath 54 | greni root name = 55 | toArtifactPath root name "greni" 56 | 57 | greno :: FilePath -> ModuleName.Raw -> FilePath 58 | greno root name = 59 | toArtifactPath root name "greno" 60 | 61 | toArtifactPath :: FilePath -> ModuleName.Raw -> String -> FilePath 62 | toArtifactPath root name ext = 63 | projectCache root </> ModuleName.toHyphenPath name <.> ext 64 | 65 | -- ROOT 66 | 67 | findRoot :: IO (Maybe FilePath) 68 | findRoot = 69 | do 70 | dir <- Dir.getCurrentDirectory 71 | findRootHelp (FP.splitDirectories dir) 72 | 73 | findRootHelp :: [String] -> IO (Maybe FilePath) 74 | findRootHelp dirs = 75 | case dirs of
76 | [] -> 77 | return Nothing 78 | _ : _ -> 79 | do 80 | exists <- Dir.doesFileExist (FP.joinPath dirs </> "gren.json") 81 | if exists 82 | then return (Just (FP.joinPath dirs)) 83 | else findRootHelp (init dirs) 84 | 85 | -- LOCKS 86 | 87 | withRootLock :: FilePath -> IO a -> IO a 88 | withRootLock root work = 89 | do 90 | let dir = projectCache root 91 | Dir.createDirectoryIfMissing True dir 92 | Lock.withFileLock (dir </> "lock") Lock.Exclusive (\_ -> work) 93 | 94 | withRegistryLock :: PackageCache -> IO a -> IO a 95 | withRegistryLock (PackageCache dir) work = 96 | Lock.withFileLock (dir </> "lock") Lock.Exclusive (\_ -> work) 97 | 98 | -- PACKAGE CACHES 99 | 100 | newtype PackageCache = PackageCache FilePath 101 | 102 | getPackageCache :: IO PackageCache 103 | getPackageCache = 104 | PackageCache <$> getCacheDir "packages" 105 | 106 | package :: PackageCache -> Pkg.Name -> V.Version -> FilePath 107 | package (PackageCache dir) name version = 108 | dir </> Pkg.toFilePath name </> V.toChars version 109 | 110 | -- CACHE 111 | 112 | getReplCache :: IO FilePath 113 | getReplCache = 114 | getCacheDir "repl" 115 | 116 | getCacheDir :: FilePath -> IO FilePath 117 | getCacheDir projectName = 118 | do 119 | home <- getGrenHome 120 | let root = home </> compilerVersion </> projectName 121 | Dir.createDirectoryIfMissing True root 122 | return root 123 | 124 | getGrenHome :: IO FilePath 125 | getGrenHome = 126 | do 127 | maybeCustomHome <- Env.lookupEnv "GREN_HOME" 128 | case maybeCustomHome of 129 | Just customHome -> return customHome 130 | Nothing -> Dir.getXdgDirectory Dir.XdgCache "gren" 131 | -------------------------------------------------------------------------------- /builder/src/Generate.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE BangPatterns #-} 2 | 3 | module Generate 4 | ( dev, 5 | prod, 6 | repl, 7 | ) 8 | where 9 | 10 | import AST.Optimized qualified as Opt 11 | import Build qualified 12 | import Control.Concurrent (MVar, forkIO, newEmptyMVar, newMVar, putMVar, readMVar) 13 | import Control.Monad (liftM2) 14 | import Data.ByteString.Builder qualified as B 15 | import Data.Map ((!)) 16 | import Data.Map qualified as Map 17 | import Data.Maybe qualified as Maybe 18 | import Data.Name qualified as N 19 | import Data.NonEmptyList qualified as NE 20 | import Directories qualified as Dirs 21 | import File qualified 22 | import Generate.JavaScript qualified as JS 23 | import Generate.Mode qualified as Mode 24 | import Gren.Details qualified as Details 25 | import Gren.ModuleName qualified as ModuleName 26 | import Gren.Package qualified as Pkg 27 | import Nitpick.Debug qualified as Nitpick 28 | import Reporting.Exit qualified as Exit 29 | import Reporting.Task qualified as Task 30 | import Prelude hiding (cycle, print) 31 | 32 | -- GENERATORS 33 | 34 | type Task a = 35 | Task.Task Exit.Generate a 36 | 37 | dev :: FilePath -> Details.Details -> Build.Artifacts -> Task JS.GeneratedResult 38 | dev root details (Build.Artifacts pkg _ roots modules) = 39 | do 40 | objects <- finalizeObjects =<< loadObjects root details modules 41 | let mode = Mode.Dev 42 | let graph = objectsToGlobalGraph objects 43 | let mains = gatherMains pkg objects roots 44 | return $ JS.generate mode graph mains 45 | 46 | prod :: FilePath -> Details.Details -> Build.Artifacts -> Task JS.GeneratedResult 47 | prod root details (Build.Artifacts pkg _ roots modules) = 48 | do 49 | objects <- finalizeObjects =<< loadObjects root details modules 50 | checkForDebugUses objects 51 | let graph = 
objectsToGlobalGraph objects 52 | let mode = Mode.Prod (Mode.shortenFieldNames graph) 53 | let mains = gatherMains pkg objects roots 54 | return $ JS.generate mode graph mains 55 | 56 | repl :: FilePath -> Details.Details -> Bool -> Build.ReplArtifacts -> N.Name -> Task B.Builder 57 | repl root details ansi (Build.ReplArtifacts home modules localizer annotations) name = 58 | do 59 | objects <- finalizeObjects =<< loadObjects root details modules 60 | let graph = objectsToGlobalGraph objects 61 | return $ JS.generateForRepl ansi localizer graph home name (annotations ! name) 62 | 63 | -- CHECK FOR DEBUG 64 | 65 | checkForDebugUses :: Objects -> Task () 66 | checkForDebugUses (Objects _ locals) = 67 | case Map.keys (Map.filter Nitpick.hasDebugUses locals) of 68 | [] -> return () 69 | m : ms -> Task.throw (Exit.GenerateCannotOptimizeDebugValues m ms) 70 | 71 | -- GATHER MAINS 72 | 73 | gatherMains :: Pkg.Name -> Objects -> NE.List Build.Root -> Map.Map ModuleName.Canonical Opt.Main 74 | gatherMains pkg (Objects _ locals) roots = 75 | Map.fromList $ Maybe.mapMaybe (lookupMain pkg locals) (NE.toList roots) 76 | 77 | lookupMain :: Pkg.Name -> Map.Map ModuleName.Raw Opt.LocalGraph -> Build.Root -> Maybe (ModuleName.Canonical, Opt.Main) 78 | lookupMain pkg locals root = 79 | let toPair name (Opt.LocalGraph maybeMain _ _) = 80 | (,) (ModuleName.Canonical pkg name) <$> maybeMain 81 | in case root of 82 | Build.Inside name -> toPair name =<< Map.lookup name locals 83 | Build.Outside name _ g -> toPair name g 84 | 85 | -- LOADING OBJECTS 86 | 87 | data LoadingObjects = LoadingObjects 88 | { _foreign_mvar :: MVar (Maybe Opt.GlobalGraph), 89 | _local_mvars :: Map.Map ModuleName.Raw (MVar (Maybe Opt.LocalGraph)) 90 | } 91 | 92 | loadObjects :: FilePath -> Details.Details -> [Build.Module] -> Task LoadingObjects 93 | loadObjects root details modules = 94 | Task.io $ 95 | do 96 | mvar <- Details.loadObjects root details 97 | mvars <- traverse (loadObject root) modules 98 | return $ LoadingObjects mvar (Map.fromList mvars) 99 | 100 | loadObject :: FilePath -> Build.Module -> IO (ModuleName.Raw, MVar (Maybe Opt.LocalGraph)) 101 | loadObject root modul = 102 | case modul of 103 | Build.Fresh name _ graph -> 104 | do 105 | mvar <- newMVar (Just graph) 106 | return (name, mvar) 107 | Build.Cached name _ _ -> 108 | do 109 | mvar <- newEmptyMVar 110 | _ <- forkIO $ putMVar mvar =<< File.readBinary (Dirs.greno root name) 111 | return (name, mvar) 112 | 113 | -- FINALIZE OBJECTS 114 | 115 | data Objects = Objects 116 | { _foreign :: Opt.GlobalGraph, 117 | _locals :: Map.Map ModuleName.Raw Opt.LocalGraph 118 | } 119 | 120 | finalizeObjects :: LoadingObjects -> Task Objects 121 | finalizeObjects (LoadingObjects mvar mvars) = 122 | Task.eio id $ 123 | do 124 | result <- readMVar mvar 125 | results <- traverse readMVar mvars 126 | case liftM2 Objects result (sequence results) of 127 | Just loaded -> return (Right loaded) 128 | Nothing -> return (Left Exit.GenerateCannotLoadArtifacts) 129 | 130 | objectsToGlobalGraph :: Objects -> Opt.GlobalGraph 131 | objectsToGlobalGraph (Objects globals locals) = 132 | foldr Opt.addLocalGraph globals locals 133 | -------------------------------------------------------------------------------- /builder/src/Gren/Platform.hs: -------------------------------------------------------------------------------- 1 | module Gren.Platform 2 | ( Platform (..), 3 | -- 4 | compatible, 5 | -- 6 | encode, 7 | decoder, 8 | fromChars, 9 | toChars, 10 | ) 11 | where 12 | 13 | import Data.Binary (Binary, 
get, getWord8, put, putWord8) 14 | import Data.Utf8 qualified as Utf8 15 | import Json.Decode qualified as D 16 | import Json.Encode qualified as E 17 | 18 | data Platform 19 | = Common 20 | | Browser 21 | | Node 22 | deriving (Show, Eq) 23 | 24 | -- COMPATIBILITY 25 | 26 | compatible :: Platform -> Platform -> Bool 27 | compatible rootPlatform comparison = 28 | rootPlatform == comparison || comparison == Common 29 | 30 | -- JSON 31 | 32 | encode :: Platform -> E.Value 33 | encode platform = 34 | case platform of 35 | Common -> E.chars "common" 36 | Browser -> E.chars "browser" 37 | Node -> E.chars "node" 38 | 39 | decoder :: a -> D.Decoder a Platform 40 | decoder badPlatformError = 41 | do 42 | platformStr <- D.string 43 | case fromChars $ Utf8.toChars platformStr of 44 | Just platform -> D.succeed platform 45 | Nothing -> D.failure badPlatformError 46 | 47 | fromChars :: [Char] -> Maybe Platform 48 | fromChars value = 49 | case value of 50 | "common" -> Just Common 51 | "browser" -> Just Browser 52 | "node" -> Just Node 53 | _ -> Nothing 54 | 55 | toChars :: Platform -> [Char] 56 | toChars value = 57 | case value of 58 | Common -> "common" 59 | Browser -> "browser" 60 | Node -> "node" 61 | 62 | -- BINARY 63 | 64 | instance Binary Platform where 65 | put platform = 66 | case platform of 67 | Common -> putWord8 0 68 | Browser -> putWord8 1 69 | Node -> putWord8 2 70 | 71 | get = 72 | do 73 | n <- getWord8 74 | case n of 75 | 0 -> return Common 76 | 1 -> return Browser 77 | 2 -> return Node 78 | _ -> fail "binary encoding of Platform was corrupted" 79 | -------------------------------------------------------------------------------- /builder/src/Gren/PossibleFilePath.hs: -------------------------------------------------------------------------------- 1 | module Gren.PossibleFilePath 2 | ( PossibleFilePath (..), 3 | mapWith, 4 | encodeJson, 5 | other, 6 | is, 7 | toChars, 8 | ) 9 | where 10 | 11 | import Data.Utf8 qualified as Utf8 12 | import Json.Encode qualified as E 13 | 14 | data PossibleFilePath a 15 | = Is FilePath 16 | | Other a 17 | deriving (Eq) 18 | 19 | mapWith :: (a -> b) -> PossibleFilePath a -> PossibleFilePath b 20 | mapWith fn possibleFP = 21 | case possibleFP of 22 | Is filePath -> Is filePath 23 | Other a -> Other $ fn a 24 | 25 | other :: PossibleFilePath a -> Maybe a 26 | other possibleFP = 27 | case possibleFP of 28 | Is _ -> Nothing 29 | Other a -> Just a 30 | 31 | is :: PossibleFilePath a -> Bool 32 | is possibleFP = 33 | case possibleFP of 34 | Is _ -> True 35 | Other _ -> False 36 | 37 | encodeJson :: (a -> E.Value) -> PossibleFilePath a -> E.Value 38 | encodeJson encoderForNonFP possibleFP = 39 | case possibleFP of 40 | Is filePath -> 41 | E.string $ Utf8.fromChars $ "local:" ++ filePath 42 | Other a -> 43 | encoderForNonFP a 44 | 45 | toChars :: (a -> String) -> PossibleFilePath a -> String 46 | toChars otherToString pfp = 47 | case pfp of 48 | Is fp -> fp 49 | Other a -> otherToString a 50 | -------------------------------------------------------------------------------- /builder/src/Reporting/Exit/Help.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module Reporting.Exit.Help 4 | ( Report, 5 | report, 6 | docReport, 7 | jsonReport, 8 | compilerReport, 9 | reportToDoc, 10 | reportToJson, 11 | syntaxErrorToDoc, 12 | toString, 13 | toStdout, 14 | toStderr, 15 | ) 16 | where 17 | 18 | import Data.Maybe qualified as Maybe 19 | import GHC.IO.Handle (hIsTerminalDevice) 20 | import 
Json.Encode ((==>)) 21 | import Json.Encode qualified as E 22 | import Reporting.Doc ((<+>)) 23 | import Reporting.Doc qualified as D 24 | import Reporting.Error qualified as Error 25 | import Reporting.Error.Syntax qualified as Error.Syntax 26 | import Reporting.Render.Code qualified as Code 27 | import Reporting.Report qualified as Report 28 | import System.Environment qualified 29 | import System.IO (Handle, hPutStr, stderr, stdout) 30 | 31 | -- REPORT 32 | 33 | data Report 34 | = CompilerReport FilePath Error.Module [Error.Module] 35 | | Report 36 | { _title :: String, 37 | _path :: Maybe FilePath, 38 | _message :: D.Doc 39 | } 40 | 41 | report :: String -> Maybe FilePath -> String -> [D.Doc] -> Report 42 | report title path startString others = 43 | Report title path $ D.stack (D.reflow startString : others) 44 | 45 | docReport :: String -> Maybe FilePath -> D.Doc -> [D.Doc] -> Report 46 | docReport title path startDoc others = 47 | Report title path $ D.stack (startDoc : others) 48 | 49 | jsonReport :: String -> Maybe FilePath -> D.Doc -> Report 50 | jsonReport = 51 | Report 52 | 53 | compilerReport :: FilePath -> Error.Module -> [Error.Module] -> Report 54 | compilerReport = 55 | CompilerReport 56 | 57 | syntaxErrorToDoc :: Code.Source -> Maybe FilePath -> Error.Syntax.Error -> D.Doc 58 | syntaxErrorToDoc source path moduleError = 59 | let syntaxReport = Error.Syntax.toReport source moduleError 60 | in reportToDoc $ Report (Report._title syntaxReport) path (Report._message syntaxReport) 61 | 62 | -- TO DOC 63 | 64 | reportToDoc :: Report -> D.Doc 65 | reportToDoc report_ = 66 | case report_ of 67 | CompilerReport root e es -> 68 | Error.toDoc root e es 69 | Report title maybePath message -> 70 | let makeDashes n = 71 | replicate (max 1 (80 - n)) '-' 72 | 73 | errorBarEnd = 74 | case maybePath of 75 | Nothing -> 76 | makeDashes (4 + length title) 77 | Just path -> 78 | makeDashes (5 + length title + length path) ++ " " ++ path 79 | 80 | errorBar = 81 | D.dullcyan $ 82 | "--" <+> D.fromChars title <+> D.fromChars errorBarEnd 83 | in D.stack [errorBar, message, ""] 84 | 85 | -- TO JSON 86 | 87 | reportToJson :: Report -> E.Value 88 | reportToJson report_ = 89 | case report_ of 90 | CompilerReport _ e es -> 91 | E.object 92 | [ "type" ==> E.chars "compile-errors", 93 | "errors" ==> E.list Error.toJson (e : es) 94 | ] 95 | Report title maybePath message -> 96 | E.object 97 | [ "type" ==> E.chars "error", 98 | "path" ==> maybe E.null E.chars maybePath, 99 | "title" ==> E.chars title, 100 | "message" ==> D.encode message 101 | ] 102 | 103 | -- OUTPUT 104 | 105 | toString :: D.Doc -> String 106 | toString = 107 | D.toString 108 | 109 | toStdout :: D.Doc -> IO () 110 | toStdout doc = 111 | toHandle stdout doc 112 | 113 | toStderr :: D.Doc -> IO () 114 | toStderr doc = 115 | toHandle stderr doc 116 | 117 | toHandle :: Handle -> D.Doc -> IO () 118 | toHandle handle doc = 119 | do 120 | isTerminal <- hIsTerminalDevice handle 121 | forceColorEnv <- System.Environment.lookupEnv "FORCE_COLOR" 122 | let forceColor = Maybe.isJust forceColorEnv 123 | if isTerminal || forceColor 124 | then D.toAnsi handle doc 125 | else hPutStr handle (toString doc) 126 | -------------------------------------------------------------------------------- /builder/src/Reporting/Task.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE Rank2Types #-} 2 | 3 | module Reporting.Task 4 | ( Task, 5 | run, 6 | throw, 7 | mapError, 8 | -- 9 | io, 10 | mio, 11 | eio, 12 | ) 13 | 
where 14 | 15 | -- TASKS 16 | 17 | newtype Task x a 18 | = Task 19 | ( forall result. (a -> IO result) -> (x -> IO result) -> IO result 20 | ) 21 | 22 | run :: Task x a -> IO (Either x a) 23 | run (Task task) = 24 | task (return . Right) (return . Left) 25 | 26 | throw :: x -> Task x a 27 | throw x = 28 | Task $ \_ err -> err x 29 | 30 | mapError :: (x -> y) -> Task x a -> Task y a 31 | mapError func (Task task) = 32 | Task $ \ok err -> 33 | task ok (err . func) 34 | 35 | -- IO 36 | 37 | io :: IO a -> Task x a 38 | io work = 39 | Task $ \ok _ -> work >>= ok 40 | 41 | mio :: x -> IO (Maybe a) -> Task x a 42 | mio x work = 43 | Task $ \ok err -> 44 | do 45 | result <- work 46 | case result of 47 | Just a -> ok a 48 | Nothing -> err x 49 | 50 | eio :: (x -> y) -> IO (Either x a) -> Task y a 51 | eio func work = 52 | Task $ \ok err -> 53 | do 54 | result <- work 55 | case result of 56 | Right a -> ok a 57 | Left x -> err (func x) 58 | 59 | -- INSTANCES 60 | 61 | instance Functor (Task x) where 62 | fmap func (Task taskA) = 63 | Task $ \ok err -> 64 | let okA arg = ok (func arg) 65 | in taskA okA err 66 | 67 | instance Applicative (Task x) where 68 | pure a = 69 | Task $ \ok _ -> ok a 70 | 71 | (<*>) (Task taskFunc) (Task taskArg) = 72 | Task $ \ok err -> 73 | let okFunc func = 74 | let okArg arg = ok (func arg) 75 | in taskArg okArg err 76 | in taskFunc okFunc err 77 | 78 | instance Monad (Task x) where 79 | return = pure 80 | 81 | (>>=) (Task taskA) callback = 82 | Task $ \ok err -> 83 | let okA a = 84 | case callback a of 85 | Task taskB -> taskB ok err 86 | in taskA okA err 87 | -------------------------------------------------------------------------------- /cli.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const compiler = require("./compiler.js"); 4 | const compilerInstance = compiler.Gren.Main.init({}); 5 | 6 | compilerInstance.ports.completeStaticBuild.subscribe(async function (output) { 7 | const isMac = process.platform === "darwin"; 8 | const isWin = process.platform === "win32"; 9 | 10 | const jsBuildPath = output; 11 | const blobPath = output + ".blob"; 12 | 13 | const seaConfigPath = output + ".sea.config"; 14 | const seaConfig = { 15 | main: jsBuildPath, 16 | output: blobPath, 17 | disableExperimentalSEAWarning: true, 18 | useSnapshot: true, 19 | }; 20 | 21 | const binPath = isWin ? 
output + ".node.exe" : output + ".node"; 22 | 23 | const fs = require("fs/promises"); 24 | const cp = require("child_process"); 25 | const postject = require("postject"); 26 | 27 | // For snapshots to work we need to wrap the function call that starts 28 | // the Gren application, with a hint that tells the V8 engine what the 29 | // main function is 30 | 31 | try { 32 | const compiledSrc = await fs.readFile(jsBuildPath, "utf-8"); 33 | 34 | const initRegex = /this\.Gren\..+\(\{\}\);/g; 35 | const initCall = compiledSrc.match(initRegex)[0]; 36 | const snapshotCompatibleSrc = compiledSrc.replace( 37 | initCall, 38 | ` 39 | const v8 = require('node:v8'); 40 | v8.startupSnapshot.setDeserializeMainFunction(function() { 41 | ${initCall} 42 | }); 43 | `, 44 | ); 45 | 46 | await fs.writeFile(jsBuildPath, snapshotCompatibleSrc); 47 | 48 | // We then need to generate the snapshot 49 | 50 | const nodePath = process.execPath; 51 | await fs.writeFile(seaConfigPath, JSON.stringify(seaConfig)); 52 | cp.execFileSync(nodePath, ["--experimental-sea-config", seaConfigPath]); 53 | 54 | // Then copy the node executable and inject the snapshot into it 55 | await fs.copyFile(nodePath, binPath); 56 | await fs.chmod(binPath, "755"); 57 | 58 | if (isMac) { 59 | // required on mac, optional on windows, not required on linux 60 | cp.execFileSync("codesign", ["--remove-signature", binPath]); 61 | } 62 | 63 | const blobContent = await fs.readFile(blobPath); 64 | 65 | await postject.inject(binPath, "NODE_SEA_BLOB", blobContent, { 66 | sentinelFuse: "NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2", 67 | machoSegmentName: isMac ? "NODE_SEA" : undefined, 68 | }); 69 | 70 | if (isMac) { 71 | // required on mac 72 | cp.execFileSync("codesign", ["--sign", "-", binPath]); 73 | } 74 | 75 | const outputPath = isWin ? 
jsBuildPath + ".exe" : jsBuildPath; 76 | await fs.rename(binPath, outputPath); 77 | 78 | if (isWin) { 79 | await fs.rm(jsBuildPath); 80 | } 81 | 82 | console.log("Done!"); 83 | } catch (e) { 84 | console.error("Failed to create static executable", e); 85 | await fs.rm(jsBuildPath); 86 | } finally { 87 | // cleanup 88 | await fs.rm(blobPath); 89 | await fs.rm(seaConfigPath); 90 | } 91 | }); 92 | -------------------------------------------------------------------------------- /compiler/src/AST/SourceComments.hs: -------------------------------------------------------------------------------- 1 | module AST.SourceComments where 2 | 3 | import Data.Utf8 qualified as Utf8 4 | import Reporting.Annotation qualified as A 5 | 6 | data GREN_COMMENT 7 | 8 | type Comment = A.Located Comment_ 9 | 10 | data Comment_ 11 | = BlockComment (Utf8.Utf8 GREN_COMMENT) 12 | | LineComment (Utf8.Utf8 GREN_COMMENT) 13 | deriving (Eq, Show) 14 | 15 | -- Module 16 | 17 | data HeaderComments = HeaderComments 18 | { _beforeModuleLine :: [Comment], 19 | _afterModuleKeyword :: [Comment], 20 | _afterModuleName :: [Comment], 21 | _afterExposingKeyword :: [Comment], 22 | _afterModuleLine :: [Comment], 23 | _afterModuleDocComment :: [Comment] 24 | } 25 | deriving (Show) 26 | 27 | -- Effects 28 | 29 | data PortsComments = PortsComments 30 | { _afterPortKeyword :: [Comment] 31 | } 32 | deriving (Show) 33 | 34 | data ManagerComments = ManagerComments 35 | { _afterEffectKeyword :: [Comment], 36 | _afterWhereKeyword :: [Comment], 37 | _afterManager :: [Comment] 38 | } 39 | deriving (Show) 40 | 41 | -- Manager 42 | 43 | data CmdComments = CmdComments 44 | { _beforeCommandKeyword :: [Comment], 45 | _afterCommand :: [Comment] 46 | } 47 | deriving (Show) 48 | 49 | data SubComments = SubComments 50 | { _beforeSubscriptionsKeyword :: [Comment], 51 | _afterSubscriptions :: [Comment] 52 | } 53 | deriving (Show) 54 | 55 | data FxComments = FxComments 56 | { _cmdComments :: CmdComments, 57 | _subComments :: SubComments 58 | } 59 | deriving (Show) 60 | 61 | -- Import 62 | 63 | data ImportComments = ImportComments 64 | { _afterImportKeyword :: [Comment], 65 | _afterImportName :: [Comment] 66 | } 67 | deriving (Show) 68 | 69 | data ImportAliasComments = ImportAliasComments 70 | { _afterAs :: [Comment], 71 | _afterAliasName :: [Comment] 72 | } 73 | deriving (Show) 74 | 75 | data ImportExposingComments = ImportExposingComments 76 | { _afterExposing :: [Comment], 77 | _afterExposingListing :: [Comment] 78 | } 79 | deriving (Show) 80 | 81 | -- Declarations 82 | 83 | data ValueComments = ValueComments 84 | { _beforeValueEquals :: [Comment], 85 | _beforeValueBody :: [Comment], 86 | _afterValueBody :: [Comment] 87 | } 88 | deriving (Show) 89 | 90 | data ValueTypeComments = ValueTypeComments 91 | { _beforeTypeColon :: [Comment], 92 | _afterTypeColon :: [Comment], 93 | _afterValueType :: [Comment] 94 | } 95 | deriving (Show) 96 | 97 | data UnionComments = UnionComments 98 | { _beforeUnionTypeName :: [Comment], 99 | _afterUnionTypeArgs :: [Comment] 100 | } 101 | deriving (Show) 102 | 103 | -- Expressions 104 | 105 | data BinopsSegmentComments = BinopsSegmentComments 106 | { _beforeOperator :: [Comment], 107 | _afterOperator :: [Comment] 108 | } 109 | deriving (Show) 110 | 111 | data ArrayEntryComments = ArrayEntryComments 112 | { _beforeArrayEntry :: [Comment], 113 | _afterArrayEntry :: [Comment] 114 | } 115 | deriving (Show) 116 | 117 | data LambdaComments = LambdaComments 118 | { _beforeArrow :: [Comment], 119 | _afterArrow :: [Comment] 120 | } 121 | 
deriving (Show) 122 | 123 | data IfComments = IfComments 124 | { _beforeElseBody :: [Comment], 125 | _afterElseBody :: [Comment] 126 | } 127 | deriving (Show) 128 | 129 | data IfBranchComments = IfBranchComments 130 | { _afterIfKeyword :: [Comment], 131 | _beforeThenKeyword :: [Comment], 132 | _beforeThenBody :: [Comment], 133 | _afterThenBody :: [Comment] 134 | } 135 | deriving (Show) 136 | 137 | data LetComments = LetComments 138 | { _afterLetDecls :: [Comment], 139 | _afterIn :: [Comment] 140 | } 141 | deriving (Show) 142 | 143 | data CaseComments = CaseComments 144 | { _afterCaseKeyword :: [Comment], 145 | _beforeOfKeyword :: [Comment] 146 | } 147 | deriving (Show) 148 | 149 | data CaseBranchComments = CaseBranchComments 150 | { _beforeBranch :: [Comment], 151 | _beforeBranchArrow :: [Comment], 152 | _beforeBranchBody :: [Comment], 153 | _afterBranchBody :: [Comment] 154 | } 155 | deriving (Show) 156 | 157 | data UpdateComments = UpdateComments 158 | { _beforeBase :: [Comment], 159 | _afterBase :: [Comment] 160 | } 161 | deriving (Show) 162 | 163 | data RecordFieldComments = RecordFieldComments 164 | { _beforeFieldName :: [Comment], 165 | _afterFieldName :: [Comment], 166 | _beforeFieldValue :: [Comment], 167 | _afterFieldValue :: [Comment] 168 | } 169 | deriving (Show) 170 | 171 | -- Patterns 172 | 173 | data PArrayEntryComments = PArrayEntryComments 174 | { _beforePArrayEntry :: [Comment], 175 | _afterPArrayEntry :: [Comment] 176 | } 177 | deriving (Show) 178 | 179 | -- Types 180 | 181 | data TLambdaComments = TLambdaComments 182 | { _beforeTArrow :: [Comment], 183 | _afterTArrow :: [Comment] 184 | } 185 | deriving (Show) 186 | 187 | data TParensComments = TParensComments 188 | { _afterOpening :: [Comment], 189 | _beforeClosing :: [Comment] 190 | } 191 | deriving (Show) 192 | -------------------------------------------------------------------------------- /compiler/src/AST/Utils/Binop.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wall #-} 2 | 3 | module AST.Utils.Binop 4 | ( Precedence (..), 5 | Associativity (..), 6 | ) 7 | where 8 | 9 | import Control.Monad (liftM) 10 | import Data.Binary 11 | import Prelude hiding (Either (..)) 12 | 13 | -- BINOP STUFF 14 | 15 | newtype Precedence = Precedence Int 16 | deriving (Eq, Ord, Show) 17 | 18 | data Associativity 19 | = Left 20 | | Non 21 | | Right 22 | deriving (Eq, Show) 23 | 24 | -- BINARY 25 | 26 | instance Binary Precedence where 27 | get = 28 | liftM Precedence get 29 | 30 | put (Precedence n) = 31 | put n 32 | 33 | instance Binary Associativity where 34 | get = 35 | do 36 | n <- getWord8 37 | case n of 38 | 0 -> return Left 39 | 1 -> return Non 40 | 2 -> return Right 41 | _ -> fail "Error reading valid associativity from serialized string" 42 | 43 | put assoc = 44 | putWord8 $ 45 | case assoc of 46 | Left -> 0 47 | Non -> 1 48 | Right -> 2 49 | -------------------------------------------------------------------------------- /compiler/src/AST/Utils/Type.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wall #-} 2 | 3 | module AST.Utils.Type 4 | ( delambda, 5 | dealias, 6 | deepDealias, 7 | iteratedDealias, 8 | ) 9 | where 10 | 11 | import AST.Canonical (AliasType (..), FieldType (..), Type (..)) 12 | import Data.Map qualified as Map 13 | import Data.Name qualified as Name 14 | 15 | -- DELAMBDA 16 | 17 | delambda :: Type -> [Type] 18 | delambda tipe = 19 | case tipe of 20 | TLambda arg result -> 21 | arg : delambda 
result 22 | _ -> 23 | [tipe] 24 | 25 | -- DEALIAS 26 | 27 | dealias :: [(Name.Name, Type)] -> AliasType -> Type 28 | dealias args aliasType = 29 | case aliasType of 30 | Holey tipe -> 31 | dealiasHelp (Map.fromList args) tipe 32 | Filled tipe -> 33 | tipe 34 | 35 | dealiasHelp :: Map.Map Name.Name Type -> Type -> Type 36 | dealiasHelp typeTable tipe = 37 | case tipe of 38 | TLambda a b -> 39 | TLambda 40 | (dealiasHelp typeTable a) 41 | (dealiasHelp typeTable b) 42 | TVar x -> 43 | Map.findWithDefault tipe x typeTable 44 | TRecord fields ext -> 45 | TRecord (Map.map (dealiasField typeTable) fields) ext 46 | TAlias home name args t' -> 47 | TAlias home name (map (fmap (dealiasHelp typeTable)) args) t' 48 | TType home name args -> 49 | TType home name (map (dealiasHelp typeTable) args) 50 | 51 | dealiasField :: Map.Map Name.Name Type -> FieldType -> FieldType 52 | dealiasField typeTable (FieldType index tipe) = 53 | FieldType index (dealiasHelp typeTable tipe) 54 | 55 | -- DEEP DEALIAS 56 | 57 | deepDealias :: Type -> Type 58 | deepDealias tipe = 59 | case tipe of 60 | TLambda a b -> 61 | TLambda (deepDealias a) (deepDealias b) 62 | TVar _ -> 63 | tipe 64 | TRecord fields ext -> 65 | TRecord (Map.map deepDealiasField fields) ext 66 | TAlias _ _ args tipe' -> 67 | deepDealias (dealias args tipe') 68 | TType home name args -> 69 | TType home name (map deepDealias args) 70 | 71 | deepDealiasField :: FieldType -> FieldType 72 | deepDealiasField (FieldType index tipe) = 73 | FieldType index (deepDealias tipe) 74 | 75 | -- ITERATED DEALIAS 76 | 77 | iteratedDealias :: Type -> Type 78 | iteratedDealias tipe = 79 | case tipe of 80 | TAlias _ _ args realType -> 81 | iteratedDealias (dealias args realType) 82 | _ -> 83 | tipe 84 | -------------------------------------------------------------------------------- /compiler/src/Canonicalize/Environment/Dups.hs: -------------------------------------------------------------------------------- 1 | module Canonicalize.Environment.Dups 2 | ( detect, 3 | detectLocated, 4 | checkFields, 5 | checkLocatedFields, 6 | checkFields', 7 | checkLocatedFields', 8 | Dict, 9 | none, 10 | one, 11 | insert, 12 | union, 13 | unions, 14 | ) 15 | where 16 | 17 | import Data.Function ((&)) 18 | import Data.Map qualified as Map 19 | import Data.Maybe qualified as Maybe 20 | import Data.Name qualified as Name 21 | import Data.OneOrMore qualified as OneOrMore 22 | import Reporting.Annotation qualified as A 23 | import Reporting.Error.Canonicalize qualified as Error 24 | import Reporting.Result qualified as Result 25 | 26 | -- DUPLICATE TRACKER 27 | 28 | type Dict value = 29 | Map.Map Name.Name (OneOrMore.OneOrMore (Info value)) 30 | 31 | data Info value = Info 32 | { _region :: A.Region, 33 | _value :: value 34 | } 35 | 36 | -- DETECT 37 | 38 | type ToError = 39 | Name.Name -> A.Region -> A.Region -> Error.Error 40 | 41 | detect :: ToError -> Dict a -> Result.Result i w Error.Error (Map.Map Name.Name a) 42 | detect toError dict = 43 | Map.traverseWithKey (detectHelp toError) dict 44 | 45 | detectLocated :: ToError -> Dict a -> Result.Result i w Error.Error (Map.Map (A.Located Name.Name) a) 46 | detectLocated toError dict = 47 | let nameLocations = Map.mapMaybe extractLocation dict 48 | in dict 49 | & Map.mapKeys (\k -> A.At (Maybe.fromMaybe A.zero $ Map.lookup k nameLocations) k) 50 | & Map.traverseWithKey (\(A.At _ name) values -> detectHelp toError name values) 51 | 52 | extractLocation :: OneOrMore.OneOrMore (Info a) -> Maybe A.Region 53 | extractLocation oneOrMore = 54 | case 
oneOrMore of 55 | OneOrMore.One (Info region _) -> Just region 56 | OneOrMore.More _ _ -> Nothing 57 | 58 | detectHelp :: ToError -> Name.Name -> OneOrMore.OneOrMore (Info a) -> Result.Result i w Error.Error a 59 | detectHelp toError name values = 60 | case values of 61 | OneOrMore.One (Info _ value) -> 62 | return value 63 | OneOrMore.More left right -> 64 | let (Info r1 _, Info r2 _) = 65 | OneOrMore.getFirstTwo left right 66 | in Result.throw (toError name r1 r2) 67 | 68 | -- CHECK FIELDS 69 | 70 | checkLocatedFields :: [(A.Located Name.Name, a, comments)] -> Result.Result i w Error.Error (Map.Map (A.Located Name.Name) a) 71 | checkLocatedFields fields = 72 | detectLocated Error.DuplicateField (foldr addField none fields) 73 | 74 | checkFields :: [(A.Located Name.Name, a, comments)] -> Result.Result i w Error.Error (Map.Map Name.Name a) 75 | checkFields fields = 76 | detect Error.DuplicateField (foldr addField none fields) 77 | 78 | addField :: (A.Located Name.Name, a, comments) -> Dict a -> Dict a 79 | addField (A.At region name, value, _) dups = 80 | Map.insertWith OneOrMore.more name (OneOrMore.one (Info region value)) dups 81 | 82 | checkLocatedFields' :: (A.Region -> a -> b) -> [(A.Located Name.Name, a, comments)] -> Result.Result i w Error.Error (Map.Map (A.Located Name.Name) b) 83 | checkLocatedFields' toValue fields = 84 | detectLocated Error.DuplicateField (foldr (addField' toValue) none fields) 85 | 86 | checkFields' :: (A.Region -> a -> b) -> [(A.Located Name.Name, a, comments)] -> Result.Result i w Error.Error (Map.Map Name.Name b) 87 | checkFields' toValue fields = 88 | detect Error.DuplicateField (foldr (addField' toValue) none fields) 89 | 90 | addField' :: (A.Region -> a -> b) -> (A.Located Name.Name, a, comments) -> Dict b -> Dict b 91 | addField' toValue (A.At region name, value, _) dups = 92 | Map.insertWith OneOrMore.more name (OneOrMore.one (Info region (toValue region value))) dups 93 | 94 | -- BUILDING DICTIONARIES 95 | 96 | none :: Dict a 97 | none = 98 | Map.empty 99 | 100 | one :: Name.Name -> A.Region -> value -> Dict value 101 | one name region value = 102 | Map.singleton name (OneOrMore.one (Info region value)) 103 | 104 | insert :: Name.Name -> A.Region -> a -> Dict a -> Dict a 105 | insert name region value dict = 106 | Map.insertWith (\new old -> OneOrMore.more old new) name (OneOrMore.one (Info region value)) dict 107 | 108 | union :: Dict a -> Dict a -> Dict a 109 | union a b = 110 | Map.unionWith OneOrMore.more a b 111 | 112 | unions :: [Dict a] -> Dict a 113 | unions dicts = 114 | Map.unionsWith OneOrMore.more dicts 115 | -------------------------------------------------------------------------------- /compiler/src/Canonicalize/Pattern.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wall #-} 2 | 3 | module Canonicalize.Pattern 4 | ( verify, 5 | Bindings, 6 | DupsDict, 7 | canonicalize, 8 | ) 9 | where 10 | 11 | import AST.Canonical qualified as Can 12 | import AST.Source qualified as Src 13 | import Canonicalize.Environment qualified as Env 14 | import Canonicalize.Environment.Dups qualified as Dups 15 | import Data.Index qualified as Index 16 | import Data.Map.Strict qualified as Map 17 | import Data.Name qualified as Name 18 | import Gren.ModuleName qualified as ModuleName 19 | import Reporting.Annotation qualified as A 20 | import Reporting.Error.Canonicalize qualified as Error 21 | import Reporting.Result qualified as Result 22 | 23 | -- RESULTS 24 | 25 | type Result i w a = 26 | 
Result.Result i w Error.Error a 27 | 28 | type Bindings = 29 | Map.Map Name.Name A.Region 30 | 31 | -- VERIFY 32 | 33 | verify :: Error.DuplicatePatternContext -> Result DupsDict w a -> Result i w (a, Bindings) 34 | verify context (Result.Result k) = 35 | Result.Result $ \info warnings bad good -> 36 | k 37 | Dups.none 38 | warnings 39 | ( \_ warnings1 errors -> 40 | bad info warnings1 errors 41 | ) 42 | ( \bindings warnings1 value -> 43 | case Dups.detect (Error.DuplicatePattern context) bindings of 44 | Result.Result k1 -> 45 | k1 46 | () 47 | () 48 | (\() () errs -> bad info warnings1 errs) 49 | (\() () dict -> good info warnings1 (value, dict)) 50 | ) 51 | 52 | -- CANONICALIZE 53 | 54 | type DupsDict = 55 | Dups.Dict A.Region 56 | 57 | canonicalize :: Env.Env -> Src.Pattern -> Result DupsDict w Can.Pattern 58 | canonicalize env (A.At region pattern) = 59 | A.At region 60 | <$> case pattern of 61 | Src.PAnything _ -> 62 | Result.ok Can.PAnything 63 | Src.PVar name -> 64 | logVar name region (Can.PVar name) 65 | Src.PRecord fields -> 66 | Can.PRecord <$> canonicalizeRecordFields env fields 67 | Src.PCtor nameRegion name patterns -> 68 | canonicalizeCtor env region name (fmap snd patterns) =<< Env.findCtor nameRegion env name 69 | Src.PCtorQual nameRegion home name patterns -> 70 | canonicalizeCtor env region name (fmap snd patterns) =<< Env.findCtorQual nameRegion env home name 71 | Src.PArray patterns -> 72 | Can.PArray <$> canonicalizeList env (fmap fst patterns) 73 | Src.PAlias ptrn (A.At reg name) -> 74 | do 75 | cpattern <- canonicalize env ptrn 76 | logVar name reg (Can.PAlias cpattern name) 77 | Src.PChr chr -> 78 | Result.ok (Can.PChr chr) 79 | Src.PStr str -> 80 | Result.ok (Can.PStr str) 81 | Src.PInt int _ -> 82 | Result.ok (Can.PInt int) 83 | 84 | canonicalizeRecordFields :: Env.Env -> [Src.RecordFieldPattern] -> Result DupsDict w [Can.PatternRecordField] 85 | canonicalizeRecordFields env patterns = 86 | case patterns of 87 | [] -> 88 | Result.ok [] 89 | pattern : otherPatterns -> 90 | (:) 91 | <$> canonicalizeRecordField env pattern 92 | <*> canonicalizeRecordFields env otherPatterns 93 | 94 | canonicalizeRecordField :: Env.Env -> Src.RecordFieldPattern -> Result DupsDict w Can.PatternRecordField 95 | canonicalizeRecordField env (A.At region (Src.RFPattern locatedName pattern)) = 96 | A.At region . 
Can.PRFieldPattern (A.toValue locatedName) 97 | <$> canonicalize env pattern 98 | 99 | canonicalizeCtor :: Env.Env -> A.Region -> Name.Name -> [Src.Pattern] -> Env.Ctor -> Result DupsDict w Can.Pattern_ 100 | canonicalizeCtor env region name patterns ctor = 101 | case ctor of 102 | Env.Ctor home tipe union index args -> 103 | let toCanonicalArg argIndex argPattern argTipe = 104 | Can.PatternCtorArg argIndex argTipe <$> canonicalize env argPattern 105 | in do 106 | verifiedList <- Index.indexedZipWithA toCanonicalArg patterns args 107 | case verifiedList of 108 | Index.LengthMatch cargs -> 109 | if tipe == Name.bool && home == ModuleName.basics 110 | then Result.ok (Can.PBool union (name == Name.true)) 111 | else Result.ok (Can.PCtor home tipe union name index cargs) 112 | Index.LengthMismatch actualLength expectedLength -> 113 | Result.throw (Error.BadArity region Error.PatternArity name expectedLength actualLength) 114 | 115 | canonicalizeList :: Env.Env -> [Src.Pattern] -> Result DupsDict w [Can.Pattern] 116 | canonicalizeList env list = 117 | case list of 118 | [] -> 119 | Result.ok [] 120 | pattern : otherPatterns -> 121 | (:) 122 | <$> canonicalize env pattern 123 | <*> canonicalizeList env otherPatterns 124 | 125 | -- LOG BINDINGS 126 | 127 | logVar :: Name.Name -> A.Region -> a -> Result DupsDict w a 128 | logVar name region value = 129 | Result.Result $ \bindings warnings _ ok -> 130 | ok (Dups.insert name region region bindings) warnings value 131 | -------------------------------------------------------------------------------- /compiler/src/Canonicalize/Type.hs: -------------------------------------------------------------------------------- 1 | module Canonicalize.Type 2 | ( toAnnotation, 3 | canonicalize, 4 | ) 5 | where 6 | 7 | import AST.Canonical qualified as Can 8 | import AST.Source qualified as Src 9 | import Canonicalize.Environment qualified as Env 10 | import Canonicalize.Environment.Dups qualified as Dups 11 | import Data.List qualified as List 12 | import Data.Map qualified as Map 13 | import Data.Name qualified as Name 14 | import Reporting.Annotation qualified as A 15 | import Reporting.Error.Canonicalize qualified as Error 16 | import Reporting.Result qualified as Result 17 | 18 | -- RESULT 19 | 20 | type Result i w a = 21 | Result.Result i w Error.Error a 22 | 23 | -- TO ANNOTATION 24 | 25 | toAnnotation :: Env.Env -> Src.Type -> Result i w Can.Annotation 26 | toAnnotation env srcType = 27 | do 28 | tipe <- canonicalize env srcType 29 | Result.ok $ Can.Forall (addFreeVars Map.empty tipe) tipe 30 | 31 | -- CANONICALIZE TYPES 32 | 33 | canonicalize :: Env.Env -> Src.Type -> Result i w Can.Type 34 | canonicalize env (A.At typeRegion tipe) = 35 | case tipe of 36 | Src.TVar x -> 37 | Result.ok (Can.TVar x) 38 | Src.TType region name args -> 39 | canonicalizeType env typeRegion name (fmap snd args) 40 | =<< Env.findType region env name 41 | Src.TTypeQual region home name args -> 42 | canonicalizeType env typeRegion name (fmap snd args) 43 | =<< Env.findTypeQual region env home name 44 | Src.TLambda a b _ -> 45 | Can.TLambda 46 | <$> canonicalize env a 47 | <*> canonicalize env b 48 | Src.TRecord fields ext -> 49 | do 50 | cfields <- sequenceA =<< Dups.checkFields (canonicalizeFields env fields) 51 | return $ Can.TRecord cfields (fmap (A.toValue . 
fst) ext) 52 | Src.TParens inner _ -> 53 | canonicalize env inner 54 | 55 | canonicalizeFields :: Env.Env -> [Src.TRecordField] -> [(A.Located Name.Name, Result i w Can.FieldType, ())] 56 | canonicalizeFields env fields = 57 | let len = fromIntegral (length fields) 58 | canonicalizeField index (name, srcType, _) = 59 | (name, Can.FieldType index <$> canonicalize env srcType, ()) 60 | in zipWith canonicalizeField [0 .. len] fields 61 | 62 | -- CANONICALIZE TYPE 63 | 64 | canonicalizeType :: Env.Env -> A.Region -> Name.Name -> [Src.Type] -> Env.Type -> Result i w Can.Type 65 | canonicalizeType env region name args info = 66 | do 67 | cargs <- traverse (canonicalize env) args 68 | case info of 69 | Env.Alias arity home argNames aliasedType -> 70 | checkArity arity region name args $ 71 | Can.TAlias home name (zip argNames cargs) (Can.Holey aliasedType) 72 | Env.Union arity home -> 73 | checkArity arity region name args $ 74 | Can.TType home name cargs 75 | 76 | checkArity :: Int -> A.Region -> Name.Name -> [A.Located arg] -> answer -> Result i w answer 77 | checkArity expected region name args answer = 78 | let actual = length args 79 | in if expected == actual 80 | then Result.ok answer 81 | else Result.throw (Error.BadArity region Error.TypeArity name expected actual) 82 | 83 | -- ADD FREE VARS 84 | 85 | addFreeVars :: Map.Map Name.Name () -> Can.Type -> Map.Map Name.Name () 86 | addFreeVars freeVars tipe = 87 | case tipe of 88 | Can.TLambda arg result -> 89 | addFreeVars (addFreeVars freeVars result) arg 90 | Can.TVar var -> 91 | Map.insert var () freeVars 92 | Can.TType _ _ args -> 93 | List.foldl' addFreeVars freeVars args 94 | Can.TRecord fields Nothing -> 95 | Map.foldl addFieldFreeVars freeVars fields 96 | Can.TRecord fields (Just ext) -> 97 | Map.foldl addFieldFreeVars (Map.insert ext () freeVars) fields 98 | Can.TAlias _ _ args _ -> 99 | List.foldl' (\fvs (_, arg) -> addFreeVars fvs arg) freeVars args 100 | 101 | addFieldFreeVars :: Map.Map Name.Name () -> Can.FieldType -> Map.Map Name.Name () 102 | addFieldFreeVars freeVars (Can.FieldType _ tipe) = 103 | addFreeVars freeVars tipe 104 | -------------------------------------------------------------------------------- /compiler/src/Compile.hs: -------------------------------------------------------------------------------- 1 | module Compile 2 | ( Artifacts (..), 3 | compile, 4 | ) 5 | where 6 | 7 | import AST.Canonical qualified as Can 8 | import AST.Optimized qualified as Opt 9 | import AST.Source qualified as Src 10 | import Canonicalize.Module qualified as Canonicalize 11 | import Data.Map qualified as Map 12 | import Data.Name qualified as Name 13 | import Gren.Interface qualified as I 14 | import Gren.ModuleName qualified as ModuleName 15 | import Gren.Package qualified as Pkg 16 | import Gren.Platform qualified as P 17 | import Nitpick.PatternMatches qualified as PatternMatches 18 | import Optimize.Module qualified as Optimize 19 | import Reporting.Error qualified as E 20 | import Reporting.Render.Type.Localizer qualified as Localizer 21 | import Reporting.Result qualified as R 22 | import System.IO.Unsafe (unsafePerformIO) 23 | import Type.Constrain.Module qualified as Type 24 | import Type.Solve qualified as Type 25 | 26 | -- COMPILE 27 | 28 | data Artifacts = Artifacts 29 | { _modul :: Can.Module, 30 | _types :: Map.Map Name.Name Can.Annotation, 31 | _graph :: Opt.LocalGraph 32 | } 33 | 34 | compile :: P.Platform -> Pkg.Name -> Map.Map ModuleName.Raw I.Interface -> Src.Module -> Either E.Error Artifacts 35 | compile platform 
pkg ifaces modul = 36 | do 37 | canonical <- canonicalize pkg ifaces modul 38 | annotations <- typeCheck modul canonical 39 | () <- nitpick canonical 40 | objects <- optimize platform modul annotations canonical 41 | return (Artifacts canonical annotations objects) 42 | 43 | -- PHASES 44 | 45 | canonicalize :: Pkg.Name -> Map.Map ModuleName.Raw I.Interface -> Src.Module -> Either E.Error Can.Module 46 | canonicalize pkg ifaces modul = 47 | case snd $ R.run $ Canonicalize.canonicalize pkg ifaces modul of 48 | Right canonical -> 49 | Right canonical 50 | Left errors -> 51 | Left $ E.BadNames errors 52 | 53 | typeCheck :: Src.Module -> Can.Module -> Either E.Error (Map.Map Name.Name Can.Annotation) 54 | typeCheck modul canonical = 55 | case unsafePerformIO (Type.run =<< Type.constrain canonical) of 56 | Right annotations -> 57 | Right annotations 58 | Left errors -> 59 | Left (E.BadTypes (Localizer.fromModule modul) errors) 60 | 61 | nitpick :: Can.Module -> Either E.Error () 62 | nitpick canonical = 63 | case PatternMatches.check canonical of 64 | Right () -> 65 | Right () 66 | Left errors -> 67 | Left (E.BadPatterns errors) 68 | 69 | optimize :: P.Platform -> Src.Module -> Map.Map Name.Name Can.Annotation -> Can.Module -> Either E.Error Opt.LocalGraph 70 | optimize platform modul annotations canonical = 71 | case snd $ R.run $ Optimize.optimize platform annotations canonical of 72 | Right localGraph -> 73 | Right localGraph 74 | Left errors -> 75 | Left (E.BadMains (Localizer.fromModule modul) errors) 76 | -------------------------------------------------------------------------------- /compiler/src/Data/Bag.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wall #-} 2 | 3 | module Data.Bag 4 | ( Bag (..), 5 | empty, 6 | one, 7 | append, 8 | map, 9 | toList, 10 | fromList, 11 | ) 12 | where 13 | 14 | import Data.List qualified as List 15 | import Prelude hiding (map) 16 | 17 | -- BAGS 18 | 19 | data Bag a 20 | = Empty 21 | | One a 22 | | Two (Bag a) (Bag a) 23 | 24 | -- HELPERS 25 | 26 | empty :: Bag a 27 | empty = 28 | Empty 29 | 30 | one :: a -> Bag a 31 | one = 32 | One 33 | 34 | append :: Bag a -> Bag a -> Bag a 35 | append left right = 36 | case (left, right) of 37 | (other, Empty) -> 38 | other 39 | (Empty, other) -> 40 | other 41 | (_, _) -> 42 | Two left right 43 | 44 | -- MAP 45 | 46 | map :: (a -> b) -> Bag a -> Bag b 47 | map func bag = 48 | case bag of 49 | Empty -> 50 | Empty 51 | One a -> 52 | One (func a) 53 | Two left right -> 54 | Two (map func left) (map func right) 55 | 56 | -- TO LIST 57 | 58 | toList :: Bag a -> [a] 59 | toList bag = 60 | toListHelp bag [] 61 | 62 | toListHelp :: Bag a -> [a] -> [a] 63 | toListHelp bag list = 64 | case bag of 65 | Empty -> 66 | list 67 | One x -> 68 | x : list 69 | Two a b -> 70 | toListHelp a (toListHelp b list) 71 | 72 | -- FROM LIST 73 | 74 | fromList :: (a -> b) -> [a] -> Bag b 75 | fromList func list = 76 | case list of 77 | [] -> 78 | Empty 79 | first : rest -> 80 | List.foldl' (add func) (One (func first)) rest 81 | 82 | add :: (a -> b) -> Bag b -> a -> Bag b 83 | add func bag value = 84 | Two (One (func value)) bag 85 | -------------------------------------------------------------------------------- /compiler/src/Data/Index.hs: -------------------------------------------------------------------------------- 1 | module Data.Index 2 | ( ZeroBased, 3 | first, 4 | second, 5 | third, 6 | next, 7 | toMachine, 8 | toHuman, 9 | indexedMap, 10 | indexedTraverse, 11 | indexedForA, 12 
| VerifiedList (..), 13 | indexedZipWith, 14 | indexedZipWithA, 15 | ) 16 | where 17 | 18 | import Control.Monad (liftM) 19 | import Data.Binary 20 | 21 | -- ZERO BASED 22 | 23 | newtype ZeroBased = ZeroBased Int 24 | deriving (Eq, Ord, Show) 25 | 26 | first :: ZeroBased 27 | first = 28 | ZeroBased 0 29 | 30 | second :: ZeroBased 31 | second = 32 | ZeroBased 1 33 | 34 | third :: ZeroBased 35 | third = 36 | ZeroBased 2 37 | 38 | next :: ZeroBased -> ZeroBased 39 | next (ZeroBased i) = 40 | ZeroBased (i + 1) 41 | 42 | -- DESTRUCT 43 | 44 | toMachine :: ZeroBased -> Int 45 | toMachine (ZeroBased index) = 46 | index 47 | 48 | toHuman :: ZeroBased -> Int 49 | toHuman (ZeroBased index) = 50 | index + 1 51 | 52 | -- INDEXED MAP 53 | 54 | indexedMap :: (ZeroBased -> a -> b) -> [a] -> [b] 55 | indexedMap func xs = 56 | zipWith func (map ZeroBased [0 .. length xs]) xs 57 | 58 | indexedTraverse :: (Applicative f) => (ZeroBased -> a -> f b) -> [a] -> f [b] 59 | indexedTraverse func xs = 60 | sequenceA (indexedMap func xs) 61 | 62 | indexedForA :: (Applicative f) => [a] -> (ZeroBased -> a -> f b) -> f [b] 63 | indexedForA xs func = 64 | sequenceA (indexedMap func xs) 65 | 66 | -- VERIFIED/INDEXED ZIP 67 | 68 | data VerifiedList a 69 | = LengthMatch [a] 70 | | LengthMismatch Int Int 71 | 72 | indexedZipWith :: (ZeroBased -> a -> b -> c) -> [a] -> [b] -> VerifiedList c 73 | indexedZipWith func listX listY = 74 | indexedZipWithHelp func 0 listX listY [] 75 | 76 | indexedZipWithHelp :: (ZeroBased -> a -> b -> c) -> Int -> [a] -> [b] -> [c] -> VerifiedList c 77 | indexedZipWithHelp func index listX listY revListZ = 78 | case (listX, listY) of 79 | ([], []) -> 80 | LengthMatch (reverse revListZ) 81 | (x : xs, y : ys) -> 82 | indexedZipWithHelp func (index + 1) xs ys $ 83 | func (ZeroBased index) x y : revListZ 84 | (_, _) -> 85 | LengthMismatch (index + length listX) (index + length listY) 86 | 87 | indexedZipWithA :: (Applicative f) => (ZeroBased -> a -> b -> f c) -> [a] -> [b] -> f (VerifiedList c) 88 | indexedZipWithA func listX listY = 89 | case indexedZipWith func listX listY of 90 | LengthMatch xs -> 91 | LengthMatch <$> sequenceA xs 92 | LengthMismatch x y -> 93 | pure (LengthMismatch x y) 94 | 95 | -- BINARY 96 | 97 | instance Binary ZeroBased where 98 | get = liftM ZeroBased get 99 | put (ZeroBased n) = put n 100 | -------------------------------------------------------------------------------- /compiler/src/Data/Map/Utils.hs: -------------------------------------------------------------------------------- 1 | module Data.Map.Utils 2 | ( fromKeys, 3 | fromKeysA, 4 | fromValues, 5 | any, 6 | ) 7 | where 8 | 9 | import Data.Map qualified as Map 10 | import Data.Map.Internal (Map (..)) 11 | import Prelude hiding (any) 12 | 13 | -- FROM KEYS 14 | 15 | fromKeys :: (Ord k) => (k -> v) -> [k] -> Map.Map k v 16 | fromKeys toValue keys = 17 | Map.fromList $ map (\k -> (k, toValue k)) keys 18 | 19 | fromKeysA :: (Applicative f, Ord k) => (k -> f v) -> [k] -> f (Map.Map k v) 20 | fromKeysA toValue keys = 21 | Map.fromList <$> traverse (\k -> (,) k <$> toValue k) keys 22 | 23 | fromValues :: (Ord k) => (v -> k) -> [v] -> Map.Map k v 24 | fromValues toKey values = 25 | Map.fromList $ map (\v -> (toKey v, v)) values 26 | 27 | -- ANY 28 | 29 | any :: (v -> Bool) -> Map.Map k v -> Bool 30 | any isGood = go 31 | where 32 | go Tip = False 33 | go (Bin _ _ v l r) = isGood v || go l || go r 34 | -------------------------------------------------------------------------------- /compiler/src/Data/NonEmptyList.hs: 
-------------------------------------------------------------------------------- 1 | module Data.NonEmptyList 2 | ( List (..), 3 | singleton, 4 | toList, 5 | fromList, 6 | sortBy, 7 | ) 8 | where 9 | 10 | import Control.Monad (liftM2) 11 | import Data.Binary (Binary, get, put) 12 | import Data.List qualified as List 13 | 14 | -- LIST 15 | 16 | data List a 17 | = List a [a] 18 | 19 | singleton :: a -> List a 20 | singleton a = 21 | List a [] 22 | 23 | toList :: List a -> [a] 24 | toList (List x xs) = 25 | x : xs 26 | 27 | fromList :: [a] -> Maybe (List a) 28 | fromList [] = Nothing 29 | fromList (x : xs) = Just (List x xs) 30 | 31 | -- INSTANCES 32 | 33 | instance Functor List where 34 | fmap func (List x xs) = List (func x) (map func xs) 35 | 36 | instance Traversable List where 37 | traverse func (List x xs) = List <$> func x <*> traverse func xs 38 | 39 | instance Foldable List where 40 | foldr step state (List x xs) = step x (foldr step state xs) 41 | foldl step state (List x xs) = foldl step (step state x) xs 42 | foldl1 step (List x xs) = foldl step x xs 43 | 44 | -- SORT BY 45 | 46 | sortBy :: (Ord b) => (a -> b) -> List a -> List a 47 | sortBy toRank (List x xs) = 48 | let comparison a b = 49 | compare (toRank a) (toRank b) 50 | in case List.sortBy comparison xs of 51 | [] -> 52 | List x [] 53 | y : ys -> 54 | case comparison x y of 55 | LT -> List x (y : ys) 56 | EQ -> List x (y : ys) 57 | GT -> List y (List.insertBy comparison x ys) 58 | 59 | -- BINARY 60 | 61 | instance (Binary a) => Binary (List a) where 62 | put (List x xs) = put x >> put xs 63 | get = liftM2 List get get 64 | -------------------------------------------------------------------------------- /compiler/src/Data/OneOrMore.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wall #-} 2 | 3 | module Data.OneOrMore 4 | ( OneOrMore (..), 5 | one, 6 | more, 7 | map, 8 | destruct, 9 | getFirstTwo, 10 | ) 11 | where 12 | 13 | import Prelude hiding (map) 14 | 15 | -- ONE OR MORE 16 | 17 | data OneOrMore a 18 | = One a 19 | | More (OneOrMore a) (OneOrMore a) 20 | 21 | one :: a -> OneOrMore a 22 | one = 23 | One 24 | 25 | more :: OneOrMore a -> OneOrMore a -> OneOrMore a 26 | more = 27 | More 28 | 29 | -- MAP 30 | 31 | map :: (a -> b) -> OneOrMore a -> OneOrMore b 32 | map func oneOrMore = 33 | case oneOrMore of 34 | One value -> 35 | One (func value) 36 | More left right -> 37 | More (map func left) (map func right) 38 | 39 | -- DESTRUCT 40 | 41 | destruct :: (a -> [a] -> b) -> OneOrMore a -> b 42 | destruct func oneOrMore = 43 | destructLeft func oneOrMore [] 44 | 45 | destructLeft :: (a -> [a] -> b) -> OneOrMore a -> [a] -> b 46 | destructLeft func oneOrMore xs = 47 | case oneOrMore of 48 | One x -> 49 | func x xs 50 | More a b -> 51 | destructLeft func a (destructRight b xs) 52 | 53 | destructRight :: OneOrMore a -> [a] -> [a] 54 | destructRight oneOrMore xs = 55 | case oneOrMore of 56 | One x -> 57 | x : xs 58 | More a b -> 59 | destructRight a (destructRight b xs) 60 | 61 | -- GET FIRST TWO 62 | 63 | getFirstTwo :: OneOrMore a -> OneOrMore a -> (a, a) 64 | getFirstTwo left right = 65 | case left of 66 | One x -> 67 | (x, getFirstOne right) 68 | More lleft lright -> 69 | getFirstTwo lleft lright 70 | 71 | getFirstOne :: OneOrMore a -> a 72 | getFirstOne oneOrMore = 73 | case oneOrMore of 74 | One x -> 75 | x 76 | More left _ -> 77 | getFirstOne left 78 | -------------------------------------------------------------------------------- /compiler/src/Generate/Html.hs: 
-------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE QuasiQuotes #-} 3 | 4 | module Generate.Html 5 | ( sandwich, 6 | leadingLines, 7 | ) 8 | where 9 | 10 | import Data.ByteString.Builder qualified as B 11 | import Data.Name qualified as Name 12 | import Text.RawString.QQ (r) 13 | 14 | leadingLines :: Int 15 | leadingLines = 2 16 | 17 | sandwich :: Name.Name -> B.Builder -> B.Builder 18 | sandwich moduleName javascript = 19 | let name = Name.toBuilder moduleName 20 | in [r| 21 | 22 | 23 | 24 | |] 25 | <> name 26 | <> [r| 27 | 28 | 29 | 30 | 31 | 32 |

33 | 
34 | 
56 | 
57 | 
58 | |]
59 | 
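A minimal usage sketch for the API above, not part of the repo: `sandwich` wraps compiled JavaScript for a module in an HTML page, and `leadingLines` reports how many lines the wrapper inserts before the spliced JavaScript (useful when adjusting line positions). Building the module name via `Name.fromChars` is an assumption; any equivalent constructor would do.

import Data.ByteString.Builder qualified as B
import Data.Name qualified as Name
import Generate.Html qualified as Html

-- Sketch: wrap the compiled JS for `Main` in a standalone HTML page.
htmlPage :: B.Builder -> B.Builder
htmlPage compiledJs =
  Html.sandwich (Name.fromChars "Main") compiledJs

-- Offset to add to generated-code line numbers (assumed use of leadingLines).
htmlOffset :: Int
htmlOffset = Html.leadingLines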


--------------------------------------------------------------------------------
/compiler/src/Generate/JavaScript/Functions.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | {-# LANGUAGE QuasiQuotes #-}
 3 | 
 4 | module Generate.JavaScript.Functions
 5 |   ( functions,
 6 |   )
 7 | where
 8 | 
 9 | import Data.ByteString.Builder qualified as B
10 | import Text.RawString.QQ (r)
11 | 
12 | -- FUNCTIONS
13 | 
14 | functions :: B.Builder
15 | functions =
16 |   [r|
17 | 
18 | function F(arity, fun, wrapper) {
19 |   wrapper.a = arity;
20 |   wrapper.f = fun;
21 |   return wrapper;
22 | }
23 | 
24 | function F2(fun) {
25 |   return F(2, fun, function(a) { return function(b) { return fun(a,b); }; })
26 | }
27 | function F3(fun) {
28 |   return F(3, fun, function(a) {
29 |     return function(b) { return function(c) { return fun(a, b, c); }; };
30 |   });
31 | }
32 | function F4(fun) {
33 |   return F(4, fun, function(a) { return function(b) { return function(c) {
34 |     return function(d) { return fun(a, b, c, d); }; }; };
35 |   });
36 | }
37 | function F5(fun) {
38 |   return F(5, fun, function(a) { return function(b) { return function(c) {
39 |     return function(d) { return function(e) { return fun(a, b, c, d, e); }; }; }; };
40 |   });
41 | }
42 | function F6(fun) {
43 |   return F(6, fun, function(a) { return function(b) { return function(c) {
44 |     return function(d) { return function(e) { return function(f) {
45 |     return fun(a, b, c, d, e, f); }; }; }; }; };
46 |   });
47 | }
48 | function F7(fun) {
49 |   return F(7, fun, function(a) { return function(b) { return function(c) {
50 |     return function(d) { return function(e) { return function(f) {
51 |     return function(g) { return fun(a, b, c, d, e, f, g); }; }; }; }; }; };
52 |   });
53 | }
54 | function F8(fun) {
55 |   return F(8, fun, function(a) { return function(b) { return function(c) {
56 |     return function(d) { return function(e) { return function(f) {
57 |     return function(g) { return function(h) {
58 |     return fun(a, b, c, d, e, f, g, h); }; }; }; }; }; }; };
59 |   });
60 | }
61 | function F9(fun) {
62 |   return F(9, fun, function(a) { return function(b) { return function(c) {
63 |     return function(d) { return function(e) { return function(f) {
64 |     return function(g) { return function(h) { return function(i) {
65 |     return fun(a, b, c, d, e, f, g, h, i); }; }; }; }; }; }; }; };
66 |   });
67 | }
68 | 
69 | function A2(fun, a, b) {
70 |   return fun.a === 2 ? fun.f(a, b) : fun(a)(b);
71 | }
72 | function A3(fun, a, b, c) {
73 |   return fun.a === 3 ? fun.f(a, b, c) : fun(a)(b)(c);
74 | }
75 | function A4(fun, a, b, c, d) {
76 |   return fun.a === 4 ? fun.f(a, b, c, d) : fun(a)(b)(c)(d);
77 | }
78 | function A5(fun, a, b, c, d, e) {
79 |   return fun.a === 5 ? fun.f(a, b, c, d, e) : fun(a)(b)(c)(d)(e);
80 | }
81 | function A6(fun, a, b, c, d, e, f) {
82 |   return fun.a === 6 ? fun.f(a, b, c, d, e, f) : fun(a)(b)(c)(d)(e)(f);
83 | }
84 | function A7(fun, a, b, c, d, e, f, g) {
85 |   return fun.a === 7 ? fun.f(a, b, c, d, e, f, g) : fun(a)(b)(c)(d)(e)(f)(g);
86 | }
87 | function A8(fun, a, b, c, d, e, f, g, h) {
88 |   return fun.a === 8 ? fun.f(a, b, c, d, e, f, g, h) : fun(a)(b)(c)(d)(e)(f)(g)(h);
89 | }
90 | function A9(fun, a, b, c, d, e, f, g, h, i) {
91 |   return fun.a === 9 ? fun.f(a, b, c, d, e, f, g, h, i) : fun(a)(b)(c)(d)(e)(f)(g)(h)(i);
92 | }
93 | 
94 | |]
95 | 
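The wrappers above attach an arity (`.a`) and an uncurried fast path (`.f`) to a curried function, so `A2`..`A9` can call a saturated application directly and fall back to one-argument-at-a-time application otherwise. Below is a hedged sketch of emitting the `functions` builder ahead of generated code; the file-writing helper is illustrative, not from the repo.

import Data.ByteString.Builder qualified as B
import Generate.JavaScript.Functions qualified as Functions
import System.IO (IOMode (WriteMode), withFile)

-- Illustrative helper: prepend the kernel wrappers so the F2..F9 / A2..A9
-- helpers are defined before the generated program runs.
writeProgram :: FilePath -> B.Builder -> IO ()
writeProgram path generatedJs =
  withFile path WriteMode $ \handle ->
    B.hPutBuilder handle (Functions.functions <> generatedJs)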


--------------------------------------------------------------------------------
/compiler/src/Generate/Mode.hs:
--------------------------------------------------------------------------------
 1 | module Generate.Mode
 2 |   ( Mode (..),
 3 |     ShortFieldNames,
 4 |     shortenFieldNames,
 5 |   )
 6 | where
 7 | 
 8 | import AST.Optimized qualified as Opt
 9 | import Data.List qualified as List
10 | import Data.Map qualified as Map
11 | import Data.Name qualified as Name
12 | import Generate.JavaScript.Name qualified as JsName
13 | 
14 | -- MODE
15 | 
16 | data Mode
17 |   = Dev
18 |   | Prod ShortFieldNames
19 | 
20 | -- SHORTEN FIELD NAMES
21 | 
22 | type ShortFieldNames =
23 |   Map.Map Name.Name JsName.Name
24 | 
25 | shortenFieldNames :: Opt.GlobalGraph -> ShortFieldNames
26 | shortenFieldNames (Opt.GlobalGraph _ frequencies) =
27 |   Map.foldr addToShortNames Map.empty $
28 |     Map.foldrWithKey addToBuckets Map.empty frequencies
29 | 
30 | addToBuckets :: Name.Name -> Int -> Map.Map Int [Name.Name] -> Map.Map Int [Name.Name]
31 | addToBuckets field frequency buckets =
32 |   Map.insertWith (++) frequency [field] buckets
33 | 
34 | addToShortNames :: [Name.Name] -> ShortFieldNames -> ShortFieldNames
35 | addToShortNames fields shortNames =
36 |   List.foldl' addField shortNames fields
37 | 
38 | addField :: ShortFieldNames -> Name.Name -> ShortFieldNames
39 | addField shortNames field =
40 |   let rename = JsName.fromInt (Map.size shortNames)
41 |    in Map.insert field rename shortNames
42 | 
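`shortenFieldNames` buckets record fields by how often they are used and then assigns generated names, so fields from the most-used buckets tend to receive the shortest identifiers. A hedged sketch of building a `Prod` mode; it assumes that `Opt.GlobalGraph`'s first field is a `Map` (so `Map.empty` type-checks) and that `Name.fromChars` exists, neither of which is shown in this file.

import AST.Optimized qualified as Opt
import Data.Map qualified as Map
import Data.Name qualified as Name
import Generate.Mode qualified as Mode

-- Sketch: a Prod mode whose field renaming favours frequently used fields.
prodMode :: Mode.Mode
prodMode =
  let frequencies =
        Map.fromList
          [ (Name.fromChars "position", 40),
            (Name.fromChars "velocity", 3)
          ]
   in Mode.Prod (Mode.shortenFieldNames (Opt.GlobalGraph Map.empty frequencies))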


--------------------------------------------------------------------------------
/compiler/src/Generate/Node.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | {-# LANGUAGE QuasiQuotes #-}
 3 | 
 4 | module Generate.Node
 5 |   ( sandwich,
 6 |     leadingLines,
 7 |   )
 8 | where
 9 | 
10 | import Data.ByteString.Builder qualified as B
11 | import Data.Name qualified as Name
12 | import Text.RawString.QQ (r)
13 | 
14 | leadingLines :: Int
15 | leadingLines = 7
16 | 
17 | sandwich :: Name.Name -> B.Builder -> B.Builder
18 | sandwich moduleName javascript =
19 |   let name = Name.toBuilder moduleName
20 |    in [r|#!/usr/bin/env node
21 | 
22 | if (parseInt(process.versions.node.split('.')[0]) < 20) {
23 |   throw new Error("This program requires Node v20 or later to run")
24 | }
25 | 
26 | try {
27 | |]
28 |         <> javascript
29 |         <> [r|
30 | |]
31 |         <> [r|this.Gren.|]
32 |         <> name
33 |         <> [r|.init({});
34 | }
35 | catch (e)
36 | {
37 | console.error(e);
38 | }
39 | |]
40 | 
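A usage sketch mirroring the HTML case: `sandwich` wraps compiled JavaScript in a Node script that checks the Node version and calls `this.Gren.<Module>.init({})` inside a try/catch, contributing `leadingLines` (7) lines before the JavaScript body. `Name.fromChars` is an assumed helper.

import Data.ByteString.Builder qualified as B
import Data.Name qualified as Name
import Generate.Node qualified as Node

-- Sketch: wrap compiled JS for `Main` as an executable Node script.
nodeScript :: B.Builder -> B.Builder
nodeScript compiledJs =
  Node.sandwich (Name.fromChars "Main") compiledJs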


--------------------------------------------------------------------------------
/compiler/src/Generate/VLQ.hs:
--------------------------------------------------------------------------------
 1 | module Generate.VLQ
 2 |   ( encode,
 3 |   )
 4 | where
 5 | 
 6 | import Data.Bits ((.&.), (.|.))
 7 | import Data.Bits qualified as Bit
 8 | import Data.Foldable.WithIndex (ifoldr)
 9 | import Data.Function ((&))
10 | import Data.List qualified as List
11 | import Data.Map (Map, (!))
12 | import Data.Map qualified as Map
13 | 
14 | {- Ported from the Elm package Janiczek/elm-vlq
15 | -}
16 | 
17 | -- Int is converted to 32-bit representation before encoding
18 | encode :: Int -> String
19 | encode num =
20 |   let numWithSign =
21 |         if num < 0
22 |           then ((negate num .&. usableBits) `Bit.shiftL` 1) .|. 1
23 |           else (num .&. usableBits) `Bit.shiftL` 1
24 |    in encodeHelp numWithSign ""
25 | 
26 | usableBits :: Int
27 | usableBits =
28 |   0xFFFFFFFF `Bit.shiftR` 1
29 | 
30 | encodeHelp :: Int -> String -> String
31 | encodeHelp num acc =
32 |   let clamped =
33 |         num .&. 31
34 | 
35 |       newNum =
36 |         num `Bit.shiftR` 5
37 | 
38 |       newClamped =
39 |         if newNum > 0
40 |           then clamped .|. 32
41 |           else clamped
42 | 
43 |       newAcc =
44 |         base64Table ! newClamped : acc
45 |    in if newNum > 0
46 |         then encodeHelp newNum newAcc
47 |         else List.reverse newAcc
48 | 
49 | base64Table :: Map Int Char
50 | base64Table =
51 |   "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='"
52 |     & ifoldr Map.insert Map.empty
53 | 
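`encode` emits the base64 VLQ groups used by source maps: the value is shifted left by one with the sign stored in the lowest bit, then written five bits at a time (least significant group first) with bit six marking a continuation. A few worked values, matching the standard source-map encoding:

import Generate.VLQ qualified as VLQ

-- Worked cases:
--   VLQ.encode 0    == "A"
--   VLQ.encode 1    == "C"
--   VLQ.encode (-1) == "D"
--   VLQ.encode 16   == "gB"   -- needs two 5-bit groups, so a continuation bit
vlqExamples :: [String]
vlqExamples = map VLQ.encode [0, 1, -1, 16]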


--------------------------------------------------------------------------------
/compiler/src/Gren/Compiler/Imports.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | {-# OPTIONS_GHC -Wall #-}
 3 | 
 4 | module Gren.Compiler.Imports
 5 |   ( defaults,
 6 |   )
 7 | where
 8 | 
 9 | import AST.Source qualified as Src
10 | import AST.SourceComments qualified as SC
11 | import Data.Name qualified as Name
12 | import Gren.ModuleName qualified as ModuleName
13 | import Reporting.Annotation qualified as A
14 | 
15 | -- DEFAULTS
16 | 
17 | defaults :: [Src.Import]
18 | defaults =
19 |   [ import_ ModuleName.basics Nothing Src.Open,
20 |     import_ ModuleName.debug Nothing closed,
21 |     import_ ModuleName.array Nothing (typeClosed Name.array),
22 |     import_ ModuleName.maybe Nothing (typeOpen Name.maybe),
23 |     import_ ModuleName.result Nothing (typeOpen Name.result),
24 |     import_ ModuleName.string Nothing (typeClosed Name.string),
25 |     import_ ModuleName.char Nothing (typeClosed Name.char),
26 |     import_ ModuleName.platform Nothing (typeClosed Name.program),
27 |     import_ ModuleName.cmd (Just Name.cmd) (typeClosed Name.cmd),
28 |     import_ ModuleName.sub (Just Name.sub) (typeClosed Name.sub)
29 |   ]
30 | 
31 | import_ :: ModuleName.Canonical -> Maybe Name.Name -> Src.Exposing -> Src.Import
32 | import_ (ModuleName.Canonical _ name) maybeAlias exposing =
33 |   let maybeAliasWithComments = fmap (,(SC.ImportAliasComments [] [])) maybeAlias
34 |    in Src.Import (A.At A.zero name) maybeAliasWithComments exposing Nothing (SC.ImportComments [] [])
35 | 
36 | -- EXPOSING
37 | 
38 | closed :: Src.Exposing
39 | closed =
40 |   Src.Explicit []
41 | 
42 | typeOpen :: Name.Name -> Src.Exposing
43 | typeOpen name =
44 |   Src.Explicit [Src.Upper (A.At A.zero name) (Src.Public A.zero)]
45 | 
46 | typeClosed :: Name.Name -> Src.Exposing
47 | typeClosed name =
48 |   Src.Explicit [Src.Upper (A.At A.zero name) Src.Private]
49 | 
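`defaults` is the set of imports every Gren module receives implicitly: Basics is exposed openly, Maybe and Result expose their types with constructors, and the rest expose either a closed type or nothing. A small sketch that extracts the raw module names, relying only on the `Src.Import` shape used above.

import AST.Source qualified as Src
import Data.Name qualified as Name
import Gren.Compiler.Imports qualified as Imports
import Reporting.Annotation qualified as A

-- Sketch: the module names that are implicitly imported into every module.
defaultImportNames :: [Name.Name]
defaultImportNames =
  [ name | Src.Import (A.At _ name) _ _ _ _ <- Imports.defaults ]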


--------------------------------------------------------------------------------
/compiler/src/Gren/Compiler/Type.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE OverloadedStrings #-}
  2 | {-# OPTIONS_GHC -Wall -Wno-incomplete-uni-patterns #-}
  3 | 
  4 | module Gren.Compiler.Type
  5 |   ( Type (..),
  6 |     RT.Context (..),
  7 |     toDoc,
  8 |     DebugMetadata (..),
  9 |     Alias (..),
 10 |     Union (..),
 11 |     encode,
 12 |     decoder,
 13 |     encodeMetadata,
 14 |   )
 15 | where
 16 | 
 17 | import AST.Source qualified as Src
 18 | import Data.Name qualified as Name
 19 | import Json.Decode qualified as D
 20 | import Json.Encode ((==>))
 21 | import Json.Encode qualified as E
 22 | import Json.String qualified as Json
 23 | import Parse.Primitives qualified as P
 24 | import Parse.Type qualified as Type
 25 | import Reporting.Annotation qualified as A
 26 | import Reporting.Doc qualified as D
 27 | import Reporting.Render.Type qualified as RT
 28 | import Reporting.Render.Type.Localizer qualified as L
 29 | 
 30 | -- TYPES
 31 | 
 32 | data Type
 33 |   = Lambda Type Type
 34 |   | Var Name.Name
 35 |   | Type Name.Name [Type]
 36 |   | Record [(Name.Name, Type)] (Maybe Name.Name)
 37 | 
 38 | data DebugMetadata = DebugMetadata
 39 |   { _message :: Type,
 40 |     _aliases :: [Alias],
 41 |     _unions :: [Union]
 42 |   }
 43 | 
 44 | data Alias = Alias Name.Name [Name.Name] Type
 45 | 
 46 | data Union = Union Name.Name [Name.Name] [(Name.Name, [Type])]
 47 | 
 48 | -- TO DOC
 49 | 
 50 | toDoc :: L.Localizer -> RT.Context -> Type -> D.Doc
 51 | toDoc localizer context tipe =
 52 |   case tipe of
 53 |     Lambda _ _ ->
 54 |       let a : b : cs =
 55 |             map (toDoc localizer RT.Func) (collectLambdas tipe)
 56 |        in RT.lambda context a b cs
 57 |     Var name ->
 58 |       D.fromName name
 59 |     Type name args ->
 60 |       RT.apply
 61 |         context
 62 |         (D.fromName name)
 63 |         (map (toDoc localizer RT.App) args)
 64 |     Record fields ext ->
 65 |       RT.record
 66 |         (map (entryToDoc localizer) fields)
 67 |         (fmap D.fromName ext)
 68 | 
 69 | entryToDoc :: L.Localizer -> (Name.Name, Type) -> (D.Doc, D.Doc)
 70 | entryToDoc localizer (field, fieldType) =
 71 |   (D.fromName field, toDoc localizer RT.None fieldType)
 72 | 
 73 | collectLambdas :: Type -> [Type]
 74 | collectLambdas tipe =
 75 |   case tipe of
 76 |     Lambda arg body ->
 77 |       arg : collectLambdas body
 78 |     _ ->
 79 |       [tipe]
 80 | 
 81 | -- JSON for TYPE
 82 | 
 83 | encode :: Type -> E.Value
 84 | encode tipe =
 85 |   E.chars $ D.toLine (toDoc L.empty RT.None tipe)
 86 | 
 87 | decoder :: D.Decoder () Type
 88 | decoder =
 89 |   let parser =
 90 |         P.specialize (\_ _ _ -> ()) (fromRawType . fst . fst <$> Type.expression)
 91 |    in D.customString parser (\_ _ -> ())
 92 | 
 93 | fromRawType :: Src.Type -> Type
 94 | fromRawType (A.At _ astType) =
 95 |   case astType of
 96 |     Src.TLambda t1 t2 _ ->
 97 |       Lambda (fromRawType t1) (fromRawType t2)
 98 |     Src.TVar x ->
 99 |       Var x
100 |     Src.TType _ name args ->
101 |       Type name (map (fromRawType . snd) args)
102 |     Src.TTypeQual _ _ name args ->
103 |       Type name (map (fromRawType . snd) args)
104 |     Src.TRecord fields ext ->
105 |       let fromField (A.At _ field, tipe, _) = (field, fromRawType tipe)
106 |        in Record
107 |             (map fromField fields)
108 |             (fmap (A.toValue . fst) ext)
109 |     Src.TParens inner _ ->
110 |       fromRawType inner
111 | 
112 | -- JSON for PROGRAM
113 | 
114 | encodeMetadata :: DebugMetadata -> E.Value
115 | encodeMetadata (DebugMetadata msg aliases unions) =
116 |   E.object
117 |     [ "message" ==> encode msg,
118 |       "aliases" ==> E.object (map toTypeAliasField aliases),
119 |       "unions" ==> E.object (map toCustomTypeField unions)
120 |     ]
121 | 
122 | toTypeAliasField :: Alias -> (Json.String, E.Value)
123 | toTypeAliasField (Alias name args tipe) =
124 |   ( Json.fromName name,
125 |     E.object
126 |       [ "args" ==> E.list E.name args,
127 |         "type" ==> encode tipe
128 |       ]
129 |   )
130 | 
131 | toCustomTypeField :: Union -> (Json.String, E.Value)
132 | toCustomTypeField (Union name args constructors) =
133 |   ( Json.fromName name,
134 |     E.object
135 |       [ "args" ==> E.list E.name args,
136 |         "tags" ==> E.object (map toVariantObject constructors)
137 |       ]
138 |   )
139 | 
140 | toVariantObject :: (Name.Name, [Type]) -> (Json.String, E.Value)
141 | toVariantObject (name, args) =
142 |   (Json.fromName name, E.list encode args)
143 | 
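The `Type` values in this module are the simplified types used for docs and editor output; `encode` renders a type through `toDoc` and emits the resulting string. A hedged sketch building `a -> Maybe a`; `Name.fromChars` is an assumed constructor for names.

import Data.Name qualified as Name
import Gren.Compiler.Type qualified as CT
import Json.Encode qualified as E

-- Sketch: the JSON encoding of `a -> Maybe a` is its rendered type string.
maybeA :: CT.Type
maybeA =
  CT.Lambda
    (CT.Var (Name.fromChars "a"))
    (CT.Type (Name.fromChars "Maybe") [CT.Var (Name.fromChars "a")])

encodedMaybeA :: E.Value
encodedMaybeA = CT.encode maybeA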


--------------------------------------------------------------------------------
/compiler/src/Gren/Float.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE EmptyDataDecls #-}
 2 | {-# LANGUAGE FlexibleInstances #-}
 3 | {-# OPTIONS_GHC -Wall #-}
 4 | 
 5 | module Gren.Float
 6 |   ( Float,
 7 |     fromPtr,
 8 |     toBuilder,
 9 |   )
10 | where
11 | 
12 | import Data.Binary (Binary, get, put)
13 | import Data.ByteString.Builder qualified as B
14 | import Data.Utf8 qualified as Utf8
15 | import Data.Word (Word8)
16 | import Foreign.Ptr (Ptr)
17 | import Prelude hiding (Float)
18 | 
19 | -- FLOATS
20 | 
21 | type Float =
22 |   Utf8.Utf8 GREN_FLOAT
23 | 
24 | data GREN_FLOAT
25 | 
26 | -- HELPERS
27 | 
28 | fromPtr :: Ptr Word8 -> Ptr Word8 -> Float
29 | fromPtr =
30 |   Utf8.fromPtr
31 | 
32 | toBuilder :: Float -> B.Builder
33 | toBuilder =
34 |   Utf8.toBuilder
35 | 
36 | -- BINARY
37 | 
38 | instance Binary (Utf8.Utf8 GREN_FLOAT) where
39 |   get = Utf8.getUnder256
40 |   put = Utf8.putUnder256
41 | 


--------------------------------------------------------------------------------
/compiler/src/Gren/Int.hs:
--------------------------------------------------------------------------------
1 | module Gren.Int (IntFormat (..)) where
2 | 
3 | data IntFormat = DecimalInt | HexInt
4 |   deriving (Show)
5 | 


--------------------------------------------------------------------------------
/compiler/src/Gren/Magnitude.hs:
--------------------------------------------------------------------------------
 1 | module Gren.Magnitude
 2 |   ( Magnitude (..),
 3 |     toChars,
 4 |   )
 5 | where
 6 | 
 7 | -- MAGNITUDE
 8 | 
 9 | data Magnitude
10 |   = PATCH
11 |   | MINOR
12 |   | MAJOR
13 |   deriving (Eq, Ord)
14 | 
15 | toChars :: Magnitude -> String
16 | toChars magnitude =
17 |   case magnitude of
18 |     PATCH -> "PATCH"
19 |     MINOR -> "MINOR"
20 |     MAJOR -> "MAJOR"
21 | 


--------------------------------------------------------------------------------
/compiler/src/Gren/ModuleName.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE BangPatterns #-}
  2 | {-# LANGUAGE OverloadedStrings #-}
  3 | {-# LANGUAGE UnboxedTuples #-}
  4 | 
  5 | module Gren.ModuleName
  6 |   ( Raw,
  7 |     toChars,
  8 |     toFilePath,
  9 |     toHyphenPath,
 10 |     --
 11 |     encode,
 12 |     decoder,
 13 |     parser,
 14 |     --
 15 |     Canonical (..),
 16 |     basics,
 17 |     char,
 18 |     string,
 19 |     maybe,
 20 |     result,
 21 |     array,
 22 |     dict,
 23 |     platform,
 24 |     cmd,
 25 |     sub,
 26 |     debug,
 27 |     virtualDom,
 28 |     jsonDecode,
 29 |     jsonEncode,
 30 |   )
 31 | where
 32 | 
 33 | import Control.Monad (liftM2)
 34 | import Data.Binary (Binary (..))
 35 | import Data.Name qualified as Name
 36 | import Data.Utf8 qualified as Utf8
 37 | import Data.Word (Word8)
 38 | import Foreign.Ptr (Ptr, minusPtr, plusPtr)
 39 | import Gren.Package qualified as Pkg
 40 | import Json.Decode qualified as D
 41 | import Json.Encode qualified as E
 42 | import Parse.Primitives (Col, Row)
 43 | import Parse.Primitives qualified as P
 44 | import Parse.Variable qualified as Var
 45 | import System.FilePath qualified as FP
 46 | import Prelude hiding (maybe)
 47 | 
 48 | -- RAW
 49 | 
 50 | type Raw = Name.Name
 51 | 
 52 | toChars :: Raw -> String
 53 | toChars =
 54 |   Name.toChars
 55 | 
 56 | toFilePath :: Raw -> FilePath
 57 | toFilePath name =
 58 |   map (\c -> if c == '.' then FP.pathSeparator else c) (Name.toChars name)
 59 | 
 60 | toHyphenPath :: Raw -> FilePath
 61 | toHyphenPath name =
 62 |   map (\c -> if c == '.' then '-' else c) (Name.toChars name)
 63 | 
 64 | -- JSON
 65 | 
 66 | encode :: Raw -> E.Value
 67 | encode =
 68 |   E.name
 69 | 
 70 | decoder :: D.Decoder (Row, Col) Raw
 71 | decoder =
 72 |   D.customString parser (,)
 73 | 
 74 | -- PARSER
 75 | 
 76 | parser :: P.Parser (Row, Col) Raw
 77 | parser =
 78 |   P.Parser $ \(P.State src pos end indent row col) cok _ cerr eerr ->
 79 |     let (# isGood, newPos, newCol #) = chompStart pos end col
 80 |      in if isGood && minusPtr newPos pos < 256
 81 |           then
 82 |             let !newState = P.State src newPos end indent row newCol
 83 |              in cok (Utf8.fromPtr pos newPos) newState
 84 |           else
 85 |             if col == newCol
 86 |               then eerr row newCol (,)
 87 |               else cerr row newCol (,)
 88 | 
 89 | chompStart :: Ptr Word8 -> Ptr Word8 -> Col -> (# Bool, Ptr Word8, Col #)
 90 | chompStart pos end col =
 91 |   let !width = Var.getUpperWidth pos end
 92 |    in if width == 0
 93 |         then (# False, pos, col #)
 94 |         else chompInner (plusPtr pos width) end (col + 1)
 95 | 
 96 | chompInner :: Ptr Word8 -> Ptr Word8 -> Col -> (# Bool, Ptr Word8, Col #)
 97 | chompInner pos end col =
 98 |   if pos >= end
 99 |     then (# True, pos, col #)
100 |     else
101 |       let !word = P.unsafeIndex pos
102 |           !width = Var.getInnerWidthHelp pos end word
103 |        in if width == 0
104 |             then
105 |               if word == 0x2E {-.-}
106 |                 then chompStart (plusPtr pos 1) end (col + 1)
107 |                 else (# True, pos, col #)
108 |             else chompInner (plusPtr pos width) end (col + 1)
109 | 
110 | -- CANONICAL
111 | 
112 | data Canonical = Canonical
113 |   { _package :: !Pkg.Name,
114 |     _module :: !Name.Name
115 |   }
116 |   deriving (Show)
117 | 
118 | -- INSTANCES
119 | 
120 | instance Eq Canonical where
121 |   (==) (Canonical pkg1 name1) (Canonical pkg2 name2) =
122 |     name1 == name2 && pkg1 == pkg2
123 | 
124 | instance Ord Canonical where
125 |   compare (Canonical pkg1 name1) (Canonical pkg2 name2) =
126 |     case compare name1 name2 of
127 |       LT -> LT
128 |       EQ -> compare pkg1 pkg2
129 |       GT -> GT
130 | 
131 | instance Binary Canonical where
132 |   put (Canonical a b) = put a >> put b
133 |   get = liftM2 Canonical get get
134 | 
135 | -- CORE
136 | 
137 | basics :: Canonical
138 | basics = Canonical Pkg.core Name.basics
139 | 
140 | char :: Canonical
141 | char = Canonical Pkg.core Name.char
142 | 
143 | string :: Canonical
144 | string = Canonical Pkg.core Name.string
145 | 
146 | maybe :: Canonical
147 | maybe = Canonical Pkg.core Name.maybe
148 | 
149 | result :: Canonical
150 | result = Canonical Pkg.core Name.result
151 | 
152 | array :: Canonical
153 | array = Canonical Pkg.core Name.array
154 | 
155 | dict :: Canonical
156 | dict = Canonical Pkg.core Name.dict
157 | 
158 | platform :: Canonical
159 | platform = Canonical Pkg.core Name.platform
160 | 
161 | cmd :: Canonical
162 | cmd = Canonical Pkg.core "Platform.Cmd"
163 | 
164 | sub :: Canonical
165 | sub = Canonical Pkg.core "Platform.Sub"
166 | 
167 | debug :: Canonical
168 | debug = Canonical Pkg.core Name.debug
169 | 
170 | -- HTML
171 | 
172 | virtualDom :: Canonical
173 | virtualDom = Canonical Pkg.browser Name.virtualDom
174 | 
175 | -- JSON
176 | 
177 | jsonDecode :: Canonical
178 | jsonDecode = Canonical Pkg.core "Json.Decode"
179 | 
180 | jsonEncode :: Canonical
181 | jsonEncode = Canonical Pkg.core "Json.Encode"
182 | 
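`toFilePath` replaces the dots of a module name with the platform's path separator, while `toHyphenPath` uses hyphens, which suits flat file names. A sketch assuming `Name.fromChars`:

import Data.Name qualified as Name
import Gren.ModuleName qualified as ModuleName

-- Sketch: "Json.Decode" becomes "Json/Decode" (on POSIX) and "Json-Decode".
asFilePath :: FilePath
asFilePath = ModuleName.toFilePath (Name.fromChars "Json.Decode")

asHyphenPath :: FilePath
asHyphenPath = ModuleName.toHyphenPath (Name.fromChars "Json.Decode")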


--------------------------------------------------------------------------------
/compiler/src/Gren/String.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE BangPatterns #-}
  2 | {-# LANGUAGE EmptyDataDecls #-}
  3 | {-# LANGUAGE FlexibleInstances #-}
  4 | {-# OPTIONS_GHC -Wall -fno-warn-name-shadowing #-}
  5 | 
  6 | module Gren.String
  7 |   ( String,
  8 |     StringFormat (..),
  9 |     toChars,
 10 |     fromChars,
 11 |     toBuilder,
 12 |     Chunk (..),
 13 |     fromChunks,
 14 |   )
 15 | where
 16 | 
 17 | import Data.Binary (Binary, get, put)
 18 | import Data.Bits (shiftR, (.&.))
 19 | import Data.ByteString.Builder qualified as B
 20 | import Data.Utf8 (MBA, copyFromPtr, freeze, newByteArray, writeWord8)
 21 | import Data.Utf8 qualified as Utf8
 22 | import GHC.Exts (Ptr, RealWorld)
 23 | import GHC.IO (stToIO, unsafeDupablePerformIO)
 24 | import GHC.ST (ST)
 25 | import GHC.Word (Word8)
 26 | import Prelude hiding (String)
 27 | 
 28 | -- STRINGS
 29 | 
 30 | type String =
 31 |   Utf8.Utf8 GREN_STRING
 32 | 
 33 | data GREN_STRING
 34 | 
 35 | data StringFormat
 36 |   = SingleLineString
 37 |   | MultilineString
 38 |   deriving (Show)
 39 | 
 40 | -- HELPERS
 41 | 
 42 | toChars :: String -> [Char]
 43 | toChars =
 44 |   Utf8.toChars
 45 | 
 46 | fromChars :: [Char] -> String
 47 | fromChars =
 48 |   Utf8.fromChars
 49 | 
 50 | toBuilder :: String -> B.Builder
 51 | toBuilder =
 52 |   Utf8.toBuilder
 53 | 
 54 | -- FROM CHUNKS
 55 | 
 56 | data Chunk
 57 |   = Slice (Ptr Word8) Int
 58 |   | Escape Word8
 59 |   | CodePoint Int
 60 | 
 61 | fromChunks :: [Chunk] -> String
 62 | fromChunks chunks =
 63 |   unsafeDupablePerformIO
 64 |     ( stToIO
 65 |         ( do
 66 |             let !len = sum (map chunkToWidth chunks)
 67 |             mba <- newByteArray len
 68 |             writeChunks mba 0 chunks
 69 |             freeze mba
 70 |         )
 71 |     )
 72 | 
 73 | chunkToWidth :: Chunk -> Int
 74 | chunkToWidth chunk =
 75 |   case chunk of
 76 |     Slice _ len -> len
 77 |     Escape _ -> 2
 78 |     CodePoint c -> if c < 0xFFFF then 6 else 12
 79 | 
 80 | writeChunks :: MBA RealWorld -> Int -> [Chunk] -> ST RealWorld ()
 81 | writeChunks mba offset chunks =
 82 |   case chunks of
 83 |     [] ->
 84 |       return ()
 85 |     chunk : chunks ->
 86 |       case chunk of
 87 |         Slice ptr len ->
 88 |           do
 89 |             copyFromPtr ptr mba offset len
 90 |             let !newOffset = offset + len
 91 |             writeChunks mba newOffset chunks
 92 |         Escape word ->
 93 |           do
 94 |             writeWord8 mba offset 0x5C {- \ -}
 95 |             writeWord8 mba (offset + 1) word
 96 |             let !newOffset = offset + 2
 97 |             writeChunks mba newOffset chunks
 98 |         CodePoint code ->
 99 |           if code < 0xFFFF
100 |             then do
101 |               writeCode mba offset code
102 |               let !newOffset = offset + 6
103 |               writeChunks mba newOffset chunks
104 |             else do
105 |               let (hi, lo) = divMod (code - 0x10000) 0x400
106 |               writeCode mba (offset) (hi + 0xD800)
107 |               writeCode mba (offset + 6) (lo + 0xDC00)
108 |               let !newOffset = offset + 12
109 |               writeChunks mba newOffset chunks
110 | 
111 | writeCode :: MBA RealWorld -> Int -> Int -> ST RealWorld ()
112 | writeCode mba offset code =
113 |   do
114 |     writeWord8 mba offset 0x5C {- \ -}
115 |     writeWord8 mba (offset + 1) 0x75 {- u -}
116 |     writeHex mba (offset + 2) (shiftR code 12)
117 |     writeHex mba (offset + 3) (shiftR code 8)
118 |     writeHex mba (offset + 4) (shiftR code 4)
119 |     writeHex mba (offset + 5) code
120 | 
121 | writeHex :: MBA RealWorld -> Int -> Int -> ST RealWorld ()
122 | writeHex mba !offset !bits =
123 |   do
124 |     let !n = fromIntegral bits .&. 0x0F
125 |     writeWord8 mba offset (if n < 10 then 0x30 + n else 0x37 + n)
126 | 
127 | -- BINARY
128 | 
129 | instance Binary (Utf8.Utf8 GREN_STRING) where
130 |   get = Utf8.getVeryLong
131 |   put = Utf8.putVeryLong
132 | 
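`fromChunks` writes the escaped form of a string literal: an `Escape` chunk becomes a backslash plus the escape character (2 bytes), a BMP `CodePoint` becomes a 6-byte `\uXXXX` sequence, and a code point above the BMP is written as a 12-byte surrogate pair. A small sketch:

import Gren.String qualified as GS

-- Sketch: a newline escape plus U+1F600, producing the escaped text
-- "\n\uD83D\uDE00" (2 + 12 bytes).
escaped :: GS.String
escaped =
  GS.fromChunks
    [ GS.Escape 0x6E {- n -},
      GS.CodePoint 0x1F600
    ]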


--------------------------------------------------------------------------------
/compiler/src/Gren/Version.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE BangPatterns #-}
  2 | {-# LANGUAGE UnboxedTuples #-}
  3 | {-# OPTIONS_GHC -Wall #-}
  4 | 
  5 | module Gren.Version
  6 |   ( Version (..),
  7 |     one,
  8 |     max,
  9 |     compiler,
 10 |     bumpPatch,
 11 |     bumpMinor,
 12 |     bumpMajor,
 13 |     toChars,
 14 |     --
 15 |     decoder,
 16 |     encode,
 17 |     --
 18 |     parser,
 19 |   )
 20 | where
 21 | 
 22 | import Control.Monad (liftM3)
 23 | import Data.Binary (Binary, get, getWord8, put, putWord8)
 24 | import Data.Version qualified as Version
 25 | import Data.Word (Word16, Word8)
 26 | import Foreign.Ptr (Ptr, minusPtr, plusPtr)
 27 | import Json.Decode qualified as D
 28 | import Json.Encode qualified as E
 29 | import Parse.Primitives (Col, Row)
 30 | import Parse.Primitives qualified as P
 31 | import Paths_gren qualified
 32 | import Prelude hiding (max)
 33 | 
 34 | -- VERSION
 35 | 
 36 | data Version = Version
 37 |   { _major :: {-# UNPACK #-} !Word16,
 38 |     _minor :: {-# UNPACK #-} !Word16,
 39 |     _patch :: {-# UNPACK #-} !Word16
 40 |   }
 41 |   deriving (Eq, Ord, Show)
 42 | 
 43 | one :: Version
 44 | one =
 45 |   Version 1 0 0
 46 | 
 47 | max :: Version
 48 | max =
 49 |   Version maxBound 0 0
 50 | 
 51 | compiler :: Version
 52 | compiler =
 53 |   case map fromIntegral (Version.versionBranch Paths_gren.version) of
 54 |     major : minor : patch : _ ->
 55 |       Version major minor patch
 56 |     [major, minor] ->
 57 |       Version major minor 0
 58 |     [major] ->
 59 |       Version major 0 0
 60 |     [] ->
 61 |       error "could not detect version of the compiler you are using"
 62 | 
 63 | -- BUMP
 64 | 
 65 | bumpPatch :: Version -> Version
 66 | bumpPatch (Version major minor patch) =
 67 |   Version major minor (patch + 1)
 68 | 
 69 | bumpMinor :: Version -> Version
 70 | bumpMinor (Version major minor _patch) =
 71 |   Version major (minor + 1) 0
 72 | 
 73 | bumpMajor :: Version -> Version
 74 | bumpMajor (Version major _minor _patch) =
 75 |   Version (major + 1) 0 0
 76 | 
 77 | -- TO CHARS
 78 | 
 79 | toChars :: Version -> [Char]
 80 | toChars (Version major minor patch) =
 81 |   show major ++ '.' : show minor ++ '.' : show patch
 82 | 
 83 | -- JSON
 84 | 
 85 | decoder :: D.Decoder (Row, Col) Version
 86 | decoder =
 87 |   D.customString parser (,)
 88 | 
 89 | encode :: Version -> E.Value
 90 | encode version =
 91 |   E.chars (toChars version)
 92 | 
 93 | -- BINARY
 94 | 
 95 | instance Binary Version where
 96 |   get =
 97 |     do
 98 |       word <- getWord8
 99 |       if word == 255
100 |         then liftM3 Version get get get
101 |         else do
102 |           minor <- getWord8
103 |           patch <- getWord8
104 |           return (Version (fromIntegral word) (fromIntegral minor) (fromIntegral patch))
105 | 
106 |   put (Version major minor patch) =
107 |     if major < 255 && minor < 256 && patch < 256
108 |       then do
109 |         putWord8 (fromIntegral major)
110 |         putWord8 (fromIntegral minor)
111 |         putWord8 (fromIntegral patch)
112 |       else do
113 |         putWord8 255
114 |         put major
115 |         put minor
116 |         put patch
117 | 
118 | -- PARSER
119 | 
120 | parser :: P.Parser (Row, Col) Version
121 | parser =
122 |   do
123 |     major <- numberParser
124 |     P.word1 0x2E {-.-} (,)
125 |     minor <- numberParser
126 |     P.word1 0x2E {-.-} (,)
127 |     patch <- numberParser
128 |     return (Version major minor patch)
129 | 
130 | numberParser :: P.Parser (Row, Col) Word16
131 | numberParser =
132 |   P.Parser $ \(P.State src pos end indent row col) cok _ _ eerr ->
133 |     if pos >= end
134 |       then eerr row col (,)
135 |       else
136 |         let !word = P.unsafeIndex pos
137 |          in if word == 0x30 {-0-}
138 |               then
139 |                 let !newState = P.State src (plusPtr pos 1) end indent row (col + 1)
140 |                  in cok 0 newState
141 |               else
142 |                 if isDigit word
143 |                   then
144 |                     let (# total, newPos #) = chompWord16 (plusPtr pos 1) end (fromIntegral (word - 0x30))
145 |                         !newState = P.State src newPos end indent row (col + fromIntegral (minusPtr newPos pos))
146 |                      in cok total newState
147 |                   else eerr row col (,)
148 | 
149 | chompWord16 :: Ptr Word8 -> Ptr Word8 -> Word16 -> (# Word16, Ptr Word8 #)
150 | chompWord16 pos end total =
151 |   if pos >= end
152 |     then (# total, pos #)
153 |     else
154 |       let !word = P.unsafeIndex pos
155 |        in if isDigit word
156 |             then chompWord16 (plusPtr pos 1) end (10 * total + fromIntegral (word - 0x30))
157 |             else (# total, pos #)
158 | 
159 | isDigit :: Word8 -> Bool
160 | isDigit word =
161 |   0x30 {-0-} <= word && word <= 0x39 {-9-}
162 | 
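Version bumps follow the usual semver shape, and `toChars` renders the dotted form. A small worked example starting from `one` (1.0.0):

import Gren.Version qualified as V

-- Sketch: bumping 1.0.0 in each position.
bumped :: [String]
bumped =
  map
    V.toChars
    [ V.bumpPatch V.one, -- "1.0.1"
      V.bumpMinor V.one, -- "1.1.0"
      V.bumpMajor V.one -- "2.0.0"
    ]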


--------------------------------------------------------------------------------
/compiler/src/Json/String.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE BangPatterns #-}
  2 | {-# LANGUAGE EmptyDataDecls #-}
  3 | {-# OPTIONS_GHC -Wall -fno-warn-name-shadowing #-}
  4 | 
  5 | module Json.String
  6 |   ( String,
  7 |     isEmpty,
  8 |     --
  9 |     fromPtr,
 10 |     fromName,
 11 |     fromChars,
 12 |     fromSnippet,
 13 |     fromComment,
 14 |     --
 15 |     toChars,
 16 |     toBuilder,
 17 |   )
 18 | where
 19 | 
 20 | import Data.ByteString.Builder qualified as B
 21 | import Data.Coerce qualified as Coerce
 22 | import Data.Name qualified as Name
 23 | import Data.Utf8 (MBA, copyFromPtr, freeze, newByteArray, writeWord8)
 24 | import Data.Utf8 qualified as Utf8
 25 | import Data.Word (Word8)
 26 | import Foreign.ForeignPtr (withForeignPtr)
 27 | import Foreign.Ptr (Ptr, minusPtr, plusPtr)
 28 | import GHC.Exts (RealWorld)
 29 | import GHC.IO (stToIO, unsafeDupablePerformIO, unsafePerformIO)
 30 | import GHC.ST (ST)
 31 | import Parse.Primitives qualified as P
 32 | import Prelude hiding (String)
 33 | 
 34 | -- JSON STRINGS
 35 | 
 36 | -- INVARIANT: any Json.String is appropriately escaped already
 37 | -- PERF: is this the right representation for Json.String? Maybe ByteString instead?
 38 | --
 39 | type String =
 40 |   Utf8.Utf8 JSON_STRING
 41 | 
 42 | data JSON_STRING
 43 | 
 44 | isEmpty :: String -> Bool
 45 | isEmpty =
 46 |   Utf8.isEmpty
 47 | 
 48 | -- FROM
 49 | 
 50 | fromPtr :: Ptr Word8 -> Ptr Word8 -> String
 51 | fromPtr =
 52 |   Utf8.fromPtr
 53 | 
 54 | fromChars :: [Char] -> String
 55 | fromChars =
 56 |   Utf8.fromChars
 57 | 
 58 | fromSnippet :: P.Snippet -> String
 59 | fromSnippet =
 60 |   Utf8.fromSnippet
 61 | 
 62 | fromName :: Name.Name -> String
 63 | fromName =
 64 |   Coerce.coerce
 65 | 
 66 | -- TO
 67 | 
 68 | toChars :: String -> [Char]
 69 | toChars =
 70 |   Utf8.toChars
 71 | 
 72 | toBuilder :: String -> B.Builder
 73 | toBuilder =
 74 |   Utf8.toBuilder
 75 | 
 76 | -- FROM COMMENT
 77 | 
 78 | fromComment :: P.Snippet -> String
 79 | fromComment (P.Snippet fptr off len _ _) =
 80 |   unsafePerformIO $
 81 |     withForeignPtr fptr $ \ptr ->
 82 |       let !pos = plusPtr ptr off
 83 |           !end = plusPtr pos len
 84 |           !str = fromChunks (chompChunks pos end pos [])
 85 |        in return str
 86 | 
 87 | chompChunks :: Ptr Word8 -> Ptr Word8 -> Ptr Word8 -> [Chunk] -> [Chunk]
 88 | chompChunks pos end start revChunks =
 89 |   if pos >= end
 90 |     then reverse (addSlice start end revChunks)
 91 |     else
 92 |       let !word = P.unsafeIndex pos
 93 |        in case word of
 94 |             0x0A {-\n-} -> chompEscape 0x6E {-n-} pos end start revChunks
 95 |             0x22 {-"-} -> chompEscape 0x22 {-"-} pos end start revChunks
 96 |             0x5C {-\-} -> chompEscape 0x5C {-\-} pos end start revChunks
 97 |             0x0D {-\r-} ->
 98 |               let !newPos = plusPtr pos 1
 99 |                in chompChunks newPos end newPos (addSlice start pos revChunks)
100 |             _ ->
101 |               let !width = P.getCharWidth word
102 |                   !newPos = plusPtr pos width
103 |                in chompChunks newPos end start revChunks
104 | 
105 | chompEscape :: Word8 -> Ptr Word8 -> Ptr Word8 -> Ptr Word8 -> [Chunk] -> [Chunk]
106 | chompEscape escape pos end start revChunks =
107 |   let !pos1 = plusPtr pos 1
108 |    in chompChunks pos1 end pos1 (Escape escape : addSlice start pos revChunks)
109 | 
110 | addSlice :: Ptr Word8 -> Ptr Word8 -> [Chunk] -> [Chunk]
111 | addSlice start end revChunks =
112 |   if start == end
113 |     then revChunks
114 |     else Slice start (minusPtr end start) : revChunks
115 | 
116 | -- FROM CHUNKS
117 | 
118 | data Chunk
119 |   = Slice (Ptr Word8) Int
120 |   | Escape Word8
121 | 
122 | fromChunks :: [Chunk] -> String
123 | fromChunks chunks =
124 |   unsafeDupablePerformIO
125 |     ( stToIO
126 |         ( do
127 |             let !len = sum (map chunkToWidth chunks)
128 |             mba <- newByteArray len
129 |             writeChunks mba 0 chunks
130 |             freeze mba
131 |         )
132 |     )
133 | 
134 | chunkToWidth :: Chunk -> Int
135 | chunkToWidth chunk =
136 |   case chunk of
137 |     Slice _ len -> len
138 |     Escape _ -> 2
139 | 
140 | writeChunks :: MBA RealWorld -> Int -> [Chunk] -> ST RealWorld ()
141 | writeChunks mba offset chunks =
142 |   case chunks of
143 |     [] ->
144 |       return ()
145 |     chunk : chunks ->
146 |       case chunk of
147 |         Slice ptr len ->
148 |           do
149 |             copyFromPtr ptr mba offset len
150 |             let !newOffset = offset + len
151 |             writeChunks mba newOffset chunks
152 |         Escape word ->
153 |           do
154 |             writeWord8 mba offset 0x5C {- \ -}
155 |             writeWord8 mba (offset + 1) word
156 |             let !newOffset = offset + 2
157 |             writeChunks mba newOffset chunks
158 | 
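As the invariant above notes, a `Json.String` is assumed to be escaped already: `fromChars` and `fromName` trust their input, while `fromComment` escapes newlines, quotes, and backslashes and drops carriage returns. A minimal sketch of plain construction:

import Json.String qualified as Json

-- Sketch: plain construction; the caller supplies already-escaped text.
key :: Json.String
key = Json.fromChars "gren-version"

keyIsEmpty :: Bool
keyIsEmpty = Json.isEmpty key -- False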


--------------------------------------------------------------------------------
/compiler/src/Nitpick/Debug.hs:
--------------------------------------------------------------------------------
 1 | module Nitpick.Debug
 2 |   ( hasDebugUses,
 3 |   )
 4 | where
 5 | 
 6 | import AST.Optimized qualified as Opt
 7 | import Data.Map.Utils qualified as Map
 8 | import Data.Maybe qualified as Maybe
 9 | 
10 | -- HAS DEBUG USES
11 | 
12 | hasDebugUses :: Opt.LocalGraph -> Bool
13 | hasDebugUses (Opt.LocalGraph _ graph _) =
14 |   Map.any nodeHasDebug graph
15 | 
16 | nodeHasDebug :: Opt.Node -> Bool
17 | nodeHasDebug node =
18 |   case node of
19 |     Opt.Define _ expr _ -> hasDebug expr
20 |     Opt.DefineTailFunc _ _ expr _ -> hasDebug expr
21 |     Opt.Ctor _ _ -> False
22 |     Opt.Enum _ -> False
23 |     Opt.Box -> False
24 |     Opt.Link _ -> False
25 |     Opt.Cycle _ vs fs _ -> any (hasDebug . snd) vs || any defHasDebug fs
26 |     Opt.Manager _ -> False
27 |     Opt.Kernel _ _ -> False
28 |     Opt.PortIncoming expr _ -> hasDebug expr
29 |     Opt.PortOutgoing expr _ -> hasDebug expr
30 |     Opt.PortTask maybeExpr expr _ -> hasDebug expr || Maybe.maybe False hasDebug maybeExpr
31 | 
32 | hasDebug :: Opt.Expr -> Bool
33 | hasDebug expression =
34 |   case expression of
35 |     Opt.Bool _ _ -> False
36 |     Opt.Chr _ _ -> False
37 |     Opt.Str _ _ -> False
38 |     Opt.Int _ _ -> False
39 |     Opt.Float _ _ -> False
40 |     Opt.VarLocal _ _ -> False
41 |     Opt.VarGlobal _ _ -> False
42 |     Opt.VarEnum _ _ _ -> False
43 |     Opt.VarBox _ _ -> False
44 |     Opt.VarCycle _ _ _ -> False
45 |     Opt.VarDebug _ _ _ _ -> True
46 |     Opt.VarKernel _ _ _ -> False
47 |     Opt.Array _ exprs -> any hasDebug exprs
48 |     Opt.Function _ _ expr -> hasDebug expr
49 |     Opt.Call _ e es -> hasDebug e || any hasDebug es
50 |     Opt.TailCall _ args -> any (hasDebug . snd) args
51 |     Opt.If conds finally -> any (\(c, e) -> hasDebug c || hasDebug e) conds || hasDebug finally
52 |     Opt.Let def body -> defHasDebug def || hasDebug body
53 |     Opt.Destruct _ expr -> hasDebug expr
54 |     Opt.Case _ _ d jumps -> deciderHasDebug d || any (hasDebug . snd) jumps
55 |     Opt.Accessor _ _ -> False
56 |     Opt.Access r _ _ -> hasDebug r
57 |     Opt.Update _ r fs -> hasDebug r || any hasDebug fs
58 |     Opt.Record _ fs -> any hasDebug fs
59 | 
60 | defHasDebug :: Opt.Def -> Bool
61 | defHasDebug def =
62 |   case def of
63 |     Opt.Def _ _ expr -> hasDebug expr
64 |     Opt.TailDef _ _ _ expr -> hasDebug expr
65 | 
66 | deciderHasDebug :: Opt.Decider Opt.Choice -> Bool
67 | deciderHasDebug decider =
68 |   case decider of
69 |     Opt.Leaf (Opt.Inline expr) -> hasDebug expr
70 |     Opt.Leaf (Opt.Jump _) -> False
71 |     Opt.Chain _ success failure -> deciderHasDebug success || deciderHasDebug failure
72 |     Opt.FanOut _ tests fallback -> any (deciderHasDebug . snd) tests || deciderHasDebug fallback
73 | 
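`hasDebugUses` reports whether any node in a module's optimized graph still references `Debug`. Below is a sketch of how a caller might gate optimized output on it; the function name and error text are illustrative, not from the repo.

import AST.Optimized qualified as Opt
import Nitpick.Debug qualified as Debug

-- Illustrative guard for optimized builds.
forbidDebug :: Opt.LocalGraph -> Either String Opt.LocalGraph
forbidDebug graph =
  if Debug.hasDebugUses graph
    then Left "remove Debug.* calls before compiling with optimizations"
    else Right graph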


--------------------------------------------------------------------------------
/compiler/src/Optimize/Case.hs:
--------------------------------------------------------------------------------
  1 | {-# OPTIONS_GHC -Wall #-}
  2 | 
  3 | module Optimize.Case
  4 |   ( optimize,
  5 |   )
  6 | where
  7 | 
  8 | import AST.Canonical qualified as Can
  9 | import AST.Optimized qualified as Opt
 10 | import Control.Arrow (second)
 11 | import Data.Map ((!))
 12 | import Data.Map qualified as Map
 13 | import Data.Maybe qualified as Maybe
 14 | import Data.Name qualified as Name
 15 | import Optimize.DecisionTree qualified as DT
 16 | 
 17 | -- OPTIMIZE A CASE EXPRESSION
 18 | 
 19 | optimize :: Name.Name -> Name.Name -> [(Can.Pattern, Opt.Expr)] -> Opt.Expr
 20 | optimize temp root optBranches =
 21 |   let (patterns, indexedBranches) =
 22 |         unzip (zipWith indexify [0 ..] optBranches)
 23 | 
 24 |       decider = treeToDecider (DT.compile patterns)
 25 |       targetCounts = countTargets decider
 26 | 
 27 |       (choices, maybeJumps) =
 28 |         unzip (map (createChoices targetCounts) indexedBranches)
 29 |    in Opt.Case
 30 |         temp
 31 |         root
 32 |         (insertChoices (Map.fromList choices) decider)
 33 |         (Maybe.catMaybes maybeJumps)
 34 | 
 35 | indexify :: Int -> (a, b) -> ((a, Int), (Int, b))
 36 | indexify index (pattern, branch) =
 37 |   ( (pattern, index),
 38 |     (index, branch)
 39 |   )
 40 | 
 41 | -- TREE TO DECIDER
 42 | --
 43 | -- Decision trees may have some redundancies, so we convert them to a Decider
 44 | -- which has special constructs to avoid code duplication when possible.
 45 | 
 46 | treeToDecider :: DT.DecisionTree -> Opt.Decider Int
 47 | treeToDecider tree =
 48 |   case tree of
 49 |     DT.Match target ->
 50 |       Opt.Leaf target
 51 |     -- zero options
 52 |     DT.Decision _ [] Nothing ->
 53 |       error "compiler bug, somehow created an empty decision tree"
 54 |     -- one option
 55 |     DT.Decision _ [(_, subTree)] Nothing ->
 56 |       treeToDecider subTree
 57 |     DT.Decision _ [] (Just subTree) ->
 58 |       treeToDecider subTree
 59 |     -- two options
 60 |     DT.Decision path [(test, successTree)] (Just failureTree) ->
 61 |       toChain path test successTree failureTree
 62 |     DT.Decision path [(test, successTree), (_, failureTree)] Nothing ->
 63 |       toChain path test successTree failureTree
 64 |     -- many options
 65 |     DT.Decision path edges Nothing ->
 66 |       let (necessaryTests, fallback) =
 67 |             (init edges, snd (last edges))
 68 |        in Opt.FanOut
 69 |             path
 70 |             (map (second treeToDecider) necessaryTests)
 71 |             (treeToDecider fallback)
 72 |     DT.Decision path edges (Just fallback) ->
 73 |       Opt.FanOut path (map (second treeToDecider) edges) (treeToDecider fallback)
 74 | 
 75 | toChain :: DT.Path -> DT.Test -> DT.DecisionTree -> DT.DecisionTree -> Opt.Decider Int
 76 | toChain path test successTree failureTree =
 77 |   let failure =
 78 |         treeToDecider failureTree
 79 |    in case treeToDecider successTree of
 80 |         Opt.Chain testChain success subFailure
 81 |           | failure == subFailure ->
 82 |               Opt.Chain ((path, test) : testChain) success failure
 83 |         success ->
 84 |           Opt.Chain [(path, test)] success failure
 85 | 
 86 | -- INSERT CHOICES
 87 | --
 88 | -- If a target appears exactly once in a Decider, the corresponding expression
 89 | -- can be inlined. Whether a branch is inlined or becomes a jump is called a "choice".
 90 | 
 91 | countTargets :: Opt.Decider Int -> Map.Map Int Int
 92 | countTargets decisionTree =
 93 |   case decisionTree of
 94 |     Opt.Leaf target ->
 95 |       Map.singleton target 1
 96 |     Opt.Chain _ success failure ->
 97 |       Map.unionWith (+) (countTargets success) (countTargets failure)
 98 |     Opt.FanOut _ tests fallback ->
 99 |       Map.unionsWith (+) (map countTargets (fallback : map snd tests))
100 | 
101 | createChoices ::
102 |   Map.Map Int Int ->
103 |   (Int, Opt.Expr) ->
104 |   ((Int, Opt.Choice), Maybe (Int, Opt.Expr))
105 | createChoices targetCounts (target, branch) =
106 |   if targetCounts ! target == 1
107 |     then
108 |       ( (target, Opt.Inline branch),
109 |         Nothing
110 |       )
111 |     else
112 |       ( (target, Opt.Jump target),
113 |         Just (target, branch)
114 |       )
115 | 
116 | insertChoices ::
117 |   Map.Map Int Opt.Choice ->
118 |   Opt.Decider Int ->
119 |   Opt.Decider Opt.Choice
120 | insertChoices choiceDict decider =
121 |   let go =
122 |         insertChoices choiceDict
123 |    in case decider of
124 |         Opt.Leaf target ->
125 |           Opt.Leaf (choiceDict ! target)
126 |         Opt.Chain testChain success failure ->
127 |           Opt.Chain testChain (go success) (go failure)
128 |         Opt.FanOut path tests fallback ->
129 |           Opt.FanOut path (map (second go) tests) (go fallback)
130 | 
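
A minimal standalone sketch of the "inline vs. jump" rule above. `Decider` here is a simplified stand-in for `Opt.Decider` (paths and tests dropped); the counting mirrors `countTargets`, and a branch body is inlined only when its target occurs exactly once.

```haskell
import qualified Data.Map as Map

-- Simplified stand-in for Opt.Decider: only the branch targets matter here.
data Decider
  = Leaf Int
  | Chain Decider Decider
  | FanOut [Decider] Decider

-- Count how often each target appears, mirroring countTargets above.
countLeaves :: Decider -> Map.Map Int Int
countLeaves decider =
  case decider of
    Leaf target -> Map.singleton target 1
    Chain success failure -> Map.unionWith (+) (countLeaves success) (countLeaves failure)
    FanOut tests fallback -> Map.unionsWith (+) (map countLeaves (fallback : tests))

-- A branch body is inlined only if its target occurs exactly once.
isInlined :: Map.Map Int Int -> Int -> Bool
isInlined counts target = Map.findWithDefault 0 target counts == 1

main :: IO ()
main =
  let decider = Chain (Leaf 0) (FanOut [Leaf 1, Leaf 0] (Leaf 2))
      counts = countLeaves decider
   in print [(t, isInlined counts t) | t <- [0, 1, 2]]
-- [(0,False),(1,True),(2,True)]: target 0 is shared, so it becomes a jump;
-- targets 1 and 2 are single-use, so their expressions are inlined.
```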


--------------------------------------------------------------------------------
/compiler/src/Optimize/Names.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE OverloadedStrings #-}
  2 | {-# LANGUAGE Rank2Types #-}
  3 | {-# OPTIONS_GHC -Wall #-}
  4 | 
  5 | module Optimize.Names
  6 |   ( Tracker,
  7 |     run,
  8 |     generate,
  9 |     registerKernel,
 10 |     registerGlobal,
 11 |     registerDebug,
 12 |     registerCtor,
 13 |     registerField,
 14 |     registerFieldDict,
 15 |     registerFieldList,
 16 |   )
 17 | where
 18 | 
 19 | import AST.Canonical qualified as Can
 20 | import AST.Optimized qualified as Opt
 21 | import Data.Index qualified as Index
 22 | import Data.Map qualified as Map
 23 | import Data.Name qualified as Name
 24 | import Data.Set qualified as Set
 25 | import Gren.ModuleName qualified as ModuleName
 26 | import Reporting.Annotation qualified as A
 27 | 
 28 | -- GENERATOR
 29 | 
 30 | newtype Tracker a
 31 |   = Tracker
 32 |       ( forall r.
 33 |         Int ->
 34 |         Set.Set Opt.Global ->
 35 |         Map.Map Name.Name Int ->
 36 |         (Int -> Set.Set Opt.Global -> Map.Map Name.Name Int -> a -> r) ->
 37 |         r
 38 |       )
 39 | 
 40 | run :: Tracker a -> (Set.Set Opt.Global, Map.Map Name.Name Int, a)
 41 | run (Tracker k) =
 42 |   k
 43 |     0
 44 |     Set.empty
 45 |     Map.empty
 46 |     (\_uid deps fields value -> (deps, fields, value))
 47 | 
 48 | generate :: Tracker Name.Name
 49 | generate =
 50 |   Tracker $ \uid deps fields ok ->
 51 |     ok (uid + 1) deps fields (Name.fromVarIndex uid)
 52 | 
 53 | registerKernel :: Name.Name -> a -> Tracker a
 54 | registerKernel home value =
 55 |   Tracker $ \uid deps fields ok ->
 56 |     ok uid (Set.insert (Opt.toKernelGlobal home) deps) fields value
 57 | 
 58 | registerGlobal :: A.Region -> ModuleName.Canonical -> Name.Name -> Tracker Opt.Expr
 59 | registerGlobal region home name =
 60 |   Tracker $ \uid deps fields ok ->
 61 |     let global = Opt.Global home name
 62 |      in ok uid (Set.insert global deps) fields (Opt.VarGlobal region global)
 63 | 
 64 | registerDebug :: Name.Name -> ModuleName.Canonical -> A.Region -> Tracker Opt.Expr
 65 | registerDebug name home region =
 66 |   Tracker $ \uid deps fields ok ->
 67 |     let global = Opt.Global ModuleName.debug name
 68 |      in ok uid (Set.insert global deps) fields (Opt.VarDebug region name home Nothing)
 69 | 
 70 | registerCtor :: A.Region -> ModuleName.Canonical -> A.Located Name.Name -> Index.ZeroBased -> Can.CtorOpts -> Tracker Opt.Expr
 71 | registerCtor region home (A.At _ name) index opts =
 72 |   Tracker $ \uid deps fields ok ->
 73 |     let global = Opt.Global home name
 74 |         newDeps = Set.insert global deps
 75 |      in case opts of
 76 |           Can.Normal ->
 77 |             ok uid newDeps fields (Opt.VarGlobal region global)
 78 |           Can.Enum ->
 79 |             ok uid newDeps fields $
 80 |               case name of
 81 |                 "True" | home == ModuleName.basics -> Opt.Bool region True
 82 |                 "False" | home == ModuleName.basics -> Opt.Bool region False
 83 |                 _ -> Opt.VarEnum region global index
 84 |           Can.Unbox ->
 85 |             ok uid (Set.insert identity newDeps) fields (Opt.VarBox region global)
 86 | 
 87 | identity :: Opt.Global
 88 | identity =
 89 |   Opt.Global ModuleName.basics Name.identity
 90 | 
 91 | registerField :: Name.Name -> a -> Tracker a
 92 | registerField name value =
 93 |   Tracker $ \uid d fields ok ->
 94 |     ok uid d (Map.insertWith (+) name 1 fields) value
 95 | 
 96 | registerFieldDict :: Map.Map Name.Name v -> a -> Tracker a
 97 | registerFieldDict newFields value =
 98 |   Tracker $ \uid d fields ok ->
 99 |     ok uid d (Map.unionWith (+) fields (Map.map toOne newFields)) value
100 | 
101 | toOne :: a -> Int
102 | toOne _ = 1
103 | 
104 | registerFieldList :: [Name.Name] -> a -> Tracker a
105 | registerFieldList names value =
106 |   Tracker $ \uid deps fields ok ->
107 |     ok uid deps (foldr addOne fields names) value
108 | 
109 | addOne :: Name.Name -> Map.Map Name.Name Int -> Map.Map Name.Name Int
110 | addOne name fields =
111 |   Map.insertWith (+) name 1 fields
112 | 
113 | -- INSTANCES
114 | 
115 | instance Functor Tracker where
116 |   fmap func (Tracker kv) =
117 |     Tracker $ \n d f ok ->
118 |       let ok1 n1 d1 f1 value =
119 |             ok n1 d1 f1 (func value)
120 |        in kv n d f ok1
121 | 
122 | instance Applicative Tracker where
123 |   pure value =
124 |     Tracker $ \n d f ok -> ok n d f value
125 | 
126 |   (<*>) (Tracker kf) (Tracker kv) =
127 |     Tracker $ \n d f ok ->
128 |       let ok1 n1 d1 f1 func =
129 |             let ok2 n2 d2 f2 value =
130 |                   ok n2 d2 f2 (func value)
131 |              in kv n1 d1 f1 ok2
132 |        in kf n d f ok1
133 | 
134 | instance Monad Tracker where
135 |   return = pure
136 | 
137 |   (>>=) (Tracker k) callback =
138 |     Tracker $ \n d f ok ->
139 |       let ok1 n1 d1 f1 a =
140 |             case callback a of
141 |               Tracker kb -> kb n1 d1 f1 ok
142 |        in k n d f ok1
143 | 
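
For readers unfamiliar with this CPS encoding, here is a standalone, simplified sketch of the same pattern (not the compiler's code: the dependency set and field map are collapsed into a single `Set String`, and the registered module name is just an illustrative string). It shows how the unique-id counter and the accumulated state are threaded through the continuation without being exposed to callers.

```haskell
{-# LANGUAGE RankNTypes #-}

import qualified Data.Set as Set

-- Simplified Tracker: the continuation receives the next unique id, the
-- accumulated dependency set, and the produced value.
newtype Tracker a
  = Tracker (forall r. Int -> Set.Set String -> (Int -> Set.Set String -> a -> r) -> r)

run :: Tracker a -> (Set.Set String, a)
run (Tracker k) = k 0 Set.empty (\_uid deps value -> (deps, value))

-- Hand out a fresh name, bumping the counter.
generate :: Tracker String
generate = Tracker $ \uid deps ok -> ok (uid + 1) deps ("_v" ++ show uid)

-- Record a dependency without consuming a unique id.
register :: String -> a -> Tracker a
register dep value = Tracker $ \uid deps ok -> ok uid (Set.insert dep deps) value

instance Functor Tracker where
  fmap f (Tracker k) = Tracker $ \n d ok -> k n d (\n1 d1 a -> ok n1 d1 (f a))

instance Applicative Tracker where
  pure a = Tracker $ \n d ok -> ok n d a
  Tracker kf <*> Tracker kv =
    Tracker $ \n d ok ->
      kf n d (\n1 d1 f -> kv n1 d1 (\n2 d2 a -> ok n2 d2 (f a)))

instance Monad Tracker where
  Tracker k >>= callback =
    Tracker $ \n d ok ->
      k n d (\n1 d1 a -> case callback a of Tracker kb -> kb n1 d1 ok)

main :: IO ()
main =
  print $ run $ do
    a <- generate                                   -- "_v0"
    b <- register "Some.Kernel.Module" =<< generate -- "_v1", plus one dependency
    pure (a, b)
-- (fromList ["Some.Kernel.Module"],("_v0","_v1"))
```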


--------------------------------------------------------------------------------
/compiler/src/Parse/Symbol.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE BangPatterns #-}
 2 | {-# LANGUAGE OverloadedStrings #-}
 3 | {-# OPTIONS_GHC -Wall #-}
 4 | 
 5 | module Parse.Symbol
 6 |   ( operator,
 7 |     BadOperator (..),
 8 |     binopCharSet,
 9 |   )
10 | where
11 | 
12 | import Data.Char qualified as Char
13 | import Data.IntSet qualified as IntSet
14 | import Data.Name qualified as Name
15 | import Data.Vector qualified as Vector
16 | import Foreign.Ptr (Ptr, minusPtr, plusPtr)
17 | import GHC.Word (Word8)
18 | import Parse.Primitives (Col, Parser, Row)
19 | import Parse.Primitives qualified as P
20 | 
21 | -- OPERATOR
22 | 
23 | data BadOperator
24 |   = BadDot
25 |   | BadPipe
26 |   | BadArrow
27 |   | BadEquals
28 |   | BadHasType
29 |   deriving (Show)
30 | 
31 | operator :: (Row -> Col -> x) -> (BadOperator -> Row -> Col -> x) -> Parser x Name.Name
32 | operator toExpectation toError =
33 |   P.Parser $ \(P.State src pos end indent row col) cok _ cerr eerr ->
34 |     let !newPos = chompOps pos end
35 |      in if pos == newPos
36 |           then eerr row col toExpectation
37 |           else case Name.fromPtr pos newPos of
38 |             "." -> eerr row col (toError BadDot)
39 |             "|" -> cerr row col (toError BadPipe)
40 |             "->" -> cerr row col (toError BadArrow)
41 |             "=" -> cerr row col (toError BadEquals)
42 |             ":" -> cerr row col (toError BadHasType)
43 |             op ->
44 |               let !newCol = col + fromIntegral (minusPtr newPos pos)
45 |                   !newState = P.State src newPos end indent row newCol
46 |                in cok op newState
47 | 
48 | chompOps :: Ptr Word8 -> Ptr Word8 -> Ptr Word8
49 | chompOps pos end =
50 |   if pos < end && isBinopCharHelp (P.unsafeIndex pos)
51 |     then chompOps (plusPtr pos 1) end
52 |     else pos
53 | 
54 | isBinopCharHelp :: Word8 -> Bool
55 | isBinopCharHelp word =
56 |   word < 128 && Vector.unsafeIndex binopCharVector (fromIntegral word)
57 | 
58 | binopCharVector :: Vector.Vector Bool
59 | binopCharVector =
60 |   Vector.generate 128 (\i -> IntSet.member i binopCharSet)
61 | 
62 | binopCharSet :: IntSet.IntSet
63 | binopCharSet =
64 |   IntSet.fromList (map Char.ord "+-/*=.<>:&|^?%!")
65 | 


--------------------------------------------------------------------------------
/compiler/src/Reporting/Annotation.hs:
--------------------------------------------------------------------------------
  1 | {-# OPTIONS_GHC -Wall #-}
  2 | 
  3 | module Reporting.Annotation
  4 |   ( Located (..),
  5 |     Position (..),
  6 |     Region (..),
  7 |     traverse,
  8 |     toValue,
  9 |     merge,
 10 |     at,
 11 |     isIndentedMoreThan,
 12 |     toRegion,
 13 |     mergeRegions,
 14 |     zero,
 15 |     zeroPosition,
 16 |     one,
 17 |   )
 18 | where
 19 | 
 20 | import Control.Monad (liftM2)
 21 | import Data.Binary (Binary, get, put)
 22 | import Data.Word (Word16)
 23 | import Prelude hiding (traverse)
 24 | 
 25 | -- LOCATED
 26 | 
 27 | data Located a
 28 |   = At Region a -- PERF see if unpacking region is helpful
 29 |   deriving (Show)
 30 | 
 31 | instance Functor Located where
 32 |   fmap f (At region a) =
 33 |     At region (f a)
 34 | 
 35 | traverse :: (Functor f) => (a -> f b) -> Located a -> f (Located b)
 36 | traverse func (At region value) =
 37 |   At region <$> func value
 38 | 
 39 | toValue :: Located a -> a
 40 | toValue (At _ value) =
 41 |   value
 42 | 
 43 | merge :: Located a -> Located b -> value -> Located value
 44 | merge (At r1 _) (At r2 _) value =
 45 |   At (mergeRegions r1 r2) value
 46 | 
 47 | isIndentedMoreThan :: Word16 -> Located a -> Bool
 48 | isIndentedMoreThan indent (At (Region (Position _ col) _) _) =
 49 |   col > indent
 50 | 
 51 | -- POSITION
 52 | 
 53 | data Position
 54 |   = Position
 55 |       {-# UNPACK #-} !Word16
 56 |       {-# UNPACK #-} !Word16
 57 |   deriving (Eq, Show)
 58 | 
 59 | at :: Position -> Position -> a -> Located a
 60 | at start end a =
 61 |   At (Region start end) a
 62 | 
 63 | -- REGION
 64 | 
 65 | data Region = Region Position Position
 66 |   deriving (Eq, Show)
 67 | 
 68 | toRegion :: Located a -> Region
 69 | toRegion (At region _) =
 70 |   region
 71 | 
 72 | mergeRegions :: Region -> Region -> Region
 73 | mergeRegions (Region start _) (Region _ end) =
 74 |   Region start end
 75 | 
 76 | zero :: Region
 77 | zero =
 78 |   Region zeroPosition zeroPosition
 79 | 
 80 | zeroPosition :: Position
 81 | zeroPosition = Position 0 0
 82 | 
 83 | one :: Region
 84 | one =
 85 |   Region (Position 1 1) (Position 1 1)
 86 | 
 87 | instance Binary Region where
 88 |   put (Region a b) = put a >> put b
 89 |   get = liftM2 Region get get
 90 | 
 91 | instance Binary Position where
 92 |   put (Position a b) = put a >> put b
 93 |   get = liftM2 Position get get
 94 | 
 95 | instance (Ord a) => Ord (Located a) where
 96 |   compare (At _ lhs) (At _ rhs) = compare lhs rhs
 97 | 
 98 | instance (Eq a) => Eq (Located a) where
 99 |   (==) (At _ lhs) (At _ rhs) = lhs == rhs
100 | 
101 | instance (Binary a) => Binary (Located a) where
102 |   put (At a b) = put a >> put b
103 |   get = liftM2 At get get
104 | 
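
A quick usage sketch (assuming this module is importable from the compiler's build): `merge` combines two located values into one whose region spans from the start of the first to the end of the second.

```haskell
import qualified Reporting.Annotation as A

main :: IO ()
main = do
  let x = A.at (A.Position 1 1) (A.Position 1 4) "foo"
      y = A.at (A.Position 1 8) (A.Position 1 11) "bar"
      combined = A.merge x y (A.toValue x ++ A.toValue y)
  print (A.toRegion combined) -- Region (Position 1 1) (Position 1 11)
  print (A.toValue combined)  -- "foobar"
```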


--------------------------------------------------------------------------------
/compiler/src/Reporting/Error/Main.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE OverloadedStrings #-}
  2 | {-# OPTIONS_GHC -Wall #-}
  3 | 
  4 | module Reporting.Error.Main
  5 |   ( Error (..),
  6 |     toReport,
  7 |   )
  8 | where
  9 | 
 10 | import AST.Canonical qualified as Can
 11 | import Data.List qualified as List
 12 | import Data.Name qualified as Name
 13 | import Reporting.Annotation qualified as A
 14 | import Reporting.Doc qualified as D
 15 | import Reporting.Error.Canonicalize qualified as E
 16 | import Reporting.Render.Code qualified as Code
 17 | import Reporting.Render.Type qualified as RT
 18 | import Reporting.Render.Type.Localizer qualified as L
 19 | import Reporting.Report qualified as Report
 20 | 
 21 | -- ERROR
 22 | 
 23 | data Error
 24 |   = BadType A.Region Can.Type [String]
 25 |   | BadCycle A.Region Name.Name [Name.Name]
 26 |   | BadFlags A.Region Can.Type E.InvalidPayload
 27 | 
 28 | -- TO REPORT
 29 | 
 30 | toReport :: L.Localizer -> Code.Source -> Error -> Report.Report
 31 | toReport localizer source err =
 32 |   case err of
 33 |     BadType region tipe allowed ->
 34 |       Report.Report "BAD MAIN TYPE" region [] $
 35 |         Code.toSnippet
 36 |           source
 37 |           region
 38 |           Nothing
 39 |           ( "I cannot handle this type of `main` value:",
 40 |             D.stack
 41 |               [ "The type of `main` value I am seeing is:",
 42 |                 D.indent 4 $ D.dullyellow $ RT.canToDoc localizer RT.None tipe,
 43 |                 D.reflow $ "But I only know how to handle these types: " ++ List.intercalate ", " allowed
 44 |               ]
 45 |           )
 46 |     BadCycle region name names ->
 47 |       Report.Report "BAD MAIN" region [] $
 48 |         Code.toSnippet
 49 |           source
 50 |           region
 51 |           Nothing
 52 |           ( "A `main` definition cannot be defined in terms of itself.",
 53 |             D.stack
 54 |               [ D.reflow $
 55 |                   "It should be a boring value with no recursion. But\
 56 |                   \ instead it is involved in this cycle of definitions:",
 57 |                 D.cycle 4 name names
 58 |               ]
 59 |           )
 60 |     BadFlags region _badType invalidPayload ->
 61 |       let formatDetails (aBadKindOfThing, butThatIsNoGood) =
 62 |             Report.Report "BAD FLAGS" region [] $
 63 |               Code.toSnippet
 64 |                 source
 65 |                 region
 66 |                 Nothing
 67 |                 ( D.reflow $
 68 |                     "Your `main` program wants " ++ aBadKindOfThing ++ " from JavaScript.",
 69 |                   butThatIsNoGood
 70 |                 )
 71 |        in formatDetails $
 72 |             case invalidPayload of
 73 |               E.ExtendedRecord ->
 74 |                 ( "an extended record",
 75 |                   D.reflow $
 76 |                     "But the exact shape of the record must be known at compile time. No type variables!"
 77 |                 )
 78 |               E.Function ->
 79 |                 ( "a function",
 80 |                   D.reflow $
 81 |                     "But if I allowed functions from JS, it would be possible to sneak\
 82 |                     \ side-effects and runtime exceptions into Gren!"
 83 |                 )
 84 |               E.TypeVariable name ->
 85 |                 ( "an unspecified type",
 86 |                   D.reflow $
 87 |                     "But type variables like `"
 88 |                       ++ Name.toChars name
 89 |                       ++ "` cannot be given as flags.\
 90 |                          \ I need to know exactly what type of data I am getting, so I can guarantee that\
 91 |                          \ unexpected data cannot sneak in and crash the Gren program."
 92 |                 )
 93 |               E.UnsupportedType name ->
 94 |                 ( "a `" ++ Name.toChars name ++ "` value",
 95 |                   D.stack
 96 |                     [ D.reflow $ "I cannot handle that. The types that CAN be in flags include:",
 97 |                       D.indent 4 $
 98 |                         D.reflow $
 99 |                           "Unit, Ints, Floats, Bools, Strings, Maybes, Arrays,\
100 |                           \ records, and JSON values.",
101 |                       D.reflow $
102 |                         "Since JSON values can flow through, you can use JSON encoders and decoders\
103 |                         \ to allow other types through as well. More advanced users often just do\
104 |                         \ everything with encoders and decoders for more control and better errors."
105 |                     ]
106 |                 )
107 | 


--------------------------------------------------------------------------------
/compiler/src/Reporting/Render/Type/Localizer.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | {-# OPTIONS_GHC -Wall #-}
 3 | 
 4 | module Reporting.Render.Type.Localizer
 5 |   ( Localizer,
 6 |     toDoc,
 7 |     toChars,
 8 |     empty,
 9 |     fromNames,
10 |     fromModule,
11 |   )
12 | where
13 | 
14 | import AST.Source qualified as Src
15 | import Data.Map qualified as Map
16 | import Data.Name qualified as Name
17 | import Data.Set qualified as Set
18 | import Gren.ModuleName qualified as ModuleName
19 | import Reporting.Annotation qualified as A
20 | import Reporting.Doc qualified as D
21 | 
22 | -- LOCALIZER
23 | 
24 | newtype Localizer
25 |   = Localizer (Map.Map Name.Name Import)
26 | 
27 | data Import = Import
28 |   { _alias :: Maybe Name.Name,
29 |     _exposing :: Exposing
30 |   }
31 | 
32 | data Exposing
33 |   = All
34 |   | Only (Set.Set Name.Name)
35 | 
36 | empty :: Localizer
37 | empty =
38 |   Localizer Map.empty
39 | 
40 | -- LOCALIZE
41 | 
42 | toDoc :: Localizer -> ModuleName.Canonical -> Name.Name -> D.Doc
43 | toDoc localizer home name =
44 |   D.fromChars (toChars localizer home name)
45 | 
46 | toChars :: Localizer -> ModuleName.Canonical -> Name.Name -> String
47 | toChars (Localizer localizer) (ModuleName.Canonical _ home) name =
48 |   case Map.lookup home localizer of
49 |     Nothing ->
50 |       Name.toChars home <> "." <> Name.toChars name
51 |     Just (Import alias exposing) ->
52 |       case exposing of
53 |         All ->
54 |           Name.toChars name
55 |         Only set ->
56 |           if Set.member name set
57 |             then Name.toChars name
58 |             else Name.toChars (maybe home id alias) <> "." <> Name.toChars name
59 | 
60 | -- FROM NAMES
61 | 
62 | fromNames :: Map.Map Name.Name a -> Localizer
63 | fromNames names =
64 |   Localizer $ Map.map (\_ -> Import Nothing All) names
65 | 
66 | -- FROM MODULE
67 | 
68 | fromModule :: Src.Module -> Localizer
69 | fromModule modul@(Src.Module _ _ _ imports _ _ _ _ _ _ _) =
70 |   Localizer $
71 |     Map.fromList $
72 |       (Src.getName modul, Import Nothing All) : map (toPair . snd) imports
73 | 
74 | toPair :: Src.Import -> (Name.Name, Import)
75 | toPair (Src.Import (A.At _ name) alias exposing _ _) =
76 |   ( name,
77 |     Import (fmap fst alias) (toExposing exposing)
78 |   )
79 | 
80 | toExposing :: Src.Exposing -> Exposing
81 | toExposing exposing =
82 |   case exposing of
83 |     Src.Open ->
84 |       All
85 |     Src.Explicit exposedList ->
86 |       Only (foldr addType Set.empty exposedList)
87 | 
88 | addType :: Src.Exposed -> Set.Set Name.Name -> Set.Set Name.Name
89 | addType exposed types =
90 |   case exposed of
91 |     Src.Lower _ -> types
92 |     Src.Upper (A.At _ name) _ -> Set.insert name types
93 |     Src.Operator _ _ -> types
94 | 


--------------------------------------------------------------------------------
/compiler/src/Reporting/Report.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | 
 3 | module Reporting.Report
 4 |   ( Report (..),
 5 |   )
 6 | where
 7 | 
 8 | import Reporting.Annotation qualified as A
 9 | import Reporting.Doc qualified as D
10 | 
11 | -- BUILD REPORTS
12 | 
13 | data Report = Report
14 |   { _title :: String,
15 |     _region :: A.Region,
16 |     _sgstns :: [String],
17 |     _message :: D.Doc
18 |   }
19 | 


--------------------------------------------------------------------------------
/compiler/src/Reporting/Result.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE Rank2Types #-}
 2 | {-# OPTIONS_GHC -Wall #-}
 3 | 
 4 | module Reporting.Result
 5 |   ( Result (..),
 6 |     run,
 7 |     ok,
 8 |     warn,
 9 |     throw,
10 |     mapError,
11 |   )
12 | where
13 | 
14 | import Data.OneOrMore qualified as OneOrMore
15 | import Reporting.Warning qualified as Warning
16 | 
17 | -- RESULT
18 | 
19 | newtype Result info warnings error a
20 |   = Result
21 |       ( forall result.
22 |         info ->
23 |         warnings ->
24 |         (info -> warnings -> OneOrMore.OneOrMore error -> result) ->
25 |         (info -> warnings -> a -> result) ->
26 |         result
27 |       )
28 | 
29 | run :: Result () [w] e a -> ([w], Either (OneOrMore.OneOrMore e) a)
30 | run (Result k) =
31 |   k
32 |     ()
33 |     []
34 |     (\() w e -> (reverse w, Left e))
35 |     (\() w a -> (reverse w, Right a))
36 | 
37 | -- HELPERS
38 | 
39 | ok :: a -> Result i w e a
40 | ok a =
41 |   Result $ \i w _ good ->
42 |     good i w a
43 | 
44 | warn :: Warning.Warning -> Result i [Warning.Warning] e ()
45 | warn warning =
46 |   Result $ \i warnings _ good ->
47 |     good i (warning : warnings) ()
48 | 
49 | throw :: e -> Result i w e a
50 | throw e =
51 |   Result $ \i w bad _ ->
52 |     bad i w (OneOrMore.one e)
53 | 
54 | mapError :: (e -> e') -> Result i w e a -> Result i w e' a
55 | mapError func (Result k) =
56 |   Result $ \i w bad good ->
57 |     let bad1 i1 w1 e1 =
58 |           bad i1 w1 (OneOrMore.map func e1)
59 |      in k i w bad1 good
60 | 
61 | -- FANCY INSTANCE STUFF
62 | 
63 | instance Functor (Result i w e) where
64 |   fmap func (Result k) =
65 |     Result $ \i w bad good ->
66 |       let good1 i1 w1 value =
67 |             good i1 w1 (func value)
68 |        in k i w bad good1
69 | 
70 | instance Applicative (Result i w e) where
71 |   pure = ok
72 | 
73 |   (<*>) (Result kf) (Result kv) =
74 |     Result $ \i w bad good ->
75 |       let bad1 i1 w1 e1 =
76 |             let bad2 i2 w2 e2 = bad i2 w2 (OneOrMore.more e1 e2)
77 |                 good2 i2 w2 _value = bad i2 w2 e1
78 |              in kv i1 w1 bad2 good2
79 | 
80 |           good1 i1 w1 func =
81 |             let bad2 i2 w2 e2 = bad i2 w2 e2
82 |                 good2 i2 w2 value = good i2 w2 (func value)
83 |              in kv i1 w1 bad2 good2
84 |        in kf i w bad1 good1
85 | 
86 |   (*>) (Result ka) (Result kb) =
87 |     Result $ \i w bad good ->
88 |       let good1 i1 w1 _ =
89 |             kb i1 w1 bad good
90 |        in ka i w bad good1
91 | 
92 | instance Monad (Result i w e) where
93 |   (>>=) (Result ka) callback =
94 |     Result $ \i w bad good ->
95 |       let good1 i1 w1 a =
96 |             case callback a of
97 |               Result kb -> kb i1 w1 bad good
98 |        in ka i w bad good1
99 | 
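
The design point worth noting above is that `(<*>)` keeps evaluating its second argument after the first fails, accumulating errors with `OneOrMore.more`, while `(>>=)` necessarily stops at the first error because the rest of the computation needs the missing value. The following is a standalone, simplified sketch of that behaviour (info and warnings dropped, `OneOrMore` replaced by a plain list), not the compiler's actual types.

```haskell
{-# LANGUAGE RankNTypes #-}

-- Simplified Result: no info or warnings, errors collected in a plain list.
newtype Result e a
  = Result (forall r. ([e] -> r) -> (a -> r) -> r)

run :: Result e a -> Either [e] a
run (Result k) = k Left Right

ok :: a -> Result e a
ok a = Result $ \_ good -> good a

throw :: e -> Result e a
throw e = Result $ \bad _ -> bad [e]

instance Functor (Result e) where
  fmap f (Result k) = Result $ \bad good -> k bad (good . f)

instance Applicative (Result e) where
  pure = ok

  -- Even when the function side fails, still run the value side so that its
  -- errors are reported as well.
  Result kf <*> Result kv =
    Result $ \bad good ->
      kf
        (\es1 -> kv (\es2 -> bad (es1 ++ es2)) (\_ -> bad es1))
        (\f -> kv bad (good . f))

instance Monad (Result e) where
  -- The continuation needs the value, so stop at the first error.
  Result k >>= callback =
    Result $ \bad good ->
      k bad (\a -> case callback a of Result kb -> kb bad good)

main :: IO ()
main = do
  print (run ((,) <$> throw "no x" <*> throw "no y" :: Result String (Int, Int)))
  -- Left ["no x","no y"]  (applicative style reports both problems)
  print (run ((throw "no x" >>= \x -> ok (x + 1)) :: Result String Int))
  -- Left ["no x"]         (monadic style stops at the first problem)
```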


--------------------------------------------------------------------------------
/compiler/src/Reporting/Suggest.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | {-# OPTIONS_GHC -Wall #-}
 3 | 
 4 | module Reporting.Suggest
 5 |   ( distance,
 6 |     sort,
 7 |     rank,
 8 |   )
 9 | where
10 | 
11 | import Data.Char qualified as Char
12 | import Data.List qualified as List
13 | import Text.EditDistance qualified as Dist
14 | 
15 | -- DISTANCE
16 | 
17 | distance :: String -> String -> Int
18 | distance x y =
19 |   Dist.restrictedDamerauLevenshteinDistance Dist.defaultEditCosts x y
20 | 
21 | -- SORT
22 | 
23 | sort :: String -> (a -> String) -> [a] -> [a]
24 | sort target toString values =
25 |   List.sortOn (distance (toLower target) . toLower . toString) values
26 | 
27 | toLower :: String -> String
28 | toLower string =
29 |   map Char.toLower string
30 | 
31 | -- RANK
32 | 
33 | rank :: String -> (a -> String) -> [a] -> [(Int, a)]
34 | rank target toString values =
35 |   let toRank v =
36 |         distance (toLower target) (toLower (toString v))
37 | 
38 |       addRank v =
39 |         (toRank v, v)
40 |    in List.sortOn fst (map addRank values)
41 | 
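
A quick usage sketch (assuming `Reporting.Suggest` and its `edit-distance` dependency are on the build path): ranking candidate names against a likely typo.

```haskell
import qualified Reporting.Suggest as Suggest

main :: IO ()
main = do
  let candidates = ["length", "filter", "foldl"]
  -- "length" sorts first: it is a single transposition away from the typo.
  print (Suggest.sort "lenght" id candidates)
  -- Pair each candidate with its edit distance to the target, closest first.
  print (Suggest.rank "lenght" id candidates)
```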


--------------------------------------------------------------------------------
/compiler/src/Reporting/Warning.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | {-# OPTIONS_GHC -Wall #-}
 3 | 
 4 | module Reporting.Warning
 5 |   ( Warning (..),
 6 |     Context (..),
 7 |     toReport,
 8 |   )
 9 | where
10 | 
11 | import AST.Canonical qualified as Can
12 | import AST.Utils.Type qualified as Type
13 | import Data.Name qualified as Name
14 | import Reporting.Annotation qualified as A
15 | import Reporting.Doc qualified as D
16 | import Reporting.Render.Code qualified as Code
17 | import Reporting.Render.Type qualified as RT
18 | import Reporting.Render.Type.Localizer qualified as L
19 | import Reporting.Report qualified as Report
20 | 
21 | -- ALL POSSIBLE WARNINGS
22 | 
23 | data Warning
24 |   = UnusedImport A.Region Name.Name
25 |   | UnusedVariable A.Region Context Name.Name
26 |   | MissingTypeAnnotation A.Region Name.Name Can.Type
27 | 
28 | data Context = Def | Pattern
29 | 
30 | -- TO REPORT
31 | 
32 | toReport :: L.Localizer -> Code.Source -> Warning -> Report.Report
33 | toReport localizer source warning =
34 |   case warning of
35 |     UnusedImport region moduleName ->
36 |       Report.Report "unused import" region [] $
37 |         Code.toSnippet
38 |           source
39 |           region
40 |           Nothing
41 |           ( D.reflow $
42 |               "Nothing from the `" <> Name.toChars moduleName <> "` module is used in this file.",
43 |             "I recommend removing unused imports."
44 |           )
45 |     UnusedVariable region context name ->
46 |       let title = defOrPat context "unused definition" "unused variable"
47 |        in Report.Report title region [] $
48 |             Code.toSnippet
49 |               source
50 |               region
51 |               Nothing
52 |               ( D.reflow $
53 |                   "You are not using `" <> Name.toChars name <> "` anywhere.",
54 |                 D.stack
55 |                   [ D.reflow $
56 |                       "Is there a typo? Maybe you intended to use `"
57 |                         <> Name.toChars name
58 |                         <> "` somewhere but typed another name instead?",
59 |                     D.reflow $
60 |                       defOrPat
61 |                         context
62 |                         ( "If you are sure there is no typo, remove the definition.\
63 |                           \ This way future readers will not have to wonder why it is there!"
64 |                         )
65 |                         ( "If you are sure there is no typo, replace `"
66 |                             <> Name.toChars name
67 |                             <> "` with _ so future readers will not have to wonder why it is there!"
68 |                         )
69 |                   ]
70 |               )
71 |     MissingTypeAnnotation region name inferredType ->
72 |       Report.Report "missing type annotation" region [] $
73 |         Code.toSnippet
74 |           source
75 |           region
76 |           Nothing
77 |           ( D.reflow $
78 |               case Type.deepDealias inferredType of
79 |                 Can.TLambda _ _ ->
80 |                   "The `" <> Name.toChars name <> "` function has no type annotation."
81 |                 _ ->
82 |                   "The `" <> Name.toChars name <> "` definition has no type annotation.",
83 |             D.stack
84 |               [ "I inferred the type annotation myself though! You can copy it into your code:",
85 |                 D.green $
86 |                   D.hang 4 $
87 |                     D.sep $
88 |                       [ D.fromName name <> " :",
89 |                         RT.canToDoc localizer RT.None inferredType
90 |                       ]
91 |               ]
92 |           )
93 | 
94 | defOrPat :: Context -> a -> a -> a
95 | defOrPat context def pat =
96 |   case context of
97 |     Def -> def
98 |     Pattern -> pat
99 | 


--------------------------------------------------------------------------------
/compiler/src/Type/Instantiate.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | {-# OPTIONS_GHC -Wall #-}
 3 | 
 4 | module Type.Instantiate
 5 |   ( FreeVars,
 6 |     fromSrcType,
 7 |   )
 8 | where
 9 | 
10 | import AST.Canonical qualified as Can
11 | import Data.Map.Strict ((!))
12 | import Data.Map.Strict qualified as Map
13 | import Data.Name qualified as Name
14 | import Type.Type
15 | 
16 | -- FREE VARS
17 | 
18 | type FreeVars =
19 |   Map.Map Name.Name Type
20 | 
21 | -- FROM SOURCE TYPE
22 | 
23 | fromSrcType :: Map.Map Name.Name Type -> Can.Type -> IO Type
24 | fromSrcType freeVars sourceType =
25 |   case sourceType of
26 |     Can.TLambda arg result ->
27 |       FunN
28 |         <$> fromSrcType freeVars arg
29 |         <*> fromSrcType freeVars result
30 |     Can.TVar name ->
31 |       return (freeVars ! name)
32 |     Can.TType home name args ->
33 |       AppN home name <$> traverse (fromSrcType freeVars) args
34 |     Can.TAlias home name args aliasedType ->
35 |       do
36 |         targs <- traverse (traverse (fromSrcType freeVars)) args
37 |         AliasN home name targs
38 |           <$> case aliasedType of
39 |             Can.Filled realType ->
40 |               fromSrcType freeVars realType
41 |             Can.Holey realType ->
42 |               fromSrcType (Map.fromList targs) realType
43 |     Can.TRecord fields maybeExt ->
44 |       RecordN
45 |         <$> traverse (fromSrcFieldType freeVars) fields
46 |         <*> case maybeExt of
47 |           Nothing ->
48 |             return EmptyRecordN
49 |           Just ext ->
50 |             return (freeVars ! ext)
51 | 
52 | fromSrcFieldType :: Map.Map Name.Name Type -> Can.FieldType -> IO Type
53 | fromSrcFieldType freeVars (Can.FieldType _ tipe) =
54 |   fromSrcType freeVars tipe
55 | 


--------------------------------------------------------------------------------
/compiler/src/Type/Occurs.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | {-# OPTIONS_GHC -Wall #-}
 3 | 
 4 | module Type.Occurs
 5 |   ( occurs,
 6 |   )
 7 | where
 8 | 
 9 | import Data.Foldable (foldrM)
10 | import Data.Map.Strict qualified as Map
11 | import Type.Type as Type
12 | import Type.UnionFind qualified as UF
13 | 
14 | -- OCCURS
15 | 
16 | occurs :: Type.Variable -> IO Bool
17 | occurs var =
18 |   occursHelp [] var False
19 | 
20 | occursHelp :: [Type.Variable] -> Type.Variable -> Bool -> IO Bool
21 | occursHelp seen var foundCycle =
22 |   if elem var seen
23 |     then return True
24 |     else do
25 |       (Descriptor content _ _ _) <- UF.get var
26 |       case content of
27 |         FlexVar _ ->
28 |           return foundCycle
29 |         FlexSuper _ _ ->
30 |           return foundCycle
31 |         RigidVar _ ->
32 |           return foundCycle
33 |         RigidSuper _ _ ->
34 |           return foundCycle
35 |         Structure term ->
36 |           let newSeen = var : seen
37 |            in case term of
38 |                 App1 _ _ args ->
39 |                   foldrM (occursHelp newSeen) foundCycle args
40 |                 Fun1 a b ->
41 |                   occursHelp newSeen a
42 |                     =<< occursHelp newSeen b foundCycle
43 |                 EmptyRecord1 ->
44 |                   return foundCycle
45 |                 Record1 fields ext ->
46 |                   occursHelp newSeen ext
47 |                     =<< foldrM (occursHelp newSeen) foundCycle (Map.elems fields)
48 |         Alias _ _ args _ ->
49 |           foldrM (occursHelp (var : seen)) foundCycle (map snd args)
50 |         Error ->
51 |           return foundCycle
52 | 


--------------------------------------------------------------------------------
/compiler/src/Type/UnionFind.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE BangPatterns #-}
  2 | {-# OPTIONS_GHC -funbox-strict-fields #-}
  3 | 
  4 | module Type.UnionFind
  5 |   ( Point,
  6 |     fresh,
  7 |     union,
  8 |     equivalent,
  9 |     redundant,
 10 |     get,
 11 |     set,
 12 |     modify,
 13 |   )
 14 | where
 15 | 
 16 | {- This is based on the following implementations:
 17 | 
 18 |   - https://hackage.haskell.org/package/union-find-0.2/docs/src/Data-UnionFind-IO.html
 19 |   - http://yann.regis-gianas.org/public/mini/code_UnionFind.html
 20 | 
 21 | It seems like the OCaml one came first, but I am not sure.
 22 | 
 23 | Compared to the Haskell implementation, the major changes here include:
 24 | 
 25 |   1. No more reallocating PointInfo when changing the weight
 26 |   2. Using the strict modifyIORef
 27 | 
 28 | -}
 29 | 
 30 | import Control.Monad (when)
 31 | import Data.IORef (IORef, modifyIORef', newIORef, readIORef, writeIORef)
 32 | import Data.Word (Word32)
 33 | 
 34 | -- POINT
 35 | 
 36 | newtype Point a
 37 |   = Pt (IORef (PointInfo a))
 38 |   deriving (Eq)
 39 | 
 40 | data PointInfo a
 41 |   = Info {-# UNPACK #-} !(IORef Word32) {-# UNPACK #-} !(IORef a)
 42 |   | Link {-# UNPACK #-} !(Point a)
 43 | 
 44 | -- HELPERS
 45 | 
 46 | fresh :: a -> IO (Point a)
 47 | fresh value =
 48 |   do
 49 |     weight <- newIORef 1
 50 |     desc <- newIORef value
 51 |     link <- newIORef (Info weight desc)
 52 |     return (Pt link)
 53 | 
 54 | repr :: Point a -> IO (Point a)
 55 | repr point@(Pt ref) =
 56 |   do
 57 |     pInfo <- readIORef ref
 58 |     case pInfo of
 59 |       Info _ _ ->
 60 |         return point
 61 |       Link point1@(Pt ref1) ->
 62 |         do
 63 |           point2 <- repr point1
 64 |           when (point2 /= point1) $
 65 |             do
 66 |               pInfo1 <- readIORef ref1
 67 |               writeIORef ref pInfo1
 68 |           return point2
 69 | 
 70 | get :: Point a -> IO a
 71 | get point@(Pt ref) =
 72 |   do
 73 |     pInfo <- readIORef ref
 74 |     case pInfo of
 75 |       Info _ descRef ->
 76 |         readIORef descRef
 77 |       Link (Pt ref1) ->
 78 |         do
 79 |           link' <- readIORef ref1
 80 |           case link' of
 81 |             Info _ descRef ->
 82 |               readIORef descRef
 83 |             Link _ ->
 84 |               get =<< repr point
 85 | 
 86 | set :: Point a -> a -> IO ()
 87 | set point@(Pt ref) newDesc =
 88 |   do
 89 |     pInfo <- readIORef ref
 90 |     case pInfo of
 91 |       Info _ descRef ->
 92 |         writeIORef descRef newDesc
 93 |       Link (Pt ref1) ->
 94 |         do
 95 |           link' <- readIORef ref1
 96 |           case link' of
 97 |             Info _ descRef ->
 98 |               writeIORef descRef newDesc
 99 |             Link _ ->
100 |               do
101 |                 newPoint <- repr point
102 |                 set newPoint newDesc
103 | 
104 | modify :: Point a -> (a -> a) -> IO ()
105 | modify point@(Pt ref) func =
106 |   do
107 |     pInfo <- readIORef ref
108 |     case pInfo of
109 |       Info _ descRef ->
110 |         modifyIORef' descRef func
111 |       Link (Pt ref1) ->
112 |         do
113 |           link' <- readIORef ref1
114 |           case link' of
115 |             Info _ descRef ->
116 |               modifyIORef' descRef func
117 |             Link _ ->
118 |               do
119 |                 newPoint <- repr point
120 |                 modify newPoint func
121 | 
122 | union :: Point a -> Point a -> a -> IO ()
123 | union p1 p2 newDesc =
124 |   do
125 |     point1@(Pt ref1) <- repr p1
126 |     point2@(Pt ref2) <- repr p2
127 | 
128 |     Info w1 d1 <- readIORef ref1
129 |     Info w2 d2 <- readIORef ref2
130 | 
131 |     if point1 == point2
132 |       then writeIORef d1 newDesc
133 |       else do
134 |         weight1 <- readIORef w1
135 |         weight2 <- readIORef w2
136 | 
137 |         let !newWeight = weight1 + weight2
138 | 
139 |         if weight1 >= weight2
140 |           then do
141 |             writeIORef ref2 (Link point1)
142 |             writeIORef w1 newWeight
143 |             writeIORef d1 newDesc
144 |           else do
145 |             writeIORef ref1 (Link point2)
146 |             writeIORef w2 newWeight
147 |             writeIORef d2 newDesc
148 | 
149 | equivalent :: Point a -> Point a -> IO Bool
150 | equivalent p1 p2 =
151 |   do
152 |     v1 <- repr p1
153 |     v2 <- repr p2
154 |     return (v1 == v2)
155 | 
156 | redundant :: Point a -> IO Bool
157 | redundant (Pt ref) =
158 |   do
159 |     pInfo <- readIORef ref
160 |     case pInfo of
161 |       Info _ _ ->
162 |         return False
163 |       Link _ ->
164 |         return True
165 | 
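
A small usage sketch of this API (assuming `Type.UnionFind` is importable from the compiler's build): two fresh points start out as distinct classes, become equivalent after `union`, and then share the descriptor passed to `union`.

```haskell
import Type.UnionFind (equivalent, fresh, get, union)

main :: IO ()
main = do
  a <- fresh "a"
  b <- fresh "b"

  print =<< equivalent a b -- False: fresh points are distinct classes

  union a b "merged"       -- link the classes and install a new descriptor
  print =<< equivalent a b -- True

  print =<< get b          -- "merged": both points resolve to one descriptor
```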


--------------------------------------------------------------------------------
/devbox.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "$schema": "https://raw.githubusercontent.com/jetify-com/devbox/0.13.5/.schema/devbox.schema.json",
 3 |   "packages": [
 4 |     "nodejs@20",
 5 |     "ormolu@0.7",
 6 |     "ghc@9.6",
 7 |     "cabal-install@3.10"
 8 |   ],
 9 |   "shell": {
10 |     "init_hook": [
11 |       "echo 'Welcome to devbox!' > /dev/null"
12 |     ],
13 |     "scripts": {
14 |       "prepare-deps": [
15 |         "cabal update",
16 |         "npm ci"
17 |       ],
18 |       "build": [
19 |         "npm run prepublishOnly",
20 |         "./build_dev_bin.sh"
21 |       ],
22 |       "test": [
23 |         "cabal test -f dev"
24 |       ],
25 |       "compiler": [
26 |         "GREN_BIN=$PWD/gren node cli.js"
27 |       ],
28 |       "format": [
29 |         "ormolu --mode inplace $(git ls-files '*.hs')",
30 |         "npm run prettier"
31 |       ],
32 |       "format:check": [
33 |         "ormolu --check-idempotence --mode check $(git ls-files '*.hs')",
34 |         "npm run prettier:check"
35 |       ]
36 |     }
37 |   }
38 | }
39 | 


--------------------------------------------------------------------------------
/docs/hacking_on_core_packages.md:
--------------------------------------------------------------------------------
 1 | # Hacking on core packages
 2 | 
 3 | When making changes to core packages that contain kernel code, a bit of hackery is required due to the constraints enforced by the compiler.
 4 | 
 5 | This document describes making changes to a core package used in the context of an application. The intent of the application is to be able to test the results of the changes made to the package.
 6 | 
 7 | ## Package resolution
 8 | 
 9 | When compiling a project, the Gren compiler reads the list of dependencies from `gren.json` and checks to see if they are all downloaded to disk. This check simply sees if a specific `gren.json` file exists at a specific file path. If the file path exists, then the compiler will treat whatever source code is there as the correct code for the package and version in question.
10 | 
11 | For instance, if your project has a dependency on `gren/core 1.0.0`, then the Gren compiler will look for a `gren.json` file in `~/.cache/gren//packages/gren/core/1.0.0`. The `~/.cache/gren/` folder is otherwise known as the package cache.
12 | 
13 | This means that if you want to test a bug-fix for `gren/core`, you can simply edit the source code at that location then compile your application and see if the bug is fixed.
14 | 
15 | The compiler follows symlinks, so you could replace a package directory with a symlink to your local checkout of that package.
16 | 
17 | ## Caches
18 | 
19 | In order to speed up development, Gren caches the compiled result of packages to avoid doing it again in the future.
20 | 
21 | When making changes to a package that resides in the package cache, as described above, you'll also need to remove these compilation caches for the changes to be included in the next build.
22 | 
23 | In the package cache, you'll need to delete the `artifacts.dat` file in the directory of the versioned package you changed.
24 | 
25 | In your application, you'll need to remove the `.gren` directory.
26 | 
27 | Once those are removed, running `gren make` will recompile both the package and the application, and all recent changes should be present.
28 | 


--------------------------------------------------------------------------------
/docs/kernel_code.md:
--------------------------------------------------------------------------------
 1 | # Kernel code
 2 | 
 3 | Kernel code is JavaScript which can be called directly from Gren. That is, it is not compiled from Gren source code. Such code is inherently unsafe, but it is necessary for performance and for exposing Web APIs to Gren.
 4 | 
 5 | Because it is unsafe, kernel code has certain limitations. Chief among them is that it can only be contained within a `gren/` package. As such, kernel code can only be written through collaboration with the Gren core team.
 6 | 
 7 | If you have an idea for a package that needs to make use of kernel code, please reach out to the core team to discuss this idea before venturing forth.
 8 | 
 9 | ## How do I write kernel code?
10 | 
11 | As mentioned, kernel code has certain limitations:
12 | 
13 | - Only packages can contain kernel code, not applications.
14 | - The package _has to_ be hosted in the `gren` organization on GitHub.
15 | - Kernel code must be placed in a JavaScript file in the `src/Gren/Kernel` directory of the package.
16 | - Kernel code files can be imported by Gren modules, but cannot be aliased or have an exposing list.
17 | - Kernel code needs to be wrapped by a Gren function _with_ type annotations.
18 | 
19 | When writing kernel code, you should aim to write as little of it as possible. Kernel code is prone to breaking between compiler releases, and keeping it to a minimum makes it easier to upgrade to a new compiler release.
20 | 
21 | There are also many rough edges when dealing with kernel code, and smoothing over those edges is not a top priority.
22 | 
23 | ## Structure of a kernel code file
24 | 
25 | Let's say we want to create a package for accessing the `LocalStorage` API. Create a new package (the package name needs to be something like `gren/local-storage`), and create a `src/Gren/Kernel/LocalStorage.js` file.
26 | 
27 | The implementation might look like this:
28 | 
29 | ```
30 | /*
31 | import Maybe exposing (Just, Nothing)
32 | */
33 | 
34 | function _LocalStorage_getItem(key) {
35 |     var item = localStorage.getItem(key);
36 |     if (typeof item !== 'undefined') {
37 |         return __Maybe_Just(item);
38 |     }
39 | 
40 |     return __Maybe_Nothing;
41 | }
42 | ```
43 | 
44 | A few things to note:
45 | 
46 | - Each \*.js file _must_ begin with a multi-line comment. Inside this comment block, you can use Gren-like syntax to import Gren and Kernel code modules.
47 | - When defining functions and variables in the module, they need to be prefixed with a _single_ underscore and the name of the file. In this case: `_LocalStorage_functionName`.
48 | - When calling a function or referencing a variable in the same module, you must use the same prefix as when defining it.
49 | - When calling an external function or referencing an external variable, you must use _two_ underscores and the module name. In this case: `__Maybe_Just(item)`.
50 | - Object properties must be prefixed with `__$` on the JavaScript side if the objects come from Gren, or are being passed to Gren. Otherwise things will crash when `--optimize` mangles the names. E.g. `Gren.Kernel.MyModule.myFunc { foo = "bar" }` on the JS side should access `foo` with `myObj.__$foo`, and vice versa when sending an object to Gren from JS: `__MyModule_anotherFunc({__$bar: "baz"})` would allow Gren to access `bar` with `myObject.bar`.
51 | - A function that takes two or more arguments must be defined using a curried function helper. For instance, defining a function like `setItem` would look like: `var _LocalStorage_setItem = F2(function(key, value) { ... });`
52 | - A function that takes between 2 and 9 arguments must be called using a partial application helper. As an example, calling a function with two arguments looks like this: `A2(_LocalStorage_setItem, key, value)`.
53 | - Alternatively, you can simply perform a curried function call, though this will be worse for performance: `_LocalStorage_setItem(key)(value)`.
54 | 
55 | Also keep in mind that the code above has noticeable side effects, and wouldn't pass code review by the core team. In this particular instance, the `getItem` function should probably return a `Task`. You can take a look at the `Process` module in `gren/core` for an example.
56 | 
57 | ## Calling kernel code from Gren
58 | 
59 | Following the example above, create a Gren file at `src/LocalStorage.gren`. It might look like the following:
60 | 
61 | ```
62 | module LocalStorage exposing (getItem)
63 | 
64 | import Gren.Kernel.LocalStorage
65 | 
66 | getItem : String -> Maybe String
67 | getItem =
68 |   Gren.Kernel.LocalStorage.getItem
69 | ```
70 | 
71 | The Gren compiler does nothing to verify that the kernel code adheres to the type signature; this needs to be verified manually to avoid strange effects in otherwise safe Gren code.
72 | 


--------------------------------------------------------------------------------
/gren.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "type": "application",
 3 |     "platform": "node",
 4 |     "source-directories": [
 5 |         "src"
 6 |     ],
 7 |     "gren-version": "0.4.5",
 8 |     "dependencies": {
 9 |         "direct": {
10 |             "gren-lang/core": "5.1.1",
11 |             "gren-lang/node": "4.2.1",
12 |             "gren-lang/compiler-node": "1.0.2"
13 |         },
14 |         "indirect": {
15 |             "gren-lang/url": "4.0.0"
16 |         }
17 |     }
18 | }
19 | 


--------------------------------------------------------------------------------
/hints/comparing-custom-types.md:
--------------------------------------------------------------------------------
 1 | # Comparing Custom Types
 2 | 
 3 | The built-in comparison operators work on a fixed set of types, like `Int` and `String`. That covers a lot of cases, but what happens when you want to compare custom types?
 4 | 
 5 | This page aims to catalog these scenarios and offer alternative paths that can get you unstuck.
 6 | 
 7 | ## Wrapped Types
 8 | 
 9 | It is common to try to get some extra type safety by creating really simple custom types:
10 | 
11 | ```gren
12 | type Id = Id Int
13 | type Age = Age Int
14 | 
15 | type Comment = Comment String
16 | type Description = Description String
17 | ```
18 | 
19 | By wrapping the primitive values like this, the type system can now help you make sure that you never mix up an `Id` and an `Age`. Those are different types! This trick is extra cool because it has no runtime cost in `--optimize` mode. The compiler can just use an `Int` or `String` directly when you use that flag!
20 | 
21 | The problem arises when you want to use an `Id` as a key in a dictionary. This is a totally reasonable thing to do, but the current version of Gren cannot handle this scenario.
22 | 
23 | Instead of creating a `Dict Id Info` type, one thing you can do is create a custom data structure like this:
24 | 
25 | ```gren
26 | module User exposing (Id, Table, empty, get, add)
27 | 
28 | import Dict exposing (Dict)
29 | 
30 | 
31 | -- USER
32 | 
33 | type Id = Id Int
34 | 
35 | 
36 | -- TABLE
37 | 
38 | type Table info =
39 |   Table Int (Dict Int info)
40 | 
41 | empty : Table info
42 | empty =
43 |   Table 0 Dict.empty
44 | 
45 | get : Id -> Table info -> Maybe info
46 | get (Id id) (Table _ dict) =
47 |   Dict.get id dict
48 | 
49 | add : info -> Table info -> (Table info, Id)
50 | add info (Table nextId dict) =
51 |   ( Table (nextId + 1) (Dict.insert nextId info dict)
52 |   , Id nextId
53 |   )
54 | ```
55 | 
56 | There are a couple nice things about this approach:
57 | 
58 | 1. The only way to get a new `User.Id` is to `add` information to a `User.Table`.
59 | 2. All the operations on a `User.Table` are explicit. Does it make sense to remove users? To merge two tables together? Are there any special details to consider in those cases? This will always be captured explicitly in the interface of the `User` module.
60 | 3. If you ever want to switch the internal representation from `Dict` to `Array` or something else, it is no problem. All the changes will be within the `User` module.
61 | 
62 | So while this approach is not as convenient as using a `Dict` directly, it has some benefits of its own that can be helpful in some cases.
63 | 
64 | ## Enumerations to Ints
65 | 
66 | Say you need to define a `trafficLightToInt` function:
67 | 
68 | ```gren
69 | type TrafficLight = Green | Yellow | Red
70 | 
71 | trafficLightToInt : TrafficLight -> Int
72 | trafficLightToInt trafficLight =
73 |   ???
74 | ```
75 | 
76 | We have heard that some people would prefer to use a dictionary for this sort of thing. That way you do not need to write the numbers yourself, they can be generated such that you never have a typo.
77 | 
78 | I would recommend using a `when` expression though:
79 | 
80 | ```gren
81 | type TrafficLight = Green | Yellow | Red
82 | 
83 | trafficLightToInt : TrafficLight -> Int
84 | trafficLightToInt trafficLight =
85 |   when trafficLight is
86 |     Green  -> 1
87 |     Yellow -> 2
88 |     Red    -> 3
89 | ```
90 | 
91 | This is really straightforward while avoiding questions like “is `Green` less than or greater than `Red`?”
92 | 
93 | ## Something else?
94 | 
95 | If you have some other situation, please tell us about it [here](https://github.com/gren/error-message-catalog/issues). That is a log of error messages that can be improved, and we can use the particulars of your scenario to add more advice on this page!
96 | 


--------------------------------------------------------------------------------
/hints/comparing-records.md:
--------------------------------------------------------------------------------
 1 | The built-in comparison operators work on a fixed set of types, like `Int` and `String`. That covers a lot of cases, but what happens when you want to compare records?
 2 | 
 3 | This page aims to catalog these scenarios and offer alternative paths that can get you unstuck.
 4 | 
 5 | ## Sorting Records
 6 | 
 7 | Say we want a `view` function that can show a list of students sorted by different characteristics.
 8 | 
 9 | We could create something like this:
10 | 
11 | ```gren
12 | import Html exposing (..)
13 | 
14 | type alias Student =
15 |   { name : String
16 |   , age : Int
17 |   , gpa : Float
18 |   }
19 | 
20 | type Order = Name | Age | GPA
21 | 
22 | viewStudents : Order -> List Student -> Html msg
23 | viewStudents order students =
24 |   let
25 |     orderlyStudents =
26 |       when order is
27 |         Name -> List.sortBy .name students
28 |         Age -> List.sortBy .age students
29 |         GPA -> List.sortBy .gpa students
30 |   in
31 |   ul [] (List.map viewStudent orderlyStudents)
32 | 
33 | viewStudent : Student -> Html msg
34 | viewStudent student =
35 |   li [] [ text student.name ]
36 | ```
37 | 
38 | If you are worried about the performance of changing the order or updating information about particular students, you can start using the [`Html.Lazy`](https://package.gren-lang.org/packages/gren/html/latest/Html-Lazy) and [`Html.Keyed`](https://package.gren-lang.org/packages/gren/html/latest/Html-Keyed) modules. The updated code would look something like this:
39 | 
40 | ```gren
41 | import Html exposing (..)
42 | import Html.Lazy exposing (lazy)
43 | import Html.Keyed as Keyed
44 | 
45 | type Order = Name | Age | GPA
46 | 
47 | type alias Student =
48 |   { name : String
49 |   , age : Int
50 |   , gpa : Float
51 |   }
52 | 
53 | viewStudents : Order -> List Student -> Html msg
54 | viewStudents order students =
55 |   let
56 |     orderlyStudents =
57 |       when order is
58 |         Name -> List.sortBy .name students
59 |         Age -> List.sortBy .age students
60 |         GPA -> List.sortBy .gpa students
61 |   in
62 |   Keyed.ul [] (List.map viewKeyedStudent orderlyStudents)
63 | 
64 | viewKeyedStudent : Student -> (String, Html msg)
65 | viewKeyedStudent student =
66 |   ( student.name, lazy viewStudent student )
67 | 
68 | viewStudent : Student -> Html msg
69 | viewStudent student =
70 |   li [] [ text student.name ]
71 | ```
72 | 
73 | By using `Keyed.ul` we help the renderer move the DOM nodes around based on their key. This makes it much cheaper to reorder a bunch of students. And by using `lazy` we help the renderer skip a bunch of work. If the `Student` is the same as last time, the renderer can skip over it.
74 | 
75 | > **Note:** Some people are skeptical of having logic like this in `view` functions, but I think the alternative (maintaining sort order in your `Model`) has some serious downsides. Say a colleague is adding a message to `Add` students, but they do not know about the sort order rules needed for presentation. Bug! So in this alternate design, you must diligently test your `update` function to make sure that no message disturbs the sort order. This is bound to lead to bugs over time!
76 | >
77 | > With all the optimizations possible with `Html.Lazy` and `Html.Keyed`, I would always be inclined to work on optimizing my `view` functions rather than making my `update` functions more complicated and error prone.
78 | 
79 | ## Something else?
80 | 
81 | If you have some other situation, please tell us about it [here](https://github.com/gren/error-message-catalog/issues). That is a log of error messages that can be improved, and we can use the particulars of your scenario to add more advice on this page!
82 | 


--------------------------------------------------------------------------------
/hints/implicit-casts.md:
--------------------------------------------------------------------------------
 1 | # Implicit Casts
 2 | 
 3 | Many languages automatically convert from `Int` to `Float` when they think it is necessary. This conversion is often called an [implicit cast](https://en.wikipedia.org/wiki/Type_conversion).
 4 | 
 5 | Languages that will add in implicit casts for addition include:
 6 | 
 7 | - JavaScript
 8 | - Python
 9 | - Ruby
10 | - C
11 | - C++
12 | - C#
13 | - Java
14 | - Scala
15 | 
16 | These languages generally agree that an `Int` may be implicitly cast to a `Float` when necessary. So everyone is doing it, why not Gren?!
17 | 
18 | ## Type Inference + Implicit Casts
19 | 
20 | Gren comes from the ML-family of languages. Languages in the ML-family that **never** do implicit casts include:
21 | 
22 | - Standard ML
23 | - OCaml
24 | - Elm
25 | - F#
26 | - Gren
27 | - Haskell
28 | 
29 | Why would so many languages from this lineage require explicit conversions though?
30 | 
31 | Well, we have to go back to the 1970s for some background. J. Roger Hindley and Robin Milner independently discovered an algorithm that could _efficiently_ figure out the type of everything in your program without any type annotations. Type Inference! Every ML-family language has some variation of this algorithm at the center of its design.
32 | 
33 | For decades, the problem was that nobody could figure out how to combine type inference with implicit casts AND make the resulting algorithm efficient enough for daily use. As far as I know, Scala was the first widely known language to figure out how to combine these two things! Its creator, Martin Odersky did a lot of work on combining type inference and subtyping to make this possible.
34 | 
35 | So for any ML-family language designed before Scala, it is safe to assume that implicit conversions just were not an option. Okay, but what about Gren?! It comes after Scala, so why not do it like them?!
36 | 
37 | 1. You pay a performance cost to mix type inference and implicit conversions. At least as far as anyone knows, it defeats an optimization that is crucial to getting _reliably_ good performance. It is fine in most cases, but it can be a real issue in very large code bases.
38 | 
39 | 2. Based on experience reports from Scala users, it seemed like the convenience was not worth the hidden cost. Yes, you can convert `n` in `(n + 1.5)` and everything is nice, but when you are in larger programs that are sparsely annotated, it can be quite difficult to figure out what is going on.
40 | 
41 | This user data may be confounded by the fact that Scala allows quite extensive conversions, not just from `Int` to `Float`, but I think it is worth taking seriously nonetheless. So it is _possible_, but it has tradeoffs.
42 | 
43 | ## Conclusion
44 | 
45 | First, based on the landscape of design possibilities, it seems like requiring _explicit_ conversions is a pretty nice balance. We can have type inference, it can produce friendly error messages, the algorithm is snappy, and an unintended implicit cast will not flow hundreds of lines before manifesting to the user.
46 | 
47 | Second, Gren very much favors explicit code, so this also fits in with the overall spirit of the language and libraries.
48 | 
49 | I hope that clarifies why you have to add those `toFloat` and `round` functions! It definitely can take some getting used to, but there are tons of folks who get past that acclimation period and really love the tradeoffs!
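
To make that concrete, here is a small sketch of what the explicit conversions look like in practice, using the `toFloat` and `round` functions mentioned above (the function names are just for illustration):

```gren
-- Int -> Float is spelled out with toFloat
average : Int -> Int -> Float
average a b =
  toFloat (a + b) / 2

-- and going back to Int is explicit as well
celsiusToFahrenheit : Int -> Int
celsiusToFahrenheit c =
  round (toFloat c * 1.8 + 32)
```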
50 | 


--------------------------------------------------------------------------------
/hints/infinite-type.md:
--------------------------------------------------------------------------------
 1 | # Hints for Infinite Types
 2 | 
 3 | Infinite types are probably the trickiest kind of bugs to track down. **Writing down type annotations is usually the fastest way to figure them out.** Let's work through an example to get a feel for how these errors usually work though!
 4 | 
 5 | ## Example
 6 | 
 7 | A common way to get an infinite type error is very small typos. For example, do you see the problem in the following code?
 8 | 
 9 | ```gren
10 | incrementNumbers list =
11 |   List.map incrementNumbers list
12 | 
13 | incrementNumber n =
14 |   n + 1
15 | ```
16 | 
17 | The issue is that `incrementNumbers` calls itself, not the `incrementNumber` function defined below. So there is an extra `s` in this program! Let's focus on that:
18 | 
19 | ```gren
20 | incrementNumbers list =
21 |   List.map incrementNumbers list -- BUG extra `s` makes this self-recursive
22 | ```
23 | 
24 | Now the compiler does not know that anything is wrong yet. It just tries to figure out the types like normal. It knows that `incrementNumbers` is a function. The definition uses `List.map` so we can deduce that `list : List t1` and the result of this function call should be some other `List t2`. This also means that `incrementNumbers : List t1 -> List t2`.
25 | 
26 | The issue is that `List.map` uses `incrementNumbers` on `list`! That means that each element of `list` (which has type `t1`) must be fed into `incrementNumbers` (which takes `List t1`).
27 | 
28 | That means that `t1 = List t1`, which is an infinite type! If we start expanding this, we get `List (List (List (List (List ...))))` out to infinity!
29 | 
30 | The point is mainly that we are in a confusing situation. The types are confusing. This explanation is confusing. The compiler is confused. It is a bad time. But luckily, the more type annotations you add, the better chance there is that you and the compiler can figure things out! So say we change our definition to:
31 | 
32 | ```gren
33 | incrementNumbers : List Int -> List Int
34 | incrementNumbers list =
35 |   List.map incrementNumbers list -- STILL HAS BUG
36 | ```
37 | 
38 | Now we are going to get a pretty normal type error. Hey, you said that each element in the `list` is an `Int` but I cannot feed that into a `List Int -> List Int` function! Something like that.
39 | 
40 | In summary, the root issue is often some small typo, and the best way out is to start adding type annotations on everything!
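
For completeness, here is one way the program could look once the typo is fixed and the annotations are in place:

```gren
incrementNumbers : List Int -> List Int
incrementNumbers list =
  List.map incrementNumber list -- no extra `s`, so no accidental self-recursion

incrementNumber : Int -> Int
incrementNumber n =
  n + 1
```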
41 | 


--------------------------------------------------------------------------------
/hints/init.md:
--------------------------------------------------------------------------------
 1 | # Creating a Gren project
 2 | 
 3 | The main goal of `gren init` is to get you to this page!
 4 | 
 5 | It just creates a `gren.json` file and a `src/` directory for your code.
 6 | 
 7 | ## What is `gren.json`?
 8 | 
 9 | This file describes your project. It lists all of the packages you depend upon, so it will say the particular version of [`gren/core`](https://package.gren-lang.org/packages/gren/core/latest/) and [`gren/html`](https://package.gren-lang.org/packages/gren/html/latest/) that you are using. It makes builds reproducible! You can read a bit more about it [here](https://github.com/gren/compiler/blob/master/docs/gren.json/application.md).
10 | 
11 | You should generally not edit it by hand. It is better to add new dependencies with commands like `gren install gren/http` or `gren install gren/json`.
12 | 
13 | ## What goes in `src/`?
14 | 
15 | This is where all of your Gren files live. It is best to start with a file called `src/Main.gren`. As you work through [the official guide](https://gren-lang.org/learn), you can put the code examples in that `src/Main.gren` file.
16 | 
17 | ## How do I compile it?
18 | 
19 | You can run `gren make src/Main.gren` and it will produce an `index.html` file that you can look at in your browser.
20 | 


--------------------------------------------------------------------------------
/hints/missing-patterns.md:
--------------------------------------------------------------------------------
  1 | # Hints for Missing Patterns
  2 | 
  3 | Gren checks to make sure that all possible inputs to a function or `when` are handled. This gives us the guarantee that no Gren code is ever going to crash because data had an unexpected shape.
  4 | 
  5 | There are a couple techniques for making this work for you in every scenario.
  6 | 
  7 | ## The danger of wildcard patterns
  8 | 
  9 | A common scenario is that you want to add a tag to a custom type that is used in a bunch of places. For example, maybe you are working with different variations of users in a chat room:
 10 | 
 11 | ```gren
 12 | type User
 13 |   = Regular String Int
 14 |   | Anonymous
 15 | 
 16 | toName : User -> String
 17 | toName user =
 18 |   when user is
 19 |     Regular name _ ->
 20 |       name
 21 | 
 22 |     _ ->
 23 |       "anonymous"
 24 | ```
 25 | 
 26 | Notice the wildcard pattern in `toName`. This will hurt us! Say we add a `Visitor String` variant to `User` at some point. Now we have a bug where visitor names are reported as `"anonymous"`, and the compiler cannot help us!
 27 | 
 28 | So instead, it is better to explicitly list all possible variants, like this:
 29 | 
 30 | ```gren
 31 | type User
 32 |   = Regular String Int
 33 |   | Visitor String
 34 |   | Anonymous
 35 | 
 36 | toName : User -> String
 37 | toName user =
 38 |   when user is
 39 |     Regular name _ ->
 40 |       name
 41 | 
 42 |     Anonymous ->
 43 |       "anonymous"
 44 | ```
 45 | 
 46 | Now the compiler will say "hey, what should `toName` do when it sees a `Visitor`?" This is a tiny bit of extra work, but it is very worth it!
 47 | 
 48 | ## I want to go fast!
 49 | 
 50 | Imagine that the `User` type appears in 20 or 30 functions across your project. When we add a `Visitor` variant, the compiler points out all the places that need to be updated. That is very convenient, but in a big project, maybe you want to get through it extra quickly.
 51 | 
 52 | In that case, it can be helpful to use [`Debug.todo`](https://package.gren-lang.org/packages/gren-lang/core/latest/Debug#todo) to leave some code incomplete:
 53 | 
 54 | ```gren
 55 | type User
 56 |   = Regular String Int
 57 |   | Visitor String
 58 |   | Anonymous
 59 | 
 60 | toName : User -> String
 61 | toName user =
 62 |   when user is
 63 |     Regular name _ ->
 64 |       name
 65 | 
 66 |     Visitor _ ->
 67 |       Debug.todo "give the visitor name"
 68 | 
 69 |     Anonymous ->
 70 |       "anonymous"
 71 | 
 72 | -- and maybe a bunch of other things
 73 | ```
 74 | 
 75 | In this case it is easier to just write the implementation, but the point is that on more complex functions, you can put things off a bit.
 76 | 
 77 | The Gren compiler is actually aware of `Debug.todo` so when it sees it in a `when` like this, it will crash with a bunch of helpful information. It will tell you:
 78 | 
 79 | 1. The name of the module that contains the code.
 80 | 2. The line numbers of the `when` containing the TODO.
 81 | 3. The particular value that led to this TODO.
 82 | 
 83 | From that information you have a pretty good idea of what went wrong and can go fix it.
 84 | 
 85 | I tend to use `Debug.todo` like this when my goal is to go quick, because it makes it easy to find all remaining todos in my code before a release.
 86 | 
 87 | ## A list that definitely is not empty
 88 | 
 89 | This can come up from time to time, but Gren **will not** let you write code like this:
 90 | 
 91 | ```gren
 92 | last : List a -> a
 93 | last list =
 94 |   when list is
 95 |     [x] ->
 96 |         x
 97 | 
 98 |     _ :: rest ->
 99 |         last rest
100 | ```
101 | 
102 | This is no good. It does not handle the empty list. There are two ways to handle this. One is to make the function return a `Maybe` like this:
103 | 
104 | ```gren
105 | last : List a -> Maybe a
106 | last list =
107 |   when list is
108 |     [] ->
109 |         Nothing
110 | 
111 |     [x] ->
112 |         Just x
113 | 
114 |     _ :: rest ->
115 |         last rest
116 | ```
117 | 
118 | This is nice because it lets users know that there might be a failure, so they can recover from it however they want.
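
For example, a caller of the `Maybe` version might recover with a fallback message, as in this small sketch (`describeLast` is just an illustrative name):

```gren
describeLast : List String -> String
describeLast names =
  when last names is
    Just name ->
      "The last name is " ++ name

    Nothing ->
      "The list was empty!"
```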
119 | 
120 | The other option is to “unroll the list” one level to ensure that no one can ever provide an empty list in the first place:
121 | 
122 | ```gren
123 | last : a -> List a -> a
124 | last first rest =
125 |   when rest is
126 |     [] ->
127 |       first
128 | 
129 |     newFirst :: newRest ->
130 |       last newFirst newRest
131 | ```
132 | 
133 | By demanding the first element of the list as an argument, it becomes impossible to call this function if you have an empty list!
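
As a quick sketch of how a call looks, the first element is passed separately from the rest (`highestScore` is just an illustrative name):

```gren
-- `last 1 [ 2, 3, 4 ]` walks down the list and evaluates to 4.
-- There is no way to call `last` without providing at least one element.
highestScore : Int
highestScore =
  last 1 [ 2, 3, 4 ]
```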
134 | 
135 | This “unroll the list” trick is quite useful. I recommend using it directly, not through some external library. It is nothing special. Just a useful idea!
136 | 


--------------------------------------------------------------------------------
/hints/optimize.md:
--------------------------------------------------------------------------------
 1 | # How to optimize Gren code
 2 | 
 3 | When you are serving a website, there are two kinds of optimizations you want to do:
 4 | 
 5 | 1. **Asset Size** — How can we send as few bits as possible?
 6 | 2. **Performance** — How can those bits run as quickly as possible?
 7 | 
 8 | It turns out that Gren does really well on both! We have [very small assets](https://gren-lang.org/news/small-assets-without-the-headache) and [very fast code](https://gren-lang.org/news/blazing-fast-html-round-two) when compared to the popular alternatives.
 9 | 
10 | Okay, but how do we get those numbers?
11 | 
12 | ## Instructions
13 | 
14 | Step one is to compile with the `--optimize` flag. This does things like shortening record field names and unboxing values.
15 | 
16 | Step two is to call `uglifyjs` with a bunch of special flags. The flags unlock optimizations that are unreliable in normal JS code, but because Gren does not have side-effects, they work fine for us!
17 | 
18 | Putting those together, here is how I would optimize `src/Main.gren` with two terminal commands:
19 | 
20 | ```bash
21 | gren make src/Main.gren --optimize --output=gren.js
22 | uglifyjs gren.js --compress "pure_funcs=[F2,F3,F4,F5,F6,F7,F8,F9,A2,A3,A4,A5,A6,A7,A8,A9],pure_getters,keep_fargs=false,unsafe_comps,unsafe" | uglifyjs --mangle --output gren.min.js
23 | ```
24 | 
25 | After this you will have a `gren.js` and a significantly smaller `gren.min.js` file!
26 | 
27 | **Note 1:** `uglifyjs` is called twice there. First to `--compress` and second to `--mangle`. This is necessary! Otherwise `uglifyjs` will ignore our `pure_funcs` flag.
28 | 
29 | **Note 2:** If the `uglifyjs` command is not available in your terminal, you can run the command `npm install uglify-js --global` to download it. You probably already have `npm` from getting `gren repl` working, but if not, it is bundled with [nodejs](https://nodejs.org/).
30 | 
31 | ## Scripts
32 | 
33 | It is hard to remember all that, so it is probably a good idea to write a script that does it.
34 | 
35 | I would maybe want to run `./optimize.sh src/Main.gren` and get out `gren.js` and `gren.min.js`, so on Mac or Linux, I would make a script called `optimize.sh` like this:
36 | 
37 | ```bash
38 | #!/bin/sh
39 | 
40 | set -e
41 | 
42 | js="gren.js"
43 | min="gren.min.js"
44 | 
45 | gren make --optimize --output=$js $@
46 | 
47 | uglifyjs $js --compress "pure_funcs=[F2,F3,F4,F5,F6,F7,F8,F9,A2,A3,A4,A5,A6,A7,A8,A9],pure_getters,keep_fargs=false,unsafe_comps,unsafe" | uglifyjs --mangle --output $min
48 | 
49 | echo "Initial size: $(cat $js | wc -c) bytes  ($js)"
50 | echo "Minified size:$(cat $min | wc -c) bytes  ($min)"
51 | echo "Gzipped size: $(cat $min | gzip -c | wc -c) bytes"
52 | ```
53 | 
54 | It also prints out all the asset sizes for you! Your server should be configured to gzip the assets it sends, so the last line is telling you how many bytes would _actually_ get sent to the user.
55 | 
56 | Again, the important commands are `gren` and `uglifyjs` which work on any platform, so it should not be too tough to do something similar on Windows.
57 | 


--------------------------------------------------------------------------------
/hints/port-modules.md:
--------------------------------------------------------------------------------
 1 | # No Ports in Packages
 2 | 
 3 | The package ecosystem is one of the most important parts of Gren. Right now, our ecosystem has some compelling benefits:
 4 | 
 5 | - There are many obvious default packages that work well.
 6 | - Adding dependencies cannot introduce runtime exceptions.
 7 | - Patch changes cannot lead to surprise build failures.
 8 | 
 9 | These are really important factors if you want to _quickly_ create _reliable_ applications. The Gren community thinks this is valuable.
10 | 
11 | Other communities think that the _number_ of packages is a better measure of ecosystem health. That is a fine metric to use, but it is not the one we use for Gren. We would rather have 50 great packages than 100k packages of wildly varying quality.
12 | 
13 | ## So what about ports?
14 | 
15 | Imagine you install a new package that claims to support `WebSocket`. You get it set up, working through any compile errors. You run it, but it does not seem to work! After trying to figure it out for hours, you realize there is some poorly documented `port` to hook up...
16 | 
17 | Okay, now you need to hook up some JavaScript code. Is that JS file in the Gren package? Or is it on `npm`? Wait, what version on `npm` though? And is this patch version going to work as well? Also, how does this file fit into my build process? And assuming we get through all that, maybe the `port` has the same name as one of the ports in your project. Or it clashes with a `port` name in another package.
18 | 
19 | **Suddenly adding dependencies is much more complicated and risky!** An experienced developer would always check for ports up front, spending a bunch of time manually classifying unacceptable packages. Most people would not know to do that and would learn all the pitfalls through personal experience, ultimately spending even _more_ time than the person who defensively checks to avoid these issues.
20 | 
21 | So “ports in packages” would impose an enormous cost on application developers, and in the end, we would have a less reliable package ecosystem overall.
22 | 
23 | ## Conclusion
24 | 
25 | Our wager with the Gren package ecosystem is that it is better to get a package _right_ than to get it _right now_. So while we could use “ports in packages” as a way to get twenty `localStorage` packages of varying quality _right now_, we are choosing not to go that route. Instead we ask that developers use ports directly in their application code, getting the same result a different way.
26 | 
27 | Now this may not be the right choice for your particular project, and that is okay! We will be expanding our core libraries over time, as explained [here](https://github.com/gren-lang/projects/blob/master/roadmap.md#where-is-the-localstorage-package), and we hope you will circle back later to see if Gren has grown into a better fit!
28 | 
29 | If you have more questions about this choice or what it means for your application, please come ask in the [Gren Discord](https://discord.gg/Chb9YB9Vmh). Folks are friendly and happy to help out! Chances are that a `port` in your application will work great for your case once you learn more about how ports are meant to be used.
30 | 


--------------------------------------------------------------------------------
/hints/recursive-alias.md:
--------------------------------------------------------------------------------
  1 | # Hints for Recursive Type Aliases
  2 | 
  3 | At the root of this issue is the distinction between a `type` and a `type alias`.
  4 | 
  5 | ## What is a type alias?
  6 | 
  7 | When you create a type alias, you are just creating a shorthand to refer to an existing type. So when you say the following:
  8 | 
  9 | ```gren
 10 | type alias Time = Float
 11 | 
 12 | type alias Degree = Float
 13 | 
 14 | type alias Weight = Float
 15 | ```
 16 | 
 17 | You have not created any _new_ types, you just made some alternate names for `Float`. You can write down things like this and it'll work fine:
 18 | 
 19 | ```gren
 20 | add : Time -> Degree -> Weight
 21 | add time degree =
 22 |   time + degree
 23 | ```
 24 | 
 25 | This is kind of a weird way to use type aliases though. The typical usage would be for records, where you do not want to write out the whole thing every time. Stuff like this:
 26 | 
 27 | ```gren
 28 | type alias Person =
 29 |   { name : String
 30 |   , age : Int
 31 |   , height : Float
 32 |   }
 33 | ```
 34 | 
 35 | It is much easier to write down `Person` in a type, and then it will just expand out to the underlying type when the compiler checks the program.
 36 | 
 37 | ## Recursive type aliases?
 38 | 
 39 | Okay, so let's say you have some type that may contain itself. In Gren, a common example of this is a comment that might have subcomments:
 40 | 
 41 | ```gren
 42 | type alias Comment =
 43 |   { message : String
 44 |   , upvotes : Int
 45 |   , downvotes : Int
 46 |   , responses : List Comment
 47 |   }
 48 | ```
 49 | 
 50 | Now remember that type _aliases_ are just alternate names for the real type. So to make `Comment` into a concrete type, the compiler would start expanding it out.
 51 | 
 52 | ```gren
 53 |   { message : String
 54 |   , upvotes : Int
 55 |   , downvotes : Int
 56 |   , responses :
 57 |       List
 58 |         { message : String
 59 |         , upvotes : Int
 60 |         , downvotes : Int
 61 |         , responses :
 62 |             List
 63 |               { message : String
 64 |               , upvotes : Int
 65 |               , downvotes : Int
 66 |               , responses : List ...
 67 |               }
 68 |         }
 69 |   }
 70 | ```
 71 | 
 72 | The compiler cannot deal with a type like this. It would just keep expanding forever.
 73 | 
 74 | ## Recursive types!
 75 | 
 76 | In cases where you want a recursive type, you need to actually create a brand new type. This is what the `type` keyword is for. A simple example of this can be seen when defining a linked list:
 77 | 
 78 | ```gren
 79 | type List
 80 |     = Empty
 81 |     | Node Int List
 82 | ```
 83 | 
 84 | No matter what, the type of `Node n xs` is going to be `List`. There is no expansion to be done. This means you can represent recursive structures with types that do not explode into infinity.
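
For instance, a value of this `List` type is built by nesting constructors, and no matter how deep the nesting goes, the whole thing is still just a `List`:

```gren
-- the list 1, 2, 3 written with explicit constructors
oneTwoThree : List
oneTwoThree =
  Node 1 (Node 2 (Node 3 Empty))
```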
 85 | 
 86 | So let's return to wanting to represent a `Comment` that may have responses. There are a couple ways to do this:
 87 | 
 88 | ### Obvious, but kind of annoying
 89 | 
 90 | ```gren
 91 | type Comment =
 92 |    Comment
 93 |       { message : String
 94 |       , upvotes : Int
 95 |       , downvotes : Int
 96 |       , responses : List Comment
 97 |       }
 98 | ```
 99 | 
100 | Now let's say you want to register an upvote on a comment:
101 | 
102 | ```gren
103 | upvote : Comment -> Comment
104 | upvote (Comment comment) =
105 |   Comment { comment | upvotes = 1 + comment.upvotes }
106 | ```
107 | 
108 | It is kind of annoying that we now have to unwrap and wrap the record to do anything with it.
109 | 
110 | ### Less obvious, but nicer
111 | 
112 | ```gren
113 | type alias Comment =
114 |   { message : String
115 |   , upvotes : Int
116 |   , downvotes : Int
117 |   , responses : Responses
118 |   }
119 | 
120 | type Responses = Responses (List Comment)
121 | ```
122 | 
123 | In this world, we introduce the `Responses` type to capture the recursion, but `Comment` is still an alias for a record. This means the `upvote` function looks nice again:
124 | 
125 | ```gren
126 | upvote : Comment -> Comment
127 | upvote comment =
128 |   { comment | upvotes = 1 + comment.upvotes }
129 | ```
130 | 
131 | So rather than having to unwrap a `Comment` to do _anything_ to it, you only have to do some unwrapping in the cases where you are doing something recursive. In practice, this means you will do less unwrapping which is nice.
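
When you do need to reach into the recursive part, the unwrapping is a single pattern match. Here is a small sketch (`responseCount` is just an illustrative name, and it assumes the usual `List.length`):

```gren
responseCount : Comment -> Int
responseCount comment =
  when comment.responses is
    Responses responses ->
      List.length responses
```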
132 | 
133 | ## Mutually recursive type aliases
134 | 
135 | It is also possible to build type aliases that are _mutually_ recursive. That might be something like this:
136 | 
137 | ```gren
138 | type alias Comment =
139 |   { message : String
140 |   , upvotes : Int
141 |   , downvotes : Int
142 |   , responses : Responses
143 |   }
144 | 
145 | type alias Responses =
146 |   { sortBy : SortBy
147 |   , responses : List Comment
148 |   }
149 | 
150 | type SortBy = Time | Score | MostResponses
151 | ```
152 | 
153 | When you try to expand `Comment` you have to expand `Responses` which needs to expand `Comment` which needs to expand `Responses`, etc.
154 | 
155 | So this is just a fancy case of a self-recursive type alias. The solution is the same. Somewhere in that cycle, you need to define an actual `type` to end the infinite expansion.
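
One way to break the cycle above, sketched here, is to turn `Responses` into a real `type` that wraps its record:

```gren
type alias Comment =
  { message : String
  , upvotes : Int
  , downvotes : Int
  , responses : Responses
  }

type Responses
  = Responses
      { sortBy : SortBy
      , responses : List Comment
      }

type SortBy = Time | Score | MostResponses
```

Now expanding `Comment` stops at `Responses`, and you only unwrap the `Responses` constructor when you actually need to walk into the nested comments.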
156 | 


--------------------------------------------------------------------------------
/hints/repl.md:
--------------------------------------------------------------------------------
 1 | # REPL
 2 | 
 3 | The REPL lets you interact with Gren values and functions in your terminal.
 4 | 
 5 | ## Use
 6 | 
 7 | You can type in expressions, definitions, custom types, and module imports using normal Gren syntax.
 8 | 
 9 | ```gren
10 | > 1 + 1
11 | 2 : number
12 | 
13 | > "hello" ++ "world"
14 | "helloworld" : String
15 | ```
16 | 
17 | The same can be done with definitions and custom types:
18 | 
19 | ```gren
20 | > fortyTwo = 42
21 | 42 : number
22 | 
23 | > increment n = n + 1
24 |  : number -> number
25 | 
26 | > increment 41
27 | 42 : number
28 | 
29 | > factorial n =
30 | |   if n < 1 then
31 | |     1
32 | |   else
33 | |     n * factorial (n-1)
34 | |
35 |  : number -> number
36 | 
37 | > factorial 5
38 | 120 : number
39 | 
40 | > type User
41 | |   = Regular String
42 | |   | Visitor String
43 | |
44 | 
45 | > when Regular "Tom" is
46 | |   Regular name -> "Hey again!"
47 | |   Visitor name -> "Nice to meet you!"
48 | |
49 | "Hey again!" : String
50 | ```
51 | 
52 | When you run `gren repl` in a project with a [`gren.json`](https://github.com/gren/compiler/blob/master/docs/gren.json/application.md) file, you can import any module available in the project. So if your project has a `gren/html` dependency, you could say:
53 | 
54 | ```gren
55 | > import Html exposing (Html)
56 | 
57 | > Html.text "hello"
58 |  : Html msg
59 | 
60 | > Html.text
61 |  : String -> Html msg
62 | ```
63 | 
64 | If you create a module named `MyThing` in your project, you can say `import MyThing` in the REPL as well. Any module that is accessible in your project should be accessible in the REPL.
65 | 
66 | ## Exit
67 | 
68 | To exit the REPL, you can type `:exit`.
69 | 
70 | You can also press `ctrl-d` or `ctrl-c` on some platforms.
71 | 


--------------------------------------------------------------------------------
/hints/shadowing.md:
--------------------------------------------------------------------------------
 1 | # Variable Shadowing
 2 | 
 3 | Variable shadowing is when you define the same variable name twice in an ambiguous way. Here is a pretty reasonable use of shadowing:
 4 | 
 5 | ```gren
 6 | viewName : Maybe String -> Html msg
 7 | viewName name =
 8 |   when name is
 9 |     Nothing ->
10 |       ...
11 | 
12 |     Just name ->
13 |       ...
14 | ```
15 | 
16 | I define a `name` with type `Maybe String` and then in that second branch, I define a `name` that is a `String`. Now that there are two `name` values, it is not 100% obvious which one you want in that second branch.
17 | 
18 | Most linters produce warnings on variable shadowing, so Gren makes “best practices” the default. Just rename the first one to `maybeName` and move on.
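
To make the rename concrete, here is the snippet from above with the outer variable renamed and the branches filled in (assuming `text` from the `Html` module is in scope):

```gren
viewName : Maybe String -> Html msg
viewName maybeName =
  when maybeName is
    Nothing ->
      text "anonymous"

    Just name ->
      text name
```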
19 | 
20 | This choice is relatively uncommon in programming languages though, so I want to provide the reasoning behind it.
21 | 
22 | ## The Cost of Shadowing
23 | 
24 | The code snippet from above is the best case scenario for variable shadowing. It is pretty clear really. But that is because it is a fake example. It does not even compile.
25 | 
26 | In a large module that is evolving over time, this is going to cause bugs in a very predictable way. You will have two definitions, separated by hundreds of lines. For example:
27 | 
28 | ```gren
29 | name : String
30 | name =
31 |   "Tom"
32 | 
33 | -- hundreds of lines
34 | 
35 | viewName : String -> Html msg
36 | viewName name =
37 |   ... name ... name ... name ...
38 | ```
39 | 
40 | Okay, so the `viewName` function has an argument `name` and it uses it three times. Maybe the `viewName` function is 50 lines long in total, so those uses are not totally easy to see. This is fine so far, but say your colleague comes along five months later and wants to support first and last names. They refactor the code like this:
41 | 
42 | ```gren
43 | viewName : String -> String -> Html msg
44 | viewName firstName lastName =
45 |   ... name ... name ... name ...
46 | ```
47 | 
48 | The code compiles, but it does not work as intended. They forgot to change all the uses of `name`, and because it shadows the top-level `name` value, it always shows up as `"Tom"`. It is a simple mistake, but it is always the last thing I think of.
49 | 
50 | > Is the data being fetched properly? Let me log all of the JSON requests. Maybe the JSON decoders are messed up? Hmm. Maybe someone is transforming the name in a bad way at some point? Let me check my `update` code.
51 | 
52 | Basically, a bunch of time gets wasted on something that could easily be detected by the compiler. But this bug is rare, right?
53 | 
54 | ## Aggregate Cost
55 | 
56 | Thinking of a unique and helpful name takes some extra time. Maybe 30 seconds. But it means that:
57 | 
58 | 1. Your code is easier to read and understand later on. So you spend 30 seconds once `O(1)` rather than spending 10 seconds each time someone reads that code in the future `O(n)`.
59 | 
60 | 2. The tricky shadowing bug described above is impossible. Say there is a 5% chance that any given edit produces a shadowing bug, and that resolving that shadowing bug takes one hour. That means the expected time for each edit increases by three minutes (0.05 × 60 minutes = 3 minutes).
61 | 
62 | If you are still skeptical, I encourage you to play around with the number of edits, time costs, and probabilities here. When shadowing is not allowed, the resulting overhead for the entire lifetime of the code is the 30 seconds it takes to pick a better name, so that is what you need to beat!
63 | 
64 | ## Summary
65 | 
66 | Without shadowing, the code is easier to read and folks spend less time on pointless debugging. The net outcome is that folks have more time to make something wonderful with Gren!
67 | 


--------------------------------------------------------------------------------
/hints/type-annotations.md:
--------------------------------------------------------------------------------
 1 | # Hints for Type Annotation Problems
 2 | 
 3 | At the root of this kind of issue is always the fact that a type annotation in your code does not match the corresponding definition. Now that may mean that the type annotation is "wrong" or it may mean that the definition is "wrong". The compiler cannot figure out your intent, only that there is some mismatch.
 4 | 
 5 | This document is going to outline the various things that can go wrong and show some examples.
 6 | 
 7 | ## Annotation vs. Definition
 8 | 
 9 | The most common issue is with user-defined type variables that are too general. So let's say you have defined a function like this:
10 | 
11 | ```gren
12 | addPair : a -> a -> a
13 | addPair x y =
14 |   x + y
15 | ```
16 | 
17 | The issue is that the type annotation is saying "I will accept two parameters that can be literally _anything_" but the definition is using `(+)` which requires things to be numbers. So the compiler is going to infer that the true type of the definition is this:
18 | 
19 | ```gren
20 | addPair : number -> number -> number
21 | ```
22 | 
23 | So you will probably see an error saying "I cannot match `a` with `number`" which is essentially saying, you are trying to provide a type annotation that is **too general**. You are saying `addPair` accepts anything, but in fact, it can only handle numbers.
24 | 
25 | In cases like this, you want to go with whatever the compiler inferred. It is good at figuring this kind of stuff out ;)
26 | 
27 | ## Annotation vs. Itself
28 | 
29 | It is also possible to have a type annotation that clashes with itself. This is probably more rare, but someone will run into it eventually. Let's use another version of `addPair` with problems:
30 | 
31 | ```gren
32 | addPair : Int -> Int -> number
33 | addPair x y =
34 |   x + y
35 | ```
36 | 
37 | In this case the annotation says we should get a `number` out, but because we were specific about the inputs being `Int`, the output should also be an `Int`.
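
So the fix is either to tighten the output or loosen the inputs. For example:

```gren
addPair : Int -> Int -> Int
addPair x y =
  x + y
```

Annotating it as `number -> number -> number` would also be consistent with the definition; the point is that the inputs and the output have to agree.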
38 | 
39 | ## Annotation vs. Internal Annotation
40 | 
41 | A quite tricky case is when an outer type annotation clashes with an inner type annotation. Here is an example of this:
42 | 
43 | ```gren
44 | filter : (a -> Bool) -> List a -> List a
45 | filter isOkay list =
46 |   let
47 |     keepIfOkay : a -> Maybe a
48 |     keepIfOkay x =
49 |       if isOkay x then Just x else Nothing
50 |   in
51 |     List.filterMap keepIfOkay list
52 | ```
53 | 
54 | This case is very unfortunate because all the type annotations are correct, but there is a detail of how type inference works right now: **user-defined type variables are not shared between annotations**. This can lead to some of our worst type error messages, because the problem here is that `a` in the outer annotation does not equal `a` in the inner annotation.
55 | 
56 | For now the best route is to leave off the inner annotation. It is unfortunate, and hopefully we will be able to do a nicer thing in future releases.
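
In other words, the workaround is to keep the outer annotation and drop the inner one, something like this:

```gren
filter : (a -> Bool) -> List a -> List a
filter isOkay list =
  let
    keepIfOkay x =
      if isOkay x then Just x else Nothing
  in
  List.filterMap keepIfOkay list
```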
57 | 


--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
  1 | // This file gives you programmatic access to the Gren compiler from JavaScript.
  2 | 
  3 | const fs = require("fs/promises");
  4 | const childProcess = require("child_process");
  5 | const util = require("util");
  6 | 
  7 | const execFile = util.promisify(childProcess.execFile);
  8 | 
  9 | const compilerPath = require.resolve("./cli.js");
 10 | 
 11 | /* Execute an arbitrary command on the Gren compiler.
 12 |  *
 13 |  * `path` should be set to the project directory where you wish to execute this command.
 14 |  * `args` is an array of arguments passed to the gren compiler.
 15 |  * `options` allow you to set environment variables and a timeout (milliseconds).
 16 |  */
 17 | async function execute(path, args, options) {
 18 |   return await execFile(process.argv[0], [compilerPath].concat(args), {
 19 |     cwd: path,
 20 |     env: options.env || {},
 21 |     timeout: options.timeout || 30_000,
 22 |     shell: true,
 23 |   });
 24 | }
 25 | 
 26 | /* Get the version of the Gren compiler
 27 |  */
 28 | async function version() {
 29 |   const stdout = await handleFailableExecution(
 30 |     process.cwd(),
 31 |     ["--version"],
 32 |     {},
 33 |   );
 34 |   return stdout.trim();
 35 | }
 36 | 
 37 | /* Install the dependencies of a Gren project.
 38 |  *
 39 |  * This executes `gren package install`
 40 |  *
 41 |  * `path` should be set to the project directory where you wish to execute this command.
 42 |  * `options` allow you to set environment variables and a timeout (milliseconds).
 43 |  */
 44 | async function installDependencies(path, options) {
 45 |   await execute(path, ["package", "install"], options || {});
 46 |   return true;
 47 | }
 48 | 
 49 | /* Compile a Gren project.
 50 |  *
 51 |  * This executes `gren make` and returns the compiled output, or throws an exception.
 52 |  * If you're compiling an application, pass the relative path of the entrypoint as the `target` in the options object.
 53 |  *
 54 |  * `path` should be set to the project directory where you wish to execute this command.
 55 |  * `options` allow you to set environment variables and a timeout. See `execute` for more information.
 56 |  *
 57 |  * If `options` contains a sourcemaps property that is true, sourcemaps will be generated and inlined into the output.
 58 |  */
 59 | async function compileProject(path, options) {
 60 |   let args = ["make", "--output=/dev/stdout", "--report=json"];
 61 | 
 62 |   if (options.sourcemaps) {
 63 |     args.push("--sourcemaps");
 64 |   }
 65 | 
 66 |   if (options.target) {
 67 |     args.push(options.target);
 68 |   }
 69 | 
 70 |   return handleFailableExecution(path, args, options);
 71 | }
 72 | 
 73 | async function handleFailableExecution(path, args, options) {
 74 |   try {
 75 |     const res = await execute(path, args, options);
 76 |     return res.stdout;
 77 |   } catch (e) {
 78 |     let errorData;
 79 |     try {
 80 |       errorData = JSON.parse(e.stderr);
 81 |     } catch (parseErr) {
 82 |       // Didn't get error from compiler
 83 |       throw e;
 84 |     }
 85 | 
 86 |     const compileError = new Error(`Failed to compile project: ${path}`);
 87 |     for (let key in errorData) {
 88 |       compileError[key] = errorData[key];
 89 |     }
 90 | 
 91 |     throw compileError;
 92 |   }
 93 | }
 94 | 
 95 | /* Compile the documentation of a Gren project.
 96 |  *
 97 |  * This executes `gren docs` and returns the documentation object, or throws an exception.
 98 |  *
 99 |  * `path` should be set to the project directory where you wish to execute this command.
100 |  * `options` allow you to set environment variables and a timeout (milliseconds).
101 |  */
102 | async function compileDocs(path, options) {
103 |   const args = ["docs", "--output=/dev/stdout", "--report=json"];
104 |   const docs = await handleFailableExecution(path, args, options || {});
105 | 
106 |   return JSON.parse(docs);
107 | }
108 | 
109 | /* Checks that a Gren project compiles, and (for packages) that the documentation builds.
110 |  *
111 |  * This executes `gren make` or `gren docs` (for packages), and returns true if the project compiles successfully.
112 |  * If you're compiling an application, pass the relative path of the entrypoint as the `target` in the options object.
113 |  *
114 |  * `path` should be set to the project directory where you wish to execute this command.
115 |  * `options` allow you to set environment variables and a timeout (milliseconds).
116 |  */
117 | async function validateProject(path, opts) {
118 |   let options = opts || {};
119 |   let args;
120 | 
121 |   if (options.target) {
122 |     args = ["make", "--output=/dev/null", "--report=json", options.target];
123 |   } else {
124 |     args = ["docs", "--output=/dev/null", "--report=json"];
125 |   }
126 | 
127 |   await handleFailableExecution(path, args, options);
128 | 
129 |   return true;
130 | }
131 | 
132 | module.exports = {
133 |   execute,
134 |   version,
135 |   installDependencies,
136 |   compileProject,
137 |   compileDocs,
138 |   validateProject,
139 | };
140 | 


--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "gren-lang",
 3 |   "version": "0.5.4",
 4 |   "description": "Compiler for the Gren programming language",
 5 |   "scripts": {
 6 |     "test": "echo \"Error: no test specified\" && exit 1",
 7 |     "prepublishOnly": "npx --package=gren-lang@0.4.5 --yes -- gren make src/Main.gren --output=/dev/stdout | terser -c -m -o compiler.js",
 8 |     "build": "npx --package=gren-lang@0.4.5 --yes -- gren make src/Main.gren --output=compiler.js",
 9 |     "prettier": "prettier -w \"!**/*.json\" .",
10 |     "prettier:check": "prettier -c \"!**/*.json\" ."
11 |   },
12 |   "repository": {
13 |     "type": "git",
14 |     "url": "git+https://github.com/gren-lang/compiler.git"
15 |   },
16 |   "files": [
17 |     "index.js",
18 |     "compiler.js",
19 |     "cli.js"
20 |   ],
21 |   "bin": {
22 |     "gren": "cli.js"
23 |   },
24 |   "keywords": [
25 |     "gren",
26 |     "lang",
27 |     "language"
28 |   ],
29 |   "author": "Robin Heggelund Hansen",
30 |   "license": "BSD-3-Clause",
31 |   "bugs": {
32 |     "url": "https://github.com/gren-lang/compiler/issues"
33 |   },
34 |   "homepage": "https://gren-lang.org",
35 |   "dependencies": {
36 |     "postject": "^1.0.0-alpha.6"
37 |   },
38 |   "devDependencies": {
39 |     "prettier": "^3.4.2",
40 |     "terser": "^5.38.1"
41 |   }
42 | }
43 | 


--------------------------------------------------------------------------------
/terminal/Docs.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | 
 3 | module Docs
 4 |   ( Flags (..),
 5 |     Output (..),
 6 |     run,
 7 |   )
 8 | where
 9 | 
10 | import BackgroundWriter qualified as BW
11 | import Build qualified
12 | import Data.ByteString.Builder qualified as B
13 | import Data.NonEmptyList qualified as NE
14 | import Directories qualified as Dirs
15 | import Gren.Details qualified as Details
16 | import Gren.Docs qualified as Docs
17 | import Gren.ModuleName qualified as ModuleName
18 | import Json.Encode qualified as Json
19 | import Reporting qualified
20 | import Reporting.Exit qualified as Exit
21 | import Reporting.Task qualified as Task
22 | import System.IO qualified as IO
23 | 
24 | -- FLAGS
25 | 
26 | data Flags = Flags
27 |   { _output :: Maybe Output,
28 |     _report :: Bool
29 |   }
30 | 
31 | data Output
32 |   = JSON FilePath
33 |   | DevNull
34 |   | DevStdOut
35 |   deriving (Show)
36 | 
37 | -- RUN
38 | 
39 | type Task a = Task.Task Exit.Docs a
40 | 
41 | run :: Flags -> IO ()
42 | run flags@(Flags _ report) =
43 |   do
44 |     style <- getStyle report
45 |     maybeRoot <- Dirs.findRoot
46 |     Reporting.attemptWithStyle style Exit.docsToReport $
47 |       case maybeRoot of
48 |         Just root -> runHelp root style flags
49 |         Nothing -> return $ Left Exit.DocsNoOutline
50 | 
51 | runHelp :: FilePath -> Reporting.Style -> Flags -> IO (Either Exit.Docs ())
52 | runHelp root style (Flags maybeOutput _) =
53 |   BW.withScope $ \scope ->
54 |     Dirs.withRootLock root $
55 |       Task.run $
56 |         do
57 |           details <- Task.eio Exit.DocsBadDetails (Details.load style scope root)
58 |           exposed <- getExposed details
59 |           case maybeOutput of
60 |             Just DevNull ->
61 |               do
62 |                 buildExposed style root details Build.IgnoreDocs exposed
63 |                 return ()
64 |             Just DevStdOut ->
65 |               do
66 |                 docs <- buildExposed Reporting.silent root details Build.KeepDocs exposed
67 |                 let builder = Json.encodeUgly $ Docs.encode docs
68 |                 Task.io $ B.hPutBuilder IO.stdout builder
69 |             Nothing ->
70 |               buildExposed style root details (Build.WriteDocs "docs.json") exposed
71 |             Just (JSON target) ->
72 |               buildExposed style root details (Build.WriteDocs target) exposed
73 | 
74 | -- GET INFORMATION
75 | 
76 | getStyle :: Bool -> IO Reporting.Style
77 | getStyle report =
78 |   if report then return Reporting.json else Reporting.terminal
79 | 
80 | getExposed :: Details.Details -> Task (NE.List ModuleName.Raw)
81 | getExposed (Details.Details _ validOutline _ _ _ _) =
82 |   case validOutline of
83 |     Details.ValidApp _ _ ->
84 |       Task.throw Exit.DocsApplication
85 |     Details.ValidPkg _ _ exposed ->
86 |       case exposed of
87 |         [] -> Task.throw Exit.DocsNoExposed
88 |         m : ms -> return (NE.List m ms)
89 | 
90 | -- BUILD PROJECTS
91 | 
92 | buildExposed :: Reporting.Style -> FilePath -> Details.Details -> Build.DocsGoal a -> NE.List ModuleName.Raw -> Task a
93 | buildExposed style root details docsGoal exposed =
94 |   Task.eio Exit.DocsBadBuild $
95 |     Build.fromExposed style root details docsGoal exposed
96 | 


--------------------------------------------------------------------------------
/terminal/Init.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE OverloadedStrings #-}
  2 | 
  3 | module Init
  4 |   ( Flags (..),
  5 |     run,
  6 |   )
  7 | where
  8 | 
  9 | import Data.Map qualified as Map
 10 | import Data.NonEmptyList qualified as NE
 11 | import Deps.Package qualified as DPkg
 12 | import Deps.Solver qualified as Solver
 13 | import Directories qualified as Dirs
 14 | import Gren.Constraint qualified as Con
 15 | import Gren.Licenses qualified as Licenses
 16 | import Gren.Outline qualified as Outline
 17 | import Gren.Package qualified as Pkg
 18 | import Gren.Platform qualified as Platform
 19 | import Gren.PossibleFilePath (PossibleFilePath)
 20 | import Gren.PossibleFilePath qualified as PossibleFilePath
 21 | import Gren.Version qualified as V
 22 | import Reporting qualified
 23 | import Reporting.Doc qualified as D
 24 | import Reporting.Exit qualified as Exit
 25 | import System.Directory qualified as Dir
 26 | import Prelude hiding (init)
 27 | 
 28 | data Flags = Flags
 29 |   { _skipPrompts :: Bool,
 30 |     _isPackage :: Bool,
 31 |     _platform :: Platform.Platform
 32 |   }
 33 | 
 34 | -- RUN
 35 | 
 36 | run :: Flags -> IO ()
 37 | run flags =
 38 |   Reporting.attempt Exit.initToReport $
 39 |     do
 40 |       exists <- Dir.doesFileExist "gren.json"
 41 |       if exists
 42 |         then return (Left Exit.InitAlreadyExists)
 43 |         else do
 44 |           approved <- Reporting.ask (_skipPrompts flags) question
 45 |           if approved
 46 |             then init flags
 47 |             else do
 48 |               putStrLn "Okay, I did not make any changes!"
 49 |               return (Right ())
 50 | 
 51 | question :: D.Doc
 52 | question =
 53 |   D.stack
 54 |     [ D.fillSep
 55 |         [ "Hello!",
 56 |           "Gren",
 57 |           "projects",
 58 |           "always",
 59 |           "start",
 60 |           "with",
 61 |           "an",
 62 |           D.green "gren.json",
 63 |           "file.",
 64 |           "I",
 65 |           "can",
 66 |           "create",
 67 |           "them!"
 68 |         ],
 69 |       "Would you like me to create a gren.json file now? [Y/n]: "
 70 |     ]
 71 | 
 72 | -- INIT
 73 | 
 74 | init :: Flags -> IO (Either Exit.Init ())
 75 | init flags =
 76 |   do
 77 |     let platform = _platform flags
 78 |     let initialDeps = suggestDependencies platform
 79 |     (Solver.Env cache) <- Solver.initEnv
 80 |     potentialDeps <-
 81 |       Dirs.withRegistryLock cache $
 82 |         DPkg.latestCompatibleVersionForPackages cache initialDeps
 83 |     case potentialDeps of
 84 |       Left DPkg.NoCompatiblePackage ->
 85 |         return $ Left $ Exit.InitNoCompatibleDependencies Nothing
 86 |       Left (DPkg.GitError gitError) ->
 87 |         return $ Left $ Exit.InitNoCompatibleDependencies $ Just gitError
 88 |       Right resolvedDeps -> do
 89 |         let deps = Map.map PossibleFilePath.Other resolvedDeps
 90 |         result <- Solver.verify Reporting.ignorer cache platform deps
 91 |         case result of
 92 |           Solver.Err exit ->
 93 |             return (Left (Exit.InitSolverProblem exit))
 94 |           Solver.NoSolution ->
 95 |             return (Left (Exit.InitNoSolution initialDeps))
 96 |           Solver.Ok details ->
 97 |             let outline =
 98 |                   if _isPackage flags
 99 |                     then pkgOutline platform deps
100 |                     else appOutlineFromSolverDetails platform initialDeps details
101 |              in do
102 |                   Dir.createDirectoryIfMissing True "src"
103 |                   Outline.write "." outline
104 |                   putStrLn "Okay, I created it."
105 |                   return (Right ())
106 | 
107 | pkgOutline :: Platform.Platform -> Map.Map Pkg.Name (PossibleFilePath Con.Constraint) -> Outline.Outline
108 | pkgOutline platform deps =
109 |   Outline.Pkg $
110 |     Outline.PkgOutline
111 |       Pkg.dummyName
112 |       Outline.defaultSummary
113 |       Licenses.bsd3
114 |       V.one
115 |       (Outline.ExposedList [])
116 |       deps
117 |       Con.defaultGren
118 |       platform
119 | 
120 | appOutlineFromSolverDetails ::
121 |   Platform.Platform ->
122 |   [Pkg.Name] ->
123 |   Map.Map Pkg.Name Solver.Details ->
124 |   Outline.Outline
125 | appOutlineFromSolverDetails platform initialDeps details =
126 |   let solution = Map.map (\(Solver.Details vsn _ _) -> vsn) details
127 |       defaultDeps = Map.fromList $ map (\dep -> (dep, Con.exactly V.one)) initialDeps
128 |       directs = Map.intersection solution defaultDeps
129 |       indirects = Map.difference solution defaultDeps
130 |    in Outline.App $
131 |         Outline.AppOutline
132 |           V.compiler
133 |           platform
134 |           (NE.List (Outline.RelativeSrcDir "src") [])
135 |           (Map.map PossibleFilePath.Other directs)
136 |           (Map.map PossibleFilePath.Other indirects)
137 | 
138 | suggestDependencies :: Platform.Platform -> [Pkg.Name]
139 | suggestDependencies platform =
140 |   case platform of
141 |     Platform.Common -> [Pkg.core]
142 |     Platform.Browser -> [Pkg.core, Pkg.browser]
143 |     Platform.Node -> [Pkg.core, Pkg.node]
144 | 


--------------------------------------------------------------------------------
/tests/Generate/VLQSpec.hs:
--------------------------------------------------------------------------------
 1 | module Generate.VLQSpec (spec) where
 2 | 
 3 | import Generate.VLQ (encode)
 4 | import Test.Hspec (Spec, describe, it, shouldBe)
 5 | 
 6 | spec :: Spec
 7 | spec = do
 8 |   describe "VLQ tests" $ do
 9 |     it "Encodes from Int to String" $ do
10 |       encode 0 `shouldBe` "A"
11 |       encode 1 `shouldBe` "C"
12 |       encode (-1) `shouldBe` "D"
13 |       encode 3 `shouldBe` "G"
14 |       encode 123 `shouldBe` "2H"
15 |       encode 123456789 `shouldBe` "qxmvrH"
16 |       -- limits:
17 |       encode (-2147483648) `shouldBe` "B"
18 |       encode 2147483647 `shouldBe` "+/////D"
19 | 


--------------------------------------------------------------------------------
/tests/Helpers/Instances.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE StandaloneDeriving #-}
 2 | {-# OPTIONS_GHC -Wno-orphans #-}
 3 | 
 4 | module Helpers.Instances where
 5 | 
 6 | import Data.String (IsString (..))
 7 | import Data.Utf8 qualified as Utf8
 8 | import Reporting.Error.Syntax qualified as E
 9 | 
10 | deriving instance Eq E.Space
11 | 
12 | instance IsString (Utf8.Utf8 a) where
13 |   fromString = Utf8.fromChars
14 | 


--------------------------------------------------------------------------------
/tests/Helpers/Parse.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | 
 3 | module Helpers.Parse
 4 |   ( checkParse,
 5 |     checkSuccessfulParse,
 6 |     checkParseError,
 7 |   )
 8 | where
 9 | 
10 | import Data.ByteString qualified as BS
11 | import Parse.Primitives qualified as P
12 | import Parse.Space qualified as Space
13 | import Reporting.Annotation qualified as A
14 | import Test.Hspec qualified as Hspec
15 | 
16 | checkParse :: (Show error, Show target) => Space.Parser error target -> (P.Row -> P.Col -> error) -> (Either error (target, A.Position) -> Bool) -> BS.ByteString -> IO ()
17 | checkParse parser toBadEnd checkResult str =
18 |   Hspec.shouldSatisfy
19 |     (P.fromByteString parser toBadEnd str)
20 |     checkResult
21 | 
22 | checkSuccessfulParse :: (Show error, Show target) => Space.Parser error (A.Located target) -> (P.Row -> P.Col -> error) -> (target -> Bool) -> BS.ByteString -> IO ()
23 | checkSuccessfulParse parser toBadEnd checkTarget =
24 |   let checkResult result =
25 |         case result of
26 |           Right (A.At _ target, _) ->
27 |             checkTarget target
28 |           Left _ ->
29 |             False
30 |    in checkParse parser toBadEnd checkResult
31 | 
32 | checkParseError :: (Show error, Show target) => Space.Parser error target -> (P.Row -> P.Col -> error) -> (error -> Bool) -> BS.ByteString -> IO ()
33 | checkParseError parser toBadEnd checkError =
34 |   let checkResult result =
35 |         case result of
36 |           Left err ->
37 |             checkError err
38 |           Right _ ->
39 |             False
40 |    in checkParse parser toBadEnd checkResult
41 | 


--------------------------------------------------------------------------------
/tests/Parse/AliasSpec.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | 
 3 | module Parse.AliasSpec where
 4 | 
 5 | import AST.Source qualified as Src
 6 | import Data.ByteString qualified as BS
 7 | import Data.Name qualified as Name
 8 | import Helpers.Instances ()
 9 | import Parse.Module qualified as Module
10 | import Parse.Primitives qualified as P
11 | import Reporting.Error.Syntax qualified as Error.Syntax
12 | import Test.Hspec (Spec, describe, it, shouldSatisfy)
13 | 
14 | spec :: Spec
15 | spec = do
16 |   describe "Import alias" $ do
17 |     it "regression test" $
18 |       parse
19 |         "Module"
20 |         "import Some.Long.Module as Module\n"
21 | 
22 |     it "Aliases can have dots in them" $ do
23 |       parse
24 |         "My.Module"
25 |         "import Some.Long.Module as My.Module\n"
26 | 
27 | parse :: String -> BS.ByteString -> IO ()
28 | parse expectedAlias str =
29 |   let checkResult result =
30 |         case result of
31 |           Right (imp, _) ->
32 |             case Src._alias imp of
33 |               Just (alias, _) ->
34 |                 Name.toChars alias == expectedAlias
35 |               Nothing ->
36 |                 False
37 |           Left _ ->
38 |             False
39 |    in shouldSatisfy
40 |         (P.fromByteString Module.chompImport Error.Syntax.ModuleBadEnd str)
41 |         checkResult
42 | 


--------------------------------------------------------------------------------
/tests/Parse/DeclSpec.hs:
--------------------------------------------------------------------------------
 1 | module Parse.DeclSpec where
 2 | 
 3 | import Data.ByteString.UTF8 qualified as Utf8
 4 | import Helpers.Instances ()
 5 | import Parse.Declaration (declaration)
 6 | import Parse.Primitives qualified as P
 7 | import Test.Hspec (Spec, describe, it, shouldSatisfy)
 8 | 
 9 | data ParseError
10 |   = DeclError P.Row P.Col
11 |   | OtherError String P.Row P.Col
12 |   deriving (Show, Eq)
13 | 
14 | spec :: Spec
15 | spec = do
16 |   describe "Top Level Values" $ do
17 |     it "regression test" $
18 |       parse "test = 1"
19 | 
20 |     it "Value names can contain non-ascii characters" $ do
21 |       parse "vålue = 1"
22 | 
23 |     it "Value names can be only non-ascii characters" $ do
24 |       parse "æøå = 1"
25 | 
26 | parse :: String -> IO ()
27 | parse str =
28 |   P.fromByteString
29 |     (P.specialize (\_ row col -> DeclError row col) declaration)
30 |     (OtherError "fromByteString failed")
31 |     (Utf8.fromString str)
32 |     `shouldSatisfy` valid
33 | 
34 | valid :: Either x y -> Bool
35 | valid result =
36 |   case result of
37 |     Right _ -> True
38 |     Left _ -> False
39 | 


--------------------------------------------------------------------------------
/tests/Parse/MultilineStringSpec.hs:
--------------------------------------------------------------------------------
  1 | {-# LANGUAGE OverloadedStrings #-}
  2 | 
  3 | module Parse.MultilineStringSpec where
  4 | 
  5 | import AST.Source qualified as Src
  6 | import Data.ByteString qualified as BS
  7 | import Data.Utf8 qualified as Utf8
  8 | import Helpers.Instances ()
  9 | import Helpers.Parse qualified as Helpers
 10 | import Parse.Expression qualified as Expression
 11 | import Parse.Pattern qualified as Pattern
 12 | import Reporting.Error.Syntax (Expr (ExpressionBadEnd))
 13 | import Reporting.Error.Syntax qualified as Error.Syntax
 14 | import Test.Hspec (Spec, describe, it)
 15 | 
 16 | spec :: Spec
 17 | spec = do
 18 |   describe "Multiline String" $ do
 19 |     it "regression test" $
 20 |       parse
 21 |         "normal string"
 22 |         "\"\"\"\nnormal string\"\"\""
 23 | 
 24 |     it "crlf regression test" $ do
 25 |       parse
 26 |         "normal string"
 27 |         "\"\"\"\r\nnormal string\"\"\""
 28 | 
 29 |     it "no ending newline works" $ do
 30 |       parse
 31 |         "this is \\na test \\nfor newlines"
 32 |         "\"\"\"\nthis is \na test \nfor newlines\"\"\""
 33 | 
 34 |     it "crlfs work" $ do
 35 |       parse
 36 |         "this is\\na test"
 37 |         "\"\"\"\r\n   this is\r\n   a test\r\n\"\"\""
 38 | 
 39 |     it "mixing quotes work" $ do
 40 |       parse
 41 |         "string with \" in it"
 42 |         "\"\"\"\nstring with \" in it\"\"\""
 43 | 
 44 |     it "single quotes don't eat spaces" $ do
 45 |       parse
 46 |         "quote followed by spaces: \\'    "
 47 |         "\"\"\"\n  quote followed by spaces: \'    \"\"\""
 48 | 
 49 |     it "escapes don't eat spaces" $ do
 50 |       parse
 51 |         "quote followed by spaces: \\'    "
 52 |         "\"\"\"\n  quote followed by spaces: \\'    \"\"\""
 53 | 
 54 |     it "unicode escapes don't eat spaces" $ do
 55 |       parse
 56 |         "quote followed by spaces: \\u0020    "
 57 |         "\"\"\"\n  quote followed by spaces: \\u{0020}    \"\"\""
 58 | 
 59 |     it "first newline, and leading whitespace, is dropped" $ do
 60 |       parse
 61 |         "this is\\na test"
 62 |         "\"\"\"\n   this is\n   a test\n\"\"\""
 63 | 
 64 |     it "First proper line decides how many spaces to drop" $ do
 65 |       parse
 66 |         "this is\\n a test"
 67 |         "\"\"\"\n   this is\n    a test\n\"\"\""
 68 | 
 69 |     it "First proper line decides how many spaces to drop for crlf" $ do
 70 |       parse
 71 |         "this is\\n a test"
 72 |         "\"\"\"\r\n   this is\r\n    a test\r\n\"\"\""
 73 | 
 74 |     it "Works with differing lines" $ do
 75 |       parse
 76 |         "this is\\n a test"
 77 |         "\"\"\"\n   this is\r\n    a test\n\"\"\""
 78 | 
 79 |     it "Only leading spaces are dropped" $ do
 80 |       parse
 81 |         "this is\\na test"
 82 |         "\"\"\"\n   this is\n a test\n\"\"\""
 83 | 
 84 |     it "does not allow non-newline characters on the first line" $ do
 85 |       let isCorrectError ((Error.Syntax.String Error.Syntax.StringMultilineWithoutLeadingNewline _ _)) = True
 86 |           isCorrectError _ = False
 87 |       Helpers.checkParseError Expression.expression ExpressionBadEnd isCorrectError "\"\"\"this is not allowed\"\"\""
 88 | 
 89 |     it "does not allow CR without LF on the first line" $ do
 90 |       let isCorrectError ((Error.Syntax.String Error.Syntax.StringInvalidNewline _ _)) = True
 91 |           isCorrectError _ = False
 92 |       Helpers.checkParseError Expression.expression ExpressionBadEnd isCorrectError "\"\"\"\rthis is not allowed\"\"\""
 93 | 
 94 |     it "does not allow CR without LF on the other lines" $ do
 95 |       let isCorrectError ((Error.Syntax.String Error.Syntax.StringInvalidNewline _ _)) = True
 96 |           isCorrectError _ = False
 97 |       Helpers.checkParseError Expression.expression ExpressionBadEnd isCorrectError "\"\"\"\nthis\ris not allowed\"\"\""
 98 | 
 99 | parse :: String -> BS.ByteString -> IO ()
100 | parse expectedStr =
101 |   let isExpectedString :: Src.Pattern_ -> Bool
102 |       isExpectedString pattern =
103 |         case pattern of
104 |           Src.PStr str ->
105 |             expectedStr == Utf8.toChars str
106 |           _ ->
107 |             False
108 |    in Helpers.checkSuccessfulParse (fmap (\((pat, _), loc) -> (pat, loc)) Pattern.expression) Error.Syntax.PStart isExpectedString
109 | 


--------------------------------------------------------------------------------
/tests/Parse/RecordUpdateSpec.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | 
 3 | module Parse.RecordUpdateSpec where
 4 | 
 5 | import AST.Source qualified as Src
 6 | import Data.ByteString qualified as BS
 7 | import Helpers.Instances ()
 8 | import Parse.Expression (expression)
 9 | import Parse.Primitives qualified as P
10 | import Reporting.Annotation qualified as A
11 | import Test.Hspec
12 | 
13 | data ParseError
14 |   = ExprError P.Row P.Col
15 |   | OtherError String P.Row P.Col
16 |   deriving (Show, Eq)
17 | 
18 | spec :: Spec
19 | spec = do
20 |   describe "record update" $ do
21 |     it "regression test" $
22 |       parseRecordLiteral "{ field = 2 }"
23 | 
24 |     it "regression test with multiple fields" $
25 |       parseRecordLiteral "{ f1 = 1, f2 = 2, f3 = 3 }"
26 | 
27 |     it "basic case" $
28 |       parse "{ record | prop = 1 }"
29 | 
30 |     it "qualified var" $
31 |       parse "{ Module.record | prop = 1 }"
32 | 
33 |     it "nested var" $
34 |       parse "{ Module.record.nested | prop = 1 }"
35 | 
36 |     it "update literal record" $
37 |       parse "{ { prop = 2 } | prop = 1 }"
38 | 
39 |     it "parenthesized if statement" $
40 |       parse "{ (if 1 == 2 then { prop = 2 } else { prop = 3 }) | prop = 1 }"
41 | 
42 |     it "parenthesized if statement with || operator" $
43 |       parse "{ (if left || right then { prop = 2 } else { prop = 3 }) | prop = 1 }"
44 | 
45 | -- Assert that the input parses as a record update expression (Src.Update)
46 | 
47 | parse :: BS.ByteString -> IO ()
48 | parse str =
49 |   ( P.fromByteString
50 |       (P.specialize (\_ row col -> ExprError row col) expression)
51 |       (OtherError "fromByteString failed")
52 |       str
53 |   )
54 |     `shouldSatisfy` isUpdateExpr
55 | 
56 | isUpdateExpr :: Either x ((Src.Expr, [Src.Comment]), A.Position) -> Bool
57 | isUpdateExpr result =
58 |   case result of
59 |     Right ((A.At _ (Src.Update _ _ _), _), _) -> True
60 |     _ -> False
61 | 
62 | -- Assert that the input parses as a record literal (Src.Record)
63 | 
64 | parseRecordLiteral :: BS.ByteString -> IO ()
65 | parseRecordLiteral str =
66 |   ( P.fromByteString
67 |       (P.specialize (\_ row col -> ExprError row col) expression)
68 |       (OtherError "fromByteString failed")
69 |       str
70 |   )
71 |     `shouldSatisfy` isRecordLiteral
72 | 
73 | isRecordLiteral :: Either x ((Src.Expr, [Src.Comment]), A.Position) -> Bool
74 | isRecordLiteral result =
75 |   case result of
76 |     Right ((A.At _ (Src.Record _), _), _) -> True
77 |     _ -> False
78 | 


--------------------------------------------------------------------------------
/tests/Parse/SpaceSpec.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | 
 3 | module Parse.SpaceSpec where
 4 | 
 5 | import AST.Source (Comment, Comment_ (..))
 6 | import Data.ByteString qualified as BS
 7 | import Data.Word (Word16)
 8 | import Helpers.Instances ()
 9 | import Parse.Primitives qualified as P
10 | import Parse.Space qualified as Space
11 | import Reporting.Annotation qualified as A
12 | import Test.Hspec
13 | 
14 | data ParseError x
15 |   = SubjectError x P.Row P.Col
16 |   | OtherError String P.Row P.Col
17 |   deriving (Eq, Show)
18 | 
19 | spec :: Spec
20 | spec = do
21 |   describe "chomp" $ do
22 |     let parseChomp = parse (Space.chomp SubjectError)
23 |     let parseChomp3 p1 p2 = parse $ do
24 |           () <- p1
25 |           result <- Space.chomp SubjectError
26 |           () <- p2
27 |           return result
28 | 
29 |     it "parses spaces and newlines" $
30 |       parseChomp "  \n  " `shouldParseComments` Right []
31 | 
32 |     it "parses tokens before and after" $
33 |       parseChomp3 a b "a b" `shouldParseComments` Right []
34 | 
35 |     it "allows zero whitespace" $
36 |       parseChomp3 a b "ab" `shouldParseComments` Right []
37 | 
38 |     it "parses curly brace comments" $
39 |       parseChomp "{- 1 -}"
40 |         `shouldParseComments` Right [at 1 1 1 8 $ BlockComment " 1 "]
41 | 
42 |     it "can parse curly brace token adjacent to whitespace" $
43 |       parseChomp3 leftCurly leftCurly "{{- 1 -} {"
44 |         `shouldParseComments` Right [at 1 2 1 9 $ BlockComment " 1 "]
45 | 
46 |     it "can parse nested curly brace comments" $
47 |       parseChomp "{- {- inner -} outer -}"
48 |         `shouldParseComments` Right [at 1 1 1 24 $ BlockComment " {- inner -} outer "]
49 | 
50 |     it "parses hyphen comments" $
51 |       parseChomp "-- 1\n" `shouldParseComments` Right [at 1 1 1 5 $ LineComment " 1"]
52 | 
53 |     it "parses hyphen comments at end of file" $
54 |       parseChomp "-- 1" `shouldParseComments` Right [at 1 1 1 5 $ LineComment " 1"]
55 | 
56 |     it "can parse hyphen adjacent to whitespace" $
57 |       parseChomp3 hyphen hyphen "- -- 1\n-" `shouldParseComments` Right [at 1 3 1 7 $ LineComment " 1"]
58 | 
59 |     it "can parse nested hyphen comments" $
60 |       parseChomp "-- outer -- inner" `shouldParseComments` Right [at 1 1 1 18 $ LineComment " outer -- inner"]
61 | 
62 |     it "returns comments in the correct order" $
63 |       parseChomp "{- 1 -}{- 2 -}  -- 3\n{- 4 -}\n{- 5 -}"
64 |         `shouldParseComments` Right
65 |           [ at 1 1 1 8 $ BlockComment " 1 ",
66 |             at 1 8 1 15 $ BlockComment " 2 ",
67 |             at 1 17 1 21 $ LineComment " 3",
68 |             at 2 1 2 8 $ BlockComment " 4 ",
69 |             at 3 1 3 8 $ BlockComment " 5 "
70 |           ]
71 | 
72 | parse :: P.Parser (ParseError x) a -> BS.ByteString -> Either (ParseError x) a
73 | parse parser =
74 |   P.fromByteString parser (OtherError "fromByteString failed")
75 | 
76 | shouldParseComments :: (Eq x, Show x) => Either x [Comment] -> Either x [Comment] -> IO ()
77 | shouldParseComments actual expected =
78 |   fmap (fmap locatedToTuple) actual
79 |     `shouldBe` fmap (fmap locatedToTuple) expected
80 |   where
81 |     locatedToTuple (A.At (A.Region start end) comment) =
82 |       ((start, end), comment)
83 | 
84 | a :: P.Parser (ParseError x) ()
85 | a = P.word1 0x61 {- a -} (OtherError "Expected 'a'")
86 | 
87 | b :: P.Parser (ParseError x) ()
88 | b = P.word1 0x62 {- b -} (OtherError "Expected 'b'")
89 | 
90 | leftCurly :: P.Parser (ParseError x) ()
91 | leftCurly = P.word1 0x7B {- { -} (OtherError "Expected '{'")
92 | 
93 | hyphen :: P.Parser (ParseError x) ()
94 | hyphen = P.word1 0x2D {- - -} (OtherError "Expected '-'")
95 | 
96 | at :: Word16 -> Word16 -> Word16 -> Word16 -> a -> A.Located a
97 | at startRow startCol endRow endCol =
98 |   A.At (A.Region (A.Position startRow startCol) (A.Position endRow endCol))
99 | 


--------------------------------------------------------------------------------
/tests/Parse/UnderscorePatternSpec.hs:
--------------------------------------------------------------------------------
 1 | {-# LANGUAGE OverloadedStrings #-}
 2 | 
 3 | module Parse.UnderscorePatternSpec where
 4 | 
 5 | import AST.Source qualified as Src
 6 | import Data.ByteString qualified as BS
 7 | import Data.Name qualified as Name
 8 | import Helpers.Instances ()
 9 | import Helpers.Parse qualified as Helpers
10 | import Parse.Expression qualified as Expression
11 | import Parse.Pattern qualified as Pattern
12 | import Reporting.Error.Syntax qualified as Error.Syntax
13 | import Test.Hspec (Spec, describe, it)
14 | 
15 | spec :: Spec
16 | spec = do
17 |   describe "Wildcard patterns" $ do
18 |     it "regression test" $
19 |       parse "" "_"
20 |     it "Newly allowed named wildcard pattern" $
21 |       parse "argument" "_argument"
22 |     it "You can have underscores as part of the lower variable which follows the underscore" $
23 |       parse "hello_world" "_hello_world"
24 |     it "Keywords are not allowed as the whole variable part of an underscore pattern" $
25 |       failToParse "_let"
26 |     it "But you can have a keyword as **part** of a variable name just as for normal variable names." $
27 |       parse "let_down" "_let_down"
28 |     it "But you cannot start with multiple underscores" $
29 |       failToParse "__hello"
30 |     it "But it must be a lower name, for an underscore pattern" $
31 |       failToParse "_Hello"
32 |     it "We should give the specialised error when we attempt to parse _key as an expression" $
33 |       let isWildCardAttemptError :: Error.Syntax.Expr -> Bool
34 |           isWildCardAttemptError err =
35 |             case err of
36 |               Error.Syntax.WildCard (Error.Syntax.WildCardAttempt _) _ _ ->
37 |                 True
38 |               _ ->
39 |                 False
40 |        in Helpers.checkParseError Expression.expression Error.Syntax.Start isWildCardAttemptError "_key"
41 | 
42 | parse :: String -> BS.ByteString -> IO ()
43 | parse expectedName =
44 |   let isWildCardPattern :: Src.Pattern_ -> Bool
45 |       isWildCardPattern pattern =
46 |         case pattern of
47 |           Src.PAnything name ->
48 |             expectedName == (Name.toChars name)
49 |           _ ->
50 |             False
51 |    in Helpers.checkSuccessfulParse (fmap (\((pat, _), loc) -> (pat, loc)) Pattern.expression) Error.Syntax.PStart isWildCardPattern
52 | 
53 | failToParse :: BS.ByteString -> IO ()
54 | failToParse =
55 |   Helpers.checkParseError Pattern.expression Error.Syntax.PStart (\_ -> True)
56 | 


--------------------------------------------------------------------------------
/tests/Spec.hs:
--------------------------------------------------------------------------------
1 | {-# OPTIONS_GHC -F -pgmF hspec-discover #-}
2 | 


--------------------------------------------------------------------------------