├── .gitignore
├── .gitmodules
├── Dockerfile
├── NOTES.md
├── README.md
├── frontend
├── .gitignore
├── elm.json
└── src
│ ├── ElmHelpers.elm
│ ├── Generated
│ ├── Decoders.elm
│ └── Types.elm
│ ├── HsCore
│ ├── Helpers.elm
│ └── Trafo
│ │ ├── Diff.elm
│ │ ├── EraseTypes.elm
│ │ ├── Reconstruct.elm
│ │ └── VarOccs.elm
│ ├── HtmlHelpers.elm
│ ├── Loading.elm
│ ├── Main.elm
│ ├── Pages
│ ├── Code.elm
│ └── Overview.elm
│ ├── Ports.elm
│ ├── Ppr.elm
│ ├── PprGHC.elm
│ ├── PprRender.elm
│ ├── Reader.elm
│ ├── Types.elm
│ ├── UI
│ ├── FileDropper.elm
│ └── Slider.elm
│ ├── default-dark.min.css
│ ├── index_html_for_elm.html
│ ├── pygments.css
│ └── style.css
├── generate-elm
├── .gitignore
├── CHANGELOG.md
├── app
│ └── Main.hs
├── cabal.project
└── generate-elm.cabal
├── make_release.sh
├── plugin
├── cabal.project
├── hs-comprehension-plugin.cabal
├── lib
│ └── HsComprehension
│ │ ├── Ast.hs
│ │ ├── Cvt.hs
│ │ ├── DefAnalysis.hs
│ │ ├── ElmDeriving.hs
│ │ ├── ElmDerivingUtils.hs
│ │ ├── Hash.hs
│ │ ├── Plugin.hs
│ │ └── Uniqify.hs
└── src
│ └── Zip.hs
├── test-project
├── .gitignore
├── CHANGELOG.md
├── README.md
├── app
│ ├── Channable.hs
│ ├── Extra
│ │ └── Hidden.hs
│ ├── Factorial.hs
│ ├── Fusion.hs
│ ├── HalfMatch.hs
│ ├── InspectionTests.hs
│ ├── LargeRecords.hs
│ ├── Main.hs
│ ├── Palin.hs
│ ├── Peano.hs
│ ├── Quicksort.hs
│ ├── Records.hs
│ ├── Streaming.hs
│ ├── Tail.hs
│ ├── Text.hs
│ ├── TextExt.hs
│ ├── Tree.hs
│ ├── Typeclass.hs
│ └── Unlines.hs
├── cabal.project
├── dumps
│ └── coredump-Baseline
│ │ ├── Channable.hs
│ │ ├── Channable.zstd
│ │ ├── Factorial.hs
│ │ ├── Factorial.zstd
│ │ ├── Main.hs
│ │ ├── Main.zstd
│ │ └── capture.zstd
├── output.txt
├── stream-fusion
│ ├── Control
│ │ └── Monad
│ │ │ └── Stream.hs
│ ├── Data
│ │ ├── List
│ │ │ └── Stream.hs
│ │ └── Stream.hs
│ ├── GHC_BUILD
│ ├── LICENSE
│ ├── README
│ ├── Setup.lhs
│ ├── TODO
│ ├── desugar
│ │ ├── Desugar.hs
│ │ └── Examples.hs
│ ├── setup-base.sh
│ ├── stream-fusion.cabal
│ └── tests
│ │ ├── Bench
│ │ ├── Fusion.hs
│ │ ├── ListVsBase.hs
│ │ ├── StreamList.hs
│ │ ├── StreamVsList.hs
│ │ ├── Utils.hs
│ │ └── data
│ │ ├── Examples
│ │ ├── ConcatMap.hs
│ │ ├── Enum.hs
│ │ ├── Sum.hs
│ │ └── SumReplicate.hs
│ │ ├── FuseTest.hs
│ │ ├── Makefile
│ │ ├── Properties
│ │ ├── Internal.hs
│ │ ├── ListVsBase.hs
│ │ ├── ListVsSpec.hs
│ │ ├── Monomorphic
│ │ │ ├── Base.hs
│ │ │ ├── List.hs
│ │ │ ├── Spec.hs
│ │ │ ├── SpecStream.hs
│ │ │ ├── Stream.hs
│ │ │ └── StreamList.hs
│ │ ├── StreamListVsBase.hs
│ │ ├── StreamListVsSpec.hs
│ │ ├── StreamVsSpecStream.hs
│ │ └── Utils.hs
│ │ ├── Spec
│ │ ├── List.hs
│ │ ├── ListExts.hs
│ │ └── PreludeList.hs
│ │ ├── Strictness
│ │ ├── BaseVsSpec.hs
│ │ ├── ListVsBase.hs
│ │ ├── ListVsSpec.hs
│ │ ├── Monomorphic
│ │ │ ├── Base.hs
│ │ │ ├── List.hs
│ │ │ ├── Spec.hs
│ │ │ └── StreamList.hs
│ │ ├── StreamListVsList.hs
│ │ └── Utils.hs
│ │ └── Test
│ │ └── SmallCheck
│ │ └── Partial.hs
├── test-project.cabal
├── text-1.2.3.2
│ ├── .gitignore
│ ├── .hgignore
│ ├── .hgtags
│ ├── .travis.yml.disabled
│ ├── Data
│ │ ├── Text.hs
│ │ └── Text
│ │ │ ├── Array.hs
│ │ │ ├── Encoding.hs
│ │ │ ├── Encoding
│ │ │ └── Error.hs
│ │ │ ├── Foreign.hs
│ │ │ ├── IO.hs
│ │ │ ├── Internal.hs
│ │ │ ├── Internal
│ │ │ ├── Builder.hs
│ │ │ ├── Builder
│ │ │ │ ├── Functions.hs
│ │ │ │ ├── Int
│ │ │ │ │ └── Digits.hs
│ │ │ │ └── RealFloat
│ │ │ │ │ └── Functions.hs
│ │ │ ├── Encoding
│ │ │ │ ├── Fusion.hs
│ │ │ │ ├── Fusion
│ │ │ │ │ └── Common.hs
│ │ │ │ ├── Utf16.hs
│ │ │ │ ├── Utf32.hs
│ │ │ │ └── Utf8.hs
│ │ │ ├── Functions.hs
│ │ │ ├── Fusion.hs
│ │ │ ├── Fusion
│ │ │ │ ├── CaseMapping.hs
│ │ │ │ ├── Common.hs
│ │ │ │ ├── Size.hs
│ │ │ │ └── Types.hs
│ │ │ ├── IO.hs
│ │ │ ├── Lazy.hs
│ │ │ ├── Lazy
│ │ │ │ ├── Encoding
│ │ │ │ │ └── Fusion.hs
│ │ │ │ ├── Fusion.hs
│ │ │ │ └── Search.hs
│ │ │ ├── Private.hs
│ │ │ ├── Read.hs
│ │ │ ├── Search.hs
│ │ │ ├── Unsafe.hs
│ │ │ └── Unsafe
│ │ │ │ ├── Char.hs
│ │ │ │ └── Shift.hs
│ │ │ ├── Lazy.hs
│ │ │ ├── Lazy
│ │ │ ├── Builder.hs
│ │ │ ├── Builder
│ │ │ │ ├── Int.hs
│ │ │ │ └── RealFloat.hs
│ │ │ ├── Encoding.hs
│ │ │ ├── IO.hs
│ │ │ ├── Internal.hs
│ │ │ └── Read.hs
│ │ │ ├── Read.hs
│ │ │ ├── Show.hs
│ │ │ └── Unsafe.hs
│ ├── LICENSE
│ ├── README.markdown
│ ├── Setup.lhs
│ ├── benchmarks
│ │ ├── .gitignore
│ │ ├── Setup.hs
│ │ ├── cabal.project
│ │ ├── cbits
│ │ │ └── time_iconv.c
│ │ ├── haskell
│ │ │ ├── Benchmarks.hs
│ │ │ ├── Benchmarks
│ │ │ │ ├── Builder.hs
│ │ │ │ ├── Concat.hs
│ │ │ │ ├── DecodeUtf8.hs
│ │ │ │ ├── EncodeUtf8.hs
│ │ │ │ ├── Equality.hs
│ │ │ │ ├── FileRead.hs
│ │ │ │ ├── FoldLines.hs
│ │ │ │ ├── Mul.hs
│ │ │ │ ├── Programs
│ │ │ │ │ ├── BigTable.hs
│ │ │ │ │ ├── Cut.hs
│ │ │ │ │ ├── Fold.hs
│ │ │ │ │ ├── Sort.hs
│ │ │ │ │ ├── StripTags.hs
│ │ │ │ │ └── Throughput.hs
│ │ │ │ ├── Pure.hs
│ │ │ │ ├── ReadNumbers.hs
│ │ │ │ ├── Replace.hs
│ │ │ │ ├── Search.hs
│ │ │ │ ├── Stream.hs
│ │ │ │ └── WordFrequencies.hs
│ │ │ ├── Multilang.hs
│ │ │ └── Timer.hs
│ │ ├── python
│ │ │ ├── .gitignore
│ │ │ ├── cut.py
│ │ │ ├── multilang.py
│ │ │ ├── sort.py
│ │ │ ├── strip_tags.py
│ │ │ └── utils.py
│ │ ├── ruby
│ │ │ ├── cut.rb
│ │ │ ├── fold.rb
│ │ │ ├── sort.rb
│ │ │ ├── strip_tags.rb
│ │ │ └── utils.rb
│ │ └── text-benchmarks.cabal
│ ├── cabal.project
│ ├── cbits
│ │ └── cbits.c
│ ├── changelog.md
│ ├── include
│ │ └── text_cbits.h
│ ├── scripts
│ │ ├── ApiCompare.hs
│ │ ├── Arsec.hs
│ │ ├── CaseFolding.hs
│ │ ├── CaseMapping.hs
│ │ └── SpecialCasing.hs
│ ├── tests-and-benchmarks.markdown
│ ├── tests
│ │ ├── .ghci
│ │ ├── LiteralRuleTest.hs
│ │ ├── Makefile
│ │ ├── Tests.hs
│ │ ├── Tests
│ │ │ ├── IO.hs
│ │ │ ├── Properties.hs
│ │ │ ├── Properties
│ │ │ │ └── Mul.hs
│ │ │ ├── QuickCheckUtils.hs
│ │ │ ├── Regressions.hs
│ │ │ ├── SlowFunctions.hs
│ │ │ └── Utils.hs
│ │ ├── cabal.config
│ │ ├── literal-rule-test.sh
│ │ ├── scripts
│ │ │ └── cover-stdio.sh
│ │ └── text-tests.cabal
│ └── text.cabal
├── text-1.2.4.0
│ ├── .gitignore
│ ├── .hgignore
│ ├── .hgtags
│ ├── .travis.yml
│ ├── Data
│ │ ├── Text.hs
│ │ └── Text
│ │ │ ├── Array.hs
│ │ │ ├── Encoding.hs
│ │ │ ├── Encoding
│ │ │ └── Error.hs
│ │ │ ├── Foreign.hs
│ │ │ ├── IO.hs
│ │ │ ├── Internal.hs
│ │ │ ├── Internal
│ │ │ ├── Builder.hs
│ │ │ ├── Builder
│ │ │ │ ├── Functions.hs
│ │ │ │ ├── Int
│ │ │ │ │ └── Digits.hs
│ │ │ │ └── RealFloat
│ │ │ │ │ └── Functions.hs
│ │ │ ├── Encoding
│ │ │ │ ├── Fusion.hs
│ │ │ │ ├── Fusion
│ │ │ │ │ └── Common.hs
│ │ │ │ ├── Utf16.hs
│ │ │ │ ├── Utf32.hs
│ │ │ │ └── Utf8.hs
│ │ │ ├── Functions.hs
│ │ │ ├── Fusion.hs
│ │ │ ├── Fusion
│ │ │ │ ├── CaseMapping.hs
│ │ │ │ ├── Common.hs
│ │ │ │ ├── Size.hs
│ │ │ │ └── Types.hs
│ │ │ ├── IO.hs
│ │ │ ├── Lazy.hs
│ │ │ ├── Lazy
│ │ │ │ ├── Encoding
│ │ │ │ │ └── Fusion.hs
│ │ │ │ ├── Fusion.hs
│ │ │ │ └── Search.hs
│ │ │ ├── Private.hs
│ │ │ ├── Read.hs
│ │ │ ├── Search.hs
│ │ │ ├── Unsafe.hs
│ │ │ └── Unsafe
│ │ │ │ ├── Char.hs
│ │ │ │ └── Shift.hs
│ │ │ ├── Lazy.hs
│ │ │ ├── Lazy
│ │ │ ├── Builder.hs
│ │ │ ├── Builder
│ │ │ │ ├── Int.hs
│ │ │ │ └── RealFloat.hs
│ │ │ ├── Encoding.hs
│ │ │ ├── IO.hs
│ │ │ ├── Internal.hs
│ │ │ └── Read.hs
│ │ │ ├── Read.hs
│ │ │ ├── Show.hs
│ │ │ └── Unsafe.hs
│ ├── LICENSE
│ ├── README.markdown
│ ├── Setup.lhs
│ ├── benchmarks
│ │ ├── .gitignore
│ │ ├── Setup.hs
│ │ ├── cabal.project
│ │ ├── cbits
│ │ │ └── time_iconv.c
│ │ ├── haskell
│ │ │ ├── Benchmarks.hs
│ │ │ ├── Benchmarks
│ │ │ │ ├── Builder.hs
│ │ │ │ ├── Concat.hs
│ │ │ │ ├── DecodeUtf8.hs
│ │ │ │ ├── EncodeUtf8.hs
│ │ │ │ ├── Equality.hs
│ │ │ │ ├── FileRead.hs
│ │ │ │ ├── FoldLines.hs
│ │ │ │ ├── Mul.hs
│ │ │ │ ├── Programs
│ │ │ │ │ ├── BigTable.hs
│ │ │ │ │ ├── Cut.hs
│ │ │ │ │ ├── Fold.hs
│ │ │ │ │ ├── Sort.hs
│ │ │ │ │ ├── StripTags.hs
│ │ │ │ │ └── Throughput.hs
│ │ │ │ ├── Pure.hs
│ │ │ │ ├── ReadNumbers.hs
│ │ │ │ ├── Replace.hs
│ │ │ │ ├── Search.hs
│ │ │ │ ├── Stream.hs
│ │ │ │ └── WordFrequencies.hs
│ │ │ ├── Multilang.hs
│ │ │ └── Timer.hs
│ │ ├── python
│ │ │ ├── .gitignore
│ │ │ ├── cut.py
│ │ │ ├── multilang.py
│ │ │ ├── sort.py
│ │ │ ├── strip_tags.py
│ │ │ └── utils.py
│ │ ├── ruby
│ │ │ ├── cut.rb
│ │ │ ├── fold.rb
│ │ │ ├── sort.rb
│ │ │ ├── strip_tags.rb
│ │ │ └── utils.rb
│ │ └── text-benchmarks.cabal
│ ├── cabal.project
│ ├── cbits
│ │ └── cbits.c
│ ├── changelog.md
│ ├── include
│ │ └── text_cbits.h
│ ├── scripts
│ │ ├── ApiCompare.hs
│ │ ├── Arsec.hs
│ │ ├── CaseFolding.hs
│ │ ├── CaseMapping.hs
│ │ └── SpecialCasing.hs
│ ├── tests-and-benchmarks.markdown
│ ├── tests
│ │ ├── .ghci
│ │ ├── LiteralRuleTest.hs
│ │ ├── Makefile
│ │ ├── Tests.hs
│ │ ├── Tests
│ │ │ ├── IO.hs
│ │ │ ├── Properties.hs
│ │ │ ├── Properties
│ │ │ │ └── Mul.hs
│ │ │ ├── QuickCheckUtils.hs
│ │ │ ├── Regressions.hs
│ │ │ ├── SlowFunctions.hs
│ │ │ └── Utils.hs
│ │ ├── cabal.config
│ │ ├── literal-rule-test.sh
│ │ ├── scripts
│ │ │ └── cover-stdio.sh
│ │ └── text-tests.cabal
│ ├── text.cabal
│ └── th-tests
│ │ ├── LICENSE
│ │ ├── tests
│ │ ├── Lift.hs
│ │ └── th-tests.hs
│ │ └── th-tests.cabal
└── text-nofusion
│ ├── .cirrus.yml
│ ├── .github
│ ├── dependabot.yml
│ └── workflows
│ │ ├── haskell-ci.yml
│ │ └── windows_and_macOS.yml
│ ├── .gitignore
│ ├── LICENSE
│ ├── README.markdown
│ ├── Setup.lhs
│ ├── benchmarks
│ ├── AUTHORS
│ └── haskell
│ │ ├── Benchmarks.hs
│ │ └── Benchmarks
│ │ ├── Builder.hs
│ │ ├── Concat.hs
│ │ ├── DecodeUtf8.hs
│ │ ├── EncodeUtf8.hs
│ │ ├── Equality.hs
│ │ ├── FileRead.hs
│ │ ├── FoldLines.hs
│ │ ├── Mul.hs
│ │ ├── Multilang.hs
│ │ ├── Programs
│ │ ├── BigTable.hs
│ │ ├── Cut.hs
│ │ ├── Fold.hs
│ │ ├── Sort.hs
│ │ ├── StripTags.hs
│ │ └── Throughput.hs
│ │ ├── Pure.hs
│ │ ├── ReadNumbers.hs
│ │ ├── Replace.hs
│ │ ├── Search.hs
│ │ ├── Stream.hs
│ │ └── WordFrequencies.hs
│ ├── cabal.haskell-ci
│ ├── cabal.project
│ ├── cbits
│ └── cbits.c
│ ├── changelog.md
│ ├── include
│ └── text_cbits.h
│ ├── scripts
│ ├── ApiCompare.hs
│ ├── Arsec.hs
│ ├── CaseFolding.hs
│ ├── CaseMapping.hs
│ ├── SpecialCasing.hs
│ └── tests.sh
│ ├── src
│ └── Data
│ │ ├── Text.hs
│ │ └── Text
│ │ ├── Array.hs
│ │ ├── Encoding.hs
│ │ ├── Encoding
│ │ └── Error.hs
│ │ ├── Foreign.hs
│ │ ├── IO.hs
│ │ ├── Internal.hs
│ │ ├── Internal
│ │ ├── Builder.hs
│ │ ├── Builder
│ │ │ ├── Functions.hs
│ │ │ ├── Int
│ │ │ │ └── Digits.hs
│ │ │ └── RealFloat
│ │ │ │ └── Functions.hs
│ │ ├── ByteStringCompat.hs
│ │ ├── Encoding
│ │ │ ├── Fusion.hs
│ │ │ ├── Fusion
│ │ │ │ └── Common.hs
│ │ │ ├── Utf16.hs
│ │ │ ├── Utf32.hs
│ │ │ └── Utf8.hs
│ │ ├── Functions.hs
│ │ ├── Fusion.hs
│ │ ├── Fusion
│ │ │ ├── CaseMapping.hs
│ │ │ ├── Common.hs
│ │ │ ├── Size.hs
│ │ │ └── Types.hs
│ │ ├── IO.hs
│ │ ├── Lazy.hs
│ │ ├── Lazy
│ │ │ ├── Encoding
│ │ │ │ └── Fusion.hs
│ │ │ ├── Fusion.hs
│ │ │ └── Search.hs
│ │ ├── PrimCompat.hs
│ │ ├── Private.hs
│ │ ├── Read.hs
│ │ ├── Search.hs
│ │ ├── Unsafe.hs
│ │ └── Unsafe
│ │ │ └── Char.hs
│ │ ├── Lazy.hs
│ │ ├── Lazy
│ │ ├── Builder.hs
│ │ ├── Builder
│ │ │ ├── Int.hs
│ │ │ └── RealFloat.hs
│ │ ├── Encoding.hs
│ │ ├── IO.hs
│ │ ├── Internal.hs
│ │ └── Read.hs
│ │ ├── Read.hs
│ │ ├── Show.hs
│ │ └── Unsafe.hs
│ ├── tests
│ ├── LiteralRuleTest.hs
│ ├── Tests.hs
│ ├── Tests
│ │ ├── IO.hs
│ │ ├── Lift.hs
│ │ ├── Properties.hs
│ │ ├── Properties
│ │ │ ├── Basics.hs
│ │ │ ├── Builder.hs
│ │ │ ├── Folds.hs
│ │ │ ├── Instances.hs
│ │ │ ├── LowLevel.hs
│ │ │ ├── Read.hs
│ │ │ ├── Substrings.hs
│ │ │ ├── Text.hs
│ │ │ └── Transcoding.hs
│ │ ├── QuickCheckUtils.hs
│ │ ├── Regressions.hs
│ │ ├── SlowFunctions.hs
│ │ └── Utils.hs
│ └── literal-rule-test.sh
│ └── text.cabal
└── unlines-test
├── CHANGELOG.md
├── app
├── Main.hs
└── Unlines.hs
├── cabal.project
├── new.txt
├── ref.txt
└── unlines-test.cabal
/.gitignore:
--------------------------------------------------------------------------------
1 | dist-newstyle
2 | release
3 | release.zip
4 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "ghc-dump"]
2 | path = ghc-dump
3 | url = https://github.com/HugoPeters1024/ghc-dump
4 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM haskell:9.2-slim
2 | WORKDIR /app
3 |
4 | RUN apt-get update
5 | RUN apt-get install curl gzip
6 | RUN curl -L -o elm.gz https://github.com/elm/compiler/releases/download/0.19.1/binary-for-linux-64-bit.gz
7 | RUN gunzip elm.gz
8 | RUN chmod +x elm
9 | RUN mv elm /bin
10 | RUN elm --help
11 |
12 | COPY . /app/hs-comprehension
13 | # RUN git clone --recursive https://github.com/HugoPeters1024/hs-comprehension
14 | RUN cabal update
15 | # Build takes long so we do it outside the script to get a docker checkpoint
16 | WORKDIR /app/hs-comprehension/server
17 | RUN cabal build -O2
18 | RUN cp /app/hs-comprehension/server/dist-newstyle/build/x86_64-linux/ghc-9.2.4/hs-comprehension-server-0.1.0.0/x/hs-comprehension-server/opt/build/hs-comprehension-server/hs-comprehension-server server
19 |
20 | WORKDIR /app/hs-comprehension/frontend
21 | RUN elm make --optimize src/Main.elm
22 | WORKDIR /app/hs-comprehension/server
23 | RUN mkdir static
24 | RUN cp ../frontend/index.html static
25 | RUN cp ../frontend/src/style.css static
26 | RUN cp ../frontend/src/pygments.css static
27 |
28 | CMD ["./server", "--direct-root", "../test-project/dumps"]
29 | EXPOSE 8080
30 |
--------------------------------------------------------------------------------
/frontend/.gitignore:
--------------------------------------------------------------------------------
1 | elm-stuff
2 |
--------------------------------------------------------------------------------
/frontend/elm.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "application",
3 | "source-directories": [
4 | "src"
5 | ],
6 | "elm-version": "0.19.1",
7 | "dependencies": {
8 | "direct": {
9 | "NoRedInk/elm-json-decode-pipeline": "1.0.1",
10 | "agu-z/elm-zip": "3.0.1",
11 | "elm/browser": "1.0.2",
12 | "elm/bytes": "1.0.8",
13 | "elm/core": "1.0.5",
14 | "elm/file": "1.0.5",
15 | "elm/html": "1.0.0",
16 | "elm/http": "2.0.0",
17 | "elm/json": "1.1.3",
18 | "elm/time": "1.0.0",
19 | "elm/url": "1.0.0",
20 | "folkertdev/elm-state": "3.0.1",
21 | "hecrj/html-parser": "2.4.0",
22 | "jinjor/elm-contextmenu": "2.0.0",
23 | "rtfeldman/elm-css": "17.1.1",
24 | "rundis/elm-bootstrap": "5.2.0",
25 | "the-sett/elm-pretty-printer": "3.0.0",
26 | "toastal/either": "3.6.3",
27 | "turboMaCk/any-set": "1.5.0"
28 | },
29 | "indirect": {
30 | "avh4/elm-color": "1.0.0",
31 | "elm/parser": "1.1.0",
32 | "elm/virtual-dom": "1.0.2",
33 | "elm-community/basics-extra": "4.1.0",
34 | "elm-community/list-extra": "8.6.0",
35 | "folkertdev/elm-flate": "2.0.5",
36 | "justinmimbs/date": "4.0.1",
37 | "justinmimbs/time-extra": "1.1.1",
38 | "robinheghan/murmur3": "1.0.0",
39 | "rtfeldman/elm-hex": "1.0.0",
40 | "turboMaCk/any-dict": "2.6.0"
41 | }
42 | },
43 | "test-dependencies": {
44 | "direct": {},
45 | "indirect": {}
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/frontend/src/HsCore/Trafo/EraseTypes.elm:
--------------------------------------------------------------------------------
1 | module HsCore.Trafo.EraseTypes exposing (eraseTypesPhase, eraseTypesTopBinding)
2 |
3 | import Generated.Types exposing (..)
4 | import HsCore.Helpers as H
5 |
6 | eraseTypesPhase : Phase -> Phase
7 | eraseTypesPhase mod = {mod | phaseTopBindings = List.map eraseTypesTopBinding mod.phaseTopBindings}
8 |
9 | eraseTypesTopBinding : TopBinding -> TopBinding
10 | eraseTypesTopBinding tp =
11 | let go : TopBindingInfo -> TopBindingInfo
12 | go bi = {bi | topBindingRHS = eraseTypesExpr bi.topBindingRHS }
13 | in case tp of
14 | NonRecTopBinding bi -> NonRecTopBinding (go bi)
15 | RecTopBinding bis -> RecTopBinding (List.map go bis)
16 |
17 | eraseTypesExpr : Expr -> Expr
18 | eraseTypesExpr expr = case expr of
19 | EVar x -> EVar x
20 | EVarGlobal g -> EVarGlobal g
21 | ELit l -> ELit l
22 | EApp f a ->
23 | let ea = eraseTypesExpr a
24 | in case ea of
25 | EVar x -> if H.isTyBinderId x then (eraseTypesExpr f) else EApp (eraseTypesExpr f) ea
26 | EType _ -> eraseTypesExpr f
27 | _ -> EApp (eraseTypesExpr f) ea
28 | ETyLam _ a -> eraseTypesExpr a
29 | ELam b a -> ELam b (eraseTypesExpr a)
30 | -- TODO
31 | ELet bses e ->
32 | let (bs, es) = List.unzip bses
33 | in ELet (H.zip bs (List.map eraseTypesExpr es)) (eraseTypesExpr e)
34 | ECase e b alts -> ECase (eraseTypesExpr e) b (List.map eraseTypesAlt alts)
35 | ETick t e -> ETick t (eraseTypesExpr e)
36 | EType t -> EType t
37 | ECoercion -> ECoercion
38 | EMarkDiff e -> EMarkDiff (eraseTypesExpr e)
39 |
40 | eraseTypesAlt : Alt -> Alt
41 | eraseTypesAlt alt = { alt | altBinders = List.filter (not << H.isTyBinder) alt.altBinders
42 | , altRHS = eraseTypesExpr alt.altRHS }
43 |
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/frontend/src/HsCore/Trafo/VarOccs.elm:
--------------------------------------------------------------------------------
1 | module HsCore.Trafo.VarOccs exposing (exprVarOccs)
2 |
3 | import Generated.Types exposing (..)
4 | import HsCore.Helpers exposing (..)
5 | import ElmHelpers as EH
6 |
7 | import Set exposing (Set)
8 |
9 |
10 | exprVarOccs : Expr -> Set Int
11 | exprVarOccs expr = case expr of
12 | EVar v -> Set.singleton (binderIdToInt v)
13 | EVarGlobal (ExternalName e) -> case e.localBinder of
14 | Found var -> Set.singleton (binderToInt var)
15 | _ -> Set.empty
16 | EVarGlobal ForeignCall -> Set.empty
17 | ELit _ -> Set.empty
18 | EApp f a -> Set.union (exprVarOccs f) (exprVarOccs a)
19 | ETyLam b e -> Set.insert (binderToInt b) (exprVarOccs e)
20 | ELam b e -> Set.insert (binderToInt b) (exprVarOccs e)
21 | ELet bses rhs ->
22 | let (bs, es) = List.unzip bses
23 | in
24 | EH.setInsertMany (List.map binderToInt bs) (EH.setCombine (List.map exprVarOccs es))
25 | |> Set.union (exprVarOccs rhs)
26 | ECase e b alts ->
27 | exprVarOccs e
28 | |> Set.insert (binderToInt b)
29 | |> Set.union (EH.setCombine (List.map altVarOccs alts))
30 | ETick _ e -> exprVarOccs e
31 | EType _ -> Set.empty
32 | ECoercion -> Set.empty
33 | EMarkDiff e -> exprVarOccs e
34 |
35 | altVarOccs : Alt -> Set Int
36 | altVarOccs alt =
37 | EH.setInsertMany (List.map binderToInt alt.altBinders) (exprVarOccs alt.altRHS)
38 |
--------------------------------------------------------------------------------
/frontend/src/Ports.elm:
--------------------------------------------------------------------------------
1 | port module Ports exposing (..)
2 |
3 | port openBrowserTab : String -> Cmd msg
4 |
--------------------------------------------------------------------------------
/frontend/src/Reader.elm:
--------------------------------------------------------------------------------
1 | module Reader exposing (..)
2 |
3 | type Reader env a = Reader (env -> a)
4 |
5 | ask : Reader env env
6 | ask = askFor identity
7 |
8 | askFor : (env -> a) -> Reader env a
9 | askFor = Reader
10 |
11 | exec : (env -> Reader env a) -> Reader env a
12 | exec rr = ask |> andThen rr
13 |
14 | runReader : env -> Reader env a -> a
15 | runReader env (Reader f) = f env
16 |
17 | map : (a -> b) -> Reader env a -> Reader env b
18 | map f (Reader a) = Reader <| \env -> f (a env)
19 |
20 | map2 : (a -> b -> c) -> Reader env a -> Reader env b -> Reader env c
21 | map2 f (Reader a) (Reader b) = Reader <| \env -> f (a env) (b env)
22 |
23 | traverse : (a -> Reader env a) -> List a -> Reader env (List a)
24 | traverse f xss = case xss of
25 | [] -> pure []
26 | x::xs -> map2 (\y ys -> y::ys) (f x) (traverse f xs)
27 |
28 | pure : a -> Reader env a
29 | pure x = Reader <| \_ -> x
30 |
31 | andThen : (a -> Reader env b) -> Reader env a -> Reader env b
32 | andThen f (Reader g) = Reader <| \env -> runReader env (f (g env))
33 |
34 | foldM : (a -> b -> b) -> b -> List (Reader env a) -> Reader env b
35 | foldM f def xs = Reader <| \env -> List.foldl f def (List.map (runReader env) xs)
36 |
37 |
38 |
--------------------------------------------------------------------------------
/frontend/src/UI/Slider.elm:
--------------------------------------------------------------------------------
1 | module UI.Slider exposing (..)
2 |
3 | import Html exposing (..)
4 | import Html.Attributes exposing (..)
5 | import Html.Events exposing (..)
6 |
7 | type alias Model =
8 | { value : Int
9 | }
10 |
11 | type alias ViewModel msg =
12 | { lift : Msg -> msg
13 | , mininum : Int
14 | , maximum : Int
15 | }
16 |
17 |
18 |
19 | type Msg =
20 | SliderUpdate Model
21 |
22 | init : Int -> Model
23 | init v = { value = v}
24 |
25 | update : Msg -> Model -> Model
26 | update (SliderUpdate m) _ = m
27 |
28 | config : ViewModel msg -> ViewModel msg
29 | config = identity
30 |
31 | view : Model -> ViewModel msg -> Html msg
32 | view m vm =
33 | div [style "width" "100%"]
34 | [
35 | input [ type_ "range"
36 | , Html.Attributes.min (String.fromInt vm.mininum)
37 | , Html.Attributes.max (String.fromInt vm.maximum)
38 | , value (String.fromInt m.value)
39 | , onInput (\inp -> vm.lift (case String.toInt inp of
40 | Nothing -> SliderUpdate m
41 | Just i -> SliderUpdate {m | value = i}))
42 | , style "width" "100%"
43 | , style "height" "24px"
44 | ] []
45 | ]
46 |
--------------------------------------------------------------------------------
/frontend/src/default-dark.min.css:
--------------------------------------------------------------------------------
1 | /*!
2 | Theme: Default Dark
3 | Author: Chris Kempson (http://chriskempson.com)
4 | License: ~ MIT (or more permissive) [via base16-schemes-source]
5 | Maintainer: @highlightjs/core-team
6 | Version: 2021.09.0
7 | */pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}.hljs{color:#d8d8d8;background:#181818}.hljs ::selection,.hljs::selection{background-color:#383838;color:#d8d8d8}.hljs-comment{color:#585858}.hljs-tag{color:#b8b8b8}.hljs-operator,.hljs-punctuation,.hljs-subst{color:#d8d8d8}.hljs-operator{opacity:.7}.hljs-bullet,.hljs-deletion,.hljs-name,.hljs-selector-tag,.hljs-template-variable,.hljs-variable{color:#ab4642}.hljs-attr,.hljs-link,.hljs-literal,.hljs-number,.hljs-symbol,.hljs-variable.constant_{color:#dc9656}.hljs-class .hljs-title,.hljs-title,.hljs-title.class_{color:#f7ca88}.hljs-strong{font-weight:700;color:#f7ca88}.hljs-addition,.hljs-code,.hljs-string,.hljs-title.class_.inherited__{color:#a1b56c}.hljs-built_in,.hljs-doctag,.hljs-keyword.hljs-atrule,.hljs-quote,.hljs-regexp{color:#86c1b9}.hljs-attribute,.hljs-function .hljs-title,.hljs-section,.hljs-title.function_,.ruby .hljs-property{color:#7cafc2}.diff .hljs-meta,.hljs-keyword,.hljs-template-tag,.hljs-type{color:#ba8baf}.hljs-emphasis{color:#ba8baf;font-style:italic}.hljs-meta,.hljs-meta .hljs-keyword,.hljs-meta .hljs-string{color:#a16946}.hljs-meta .hljs-keyword,.hljs-meta-keyword{font-weight:700}
--------------------------------------------------------------------------------
/frontend/src/index_html_for_elm.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | HsComprehension
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/frontend/src/style.css:
--------------------------------------------------------------------------------
1 | body {
2 | padding: 3em;
3 | font-family: Ubuntu, sans-serif;
4 | }
5 |
6 | a.no-style, a.no-style:hover, a.no-style:visited, a.no-style:focus, a.no-style:active {
7 | text-decoration: none;
8 | color: inherit;
9 | outline: 0;
10 | cursor: pointer;
11 | }
12 |
13 | .dark {
14 | width: 100%;
15 | max-width: 100%;
16 | min-height: 1024px;
17 | color: #FFFFFF;
18 | background-color: #303444;
19 | padding: 1em;
20 | font-size: 11pt;
21 | box-sizing: border-box;
22 | margin: 0;
23 | overflow: scroll;
24 | font-family: Menlo,ui-monospace,SFMono-Regular,SF Mono,Menlo,Consolas,Liberation Mono,monospace;
25 | line-height: 1.0em;
26 | }
27 |
28 | pre>code {
29 | width: 100%;
30 | max-width: 100%;
31 | max-height: 1200px;
32 | }
33 |
34 | span.diff {
35 | background-color: red;
36 | }
37 |
38 | .info-panel {
39 | width: 100%;
40 | max-width: 100%
41 | padding: 1em;
42 | box-sizing: border-box;
43 | color: #dddddd;
44 | background-color: #212128;
45 | }
46 |
47 | ul.no-dot {
48 | list-style-type: none;
49 | margin-left: -16px;
50 | }
51 |
52 | .highlight-exact {
53 | background-color: yellow;
54 | color: black !important;
55 | }
56 |
57 | .highlight-approx {
58 | background-color: purple;
59 | color: black !important;
60 | }
61 |
62 | .panel-4-1 {
63 | display: grid;
64 | grid-template-columns: 2fr 2fr 1fr;
65 | width: 100%;
66 | }
67 |
68 | .src-toggle {
69 | cursor: pointer;
70 | text-decoration: underline !important;
71 | color: gray !important;
72 | font-size: 10pt;
73 | }
74 |
75 | .hidden {
76 | display: none;
77 | }
78 |
79 |
--------------------------------------------------------------------------------
/generate-elm/.gitignore:
--------------------------------------------------------------------------------
1 | dist-newstyle
2 | *.elm
3 |
--------------------------------------------------------------------------------
/generate-elm/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Revision history for generate-elm
2 |
3 | ## 0.1.0.0 -- YYYY-mm-dd
4 |
5 | * First version. Released on an unsuspecting world.
6 |
--------------------------------------------------------------------------------
/generate-elm/cabal.project:
--------------------------------------------------------------------------------
1 | packages: .
2 | , ../ghc-dump/ghc-dump-core
3 | , ../plugin
4 | allow-newer: all
5 |
--------------------------------------------------------------------------------
/generate-elm/generate-elm.cabal:
--------------------------------------------------------------------------------
1 | cabal-version: 2.4
2 | name: generate-elm
3 | version: 0.1.0.0
4 |
5 | -- A short (one-line) description of the package.
6 | -- synopsis:
7 |
8 | -- A longer description of the package.
9 | -- description:
10 |
11 | -- A URL where users can report bugs.
12 | -- bug-reports:
13 |
14 | -- The license under which the package is released.
15 | -- license:
16 | author: HugoPeters1024
17 | maintainer: hpeters1024@gmail.com
18 |
19 | -- A copyright notice.
20 | -- copyright:
21 | -- category:
22 | extra-source-files: CHANGELOG.md
23 |
24 | executable generate-elm
25 | main-is: Main.hs
26 |
27 | -- Modules included in this executable, other than Main.
28 | -- other-modules:
29 |
30 | -- LANGUAGE extensions used by modules in this package.
31 | -- other-extensions:
32 | build-depends: base
33 | , hs-comprehension-plugin
34 | , haskell-to-elm
35 | , elm-syntax
36 | , unordered-containers
37 | , aeson
38 | , MissingH
39 | hs-source-dirs: app
40 | default-language: Haskell2010
41 |
--------------------------------------------------------------------------------
/make_release.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # Build the Elm frontend, minify it, and assemble a release/ directory.
3 | # -e: abort on the first failing step so a broken elm/uglifyjs run can
4 | # never produce a half-populated release.
5 | set -e
6 | cd "${0%/*}"
7 | 
8 | # -p: do not fail when release/ is left over from a previous run
9 | mkdir -p release
10 | 
11 | echo "Building Elm Frontend"
12 | cd ./frontend/
13 | elm make src/Main.elm --optimize --output main.js
14 | cd ../
15 | 
16 | mv ./frontend/main.js release
17 | uglifyjs release/main.js --compress 'pure_funcs="F2,F3,F4,F5,F6,F7,F8,F9,A2,A3,A4,A5,A6,A7,A8,A9",pure_getters=true,keep_fargs=false,unsafe_comps=true,unsafe=true,passes=2' --output release/main.js && uglifyjs release/main.js --mangle --output release/main.js
18 | 
19 | cp ./frontend/src/index_html_for_elm.html release/index.html
20 | cp ./frontend/src/style.css release
21 | cp ./frontend/src/pygments.css release
22 | 
23 | 
--------------------------------------------------------------------------------
/plugin/cabal.project:
--------------------------------------------------------------------------------
1 | packages: .
2 | , ../ghc-dump/ghc-dump-core
3 | allow-newer: elm-syntax
4 |
--------------------------------------------------------------------------------
/plugin/lib/HsComprehension/Hash.hs:
--------------------------------------------------------------------------------
1 | module HsComprehension.Hash where
2 | 
3 | import Data.Hashable
4 | 
5 | import HsComprehension.Ast
6 | 
7 | hashExprModAlpha :: Expr -> Int -- expression hash intended to be stable under alpha-renaming of binders
8 | hashExprModAlpha (EVar bi) = binderIdDeBruijn bi -- presumably the de Bruijn index rather than the unique, giving alpha-invariance
9 | hashExprModAlpha (EVarGlobal en) = hash (externalModuleName en <> externalName en)
10 | hashExprModAlpha (ELit lit) = hash lit
11 | hashExprModAlpha (EApp f a) = hashExprModAlpha f + hashExprModAlpha a -- (+) is commutative: order-insensitive, cheap but collision-prone
12 | hashExprModAlpha (ETyLam bi ex) = hashExprModAlpha ex -- binder ignored: alpha-invariance
13 | hashExprModAlpha (ELam bi ex) = hashExprModAlpha ex
14 | hashExprModAlpha (ELet x1 ex) = hashExprModAlpha ex -- NOTE(review): the let-bound RHSs are ignored, only the body is hashed — confirm intended
15 | hashExprModAlpha (ECase ex bi alts) = sum (map (hashExprModAlpha . altRHS) alts) -- NOTE(review): scrutinee and case binder are ignored — confirm intended
16 | hashExprModAlpha (ETick ti ex) = hashExprModAlpha ex
17 | hashExprModAlpha (EType ty) = hashType ty
18 | hashExprModAlpha ECoercion = 0
19 | hashExprModAlpha (EMarkDiff ex) = hashExprModAlpha ex
20 | 
21 | hashType :: Type -> Int -- type hash, same alpha-invariant scheme as above
22 | hashType (VarTy bi) = binderIdDeBruijn bi
23 | hashType (FunTy f a) = hashType f + hashType a
24 | hashType (TyConApp tc tys) = sum (map hashType tys) -- NOTE(review): the constructor 'tc' itself is ignored — confirm intended
25 | hashType (AppTy f a) = hashType f + hashType a
26 | hashType (ForAllTy bi e) = hashType e
27 | hashType (LitTy tylit) = hash tylit
28 | hashType CoercionTy = 0
29 | 
30 | 
--------------------------------------------------------------------------------
/plugin/src/Zip.hs:
--------------------------------------------------------------------------------
1 | module Main where
2 | 
3 | import Prelude as P
4 | import Control.Monad
5 | import Data.Maybe
6 | import System.Environment
7 | import System.FilePath.Posix as FP
8 | import System.Directory as FP
9 | import qualified Codec.Archive.Zip as Zip
10 | import qualified Data.ByteString.Lazy as BSL
11 | 
12 | import HsComprehension.Plugin
13 | 
14 | -- | Collect every dump file for the slug given on the command line and
15 | -- pack them into a single zip archive at the capture-view archive path.
16 | main :: IO ()
17 | main = do
18 | slug <- parseCmdLineOptions <$> getArgs
19 | let cv = defaultCaptureView
20 | 
21 | let dump_dir = coreDumpDir cv slug
22 | putStrLn $ "Attempting to archive dump files in " ++ dump_dir
23 | 
24 | isDir <- FP.doesDirectoryExist dump_dir
25 | unless isDir $ FP.makeAbsolute dump_dir >>= \path -> error (path ++ " does not exist")
26 | 
27 | allFileNames <- FP.getDirectoryContents dump_dir
28 | entries <- forM allFileNames $ \fname -> do
29 | let path = dump_dir `FP.combine` fname
30 | -- Mostly to filter out any directories (The node .. is always included for example)
31 | isFile <- FP.doesFileExist path
32 | if isFile
33 | then do
34 | content <- BSL.readFile path
35 | -- zip entry timestamps are in seconds; currentPosixMillis is milliseconds
36 | time <- (`div` 1000) <$> currentPosixMillis
37 | pure $ Just $ Zip.toEntry fname (fromIntegral time) content
38 | else pure Nothing
39 | 
40 | -- Count only real files: the old message counted the skipped
41 | -- directory entries (Nothing values) as archived files.
42 | let files = catMaybes entries
43 | putStrLn $ "Archiving " ++ show (length files) ++ " files"
44 | 
45 | let archive = P.foldr Zip.addEntryToArchive Zip.emptyArchive files
46 | let archive_path = coreDumpArchive cv slug
47 | BSL.writeFile archive_path (Zip.fromArchive archive)
48 | FP.makeAbsolute archive_path >>= \path -> putStrLn $ "Created " ++ path
49 | 
50 | 
--------------------------------------------------------------------------------
/test-project/.gitignore:
--------------------------------------------------------------------------------
1 | Core
2 |
--------------------------------------------------------------------------------
/test-project/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Revision history for hs-plugin-test
2 |
3 | ## 0.1.0.0 -- YYYY-mm-dd
4 |
5 | * First version. Released on an unsuspecting world.
6 |
--------------------------------------------------------------------------------
/test-project/README.md:
--------------------------------------------------------------------------------
1 | ## Background:
2 |
3 | ### Simplifier passes
4 |
5 | Unintuitively, this is the ordered list of simplifier phases: `[gentle, 2, 1, 0]`
6 |
7 |
8 | ## Test 1: Stream Fusion Rewrite Rule Failing
9 |
10 | given the function
11 |
12 | ```haskell
13 | addThree :: [Int] -> [Int]
addThree = map (+1) . map (+2)
15 | ```
16 |
17 | The following rewrite rules fire in the first simplifier pass (and none later):
18 |
19 | ```haskell
20 | {-
21 | RULES FIRED:
22 | Class op + (BUILTIN)
23 | Class op + (BUILTIN)
24 | map -> fusible (Data.List.Stream)
25 | map -> fusible (Data.List.Stream)
26 | STREAM stream/unstream fusion (Data.Stream)
27 | STREAM map/map fusion (Data.Stream)
28 | +# (BUILTIN)
29 | -}
30 | ```
31 |
32 | This yields a nicely fused result:
33 |
34 | ```haskell
35 | addThree :: [Int] -> [Int]
36 | addThree x = unstream (map lvl (stream x))
37 | ```
38 |
39 | However, the stream/unstream is still there, making this likely
40 | not a cheaper expression than an unfused canonical map composition.
41 | However, this rule exists in `Data.List.Stream`
42 |
43 | ```haskell
44 | {-# RULES
45 | "map -> fusible" [~1] forall f xs.
46 | map f xs = unstream (Stream.map f (stream xs))
47 | --"map -> unfused" [1] forall f xs.
48 | -- unstream (Stream.map f (stream xs)) = map f xs
49 | #-}
50 | ```
51 |
Here we see two cyclical rules: one enables the stream fusion, and the other removes it. With the phase control annotations they should
complement each other. However, in phase 1 the second rule is never fired, and it is not clear why.
54 |
--------------------------------------------------------------------------------
/test-project/app/Channable.hs:
--------------------------------------------------------------------------------
module Channable where

import Data.Text (Text)
import qualified Data.Text as T

-- | The substring of length @len@ starting at character @offset@.
slice :: Int -> Int -> Text -> Text
slice offset len txt = T.take len (T.drop offset txt)

-- | 'T.take' behind a NOINLINE barrier — presumably so the call stays
-- visible in the Core dumps this test project produces (TODO confirm).
{-# NOINLINE noInlineTake #-}
noInlineTake :: Int -> Text -> Text
noInlineTake = T.take
13 |
--------------------------------------------------------------------------------
/test-project/app/Extra/Hidden.hs:
--------------------------------------------------------------------------------
module Extra.Hidden where

-- NOTE(review): hard-coded secret-looking string; presumably a fixture for
-- checking what the dump plugin exposes from modules that are not dumped —
-- confirm this is not a real credential.
secret :: String
secret = "p4ssw0rd"
--------------------------------------------------------------------------------
/test-project/app/Factorial.hs:
--------------------------------------------------------------------------------
module Factorial where

-- | Factorial. Fixed: the base case for 0 is 1 (0! = 1); the old
-- @fac 0 = 0@ made every result reachable from 0 wrong.
fac :: Int -> Int
fac 0 = 1
fac 1 = 1
fac n = n * fac (n-1)


-- | Keep the even elements, halved.
halves :: [Int] -> [Int]
halves = map (`div` 2) . filter even

-- | Hand-fused version of 'halves': one traversal instead of two.
halves_fast :: [Int] -> [Int]
halves_fast [] = []
halves_fast (x:xs) =
    let
        tl = halves_fast xs
    in if even x
        then (x `div` 2):tl
        else tl
--------------------------------------------------------------------------------
/test-project/app/Fusion.hs:
--------------------------------------------------------------------------------
-- TemplateHaskell is required for the 'dumpThisModule' splice below
-- (as in Tree.hs and Unlines.hs).
{-# LANGUAGE TemplateHaskell #-}
module Fusion where

import Prelude hiding (sum, scanl, zip3, zipWith3, map)
import Data.Stream

import HsComprehension.Plugin (dumpThisModule)

-- Fixed: was "import qualied", which does not parse.
import qualified Data.List.Stream as S

dumpThisModule

-- NOTE(review): with 'map' hidden from Prelude, the unqualified 'map' here
-- resolves to Data.Stream's — confirm that is the intent.
halves :: [Int] -> [Int]
halves = map (`div` 2) . filter even

halves_stream :: [Int] -> [Int]
halves_stream = S.map (`div` 2) . S.filter even
--------------------------------------------------------------------------------
/test-project/app/HalfMatch.hs:
--------------------------------------------------------------------------------
module HalfMatch where

-- | A plain two-field pair.
data Pair a b = Pair a b

-- | Return the second component when the first is 0, a constant otherwise.
test :: Pair Int String -> String
test (Pair 0 s) = s
test (Pair _ _) = "not 0"   -- was `Pair n _`; n was never used
8 |
--------------------------------------------------------------------------------
/test-project/app/InspectionTests.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE TemplateHaskell #-}
module InspectionTests where

import Test.Inspection
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.ByteString
import HsComprehension.Plugin (dumpThisModule)

import Tree
--addNothing :: Tree Int -> Tree Int
--addNothing = mapTree (+0) . mapTree id

--addNothing2 :: Tree Int -> Tree Int
--addNothing2 = fmap (+0) . fmap id

-- | Decode UTF-8, upper-case, then take the length. Candidate for the
-- (currently commented-out) inspection check below, which would assert
-- that all intermediate Text values fuse away.
countChars :: ByteString -> Int
countChars = T.length . T.toUpper . TE.decodeUtf8


--inspect $ 'countChars `hasNoType` ''T.Text

--dumpThisModule
--------------------------------------------------------------------------------
/test-project/app/LargeRecords.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE DataKinds #-}
2 | {-# LANGUAGE KindSignatures #-}
3 | {-# LANGUAGE TypeFamilies #-}
4 | {-# LANGUAGE FlexibleInstances #-}
5 | {-# LANGUAGE MultiParamTypeClasses #-}
6 | {-# LANGUAGE UndecidableInstances #-}
7 | {-# LANGUAGE ConstraintKinds #-}
8 | {-# LANGUAGE ExistentialQuantification #-}
9 | {-# LANGUAGE ScopedTypeVariables #-}
10 | module LargeRecords where
11 |
12 | --import Data.Record.Plugin
13 | --import Data.Record.Plugin.Options
14 | --
15 | --{-# ANN type B largeRecordStrict #-}
16 | --data B = MkB { field00 :: Int
17 | -- , field01 :: Int
18 | -- , field02 :: Int
19 | -- , field03 :: Int
20 | -- , field04 :: Int
21 | -- , field05 :: Int
22 | -- , field06 :: Int
23 | -- , field07 :: Int
24 | -- , field08 :: Int
25 | -- , field09 :: Int
26 | -- , field10 :: Int
27 | -- }
28 |
--------------------------------------------------------------------------------
/test-project/app/Palin.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
module Palin where

-- | A sequence indexed by its element types, stored palindrome-style:
-- each 'Cons' carries a matching front and back element of the same type.
data Palin xs where
  Empty :: Palin '[]
  Single :: a -> Palin '[a]
  Cons :: a -> Palin xs -> a -> Palin (a ': xs)

-- NOTE(review): this rule's LHS is the composition `flipPalin . flipPalin`,
-- which likely only matches a literal `(.) flipPalin flipPalin` expression,
-- not an applied `flipPalin (flipPalin p)` — confirm against the GHC docs.
{-# RULES
  "flipPalin/flipPalin" flipPalin . flipPalin = id
  #-}

-- Applied-form variant of the rule above.
{-# RULES
  "flipPalin/flipPalin2" forall p. flipPalin (flipPalin p) = p
  #-}

-- | Swap the front and back element of every 'Cons', recursively.
flipPalin :: Palin xs -> Palin xs
flipPalin Empty = Empty
flipPalin (Single x) = Single x
flipPalin (Cons lhs tl rhs) = Cons rhs (flipPalin tl) lhs

-- | Double flip: should be rewritten away entirely by the rules above.
doubleFlipPalin :: Palin xs -> Palin xs
doubleFlipPalin = flipPalin . flipPalin
--------------------------------------------------------------------------------
/test-project/app/Peano.hs:
--------------------------------------------------------------------------------
module Peano where

-- | Unary (Peano) naturals.
data Nat = Z | S Nat

-- | Catamorphism: replace 'Z' with @zero@ and every 'S' with @suc@.
-- (Parameter renamed from @succ@ to avoid shadowing 'Prelude.succ'.)
natCata :: r -> (r -> r) -> Nat -> r
natCata zero suc = f
    where f Z     = zero
          f (S n) = suc (f n)

-- Let the simplifier fold this known-constant conversion.
{-# RULES
  "natConst1" natToInt (S Z) = 1
  #-}

natToInt :: Nat -> Int
natToInt Z = 0
natToInt (S n) = 1 + natToInt n

-- | Six as a Peano numeral. Fixed: was @S Z@ (i.e. one), which
-- contradicted both this name and 'six' below.
peanoSix :: Nat
peanoSix = S (S (S (S (S (S Z)))))

six :: Int
six = natToInt peanoSix
--------------------------------------------------------------------------------
/test-project/app/Quicksort.hs:
--------------------------------------------------------------------------------
module Quicksort where

-- | Naive quicksort: pivot on the head, partition the tail, recurse.
-- Reconstructed: the previous source had `filter (x)` (not a predicate)
-- and used an undefined `more`.
quicksort :: Ord a => [a] -> [a]
quicksort [] = []
quicksort (x:xs) =
    let lesser = filter (< x) xs
        more   = filter (>= x) xs
    in quicksort lesser ++ [x] ++ quicksort more
9 |
--------------------------------------------------------------------------------
/test-project/app/Records.hs:
--------------------------------------------------------------------------------
module Records where

-- | A record with many Int fields, used to exercise how wide records
-- desugar in Core.
data Big = Big
  { field00 :: Int
  , field01 :: Int
  , field02 :: Int
  , field03 :: Int
  , field04 :: Int
  , field05 :: Int
  , field06 :: Int
  , field07 :: Int
  , field08 :: Int
  , field09 :: Int
  , field10 :: Int
  }
--------------------------------------------------------------------------------
/test-project/app/Streaming.hs:
--------------------------------------------------------------------------------
module Streaming where

import qualified Data.List.Stream as S

-- | Keep the even elements, halved — stream-fusion flavoured pipeline.
halves :: [Int] -> [Int]
halves = S.map (`div` 2) . S.filter even

--div2_s :: [Int] -> [Int]
--div2_s xs = S.map (`div` 2) $ S.filter even xs
--
--doubleSum :: [Int] -> Int
--doubleSum = sum . map (*2)
--------------------------------------------------------------------------------
/test-project/app/Tail.hs:
--------------------------------------------------------------------------------
module Tail where

-- | List length via a tail-recursive accumulator.
count :: [a] -> Int
count = go 0
    where go :: Int -> [a] -> Int
          go acc []       = acc
          go acc (_:rest) = go (acc + 1) rest
--------------------------------------------------------------------------------
/test-project/app/Text.hs:
--------------------------------------------------------------------------------
module Text where

import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T

import Data.ByteString (ByteString)
-- NOTE(review): B appears unused in this module — candidate for removal.
import qualified Data.ByteString as B

-- | The substring of length @len@ starting at character @offset@.
{-# INLINE slice #-}
slice :: Int -> Int -> Text -> Text
slice offset len = T.take len . T.drop offset

-- from: https://arxiv.org/pdf/1803.07130.pdf
-- | Decode UTF-8, upper-case, count characters — a fusion benchmark.
countChars :: ByteString -> Int
countChars = T.length . T.toUpper . T.decodeUtf8
17 |
--------------------------------------------------------------------------------
/test-project/app/TextExt.hs:
--------------------------------------------------------------------------------
module TextExt where

import Data.Text as T

-- | Upper-case the input, drop its last character, then count characters.
test :: Text -> Int
test = T.length . T.init . T.toUpper

-- | 'test' applied to a constant.
-- NOTE(review): despite the name, this is an Int, not a message string.
msg :: Int
msg = test (T.pack "abcd")
10 |
--------------------------------------------------------------------------------
/test-project/app/Tree.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE TemplateHaskell #-}
module Tree where

import Debug.Trace
import Test.Inspection
import HsComprehension.Plugin (dumpThisModule)

-- Capture this module's Core passes via the plugin (TH splice).
dumpThisModule

-- | Binary tree with values at the leaves.
data Tree a
    = Leaf a
    | Node (Tree a) (Tree a)
    deriving Show

-- Fuse two consecutive maps into one.
-- NOTE(review): pragma spelled "Rules" — GHC seems to accept pragma names
-- case-insensitively, but confirm the rule actually registers.
{-# Rules
  "mapTree/mapTree" forall f g t. mapTree f (mapTree g t) = mapTree (f . g) t ;
  #-}

-- | Map over every leaf. The 'trace' "ping" makes each leaf visit
-- observable, so one can see at runtime whether the maps fused.
mapTree :: (a -> b) -> Tree a -> Tree b
mapTree f (Leaf x) = Leaf (f (trace "ping" x))
mapTree f (Node lhs rhs) = Node (mapTree f lhs) (mapTree f rhs)

-- | Two composed maps: candidate for the mapTree/mapTree rewrite.
makeImportant :: Tree Int -> Tree String
makeImportant = mapTree (++"!") . mapTree show

-- | Hand-fused version of 'makeImportant'.
makeImportantFused :: Tree Int -> Tree String
makeImportantFused = mapTree (\x -> show x ++ "!")

-- Core-level equality check between the rewritten and hand-fused versions.
inspect $ 'makeImportant === 'makeImportantFused
--------------------------------------------------------------------------------
/test-project/app/Typeclass.hs:
--------------------------------------------------------------------------------
module Typeclass where

-- NOTE(review): "Emtpy" is a typo for "Empty", kept as-is because the
-- constructor name is part of this module's public interface.
data List a = Emtpy | Cons a (List a)

-- | Things that can be printed politely.
class PoliteShow a where
    politeShow :: a -> String

instance PoliteShow Int where
    politeShow n = show n ++ " please"

instance PoliteShow a => PoliteShow (List a) where
    politeShow Emtpy       = "empty list please"
    politeShow (Cons x xs) = politeShow x ++ " and then " ++ politeShow xs

-- | Exercises dictionary passing for the nested 'List' instance.
test :: String
test = politeShow ls
    where ls :: List Int
          ls = Cons 1 (Cons 2 Emtpy)
--------------------------------------------------------------------------------
/test-project/app/Unlines.hs:
--------------------------------------------------------------------------------
{-# LANGUAGE TemplateHaskell #-}
module Unlines where

import HsComprehension.Plugin (dumpThisModule)
import qualified Data.List.Stream as S

--unlines_stream :: [String] -> String
--unlines_stream ls = S.concat (S.map (\l -> l S.++ ['\n']) ls)

-- | Re-implementation of the Prelude's 'unlines': append a newline to
-- every line and concatenate. Shadows 'Prelude.unlines' in this module.
unlines :: [String] -> String
unlines ls = concat (map (\l -> l ++ ['\n']) ls)

--dumpThisModule
--------------------------------------------------------------------------------
/test-project/cabal.project:
--------------------------------------------------------------------------------
1 | packages: .
2 | , ../plugin
3 | , ../ghc-dump/ghc-dump-core
4 | , ./stream-fusion
5 | -- , ./text-1.2.4.0
6 | allow-newer: all
7 |
--------------------------------------------------------------------------------
/test-project/dumps/coredump-Baseline/Channable.hs:
--------------------------------------------------------------------------------
1 | module Channable where
2 |
3 | import Data.Text (Text)
4 | import qualified Data.Text as T
5 |
6 | slice :: Int -> Int -> Text -> Text
7 | slice offset len = T.take len . T.drop offset
8 |
9 | {-# NOINLINE noInlineTake #-}
10 | noInlineTake :: Int -> Text -> Text
11 | noInlineTake = T.take
12 |
13 |
--------------------------------------------------------------------------------
/test-project/dumps/coredump-Baseline/Channable.zstd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HugoPeters1024/hs-sleuth/91aa2806ea71b7a26add3801383b3133200971a5/test-project/dumps/coredump-Baseline/Channable.zstd
--------------------------------------------------------------------------------
/test-project/dumps/coredump-Baseline/Factorial.hs:
--------------------------------------------------------------------------------
1 | module Factorial where
2 |
3 | fac :: Int -> Int
4 | fac 0 = 0
5 | fac 1 = 1
6 | fac n = n * fac (n-1)
7 |
8 |
--------------------------------------------------------------------------------
/test-project/dumps/coredump-Baseline/Factorial.zstd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HugoPeters1024/hs-sleuth/91aa2806ea71b7a26add3801383b3133200971a5/test-project/dumps/coredump-Baseline/Factorial.zstd
--------------------------------------------------------------------------------
/test-project/dumps/coredump-Baseline/Main.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | module Main where
3 |
4 | import Factorial (fac)
5 |
6 | triangular :: Int -> Int
7 | triangular 0 = 0
8 | triangular n = n + triangular (n-1)
9 |
10 | main :: IO ()
11 | main = print $ triangular 1000000
12 |
--------------------------------------------------------------------------------
/test-project/dumps/coredump-Baseline/Main.zstd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HugoPeters1024/hs-sleuth/91aa2806ea71b7a26add3801383b3133200971a5/test-project/dumps/coredump-Baseline/Main.zstd
--------------------------------------------------------------------------------
/test-project/dumps/coredump-Baseline/capture.zstd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HugoPeters1024/hs-sleuth/91aa2806ea71b7a26add3801383b3133200971a5/test-project/dumps/coredump-Baseline/capture.zstd
--------------------------------------------------------------------------------
/test-project/stream-fusion/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) Duncan Coutts 2006-2007
2 | Copyright (c) Don Stewart 2006-2007
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions
6 | are met:
7 |
8 | 1. Redistributions of source code must retain the above copyright
9 | notice, this list of conditions and the following disclaimer.
10 | 2. Redistributions in binary form must reproduce the above copyright
11 | notice, this list of conditions and the following disclaimer in the
12 | documentation and/or other materials provided with the distribution.
13 | 3. Neither the name of the author nor the names of his contributors
14 | may be used to endorse or promote products derived from this software
15 | without specific prior written permission.
16 |
17 | THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND
18 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20 | ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
21 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
22 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
23 | OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
24 | HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
25 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
26 | OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
27 | SUCH DAMAGE.
28 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/README:
--------------------------------------------------------------------------------
1 | Stream fusible lists
2 |
3 | Faster lists using stream fusion.
4 |
5 | The abstract from the paper:
6 |
7 | This paper presents an automatic deforestation system, \emph{stream
8 | fusion}, based on equational transformations, that fuses a wider
9 | range of functions than existing short-cut fusion systems. In
10 | particular, stream fusion is able to fuse zips, left folds and
11 | functions over nested lists, including list comprehensions. A
12 | distinguishing feature of the framework is its simplicity: by
13 | transforming list functions to expose their structure, intermediate
14 | values are eliminated by general purpose compiler optimisations.
15 |
16 | We have reimplemented the Haskell standard List library on top of
17 | our framework, providing stream fusion for Haskell lists. By
18 | allowing a wider range of functions to fuse, we see an increase in
19 | the number of occurrences of fusion in typical Haskell programs. We
20 | present benchmarks documenting time and space improvements.
21 |
22 | Building:
23 |
24 | $ runhaskell Setup.lhs configure --prefix=/home/dons
25 | $ runhaskell Setup.lhs build
26 | $ runhaskell Setup.lhs install
27 |
28 | Use:
29 |
30 | import Data.List.Stream
31 |
32 | and use as you would for normal lists.
33 | Compile with ghc -O2 for best results.
34 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/Setup.lhs:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env runhaskell
2 | > import Distribution.Simple
3 | > main = defaultMain
--------------------------------------------------------------------------------
/test-project/stream-fusion/desugar/Examples.hs:
--------------------------------------------------------------------------------
1 | module Examples where
2 |
3 | map :: (a -> b) -> [a] -> [b]
4 | map f as = [ f a | a <- as ]
5 |
6 | concat :: [[a]] -> [a]
7 | concat ass = [ a | as <- ass, a <- as ]
8 |
9 | concatMap :: (a -> [b]) -> [a] -> [b]
10 | concatMap f as = [ b | a <- as, b <- f a ]
11 |
12 | filter :: (a -> Bool) -> [a] -> [a]
13 | filter p as = [ a | a <- as, p a ]
14 |
15 | prod xs ys = [ x * y | x <- xs, y <- ys ]
16 |
17 | foo xs ys = [ x * y | Left (x, _) <- xs, y <- ys ]
18 |
19 | bar xs ys zs = [ x * y | x <- xs, y <- ys, z <- zs ]
20 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/setup-base.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | # dump (and edit) the stream/list code into a copy of base-streams
4 | # such that the whole thing builds.
5 |
6 | #
7 | # assumes we're in the 'list' main repo
8 | #
9 |
10 | ghc_base_path=$*
11 | if [ -z "$ghc_base_path" ] ; then
12 | echo "usage: ./setup-base.sh path_to_base_package"
13 | exit 1
14 | fi
15 |
16 | # Where are we sending things?
17 | echo -n "Preparing to set up streams stuff under: "
18 | echo $ghc_base_path
19 |
20 | # Just double check its a base repo:
21 | echo -n "Checking it looks like a proper base repo .... "
22 | if [ -d "$ghc_base_path/_darcs/patches" ] ; then
23 | looks_ok=True
24 | else
25 | looks_ok=False
26 | fi
27 | echo $looks_ok
28 |
29 | if [ "$looks_ok" = "False" ] ; then
30 | echo "'$ghc_base_path' doesn't look like a darcs repo!"
31 | exit 1
32 | fi
33 |
34 | # Work out if we need to create the Data/List subdir
35 | echo -n "Checking if we need to create the Data/List subdir... "
36 | ghc_base_streams="$ghc_base_path/Data/List"
37 | if [ ! -d "$ghc_base_streams" ] ; then
38 | create_streams=True
39 | else
40 | create_streams=False
41 | fi
42 | echo $create_streams
43 |
44 | if [ "$create_streams" = "True" ] ; then
45 | mkdir $ghc_base_streams
46 | fi
47 |
48 | # copy first
49 | echo "{-# OPTIONS_GHC -fno-implicit-prelude #-}" > $ghc_base_streams/Stream.hs
50 | echo "{-# OPTIONS_GHC -fno-implicit-prelude #-}" > $ghc_base_path/Data/Stream.hs
51 |
52 | cat Data/List/Stream.hs >> $ghc_base_streams/Stream.hs
53 | cat Data/Stream.hs >> $ghc_base_path/Data/Stream.hs
54 |
55 |
56 | echo "done."
57 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/stream-fusion.cabal:
--------------------------------------------------------------------------------
1 | Name: stream-fusion
2 | Version: 0.1.2.5
3 | Author: Duncan Coutts, Don Stewart
4 | Maintainer: duncan.coutts@worc.ox.ac.uk, dons00@gmail.com
5 | License: BSD3
6 | License-file: LICENSE
7 | Synopsis: Faster Haskell lists using stream fusion
8 | Homepage: http://hackage.haskell.org/trac/ghc/ticket/915
9 | Description:
10 | This package provides the standard Haskell list library
11 | reimplemented to allow stream fusion. This should in general
12 | provide faster list operations, and faster code for list-heavy
13 | programs. See the paper /Stream Fusion: From Lists to Streams to Nothing at All/,
14 | Coutts, Leshchinskiy and Stewart, 2007.
15 | To use, simply import Data.List.Stream in place of Data.List,
16 | and hide list functions from the Prelude.
17 | Category: Data
18 | Build-Type: Simple
19 | Stability: experimental
20 | Tested-with: GHC==7.6.1
21 | cabal-version: >= 1.6
22 |
23 | source-repository head
24 | type: darcs
25 | location: http://code.haskell.org/~dons/code/stream-fusion
26 |
27 | Library
28 | Build-Depends: base >= 3 && < 5
29 | Exposed-modules: Data.Stream
30 | Data.List.Stream
31 | --Control.Monad.Stream
32 | Extensions: CPP, BangPatterns, ExistentialQuantification, MagicHash, TypeOperators
33 | cpp-options: -DEXTERNAL_PACKAGE
34 | ghc-options: -O2
35 | -fspec-constr
36 | -funbox-strict-fields
37 | -fdicts-cheap
38 | -fmax-simplifier-iterations10
39 | -fliberate-case-threshold100
40 | -Wall
41 | -fno-warn-orphans
42 |
43 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/tests/Examples/ConcatMap.hs:
--------------------------------------------------------------------------------
1 | module M where
2 | import Char
3 |
4 | import Data.List.Stream as L
5 |
6 | foo :: [Char] -> [Char]
7 | foo xs = (L.concatMap (L.replicate 10000)) ( map toUpper xs)
8 |
9 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/tests/Examples/Enum.hs:
--------------------------------------------------------------------------------
1 |
2 | import Data.List.Stream
3 | import Prelude hiding (map,sum,head)
4 | import System.Environment
5 |
6 | main = do
7 | n <- getArgs >>= readIO . head
8 | print (sum (map (+1) [1..(n::Int)])) -- 1 fusion site.
9 | print (sum (map (+1) [1..(10::Int)])) -- 2 fusion site.
10 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/tests/Examples/Sum.hs:
--------------------------------------------------------------------------------
1 | import Data.List
2 |
3 | main = print . sum . map read . lines =<< getContents
4 |
5 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/tests/Examples/SumReplicate.hs:
--------------------------------------------------------------------------------
1 | module FuseTest where
2 |
3 | import Data.List as L
4 |
5 | foo :: Int -> Int
6 | foo n = L.sum (L.replicate n 1)
7 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/tests/FuseTest.hs:
--------------------------------------------------------------------------------
1 | module FuseTest where
2 |
3 | import Data.List.Stream as L
4 |
5 | foo :: Int -> Int
6 | foo n = L.sum (L.replicate n 1)
7 |
--------------------------------------------------------------------------------
/test-project/stream-fusion/tests/Spec/ListExts.hs:
--------------------------------------------------------------------------------
-- Specifications of things in Data.List but not in the H98 List module
2 |
3 |
4 | module Spec.ListExts (
5 | foldl',
6 | foldl1',
7 |
8 | intercalate,
9 |
10 | isInfixOf,
11 | ) where
12 |
13 | import Prelude (Int, Integer, Integral, Num(..), Eq(..), Ord(..), Ordering(..),
14 | Bool(..), (&&), (||), not, Maybe(..), String,
15 | (.), error, seq, otherwise, flip)
16 | import Spec.List
17 |
-- | Strict left fold: forces the accumulator before each recursive step.
-- Fixed: the recursive call was the lazy `foldl` (from Spec.List), which
-- defeated the strictness this function exists to provide.
foldl' :: (a -> b -> a) -> a -> [b] -> a
foldl' f z []     = z
foldl' f z (x:xs) = let z' = f z x in z' `seq` foldl' f z' xs
21 |
22 |
-- | Strict 'foldl1': seeds 'foldl'' with the head; errors on empty input.
foldl1' :: (a -> a -> a) -> [a] -> a
foldl1' f (x:xs) = foldl' f x xs
foldl1' _ [] = error "Prelude.foldl1: empty list"
26 |
27 |
-- | True when the first list appears contiguously anywhere in the second:
-- checked by testing it as a prefix of every tail.
isInfixOf :: Eq a => [a] -> [a] -> Bool
isInfixOf needle haystack = any (isPrefixOf needle) (tails haystack)
30 |
31 |
-- | Insert @xs@ between the lists of @xss@ and flatten the result.
intercalate :: [a] -> [[a]] -> [a]
intercalate xs xss = concat (intersperse xs xss)
34 |
--------------------------------------------------------------------------------
/test-project/test-project.cabal:
--------------------------------------------------------------------------------
1 | cabal-version: 3.6
2 | name: test-project
3 | version: 0.1.0.0
4 |
5 | -- A short (one-line) description of the package.
6 | -- synopsis:
7 |
8 | -- A longer description of the package.
9 | -- description:
10 |
11 | -- A URL where users can report bugs.
12 | -- bug-reports:
13 |
14 | -- The license under which the package is released.
15 | -- license:
16 | author: Hugo
17 | maintainer: hpeters1024@gmail.com
18 |
19 | -- A copyright notice.
20 | -- copyright:
21 | -- category:
22 | extra-source-files: CHANGELOG.md
23 |
24 | executable hs-plugin-test
25 | main-is: Main.hs
26 |
27 | -- Modules included in this executable, other than Main.
28 | other-modules: Channable
29 | , Factorial
30 | , Quicksort
31 | , Peano
32 | , Tree
33 | , InspectionTests
34 | , Text
35 | , Typeclass
36 | , Unlines
37 | , Streaming
38 |
39 | -- LANGUAGE extensions used by modules in this package.
40 | -- other-extensions:
41 | build-depends: base
42 | , hs-comprehension-plugin
43 | -- , text == 1.2.3.2
44 | , text
45 | , bytestring
46 | , stream-fusion
47 | , inspection-testing
48 | , criterion
49 | , template-haskell
50 |
51 | hs-source-dirs: app
52 | default-language: Haskell2010
53 | ghc-options: -O1 -dverbose-core2core
54 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/.gitignore:
--------------------------------------------------------------------------------
1 | /dist/
2 | /dist-boot/
3 | /dist-install/
4 | /dist-newstyle/
5 | /cabal-dev/
6 | /cabal.sandbox.config
7 | /ghc.mk
8 | /GNUmakefile
9 | /.ghc.environment.*
10 | /cabal.project.local
11 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/.hgignore:
--------------------------------------------------------------------------------
1 | ^(?:dist|benchmarks/dist|tests/coverage|tests/dist)$
2 | ^benchmarks/.*\.txt$
3 | ^tests/text-testdata.tar.bz2$
4 | ^tests/(?:\.hpc|bm|qc|qc-hpc|stdio-hpc|text/test)$
5 | \.(?:aux|eventlog|h[ip]|log|[oa]|orig|prof|ps|rej|swp|tix)$
6 | ~$
7 |
8 | syntax: glob
9 | .\#*
10 | cabal-dev
11 | cabal.sandbox.config
12 | \.cabal-sandbox
13 | scripts/CaseFolding.txt
14 | scripts/SpecialCasing.txt
15 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Internal/Builder/Functions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE MagicHash #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Builder.Functions
5 | -- Copyright : (c) 2011 MailRank, Inc.
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : GHC
11 | --
12 | -- /Warning/: this is an internal module, and does not have a stable
13 | -- API or name. Functions in this module may not check or enforce
14 | -- preconditions expected by public modules. Use at your own risk!
15 | --
16 | -- Useful functions and combinators.
17 |
18 | module Data.Text.Internal.Builder.Functions
19 | (
20 | (<>)
21 | , i2d
22 | ) where
23 |
24 | import Data.Monoid (mappend)
25 | import Data.Text.Lazy.Builder (Builder)
26 | import GHC.Base (chr#,ord#,(+#),Int(I#),Char(C#))
27 | import Prelude ()
28 |
29 | -- | Unsafe conversion for decimal digits.
30 | {-# INLINE i2d #-}
31 | i2d :: Int -> Char
32 | i2d (I# i#) = C# (chr# (ord# '0'# +# i#))
33 |
34 | -- | The normal 'mappend' function with right associativity instead of
35 | -- left.
36 | (<>) :: Builder -> Builder -> Builder
37 | (<>) = mappend
38 | {-# INLINE (<>) #-}
39 |
40 | infixr 4 <>
41 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Internal/Builder/Int/Digits.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | -- Module: Data.Text.Internal.Builder.Int.Digits
4 | -- Copyright: (c) 2013 Bryan O'Sullivan
5 | -- License: BSD-style
6 | -- Maintainer: Bryan O'Sullivan
7 | -- Stability: experimental
8 | -- Portability: portable
9 | --
10 | -- /Warning/: this is an internal module, and does not have a stable
11 | -- API or name. Functions in this module may not check or enforce
12 | -- preconditions expected by public modules. Use at your own risk!
13 | --
14 | -- This module exists because the C preprocessor does things that we
15 | -- shall not speak of when confronted with Haskell multiline strings.
16 |
17 | module Data.Text.Internal.Builder.Int.Digits (digits) where
18 |
19 | import Data.ByteString.Char8 (ByteString)
20 |
21 | digits :: ByteString
22 | digits = "0001020304050607080910111213141516171819\
23 | \2021222324252627282930313233343536373839\
24 | \4041424344454647484950515253545556575859\
25 | \6061626364656667686970717273747576777879\
26 | \8081828384858687888990919293949596979899"
27 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Internal/Builder/RealFloat/Functions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE CPP #-}
2 |
3 | -- |
4 | -- Module: Data.Text.Internal.Builder.RealFloat.Functions
5 | -- Copyright: (c) The University of Glasgow 1994-2002
6 | -- License: see libraries/base/LICENSE
7 | --
8 | -- /Warning/: this is an internal module, and does not have a stable
9 | -- API or name. Functions in this module may not check or enforce
10 | -- preconditions expected by public modules. Use at your own risk!
11 |
12 | module Data.Text.Internal.Builder.RealFloat.Functions
13 | (
14 | roundTo
15 | ) where
16 |
17 | roundTo :: Int -> [Int] -> (Int,[Int])
18 |
19 | #if MIN_VERSION_base(4,6,0)
20 |
21 | roundTo d is =
22 | case f d True is of
23 | x@(0,_) -> x
24 | (1,xs) -> (1, 1:xs)
25 | _ -> error "roundTo: bad Value"
26 | where
27 | b2 = base `quot` 2
28 |
29 | f n _ [] = (0, replicate n 0)
30 | f 0 e (x:xs) | x == b2 && e && all (== 0) xs = (0, []) -- Round to even when at exactly half the base
31 | | otherwise = (if x >= b2 then 1 else 0, [])
32 | f n _ (i:xs)
33 | | i' == base = (1,0:ds)
34 | | otherwise = (0,i':ds)
35 | where
36 | (c,ds) = f (n-1) (even i) xs
37 | i' = c + i
38 | base = 10
39 |
40 | #else
41 |
42 | roundTo d is =
43 | case f d is of
44 | x@(0,_) -> x
45 | (1,xs) -> (1, 1:xs)
46 | _ -> error "roundTo: bad Value"
47 | where
48 | f n [] = (0, replicate n 0)
49 | f 0 (x:_) = (if x >= 5 then 1 else 0, [])
50 | f n (i:xs)
51 | | i' == 10 = (1,0:ds)
52 | | otherwise = (0,i':ds)
53 | where
54 | (c,ds) = f (n-1) xs
55 | i' = c + i
56 |
57 | #endif
58 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Internal/Encoding/Utf16.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE MagicHash, BangPatterns #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Encoding.Utf16
5 | -- Copyright : (c) 2008, 2009 Tom Harper,
6 | -- (c) 2009 Bryan O'Sullivan,
7 | -- (c) 2009 Duncan Coutts
8 | --
9 | -- License : BSD-style
10 | -- Maintainer : bos@serpentine.com
11 | -- Stability : experimental
12 | -- Portability : GHC
13 | --
14 | -- /Warning/: this is an internal module, and does not have a stable
15 | -- API or name. Functions in this module may not check or enforce
16 | -- preconditions expected by public modules. Use at your own risk!
17 | --
18 | -- Basic UTF-16 validation and character manipulation.
19 | module Data.Text.Internal.Encoding.Utf16
20 | (
21 | chr2
22 | , validate1
23 | , validate2
24 | ) where
25 |
26 | import GHC.Exts
27 | import GHC.Word (Word16(..))
28 |
29 | chr2 :: Word16 -> Word16 -> Char
30 | chr2 (W16# a#) (W16# b#) = C# (chr# (upper# +# lower# +# 0x10000#))
31 | where
32 | !x# = word2Int# a#
33 | !y# = word2Int# b#
34 | !upper# = uncheckedIShiftL# (x# -# 0xD800#) 10#
35 | !lower# = y# -# 0xDC00#
36 | {-# INLINE chr2 #-}
37 |
38 | validate1 :: Word16 -> Bool
39 | validate1 x1 = x1 < 0xD800 || x1 > 0xDFFF
40 | {-# INLINE validate1 #-}
41 |
42 | validate2 :: Word16 -> Word16 -> Bool
43 | validate2 x1 x2 = x1 >= 0xD800 && x1 <= 0xDBFF &&
44 | x2 >= 0xDC00 && x2 <= 0xDFFF
45 | {-# INLINE validate2 #-}
46 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Internal/Encoding/Utf32.hs:
--------------------------------------------------------------------------------
1 | -- |
2 | -- Module : Data.Text.Internal.Encoding.Utf32
3 | -- Copyright : (c) 2008, 2009 Tom Harper,
4 | -- (c) 2009, 2010 Bryan O'Sullivan,
5 | -- (c) 2009 Duncan Coutts
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : portable
11 | --
12 | -- /Warning/: this is an internal module, and does not have a stable
13 | -- API or name. Functions in this module may not check or enforce
14 | -- preconditions expected by public modules. Use at your own risk!
15 | --
16 | -- Basic UTF-32 validation.
17 | module Data.Text.Internal.Encoding.Utf32
18 | (
19 | validate
20 | ) where
21 |
22 | import Data.Word (Word32)
23 |
24 | validate :: Word32 -> Bool
25 | validate x1 = x1 < 0xD800 || (x1 > 0xDFFF && x1 <= 0x10FFFF)
26 | {-# INLINE validate #-}
27 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Internal/Functions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE CPP, DeriveDataTypeable #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Functions
5 | -- Copyright : 2010 Bryan O'Sullivan
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : GHC
11 | --
12 | -- /Warning/: this is an internal module, and does not have a stable
13 | -- API or name. Functions in this module may not check or enforce
14 | -- preconditions expected by public modules. Use at your own risk!
15 | --
16 | -- Useful functions.
17 |
18 | module Data.Text.Internal.Functions
19 | (
20 | intersperse
21 | ) where
22 |
23 | -- | A lazier version of Data.List.intersperse. The other version
24 | -- causes space leaks!
25 | intersperse :: a -> [a] -> [a]
26 | intersperse _ [] = []
27 | intersperse sep (x:xs) = x : go xs
28 | where
29 | go [] = []
30 | go (y:ys) = sep : y: go ys
31 | {-# INLINE intersperse #-}
32 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Internal/Private.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, Rank2Types, UnboxedTuples #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Private
5 | -- Copyright : (c) 2011 Bryan O'Sullivan
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : GHC
11 |
12 | module Data.Text.Internal.Private
13 | (
14 | runText
15 | , span_
16 | ) where
17 |
18 | import Control.Monad.ST (ST, runST)
19 | import Data.Text.Internal (Text(..), text)
20 | import Data.Text.Unsafe (Iter(..), iter)
21 | import qualified Data.Text.Array as A
22 |
23 | span_ :: (Char -> Bool) -> Text -> (# Text, Text #)
24 | span_ p t@(Text arr off len) = (# hd,tl #)
25 | where hd = text arr off k
26 | tl = text arr (off+k) (len-k)
27 | !k = loop 0
28 | loop !i | i < len && p c = loop (i+d)
29 | | otherwise = i
30 | where Iter c d = iter t i
31 | {-# INLINE span_ #-}
32 |
33 | runText :: (forall s. (A.MArray s -> Int -> ST s Text) -> ST s Text) -> Text
34 | runText act = runST (act $ \ !marr !len -> do
35 | arr <- A.unsafeFreeze marr
36 | return $! text arr 0 len)
37 | {-# INLINE runText #-}
38 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Lazy/Builder.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, CPP, Rank2Types #-}
2 | #if __GLASGOW_HASKELL__ >= 702
3 | {-# LANGUAGE Trustworthy #-}
4 | #endif
5 |
6 | -----------------------------------------------------------------------------
7 | -- |
8 | -- Module : Data.Text.Lazy.Builder
9 | -- Copyright : (c) 2013 Bryan O'Sullivan
10 | -- (c) 2010 Johan Tibell
11 | -- License : BSD-style (see LICENSE)
12 | --
13 | -- Maintainer : Johan Tibell
14 | -- Portability : portable to Hugs and GHC
15 | --
16 | -- Efficient construction of lazy @Text@ values. The principal
17 | -- operations on a @Builder@ are @singleton@, @fromText@, and
18 | -- @fromLazyText@, which construct new builders, and 'mappend', which
19 | -- concatenates two builders.
20 | --
21 | -- To get maximum performance when building lazy @Text@ values using a
22 | -- builder, associate @mappend@ calls to the right. For example,
23 | -- prefer
24 | --
25 | -- > singleton 'a' `mappend` (singleton 'b' `mappend` singleton 'c')
26 | --
27 | -- to
28 | --
29 | -- > singleton 'a' `mappend` singleton 'b' `mappend` singleton 'c'
30 | --
31 | -- as the latter associates @mappend@ to the left. Or, equivalently,
32 | -- prefer
33 | --
34 | -- > singleton 'a' <> singleton 'b' <> singleton 'c'
35 | --
36 | -- since the '<>' from recent versions of 'Data.Monoid' associates
37 | -- to the right.
38 |
39 | -----------------------------------------------------------------------------
40 |
41 | module Data.Text.Lazy.Builder
42 | ( -- * The Builder type
43 | Builder
44 | , toLazyText
45 | , toLazyTextWith
46 |
47 | -- * Constructing Builders
48 | , singleton
49 | , fromText
50 | , fromLazyText
51 | , fromString
52 |
53 | -- * Flushing the buffer state
54 | , flush
55 | ) where
56 |
57 | import Data.Text.Internal.Builder
58 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Data/Text/Lazy/Internal.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, DeriveDataTypeable #-}
2 | -- |
3 | -- Module : Data.Text.Lazy.Internal
4 | -- Copyright : (c) 2013 Bryan O'Sullivan
5 | --
6 | -- License : BSD-style
7 | -- Maintainer : bos@serpentine.com
8 | -- Stability : experimental
9 | -- Portability : GHC
10 | --
11 | -- This module has been renamed to 'Data.Text.Internal.Lazy'. This
12 | -- name for the module will be removed in the next major release.
13 |
14 | module Data.Text.Lazy.Internal
15 | {-# DEPRECATED "Use Data.Text.Internal.Lazy instead" #-}
16 | (
17 | module Data.Text.Internal.Lazy
18 | ) where
19 |
20 | import Data.Text.Internal.Lazy
21 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2008-2009, Tom Harper
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions
6 | are met:
7 |
8 | * Redistributions of source code must retain the above copyright
9 | notice, this list of conditions and the following disclaimer.
10 |
11 | * Redistributions in binary form must reproduce the above
12 | copyright notice, this list of conditions and the following
13 | disclaimer in the documentation and/or other materials provided
14 | with the distribution.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/README.markdown:
--------------------------------------------------------------------------------
1 | # `text`: Fast, packed Unicode strings, using stream fusion
2 |
3 | This package provides the Data.Text library, a library for the space-
4 | and time-efficient manipulation of Unicode text in Haskell.
5 |
6 | **Please refer to the [package description on Hackage](https://hackage.haskell.org/package/text#description) for more information.**
7 |
8 | # Get involved!
9 |
10 | Please report bugs via the
11 | [github issue tracker](https://github.com/haskell/text/issues).
12 |
13 | Master [git repository](https://github.com/haskell/text):
14 |
15 | * `git clone git://github.com/haskell/text.git`
16 |
17 | There's also a [Mercurial mirror](https://bitbucket.org/bos/text):
18 |
19 | * `hg clone https://bitbucket.org/bos/text`
20 |
21 | (You can create and contribute changes using either Mercurial or git.)
22 |
23 |
24 | # Authors
25 |
26 | The base code for this library was originally written by Tom Harper,
27 | based on the stream fusion framework developed by Roman Leshchinskiy,
28 | Duncan Coutts, and Don Stewart.
29 |
30 | The core library was fleshed out, debugged, and tested by Bryan
31 | O'Sullivan <bos@serpentine.com>, and he is the current maintainer.
32 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/Setup.lhs:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env runhaskell
2 | > import Distribution.Simple
3 | > main = defaultMain
4 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/.gitignore:
--------------------------------------------------------------------------------
1 | /dist/
2 | /dist-newstyle/
3 | /.ghc.environment.*
4 | /cabal.project.local
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/Setup.hs:
--------------------------------------------------------------------------------
1 | import Distribution.Simple
2 | main = defaultMain
3 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/cabal.project:
--------------------------------------------------------------------------------
1 | -- NB: we use a separate project
2 | packages: .
3 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/cbits/time_iconv.c:
--------------------------------------------------------------------------------
1 | #include <iconv.h>
2 | #include <stddef.h>
3 | #include <stdint.h>
4 | #include <stdlib.h>
5 |
6 | int time_iconv(char *srcbuf, size_t srcbufsize)
7 | {
8 | uint16_t *destbuf = NULL;
9 | size_t destbufsize;
10 | static uint16_t *origdestbuf;
11 | static size_t origdestbufsize;
12 | iconv_t ic = (iconv_t) -1;
13 | int ret = 0;
14 |
15 | if (ic == (iconv_t) -1) {
16 | ic = iconv_open("UTF-16LE", "UTF-8");
17 | if (ic == (iconv_t) -1) {
18 | ret = -1;
19 | goto done;
20 | }
21 | }
22 |
23 | destbufsize = srcbufsize * sizeof(uint16_t);
24 | if (destbufsize > origdestbufsize) {
25 | free(origdestbuf);
26 | origdestbuf = destbuf = malloc(origdestbufsize = destbufsize);
27 | } else {
28 | destbuf = origdestbuf;
29 | }
30 |
31 | iconv(ic, &srcbuf, &srcbufsize, (char**) &destbuf, &destbufsize);
32 |
33 | done:
34 | return ret;
35 | }
36 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/Concat.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | module Benchmarks.Concat (benchmark) where
4 |
5 | import Control.Monad.Trans.Writer
6 | import Criterion (Benchmark, bgroup, bench, whnf)
7 | import Data.Text as T
8 |
9 | benchmark :: IO Benchmark
10 | benchmark = return $ bgroup "Concat"
11 | [ bench "append" $ whnf (append4 "Text 1" "Text 2" "Text 3") "Text 4"
12 | , bench "concat" $ whnf (concat4 "Text 1" "Text 2" "Text 3") "Text 4"
13 | , bench "write" $ whnf (write4 "Text 1" "Text 2" "Text 3") "Text 4"
14 | ]
15 |
16 | append4, concat4, write4 :: Text -> Text -> Text -> Text -> Text
17 |
18 | {-# NOINLINE append4 #-}
19 | append4 x1 x2 x3 x4 = x1 `append` x2 `append` x3 `append` x4
20 |
21 | {-# NOINLINE concat4 #-}
22 | concat4 x1 x2 x3 x4 = T.concat [x1, x2, x3, x4]
23 |
24 | {-# NOINLINE write4 #-}
25 | write4 x1 x2 x3 x4 = execWriter $ tell x1 >> tell x2 >> tell x3 >> tell x4
26 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/EncodeUtf8.hs:
--------------------------------------------------------------------------------
1 | -- | UTF-8 encode a text
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Replicating a string a number of times
6 | --
7 | -- * UTF-8 encoding it
8 | --
9 | module Benchmarks.EncodeUtf8
10 | ( benchmark
11 | ) where
12 |
13 | import Criterion (Benchmark, bgroup, bench, whnf)
14 | import qualified Data.ByteString as B
15 | import qualified Data.ByteString.Lazy as BL
16 | import qualified Data.Text as T
17 | import qualified Data.Text.Encoding as T
18 | import qualified Data.Text.Lazy as TL
19 | import qualified Data.Text.Lazy.Encoding as TL
20 |
21 | benchmark :: String -> IO Benchmark
22 | benchmark string = do
23 | return $ bgroup "EncodeUtf8"
24 | [ bench "Text" $ whnf (B.length . T.encodeUtf8) text
25 | , bench "LazyText" $ whnf (BL.length . TL.encodeUtf8) lazyText
26 | ]
27 | where
28 | -- The string in different formats
29 | text = T.replicate k $ T.pack string
30 | lazyText = TL.replicate (fromIntegral k) $ TL.pack string
31 |
32 | -- Amount
33 | k = 100000
34 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/Equality.hs:
--------------------------------------------------------------------------------
1 | -- | Compare a string with a copy of itself that is identical except
2 | -- for the last character.
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Comparison of strings (Eq instance)
7 | --
8 | module Benchmarks.Equality
9 | ( benchmark
10 | ) where
11 |
12 | import Criterion (Benchmark, bgroup, bench, whnf)
13 | import qualified Data.ByteString.Char8 as B
14 | import qualified Data.ByteString.Lazy.Char8 as BL
15 | import qualified Data.Text as T
16 | import qualified Data.Text.Encoding as T
17 | import qualified Data.Text.Lazy as TL
18 | import qualified Data.Text.Lazy.Encoding as TL
19 |
20 | benchmark :: FilePath -> IO Benchmark
21 | benchmark fp = do
22 | b <- B.readFile fp
23 | bl1 <- BL.readFile fp
24 | -- A lazy bytestring is a list of chunks. When we do not explicitly create two
25 | -- different lazy bytestrings at a different address, the bytestring library
26 | -- will compare the chunk addresses instead of the chunk contents. This is why
27 | -- we read the lazy bytestring twice here.
28 | bl2 <- BL.readFile fp
29 | l <- readFile fp
30 | let t = T.decodeUtf8 b
31 | tl = TL.decodeUtf8 bl1
32 | return $ bgroup "Equality"
33 | [ bench "Text" $ whnf (== T.init t `T.snoc` '\xfffd') t
34 | , bench "LazyText" $ whnf (== TL.init tl `TL.snoc` '\xfffd') tl
35 | , bench "ByteString" $ whnf (== B.init b `B.snoc` '\xfffd') b
36 | , bench "LazyByteString" $ whnf (== BL.init bl2 `BL.snoc` '\xfffd') bl1
37 | , bench "String" $ whnf (== init l ++ "\xfffd") l
38 | ]
39 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/FileRead.hs:
--------------------------------------------------------------------------------
1 | -- | Benchmarks simple file reading
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Reading a file from the disk
6 | --
7 | module Benchmarks.FileRead
8 | ( benchmark
9 | ) where
10 |
11 | import Control.Applicative ((<$>))
12 | import Criterion (Benchmark, bgroup, bench, whnfIO)
13 | import qualified Data.ByteString as SB
14 | import qualified Data.ByteString.Lazy as LB
15 | import qualified Data.Text as T
16 | import qualified Data.Text.Encoding as T
17 | import qualified Data.Text.IO as T
18 | import qualified Data.Text.Lazy as LT
19 | import qualified Data.Text.Lazy.Encoding as LT
20 | import qualified Data.Text.Lazy.IO as LT
21 |
22 | benchmark :: FilePath -> IO Benchmark
23 | benchmark p = return $ bgroup "FileRead"
24 | [ bench "String" $ whnfIO $ length <$> readFile p
25 | , bench "ByteString" $ whnfIO $ SB.length <$> SB.readFile p
26 | , bench "LazyByteString" $ whnfIO $ LB.length <$> LB.readFile p
27 | , bench "Text" $ whnfIO $ T.length <$> T.readFile p
28 | , bench "LazyText" $ whnfIO $ LT.length <$> LT.readFile p
29 | , bench "TextByteString" $ whnfIO $
30 | (T.length . T.decodeUtf8) <$> SB.readFile p
31 | , bench "LazyTextByteString" $ whnfIO $
32 | (LT.length . LT.decodeUtf8) <$> LB.readFile p
33 | ]
34 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/FoldLines.hs:
--------------------------------------------------------------------------------
1 | -- | Read a file line-by-line using handles, and perform a fold over the lines.
2 | -- The fold is used here to calculate the number of lines in the file.
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Buffered, line-based IO
7 | --
8 | {-# LANGUAGE BangPatterns #-}
9 | module Benchmarks.FoldLines
10 | ( benchmark
11 | ) where
12 |
13 | import Criterion (Benchmark, bgroup, bench, whnfIO)
14 | import System.IO
15 | import qualified Data.ByteString as B
16 | import qualified Data.Text as T
17 | import qualified Data.Text.IO as T
18 |
19 | benchmark :: FilePath -> IO Benchmark
20 | benchmark fp = return $ bgroup "ReadLines"
21 | [ bench "Text" $ withHandle $ foldLinesT (\n _ -> n + 1) (0 :: Int)
22 | , bench "ByteString" $ withHandle $ foldLinesB (\n _ -> n + 1) (0 :: Int)
23 | ]
24 | where
25 | withHandle f = whnfIO $ do
26 | h <- openFile fp ReadMode
27 | hSetBuffering h (BlockBuffering (Just 16384))
28 | x <- f h
29 | hClose h
30 | return x
31 |
32 | -- | Text line fold
33 | --
34 | foldLinesT :: (a -> T.Text -> a) -> a -> Handle -> IO a
35 | foldLinesT f z0 h = go z0
36 | where
37 | go !z = do
38 | eof <- hIsEOF h
39 | if eof
40 | then return z
41 | else do
42 | l <- T.hGetLine h
43 | let z' = f z l in go z'
44 | {-# INLINE foldLinesT #-}
45 |
46 | -- | ByteString line fold
47 | --
48 | foldLinesB :: (a -> B.ByteString -> a) -> a -> Handle -> IO a
49 | foldLinesB f z0 h = go z0
50 | where
51 | go !z = do
52 | eof <- hIsEOF h
53 | if eof
54 | then return z
55 | else do
56 | l <- B.hGetLine h
57 | let z' = f z l in go z'
58 | {-# INLINE foldLinesB #-}
59 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/Programs/BigTable.hs:
--------------------------------------------------------------------------------
1 | -- | Create a large HTML table and dump it to a handle
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Creating a large HTML document using a builder
6 | --
7 | -- * Writing to a handle
8 | --
9 | {-# LANGUAGE OverloadedStrings #-}
10 | module Benchmarks.Programs.BigTable
11 | ( benchmark
12 | ) where
13 |
14 | import Criterion (Benchmark, bench, whnfIO)
15 | import Data.Monoid (mappend, mconcat)
16 | import Data.Text.Lazy.Builder (Builder, fromText, toLazyText)
17 | import Data.Text.Lazy.IO (hPutStr)
18 | import System.IO (Handle)
19 | import qualified Data.Text as T
20 |
21 | benchmark :: Handle -> IO Benchmark
22 | benchmark sink = return $ bench "BigTable" $ whnfIO $ do
23 |     hPutStr sink "Content-Type: text/html\n\n<table>"
24 | hPutStr sink . toLazyText . makeTable =<< rows
25 |     hPutStr sink "</table>"
26 | where
27 | -- We provide the number of rows in IO so the builder value isn't shared
28 | -- between the benchmark samples.
29 | rows :: IO Int
30 | rows = return 20000
31 | {-# NOINLINE rows #-}
32 |
33 | makeTable :: Int -> Builder
34 | makeTable n = mconcat $ replicate n $ mconcat $ map makeCol [1 .. 50]
35 |
36 | makeCol :: Int -> Builder
37 | makeCol 1 = fromText "<tr><td>1</td>"
38 | makeCol 50 = fromText "<td>50</td></tr>"
39 | makeCol i = fromText "<td>" `mappend` (fromInt i `mappend` fromText "</td>")
40 |
41 | fromInt :: Int -> Builder
42 | fromInt = fromText . T.pack . show
43 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/Programs/StripTags.hs:
--------------------------------------------------------------------------------
1 | -- | Program to replace HTML tags by whitespace
2 | --
3 | -- This program was originally contributed by Petr Prokhorenkov.
4 | --
5 | -- Tested in this benchmark:
6 | --
7 | -- * Reading the file
8 | --
9 | -- * Replacing text between HTML tags (<>) with whitespace
10 | --
11 | -- * Writing back to a handle
12 | --
13 | {-# OPTIONS_GHC -fspec-constr-count=5 #-}
14 | module Benchmarks.Programs.StripTags
15 | ( benchmark
16 | ) where
17 |
18 | import Criterion (Benchmark, bgroup, bench, whnfIO)
19 | import Data.List (mapAccumL)
20 | import System.IO (Handle, hPutStr)
21 | import qualified Data.ByteString as B
22 | import qualified Data.ByteString.Char8 as BC
23 | import qualified Data.Text as T
24 | import qualified Data.Text.Encoding as T
25 | import qualified Data.Text.IO as T
26 |
27 | benchmark :: FilePath -> Handle -> IO Benchmark
28 | benchmark i o = return $ bgroup "StripTags"
29 | [ bench "String" $ whnfIO $ readFile i >>= hPutStr o . string
30 | , bench "ByteString" $ whnfIO $ B.readFile i >>= B.hPutStr o . byteString
31 | , bench "Text" $ whnfIO $ T.readFile i >>= T.hPutStr o . text
32 | , bench "TextByteString" $ whnfIO $
33 | B.readFile i >>= B.hPutStr o . T.encodeUtf8 . text . T.decodeUtf8
34 | ]
35 |
36 | string :: String -> String
37 | string = snd . mapAccumL step 0
38 |
39 | text :: T.Text -> T.Text
40 | text = snd . T.mapAccumL step 0
41 |
42 | byteString :: B.ByteString -> B.ByteString
43 | byteString = snd . BC.mapAccumL step 0
44 |
45 | step :: Int -> Char -> (Int, Char)
46 | step d c
47 | | d > 0 || d' > 0 = (d', ' ')
48 | | otherwise = (d', c)
49 | where
50 | d' = d + depth c
51 | depth '>' = 1
52 | depth '<' = -1
53 | depth _ = 0
54 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/Programs/Throughput.hs:
--------------------------------------------------------------------------------
1 | -- | This benchmark simply reads and writes a file using the various string
2 | -- libraries. The point of it is that we can make better estimations on how
3 | -- much time the other benchmarks spend doing IO.
4 | --
5 | -- Note that we expect ByteStrings to be a whole lot faster, since they do not
6 | -- do any actual encoding/decoding here, while String and Text do have UTF-8
7 | -- encoding/decoding.
8 | --
9 | -- Tested in this benchmark:
10 | --
11 | -- * Reading the file
12 | --
13 | -- * Replacing text between HTML tags (<>) with whitespace
14 | --
15 | -- * Writing back to a handle
16 | --
17 | module Benchmarks.Programs.Throughput
18 | ( benchmark
19 | ) where
20 |
21 | import Criterion (Benchmark, bgroup, bench, whnfIO)
22 | import System.IO (Handle, hPutStr)
23 | import qualified Data.ByteString as B
24 | import qualified Data.ByteString.Lazy as BL
25 | import qualified Data.Text.Encoding as T
26 | import qualified Data.Text.IO as T
27 | import qualified Data.Text.Lazy.Encoding as TL
28 | import qualified Data.Text.Lazy.IO as TL
29 |
30 | benchmark :: FilePath -> Handle -> IO Benchmark
31 | benchmark fp sink = return $ bgroup "Throughput"
32 | [ bench "String" $ whnfIO $ readFile fp >>= hPutStr sink
33 | , bench "ByteString" $ whnfIO $ B.readFile fp >>= B.hPutStr sink
34 | , bench "LazyByteString" $ whnfIO $ BL.readFile fp >>= BL.hPutStr sink
35 | , bench "Text" $ whnfIO $ T.readFile fp >>= T.hPutStr sink
36 | , bench "LazyText" $ whnfIO $ TL.readFile fp >>= TL.hPutStr sink
37 | , bench "TextByteString" $ whnfIO $
38 | B.readFile fp >>= B.hPutStr sink . T.encodeUtf8 . T.decodeUtf8
39 | , bench "LazyTextByteString" $ whnfIO $
40 | BL.readFile fp >>= BL.hPutStr sink . TL.encodeUtf8 . TL.decodeUtf8
41 | ]
42 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/Replace.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns #-}
2 | -- | Replace a string by another string
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Search and replace of a pattern in a text
7 | --
8 | module Benchmarks.Replace
9 | ( benchmark
10 | ) where
11 |
12 | import Criterion (Benchmark, bgroup, bench, nf)
13 | import qualified Data.ByteString.Char8 as B
14 | import qualified Data.ByteString.Lazy as BL
15 | import qualified Data.ByteString.Lazy.Search as BL
16 | import qualified Data.ByteString.Search as B
17 | import qualified Data.Text as T
18 | import qualified Data.Text.Encoding as T
19 | import qualified Data.Text.Lazy as TL
20 | import qualified Data.Text.Lazy.Encoding as TL
21 | import qualified Data.Text.Lazy.IO as TL
22 |
23 | benchmark :: FilePath -> String -> String -> IO Benchmark
24 | benchmark fp pat sub = do
25 | tl <- TL.readFile fp
26 | bl <- BL.readFile fp
27 | let !t = TL.toStrict tl
28 | !b = T.encodeUtf8 t
29 | return $ bgroup "Replace" [
30 | bench "Text" $ nf (T.length . T.replace tpat tsub) t
31 | , bench "ByteString" $ nf (BL.length . B.replace bpat bsub) b
32 | , bench "LazyText" $ nf (TL.length . TL.replace tlpat tlsub) tl
33 | , bench "LazyByteString" $ nf (BL.length . BL.replace blpat blsub) bl
34 | ]
35 | where
36 | tpat = T.pack pat
37 | tsub = T.pack sub
38 | tlpat = TL.pack pat
39 | tlsub = TL.pack sub
40 | bpat = T.encodeUtf8 tpat
41 | bsub = T.encodeUtf8 tsub
42 | blpat = B.concat $ BL.toChunks $ TL.encodeUtf8 tlpat
43 | blsub = B.concat $ BL.toChunks $ TL.encodeUtf8 tlsub
44 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/Search.hs:
--------------------------------------------------------------------------------
1 | -- | Search for a pattern in a file, find the number of occurences
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Searching all occurences of a pattern using library routines
6 | --
7 | module Benchmarks.Search
8 | ( benchmark
9 | ) where
10 |
11 | import Criterion (Benchmark, bench, bgroup, whnf)
12 | import qualified Data.ByteString as B
13 | import qualified Data.ByteString.Lazy as BL
14 | import qualified Data.ByteString.Lazy.Search as BL
15 | import qualified Data.ByteString.Search as B
16 | import qualified Data.Text as T
17 | import qualified Data.Text.Encoding as T
18 | import qualified Data.Text.IO as T
19 | import qualified Data.Text.Lazy as TL
20 | import qualified Data.Text.Lazy.IO as TL
21 |
22 | benchmark :: FilePath -> T.Text -> IO Benchmark
23 | benchmark fp needleT = do
24 | b <- B.readFile fp
25 | bl <- BL.readFile fp
26 | t <- T.readFile fp
27 | tl <- TL.readFile fp
28 | return $ bgroup "FileIndices"
29 | [ bench "ByteString" $ whnf (byteString needleB) b
30 | , bench "LazyByteString" $ whnf (lazyByteString needleB) bl
31 | , bench "Text" $ whnf (text needleT) t
32 | , bench "LazyText" $ whnf (lazyText needleTL) tl
33 | ]
34 | where
35 | needleB = T.encodeUtf8 needleT
36 | needleTL = TL.fromChunks [needleT]
37 |
38 | byteString :: B.ByteString -> B.ByteString -> Int
39 | byteString needle = length . B.indices needle
40 |
41 | lazyByteString :: B.ByteString -> BL.ByteString -> Int
42 | lazyByteString needle = length . BL.indices needle
43 |
44 | text :: T.Text -> T.Text -> Int
45 | text = T.count
46 |
47 | lazyText :: TL.Text -> TL.Text -> Int
48 | lazyText needle = fromIntegral . TL.count needle
49 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Benchmarks/WordFrequencies.hs:
--------------------------------------------------------------------------------
1 | -- | A word frequency count using the different string types
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Splitting into words
6 | --
7 | -- * Converting to lowercase
8 | --
9 | -- * Comparing: Eq/Ord instances
10 | --
11 | module Benchmarks.WordFrequencies
12 | ( benchmark
13 | ) where
14 |
15 | import Criterion (Benchmark, bench, bgroup, whnf)
16 | import Data.Char (toLower)
17 | import Data.List (foldl')
18 | import Data.Map (Map)
19 | import qualified Data.ByteString.Char8 as B
20 | import qualified Data.Map as M
21 | import qualified Data.Text as T
22 | import qualified Data.Text.IO as T
23 |
24 | benchmark :: FilePath -> IO Benchmark
25 | benchmark fp = do
26 | s <- readFile fp
27 | b <- B.readFile fp
28 | t <- T.readFile fp
29 | return $ bgroup "WordFrequencies"
30 | [ bench "String" $ whnf (frequencies . words . map toLower) s
31 | , bench "ByteString" $ whnf (frequencies . B.words . B.map toLower) b
32 | , bench "Text" $ whnf (frequencies . T.words . T.toLower) t
33 | ]
34 |
35 | frequencies :: Ord a => [a] -> Map a Int
36 | frequencies = foldl' (\m k -> M.insertWith (+) k 1 m) M.empty
37 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Multilang.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, OverloadedStrings, RankNTypes #-}
2 |
3 | module Main (
4 | main
5 | ) where
6 |
7 | import Control.Monad (forM_)
8 | import qualified Data.ByteString as B
9 | import qualified Data.Text as Text
10 | import Data.Text.Encoding (decodeUtf8)
11 | import Data.Text (Text)
12 | import System.IO (hFlush, stdout)
13 | import Timer (timer)
14 |
15 | type BM = Text -> ()
16 |
17 | bm :: forall a. (Text -> a) -> BM
18 | bm f t = f t `seq` ()
19 |
20 | benchmarks :: [(String, Text.Text -> ())]
21 | benchmarks = [
22 | ("find_first", bm $ Text.isInfixOf "en:Benin")
23 | , ("find_index", bm $ Text.findIndex (=='c'))
24 | ]
25 |
26 | main :: IO ()
27 | main = do
28 | !contents <- decodeUtf8 `fmap` B.readFile "../tests/text-test-data/yiwiki.xml"
29 | forM_ benchmarks $ \(name, bmark) -> do
30 | putStr $ name ++ " "
31 | hFlush stdout
32 | putStrLn =<< (timer 100 contents bmark)
33 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/haskell/Timer.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns #-}
2 |
3 | module Timer (timer) where
4 |
5 | import Control.Exception (evaluate)
6 | import Data.Time.Clock.POSIX (getPOSIXTime)
7 | import GHC.Float (FFFormat(..), formatRealFloat)
8 |
9 | ickyRound :: Int -> Double -> String
10 | ickyRound k = formatRealFloat FFFixed (Just k)
11 |
12 | timer :: Int -> a -> (a -> b) -> IO String
13 | timer count a0 f = do
14 | let loop !k !fastest
15 | | k <= 0 = return fastest
16 | | otherwise = do
17 | start <- getPOSIXTime
18 | let inner a i
19 | | i <= 0 = return ()
20 | | otherwise = evaluate (f a) >> inner a (i-1)
21 | inner a0 count
22 | end <- getPOSIXTime
23 | let elapsed = end - start
24 | loop (k-1) (min fastest (elapsed / fromIntegral count))
25 | t <- loop (3::Int) 1e300
26 | let log10 x = log x / log 10
27 | ft = realToFrac t
28 | prec = round (log10 (fromIntegral count) - log10 ft)
29 | return $! ickyRound prec ft
30 | {-# NOINLINE timer #-}
31 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/python/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.pyc
3 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/python/cut.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import utils, sys, codecs
4 |
5 | def cut(filename, l, r):
6 | content = open(filename, encoding='utf-8')
7 | for line in content:
8 | print(line[l:r])
9 |
10 | for f in sys.argv[1:]:
11 | t = utils.benchmark(lambda: cut(f, 20, 40))
12 | sys.stderr.write('{0}: {1}\n'.format(f, t))
13 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/python/multilang.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import math
4 | import sys
5 | import time
6 |
7 | def find_first():
8 | cf = contents.find
9 | return timer(lambda: cf("en:Benin"))
10 |
11 | def timer(f, count=100):
12 | a = 1e300
13 | def g():
14 | return
15 | for i in xrange(3):
16 | start = time.time()
17 | for j in xrange(count):
18 | g()
19 | a = min(a, (time.time() - start) / count)
20 |
21 | b = 1e300
22 | for i in xrange(3):
23 | start = time.time()
24 | for j in xrange(count):
25 | f()
26 | b = min(b, (time.time() - start) / count)
27 |
28 | return round(b - a, int(round(math.log(count, 10) - math.log(b - a, 10))))
29 |
30 | contents = open('../../tests/text-test-data/yiwiki.xml', 'r').read()
31 | contents = contents.decode('utf-8')
32 |
33 | benchmarks = (
34 | find_first,
35 | )
36 |
37 | to_run = sys.argv[1:]
38 | bms = []
39 | if to_run:
40 | for r in to_run:
41 | for b in benchmarks:
42 | if b.__name__.startswith(r):
43 | bms.append(b)
44 | else:
45 | bms = benchmarks
46 |
47 | for b in bms:
48 | sys.stdout.write(b.__name__ + ' ')
49 | sys.stdout.flush()
50 | print b()
51 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/python/sort.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import utils, sys, codecs
4 |
5 | def sort(filename):
6 | content = open(filename, encoding='utf-8').read()
7 | lines = content.splitlines()
8 | lines.sort()
9 | print('\n'.join(lines))
10 |
11 | for f in sys.argv[1:]:
12 | t = utils.benchmark(lambda: sort(f))
13 | sys.stderr.write('{0}: {1}\n'.format(f, t))
14 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/python/strip_tags.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import utils, sys
4 |
5 | def strip_tags(filename):
6 | string = open(filename, encoding='utf-8').read()
7 |
8 | d = 0
9 | out = []
10 |
11 | for c in string:
12 | if c == '<': d += 1
13 |
14 | if d > 0:
15 | out += ' '
16 | else:
17 | out += c
18 |
19 | if c == '>': d -= 1
20 |
21 | print(''.join(out))
22 |
23 | for f in sys.argv[1:]:
24 | t = utils.benchmark(lambda: strip_tags(f))
25 | sys.stderr.write('{0}: {1}\n'.format(f, t))
26 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/python/utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import sys, time
4 |
5 | def benchmark_once(f):
6 | start = time.time()
7 | f()
8 | end = time.time()
9 | return end - start
10 |
11 | def benchmark(f):
12 | runs = 100
13 | total = 0.0
14 | for i in range(runs):
15 | result = benchmark_once(f)
16 | sys.stderr.write('Run {0}: {1}\n'.format(i, result))
17 | total += result
18 | return total / runs
19 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/ruby/cut.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require './utils.rb'
4 |
5 | def cut(filename, l, r)
6 | File.open(filename, 'r:utf-8') do |file|
7 | file.each_line do |line|
8 | puts line[l, r - l]
9 | end
10 | end
11 | end
12 |
13 | ARGV.each do |f|
14 | t = benchmark { cut(f, 20, 40) }
15 | STDERR.puts "#{f}: #{t}"
16 | end
17 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/ruby/fold.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require './utils.rb'
4 |
5 | def fold(filename, max_width)
6 | File.open(filename, 'r:utf-8') do |file|
7 | # Words in this paragraph
8 | paragraph = []
9 |
10 | file.each_line do |line|
11 | # If we encounter an empty line, we reformat and dump the current
12 | # paragraph
13 | if line.strip.empty?
14 | puts fold_paragraph(paragraph, max_width)
15 | puts
16 | paragraph = []
17 | # Otherwise, we append the words found in the line to the paragraph
18 | else
19 | paragraph.concat line.split
20 | end
21 | end
22 |
23 | # Last paragraph
24 | puts fold_paragraph(paragraph, max_width) unless paragraph.empty?
25 | end
26 | end
27 |
28 | # Fold a single paragraph to the desired width
29 | def fold_paragraph(paragraph, max_width)
30 | # Gradually build our output
31 | str, *rest = paragraph
32 | width = str.length
33 |
34 | rest.each do |word|
35 | if width + word.length + 1 <= max_width
36 | str << ' ' << word
37 | width += word.length + 1
38 | else
39 | str << "\n" << word
40 | width = word.length
41 | end
42 | end
43 |
44 | str
45 | end
46 |
47 | ARGV.each do |f|
48 | t = benchmark { fold(f, 80) }
49 | STDERR.puts "#{f}: #{t}"
50 | end
51 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/ruby/sort.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require './utils.rb'
4 |
5 | def sort(filename)
6 | File.open(filename, 'r:utf-8') do |file|
7 | content = file.read
8 | puts content.lines.sort.join
9 | end
10 | end
11 |
12 | ARGV.each do |f|
13 | t = benchmark { sort(f) }
14 | STDERR.puts "#{f}: #{t}"
15 | end
16 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/ruby/strip_tags.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require './utils.rb'
4 |
5 | def strip_tags(filename)
6 | File.open(filename, 'r:utf-8') do |file|
7 | str = file.read
8 |
9 | d = 0
10 |
11 | str.each_char do |c|
12 | d += 1 if c == '<'
13 | putc(if d > 0 then ' ' else c end)
14 | d -= 1 if c == '>'
15 | end
16 | end
17 | end
18 |
19 | ARGV.each do |f|
20 | t = benchmark { strip_tags(f) }
21 | STDERR.puts "#{f}: #{t}"
22 | end
23 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/benchmarks/ruby/utils.rb:
--------------------------------------------------------------------------------
1 | require 'benchmark'
2 |
3 | def benchmark(&block)
4 | runs = 100
5 | total = 0
6 |
7 | runs.times do |i|
8 | result = Benchmark.measure(&block).total
9 | $stderr.puts "Run #{i}: #{result}"
10 | total += result
11 | end
12 |
13 | total / runs
14 | end
15 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/cabal.project:
--------------------------------------------------------------------------------
1 | -- See http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html
2 | packages: ., benchmarks
3 | tests: True
4 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/include/text_cbits.h:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (c) 2013 Bryan O'Sullivan <bos@serpentine.com>.
3 | */
4 |
5 | #ifndef _text_cbits_h
6 | #define _text_cbits_h
7 |
8 | #define UTF8_ACCEPT 0
9 | #define UTF8_REJECT 12
10 |
11 | #endif
12 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/scripts/ApiCompare.hs:
--------------------------------------------------------------------------------
1 | -- This script compares the strict and lazy Text APIs to ensure that
2 | -- they're reasonably in sync.
3 |
4 | {-# LANGUAGE OverloadedStrings #-}
5 |
6 | import qualified Data.Set as S
7 | import qualified Data.Text as T
8 | import System.Process
9 |
10 | main = do
11 | let tidy pkg = (S.fromList . filter (T.isInfixOf "::") . T.lines .
12 | T.replace "GHC.Int.Int64" "Int" .
13 | T.replace "\n " "" .
14 | T.replace (T.append (T.pack pkg) ".") "" . T.pack) `fmap`
15 | readProcess "ghci" [] (":browse " ++ pkg)
16 | let diff a b = mapM_ (putStrLn . (" "++) . T.unpack) . S.toList $
17 | S.difference a b
18 | text <- tidy "Data.Text"
19 | lazy <- tidy "Data.Text.Lazy"
20 | list <- tidy "Data.List"
21 | putStrLn "Text \\ List:"
22 | diff text list
23 | putStrLn ""
24 | putStrLn "Text \\ Lazy:"
25 | diff text lazy
26 | putStrLn ""
27 | putStrLn "Lazy \\ Text:"
28 | diff lazy text
29 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/scripts/Arsec.hs:
--------------------------------------------------------------------------------
1 | module Arsec
2 | (
3 | Comment
4 | , comment
5 | , semi
6 | , showC
7 | , unichar
8 | , unichars
9 | , module Control.Applicative
10 | , module Control.Monad
11 | , module Data.Char
12 | , module Text.ParserCombinators.Parsec.Char
13 | , module Text.ParserCombinators.Parsec.Combinator
14 | , module Text.ParserCombinators.Parsec.Error
15 | , module Text.ParserCombinators.Parsec.Prim
16 | ) where
17 |
18 | import Control.Monad
19 | import Control.Applicative
20 | import Data.Char
21 | import Numeric
22 | import Text.ParserCombinators.Parsec.Char hiding (lower, upper)
23 | import Text.ParserCombinators.Parsec.Combinator hiding (optional)
24 | import Text.ParserCombinators.Parsec.Error
25 | import Text.ParserCombinators.Parsec.Prim hiding ((<|>), many)
26 |
27 | type Comment = String
28 |
29 | unichar :: Parser Char
30 | unichar = chr . fst . head . readHex <$> many1 hexDigit
31 |
32 | unichars :: Parser [Char]
33 | unichars = manyTill (unichar <* spaces) semi
34 |
35 | semi :: Parser ()
36 | semi = char ';' *> spaces *> pure ()
37 |
38 | comment :: Parser Comment
39 | comment = (char '#' *> manyTill anyToken (char '\n')) <|> string "\n"
40 |
41 | showC :: Char -> String
42 | showC c = "'\\x" ++ d ++ "'"
43 | where h = showHex (ord c) ""
44 | d = replicate (4 - length h) '0' ++ h
45 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/scripts/CaseFolding.hs:
--------------------------------------------------------------------------------
1 | -- This script processes the following source file:
2 | --
3 | -- http://unicode.org/Public/UNIDATA/CaseFolding.txt
4 |
5 | module CaseFolding
6 | (
7 | CaseFolding(..)
8 | , Fold(..)
9 | , parseCF
10 | , mapCF
11 | ) where
12 |
13 | import Arsec
14 |
15 | data Fold = Fold {
16 | code :: Char
17 | , status :: Char
18 | , mapping :: [Char]
19 | , name :: String
20 | } deriving (Eq, Ord, Show)
21 |
22 | data CaseFolding = CF { cfComments :: [Comment], cfFolding :: [Fold] }
23 | deriving (Show)
24 |
25 | entries :: Parser CaseFolding
26 | entries = CF <$> many comment <*> many (entry <* many comment)
27 | where
28 | entry = Fold <$> unichar <* semi
29 | <*> oneOf "CFST" <* semi
30 | <*> unichars
31 | <*> (string "# " *> manyTill anyToken (char '\n'))
32 |
33 | parseCF :: FilePath -> IO (Either ParseError CaseFolding)
34 | parseCF name = parse entries name <$> readFile name
35 |
36 | mapCF :: CaseFolding -> [String]
37 | mapCF (CF _ ms) = typ ++ (map nice . filter p $ ms) ++ [last]
38 | where
39 | typ = ["foldMapping :: forall s. Char -> s -> Step (CC s) Char"
40 | ,"{-# NOINLINE foldMapping #-}"]
41 | last = "foldMapping c s = Yield (toLower c) (CC s '\\0' '\\0')"
42 | nice c = "-- " ++ name c ++ "\n" ++
43 | "foldMapping " ++ showC (code c) ++ " s = Yield " ++ x ++ " (CC s " ++ y ++ " " ++ z ++ ")"
44 | where [x,y,z] = (map showC . take 3) (mapping c ++ repeat '\0')
45 | p f = status f `elem` "CF" &&
46 | mapping f /= [toLower (code f)]
47 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/scripts/CaseMapping.hs:
--------------------------------------------------------------------------------
1 | import System.Environment
2 | import System.IO
3 |
4 | import Arsec
5 | import CaseFolding
6 | import SpecialCasing
7 |
8 | main = do
9 | args <- getArgs
10 | let oname = case args of
11 | [] -> "../Data/Text/Internal/Fusion/CaseMapping.hs"
12 | [o] -> o
13 | psc <- parseSC "SpecialCasing.txt"
14 | pcf <- parseCF "CaseFolding.txt"
15 | scs <- case psc of
16 | Left err -> print err >> return undefined
17 | Right ms -> return ms
18 | cfs <- case pcf of
19 | Left err -> print err >> return undefined
20 | Right ms -> return ms
21 | h <- openFile oname WriteMode
22 | let comments = map ("--" ++) $
23 | take 2 (cfComments cfs) ++ take 2 (scComments scs)
24 | mapM_ (hPutStrLn h) $
25 | ["{-# LANGUAGE Rank2Types #-}"
26 | ,"-- AUTOMATICALLY GENERATED - DO NOT EDIT"
27 | ,"-- Generated by scripts/CaseMapping.hs"] ++
28 | comments ++
29 | [""
30 | ,"module Data.Text.Internal.Fusion.CaseMapping where"
31 | ,"import Data.Char"
32 | ,"import Data.Text.Internal.Fusion.Types"
33 | ,""]
34 | mapM_ (hPutStrLn h) (mapSC "upper" upper toUpper scs)
35 | mapM_ (hPutStrLn h) (mapSC "lower" lower toLower scs)
36 | mapM_ (hPutStrLn h) (mapSC "title" title toTitle scs)
37 | mapM_ (hPutStrLn h) (mapCF cfs)
38 | hClose h
39 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/.ghci:
--------------------------------------------------------------------------------
1 | :set -isrc -i../..
2 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/LiteralRuleTest.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | module LiteralRuleTest where
4 |
5 | import Data.Text (Text)
6 |
7 | -- This should produce 8 firings of the "TEXT literal" rule
8 | strings :: [Text]
9 | strings = [ "abstime", "aclitem", "bit", "bool", "box", "bpchar", "bytea", "char" ]
10 |
11 | -- This should produce 7 firings of the "TEXT literal UTF8" rule
12 | utf8Strings :: [Text]
13 | utf8Strings = [ "\0abstime", "\0aclitem", "\xfefe bit", "\0bool", "\0box", "\0bpchar", "\0bytea" ]
14 |
15 | -- This should produce 4 firings of the "TEXT empty literal" rule
16 | empties :: [Text]
17 | empties = [ "", "", "", "" ]
18 |
19 | -- This should produce 5 firings of the "TEXT empty literal" rule
20 | --singletons :: [Text]
21 | --singletons = [ "a", "b", "c", "d", "e" ]
22 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/Makefile:
--------------------------------------------------------------------------------
1 | VCS = hg
2 | count = 1000
3 |
4 | all: coverage literal-rule-test
5 |
6 | literal-rule-test:
7 | ./literal-rule-test.sh
8 |
9 | coverage: build coverage/hpc_index.html
10 |
11 | build: text-test-data
12 | cabal configure -fhpc
13 | cabal build
14 |
15 | text-test-data:
16 | ifeq ($(VCS),git)
17 | git clone https://github.com/bos/text-test-data.git
18 | else
19 | hg clone https://bitbucket.org/bos/text-test-data
20 | endif
21 | $(MAKE) -C text-test-data
22 |
23 | coverage/text-tests.tix:
24 | -mkdir -p coverage
25 | ./dist/build/text-tests/text-tests -a $(count)
26 | mv text-tests.tix $@
27 |
28 | coverage/text-tests-stdio.tix:
29 | -mkdir -p coverage
30 | ./scripts/cover-stdio.sh ./dist/build/text-tests-stdio/text-tests-stdio
31 | mv text-tests-stdio.tix $@
32 |
33 | coverage/coverage.tix: coverage/text-tests.tix coverage/text-tests-stdio.tix
34 | hpc combine --output=$@ \
35 | --exclude=Main \
36 | coverage/text-tests.tix \
37 | coverage/text-tests-stdio.tix
38 |
39 | coverage/hpc_index.html: coverage/coverage.tix
40 | hpc markup --destdir=coverage coverage/coverage.tix
41 |
42 | clean:
43 | rm -rf dist coverage .hpc
44 |
45 | .PHONY: all build clean coverage literal-rule-test
46 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/Tests.hs:
--------------------------------------------------------------------------------
1 | -- | Provides a simple main function which runs all the tests
2 | --
3 | module Main
4 | ( main
5 | ) where
6 |
7 | import Test.Framework (defaultMain)
8 |
9 | import qualified Tests.Properties as Properties
10 | import qualified Tests.Regressions as Regressions
11 |
12 | main :: IO ()
13 | main = defaultMain [Properties.tests, Regressions.tests]
14 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/Tests/IO.hs:
--------------------------------------------------------------------------------
1 | -- | Program which exposes some Haskell functions as an executable. The results
2 | -- and coverage of this module is meant to be checked using a shell script.
3 | --
4 | module Main
5 | (
6 | main
7 | ) where
8 |
9 | import System.Environment (getArgs)
10 | import System.Exit (exitFailure)
11 | import System.IO (hPutStrLn, stderr)
12 | import qualified Data.Text as T
13 | import qualified Data.Text.IO as T
14 | import qualified Data.Text.Lazy as TL
15 | import qualified Data.Text.Lazy.IO as TL
16 |
17 | main :: IO ()
18 | main = do
19 | args <- getArgs
20 | case args of
21 | ["T.readFile", name] -> T.putStr =<< T.readFile name
22 | ["T.writeFile", name, t] -> T.writeFile name (T.pack t)
23 | ["T.appendFile", name, t] -> T.appendFile name (T.pack t)
24 | ["T.interact"] -> T.interact id
25 | ["T.getContents"] -> T.putStr =<< T.getContents
26 | ["T.getLine"] -> T.putStrLn =<< T.getLine
27 |
28 | ["TL.readFile", name] -> TL.putStr =<< TL.readFile name
29 | ["TL.writeFile", name, t] -> TL.writeFile name (TL.pack t)
30 | ["TL.appendFile", name, t] -> TL.appendFile name (TL.pack t)
31 | ["TL.interact"] -> TL.interact id
32 | ["TL.getContents"] -> TL.putStr =<< TL.getContents
33 | ["TL.getLine"] -> TL.putStrLn =<< TL.getLine
34 | _ -> hPutStrLn stderr "invalid directive!" >> exitFailure
35 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/Tests/Properties/Mul.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE ScopedTypeVariables #-}
2 |
3 | module Tests.Properties.Mul (tests) where
4 |
5 | import Control.Applicative ((<$>), pure)
6 | import Control.Exception as E (SomeException, catch, evaluate)
7 | import Data.Int (Int32, Int64)
8 | import Data.Text.Internal (mul, mul32, mul64)
9 | import System.IO.Unsafe (unsafePerformIO)
10 | import Test.Framework (Test)
11 | import Test.Framework.Providers.QuickCheck2 (testProperty)
12 | import Test.QuickCheck hiding ((.&.))
13 |
14 | mulRef :: (Integral a, Bounded a) => a -> a -> Maybe a
15 | mulRef a b
16 | | ab < bot || ab > top = Nothing
17 | | otherwise = Just (fromIntegral ab)
18 | where ab = fromIntegral a * fromIntegral b
19 | top = fromIntegral (maxBound `asTypeOf` a) :: Integer
20 | bot = fromIntegral (minBound `asTypeOf` a) :: Integer
21 |
22 | eval :: (a -> b -> c) -> a -> b -> Maybe c
23 | eval f a b = unsafePerformIO $
24 | (Just <$> evaluate (f a b)) `E.catch` (\(_::SomeException) -> pure Nothing)
25 |
26 | t_mul32 :: Int32 -> Int32 -> Property
27 | t_mul32 a b = mulRef a b === eval mul32 a b
28 |
29 | t_mul64 :: Int64 -> Int64 -> Property
30 | t_mul64 a b = mulRef a b === eval mul64 a b
31 |
32 | t_mul :: Int -> Int -> Property
33 | t_mul a b = mulRef a b === eval mul a b
34 |
35 | tests :: [Test]
36 | tests = [
37 | testProperty "t_mul" t_mul
38 | , testProperty "t_mul32" t_mul32
39 | , testProperty "t_mul64" t_mul64
40 | ]
41 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/Tests/SlowFunctions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns #-}
2 | module Tests.SlowFunctions
3 | (
4 | indices
5 | , splitOn
6 | ) where
7 |
8 | import qualified Data.Text as T
9 | import Data.Text.Internal (Text(..))
10 | import Data.Text.Unsafe (iter_, unsafeHead, unsafeTail)
11 |
12 | indices :: T.Text -- ^ Substring to search for (@needle@)
13 | -> T.Text -- ^ Text to search in (@haystack@)
14 | -> [Int]
15 | indices needle@(Text _narr _noff nlen) haystack@(Text harr hoff hlen)
16 | | T.null needle = []
17 | | otherwise = scan 0
18 | where
19 | scan i | i >= hlen = []
20 | | needle `T.isPrefixOf` t = i : scan (i+nlen)
21 | | otherwise = scan (i+d)
22 | where t = Text harr (hoff+i) (hlen-i)
23 | d = iter_ haystack i
24 |
25 | splitOn :: T.Text -- ^ Text to split on
26 | -> T.Text -- ^ Input text
27 | -> [T.Text]
28 | splitOn pat src0
29 | | T.null pat = error "splitOn: empty"
30 | | l == 1 = T.split (== (unsafeHead pat)) src0
31 | | otherwise = go src0
32 | where
33 | l = T.length pat
34 | go src = search 0 src
35 | where
36 | search !n !s
37 | | T.null s = [src] -- not found
38 | | pat `T.isPrefixOf` s = T.take n src : go (T.drop l s)
39 | | otherwise = search (n+1) (unsafeTail s)
40 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/cabal.config:
--------------------------------------------------------------------------------
1 | -- These flags help to speed up building the test suite.
2 |
3 | documentation: False
4 | executable-stripping: False
5 | flags: developer
6 | library-profiling: False
7 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/literal-rule-test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 |
3 | failed=0
4 |
5 | function check_firings() {
6 | rule=$1
7 | expected=$2
8 | build="ghc -O -ddump-rule-firings LiteralRuleTest.hs"
9 | build="$build -i.. -I../include"
10 | touch LiteralRuleTest.hs
11 | echo -n "Want to see $expected firings of rule $rule... " >&2
12 | firings=$($build 2>&1 | grep "Rule fired: $rule\$" | wc -l)
13 | rm -f LiteralRuleTest.{o,hi}
14 |
15 | if [ $firings != $expected ]; then
16 | echo "failed, saw $firings" >&2
17 | failed=1
18 | else
19 | echo "pass" >&2
20 | fi
21 | }
22 |
23 | check_firings "TEXT literal" 8
24 | check_firings "TEXT literal UTF8" 7
25 | check_firings "TEXT empty literal" 4
26 | # This is broken at the moment. "TEXT literal" rule fires instead.
27 | #check_firings "TEXT singleton literal" 5
28 |
29 | exit $failed
30 |
--------------------------------------------------------------------------------
/test-project/text-1.2.3.2/tests/scripts/cover-stdio.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [[ $# < 1 ]]; then
4 | echo "Usage: $0 <exe>"
5 | exit 1
6 | fi
7 |
8 | exe=$1
9 |
10 | rm -f $exe.tix
11 |
12 | f=$(mktemp stdio-f.XXXXXX)
13 | g=$(mktemp stdio-g.XXXXXX)
14 |
15 | for t in T TL; do
16 | echo $t.readFile > $f
17 | $exe $t.readFile $f > $g
18 | if ! diff -u $f $g; then
19 | errs=$((errs+1))
20 | echo FAIL: $t.readFile 1>&2
21 | fi
22 |
23 | $exe $t.writeFile $f $t.writeFile
24 | echo -n $t.writeFile > $g
25 | if ! diff -u $f $g; then
26 | errs=$((errs+1))
27 | echo FAIL: $t.writeFile 1>&2
28 | fi
29 |
30 | echo -n quux > $f
31 | $exe $t.appendFile $f $t.appendFile
32 | echo -n quux$t.appendFile > $g
33 | if ! diff -u $f $g; then
34 | errs=$((errs+1))
35 | echo FAIL: $t.appendFile 1>&2
36 | fi
37 |
38 | echo $t.interact | $exe $t.interact > $f
39 | echo $t.interact > $g
40 | if ! diff -u $f $g; then
41 | errs=$((errs+1))
42 | echo FAIL: $t.interact 1>&2
43 | fi
44 |
45 | echo $t.getContents | $exe $t.getContents > $f
46 | echo $t.getContents > $g
47 | if ! diff -u $f $g; then
48 | errs=$((errs+1))
49 | echo FAIL: $t.getContents 1>&2
50 | fi
51 |
52 | echo $t.getLine | $exe $t.getLine > $f
53 | echo $t.getLine > $g
54 | if ! diff -u $f $g; then
55 | errs=$((errs+1))
56 | echo FAIL: $t.getLine 1>&2
57 | fi
58 | done
59 |
60 | rm -f $f $g
61 |
62 | exit $errs
63 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/.gitignore:
--------------------------------------------------------------------------------
1 | /dist/
2 | /dist-boot/
3 | /dist-install/
4 | /dist-newstyle/
5 | /cabal-dev/
6 | /cabal.sandbox.config
7 | /ghc.mk
8 | /GNUmakefile
9 | /.ghc.environment.*
10 | /cabal.project.local
11 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/.hgignore:
--------------------------------------------------------------------------------
1 | ^(?:dist|benchmarks/dist|tests/coverage|tests/dist)$
2 | ^benchmarks/.*\.txt$
3 | ^tests/text-testdata.tar.bz2$
4 | ^tests/(?:\.hpc|bm|qc|qc-hpc|stdio-hpc|text/test)$
5 | \.(?:aux|eventlog|h[ip]|log|[oa]|orig|prof|ps|rej|swp|tix)$
6 | ~$
7 |
8 | syntax: glob
9 | .\#*
10 | cabal-dev
11 | cabal.sandbox.config
12 | \.cabal-sandbox
13 | scripts/CaseFolding.txt
14 | scripts/SpecialCasing.txt
15 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Internal/Builder/Functions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE MagicHash #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Builder.Functions
5 | -- Copyright : (c) 2011 MailRank, Inc.
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : GHC
11 | --
12 | -- /Warning/: this is an internal module, and does not have a stable
13 | -- API or name. Functions in this module may not check or enforce
14 | -- preconditions expected by public modules. Use at your own risk!
15 | --
16 | -- Useful functions and combinators.
17 |
18 | module Data.Text.Internal.Builder.Functions
19 | (
20 | (<>)
21 | , i2d
22 | ) where
23 |
24 | import Data.Monoid (mappend)
25 | import Data.Text.Lazy.Builder (Builder)
26 | import GHC.Base (chr#,ord#,(+#),Int(I#),Char(C#))
27 | import Prelude ()
28 |
29 | -- | Unsafe conversion for decimal digits.
30 | {-# INLINE i2d #-}
31 | i2d :: Int -> Char
32 | i2d (I# i#) = C# (chr# (ord# '0'# +# i#))
33 |
34 | -- | The normal 'mappend' function with right associativity instead of
35 | -- left.
36 | (<>) :: Builder -> Builder -> Builder
37 | (<>) = mappend
38 | {-# INLINE (<>) #-}
39 |
40 | infixr 4 <>
41 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Internal/Builder/Int/Digits.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | -- Module: Data.Text.Internal.Builder.Int.Digits
4 | -- Copyright: (c) 2013 Bryan O'Sullivan
5 | -- License: BSD-style
6 | -- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
7 | -- Stability: experimental
8 | -- Portability: portable
9 | --
10 | -- /Warning/: this is an internal module, and does not have a stable
11 | -- API or name. Functions in this module may not check or enforce
12 | -- preconditions expected by public modules. Use at your own risk!
13 | --
14 | -- This module exists because the C preprocessor does things that we
15 | -- shall not speak of when confronted with Haskell multiline strings.
16 |
17 | module Data.Text.Internal.Builder.Int.Digits (digits) where
18 |
19 | import Data.ByteString.Char8 (ByteString)
20 |
21 | digits :: ByteString
22 | digits = "0001020304050607080910111213141516171819\
23 | \2021222324252627282930313233343536373839\
24 | \4041424344454647484950515253545556575859\
25 | \6061626364656667686970717273747576777879\
26 | \8081828384858687888990919293949596979899"
27 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Internal/Builder/RealFloat/Functions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE CPP #-}
2 |
3 | -- |
4 | -- Module: Data.Text.Internal.Builder.RealFloat.Functions
5 | -- Copyright: (c) The University of Glasgow 1994-2002
6 | -- License: see libraries/base/LICENSE
7 | --
8 | -- /Warning/: this is an internal module, and does not have a stable
9 | -- API or name. Functions in this module may not check or enforce
10 | -- preconditions expected by public modules. Use at your own risk!
11 |
12 | module Data.Text.Internal.Builder.RealFloat.Functions
13 | (
14 | roundTo
15 | ) where
16 |
17 | roundTo :: Int -> [Int] -> (Int,[Int])
18 |
19 | #if MIN_VERSION_base(4,6,0)
20 |
21 | roundTo d is =
22 | case f d True is of
23 | x@(0,_) -> x
24 | (1,xs) -> (1, 1:xs)
25 | _ -> error "roundTo: bad Value"
26 | where
27 | b2 = base `quot` 2
28 |
29 | f n _ [] = (0, replicate n 0)
30 | f 0 e (x:xs) | x == b2 && e && all (== 0) xs = (0, []) -- Round to even when at exactly half the base
31 | | otherwise = (if x >= b2 then 1 else 0, [])
32 | f n _ (i:xs)
33 | | i' == base = (1,0:ds)
34 | | otherwise = (0,i':ds)
35 | where
36 | (c,ds) = f (n-1) (even i) xs
37 | i' = c + i
38 | base = 10
39 |
40 | #else
41 |
42 | roundTo d is =
43 | case f d is of
44 | x@(0,_) -> x
45 | (1,xs) -> (1, 1:xs)
46 | _ -> error "roundTo: bad Value"
47 | where
48 | f n [] = (0, replicate n 0)
49 | f 0 (x:_) = (if x >= 5 then 1 else 0, [])
50 | f n (i:xs)
51 | | i' == 10 = (1,0:ds)
52 | | otherwise = (0,i':ds)
53 | where
54 | (c,ds) = f (n-1) xs
55 | i' = c + i
56 |
57 | #endif
58 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Internal/Encoding/Utf16.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE MagicHash, BangPatterns #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Encoding.Utf16
5 | -- Copyright : (c) 2008, 2009 Tom Harper,
6 | -- (c) 2009 Bryan O'Sullivan,
7 | -- (c) 2009 Duncan Coutts
8 | --
9 | -- License : BSD-style
10 | -- Maintainer : bos@serpentine.com
11 | -- Stability : experimental
12 | -- Portability : GHC
13 | --
14 | -- /Warning/: this is an internal module, and does not have a stable
15 | -- API or name. Functions in this module may not check or enforce
16 | -- preconditions expected by public modules. Use at your own risk!
17 | --
18 | -- Basic UTF-16 validation and character manipulation.
19 | module Data.Text.Internal.Encoding.Utf16
20 | (
21 | chr2
22 | , validate1
23 | , validate2
24 | ) where
25 |
26 | import GHC.Exts
27 | import GHC.Word (Word16(..))
28 |
29 | chr2 :: Word16 -> Word16 -> Char
30 | chr2 (W16# a#) (W16# b#) = C# (chr# (upper# +# lower# +# 0x10000#))
31 | where
32 | !x# = word2Int# a#
33 | !y# = word2Int# b#
34 | !upper# = uncheckedIShiftL# (x# -# 0xD800#) 10#
35 | !lower# = y# -# 0xDC00#
36 | {-# INLINE chr2 #-}
37 |
38 | validate1 :: Word16 -> Bool
39 | validate1 x1 = x1 < 0xD800 || x1 > 0xDFFF
40 | {-# INLINE validate1 #-}
41 |
42 | validate2 :: Word16 -> Word16 -> Bool
43 | validate2 x1 x2 = x1 >= 0xD800 && x1 <= 0xDBFF &&
44 | x2 >= 0xDC00 && x2 <= 0xDFFF
45 | {-# INLINE validate2 #-}
46 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Internal/Encoding/Utf32.hs:
--------------------------------------------------------------------------------
1 | -- |
2 | -- Module : Data.Text.Internal.Encoding.Utf32
3 | -- Copyright : (c) 2008, 2009 Tom Harper,
4 | -- (c) 2009, 2010 Bryan O'Sullivan,
5 | -- (c) 2009 Duncan Coutts
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : portable
11 | --
12 | -- /Warning/: this is an internal module, and does not have a stable
13 | -- API or name. Functions in this module may not check or enforce
14 | -- preconditions expected by public modules. Use at your own risk!
15 | --
16 | -- Basic UTF-32 validation.
17 | module Data.Text.Internal.Encoding.Utf32
18 | (
19 | validate
20 | ) where
21 |
22 | import Data.Word (Word32)
23 |
24 | validate :: Word32 -> Bool
25 | validate x1 = x1 < 0xD800 || (x1 > 0xDFFF && x1 <= 0x10FFFF)
26 | {-# INLINE validate #-}
27 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Internal/Functions.hs:
--------------------------------------------------------------------------------
1 | -- |
2 | -- Module : Data.Text.Internal.Functions
3 | -- Copyright : 2010 Bryan O'Sullivan
4 | --
5 | -- License : BSD-style
6 | -- Maintainer : bos@serpentine.com
7 | -- Stability : experimental
8 | -- Portability : GHC
9 | --
10 | -- /Warning/: this is an internal module, and does not have a stable
11 | -- API or name. Functions in this module may not check or enforce
12 | -- preconditions expected by public modules. Use at your own risk!
13 | --
14 | -- Useful functions.
15 |
16 | module Data.Text.Internal.Functions
17 | (
18 | intersperse
19 | ) where
20 |
21 | -- | A lazier version of Data.List.intersperse. The other version
22 | -- causes space leaks!
23 | intersperse :: a -> [a] -> [a]
24 | intersperse _ [] = []
25 | intersperse sep (x:xs) = x : go xs
26 | where
27 | go [] = []
28 | go (y:ys) = sep : y: go ys
29 | {-# INLINE intersperse #-}
30 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Internal/Private.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, Rank2Types, UnboxedTuples #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Private
5 | -- Copyright : (c) 2011 Bryan O'Sullivan
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : GHC
11 |
12 | module Data.Text.Internal.Private
13 | (
14 | runText
15 | , span_
16 | ) where
17 |
18 | import Control.Monad.ST (ST, runST)
19 | import Data.Text.Internal (Text(..), text)
20 | import Data.Text.Unsafe (Iter(..), iter)
21 | import qualified Data.Text.Array as A
22 |
23 | span_ :: (Char -> Bool) -> Text -> (# Text, Text #)
24 | span_ p t@(Text arr off len) = (# hd,tl #)
25 | where hd = text arr off k
26 | tl = text arr (off+k) (len-k)
27 | !k = loop 0
28 | loop !i | i < len && p c = loop (i+d)
29 | | otherwise = i
30 | where Iter c d = iter t i
31 | {-# INLINE span_ #-}
32 |
33 | runText :: (forall s. (A.MArray s -> Int -> ST s Text) -> ST s Text) -> Text
34 | runText act = runST (act $ \ !marr !len -> do
35 | arr <- A.unsafeFreeze marr
36 | return $! text arr 0 len)
37 | {-# INLINE runText #-}
38 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Lazy/Builder.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, CPP, Rank2Types #-}
2 | #if __GLASGOW_HASKELL__ >= 702
3 | {-# LANGUAGE Trustworthy #-}
4 | #endif
5 |
6 | -----------------------------------------------------------------------------
7 | -- |
8 | -- Module : Data.Text.Lazy.Builder
9 | -- Copyright : (c) 2013 Bryan O'Sullivan
10 | -- (c) 2010 Johan Tibell
11 | -- License : BSD-style (see LICENSE)
12 | --
13 | -- Maintainer : Johan Tibell
14 | -- Portability : portable to Hugs and GHC
15 | --
16 | -- Efficient construction of lazy @Text@ values. The principal
17 | -- operations on a @Builder@ are @singleton@, @fromText@, and
18 | -- @fromLazyText@, which construct new builders, and 'mappend', which
19 | -- concatenates two builders.
20 | --
21 | -- To get maximum performance when building lazy @Text@ values using a
22 | -- builder, associate @mappend@ calls to the right. For example,
23 | -- prefer
24 | --
25 | -- > singleton 'a' `mappend` (singleton 'b' `mappend` singleton 'c')
26 | --
27 | -- to
28 | --
29 | -- > singleton 'a' `mappend` singleton 'b' `mappend` singleton 'c'
30 | --
31 | -- as the latter associates @mappend@ to the left. Or, equivalently,
32 | -- prefer
33 | --
34 | -- > singleton 'a' <> singleton 'b' <> singleton 'c'
35 | --
36 | -- since the '<>' from recent versions of 'Data.Monoid' associates
37 | -- to the right.
38 |
39 | -----------------------------------------------------------------------------
40 |
41 | module Data.Text.Lazy.Builder
42 | ( -- * The Builder type
43 | Builder
44 | , toLazyText
45 | , toLazyTextWith
46 |
47 | -- * Constructing Builders
48 | , singleton
49 | , fromText
50 | , fromLazyText
51 | , fromString
52 |
53 | -- * Flushing the buffer state
54 | , flush
55 | ) where
56 |
57 | import Data.Text.Internal.Builder
58 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Data/Text/Lazy/Internal.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, DeriveDataTypeable #-}
2 | -- |
3 | -- Module : Data.Text.Lazy.Internal
4 | -- Copyright : (c) 2013 Bryan O'Sullivan
5 | --
6 | -- License : BSD-style
7 | -- Maintainer : bos@serpentine.com
8 | -- Stability : experimental
9 | -- Portability : GHC
10 | --
11 | -- This module has been renamed to 'Data.Text.Internal.Lazy'. This
12 | -- name for the module will be removed in the next major release.
13 |
14 | module Data.Text.Lazy.Internal
15 | {-# DEPRECATED "Use Data.Text.Internal.Lazy instead" #-}
16 | (
17 | module Data.Text.Internal.Lazy
18 | ) where
19 |
20 | import Data.Text.Internal.Lazy
21 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2008-2009, Tom Harper
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions
6 | are met:
7 |
8 | * Redistributions of source code must retain the above copyright
9 | notice, this list of conditions and the following disclaimer.
10 |
11 | * Redistributions in binary form must reproduce the above
12 | copyright notice, this list of conditions and the following
13 | disclaimer in the documentation and/or other materials provided
14 | with the distribution.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/README.markdown:
--------------------------------------------------------------------------------
1 | # `text`: Fast, packed Unicode strings, using stream fusion
2 |
3 | This package provides the Data.Text library, a library for the space-
4 | and time-efficient manipulation of Unicode text in Haskell.
5 |
6 | **Please refer to the [package description on Hackage](https://hackage.haskell.org/package/text#description) for more information.**
7 |
8 | # Get involved!
9 |
10 | Please report bugs via the
11 | [github issue tracker](https://github.com/haskell/text/issues).
12 |
13 | Master [git repository](https://github.com/haskell/text):
14 |
15 | * `git clone git://github.com/haskell/text.git`
16 |
17 | There's also a [Mercurial mirror](https://bitbucket.org/bos/text):
18 |
19 | * `hg clone https://bitbucket.org/bos/text`
20 |
21 | (You can create and contribute changes using either Mercurial or git.)
22 |
23 |
24 | # Authors
25 |
26 | The base code for this library was originally written by Tom Harper,
27 | based on the stream fusion framework developed by Roman Leshchinskiy,
28 | Duncan Coutts, and Don Stewart.
29 |
30 | The core library was fleshed out, debugged, and tested by Bryan
31 | O'Sullivan <bos@serpentine.com>, and he is the current maintainer.
32 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/Setup.lhs:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env runhaskell
2 | > import Distribution.Simple
3 | > main = defaultMain
4 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/.gitignore:
--------------------------------------------------------------------------------
1 | /dist/
2 | /dist-newstyle/
3 | /.ghc.environment.*
4 | /cabal.project.local
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/Setup.hs:
--------------------------------------------------------------------------------
1 | import Distribution.Simple
2 | main = defaultMain
3 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/cabal.project:
--------------------------------------------------------------------------------
1 | -- NB: we use a separate project
2 | packages: .
3 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/cbits/time_iconv.c:
--------------------------------------------------------------------------------
1 | #include <iconv.h>
2 | #include <stddef.h>
3 | #include <stdint.h>
4 | #include <stdlib.h>
5 |
6 | int time_iconv(char *srcbuf, size_t srcbufsize)
7 | {
8 | uint16_t *destbuf = NULL;
9 | size_t destbufsize;
10 | static uint16_t *origdestbuf;
11 | static size_t origdestbufsize;
12 | iconv_t ic = (iconv_t) -1;
13 | int ret = 0;
14 |
15 | if (ic == (iconv_t) -1) {
16 | ic = iconv_open("UTF-16LE", "UTF-8");
17 | if (ic == (iconv_t) -1) {
18 | ret = -1;
19 | goto done;
20 | }
21 | }
22 |
23 | destbufsize = srcbufsize * sizeof(uint16_t);
24 | if (destbufsize > origdestbufsize) {
25 | free(origdestbuf);
26 | origdestbuf = destbuf = malloc(origdestbufsize = destbufsize);
27 | } else {
28 | destbuf = origdestbuf;
29 | }
30 |
31 | iconv(ic, &srcbuf, &srcbufsize, (char**) &destbuf, &destbufsize);
32 |
33 | done:
34 | return ret;
35 | }
36 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/Concat.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | module Benchmarks.Concat (benchmark) where
4 |
5 | import Control.Monad.Trans.Writer
6 | import Criterion (Benchmark, bgroup, bench, whnf)
7 | import Data.Text as T
8 |
9 | benchmark :: Benchmark
10 | benchmark = bgroup "Concat"
11 | [ bench "append" $ whnf (append4 "Text 1" "Text 2" "Text 3") "Text 4"
12 | , bench "concat" $ whnf (concat4 "Text 1" "Text 2" "Text 3") "Text 4"
13 | , bench "write" $ whnf (write4 "Text 1" "Text 2" "Text 3") "Text 4"
14 | ]
15 |
16 | append4, concat4, write4 :: Text -> Text -> Text -> Text -> Text
17 |
18 | {-# NOINLINE append4 #-}
19 | append4 x1 x2 x3 x4 = x1 `append` x2 `append` x3 `append` x4
20 |
21 | {-# NOINLINE concat4 #-}
22 | concat4 x1 x2 x3 x4 = T.concat [x1, x2, x3, x4]
23 |
24 | {-# NOINLINE write4 #-}
25 | write4 x1 x2 x3 x4 = execWriter $ tell x1 >> tell x2 >> tell x3 >> tell x4
26 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/EncodeUtf8.hs:
--------------------------------------------------------------------------------
1 | -- | UTF-8 encode a text
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Replicating a string a number of times
6 | --
7 | -- * UTF-8 encoding it
8 | --
9 | module Benchmarks.EncodeUtf8
10 | ( benchmark
11 | ) where
12 |
13 | import Criterion (Benchmark, bgroup, bench, whnf)
14 | import qualified Data.ByteString as B
15 | import qualified Data.ByteString.Lazy as BL
16 | import qualified Data.Text as T
17 | import qualified Data.Text.Encoding as T
18 | import qualified Data.Text.Lazy as TL
19 | import qualified Data.Text.Lazy.Encoding as TL
20 |
21 | benchmark :: String -> Benchmark
22 | benchmark string =
23 | bgroup "EncodeUtf8"
24 | [ bench "Text" $ whnf (B.length . T.encodeUtf8) text
25 | , bench "LazyText" $ whnf (BL.length . TL.encodeUtf8) lazyText
26 | ]
27 | where
28 | -- The string in different formats
29 | text = T.replicate k $ T.pack string
30 | lazyText = TL.replicate (fromIntegral k) $ TL.pack string
31 |
32 | -- Amount
33 | k = 100000
34 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/Equality.hs:
--------------------------------------------------------------------------------
1 | -- | Compare a string with a copy of itself that is identical except
2 | -- for the last character.
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Comparison of strings (Eq instance)
7 | --
8 | module Benchmarks.Equality
9 | ( initEnv
10 | , benchmark
11 | ) where
12 |
13 | import Criterion (Benchmark, bgroup, bench, whnf)
14 | import qualified Data.ByteString.Char8 as B
15 | import qualified Data.ByteString.Lazy.Char8 as BL
16 | import qualified Data.Text as T
17 | import qualified Data.Text.Encoding as T
18 | import qualified Data.Text.Lazy as TL
19 | import qualified Data.Text.Lazy.Encoding as TL
20 |
21 | type Env = (T.Text, TL.Text, B.ByteString, BL.ByteString, BL.ByteString, String)
22 |
23 | initEnv :: FilePath -> IO Env
24 | initEnv fp = do
25 | b <- B.readFile fp
26 | bl1 <- BL.readFile fp
27 | -- A lazy bytestring is a list of chunks. When we do not explicitly create two
28 | -- different lazy bytestrings at a different address, the bytestring library
29 | -- will compare the chunk addresses instead of the chunk contents. This is why
30 | -- we read the lazy bytestring twice here.
31 | bl2 <- BL.readFile fp
32 | l <- readFile fp
33 | return (T.decodeUtf8 b, TL.decodeUtf8 bl1, b, bl1, bl2, l)
34 |
35 | benchmark :: Env -> Benchmark
36 | benchmark ~(t, tl, b, bl1, bl2, l) =
37 | bgroup "Equality"
38 | [ bench "Text" $ whnf (== T.init t `T.snoc` '\xfffd') t
39 | , bench "LazyText" $ whnf (== TL.init tl `TL.snoc` '\xfffd') tl
40 | , bench "ByteString" $ whnf (== B.init b `B.snoc` '\xfffd') b
41 | , bench "LazyByteString" $ whnf (== BL.init bl2 `BL.snoc` '\xfffd') bl1
42 | , bench "String" $ whnf (== init l ++ "\xfffd") l
43 | ]
44 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/FileRead.hs:
--------------------------------------------------------------------------------
1 | -- | Benchmarks simple file reading
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Reading a file from the disk
6 | --
7 | module Benchmarks.FileRead
8 | ( benchmark
9 | ) where
10 |
11 | import Control.Applicative ((<$>))
12 | import Criterion (Benchmark, bgroup, bench, whnfIO)
13 | import qualified Data.ByteString as SB
14 | import qualified Data.ByteString.Lazy as LB
15 | import qualified Data.Text as T
16 | import qualified Data.Text.Encoding as T
17 | import qualified Data.Text.IO as T
18 | import qualified Data.Text.Lazy as LT
19 | import qualified Data.Text.Lazy.Encoding as LT
20 | import qualified Data.Text.Lazy.IO as LT
21 |
22 | benchmark :: FilePath -> Benchmark
23 | benchmark p = bgroup "FileRead"
24 | [ bench "String" $ whnfIO $ length <$> readFile p
25 | , bench "ByteString" $ whnfIO $ SB.length <$> SB.readFile p
26 | , bench "LazyByteString" $ whnfIO $ LB.length <$> LB.readFile p
27 | , bench "Text" $ whnfIO $ T.length <$> T.readFile p
28 | , bench "LazyText" $ whnfIO $ LT.length <$> LT.readFile p
29 | , bench "TextByteString" $ whnfIO $
30 | (T.length . T.decodeUtf8) <$> SB.readFile p
31 | , bench "LazyTextByteString" $ whnfIO $
32 | (LT.length . LT.decodeUtf8) <$> LB.readFile p
33 | ]
34 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/FoldLines.hs:
--------------------------------------------------------------------------------
1 | -- | Read a file line-by-line using handles, and perform a fold over the lines.
2 | -- The fold is used here to calculate the number of lines in the file.
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Buffered, line-based IO
7 | --
8 | {-# LANGUAGE BangPatterns #-}
9 | module Benchmarks.FoldLines
10 | ( benchmark
11 | ) where
12 |
13 | import Criterion (Benchmark, bgroup, bench, whnfIO)
14 | import System.IO
15 | import qualified Data.ByteString as B
16 | import qualified Data.Text as T
17 | import qualified Data.Text.IO as T
18 |
19 | benchmark :: FilePath -> Benchmark
20 | benchmark fp = bgroup "ReadLines"
21 | [ bench "Text" $ withHandle $ foldLinesT (\n _ -> n + 1) (0 :: Int)
22 | , bench "ByteString" $ withHandle $ foldLinesB (\n _ -> n + 1) (0 :: Int)
23 | ]
24 | where
25 | withHandle f = whnfIO $ do
26 | h <- openFile fp ReadMode
27 | hSetBuffering h (BlockBuffering (Just 16384))
28 | x <- f h
29 | hClose h
30 | return x
31 |
32 | -- | Text line fold
33 | --
34 | foldLinesT :: (a -> T.Text -> a) -> a -> Handle -> IO a
35 | foldLinesT f z0 h = go z0
36 | where
37 | go !z = do
38 | eof <- hIsEOF h
39 | if eof
40 | then return z
41 | else do
42 | l <- T.hGetLine h
43 | let z' = f z l in go z'
44 | {-# INLINE foldLinesT #-}
45 |
46 | -- | ByteString line fold
47 | --
48 | foldLinesB :: (a -> B.ByteString -> a) -> a -> Handle -> IO a
49 | foldLinesB f z0 h = go z0
50 | where
51 | go !z = do
52 | eof <- hIsEOF h
53 | if eof
54 | then return z
55 | else do
56 | l <- B.hGetLine h
57 | let z' = f z l in go z'
58 | {-# INLINE foldLinesB #-}
59 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/Programs/BigTable.hs:
--------------------------------------------------------------------------------
1 | -- | Create a large HTML table and dump it to a handle
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Creating a large HTML document using a builder
6 | --
7 | -- * Writing to a handle
8 | --
9 | {-# LANGUAGE OverloadedStrings #-}
10 | module Benchmarks.Programs.BigTable
11 | ( benchmark
12 | ) where
13 |
14 | import Criterion (Benchmark, bench, whnfIO)
15 | import Data.Monoid (mappend, mconcat)
16 | import Data.Text.Lazy.Builder (Builder, fromText, toLazyText)
17 | import Data.Text.Lazy.IO (hPutStr)
18 | import System.IO (Handle)
19 | import qualified Data.Text as T
20 |
21 | benchmark :: Handle -> Benchmark
22 | benchmark sink = bench "BigTable" $ whnfIO $ do
23 | hPutStr sink "Content-Type: text/html\n\n<table>"
24 | hPutStr sink . toLazyText . makeTable =<< rows
25 | hPutStr sink "</table>"
26 | where
27 | -- We provide the number of rows in IO so the builder value isn't shared
28 | -- between the benchmark samples.
29 | rows :: IO Int
30 | rows = return 20000
31 | {-# NOINLINE rows #-}
32 |
33 | makeTable :: Int -> Builder
34 | makeTable n = mconcat $ replicate n $ mconcat $ map makeCol [1 .. 50]
35 |
36 | makeCol :: Int -> Builder
37 | makeCol 1 = fromText "<tr><td>1</td>"
38 | makeCol 50 = fromText "<td>50</td></tr>"
39 | makeCol i = fromText "<td>" `mappend` (fromInt i `mappend` fromText "</td>")
40 |
41 | fromInt :: Int -> Builder
42 | fromInt = fromText . T.pack . show
43 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/Programs/StripTags.hs:
--------------------------------------------------------------------------------
1 | -- | Program to replace HTML tags by whitespace
2 | --
3 | -- This program was originally contributed by Petr Prokhorenkov.
4 | --
5 | -- Tested in this benchmark:
6 | --
7 | -- * Reading the file
8 | --
9 | -- * Replacing text between HTML tags (<>) with whitespace
10 | --
11 | -- * Writing back to a handle
12 | --
13 | {-# OPTIONS_GHC -fspec-constr-count=5 #-}
14 | module Benchmarks.Programs.StripTags
15 | ( benchmark
16 | ) where
17 |
18 | import Criterion (Benchmark, bgroup, bench, whnfIO)
19 | import Data.List (mapAccumL)
20 | import System.IO (Handle, hPutStr)
21 | import qualified Data.ByteString as B
22 | import qualified Data.ByteString.Char8 as BC
23 | import qualified Data.Text as T
24 | import qualified Data.Text.Encoding as T
25 | import qualified Data.Text.IO as T
26 |
27 | benchmark :: FilePath -> Handle -> Benchmark
28 | benchmark i o = bgroup "StripTags"
29 | [ bench "String" $ whnfIO $ readFile i >>= hPutStr o . string
30 | , bench "ByteString" $ whnfIO $ B.readFile i >>= B.hPutStr o . byteString
31 | , bench "Text" $ whnfIO $ T.readFile i >>= T.hPutStr o . text
32 | , bench "TextByteString" $ whnfIO $
33 | B.readFile i >>= B.hPutStr o . T.encodeUtf8 . text . T.decodeUtf8
34 | ]
35 |
36 | string :: String -> String
37 | string = snd . mapAccumL step 0
38 |
39 | text :: T.Text -> T.Text
40 | text = snd . T.mapAccumL step 0
41 |
42 | byteString :: B.ByteString -> B.ByteString
43 | byteString = snd . BC.mapAccumL step 0
44 |
45 | step :: Int -> Char -> (Int, Char)
46 | step d c
47 | | d > 0 || d' > 0 = (d', ' ')
48 | | otherwise = (d', c)
49 | where
50 | d' = d + depth c
51 | depth '<' = 1
52 | depth '>' = -1
53 | depth _ = 0
54 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/Programs/Throughput.hs:
--------------------------------------------------------------------------------
1 | -- | This benchmark simply reads and writes a file using the various string
2 | -- libraries. The point of it is that we can make better estimations on how
3 | -- much time the other benchmarks spend doing IO.
4 | --
5 | -- Note that we expect ByteStrings to be a whole lot faster, since they do not
6 | -- do any actual encoding/decoding here, while String and Text do have UTF-8
7 | -- encoding/decoding.
8 | --
9 | -- Tested in this benchmark:
10 | --
11 | -- * Reading the file
12 | --
13 | -- * Replacing text between HTML tags (<>) with whitespace
14 | --
15 | -- * Writing back to a handle
16 | --
17 | module Benchmarks.Programs.Throughput
18 | ( benchmark
19 | ) where
20 |
21 | import Criterion (Benchmark, bgroup, bench, whnfIO)
22 | import System.IO (Handle, hPutStr)
23 | import qualified Data.ByteString as B
24 | import qualified Data.ByteString.Lazy as BL
25 | import qualified Data.Text.Encoding as T
26 | import qualified Data.Text.IO as T
27 | import qualified Data.Text.Lazy.Encoding as TL
28 | import qualified Data.Text.Lazy.IO as TL
29 |
30 | benchmark :: FilePath -> Handle -> Benchmark
31 | benchmark fp sink = bgroup "Throughput"
32 | [ bench "String" $ whnfIO $ readFile fp >>= hPutStr sink
33 | , bench "ByteString" $ whnfIO $ B.readFile fp >>= B.hPutStr sink
34 | , bench "LazyByteString" $ whnfIO $ BL.readFile fp >>= BL.hPutStr sink
35 | , bench "Text" $ whnfIO $ T.readFile fp >>= T.hPutStr sink
36 | , bench "LazyText" $ whnfIO $ TL.readFile fp >>= TL.hPutStr sink
37 | , bench "TextByteString" $ whnfIO $
38 | B.readFile fp >>= B.hPutStr sink . T.encodeUtf8 . T.decodeUtf8
39 | , bench "LazyTextByteString" $ whnfIO $
40 | BL.readFile fp >>= BL.hPutStr sink . TL.encodeUtf8 . TL.decodeUtf8
41 | ]
42 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/Replace.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns #-}
2 | -- | Replace a string by another string
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Search and replace of a pattern in a text
7 | --
8 | module Benchmarks.Replace
9 | ( benchmark
10 | , initEnv
11 | ) where
12 |
13 | import Criterion (Benchmark, bgroup, bench, nf)
14 | import qualified Data.ByteString.Char8 as B
15 | import qualified Data.ByteString.Lazy as BL
16 | import qualified Data.ByteString.Lazy.Search as BL
17 | import qualified Data.ByteString.Search as B
18 | import qualified Data.Text as T
19 | import qualified Data.Text.Encoding as T
20 | import qualified Data.Text.Lazy as TL
21 | import qualified Data.Text.Lazy.Encoding as TL
22 | import qualified Data.Text.Lazy.IO as TL
23 |
24 | type Env = (T.Text, B.ByteString, TL.Text, BL.ByteString)
25 |
26 | initEnv :: FilePath -> IO Env
27 | initEnv fp = do
28 | tl <- TL.readFile fp
29 | bl <- BL.readFile fp
30 | let !t = TL.toStrict tl
31 | !b = T.encodeUtf8 t
32 | return (t, b, tl, bl)
33 |
34 | benchmark :: String -> String -> Env -> Benchmark
35 | benchmark pat sub ~(t, b, tl, bl) =
36 | bgroup "Replace" [
37 | bench "Text" $ nf (T.length . T.replace tpat tsub) t
38 | , bench "ByteString" $ nf (BL.length . B.replace bpat bsub) b
39 | , bench "LazyText" $ nf (TL.length . TL.replace tlpat tlsub) tl
40 | , bench "LazyByteString" $ nf (BL.length . BL.replace blpat blsub) bl
41 | ]
42 | where
43 | tpat = T.pack pat
44 | tsub = T.pack sub
45 | tlpat = TL.pack pat
46 | tlsub = TL.pack sub
47 | bpat = T.encodeUtf8 tpat
48 | bsub = T.encodeUtf8 tsub
49 | blpat = B.concat $ BL.toChunks $ TL.encodeUtf8 tlpat
50 | blsub = B.concat $ BL.toChunks $ TL.encodeUtf8 tlsub
51 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Benchmarks/WordFrequencies.hs:
--------------------------------------------------------------------------------
1 | -- | A word frequency count using the different string types
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Splitting into words
6 | --
7 | -- * Converting to lowercase
8 | --
9 | -- * Comparing: Eq/Ord instances
10 | --
11 | module Benchmarks.WordFrequencies
12 | ( initEnv
13 | , benchmark
14 | ) where
15 |
16 | import Criterion (Benchmark, bench, bgroup, whnf)
17 | import Data.Char (toLower)
18 | import Data.List (foldl')
19 | import Data.Map (Map)
20 | import qualified Data.ByteString.Char8 as B
21 | import qualified Data.Map as M
22 | import qualified Data.Text as T
23 | import qualified Data.Text.IO as T
24 |
25 | type Env = (String, B.ByteString, T.Text)
26 |
27 | initEnv :: FilePath -> IO Env
28 | initEnv fp = do
29 | s <- readFile fp
30 | b <- B.readFile fp
31 | t <- T.readFile fp
32 | return (s, b, t)
33 |
34 | benchmark :: Env -> Benchmark
35 | benchmark ~(s, b, t) =
36 | bgroup "WordFrequencies"
37 | [ bench "String" $ whnf (frequencies . words . map toLower) s
38 | , bench "ByteString" $ whnf (frequencies . B.words . B.map toLower) b
39 | , bench "Text" $ whnf (frequencies . T.words . T.toLower) t
40 | ]
41 |
42 | frequencies :: Ord a => [a] -> Map a Int
43 | frequencies = foldl' (\m k -> M.insertWith (+) k 1 m) M.empty
44 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Multilang.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, OverloadedStrings, RankNTypes #-}
2 |
3 | module Main (
4 | main
5 | ) where
6 |
7 | import Control.Monad (forM_)
8 | import qualified Data.ByteString as B
9 | import qualified Data.Text as Text
10 | import Data.Text.Encoding (decodeUtf8)
11 | import Data.Text (Text)
12 | import System.IO (hFlush, stdout)
13 | import Timer (timer)
14 |
15 | type BM = Text -> ()
16 |
17 | bm :: forall a. (Text -> a) -> BM
18 | bm f t = f t `seq` ()
19 |
20 | benchmarks :: [(String, Text.Text -> ())]
21 | benchmarks = [
22 | ("find_first", bm $ Text.isInfixOf "en:Benin")
23 | , ("find_index", bm $ Text.findIndex (=='c'))
24 | ]
25 |
26 | main :: IO ()
27 | main = do
28 | !contents <- decodeUtf8 `fmap` B.readFile "../tests/text-test-data/yiwiki.xml"
29 | forM_ benchmarks $ \(name, bmark) -> do
30 | putStr $ name ++ " "
31 | hFlush stdout
32 | putStrLn =<< (timer 100 contents bmark)
33 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/haskell/Timer.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns #-}
2 |
3 | module Timer (timer) where
4 |
5 | import Control.Exception (evaluate)
6 | import Data.Time.Clock.POSIX (getPOSIXTime)
7 | import GHC.Float (FFFormat(..), formatRealFloat)
8 |
9 | ickyRound :: Int -> Double -> String
10 | ickyRound k = formatRealFloat FFFixed (Just k)
11 |
12 | timer :: Int -> a -> (a -> b) -> IO String
13 | timer count a0 f = do
14 | let loop !k !fastest
15 | | k <= 0 = return fastest
16 | | otherwise = do
17 | start <- getPOSIXTime
18 | let inner a i
19 | | i <= 0 = return ()
20 | | otherwise = evaluate (f a) >> inner a (i-1)
21 | inner a0 count
22 | end <- getPOSIXTime
23 | let elapsed = end - start
24 | loop (k-1) (min fastest (elapsed / fromIntegral count))
25 | t <- loop (3::Int) 1e300
26 | let log10 x = log x / log 10
27 | ft = realToFrac t
28 | prec = round (log10 (fromIntegral count) - log10 ft)
29 | return $! ickyRound prec ft
30 | {-# NOINLINE timer #-}
31 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/python/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | *.pyc
3 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/python/cut.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import utils, sys, codecs
4 |
5 | def cut(filename, l, r):
6 | content = open(filename, encoding='utf-8')
7 | for line in content:
8 | print(line[l:r])
9 |
10 | for f in sys.argv[1:]:
11 | t = utils.benchmark(lambda: cut(f, 20, 40))
12 | sys.stderr.write('{0}: {1}\n'.format(f, t))
13 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/python/multilang.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import math
4 | import sys
5 | import time
6 |
7 | def find_first():
8 | cf = contents.find
9 | return timer(lambda: cf("en:Benin"))
10 |
11 | def timer(f, count=100):
12 | a = 1e300
13 | def g():
14 | return
15 | for i in xrange(3):
16 | start = time.time()
17 | for j in xrange(count):
18 | g()
19 | a = min(a, (time.time() - start) / count)
20 |
21 | b = 1e300
22 | for i in xrange(3):
23 | start = time.time()
24 | for j in xrange(count):
25 | f()
26 | b = min(b, (time.time() - start) / count)
27 |
28 | return round(b - a, int(round(math.log(count, 10) - math.log(b - a, 10))))
29 |
30 | contents = open('../../tests/text-test-data/yiwiki.xml', 'r').read()
31 | contents = contents.decode('utf-8')
32 |
33 | benchmarks = (
34 | find_first,
35 | )
36 |
37 | to_run = sys.argv[1:]
38 | bms = []
39 | if to_run:
40 | for r in to_run:
41 | for b in benchmarks:
42 | if b.__name__.startswith(r):
43 | bms.append(b)
44 | else:
45 | bms = benchmarks
46 |
47 | for b in bms:
48 | sys.stdout.write(b.__name__ + ' ')
49 | sys.stdout.flush()
50 | print b()
51 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/python/sort.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import utils, sys, codecs
4 |
5 | def sort(filename):
6 | content = open(filename, encoding='utf-8').read()
7 | lines = content.splitlines()
8 | lines.sort()
9 | print('\n'.join(lines))
10 |
11 | for f in sys.argv[1:]:
12 | t = utils.benchmark(lambda: sort(f))
13 | sys.stderr.write('{0}: {1}\n'.format(f, t))
14 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/python/strip_tags.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import utils, sys
4 |
5 | def strip_tags(filename):
6 | string = open(filename, encoding='utf-8').read()
7 |
8 | d = 0
9 | out = []
10 |
11 | for c in string:
12 | if c == '<': d += 1
13 |
14 | if d > 0:
15 | out += ' '
16 | else:
17 | out += c
18 |
19 | if c == '>': d -= 1
20 |
21 | print(''.join(out))
22 |
23 | for f in sys.argv[1:]:
24 | t = utils.benchmark(lambda: strip_tags(f))
25 | sys.stderr.write('{0}: {1}\n'.format(f, t))
26 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/python/utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import sys, time
4 |
5 | def benchmark_once(f):
6 | start = time.time()
7 | f()
8 | end = time.time()
9 | return end - start
10 |
11 | def benchmark(f):
12 | runs = 100
13 | total = 0.0
14 | for i in range(runs):
15 | result = benchmark_once(f)
16 | sys.stderr.write('Run {0}: {1}\n'.format(i, result))
17 | total += result
18 | return total / runs
19 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/ruby/cut.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require './utils.rb'
4 |
5 | def cut(filename, l, r)
6 | File.open(filename, 'r:utf-8') do |file|
7 | file.each_line do |line|
8 | puts line[l, r - l]
9 | end
10 | end
11 | end
12 |
13 | ARGV.each do |f|
14 | t = benchmark { cut(f, 20, 40) }
15 | STDERR.puts "#{f}: #{t}"
16 | end
17 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/ruby/fold.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require './utils.rb'
4 |
5 | def fold(filename, max_width)
6 | File.open(filename, 'r:utf-8') do |file|
7 | # Words in this paragraph
8 | paragraph = []
9 |
10 | file.each_line do |line|
11 | # If we encounter an empty line, we reformat and dump the current
12 | # paragraph
13 | if line.strip.empty?
14 | puts fold_paragraph(paragraph, max_width)
15 | puts
16 | paragraph = []
17 | # Otherwise, we append the words found in the line to the paragraph
18 | else
19 | paragraph.concat line.split
20 | end
21 | end
22 |
23 | # Last paragraph
24 | puts fold_paragraph(paragraph, max_width) unless paragraph.empty?
25 | end
26 | end
27 |
28 | # Fold a single paragraph to the desired width
29 | def fold_paragraph(paragraph, max_width)
30 | # Gradually build our output
31 | str, *rest = paragraph
32 | width = str.length
33 |
34 | rest.each do |word|
35 | if width + word.length + 1 <= max_width
36 | str << ' ' << word
37 | width += word.length + 1
38 | else
39 | str << "\n" << word
40 | width = word.length
41 | end
42 | end
43 |
44 | str
45 | end
46 |
47 | ARGV.each do |f|
48 | t = benchmark { fold(f, 80) }
49 | STDERR.puts "#{f}: #{t}"
50 | end
51 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/ruby/sort.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require './utils.rb'
4 |
5 | def sort(filename)
6 | File.open(filename, 'r:utf-8') do |file|
7 | content = file.read
8 | puts content.lines.sort.join
9 | end
10 | end
11 |
12 | ARGV.each do |f|
13 | t = benchmark { sort(f) }
14 | STDERR.puts "#{f}: #{t}"
15 | end
16 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/ruby/strip_tags.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require './utils.rb'
4 |
5 | def strip_tags(filename)
6 | File.open(filename, 'r:utf-8') do |file|
7 | str = file.read
8 |
9 | d = 0
10 |
11 | str.each_char do |c|
12 | d += 1 if c == '<'
13 | putc(if d > 0 then ' ' else c end)
14 | d -= 1 if c == '>'
15 | end
16 | end
17 | end
18 |
19 | ARGV.each do |f|
20 | t = benchmark { strip_tags(f) }
21 | STDERR.puts "#{f}: #{t}"
22 | end
23 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/benchmarks/ruby/utils.rb:
--------------------------------------------------------------------------------
1 | require 'benchmark'
2 |
3 | def benchmark(&block)
4 | runs = 100
5 | total = 0
6 |
7 | runs.times do |i|
8 | result = Benchmark.measure(&block).total
9 | $stderr.puts "Run #{i}: #{result}"
10 | total += result
11 | end
12 |
13 | total / runs
14 | end
15 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/cabal.project:
--------------------------------------------------------------------------------
1 | -- See http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html
2 | packages: ., benchmarks, th-tests
3 | tests: True
4 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/include/text_cbits.h:
--------------------------------------------------------------------------------
1 | /*
 2 |  * Copyright (c) 2013 Bryan O'Sullivan <bos@serpentine.com>.
3 | */
4 |
5 | #ifndef _text_cbits_h
6 | #define _text_cbits_h
7 |
8 | #define UTF8_ACCEPT 0
9 | #define UTF8_REJECT 12
10 |
11 | #endif
12 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/scripts/ApiCompare.hs:
--------------------------------------------------------------------------------
1 | -- This script compares the strict and lazy Text APIs to ensure that
2 | -- they're reasonably in sync.
3 |
4 | {-# LANGUAGE OverloadedStrings #-}
5 |
6 | import qualified Data.Set as S
7 | import qualified Data.Text as T
8 | import System.Process
9 |
10 | main = do
11 | let tidy pkg = (S.fromList . filter (T.isInfixOf "::") . T.lines .
12 | T.replace "GHC.Int.Int64" "Int" .
13 | T.replace "\n " "" .
14 | T.replace (T.append (T.pack pkg) ".") "" . T.pack) `fmap`
15 | readProcess "ghci" [] (":browse " ++ pkg)
16 | let diff a b = mapM_ (putStrLn . (" "++) . T.unpack) . S.toList $
17 | S.difference a b
18 | text <- tidy "Data.Text"
19 | lazy <- tidy "Data.Text.Lazy"
20 | list <- tidy "Data.List"
21 | putStrLn "Text \\ List:"
22 | diff text list
23 | putStrLn ""
24 | putStrLn "Text \\ Lazy:"
25 | diff text lazy
26 | putStrLn ""
27 | putStrLn "Lazy \\ Text:"
28 | diff lazy text
29 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/scripts/Arsec.hs:
--------------------------------------------------------------------------------
1 | module Arsec
2 | (
3 | Comment
4 | , comment
5 | , semi
6 | , showC
7 | , unichar
8 | , unichars
9 | , module Control.Applicative
10 | , module Control.Monad
11 | , module Data.Char
12 | , module Text.ParserCombinators.Parsec.Char
13 | , module Text.ParserCombinators.Parsec.Combinator
14 | , module Text.ParserCombinators.Parsec.Error
15 | , module Text.ParserCombinators.Parsec.Prim
16 | ) where
17 |
18 | import Control.Monad
19 | import Control.Applicative
20 | import Data.Char
21 | import Numeric
22 | import Text.ParserCombinators.Parsec.Char hiding (lower, upper)
23 | import Text.ParserCombinators.Parsec.Combinator hiding (optional)
24 | import Text.ParserCombinators.Parsec.Error
25 | import Text.ParserCombinators.Parsec.Prim hiding ((<|>), many)
26 |
27 | type Comment = String
28 |
29 | unichar :: Parser Char
30 | unichar = chr . fst . head . readHex <$> many1 hexDigit
31 |
32 | unichars :: Parser [Char]
33 | unichars = manyTill (unichar <* spaces) semi
34 |
35 | semi :: Parser ()
36 | semi = char ';' *> spaces *> pure ()
37 |
38 | comment :: Parser Comment
39 | comment = (char '#' *> manyTill anyToken (char '\n')) <|> string "\n"
40 |
41 | showC :: Char -> String
42 | showC c = "'\\x" ++ d ++ "'"
43 | where h = showHex (ord c) ""
44 | d = replicate (4 - length h) '0' ++ h
45 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/scripts/CaseFolding.hs:
--------------------------------------------------------------------------------
1 | -- This script processes the following source file:
2 | --
3 | -- http://unicode.org/Public/UNIDATA/CaseFolding.txt
4 |
5 | module CaseFolding
6 | (
7 | CaseFolding(..)
8 | , Fold(..)
9 | , parseCF
10 | , mapCF
11 | ) where
12 |
13 | import Arsec
14 |
15 | data Fold = Fold {
16 | code :: Char
17 | , status :: Char
18 | , mapping :: [Char]
19 | , name :: String
20 | } deriving (Eq, Ord, Show)
21 |
22 | data CaseFolding = CF { cfComments :: [Comment], cfFolding :: [Fold] }
23 | deriving (Show)
24 |
25 | entries :: Parser CaseFolding
26 | entries = CF <$> many comment <*> many (entry <* many comment)
27 | where
28 | entry = Fold <$> unichar <* semi
29 | <*> oneOf "CFST" <* semi
30 | <*> unichars
31 | <*> (string "# " *> manyTill anyToken (char '\n'))
32 |
33 | parseCF :: FilePath -> IO (Either ParseError CaseFolding)
34 | parseCF name = parse entries name <$> readFile name
35 |
36 | mapCF :: CaseFolding -> [String]
37 | mapCF (CF _ ms) = typ ++ (map nice . filter p $ ms) ++ [last]
38 | where
39 | typ = ["foldMapping :: forall s. Char -> s -> Step (CC s) Char"
40 | ,"{-# NOINLINE foldMapping #-}"]
41 | last = "foldMapping c s = Yield (toLower c) (CC s '\\0' '\\0')"
42 | nice c = "-- " ++ name c ++ "\n" ++
43 | "foldMapping " ++ showC (code c) ++ " s = Yield " ++ x ++ " (CC s " ++ y ++ " " ++ z ++ ")"
44 | where [x,y,z] = (map showC . take 3) (mapping c ++ repeat '\0')
45 | p f = status f `elem` "CF" &&
46 | mapping f /= [toLower (code f)]
47 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/scripts/CaseMapping.hs:
--------------------------------------------------------------------------------
1 | import System.Environment
2 | import System.IO
3 |
4 | import Arsec
5 | import CaseFolding
6 | import SpecialCasing
7 |
8 | main = do
9 | args <- getArgs
10 | let oname = case args of
11 | [] -> "../Data/Text/Internal/Fusion/CaseMapping.hs"
12 | [o] -> o
13 | psc <- parseSC "SpecialCasing.txt"
14 | pcf <- parseCF "CaseFolding.txt"
15 | scs <- case psc of
16 | Left err -> print err >> return undefined
17 | Right ms -> return ms
18 | cfs <- case pcf of
19 | Left err -> print err >> return undefined
20 | Right ms -> return ms
21 | h <- openFile oname WriteMode
22 | let comments = map ("--" ++) $
23 | take 2 (cfComments cfs) ++ take 2 (scComments scs)
24 | mapM_ (hPutStrLn h) $
25 | ["{-# LANGUAGE Rank2Types #-}"
26 | ,"-- AUTOMATICALLY GENERATED - DO NOT EDIT"
27 | ,"-- Generated by scripts/CaseMapping.hs"] ++
28 | comments ++
29 | [""
30 | ,"module Data.Text.Internal.Fusion.CaseMapping where"
31 | ,"import Data.Char"
32 | ,"import Data.Text.Internal.Fusion.Types"
33 | ,""]
34 | mapM_ (hPutStrLn h) (mapSC "upper" upper toUpper scs)
35 | mapM_ (hPutStrLn h) (mapSC "lower" lower toLower scs)
36 | mapM_ (hPutStrLn h) (mapSC "title" title toTitle scs)
37 | mapM_ (hPutStrLn h) (mapCF cfs)
38 | hClose h
39 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/.ghci:
--------------------------------------------------------------------------------
1 | :set -isrc -i../..
2 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/LiteralRuleTest.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | module LiteralRuleTest where
4 |
5 | import Data.Text (Text)
6 |
7 | -- This should produce 8 firings of the "TEXT literal" rule
8 | strings :: [Text]
9 | strings = [ "abstime", "aclitem", "bit", "bool", "box", "bpchar", "bytea", "char" ]
10 |
11 | -- This should produce 7 firings of the "TEXT literal UTF8" rule
12 | utf8Strings :: [Text]
13 | utf8Strings = [ "\0abstime", "\0aclitem", "\xfefe bit", "\0bool", "\0box", "\0bpchar", "\0bytea" ]
14 |
15 | -- This should produce 4 firings of the "TEXT empty literal" rule
16 | empties :: [Text]
17 | empties = [ "", "", "", "" ]
18 |
19 | -- This should produce 5 firings of the "TEXT empty literal" rule
20 | --singletons :: [Text]
21 | --singletons = [ "a", "b", "c", "d", "e" ]
22 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/Makefile:
--------------------------------------------------------------------------------
1 | VCS = hg
2 | count = 1000
3 |
4 | all: coverage literal-rule-test
5 |
6 | literal-rule-test:
7 | ./literal-rule-test.sh
8 |
9 | coverage: build coverage/hpc_index.html
10 |
11 | build: text-test-data
12 | cabal configure -fhpc
13 | cabal build
14 |
15 | text-test-data:
16 | ifeq ($(VCS),git)
17 | git clone https://github.com/bos/text-test-data.git
18 | else
19 | hg clone https://bitbucket.org/bos/text-test-data
20 | endif
21 | $(MAKE) -C text-test-data
22 |
23 | coverage/text-tests.tix:
24 | -mkdir -p coverage
25 | ./dist/build/text-tests/text-tests -a $(count)
26 | mv text-tests.tix $@
27 |
28 | coverage/text-tests-stdio.tix:
29 | -mkdir -p coverage
30 | ./scripts/cover-stdio.sh ./dist/build/text-tests-stdio/text-tests-stdio
31 | mv text-tests-stdio.tix $@
32 |
33 | coverage/coverage.tix: coverage/text-tests.tix coverage/text-tests-stdio.tix
34 | hpc combine --output=$@ \
35 | --exclude=Main \
36 | coverage/text-tests.tix \
37 | coverage/text-tests-stdio.tix
38 |
39 | coverage/hpc_index.html: coverage/coverage.tix
40 | hpc markup --destdir=coverage coverage/coverage.tix
41 |
42 | clean:
43 | rm -rf dist coverage .hpc
44 |
45 | .PHONY: all build clean coverage literal-rule-test
46 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/Tests.hs:
--------------------------------------------------------------------------------
1 | -- | Provides a simple main function which runs all the tests
2 | --
3 | module Main
4 | ( main
5 | ) where
6 |
7 | import Test.Framework (defaultMain)
8 |
9 | import qualified Tests.Properties as Properties
10 | import qualified Tests.Regressions as Regressions
11 |
12 | main :: IO ()
13 | main = defaultMain [Properties.tests, Regressions.tests]
14 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/Tests/IO.hs:
--------------------------------------------------------------------------------
1 | -- | Program which exposes some haskell functions as an exutable. The results
2 | -- and coverage of this module is meant to be checked using a shell script.
3 | --
4 | module Main
5 | (
6 | main
7 | ) where
8 |
9 | import System.Environment (getArgs)
10 | import System.Exit (exitFailure)
11 | import System.IO (hPutStrLn, stderr)
12 | import qualified Data.Text as T
13 | import qualified Data.Text.IO as T
14 | import qualified Data.Text.Lazy as TL
15 | import qualified Data.Text.Lazy.IO as TL
16 |
17 | main :: IO ()
18 | main = do
19 | args <- getArgs
20 | case args of
21 | ["T.readFile", name] -> T.putStr =<< T.readFile name
22 | ["T.writeFile", name, t] -> T.writeFile name (T.pack t)
23 | ["T.appendFile", name, t] -> T.appendFile name (T.pack t)
24 | ["T.interact"] -> T.interact id
25 | ["T.getContents"] -> T.putStr =<< T.getContents
26 | ["T.getLine"] -> T.putStrLn =<< T.getLine
27 |
28 | ["TL.readFile", name] -> TL.putStr =<< TL.readFile name
29 | ["TL.writeFile", name, t] -> TL.writeFile name (TL.pack t)
30 | ["TL.appendFile", name, t] -> TL.appendFile name (TL.pack t)
31 | ["TL.interact"] -> TL.interact id
32 | ["TL.getContents"] -> TL.putStr =<< TL.getContents
33 | ["TL.getLine"] -> TL.putStrLn =<< TL.getLine
34 | _ -> hPutStrLn stderr "invalid directive!" >> exitFailure
35 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/Tests/Properties/Mul.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE ScopedTypeVariables #-}
2 |
3 | module Tests.Properties.Mul (tests) where
4 |
5 | import Control.Applicative ((<$>), pure)
6 | import Control.Exception as E (SomeException, catch, evaluate)
7 | import Data.Int (Int32, Int64)
8 | import Data.Text.Internal (mul, mul32, mul64)
9 | import System.IO.Unsafe (unsafePerformIO)
10 | import Test.Framework (Test)
11 | import Test.Framework.Providers.QuickCheck2 (testProperty)
12 | import Test.QuickCheck hiding ((.&.))
13 |
14 | mulRef :: (Integral a, Bounded a) => a -> a -> Maybe a
15 | mulRef a b
16 | | ab < bot || ab > top = Nothing
17 | | otherwise = Just (fromIntegral ab)
18 | where ab = fromIntegral a * fromIntegral b
19 | top = fromIntegral (maxBound `asTypeOf` a) :: Integer
20 | bot = fromIntegral (minBound `asTypeOf` a) :: Integer
21 |
22 | eval :: (a -> b -> c) -> a -> b -> Maybe c
23 | eval f a b = unsafePerformIO $
24 | (Just <$> evaluate (f a b)) `E.catch` (\(_::SomeException) -> pure Nothing)
25 |
26 | t_mul32 :: Int32 -> Int32 -> Property
27 | t_mul32 a b = mulRef a b === eval mul32 a b
28 |
29 | t_mul64 :: Int64 -> Int64 -> Property
30 | t_mul64 a b = mulRef a b === eval mul64 a b
31 |
32 | t_mul :: Int -> Int -> Property
33 | t_mul a b = mulRef a b === eval mul a b
34 |
35 | tests :: [Test]
36 | tests = [
37 | testProperty "t_mul" t_mul
38 | , testProperty "t_mul32" t_mul32
39 | , testProperty "t_mul64" t_mul64
40 | ]
41 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/Tests/SlowFunctions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns #-}
2 | module Tests.SlowFunctions
3 | (
4 | indices
5 | , splitOn
6 | ) where
7 |
8 | import qualified Data.Text as T
9 | import Data.Text.Internal (Text(..))
10 | import Data.Text.Unsafe (iter_, unsafeHead, unsafeTail)
11 |
12 | indices :: T.Text -- ^ Substring to search for (@needle@)
13 | -> T.Text -- ^ Text to search in (@haystack@)
14 | -> [Int]
15 | indices needle@(Text _narr _noff nlen) haystack@(Text harr hoff hlen)
16 | | T.null needle = []
17 | | otherwise = scan 0
18 | where
19 | scan i | i >= hlen = []
20 | | needle `T.isPrefixOf` t = i : scan (i+nlen)
21 | | otherwise = scan (i+d)
22 | where t = Text harr (hoff+i) (hlen-i)
23 | d = iter_ haystack i
24 |
25 | splitOn :: T.Text -- ^ Text to split on
26 | -> T.Text -- ^ Input text
27 | -> [T.Text]
28 | splitOn pat src0
29 | | T.null pat = error "splitOn: empty"
30 | | l == 1 = T.split (== (unsafeHead pat)) src0
31 | | otherwise = go src0
32 | where
33 | l = T.length pat
34 | go src = search 0 src
35 | where
36 | search !n !s
37 | | T.null s = [src] -- not found
38 | | pat `T.isPrefixOf` s = T.take n src : go (T.drop l s)
39 | | otherwise = search (n+1) (unsafeTail s)
40 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/cabal.config:
--------------------------------------------------------------------------------
1 | -- These flags help to speed up building the test suite.
2 |
3 | documentation: False
4 | executable-stripping: False
5 | flags: developer
6 | library-profiling: False
7 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/literal-rule-test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 |
3 | failed=0
4 |
5 | function check_firings() {
6 | rule=$1
7 | expected=$2
8 | build="ghc -O -ddump-rule-firings LiteralRuleTest.hs"
9 | build="$build -i.. -I../include"
10 | touch LiteralRuleTest.hs
11 | echo -n "Want to see $expected firings of rule $rule... " >&2
12 | firings=$($build 2>&1 | grep "Rule fired: $rule\$" | wc -l)
13 |     rm -f LiteralRuleTest.{o,hi}
14 |
15 | if [ $firings != $expected ]; then
16 | echo "failed, saw $firings" >&2
17 | failed=1
18 | else
19 | echo "pass" >&2
20 | fi
21 | }
22 |
23 | check_firings "TEXT literal" 8
24 | check_firings "TEXT literal UTF8" 7
25 | check_firings "TEXT empty literal" 4
26 | # This is broken at the moment. "TEXT literal" rule fires instead.
27 | #check_firings "TEXT singleton literal" 5
28 |
29 | exit $failed
30 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/tests/scripts/cover-stdio.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [[ $# < 1 ]]; then
 4 |     echo "Usage: $0 <path-to-test-executable>"
5 | exit 1
6 | fi
7 |
8 | exe=$1
9 |
10 | rm -f $exe.tix
11 |
12 | f=$(mktemp stdio-f.XXXXXX)
13 | g=$(mktemp stdio-g.XXXXXX)
14 |
15 | for t in T TL; do
16 | echo $t.readFile > $f
17 | $exe $t.readFile $f > $g
18 | if ! diff -u $f $g; then
19 | errs=$((errs+1))
20 | echo FAIL: $t.readFile 1>&2
21 | fi
22 |
23 | $exe $t.writeFile $f $t.writeFile
24 | echo -n $t.writeFile > $g
25 | if ! diff -u $f $g; then
26 | errs=$((errs+1))
27 | echo FAIL: $t.writeFile 1>&2
28 | fi
29 |
30 | echo -n quux > $f
31 | $exe $t.appendFile $f $t.appendFile
32 | echo -n quux$t.appendFile > $g
33 | if ! diff -u $f $g; then
34 | errs=$((errs+1))
35 | echo FAIL: $t.appendFile 1>&2
36 | fi
37 |
38 | echo $t.interact | $exe $t.interact > $f
39 | echo $t.interact > $g
40 | if ! diff -u $f $g; then
41 | errs=$((errs+1))
42 | echo FAIL: $t.interact 1>&2
43 | fi
44 |
45 | echo $t.getContents | $exe $t.getContents > $f
46 | echo $t.getContents > $g
47 | if ! diff -u $f $g; then
48 | errs=$((errs+1))
49 | echo FAIL: $t.getContents 1>&2
50 | fi
51 |
52 | echo $t.getLine | $exe $t.getLine > $f
53 | echo $t.getLine > $g
54 | if ! diff -u $f $g; then
55 | errs=$((errs+1))
56 | echo FAIL: $t.getLine 1>&2
57 | fi
58 | done
59 |
60 | rm -f $f $g
61 |
62 | exit $errs
63 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/th-tests/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2008-2009, Tom Harper
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions
6 | are met:
7 |
8 | * Redistributions of source code must retain the above copyright
9 | notice, this list of conditions and the following disclaimer.
10 |
11 | * Redistributions in binary form must reproduce the above
12 | copyright notice, this list of conditions and the following
13 | disclaimer in the documentation and/or other materials provided
14 | with the distribution.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/th-tests/tests/Lift.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | {-# LANGUAGE TemplateHaskell #-}
3 | module Lift
4 | ( tests
5 | )
6 | where
7 |
8 | import qualified Data.Text as S
9 | import qualified Data.Text.Lazy as L
10 | import Language.Haskell.TH.Syntax (lift)
11 | import Test.HUnit (assertBool, assertEqual, assertFailure)
12 | import qualified Test.Framework as F
13 | import qualified Test.Framework.Providers.HUnit as F
14 |
15 | tests :: F.Test
16 | tests = F.testGroup "TH lifting Text"
17 | [ F.testCase "strict" $ assertEqual "strict"
18 | $(lift ("foo" :: S.Text))
19 | ("foo" :: S.Text)
20 | , F.testCase "lazy" $ assertEqual "lazy"
21 | $(lift ("foo" :: L.Text))
22 | ("foo" :: L.Text)
23 | ]
24 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/th-tests/tests/th-tests.hs:
--------------------------------------------------------------------------------
1 | -- | Provides a simple main function which runs all the tests
2 | --
3 | module Main
4 | ( main
5 | ) where
6 |
7 | import Test.Framework (defaultMain)
8 |
9 | import qualified Lift
10 |
11 | main :: IO ()
12 | main = defaultMain [Lift.tests]
13 |
--------------------------------------------------------------------------------
/test-project/text-1.2.4.0/th-tests/th-tests.cabal:
--------------------------------------------------------------------------------
1 | cabal-version: 2.2
2 | name: th-tests
3 | version: 0
4 | description:
5 | Tests that use 'Text' functions during compile time.
6 | .
7 | These are in a separate package because of https://github.com/haskell/cabal/issues/5623
8 | license: BSD-2-Clause
9 | license-file: LICENSE
10 |
11 | test-suite th-tests
12 | default-language: Haskell2010
13 | type: exitcode-stdio-1.0
14 | hs-source-dirs:
15 | tests/
16 | main-is: th-tests.hs
17 | other-modules:
18 | Lift
19 | build-depends:
20 | HUnit >= 1.2,
21 | base,
22 | template-haskell,
23 | text,
24 | test-framework >= 0.4,
25 | test-framework-hunit >= 0.2
26 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/.cirrus.yml:
--------------------------------------------------------------------------------
1 | freebsd_instance:
2 | image_family: freebsd-13-0
3 |
4 | task:
5 | install_script: pkg install -y ghc hs-cabal-install git
6 | script:
7 | - cabal update
8 | - cabal test --test-show-details=direct
9 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "github-actions"
4 | directory: "/"
5 | schedule:
6 | interval: "daily"
7 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/.github/workflows/windows_and_macOS.yml:
--------------------------------------------------------------------------------
1 | name: win-mac-ci
2 | on:
3 | push:
4 | branches:
5 | - master
6 | pull_request: {}
7 |
8 | defaults:
9 | run:
10 | shell: bash
11 |
12 | jobs:
13 | build:
14 | runs-on: ${{ matrix.os }}
15 | strategy:
16 | matrix:
17 | os: ['windows-latest', 'macOS-latest']
18 | ghc: ['9.0']
19 | fail-fast: false
20 | steps:
21 | - uses: actions/checkout@v2.3.4
22 | - uses: haskell/actions/setup@v1.2.1
23 | id: setup-haskell-cabal
24 | with:
25 | ghc-version: ${{ matrix.ghc }}
26 | - name: Update cabal package database
27 | run: cabal update
28 | - uses: actions/cache@v2.1.6
29 | name: Cache cabal stuff
30 | with:
31 | path: ${{ steps.setup-haskell-cabal.outputs.cabal-store }}
32 | key: ${{ runner.os }}-${{ matrix.ghc }}
33 | # We rebuild tests several times to avoid intermittent failures on Windows
34 | # https://github.com/haskell/actions/issues/36
35 | - name: Test
36 | run: |
37 | bld() { cabal build pkg:text:tests; }
38 | bld || bld || bld
39 | cabal test
40 | - name: Haddock
41 | run: cabal haddock
42 | - name: SDist
43 | run: cabal sdist
44 | - name: Build bench
45 | run: cabal bench --benchmark-option=-l
46 | if: contains(matrix.os, 'macOS')
47 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/.gitignore:
--------------------------------------------------------------------------------
1 | /dist/
2 | /dist-boot/
3 | /dist-install/
4 | /dist-newstyle/
5 | /cabal-dev/
6 | /cabal.sandbox.config
7 | /ghc.mk
8 | /GNUmakefile
9 | /.ghc.environment.*
10 | /cabal.project.local
11 | /cabal.test.project.local
12 |
13 | # Test data repo ignored. Please see instruction in tests-and-benchmarks.markdown
14 | /benchmarks/text-test-data/
15 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2008-2009, Tom Harper
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions
6 | are met:
7 |
8 | * Redistributions of source code must retain the above copyright
9 | notice, this list of conditions and the following disclaimer.
10 |
11 | * Redistributions in binary form must reproduce the above
12 | copyright notice, this list of conditions and the following
13 | disclaimer in the documentation and/or other materials provided
14 | with the distribution.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/README.markdown:
--------------------------------------------------------------------------------
1 | # `text`: Fast, packed Unicode strings, using stream fusion [![Hackage](https://img.shields.io/hackage/v/text.svg)](https://hackage.haskell.org/package/text)
2 |
3 | This package provides the Data.Text library, a library for the space-
4 | and time-efficient manipulation of Unicode text in Haskell.
5 |
6 | # Get involved!
7 |
8 | Please report bugs via the
9 | [github issue tracker](https://github.com/haskell/text/issues).
10 |
11 | The main repo:
12 |
13 | ```bash
14 | git clone git://github.com/haskell/text.git
15 | ```
16 |
17 | To run benchmarks please clone and unpack test files:
18 |
19 | ```bash
20 | git clone https://github.com/bos/text-test-data benchmarks/text-test-data
21 | cd benchmarks/text-test-data
22 | make
23 | ```
24 |
25 | # Authors
26 |
27 | The base code for this library was originally written by Tom Harper,
28 | based on the stream fusion framework developed by Roman Leshchinskiy,
29 | Duncan Coutts, and Don Stewart.
30 |
31 | The core library was fleshed out, debugged, and tested by Bryan
32 | O'Sullivan <bos@serpentine.com>, and he is the current maintainer.
33 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/Setup.lhs:
--------------------------------------------------------------------------------
#!/usr/bin/env runhaskell

This is a literate Haskell Setup script: lines without a leading '>'
are comments.  It delegates the entire build to Cabal's default
Simple build type.

> import Distribution.Simple
> main = defaultMain
4 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/AUTHORS:
--------------------------------------------------------------------------------
1 | Jasper Van der Jeugt
2 | Bryan O'Sullivan
3 | Tom Harper
4 | Duncan Coutts
5 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/Concat.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | module Benchmarks.Concat (benchmark) where
4 |
5 | import Control.Monad.Trans.Writer
6 | import Test.Tasty.Bench (Benchmark, bgroup, bench, whnf)
7 | import Data.Text as T
8 |
-- | Benchmark group comparing three strategies for concatenating four
-- 'Text' values: chained 'append', 'T.concat' on a list, and the
-- Writer monad's 'tell' collected with 'execWriter'.
benchmark :: Benchmark
benchmark = bgroup "Concat"
    [ bench "append" $ whnf (append4 "Text 1" "Text 2" "Text 3") "Text 4"
    , bench "concat" $ whnf (concat4 "Text 1" "Text 2" "Text 3") "Text 4"
    , bench "write" $ whnf (write4 "Text 1" "Text 2" "Text 3") "Text 4"
    ]

append4, concat4, write4 :: Text -> Text -> Text -> Text -> Text

-- NOINLINE on each variant keeps GHC from inlining (and potentially
-- optimising away) the operation under test, so each benchmark measures
-- the library call itself.
{-# NOINLINE append4 #-}
append4 x1 x2 x3 x4 = x1 `append` x2 `append` x3 `append` x4

{-# NOINLINE concat4 #-}
concat4 x1 x2 x3 x4 = T.concat [x1, x2, x3, x4]

{-# NOINLINE write4 #-}
write4 x1 x2 x3 x4 = execWriter $ tell x1 >> tell x2 >> tell x3 >> tell x4
26 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/EncodeUtf8.hs:
--------------------------------------------------------------------------------
1 | -- | UTF-8 encode a text
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Replicating a string a number of times
6 | --
7 | -- * UTF-8 encoding it
8 | --
9 | module Benchmarks.EncodeUtf8
10 | ( benchmark
11 | ) where
12 |
13 | import Test.Tasty.Bench (Benchmark, bgroup, bench, whnf)
14 | import qualified Data.ByteString as B
15 | import qualified Data.ByteString.Lazy as BL
16 | import qualified Data.Text as T
17 | import qualified Data.Text.Encoding as T
18 | import qualified Data.Text.Lazy as TL
19 | import qualified Data.Text.Lazy.Encoding as TL
20 |
-- | UTF-8 encode a large text built by replicating @string@ 100000
-- times, for both strict and lazy 'Text'.  Taking the encoded length
-- forces the encoding to actually run.
benchmark :: String -> String -> Benchmark
benchmark name string =
    bgroup name
        [ bench "Text" $ whnf (B.length . T.encodeUtf8) text
        , bench "LazyText" $ whnf (BL.length . TL.encodeUtf8) lazyText
        ]
  where
    -- The same replicated input in both Text representations.
    text = T.replicate k $ T.pack string
    lazyText = TL.replicate (fromIntegral k) $ TL.pack string

    -- Number of repetitions of the input string.
    k = 100000
34 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/Equality.hs:
--------------------------------------------------------------------------------
1 | -- | Compare a string with a copy of itself that is identical except
2 | -- for the last character.
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Comparison of strings (Eq instance)
7 | --
8 | module Benchmarks.Equality
9 | ( initEnv
10 | , benchmark
11 | ) where
12 |
13 | import Test.Tasty.Bench (Benchmark, bgroup, bench, whnf)
14 | import qualified Data.ByteString.Char8 as B
15 | import qualified Data.ByteString.Lazy.Char8 as BL
16 | import qualified Data.Text as T
17 | import qualified Data.Text.Encoding as T
18 | import qualified Data.Text.Lazy as TL
19 | import qualified Data.Text.Lazy.Encoding as TL
20 |
-- | Benchmark environment: the file contents as strict and lazy Text.
type Env = (T.Text, TL.Text)

-- | Read the input file and UTF-8 decode it into both Text flavours.
initEnv :: FilePath -> IO Env
initEnv fp = do
  b <- B.readFile fp
  bl1 <- BL.readFile fp
  return (T.decodeUtf8 b, TL.decodeUtf8 bl1)

-- | Compare each text with a copy that differs only in its final
-- character (replaced with U+FFFD), forcing a near-complete traversal.
-- The lazy pattern keeps the environment unevaluated until sampling.
benchmark :: Env -> Benchmark
benchmark ~(t, tl) =
  bgroup "Equality"
    [ bench "Text" $ whnf (== T.init t `T.snoc` '\xfffd') t
    , bench "LazyText" $ whnf (== TL.init tl `TL.snoc` '\xfffd') tl
    ]
35 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/FileRead.hs:
--------------------------------------------------------------------------------
1 | -- | Benchmarks simple file reading
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Reading a file from the disk
6 | --
7 |
8 | {-# LANGUAGE CPP #-}
9 |
10 | module Benchmarks.FileRead
11 | ( benchmark
12 | ) where
13 |
14 | import Test.Tasty.Bench (Benchmark, bgroup, bench, whnfIO)
15 | import qualified Data.ByteString as SB
16 | import qualified Data.ByteString.Lazy as LB
17 | import qualified Data.Text as T
18 | import qualified Data.Text.Encoding as T
19 | import qualified Data.Text.IO as T
20 | import qualified Data.Text.Lazy as LT
21 | import qualified Data.Text.Lazy.Encoding as LT
22 | import qualified Data.Text.Lazy.IO as LT
23 |
-- | Read a whole file four ways: directly as strict/lazy Text, and as a
-- strict/lazy ByteString followed by UTF-8 decoding.  Taking the length
-- forces the read (and decode) to complete.
benchmark :: FilePath -> Benchmark
benchmark p = bgroup "FileRead"
    [ bench "Text" $ whnfIO $ T.length <$> T.readFile p
    , bench "LazyText" $ whnfIO $ LT.length <$> LT.readFile p
    , bench "TextByteString" $ whnfIO $
        (T.length . T.decodeUtf8) <$> SB.readFile p
    , bench "LazyTextByteString" $ whnfIO $
        (LT.length . LT.decodeUtf8) <$> LB.readFile p
    ]
33 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/FoldLines.hs:
--------------------------------------------------------------------------------
1 | -- | Read a file line-by-line using handles, and perform a fold over the lines.
2 | -- The fold is used here to calculate the number of lines in the file.
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Buffered, line-based IO
7 | --
8 | {-# LANGUAGE BangPatterns #-}
9 | module Benchmarks.FoldLines
10 | ( benchmark
11 | ) where
12 |
13 | import Test.Tasty.Bench (Benchmark, bgroup, bench, whnfIO)
14 | import System.IO
15 | import qualified Data.Text as T
16 | import qualified Data.Text.IO as T
17 |
-- | Count the lines of a file by folding over successive 'T.hGetLine'
-- results on a block-buffered handle.
benchmark :: FilePath -> Benchmark
benchmark fp = bgroup "ReadLines"
    [ bench "Text" $ withHandle $ foldLinesT (\n _ -> n + 1) (0 :: Int)
    ]
  where
    -- Open the file fresh for every sample, with a 16 KiB block buffer.
    withHandle f = whnfIO $ do
        h <- openFile fp ReadMode
        hSetBuffering h (BlockBuffering (Just 16384))
        x <- f h
        hClose h
        return x

-- | Strict left fold over the lines of a handle, reading until EOF.
--
-- The bang pattern on the accumulator prevents building a chain of
-- thunks across lines.
foldLinesT :: (a -> T.Text -> a) -> a -> Handle -> IO a
foldLinesT f z0 h = go z0
  where
    go !z = do
        eof <- hIsEOF h
        if eof
            then return z
            else do
                l <- T.hGetLine h
                let z' = f z l in go z'
{-# INLINE foldLinesT #-}
43 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/Multilang.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, OverloadedStrings, RankNTypes #-}
2 |
3 | module Benchmarks.Multilang (benchmark) where
4 |
5 | import qualified Data.ByteString as B
6 | import qualified Data.Text as Text
7 | import Data.Text.Encoding (decodeUtf8)
8 | import Data.Text (Text)
9 | import Test.Tasty.Bench (Benchmark, bgroup, bench, env, nf)
10 |
-- | Load the Yiddish-Wikipedia test corpus and UTF-8 decode it.
readYiwiki :: IO Text
readYiwiki = decodeUtf8 `fmap` B.readFile "benchmarks/text-test-data/yiwiki.xml"

-- | Substring search and single-character search over a large
-- multilingual document; the corpus is loaded once via 'env'.
benchmark :: Benchmark
benchmark = env readYiwiki $ \content -> bgroup "Multilang"
  [ bench "find_first" $ nf (Text.isInfixOf "en:Benin") content
  , bench "find_index" $ nf (Text.findIndex (=='c')) content
  ]
19 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/Programs/BigTable.hs:
--------------------------------------------------------------------------------
1 | -- | Create a large HTML table and dump it to a handle
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Creating a large HTML document using a builder
6 | --
7 | -- * Writing to a handle
8 | --
9 | {-# LANGUAGE CPP, OverloadedStrings #-}
10 | module Benchmarks.Programs.BigTable
11 | ( benchmark
12 | ) where
13 |
14 | import Test.Tasty.Bench (Benchmark, bench, whnfIO)
15 | import Data.Text.Lazy.Builder (Builder, fromText, toLazyText)
16 | import Data.Text.Lazy.IO (hPutStr)
17 | import System.IO (Handle)
18 | import qualified Data.Text as T
19 |
-- | Build a 20000-row, 50-column HTML table with a 'Builder' and write
-- it to the sink handle.
--
-- NOTE: the HTML tags inside the string literals had been stripped in
-- this copy (leaving unterminated strings); restored from the upstream
-- text-package benchmark.
benchmark :: Handle -> Benchmark
benchmark sink = bench "BigTable" $ whnfIO $ do
    hPutStr sink "Content-Type: text/html\n\n<table>"
    hPutStr sink . toLazyText . makeTable =<< rows
    hPutStr sink "</table>"
  where
    -- We provide the number of rows in IO so the builder value isn't shared
    -- between the benchmark samples.
    rows :: IO Int
    rows = return 20000
    {-# NOINLINE rows #-}

-- | @n@ copies of one 50-cell table row.
makeTable :: Int -> Builder
makeTable n = mconcat $ replicate n $ mconcat $ map makeCol [1 .. 50]

-- | One table cell; cell 1 opens the row and cell 50 closes it.
makeCol :: Int -> Builder
makeCol 1 = fromText "<tr><td>1</td>"
makeCol 50 = fromText "<td>50</td></tr>"
makeCol i = fromText "<td>" `mappend` (fromInt i `mappend` fromText "</td>")

-- | Render an 'Int' into a 'Builder' via its 'show' representation.
fromInt :: Int -> Builder
fromInt = fromText . T.pack . show
42 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/Programs/StripTags.hs:
--------------------------------------------------------------------------------
1 | -- | Program to replace HTML tags by whitespace
2 | --
3 | -- This program was originally contributed by Petr Prokhorenkov.
4 | --
5 | -- Tested in this benchmark:
6 | --
7 | -- * Reading the file
8 | --
9 | -- * Replacing text between HTML tags (<>) with whitespace
10 | --
11 | -- * Writing back to a handle
12 | --
13 | {-# OPTIONS_GHC -fspec-constr-count=5 #-}
14 | module Benchmarks.Programs.StripTags
15 | ( benchmark
16 | ) where
17 |
18 | import Test.Tasty.Bench (Benchmark, bgroup, bench, whnfIO)
19 | import System.IO (Handle)
20 | import qualified Data.ByteString as B
21 | import qualified Data.Text as T
22 | import qualified Data.Text.Encoding as T
23 | import qualified Data.Text.IO as T
24 |
-- | Strip tags from the input file and write the result to the sink:
-- once through Data.Text.IO directly, once through ByteString with
-- explicit UTF-8 decode/encode around the transformation.
benchmark :: FilePath -> Handle -> Benchmark
benchmark i o = bgroup "StripTags"
    [ bench "Text" $ whnfIO $ T.readFile i >>= T.hPutStr o . text
    , bench "TextByteString" $ whnfIO $
        B.readFile i >>= B.hPutStr o . T.encodeUtf8 . text . T.decodeUtf8
    ]

-- | Run the depth-tracking scan over the whole text, keeping only the
-- rewritten characters (the final depth is discarded).
text :: T.Text -> T.Text
text = snd . T.mapAccumL step 0
34 |
-- | One step of the tag-stripping scan: maintain a bracket-depth
-- accumulator ('>' raises it, '<' lowers it) and replace the current
-- character with a space whenever the depth before or after this
-- character is positive.
step :: Int -> Char -> (Int, Char)
step d c =
    if d > 0 || newDepth > 0
        then (newDepth, ' ')
        else (newDepth, c)
  where
    newDepth = d + delta
    delta = case c of
        '>' -> 1
        '<' -> -1
        _   -> 0
44 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/Programs/Throughput.hs:
--------------------------------------------------------------------------------
1 | -- | This benchmark simply reads and writes a file using the various string
2 | -- libraries. The point of it is that we can make better estimations on how
3 | -- much time the other benchmarks spend doing IO.
4 | --
5 | -- Note that we expect ByteStrings to be a whole lot faster, since they do not
6 | -- do any actual encoding/decoding here, while String and Text do have UTF-8
7 | -- encoding/decoding.
8 | --
9 | -- Tested in this benchmark:
10 | --
11 | -- * Reading the file
12 | --
13 | -- * Decoding and re-encoding it (in the ByteString variants)
14 | --
15 | -- * Writing back to a handle
16 | --
17 | module Benchmarks.Programs.Throughput
18 | ( benchmark
19 | ) where
20 |
21 | import Test.Tasty.Bench (Benchmark, bgroup, bench, whnfIO)
22 | import System.IO (Handle)
23 | import qualified Data.ByteString as B
24 | import qualified Data.ByteString.Lazy as BL
25 | import qualified Data.Text.Encoding as T
26 | import qualified Data.Text.IO as T
27 | import qualified Data.Text.Lazy.Encoding as TL
28 | import qualified Data.Text.Lazy.IO as TL
29 |
-- | Round-trip the file through each string type: read it, then write
-- it straight back to the sink handle.  The ByteString variants UTF-8
-- decode and re-encode in between, so they also measure codec cost.
benchmark :: FilePath -> Handle -> Benchmark
benchmark fp sink = bgroup "Throughput"
    [ bench "Text" $ whnfIO $ T.readFile fp >>= T.hPutStr sink
    , bench "LazyText" $ whnfIO $ TL.readFile fp >>= TL.hPutStr sink
    , bench "TextByteString" $ whnfIO $
        B.readFile fp >>= B.hPutStr sink . T.encodeUtf8 . T.decodeUtf8
    , bench "LazyTextByteString" $ whnfIO $
        BL.readFile fp >>= BL.hPutStr sink . TL.encodeUtf8 . TL.decodeUtf8
    ]
39 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/Replace.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns #-}
2 | -- | Replace a string by another string
3 | --
4 | -- Tested in this benchmark:
5 | --
6 | -- * Search and replace of a pattern in a text
7 | --
8 | module Benchmarks.Replace
9 | ( benchmark
10 | , initEnv
11 | ) where
12 |
13 | import Test.Tasty.Bench (Benchmark, bgroup, bench, nf)
14 | import qualified Data.Text as T
15 | import qualified Data.Text.Lazy as TL
16 | import qualified Data.Text.Lazy.IO as TL
17 |
-- | Benchmark environment: the input file as strict and lazy Text.
type Env = (T.Text, TL.Text)

-- | Read the file lazily, and force a strict copy up front so the
-- strict benchmark does not pay the conversion cost during sampling.
initEnv :: FilePath -> IO Env
initEnv fp = do
  tl <- TL.readFile fp
  let !t = TL.toStrict tl
  return (t, tl)

-- | Replace every occurrence of @pat@ with @sub@; taking the length
-- forces the result.  The lazy pattern defers the environment until
-- sampling.
benchmark :: String -> String -> Env -> Benchmark
benchmark pat sub ~(t, tl) =
  bgroup "Replace" [
      bench "Text" $ nf (T.length . T.replace tpat tsub) t
    , bench "LazyText" $ nf (TL.length . TL.replace tlpat tlsub) tl
    ]
  where
    -- Pattern and substitution packed once, in both representations.
    tpat = T.pack pat
    tsub = T.pack sub
    tlpat = TL.pack pat
    tlsub = TL.pack sub
37 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/Search.hs:
--------------------------------------------------------------------------------
1 | -- | Search for a pattern in a file, find the number of occurences
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Searching all occurences of a pattern using library routines
6 | --
7 | module Benchmarks.Search
8 | ( initEnv
9 | , benchmark
10 | ) where
11 |
12 | import Test.Tasty.Bench (Benchmark, bench, bgroup, whnf)
13 | import qualified Data.Text as T
14 | import qualified Data.Text.IO as T
15 | import qualified Data.Text.Lazy as TL
16 | import qualified Data.Text.Lazy.IO as TL
17 |
-- | Benchmark environment: the haystack as strict and lazy Text.
type Env = (T.Text, TL.Text)

-- | Read the input file into both Text representations.
initEnv :: FilePath -> IO Env
initEnv fp = do
  t <- T.readFile fp
  tl <- TL.readFile fp
  return (t, tl)

-- | Count occurrences of the needle in the haystack using the library
-- search routines.
benchmark :: T.Text -> Env -> Benchmark
benchmark needleT ~(t, tl) =
  bgroup "FileIndices"
    [ bench "Text" $ whnf (text needleT) t
    , bench "LazyText" $ whnf (lazyText needleTL) tl
    ]
  where
    -- Reuse the strict needle as a single-chunk lazy Text.
    needleTL = TL.fromChunks [needleT]

-- | Strict occurrence count ('T.count').
text :: T.Text -> T.Text -> Int
text = T.count

-- | Lazy occurrence count, narrowed from Int64 to Int.
lazyText :: TL.Text -> TL.Text -> Int
lazyText needle = fromIntegral . TL.count needle
40 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/benchmarks/haskell/Benchmarks/WordFrequencies.hs:
--------------------------------------------------------------------------------
1 | -- | A word frequency count using the different string types
2 | --
3 | -- Tested in this benchmark:
4 | --
5 | -- * Splitting into words
6 | --
7 | -- * Converting to lowercase
8 | --
9 | -- * Comparing: Eq/Ord instances
10 | --
11 | module Benchmarks.WordFrequencies
12 | ( initEnv
13 | , benchmark
14 | ) where
15 |
16 | import Test.Tasty.Bench (Benchmark, bench, bgroup, whnf)
17 | import Data.List (foldl')
18 | import Data.Map (Map)
19 | import qualified Data.Map as M
20 | import qualified Data.Text as T
21 | import qualified Data.Text.IO as T
22 |
-- | Benchmark environment: the file contents as strict Text.
type Env = T.Text

-- | Read the input file.
initEnv :: FilePath -> IO Env
initEnv fp = do
  t <- T.readFile fp
  return t

-- | Lowercase the text, split it into words, and build a word-frequency
-- map; exercises splitting, case conversion, and the Eq/Ord instances.
benchmark :: Env -> Benchmark
benchmark ~t =
  bgroup "WordFrequencies"
    [ bench "Text" $ whnf (frequencies . T.words . T.toLower) t
    ]
35 |
-- | Count how many times each element occurs, accumulating the counts
-- in a 'Map' with a strict left fold over the input list.
frequencies :: Ord a => [a] -> Map a Int
frequencies xs = foldl' bump M.empty xs
  where
    -- Increment (or create) the count for one key.
    bump acc k = M.insertWith (+) k 1 acc
38 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/cabal.haskell-ci:
--------------------------------------------------------------------------------
1 | benchmarks: >=7.6
2 | doctest: <9.0
3 | doctest-options: -fobject-code -DINTEGER_GMP
4 | branches: master
5 |
6 | installed: +all -text -parsec
7 | install-dependencies: False
8 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/cabal.project:
--------------------------------------------------------------------------------
1 | packages: .
2 | tests: True
3 | benchmarks: True
4 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/include/text_cbits.h:
--------------------------------------------------------------------------------
/*
 * Copyright (c) 2013 Bryan O'Sullivan <bos@serpentine.com>.
 */

#ifndef _text_cbits_h
#define _text_cbits_h

/* UTF-8 decoder states: 0 = accept (complete code point decoded),
   12 = reject (invalid byte sequence).
   NOTE(review): values look like Hoehrmann-style DFA states -- confirm
   against the C decoder that consumes them. */
#define UTF8_ACCEPT 0
#define UTF8_REJECT 12

#endif
12 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/scripts/ApiCompare.hs:
--------------------------------------------------------------------------------
1 | -- This script compares the strict and lazy Text APIs to ensure that
2 | -- they're reasonably in sync.
3 |
4 | {-# LANGUAGE OverloadedStrings #-}
5 |
6 | import qualified Data.Set as S
7 | import qualified Data.Text as T
8 | import System.Process
9 |
-- Compare the exported APIs of Data.Text, Data.Text.Lazy and Data.List
-- by asking ghci to ':browse' each module and diffing the signatures.
main = do
  -- Normalise a module's :browse output into a set of signatures:
  -- drop the module-name prefix, join wrapped lines, rename
  -- GHC.Int.Int64 to Int, and keep only lines containing "::".
  let tidy pkg = (S.fromList . filter (T.isInfixOf "::") . T.lines .
                  T.replace "GHC.Int.Int64" "Int" .
                  T.replace "\n " "" .
                  T.replace (T.append (T.pack pkg) ".") "" . T.pack) `fmap`
                 readProcess "ghci" [] (":browse " ++ pkg)
  -- Print the signatures present in a but missing from b.
  let diff a b = mapM_ (putStrLn . (" "++) . T.unpack) . S.toList $
                 S.difference a b
  text <- tidy "Data.Text"
  lazy <- tidy "Data.Text.Lazy"
  list <- tidy "Data.List"
  putStrLn "Text \\ List:"
  diff text list
  putStrLn ""
  putStrLn "Text \\ Lazy:"
  diff text lazy
  putStrLn ""
  putStrLn "Lazy \\ Text:"
  diff lazy text
29 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/scripts/Arsec.hs:
--------------------------------------------------------------------------------
1 | module Arsec
2 | (
3 | Comment
4 | , comment
5 | , semi
6 | , showC
7 | , unichar
8 | , unichars
9 | , module Control.Applicative
10 | , module Control.Monad
11 | , module Data.Char
12 | , module Text.ParserCombinators.Parsec.Char
13 | , module Text.ParserCombinators.Parsec.Combinator
14 | , module Text.ParserCombinators.Parsec.Error
15 | , module Text.ParserCombinators.Parsec.Prim
16 | ) where
17 |
18 | import Control.Monad
19 | import Control.Applicative
20 | import Data.Char
21 | import Numeric
22 | import Text.ParserCombinators.Parsec.Char hiding (lower, upper)
23 | import Text.ParserCombinators.Parsec.Combinator hiding (optional)
24 | import Text.ParserCombinators.Parsec.Error
25 | import Text.ParserCombinators.Parsec.Prim hiding ((<|>), many)
26 |
-- | A single comment line from a Unicode data file.
type Comment = String

-- | Parse one code point written as hexadecimal digits.
unichar :: Parser Char
unichar = chr . fst . head . readHex <$> many1 hexDigit

-- | Parse a space-separated run of code points terminated by @;@.
unichars :: Parser [Char]
unichars = manyTill (unichar <* spaces) semi

-- | Consume a semicolon and any following whitespace.
semi :: Parser ()
semi = char ';' *> spaces *> pure ()

-- | Parse a @#@ comment up to end of line, or a bare newline.
comment :: Parser Comment
comment = (char '#' *> manyTill anyToken (char '\n')) <|> string "\n"
40 |
-- | Render a character as a Haskell hexadecimal character literal,
-- e.g. @'\\x0061'@, zero-padding the hex digits to at least four.
showC :: Char -> String
showC ch = concat ["'\\x", padded, "'"]
  where
    hex = showHex (ord ch) ""
    padded = replicate (4 - length hex) '0' ++ hex
45 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/scripts/CaseFolding.hs:
--------------------------------------------------------------------------------
1 | -- This script processes the following source file:
2 | --
3 | -- http://unicode.org/Public/UNIDATA/CaseFolding.txt
4 |
5 | module CaseFolding
6 | (
7 | CaseFolding(..)
8 | , Fold(..)
9 | , parseCF
10 | , mapCF
11 | ) where
12 |
13 | import Arsec
14 |
-- | One entry of CaseFolding.txt.
data Fold = Fold {
    code :: Char       -- ^ Source code point.
  , status :: Char     -- ^ Folding status: one of C, F, S, T.
  , mapping :: [Char]  -- ^ Code points the source folds to.
  , name :: String     -- ^ Trailing comment (character name).
  } deriving (Eq, Ord, Show)

-- | Parsed CaseFolding.txt: leading comments plus all fold entries.
data CaseFolding = CF { cfComments :: [Comment], cfFolding :: [Fold] }
                 deriving (Show)

-- | Parse the whole file: leading comments, then entries with comment
-- lines interleaved between them.
entries :: Parser CaseFolding
entries = CF <$> many comment <*> many (entry <* many comment)
  where
    entry = Fold <$> unichar <* semi
                 <*> oneOf "CFST" <* semi
                 <*> unichars
                 <*> (string "# " *> manyTill anyToken (char '\n'))

-- | Parse a CaseFolding.txt file from disk.
parseCF :: FilePath -> IO (Either ParseError CaseFolding)
parseCF name = parse entries name <$> readFile name

-- | Emit Haskell source for a @foldMapping@ function: one equation per
-- common/full folding that differs from plain 'toLower', plus a
-- catch-all equation that falls back to 'toLower'.
mapCF :: CaseFolding -> [String]
mapCF (CF _ ms) = typ ++ (map nice . filter p $ ms) ++ [last]
  where
    typ = ["foldMapping :: forall s. Char -> s -> Step (CC s) Char"
          ,"{-# NOINLINE foldMapping #-}"]
    -- NOTE: 'last' shadows Prelude.last here; it is the catch-all equation.
    last = "foldMapping c s = Yield (toLower c) (CC s '\\0' '\\0')"
    nice c = "-- " ++ name c ++ "\n" ++
             "foldMapping " ++ showC (code c) ++ " s = Yield " ++ x ++ " (CC s " ++ y ++ " " ++ z ++ ")"
        -- Pad the mapping with NULs so every equation yields three slots.
        where [x,y,z] = (map showC . take 3) (mapping c ++ repeat '\0')
    -- Keep only common (C) and full (F) foldings that differ from toLower.
    p f = status f `elem` "CF" &&
          mapping f /= [toLower (code f)]
47 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/scripts/CaseMapping.hs:
--------------------------------------------------------------------------------
1 | import System.Environment
2 | import System.IO
3 |
4 | import Arsec
5 | import CaseFolding
6 | import SpecialCasing
7 |
-- | Regenerate Data.Text.Internal.Fusion.CaseMapping from the Unicode
-- data files SpecialCasing.txt and CaseFolding.txt in the current
-- directory.
--
-- Usage: CaseMapping [OUTPUT_PATH]
main = do
  args <- getArgs
  -- Previously this case matched only [] and [o], so running with two
  -- or more arguments died with a pattern-match failure; take the first
  -- argument and ignore the rest instead.
  let oname = case args of
        []      -> "../src/Data/Text/Internal/Fusion/CaseMapping.hs"
        (o : _) -> o
  psc <- parseSC "SpecialCasing.txt"
  pcf <- parseCF "CaseFolding.txt"
  -- Fail fast on a parse error instead of returning 'undefined' and
  -- crashing later, mid-way through writing the output file.
  scs <- case psc of
           Left err -> error ("SpecialCasing parse error: " ++ show err)
           Right ms -> return ms
  cfs <- case pcf of
           Left err -> error ("CaseFolding parse error: " ++ show err)
           Right ms -> return ms
  h <- openFile oname WriteMode
  -- Carry a few comment lines from each source file into the generated
  -- module as provenance.
  let comments = map ("--" ++) $
                 take 2 (cfComments cfs) ++ take 2 (scComments scs)
  mapM_ (hPutStrLn h) $
    ["{-# LANGUAGE Rank2Types #-}"
    ,"-- AUTOMATICALLY GENERATED - DO NOT EDIT"
    ,"-- Generated by scripts/CaseMapping.hs"] ++
    comments ++
    [""
    ,"module Data.Text.Internal.Fusion.CaseMapping where"
    ,"import Data.Char"
    ,"import Data.Text.Internal.Fusion.Types"
    ,""]
  mapM_ (hPutStrLn h) (mapSC "upper" upper toUpper scs)
  mapM_ (hPutStrLn h) (mapSC "lower" lower toLower scs)
  mapM_ (hPutStrLn h) (mapSC "title" title toTitle scs)
  mapM_ (hPutStrLn h) (mapCF cfs)
  hClose h
39 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/scripts/tests.sh:
--------------------------------------------------------------------------------
#!/bin/sh

# Run the text test suite against a series of GHC versions, one build
# directory per compiler under dist-newstyle/.

set -ex

# runtest GHC [ARGS...]: build and run text-tests with the given
# compiler; remaining arguments are forwarded to the test binary.
runtest() {
HC=$1
shift

# EDIT last line to pass arguments

cabal run text-tests:test:tests \
    --project-file=cabal.tests.project \
    --builddir="dist-newstyle/$HC" \
    --with-compiler="$HC" \
    -- "$@"
}

runtest ghc-8.10.2 "$@"
runtest ghc-8.8.4  "$@"
runtest ghc-8.6.5  "$@"
runtest ghc-8.4.4  "$@"
runtest ghc-8.2.2  "$@"
runtest ghc-8.0.2  "$@"

runtest ghc-7.10.3 "$@"
runtest ghc-7.8.4  "$@"
runtest ghc-7.6.3  "$@"
runtest ghc-7.4.2  "$@"
runtest ghc-7.2.2  "$@"
runtest ghc-7.0.4  "$@"
31 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Internal/Builder/Functions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE MagicHash #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Builder.Functions
5 | -- Copyright : (c) 2011 MailRank, Inc.
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : GHC
11 | --
12 | -- /Warning/: this is an internal module, and does not have a stable
13 | -- API or name. Functions in this module may not check or enforce
14 | -- preconditions expected by public modules. Use at your own risk!
15 | --
16 | -- Useful functions and combinators.
17 |
18 | module Data.Text.Internal.Builder.Functions
19 | (
20 | (<>)
21 | , i2d
22 | ) where
23 |
24 | import Data.Monoid (mappend)
25 | import Data.Text.Lazy.Builder (Builder)
26 | import GHC.Base (chr#,ord#,(+#),Int(I#),Char(C#))
27 | import Prelude ()
28 |
-- | Unsafe conversion for decimal digits: maps 0..9 to '0'..'9' by
-- unboxed character arithmetic.  No bounds check is performed;
-- out-of-range input silently yields a non-digit character.
{-# INLINE i2d #-}
i2d :: Int -> Char
i2d (I# i#) = C# (chr# (ord# '0'# +# i#))

-- | The normal 'mappend' function with right associativity instead of
-- left.
(<>) :: Builder -> Builder -> Builder
(<>) = mappend
{-# INLINE (<>) #-}

infixr 4 <>
41 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Internal/Builder/Int/Digits.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | -- Module: Data.Text.Internal.Builder.Int.Digits
4 | -- Copyright: (c) 2013 Bryan O'Sullivan
5 | -- License: BSD-style
6 | -- Maintainer: Bryan O'Sullivan
7 | -- Stability: experimental
8 | -- Portability: portable
9 | --
10 | -- /Warning/: this is an internal module, and does not have a stable
11 | -- API or name. Functions in this module may not check or enforce
12 | -- preconditions expected by public modules. Use at your own risk!
13 | --
14 | -- This module exists because the C preprocessor does things that we
15 | -- shall not speak of when confronted with Haskell multiline strings.
16 |
17 | module Data.Text.Internal.Builder.Int.Digits (digits) where
18 |
19 | import Data.ByteString.Char8 (ByteString)
20 |
-- | The two-digit decimal representations of 00..99, concatenated in
-- order; the pair for @n@ starts at byte offset @2 * n@.  Used as a
-- lookup table when rendering integers two digits at a time.
digits :: ByteString
digits = "0001020304050607080910111213141516171819\
         \2021222324252627282930313233343536373839\
         \4041424344454647484950515253545556575859\
         \6061626364656667686970717273747576777879\
         \8081828384858687888990919293949596979899"
27 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Internal/Builder/RealFloat/Functions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE CPP #-}
2 |
3 | -- |
4 | -- Module: Data.Text.Internal.Builder.RealFloat.Functions
5 | -- Copyright: (c) The University of Glasgow 1994-2002
6 | -- License: see libraries/base/LICENSE
7 | --
8 | -- /Warning/: this is an internal module, and does not have a stable
9 | -- API or name. Functions in this module may not check or enforce
10 | -- preconditions expected by public modules. Use at your own risk!
11 |
12 | module Data.Text.Internal.Builder.RealFloat.Functions
13 | (
14 | roundTo
15 | ) where
16 |
-- | Round a list of base-10 digits to @d@ digits.  Returns a carry
-- flag (1 if rounding overflowed into a new leading digit, else 0)
-- together with the rounded digit list.  At exactly half the base with
-- only zeros following, rounds to even (banker's rounding).
roundTo :: Int -> [Int] -> (Int,[Int])
roundTo d is =
  case f d True is of
    x@(0,_) -> x
    (1,xs) -> (1, 1:xs)
    _ -> error "roundTo: bad Value"
  where
    -- Half of the base: the rounding threshold.
    b2 = base `quot` 2

    -- f n e ds: keep n digits of ds, returning (carry, kept digits).
    -- e tracks whether the previously kept digit was even.
    f n _ [] = (0, replicate n 0)
    f 0 e (x:xs) | x == b2 && e && all (== 0) xs = (0, []) -- Round to even when at exactly half the base
                 | otherwise = (if x >= b2 then 1 else 0, [])
    f n _ (i:xs)
      -- A carry from the right may push this digit to the base itself;
      -- emit 0 and propagate the carry leftward.
      | i' == base = (1,0:ds)
      | otherwise = (0,i':ds)
      where
        (c,ds) = f (n-1) (even i) xs
        i' = c + i
    base = 10
36 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Internal/Encoding/Utf16.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE CPP #-}
2 | {-# LANGUAGE MagicHash #-}
3 | {-# LANGUAGE BangPatterns #-}
4 |
5 | -- |
6 | -- Module : Data.Text.Internal.Encoding.Utf16
7 | -- Copyright : (c) 2008, 2009 Tom Harper,
8 | -- (c) 2009 Bryan O'Sullivan,
9 | -- (c) 2009 Duncan Coutts
10 | --
11 | -- License : BSD-style
12 | -- Maintainer : bos@serpentine.com
13 | -- Stability : experimental
14 | -- Portability : GHC
15 | --
16 | -- /Warning/: this is an internal module, and does not have a stable
17 | -- API or name. Functions in this module may not check or enforce
18 | -- preconditions expected by public modules. Use at your own risk!
19 | --
20 | -- Basic UTF-16 validation and character manipulation.
21 | module Data.Text.Internal.Encoding.Utf16
22 | (
23 | chr2
24 | , validate1
25 | , validate2
26 | ) where
27 |
28 | import GHC.Exts
29 | import GHC.Word (Word16(..))
30 |
31 | #if !MIN_VERSION_base(4,16,0)
32 | -- harmless to import, except for warnings that it is unused.
33 | import Data.Text.Internal.PrimCompat ( word16ToWord# )
34 | #endif
35 |
-- | Combine a UTF-16 surrogate pair into the 'Char' it encodes.
-- Assumes the pair was accepted by 'validate2'; no checks are done.
chr2 :: Word16 -> Word16 -> Char
chr2 (W16# a#) (W16# b#) = C# (chr# (upper# +# lower# +# 0x10000#))
    where
      !x# = word2Int# (word16ToWord# a#)
      !y# = word2Int# (word16ToWord# b#)
      -- High surrogate supplies the top 10 bits, low surrogate the
      -- bottom 10; adding 0x10000 rebases into the supplementary planes.
      !upper# = uncheckedIShiftL# (x# -# 0xD800#) 10#
      !lower# = y# -# 0xDC00#
{-# INLINE chr2 #-}
44 |
45 | validate1 :: Word16 -> Bool
46 | validate1 w = not (0xD800 <= w && w <= 0xDFFF) -- a lone code unit is valid iff it is not in the surrogate range
47 | {-# INLINE validate1 #-}
48 |
49 | validate2 :: Word16 -> Word16 -> Bool
50 | validate2 h l = (0xD800 <= h && h <= 0xDBFF) -- first unit must be a high (leading) surrogate
51 | && (0xDC00 <= l && l <= 0xDFFF) -- second unit must be a low (trailing) surrogate
52 | {-# INLINE validate2 #-}
53 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Internal/Encoding/Utf32.hs:
--------------------------------------------------------------------------------
1 | -- |
2 | -- Module : Data.Text.Internal.Encoding.Utf32
3 | -- Copyright : (c) 2008, 2009 Tom Harper,
4 | -- (c) 2009, 2010 Bryan O'Sullivan,
5 | -- (c) 2009 Duncan Coutts
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : portable
11 | --
12 | -- /Warning/: this is an internal module, and does not have a stable
13 | -- API or name. Functions in this module may not check or enforce
14 | -- preconditions expected by public modules. Use at your own risk!
15 | --
16 | -- Basic UTF-32 validation.
17 | module Data.Text.Internal.Encoding.Utf32
18 | (
19 | validate
20 | ) where
21 |
22 | import Data.Word (Word32)
23 |
24 | validate :: Word32 -> Bool -- True iff x1 is a Unicode scalar value: at most 0x10FFFF and not a surrogate
25 | validate x1 = x1 < 0xD800 || (x1 > 0xDFFF && x1 <= 0x10FFFF)
26 | {-# INLINE validate #-}
27 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Internal/Functions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE CPP #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Functions
5 | -- Copyright : 2010 Bryan O'Sullivan
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : GHC
11 | --
12 | -- /Warning/: this is an internal module, and does not have a stable
13 | -- API or name. Functions in this module may not check or enforce
14 | -- preconditions expected by public modules. Use at your own risk!
15 | --
16 | -- Useful functions.
17 |
18 | module Data.Text.Internal.Functions
19 | (
20 | intersperse,
21 | unsafeWithForeignPtr
22 | ) where
23 |
24 | import Foreign.Ptr (Ptr)
25 | import Foreign.ForeignPtr (ForeignPtr)
26 | #if MIN_VERSION_base(4,15,0)
27 | import qualified GHC.ForeignPtr (unsafeWithForeignPtr)
28 | #else
29 | import qualified Foreign.ForeignPtr (withForeignPtr)
30 | #endif
31 |
32 | -- | A lazier version of Data.List.intersperse. The other version
33 | -- causes space leaks!
34 | intersperse :: a -> [a] -> [a]
35 | intersperse _ [] = []
36 | intersperse sep (x:xs) = x : go xs -- emit the head immediately; separators only go between elements
37 | where
38 | go [] = []
39 | go (y:ys) = sep : y: go ys -- produces sep before inspecting ys, which keeps this lazier than Data.List's version
40 | {-# INLINE intersperse #-}
41 |
42 | unsafeWithForeignPtr :: ForeignPtr a -> (Ptr a -> IO b) -> IO b -- run an IO action on the raw Ptr of a ForeignPtr
43 | #if MIN_VERSION_base(4,15,0)
44 | unsafeWithForeignPtr = GHC.ForeignPtr.unsafeWithForeignPtr -- base >= 4.15: the unchecked variant; see its docs for safety caveats
45 | #else
46 | unsafeWithForeignPtr = Foreign.ForeignPtr.withForeignPtr -- older base: fall back to the ordinary withForeignPtr
47 | #endif
48 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Internal/PrimCompat.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE CPP #-}
2 | {-# LANGUAGE MagicHash #-}
3 |
4 | module Data.Text.Internal.PrimCompat
5 | ( word8ToWord#
6 | , wordToWord8#
7 |
8 | , word16ToWord#
9 | , wordToWord16#
10 |
11 | , wordToWord32#
12 | , word32ToWord#
13 | ) where
14 |
15 | #if MIN_VERSION_base(4,16,0)
16 |
17 | import GHC.Base
18 |
19 | #else
20 |
21 | import GHC.Prim (Word#)
22 |
23 | wordToWord8#, word8ToWord# :: Word# -> Word#
24 | wordToWord16#, word16ToWord# :: Word# -> Word#
25 | wordToWord32#, word32ToWord# :: Word# -> Word#
26 | word8ToWord# w = w -- before base-4.16 the sized unboxed words share the Word# representation,
27 | word16ToWord# w = w -- so every one of these compatibility conversions is the identity
28 | word32ToWord# w = w
29 | wordToWord8# w = w
30 | wordToWord16# w = w
31 | wordToWord32# w = w
32 | {-# INLINE wordToWord16# #-}
33 | {-# INLINE word16ToWord# #-}
34 | {-# INLINE wordToWord32# #-}
35 | {-# INLINE word32ToWord# #-} -- NOTE(review): the 8-bit pair has no INLINE pragmas, unlike the 16/32-bit ones — confirm whether intentional
36 |
37 | #endif
38 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Internal/Private.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, CPP, Rank2Types, UnboxedTuples #-}
2 |
3 | -- |
4 | -- Module : Data.Text.Internal.Private
5 | -- Copyright : (c) 2011 Bryan O'Sullivan
6 | --
7 | -- License : BSD-style
8 | -- Maintainer : bos@serpentine.com
9 | -- Stability : experimental
10 | -- Portability : GHC
11 |
12 | module Data.Text.Internal.Private
13 | (
14 | runText
15 | , span_
16 | ) where
17 |
18 | import Control.Monad.ST (ST, runST)
19 | import Data.Text.Internal (Text(..), text)
20 | import Data.Text.Unsafe (Iter(..), iter)
21 | import qualified Data.Text.Array as A
22 |
23 | #if defined(ASSERTS)
24 | import GHC.Stack (HasCallStack)
25 | #endif
26 |
27 | span_ :: (Char -> Bool) -> Text -> (# Text, Text #) -- split t into the longest prefix satisfying p, and the remainder
28 | span_ p t@(Text arr off len) = (# hd,tl #) -- both halves share the original array; no copying
29 | where hd = text arr off k
30 | tl = text arr (off+k) (len-k)
31 | !k = loop 0 -- k = array offset where the prefix ends
32 | loop !i | i < len && p c = loop (i+d)
33 | | otherwise = i
34 | where Iter c d = iter t i -- c = char at offset i, d = its width, so we advance char-by-char
35 | {-# INLINE span_ #-}
36 |
37 | runText ::
38 | #if defined(ASSERTS)
39 | HasCallStack =>
40 | #endif
41 | (forall s. (A.MArray s -> Int -> ST s Text) -> ST s Text) -> Text -- act receives a "finish" continuation taking the buffer and final length
42 | runText act = runST (act $ \ !marr !len -> do
43 | arr <- A.unsafeFreeze marr -- freeze in place (no copy); the mutable array must not be written afterwards
44 | return $! text arr 0 len)
45 | {-# INLINE runText #-}
46 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Lazy/Builder.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, CPP, Rank2Types #-}
2 | {-# LANGUAGE Trustworthy #-}
3 |
4 | -----------------------------------------------------------------------------
5 | -- |
6 | -- Module : Data.Text.Lazy.Builder
7 | -- Copyright : (c) 2013 Bryan O'Sullivan
8 | -- (c) 2010 Johan Tibell
9 | -- License : BSD-style (see LICENSE)
10 | --
11 | -- Maintainer : Johan Tibell
12 | -- Portability : portable to Hugs and GHC
13 | --
14 | -- Efficient construction of lazy @Text@ values. The principal
15 | -- operations on a @Builder@ are @singleton@, @fromText@, and
16 | -- @fromLazyText@, which construct new builders, and 'mappend', which
17 | -- concatenates two builders.
18 | --
19 | -- To get maximum performance when building lazy @Text@ values using a
20 | -- builder, associate @mappend@ calls to the right. For example,
21 | -- prefer
22 | --
23 | -- > singleton 'a' `mappend` (singleton 'b' `mappend` singleton 'c')
24 | --
25 | -- to
26 | --
27 | -- > singleton 'a' `mappend` singleton 'b' `mappend` singleton 'c'
28 | --
29 | -- as the latter associates @mappend@ to the left. Or, equivalently,
30 | -- prefer
31 | --
32 | -- > singleton 'a' <> singleton 'b' <> singleton 'c'
33 | --
34 | -- since the '<>' from recent versions of 'Data.Monoid' associates
35 | -- to the right.
36 |
37 | -----------------------------------------------------------------------------
38 |
39 | module Data.Text.Lazy.Builder
40 | ( -- * The Builder type
41 | Builder
42 | , toLazyText
43 | , toLazyTextWith
44 |
45 | -- * Constructing Builders
46 | , singleton
47 | , fromText
48 | , fromLazyText
49 | , fromString
50 |
51 | -- * Flushing the buffer state
52 | , flush
53 | ) where
54 |
55 | import Data.Text.Internal.Builder
56 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/src/Data/Text/Lazy/Internal.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns, DeriveDataTypeable #-}
2 | -- |
3 | -- Module : Data.Text.Lazy.Internal
4 | -- Copyright : (c) 2013 Bryan O'Sullivan
5 | --
6 | -- License : BSD-style
7 | -- Maintainer : bos@serpentine.com
8 | -- Stability : experimental
9 | -- Portability : GHC
10 | --
11 | -- This module has been renamed to 'Data.Text.Internal.Lazy'. This
12 | -- name for the module will be removed in the next major release.
13 |
14 | module Data.Text.Lazy.Internal
15 | {-# DEPRECATED "Use Data.Text.Internal.Lazy instead" #-}
16 | (
17 | module Data.Text.Internal.Lazy
18 | ) where
19 |
20 | import Data.Text.Internal.Lazy
21 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/tests/LiteralRuleTest.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 |
3 | module LiteralRuleTest where
4 |
5 | import Data.Text (Text)
6 |
7 | -- This should produce 8 firings of the "TEXT literal" rule
8 | strings :: [Text] -- 8 plain ASCII literals
9 | strings = [ "abstime", "aclitem", "bit", "bool", "box", "bpchar", "bytea", "char" ]
10 |
11 | -- This should produce 7 firings of the "TEXT literal UTF8" rule
12 | utf8Strings :: [Text] -- 7 literals containing NUL or non-ASCII characters, forcing the UTF-8 path
13 | utf8Strings = [ "\0abstime", "\0aclitem", "\xfefe bit", "\0bool", "\0box", "\0bpchar", "\0bytea" ]
14 |
15 | -- This should produce 4 firings of the "TEXT empty literal" rule
16 | empties :: [Text] -- 4 empty literals
17 | empties = [ "", "", "", "" ]
18 |
19 | -- This should produce 5 firings of the "TEXT empty literal" rule
20 | --singletons :: [Text]
21 | --singletons = [ "a", "b", "c", "d", "e" ]
22 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/tests/Tests.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE CPP #-}
2 |
3 | module Main
4 | ( main
5 | ) where
6 |
7 | import Test.Tasty (defaultMain, testGroup)
8 |
9 | import qualified Tests.Lift as Lift
10 | import qualified Tests.Properties as Properties
11 | import qualified Tests.Regressions as Regressions
12 |
13 | main :: IO ()
14 | main = defaultMain $ testGroup "All" -- run all three suites under a single tasty tree
15 | [ Lift.tests
16 | , Properties.tests
17 | , Regressions.tests
18 | ]
19 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/tests/Tests/IO.hs:
--------------------------------------------------------------------------------
1 | -- | Program which exposes some Haskell functions as an executable. The results
2 | -- and coverage of this module are meant to be checked using a shell script.
3 | --
4 | module Main
5 | (
6 | main
7 | ) where
8 |
9 | import System.Environment (getArgs)
10 | import System.Exit (exitFailure)
11 | import System.IO (hPutStrLn, stderr)
12 | import qualified Data.Text as T
13 | import qualified Data.Text.IO as T
14 | import qualified Data.Text.Lazy as TL
15 | import qualified Data.Text.Lazy.IO as TL
16 |
17 | main :: IO ()
18 | main = do -- dispatch on the command line: the first argument selects the I/O function under test
19 | args <- getArgs
20 | case args of
21 | ["T.readFile", name] -> T.putStr =<< T.readFile name
22 | ["T.writeFile", name, t] -> T.writeFile name (T.pack t)
23 | ["T.appendFile", name, t] -> T.appendFile name (T.pack t)
24 | ["T.interact"] -> T.interact id
25 | ["T.getContents"] -> T.putStr =<< T.getContents
26 | ["T.getLine"] -> T.putStrLn =<< T.getLine
27 |
28 | ["TL.readFile", name] -> TL.putStr =<< TL.readFile name -- TL.* variants exercise the lazy Text API
29 | ["TL.writeFile", name, t] -> TL.writeFile name (TL.pack t)
30 | ["TL.appendFile", name, t] -> TL.appendFile name (TL.pack t)
31 | ["TL.interact"] -> TL.interact id
32 | ["TL.getContents"] -> TL.putStr =<< TL.getContents
33 | ["TL.getLine"] -> TL.putStrLn =<< TL.getLine
34 | _ -> hPutStrLn stderr "invalid directive!" >> exitFailure
35 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/tests/Tests/Lift.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | {-# LANGUAGE TemplateHaskell #-}
3 | module Tests.Lift
4 | ( tests
5 | )
6 | where
7 |
8 | import qualified Data.Text as S
9 | import qualified Data.Text.Lazy as L
10 | import Language.Haskell.TH.Syntax (lift)
11 | import Test.Tasty.HUnit (testCase, assertEqual)
12 | import Test.Tasty (TestTree, testGroup)
13 |
14 | tests :: TestTree
15 | tests = testGroup "TH lifting Text" -- each case splices a Text literal at compile time via TH lift and compares it to the run-time value
16 | [ testCase "strict" $ assertEqual "strict"
17 | $(lift ("foo" :: S.Text))
18 | ("foo" :: S.Text)
19 | , testCase "lazy" $ assertEqual "lazy"
20 | $(lift ("foo" :: L.Text))
21 | ("foo" :: L.Text)
22 | ]
23 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/tests/Tests/Properties.hs:
--------------------------------------------------------------------------------
1 | -- | QuickCheck properties for the text library.
2 |
3 | {-# OPTIONS_GHC -fno-enable-rewrite-rules -fno-warn-missing-signatures #-}
4 | module Tests.Properties
5 | (
6 | tests
7 | ) where
8 |
9 | import Test.Tasty (TestTree, testGroup)
10 | import Tests.Properties.Basics (testBasics)
11 | import Tests.Properties.Builder (testBuilder)
12 | import Tests.Properties.Folds (testFolds)
13 | import Tests.Properties.LowLevel (testLowLevel)
14 | import Tests.Properties.Instances (testInstances)
15 | import Tests.Properties.Substrings (testSubstrings)
16 | import Tests.Properties.Read (testRead)
17 | import Tests.Properties.Text (testText)
18 | import Tests.Properties.Transcoding (testTranscoding)
19 |
20 | tests :: TestTree
21 | tests = -- aggregate of all QuickCheck property groups for the library
22 | testGroup "Properties" [
23 | testTranscoding,
24 | testInstances,
25 | testBasics,
26 | testFolds,
27 | testText,
28 | testSubstrings,
29 | testBuilder,
30 | testLowLevel,
31 | testRead
32 | ]
33 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/tests/Tests/SlowFunctions.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE BangPatterns #-}
2 | module Tests.SlowFunctions
3 | (
4 | indices
5 | , splitOn
6 | ) where
7 |
8 | import qualified Data.Text as T
9 | import Data.Text.Internal (Text(..))
10 | import Data.Text.Unsafe (iter_, unsafeHead, unsafeTail)
11 |
12 | indices :: T.Text -- ^ Substring to search for (@needle@)
13 | -> T.Text -- ^ Text to search in (@haystack@)
14 | -> [Int]
15 | indices needle@(Text _narr _noff nlen) haystack@(Text harr hoff hlen) -- naive reference implementation: try a prefix match at every char position
16 | | T.null needle = []
17 | | otherwise = scan 0
18 | where
19 | scan i | i >= hlen = [] -- ran past the end of the haystack
20 | | needle `T.isPrefixOf` t = i : scan (i+nlen) -- match: resume after it (non-overlapping matches)
21 | | otherwise = scan (i+d)
22 | where t = Text harr (hoff+i) (hlen-i) -- haystack suffix starting at i (shares the array)
23 | d = iter_ haystack i -- width of the char at i, so scanning advances char-by-char
24 |
25 | splitOn :: T.Text -- ^ Text to split on
26 | -> T.Text -- ^ Input text
27 | -> [T.Text]
28 | splitOn pat src0
29 | | T.null pat = error "splitOn: empty" -- splitting on the empty pattern is not defined
30 | | l == 1 = T.split (== (unsafeHead pat)) src0 -- single-char pattern: reuse the cheaper T.split
31 | | otherwise = go src0
32 | where
33 | l = T.length pat
34 | go src = search 0 src -- src is the start of the current chunk
35 | where
36 | search !n !s -- n = chars of src already scanned, s = remaining suffix
37 | | T.null s = [src] -- not found
38 | | pat `T.isPrefixOf` s = T.take n src : go (T.drop l s) -- emit the chunk before the match, continue after it
39 | | otherwise = search (n+1) (unsafeTail s)
40 |
--------------------------------------------------------------------------------
/test-project/text-nofusion/tests/literal-rule-test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 |
3 | failed=0
4 |
5 | function check_firings() { # check_firings RULE EXPECTED: compile LiteralRuleTest.hs and count firings of RULE
6 |     rule=$1
7 |     expected=$2
8 |     build="ghc -O -ddump-rule-firings LiteralRuleTest.hs"
9 |     build="$build -i.. -I../include"
10 |     touch LiteralRuleTest.hs # force recompilation so the rules fire again on every call
11 |     echo -n "Want to see $expected firings of rule $rule... " >&2
12 |     firings=$($build 2>&1 | grep "Rule fired: $rule\$" | wc -l)
13 |     rm -f LiteralRuleTest.{o,hi} # fixed: was {o.hi} — without a comma bash performs no brace expansion, so nothing was ever removed
14 |
15 |     if [ $firings != $expected ]; then
16 |         echo "failed, saw $firings" >&2
17 |         failed=1
18 |     else
19 |         echo "pass" >&2
20 |     fi
21 | }
22 |
23 | check_firings "TEXT literal" 8
24 | check_firings "TEXT literal UTF8" 7
25 | check_firings "TEXT empty literal" 4
26 | # This is broken at the moment. "TEXT literal" rule fires instead.
27 | #check_firings "TEXT singleton literal" 5
28 |
29 | exit $failed
30 |
--------------------------------------------------------------------------------
/unlines-test/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Revision history for unlines-test
2 |
3 | ## 0.1.0.0 -- YYYY-mm-dd
4 |
5 | * First version. Released on an unsuspecting world.
6 |
--------------------------------------------------------------------------------
/unlines-test/app/Unlines.hs:
--------------------------------------------------------------------------------
1 | module Unlines where
2 |
3 |
4 |
5 | unlines :: [String] -> String -- local re-implementation shadowing Prelude.unlines; presumably kept in exactly this concat/map form as a fusion test subject — TODO confirm
6 | unlines ls = concat (map (\l -> l ++ ['\n']) ls)
7 |
--------------------------------------------------------------------------------
/unlines-test/cabal.project:
--------------------------------------------------------------------------------
1 | packages: .
2 | allow-newer: all
3 |
--------------------------------------------------------------------------------
/unlines-test/unlines-test.cabal:
--------------------------------------------------------------------------------
1 | cabal-version: 2.4
2 | name: unlines-test
3 | version: 0.1.0.0
4 |
5 | -- A short (one-line) description of the package.
6 | -- synopsis:
7 |
8 | -- A longer description of the package.
9 | -- description:
10 |
11 | -- A URL where users can report bugs.
12 | -- bug-reports:
13 |
14 | -- The license under which the package is released.
15 | -- license:
16 | author: HugoPeters1024
17 | maintainer: hpeters1024@gmail.com
18 |
19 | -- A copyright notice.
20 | -- copyright:
21 | -- category:
22 | extra-source-files: CHANGELOG.md
23 |
24 | executable unlines-test
25 | main-is: Main.hs
26 |
27 | -- Modules included in this executable, other than Main.
28 | other-modules: Unlines
29 |
30 |
31 | -- LANGUAGE extensions used by modules in this package.
32 | -- other-extensions:
33 | build-depends: base
34 | , criterion
35 | hs-source-dirs: app
36 | default-language: Haskell2010
37 | ghc-options: -dverbose-core2core -dsuppress-all
38 |
--------------------------------------------------------------------------------