├── .git-blame-ignore-revs
├── .github
│   ├── labeler.yml
│   ├── release.yml
│   └── workflows
│       ├── ci.yml
│       ├── clean.yml
│       ├── label-pr.yml
│       ├── publish-website.yml
│       └── release.yml
├── .gitignore
├── .gitmodules
├── .jvmopts
├── .scalafmt.conf
├── HISTORICALCONTEXT.md
├── IDEAS.md
├── LICENSE.md
├── PROPOSAL.md
├── README.md
├── build.sbt
├── docs-gen
│   └── src
│       └── main
│           └── scala
│               └── ch
│                   └── epfl
│                       └── scala
│                           └── profiling
│                               ├── Docs.scala
│                               └── docs
│                                   ├── DependencyResolution.scala
│                                   └── Sonatype.scala
├── docs
│   ├── plugins
│   │   └── sbt-plugin.md
│   └── user-guide
│       ├── installation.md
│       ├── motivation.md
│       └── usage.md
├── integrations
│   └── src
│       └── main
│           └── scala
│               └── profiling
│                   └── integrations
│                       └── caseapp
│                           ├── CliOptions.scala
│                           ├── Commands.scala
│                           ├── CommonOptions.scala
│                           └── Parsers.scala
├── plugin
│   └── src
│       ├── main
│       │   ├── resources
│       │   │   └── scalac-plugin.xml
│       │   ├── scala-2.12.18
│       │   │   └── ch
│       │   │       └── epfl
│       │   │           └── scala
│       │   │               └── profilers
│       │   │                   └── tools
│       │   │                       └── SettingsOps.scala
│       │   ├── scala-2.12.19
│       │   │   └── ch
│       │   │       └── epfl
│       │   │           └── scala
│       │   │               └── profilers
│       │   │                   └── tools
│       │   │                       └── SettingsOps.scala
│       │   ├── scala-2.12.20
│       │   │   └── ch
│       │   │       └── epfl
│       │   │           └── scala
│       │   │               └── profilers
│       │   │                   └── tools
│       │   │                       └── SettingsOps.scala
│       │   ├── scala-2.12
│       │   │   └── ch
│       │   │       └── epfl
│       │   │           └── scala
│       │   │               └── profilers
│       │   │                   └── tools
│       │   │                       └── ScalaSettingsOps.scala
│       │   ├── scala-2.13.14
│       │   │   └── ch
│       │   │       └── epfl
│       │   │           └── scala
│       │   │               └── profilers
│       │   │                   └── tools
│       │   │                       └── SettingsOps.scala
│       │   ├── scala-2.13.15
│       │   │   └── ch
│       │   │       └── epfl
│       │   │           └── scala
│       │   │               └── profilers
│       │   │                   └── tools
│       │   │                       └── SettingsOps.scala
│       │   ├── scala-2.13.16
│       │   │   └── ch
│       │   │       └── epfl
│       │   │           └── scala
│       │   │               └── profilers
│       │   │                   └── tools
│       │   │                       └── SettingsOps.scala
│       │   ├── scala-2.13
│       │   │   └── ch
│       │   │       └── epfl
│       │   │           └── scala
│       │   │               └── profilers
│       │   │                   └── tools
│       │   │                       └── ScalaSettingsOps.scala
│       │   └── scala
│       │       └── ch
│       │           └── epfl
│       │               └── scala
│       │                   ├── ImplicitSearchDebugInfo.scala
│       │                   ├── PluginConfig.scala
│       │                   ├── ProfilingPlugin.scala
│       │                   └── profilers
│       │                       ├── ProfilingImpl.scala
│       │                       └── tools
│       │                           ├── Logger.scala
│       │                           └── QuantitiesHijacker.scala
│       └── test
│           └── scala
│               └── ch
│                   └── epfl
│                       └── scala
│                           ├── ImplicitTest.scala
│                           └── tools
│                               └── TestUtil.scala
├── profiledb
│   └── src
│       └── main
│           ├── protobuf
│           │   └── profiledb.proto
│           └── scala
│               └── ch.epfl.scala.profiledb
│                   ├── ProfileDb.scala
│                   ├── ProfileDbPath.scala
│                   └── utils
│                       ├── AbsolutePath.scala
│                       └── RelativePath.scala
├── project
│   ├── BuildPlugin.scala
│   ├── WorkingPluginCross.scala
│   ├── build.properties
│   ├── build.sbt
│   └── project
│       └── build.properties
├── sbt-plugin
│   └── src
│       ├── main
│       │   └── scala
│       │       └── sbt
│       │           └── ch
│       │               └── epfl
│       │                   └── scala
│       │                       ├── ProfilingSbtPlugin.scala
│       │                       └── SbtTaskTimer.scala
│       └── sbt-test
│           └── compiler-profiling
│               ├── warmup-60-seconds-compile
│               │   ├── build.sbt
│               │   ├── project
│               │   │   └── plugins.sbt
│               │   ├── src
│               │   │   └── main
│               │   │       └── scala
│               │   │           └── Target.scala
│               │   └── test
│               └── warmup-60-seconds-test
│                   ├── build.sbt
│                   ├── project
│                   │   └── plugins.sbt
│                   ├── src
│                   │   └── test
│                   │       └── scala
│                   │           └── Target.scala
│                   └── test
└── website
    ├── core
    │   ├── Footer.js
    │   └── GridBlock.js
    ├── i18n
    │   └── en.json
    ├── package.json
    ├── pages
    │   └── en
    │       └── index.js
    ├── sidebars.json
    ├── siteConfig.js
    └── static
        ├── css
        │   └── custom.css
        └── img
            ├── favicon.ico
            ├── favicon.png
            ├── macro-impl.png
            ├── scala-steward-implicit-searches-flamegraph.svg
            ├── scalac-profiling-logo-footer.png
            ├── scalac-profiling-logo.png
            ├── scalacenter.png
            ├── scalacenter2x.png
            └── speed-up-compilation.png
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
1 | # Scala Steward: Reformat with scalafmt 3.8.0
2 | 28c4e96f2bb8d9379104718f4103c087a22cc1bc
3 |
--------------------------------------------------------------------------------
/.github/labeler.yml:
--------------------------------------------------------------------------------
1 | dependencies:
2 | - all:
3 | - head-branch: '^update/'
4 | - changed-files:
5 | - all-globs-to-any-file: '**/build.sbt'
6 |
7 | docs:
8 | - changed-files:
9 | - any-glob-to-any-file: ['docs/**', 'website/**', '**/*.md']
10 |
11 | sbt-scalac-profiling:
12 | - changed-files:
13 | - any-glob-to-any-file: 'sbt-plugin/**'
14 |
15 | scalac-profiling:
16 | - changed-files:
17 | - any-glob-to-any-file: ['external/**', 'integrations/**', 'profiledb/**', 'plugin/**']
18 |
19 | behind-the-scenes:
20 | - all:
21 | - changed-files:
22 | - any-glob-to-all-files: '!**/build.sbt'
23 | - any-glob-to-all-files: '!docs/**'
24 | - any-glob-to-all-files: '!website/**'
25 | - any-glob-to-all-files: '!**/*.md'
26 | - any-glob-to-all-files: '!sbt-plugin/**'
27 | - any-glob-to-all-files: '!external/**'
28 | - any-glob-to-all-files: '!integrations/**'
29 | - any-glob-to-all-files: '!profiledb/**'
30 | - any-glob-to-all-files: '!plugin/**'
31 |
--------------------------------------------------------------------------------
/.github/release.yml:
--------------------------------------------------------------------------------
1 | changelog:
2 | categories:
3 | - title: scalac-profiling
4 | labels:
5 | - scalac-profiling
6 | exclude:
7 | labels:
8 | - behind-the-scenes
9 | - title: sbt-scalac-profiling
10 | labels:
11 | - sbt-scalac-profiling
12 | exclude:
13 | labels:
14 | - behind-the-scenes
15 |
16 | - title: Documentation
17 | labels:
18 | - docs
19 | exclude:
20 | labels:
21 | - behind-the-scenes
22 |
23 | - title: Behind the scenes
24 | labels:
25 | - "*"
26 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 | on:
3 | push:
4 | tags: ["*"]
5 | branches: ['**', '!update/**', '!pr/**']
6 | pull_request:
7 | branches: ['**', '!update/**', '!pr/**']
8 |
9 | jobs:
10 | scalafmt:
11 | name: Check formatting
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v4
15 | - uses: sbt/setup-sbt@v1
16 | - name: Setup Java (temurin@11)
17 | uses: actions/setup-java@v4
18 | with:
19 | distribution: temurin
20 | java-version: 11
21 | cache: sbt
22 | - uses: coursier/cache-action@v6
23 | - name: Check formatting
24 | run: sbt -v "scalafmtSbtCheck" "scalafmtCheckAll"
25 | docs:
26 | name: Build docs
27 | runs-on: ubuntu-latest
28 | steps:
29 | - uses: actions/checkout@v4
30 | - uses: sbt/setup-sbt@v1
31 | - name: Setup Java (temurin@11)
32 | uses: actions/setup-java@v4
33 | with:
34 | distribution: temurin
35 | java-version: 11
36 | cache: sbt
37 | - uses: coursier/cache-action@v6
38 | - run: sbt docs/run
39 | plugin-tests:
40 | name: Compiler and SBT plugin tests
41 | strategy:
42 | matrix:
43 | os: [ ubuntu-latest ]
44 | scala:
45 | - 2.12.18
46 | - 2.12.19
47 | - 2.12.20
48 | - 2.13.14
49 | - 2.13.15
50 | - 2.13.16
51 | runs-on: ${{ matrix.os }}
52 | steps:
53 | - uses: actions/checkout@v4
54 | with:
55 | submodules: true
56 | - uses: sbt/setup-sbt@v1
57 | - name: Setup Java (temurin@11)
58 | uses: actions/setup-java@v4
59 | with:
60 | distribution: temurin
61 | java-version: 11
62 | cache: sbt
63 | - uses: coursier/cache-action@v6
64 | - name: Compiler plugin tests
65 | run: sbt -v '++ ${{ matrix.scala }}' plugin/test
66 | - name: SBT plugin tests
67 | run: sbt -v '++ ${{ matrix.scala }}' profilingSbtPlugin/scripted
68 | integration-tests:
69 | name: Integration tests
70 | runs-on: ubuntu-latest
71 | steps:
72 | - uses: actions/checkout@v4
73 | with:
74 | submodules: true
75 | - uses: sbt/setup-sbt@v1
76 | - name: Setup Java (temurin@11)
77 | uses: actions/setup-java@v4
78 | with:
79 | distribution: temurin
80 | java-version: 11
81 | cache: sbt
82 | - uses: coursier/cache-action@v6
83 | - name: Running tests
84 | run: sbt -v "showScalaInstances" "integrations/testOnly integration better-files wartremover"
85 |
--------------------------------------------------------------------------------
/.github/workflows/clean.yml:
--------------------------------------------------------------------------------
1 | # all credits to the sbt/sbt-github-actions project
2 | name: Clean
3 |
4 | on: push
5 |
6 | jobs:
7 | delete-artifacts:
8 | name: Delete Artifacts
9 | runs-on: ubuntu-latest
10 | env:
11 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
12 | steps:
13 | - name: Delete artifacts
14 | run: |
15 | # Customize those three lines with your repository and credentials:
16 | REPO=${GITHUB_API_URL}/repos/${{ github.repository }}
17 |
18 | # A shortcut to call GitHub API.
19 | ghapi() { curl --silent --location --user _:$GITHUB_TOKEN "$@"; }
20 |
21 | # A temporary file which receives HTTP response headers.
22 | TMPFILE=/tmp/tmp.$$
23 |
24 | # An associative array, key: artifact name, value: number of artifacts of that name.
25 | declare -A ARTCOUNT
26 |
27 | # Process all artifacts on this repository, loop on returned "pages".
28 | URL=$REPO/actions/artifacts
29 | while [[ -n "$URL" ]]; do
30 |
31 | # Get current page, get response headers in a temporary file.
32 | JSON=$(ghapi --dump-header $TMPFILE "$URL")
33 |
34 | # Get URL of next page. Will be empty if we are at the last page.
35 | URL=$(grep '^Link:' "$TMPFILE" | tr ',' '\n' | grep 'rel="next"' | head -1 | sed -e 's/.*<//' -e 's/>.*//')
36 | rm -f $TMPFILE
37 |
38 | # Number of artifacts on this page:
39 | COUNT=$(( $(jq <<<$JSON -r '.artifacts | length') ))
40 |
41 | # Loop on all artifacts on this page.
42 | for ((i=0; $i < $COUNT; i++)); do
43 |
44 | # Get name of artifact and count instances of this name.
45 | name=$(jq <<<$JSON -r ".artifacts[$i].name?")
46 | ARTCOUNT[$name]=$(( $(( ${ARTCOUNT[$name]} )) + 1))
47 |
48 | id=$(jq <<<$JSON -r ".artifacts[$i].id?")
49 | size=$(( $(jq <<<$JSON -r ".artifacts[$i].size_in_bytes?") ))
50 | printf "Deleting '%s' #%d, %'d bytes\n" $name ${ARTCOUNT[$name]} $size
51 | ghapi -X DELETE $REPO/actions/artifacts/$id
52 | done
53 | done
54 |
--------------------------------------------------------------------------------
/.github/workflows/label-pr.yml:
--------------------------------------------------------------------------------
1 | name: "Label PRs"
2 | on:
3 | - pull_request_target
4 |
5 | jobs:
6 | labeler:
7 | permissions:
8 | contents: read
9 | pull-requests: write
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/labeler@v5
13 | with:
14 | sync-labels: true
15 |
--------------------------------------------------------------------------------
/.github/workflows/publish-website.yml:
--------------------------------------------------------------------------------
1 | name: Deploy Website
2 | on:
3 | push:
4 | branches: [main]
5 | tags: ["*"]
6 | jobs:
7 | publish:
8 | runs-on: ubuntu-latest
9 | steps:
10 | - uses: actions/checkout@v4
11 | with:
12 | fetch-depth: 0
13 | - uses: sbt/setup-sbt@v1
14 | - uses: coursier/cache-action@v6
15 | - name: Build website
16 | run: sbt docs/docusaurusCreateSite
17 | - name: Publish website to GitHub Pages
18 | uses: JamesIves/github-pages-deploy-action@v4
19 | with:
20 | branch: gh-pages
21 | folder: ./website/build/scalac-profiling
22 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 | on:
3 | push:
4 | branches: [main]
5 | tags: ["*"]
6 | jobs:
7 | publish:
8 | runs-on: ubuntu-latest
9 | steps:
10 | - uses: actions/checkout@v4
11 | with:
12 | fetch-depth: 0
13 | - uses: sbt/setup-sbt@v1
14 | - uses: actions/setup-java@v4
15 | with:
16 | distribution: 'temurin'
17 | java-version: '8'
18 | cache: 'sbt'
19 | - run: sbt ci-release
20 | env:
21 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
22 | PGP_SECRET: ${{ secrets.PGP_SECRET }}
23 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
24 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
25 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.class
2 | *.log
3 | **/.DS_Store
4 |
5 | # sbt specific
6 | .cache
7 | .history
8 | .lib/
9 | .idea
10 | dist/*
11 | target/
12 | target_211/
13 | lib_managed/
14 | src_managed/
15 | project/boot/
16 | project/plugins/project/
17 |
18 | # Scala-IDE specific
19 | .scala_dependencies
20 | .worksheet
21 | .ensime_cache/
22 | .ensime/
23 |
24 | # Project specific
25 | .scalac-hash
26 | .hidden
27 | .profiledb_211
28 | .proxy
29 |
30 | # metals vscode
31 | .bloop
32 | .bsp
33 | .metals
34 | .vscode
35 | metals.sbt
36 |
37 | # website
38 | website/translated_docs
39 | website/build/
40 | website/yarn.lock
41 | website/node_modules
42 | website/i18n/*
43 | !website/i18n/en.json
44 | out/
45 | node_modules/
46 | package-lock.json
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "external/FlameGraph"]
2 | path = external/FlameGraph
3 | url = https://github.com/scalacenter/FlameGraph
4 | branch = scala-compilation
5 |
--------------------------------------------------------------------------------
/.jvmopts:
--------------------------------------------------------------------------------
1 | -Xms2048m
2 | -Xmx8096m
3 | -XX:ReservedCodeCacheSize=512m
4 |
--------------------------------------------------------------------------------
/.scalafmt.conf:
--------------------------------------------------------------------------------
1 | version = "3.9.4"
2 | runner.dialect = Scala213
3 | maxColumn = 100
4 | docstrings.style = Asterisk
5 | docstrings.wrap = no
6 | assumeStandardLibraryStripMargin = true
7 | newlines.beforeCurlyLambdaParams = multilineWithCaseOnly
8 | align.tokens=[]
9 | align.openParenCallSite = false
10 | align.openParenDefnSite = false
11 | binPack.literalArgumentLists = true
12 | project.git = true
13 | trailingCommas = preserve
14 |
--------------------------------------------------------------------------------
/HISTORICALCONTEXT.md:
--------------------------------------------------------------------------------
1 | # Historical Context
2 |
3 | Most of the statements made in this doc correspond to the `1.0.0` version. As of now (late fall 2023), it is best read as an essay describing the process of building the `scalac-profiling` tool.
4 |
5 | ## Goal of the project
6 |
7 | The goal of this proposal is to allow Scala developers to optimize their
8 | codebase to reduce compile times, spotting inefficient implicit searches,
9 | expanded macro code, and other reasons that slow down compile times and
10 | decrease developer productivity.
11 |
12 | This repository holds the compiler plugin and a fork of mainstream scalac
13 | that will eventually be merged upstream. This work is prompted by [Morgan
14 | Stanley's proposal](PROPOSAL.md) and was approved at our last advisory board meeting.
15 |
16 | ## Scalac status
17 |
18 | The required changes to the compiler, [Scalac](http://github.com/scala/scala), are
19 | the following:
20 |
21 | 1. [Collect all statistics and optimize checks](https://github.com/scala/scala/pull/6034).
22 | 1. [Initialize statistics per global](https://github.com/scala/scala/pull/6051).
23 | 1. [Add extra timers and counters](https://github.com/scala/scala/pull/6067).
24 |
25 | All of those changes have been released as of Scala 2.12.5.
26 |
27 | ## Information about the integration tests
28 |
29 | The project uses a forked scalac version that is used to compile both the
30 | compiler plugin and several OSS projects from the community. The integration
31 | tests are for now [Better Files](https://github.com/pathikrit/better-files) and
32 | [Wartremover](https://github.com/wartremover/wartremover), and they help us look
33 | into big profiling numbers and detect hot spots and misbehaviours.
34 |
35 | If you think a particular codebase is a good candidate to become an integration test, please [open an issue](https://github.com/scalacenter/scalac-profiling/issues/new).
36 |
37 | ## Plan
38 |
39 | The [proposal](PROPOSAL.md) is divided into three main areas:
40 |
41 | 1. Data generation and capture.
42 | 1. Data visualisation and comparison.
43 | 1. Reproducibility.
44 |
45 | How do we tackle each of these problems to make the implementation successful?
46 |
47 | ### Data generation and capture
48 |
49 | The generation of data comes from the guts of the compiler. To optimize for
50 | impact, the collection of information is done in two different places (a
51 | compiler plugin and a forked scalac).
52 |
53 | #### Project structure
54 |
55 | 1. [A compiler plugin](plugin/) to get information from the macro infrastructure independently
56 | of the used Scalac version.
57 | 2. [Profiledb readers and writers](profiledb/) to allow IDEs and editors to read and write profiledb's.
58 | 3. [An sbt plugin for reproducibility](sbt-plugin/) that warms up the compiler before profiling.
59 |
60 | The work is split into two parts so that Scala developers who are stuck on previous Scala
61 | versions can use the compiler plugin to get some profiling information about macros.
62 |
63 | This structure is more practical because it allows us to evolve things faster in the compiler
64 | plugin, or to put things there that cannot be merged upstream.
65 |
66 | ### Data visualisation and comparison
67 |
68 | The profiling data will be accessible in two different ways (provided that
69 | the pertinent profiling flags are enabled):
70 |
71 | 1. A summary of the stats will be printed out in every compile run.
72 | 1. A protobuf file will be generated at the root of the class files directory.
73 |    * The file is generated via protobuf so that it's backwards and forwards binary compatible.
74 | * The protobuf file will contain all the profiling information.
75 |
76 | Why a protobuf file instead of a JSON file? Forwards and backwards binary
77 | compatibility is important -- we want our tooling to be able to read files
78 | generated by previous or upcoming versions of the compiler. Our goal is to
79 | create a single tool that all IDEs and third-party tools use to parse and
80 | interpret the statistics from JARs and compile runs.
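
As an illustration, a tool could deserialize such a file with the ScalaPB-generated classes from the [profiledb module](profiledb/). A minimal sketch follows; the `Database` message name and its package are assumptions for illustration, check `profiledb.proto` for the actual schema:

```scala
import java.nio.file.{Files, Paths}

object DumpProfileDb {
  def main(args: Array[String]): Unit = {
    val bytes = Files.readAllBytes(Paths.get(args(0)))
    // Hypothetical generated message type; the real one is defined by profiledb.proto.
    val db = ch.epfl.scala.profiledb.profiledb.Database.parseFrom(bytes)
    println(db.toProtoString) // readable dump of the recorded statistics
  }
}
```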
81 |
82 | We're collaborating with [Intellij](https://github.com/JetBrains/intellij-scala) to provide
83 | some of the statistics within the IDE (e.g. macro invocations or implicit searches per line).
84 | We have some ideas to show this information as [heat map](https://en.wikipedia.org/wiki/Heat_map) in the future.
85 |
86 | ### Reproducibility
87 |
88 | Getting reproducible numbers is important for reasoning about the code and
89 | identifying with certainty when a commit increases or decreases compile
90 | times.
91 |
92 | To do so, several conditions must be met: the compiler must be warmed up, the
93 | load in the running computer must be low, and the hardware must be tweaked to
94 | disable options that make executions non-reproducible (like Turbo Boost).
95 |
96 | However, this warming up cannot be done in an isolated scenario as [Scalac's
97 | benchmarking](https://github.com/scala/compiler-benchmark) infrastructure
98 | does because it doesn't measure the overhead of the build tool calling the
99 | compiler, which can be significant (e.g. in sbt).
100 |
101 | As a result, reproducibility must be achieved in the build tool itself. The goal
102 | of this project is to provide an sbt plugin that warms up the compiler by a configurable
103 | amount of time. It also bundles recommendations and tips on how and where to run compilation.
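
With the [sbt plugin](sbt-plugin/), this boils down to a single setting (a sketch; see the plugin documentation for full usage):

```scala
// build.sbt: warm up the compiler for 30 seconds before profiled compilation
Global / profilingWarmupDuration := 30
```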
104 |
105 | ## Collected data
106 |
107 | In the following sections, I elaborate on the collected data that we want to
108 | extract from the compiler as well as technical details for every section in
109 | the [original proposal](PROPOSAL.md).
110 |
111 | ### Information about macros
112 |
113 | Per call-site, file and total:
114 |
115 | - [x] How many macros are expanded?
116 | - [x] How long do they take to run?
117 | - [x] How many tree nodes do macros create?
118 |
119 | ### Information about implicit search
120 |
121 | Getting hold of this information requires changes in mainstream scalac.
122 |
123 | Per call-site, file and total:
124 |
125 | - [x] How many implicit searches are triggered per position?
126 | - [x] How many implicit searches are triggered for a given type?
127 | - [x] How long do implicit searches take to run?
128 | - [x] How many implicit search failures are there?
129 | - [x] How many implicit search hits are there?
130 | - [x] What's the ratio of search failures to hits?
131 |
132 | ### Results
133 |
134 | These are the requirements that the proposal lays out.
135 |
136 | Note that in some cases, this plugin provides more information than requested by the
137 | original proposal.
138 |
139 | #### What the proposal wants
140 |
141 | - [x] Total compilation time (*this is provided by `-Ystatistics`*)
142 | - [x] Macro details
143 | - [x] Time per file
144 | - [x] Time per macro
145 | - [x] Invocations
146 | - [x] Per type
147 | - [x] Total time
148 | - [x] Flamegraph of all macros
149 | - [x] Implicit search details (time and number)
150 | - [x] By type
151 | - [x] By invocation (only number for now)
152 | - [x] By file (can be aggregated from the "by invocation" data)
153 | - [x] Flamegraph of all the implicit searches
154 | - [x] User time, kernel time, wall clock, I/O time.
155 | This feature was **already provided by Scalac**, implemented in [this PR](https://github.com/scala/scala/pull/5848).
156 | - [x] Time for flagged features (for certain features – e.g. optimisation)
157 | - The best way to capture this information is running statistics for the compiler with
158 | and without optimization, and compare the profiles. There are also some extra counters.
159 | - [x] Time resolving types from classpath
160 | - [x] Total
161 |
--------------------------------------------------------------------------------
/IDEAS.md:
--------------------------------------------------------------------------------
1 | # Ideas to be considered
2 |
3 | These are out of the scope of this project for now.
4 |
5 | ### Tell users how to organize their code to maximize implicit search hits
6 |
7 | Based on all the implicit search information that we collect from typer, is
8 | it possible to advise Scala developers on how to organize their code to
9 | optimize implicit search hits?
10 |
11 | For instance, if we detect that typer is continuously testing implicit
12 | candidates that fail but have higher precedence (because of implicit search
13 | priorities or implicit names in scope), can we develop an algorithm that
14 | understands priorities and is able to tell users "remove that wildcard
15 | import" or "move that implicit definition to a higher priority scope, like
16 | X"?
17 |
18 | (My hunch is that we can, but this requires testing and a deeper
19 | investigation.)
20 |
21 | ### Report on concrete, inefficient macros
22 |
23 | Macro-generated code is usually inefficient because macro authors do not
24 | optimize for compactness and compile times and express the macro logic with
25 | high-level Scala.
26 |
27 | Instead, if they really become a bottleneck, they could use low-level constructs that
28 | save the compiler work (manually generating getters and setters, code-generating
29 | shorter fresh names, spare use of `final` and `private[this]` flags, explicitly typing
30 | all the members, avoiding the use of traits, et cetera).
31 |
32 | (The compile time difference between an optimized macro and an unoptimized one has
33 | yet to be measured, but it could be significant under concrete scenarios).
34 |
35 | A well-known problem of macros is that different call-sites that invoke a
36 | macro with the same inputs will generate different trees with identical
37 | semantics. This lack of caching at the macro level is one of the main
38 | problems affecting compile times, especially when it comes to typeclass
39 | derivation.
40 |
41 | Ideally, this plugin would be able to:
42 |
43 | 1. Identify inefficient expanded code with tree-size heuristics and the use
44 | of particular features that could be expressed in a more low-level manner.
45 | 1. Tell users if there's any repetition in the expanded code.
46 | 1. Let users inspect the macro generated code to manually investigate inefficient
47 | macro expansions. The generated code could be written in a directory passed in
48 | via compiler plugin settings, and would be disabled by default.
49 |
50 | As a side note, one could imagine addressing repetitions in expanded code in two ways:
51 |
52 | * Create a cache of expanded code in the compiler macro infrastructure.
53 | * Create a cache of expanded code in the macro implementation.
54 |
55 | Both alternatives are **challenging**, if not impossible, so in practice repetition
56 | can only be addressed by the user: developers of `implicit`- and `macro`-intensive
57 | codebases can cache the macro results in implicit values, defined for all the target
58 | types at the same definition site (so that those values are reused instead of
59 | triggering a macro call at every use site).
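
For example, with a macro-based derivation such as circe's `deriveDecoder` (a sketch; the same pattern applies to any typeclass derivation macro):

```scala
import io.circe.Decoder
import io.circe.generic.semiauto.deriveDecoder

final case class User(name: String, age: Int)

object User {
  // The derivation macro expands exactly once, here. Every implicit search
  // for Decoder[User] reuses this cached value instead of re-expanding.
  implicit val decoder: Decoder[User] = deriveDecoder[User]
}
```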
60 |
61 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2017-2018 Jorge Vicente Cantero
190 | Copyright 2017-2018 EPFL (École Polytechnique Fédérale de Lausanne)
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/PROPOSAL.md:
--------------------------------------------------------------------------------
1 | # Providing Better Compilation Performance Information
2 |
3 | ## Proposer
4 |
5 | Proposed by James Belsey, Morgan Stanley, May 2017
6 |
7 | ## Abstract
8 |
9 | For a user of the Scala compiler, investigating slow build times is difficult.
10 | We propose enhancing the current flags and tooling within the Scala compiler to
11 | identify hotspots in or caused by users' code. For example, a macro may take
12 | significant time or be called more often than expected (e.g. in implicit resolution).
13 | Providing tooling that generates user-understandable reports (e.g. per-macro
14 | invocation counts and total times, or reports identifying poor implicit resolution)
15 | allows users to tune their builds for optimal performance.
16 |
17 | This can be broken into three distinct areas:
18 |
19 | - data generation and capture
20 | - data visualisation and comparison
21 | - reproducibility
22 |
23 | One important consideration is that the instrumentation must be carefully
24 | engineered to ensure that it does not adversely affect compilation time
25 | when disabled.
26 |
27 | ## Proposal
28 |
29 | Generation includes capturing information such as:
30 |
31 | - Compilation time per file
32 | - Total
33 | - Broken down by phase
34 | - Times per macro
35 | - Per file
36 | - Per macro
37 | - Invocations
38 | - Total time
39 | - Implicit search details (time and number)
40 | - By type
41 | - By invocation
42 | - By file
43 | - User time, kernel time, wall clock, I/O time
44 | - Time for flagged features (for certain features – e.g. optimisation)
45 | - Time resolving types from classpath
46 | - Total
47 | - by jar
48 | - Imports – unused/wildcard timings?
49 |
50 | Other avenues might include providing information about file dependencies, for
51 | example those that cause the incremental compiler to fail and fall back to full
52 | compilation. Or islands of compilation which could benefit from being split
53 | into separate modules. These features may come out of the semantic database
54 | work by Eugene Burmako et al.
55 |
56 | The data should be generated in both machine- and human-consumable
57 | form.
58 |
59 | Human-readable reports (e.g. files sorted by compile time) are a key artifact
60 | produced by this work. This could for example be HTML reports summarizing and
61 | organising the data along various axes. For example for macros it would be
62 | useful to see both by macro and by file location when they are used.
63 |
64 | Machine-readable data should both allow external tools to support investigation
65 | and allow comparison between different runs (not just the investigation of a
66 | single run). This also allows the data to be integrated into CI and regression
67 | testing.
68 |
69 | The generated profiling numbers should have high reproducibility and reflect
70 | the real behaviour – this may include warming the compiler and other profiling
71 | techniques to ensure consistency between runs, i.e. if I make a change it is
72 | important that you have high confidence the build is faster or better.
73 |
74 | ## Cost
75 |
76 | Unknown at this stage.
77 |
78 | ## Timescales
79 |
80 | Unknown at this stage.
81 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Providing Better Compilation Performance Information
2 |
3 | When compile times become a problem, how can Scala developers reason about
4 | the relation between their code and compile times?
5 |
6 | ## Installation
7 |
8 | Add the `scalac-profiling` compiler plugin into your project:
9 |
10 | ```scala
11 | addCompilerPlugin("ch.epfl.scala" %% "scalac-profiling" % "<version>" cross CrossVersion.full)
12 | ```
13 |
14 | Note that in Scala 2.13, the preferred form of the compiler option to enable statistics is `-Vstatistics`. It is part of the family of `-V` flags that enable various "verbose" behaviors (in 2.12, the flag is called `-Ystatistics`).
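
A minimal sketch of enabling the right flag per Scala version in `build.sbt` (illustrative; the setting below is standard sbt, not something the plugin requires):

```scala
// Enable compiler statistics with the spelling each Scala version expects.
ThisBuild / scalacOptions += {
  CrossVersion.partialVersion(scalaVersion.value) match {
    case Some((2, n)) if n >= 13 => "-Vstatistics"
    case _                       => "-Ystatistics"
  }
}
```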
15 |
16 | Learn more at https://scalacenter.github.io/scalac-profiling.
17 |
18 | Also, you may wish to read the [Speeding Up Compilation Time with `scalac-profiling`](https://www.scala-lang.org/blog/2018/06/04/scalac-profiling.html) article on the scala-lang blog. Note that the article is over five years old and hasn't been updated, but it is still a good source of ideas.
19 |
20 | ## Maintenance status
21 |
22 | This tool was created at the [Scala Center](http://scala.epfl.ch) in 2017 and 2018 as the result of the proposal [SCP-10](https://github.com/scalacenter/advisoryboard/blob/main/proposals/010-compiler-profiling.md), submitted by a [corporate member](https://scala.epfl.ch/corporate-membership.html) of the board. The Center is seeking new corporate members to fund activities such as these, to benefit the entire Scala community.
23 |
24 | The plugin is now community-maintained, with maintenance overseen by the Center. Thanks to volunteer contributors, the latest release 1.1.0 supports both Scala 2.13 and 2.12.
25 |
26 | ## Historical context
27 |
28 | The historical context of this project is quite interesting. For those wondering about the details, see the [dedicated section](HISTORICALCONTEXT.md).
29 |
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | import xsbti.compile.CompileAnalysis
11 |
12 | lazy val root = project
13 | .in(file("."))
14 | .aggregate(profiledb, plugin, profilingSbtPlugin)
15 | .settings(
16 | name := "scalac-profiling-root",
17 | crossScalaVersions := bin212 ++ bin213,
18 | publish := {},
19 | publishLocal := {},
20 | skip / publish := true,
21 | watchSources ++=
22 | (plugin / watchSources).value ++
23 | (profiledb / watchSources).value ++
24 | (integrations / watchSources).value
25 | )
26 |
27 | val bin212 = Seq("2.12.20", "2.12.19", "2.12.18")
28 | val bin213 = Seq("2.13.16", "2.13.15", "2.13.14")
29 |
30 | // Copied from
31 | // https://github.com/scalameta/scalameta/blob/370e304b0d10db1dd65fc79a5abc1f39004aeffd/build.sbt#L724-L737
32 | lazy val fullCrossVersionSettings = Seq(
33 | crossVersion := CrossVersion.full,
34 | crossScalaVersions := bin212 ++ bin213,
35 | Compile / unmanagedSourceDirectories += {
36 | // NOTE: SBT 1.x provides cross-version support for Scala sources
37 | // (https://www.scala-sbt.org/1.x/docs/Cross-Build.html#Scala-version+specific+source+directory).
38 | // Unfortunately, it only includes directories like "scala_2.12" or "scala_2.13",
39 | // not "scala_2.12.20" or "scala_2.13.15" that we need.
40 | // That's why we have to work around here.
41 | val base = (Compile / sourceDirectory).value
42 | val versionDir = scalaVersion.value.replaceAll("-.*", "")
43 | base / ("scala-" + versionDir)
44 | }
45 | )
46 |
47 | import _root_.ch.epfl.scala.profiling.build.BuildImplementation.BuildDefaults
48 | import scalapb.compiler.Version.scalapbVersion
49 | lazy val profiledb = project
50 | .in(file("profiledb"))
51 | .enablePlugins(BuildInfoPlugin)
52 | // .settings(metalsSettings)
53 | .settings(
54 | // Specify scala version to allow third-party software to use this module
55 | crossScalaVersions := bin212 ++ bin213,
56 | scalaVersion := bin212.head,
57 | libraryDependencies +=
58 | "com.thesamet.scalapb" %% "scalapb-runtime" % scalapbVersion % "protobuf",
59 | Compile / managedSourceDirectories += target.value / "protobuf-generated",
60 | Compile / PB.targets := Seq(
61 | scalapb.gen() -> (target.value / "protobuf-generated")
62 | ),
63 | buildInfoPackage := "scalac.profiling.internal.build",
64 | buildInfoKeys := List[BuildInfoKey](
65 | Keys.organization,
66 | Keys.version,
67 | Keys.scalaVersion,
68 | Keys.crossScalaVersions
69 | )
70 | )
71 |
72 | // Do not change the lhs id of this plugin, `BuildPlugin` relies on it
73 | lazy val plugin = project
74 | .dependsOn(profiledb)
75 | // .settings(metalsSettings)
76 | .settings(
77 | fullCrossVersionSettings,
78 | name := "scalac-profiling",
79 | libraryDependencies ++= List(
80 | "com.lihaoyi" %% "pprint" % "0.9.0",
81 | scalaOrganization.value % "scala-compiler" % scalaVersion.value
82 | ),
83 | libraryDependencies ++= List(
84 | "junit" % "junit" % "4.13.2" % "test",
85 | "com.github.sbt" % "junit-interface" % "0.13.3" % "test"
86 | ),
87 | Test / testOptions ++= List(Tests.Argument("-v"), Tests.Argument("-s")),
88 | allDepsForCompilerPlugin := {
89 | val jar = (Compile / Keys.packageBin).value
90 | val profileDbJar = (profiledb / Compile / Keys.`package`).value
91 | val absoluteJars = List(jar, profileDbJar).classpath
92 | val pluginDeps = (Compile / managedClasspath).value
93 | (absoluteJars ++ pluginDeps)
94 | },
95 |     // Make the tests compile with the plugin
96 | optionsForSourceCompilerPlugin := {
97 | val jar = (Compile / Keys.packageBin).value
98 | val pluginAndDeps = allDepsForCompilerPlugin.value.map(_.data.getAbsolutePath()).mkString(":")
99 | val addPlugin = "-Xplugin:" + pluginAndDeps
100 | val dummy = "-Jdummy=" + jar.lastModified
101 | // Enable debugging information when necessary
102 | val debuggingPluginOptions =
103 | if (!enableStatistics.value) Nil
104 | else List("-Ystatistics") // , "-P:scalac-profiling:show-profiles")
105 | // else List("-Xlog-implicits", "-Ystatistics:typer")
106 | Seq(addPlugin, dummy) ++ debuggingPluginOptions
107 | },
108 | Test / scalacOptions ++= optionsForSourceCompilerPlugin.value,
109 | // Generate toolbox classpath while compiling for both configurations
110 | Compile / resourceGenerators += generateToolboxClasspath.taskValue,
111 | Test / resourceGenerators += Def.task {
112 | val options = scalacOptions.value
113 | val stringOptions = options.filterNot(_ == "-Ydebug").mkString(" ")
114 | val pluginOptionsFile = resourceManaged.value / "toolbox.plugin"
115 | IO.write(pluginOptionsFile, stringOptions)
116 | List(pluginOptionsFile.getAbsoluteFile)
117 | }.taskValue,
118 | inCompileAndTest(unmanagedSourceDirectories ++= {
119 | val scalaPartialVersion = CrossVersion partialVersion scalaVersion.value
120 | scalaPartialVersion.collect {
121 | case (2, y) if y == 12 => new File(scalaSource.value.getPath + "-2.12")
122 | case (2, y) if y >= 13 => new File(scalaSource.value.getPath + "-2.13")
123 | }.toList
124 | }),
125 | Compile / Keys.packageBin := (Compile / assembly).value,
126 | assembly / test := {}
127 | )
128 |
129 | lazy val profilingSbtPlugin = project
130 | .in(file("sbt-plugin"))
131 | .settings(
132 | name := "sbt-scalac-profiling",
133 | scalaVersion := bin212.head,
134 | sbtPlugin := true,
135 | scriptedLaunchOpts ++= Seq(
136 | "-Xmx2048M",
137 | "-Xms1024M",
138 | "-Xss8M",
139 | s"-Dplugin.version=${version.value}"
140 | ),
141 | scriptedBufferLog := false
142 | )
143 | .enablePlugins(ScriptedPlugin)
144 |
145 | // Source dependencies are specified in `project/BuildPlugin.scala`
146 | lazy val integrations = project
147 | .in(file("integrations"))
148 | .settings(
149 | skip / publish := true,
150 | libraryDependencies += "com.github.alexarchambault" %% "case-app" % "2.0.6",
151 | Test / parallelExecution := false,
152 | Compile / scalacOptions := (Def.taskDyn {
153 | val options = (Compile / scalacOptions).value
154 | val ref = Keys.thisProjectRef.value
155 | Def.task(options ++ BuildDefaults.scalacProfilingScalacOptions(ref).value)
156 | }).value,
157 | clean := Def
158 | .sequential(
159 | clean,
160 | (BetterFilesCore / Compile / clean),
161 | (WartremoverCore / Compile / clean)
162 | )
163 | .value,
164 | test := Def
165 | .sequential(
166 | (ThisBuild / showScalaInstances),
167 | (Compile / compile)
168 | )
169 | .value,
170 | testOnly := Def.inputTaskDyn {
171 | val keywords = keywordsSetting.parsed
172 | val emptyAnalysis = Def.task[CompileAnalysis](sbt.internal.inc.Analysis.Empty)
173 | val IntegrationTask = Def.taskDyn {
174 | if (keywords.contains(Keywords.Integration))
175 | Def.sequential(
176 | (Compile / compile)
177 | )
178 | else emptyAnalysis
179 | }
180 | val BetterFilesTask = Def.taskDyn {
181 | if (keywords.contains(Keywords.BetterFiles))
182 | Def.sequential(
183 | (BetterFilesCore / Compile / compile)
184 | )
185 | else emptyAnalysis
186 | }
187 | val WartremoverTask = Def.taskDyn {
188 | if (keywords.contains(Keywords.Wartremover))
189 | Def.sequential(
190 | (WartremoverCore / Compile / compile)
191 | )
192 | else emptyAnalysis
193 | }
194 |
195 | Def.sequential(
196 | IntegrationTask,
197 | BetterFilesTask,
198 | WartremoverTask
199 | )
200 | }.evaluated
201 | )
202 |
203 | lazy val docs = project
204 | .in(file("docs-gen"))
205 | .dependsOn(profiledb)
206 | .enablePlugins(MdocPlugin, DocusaurusPlugin)
207 | .settings(
208 | name := "scalac-profiling-docs",
209 | moduleName := "scalac-profiling-docs",
210 | libraryDependencies += "io.get-coursier" % "interface" % "1.0.19",
211 | (publish / skip) := true,
212 | scalaVersion := bin212.head,
213 | mdoc := (Compile / run).evaluated,
214 | (Compile / mainClass) := Some("ch.epfl.scala.profiling.Docs"),
215 | (Compile / resources) ++= {
216 | List((ThisBuild / baseDirectory).value / "docs")
217 | }
218 | )
219 |
220 | val proxy = project
221 | .in(file(".proxy"))
222 | .aggregate(BetterFiles, Wartremover)
223 | .settings(skip / publish := true)
224 |
--------------------------------------------------------------------------------
/docs-gen/src/main/scala/ch/epfl/scala/profiling/Docs.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profiling
2 |
3 | import ch.epfl.scala.profiling.docs.Sonatype
4 | import mdoc.MainSettings
5 |
6 | import scala.meta.io.AbsolutePath
7 |
8 | object Docs {
9 | def main(args: Array[String]): Unit = {
10 | val cwd0 = AbsolutePath.workingDirectory
11 | // Depending on who runs it (sbt vs bloop), the current working directory is different
12 | val cwd = if (!cwd0.resolve("docs").isDirectory) cwd0.toNIO.getParent else cwd0.toNIO
13 |
14 | def prepareVersions(prefix: String): Seq[String] => String =
15 | _.sortWith {
16 | case (l, r) =>
17 | l.replaceFirst(prefix + ".", "").toInt >=
18 | r.replaceFirst(prefix + ".", "").toInt
19 | }.mkString(", ")
20 |
21 | val (scala212Versions, scala213Versions) =
22 | scalac.profiling.internal.build.BuildInfo.crossScalaVersions.partition(_.startsWith("2.12"))
23 |
24 | val settings = MainSettings()
25 | .withSiteVariables(
26 | Map(
27 | "VERSION" -> Sonatype.releaseScalacProfiling.version,
28 | "LATEST_VERSION" -> scalac.profiling.internal.build.BuildInfo.version,
29 | "SBT_PLUGIN_VERSION" -> Sonatype.releaseSbtPlugin.version,
30 | "SCALA212_VERSIONS" -> prepareVersions("2.12")(scala212Versions),
31 | "SCALA213_VERSIONS" -> prepareVersions("2.13")(scala213Versions)
32 | )
33 | )
34 | .withArgs(args.toList)
35 | // it should work with mdoc when run inside bloop but it doesn't, let's wait until it's fixed
36 | .withIn(cwd.resolve("docs"))
37 | .withOut(cwd.resolve("out"))
38 |
39 | val exitCode = _root_.mdoc.Main.process(settings)
40 | if (exitCode != 0) sys.exit(exitCode)
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/docs-gen/src/main/scala/ch/epfl/scala/profiling/docs/DependencyResolution.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profiling.docs
2 |
3 | import coursierapi.Repository
4 | import coursierapi.error.CoursierError
5 |
6 | import ch.epfl.scala.profiledb.utils.AbsolutePath
7 |
8 | import scala.jdk.CollectionConverters._
9 |
10 | // Slight modification of `bloop.DependencyResolution`
11 | object DependencyResolution {
12 |
13 | /**
14 | * @param organization The module's organization.
15 | * @param module The module's name.
16 | * @param version The module's version.
17 | */
18 | final case class Artifact(organization: String, module: String, version: String)
19 |
20 | /**
21 | * Resolve the specified modules and get all the files. By default, the local Ivy
22 | * repository and Maven Central are included in resolution. This resolution throws
23 | * in case there is an error.
24 | *
25 | * @param artifacts Artifacts to resolve
26 | * @param resolveSources Resolve JAR files containing sources
27 | * @param additionalRepos Additional repositories to include in resolution.
28 | * @return All the resolved files.
29 | */
30 | def resolve(
31 | artifacts: List[Artifact],
32 | resolveSources: Boolean = false,
33 | additionalRepos: Seq[Repository] = Nil
34 | ): Array[AbsolutePath] = {
35 | resolveWithErrors(artifacts, resolveSources, additionalRepos) match {
36 | case Right(paths) => paths
37 | case Left(error) => throw error
38 | }
39 | }
40 |
41 | /**
42 | * Resolve the specified module and get all the files. By default, the local ivy
43 | * repository and Maven Central are included in resolution. This resolution is
44 | * pure and returns either some errors or some resolved jars.
45 | *
46 | * @param artifacts Artifacts to resolve
47 | * @return Either a coursier error or all the resolved files.
48 | */
49 | def resolveWithErrors(
50 | artifacts: List[Artifact],
51 | resolveSources: Boolean = false,
52 | additionalRepositories: Seq[Repository] = Nil
53 | ): Either[CoursierError, Array[AbsolutePath]] = {
54 | val dependencies = artifacts.map { artifact =>
55 | import artifact._
56 | val baseDep = coursierapi.Dependency.of(organization, module, version)
57 | if (resolveSources) baseDep.withClassifier("sources")
58 | else baseDep
59 | }
60 | resolveDependenciesWithErrors(dependencies, resolveSources, additionalRepositories)
61 | }
62 |
63 | /**
64 | * Resolve the specified dependencies and get all the files. By default, the
65 | * local ivy repository and Maven Central are included in resolution. This
66 | * resolution is pure and returns either some errors or some resolved jars.
67 | *
68 | * @param dependencies Dependencies to resolve.
69 | * @param additionalRepositories Additional repositories to include in resolution.
70 | * @return Either a coursier error or all the resolved files.
71 | */
72 | def resolveDependenciesWithErrors(
73 | dependencies: Seq[coursierapi.Dependency],
74 | resolveSources: Boolean = false,
75 | additionalRepositories: Seq[Repository] = Nil
76 | ): Either[CoursierError, Array[AbsolutePath]] = {
77 | val fetch = coursierapi.Fetch
78 | .create()
79 | .withDependencies(dependencies: _*)
80 | if (resolveSources)
81 | fetch.addArtifactTypes("src", "jar")
82 | fetch.addRepositories(additionalRepositories: _*)
83 |
84 | try Right(fetch.fetch().asScala.toArray.map(f => AbsolutePath(f.toPath)))
85 | catch {
86 | case error: CoursierError => Left(error)
87 | }
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/docs-gen/src/main/scala/ch/epfl/scala/profiling/docs/Sonatype.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profiling.docs
2 |
3 | import java.text.SimpleDateFormat
4 | import java.util.Date
5 | import org.jsoup.Jsoup
6 |
7 | import scala.util.control.NonFatal
8 | import coursierapi.MavenRepository
9 |
10 | import scala.jdk.CollectionConverters._
11 |
12 | final case class Release(version: String, lastModified: Date)
13 |
14 | object Sonatype {
15 | lazy val releaseScalacProfiling = fetchLatest("scalac-profiling_2.12.18")
16 | lazy val releaseSbtPlugin = fetchLatest("sbt-scalac-profiling_2.12_1.0")
17 |
18 |   /** Returns the latest published release of the given artifact. */
19 | private def fetchLatest(artifact: String): Release = {
20 | val artifacts = List(
21 | DependencyResolution.Artifact("ch.epfl.scala", artifact, "latest.release")
22 | )
23 | val resolvedJars = DependencyResolution.resolve(
24 | artifacts,
25 | additionalRepos =
26 |         List(MavenRepository.of("https://oss.sonatype.org/content/repositories/staging"))
27 | )
28 |
29 | val latestStableVersion = resolvedJars.find(_.syntax.contains(artifact)) match {
30 | case None => sys.error(s"Missing jar for resolved artifact '$artifact'")
31 | case Some(jar) =>
32 | val firstTry =
33 | jar.underlying
34 | .getFileName()
35 | .toString
36 | .stripSuffix(".jar")
37 | .stripPrefix(artifact + "-")
38 |
39 | if (!firstTry.endsWith("_2.12.18") && !firstTry.endsWith("_2.12_1.0"))
40 | firstTry
41 | else jar.getParent.getParent.underlying.getFileName.toString
42 | }
43 |
44 | val doc = Jsoup
45 | .connect(
46 | s"https://oss.sonatype.org/content/repositories/releases/ch/epfl/scala/$artifact/"
47 | )
48 | .get
49 |
50 | val dateTime = new SimpleDateFormat("yyyy-MM-dd HH:mm")
51 | val releases = doc
52 | .select("pre")
53 | .asScala
54 | .flatMap { versionRow =>
55 | val elements = versionRow.getAllElements().asScala.filterNot(_.text().contains("../"))
56 | val nodes = versionRow.textNodes().asScala.filter(_.text().trim.nonEmpty)
57 |
58 | elements.zip(nodes).flatMap {
59 | case (element, node) =>
60 | val version = element.text().stripSuffix("/")
61 |
62 | if (version.startsWith("maven-metadata")) Nil
63 | else {
64 | node.text().trim().split("\\s+").init.toList match {
65 | case List(date, time) =>
66 | try {
67 | val parsedDate = dateTime.parse(s"$date $time")
68 | List(Release(version, parsedDate))
69 | } catch {
70 | case NonFatal(_) => Nil
71 | }
72 | case _ => Nil
73 | }
74 | }
75 | }
76 | }
77 |
78 | releases.filter(_.version == latestStableVersion).maxBy(_.lastModified.getTime)
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/docs/plugins/sbt-plugin.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: sbt-plugin
3 | title: SBT Plugin
4 | ---
5 |
6 | The SBT plugin allows users to warm up the compiler before measuring compilation times
7 | and analyzing statistics. The plugin is deliberately simple in its goals, and this page
8 | also collects a set of tips that users should follow to get reliable data.
9 |
10 | ### Installation
11 |
12 | Add the plugin to `project/plugins.sbt`:
13 |
14 | ```scala
15 | addSbtPlugin("ch.epfl.scala" % "sbt-scalac-profiling" % "@SBT_PLUGIN_VERSION@")
16 | ```
17 |
18 | ### Usage
19 |
20 | Run the `profilingWarmupCompiler` task in SBT on your CI or local machine
21 | before the compilation you actually want to measure (see the example session below).
22 | The default warmup duration is 60 seconds. You can modify it like this:
23 |
24 | ```diff
25 | // set the warmup duration to 30 seconds globally
26 | + Global / profilingWarmupDuration := 30
27 | // or set the warmup duration to 50 seconds in a single project
28 | val myProject = project.settings(
29 | + profilingWarmupDuration := 50
30 | )
31 | ```
32 |
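For instance, a warm-up session in the sbt shell might look like this (a sketch:
the project name is illustrative, and the `-Vstatistics` flag assumes Scala 2.13):

```bash
sbt:myproject> set ThisBuild / scalacOptions += "-Vstatistics"
sbt:myproject> profilingWarmupCompiler
sbt:myproject> compile
```
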
33 | ### Several tips
34 |
35 | To get reliable and predictable data, your infrastructure needs to be stable.
36 | These are some encouraged practices:
37 |
38 | 1. The CPU load of the running machine must be kept low. Remove unnecessary processes and cron
39 | jobs that may be running in the background.
40 | 2. Enable the `-Vstatistics` option before warming up the compiler.
41 | Otherwise, enabling it later will cause the JVM to deoptimize and throw away the code it
42 | optimized during the warm-up. The same applies to other flags or build changes that affect compilation.
43 | 3. **Do not reload** the SBT shell, or you'll need to warm up the compiler again.
44 |
--------------------------------------------------------------------------------
/docs/user-guide/installation.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: installation
3 | title: Installation
4 | ---
5 |
6 | ### Pick the right version
7 |
8 | | Scala series | Supported versions | `scalac-profiling` |
9 | |:-------------|:--------------------|:-------------------|
10 | | 2.12.x | @SCALA212_VERSIONS@ | `@VERSION@` |
11 | | 2.13.x | @SCALA213_VERSIONS@ | `@VERSION@` |
12 |
13 | ### Add the dependency
14 |
15 | Add the scalac compiler plugin to your build:
16 |
17 | ```scala
18 | addCompilerPlugin("ch.epfl.scala" %% "scalac-profiling" % "@VERSION@" cross CrossVersion.full)
19 | ```
20 |
21 | It is also required to enable compiler statistics: on Scala 2.13 the needed compiler
22 | flag is `-Vstatistics`, and on Scala 2.12 it is `-Ystatistics`.
23 |
24 | For example, for the SBT build tool, add the following settings to `build.sbt`:
25 |
26 | ```diff
27 | + inThisBuild(
28 | + List(
29 | + addCompilerPlugin("ch.epfl.scala" %% "scalac-profiling" % "@VERSION@" cross CrossVersion.full),
30 | +    scalacOptions += "-Vstatistics",
31 | + )
32 | + )
33 | ```
34 |
35 | You can also use project-scoped settings if you want to profile a particular project:
36 |
37 | ```diff
38 | lazy val myproject = project
39 | .settings(
40 | + addCompilerPlugin("ch.epfl.scala" %% "scalac-profiling" % "@VERSION@" cross CrossVersion.full),
41 | +    scalacOptions += "-Vstatistics",
42 | )
43 | ```
44 |
45 | ### Extend the configuration
46 |
47 | There are several compiler plugin options that enrich the analysis capabilities.
48 | All of the following options must be prefixed with `-P:scalac-profiling:` (see the example after the table).
49 |
50 | | Name | Description |
51 | |:-----------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
52 | | `generate-global-flamegraph` | Creates a global flamegraph of implicit searches for all compilation units. Use the `-P:scalac-profiling:cross-target` option to manage the target directory for the resulting flamegraph file, otherwise, the SBT target directory will be picked. |
53 | | `generate-macro-flamegraph` | Generate a flamegraph for macro expansions. The flamegraph for implicit searches is enabled by default. |
54 | | `generate-profiledb` | Generate profiledb. |
55 | | `print-failed-implicit-macro-candidates` | Print trees of all failed implicit searches that triggered a macro expansion. |
56 | | `print-search-result` | Print the result retrieved by an implicit search. Example: `-P:scalac-profiling:print-search-result:$MACRO_ID`. |
57 | | `show-concrete-implicit-tparams` | Use more concrete type parameters in the implicit search flamegraph. Note that it may change the shape of the flamegraph. |
58 | | `show-profiles` | Show implicit searches and macro expansions by type and call-site. |
59 | | `sourceroot` | Tell the plugin what is the source directory of the project. Example: `-P:scalac-profiling:sourceroot:$PROJECT_BASE_DIR`. |
60 | | `cross-target` | Tell the plugin what is the cross target directory of the project. Example: `-P:scalac-profiling:cross-target:$PROJECT_TARGET`. |
61 |
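For instance, a minimal sketch of enabling a few of these options in `build.sbt`
(the chosen options are illustrative):

```scala
ThisBuild / scalacOptions ++= List(
  "-P:scalac-profiling:generate-global-flamegraph",
  "-P:scalac-profiling:show-profiles",
  s"-P:scalac-profiling:sourceroot:${(ThisBuild / baseDirectory).value.getAbsolutePath}"
)
```
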
--------------------------------------------------------------------------------
/docs/user-guide/motivation.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: motivation
3 | title: What is scalac-profiling?
4 | ---
5 |
6 |
7 | `scalac-profiling` is a compilation profiling tool for Scala 2 projects
8 | which aims to help you better understand what is slowing down compilation in your project.
9 | As of version `@VERSION@`, it is built for Scala 2.12 and 2.13.
10 |
11 | ### When to use scalac-profiling?
12 |
13 | Using implicits and macros can significantly increase compilation time,
14 | depending on how they are used and how your codebase is organized. Suppose your project
15 | heavily depends on automatic code generation powered by macros,
16 | such as type-class derivation. While that is a powerful and user-friendly technology,
17 | it is likely to materialize implicits for the same types many times across the project,
18 | resulting in excessive compilation times. Given all of that, although `scalac-profiling`
19 | can be used for general compilation analysis, it is best at chasing down bottlenecks
20 | with a focus on implicit searches and macro expansions.
21 |
22 | ### Why scalac-profiling?
23 |
24 | With `scalac-profiling`, you can easily generate insightful [flamegraphs][flamegraph]
25 | that provide next-level profiling of compilation times. Explore the following flamegraph
26 | of the implicit searches in the [Scala Steward][scala-steward] project,
27 | which we built by adding just five lines to the build file and running one script
28 | from the FlameGraph project. Note that the graph below is clickable.
29 |
30 |
31 |
34 |
35 |
36 | ### Maintenance status
37 |
38 | This tool originated at the [Scala Center][scala-center] in 2017-2018
39 | as the result of the proposal [SCP-10][scp-10], submitted by a [corporate member][corporate-membership]
40 | of the board. The Center is seeking new corporate members to fund activities such as these,
41 | to benefit the entire Scala community.
42 |
43 | `scalac-profiling` is now community-maintained, with its maintenance overseen by the Center.
44 | We invite interested users to participate and submit further improvements.
45 |
46 |
47 |
48 | [corporate-membership]: https://scala.epfl.ch/corporate-membership.html
49 | [flamegraph]: https://github.com/brendangregg/FlameGraph
50 | [scala-center]: http://scala.epfl.ch
51 | [scala-steward]: https://github.com/scala-steward-org/scala-steward
52 | [scp-10]: https://github.com/scalacenter/advisoryboard/blob/main/proposals/010-compiler-profiling.md
53 |
--------------------------------------------------------------------------------
/docs/user-guide/usage.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: usage
3 | title: Usage
4 | ---
5 |
6 | ### Once everything is set up
7 |
8 | By default, `scalac-profiling` generates the _graph representation_ of the
9 | implicit searches happening during compilation; data for the macro
10 | expansions can be generated separately by enabling the proper compiler plugin option
11 | (check the available [compiler plugin options](installation.md)).
12 | To be more precise, the compiler plugin does not generate the graphs for you; instead,
13 | it persists the graph data in a format that allows you to generate the graphs yourself
14 | without touching or transforming the data. That graph data is present under the
15 | _profiledb META-INF directory_, located in the _classes_ directory. For example,
16 | a flamegraph data file will be located at
17 | `/target/scala-2.13/classes/META-INF/profiledb/graphs/implicit-searches-X.flamegraph`.
18 |
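For instance, on an sbt project built with Scala 2.13, you could locate the
generated data like this (the timestamp in the file name is illustrative):

```bash
$ ls target/scala-2.13/classes/META-INF/profiledb/graphs/
implicit-searches-1695489630924.flamegraph
```
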
19 | The resulting graphs are obtained by using the [FlameGraph][flamegraph] tool.
20 | They are intuitive to inspect and browse, and stand out because:
21 |
22 | * They allow you to selectively choose what to profile. Click on any
23 | stack to zoom in, and reset by clicking "Reset zoom" on the bottom left.
24 | * They allow you to search via regexes; the matching stacks are
25 | highlighted. Check the search button on the top right.
26 |
27 | ### Lastly, how to generate a flamegraph?
28 |
29 | In order to generate flamegraphs, clone the [scalac-profiling][scalac-profiling]
30 | GitHub repository. The repository contains the `FlameGraph` git submodule with
31 | the tools that generate the SVG files you will later use. Once the prepared
32 | data has been generated by the compiler plugin (a `.flamegraph` file):
33 |
34 | 1. Execute `git submodule update --init` to fetch the submodule;
35 | 2. `cd` into the `external/Flamegraph` directory;
36 | 3. And run the following command from there:
37 | ```bash
38 | ./flamegraph.pl \
39 | --hash --countname="μs" \
40 | --color=scala-compilation \
41 | $PATH_TO_FLAMEGRAPH_DATA > implicit-searches-graph.svg
42 | ```
43 |
44 | The resulting graph will look like this one we generated for the [Scala Steward][scala-steward] project:
45 |
46 |
47 |
50 |
51 |
52 | ### Reading the graphs
53 |
54 | A graph is a set of nodes and edges. A node represents an implicit search for a given type.
55 | Every node specifies how many implicit searches have been triggered in total,
56 | and how long they took in total. An edge represents a dependency of one
57 | implicit search on another.
58 |
59 | > It is important to note that every node in a program can be relied upon by other nodes
60 | > and can serve as the starting point for an implicit search. Therefore, the number of times a
61 | > node has been searched for may not always be equal to the total number of nodes that depend on it.
62 |
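Under the hood, each `.flamegraph` data file uses the collapsed-stack format consumed
by `flamegraph.pl`: one line per implicit search stack, with frames separated by
semicolons and followed by the accumulated time in microseconds. A hypothetical line
(the type names are invented for illustration):

```
Ordering[List[Int]];Ordering[Int] 1250
```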
63 |
64 | [flamegraph]: https://github.com/brendangregg/FlameGraph
65 | [scalac-profiling]: https://github.com/scalacenter/scalac-profiling/
66 | [scala-steward]: https://github.com/scala-steward-org/scala-steward
67 |
--------------------------------------------------------------------------------
/integrations/src/main/scala/profiling/integrations/caseapp/CliOptions.scala:
--------------------------------------------------------------------------------
1 | package profiling.integrations.caseapp
2 |
3 | import java.nio.file.Path
4 |
5 | import caseapp.{ExtraName, HelpMessage, Recurse, ValueDescription}
6 |
7 | case class CliOptions(
8 | @ExtraName("c")
9 | @HelpMessage("File path to the bloop config directory.")
10 | @ValueDescription(".bloop")
11 | configDir: Option[Path] = None,
12 | @ExtraName("v")
13 | @HelpMessage("If set, print the about section at the beginning of the execution.")
14 | version: Boolean = false,
15 | @HelpMessage("If set, print out debugging information to stderr.")
16 | verbose: Boolean = false,
17 | @Recurse common: CommonOptions = CommonOptions.default,
18 | )
19 |
20 | object CliOptions {
21 | val default = CliOptions()
22 | }
23 |
--------------------------------------------------------------------------------
/integrations/src/main/scala/profiling/integrations/caseapp/Commands.scala:
--------------------------------------------------------------------------------
1 | package profiling.integrations.caseapp
2 |
3 | // import java.nio.file.Path
4 | // import caseapp.{ArgsName, ExtraName, HelpMessage, Hidden}
5 |
6 | import caseapp.{CommandName, Recurse}
7 |
8 | object Commands {
9 |
10 | /* sealed abstract class Mode(val name: String)
11 |
12 | /** The kind of items that should be returned for autocompletion */
13 | object Mode {
14 | case object Commands extends Mode("commands")
15 | case object Projects extends Mode("projects")
16 | case object ProjectBoundCommands extends Mode("project-commands")
17 | case object Flags extends Mode("flags")
18 | case object Reporters extends Mode("reporters")
19 | case object Protocols extends Mode("protocols")
20 | case object TestsFQCN extends Mode("testsfqcn")
21 | case object MainsFQCN extends Mode("mainsfqcn")
22 |
23 | implicit val completionModeRead: ArgParser[Mode] = ???
24 | }
25 |
26 | sealed abstract class BspProtocol(val name: String)
27 |
28 | object BspProtocol {
29 | case object Local extends BspProtocol("local")
30 | case object Tcp extends BspProtocol("tcp")
31 |
32 | implicit val bspProtocolRead: ArgParser[BspProtocol] = ???
33 | }
34 |
35 | sealed abstract class ReporterKind(val name: String)
36 | case object ScalacReporter extends ReporterKind("scalac")
37 | case object BloopReporter extends ReporterKind("bloop")*/
38 |
39 | sealed trait RawCommand {
40 | def cliOptions: CliOptions
41 | }
42 |
43 | sealed trait CompilingCommand extends RawCommand {
44 | // def project: String
45 | // def reporter: ReporterKind
46 | }
47 |
48 | /* sealed trait Tree[A]
49 | case class Leaf[A](value: A) extends Tree[A]
50 | case class Branch[A](
51 | left: Tree[A],
52 | right: Tree[A]
53 | ) extends Tree[A]*/
54 |
55 | case class Help(
56 | @Recurse cliOptions: CliOptions = CliOptions.default
57 | ) extends RawCommand
58 |
59 | case class Autocomplete(
60 | @Recurse cliOptions: CliOptions = CliOptions.default,
61 | // mode: Mode,
62 | // format: Format,
63 | /* command: Option[String],
64 | project: Option[String]*/
65 | ) extends RawCommand
66 |
67 | case class About(
68 | @Recurse cliOptions: CliOptions = CliOptions.default
69 | ) extends RawCommand
70 |
71 | case class Projects(
72 | /* @ExtraName("dot")
73 | @HelpMessage("Print out a dot graph you can pipe into `dot`. By default, false.")
74 | dotGraph: Boolean = false,*/
75 | @Recurse cliOptions: CliOptions = CliOptions.default
76 | ) extends RawCommand
77 |
78 | case class Configure(
79 | /* @ExtraName("parallelism")
80 | @HelpMessage("Set the number of threads used for parallel compilation and test execution.")
81 | threads: Int = 4,*/
82 | @Recurse cliOptions: CliOptions = CliOptions.default
83 | ) extends RawCommand
84 |
85 | case class Clean(
86 | /* @ExtraName("p")
87 | @HelpMessage("The projects to clean.")
88 | project: List[String] = Nil,
89 | @HelpMessage("Do not run clean for dependencies. By default, false.")
90 | isolated: Boolean = false,*/
91 | @Recurse cliOptions: CliOptions = CliOptions.default,
92 | ) extends RawCommand
93 |
94 | @CommandName("bsp")
95 | case class Bsp(
96 | /*/* @ExtraName("p")
97 | @HelpMessage("The connection protocol for the bsp server. By default, local.")
98 | protocol: BspProtocol = BspProtocol.Local,*/
99 | @ExtraName("h")
100 | @HelpMessage("The server host for the bsp server (TCP only).")
101 | host: String = "127.0.0.1",
102 | @HelpMessage("The port for the bsp server (TCP only).")
103 | port: Int = 5101,
104 | @ExtraName("s")
105 | @HelpMessage("A path to a socket file to communicate through Unix sockets (local only).")
106 | socket: Option[Path] = None,
107 | @ExtraName("pn")
108 | @HelpMessage(
109 | "A path to a new existing socket file to communicate through Unix sockets (local only)."
110 | )
111 | pipeName: Option[String] = None,*/
112 | @Recurse cliOptions: CliOptions = CliOptions.default
113 | ) extends RawCommand
114 |
115 | case class Compile(
116 | /* @ExtraName("p")
117 | @HelpMessage("The project to compile (will be inferred from remaining cli args).")
118 | project: String = "",
119 | @HelpMessage("Compile the project incrementally. By default, true.")
120 | incremental: Boolean = true,
121 | /* @HelpMessage("Pick reporter to show compilation messages. By default, bloop's used.")
122 | reporter: ReporterKind = BloopReporter,*/
123 | @ExtraName("w")
124 | @HelpMessage("Run the command when projects' source files change. By default, false.")
125 | watch: Boolean = false,*/
126 | @Recurse cliOptions: CliOptions = CliOptions.default,
127 | ) extends CompilingCommand
128 |
129 | case class Test(
130 | /* @ExtraName("p")
131 | @HelpMessage("The project to test (will be inferred from remaining cli args).")
132 | project: String = "",
133 | @HelpMessage("Do not run tests for dependencies. By default, false.")
134 | isolated: Boolean = false,
135 | @ExtraName("o")
136 | @HelpMessage("The list of test suite filters to test for only.")
137 | only: List[String] = Nil,
138 | @HelpMessage("The arguments to pass in to the test framework.")
139 | args: List[String] = Nil,
140 | /* @HelpMessage("Pick reporter to show compilation messages. By default, bloop's used.")
141 | reporter: ReporterKind = BloopReporter,*/
142 | @ExtraName("w")
143 | @HelpMessage("Run the command when projects' source files change. By default, false.")
144 | watch: Boolean = false,*/
145 | @Recurse cliOptions: CliOptions = CliOptions.default
146 | ) extends CompilingCommand
147 |
148 | case class Console(
149 | /* @ExtraName("p")
150 | @HelpMessage("The project to run the console at (will be inferred from remaining cli args).")
151 | project: String = "",
152 | /* @HelpMessage("Pick reporter to show compilation messages. By default, bloop's used.")
153 | reporter: ReporterKind = BloopReporter,*/
154 | @HelpMessage("Start up the console compiling only the target project's dependencies.")
155 | excludeRoot: Boolean = false,*/
156 | @Recurse cliOptions: CliOptions = CliOptions.default
157 | ) extends CompilingCommand
158 |
159 | case class Run(
160 | /* @ExtraName("p")
161 | @HelpMessage("The project to run (will be inferred from remaining cli args).")
162 | project: String = "",
163 | @ExtraName("m")
164 | @HelpMessage("The main class to run. Leave unset to let bloop select automatically.")
165 | main: Option[String] = None,
166 | /* @HelpMessage("Pick reporter to show compilation messages. By default, bloop's used.")
167 | reporter: ReporterKind = BloopReporter,*/
168 | @HelpMessage("The arguments to pass in to the main class.")
169 | args: List[String] = Nil,
170 | @ExtraName("w")
171 | @HelpMessage("If set, run the command whenever projects' source files change.")
172 | watch: Boolean = false,*/
173 | @Recurse cliOptions: CliOptions = CliOptions.default
174 | ) extends CompilingCommand
175 | }
176 |
--------------------------------------------------------------------------------
/integrations/src/main/scala/profiling/integrations/caseapp/CommonOptions.scala:
--------------------------------------------------------------------------------
1 | package profiling.integrations.caseapp
2 |
3 | import java.io.{InputStream, PrintStream}
4 | import java.util.Properties
5 |
6 | import caseapp.Hidden
7 |
8 | /**
9 | * Describes the common options for any command or CLI operation.
10 | *
11 | * They exist for two purposes: testing and nailgun. In both cases we
12 | * need a precise handling of these parameters because they change
13 | * depending on the environment we're running on.
14 | *
15 | * They are hidden because they are optional.
16 | */
17 | case class CommonOptions(
18 | @Hidden workingDirectory: String = System.getProperty("user.dir"),
19 | @Hidden out: PrintStream = System.out,
20 | @Hidden in: InputStream = System.in,
21 | @Hidden err: PrintStream = System.err,
22 | @Hidden ngout: PrintStream = System.out,
23 | @Hidden ngerr: PrintStream = System.err
24 | )
25 |
26 | object CommonOptions {
27 | final val default = CommonOptions()
28 |
29 | // Our own version of properties in which we override `toString`
30 | final class PrettyProperties extends Properties {
31 | override def toString: String = synchronized {
32 | super.keySet().toArray.map(_.toString).mkString(", ")
33 | }
34 | }
35 |
36 | final lazy val currentEnv: PrettyProperties = {
37 | import scala.collection.JavaConverters._
38 | System.getenv().asScala.foldLeft(new PrettyProperties()) {
39 | case (props, (key, value)) => props.setProperty(key, value); props
40 | }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/integrations/src/main/scala/profiling/integrations/caseapp/Parsers.scala:
--------------------------------------------------------------------------------
1 | package profiling.integrations.caseapp
2 |
3 | import java.io.{InputStream, PrintStream}
4 | import java.nio.file.{Path, Paths}
5 |
6 | import CommonOptions.PrettyProperties
7 | import caseapp.core.default.Default
8 | import caseapp.core.Error.Other
9 |
10 | import caseapp.core.argparser.{ArgParser, SimpleArgParser}
11 | import caseapp.core.parser.Parser
12 | import shapeless.{LabelledGeneric, HList}
13 |
14 | import scala.util.Try
15 |
16 | trait CachedImplicits {
17 | implicit val inputStreamRead: ArgParser[InputStream] =
18 | SimpleArgParser.from[InputStream]("stdin")(_ => Right(System.in))
19 | implicit val printStreamRead: ArgParser[PrintStream] =
20 | SimpleArgParser.from[PrintStream]("stdout")(_ => Right(System.out))
21 |
22 | implicit val pathParser: ArgParser[Path] = SimpleArgParser.from("A filepath parser") {
23 | case supposedPath: String =>
24 | val toPath = Try(Paths.get(supposedPath)).toEither
25 | toPath.left.map(t =>
26 | Other(s"The provided path ${supposedPath} is not valid: '${t.getMessage()}'.")
27 | )
28 | }
29 |
30 | implicit val propertiesParser: ArgParser[PrettyProperties] = {
31 | SimpleArgParser.from("A properties parser") { _ =>
32 | Left(Other("You cannot pass in properties through the command line."))
33 | }
34 | }
35 |
36 | implicit val implicitHNil: shapeless.HNil = HList.apply()
37 |
38 | implicit val implicitOptionDefaultString: Option[Default[String]] =
39 | Some(Default(""))
40 |
41 | implicit val implicitOptionDefaultInt: Option[Default[Int]] =
42 | Some(Default(0))
43 |
44 | implicit val implicitOptionDefaultBoolean: Option[Default[Boolean]] =
45 | Some(Default(true))
46 |
47 | implicit val implicitDefaultBoolean: Default[Boolean] =
48 | Default(true)
49 |
50 | implicit val implicitOptionDefaultOptionPath: Option[Default[Option[Path]]] =
51 | Some(Default(None))
52 |
53 | implicit val implicitOptionDefaultPrintStream: Option[Default[PrintStream]] =
54 | Some(Default[PrintStream](System.out))
55 |
56 | implicit val implicitOptionDefaultInputStream: Option[Default[InputStream]] =
57 | Some(Default[InputStream](System.in))
58 | }
59 |
60 | object Parsers extends CachedImplicits {
61 |
62 | implicit val labelledGenericCommonOptions: LabelledGeneric.Aux[CommonOptions, _] =
63 | LabelledGeneric.materializeProduct
64 | implicit val commonOptionsParser: Parser.Aux[CommonOptions, _] = Parser.derive
65 | implicit val labelledGenericCliOptions: LabelledGeneric.Aux[CliOptions, _] =
66 | LabelledGeneric.materializeProduct
67 | implicit val cliOptionsParser: Parser.Aux[CliOptions, _] = Parser.derive
68 |
69 | implicit val strictAutocompleteParser: Parser.Aux[Commands.Autocomplete, _] = Parser.derive
70 | implicit val strictAboutParser: Parser.Aux[Commands.About, _] = Parser.derive
71 | implicit val strictBspParser: Parser.Aux[Commands.Bsp, _] = Parser.derive
72 | implicit val strictCleanParser: Parser.Aux[Commands.Clean, _] = Parser.derive
73 | implicit val strictCompileParser: Parser.Aux[Commands.Compile, _] = Parser.derive
74 | implicit val strictConfigureParser: Parser.Aux[Commands.Configure, _] = Parser.derive
75 | implicit val strictConsoleParser: Parser.Aux[Commands.Console, _] = Parser.derive
76 | implicit val strictHelpParser: Parser.Aux[Commands.Help, _] = Parser.derive
77 | implicit val strictProjectsParser: Parser.Aux[Commands.Projects, _] = Parser.derive
78 | implicit val strictRunParser: Parser.Aux[Commands.Run, _] = Parser.derive
79 | implicit val strictTestParser: Parser.Aux[Commands.Test, _] = Parser.derive
80 |
81 | }
82 |
83 | object Main extends App {
84 | println("Hello World!")
85 | }
86 |
--------------------------------------------------------------------------------
/plugin/src/main/resources/scalac-plugin.xml:
--------------------------------------------------------------------------------
1 | <plugin>
2 |   <name>scalac-profiling</name>
3 |   <classname>ch.epfl.scala.ProfilingPlugin</classname>
4 | </plugin>
5 |
--------------------------------------------------------------------------------
/plugin/src/main/scala-2.12.18/ch/epfl/scala/profilers/tools/SettingsOps.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | import scala.tools.nsc.Global
4 |
5 | object SettingsOps {
6 | def areStatisticsEnabled(g: Global): Boolean =
7 | g.settings.areStatisticsEnabled
8 | }
9 |
--------------------------------------------------------------------------------
/plugin/src/main/scala-2.12.19/ch/epfl/scala/profilers/tools/SettingsOps.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | import scala.tools.nsc.Global
4 |
5 | object SettingsOps {
6 | def areStatisticsEnabled(g: Global): Boolean =
7 | g.settings.areStatisticsEnabled
8 | }
9 |
--------------------------------------------------------------------------------
/plugin/src/main/scala-2.12.20/ch/epfl/scala/profilers/tools/SettingsOps.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | import scala.tools.nsc.Global
4 |
5 | object SettingsOps {
6 | def areStatisticsEnabled(g: Global): Boolean =
7 | g.settings.areStatisticsEnabled
8 | }
9 |
--------------------------------------------------------------------------------
/plugin/src/main/scala-2.12/ch/epfl/scala/profilers/tools/ScalaSettingsOps.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | object ScalaSettingsOps {
4 | def isScala212: Boolean = true
5 | def isScala213: Boolean = false
6 | }
7 |
--------------------------------------------------------------------------------
/plugin/src/main/scala-2.13.14/ch/epfl/scala/profilers/tools/SettingsOps.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | import scala.tools.nsc.Global
4 |
5 | object SettingsOps {
6 | def areStatisticsEnabled(g: Global): Boolean =
7 | g.settings.areStatisticsEnabled
8 | }
9 |
--------------------------------------------------------------------------------
/plugin/src/main/scala-2.13.15/ch/epfl/scala/profilers/tools/SettingsOps.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | import scala.tools.nsc.Global
4 |
5 | object SettingsOps {
6 | def areStatisticsEnabled(g: Global): Boolean =
7 | g.settings.areStatisticsEnabled
8 | }
9 |
--------------------------------------------------------------------------------
/plugin/src/main/scala-2.13.16/ch/epfl/scala/profilers/tools/SettingsOps.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | import scala.tools.nsc.Global
4 |
5 | object SettingsOps {
6 | def areStatisticsEnabled(g: Global): Boolean =
7 | g.settings.areStatisticsEnabled
8 | }
9 |
--------------------------------------------------------------------------------
/plugin/src/main/scala-2.13/ch/epfl/scala/profilers/tools/ScalaSettingsOps.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | object ScalaSettingsOps {
4 | def isScala212: Boolean = false
5 | def isScala213: Boolean = true
6 | }
7 |
--------------------------------------------------------------------------------
/plugin/src/main/scala/ch/epfl/scala/ImplicitSearchDebugInfo.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala
2 |
3 | final case class ImplicitSearchDebugInfo private (firings: Int, sourceFiles: List[String])
4 |
5 | object ImplicitSearchDebugInfo {
6 | def apply(firings: Int, sourceFiles: List[String]): Option[ImplicitSearchDebugInfo] =
7 | if (firings > 0 && sourceFiles.nonEmpty)
8 | Some(new ImplicitSearchDebugInfo(firings, sourceFiles))
9 | else
10 | None
11 | }
12 |
--------------------------------------------------------------------------------
/plugin/src/main/scala/ch/epfl/scala/PluginConfig.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala
2 |
3 | import ch.epfl.scala.profiledb.utils.AbsolutePath
4 |
5 | final case class PluginConfig(
6 | showProfiles: Boolean,
7 | generateDb: Boolean,
8 | sourceRoot: AbsolutePath,
9 | crossTarget: AbsolutePath,
10 | printSearchIds: Set[Int],
11 | generateMacroFlamegraph: Boolean,
12 | generateGlobalFlamegraph: Boolean,
13 | printFailedMacroImplicits: Boolean,
14 | concreteTypeParamsInImplicits: Boolean
15 | )
16 |
--------------------------------------------------------------------------------
/plugin/src/main/scala/ch/epfl/scala/ProfilingPlugin.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | package ch.epfl.scala
11 |
12 | import java.nio.file.Files
13 | import ch.epfl.scala.profiledb.{ProfileDb, ProfileDbPath}
14 | import ch.epfl.scala.profiledb.utils.{AbsolutePath, RelativePath}
15 | import ch.epfl.scala.profilers.ProfilingImpl
16 | import ch.epfl.scala.profilers.tools.{Logger, ScalaSettingsOps, SettingsOps}
17 |
18 | import scala.reflect.internal.util.{NoPosition, SourceFile, Statistics}
19 | import scala.reflect.io.Path
20 | import scala.tools.nsc.Reporting.WarningCategory
21 | import scala.tools.nsc.io.AbstractFile
22 | import scala.tools.nsc.{Global, Phase}
23 | import scala.tools.nsc.plugins.{Plugin, PluginComponent}
24 | import scala.util.Try
25 | import scala.util.matching.Regex
26 |
27 | class ProfilingPlugin(val global: Global) extends Plugin { self =>
28 | // Every definition used at init needs to be lazy otherwise it slays the compiler
29 | val name = "scalac-profiling"
30 | val description = "Adds instrumentation to keep an eye on Scalac performance."
31 | val components = List[PluginComponent](ProfilingComponent)
32 |
33 | private final lazy val ShowProfiles = "show-profiles"
34 | private final lazy val SourceRoot = "sourceroot"
35 | private final lazy val CrossTarget = "cross-target"
36 | private final lazy val PrintSearchResult = "print-search-result"
37 | private final lazy val GenerateMacroFlamegraph = "generate-macro-flamegraph"
38 | private final lazy val GenerateGlobalFlamegraph = "generate-global-flamegraph"
39 | private final lazy val PrintFailedMacroImplicits = "print-failed-implicit-macro-candidates"
40 | private final lazy val GenerateProfileDb = "generate-profiledb"
41 | private final lazy val ShowConcreteImplicitTparams = "show-concrete-implicit-tparams"
42 | private final lazy val PrintSearchRegex = s"$PrintSearchResult:(.*)".r
43 | private final lazy val SourceRootRegex = s"$SourceRoot:(.*)".r
44 | private final lazy val CrossTargetRegex = s"$CrossTarget:(.*)".r
45 |
46 | def findOption(name: String, pattern: Regex): Option[String] = {
47 | super.options.find(_.startsWith(name)).flatMap {
48 | case pattern(matched) => Some(matched)
49 | case _ => None
50 | }
51 | }
52 |
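  // Parses a comma-separated list of implicit search ids, e.g. "1,2,3" -> Set(1, 2, 3)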
53 | def findSearchIds(userOption: Option[String]): Set[Int] = {
54 | userOption match {
55 | case Some(value) => value.split(",", Int.MaxValue).map(_.toInt).toSet
56 | case None => Set.empty
57 | }
58 | }
59 |
60 | private final lazy val config = {
61 | val sourceRoot = findOption(SourceRoot, SourceRootRegex)
62 | .map(AbsolutePath.apply)
63 | .getOrElse(AbsolutePath.workingDirectory)
64 | val crossTarget = findOption(CrossTarget, CrossTargetRegex)
65 | .map(AbsolutePath.apply)
66 | .getOrElse {
67 | val scalaDir =
68 | if (ScalaSettingsOps.isScala212)
69 | "scala-2.12"
70 | else if (ScalaSettingsOps.isScala213)
71 | "scala-2.13"
72 | else
73 | sys.error(
74 | s"Currently, only Scala 2.12 and 2.13 are supported, " +
75 | s"but [${global.settings.source.value}] has been spotted"
76 | )
77 |
78 | sourceRoot.resolve(RelativePath(s"target/$scalaDir"))
79 | }
80 |
81 | PluginConfig(
82 | showProfiles = super.options.contains(ShowProfiles),
83 | generateDb = super.options.contains(GenerateProfileDb),
84 | sourceRoot = sourceRoot,
85 | crossTarget = crossTarget,
86 | printSearchIds = findSearchIds(findOption(PrintSearchResult, PrintSearchRegex)),
87 | generateMacroFlamegraph = super.options.contains(GenerateMacroFlamegraph),
88 | generateGlobalFlamegraph = super.options.contains(GenerateGlobalFlamegraph),
89 | printFailedMacroImplicits = super.options.contains(PrintFailedMacroImplicits),
90 | concreteTypeParamsInImplicits = super.options.contains(ShowConcreteImplicitTparams)
91 | )
92 | }
93 |
94 | private lazy val logger = new Logger(global)
95 |
96 | private def pad20(option: String): String = option + (" " * (20 - option.length))
97 |
98 | override def init(ops: List[String], e: (String) => Unit): Boolean = true
99 |
100 | // format: off
101 | override val optionsHelp: Option[String] = Some(
102 | s"""
103 | |-P:$name:${pad20(GenerateGlobalFlamegraph)} Creates a global flamegraph of implicit searches for all compilation units. Use the `-P:$name:$CrossTarget` option to manage the target directory for the resulting flamegraph file, otherwise, the SBT target directory will be picked.
104 | |-P:$name:${pad20(GenerateMacroFlamegraph)} Generates a flamegraph for macro expansions. The flamegraph for implicit searches is enabled by default.
105 | |-P:$name:${pad20(GenerateProfileDb)} Generates profiledb (will be removed later).
106 | |-P:$name:${pad20(PrintFailedMacroImplicits)} Prints trees of all failed implicit searches that triggered a macro expansion.
107 | |-P:$name:${pad20(PrintSearchResult)}:_ Print implicit search result trees for a list of search ids separated by a comma.
108 | |-P:$name:${pad20(ShowConcreteImplicitTparams)} Shows types in flamegraphs of implicits with concrete type params.
109 | |-P:$name:${pad20(ShowProfiles)} Logs profile information for every call-site.
110 | |-P:$name:${pad20(SourceRoot)}:_ Sets the source root for this project.
111 | |-P:$name:${pad20(CrossTarget)}:_ Sets the cross target for this project.
112 | """.stripMargin
113 | ) // format: on
114 |
115 | lazy val implementation = new ProfilingImpl(ProfilingPlugin.this.global, config, logger)
116 | implementation.registerProfilers()
117 |
118 | private object ProfilingComponent extends PluginComponent {
119 | override val global: implementation.global.type = implementation.global
120 | override val phaseName: String = "scalacenter-profiling"
121 | override val runsAfter: List[String] = List("jvm")
122 | override val runsBefore: List[String] = List("terminal")
123 |
124 | private def showExpansion(expansion: (global.Tree, Int)): (String, Int) =
125 | global.showCode(expansion._1) -> expansion._2
126 |
127 |     // This is just for display purposes
128 | import scala.collection.mutable.LinkedHashMap
129 | private def toLinkedHashMap[K, V](xs: Seq[(K, V)]): LinkedHashMap[K, V] = {
130 | val builder = LinkedHashMap.newBuilder[K, V]
131 | builder.++=(xs)
132 | builder.result()
133 | }
134 |
135 | private def reportStatistics(graphsPath: AbsolutePath): Unit = {
136 | val globalDir =
137 | if (config.generateGlobalFlamegraph) {
138 | val globalDir =
139 | ProfileDbPath.toGraphsProfilePath(
140 | config.crossTarget.resolve(RelativePath("classes"))
141 | )
142 |
143 | Some(globalDir)
144 | } else None
145 |
146 | val persistedGraphData = implementation.generateGraphData(graphsPath, globalDir)
147 | persistedGraphData.foreach(p => logger.info(s"Writing graph to ${p.underlying}"))
148 |
149 | if (config.showProfiles) {
150 | val macroProfiler = implementation.macroProfiler
151 |
152 | logger.info("Macro data per call-site", macroProfiler.perCallSite)
153 | logger.info("Macro data per file", macroProfiler.perFile)
154 | logger.info("Macro data in total", macroProfiler.inTotal)
155 | val expansions = macroProfiler.repeatedExpansions.map(showExpansion)
156 | logger.info("Macro repeated expansions", expansions)
157 |
158 | val macrosType = implementation.macrosByType.toList.sortBy(_._2)
159 | val macrosTypeLines = global.exitingTyper(macrosType.map(kv => kv._1.toString -> kv._2))
160 | logger.info("Macro expansions by type", toLinkedHashMap(macrosTypeLines))
161 |
162 | val implicitSearchesPosition = toLinkedHashMap(
163 | implementation.implicitSearchesByPos.toList.sortBy(_._2)
164 | )
165 | logger.info("Implicit searches by position", implicitSearchesPosition)
166 |
167 | val sortedImplicitSearches =
168 | implementation.implicitSearchesSourceFilesByType.toVector
169 | .flatMap {
170 | case (tpe, sourceFiles) =>
171 | val firings = implementation.implicitSearchesByType.getOrElse(tpe, 0)
172 | val files = sourceFiles.toList.flatMap {
173 | case f if f.length > 0 =>
174 | List(f.path)
175 | case _ =>
176 | List.empty
177 | }
178 |
179 | ImplicitSearchDebugInfo(firings, files).map(tpe -> _)
180 | }
181 | .sortBy(_._2.firings)
182 | // Make sure to stringify types right after typer to avoid compiler crashes
183 | val stringifiedSortedImplicitSearches =
184 | global.exitingTyper(
185 | sortedImplicitSearches
186 | .map(kv => kv._1.toString() -> kv._2)
187 | )
188 | logger.info("Implicit searches by type", toLinkedHashMap(stringifiedSortedImplicitSearches))
189 | }
190 | }
191 |
192 | import com.google.protobuf.duration.Duration
193 | import com.google.protobuf.timestamp.Timestamp
194 | import ch.epfl.scala.profiledb.{profiledb => schema}
195 | private final val nanoScale: Int = 1000000000
196 |
197 | private def toDuration(nanos: Long): Duration = {
198 | val seconds: Long = nanos / nanoScale
199 | val remainder: Int = (nanos % nanoScale).toInt
200 | Duration(seconds = seconds, nanos = remainder)
201 | }
202 |
203 | private lazy val getCurrentTimestamp: Timestamp = {
204 | val duration = toDuration(System.nanoTime())
205 | Timestamp(seconds = duration.seconds, nanos = duration.nanos)
206 | }
207 |
208 | private def toGlobalDatabase(statistics: Statistics): schema.Database = {
209 | import statistics.{Timer, Counter}
210 | def toSchemaTimer(scalacTimer: Timer): schema.Timer = {
211 | val id = scalacTimer.prefix
212 | val duration = toDuration(scalacTimer.nanos)
213 | schema.Timer(id = id, duration = Some(duration))
214 | }
215 |
216 | def toSchemaCounter(scalacCounter: Counter): schema.Counter = {
217 | val id = scalacCounter.prefix
218 | val ticks = scalacCounter.value.toLong
219 | schema.Counter(id = id, ticks = ticks)
220 | }
221 |
222 | val allScalacPhases = global.phaseDescriptors.map(_.phaseName)
223 | val scalacQuantities = statistics.allQuantities.toList
224 | val quantitiesPerPhase =
225 | allScalacPhases.map(phase => phase -> scalacQuantities.filter(_.showAt(phase)))
226 | val phaseProfiles = quantitiesPerPhase.map {
227 | case (phaseName, phaseQuantities) =>
228 | val timers = phaseQuantities.collect { case t: Timer => t }.map(toSchemaTimer)
229 | val counters = phaseQuantities.collect { case c: Counter => c }.map(toSchemaCounter)
230 | schema.PhaseProfile(name = phaseName, timers = timers, counters = counters)
231 | }
232 |
233 | val timestamp = Some(getCurrentTimestamp)
234 | val runProfile = Some(schema.RunProfile(phaseProfiles = phaseProfiles))
235 | val entry = schema.DatabaseEntry(
236 | timestamp = timestamp,
237 | runProfile = runProfile,
238 | compilationUnitProfile = None
239 | )
240 | schema.Database(
241 | `type` = schema.ContentType.GLOBAL,
242 | entries = List(entry)
243 | )
244 | }
245 |
246 | private def getOutputDirFor(absFile: AbstractFile): Path = Path {
247 | val outputPath = global.settings.outputDirs.outputDirFor(absFile).path
248 | if (outputPath.isEmpty) "." else outputPath
249 | }
250 |
251 | private def dbPathFor(sourceFile: SourceFile): Option[ProfileDbPath] = {
252 | val absoluteSourceFile = AbsolutePath(sourceFile.file.path)
253 | val targetPath = absoluteSourceFile.toRelative(config.sourceRoot)
254 | if (targetPath.syntax.endsWith(".scala")) {
255 | val outputDir = getOutputDirFor(sourceFile.file)
256 | val absoluteOutput = AbsolutePath(outputDir.jfile)
257 | val dbTargetPath = ProfileDbPath.toProfileDbPath(targetPath)
258 | Some(ProfileDbPath(absoluteOutput, dbTargetPath))
259 | } else None
260 | }
261 |
262 | private final val EmptyDuration = Duration.defaultInstance
263 | private def profileDbEntryFor(sourceFile: SourceFile): schema.DatabaseEntry = {
264 | import scala.reflect.internal.util.Position
265 | import implementation.{MacroInfo, ImplicitInfo}
266 |
267 | def perFile[V](ps: Map[Position, V]): Map[Position, V] =
268 | ps.collect { case t @ (pos, _) if pos.source == sourceFile => t }
269 |
270 | def toPos(pos: Position): schema.Position = {
271 | val point = pos.point
272 | val line = pos.line
273 | val column = pos.column
274 | schema.Position(point = point, line = line, column = column)
275 | }
276 |
277 | def toMacroProfile(pos: Position, info: MacroInfo): schema.MacroProfile = {
278 | val currentPos = Some(toPos(pos))
279 | val expandedMacros = info.expandedMacros.toLong
280 | val approximateSize = info.expandedNodes.toLong
281 | val duration = Some(toDuration(info.expansionNanos))
282 | schema.MacroProfile(
283 | position = currentPos,
284 | expandedMacros = expandedMacros,
285 | approximateSize = approximateSize,
286 | duration = duration
287 | )
288 | }
289 |
290 | def toImplicitProfile(pos: Position, info: ImplicitInfo): schema.ImplicitSearchProfile = {
291 | val currentPos = Some(toPos(pos))
292 | val searches = info.count.toLong
293 | val duration = Some(EmptyDuration)
294 | schema.ImplicitSearchProfile(
295 | position = currentPos,
296 | searches = searches,
297 | duration = duration
298 | )
299 | }
300 |
301 | val macroProfiles = perFile(implementation.macroProfiler.perCallSite)
302 | .map { case (pos: Position, info: MacroInfo) => toMacroProfile(pos, info) }
303 | val implicitSearchProfiles = perFile(implementation.implicitProfiler.perCallSite)
304 | .map { case (pos: Position, info: ImplicitInfo) => toImplicitProfile(pos, info) }
305 |
306 | val timestamp = Some(getCurrentTimestamp)
307 | val compilationUnitProfile = Some(
308 | schema.CompilationUnitProfile(
309 | macroProfiles = macroProfiles.toList,
310 | implicitSearchProfiles = implicitSearchProfiles.toList
311 | )
312 | )
313 | schema.DatabaseEntry(timestamp = timestamp, compilationUnitProfile = compilationUnitProfile)
314 | }
315 |
316 | def writeDatabase(db: schema.Database, path: ProfileDbPath): Try[schema.Database] = {
317 | if (Files.exists(path.target.underlying)) {
318 | ProfileDb.read(path).flatMap { oldDb =>
319 | val oldDbType = oldDb.`type`
320 | val newDbType = db.`type`
321 | if (
322 | oldDbType.isGlobal && newDbType.isGlobal ||
323 | (oldDbType.isPerCompilationUnit && newDbType.isPerCompilationUnit)
324 | ) {
325 | val updatedDb = oldDb.addAllEntries(db.entries)
326 | ProfileDb.write(updatedDb, path)
327 | } else Try(sys.error(s"Db type mismatch: $newDbType != $oldDbType"))
328 | }
329 | } else ProfileDb.write(db, path)
330 | }
331 |
332 | lazy val globalOutputDir = AbsolutePath(
333 | new java.io.File(
334 | global.settings.outputDirs.getSingleOutput
335 | .map(_.file.getAbsolutePath)
336 | .getOrElse(global.settings.outdir.value)
337 | )
338 | )
339 |
340 | private final val PerCompilationUnit = schema.ContentType.PER_COMPILATION_UNIT
341 | override def newPhase(prev: Phase): Phase = {
342 | new StdPhase(prev) {
343 | override def apply(unit: global.CompilationUnit): Unit = {
344 | if (
345 | SettingsOps.areStatisticsEnabled(global) &&
346 | config.generateDb
347 | ) {
348 | val currentSourceFile = unit.source
349 | val compilationUnitEntry = profileDbEntryFor(currentSourceFile)
350 | dbPathFor(currentSourceFile) match {
351 | case Some(profileDbPath) =>
352 | val canonicalTarget = profileDbPath.target.underlying.normalize()
353 | logger.debug(s"Creating profiledb for ${canonicalTarget}")
354 | val freshDatabase =
355 | schema.Database(`type` = PerCompilationUnit, entries = List(compilationUnitEntry))
356 | writeDatabase(freshDatabase, profileDbPath).failed
357 | .foreach(t => global.globalError(s"I/O profiledb error: ${t.getMessage}"))
358 | case None => global.globalError(s"Could not write profiledb for $currentSourceFile.")
359 | }
360 | }
361 | }
362 |
363 | override def run(): Unit = {
364 | super.run()
365 |
366 | if (!SettingsOps.areStatisticsEnabled(global)) {
367 | val flagName = global.settings.Ystatistics.name
368 | global.runReporting.warning(
369 | NoPosition,
370 | s"`${self.name}` compiler plugin requires the option `$flagName` to be enabled",
371 | WarningCategory.OtherDebug,
372 | ""
373 | )
374 | }
375 |
376 | val graphsRelativePath = ProfileDbPath.GraphsProfileDbRelativePath
377 | val graphsDir = globalOutputDir.resolve(graphsRelativePath)
378 | reportStatistics(graphsDir)
379 |
380 | if (config.generateDb) {
381 | val globalDatabase = toGlobalDatabase(global.statistics)
382 | val globalRelativePath = ProfileDbPath.GlobalProfileDbRelativePath
383 | val globalProfileDbPath = ProfileDbPath(globalOutputDir, globalRelativePath)
384 | val pathToWrite = globalProfileDbPath.target.underlying.toAbsolutePath
385 | logger.info(s"Creating global statistics information at $pathToWrite.")
386 | writeDatabase(globalDatabase, globalProfileDbPath)
387 | }
388 | }
389 | }
390 | }
391 | }
392 | }
393 |
--------------------------------------------------------------------------------
/plugin/src/main/scala/ch/epfl/scala/profilers/ProfilingImpl.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | package ch.epfl.scala.profilers
11 |
12 | import java.nio.file.{Files, Path, StandardOpenOption}
13 |
14 | import ch.epfl.scala.PluginConfig
15 | import ch.epfl.scala.profiledb.utils.AbsolutePath
16 | import ch.epfl.scala.profilers.tools.{Logger, QuantitiesHijacker, SettingsOps}
17 |
18 | import scala.tools.nsc.Global
19 | import scala.reflect.internal.util.SourceFile
20 |
21 | final class ProfilingImpl[G <: Global](
22 | override val global: G,
23 | config: PluginConfig,
24 | logger: Logger[G]
25 | ) extends ProfilingStats {
26 | import global._
27 |
28 | def registerProfilers(): Unit = {
29 | // Register our profiling macro plugin
30 | analyzer.addMacroPlugin(ProfilingMacroPlugin)
31 | analyzer.addAnalyzerPlugin(ProfilingAnalyzerPlugin)
32 | }
33 |
34 | /**
35 | * Represents the profiling information about expanded macros.
36 | *
37 | * Note that we could derive the value of expanded macros from the
38 |    * number of instances of [[MacroInfo]] if it were not for the fact
39 | * that a macro can expand in the same position more than once. We
40 | * want to be able to report/analyse such cases on their own, so
41 |    * we keep it as a parameter of this entity.
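   *
   * @example Two infos for the same call-site combine additively, e.g.
   * {{{
   * MacroInfo(1, 10, 100L) + MacroInfo(2, 5, 50L) == MacroInfo(3, 15, 150L)
   * }}}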
42 | */
43 | case class MacroInfo(expandedMacros: Int, expandedNodes: Int, expansionNanos: Long) {
44 | def +(other: MacroInfo): MacroInfo = {
45 | val totalExpanded = expandedMacros + other.expandedMacros
46 | val totalNodes = expandedNodes + other.expandedNodes
47 | val totalTime = expansionNanos + other.expansionNanos
48 | MacroInfo(totalExpanded, totalNodes, totalTime)
49 | }
50 | }
51 |
52 | object MacroInfo {
53 | final val Empty = MacroInfo(0, 0, 0L)
54 | implicit val macroInfoOrdering: Ordering[MacroInfo] = Ordering.by(_.expansionNanos)
55 | def aggregate(infos: Iterator[MacroInfo]): MacroInfo = {
56 | infos.foldLeft(MacroInfo.Empty)(_ + _)
57 | }
58 | }
59 |
60 | import scala.reflect.internal.util.SourceFile
61 | case class MacroProfiler(
62 | perCallSite: Map[Position, MacroInfo],
63 | perFile: Map[SourceFile, MacroInfo],
64 | inTotal: MacroInfo,
65 | repeatedExpansions: Map[Tree, Int]
66 | )
67 |
68 | def toMillis(nanos: Long): Long =
69 | java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(nanos)
70 |
71 | def groupPerFile[V](
72 | kvs: Map[Position, V]
73 | )(empty: V, aggregate: (V, V) => V): Map[SourceFile, V] = {
74 | kvs.groupBy(_._1.source).map {
75 | case (sf, posInfos: Map[Position, V]) => sf -> posInfos.valuesIterator.fold(empty)(aggregate)
76 | }
77 | }
78 |
79 | lazy val macroProfiler: MacroProfiler = {
80 | import ProfilingMacroPlugin.macroInfos // , repeatedTrees}
81 | val perCallSite = macroInfos.toMap
82 | val perFile = groupPerFile(perCallSite)(MacroInfo.Empty, _ + _)
83 | val inTotal = MacroInfo.aggregate(perFile.valuesIterator)
84 |
85 | /* val repeated = repeatedTrees.toMap.valuesIterator
86 | .filter(_.count > 1)
87 | .map(v => v.original -> v.count)
88 | .toMap*/
89 |
90 | // perFile and inTotal are already converted to millis
91 | val callSiteNanos = perCallSite
92 | MacroProfiler(callSiteNanos, perFile, inTotal, Map.empty) // repeated)
93 | }
94 |
95 | case class ImplicitInfo(count: Int) {
96 | def +(other: ImplicitInfo): ImplicitInfo = ImplicitInfo(count + other.count)
97 | }
98 |
99 | object ImplicitInfo {
100 | final val Empty = ImplicitInfo(0)
101 | def aggregate(infos: Iterator[ImplicitInfo]): ImplicitInfo = infos.fold(Empty)(_ + _)
102 | implicit val infoOrdering: Ordering[ImplicitInfo] = Ordering.by(_.count)
103 | }
104 |
105 | case class ImplicitProfiler(
106 | perCallSite: Map[Position, ImplicitInfo],
107 | perFile: Map[SourceFile, ImplicitInfo],
108 | perType: Map[Type, ImplicitInfo],
109 | inTotal: ImplicitInfo
110 | )
111 |
112 | lazy val implicitProfiler: ImplicitProfiler = {
113 | val perCallSite = implicitSearchesByPos.map {
114 | case (pos, i) => pos -> ImplicitInfo.apply(i)
115 | }.toMap
116 | val perFile = groupPerFile[ImplicitInfo](perCallSite)(ImplicitInfo.Empty, _ + _)
117 | val perType = implicitSearchesByType.map {
118 | case (pos, i) => pos -> ImplicitInfo.apply(i)
119 | }.toMap
120 | val inTotal = ImplicitInfo.aggregate(perFile.valuesIterator)
121 | ImplicitProfiler(perCallSite, perFile, perType, inTotal)
122 | }
123 |
124 | // Copied from `TypeDiagnostics` to have expanded types in implicit search
125 | private object DealiasedType extends TypeMap {
126 | def apply(tp: Type): Type = tp match {
127 | case TypeRef(pre, sym, _) if sym.isAliasType && !sym.isInDefaultNamespace =>
128 | mapOver(tp.dealias)
129 | case _ => mapOver(tp)
130 | }
131 | }
132 |
133 | def concreteTypeFromSearch(tree: Tree, default: Type): Type = {
134 | tree match {
135 | case EmptyTree => default
136 | case Block(_, expr) => expr.tpe
137 | case Try(block, _, _) =>
138 | block match {
139 | case Block(_, expr) => expr.tpe
140 | case t => t.tpe
141 | }
142 | case t =>
143 | val treeType = t.tpe
144 | if (treeType == null || treeType == NoType) default else treeType
145 | }
146 | }
147 |
148 | def generateGraphData(
149 | outputDir: AbsolutePath,
150 | globalDirMaybe: Option[AbsolutePath]
151 | ): List[AbsolutePath] = {
152 | Files.createDirectories(outputDir.underlying)
153 |
154 | val randomId = java.lang.Long.toString(System.currentTimeMillis())
155 |
156 | /*val dotFile = outputDir.resolve(s"$graphName.dot")
157 | ProfilingAnalyzerPlugin.dottify(graphName, dotFile.underlying)*/
158 |
159 | val implicitFlamegraphFiles = {
160 | val mkImplicitGraphName: String => String =
161 | postfix => s"implicit-searches-$postfix.flamegraph"
162 | val compileUnitFlamegraphFile = outputDir.resolve(mkImplicitGraphName(randomId))
163 |
164 | globalDirMaybe match {
165 | case Some(globalDir) =>
166 | Files.createDirectories(globalDir.underlying)
167 |
168 | val globalFile =
169 | globalDir
170 | .resolve(mkImplicitGraphName("global"))
171 |
172 | List(compileUnitFlamegraphFile, globalFile)
173 |
174 | case None =>
175 | List(compileUnitFlamegraphFile)
176 | }
177 | }
178 |
179 | val macroFlamegraphFiles =
180 | if (config.generateMacroFlamegraph) {
181 | val macroGraphName = s"macros-$randomId"
182 | val file = outputDir.resolve(s"$macroGraphName.flamegraph")
183 | List(file)
184 | } else Nil
185 |
186 | ProfilingAnalyzerPlugin.foldImplicitStacks(implicitFlamegraphFiles)
187 | ProfilingMacroPlugin.foldMacroStacks(macroFlamegraphFiles)
188 |
189 | implicitFlamegraphFiles ::: macroFlamegraphFiles
190 | }
191 |
192 | private val registeredQuantities = QuantitiesHijacker.getRegisteredQuantities(global)
193 | def registerTyperTimerFor(prefix: String): statistics.Timer = {
194 | val typerTimer = statistics.newTimer(prefix, "typer")
195 | registeredQuantities.remove(s"/$prefix")
196 | typerTimer
197 | }
198 |
199 | private def typeToString(`type`: Type): String =
200 | global.exitingTyper(`type`.toLongString).trim
201 |
202 | // Moving this here so that it's accessible to the macro plugin
203 | private type Entry =
204 | (global.analyzer.ImplicitSearch, statistics.TimerSnapshot, statistics.TimerSnapshot)
205 | private var implicitsStack: List[Entry] = Nil
206 |
207 | private object ProfilingAnalyzerPlugin extends global.analyzer.AnalyzerPlugin {
208 | import scala.collection.mutable
209 | private val implicitsTimers = perRunCaches.newAnyRefMap[Type, statistics.Timer]()
210 | private val searchIdsToTargetTypes = perRunCaches.newMap[Int, Type]()
211 | private val stackedNanos = perRunCaches.newMap[Int, (Long, Type)]()
212 | private val stackedNames = perRunCaches.newMap[Int, List[String]]()
213 | private val searchIdsToTimers = perRunCaches.newMap[Int, statistics.Timer]()
214 | private val implicitsDependants = new mutable.AnyRefMap[Type, mutable.HashSet[Type]]()
215 | private val searchIdChildren = perRunCaches.newMap[Int, List[analyzer.ImplicitSearch]]()
216 |
217 | def foldImplicitStacks(outputPaths: Seq[AbsolutePath]): Unit =
218 | if (outputPaths.nonEmpty) {
219 |         // This part is memory-intensive, hence the use of Java collections
220 | val stacksJavaList = new java.util.ArrayList[String]()
221 | stackedNanos.foreach {
222 | case (id, (nanos, _)) =>
223 | val names =
224 | stackedNames.getOrElse(
225 | id,
226 | sys.error(s"Stack name for search id ${id} doesn't exist!")
227 | )
228 | val stackName = names.mkString(";")
229 | // val count = implicitSearchesByType.getOrElse(tpe, sys.error(s"No counter for ${tpe}"))
230 | stacksJavaList.add(s"$stackName ${nanos / 1000}")
231 | }
232 | java.util.Collections.sort(stacksJavaList)
233 |
234 | outputPaths.foreach(path =>
235 | Files.write(
236 | path.underlying,
237 | stacksJavaList,
238 | StandardOpenOption.APPEND,
239 | StandardOpenOption.CREATE
240 | )
241 | )
242 | } else ()
243 |
244 | def dottify(graphName: String, outputPath: Path): Unit = {
245 | def clean(`type`: Type) = typeToString(`type`).replace("\"", "\'")
246 | def qualify(node: String, timing: Long, counter: Int): String = {
247 | val nodeName = node.stripPrefix("\"").stripSuffix("\"")
248 | val style = if (timing >= 500) "style=filled, fillcolor=\"#ea9d8f\"," else ""
249 | s"""$node [${style}label="${nodeName}\\l${counter} times = ${timing}ms"];"""
250 | }
251 |
252 | val nodes = implicitSearchesByType.keys
253 | val nodesIds = nodes.map(`type` => `type` -> s""""${clean(`type`)}"""").toMap
254 | def getNodeId(`type`: Type): String = {
255 | nodesIds.getOrElse(
256 | `type`,
257 | sys.error {
258 | s"""Id for ${`type`} doesn't exist.
259 | |
260 | | Information about the type:
261 | | - `structure` -> ${global.showRaw(`type`)}
262 | | - `safeToString` -> ${`type`.safeToString}
263 | | - `toLongString` after typer -> ${typeToString(`type`)}
264 | | - `typeSymbol` -> ${`type`.typeSymbol}
265 | """.stripMargin
266 | }
267 | )
268 | }
269 |
270 | val connections = for {
271 | (dependee, dependants) <- implicitsDependants.toSet
272 | dependant <- dependants
273 | dependantId = getNodeId(dependant)
274 | dependeeId = getNodeId(dependee)
275 | if dependeeId != dependantId && !dependantId.isEmpty && !dependeeId.isEmpty
276 | } yield s"$dependantId -> $dependeeId;"
277 |
278 | val nodeInfos = nodes.map { `type` =>
279 | val id = getNodeId(`type`)
280 | val timer = getImplicitTimerFor(`type`).nanos / 1000000
281 | val count = implicitSearchesByType.getOrElse(`type`, sys.error(s"No counter for ${`type`}"))
282 | qualify(id, timer, count)
283 | }
284 |
285 | val graph = s"""digraph "$graphName" {
286 | | graph [ranksep=0, rankdir=LR];
287 | |${nodeInfos.mkString(" ", "\n ", "\n ")}
288 | |${connections.mkString(" ", "\n ", "\n ")}
289 | |}""".stripMargin.getBytes
290 | Files.write(outputPath, graph, StandardOpenOption.WRITE, StandardOpenOption.CREATE)
291 | }
292 |
293 | private def getImplicitTimerFor(candidate: Type): statistics.Timer =
294 | implicitsTimers.getOrElse(candidate, sys.error(s"Timer for ${candidate} doesn't exist"))
295 |
296 | private def getSearchTimerFor(searchId: Int): statistics.Timer = {
297 | searchIdsToTimers
298 | .getOrElse(searchId, sys.error(s"Missing non-cumulative timer for $searchId"))
299 | }
300 |
301 | override def pluginsNotifyImplicitSearch(search: global.analyzer.ImplicitSearch): Unit = {
302 | if (SettingsOps.areStatisticsEnabled(global)) {
303 | val targetType = search.pt
304 | val targetPos = search.pos
305 |
306 |       // Stop the timer of the enclosing (dependant) implicit search
307 | implicitsStack.headOption.foreach {
308 | case (search, _, searchStart) =>
309 | val searchTimer = getSearchTimerFor(search.searchId)
310 | statistics.stopTimer(searchTimer, searchStart)
311 | }
312 |
313 | // We add ourselves to the child list of our parent implicit search
314 | implicitsStack.headOption match {
315 | case Some((prevSearch, _, _)) =>
316 | val prevId = prevSearch.searchId
317 | val prevChilds = searchIdChildren.getOrElse(prevId, Nil)
318 | searchIdChildren.update(prevId, search :: prevChilds)
319 | case None => ()
320 | }
321 |
322 | // Create timer and unregister it so that it is invisible in console output
323 | val prefix = s" $targetType"
324 | val perTypeTimer = implicitsTimers
325 | .getOrElseUpdate(targetType, statistics.newTimer(prefix, "typer"))
326 | registeredQuantities.remove(s"/$prefix")
327 |
328 | // Create non-cumulative timer for the search and unregister it too
329 | val searchId = search.searchId
330 | val searchPrefix = s" implicit search ${searchId}"
331 | val searchTimer = registerTyperTimerFor(searchPrefix)
332 | searchIdsToTimers.+=(searchId -> searchTimer)
333 |
334 | // Start the timer as soon as possible
335 | val implicitTypeStart = statistics.startTimer(perTypeTimer)
336 | val searchStart = statistics.startTimer(searchTimer)
337 |
338 | // Update all timers and counters
339 | val typeCounter = implicitSearchesByType.getOrElse(targetType, 0)
340 | implicitSearchesByType.update(targetType, typeCounter + 1)
341 | val posCounter = implicitSearchesByPos.getOrElse(targetPos, 0)
342 | implicitSearchesByPos.update(targetPos, posCounter + 1)
343 |
344 | if (config.showProfiles) {
345 | val sourceFiles =
346 | implicitSearchesSourceFilesByType.getOrElseUpdate(targetType, mutable.HashSet.empty)
347 | if (!sourceFiles.contains(targetPos.source)) {
348 | sourceFiles.add(targetPos.source)
349 | }
350 | }
351 |
352 | if (global.analyzer.openMacros.nonEmpty)
353 | statistics.incCounter(implicitSearchesByMacrosCount)
354 |
355 | searchIdsToTargetTypes.+=((search.searchId, targetType))
356 |
357 | /* // Add dependants once we hit a concrete node
358 | search.context.openImplicits.headOption.foreach { dependant =>
359 | implicitsDependants
360 | .getOrElseUpdate(targetType, new mutable.HashSet())
361 | .+=(dependant.pt)
362 | }*/
363 |
364 | implicitsStack = (search, implicitTypeStart, searchStart) :: implicitsStack
365 | }
366 | }
367 |
368 | override def pluginsNotifyImplicitSearchResult(result: global.analyzer.SearchResult): Unit = {
369 | super.pluginsNotifyImplicitSearchResult(result)
370 | if (SettingsOps.areStatisticsEnabled(global)) {
371 | // 1. Get timer of the running search
372 | val (search, implicitTypeStart, searchStart) = implicitsStack.head
373 | val targetType = search.pt
374 | val timer = getImplicitTimerFor(targetType)
375 |
376 | // 2. Register the timing diff for every stacked name.
377 | def stopTimerFlamegraph(prev: Option[analyzer.ImplicitSearch]): Unit = {
378 | val searchId = search.searchId
379 | def missing(name: String): Nothing =
380 | sys.error(s"Missing $name for $searchId ($targetType).")
381 |
382 | val forcedExpansions =
383 | ProfilingMacroPlugin.searchIdsToMacroStates.getOrElse(searchId, Nil)
384 | val expandedStr = s"(expanded macros ${forcedExpansions.size})"
385 |
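    |         // The ` _[i]`/` _[j]` suffixes are frame annotations understood by
    |         // flamegraph renderers, which color annotated frames differently;
    |         // here they mark macro-produced trees and failed searches respectively.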
386 |         // If the result tree comes from a macro expansion, detect the macro's name and add it to the stack frame
387 | val suffix = {
388 | val errorTag = if (result.isFailure) " _[j]" else ""
389 | result.tree.attachments.get[analyzer.MacroExpansionAttachment] match {
390 | case Some(analyzer.MacroExpansionAttachment(expandee: Tree, _)) =>
391 | val expandeeSymbol = treeInfo.dissectApplied(expandee).core.symbol
392 | analyzer.loadMacroImplBinding(expandeeSymbol) match {
393 | case Some(a) =>
394 | val l = if (errorTag.isEmpty) " _[i]" else errorTag
395 | s" (id ${searchId}) $expandedStr (tree from `${a.className}.${a.methName}`)$l"
396 | case None => s" $expandedStr $errorTag"
397 | }
398 | case None => s" $expandedStr $errorTag"
399 | }
400 | }
401 |
402 | // Complete stack names of triggered implicit searches
403 | val children = searchIdChildren.getOrElse(searchId, Nil)
404 | prev.foreach { p =>
405 | val current = searchIdChildren.getOrElse(p.searchId, Nil)
406 | searchIdChildren.update(p.searchId, children ::: current)
407 | }
408 |
409 | val typeForStack = DealiasedType {
410 | if (!config.concreteTypeParamsInImplicits) targetType
411 | else concreteTypeFromSearch(result.subst(result.tree), targetType)
412 | }
413 |
414 | if (
415 | config.printSearchIds.contains(
416 | searchId
417 | ) || (result.isFailure && config.printFailedMacroImplicits)
418 | ) {
419 | logger.info(
420 | s"""implicit search ${searchId}:
421 | | -> valid ${result.isSuccess}
422 | | -> type `${typeForStack}`
423 | | -> ${search.undet_s}
424 | | -> ${search.ctx_s}
425 | | -> tree:
426 | |${showCode(result.tree)}
427 | | -> forced expansions:
428 | |${forcedExpansions.mkString(" ", " \n", "\n")}
429 | |""".stripMargin
430 | )
431 | }
432 |
433 | val thisStackName = s"${typeToString(typeForStack)}$suffix"
434 | stackedNames.update(searchId, List(thisStackName))
435 | children.foreach { childSearch =>
436 | val id = childSearch.searchId
437 | val childrenStackName = stackedNames.getOrElse(id, missing("stack name"))
438 | stackedNames.update(id, thisStackName :: childrenStackName)
439 | }
440 |
441 | // Save the nanos for this implicit search
442 | val searchTimer = getSearchTimerFor(searchId)
443 | val stackedType = searchIdsToTargetTypes.getOrElse(searchId, missing("stack type"))
444 | statistics.stopTimer(searchTimer, searchStart)
445 | val (previousNanos, _) = stackedNanos.getOrElse(searchId, (0L, stackedType))
446 | stackedNanos.+=((searchId, ((searchTimer.nanos + previousNanos), stackedType)))
447 | }
448 |
449 | // 3. Reset the stack and stop timer if there is a dependant search
450 | val previousImplicits = implicitsStack.tail
451 | implicitsStack = previousImplicits.headOption match {
452 | case Some((prevSearch, prevImplicitTypeStart, _)) =>
453 | stopTimerFlamegraph(Some(prevSearch))
454 | statistics.stopTimer(timer, implicitTypeStart)
455 | val newPrevStart = statistics.startTimer(getSearchTimerFor(prevSearch.searchId))
456 | (prevSearch, prevImplicitTypeStart, newPrevStart) :: previousImplicits.tail
457 | case None =>
458 | stopTimerFlamegraph(None)
459 | statistics.stopTimer(timer, implicitTypeStart)
460 | previousImplicits
461 | }
462 |
463 | }
464 | }
465 | }
466 |
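    |   // One state per callback of scalac's `analyzer.DefMacroExpander`
    |   // (`onFailure`, `onDelayed`, `onSuccess`, ...), recorded per expansion.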
467 | sealed trait MacroState {
468 | def pt: Type
469 | def tree: Tree
470 | }
471 |
472 | case class DelayedMacro(pt: Type, tree: Tree) extends MacroState
473 | case class SkippedMacro(pt: Type, tree: Tree) extends MacroState
474 | case class SuppressedMacro(pt: Type, tree: Tree) extends MacroState
475 | case class FallbackMacro(pt: Type, tree: Tree) extends MacroState
476 | case class FailedMacro(pt: Type, tree: Tree) extends MacroState
477 | case class SucceededMacro(pt: Type, tree: Tree) extends MacroState
478 |
479 | case class MacroEntry(
480 | id: Int,
481 | originalPt: Type,
482 | start: statistics.TimerSnapshot,
483 | state: Option[MacroState]
484 | )
485 |
486 | private var macrosStack: List[MacroEntry] = Nil
487 | private var macroCounter: Int = 0
488 |
489 | object ProfilingMacroPlugin extends global.analyzer.MacroPlugin {
490 | type Typer = analyzer.Typer
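    |     // Currently unused: kept around for the disabled tree-size metric in
    |     // `onSuccess` below, where its call site is commented out.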
491 | private def guessTreeSize(tree: Tree): Int =
492 | 1 + tree.children.map(guessTreeSize).sum
493 |
494 | type RepeatedKey = (String, String)
495 | // case class RepeatedValue(original: Tree, result: Tree, count: Int)
496 | // private final val EmptyRepeatedValue = RepeatedValue(EmptyTree, EmptyTree, 0)
497 | // private[ProfilingImpl] val repeatedTrees = perRunCaches.newMap[RepeatedKey, RepeatedValue]
498 |
499 | val macroInfos = perRunCaches.newAnyRefMap[Position, MacroInfo]()
500 | val searchIdsToMacroStates = perRunCaches.newMap[Int, List[MacroState]]()
501 | private val macroIdsToTimers = perRunCaches.newMap[Int, statistics.Timer]()
502 | private val macroChildren = perRunCaches.newMap[Int, List[MacroEntry]]()
503 | private val stackedNanos = perRunCaches.newMap[Int, Long]()
504 | private val stackedNames = perRunCaches.newMap[Int, List[String]]()
505 |
506 | def foldMacroStacks(outputPaths: Seq[AbsolutePath]): Unit =
507 | if (outputPaths.nonEmpty) {
508 |         // This part is memory-intensive, hence the use of Java collections
509 | val stacksJavaList = new java.util.ArrayList[String]()
510 | stackedNanos.foreach {
511 | case (id, nanos) =>
512 | val names =
513 | stackedNames.getOrElse(id, sys.error(s"Stack name for macro id ${id} doesn't exist!"))
514 | val stackName = names.mkString(";")
515 | stacksJavaList.add(s"$stackName ${nanos / 1000}")
516 | }
517 | java.util.Collections.sort(stacksJavaList)
518 |
519 | outputPaths.foreach(path =>
520 | Files.write(
521 | path.underlying,
522 | stacksJavaList,
523 | StandardOpenOption.WRITE,
524 | StandardOpenOption.CREATE
525 | )
526 | )
527 | } else ()
528 |
529 | import scala.tools.nsc.Mode
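    |     // To keep per-frame times exclusive, the parent expansion's timer is
    |     // stopped while a nested macro expands and restarted when it finishes,
    |     // mirroring what the analyzer plugin does for nested implicit searches.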
530 | override def pluginsMacroExpand(t: Typer, expandee: Tree, md: Mode, pt: Type): Option[Tree] = {
531 | val macroId = macroCounter
532 | macroCounter = macroCounter + 1
533 |
534 | object expander extends analyzer.DefMacroExpander(t, expandee, md, pt) {
535 | private var alreadyTracking: Boolean = false
536 |
537 | /** The default method that expands all macros. */
538 | override def apply(desugared: Tree): Tree = {
539 | def updateExpansionTime(desugared: Tree, start: statistics.TimerSnapshot): Unit = {
540 | statistics.stopTimer(preciseMacroTimer, start)
541 | val (nanos0, _) = start
542 | val timeNanos = (preciseMacroTimer.nanos - nanos0)
543 | val callSitePos = desugared.pos
544 |             // Call sites missing from `macroInfos` correspond to macros that failed to expand
545 | macroInfos.get(callSitePos).foreach { found =>
546 | val updatedInfo = found.copy(expansionNanos = timeNanos)
547 | macroInfos(callSitePos) = updatedInfo
548 | }
549 | }
550 | val shouldTrack = statistics.enabled && !alreadyTracking
551 |
552 | val prevData = macrosStack.headOption.map { prev =>
553 | macroIdsToTimers.getOrElse(
554 | prev.id,
555 | sys.error(s"fatal error: missing timer for ${prev.id}")
556 | ) -> prev
557 | }
558 |
559 | // Let's first stop the previous timer to have consistent times for the flamegraph
560 | prevData.foreach {
561 | case (prevTimer, prev) => statistics.stopTimer(prevTimer, prev.start)
562 | }
563 |
564 | // Let's create our own timer
565 | val searchPrefix = s" macro ${macroId}"
566 | val macroTimer = registerTyperTimerFor(searchPrefix)
567 | macroIdsToTimers += ((macroId, macroTimer))
568 | val start = {
569 | alreadyTracking = true
570 | statistics.startTimer(macroTimer)
571 | }
572 |
573 | val entry = MacroEntry(macroId, pt, start, None)
574 |
575 | if (config.generateMacroFlamegraph) {
576 | // We add ourselves to the child list of our parent macro
577 | prevData.foreach {
578 | case (_, entry) =>
579 | val prevId = entry.id
580 | val prevChilds = macroChildren.getOrElse(prevId, Nil)
581 | macroChildren.update(prevId, entry :: prevChilds)
582 | }
583 | }
584 |
585 | macrosStack = entry :: macrosStack
586 | try super.apply(desugared)
587 | finally {
588 | if (shouldTrack) {
589 | alreadyTracking = false
590 | updateExpansionTime(desugared, start)
591 | }
592 |
593 | val children = macroChildren.getOrElse(macroId, Nil)
594 | if (config.generateMacroFlamegraph) {
595 | // Complete stack names of triggered implicit searches
596 | prevData.foreach {
597 | case (_, p) =>
598 | val prevChildren = macroChildren.getOrElse(p.id, Nil)
599 | macroChildren.update(p.id, children ::: prevChildren)
600 | }
601 | }
602 |
603 | // We need to fetch the entry from the stack as it can be modified
604 | val parents = macrosStack.tail
605 | macrosStack.headOption match {
606 | case Some(head) =>
607 | if (config.generateMacroFlamegraph) {
608 | val thisStackName = head.state match {
609 | case Some(FailedMacro(pt, _)) => s"${typeToString(pt)} [failed]"
610 | case Some(DelayedMacro(pt, _)) => s"${typeToString(pt)} [delayed]"
611 | case Some(SucceededMacro(pt, _)) => s"${typeToString(pt)}"
612 | case Some(SuppressedMacro(pt, _)) => s"${typeToString(pt)} [suppressed]"
613 | case Some(SkippedMacro(pt, _)) => s"${typeToString(pt)} [skipped]"
614 | case Some(FallbackMacro(pt, _)) => s"${typeToString(pt)} [fallback]"
615 | case None => sys.error("Fatal error: macro has no state!")
616 | }
617 |
618 | stackedNames.update(macroId, thisStackName :: Nil)
619 | children.foreach { childSearch =>
620 | val id = childSearch.id
621 | val childrenStackName = stackedNames.getOrElse(id, sys.error("no stack name"))
622 | stackedNames.update(id, thisStackName :: childrenStackName)
623 | }
624 | }
625 |
626 | statistics.stopTimer(macroTimer, head.start)
627 | val previousNanos = stackedNanos.getOrElse(macroId, 0L)
628 | stackedNanos.+=((macroId, macroTimer.nanos + previousNanos))
629 | prevData match {
630 | case Some((prevTimer, prev)) =>
631 | // Let's restart the timer of the previous macro expansion
632 | val newStart = statistics.startTimer(prevTimer)
633 | // prev is the head of `parents`, so let's replace it on stack with the new start
634 | macrosStack = prev.copy(start = newStart) :: parents.tail
635 | case None => macrosStack = parents
636 | }
637 | case None => sys.error(s"fatal error: expected macro entry for macro id $macroId")
638 | }
639 | }
640 | }
641 |
642 | def mapToCurrentImplicitSearch(exp: MacroState): Unit = {
643 | implicitsStack.headOption match {
644 | case Some(i) =>
645 | val id = i._1.searchId
646 | val currentMacros = searchIdsToMacroStates.getOrElse(id, Nil)
647 | searchIdsToMacroStates.update(id, exp :: currentMacros)
648 | case None => ()
649 | }
650 | }
651 |
652 | def updateStack(state: MacroState): Unit = {
653 | macrosStack.headOption match {
654 | case Some(entry) =>
655 | macrosStack = entry.copy(state = Some(state)) :: macrosStack.tail
656 | case None => sys.error("fatal error: stack cannot be empty while updating!")
657 | }
658 | }
659 |
660 | override def onFailure(expanded: Tree) = {
661 | val state = FailedMacro(pt, expanded)
662 | mapToCurrentImplicitSearch(state)
663 | statistics.incCounter(failedMacros)
664 | updateStack(state)
665 | super.onFailure(expanded)
666 | }
667 |
668 | override def onSkipped(expanded: Tree) = {
669 | val state = SkippedMacro(pt, expanded)
670 | mapToCurrentImplicitSearch(state)
671 | statistics.incCounter(skippedMacros)
672 | updateStack(state)
673 |           super.onSkipped(expanded)
674 | }
675 |
676 | override def onFallback(expanded: Tree) = {
677 | val state = FallbackMacro(pt, expanded)
678 | mapToCurrentImplicitSearch(state)
679 | statistics.incCounter(fallbackMacros)
680 | updateStack(state)
681 | super.onFallback(expanded)
682 | }
683 |
684 | override def onSuppressed(expanded: Tree) = {
685 | val state = SuppressedMacro(pt, expanded)
686 | mapToCurrentImplicitSearch(state)
687 | statistics.incCounter(suppressedMacros)
688 | updateStack(state)
689 | super.onSuppressed(expanded)
690 | }
691 |
692 | override def onDelayed(expanded: Tree) = {
693 | val state = DelayedMacro(pt, expanded)
694 | mapToCurrentImplicitSearch(state)
695 | statistics.incCounter(delayedMacros)
696 | updateStack(state)
697 | super.onDelayed(expanded)
698 | }
699 |
700 | override def onSuccess(expanded0: Tree) = {
701 | val expanded = super.onSuccess(expanded0)
702 | val expandedType = concreteTypeFromSearch(expanded, pt)
703 | val state = SucceededMacro(expandedType, expanded)
704 | mapToCurrentImplicitSearch(state)
705 | updateStack(state)
706 |
707 | // Update macro counter per type returned
708 | val macroTypeCounter = macrosByType.getOrElse(expandedType, 0)
709 | macrosByType.update(expandedType, macroTypeCounter + 1)
710 |
711 | val callSitePos = this.expandee.pos
712 | /* val printedExpandee = showRaw(expandee)
713 | val printedExpanded = showRaw(expanded)
714 | val key = (printedExpandee, printedExpanded)
715 | val currentValue = repeatedTrees.getOrElse(key, EmptyRepeatedValue)
716 | val newValue = RepeatedValue(expandee, expanded, currentValue.count + 1)
717 | repeatedTrees.put(key, newValue)*/
718 | val macroInfo = macroInfos.getOrElse(callSitePos, MacroInfo.Empty)
719 | val expandedMacros = macroInfo.expandedMacros + 1
720 | val treeSize = 0 // macroInfo.expandedNodes + guessTreeSize(expanded)
721 |
722 | // Use 0L for the timer because it will be filled in by the caller `apply`
723 | macroInfos.put(callSitePos, MacroInfo(expandedMacros, treeSize, 0L))
724 | expanded
725 | }
726 | }
727 | Some(expander(expandee))
728 | }
729 | }
730 | }
731 |
732 | trait ProfilingStats {
733 | val global: Global
734 | import global.statistics.{newSubCounter, macroExpandCount, implicitSearchCount, newTimer}
735 | macroExpandCount.children.clear()
736 | final val preciseMacroTimer = newTimer("precise time in macroExpand")
737 | final val failedMacros = newSubCounter(" of which failed macros", macroExpandCount)
738 | final val delayedMacros = newSubCounter(" of which delayed macros", macroExpandCount)
739 | final val suppressedMacros = newSubCounter(" of which suppressed macros", macroExpandCount)
740 | final val fallbackMacros = newSubCounter(" of which fallback macros", macroExpandCount)
741 | final val skippedMacros = newSubCounter(" of which skipped macros", macroExpandCount)
742 | final val implicitSearchesByMacrosCount = newSubCounter(" from macros", implicitSearchCount)
743 |
744 | import scala.reflect.internal.util.Position
745 | import scala.collection.mutable
746 |
747 | final val macrosByType = new mutable.HashMap[global.Type, Int]()
748 | final val implicitSearchesByType = global.perRunCaches.newMap[global.Type, Int]()
749 | final val implicitSearchesByPos = global.perRunCaches.newMap[Position, Int]()
750 | final val implicitSearchesSourceFilesByType =
751 | global.perRunCaches.newMap[global.Type, mutable.HashSet[SourceFile]]()
752 | }
753 |
--------------------------------------------------------------------------------
/plugin/src/main/scala/ch/epfl/scala/profilers/tools/Logger.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | ** **
9 | \* */
10 |
11 | package ch.epfl.scala.profilers.tools
12 |
13 | import scala.reflect.internal.util.NoPosition
14 |
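   | // Usage sketch: `new Logger(global).info("implicit searches", count)`
   | // pretty-prints the value through the compiler's reporter.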
15 | final class Logger[G <: scala.tools.nsc.Global](val global: G) {
16 | def debug(msg: String): Unit = global.debuglog(msg)
17 | def success(msg: String): Unit =
18 | debug(wrap(msg, scala.Console.GREEN))
19 |
20 | def info(msg: String): Unit =
21 | global.reporter.echo(NoPosition, msg)
22 | def info[T: pprint.TPrint](header: String, value: T): Unit = {
23 | val tokens = pprint.tokenize(value, height = 100000000).mkString
24 | info(s"$header:\n$tokens")
25 | }
26 |
27 | def wrap(content: String, `with`: String): String =
28 | s"${`with`}$content${scala.Console.RESET}"
29 | }
30 |
--------------------------------------------------------------------------------
/plugin/src/main/scala/ch/epfl/scala/profilers/tools/QuantitiesHijacker.scala:
--------------------------------------------------------------------------------
1 | package ch.epfl.scala.profilers.tools
2 |
3 | import scala.collection.mutable
4 | import scala.tools.nsc.Global
5 | import scala.reflect.internal.util.Statistics
6 |
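   | // `Statistics` keeps its registered quantities in a private map (`qs`);
   | // reaching it reflectively lets the plugin unregister the per-search
   | // timers it creates. Depending on how the compiler was built, `qs` shows
   | // up as a field or only as a synthetic getter, hence the two paths below.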
7 | object QuantitiesHijacker {
8 | type Quantities = mutable.HashMap[String, Statistics#Quantity]
9 | def getRegisteredQuantities[G <: Global](global: G): Quantities = {
10 | val clazz = global.statistics.getClass()
11 | try { // see: https://github.com/scalacenter/scalac-profiling/pull/32/files#r790111968
12 | val field = clazz.getField("scala$reflect$internal$util$Statistics$$qs")
13 | field.get(global.statistics).asInstanceOf[Quantities]
14 | } catch {
15 | case _: NoSuchFieldException =>
16 | val method = clazz.getMethod("scala$reflect$internal$util$Statistics$$qs")
17 | method.invoke(global.statistics).asInstanceOf[Quantities]
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/plugin/src/test/scala/ch/epfl/scala/ImplicitTest.scala:
--------------------------------------------------------------------------------
1 | package shapeless {
2 | sealed trait HList extends Product with Serializable
3 |
4 | final case class ::[+H, +T <: HList](head: H, tail: T) extends HList {
5 | def ::[HH](h: HH): HH :: H :: T = shapeless.::(h, this)
6 |
7 | override def toString = head match {
8 | case _: ::[_, _] => "(" + head.toString + ") :: " + tail.toString
9 | case _ => head.toString + " :: " + tail.toString
10 | }
11 | }
12 |
13 | sealed trait HNil extends HList {
14 | def ::[H](h: H) = shapeless.::(h, this)
15 | override def toString = "HNil"
16 | }
17 |
18 | case object HNil extends HNil
19 |
20 | trait Selector[L <: HList, U] {
21 | def apply(l: L): U
22 | }
23 |
24 | object Selector {
25 | def apply[L <: HList, U](implicit selector: Selector[L, U]): Selector[L, U] = selector
26 |
27 | implicit def inHead[H, T <: HList]: Selector[H :: T, H] =
28 | new Selector[H :: T, H] {
29 | def apply(l: H :: T) = l.head
30 | }
31 |
32 | implicit def inTail[H, T <: HList, U](implicit st: Selector[T, U]): Selector[H :: T, U] =
33 | new Selector[H :: T, U] {
34 | def apply(l: H :: T) = st(l.tail)
35 | }
36 | }
37 | }
38 |
39 | import shapeless._
40 |
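   | // Resolving `Selector[L, Boolean]` forces one `inTail` search per `Int`
   | // element of `L` before `inHead` matches the trailing `Boolean`, so the
   | // implicit-search cost grows with the length of the HList.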
41 | object Test extends App {
42 | val sel = Selector[L, Boolean]
43 |
44 | // … 539 commented-out filler lines elided (the ten-line `Int ::` block, repeated) …
583 | type L =
584 | Int ::
585 | Int ::
586 | Int ::
587 | Int ::
588 | Int ::
589 | Int ::
590 | Int ::
591 | Int ::
592 | Int ::
593 | Int ::
594 | //
595 | Boolean ::
596 | HNil
597 | }
598 |
--------------------------------------------------------------------------------
/plugin/src/test/scala/ch/epfl/scala/tools/TestUtil.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | package newtype.tools
11 |
12 | import scala.reflect._
13 |
14 | object TestUtil {
15 | import tools.reflect.{ToolBox, ToolBoxError}
16 |
17 | def intercept[T <: Throwable: ClassTag](test: => Any): T = {
18 | try {
19 | test
20 | throw new Exception(s"Expected exception ${classTag[T]}")
21 | } catch {
22 | case t: Throwable =>
23 | if (classTag[T].runtimeClass != t.getClass) throw t
24 | else t.asInstanceOf[T]
25 | }
26 | }
27 |
28 | def eval(code: String, compileOptions: String = ""): Any = {
29 | val tb = mkToolbox(compileOptions)
30 | tb.eval(tb.parse(code))
31 | }
32 |
33 | def mkToolbox(compileOptions: String = ""): ToolBox[_ <: scala.reflect.api.Universe] = {
34 | val m = scala.reflect.runtime.currentMirror
35 | import scala.tools.reflect.ToolBox
36 | m.mkToolBox(options = compileOptions)
37 | }
38 |
39 | def getResourceContent(resourceName: String): String = {
40 | val resource = getClass.getClassLoader.getResource(resourceName)
41 | val file = scala.io.Source.fromFile(resource.toURI)
42 | file.getLines().mkString("")
43 | }
44 |
45 | lazy val toolboxClasspath: String = getResourceContent("toolbox.classpath")
46 | lazy val toolboxPluginOptions: String = getResourceContent("toolbox.extra")
47 |
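   |   // Usage sketch: `expectError("diverging implicit expansion")(code)`
   |   // evaluates `code` in a toolbox and asserts on the reported message.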
48 | def expectError(
49 | errorSnippet: String,
50 | compileOptions: String = "",
51 | baseCompileOptions: String = s"-cp $toolboxClasspath $toolboxPluginOptions"
52 | )(code: String): Unit = {
53 | val errorMessage = intercept[ToolBoxError] {
54 | eval(code, s"$compileOptions $baseCompileOptions")
55 | }.getMessage
56 | val userMessage =
57 | s"""
58 | |FOUND: $errorMessage
59 | |EXPECTED: $errorSnippet
60 | """.stripMargin
61 | assert(errorMessage.contains(errorSnippet), userMessage)
62 | }
63 |
64 | def expectWarning(
65 | errorSnippet: String,
66 | compileOptions: String = "",
67 | baseCompileOptions: String = s"-cp $toolboxClasspath $toolboxPluginOptions"
68 | )(code: String): Unit = {
69 |     expectError(errorSnippet, compileOptions + " -Xfatal-warnings", baseCompileOptions)(code)
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/profiledb/src/main/protobuf/profiledb.proto:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | syntax = "proto3";
11 |
12 | package ch.epfl.scala.profiledb;
13 |
14 | import "google/protobuf/timestamp.proto";
15 | import "google/protobuf/duration.proto";
16 |
17 | enum ContentType {
18 | GLOBAL = 0;
19 | PER_COMPILATION_UNIT = 1;
20 | }
21 |
22 | message Database {
23 | ContentType type = 1;
24 | repeated DatabaseEntry entries = 2;
25 | }
26 |
27 | message DatabaseEntry {
28 | google.protobuf.Timestamp timestamp = 1;
29 | RunProfile runProfile = 2;
30 | CompilationUnitProfile compilationUnitProfile = 3;
31 | }
32 |
33 | message RunProfile {
34 | repeated PhaseProfile phaseProfiles = 2;
35 | }
36 |
37 | message PhaseProfile {
38 | string name = 1;
39 | repeated Timer timers = 2;
40 | repeated Counter counters = 3;
41 | }
42 |
43 | message Timer {
44 | string id = 1;
45 | google.protobuf.Duration duration = 2;
46 | }
47 |
48 | message Counter {
49 | string id = 1;
50 | int64 ticks = 2;
51 | }
52 |
53 | message CompilationUnitProfile {
54 | repeated MacroProfile macroProfiles = 1;
55 | repeated ImplicitSearchProfile implicitSearchProfiles = 2;
56 | }
57 |
58 | message MacroProfile {
59 | Position position = 1;
60 | int64 expandedMacros = 2;
61 | int64 approximateSize = 3;
62 | google.protobuf.Duration duration = 4;
63 | }
64 |
65 | message ImplicitSearchProfile {
66 | Position position = 1;
67 | int64 searches = 2;
68 | google.protobuf.Duration duration = 3;
69 | }
70 |
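   | // sint32 is ZigZag-encoded, so negative values (if ever used as
   | // "no position" sentinels) stay compact on the wire.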
71 | message Position {
72 | sint32 point = 1;
73 | sint32 line = 2;
74 | sint32 column = 3;
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/profiledb/src/main/scala/ch.epfl.scala.profiledb/ProfileDb.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | package ch.epfl.scala.profiledb
11 |
12 | import java.nio.file.Files
13 |
14 | import ch.epfl.scala.profiledb.{profiledb => schema}
15 | import com.google.protobuf.{CodedInputStream, CodedOutputStream}
16 |
17 | import scala.util.Try
18 |
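   | // Thin (de)serialization layer over the protobuf-generated classes from
   | // `profiledb.proto`; reads and writes are wrapped in `Try`.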
19 | object ProfileDb {
20 | def read(path: ProfileDbPath): Try[schema.Database] = Try {
21 | val inputStream = Files.newInputStream(path.target.underlying)
22 | val reader = CodedInputStream.newInstance(inputStream)
23 | val read = schema.Database.parseFrom(reader)
24 | inputStream.close()
25 | read
26 | }
27 |
28 | def write(database: schema.Database, path: ProfileDbPath): Try[schema.Database] = Try {
29 | val targetPath = path.target.underlying
30 | if (!Files.exists(targetPath))
31 | Files.createDirectories(targetPath.getParent())
32 | val outputStream = Files.newOutputStream(targetPath)
33 | val writer = CodedOutputStream.newInstance(outputStream)
34 | database.writeTo(writer)
35 | writer.flush()
36 | outputStream.close()
37 | database
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/profiledb/src/main/scala/ch.epfl.scala.profiledb/ProfileDbPath.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | package ch.epfl.scala.profiledb
11 |
12 | import ch.epfl.scala.profiledb.utils.{AbsolutePath, RelativePath}
13 |
14 | final class ProfileDbPath private (outputDir: AbsolutePath, targetPath: RelativePath) {
15 | lazy val target: AbsolutePath = {
16 | require(ProfileDbPath.hasDbExtension(targetPath))
17 | require(targetPath.underlying.startsWith(ProfileDbPath.Prefix.underlying))
18 | targetPath.toAbsolute(outputDir)
19 | }
20 | }
21 |
22 | object ProfileDbPath {
23 | def apply(outputDir: AbsolutePath, targetPath: RelativePath): ProfileDbPath =
24 | new ProfileDbPath(outputDir, targetPath)
25 |
26 | private[profiledb] final val ProfileDbName = "profiledb"
27 | private[profiledb] final val ProfileDbExtension = s".$ProfileDbName"
28 | private[profiledb] final val Prefix = RelativePath("META-INF").resolve(s"$ProfileDbName")
29 | final val GlobalProfileDbRelativePath = toProfileDbPath(RelativePath("global"))
30 | final val GraphsProfileDbRelativePath = Prefix.resolveRelative(RelativePath("graphs"))
31 |
32 | private[profiledb] def hasDbExtension(path: RelativePath): Boolean =
33 | path.underlying.getFileName.toString.endsWith(ProfileDbExtension)
34 |
35 | def toProfileDbPath(relativeSourceFile: RelativePath): RelativePath =
36 | Prefix.resolveRelative(addDbExtension(relativeSourceFile))
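   |   // e.g. `src/Foo.scala` -> `META-INF/profiledb/src/Foo.scala.profiledb`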
37 |
38 | def toGraphsProfilePath(path: AbsolutePath): AbsolutePath =
39 | path.resolve(GraphsProfileDbRelativePath)
40 |
41 | private[profiledb] def addDbExtension(path: RelativePath): RelativePath = {
42 | val realPath = path.underlying
43 | val extendedName = realPath.getFileName.toString + ProfileDbExtension
44 | val parent = path.underlying.getParent
45 | if (parent == null) RelativePath(extendedName)
46 | else RelativePath(parent.resolve(extendedName))
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/profiledb/src/main/scala/ch.epfl.scala.profiledb/utils/AbsolutePath.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | // Slight modification of scalameta io utils
11 | package ch.epfl.scala.profiledb.utils
12 |
13 | import java.io.File
14 | import java.nio.file.{Files, Path, Paths}
15 |
16 | final class AbsolutePath private (val underlying: Path) extends AnyVal {
17 | def syntax: String = toString
18 | def structure: String = s"""AbsolutePath("$syntax")"""
19 | override def toString: String = underlying.toString
20 |
21 | def toRelative(prefix: AbsolutePath): RelativePath =
22 | RelativePath(prefix.underlying.relativize(underlying))
23 |
24 | def resolve(other: RelativePath): AbsolutePath =
25 | AbsolutePath(underlying.resolve(other.underlying))(this)
26 | def resolve(other: String): AbsolutePath = AbsolutePath(underlying.resolve(other))(this)
27 |
28 | def getParent: AbsolutePath = AbsolutePath(underlying.getParent)
29 |
30 | def isFile: Boolean = Files.isRegularFile(underlying)
31 | def isDirectory: Boolean = Files.isDirectory(underlying)
32 | def readAllBytes: Array[Byte] = Files.readAllBytes(underlying)
33 | }
34 |
35 | object AbsolutePath {
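   |   // Relative inputs resolve against `user.dir` unless a more specific
   |   // working directory is provided in implicit scope.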
36 | implicit def workingDirectory: AbsolutePath = new AbsolutePath(Paths.get(sys.props("user.dir")))
37 | def apply(file: File)(implicit cwd: AbsolutePath): AbsolutePath = apply(file.toPath)(cwd)
38 | def apply(path: String)(implicit cwd: AbsolutePath): AbsolutePath = apply(Paths.get(path))(cwd)
39 | def apply(path: Path)(implicit cwd: AbsolutePath): AbsolutePath =
40 | if (path.isAbsolute) new AbsolutePath(path) else cwd.resolve(path.toString)
41 | }
42 |
--------------------------------------------------------------------------------
/profiledb/src/main/scala/ch.epfl.scala.profiledb/utils/RelativePath.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | // Slight modification of scalameta io utils
11 | package ch.epfl.scala.profiledb.utils
12 |
13 | import java.io.File
14 | import java.nio.file.{Path, Paths}
15 |
16 | final class RelativePath private (val underlying: Path) extends AnyVal {
17 | def syntax: String = toString
18 | def structure: String = s"""RelativePath("$syntax")"""
19 | override def toString: String = underlying.toString
20 |
21 | def toAbsolute(root: AbsolutePath): AbsolutePath = root.resolve(this)
22 | def relativize(other: RelativePath): RelativePath =
23 | RelativePath(underlying.relativize(other.underlying))
24 |
25 | def resolve(other: Path): RelativePath = RelativePath(underlying.resolve(other))
26 | def resolveRelative(other: RelativePath): RelativePath = resolve(other.underlying)
27 | def resolve(path: String): RelativePath = resolve(Paths.get(path))
28 | def resolveSibling(f: String => String): RelativePath =
29 | RelativePath(underlying.resolveSibling(f(underlying.getFileName.toString)))
30 | }
31 |
32 | object RelativePath {
33 | def apply(path: String): RelativePath = RelativePath(Paths.get(path))
34 | def apply(file: File): RelativePath = RelativePath(file.toPath)
35 | def apply(path: Path): RelativePath =
36 | if (!path.isAbsolute) new RelativePath(path)
37 | else throw new RuntimeException(s"$path is not relative")
38 | }
39 |
--------------------------------------------------------------------------------
/project/BuildPlugin.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | package ch.epfl.scala.profiling.build
11 |
12 | import sbt._
13 |
14 | object BuildPlugin extends AutoPlugin {
15 | override def trigger: PluginTrigger = allRequirements
16 | override def requires: Plugins = plugins.JvmPlugin
17 | val autoImport = BuildKeys
18 |
19 | override def globalSettings: Seq[Def.Setting[_]] =
20 | BuildImplementation.globalSettings
21 | override def buildSettings: Seq[Def.Setting[_]] =
22 | BuildImplementation.buildSettings
23 | override def projectSettings: Seq[Def.Setting[_]] =
24 | BuildImplementation.projectSettings
25 | }
26 |
27 | object BuildKeys {
28 | final val enableStatistics =
29 | settingKey[Boolean]("Enable performance debugging if true.")
30 | final val optionsForSourceCompilerPlugin =
31 | taskKey[Seq[String]]("Generate scalac options for source compiler plugin")
32 | final val allDepsForCompilerPlugin =
33 | taskKey[Def.Classpath]("Return all dependencies for the source compiler plugin.")
34 | final val showScalaInstances = taskKey[Unit]("Show versions of all integration tests")
35 |
36 | // Refer to setting via reference because there is no dependency to the scalac build here.
37 | final val scalacVersionSuffix = sbt.SettingKey[String]("baseVersionSuffix")
38 |
39 | // Use absolute paths so that references work even though the `ThisBuild` changes
40 | final val AbsolutePath = file(".").getCanonicalFile.getAbsolutePath
41 | final val HomeBuild = BuildRef(RootProject(file(AbsolutePath)).build)
42 |
43 | // Source dependencies from git are cached by sbt
44 | val BetterFiles = RootProject(
45 | uri(
46 | "https://git@github.com/pathikrit/better-files.git#6f2e3f1328b1b18eddce973510db71bc6c14fadb"
47 | ) // v3.9.2
48 | )
49 | val Wartremover = RootProject(
50 | uri(
51 | "https://git@github.com/wartremover/wartremover.git#29bb7b69ad49eb87c19d9ba865298071c2795bb7"
52 | ) // v3.1.4
53 | )
54 |
55 | val BetterFilesCore = ProjectRef(BetterFiles.build, "core")
56 | val WartremoverCore = ProjectRef(Wartremover.build, "core")
57 |
58 | val IntegrationProjectsAndReferences = List[(ProjectRef, String)](
59 | BetterFilesCore -> "BetterFilesCore",
60 | WartremoverCore -> "WartremoverCore"
61 | )
62 |
63 | val AllIntegrationProjects = IntegrationProjectsAndReferences.map(_._1)
64 |
65 | // Assumes that the previous scala version is the last bincompat version
66 | // final val ScalacVersion = Keys.version in BuildKeys.ScalacCompiler
67 | // final val ScalacScalaVersion = Keys.scalaVersion in BuildKeys.ScalacCompiler
68 |
69 | /**
70 | * Write all the compile-time dependencies of the compiler plugin to a file,
71 | * in order to read it from the created Toolbox to run the neg tests.
72 | */
73 | lazy val generateToolboxClasspath = Def.task {
74 | val scalaBinVersion = (Compile / Keys.scalaBinaryVersion).value
75 | val targetDir = (Compile / Keys.target).value
76 | val compiledClassesDir = targetDir / s"scala-$scalaBinVersion/classes"
77 | val testClassesDir = targetDir / s"scala-$scalaBinVersion/test-classes"
78 | val libraryJar = Keys.scalaInstance.value.libraryJars.head.getAbsolutePath
79 | val deps = (Compile / Keys.libraryDependencies).value.mkString(":")
80 | val classpath = s"$compiledClassesDir:$testClassesDir:$libraryJar:$deps"
81 | val resourceDir = (Compile / Keys.resourceManaged).value
82 | val toolboxTestClasspath = resourceDir / "toolbox.classpath"
83 | sbt.IO.write(toolboxTestClasspath, classpath)
84 | List(toolboxTestClasspath.getAbsoluteFile)
85 | }
86 |
87 | /**
88 | * Sbt does not like overrides of setting values that happen in ThisBuild,
89 | * nor in other project settings like integrations'. No. Sbt is exigent and
90 | * always asks you to give your best.
91 | *
92 | * Why so much code for such a simple idea? Well, `Project.extract` does force
93 | * the execution and initialization of settings, so as `onLoad` is a setting
94 | * it causes a recursive call to itself, yay!
95 | *
96 | * So, in short, solution: use an attribute in the state to short-circuit the
97 | * recursive invocation.
98 | *
99 |    * Notes to the future reader: the bug that prompted this solution is weird.
100 | * I can indeed override lots of settings via project refs, but when it comes
101 | * to overriding a setting **in a project** (that has been generated via
102 | * sbt-cross-project), it does not work. On top of this, this wouldn't happen
103 | * if monocle defined the scala versions at the build level (it instead does it
104 | * at the project level, which is bad practice). So, finding a repro for this
105 | * is going to be fun.
106 | */
107 | final val hijacked = sbt.AttributeKey[Boolean]("the hijacked sexy option.")
108 |
109 | ////////////////////////////////////////////////////////////////////////////////
110 |
111 | def inProject(ref: Reference)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
112 | sbt.inScope(sbt.ThisScope.copy(project = Select(ref)))(ss)
113 |
114 | def inProjectRefs(refs: Seq[Reference])(ss: Setting[_]*): Seq[Setting[_]] =
115 | refs.flatMap(inProject(_)(ss))
116 |
117 | def inCompileAndTest(ss: Setting[_]*): Seq[Setting[_]] =
118 | Seq(sbt.Compile, sbt.Test).flatMap(sbt.inConfig(_)(ss))
119 |
120 | object Keywords {
121 | val Integration = " integration"
122 | val BetterFiles = " better-files"
123 | val Wartremover = " wartremover"
124 | }
125 |
126 | private val AllKeywords = List(
127 | Keywords.Integration,
128 | Keywords.BetterFiles,
129 | Keywords.Wartremover
130 | )
131 |
132 | import sbt.complete.Parser
133 | import sbt.complete.DefaultParsers._
134 | private val AllParsers =
135 | AllKeywords.tail.foldLeft(AllKeywords.head: Parser[String]) { case (p, s) => p.|(s) }
136 | private val keywordsParser = AllParsers.+.examples(AllKeywords: _*)
137 | val keywordsSetting: Def.Initialize[sbt.State => Parser[Seq[String]]] =
138 | Def.setting((state: sbt.State) => keywordsParser)
139 | }
140 |
141 | object BuildImplementation {
142 |
143 | // This should be added to upstream sbt.
144 | def GitHub(org: String, project: String): java.net.URL =
145 | url(s"https://github.com/$org/$project")
146 | def GitHubDev(handle: String, fullName: String, email: String) =
147 | Developer(handle, fullName, email, url(s"https://github.com/$handle"))
148 |
149 | // import ch.epfl.scala.sbt.release.ReleaseEarlyPlugin.{autoImport => ReleaseEarlyKeys}
150 |
151 | final val PluginProject = sbt.LocalProject("plugin")
152 | private final val ThisRepo = GitHub("scalacenter", "scalac-profiling")
153 | final val publishSettings: Seq[Def.Setting[_]] = Seq(
154 | Keys.startYear := Some(2017),
155 | Keys.autoAPIMappings := true,
156 | Keys.homepage := Some(ThisRepo),
157 | Test / Keys.publishArtifact := false,
158 | Keys.licenses := Seq("Apache-2.0" -> url("https://opensource.org/licenses/Apache-2.0")),
159 | Keys.developers := List(GitHubDev("jvican", "Jorge Vicente Cantero", "jorge@vican.me")),
160 | // ReleaseEarlyKeys.releaseEarlyWith := ReleaseEarlyKeys.SonatypePublisher,
161 | Keys.pomExtra := scala.xml.NodeSeq.Empty
162 | )
163 |
164 | object BuildDefaults {
165 | final val showScalaInstances: Def.Initialize[sbt.Task[Unit]] = Def.task {
166 | val logger = Keys.streams.value.log
167 | logger.info((BuildKeys.BetterFilesCore / Test / Keys.name).value)
168 | logger.info((BuildKeys.BetterFilesCore / Test / Keys.scalaInstance).value.toString)
169 | logger.info((BuildKeys.WartremoverCore / Compile / Keys.name).value)
170 | logger.info((BuildKeys.WartremoverCore / Compile / Keys.scalaInstance).value.toString)
171 | ()
172 | }
173 |
174 | import sbt.Command
175 | def fixPluginCross(commands: Seq[Command]): Seq[Command] = {
176 | val pruned = commands.filterNot(p => p == sbt.WorkingPluginCross.oldPluginSwitch)
177 | sbt.WorkingPluginCross.pluginSwitch +: pruned
178 | }
179 |
180 | type Hook = Def.Initialize[State => State]
181 |
182 | def scalacProfilingScalacOptions(ref: ProjectRef): Def.Initialize[sbt.Task[Seq[String]]] = {
183 | Def.task {
184 | val projectBuild = ref.build
185 | val workingDir = Keys.buildStructure.value.units(projectBuild).localBase.getAbsolutePath
186 | val sourceRoot = s"-P:scalac-profiling:sourceroot:$workingDir"
187 | val pluginOpts = (PluginProject / BuildKeys.optionsForSourceCompilerPlugin).value
188 | sourceRoot +: pluginOpts
189 | }
190 | }
191 |
192 | def setUpUnmanagedJars: Def.Initialize[sbt.Task[Def.Classpath]] = Def.task {
193 | val previousJars = (Compile / Keys.unmanagedJars).value
194 | val allPluginDeps = (PluginProject / BuildKeys.allDepsForCompilerPlugin).value
195 | previousJars ++ allPluginDeps
196 | }
197 |
198 | object MethodRefs {
199 | private final val build = "_root_.ch.epfl.scala.profiling.build"
200 | def scalacProfilingScalacOptionsRef(ref: String): String =
201 | s"${build}.BuildImplementation.BuildDefaults.scalacProfilingScalacOptions($ref)"
202 | final val setUpUnmanagedJarsRef: String =
203 | s"${build}.BuildImplementation.BuildDefaults.setUpUnmanagedJars"
204 | }
205 |
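    |     // Builds a single sbt `set List(...)` command that re-points every
    |     // integration project at this build's Scala version, scalac options,
    |     // and plugin jars; one emitted setting looks like, e.g.:
    |     //   BetterFilesCore / scalaVersion := "2.12.20"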
206 | def setUpSourceDependenciesCmd(refs: List[String]): Def.Initialize[String] = {
207 | Def.setting {
208 | val scalaV = Keys.scalaVersion.value
209 | def setScalaVersion(ref: String) =
210 | s"""$ref / ${Keys.scalaVersion.key.label} := "$scalaV""""
211 | def setScalacOptions(ref: String) =
212 | s"""$ref / ${Keys.scalacOptions.key.label} := ${MethodRefs
213 | .scalacProfilingScalacOptionsRef(ref)}.value""".stripMargin
214 | def setUnmanagedJars(ref: String, config: String) =
215 | s"""$ref / $config / ${Keys.unmanagedJars.key.label} := ${MethodRefs.setUpUnmanagedJarsRef}.value"""
216 | val msg = "The build integrations are set up."
217 | val setLoadMessage = s"""sbt.Global / ${Keys.onLoadMessage.key.label} := "$msg""""
218 | val allSettingsRedefinitions = refs.flatMap { ref =>
219 | val setsUnmanagedJars =
220 | List(setUnmanagedJars(ref, "Compile"), setUnmanagedJars(ref, "Test"))
221 | List(setScalaVersion(ref), setScalacOptions(ref)) ++ setsUnmanagedJars
222 | } ++ List(setLoadMessage)
223 |
224 | s"set List(${allSettingsRedefinitions.mkString(",")})"
225 | }
226 | }
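// For a hypothetical project reference `foo`, the generated command looks roughly like:
//   set List(foo / scalaVersion := "2.12.20",
//            foo / scalacOptions := ...BuildDefaults.scalacProfilingScalacOptions(foo).value,
//            foo / Compile / unmanagedJars := ...BuildDefaults.setUpUnmanagedJars.value,
//            foo / Test / unmanagedJars := ...BuildDefaults.setUpUnmanagedJars.value,
//            sbt.Global / onLoadMessage := "The build integrations are set up.")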
227 |
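// Runs once per reload: the first invocation prepends the generated `set` command and
// marks the state as hijacked; the follow-up invocation clears the mark and does nothing.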
228 | final val hijackScalaVersions: Hook = Def.settingDyn {
229 | val cmd = setUpSourceDependenciesCmd(BuildKeys.IntegrationProjectsAndReferences.map(_._2))
230 | Def.setting { (state: State) =>
231 | if (state.get(BuildKeys.hijacked).getOrElse(false)) state.remove(BuildKeys.hijacked)
232 | else cmd.value :: state.put(BuildKeys.hijacked, true)
233 | }
234 | }
235 |
236 | final val customOnLoad: Hook = Def.settingDyn {
237 | Def.setting(hijackScalaVersions.value)
238 | }
239 | }
240 |
241 | final val globalSettings: Seq[Def.Setting[_]] = Seq(
242 | Test / Keys.testOptions += sbt.Tests.Argument("-oD"),
243 | // BuildKeys.useScalacFork := false,
244 | Keys.commands ~= BuildDefaults.fixPluginCross _,
245 | Keys.onLoadMessage := Header.intro,
246 | Keys.onLoad := (sbt.Global / Keys.onLoad).value andThen (BuildDefaults.customOnLoad.value)
247 | )
248 |
249 | final val commandAliases: Seq[Def.Setting[sbt.State => sbt.State]] = {
250 | // val scalacRef = sbt.Reference.display(BuildKeys.ScalacBuild)
251 | // val scalac = sbt.addCommandAlias("scalac", s"project ${scalacRef}")
252 | val homeRef = sbt.Reference.display(BuildKeys.HomeBuild)
253 | val home = sbt.addCommandAlias("home", s"project ${homeRef}")
254 | home
255 | }
256 |
257 | final val buildSettings: Seq[Def.Setting[_]] = Seq(
258 | Keys.organization := "ch.epfl.scala",
259 | Keys.resolvers += Resolver.jcenterRepo,
260 | Keys.updateOptions := Keys.updateOptions.value.withCachedResolution(true),
261 | Keys.scalaVersion := "2.12.20",
262 | sbt.nio.Keys.watchTriggeredMessage := Watch.clearScreenOnTrigger,
263 | BuildKeys.enableStatistics := true,
264 | BuildKeys.showScalaInstances := BuildDefaults.showScalaInstances.value
265 | ) ++ publishSettings ++ commandAliases
266 |
267 | final val projectSettings: Seq[Def.Setting[_]] = Seq(
268 | Compile / Keys.scalacOptions := {
269 | val base = (
270 | "-deprecation" :: "-encoding" :: "UTF-8" :: "-feature" :: "-language:existentials" ::
271 | "-language:higherKinds" :: "-language:implicitConversions" :: "-unchecked" ::
272 | "-Ywarn-numeric-widen" :: "-Xlint" :: Nil
273 | )
274 |
275 | if (Keys.scalaVersion.value.startsWith("2.13")) base else base :+ "-Xfuture"
276 | }
277 | // Necessary because the scalac version always has to be a SNAPSHOT to avoid caching issues
278 | // Scope here is wrong -- we put it here temporarily until this is fixed upstream
279 | // ReleaseEarlyKeys.releaseEarlyBypassSnapshotCheck := true
280 | )
281 | }
282 |
283 | object Header {
284 | val intro: String =
285 | """ _____ __ ______ __
286 | | / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____
287 | | \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/
288 | | ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ /
289 | | /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/
290 | |
291 | | ***********************************************************
292 | | *** Welcome to the build of scalac-profiling ***
293 | | *** An effort funded by the Scala Center Advisory Board ***
294 | | ***********************************************************
295 | """.stripMargin
296 | }
297 |
--------------------------------------------------------------------------------
/project/WorkingPluginCross.scala:
--------------------------------------------------------------------------------
1 | package sbt
2 |
3 | import sbt.complete.Parser
4 | import sbt.complete.DefaultParsers._
5 | import sbt.Keys._
6 | import sbt.internal.CommandStrings._
7 | import Cross.{requireSession, spacedFirst}
8 | import sbt.internal.SettingCompletions
9 |
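/**
 * A patched copy of sbt's `^^` plugin-switch command: besides pinning
 * `pluginCrossBuild / sbtVersion`, it also re-derives `scalaVersion`, both globally
 * and in the scope of the current project.
 */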
10 | object WorkingPluginCross {
11 | final val oldPluginSwitch = sbt.PluginCross.pluginSwitch
12 | lazy val pluginSwitch: Command = {
13 | def switchParser(state: State): Parser[(String, String)] = {
14 | val knownVersions = Nil
15 | lazy val switchArgs = token(NotSpace.examples(knownVersions: _*)) ~ (token(
16 | Space ~> matched(state.combinedParser)
17 | ) ?? "")
18 | lazy val nextSpaced = spacedFirst(PluginSwitchCommand)
19 | token(PluginSwitchCommand ~ OptSpace) flatMap { _ =>
20 | switchArgs & nextSpaced
21 | }
22 | }
23 |
24 | def crossExclude(s: Def.Setting[_]): Boolean =
25 | s.key match {
26 | case Def.ScopedKey(Scope(_, _, pluginCrossBuild.key, _), sbtVersion.key) => true
27 | case _ => false
28 | }
29 |
30 | Command.arb(requireSession(switchParser), pluginSwitchHelp) {
31 | case (state, (version, command)) =>
32 | val x = Project.extract(state)
33 | import x._
34 | state.log.info(s"Setting `sbtVersion in pluginCrossBuild` to $version")
35 | val add = List(sbtVersion in GlobalScope in pluginCrossBuild :== version) ++
36 | List(scalaVersion := PluginCross.scalaVersionSetting.value) ++
37 | inScope(GlobalScope.copy(project = Select(currentRef)))(
38 | Seq(scalaVersion := PluginCross.scalaVersionSetting.value)
39 | )
40 | val session = SettingCompletions.setThis(x, add, "").session
41 | BuiltinCommands.reapply(session, structure, command :: state)
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.11.1
2 |
--------------------------------------------------------------------------------
/project/build.sbt:
--------------------------------------------------------------------------------
1 | lazy val root = project
2 | .in(file("."))
3 | .settings(
4 | addSbtPlugin("com.github.sbt" % "sbt-git" % "2.1.0"),
5 | addSbtPlugin("com.thesamet" % "sbt-protoc" % "1.0.8"),
6 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.3.1"),
7 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.11.1"),
8 | addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4"),
9 | addSbtPlugin("com.github.sbt" % "sbt-ghpages" % "0.8.0"),
10 | addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.7.1"),
11 | addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.13.1"),
12 | // // Let's add our sbt plugin to the meta-build too ;)
13 | // unmanagedSourceDirectories in Compile ++= {
14 | // val pluginMainDir = baseDirectory.value.getParentFile / "sbt-plugin" / "src" / "main"
15 | // List(pluginMainDir / "scala", pluginMainDir / s"scala-sbt-${Keys.sbtBinaryVersion.value}")
16 | // },
17 | libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.17"
18 | )
19 |
--------------------------------------------------------------------------------
/project/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.9.9
2 |
--------------------------------------------------------------------------------
/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/ProfilingSbtPlugin.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | package sbt.ch.epfl.scala
11 |
12 | import java.util.concurrent.ConcurrentHashMap
13 | import sbt.{AutoPlugin, Def, Keys, PluginTrigger, Select}
14 |
15 | object ProfilingSbtPlugin extends AutoPlugin {
16 | override def trigger: PluginTrigger = allRequirements
17 | val autoImport = BuildKeys
18 |
19 | override def globalSettings: Seq[Def.Setting[_]] =
20 | ProfilingPluginImplementation.globalSettings
21 | override def buildSettings: Seq[Def.Setting[_]] =
22 | ProfilingPluginImplementation.buildSettings
23 | override def projectSettings: Seq[Def.Setting[_]] =
24 | ProfilingPluginImplementation.projectSettings
25 | }
26 |
27 | object BuildKeys {
28 | import sbt.{settingKey, taskKey, AttributeKey, ProjectRef, ConfigKey}
29 | val profilingWarmupDuration = settingKey[Int]("The duration of the compiler warmup in seconds.")
30 | val profilingWarmupCompiler = taskKey[Unit]("Warms up the compiler for a given period of time.")
31 | private[sbt] val currentProject = AttributeKey[ProjectRef]("thisProjectRef")
32 | private[sbt] val currentConfigKey = AttributeKey[Option[ConfigKey]]("thisConfig")
33 | }
34 |
35 | object ProfilingPluginImplementation {
36 | import java.lang.{Long => BoxedLong}
37 | import sbt.{Compile, Test, Project, Task, ScopedKey, Tags}
38 |
39 | private val timingsForCompilers = new ConcurrentHashMap[ClassLoader, BoxedLong]()
40 | private val timingsForKeys = new ConcurrentHashMap[ScopedKey[_], BoxedLong]()
41 | private val WarmupTag = Tags.Tag("Warmup")
42 |
43 | val globalSettings: Seq[Def.Setting[_]] = List(
44 | Keys.commands += BuildDefaults.profilingWarmupCommand,
45 | BuildKeys.profilingWarmupDuration := BuildDefaults.profilingWarmupDuration.value,
46 | Keys.concurrentRestrictions += Tags.limit(WarmupTag, 1),
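// Install the task timer so that per-task compile durations can be read back
// by the warmup command below.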
47 | Keys.progressReports := {
48 | val debug = (Keys.progressReports / Keys.logLevel).value == sbt.Level.Debug
49 | Seq(new Keys.TaskProgress(new SbtTaskTimer(timingsForKeys, debug)))
50 | },
51 | Keys.progressReports / Keys.logLevel := sbt.Level.Info
52 | )
53 |
54 | val buildSettings: Seq[Def.Setting[_]] = Nil
55 | val projectSettings: Seq[Def.Setting[_]] = List(
56 | Compile / BuildKeys.profilingWarmupCompiler :=
57 | BuildDefaults.profilingWarmupCompiler.tag(WarmupTag).value,
58 | Test / BuildKeys.profilingWarmupCompiler :=
59 | BuildDefaults.profilingWarmupCompiler.tag(WarmupTag).value
60 | )
61 |
62 | object BuildDefaults {
63 | import sbt.{Command, State}
64 | import sbt.complete.Parser
65 |
66 | val profilingWarmupCompiler: Def.Initialize[Task[Unit]] = Def.task {
67 | // Meh, we don't care about the resulting state; we'll throw it away.
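// It runs `command`, plus any commands it enqueues, to completion, and then restores
// the caller's pending commands untouched.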
68 | def runCommandAndRemaining(command: sbt.Exec): State => State = { st: State =>
69 | @annotation.tailrec
70 | def runCommand(command: sbt.Exec, state: State): State = {
71 | val nextState = Parser.parse(command.commandLine, state.combinedParser) match {
72 | case Right(cmd) => cmd()
73 | case Left(msg) => sys.error(s"Invalid programmatic input:\n$msg")
74 | }
75 | nextState.remainingCommands match {
76 | case Nil => nextState
77 | case head :: tail => runCommand(head, nextState.copy(remainingCommands = tail))
78 | }
79 | }
80 | runCommand(command, st.copy(remainingCommands = Nil))
81 | .copy(remainingCommands = st.remainingCommands)
82 | }
83 |
84 | val currentState = Keys.state.value
85 | val currentConfigKey = Keys.resolvedScoped.value.scope.config.toOption
86 | val tweakedState = currentState
87 | .put(BuildKeys.currentConfigKey, currentConfigKey)
88 | .put(BuildKeys.currentProject, Keys.thisProjectRef.value)
89 |
90 | // This is ugly, but the sbt Command API is constrained in this regard.
91 | val commandName = profilingWarmupCommand.asInstanceOf[sbt.SimpleCommand].name
92 | runCommandAndRemaining(sbt.Exec(commandName, None, None))(tweakedState)
93 | ()
94 | }
95 |
96 | val profilingWarmupDuration: Def.Initialize[Int] = Def.setting(60)
97 |
98 | private def getWarmupTime(compilerLoader: ClassLoader): Long = {
99 | val time = timingsForCompilers.get(compilerLoader)
100 | if (time == null) 0 else time.toLong
101 | }
102 |
103 | import sbt.{Scope, IO, Path}
104 |
105 | /**
106 | * This command defines the warm-up behaviour.
107 | *
108 | * After many attempts to get this working within tasks, limiting ourselves to
109 | * the task API alone, it proved impossible: sbt does not allow recursion at
110 | * the task level, and every workaround we tried (task proxies et al.) failed
111 | * miserably.
112 | *
113 | * As a result, we have no choice but to delegate to the Command API and use the
114 | * state directly, implementing a traditional while loop that takes care of warming
115 | * up the compiler.
116 | *
117 | * This command is private and SHOULD NOT be invoked directly. Use `profilingWarmupCompiler`.
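* (In practice, run `Compile / profilingWarmupCompiler` or `Test / profilingWarmupCompiler`,
* which set the required state attributes before delegating here.)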
118 | */
119 | val profilingWarmupCommand: Command = Command.command("warmupCompileFor") { (st0: State) =>
120 | def getStateAttribute[T](key: sbt.AttributeKey[T]): T =
121 | st0.get(key).getOrElse(sys.error(s"The caller did not pass the attribute ${key.label}"))
122 |
123 | // We do this because sbt does not correctly report `thisProjectRef` here, neither via
124 | // the extracted state nor via the build structure with `get(Keys.thisProjectRef)`.
125 | val currentProject = getStateAttribute(BuildKeys.currentProject)
126 |
127 | // We do this because `configuration` does not return the referencing configuration in scope
128 | // and `resolvedScoped` only reports the scope in which it was defined, not the one it was called from.
129 | val currentConfigKey = getStateAttribute(BuildKeys.currentConfigKey)
130 |
131 | val logger = st0.log
132 | val extracted = Project.extract(st0)
133 | val (st1, compilers) = extracted.runTask(extracted.currentRef / Keys.compilers, st0)
134 | val compilerLoader = compilers.scalac.scalaInstance.loader()
135 |
136 | val warmupDurationMs = extracted.get(BuildKeys.profilingWarmupDuration) * 1000
137 | var currentDurationMs = getWarmupTime(compilerLoader)
138 |
139 | val baseScope = Scope.ThisScope.copy(project = Select(currentProject))
140 | val scope = currentConfigKey.map(k => baseScope.copy(config = Select(k))).getOrElse(baseScope)
141 | val classDirectory = extracted.get(scope / Keys.classDirectory)
142 | val compileKeyRef = scope / Keys.compile
143 | // We get the scope from `taskDefinitionKey` so that it is the same one the timer uses.
144 | val compileTaskKey = extracted.get(compileKeyRef).info.get(Def.taskDefinitionKey).get
145 |
146 | def deleteClassFiles(): Unit = {
147 | logger.info(s"Removing class files in ${classDirectory.getAbsolutePath}")
148 | IO.delete(Path.allSubpaths(classDirectory).toIterator.map(_._1).toIterable)
149 | }
150 |
151 | var lastState = st1
152 | if (currentDurationMs < warmupDurationMs)
153 | deleteClassFiles()
154 |
155 | while (currentDurationMs < warmupDurationMs) {
156 | logger.warn(s"Warming up compiler (${currentDurationMs}ms out of ${warmupDurationMs}ms)...")
157 | val (afterCompile, _) = extracted.runTask(compileKeyRef, st1)
158 | lastState = afterCompile
159 |
160 | // Let's update the timing for the compile task with the knowledge of the task timer!
161 | val key = compileTaskKey.scopedKey
162 | currentDurationMs = timingsForKeys.get(key) match {
163 | case executionTime: java.lang.Long =>
164 | logger.debug(s"Registering $executionTime compile time for $key")
165 | timingsForCompilers.put(compilerLoader, executionTime)
166 | executionTime.toLong
167 | case null => sys.error("Abort: compile key was not measured. Report this error.")
168 | }
169 |
170 | // Delete class files so that incremental compilation doesn't kick in when we compile again.
171 | deleteClassFiles()
172 | }
173 |
174 | logger.success(s"The compiler has been warmed up for ${warmupDurationMs}ms.")
175 | lastState
176 | }
177 | }
178 | }
179 |
--------------------------------------------------------------------------------
/sbt-plugin/src/main/scala/sbt/ch/epfl/scala/SbtTaskTimer.scala:
--------------------------------------------------------------------------------
1 | /* *\
2 | ** _____ __ ______ __ **
3 | ** / ___/_________ _/ /___ _ / ____/__ ____ / /____ _____ **
4 | ** \__ \/ ___/ __ `/ / __ `/ / / / _ \/ __ \/ __/ _ \/ ___/ Scala Center **
5 | ** ___/ / /__/ /_/ / / /_/ / / /___/ /__/ / / / /_/ /__/ / https://scala.epfl.ch **
6 | ** /____/\___/\__,_/_/\__,_/ \____/\___/_/ /_/\__/\___/_/ (c) 2017-2018, LAMP/EPFL **
7 | ** **
8 | \* */
9 |
10 | package sbt.ch.epfl.scala
11 |
12 | import java.lang.{Long => BoxedLong}
13 | import java.util.concurrent.ConcurrentHashMap
14 | import sbt.{ExecuteProgress, Result, Task, ScopedKey, Def}
15 |
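/**
 * An `ExecuteProgress` implementation that records when each keyed task is registered
 * and, once it completes, adds the elapsed wall-clock time to the shared `timers` map,
 * aggregating durations across repeated executions of the same `ScopedKey`.
 */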
16 | class SbtTaskTimer(timers: ConcurrentHashMap[ScopedKey[_], BoxedLong], isDebugEnabled: Boolean)
17 | extends ExecuteProgress[Task] {
18 |
19 | type S = Unit
20 | override def initial: Unit = {}
21 |
22 | private def getKey(task: Task[_]): Option[ScopedKey[_]] =
23 | task.info.get(Def.taskDefinitionKey)
24 |
25 | private val pending = new ConcurrentHashMap[ScopedKey[_], BoxedLong]()
26 | def mkUniformRepr(scopedKey: ScopedKey[_]): ScopedKey[_] = scopedKey
27 |
29 | type Tasks = Iterable[sbt.Task[_]]
30 | override def afterRegistered(task: Task[_], allDeps: Tasks, pendingDeps: Tasks): Unit = {
31 | getKey(task) match {
32 | case Some(key) => pending.put(key, System.currentTimeMillis())
33 | case None => ()
34 | }
35 | }
36 |
37 | override def afterCompleted[A](task: Task[A], result: Result[A]): Unit = {
38 | def finishTiming(scopedKey: ScopedKey[_]): Unit = {
39 | pending.get(scopedKey) match {
40 | case startTime: BoxedLong =>
41 | pending.remove(scopedKey)
42 | val duration = System.currentTimeMillis() - startTime
43 | timers.get(scopedKey) match {
44 | // We aggregate running time for those tasks that we target
45 | case currentDuration: BoxedLong => timers.put(scopedKey, currentDuration + duration)
46 | case null => timers.put(scopedKey, duration)
47 | }
48 | case null =>
49 | if (isDebugEnabled) {
50 | // We cannot use sLog here because the logger gets garbage collected and throws NPE after `set` commands are run
51 | println(
52 | s"[sbt-scalac-profiling] ${task.info} finished, but its start wasn't recorded"
53 | )
54 | }
55 | }
56 | }
57 |
58 | getKey(task) match {
59 | case Some(key) => finishTiming(key)
60 | case None => () // Ignore tasks that do not have key information
61 | }
62 |
63 | }
64 |
65 | def workStarting(task: Task[_]): Unit = ()
66 | def allCompleted(state: Unit, results: sbt.RMap[Task, sbt.Result]): Unit = ()
67 | def completed[T](state: Unit, task: Task[T], result: sbt.Result[T]): Unit = ()
68 | def ready(state: Unit, task: Task[_]): Unit = ()
69 | def afterAllCompleted(results: sbt.internal.util.RMap[sbt.Task, sbt.Result]): Unit = ()
70 | def afterReady(task: sbt.Task[_]): Unit = ()
71 | def afterWork[A](task: sbt.Task[A], result: Either[sbt.Task[A], sbt.Result[A]]): Unit = ()
72 | def beforeWork(task: sbt.Task[_]): Unit = ()
73 | def stop(): Unit = ()
74 | }
75 |
--------------------------------------------------------------------------------
/sbt-plugin/src/sbt-test/compiler-profiling/warmup-60-seconds-compile/build.sbt:
--------------------------------------------------------------------------------
1 | scalaVersion := "2.12.15"
2 | profilingWarmupDuration := 20
3 |
4 | ////////////////////////////////////////////////////////////
5 | val checkCompilerIsWarmedUp = settingKey[Boolean]("")
6 | Global / checkCompilerIsWarmedUp := false
7 |
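// Guard used by this scripted test: once the compiler has been warmed up, a second
// `profilingWarmupCompiler` run must not trigger any compilation.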
8 | Compile / compile := {
9 | if (checkCompilerIsWarmedUp.value)
10 | sys.error("Compilation of files has been called again!")
11 | (Compile / compile).value
12 | }
13 |
--------------------------------------------------------------------------------
/sbt-plugin/src/sbt-test/compiler-profiling/warmup-60-seconds-compile/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("ch.epfl.scala" % "sbt-scalac-profiling" % sys.props.apply("plugin.version"))
2 |
--------------------------------------------------------------------------------
/sbt-plugin/src/sbt-test/compiler-profiling/warmup-60-seconds-compile/src/main/scala/Target.scala:
--------------------------------------------------------------------------------
1 | package shapeless {
2 | sealed trait HList extends Product with Serializable
3 |
4 | final case class ::[+H, +T <: HList](head: H, tail: T) extends HList {
5 | def ::[HH](h: HH): HH :: H :: T = shapeless.::(h, this)
6 |
7 | override def toString = head match {
8 | case _: ::[_, _] => "(" + head.toString + ") :: " + tail.toString
9 | case _ => head.toString + " :: " + tail.toString
10 | }
11 | }
12 |
13 | sealed trait HNil extends HList {
14 | def ::[H](h: H) = shapeless.::(h, this)
15 | override def toString = "HNil"
16 | }
17 |
18 | case object HNil extends HNil
19 |
20 | trait Selector[L <: HList, U] {
21 | def apply(l: L): U
22 | }
23 |
24 | object Selector {
25 | def apply[L <: HList, U](implicit selector: Selector[L, U]): Selector[L, U] = selector
26 |
27 | implicit def inHead[H, T <: HList]: Selector[H :: T, H] =
28 | new Selector[H :: T, H] {
29 | def apply(l: H :: T) = l.head
30 | }
31 |
32 | implicit def inTail[H, T <: HList, U](implicit st: Selector[T, U]): Selector[H :: T, U] =
33 | new Selector[H :: T, U] {
34 | def apply(l: H :: T) = st(l.tail)
35 | }
36 | }
37 | }
38 |
39 | import shapeless._
40 |
41 | object Test extends App {
42 | val sel = Selector[L, Boolean]
43 |
44 | type L =
45 | Int ::
46 | Int ::
47 | Int ::
48 | Int ::
49 | Int ::
50 | Int ::
51 | Int ::
52 | Int ::
53 | Int ::
54 | Int ::
55 | //
56 | Int ::
57 | Int ::
58 | Int ::
59 | Int ::
60 | Int ::
61 | Int ::
62 | Int ::
63 | Int ::
64 | Int ::
65 | Int ::
66 | //
67 | Int ::
68 | Int ::
69 | Int ::
70 | Int ::
71 | Int ::
72 | Int ::
73 | Int ::
74 | Int ::
75 | Int ::
76 | Int ::
77 | //
78 | Int ::
79 | Int ::
80 | Int ::
81 | Int ::
82 | Int ::
83 | Int ::
84 | Int ::
85 | Int ::
86 | Int ::
87 | Int ::
88 | //
89 | Int ::
90 | Int ::
91 | Int ::
92 | Int ::
93 | Int ::
94 | Int ::
95 | Int ::
96 | Int ::
97 | Int ::
98 | Int ::
99 | //
100 | Int ::
101 | Int ::
102 | Int ::
103 | Int ::
104 | Int ::
105 | Int ::
106 | Int ::
107 | Int ::
108 | Int ::
109 | Int ::
110 | //
111 | Int ::
112 | Int ::
113 | Int ::
114 | Int ::
115 | Int ::
116 | Int ::
117 | Int ::
118 | Int ::
119 | Int ::
120 | Int ::
121 | //
122 | Int ::
123 | Int ::
124 | Int ::
125 | Int ::
126 | Int ::
127 | Int ::
128 | Int ::
129 | Int ::
130 | Int ::
131 | Int ::
132 | //
133 | Int ::
134 | Int ::
135 | Int ::
136 | Int ::
137 | Int ::
138 | Int ::
139 | Int ::
140 | Int ::
141 | Int ::
142 | Int ::
143 | //
144 | Int ::
145 | Int ::
146 | Int ::
147 | Int ::
148 | Int ::
149 | Int ::
150 | Int ::
151 | Int ::
152 | Int ::
153 | Int ::
154 | //
155 | Int ::
156 | Int ::
157 | Int ::
158 | Int ::
159 | Int ::
160 | Int ::
161 | Int ::
162 | Int ::
163 | Int ::
164 | Int ::
165 | //
166 | Int ::
167 | Int ::
168 | Int ::
169 | Int ::
170 | Int ::
171 | Int ::
172 | Int ::
173 | Int ::
174 | Int ::
175 | Int ::
176 | //
177 | Int ::
178 | Int ::
179 | Int ::
180 | Int ::
181 | Int ::
182 | Int ::
183 | Int ::
184 | Int ::
185 | Int ::
186 | Int ::
187 | //
188 | Boolean ::
189 | HNil
190 | }
191 |
--------------------------------------------------------------------------------
/sbt-plugin/src/sbt-test/compiler-profiling/warmup-60-seconds-compile/test:
--------------------------------------------------------------------------------
1 | > profilingWarmupCompiler
2 | > set Compile / checkCompilerIsWarmedUp := true
3 | > profilingWarmupCompiler
4 |
--------------------------------------------------------------------------------
/sbt-plugin/src/sbt-test/compiler-profiling/warmup-60-seconds-test/build.sbt:
--------------------------------------------------------------------------------
1 | scalaVersion := "2.12.15"
2 | profilingWarmupDuration := 20
3 |
4 | ////////////////////////////////////////////////////////////
5 | val checkCompilerIsWarmedUp = settingKey[Boolean]("")
6 | Global / checkCompilerIsWarmedUp := false
7 |
8 | Test / compile := {
9 | if (checkCompilerIsWarmedUp.value)
10 | sys.error("Compilation of files has been called again!")
11 | (Test / compile).value
12 | }
13 |
--------------------------------------------------------------------------------
/sbt-plugin/src/sbt-test/compiler-profiling/warmup-60-seconds-test/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("ch.epfl.scala" % "sbt-scalac-profiling" % sys.props.apply("plugin.version"))
2 |
--------------------------------------------------------------------------------
/sbt-plugin/src/sbt-test/compiler-profiling/warmup-60-seconds-test/src/test/scala/Target.scala:
--------------------------------------------------------------------------------
1 | package shapeless {
2 | sealed trait HList extends Product with Serializable
3 |
4 | final case class ::[+H, +T <: HList](head: H, tail: T) extends HList {
5 | def ::[HH](h: HH): HH :: H :: T = shapeless.::(h, this)
6 |
7 | override def toString = head match {
8 | case _: ::[_, _] => "(" + head.toString + ") :: " + tail.toString
9 | case _ => head.toString + " :: " + tail.toString
10 | }
11 | }
12 |
13 | sealed trait HNil extends HList {
14 | def ::[H](h: H) = shapeless.::(h, this)
15 | override def toString = "HNil"
16 | }
17 |
18 | case object HNil extends HNil
19 |
20 | trait Selector[L <: HList, U] {
21 | def apply(l: L): U
22 | }
23 |
24 | object Selector {
25 | def apply[L <: HList, U](implicit selector: Selector[L, U]): Selector[L, U] = selector
26 |
27 | implicit def inHead[H, T <: HList]: Selector[H :: T, H] =
28 | new Selector[H :: T, H] {
29 | def apply(l: H :: T) = l.head
30 | }
31 |
32 | implicit def inTail[H, T <: HList, U](implicit st: Selector[T, U]): Selector[H :: T, U] =
33 | new Selector[H :: T, U] {
34 | def apply(l: H :: T) = st(l.tail)
35 | }
36 | }
37 | }
38 |
39 | import shapeless._
40 |
41 | object Test extends App {
42 | val sel = Selector[L, Boolean]
43 |
44 | type L =
45 | Int ::
46 | Int ::
47 | Int ::
48 | Int ::
49 | Int ::
50 | Int ::
51 | Int ::
52 | Int ::
53 | Int ::
54 | Int ::
55 | //
56 | Int ::
57 | Int ::
58 | Int ::
59 | Int ::
60 | Int ::
61 | Int ::
62 | Int ::
63 | Int ::
64 | Int ::
65 | Int ::
66 | //
67 | Int ::
68 | Int ::
69 | Int ::
70 | Int ::
71 | Int ::
72 | Int ::
73 | Int ::
74 | Int ::
75 | Int ::
76 | Int ::
77 | //
78 | Int ::
79 | Int ::
80 | Int ::
81 | Int ::
82 | Int ::
83 | Int ::
84 | Int ::
85 | Int ::
86 | Int ::
87 | Int ::
88 | //
89 | Int ::
90 | Int ::
91 | Int ::
92 | Int ::
93 | Int ::
94 | Int ::
95 | Int ::
96 | Int ::
97 | Int ::
98 | Int ::
99 | //
100 | Int ::
101 | Int ::
102 | Int ::
103 | Int ::
104 | Int ::
105 | Int ::
106 | Int ::
107 | Int ::
108 | Int ::
109 | Int ::
110 | //
111 | Int ::
112 | Int ::
113 | Int ::
114 | Int ::
115 | Int ::
116 | Int ::
117 | Int ::
118 | Int ::
119 | Int ::
120 | Int ::
121 | //
122 | Int ::
123 | Int ::
124 | Int ::
125 | Int ::
126 | Int ::
127 | Int ::
128 | Int ::
129 | Int ::
130 | Int ::
131 | Int ::
132 | //
133 | Int ::
134 | Int ::
135 | Int ::
136 | Int ::
137 | Int ::
138 | Int ::
139 | Int ::
140 | Int ::
141 | Int ::
142 | Int ::
143 | //
144 | Int ::
145 | Int ::
146 | Int ::
147 | Int ::
148 | Int ::
149 | Int ::
150 | Int ::
151 | Int ::
152 | Int ::
153 | Int ::
154 | //
155 | Int ::
156 | Int ::
157 | Int ::
158 | Int ::
159 | Int ::
160 | Int ::
161 | Int ::
162 | Int ::
163 | Int ::
164 | Int ::
165 | //
166 | Boolean ::
167 | HNil
168 | }
169 |
--------------------------------------------------------------------------------
/sbt-plugin/src/sbt-test/compiler-profiling/warmup-60-seconds-test/test:
--------------------------------------------------------------------------------
1 | > Test / profilingWarmupCompiler
2 | > set Test / checkCompilerIsWarmedUp := true
3 | > Test / profilingWarmupCompiler
4 |
--------------------------------------------------------------------------------
/website/core/Footer.js:
--------------------------------------------------------------------------------
1 | const React = require("react");
2 |
3 | const siteConfig = require(process.cwd() + "/siteConfig.js");
4 |
5 | class Footer extends React.Component {
6 | render() {
7 | const currentYear = new Date().getFullYear();
8 | const {
9 | copyright,
10 | colors: { secondaryColor }
11 | } = this.props.config;
12 | return (
13 |
64 | );
65 | }
66 | }
67 |
68 | module.exports = Footer;
69 |
--------------------------------------------------------------------------------
/website/core/GridBlock.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) 2017-present, Facebook, Inc.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | const React = require('react');
9 | const classNames = require('classnames');
10 |
11 | const MarkdownBlock = require('docusaurus/lib/core/MarkdownBlock.js');
12 |
13 | class GridBlock extends React.Component {
14 | renderBlock(origBlock) {
15 | const blockDefaults = {
16 | imageAlign: 'left',
17 | };
18 |
19 | const block = {
20 | ...blockDefaults,
21 | ...origBlock,
22 | };
23 |
24 | const blockClasses = classNames('blockElement', this.props.className, {
25 | alignCenter: this.props.align === 'center',
26 | alignRight: this.props.align === 'right',
27 | fourByGridBlock: this.props.layout === 'fourColumn',
28 | imageAlignSide:
29 | block.image &&
30 | (block.imageAlign === 'left' || block.imageAlign === 'right'),
31 | imageAlignTop: block.image && block.imageAlign === 'top',
32 | imageAlignRight: block.image && block.imageAlign === 'right',
33 | imageAlignBottom: block.image && block.imageAlign === 'bottom',
34 | imageAlignLeft: block.image && block.imageAlign === 'left',
35 | threeByGridBlock: this.props.layout === 'threeColumn',
36 | twoByGridBlock: this.props.layout === 'twoColumn',
37 | });
38 |
39 | const topLeftImage =
40 | (block.imageAlign === 'top' || block.imageAlign === 'left') &&
41 | this.renderBlockImage(block.image, block.imageLink, block.imageAlt, block.imageClassName);
42 |
43 | const bottomRightImage =
44 | (block.imageAlign === 'bottom' || block.imageAlign === 'right') &&
45 | this.renderBlockImage(block.image, block.imageLink, block.imageAlt, block.imageClassName);
46 |
47 | const extra = block.extra;
48 |
49 | return (
50 |