├── .gitignore
├── .jvmopts
├── .sbtopts
├── .travis.yml
├── LICENSE
├── README.md
├── Resources
│ ├── Benchmarks
│ │ ├── README.md
│ │ ├── States.scala
│ │ ├── StreamsBenchmarks.scala
│ │ ├── build.sbt
│ │ └── project
│ │ │ ├── build.properties
│ │ │ └── plugins.sbt
│ ├── Iterators.scala
│ ├── Lists.scala
│ ├── Options.scala
│ ├── Slides
│ │ └── Scalaxy-Streams Scala.io 2014.pdf
│ ├── Streams.scala
│ ├── build.sbt
│ ├── example1.scala
│ ├── example2.scala
│ ├── example3.scala
│ ├── example4.scala
│ ├── example5.scala
│ ├── exampleWiki.scala
│ ├── package.scala
│ ├── pom.xml
│ ├── releaseTest
│ │ ├── build.sbt
│ │ └── example.scala
│ ├── scripts
│ │ ├── analyze_logs.sh
│ │ ├── compile_scala.sh
│ │ └── self_optimize.sh
│ ├── test.scala
│ ├── test2.scala
│ └── wiki
│ │ ├── scalaxy_settings_eclipse.png
│ │ └── scalaxy_working_in_eclipse.png
├── build.sbt
├── circle.yml
├── project
│ ├── build.properties
│ └── plugins.sbt
└── src
├── main
├── resources
│ └── scalac-plugin.xml
└── scala
│ ├── HacksAndWorkarounds.scala
│ ├── Utils.scala
│ ├── package.scala
│ ├── strategy.scala
│ └── streams
│ ├── Blacklists.scala
│ ├── ConsoleReporters.scala
│ ├── Optimizations.scala
│ ├── OptionalSymbols.scala
│ ├── Reporters.scala
│ ├── ScalaJsSymbols.scala
│ ├── Strategies.scala
│ ├── StreamComponents.scala
│ ├── StreamInterruptors.scala
│ ├── StreamResults.scala
│ ├── StreamTransforms.scala
│ ├── Streams.scala
│ ├── WithLocalContext.scala
│ ├── WithMacroContext.scala
│ ├── WithRuntimeUniverse.scala
│ ├── flags.scala
│ ├── matchers
│ ├── Strippers.scala
│ ├── SymbolMatchers.scala
│ ├── TransformationClosures.scala
│ ├── TuploidValues.scala
│ ├── Tuploids.scala
│ ├── WhileLoops.scala
│ ├── legacy
│ │ ├── TreeBuilders.scala
│ │ └── TuploidAnalysis.scala
│ └── testing
│ │ └── WithTestFresh.scala
│ ├── ops
│ ├── ArrayOpsOps.scala
│ ├── ClosureStreamOps.scala
│ ├── CoerceOps.scala
│ ├── CollectOp.scala
│ ├── CountOps.scala
│ ├── ExistsOp.scala
│ ├── FilterOps.scala
│ ├── FindOps.scala
│ ├── FlatMapOps.scala
│ ├── FlattenOps.scala
│ ├── ForeachOps.scala
│ ├── IsEmptyOps.scala
│ ├── JsArrayOpsOps.scala
│ ├── MapOps.scala
│ ├── MkStringOps.scala
│ ├── OptionOps.scala
│ ├── ReductionOps.scala
│ ├── StreamOps.scala
│ ├── TakeDropOps.scala
│ ├── ToCollectionOps.scala
│ ├── WhileOps.scala
│ └── ZipWithIndexOps.scala
│ ├── plugin
│ ├── StreamsCompiler.scala
│ ├── StreamsComponent.scala
│ └── StreamsPlugin.scala
│ ├── sideeffects
│ ├── SideEffects.scala
│ ├── SideEffectsDetection.scala
│ ├── SideEffectsMessages.scala
│ └── SideEffectsWhitelists.scala
│ ├── sinks
│ ├── ArrayBuilderSinks.scala
│ ├── ArrayOpsSinks.scala
│ ├── BuilderSinks.scala
│ ├── CanBuildFromSinks.scala
│ ├── IteratorSinks.scala
│ ├── JsArrayBuilderSinks.scala
│ ├── ListBufferSinks.scala
│ ├── OptionSinks.scala
│ ├── SetBuilderSink.scala
│ ├── StreamSinks.scala
│ ├── UnusableSinks.scala
│ └── VectorBuilderSinks.scala
│ └── sources
│ ├── ArrayStreamSources.scala
│ ├── InlineRangeStreamSources.scala
│ ├── InlineSeqStreamSources.scala
│ ├── IteratorStreamSources.scala
│ ├── JsArrayStreamSources.scala
│ ├── ListStreamSources.scala
│ ├── OptionStreamSources.scala
│ └── StreamSources.scala
└── test
└── scala
├── AdHocManualTest.scala
├── IntegrationTests.scala
├── LoopsTest.scala
├── MacroIntegrationTest.scala
├── OptionStreamsTest.scala
├── Parallelized.scala
├── PluginCompilationTest.scala
├── SideEffectsTest.scala
├── StrategyTest.scala
├── StreamComponentsTestBase.scala
├── StreamsOutputNeedsTest.scala
├── StreamsTest.scala
├── SubTreeEvaluationOrderTest.scala
├── matchers
└── TransformationClosuresTest.scala
├── ops
├── ArrayOpsTest.scala
├── CoerceOpsTest.scala
├── FilterOpsTest.scala
├── FlatMapOpsTest.scala
├── ForeachOpsTest.scala
├── MapOpsTest.scala
├── ToCollectionOpsTest.scala
└── ZipWithIndexOpsTest.scala
├── performance
├── CollectionPerformanceTests.scala
├── PerformanceTestBase.scala
└── PerformanceTests.scala
└── sources
└── StreamSourcesTest.scala
/.gitignore:
--------------------------------------------------------------------------------
1 | target/
2 | *.class
3 | .ensime_lucene/
4 | tmp/
5 | *.log
6 |
--------------------------------------------------------------------------------
/.jvmopts:
--------------------------------------------------------------------------------
1 | -Dfile.encoding=UTF8
2 | -XX:MaxPermSize=512m
3 | -Xms1g
4 | -Xmx5g
5 | -Xss2m
6 | -XX:+CMSClassUnloadingEnabled
7 | -XX:+UseConcMarkSweepGC
8 |
--------------------------------------------------------------------------------
/.sbtopts:
--------------------------------------------------------------------------------
1 | -J-Xmx8G
2 | -J-XX:MaxMetaspaceSize=2G
3 | -J-XX:MaxPermSize=2G
4 | -J-XX:+CMSClassUnloadingEnabled
5 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: scala
2 | scala:
3 | - 2.11.7
4 | - 2.12.0-M2
5 | jdk:
6 | - oraclejdk8
7 | git:
8 | depth: 1
9 | cache:
10 | directories:
11 | - $HOME/.m2
12 | - $HOME/.ivy2
13 | - $HOME/.sbt
14 |
15 | env:
16 | global:
17 | - secure: EiIU/od4+xs+UYNI+an212eR9P4aks1wp6fd3f1xRW5D/KCTDHcXRnpHsiiZM3+A9klVqTODdV1RDThUo8wcSro4IGjbTY3TniRjzYJnLOyuZFIGJWwN++lnepn/DwFk3Ckr661BcdIS0+5TybOD4ZzkcUOreU/0q/1qd4uqTRU=
18 | - secure: DNSLyto/JVJTLMsBxrxK0C0B1/bdHZrDmKxr2ZTgEE6HFTZOLnAB39MD3RS6rnPotZcffdwraBTjYwH21Z8G0byquDyYR0khKdACsW77vy9ZBkNa6YUwlrUov9t2TfdU83QFvjoe8TCBmNqiQqkgB62YO7XS1wIe+pXeFbAqe/U=
19 |
20 | script:
21 | - "[[ $TRAVIS_BRANCH == \"master\" ]] && export CMDS=\"test publish\" || export CMDS=\"test\""
22 | - "echo \"Commands = $CMDS\""
23 | - sbt ++$TRAVIS_SCALA_VERSION 'set javaOptions += "-Djunit.parallel.threads=2"' $CMDS
24 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | SCALAXY LICENSE
2 |
3 | Copyright (c) 2012-2015 Olivier Chafik, unless otherwise specified.
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
7 |
8 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
9 |
10 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
11 |
12 | 3. Neither the name of Scalaxy nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
13 |
14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
15 |
--------------------------------------------------------------------------------
/Resources/Benchmarks/README.md:
--------------------------------------------------------------------------------
1 | These jmh-based benchmarks are not (yet) a real benchmark suite. To test performance, run:
2 |
3 | SCALAXY_TEST_PERF=1 sbt "project scalaxy-streams" "; clean ; ~test"
4 |
5 | (hopefully the perf tests will all be migrated to using jmh eventually, but I might not have enough time to do it myself)
6 |
--------------------------------------------------------------------------------
/Resources/Benchmarks/States.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams.benchmark.jmh
2 |
3 | import java.util.concurrent.TimeUnit
4 |
5 | import org.openjdk.jmh.annotations._
6 |
7 | trait BaseState {
8 | @Param(Array("1000", "1000000"))
9 | var size: Int = 0
10 | }
11 |
12 | @State(Scope.Thread)
13 | class ArrayState extends BaseState {
14 | var intArray: Array[Int] = _
15 | var tup2Array: Array[(Int, Int)] = _
16 | var tup3Array: Array[(Int, Int, Int)] = _
17 |
18 | @Setup
19 | def init {
20 | intArray = Array.tabulate(size)(i => i)
21 | tup2Array = Array.tabulate(size)(i => (i, i * 10))
22 | tup3Array = Array.tabulate(size)(i => (i, i * 10, i * 100))
23 | }
24 | }
25 |
26 | @State(Scope.Thread)
27 | class ListState extends BaseState {
28 | var intList: List[Int] = _
29 | var tup2List: List[(Int, Int)] = _
30 | var tup3List: List[(Int, Int, Int)] = _
31 |
32 | @Setup
33 | def init {
34 | intList = Array.tabulate(size)(i => i).toList
35 | tup2List = Array.tabulate(size)(i => (i, i * 10)).toList
36 | tup3List = Array.tabulate(size)(i => (i, i * 10, i * 100)).toList
37 | }
38 | }
39 |
40 | @State(Scope.Thread)
41 | class RangeState extends BaseState
42 |
--------------------------------------------------------------------------------
/Resources/Benchmarks/StreamsBenchmarks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams.benchmark.jmh
2 |
3 | import java.util.concurrent.TimeUnit
4 |
5 | import org.openjdk.jmh.annotations._
6 |
7 | import scalaxy.streams.optimize
8 |
9 | /**
10 | * http://java-performance.info/introduction-jmh-profilers/
11 | * -prof cl,comp
12 | * -prof HS_GC
13 | */
14 | @BenchmarkMode(Array(Mode.AverageTime))
15 | @OutputTimeUnit(TimeUnit.MICROSECONDS)
16 | class ArrayBenchmarks
17 | {
18 | @Benchmark
19 | def intArrayForeach(state: ArrayState) = {
20 | var tot = 0L; { for (v <- state.intArray) tot += v };
21 | tot
22 | }
23 | @Benchmark
24 | def intArrayForeach_optimized(state: ArrayState) = {
25 | var tot = 0L; optimize { for (v <- state.intArray) tot += v };
26 | tot
27 | }
28 |
29 | @Benchmark
30 | def intArrayZippedWithIndexForeach(state: ArrayState) = {
31 | var tot = 0L; { for ((v, i) <- state.intArray.zipWithIndex) tot += v + i };
32 | tot
33 | }
34 | @Benchmark
35 | def intArrayZippedWithIndexForeach_optimized(state: ArrayState) = {
36 | var tot = 0L; optimize { for ((v, i) <- state.intArray.zipWithIndex) tot += v + i };
37 | tot
38 | }
39 |
40 | // @Benchmark
41 | // def tup2ArrayForeach(state: ArrayState) = {
42 | // var tot = 0L; { for ((a, b) <- state.tup2Array) tot += a + b };
43 | // tot
44 | // }
45 | // @Benchmark
46 | // def tup2ArrayForeach_optimized(state: ArrayState) = {
47 | // var tot = 0L; optimize { for ((a, b) <- state.tup2Array) tot += a + b };
48 | // tot
49 | // }
50 | }
51 |
--------------------------------------------------------------------------------
/Resources/Benchmarks/build.sbt:
--------------------------------------------------------------------------------
1 | scalaVersion := "2.11.6"
2 |
3 | jmhSettings
4 |
5 | autoCompilerPlugins := true
6 |
7 | // addCompilerPlugin("com.nativelibs4java" %% "scalaxy-fastcaseclasses" % "0.4-SNAPSHOT")
8 |
9 | // scalacOptions += "-Xplugin-require:scalaxy-fastcaseclasses"
10 |
11 | // addCompilerPlugin("com.nativelibs4java" %% "scalaxy-streams" % "0.4-SNAPSHOT")
12 |
13 | // scalacOptions += "-Xplugin-require:scalaxy-streams"
14 |
15 | libraryDependencies += "com.nativelibs4java" %% "scalaxy-streams" % "0.4-SNAPSHOT"
16 |
17 | scalacOptions ++= Seq("-optimise", "-Yclosure-elim", "-Yinline")
18 |
19 | // scalacOptions += "-Xprint:cleanup"
20 |
21 | // Scalaxy snapshots are published on the Sonatype repository.
22 | resolvers += Resolver.sonatypeRepo("snapshots")
23 |
--------------------------------------------------------------------------------
/Resources/Benchmarks/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=0.13.1
2 |
--------------------------------------------------------------------------------
/Resources/Benchmarks/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.1.6")
2 |
--------------------------------------------------------------------------------
/Resources/Iterators.scala:
--------------------------------------------------------------------------------
1 | import scalaxy.streams.optimize
2 |
3 | object Iterators_Opt extends App {
4 |
5 | val n = 10000000
6 | val a: Array[Int] = Array.tabulate(n)(i => i * i)
7 |
8 | while (true) optimize {
9 | println(
10 | (for ((v, i) <- a.zipWithIndex; if i % 2 == 0) yield v * v - i).sum)
11 | }
12 | }
13 |
14 | object Iterators_NotOpt extends App {
15 |
16 | val n = 10000000
17 | val a: Array[Int] = Array.tabulate(n)(i => i * i)
18 |
19 | while (true) {
20 | println(
21 | (for ((v, i) <- a.toIterator.zipWithIndex; if i % 2 == 0) yield v * v - i).sum)
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Resources/Lists.scala:
--------------------------------------------------------------------------------
1 | import scalaxy.streams.optimize
2 |
3 | object Lists_Opt extends App {
4 |
5 | val n = 10000000
6 | val a: List[Int] = Array.tabulate(n)(i => i * i).toList
7 |
8 | while (true) optimize {
9 | println(
10 | (for ((v, i) <- a.zipWithIndex; if i % 2 == 0) yield v * v - i).sum)
11 | }
12 | }
13 |
14 | object Lists_NotOpt extends App {
15 |
16 | val n = 10000000
17 | val a: List[Int] = Array.tabulate(n)(i => i * i).toList
18 |
19 | while (true) {
20 | println(
21 | (for ((v, i) <- a.zipWithIndex; if i % 2 == 0) yield v * v - i).sum)
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Resources/Options.scala:
--------------------------------------------------------------------------------
1 | import scalaxy.streams.optimize
2 |
3 | object Options_Opt extends App {
4 |
5 | val n = 10000000
6 | val a: Array[Option[Int]] = Array.tabulate(n)(i => if (i % 2 == 0) None else Some(i * i))
7 |
8 | while (true) optimize {
9 | println(
10 | (for ((o, i) <- a.zipWithIndex; v <- o) yield v * v - i).sum)
11 | }
12 | }
13 |
14 | object Options_NotOpt extends App {
15 |
16 | val n = 10000000
17 | val a: Array[Option[Int]] = Array.tabulate(n)(i => if (i % 2 == 0) None else Some(i * i))
18 |
19 | while (true) {
20 | println(
21 | (for ((o, i) <- a.toIterator.zipWithIndex; v <- o) yield v * v - i).sum)
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Resources/Slides/Scalaxy-Streams Scala.io 2014.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nativelibs4java/scalaxy-streams/ef48ae2d652eb718ac6dd4b916f96fdba7a3804a/Resources/Slides/Scalaxy-Streams Scala.io 2014.pdf
--------------------------------------------------------------------------------
/Resources/Streams.scala:
--------------------------------------------------------------------------------
1 | import scalaxy.streams.optimize
2 |
3 | object Arrays_Opt extends App {
4 |
5 | val n = 10000000
6 | val a: Array[Int] = Array.tabulate(n)(i => i * i)
7 |
8 | while (true) optimize {
9 | println(
10 | (for ((v, i) <- a.zipWithIndex; if i % 2 == 0) yield v * v - i).sum)
11 | }
12 | }
13 |
14 | object Arrays_NotOpt extends App {
15 |
16 | val n = 10000000
17 | val a: Array[Int] = Array.tabulate(n)(i => i * i)
18 |
19 | while (true) {
20 | println(
21 | (for ((v, i) <- a.zipWithIndex; if i % 2 == 0) yield v * v - i).sum)
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Resources/build.sbt:
--------------------------------------------------------------------------------
1 | scalaVersion := "2.11.6"
2 |
3 | autoCompilerPlugins := true
4 |
5 | addCompilerPlugin("com.nativelibs4java" %% "scalaxy-streams" % "0.4-SNAPSHOT")
6 |
7 | libraryDependencies += "com.nativelibs4java" %% "scalaxy-streams" % "0.4-SNAPSHOT" % "provided"
8 |
9 | // addCompilerPlugin("com.nativelibs4java" %% "scalaxy-streams" % "0.3.4")
10 |
11 | scalacOptions += "-Xplugin-require:scalaxy-streams"
12 |
13 | scalacOptions ++= Seq("-optimise", "-Yclosure-elim", "-Yinline")
14 |
15 | // Scalaxy/Streams snapshots are published on the Sonatype repository.
16 | resolvers += Resolver.sonatypeRepo("snapshots")
17 |
--------------------------------------------------------------------------------
/Resources/example1.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | object Example1 extends App {
4 | // import scalaxy.loops.optimize
5 |
6 | val n = 10
7 |
8 | // optimize { 10 }
9 |
10 | {
11 | for (i <- 0 to n) {
12 | println(i)
13 | }
14 | }
15 |
16 | println {
17 | for (i <- 0 to n) yield {
18 | i + 2
19 | }
20 | }
21 |
22 | println {
23 | for (i <- 0 to n; if i % 2 == 1) yield {
24 | i + 2
25 | }
26 | }
27 |
28 | println {
29 | for (i <- 0 to n; j <- i to 1 by -1; if i % 2 == 1) yield { i + j }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/Resources/example2.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | object Example2 {
4 | def main(args: Array[String]): Unit = {
5 | // val n = 20;
6 | // println {
7 | // for (i <- 0 to n;
8 | // ii = i * i;
9 | // j <- i to n;
10 | // jj = j * j;
11 | // if (ii - jj) % 2 == 0;
12 | // k <- (i + j) to n)
13 | // yield { (ii, jj, k) }
14 | // }
15 |
16 |
17 | // val col = (0 to 10).toList
18 | // val arr = col.map(_ * 2).toArray
19 | //(0 to 10).map(_ * 2).toList
20 | //
21 | // (0 to 10).map(_ * 2).toArray
22 |
23 | // println((0 to 1000).filter(v => (v % 2) == 0).map(_ * 2).toArray.toSeq)
24 |
25 | //println(List(1, 2, 3).flatMap(v => List(v, v + 1)).count(_ % 2 == 0))
26 | // println(List(1, 2) collect {
27 | // case x if x > 1 =>
28 | // x - 1
29 | // case x =>
30 | // x
31 | // } map (_ + 1))
32 |
33 | import scalaxy.streams.optimize
34 |
35 | optimize {
36 | case class Foo(i: Int)
37 | val arr = new Array[Foo](5)
38 | for (Foo(i) <- arr) yield i
39 | }
40 |
41 | // val value = List(1, 2)
42 |
43 | // Option(value) collect {
44 | // case List(a, b) =>
45 | // print(a)
46 | // case Nil =>
47 | // print("haa")
48 | // }
49 |
50 | // val a = Array(1, 2)
51 | // val b = new collection.mutable.ArrayBuilder.ofInt()
52 | // var i = 0
53 | // val n = a.length
54 | // while (i < n) {
55 | // val item = a(i)
56 | // var found = false
57 | // var value = 0
58 | // item match {
59 | // case x if x > 1 =>
60 | // found = true;
61 | // value = x - 1;
62 | // case x =>
63 | // found = true;
64 | // value = x;
65 | // }
66 | // if (found) {
67 | // b += value
68 | // }
69 | // i += 1
70 | // }
71 |
72 | // println(List(1, 2, 3).flatMap(v => List(v * 2, v * 2 + 1).map(_ + 1)).count(_ % 2 == 0))
73 | // println(List(1, 2, 3).flatMap(v => List(v * 2, v * 2 + 1)).dropWhile(_ < 3))
74 | // println(List(1, 2, 3).flatMap(v => List(v * 2, v * 2 + 1).map(_ + 1)).dropWhile(_ < 3))
75 |
76 | // (0 to 10).toList
77 | // val n = 5
78 | // println(for (i <- 0 until n; v <- (i to 0 by -1).toArray; j <- 0 until v) yield {
79 | // (i, v, j)
80 | // })
81 | // print(arr.mkString)
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/Resources/example3.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | /*
4 | ~/bin/scala-2.11.0-M8/bin/scalac -Xprint:delambdafy -optimise -Yclosure-elim -Yinline examples/example3.scala
5 | */
6 | // import scalaxy.loops._
7 |
8 | object Example3 {
9 | def main(args: Array[String]) {
10 | val n = 20;
11 | for (v <- 0 to n) yield v
12 |
13 | println(Array(1, 3, 4).map(_ + 1).map(_ * 2))
14 |
15 | val o = Option(10)
16 | for (oo <- o; if oo < 10) {
17 | println(oo)
18 | }
19 |
20 | for (oo <- Option(10); if oo < 10) {
21 | println(oo)
22 | }
23 | println(Option("a").map(_ + " b"))
24 | // for (v <- 0 to n) println(v)
25 | // optimized {
26 | // }
27 | // for ((v, i) <- (0 to n).zipWithIndex) {
28 | // println(v + i)
29 | // }
30 | // println((1 to n).map(_ * 2).toList)
31 | // println((1 to n).map(_ * 2).toArray)
32 | // println((1 to n).map(_ * 2).toSet)
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/Resources/example4.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | // run examples/example4.scala -Xprint:scalaxy-streams
4 | object Example4 extends App {
5 | // println(for (p <- (20 until 0 by -2).zipWithIndex) yield p.toString)
6 |
7 | // println(Some(1).map(_ * 2).filter(_ < 3))
8 | //(1 to 3).map(_ * 2).filter(_ < 3)
9 | // Seq(0, 1, 2, 3).map(_ * 2).filter(_ < 3)
10 | // val s = "1"
11 | // Option(s).flatMap(s => Option(s).filter(_ != null))
12 |
13 | // val array = (1 to 3).map(i => (i, i * 10)).toArray
14 | // for (((x, y), i) <- array.zipWithIndex; if (x + y) % 2 == 0) { println(s"array[$i] = ($x, $y)") }
15 |
16 | // val n = 20;
17 | // println(
18 | // for (i <- 0 to n;
19 | // ii = i * i;
20 | // j <- i to n;
21 | // jj = j * j;
22 | // if (ii - jj) % 2 == 0;
23 | // k <- (i + j) to n)
24 | // yield { (ii, jj, k) })
25 | // for ((v, i) <- (20 until 0 by -2).zipWithIndex) yield (v + i)
26 | val start = 10
27 | val end = 100
28 | println(
29 | (for (i <- start to end by 2) yield
30 | (() => (i * 2))
31 | ).map(_())
32 | )
33 | }
34 |
--------------------------------------------------------------------------------
/Resources/example5.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | object Example5 {
4 | def doIt(name: String) {
5 | val n = Option(name).filter(_.startsWith("_")).orNull
6 | for (nn <- Option(n)) {
7 | println(nn)
8 | }
9 |
10 | print(List(1, 2, 3).flatMap(x => List(x + 1, x + 2)).filter(_ < 2).mkString)
11 | // 1 1 3 1 1 (1)
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/Resources/exampleWiki.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | object WikiTest extends App {
4 | // For instance, given the following array:
5 | val array = if (args.length == 0) Array(1, 2, 3, 4) else args.map(_.toInt)
6 |
7 | // The following for comprehension:
8 | for ((item, i) <- array.zipWithIndex; if item % 2 == 0) {
9 | println(s"array[$i] = $item")
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Resources/package.scala:
--------------------------------------------------------------------------------
1 | package object example {
2 | // implicit val strategy = scalaxy.streams.strategy.aggressive
3 | }
4 |
--------------------------------------------------------------------------------
/Resources/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4.0.0
4 | com.nativelibs4java
5 | scalaxy-streams-example
6 | 1.0-SNAPSHOT
7 |
8 |
9 | 2.11.1
10 |
11 |
12 |
13 |
14 | org.scala-lang
15 | scala-library
16 | ${scala.version}
17 |
18 |
30 |
31 |
32 |
33 | .
34 |
35 |
36 |
37 |
40 | net.alchim31.maven
41 | scala-maven-plugin
42 | 3.1.6
43 |
44 |
45 |
46 | compile
47 | testCompile
48 |
49 |
50 |
51 |
52 |
53 |
54 | com.nativelibs4java
55 | scalaxy-streams_2.11
56 |
59 | 0.3-SNAPSHOT
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 | org.scala-tools
71 | maven-scala-plugin
72 | 2.15.2
73 |
74 |
75 |
76 |
77 |
78 |
79 | sonatype-oss-public
80 | Sonatype Snapshots
81 | https://oss.sonatype.org/content/groups/public/
82 |
83 |
84 |
85 |
86 | snapshots-repo
87 | https://oss.sonatype.org/content/repositories/snapshots
88 | false
89 | true
90 |
91 |
92 |
93 |
94 |
95 |
--------------------------------------------------------------------------------
/Resources/releaseTest/build.sbt:
--------------------------------------------------------------------------------
1 | scalaVersion := "2.11.4"
2 |
3 | //autoCompilerPlugins := true
4 |
5 | // addCompilerPlugin("com.nativelibs4java" %% "scalaxy-streams" % "0.3.4")
6 |
7 | // scalacOptions += "-Xplugin-require:scalaxy-streams"
8 | //
9 | // scalacOptions += "-Xprint:scalaxy-streams"
10 |
11 | // scalacOptions += "-Xprint:patmat"
12 |
13 | libraryDependencies += "com.nativelibs4java" %% "scalaxy-streams" % "0.3.4" % "provided"
14 |
15 | scalacOptions ++= Seq("-optimise", "-Yclosure-elim", "-Yinline")
16 |
17 | // Scalaxy/Streams snapshots are published on the Sonatype repository.
18 | // resolvers += Resolver.sonatypeRepo("snapshots")
19 | // resolvers += Resolver.sonatypeRepo("releases")
20 |
21 | //resolvers += Resolver.defaultLocal
22 |
23 | resolvers += "New Release" at "https://oss.sonatype.org/content/repositories/comnativelibs4java-1027/"
24 |
--------------------------------------------------------------------------------
/Resources/releaseTest/example.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | object Example1 extends App {
4 | import scalaxy.streams.optimize
5 |
6 | // import scalaxy.streams.strategy.safer
7 | // import scalaxy.streams.strategy.safe
8 | // import scalaxy.streams.strategy.aggressive
9 | import scalaxy.streams.strategy.safe
10 |
11 | // val n = 10
12 |
13 | // def outsider[A](a: A) = a
14 |
15 | // optimize {
16 | // // println((0 to 10).map(i => (i, i)))
17 |
18 | // (0 to n)
19 | // .foreach(i => {
20 | // println(i)
21 | // })
22 | // // print((0 to 10).toList)
23 | // // print((0 to 10).map(outsider))
24 | // // print((0 to 10).map(outsider).map(_ + 2))
25 | // // print((0 to 10).map(outsider).map(_.toString + new Object().toString).map(outsider))
26 | // }
27 |
28 |
29 | case class Ident(name: String)
30 | def example {
31 | val a = new Array[Ident](10);
32 | optimize
33 | {
34 | for (Ident(name) <- a) {
35 | println(name)
36 | }
37 | }
38 | }
39 | example
40 | }
41 |
--------------------------------------------------------------------------------
/Resources/scripts/analyze_logs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # Just pipe compilation logs (with SCALAXY_STREAMS_VERBOSE=1) into this.
4 | #
5 | # SCALAXY_STREAMS_VERBOSE=1 sbt clean compile | ./Resources/scripts/analyze_logs.sh
6 | #
7 | set -e
8 |
9 | grep "\[Scalaxy\] Optimized stream " | \
10 | sed 's/.*Optimized stream //' | sed 's/ (strategy:.*//' | \
11 | sort | uniq -c | sort -n
12 |
--------------------------------------------------------------------------------
/Resources/scripts/compile_scala.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # ./Resources/scripts/compile_scala.sh
4 | #
5 | set -e
6 |
7 | declare plugin_jar
8 | if [[ "$#" -eq 0 ]]; then
9 | sbt package
10 | plugin_jar=`ls ${PWD}/target/scala-2.11/scalaxy-streams_2.11-*.jar`
11 | else
12 | plugin_jar=$1
13 | shift 1
14 | fi
15 |
16 | declare use_plugin
17 | if [[ -z "${plugin_jar}" ]]; then
18 | use_plugin=0
19 | else
20 | [[ -f "${plugin_jar}" ]] || ( echo "${plugin_jar} not found" && exit 1 )
21 | use_plugin=1
22 | echo "Plugin: ${plugin_jar}"
23 | fi
24 |
25 | declare -a libs=( "$@" )
26 | if [[ "${#libs[@]}" -eq 0 ]]; then
27 | libs+=( library reflect compiler )
28 | fi
29 |
30 | readonly SCALA_BRANCH=2.11.x
31 | readonly SCALA_DIR="${HOME}/.scala-${SCALA_BRANCH}"
32 |
33 | if [[ -d "${SCALA_DIR}" ]]; then
34 | cd "${SCALA_DIR}"
35 | git pull
36 | else
37 | git clone --depth 1 --single-branch -b "${SCALA_BRANCH}" \
38 | git@github.com:scala/scala.git "${SCALA_DIR}"
39 | cd "${SCALA_DIR}"
40 | fi
41 |
42 | echo "Scala directory: ${SCALA_DIR}"
43 |
44 | # Build locker
45 | ant build locker.unlock
46 |
47 | # Make sure we're rebuilding quick for each of the required libs:
48 | for lib in "${libs[@]}"; do
49 | echo "Cleaning 'quick' artifacts of ${lib}"
50 | rm -fR build/quick/${lib}.complete build/quick/classes/${lib}/
51 | done
52 |
53 | # Build quick with the Scalaxy/Stream plugin:
54 | # (note: there are a couple of lingering problematic rewrites that must be skipped)
55 | declare -a exceptions=(
56 | InlineExceptionHandlers.scala
57 | Typers.scala
58 | LambdaLift.scala
59 | )
60 |
61 | if (( use_plugin )); then
62 | readonly SCALAC_ARGS="-Xplugin-require:scalaxy-streams -Xplugin:${plugin_jar}"
63 |
64 | echo "Compiling ${libs[*]} with the plugin"
65 | SCALAXY_STREAMS_SKIP=$(IFS=,; echo "${exceptions[*]}") \
66 | SCALAXY_STREAMS_VERY_VERBOSE=1 \
67 | ant "-Dscalac.args=${SCALAC_ARGS}" \
68 | build 2>&1 | \
69 | tee ant_build_scalaxy_streams.log
70 | else
71 | echo "Compiling ${libs[*]} without the plugin"
72 | ant build
73 | fi
74 |
75 | if [[ "${TEST}" == "1" ]]; then
76 | SCALAXY_STREAMS_VERY_VERBOSE=1 \
77 | ant "-Dscalac.args=${SCALAC_ARGS}" \
78 | "-Dpartest.scalac_opts=${SCALAC_ARGS}" \
79 | test 2>&1 | \
80 | tee ant_test_scalaxy_streams.log
81 | fi
82 |
--------------------------------------------------------------------------------
/Resources/scripts/self_optimize.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # Build and test a self-optimized version of Scalaxy/Streams.
4 | #
5 | # To also publish: ./Resources/scripts/self_optimize.sh publish
6 | #
7 | set -e
8 |
9 | # Note: these are scala expressions
10 | readonly GROUP_AND_NAME_EXPR='"com.nativelibs4java" %% "scalaxy-streams"'
11 | readonly OPTIMIZED_NAME_EXPR='"scalaxy-streams-experimental-self-optimized"'
12 | readonly FAKE_VERSION_EXPR='"0-SNAPSHOT"'
13 |
14 | # Publish a reversioned artifact locally.
15 | sbt "set version := ${FAKE_VERSION_EXPR}" \
16 | clean publish-local
17 |
18 | # Rebrand the project to allow self-dependency.
19 | # Recompile in aggressive mode, but don't run the tests yet
20 | # (otherwise their strategy would be forced to aggressive too).
21 | SCALAXY_STREAMS_STRATEGY=aggressive \
22 | SCALAXY_STREAMS_VERY_VERBOSE=1 \
23 | sbt "set name := ${OPTIMIZED_NAME_EXPR}" \
24 | "set addCompilerPlugin(${GROUP_AND_NAME_EXPR} % ${FAKE_VERSION_EXPR})" \
25 | clean compile 2>&1 | tee self_optimization.log
26 |
27 | # Now run the tests and any other command passed in
28 | sbt "set name := ${OPTIMIZED_NAME_EXPR}" \
29 | "set addCompilerPlugin(${GROUP_AND_NAME_EXPR} % ${FAKE_VERSION_EXPR})" \
30 | test "$@"
31 |
--------------------------------------------------------------------------------
/Resources/test.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | object Test {
4 | def main(args: Array[String]) {
5 | val arrays = Array(Array(1, 2), Array(3, 4))
6 |
7 | for (array <- arrays;
8 | length = array.length * 30;
9 | if length < 10;
10 | v <- array)
11 | yield
12 | (length, v)
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/Resources/test2.scala:
--------------------------------------------------------------------------------
1 | package example
2 |
3 | import scala.reflect.ClassTag
4 | object Test2 {
5 | def main(args: Array[String]) {
6 | val arrays = Array(Array(1, 2), Array(3, 4))
7 |
8 |
9 | var i = 0
10 | val length1 = arrays.length
11 | val b = Array.canBuildFrom[(Int, Int)](implicitly[ClassTag[(Int, Int)]])()
12 | while (i < length1) {
13 | val array = arrays(i)
14 | val length = array.length
15 | if (length < 10) {
16 | var j = 0
17 | val length2 = array.length
18 | while (j < length2) {
19 | val v = array(j)
20 | b += ((length, v))
21 | j += 2
22 | }
23 | }
24 | i += 1
25 | }
26 | b.result()
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/Resources/wiki/scalaxy_settings_eclipse.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nativelibs4java/scalaxy-streams/ef48ae2d652eb718ac6dd4b916f96fdba7a3804a/Resources/wiki/scalaxy_settings_eclipse.png
--------------------------------------------------------------------------------
/Resources/wiki/scalaxy_working_in_eclipse.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nativelibs4java/scalaxy-streams/ef48ae2d652eb718ac6dd4b916f96fdba7a3804a/Resources/wiki/scalaxy_working_in_eclipse.png
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | name := "scalaxy-streams"
2 |
3 | organization := "com.nativelibs4java"
4 |
5 | version := "0.4-SNAPSHOT"
6 |
7 | scalaVersion := "2.11.7"
8 |
9 | resolvers += Resolver.sonatypeRepo("snapshots")
10 |
11 | libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)
12 |
13 | libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-reflect" % _)
14 |
15 | libraryDependencies ++= Seq(
16 | "org.scala-js" %% "scalajs-library" % "0.6.5" % "test",
17 | "junit" % "junit" % "4.12" % "test",
18 | "com.novocode" % "junit-interface" % "0.11" % "test"
19 | )
20 |
21 | testOptions in Global += Tests.Argument(TestFrameworks.JUnit, "-v")
22 |
23 | fork in Test := true
24 |
25 | // Needed to avoid cryptic EOFException crashes in forked tests
26 | // in Travis with `sudo: false`.
27 | // See https://github.com/sbt/sbt/issues/653
28 | // and https://github.com/travis-ci/travis-ci/issues/3775
29 | javaOptions += "-Xmx4G"
30 |
31 | scalacOptions ++= Seq(
32 | "-encoding", "UTF-8",
33 | "-deprecation", "-feature", "-unchecked",
34 | "-optimise", "-Yclosure-elim", "-Yinline",
35 | "-YclasspathImpl:flat",
36 | "-Xlog-free-types"
37 | )
38 |
39 | watchSources <++= baseDirectory map { path => (path / "examples" ** "*.scala").get }
40 |
41 | scalacOptions in console in Compile <+= (packageBin in Compile) map("-Xplugin:" + _)
42 |
43 | scalacOptions in console in Compile ++= Seq(
44 | "-Xplugin-require:scalaxy-streams",
45 | "-Xprint:scalaxy-streams"
46 | )
47 |
48 | // ScalariformKeys.preferences := {
49 | // import scalariform.formatter.preferences._
50 | // FormattingPreferences()
51 | // .setPreference(AlignParameters, true)
52 | // .setPreference(AlignSingleLineCaseStatements, true)
53 | // .setPreference(CompactControlReadability, true)
54 | // .setPreference(DoubleIndentClassDeclaration, true)
55 | // .setPreference(IndentSpaces, 2)
56 | // .setPreference(IndentWithTabs, false)
57 | // .setPreference(MultilineScaladocCommentsStartOnFirstLine, true)
58 | // .setPreference(PreserveDanglingCloseParenthesis, true)
59 | // }
60 | // scalariformSettings
61 |
62 | homepage := Some(url("https://github.com/nativelibs4java/scalaxy-streams"))
63 |
pomExtra := (
  <scm>
    <url>git@github.com:nativelibs4java/scalaxy-streams.git</url>
    <connection>scm:git:git@github.com:nativelibs4java/scalaxy-streams.git</connection>
  </scm>
  <developers>
    <developer>
      <id>ochafik</id>
      <name>Olivier Chafik</name>
      <url>http://ochafik.com/</url>
    </developer>
  </developers>
)
77 |
78 | licenses := Seq("BSD-3-Clause" -> url("http://www.opensource.org/licenses/BSD-3-Clause"))
79 |
80 | pomIncludeRepository := { _ => false }
81 |
82 | publishMavenStyle := true
83 |
84 | publishTo <<= version { (v: String) =>
85 | val nexus = "https://oss.sonatype.org/"
86 | if (v.trim.endsWith("-SNAPSHOT"))
87 | Some("snapshots" at nexus + "content/repositories/snapshots")
88 | else
89 | Some("releases" at nexus + "service/local/staging/deploy/maven2")
90 | }
91 |
92 | credentials ++= (for {
93 | username <- Option(System.getenv("SONATYPE_USERNAME"));
94 | password <- Option(System.getenv("SONATYPE_PASSWORD"))
95 | } yield Credentials(
96 | "Sonatype Nexus Repository Manager",
97 | "oss.sonatype.org", username, password
98 | )).toSeq
99 |
--------------------------------------------------------------------------------
/circle.yml:
--------------------------------------------------------------------------------
1 | machine:
2 | environment:
3 | SBT_OPTS: -Xmx2G -XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=2G -Xss2M
4 |
5 | dependencies:
6 | cache_directories:
7 | - ~/.m2
8 | - ~/.ivy2
9 | - ~/.sbt
10 | - ~/.scala-2.11.x
11 |
12 | test:
13 | override:
14 | - ./Resources/scripts/compile_scala.sh
15 | #- sbt 'set javaOptions += "-Djunit.parallel.threads=2"' 'set parallelExecution in Test := false' test
16 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=0.13.7
2 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | // See: http://www.scala-sbt.org/0.13/docs/Resolvers.html
2 |
3 | resolvers += Resolver.typesafeRepo("releases")
4 |
5 | addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.3.0")
6 |
7 | addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.2")
8 |
9 | addSbtPlugin("org.ensime" % "ensime-sbt-cmd" % "0.1.4")
10 |
--------------------------------------------------------------------------------
/src/main/resources/scalac-plugin.xml:
--------------------------------------------------------------------------------
<plugin>
  <name>scalaxy-streams</name>
  <classname>scalaxy.streams.StreamsPlugin</classname>
</plugin>
--------------------------------------------------------------------------------
/src/main/scala/HacksAndWorkarounds.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.reflect.macros.blackbox.Context
4 |
private[scalaxy] object HacksAndWorkarounds
{
  /**
   * Reflectively invokes `method` on `obj` with `args`, bypassing visibility
   * restrictions on internal compiler classes (e.g. `Symbol.owner_=`).
   *
   * Scala encodes `=` in method names as `$eq`, hence the name rewrite.
   *
   * Fails with `sys.error` when the method is absent and — previously an
   * unhandled MatchError — when several same-named overloads make the call
   * ambiguous.
   */
  def call(obj: Any, method: String, args: Any*): Any = {
    val cls = obj.getClass
    val ms = cls.getMethods
    val name = method.replace("=", "$eq")
    ms.filter(_.getName == name) match {
      case Array(m) =>
        m.invoke(obj, args.map(_.asInstanceOf[Object]).toArray: _*)
      case Array() =>
        // mkString is safe on empty arrays, unlike the previous `reduce`.
        sys.error(s"No method $name in $cls:\n\t${ms.map(_.getName).sorted.mkString("\n")}")
      case overloads =>
        // Was a non-exhaustive match (MatchError); report ambiguity explicitly.
        sys.error(s"Ambiguous method $name in $cls: ${overloads.length} overloads found")
    }
  }

  /**
   * Reassigns to `newOwner` every symbol in `tree` whose owner is
   * `deletedOwner`, entering definition symbols into the new owner's scope
   * when that scope is still empty. Works around owner-chain corruption
   * after tree surgery. Returns the traversed tree.
   */
  def replaceDeletedOwner(u: scala.reflect.api.Universe)
      (tree: u.Tree, deletedOwner: u.Symbol, newOwner: u.Symbol) = {
    import u._
    val dup = tree // TODO: tree.duplicate (but does it keep positions??)

    new Traverser {
      override def traverse(tree: Tree) {
        for (sym <- Option(tree.symbol); if sym != NoSymbol) {
          if (sym.owner == deletedOwner) {
            assert(sym != deletedOwner)
            assert(newOwner != sym)
            assert(newOwner != sym.owner)

            call(sym, "owner_=", newOwner)
            if (tree.isInstanceOf[DefTreeApi]) {
              val decls = newOwner.info.decls
              // Hack: detect an empty scope via its string form before
              // entering the symbol into it.
              if (decls.toString == "[]") {
                call(decls, "enter", sym)
              }
            }
          }
        }
        super.traverse(tree)
      }
    } traverse dup

    dup
  }

  // TODO(ochafik): Remove this!
  /** Unsafe cast helper used to paper over dependent-type mismatches. */
  def cast[A](a: Any): A = a.asInstanceOf[A]
}
65 |
--------------------------------------------------------------------------------
/src/main/scala/Utils.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.language.postfixOps
4 | import scala.language.implicitConversions
5 |
6 | //private[streams]
// Small reflection helpers shared by the stream-transformation mixins.
// `global` is the reflection universe (macro-time or runtime) being used.
trait Utils {
  val global: scala.reflect.api.Universe
  import global._
  import definitions._

  private[streams] lazy val EmptyName = TermName("")

  // Generic one-result extractor interface, used for loose coupling between
  // mixins (a trait can require an extractor without knowing its implementation).
  trait Extractor[From, To] {
    def unapply(from: From): Option[To]
  }

  // Matches any non-null symbol, extracting its name as a String.
  // private[streams]
  object S {
    def unapply(symbol: Symbol) = Option(symbol).map(_.name.toString)
  }

  // Name pattern/factory: `N("foo")` both matches Names spelled "foo"
  // (via unapply) and builds the corresponding TermName (via apply()).
  class N(val s: String) {
    def unapply(n: Name): Boolean = n.toString == s
    def apply() = TermName(s)
  }
  object N {
    def apply(s: String) = new N(s)
    def unapply(n: Name): Option[String] = Option(n).map(_.toString)
  }
  implicit def N2TermName(n: N) = n()

  // Matches trees whose symbol is the `scala.Predef` module.
  object Predef {
    private[this] lazy val PredefModule =
      rootMirror.staticModule("scala.Predef")

    def unapply(tree: Tree): Boolean =
      tree.symbol == PredefModule
  }

  // None-on-sentinel converters (NoSymbol / EmptyTree / empty name -> None).
  implicit class SymbolExtensions(s: Symbol) {
    def asOption: Option[Symbol] =
      if (s == NoSymbol) None else Some(s)
  }
  implicit class TreeExtensions(t: Tree) {
    def asOption: Option[Tree] =
      if (t == EmptyTree) None else Some(t)
  }
  implicit class NameExtensions(n: TermName) {
    def asOption: Option[TermName] =
      if (n.toString == "") None else Some(n)
  }

  // Evaluates `v`, returning None (and printing the stack trace in debug
  // mode) if it throws anything.
  private[streams] def trySome[T](v: => T): Option[T] =
    try {
      Some(v)
    } catch { case ex: Throwable =>
      if (flags.debug)
        ex.printStackTrace()
      None
    }

  // Same as trySome, for computations that already return an Option.
  private[streams] def tryOrNone[T](v: => Option[T]): Option[T] =
    try {
      v
    } catch { case ex: Throwable =>
      if (flags.debug)
        ex.printStackTrace()
      None
    }

  // Placeholder statement for spots where a non-empty tree is required.
  private[streams] def dummyStatement(fresh: String => TermName) =
    q"val ${fresh("dummy")} = null"

  // Option(tpe).map(_.dealias.widen).orNull
  // private[streams]
  // Dealiases `tpe`, widening singleton types and de-consting constant
  // types; returns null for a null input.
  def normalize(tpe: Type): Type = Option(tpe).map(_.dealias) collect {
    case t @ SingleType(_, _) =>
      t.widen
    case t @ ConstantType(_) =>
      /// There's no `deconst` in the api (only in internal). Work around it:
      t.typeSymbol.asType.toType
    case t =>
      t
  } orNull

  // Best-effort type of a tree: its assigned type, else its symbol's signature.
  private[streams] def findType(tree: Tree): Option[Type] =
    Option(tree.tpe).orElse(Option(tree.symbol).map(_.typeSignature))
      .filter(_ != NoType)
      .map(normalize)

  // Predicate over the primitive numeric types (a Set is a Type => Boolean).
  lazy val isPrimitiveNumeric: Type => Boolean =
    Set(ByteTpe, IntTpe, ShortTpe, LongTpe, FloatTpe, DoubleTpe)

  // JVM default value for `tpe` (0 / false / '\u0000'); null for reference types.
  def getDefaultValue(tpe: Type): Any = normalize(tpe) match {
    case IntTpe => 0
    case BooleanTpe => false
    case ByteTpe => 0: Byte
    case ShortTpe => 0: Short
    case CharTpe => '\u0000'
    case LongTpe => 0L
    case FloatTpe => 0.0f
    case DoubleTpe => 0.0
    case s => null
  }

  // Literal tree holding the default value of `tpe`; reference types get a
  // typed `null.asInstanceOf[tpe]` so the tree keeps the right type.
  def getDefaultValueTree(tpe: Type): Tree = {
    Option(getDefaultValue(tpe))
      .map(v => Literal(Constant(v)))
      .getOrElse(q"null.asInstanceOf[$tpe]")
  }

  // Declares a private[this] var initialized to `rhs`, or to the type's
  // default value when no rhs is given.
  private[streams] def newVar(name: TermName, tpe: Type, rhs: Tree = EmptyTree): ValDef = {
    // Note: null.asInstanceOf[T] would work in almost all cases as well.
    val ntpe = normalize(tpe)
    q"private[this] var $name: $ntpe = ${rhs.orElse(getDefaultValueTree(ntpe))}"
  }
}
119 |
--------------------------------------------------------------------------------
/src/main/scala/package.scala:
--------------------------------------------------------------------------------
1 | package scalaxy
2 |
3 | import scala.language.reflectiveCalls
4 |
5 | import scala.language.experimental.macros
6 | import scala.language.implicitConversions
7 |
8 | import scala.reflect.NameTransformer
9 | import scala.reflect.macros.blackbox.Context
10 |
11 | import scala.reflect.runtime.{ universe => ru }
12 |
13 | import scalaxy.streams.HacksAndWorkarounds.cast
14 |
// Public entry point: `optimize { ... }` rewrites supported collection
// pipelines inside the block at compile time (see impl below).
package object streams {
  def optimize[A](a: A): A = macro impl.recursivelyOptimize[A]
}
18 |
19 | package streams
20 | {
// Macro implementations behind `scalaxy.streams.optimize`.
object impl
{
  // Optimizes `a`, recursively revisiting subtrees for nested streams.
  def recursivelyOptimize[A : c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
    optimize[A](c)(a, recurse = true)
  }

  // Optimizes only the outermost stream in `a`.
  def optimizeTopLevelStream[A : c.WeakTypeTag](c: Context)(a: c.Expr[A]): c.Expr[A] = {
    optimize[A](c)(a, recurse = false)
  }

  // Shared driver: wires the macro context into the stream-transforms cake
  // and rewrites `a.tree`. Falls back to the original expression on error.
  private[streams] def optimize[A : c.WeakTypeTag](c: Context)(a: c.Expr[A], recurse: Boolean): c.Expr[A] = {

    if (flags.disabled) {
      a
    } else {
      object Optimize extends StreamTransforms with WithMacroContext with Optimizations {
        override val context = c
        import global._

        // Strategy: explicit flag if set, else the implicit
        // OptimizationStrategy in scope at the macro call site.
        val strategy = matchStrategyTree(
          tpe => c.inferImplicitValue(
            tpe.asInstanceOf[c.Type],
            pos = a.tree.pos.asInstanceOf[c.Position]
          ).asInstanceOf[Tree]
        )

        val result = try {

          c.internal.typingTransform(cast(a.tree))((tree_, api) => {
            val tree: Tree = cast(tree_)

            // TODO(ochafik): Remove these warts (needed because of dependent types mess).
            def apiDefault(tree: Tree): Tree = cast(api.default(cast(tree)))
            def apiRecur(tree: Tree): Tree =
              if (recurse)
                cast(api.recur(cast(tree)))
              else
                tree
            def apiTypecheck(tree: Tree): Tree = cast(api.typecheck(cast(tree)))

            def opt(tree: Tree) =
              transformStream(
                tree = tree,
                strategy = strategy,
                fresh = c.freshName(_),
                currentOwner = cast(api.currentOwner),
                recur = apiRecur,
                typecheck = apiTypecheck)

            // Try to rewrite the tree itself; failing that (in recursive
            // mode), transform its children first and retry on the result.
            val result = opt(tree).getOrElse {
              if (recurse) {
                val sup = apiDefault(tree)
                opt(sup).getOrElse(sup)
              } else {
                tree
              }
            }

            // println(s"result = $result")
            result.asInstanceOf[c.universe.Tree]
          })
        } catch {
          case ex: Throwable =>
            // Deliberate best-effort: log and keep the unoptimized tree
            // rather than failing the user's compilation.
            logException(cast(a.tree.pos), ex)
            a.tree
        }
      }

      c.Expr[A](Optimize.result)
    }
  }
}
93 | }
94 |
--------------------------------------------------------------------------------
/src/main/scala/strategy.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams;
2 |
// How much semantic risk a strategy tolerates when rewriting streams.
private[streams] sealed trait SafetyCriteria
private[streams] object SafetyCriteria {
  case object Safe extends SafetyCriteria          // strict side-effect analysis
  case object ProbablySafe extends SafetyCriteria  // optimistic side-effect analysis
  case object Unsafe extends SafetyCriteria        // side-effect analysis ignored
}

// Whether a rewrite is performed only when expected to be faster.
private[streams] sealed trait SpeedupCriteria
private[streams] object SpeedupCriteria {
  case object Never extends SpeedupCriteria
  case object OnlyWhenFaster extends SpeedupCriteria
  case object AlwaysEvenIfSlower extends SpeedupCriteria
}

// A named combination of safety and speedup criteria; concrete instances
// are the implicit case objects declared in `object strategy`.
sealed trait OptimizationStrategy {
  def name: String
  def safety: SafetyCriteria
  def speedup: SpeedupCriteria
  // e.g. "scalaxy.streams.strategy.safe"
  def fullName = getClass.getPackage.getName + ".strategy." + name
}

private[streams] abstract class OptimizationStrategyImpl(
  override val name: String,
  override val safety: SafetyCriteria,
  override val speedup: SpeedupCriteria)
    extends OptimizationStrategy
29 |
/**
 * Example:
 *   import scalaxy.streams.strategy.aggressive
 *   for (x <- Array(1, 2, 3); y = x * 10; z = y + 2) print(z)
 */
object strategy {
  implicit case object none extends OptimizationStrategyImpl(
    name = "none",
    safety = SafetyCriteria.Safe,
    speedup = SpeedupCriteria.Never)

  /** Performs optimizations that don't alter any Scala semantics, using strict
   * side-effect detection. */
  implicit case object safer extends OptimizationStrategyImpl(
    name = "safer",
    safety = SafetyCriteria.Safe,
    speedup = SpeedupCriteria.OnlyWhenFaster)

  /** Performs optimizations that don't alter any Scala semantics, using reasonably
   * optimistic side-effect detection (for instance, assumes hashCode / equals / toString
   * are side-effect-free for all objects). */
  implicit case object safe extends OptimizationStrategyImpl(
    name = "safe",
    safety = SafetyCriteria.ProbablySafe,
    speedup = SpeedupCriteria.OnlyWhenFaster)

  /** Performs optimizations that don't alter any Scala semantics, using reasonably
   * optimistic side-effect detection (for instance, assumes hashCode / equals / toString
   * are side-effect-free for all objects), but performing rewrites that could be
   * counter-productive / slower.
   *
   * Makes sure all possible lambdas are rewritten away. */
  implicit case object zealous extends OptimizationStrategyImpl(
    name = "zealous",
    safety = SafetyCriteria.ProbablySafe,
    speedup = SpeedupCriteria.AlwaysEvenIfSlower)

  /** Performs unsafe rewrites, ignoring side-effect analysis (which may
   * alter the semantics of the code, but only performing rewrites that are known to
   * be faster. */
  implicit case object aggressive extends OptimizationStrategyImpl(
    name = "aggressive",
    safety = SafetyCriteria.Unsafe,
    speedup = SpeedupCriteria.OnlyWhenFaster)

  /** Performs all possible rewrites, even those known to be slower or unsafe. */
  implicit case object foolish extends OptimizationStrategyImpl(
    name = "foolish",
    safety = SafetyCriteria.Unsafe,
    speedup = SpeedupCriteria.AlwaysEvenIfSlower)

  implicit val default: OptimizationStrategy = safe

  // Bug fix: `zealous` was missing from this list, so forName("zealous")
  // (and hence selecting it by environment flag) failed.
  private[this] val strategies =
    List(none, safer, safe, zealous, aggressive, foolish)

  private[this] val strategyByName: Map[String, OptimizationStrategy] =
    strategies.map(s => (s.name, s)).toMap

  /** Resolves a strategy by simple name; None for null, empty or unknown
   *  names (unknown names previously threw a NoSuchElementException). */
  def forName(name: String): Option[OptimizationStrategy] =
    if (name == null || name == "") None
    else strategyByName.get(name)

  /** Process-wide strategy: the explicit flag if set, otherwise `default`. */
  private[streams] lazy val global: OptimizationStrategy =
    flags.strategy.getOrElse(default)
}
96 |
--------------------------------------------------------------------------------
/src/main/scala/streams/Blacklists.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.annotation.tailrec
4 |
trait Blacklists extends Reporters {
  val global: scala.reflect.api.Universe
  import global._

  private[this] val SCALAXY_STREAMS_SKIP_ENV_NAME = "SCALAXY_STREAMS_SKIP"

  private[this] val SCALAXY_STREAMS_SKIP_PROPERTY_NAME = "scalaxy.streams.skip"

  /**
   * Skip patterns parsed from $SCALAXY_STREAMS_SKIP (or, failing that, the
   * scalaxy.streams.skip system property): a comma-separated list of
   * fileName[:symbolName][:line] entries, each parsed into
   * (fileName, optional symbol name, optional line).
   *
   * Entries with more than three `:`-separated segments previously crashed
   * trait initialization with a MatchError (the match was non-exhaustive);
   * `collect` now silently ignores such malformed entries.
   */
  private[this] val patterns =
    Option(System.getenv(SCALAXY_STREAMS_SKIP_ENV_NAME))
      .getOrElse(System.getProperty(SCALAXY_STREAMS_SKIP_PROPERTY_NAME, ""))
      .split(",")
      .map(_.split(":")) collect {
        case Array(name, symbol, lineStr) =>
          (name, Some(symbol), Some(lineStr.toInt))
        case Array(name, symbolOrLine) =>
          // A numeric second segment is a line number, otherwise a symbol name.
          try {
            (name, None, Some(symbolOrLine.toInt))
          } catch { case ex: Throwable =>
            (name, Some(symbolOrLine), None)
          }
        case Array(name) =>
          (name, None, None)
      }

  /**
   * Walks up the owner chain to the closest "interesting" symbol: a
   * non-synthetic named term (method / val / module) or a class.
   */
  @tailrec
  private[this] def enclosingSymbol(sym: Symbol): Option[Symbol] = {
    def isNamed(tsym: TermSymbol) =
      tsym.isMethod || tsym.isVal || tsym.isModule

    if (sym == NoSymbol) {
      None
    } else if (!sym.isSynthetic &&
        sym.isTerm && isNamed(sym.asTerm) ||
        sym.isType && sym.asType.isClass) {
      // NOTE(review): `&&` binds tighter than `||`, so the !isSynthetic check
      // only guards the term branch and synthetic classes are accepted —
      // confirm that's intended.
      Some(sym)
    } else {
      enclosingSymbol(sym.owner)
    }
  }

  /** True when (file name, enclosing symbol, line) of `pos` matches a pattern. */
  def isBlacklisted(pos: Position, currentOwner: Symbol): Boolean = {
    // TODO: optimize this (indexing patterns by file name).
    val fileName = pos.source.file.path.split("/").last
    lazy val enclosingSymbolName = enclosingSymbol(currentOwner).map(_.name.toString)
    val line: Int = pos.line
    patterns.exists({
      case (name, symbolOpt, lineOpt) if name == fileName =>
        symbolOpt.forall(s => enclosingSymbolName.contains(s)) &&
        lineOpt.forall(_ == line)

      case _ =>
        false
    })
  }
}
62 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ConsoleReporters.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
// Reporters implementation that prints every diagnostic to standard output.
trait ConsoleReporters extends Reporters
{
  val global: scala.reflect.api.Universe
  import global._

  /** Prints one diagnostic line as "[level] message (position)". */
  def log(level: String, pos: Position, msg: String) {
    val line = "[" + level + "] " + msg + " (" + pos + ")"
    println(line)
  }

  override def info(pos: Position, msg: String, force: Boolean) {
    log("info", pos, msg)
  }

  override def warning(pos: Position, msg: String) {
    log("warning", pos, msg)
  }

  override def error(pos: Position, msg: String) {
    log("error", pos, msg)
  }
}
21 |
--------------------------------------------------------------------------------
/src/main/scala/streams/Optimizations.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] object Optimizations
{
  /** Prefix shared by all Scalaxy diagnostics. */
  def messageHeader = "[Scalaxy] "

  /** Message reported when a stream was successfully rewritten. */
  def optimizedStreamMessage(streamDescription: String, strategy: OptimizationStrategy): String =
    s"${messageHeader}Optimized stream $streamDescription (strategy: ${strategy.name})"
}
12 |
private[streams] trait Optimizations extends OptionalSymbols
{
  val global: scala.reflect.api.Universe
  import global._

  // Resolved optionally so the plugin also works when the scalaxy-streams
  // runtime classes are not on the compilation classpath.
  private[this] lazy val OptimizationStrategySymOpt =
    optionalStaticClass("scalaxy.streams.OptimizationStrategy")

  /**
   * Determines the effective optimization strategy: an explicit flag wins,
   * otherwise the implicit OptimizationStrategy inferred at the call site,
   * otherwise the global default.
   *
   * NOTE(review): the trailing `.get` throws if the inferred implicit's name
   * is not a registered strategy — presumably unreachable since instances
   * are the `strategy.*` case objects; confirm.
   */
  def matchStrategyTree(inferImplicitValue: Type => Tree): OptimizationStrategy =
  {
    flags.strategy.getOrElse {
      // Best-effort implicit search; any failure degrades to EmptyTree.
      val optimizationStrategyValue: Tree =
        try {
          OptimizationStrategySymOpt
            .map(sym => inferImplicitValue(sym.asType.toType))
            .getOrElse(EmptyTree)
        } catch {
          case ex: Throwable =>
            ex.printStackTrace()
            EmptyTree
        }

      optimizationStrategyValue match {
        case EmptyTree =>
          scalaxy.streams.strategy.global

        case strategyTree =>
          scalaxy.streams.strategy.forName(strategyTree.symbol.name.toString).get
      }
    }
  }
}
45 |
--------------------------------------------------------------------------------
/src/main/scala/streams/OptionalSymbols.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
// Best-effort lookups of classes / modules that may be absent from the
// classpath (e.g. Scala.js types); failures yield None instead of throwing.
private[streams] trait OptionalSymbols extends Reporters
{
  val global: scala.reflect.api.Universe
  import global._

  // Shared lookup wrapper: evaluates `resolve`, warning (in debug mode only)
  // and returning None on any failure.
  private[this] def lookup(kind: String, name: String)(resolve: => Symbol): Option[Symbol] =
    try {
      Option(resolve)
    } catch {
      case ex: Throwable =>
        if (flags.debug)
          warning(NoPosition, s"Failed to get optional $kind $name: $ex")
        None
    }

  def optionalStaticClass(name: String): Option[Symbol] =
    lookup("class", name)(rootMirror.staticClass(name))

  def optionalStaticModule(name: String): Option[Symbol] =
    lookup("module", name)(rootMirror.staticModule(name))
}
29 |
--------------------------------------------------------------------------------
/src/main/scala/streams/Reporters.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
// Abstract diagnostics sink; implemented e.g. by ConsoleReporters or by a
// compiler-reporter-backed mixin.
trait Reporters {
  val global: scala.reflect.api.Universe
  import global._

  def info(pos: Position, msg: String, force: Boolean = true): Unit
  def warning(pos: Position, msg: String): Unit
  def error(pos: Position, msg: String): Unit

  /** Reports `ex` as a warning; prints the stack trace only in very-verbose mode. */
  def logException(pos: Position, ex: Throwable) = {
    // Fixed typo in the user-visible message: "ocurred" -> "occurred".
    warning(pos, Optimizations.messageHeader + "An exception occurred: " + ex)
    if (flags.veryVerbose) {
      ex.printStackTrace()
    }
  }
}
18 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ScalaJsSymbols.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
// Optional handles on Scala.js types, present only when compiling against
// the scalajs-library (hence the optionalStatic* lookups).
private[streams] trait ScalaJsSymbols extends OptionalSymbols
{
  val global: scala.reflect.api.Universe
  import global._

  private[streams] lazy val JsArraySymOpt =
    optionalStaticClass("scala.scalajs.js.Array")

  private[streams] lazy val JsArrayOpsSymOpt =
    optionalStaticClass("scala.scalajs.js.ArrayOps")

  private[this] lazy val JsAnyModuleOpt =
    optionalStaticModule("scala.scalajs.js.Any")

  /** Matches trees referring to the `js.Any` companion module. */
  object JsAny {
    def unapply(tree: Tree): Boolean =
      JsAnyModuleOpt.contains(tree.symbol)
  }
}
22 |
--------------------------------------------------------------------------------
/src/main/scala/streams/StreamComponents.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
// Core vocabulary of the rewriter: a stream is a source, a list of ops and
// a sink, each of which knows how to emit its share of the rewritten loop.
private[streams] trait StreamComponents
  extends StreamResults
  with SideEffects
{
  val global: scala.reflect.api.Universe
  import global._

  // The remaining ops of a stream, each paired with the value paths it needs.
  type OpsAndOutputNeeds = List[(StreamOp, OutputNeeds)]

  // Common interface of stream sources, ops and sinks.
  trait StreamComponent
  {
    // Human-readable fragment for optimization messages; None to stay silent.
    def describe: Option[String]

    def sinkOption: Option[StreamSink]

    /**
     * Subtrees that are kept as-is in the rewrites.
     * For instance, a foldLeft's seed, or a mkString's separator.
     */
    def preservedSubTrees: List[Tree] = subTrees

    /**
     * Any sub-tree of the component (closures, normal preserved arguments...).
     */
    def subTrees: List[Tree]

    def lambdaCount: Int = 0

    // Side effects of each closure, one inner list per lambda.
    def closureSideEffectss: List[List[SideEffect]] = Nil

    // Emits the rewritten code for this component, delegating to nextOps.
    def emit(input: StreamInput,
             outputNeeds: OutputNeeds,
             nextOps: OpsAndOutputNeeds): StreamOutput

    // Emits the next op of the chain, optionally guarding its body behind a
    // coercion-success test var.
    protected def emitSub(
        input: StreamInput,
        nextOps: OpsAndOutputNeeds,
        coercionSuccessVarRef: Option[Tree] = None): StreamOutput =
    {
      nextOps match {
        case (firstOp, outputNeeds) :: otherOpsAndOutputNeeds =>
          val sub =
            firstOp.emit(input, outputNeeds, otherOpsAndOutputNeeds)
          coercionSuccessVarRef match {
            case Some(varRef) =>
              sub.copy(body = List(q"""
                if ($varRef) {
                  ..${sub.body};
                }
              """))

            case _ =>
              sub
          }

        case Nil =>
          sys.error("Cannot call base emit at the end of an ops stream.")
      }
    }
  }

  trait StreamSource extends StreamComponent

  trait StreamOp extends StreamComponent
  {
    // True for ops that may stop the generated loop early.
    def canInterruptLoop: Boolean = false
    // True when the op may change the number of elements.
    def canAlterSize: Boolean
    def isPassThrough = false

    // Given the paths needed downstream, returns the paths needed upstream.
    def transmitOutputNeedsBackwards(paths: Set[TuploidPath]): Set[TuploidPath]
  }

  trait StreamSink extends StreamOp
  {
    /** Sometimes, a sink may not be emit-able because it's not implemented, but can appear in intermediate ops (as long as it's followed by an implemented sink. */
    def isImplemented = true

    /** If true, this sink is skipped unless it's at the end of the stream, i.e. after all ops. */
    def isFinalOnly: Boolean = false
    /** Sinks are "neutral" and chainable / elidable by default, except for scalar sinks. */
    def canBeElided = true
    def isJustAWrapper: Boolean = false
    override def canAlterSize = false
    override def sinkOption = Some(this)

    def outputNeeds: Set[TuploidPath] = Set(RootTuploidPath)

    // A sink's needs are fixed: downstream must ask for exactly them (or nothing).
    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) = {
      val needs = outputNeeds
      require(paths.isEmpty || paths == needs)

      needs
    }

    // Sanity checks shared by sink implementations: a sink must come last
    // and must be given exactly its declared output needs.
    def requireSinkInput(input: StreamInput,
                         outputNeeds: OutputNeeds,
                         nextOps: OpsAndOutputNeeds) {
      require(nextOps.isEmpty,
        "Cannot chain ops through a sink (got nextOps = " + nextOps + ")")
      require(outputNeeds == this.outputNeeds,
        "Expected outputNeeds " + this.outputNeeds + " for sink, got " + outputNeeds)
    }
  }

  // An op that emits no code of its own and just delegates to the next op.
  trait PassThroughStreamOp extends StreamOp {

    override def isPassThrough = true

    override def describe: Option[String] = None

    override def canAlterSize = false

    override def subTrees = Nil

    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) = paths

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      // NOTE(review): `var` looks unintended here (`val` would do), and the
      // destructuring throws a MatchError when nextOps is empty — presumably
      // a pass-through op is never the last component; confirm.
      var (nextOp, nextOutputNeeds) :: subsequentOps = nextOps
      nextOp.emit(input, nextOutputNeeds, subsequentOps)
    }
  }

  // Allow loose coupling between sources, ops and sinks traits:
  val SomeStreamSource: Extractor[Tree, StreamSource]
  val SomeStreamOps: Extractor[Tree, (Tree, List[StreamOp])]
  val SomeStreamSink: Extractor[Tree, (Tree, StreamSink)]

  // Name-based extractor result (isEmpty/get/_1/_2 make it usable in
  // patterns without allocating an Option).
  case class ExtractedStreamOp(target: Tree, op: StreamOp) {
    def isEmpty: Boolean = target == null && op == null
    def get: ExtractedStreamOp = this
    def _1: Tree = target
    def _2: StreamOp = op
  }

  lazy val NoExtractedStreamOp = ExtractedStreamOp(null, null)

  trait StreamOpExtractor {
    def unapply(tree: Tree): ExtractedStreamOp
  }

  // Debug helper: prints the class names of a chain of ops.
  private[streams] def printOps(ops: List[StreamOp]) {
    println(s"ops = " + ops.map(_.getClass.getSimpleName).mkString("\n\t"))
    // println(s"ops = " + ops.mkString("\n\t"))
  }
}
151 |
--------------------------------------------------------------------------------
/src/main/scala/streams/StreamInterruptors.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
// Builds the loop-continuation machinery used when some op in the stream
// can interrupt iteration (find, exists, ...).
private[streams] trait StreamInterruptors
  extends StreamComponents
  with CoerceOps
{
  val global: scala.reflect.api.Universe
  import global._

  class StreamInterruptor(input: StreamInput, nextOps: OpsAndOutputNeeds)
  {
    import input.{ fresh, typed }

    private[this] val continue = fresh("continue")

    // Typechecks a two-statement block and destructures it to recover the
    // typed `continue` var definition and a typed reference to it.
    private[this] val Block(List(
      continueVarDef),
      continueVarRef) = typed(q"""
        private[this] var $continue = true;
        $continue
      """)

    // The continue var is only needed if a downstream op can interrupt.
    private[this] val needsContinue = nextOps.exists(_._1.canInterruptLoop)

    // Var that loop bodies set to false to stop iteration, if any; an
    // interruptor inherited from the input takes precedence over a new one.
    val loopInterruptor: Option[Tree] = input.loopInterruptor orElse {
      if (needsContinue) Some(continueVarRef) else None
    }

    // (statements to declare before the loop, loop-condition test tree).
    val (defs, test) =
      if (!input.loopInterruptor.isEmpty) {
        (Seq(), input.loopInterruptor.get)
      } else {
        if (needsContinue)
          (Seq(continueVarDef), continueVarRef)
        else
          (Seq(), q"true")
      }

    // ANDs `condition` with the continuation test, dropping a literal `true`.
    def composeTest(condition: Tree) = test match {
      case q"true" =>
        condition
      case _ =>
        q"$test && $condition"
    }
  }
}
47 |
--------------------------------------------------------------------------------
/src/main/scala/streams/StreamResults.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
// Data carriers exchanged between stream components during emission.
private[streams] trait StreamResults extends TuploidValues {
  val global: scala.reflect.api.Universe
  import global._

  // Value paths a component must make available to its downstream ops.
  type OutputNeeds = Set[TuploidPath]

  /** Code fragments produced when emitting a stream, kept in emission order. */
  case class StreamOutput(
      prelude: List[Tree] = Nil,
      beforeBody: List[Tree] = Nil,
      body: List[Tree] = Nil,
      afterBody: List[Tree] = Nil,
      ending: List[Tree] = Nil)
  {
    /** All fragments, concatenated in order. */
    def flatten: List[Tree] =
      List(prelude, beforeBody, body, afterBody, ending).flatten

    /** Typechecks the concatenated fragments as a single block. */
    def compose(typed: Tree => Tree) = {
      val statements = flatten
      typed(q"..$statements")
    }

    /** Applies `f` to every fragment, preserving the section structure. */
    def map(f: Tree => Tree): StreamOutput =
      StreamOutput(
        prelude map f,
        beforeBody map f,
        body map f,
        afterBody map f,
        ending map f)
  }

  val NoStreamOutput = StreamOutput()

  /** Everything a component needs in order to emit its rewritten code. */
  case class StreamInput(
      vars: TuploidValue[Tree],
      outputSize: Option[Tree] = None,
      index: Option[Tree] = None,
      loopInterruptor: Option[Tree] = None,
      fresh: String => TermName,
      transform: Tree => Tree,
      currentOwner: Symbol,
      typed: Tree => Tree)
}
64 |
--------------------------------------------------------------------------------
/src/main/scala/streams/StreamTransforms.scala:
--------------------------------------------------------------------------------
package scalaxy.streams

// private[scalaxy]
trait StreamTransforms
  extends Streams
  with StreamSources
  with StreamSinks
  with StreamOps
  with Strategies
  with Reporters
  with Blacklists
{
  import global._

  /** Logs `msg` at `tree`'s position unless quiet mode is set; forced when verbose. */
  private[this] def verboseInfo(tree: Tree, msg: => String): Unit = {
    if (!flags.quiet) {
      info(tree.pos, msg, force = flags.verbose)
    }
  }

  /**
   * Transforms a stream if it can, or returns None if it can't.
   *
   * Recurses into the stream's sub-trees with `recur`.
   *
   * @param tree         candidate tree; only trees matching `SomeStream` are rewritten
   * @param strategy     strategy used to decide whether the stream is worth optimizing
   * @param fresh        fresh-name generator for synthesized locals
   * @param currentOwner symbol that will own the rewritten trees
   * @param recur        callback used to transform nested sub-trees
   * @param typecheck    type-checker applied to emitted trees
   * @return the rewritten tree, or None when the stream has known issues, is
   *         blacklisted, is not worth optimizing, or emission failed
   */
  def transformStream(tree: Tree,
                      strategy: OptimizationStrategy,
                      fresh: String => String,
                      currentOwner: Symbol,
                      recur: Tree => Tree,
                      typecheck: Tree => Tree): Option[Tree] = {
    tree match {
      case tree @ SomeStream(stream) =>
        if (hasKnownLimitationOrBug(stream)) {
          if (flags.veryVerbose) {
            verboseInfo(
              tree,
              Optimizations.messageHeader + s"Stream ${stream.describe()} has known limitations or bugs with strategy $strategy")
          }
          None
        } else {
          if (isBlacklisted(tree.pos, currentOwner)) {
            verboseInfo(
              tree,
              Optimizations.messageHeader + s"Skipped stream ${stream.describe()}")

            None
          } else if (isWorthOptimizing(stream, strategy)) {
            verboseInfo(
              tree,
              Optimizations.optimizedStreamMessage(stream.describe(), strategy))

            try {
              val result: Tree = stream
                .emitStream(
                  n => TermName(fresh(n)),
                  recur,
                  currentOwner = currentOwner,
                  typed = typecheck)
                .compose(typecheck)

              if (flags.debug) {
                verboseInfo(
                  tree,
                  Optimizations.messageHeader + s"Result for ${stream.describe()} (owner: ${currentOwner.fullName}):\n$result")
              }
              Some(result)

            } catch {
              // NOTE(review): deliberately catches Throwable (not just NonFatal):
              // optimization appears to be best-effort, and returning None lets
              // the caller keep the original, unoptimized tree. Narrowing this
              // to NonFatal would change fallback behavior — confirm before doing so.
              case ex: Throwable =>
                logException(tree.pos, ex)
                None
            }
          } else {
            if (flags.veryVerbose && !stream.isDummy && !flags.quietWarnings) {
              verboseInfo(
                tree,
                Optimizations.messageHeader + s"Stream ${stream.describe()} is not worth optimizing with strategy $strategy")
            }
            None
          }
        }

      case _ =>
        None
    }
  }
}
92 |
--------------------------------------------------------------------------------
/src/main/scala/streams/Streams.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
trait Streams
  extends StreamComponents
  with UnusableSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Extractor recognizing a whole stream expression (source, ops, sink) in a tree. */
  object SomeStream extends Extractor[Tree, Stream] {
    /**
     * Chooses a sink for a stream with no explicit sink: scans the components
     * from last to first and takes the first acceptable sink offered.
     */
    def findSink(ops: List[StreamComponent]): Option[StreamSink] = {
      // A sink is rejected when it is the invalid placeholder, or when it is
      // final-only but not offered by the last component.
      def isAcceptableSink(sink: StreamSink, indexFromEnd: Int): Boolean = {
        val res = (sink != InvalidSink) &&
          (!sink.isFinalOnly || indexFromEnd == 0)

        res
      }

      ops.reverse.toIterator.zipWithIndex.map({
        case (op, i) => (op.sinkOption, i)
      }).collectFirst({
        case (Some(sink), i) if isAcceptableSink(sink, i) =>
          sink
      })
    }

    /**
     * Matches, in order of preference: source + ops + explicit sink,
     * source + ops (sink deduced with findSink), or a bare source.
     */
    def unapply(tree: Tree): Option[Stream] = tree match {
      case SomeStreamSink(SomeStreamOps(SomeStreamSource(source), ops), sink) =>
        Some(new Stream(tree, source, ops, sink, hasExplicitSink = true))

      case SomeStreamOps(SomeStreamSource(source), ops) =>
        findSink(source :: ops)
          .map(sink => new Stream(tree, source, ops, sink, hasExplicitSink = false))

      case SomeStreamSource(source) =>
        findSink(List(source))
          .map(sink => new Stream(tree, source, Nil, sink, hasExplicitSink = false))

      case _ =>
        None
    }
  }

  /**
   * A recognized stream: the original tree plus its decomposition into a
   * source, a chain of ops, and a sink.
   */
  case class Stream(
    tree: Tree,
    source: StreamSource,
    ops: List[StreamOp],
    sink: StreamSink,
    hasExplicitSink: Boolean)
  {
    /** A stream with no ops and no meaningful sink is not worth rewriting. */
    def isDummy: Boolean =
      ops.isEmpty && (!hasExplicitSink || sink.isJustAWrapper)

    private[this] val sourceAndOps = source :: ops

    /** All components in order: source, then ops, then sink. */
    val components: List[StreamComponent] =
      sourceAndOps :+ sink

    /** Human-readable description of the stream, e.g. "array.map.filter -> Array". */
    def describe(describeSink: Boolean = true) =
      sourceAndOps.flatMap(_.describe).mkString(".") +
        sink.describe.filter(_ => describeSink).map(" -> " + _).getOrElse("")

    /** Total number of lambdas across all components. */
    def lambdaCount: Int =
      components.map(_.lambdaCount).sum

    /** Side effects of every component's closures. */
    lazy val closureSideEffectss: List[List[SideEffect]] =
      components.flatMap(_.closureSideEffectss)

    lazy val subTrees: List[Tree] =
      components.flatMap(_.subTrees)

    /** Sub-trees preserved as-is in the rewrite, one list per component. */
    lazy val preservedSubTreess: List[List[Tree]] =
      components.map(_.preservedSubTrees)

    lazy val preservedSubTreesSideEffectss: List[List[SideEffect]] =
      preservedSubTreess.map(_.flatMap(analyzeSideEffects))

    /**
     * Propagates the sink's needs backwards through the ops. Since scanRight
     * includes the initial value, the result has ops.size + 1 entries: one per
     * op, plus the source's needs at the head.
     */
    private[streams] def computeOutputNeedsBackwards(sinkNeeds: Set[TuploidPath]) =
      ops.scanRight(sinkNeeds)({
        case (op, refs) =>
          op.transmitOutputNeedsBackwards(refs)
      })

    /**
     * Emits the rewritten stream by asking the source to emit itself, handing
     * it the remaining ops (and finally the sink) paired with their needs.
     */
    def emitStream(fresh: String => TermName,
                   transform: Tree => Tree,
                   typed: Tree => Tree,
                   currentOwner: Symbol,
                   sinkNeeds: Set[TuploidPath] = sink.outputNeeds,
                   loopInterruptor: Option[Tree] = None): StreamOutput =
    {
      // Safe destructuring: scanRight always yields a non-empty list (see above).
      val sourceNeeds :: outputNeeds =
        computeOutputNeedsBackwards(sinkNeeds)

      val nextOps = ops.zip(outputNeeds) :+ (sink, sinkNeeds)
      source.emit(
        input = StreamInput(
          vars = UnitTreeScalarValue,
          loopInterruptor = loopInterruptor,
          fresh = fresh,
          transform = transform,
          currentOwner = currentOwner,
          typed = typed),
        outputNeeds = sourceNeeds,
        nextOps = nextOps)
    }
  }
}
116 |
--------------------------------------------------------------------------------
/src/main/scala/streams/WithLocalContext.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * ScalaCL - putting Scala on the GPU with JavaCL / OpenCL
3 | * http://scalacl.googlecode.com/
4 | *
5 | * Copyright (c) 2009-2013, Olivier Chafik (http://ochafik.com/)
6 | * All rights reserved.
7 | *
8 | * Redistribution and use in source and binary forms, with or without
9 | * modification, are permitted provided that the following conditions are met:
10 | *
11 | * * Redistributions of source code must retain the above copyright
12 | * notice, this list of conditions and the following disclaimer.
13 | * * Redistributions in binary form must reproduce the above copyright
14 | * notice, this list of conditions and the following disclaimer in the
15 | * documentation and/or other materials provided with the distribution.
16 | * * Neither the name of Olivier Chafik nor the
17 | * names of its contributors may be used to endorse or promote products
18 | * derived from this software without specific prior written permission.
19 | *
20 | * THIS SOFTWARE IS PROVIDED BY OLIVIER CHAFIK AND CONTRIBUTORS ``AS IS'' AND ANY
21 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
22 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
24 | * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
25 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
26 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
27 | * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 | * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 | */
31 | package scalaxy.streams
32 |
/**
 * Abstract compilation context: implementations bind `global` either to a
 * macro context's universe or to the runtime universe (see WithMacroContext
 * and WithRuntimeUniverse).
 */
trait WithLocalContext extends Reporters {

  val global: reflect.api.Universe
  import global._

  /** Infers an implicit value of the given expected type. */
  def inferImplicitValue(pt: Type): Tree
  /** Generates a fresh name with the given prefix. */
  def fresh(s: String): String
  /** Type-checks `tree`, optionally against an expected type `pt`. */
  def typecheck(tree: Tree, pt: Type = WildcardType): Tree
}
46 |
--------------------------------------------------------------------------------
/src/main/scala/streams/WithMacroContext.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * ScalaCL - putting Scala on the GPU with JavaCL / OpenCL
3 | * http://scalacl.googlecode.com/
4 | *
5 | * Copyright (c) 2009-2013, Olivier Chafik (http://ochafik.com/)
6 | * All rights reserved.
7 | *
8 | * Redistribution and use in source and binary forms, with or without
9 | * modification, are permitted provided that the following conditions are met:
10 | *
11 | * * Redistributions of source code must retain the above copyright
12 | * notice, this list of conditions and the following disclaimer.
13 | * * Redistributions in binary form must reproduce the above copyright
14 | * notice, this list of conditions and the following disclaimer in the
15 | * documentation and/or other materials provided with the distribution.
16 | * * Neither the name of Olivier Chafik nor the
17 | * names of its contributors may be used to endorse or promote products
18 | * derived from this software without specific prior written permission.
19 | *
20 | * THIS SOFTWARE IS PROVIDED BY OLIVIER CHAFIK AND CONTRIBUTORS ``AS IS'' AND ANY
21 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
22 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
24 | * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
25 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
26 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
27 | * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 | * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 | */
31 | package scalaxy.streams
32 |
33 | import language.experimental.macros
34 | import scala.reflect.macros.blackbox.Context
35 |
36 | import scalaxy.streams.HacksAndWorkarounds.cast
37 |
/** Binds the compilation context to a blackbox macro context. */
trait WithMacroContext extends WithLocalContext {

  val context: Context
  lazy val global = context.universe
  import global._

  def verbose = false

  /** Reports an informational message through the macro context. */
  override def info(pos: Position, msg: String, force: Boolean): Unit = {
    context.info(cast(pos), msg, force = force)
  }
  /** Reports a warning through the macro context. */
  override def warning(pos: Position, msg: String): Unit = {
    context.warning(cast(pos), msg)
  }
  /** Reports an error through the macro context. */
  override def error(pos: Position, msg: String): Unit = {
    context.error(cast(pos), msg)
  }

  /** Infers an implicit value of type `pt` via the macro context. */
  def inferImplicitValue(pt: Type): Tree =
    context.inferImplicitValue(pt.asInstanceOf[context.universe.Type]).asInstanceOf[Tree]

  /** Generates a compilation-unit-unique name with the given prefix. */
  def fresh(s: String) =
    context.freshName(s)

  /** Type-checks a tree with no expected type. */
  def typecheck(tree: Tree): Tree =
    context.typecheck(tree.asInstanceOf[context.universe.Tree]).asInstanceOf[Tree]

  /**
   * Type-checks `tree` against the expected type `pt`, skipping the typer
   * call when the tree is already typed with exactly that type.
   */
  def typecheck(tree: Tree, pt: Type): Tree = {
    if (tree.tpe != null && tree.tpe =:= pt)
      tree
    else
      context.typecheck(
        tree.asInstanceOf[context.universe.Tree],
        pt = pt.asInstanceOf[context.universe.Type]
      ).asInstanceOf[Tree]
  }
}
78 |
--------------------------------------------------------------------------------
/src/main/scala/streams/WithRuntimeUniverse.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * ScalaCL - putting Scala on the GPU with JavaCL / OpenCL
3 | * http://scalacl.googlecode.com/
4 | *
5 | * Copyright (c) 2009-2013, Olivier Chafik (http://ochafik.com/)
6 | * All rights reserved.
7 | *
8 | * Redistribution and use in source and binary forms, with or without
9 | * modification, are permitted provided that the following conditions are met:
10 | *
11 | * * Redistributions of source code must retain the above copyright
12 | * notice, this list of conditions and the following disclaimer.
13 | * * Redistributions in binary form must reproduce the above copyright
14 | * notice, this list of conditions and the following disclaimer in the
15 | * documentation and/or other materials provided with the distribution.
16 | * * Neither the name of Olivier Chafik nor the
17 | * names of its contributors may be used to endorse or promote products
18 | * derived from this software without specific prior written permission.
19 | *
20 | * THIS SOFTWARE IS PROVIDED BY OLIVIER CHAFIK AND CONTRIBUTORS ``AS IS'' AND ANY
21 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
22 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
24 | * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
25 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
26 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
27 | * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 | * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 | */
31 | package scalaxy.streams
32 |
33 | import scala.reflect.runtime.{ universe => ru }
34 | import scala.reflect.runtime.{ currentMirror => cm }
35 | import scala.tools.reflect.ToolBox
36 |
/**
 * Binds the compilation context to the runtime reflection universe, using a
 * toolbox for implicit search and type-checking (used outside of macro expansion).
 */
trait WithRuntimeUniverse
  extends WithLocalContext
  with ConsoleReporters {
  lazy val global = ru
  import global._

  /** Infers an implicit value of type `pt` with the toolbox. */
  def inferImplicitValue(pt: Type): Tree =
    toolbox.inferImplicitValue(pt.asInstanceOf[toolbox.u.Type]).asInstanceOf[global.Tree]

  // Toolbox backed by the current runtime mirror.
  lazy val toolbox = cm.mkToolBox()

  def verbose = false

  /** No-op: returns the tree unchanged (full type-checking goes through typecheck). */
  def typed[T <: Tree](tree: T): T = {
    tree
  }

  /**
   * Type-checks `tree` against `pt` with the toolbox, skipping trees that
   * already carry a type; wraps toolbox failures in a RuntimeException with a
   * more informative message.
   */
  def typecheck(tree: Tree, pt: Type = WildcardType): Tree = {
    val ttree = tree.asInstanceOf[toolbox.u.Tree]
    if (ttree.tpe != null && ttree.tpe != NoType)
      tree
    else {
      try {
        toolbox.typecheck(
          ttree,
          pt = pt.asInstanceOf[toolbox.u.Type])
      } catch {
        case ex: Throwable =>
          throw new RuntimeException(s"Failed to typeCheck($tree, $pt): $ex", ex)
      }
    }.asInstanceOf[Tree]
  }
}
100 |
--------------------------------------------------------------------------------
/src/main/scala/streams/flags.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Verbosity levels, ordered least to most verbose; `flags` relies on this order (>= comparisons). */
private[streams] object LogLevel extends Enumeration {
  type LogLevel = Value
  val Quiet, Normal, Verbose, VeryVerbose, Debug = Value
}
7 |
/** Global optimization flags, configurable via environment variables and system properties. */
object flags
{
  /**
   * Checks whether a flag is set via the environment variable
   * `SCALAXY_STREAMS_<envName>` (expected "1" when `enabled`, "0" otherwise)
   * or the system property `scalaxy.streams.<propName>` (expected
   * "true"/"false" matching `enabled`).
   */
  private[this] def isEnv(propName: String, envName: String, enabled: Boolean = true): Boolean = {
    // Both lookups may yield null; the == comparisons below handle that safely.
    val env = System.getenv("SCALAXY_STREAMS_" + envName)
    val prop = System.getProperty("scalaxy.streams." + propName)

    env == (if (enabled) "1" else "0") ||
      prop == enabled.toString()
  }

  import LogLevel._

  // Most specific level wins; defaults to Normal.
  private[streams] var logLevel: LogLevel = {
    if (isEnv("debug", "DEBUG")) Debug
    else if (isEnv("veryVerbose", "VERY_VERBOSE")) VeryVerbose
    else if (isEnv("verbose", "VERBOSE")) Verbose
    else if (isEnv("quiet", "QUIET")) Quiet
    else Normal
  }

  private[streams] def verbose: Boolean = logLevel >= Verbose
  private[streams] def veryVerbose: Boolean = logLevel >= VeryVerbose
  private[streams] def debug: Boolean = logLevel >= Debug
  private[streams] def quiet: Boolean = logLevel == Quiet

  private[streams] var experimental: Boolean =
    isEnv("experimental", "EXPERIMENTAL")

  // Set when optimization is explicitly turned OFF (env "0" / property "false").
  private[streams] var disabled: Boolean =
    isEnv("optimize", "OPTIMIZE", false)

  // Strategy override from the environment or system properties, if any.
  private[streams] var strategy: Option[OptimizationStrategy] =
    Option(System.getenv("SCALAXY_STREAMS_STRATEGY"))
      .orElse(Option(System.getProperty("scalaxy.streams.strategy")))
      .flatMap(scalaxy.streams.strategy.forName)

  /** For testing */
  private[streams] var quietWarnings = false

  /** Runs `a` with warnings silenced, restoring the previous setting afterwards. */
  private[streams] def withQuietWarnings[A](a: => A): A = {
    val old = quietWarnings
    try {
      quietWarnings = true
      a
    } finally {
      quietWarnings = old
    }
  }
}
57 |
--------------------------------------------------------------------------------
/src/main/scala/streams/matchers/Strippers.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait Strippers
{
  val global: scala.reflect.api.Universe
  import global._

  /** Peels off quasi-no-op wrappers: statement-less blocks and type ascriptions. */
  object Strip {
    def unapply(tree: Tree): Option[Tree] = {
      val stripped = tree match {
        case Block(Nil, Strip(inner)) =>
          inner
        case Typed(Strip(inner), _) =>
          inner
        case other =>
          other
      }
      Some(stripped)
    }
  }

  /** Views any tree as (statements, value); a non-block yields no statements. */
  object BlockOrNot {
    def unapply(tree: Tree): Option[(List[Tree], Tree)] = tree match {
      case Block(statements, value) =>
        Some((statements, value))
      case other =>
        Some((Nil, other))
    }
  }

  private[this] lazy val OptionModule = rootMirror.staticModule("scala.Option")

  /** Matches the implicit `Option.option2Iterable(value)` conversion. */
  private[streams] object Option2Iterable {
    def unapply(tree: Tree): Option[Tree] = Option(tree) collect {
      case q"$target.option2Iterable[${_}]($value)" if target.symbol == OptionModule =>
        value
    }
  }

  /** Removes a single Option-to-Iterable wrapper, if present. */
  def stripOption2Iterable(tree: Tree): Tree =
    Option2Iterable.unapply(tree).getOrElse(tree)
}
46 |
--------------------------------------------------------------------------------
/src/main/scala/streams/matchers/TransformationClosures.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | import scala.collection.breakOut
3 | import scala.collection.mutable.ListBuffer
4 |
private[streams] trait TransformationClosures
  extends TuploidValues
  with Strippers
  with StreamResults
  with SymbolMatchers
{
  val global: scala.reflect.api.Universe
  import global._

  /** Matches a (possibly wrapped) one-parameter function literal. */
  object Closure {
    def unapply(tree: Tree): Option[Function] = Option(tree) collect {
      case Strip(closure @ Function(List(_), _)) =>
        closure
    }
  }

  /** Matches a (possibly wrapped) two-parameter function literal. */
  object Closure2 {
    def unapply(tree: Tree): Option[Function] = Option(tree) collect {
      case Strip(closure @ Function(List(_, _), _)) =>
        closure
    }
  }

  /**
   * Decomposes a closure into a TransformationClosure: tuploid inputs,
   * intermediate statements, and tuploid outputs.
   */
  object SomeTransformationClosure {
    def unapply(closure: Tree): Option[TransformationClosure] = {

      Option(closure) collect {
        // `(p) => p match { case (a, b) => ... }`, where the scrutinee is the parameter.
        case q"""($param) => ${Strip(pref @ Ident(_))} match {
          case ${CaseTuploidValue(inputValue, body)}
        }""" if param.name == pref.name =>
          (inputValue, body)

        // Plain `(p) => body`: the input is a scalar aliased to the parameter's symbol.
        case q"($param) => $body" =>
          (ScalarValue(param.symbol.typeSignature, alias = param.symbol.asOption), body)
      } collect {
        // Only closures whose final value is itself a tuploid value are accepted.
        case (inputValue, BlockOrNot(statements, TuploidValue(outputValue))) =>
          TransformationClosure(inputValue, statements, outputValue, closureSymbol = closure.symbol)
      }
    }
  }

  /**
   * A decomposed closure: tuploid `inputs`, the intermediate `statements`,
   * tuploid `outputs`, and the symbol of the original closure (used as the
   * "deleted owner" when re-owning the inlined trees).
   */
  case class TransformationClosure(
    inputs: TuploidValue[Symbol],
    statements: List[Tree],
    outputs: TuploidValue[Symbol],
    closureSymbol: Symbol)
  {
    // Symbols bound by the closure's input pattern.
    private[this] val inputSymbols: Set[Symbol] = inputs.collectAliases.values.toSet
    // Path -> symbol for every alias appearing in the outputs.
    private[this] val outputSymbols: Map[TuploidPath, Symbol] = outputs.collectAliases

    // Input symbols actually referenced by the statements or the output values.
    private[this] def usedInputs: Set[Symbol] = (statements ++ outputs.collectValues).flatMap(_.collect {
      case t: RefTree if inputSymbols(t.symbol) =>
        t.symbol
    })(breakOut)


    // Maps each output path whose value aliases an input symbol to that input's path.
    val outputPathToInputPath: Map[TuploidPath, TuploidPath] = {

      outputSymbols.toSeq.collect({
        case (path, s) if inputSymbols(s) =>
          path -> inputs.find(s).get
      })(breakOut)
    }

    /**
     * Computes which input paths are needed, given the paths required by the
     * next component: the paths the closure itself references, plus (for
     * map-like ops) the next paths transposed back through outputPathToInputPath.
     */
    def getPreviousReferencedPaths(
      nextReferencedPaths: Set[TuploidPath],
      isMapLike: Boolean = true)
      : Set[TuploidPath] =
    {
      val closureReferencePaths = usedInputs.map(inputs.find(_).get)

      val transposedPaths =
        if (isMapLike)
          nextReferencedPaths.collect(outputPathToInputPath)
        else
          nextReferencedPaths

      closureReferencePaths ++ transposedPaths
    }

    /**
     * Inlines the closure body into the stream loop: rewrites references to the
     * closure's inputs into the stream's variables, re-owns trees from the
     * closure symbol to the current owner, and wires needed outputs.
     *
     * @return the statements to emit, plus the tuploid of output variables.
     */
    def replaceClosureBody(streamInput: StreamInput, outputNeeds: OutputNeeds): (List[Tree], TuploidValue[Tree]) =
    {
      import streamInput.{ fresh, transform, typed, currentOwner }

      // Substitutes input symbol references with the stream's current variables.
      val replacer = getReplacer(inputs, streamInput.vars)
      val fullTransform = (tree: Tree) => {
        transform(
          HacksAndWorkarounds.replaceDeletedOwner(global)(
            replacer(tree),
            deletedOwner = closureSymbol,
            newOwner = currentOwner))
      }

      val ClosureWiringResult(pre, post, outputVars) =
        wireInputsAndOutputs(
          inputSymbols,
          outputs,
          outputPathToInputPath,
          streamInput.copy(transform = fullTransform),
          outputNeeds)

      // Pack the transformed statements (if any) into a single block after `pre`.
      val blockStatements = statements.map(fullTransform) ++ post
      val results =
        pre ++
          (
            if (blockStatements.isEmpty)
              Nil
            else
              List(Block(blockStatements.dropRight(1), blockStatements.last))
          )

      (results, outputVars)
    }
  }
}
139 |
--------------------------------------------------------------------------------
/src/main/scala/streams/matchers/Tuploids.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * ScalaCL - putting Scala on the GPU with JavaCL / OpenCL
3 | * http://scalacl.googlecode.com/
4 | *
5 | * Copyright (c) 2009-2013, Olivier Chafik (http://ochafik.com/)
6 | * All rights reserved.
7 | *
8 | * Redistribution and use in source and binary forms, with or without
9 | * modification, are permitted provided that the following conditions are met:
10 | *
11 | * * Redistributions of source code must retain the above copyright
12 | * notice, this list of conditions and the following disclaimer.
13 | * * Redistributions in binary form must reproduce the above copyright
14 | * notice, this list of conditions and the following disclaimer in the
15 | * documentation and/or other materials provided with the distribution.
16 | * * Neither the name of Olivier Chafik nor the
17 | * names of its contributors may be used to endorse or promote products
18 | * derived from this software without specific prior written permission.
19 | *
20 | * THIS SOFTWARE IS PROVIDED BY OLIVIER CHAFIK AND CONTRIBUTORS ``AS IS'' AND ANY
21 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
22 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
24 | * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
25 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
26 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
27 | * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 | * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 | */
31 | package scalaxy.streams
32 |
/** Helpers to recognize primitive types, tuple types and tuple-component accessors. */
trait Tuploids {
  val global: reflect.api.Universe
  import global._
  import definitions._

  // The eight primitive value types.
  private lazy val primTypes =
    Set(IntTpe, LongTpe, ShortTpe, CharTpe, BooleanTpe, DoubleTpe, FloatTpe, ByteTpe)

  /** Whether `tpe` (possibly null) dealiases to a primitive value type. */
  def isPrimitiveType(tpe: Type) =
    tpe != null && primTypes(tpe.dealias)

  /**
   * Component types of an applied TupleN type reference.
   * Intentionally partial: throws a MatchError on anything else.
   */
  def getTupleComponentTypes(tpe: Type): List[Type] =
    tpe match {
      case ref @ TypeRef(_, _, args) if isTupleTypeRef(ref) =>
        args
    }

  /** Whether the type's symbol is one of scala.TupleN. */
  def isTupleType(tpe: Type): Boolean =
    tpe != null && isTupleSymbol(tpe.typeSymbol)

  /** Whether the symbol's full name matches scala.TupleN. */
  def isTupleSymbol(sym: Symbol): Boolean =
    sym != null && sym.fullName.toString.matches("scala\\.Tuple\\d+")

  /** Whether the type reference denotes an applied scala.TupleN. */
  def isTupleTypeRef(ref: TypeRef): Boolean =
    ref.args.nonEmpty &&
      ref.pre.typeSymbol == ScalaPackageClass &&
      ref.sym.name.toString.matches("Tuple\\d+")

  /** Extracts `i` from a tuple accessor name of the form `_i`. */
  object TupleComponentName {
    private[this] val rx = "_(\\d+)".r
    def unapply(n: Name): Option[Int] =
      if (n == null) None
      else n.toString match {
        case rx(digits) => Some(digits.toInt)
        case _ => None
      }
  }

  /** Matches `target._i` tuple accessors, yielding the target and the 0-based index. */
  object TupleComponent {
    val rx = "_(\\d+)".r
    def unapply(tree: Tree) = Option(tree) collect {
      case Select(target, TupleComponentName(idx)) if isTupleType(target.tpe) =>
        (target, idx - 1)
    }
  }

  // Currently unused helper; retained for parity with the original code.
  private def isValOrVar(s: Symbol): Boolean =
    s.isTerm && !s.isMethod && !s.isJava
}
128 |
--------------------------------------------------------------------------------
/src/main/scala/streams/matchers/WhileLoops.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | import scala.reflect.NameTransformer.{ encode, decode }
3 |
trait WhileLoops
{
  val global: scala.reflect.api.Universe
  import global._

  /** Matcher / builder for `while` loops in their desugared labelled-jump form. */
  object WhileLoop {
    // Matches `label: if (condition) { statements; label() } else ()`,
    // i.e. the compiler's encoding of `while (condition) { statements }`.
    def unapply(tree: Tree): Option[(Tree, Seq[Tree])] = Option(tree) collect {
      case LabelDef(
        label,
        List(),
        If(
          condition,
          Block(
            statements,
            Apply(
              Ident(label2),
              List()
            )
          ),
          Literal(Constant(()))
        )
        ) if (label == label2) =>
        (condition, statements)
    }

    /** Builds a `while` loop tree from a condition and body statements. */
    def apply(condition: Tree, statements: Seq[Tree]): Tree =
      q"while ($condition) { ..$statements ; () }"
  }

}
34 |
--------------------------------------------------------------------------------
/src/main/scala/streams/matchers/legacy/TreeBuilders.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.reflect.api.Universe
4 | import scala.collection.mutable.ArrayBuffer
5 |
6 | /**
7 | * Refugee from Scalaxy/Components
8 | * TODO: modernize (quasiquotes...) / make it go away.
9 | */
/**
 * Refugee from Scalaxy/Components
 * TODO: modernize (quasiquotes...) / make it go away.
 */
trait TreeBuilders
  extends WithLocalContext {
  val global: Universe
  import global._
  import global.definitions._

  type TreeGen = () => Tree
  type IdentGen = () => Ident

  /**
   * A freshly-named val/var definition together with a generator of references
   * to it; tracks whether any reference was ever generated so the definition
   * can be emitted only when actually used.
   */
  case class ValueDef(rawIdentGen: IdentGen, definition: ValDef, tpe: Type) {
    // Flipped to true the first time identGen produces a reference.
    var identUsed = false
    val identGen: IdentGen = () => {
      identUsed = true
      rawIdentGen()
    }
    def apply() = identGen()

    /** The definition, but only if a reference to it was generated. */
    def defIfUsed = ifUsed(definition)
    /** Evaluates `v` only when a reference was generated. */
    def ifUsed[V](v: => V) = if (identUsed) Some(v) else None
  }

  /** Creates an immutable value definition with a fresh name. */
  def newVal(prefix: String, value: Tree, tpe: Type) =
    newValueDef(prefix, mutable = false, value, tpe)

  /** Creates a mutable variable definition with a fresh name. */
  def newVar(prefix: String, value: Tree, tpe: Type) =
    newValueDef(prefix, mutable = true, value, tpe)

  private def newValueDef(prefix: String, mutable: Boolean, value: Tree, tpe: Type) = {
    val mods = if (mutable) Modifiers(Flag.MUTABLE) else NoMods
    val vd = ValDef(mods, TermName(fresh(prefix)), TypeTree(tpe), value)
    ValueDef(() => Ident(vd.name), vd, tpe)
  }
}
46 |
--------------------------------------------------------------------------------
/src/main/scala/streams/matchers/testing/WithTestFresh.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * ScalaCL - putting Scala on the GPU with JavaCL / OpenCL
3 | * http://scalacl.googlecode.com/
4 | *
5 | * Copyright (c) 2009-2013, Olivier Chafik (http://ochafik.com/)
6 | * All rights reserved.
7 | *
8 | * Redistribution and use in source and binary forms, with or without
9 | * modification, are permitted provided that the following conditions are met:
10 | *
11 | * * Redistributions of source code must retain the above copyright
12 | * notice, this list of conditions and the following disclaimer.
13 | * * Redistributions in binary form must reproduce the above copyright
14 | * notice, this list of conditions and the following disclaimer in the
15 | * documentation and/or other materials provided with the distribution.
16 | * * Neither the name of Olivier Chafik nor the
17 | * names of its contributors may be used to endorse or promote products
18 | * derived from this software without specific prior written permission.
19 | *
20 | * THIS SOFTWARE IS PROVIDED BY OLIVIER CHAFIK AND CONTRIBUTORS ``AS IS'' AND ANY
21 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
22 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
24 | * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
25 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
26 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
27 | * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29 | * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 | */
31 | package scalaxy.streams.testing
32 |
trait WithTestFresh {
  // Shared counter backing `fresh`; guarded by this object's monitor.
  private var counter = 0L

  /** Returns `s` suffixed with a unique, monotonically increasing id (thread-safe). */
  def fresh(s: String) = synchronized {
    val id = counter
    counter += 1
    s"$s$id"
  }
}
42 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/ArrayOpsOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/**
 * Recognizes the implicit `Predef.*ArrayOps` wrappers that scalac inserts
 * around arrays, so the stream optimizer can see through them.
 */
private[streams] trait ArrayOpsOps
    extends StreamComponents
    with ArrayOpsSinks
{
  val global: scala.reflect.api.Universe
  import global._

  object SomeArrayOp {
    // Returns the wrapped array tree, or EmptyTree (sentinel, not Option)
    // when `tree` is not a Predef array-ops wrapper.
    def unapply(tree: Tree): Tree = tree match {
      // Primitive wrappers take no type argument; refArrayOps/genericArrayOps
      // take one type argument (hence the TypeApply alternative below).
      case Apply(
          Select(Predef(), N(
            "intArrayOps" |
            "longArrayOps" |
            "byteArrayOps" |
            "shortArrayOps" |
            "charArrayOps" |
            "booleanArrayOps" |
            "floatArrayOps" |
            "doubleArrayOps" |
            // These now have a _ prefix in 2.12.0-M2:
            "_intArrayOps" |
            "_longArrayOps" |
            "_byteArrayOps" |
            "_shortArrayOps" |
            "_charArrayOps" |
            "_booleanArrayOps" |
            "_floatArrayOps" |
            "_doubleArrayOps")) |
          TypeApply(
            Select(Predef(), N(
              "refArrayOps" |
              "_refArrayOps" |
              "genericArrayOps")),
            List(_)),
          List(array)) =>
        array

      case _ =>
        EmptyTree
    }
  }

  /** Adapts SomeArrayOp's EmptyTree sentinel to the StreamOpExtractor protocol. */
  object SomeArrayOpsOp extends StreamOpExtractor {
    override def unapply(tree: Tree) =
      SomeArrayOp.unapply(tree) match {
        case EmptyTree =>
          NoExtractedStreamOp

        case array =>
          ExtractedStreamOp(array, ArrayOpsOp)
      }
  }

  // The wrapper itself does nothing to the stream; it only contributes a sink
  // so the result can be rebuilt as an array-ops-compatible value.
  case object ArrayOpsOp extends PassThroughStreamOp {
    override val sinkOption = Some(ArrayOpsSink)
  }

}
61 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/ClosureStreamOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/**
 * Base support for stream ops that carry a single closure argument
 * (map, filter, count, exists, ...): side-effect analysis, closure
 * destructuring, and backward propagation of needed tuploid paths.
 */
private[streams] trait ClosureStreamOps
    extends StreamComponents
    with TransformationClosures
{
  val global: scala.reflect.api.Universe
  import global._

  trait ClosureStreamOp extends StreamOp {
    /** The single Function literal this op was extracted with. */
    def closure: Function
    /** Whether this op transforms values 1-to-1 (map-like) rather than testing them. */
    def isMapLike: Boolean = true
    override def lambdaCount = 1
    override def subTrees: List[Tree] = List(closure)
    override def preservedSubTrees = Nil

    // Computed lazily so the (potentially costly) analysis only runs when asked for.
    private[this] lazy val closureSideEffects = analyzeSideEffects(closure)
    override def closureSideEffectss = List(closureSideEffects)

    lazy val closureSymbol = closure.symbol

    // TODO: remove this stripBody nonsense (here to allow FlatMapOps to do some magics)
    //lazy val q"($param) => $body_" = transformationClosure
    // Destructures the closure into its parameter and raw body via a quasiquote pattern.
    lazy val q"($param) => $body_" = closure
    lazy val body = stripBody(body_)
    // Identity by default; subclasses may override to peel wrappers off the body.
    def stripBody(tree: Tree): Tree = tree

    lazy val SomeTransformationClosure(transformationClosure) = closure// q"($param) => $body"

    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) =
      transformationClosure.getPreviousReferencedPaths(paths, isMapLike = isMapLike)
  }
}
34 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/CoerceOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/**
 * Handles the `withFilter` that only checks whether a value matches a tuploid
 * pattern and returns true/false — presumably the refutability check scalac
 * generates for pattern-matching for-comprehensions (TODO confirm). Emits
 * null checks on the coerced tuple components instead of a full match.
 */
private[streams] trait CoerceOps
    extends StreamComponents
    with Strippers
{
  val global: scala.reflect.api.Universe
  import global._

  object SomeCoerceOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.withFilter(${Strip(Function(List(param), body))})" =>
        // Only treat it as a coercion if the closure body is a bare
        // `x match { case <tuploid pattern> => true; case _ => false }`
        // over the closure's own parameter, with Boolean type.
        body match {
          case q"""
            ${Strip(Ident(name))} match {
              case ${CaseTuploidValue(inputValue, Literal(Constant(true)))}
              case _ => false
            }""" if name == param.name && body.tpe =:= typeOf[Boolean] =>
            ExtractedStreamOp(target, CoerceOp(inputValue))

          case _ =>
            NoExtractedStreamOp
        }

      case _ =>
        NoExtractedStreamOp
    }
  }

  case class CoerceOp(coercedInputValue: TuploidValue[Symbol]) extends StreamOp {
    override def describe = Some("withFilter") // Some("withFilter(checkIfRefutable)") //None
    override def lambdaCount = 1
    override def sinkOption = None
    override def canAlterSize = true
    override def subTrees = Nil

    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) =
      if (paths.isEmpty)
        Set(RootTuploidPath)
      else
        paths

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.typed

      val sub = emitSub(input.copy(outputSize = None), nextOps)

      // Tuple components already known non-null (TupleValue's last field false)
      // don't need a runtime check.
      val pathsThatCantBeNull = input.vars.collect({
        case (path, t @ TupleValue(_, _, _, false)) =>
          path
      }).toSet

      // Parent paths of every non-root component referenced by the coerced value.
      val coercedTuplePaths = coercedInputValue.collect({
        case (path @ (_ :: _), _) =>
          path.dropRight(1)
      }).distinct

      val pathsThatNeedToBeNullChecked = coercedTuplePaths.filterNot(pathsThatCantBeNull)

      // val pathsThatNeedToBeNullChecked = coercedTuplePaths.filter(pathsThatCantBeNull)

      // println(s"""
      //     pathsThatCantBeNull: $pathsThatCantBeNull
      //     coercedTuplePaths: $coercedTuplePaths
      //     pathsThatNeedToBeNullChecked: $pathsThatNeedToBeNullChecked
      //     pathsThatNeedToBeNullChecked.isEmpty = ${pathsThatNeedToBeNullChecked.isEmpty}
      //     sub: ${sub}
      // """)

      if (pathsThatNeedToBeNullChecked.isEmpty) {
        sub
      } else {
        // Guard the downstream body with `expr != null` for each risky path.
        val conditions = pathsThatNeedToBeNullChecked.map(path => {
          // Expression that points to a tuple, e.g. `input._1._2`
          val expr = path.foldLeft(input.vars.alias.get.duplicate) {
            case (tree, i) =>
              Select(tree, TermName("_" + (i + 1)))
          }
          q"$expr != null"
        })
        val condition = conditions.reduceLeft((a, b) => q"$a && $b")

        sub.copy(body = List(typed(q"""
          if ($condition) {
            ..${sub.body};
          }
        """)))
        // sub.copy(
        //   beforeBody = Nil,
        //   body = List(typed(q"""
        //     if ($condition) {
        //       ..${sub.beforeBody}
        //       ..${sub.body};
        //     }
        //   """)))
      }
    }

  }

  /**
   * If the first non-pass-through downstream op is a CoerceOp, creates a
   * `coercionSuccess` boolean var (typed) and returns its (definition, reference);
   * otherwise (None, None).
   */
  def newCoercionSuccessVarDefRef(
      nextOps: OpsAndOutputNeeds,
      fresh: String => TermName,
      typed: Tree => Tree)
      : (Option[Tree], Option[Tree]) =
  {
    nextOps.find(!_._1.isPassThrough) match {
      case Some((CoerceOp(_), _)) =>
        val name = fresh("coercionSuccess")
        // Typing the block once yields typed trees for both the var def and its ref.
        val Block(List(varDef), varRef) = typed(q"""
          private[this] var $name = true;
          $name
        """)
        (Some(varDef), Some(varRef))

      case _ =>
        (None, None)
    }
  }
}
124 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/CollectOp.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait CollectOps
    extends CanBuildFromSinks
    with Strippers
{
  val global: scala.reflect.api.Universe
  import global._

  // NOTE(review): this trait is currently a placeholder — the whole `collect`
  // extractor and op below are disabled (commented out), so mixing this trait
  // in contributes nothing at runtime. Kept as a starting point for a future
  // `collect` optimization.

  // object SomeCollectOp extends StreamOpExtractor {
  //   override def unapply(tree: Tree) = tree match {
  //     case q"""$target.collect[$outputTpt](${
  //       Strip(
  //         Block(
  //           List(
  //             cls @ ClassDef(_, _, _,
  //               Template(_, _, classBody))),
  //           _))
  //     })""" =>

  //       println(s"cls = $cls")
  //       classBody.collectFirst {
  //         case DefDef(_, name, _, _, _, Match(selector, cases))
  //             if name.toString == "applyOrElse" =>
  //           (target, CollectOp(outputTpt.tpe, cases, None))
  //       }

  //     case _ =>
  //       None
  //   }
  // }

  // // def mapLast[A, B](list: List[A])(f: A => B): List[B] = {
  // //   val last :: others = list.reverse
  // //   (f(last) :: others).reverse
  // // }

  // case class CollectOp(outputTpe: Type, cases: List[CaseDef], canBuildFrom: Option[Tree]) extends StreamOp {
  //   override def canInterruptLoop = false
  //   override def canAlterSize = true
  //   override def lambdaCount = 1
  //   override def subTrees = cases ++ canBuildFrom
  //   override def closureSideEffectss = ???
  //   override val sinkOption = canBuildFrom.map(CanBuildFromSink(_))
  //   override def describe = Some("collect")
  //   override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) =
  //     Set(RootTuploidPath)

  //   override def emit(input: StreamInput,
  //                     outputNeeds: OutputNeeds,
  //                     nextOps: OpsAndOutputNeeds): StreamOutput =
  //   {
  //     import input.{ typed, untyped, fresh }

  //     // val tpe = input.vars.tpe

  //     val value = fresh("value")
  //     val collected = fresh("collected")

  //     // Force typing of declarations and get typed references to various vars and vals.
  //     val Block(List(
  //       collectedVarDef,
  //       valueVarDef,
  //       collectedFalse),
  //       TupleCreation(List(
  //         collectedVarRef, valueVarRef))) = typed(q"""
  //       private[this] var $collected = true;
  //       ${newVar(value, outputTpe)};
  //       $collected = false;
  //       ($collected, $value)
  //     """)

  //     val caseUntyper = new Transformer {
  //       override def transform(tree: Tree) = tree match {
  //         // case Ident(n) =>
  //         //   println("FOUND IDENT " + tree + "; sym = " + tree.symbol)
  //         //   untyped(tree)
  //         case Bind(name, body) =>
  //           // Bind(name, body)//transform(body))
  //           typed(untyped(tree))
  //         case _ =>
  //           // println("FOUND a " + tree.getClass.getSimpleName + ": " + tree)
  //           super.transform(tree)
  //       }
  //     }

  //     //val untypedCases = cases.map(caseUntyper.transform(_))
  //     val untypedCases = cases
  //     // val untypedCases = cases.map(untyped)
  //     val matchCode = typed(Match(input.vars.alias.get.duplicate,
  //       untypedCases.dropRight(1).map({
  //         case CaseDef(pat, guard, caseValue) =>
  //           CaseDef(pat, guard, q"$valueVarRef = $caseValue")
  //       }) :+
  //       (untypedCases.last match {
  //         case CaseDef(pat, guard, _) =>
  //           // This is the default
  //           CaseDef(pat, guard, collectedFalse)
  //       })
  //     ))

  //     // TODO: use TransformationClosure to flatten tuples.

  //     val sub = emitSub(
  //       input.copy(
  //         vars = ScalarValue(outputTpe, alias = Some(valueVarRef)),
  //         outputSize = None,
  //         index = None),
  //       nextOps)
  //     // ..${sub.body.map(untyped)};
  //     sub.copy(body = List(typed(q"""
  //       $collectedVarDef;
  //       $valueVarDef;
  //       $matchCode;
  //       if ($collectedVarRef) {
  //         ..${sub.body};
  //       }
  //     """)))
  //   }
  // }
}
122 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/CountOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Stream-op extractor + implementation for `count(predicate)`. */
private[streams] trait CountOps
    extends ClosureStreamOps
    with Strippers
    with OptionSinks
    with UnusableSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Recognizes `coll.count(closure)` calls, extracting target and predicate. */
  object SomeCountOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.count(${Closure(closure)})" =>
        ExtractedStreamOp(target, CountOp(closure))

      case _ =>
        NoExtractedStreamOp
    }
  }

  /**
   * Terminal op: emits a loop body that increments a fresh counter whenever
   * the inlined predicate is true, and yields the counter as the scalar result.
   */
  case class CountOp(closure: Function) extends ClosureStreamOp {
    override def canInterruptLoop = false
    override def canAlterSize = true
    override def isMapLike = false
    override def describe = Some("count")
    override def sinkOption = Some(ScalarSink)

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      // count is terminal: the only remaining downstream component must be the scalar sink.
      val List((ScalarSink, _)) = nextOps

      import input.{ typed, fresh }

      val (replacedStatements, outputVars) =
        transformationClosure.replaceClosureBody(
          input,
          outputNeeds + RootTuploidPath)

      // Fix: was declared as a `var` but never reassigned.
      val test = outputVars.alias.get

      val count = fresh("count")

      // Force typing of declarations and get typed references to various vars and vals.
      val Block(List(
        countVarDef,
        countIncr), countVarRef) = typed(q"""
        private[this] var $count = 0;
        $count += 1;
        $count
      """)

      StreamOutput(
        prelude = List(countVarDef),
        body = List(q"""
          ..$replacedStatements;
          if ($test) {
            $countIncr;
          }
        """),
        ending = List(countVarRef))
    }
  }
}
67 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/ExistsOp.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Stream-op extractors + implementations for `exists` and `forall`. */
private[streams] trait ExistsOps
    extends ClosureStreamOps
    with Strippers
    with OptionSinks
    with UnusableSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Recognizes `coll.exists(p)` and `coll.forall(p)` calls. */
  object SomeExistsOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.exists(${Closure(closure)})" =>
        ExtractedStreamOp(target, ExistsOp(closure))

      case q"$target.forall(${Closure(closure)})" =>
        ExtractedStreamOp(target, ForallOp(closure))

      case _ =>
        NoExtractedStreamOp
    }
  }

  case class ExistsOp(override val closure: Function)
    extends ExistsOpLike("exists", exists = true, closure)

  case class ForallOp(override val closure: Function)
    extends ExistsOpLike("forall", exists = false, closure)

  /**
   * Shared implementation: a result var starts at false (exists) or true
   * (forall); as soon as the inlined predicate settles the answer, the var
   * is flipped and the loop interruptor is cleared to stop iteration.
   */
  class ExistsOpLike(name: String, exists: Boolean, val closure: Function) extends ClosureStreamOp {
    override def canInterruptLoop = true
    override def canAlterSize = true
    override def isMapLike = false
    override def sinkOption = Some(ScalarSink)
    override def describe = Some(name)

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      // Terminal op: the only remaining downstream component must be the scalar sink.
      val List((ScalarSink, _)) = nextOps

      import input.{ typed, fresh }

      val (replacedStatements, outputVars) =
        transformationClosure.replaceClosureBody(
          input,
          outputNeeds + RootTuploidPath)

      // Fix: was declared as a `var` but never reassigned.
      val test = outputVars.alias.get

      val result = fresh("result")

      // Force typing of declarations and get typed references to various vars and vals.
      val Block(List(
        resultVarDef,
        resultFalse,
        resultTrue), resultVarRef) = typed(q"""
        private[this] var $result = ${if (exists) q"false" else q"true"};
        $result = false;
        $result = true;
        $result
      """)

      StreamOutput(
        prelude = List(resultVarDef),
        body = List(q"""
          ..$replacedStatements;
          if (${if (exists) test else q"!$test"}) {
            ${if (exists) resultTrue else resultFalse};
            ${input.loopInterruptor.get.duplicate} = false;
          }
        """),
        ending = List(resultVarRef))
    }
  }
}
79 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/FilterOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Stream-op extractors + implementations for `filter`, `filterNot` and `withFilter`. */
private[streams] trait FilterOps
    extends ClosureStreamOps
    with Strippers
{
  val global: scala.reflect.api.Universe
  import global._

  /** Recognizes the three filter-style calls carrying a closure argument. */
  object SomeFilterOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.filter(${Closure(closure)})" =>
        ExtractedStreamOp(target, FilterOp(closure))

      case q"$target.filterNot(${Closure(closure)})" =>
        ExtractedStreamOp(target, FilterNotOp(closure))

      case q"$target.withFilter(${Closure(closure)})" =>
        ExtractedStreamOp(target, WithFilterOp(closure))

      case _ =>
        NoExtractedStreamOp
    }
  }

  case class FilterOp(override val closure: Function)
    extends FilterLikeOp(closure, isNegative = false, "filter")

  case class FilterNotOp(override val closure: Function)
    extends FilterLikeOp(closure, isNegative = true, "filterNot")

  case class WithFilterOp(override val closure: Function)
    extends FilterLikeOp(closure, isNegative = false, "withFilter")

  /**
   * Shared implementation: wraps the downstream ops' body in `if (test)`,
   * negating the inlined predicate when `isNegative` (filterNot).
   */
  class FilterLikeOp(val closure: Function,
                     val isNegative: Boolean,
                     val name: String) extends ClosureStreamOp
  {
    override def describe = Some(name)

    override def sinkOption = None

    override def isMapLike = false

    override def canAlterSize = true

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.typed

      val (replacedStatements, outputVars) =
        transformationClosure.replaceClosureBody(
          input,
          outputNeeds + RootTuploidPath)

      // Fix: replaced a `var` + conditional reassignment with a single val expression.
      val rawTest = outputVars.alias.get
      val test = if (isNegative) typed(q"!$rawTest") else rawTest

      // Fix: was declared as a `var` but never reassigned.
      val sub = emitSub(input.copy(outputSize = None), nextOps)
      sub.copy(body = List(q"""
        ..$replacedStatements;
        if ($test) {
          ..${sub.body};
        }
      """))
    }
  }

}
74 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/FindOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Stream-op extractor + implementation for `find(predicate)`. */
private[streams] trait FindOps
    extends ClosureStreamOps
    with Strippers
    with OptionSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Recognizes `coll.find(closure)` calls. */
  object SomeFindOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.find(${Closure(closure)})" =>
        ExtractedStreamOp(target, FindOp(closure))

      case _ =>
        NoExtractedStreamOp
    }
  }

  /**
   * Runs the downstream body (feeding the Option sink) for the first element
   * matching the inlined predicate, then clears the loop interruptor.
   */
  case class FindOp(closure: Function)
    extends ClosureStreamOp
  {
    override def describe = Some("find")

    override def sinkOption = Some(OptionSink)

    override def canInterruptLoop = true

    override def canAlterSize = true

    override def isMapLike = false

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.typed

      val (replacedStatements, outputVars) =
        transformationClosure.replaceClosureBody(
          input,
          outputNeeds + RootTuploidPath)

      // Fix: both `test` and `sub` were `var`s but never reassigned.
      val test = outputVars.alias.get

      val sub = emitSub(input.copy(outputSize = None), nextOps)
      sub.copy(body = List(q"""
        ..$replacedStatements;
        if ($test) {
          ..${sub.body};
          ${input.loopInterruptor.get.duplicate} = false;
        }
      """))
    }
  }
}
57 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/FlattenOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import flags.withQuietWarnings
4 |
/**
 * Stream-op extractor + implementation for `flatten` (experimental-flag gated).
 */
private[streams] trait FlattenOps
    extends ClosureStreamOps
    with CanBuildFromSinks
    with Streams
    with Strippers
{
  val global: scala.reflect.api.Universe
  import global._

  // Provided by the concrete Streams implementation mixed in alongside this trait.
  val SomeStream: Extractor[Tree, Stream]

  object SomeFlattenOp extends StreamOpExtractor {
    private[this] lazy val PredefSymbol = rootMirror.staticModule("scala.Predef")

    // Accepts either the identity-conformance evidence `Predef.$conforms[Col]`
    // (checked against the target's element type), or an identity-style lambda
    // going through Option2Iterable (the Option-to-Iterable implicit view).
    private[this] def isAKnownAsTraversable(asTrav: Tree, targetTpe: Type) = asTrav match {
      case q"$predef.`$conforms`[$colTpt]" if predef.symbol == PredefSymbol =>
        targetTpe match {
          case TypeRef(_, _, List(internalColTpe)) =>
            internalColTpe =:= colTpt.tpe
          case _ =>
            false
        }

      case Strip(Function(List(param), Option2Iterable(ref))) if param.symbol == ref.symbol =>
        true

      case _ =>
        false
    }

    override def unapply(tree: Tree) = tree match {
      // Only optimized when the experimental flag is on (see assert in emit below).
      case q"$target.flatten[$tpt]($asTrav)"
          if isAKnownAsTraversable(asTrav, target.tpe) &&
            flags.experimental =>
        ExtractedStreamOp(target, FlattenOp(tpt.tpe))

      case _ =>
        NoExtractedStreamOp
    }
  }

  case class FlattenOp(tpe: Type) extends StreamOp
  {
    override def describe = Some("flatten")

    override def lambdaCount = 0

    override def subTrees = Nil

    override def canInterruptLoop = false

    override def canAlterSize = true

    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) = paths

    override val sinkOption = None

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.{ vars, fresh, transform, typed, currentOwner }

      // Typing an identity lambda gives us a typed ValDef for the inner item
      // and a typed Ident referencing it.
      val itemVal = fresh("item")
      val Function(List(itemValDef @ ValDef(_, _, _, _)), itemValRef @ Ident(_)) = typed(q"""
        ($itemVal: $tpe) => $itemVal
      """)

      // Downstream ops see one inner element at a time; size/index are unknown.
      val sub = emitSub(
        input.copy(
          vars = ScalarValue(tpe, alias = Some(itemValRef)),
          outputSize = None,
          index = None),
        nextOps)

      // Note: need to attach any symbols in sub.body currently owned by currentOwner to the closure symbol.
      assert(flags.experimental)

      // Emits a nested `inner.foreach(item => ...)` over each inner collection.
      sub.copy(body = List(withQuietWarnings(transform(typed(q"""
        ${vars.alias.get}.foreach(($itemValDef) => {
          ..${sub.body};
        })
      """)))))
    }
  }
}
91 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/ForeachOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Stream-op extractor + implementation for `foreach` (terminal, Unit-valued). */
private[streams] trait ForeachOps
    extends ClosureStreamOps
    with UnusableSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Recognizes `coll.foreach[U](closure)` calls (one type arg, one closure). */
  object SomeForeachOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.foreach[${_}](${Closure(closure)})" =>
        ExtractedStreamOp(target, ForeachOp(closure))

      case _ =>
        NoExtractedStreamOp
    }
  }
  /** Terminal op: the inlined closure body becomes the loop body; no result is built. */
  case class ForeachOp(closure: Function)
    extends ClosureStreamOp
  {
    override def describe = Some("foreach")

    override def sinkOption = Some(ScalarSink)

    /// Technically, the output size of the Unit output is zero, so it's altered.
    override def canAlterSize = true

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      // foreach is terminal: the only remaining downstream component must be the scalar sink.
      val List((ScalarSink, _)) = nextOps

      val (replacedStatements, outputVars) =
        transformationClosure.replaceClosureBody(input, outputNeeds)

      // require(outputVars.tpe.dealias =:= typeOf[Unit], "Expected Unit, got " + outputVars.tpe)

      StreamOutput(body = replacedStatements)
    }
  }
}
44 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/IsEmptyOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Stream-op extractor + implementation for `isEmpty` / `isDefined` / `nonEmpty`. */
private[streams] trait IsEmptyOps
    extends UnusableSinks
{
  val global: scala.reflect.api.Universe
  import global._

  object SomeIsEmptyOp {
    // isDefined / nonEmpty are the negations of isEmpty, hence isPositivelyEmpty = false.
    def unapply(tree: Tree) = tree match {
      case q"$target.isEmpty" =>
        ExtractedStreamOp(target, IsEmptyOp("isEmpty", true))

      case q"$target.isDefined" =>
        ExtractedStreamOp(target, IsEmptyOp("isDefined", false))

      case q"$target.nonEmpty" =>
        ExtractedStreamOp(target, IsEmptyOp("nonEmpty", false))

      case _ =>
        NoExtractedStreamOp
    }
  }

  case class IsEmptyOp(name: String, isPositivelyEmpty: Boolean) extends StreamOp {
    override def lambdaCount = 0
    override def sinkOption = Some(ScalarSink)
    override def describe = Some(name)
    override def canInterruptLoop = true
    override def canAlterSize = true
    override def subTrees = Nil
    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) =
      Set() // TODO: check this.

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      // Terminal op: the only remaining downstream component must be the scalar sink.
      val List((ScalarSink, _)) = nextOps

      import input.{ typed, fresh }

      val isEmpty = fresh("isEmpty")

      // Force typing of declarations and get typed references to various vars and vals.
      // The var starts true; the loop body (below) flips it to false on the first
      // element and immediately interrupts the loop.
      val Block(List(
        isEmptyVarDef,
        isEmptyIsFalse), result) = typed(q"""
        private[this] var $isEmpty = true;
        $isEmpty = false;
        ${if (isPositivelyEmpty) q"$isEmpty" else q"!$isEmpty"}
      """)

      StreamOutput(
        prelude = List(isEmptyVarDef),
        body = List(q"""
          $isEmptyIsFalse;
          ${input.loopInterruptor.get.duplicate} = false;
        """),
        ending = List(result))
    }
  }
}
64 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/JsArrayOpsOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Recognizes Scala.js `js.ArrayOps` wrappers so the optimizer can see through them. */
private[streams] trait JsArrayOpsOps
    extends StreamComponents
    with ScalaJsSymbols
{
  val global: scala.reflect.api.Universe
  import global._

  object SomeJsArrayOp {
    /** True iff `tpe`'s symbol is the (optional) Scala.js ArrayOps symbol. */
    def isJsArrayOpType(tpe: Type): Boolean =
      tpe != null && JsArrayOpsSymOpt.contains(tpe.typeSymbol)

    // Returns the wrapped array tree, or EmptyTree (sentinel, not Option)
    // when `tree` is not a js.ArrayOps wrapper.
    def unapply(tree: Tree): Tree = {
      // Fix: `tpe` was computed but unused (tree.tpe was re-read in the test below).
      val tpe = tree.tpe
      if (isJsArrayOpType(tpe)) {
        tree match {
          // Implicit conversion form: js.Any.jsArrayOps[T](array)
          case Apply(TypeApply(Select(JsAny(), N("jsArrayOps")), List(_)), List(array)) =>
            array

          // Direct construction form: new js.ArrayOps(array)
          case Apply(Select(New(_), termNames.CONSTRUCTOR), List(array)) =>
            array

          case _ =>
            EmptyTree
        }
      } else {
        EmptyTree
      }
    }
  }

  /** Adapts SomeJsArrayOp's EmptyTree sentinel to the StreamOpExtractor protocol. */
  object SomeJsArrayOpsOp extends StreamOpExtractor {
    override def unapply(tree: Tree): ExtractedStreamOp =
      SomeJsArrayOp.unapply(tree) match {
        case EmptyTree =>
          NoExtractedStreamOp

        case array =>
          ExtractedStreamOp(array, JsArrayOpsOp)
      }
  }

  case object JsArrayOpsOp extends PassThroughStreamOp {
    // No need to wrap js.Arrays in sink as ops as of Arrays?
    override val sinkOption = None
  }

}
50 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/MapOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
/** Stream-op extractor + implementation for `map`. */
private[streams] trait MapOps
    extends ClosureStreamOps
    with CanBuildFromSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Recognizes `map` calls, with or without a trailing CanBuildFrom argument list. */
  object SomeMapOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.map[${_}, ${_}](${Closure(closure)})($canBuildFrom)" =>
        ExtractedStreamOp(target, MapOp(closure, canBuildFrom = Some(canBuildFrom)))

      // Option.map and Iterator.map don't take a CanBuildFrom.
      case q"$target.map[${_}](${Closure(closure)})" =>
        ExtractedStreamOp(target, MapOp(closure, canBuildFrom = None))

      case _ =>
        NoExtractedStreamOp
    }
  }

  /**
   * Map op: splices the closure body inline and hands its output vars to the
   * downstream ops. A captured CanBuildFrom (if any) provides the sink used
   * to rebuild the output collection.
   */
  case class MapOp(closure: Function, canBuildFrom: Option[Tree])
    extends ClosureStreamOp
  {
    override def describe = Some("map")

    override val sinkOption = canBuildFrom.map(CanBuildFromSink(_))

    // map is 1-to-1, so the element count is preserved.
    override def canAlterSize = false

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      val (replacedStatements, outputVars) =
        transformationClosure.replaceClosureBody(input, outputNeeds)

      val sub = emitSub(input.copy(vars = outputVars), nextOps)
      sub.copy(body = replacedStatements ++ sub.body)
    }
  }
}
45 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/MkStringOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait MkStringOps
    extends UnusableSinks
    with OptionSinks
    with Streams
{
  val global: scala.reflect.api.Universe
  import global._

  /** Extracts the three `mkString` overloads (no args / separator only / start+sep+end). */
  object SomeMkStringOp extends StreamOpExtractor {
    private[this] def isString(tpe: Type): Boolean =
      tpe != null && tpe <:< typeOf[String]

    override def unapply(tree: Tree) = {
      // Guard on the result type so unrelated methods named `mkString` don't match.
      if (isString(tree.tpe)) {
        tree match {
          case q"$target.mkString" =>
            ExtractedStreamOp(target, MkStringOp(None, None, None))

          case q"$target.mkString($sep)" =>
            ExtractedStreamOp(target, MkStringOp(None, Some(sep), None))

          case q"$target.mkString($start, $sep, $end)" =>
            ExtractedStreamOp(target, MkStringOp(Some(start), Some(sep), Some(end)))

          case _ =>
            NoExtractedStreamOp
        }
      } else {
        NoExtractedStreamOp
      }
    }
  }

  /**
   * Terminal op folding the stream into a String via a StringBuilder,
   * mirroring TraversableOnce.addString semantics (first element gets no
   * separator; optional start prefix and end suffix).
   *
   * @param start optional prefix expression
   * @param sep   optional separator expression
   * @param end   optional suffix expression
   */
  case class MkStringOp(start: Option[Tree],
                        sep: Option[Tree],
                        end: Option[Tree]) extends StreamOp {
    override def lambdaCount = 0
    override def sinkOption = Some(ScalarSink)
    override def canAlterSize = false
    override def describe = Some("mkString")
    override def subTrees: List[Tree] =
      start.toList ++ sep ++ end
    // Only the root value is needed: the whole element is appended as-is.
    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) =
      Set(RootTuploidPath)

    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input._

      // TODO: remove this to unlock flatMap
      val List((ScalarSink, _)) = nextOps

      val startVal = fresh("start")
      val sepVal = fresh("sep")
      val endVal = fresh("end")
      val firstVar = fresh("first")
      val builderVal = fresh("builder")

      // Fixed: was s"input.vars = $input.vars", which interpolated `input`
      // and appended a literal ".vars" to the message.
      require(input.vars.alias.nonEmpty, s"input.vars = ${input.vars}")

      def emptyString: Tree = q""" "" """

      // Type the full scaffold once, then cherry-pick below the statements
      // each overload variant actually needs.
      val Block(List(
        startDef,
        sepDef,
        endDef,
        builderDef,
        firstDef,
        appendStart,
        appendSep,
        appendInput,
        appendEnd), result) = typed(q"""
        private[this] val $startVal: String = ${start.getOrElse(emptyString)};
        private[this] val $sepVal: String = ${sep.getOrElse(emptyString)};
        private[this] val $endVal: String = ${end.getOrElse(emptyString)};
        private[this] val $builderVal = new scala.collection.mutable.StringBuilder();
        private[this] var $firstVar: Boolean = true;
        $builderVal.append($startVal);
        if ($firstVar) {
          $firstVar = false;
        } else {
          $builderVal.append($sepVal);
        }
        $builderVal.append(${input.vars.alias.get});
        $builderVal.append($endVal);
        $builderVal.result()
      """)

      (start, sep, end) match {
        // mkString: plain concatenation of elements.
        case (None, None, None) =>
          StreamOutput(
            prelude = List(builderDef),
            body = List(appendInput),
            ending = List(result))

        // mkString(sep): separator between elements, no prefix / suffix.
        case (None, Some(_), None) =>
          StreamOutput(
            prelude = List(builderDef, firstDef, sepDef),
            body = List(appendSep, appendInput),
            ending = List(result))

        // mkString(start, sep, end) (and any other combination).
        case _ =>
          StreamOutput(
            prelude = List(builderDef, firstDef, startDef, sepDef, endDef, appendStart),
            body = List(appendSep, appendInput),
            ending = List(appendEnd, result))
      }
    }
  }
}
113 |
114 |
115 | // def mkString(start: String, sep: String, end: String): String =
116 | // addString(new StringBuilder(), start, sep, end).toString
117 |
118 | // def mkString(sep: String): String = mkString("", sep, "")
119 |
120 | // def mkString: String = mkString("")
121 |
122 | // def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
123 | // var first = true
124 |
125 | // b append start
126 | // for (x <- self) {
127 | // if (first) {
128 | // b append x
129 | // first = false
130 | // }
131 | // else {
132 | // b append sep
133 | // b append x
134 | // }
135 | // }
136 | // b append end
137 |
138 | // b
139 | // }
140 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/StreamOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait StreamOps
    extends ArrayOpsOps
    with CoerceOps
    with CollectOps
    with CountOps
    with FilterOps
    with FindOps
    with ExistsOps
    with FlattenOps
    with FlatMapOps
    with ForeachOps
    with IsEmptyOps
    with JsArrayOpsOps
    with MapOps
    with MkStringOps
    with OptionOps
    with ReductionOps
    with ToCollectionOps
    with TakeDropOps
    with TakeWhileOps
    with ZipWithIndexOps
{
  val global: scala.reflect.api.Universe
  import global._

  /** Matches a tree that is a chain of recognized stream operations, yielding
    * the chain's source tree plus the ordered list of extracted ops. */
  object SomeStreamOps extends Extractor[Tree, (Tree, List[StreamOp])] {
    val extractors = List[StreamOpExtractor](
      SomeArrayOpsOp,
      SomeCoerceOp,
      // TODO: fix intractable typing issues with case classes:
      // SomeCollectOp,
      SomeCountOp,
      SomeExistsOp,
      SomeFilterOp,
      SomeFindOp,
      SomeFlattenOp,
      SomeFlatMapOp,
      SomeForeachOp,
      SomeIsEmptyOp,
      SomeJsArrayOpsOp,
      SomeMapOp,
      SomeMkStringOp,
      SomeOptionOp,
      SomeReductionOp,
      SomeTakeDropOp,
      SomeTakeWhileOp,
      SomeToCollectionOp,
      SomeZipWithIndexOp
    )

    object ExtractOps {
      def unapply(extractorAndTree: (StreamOpExtractor, Tree)): Option[(Tree, List[StreamOp])] = {
        val (extractor, tree) = extractorAndTree
        extractor.unapply(tree) match {
          case extraction @ ExtractedStreamOp(target, op) if !extraction.isEmpty =>
            // Recurse on the target: when it is itself an open stream chain
            // (not already terminated by a scalar-producing op), extend that
            // chain; otherwise this op starts a fresh single-op chain.
            target match {
              case SomeStreamOps(source, previousOps)
                  if !previousOps.lastOption.exists(_.sinkOption == Some(ScalarSink)) =>
                Some((source, previousOps :+ op))

              case source =>
                Some((source, List(op)))
            }

          case _ =>
            None
        }
      }
    }

    /** Tries every extractor in order and keeps the first successful match. */
    def unapply(tree: Tree): Option[(Tree, List[StreamOp])] =
      extractors.iterator
        .map(extractor => (extractor, tree))
        .collectFirst { case ExtractOps(source, ops) => (source, ops) }
  }
}
81 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/TakeDropOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait TakeDropOps
    extends ClosureStreamOps
    with Strippers
    with OptionSinks
    with UnusableSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Extracts `.take(n)` / `.drop(n)` calls as stream ops. */
  object SomeTakeDropOp extends StreamOpExtractor {

    override def unapply(tree: Tree) = tree match {
      case q"$target.take($n)" =>
        ExtractedStreamOp(target, TakeOp(n))

      case q"$target.drop($n)" =>
        ExtractedStreamOp(target, DropOp(n))

      case _ =>
        NoExtractedStreamOp
    }
  }

  /** Common bits of take / drop: both carry a count expression `n` and may
    * shrink the stream, so the output size is never statically known. */
  trait TakeDropOp extends StreamOp {
    def n: Tree
    override def subTrees = List(n)
    override def canAlterSize = true
    override def sinkOption = None
    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) = paths
  }

  case class TakeOp(n: Tree) extends TakeDropOp
  {
    // take(n) can stop the loop early once n elements have been produced.
    override def canInterruptLoop = true
    override def describe = Some("take")

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.{ typed, fresh, transform }

      val nn = fresh("nn")
      val i = fresh("i")

      // Force typing of declarations and get typed references to various vars and vals.
      // Fixed: the loop test must reference $nn (the count evaluated once, with
      // `n` recursively transformed), not the original `n` tree, which would be
      // re-spliced (and re-evaluated) in the loop body while $nn went unused.
      val Block(List(
        nValDef,
        iVarDef,
        test,
        iIncr), iVarRef) = typed(q"""
        private[this] val $nn = ${transform(n)};
        private[this] var $i = 0;
        $i < $nn;
        $i += 1;
        $i
      """)

      val sub = emitSub(input.copy(outputSize = None), nextOps)
      sub.copy(
        beforeBody = sub.beforeBody ++ List(nValDef, iVarDef),
        body = List(q"""
          if ($test) {
            ..${sub.body};
            $iIncr
          } else {
            ${input.loopInterruptor.get.duplicate} = false;
          }
        """))
    }
  }

  case class DropOp(n: Tree) extends TakeDropOp
  {
    // drop(n) must keep scanning the whole stream; it cannot interrupt the loop.
    override def canInterruptLoop = false
    override def describe = Some("drop")

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.{ typed, fresh, transform }

      val nn = fresh("nn")
      val i = fresh("i")

      // Force typing of declarations and get typed references to various vars and vals.
      // Fixed: same as TakeOp — compare against $nn, not the raw `n` tree.
      val Block(List(
        nValDef,
        iVarDef,
        test,
        iIncr), iVarRef) = typed(q"""
        private[this] val $nn = ${transform(n)};
        private[this] var $i = 0;
        $i < $nn;
        $i += 1;
        $i
      """)

      val sub = emitSub(input.copy(outputSize = None), nextOps)
      sub.copy(
        beforeBody = sub.beforeBody ++ List(nValDef, iVarDef),
        body = List(q"""
          if ($test) {
            $iIncr
          } else {
            ..${sub.body};
          }
        """))
    }
  }
}
115 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/ToCollectionOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait ToCollectionOps
    extends StreamComponents
    with ArrayBuilderSinks
    with IteratorSinks
    with ListBufferSinks
    with VectorBuilderSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Recognizes terminal conversion calls: toIterator / toList / toVector / toArray. */
  object SomeToCollectionOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.toIterator"          => ExtractedStreamOp(target, ToIteratorOp)
      case q"$target.toList"              => ExtractedStreamOp(target, ToListOp)
      case q"$target.toVector"            => ExtractedStreamOp(target, ToVectorOp)
      case q"$target.toArray[${_}](${_})" => ExtractedStreamOp(target, ToArrayOp)
      case _                              => NoExtractedStreamOp
    }
  }

  /** A conversion op does no per-element work: it merely forces a specific sink. */
  class ToCollectionOp(name: String, sink: StreamSink) extends PassThroughStreamOp {
    override def describe = Some(name)
    override def sinkOption = Some(sink)
    override def canAlterSize = false
  }

  case object ToListOp extends ToCollectionOp("toList", ListBufferSink)

  case object ToArrayOp extends ToCollectionOp("toArray", ArrayBuilderSink)

  case object ToVectorOp extends ToCollectionOp("toVector", VectorBuilderSink)

  case object ToIteratorOp extends ToCollectionOp("toIterator", IteratorSink)
}
46 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/WhileOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait TakeWhileOps
    extends ClosureStreamOps
    with Strippers
    with OptionSinks
    with UnusableSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Extracts `.takeWhile(f)` / `.dropWhile(f)` calls with an inlineable closure. */
  object SomeTakeWhileOp extends StreamOpExtractor {
    // Range.takeWhile / dropWhile return a Range, which cannot be rebuilt by
    // any sink here: mark such streams with InvalidSink so they are rejected.
    def sinkOptionForReturnType(tpe: Type) =
      if (tpe <:< typeOf[Range])
        Some(InvalidSink)
      else
        None

    override def unapply(tree: Tree) = tree match {
      case q"$target.takeWhile(${Closure(closure)})" =>
        ExtractedStreamOp(target, TakeWhileOp(closure, sinkOptionForReturnType(tree.tpe)))

      case q"$target.dropWhile(${Closure(closure)})" =>
        ExtractedStreamOp(target, DropWhileOp(closure, sinkOptionForReturnType(tree.tpe)))

      case _ =>
        NoExtractedStreamOp
    }
  }

  /** Common base: predicate-driven ops that may shrink the stream. */
  trait WhileOp extends ClosureStreamOp {
    // override def sinkOption = None
    override def canAlterSize = true
    override def isMapLike = false
  }

  case class TakeWhileOp(closure: Function, sinkOption: Option[StreamSink]) extends WhileOp
  {
    // takeWhile stops the loop at the first element failing the predicate.
    override def canInterruptLoop = true
    override def describe = Some("takeWhile")

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.typed

      // Inline the predicate body; also request the root value, since the
      // predicate's result itself is needed as the loop-continuation test.
      val (replacedStatements, outputVars) =
        transformationClosure.replaceClosureBody(
          input,
          outputNeeds + RootTuploidPath)

      // Fixed: `test` and `sub` were mutable vars for no reason; vals suffice.
      val test = outputVars.alias.get

      val sub = emitSub(input.copy(outputSize = None), nextOps)
      sub.copy(body = List(q"""
        ..$replacedStatements;
        if ($test) {
          ..${sub.body};
        } else {
          ${input.loopInterruptor.get.duplicate} = false;
        }
      """))
    }
  }

  case class DropWhileOp(closure: Function, sinkOption: Option[StreamSink]) extends WhileOp
  {
    // dropWhile must traverse the entire stream; it cannot interrupt the loop.
    override def canInterruptLoop = false
    override def describe = Some("dropWhile")

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.{ typed, fresh }

      val (replacedStatements, outputVars) =
        transformationClosure.replaceClosureBody(
          input,
          outputNeeds + RootTuploidPath)

      val test = outputVars.alias.get

      val doneDropping = fresh("doneDropping")
      // Force typing of declarations and get typed references to various vars and vals.
      // Once the predicate first fails, $doneDropping latches to true and all
      // subsequent elements are emitted without consulting the predicate result.
      val Block(List(
        doneDroppingVarDef,
        setDoneDropping), combinedTest) = typed(q"""
        private[this] var $doneDropping = false;
        $doneDropping = true;
        $doneDropping || !$test
      """)

      val sub = emitSub(input.copy(outputSize = None), nextOps)
      sub.copy(
        beforeBody = sub.beforeBody :+ doneDroppingVarDef,
        body = List(q"""
          ..$replacedStatements;
          if ($combinedTest) {
            $setDoneDropping;
            ..${sub.body};
          }
        """))
    }
  }
}
108 |
--------------------------------------------------------------------------------
/src/main/scala/streams/ops/ZipWithIndexOps.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.reflect.NameTransformer.{ encode, decode }
4 |
private[streams] trait ZipWithIndexOps
    extends TransformationClosures
    with CanBuildFromSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Extracts `.zipWithIndex` calls (the collection variant, which takes a CanBuildFrom). */
  object SomeZipWithIndexOp extends StreamOpExtractor {
    override def unapply(tree: Tree) = tree match {
      case q"$target.zipWithIndex[${_}, ${_}]($canBuildFrom)" =>
        ExtractedStreamOp(target, ZipWithIndexOp(canBuildFrom))

      case _ =>
        NoExtractedStreamOp
    }
  }

  /** Op producing (value, index) pairs; its sink is derived from the captured CanBuildFrom. */
  case class ZipWithIndexOp(canBuildFrom: Tree) extends StreamOp
  {
    override def describe = Some("zipWithIndex")

    override def canAlterSize = false

    override def subTrees = List(canBuildFrom)

    override val sinkOption = Some(CanBuildFromSink(canBuildFrom))

    override def transmitOutputNeedsBackwards(paths: Set[TuploidPath]) = {
      paths collect {
        // Only transmit _._1 and its children backwards
        // (the index component, _._2, is synthesized by this op itself).
        case 0 :: sub =>
          sub
      }
    }

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.{ fresh, transform, typed }

      // TODO wire input and output fiber vars
      val indexVar = fresh("indexVar")
      val indexVal = fresh("indexVal")

      // Only materialize the (value, index) tuple when a downstream op needs
      // the tuple itself (rather than just one of its components).
      val needsPair: Boolean = outputNeeds(RootTuploidPath)
      val pairName = if (needsPair) fresh("zipWithIndexPair") else TermName("")

      // Early typing / symbolization.
      val Block(List(
        indexVarDef,
        indexValDef,
        pairDef,
        indexVarIncr),
        TupleCreation(List(
          indexVarRef, indexValRef, pairRef))) = typed(q"""
        private[this] var $indexVar = 0;
        private[this] val $indexVal = $indexVar;
        private[this] val $pairName = (${input.vars.alias.getOrElse(EmptyTree)}, $indexVal);
        $indexVar += 1;
        ($indexVar, $indexVal, $pairName)
      """)

      // Build the Tuple2[inputTpe, Int] type describing this op's output values.
      import compat._
      val TypeRef(pre, sym, List(_, _)) = typeOf[(Int, Int)]
      val tupleTpe = internal.typeRef(pre, sym, List(input.vars.tpe, typeOf[Int]))
      require(tupleTpe != null && tupleTpe != NoType)
      val outputVars =
        TupleValue[Tree](
          tpe = tupleTpe,
          Map(
            0 -> input.vars,
            1 -> ScalarValue(typeOf[Int], alias = Some(indexValRef))),
          alias = Some(pairRef),
          couldBeNull = false)

      val sub = emitSub(input.copy(vars = outputVars), nextOps)
      sub.copy(
        // TODO pass source collection to canBuildFrom if it exists.
        beforeBody = sub.beforeBody :+ indexVarDef,
        body = List(q"""
          $indexValDef;
          ..${if (needsPair) List(pairDef) else Nil}
          ..${sub.body};
          $indexVarIncr
        """))
    }
  }
}
94 |
--------------------------------------------------------------------------------
/src/main/scala/streams/plugin/StreamsCompiler.scala:
--------------------------------------------------------------------------------
1 | // Author: Olivier Chafik (http://ochafik.com)
2 | package scalaxy.streams
3 |
4 | import scala.tools.nsc.CompilerCommand
5 | import scala.tools.nsc.Global
6 | import scala.tools.nsc.Settings
7 | import scala.tools.nsc.plugins.PluginComponent
8 | import scala.tools.nsc.reporters.{ ConsoleReporter, Reporter }
9 |
10 | /**
11 | * This modified compiler enforces a "default-private" semantics, with a `@public` annotation to mark entities as public.
12 | */
13 | object StreamsCompiler {
14 | def jarOf(c: Class[_]) =
15 | Option(c.getProtectionDomain.getCodeSource).map(_.getLocation.getFile)
16 | val scalaLibraryJar = jarOf(classOf[List[_]])
17 | val streamsLibraryJar = jarOf(classOf[OptimizationStrategy])
18 |
19 | def main(args: Array[String]) {
20 | try {
21 | makeCompiler(consoleReportGetter)(args)
22 | } catch {
23 | case ex: Throwable =>
24 | ex.printStackTrace
25 | System.exit(2)
26 | throw ex
27 | }
28 | }
29 |
30 | def consoleReportGetter = (settings: Settings) => new ConsoleReporter(settings)
31 |
32 | def defaultInternalPhasesGetter: Global => List[PluginComponent] =
33 | StreamsPlugin.getInternalPhases _
34 |
35 | def makeCompiler[R <: Reporter]
36 | (reporterGetter: Settings => R,
37 | internalPhasesGetter: Global => List[PluginComponent] = defaultInternalPhasesGetter)
38 | : (Array[String] => Unit) =
39 | {
40 | val settings = new Settings
41 |
42 | val jars = scalaLibraryJar ++ streamsLibraryJar
43 | val bootclasspathArg = if (jars.isEmpty) Nil else List("-bootclasspath", jars.reduce(_ + ":" + _))
44 |
45 | val reporter = reporterGetter(settings)
46 | val global = new Global(settings, reporter) {
47 | override protected def computeInternalPhases() {
48 | super.computeInternalPhases
49 | phasesSet ++= internalPhasesGetter(this)
50 | }
51 | }
52 |
53 | (args: Array[String]) => {
54 | val command = new CompilerCommand(bootclasspathArg ++ args, settings)
55 |
56 | if (!command.ok)
57 | System.exit(1)
58 |
59 | new global.Run().compile(command.files)
60 | }
61 | }
62 |
63 | // def compile[R <: Reporter](args: Array[String],
64 | // reporterGetter: Settings => R,
65 | // internalPhasesGetter: Global => List[PluginComponent] = defaultInternalPhasesGetter): R =
66 | // {
67 | // val settings = new Settings
68 |
69 | // val jars = scalaLibraryJar ++ streamsLibraryJar
70 | // val bootclasspathArg = if (jars.isEmpty) Nil else List("-bootclasspath", jars.reduce(_ + ":" + _))
71 | // val command = new CompilerCommand(bootclasspathArg ++ args, settings)
72 |
73 | // if (!command.ok)
74 | // System.exit(1)
75 |
76 | // val reporter = reporterGetter(settings)
77 | // val global = new Global(settings, reporter) {
78 | // override protected def computeInternalPhases() {
79 | // super.computeInternalPhases
80 | // phasesSet ++= internalPhasesGetter(this)
81 | // }
82 | // }
83 | // new global.Run().compile(command.files)
84 |
85 | // reporter
86 | // }
87 | }
88 |
--------------------------------------------------------------------------------
/src/main/scala/streams/plugin/StreamsComponent.scala:
--------------------------------------------------------------------------------
1 | // Author: Olivier Chafik (http://ochafik.com)
2 | package scalaxy.streams
3 |
4 | import scala.tools.nsc.Global
5 | import scala.tools.nsc.Phase
6 | import scala.tools.nsc.plugins.PluginComponent
7 | import scala.tools.nsc.symtab.Flags
8 | import scala.tools.nsc.transform.TypingTransformers
9 |
10 | /**
11 | * To understand / reproduce this, you should use paulp's :power mode in the scala console:
12 | *
13 | * scala
14 | * > :power
15 | * > :phase parser // will show us ASTs just after parsing
16 | * > val Some(List(ast)) = intp.parse("@public def str = self.toString")
17 | * > nodeToString(ast)
18 | * > val DefDef(mods, name, tparams, vparamss, tpt, rhs) = ast // play with extractors to explore the tree and its properties.
19 | */
private[streams] object StreamsComponent {
  // Phase name shared between the component and the plugin/compiler wiring.
  val phaseName = "scalaxy-streams"
}
// Compiler phase that rewrites recognized stream chains into while loops,
// running right after the typer and before pattern-matching translation.
private[streams] class StreamsComponent(
  val global: Global, runAfter: String = "typer")
    extends PluginComponent
    with StreamTransforms
    with TypingTransformers
    with Optimizations {
  import global._
  import definitions._
  import Flags._

  override val phaseName = StreamsComponent.phaseName

  // NOTE(review): the `runAfter` constructor parameter is not referenced
  // below; the ordering is hard-coded in runsAfter / runsBefore.
  override val runsRightAfter = None
  override val runsAfter = List("typer")
  override val runsBefore = List("patmat")

  // Reporting callbacks forwarded to the compiler's reporter.
  override def info(pos: Position, msg: String, force: Boolean) {
    reporter.info(pos, msg, force = force)
  }
  override def warning(pos: Position, msg: String) {
    reporter.warning(pos, msg)
  }
  override def error(pos: Position, msg: String) {
    reporter.error(pos, msg)
  }

  override def newPhase(prev: Phase) = new StdPhase(prev) {
    def apply(unit: CompilationUnit) {
      if (!flags.disabled) {
        val transformer = new TypingTransformer(unit) {

          // Wraps localTyper.typed so failures surface the offending tree.
          def typed(tree: Tree) = try {
            localTyper.typed(tree)
          } catch { case ex: Throwable =>
            throw new RuntimeException("Failed to type " + tree + "\n(" + ex + ")", ex)
          }

          // TODO: this is probably a very slow way to get the strategy :-S
          // Resolves the OptimizationStrategy implicit in the current context.
          def getStrategy(pos: Position) =
            matchStrategyTree(
              tpe => analyzer.inferImplicit(
                EmptyTree,
                tpe,
                reportAmbiguous = true,
                isView = false,
                context = localTyper.context,
                saveAmbiguousDivergent = false,
                pos = pos
              ).tree
            )

          override def transform(tree: Tree) = {
            // Attempts the stream rewrite on one tree; any failure is logged
            // and treated as "no optimization applied".
            def opt(tree: Tree) = try {
              transformStream(
                tree = tree,
                strategy = getStrategy(tree.pos),
                fresh = unit.fresh.newName,
                currentOwner = currentOwner,
                recur = transform(_),
                typecheck = typed(_))
            } catch {
              case ex: Throwable =>
                logException(tree.pos, ex)
                None
            }

            // Try to optimize the tree as-is; failing that, transform its
            // children first and try once more on the result.
            opt(tree).getOrElse {
              val sup = super.transform(tree)
              opt(sup).getOrElse(sup)
            }
          }
        }

        unit.body = transformer transform unit.body
      }
    }
  }
}
101 |
--------------------------------------------------------------------------------
/src/main/scala/streams/plugin/StreamsPlugin.scala:
--------------------------------------------------------------------------------
1 | // Author: Olivier Chafik (http://ochafik.com)
2 | package scalaxy.streams
3 |
4 | import scala.tools.nsc.Global
5 | import scala.tools.nsc.plugins.Plugin
6 | import scala.tools.nsc.plugins.PluginComponent
7 |
private[streams] object StreamsPlugin {
  /** The phases this plugin contributes (currently a single StreamsComponent). */
  def getInternalPhases(global: Global): List[PluginComponent] =
    List(new StreamsComponent(global))
}
12 |
/** scalac plugin wrapper exposing the streams optimization phases to -Xplugin users. */
private[streams] class StreamsPlugin(override val global: Global) extends Plugin {

  override val name = "scalaxy-streams"

  override val description = "Compiler plugin that rewrites collection streams into while loops."

  override val components = StreamsPlugin.getInternalPhases(global)
}
21 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sideeffects/SideEffects.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | import scala.reflect.NameTransformer
3 | import scala.collection.breakOut
4 | import scala.collection.mutable.ArrayBuffer
5 | import scala.collection.mutable.ListBuffer
6 |
// Severities form a total order through their private numeric level;
// `description` is the human-readable form used in diagnostic messages.
private[streams] sealed class SideEffectSeverity(
  private val level: Int,
  val description: String)
    extends Comparable[SideEffectSeverity] {
  override def compareTo(s: SideEffectSeverity) = level.compareTo(s.level)
}
13 |
14 | /**
15 | * Severity of a detected side-effect.
16 | *
17 | * TODO: rename to Purity / ProbablyPure / Impure to match common naming.
18 | */
19 | private[streams] object SideEffectSeverity {
20 | /**
21 | * For side-effects that are "probably safe".
22 | *
23 | * For instance, `toString`, `equals`, `hashCode`, `+`, `++` are considered probably safe.
24 | * They can be even considered safe when called on truly immutable type such as `Int`,
25 | * but not on `List[T]`: `List.toString` is only as safe as it's components' `toString`
26 | * method.
27 | */
28 | case object ProbablySafe extends SideEffectSeverity(1, "probably safe")
29 |
30 | /**
31 | * For side-effects that may have unknown consequences.
32 | *
33 | * Most arbitrary references fall into this category (for instance `System.setProperty`).
34 | */
35 | case object Unsafe extends SideEffectSeverity(3, "unsafe")
36 | }
37 |
// Abstract interface for side-effect analysis; implementations return all
// detected side-effects (with severities) found inside a given tree.
private[streams] trait SideEffects
{
  val global: scala.reflect.api.Universe
  import global._

  // One detected side-effect: the offending sub-tree, a human-readable
  // reason, and how severe the effect is deemed to be.
  case class SideEffect(tree: Tree, description: String, severity: SideEffectSeverity)

  def analyzeSideEffects(tree: Tree): List[SideEffect]
}
47 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sideeffects/SideEffectsMessages.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | import scala.reflect.NameTransformer
3 |
private[streams] trait SideEffectsMessages
{
  val global: scala.reflect.api.Universe
  import global._

  /** Lets a Map be used as an extractor: `case m(value) =>` matches when the
    * scrutinee is a present key, binding the associated value. */
  implicit class ExtractibleMap[A, B](m: Map[A, B]) {
    def unapply(key: A): Option[B] = m.get(key)
  }

  /** Builds an extractor keyed by (encoded) TermNames from plain operator names. */
  def termNamesMessages(m: Map[String, String]): ExtractibleMap[TermName, String] =
    m map { case (name, message) => TermName(NameTransformer.encode(name)) -> message }

  private[this] val assumedSideEffectFreeMessageSuffix = "generally assumed to be side-effect free"

  def anyMethodMessage(name: String) =
    s"Any.$name is $assumedSideEffectFreeMessageSuffix"

  private[this] val aritMessage = s"Arithmetic / ensemblist operators are $assumedSideEffectFreeMessageSuffix"

  /** Operator names reported as ProbablySafe, each with the message to emit. */
  lazy val ProbablySafeUnaryNames = termNamesMessages(Map(
    "+" -> aritMessage,
    "-" -> aritMessage,
    "/" -> aritMessage,
    "*" -> aritMessage,
    "++" -> s"Collection composition is $assumedSideEffectFreeMessageSuffix",
    "--" -> s"Collection composition is $assumedSideEffectFreeMessageSuffix"
    // "canBuildFrom" -> s"CanBuildFrom's are $assumedSideEffectFreeMessageSuffix",
    // "zipWithIndex" -> s"zipWithIndex is $assumedSideEffectFreeMessageSuffix"
  ))
}
38 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/ArrayBuilderSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.collection.generic.CanBuildFrom
4 |
private[streams] trait ArrayBuilderSinks extends BuilderSinks {
  val global: scala.reflect.api.Universe
  import global._

  /** Sink building an Array: uses a generic ArrayBuilder, except when the
    * exact output size is known, in which case a pre-sized array is filled directly. */
  case object ArrayBuilderSink extends BuilderSink
  {
    override def describe = Some("Array")

    override def lambdaCount = 0

    override def subTrees = Nil

    override def usesSizeHint = true

    // TODO build array of same size as source collection if it is known.
    override def createBuilder(inputVars: TuploidValue[Tree], typed: Tree => Tree) = {
      val builderModule =
        rootMirror.staticModule("scala.collection.mutable.ArrayBuilder")
      typed(q"$builderModule.make[${inputVars.tpe}]")
    }

    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input._

      input.outputSize match {
        case Some(outputSize) =>
          // Known output size: skip the builder and write straight into a
          // pre-allocated array, tracking the write position in $index.
          // Fixed: was s"input.vars = $input.vars", which interpolated `input`
          // and appended a literal ".vars" to the message.
          require(input.vars.alias.nonEmpty, s"input.vars = ${input.vars}")

          val array = fresh("array")
          val index = fresh("i")

          val componentTpe = normalize(input.vars.tpe)

          val Block(List(
            arrayDecl,
            arrayCreation,
            indexDef,
            append,
            incr), arrayRef) = typed(q"""
            private[this] var $array: Array[$componentTpe] = null;
            $array = new Array[$componentTpe]($outputSize);
            private[this] var $index = 0;
            $array($index) = ${input.vars.alias.get};
            $index += 1;
            $array
          """)

          StreamOutput(
            prelude = List(arrayDecl),
            beforeBody = List(arrayCreation, indexDef),
            body = List(append, incr),
            ending = List(arrayRef))

        case _ =>
          // Unknown size: fall back to the generic builder-based emission.
          super.emit(input, outputNeeds, nextOps)
      }
    }
  }
}
70 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/ArrayOpsSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.collection.generic.CanBuildFrom
4 |
/** Sink producing a scala.collection.mutable.ArrayOps wrapper around a built array. */
private[streams] trait ArrayOpsSinks extends ArrayBuilderSinks {
  val global: scala.reflect.api.Universe
  import global._

  case object ArrayOpsSink extends StreamSink
  {
    override def isFinalOnly = true
    override def isJustAWrapper = true
    override def describe = Some("ArrayOps")
    override def lambdaCount = 0
    override def subTrees = Nil

    // Primitive element types map to the dedicated ArrayOps.ofXxx wrappers.
    private[this] val arrayOpsClass = "scala.collection.mutable.ArrayOps"
    private[this] lazy val anyValOpsClassNameByType: Map[Type, String] = Map(
      typeOf[Boolean] -> (arrayOpsClass + ".ofBoolean"),
      typeOf[Byte] -> (arrayOpsClass + ".ofByte"),
      typeOf[Char] -> (arrayOpsClass + ".ofChar"),
      typeOf[Double] -> (arrayOpsClass + ".ofDouble"),
      typeOf[Float] -> (arrayOpsClass + ".ofFloat"),
      typeOf[Int] -> (arrayOpsClass + ".ofInt"),
      typeOf[Long] -> (arrayOpsClass + ".ofLong"),
      typeOf[Short] -> (arrayOpsClass + ".ofShort"),
      typeOf[Unit] -> (arrayOpsClass + ".ofUnit")
    )

    // Applies f to the last element only, preserving order of the rest.
    // NOTE(review): throws MatchError on an empty list; the only caller
    // passes `ending`, which is assumed non-empty here — confirm.
    private def replaceLast[A](list: List[A], f: A => A): List[A] = {
      val last :: reversedRest = list.reverse
      (f(last) :: reversedRest).reverse
    }

    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input._

      // Emit the plain array construction, then rewrite its final expression
      // (the array reference) into the appropriate ArrayOps wrapper.
      val arrayOutput = ArrayBuilderSink.emit(input, outputNeeds, nextOps)
      val componentTpe = input.vars.tpe.dealias

      // Chooses ofXxx for primitives, ofRef for AnyRef, genericArrayOps otherwise.
      def getResult(array: Tree) = typed(
        anyValOpsClassNameByType.get(componentTpe) match {
          case Some(primitiveOpsClass) =>
            q"new ${rootMirror.staticClass(primitiveOpsClass)}($array)"
          case _ if componentTpe <:< typeOf[AnyRef] =>
            q"new scala.collection.mutable.ArrayOps.ofRef[$componentTpe]($array)"
          case _ =>
            q"genericArrayOps[$componentTpe]($array)"
        }
      )

      arrayOutput.copy(ending = replaceLast[Tree](arrayOutput.ending, getResult(_)))
    }
  }
}
57 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/BuilderSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait BuilderSinks extends StreamComponents {
  val global: scala.reflect.api.Universe
  import global._

  // Base class for builder-based sinks (List, Vector, Array, CanBuildFrom...).
  trait BuilderSink extends StreamSink
  {
    override def lambdaCount = 0

    /** Whether `builder.sizeHint(n)` should be emitted when the output size is known. */
    def usesSizeHint: Boolean

    /** Creates the (untyped) tree instantiating this sink's builder. */
    def createBuilder(inputVars: TuploidValue[Tree], typed: Tree => Tree): Tree

    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input._

      requireSinkInput(input, outputNeeds, nextOps)

      val builder = fresh("builder")
      // Fixed: was s"input.vars = $input.vars", which interpolated `input`
      // and appended a literal ".vars" to the message.
      require(input.vars.alias.nonEmpty, s"input.vars = ${input.vars}")

      // println("input.vars.alias.get = " + input.vars.alias.get + ": " + input.vars.tpe)
      // Type the whole scaffold in one block; the sizeHint slot is filled
      // with a dummy statement when no output size is available.
      val sizeHintOpt = input.outputSize.map(s => q"$builder.sizeHint($s)")
      val Block(List(
        builderDef,
        sizeHint,
        builderAdd), result) = typed(q"""
        private[this] val $builder = ${createBuilder(input.vars, typed)};
        ${sizeHintOpt.getOrElse(dummyStatement(fresh))};
        $builder += ${input.vars.alias.get};
        $builder.result()
      """)

      StreamOutput(
        prelude = List(builderDef),
        beforeBody = input.outputSize.filter(_ => usesSizeHint).map(_ => sizeHint).toList,
        body = List(builderAdd),
        ending = List(result))
    }
  }
}
45 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/CanBuildFromSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.collection.generic.CanBuildFrom
4 |
private[streams] trait CanBuildFromSinks
    extends BuilderSinks
    with ArrayOpsSinks​OrSelf
    with JsArrayBuilderSinks {
  val global: scala.reflect.api.Universe
  import global._

  /** Generic sink driven by an explicit `CanBuildFrom` evidence tree. */
  class CanBuildFromSink(canBuildFrom: Tree) extends BuilderSink
  {
    override def subTrees = List(canBuildFrom)

    // Extracts the `To` type argument of CanBuildFrom[From, Elem, To].
    val TypeRef(_, _, List(_, _, toTpe: Type)) = {
      val cbfSym = rootMirror.staticClass("scala.collection.generic.CanBuildFrom")
      canBuildFrom.tpe.baseType(cbfSym)
    }

    // Strip the scala.collection(.immutable) prefix for a terser description.
    override def describe = {
      val fullName = toTpe.typeSymbol.fullName
      Some(fullName.replaceAll("^scala\\.collection(\\.immutable)?\\.", ""))
    }

    override def usesSizeHint = true

    // Applying the CanBuildFrom evidence yields a fresh builder.
    override def createBuilder(inputVars: TuploidValue[Tree], typed: Tree => Tree) =
      typed(q"$canBuildFrom()")
  }

  object CanBuildFromSink
  {
    def unapply(op: StreamOp): Option[CanBuildFromSink] = op match {
      case sink: CanBuildFromSink => Some(sink)
      case _ => None
    }

    /** Picks a specialized sink for well-known CanBuildFrom instances,
      * falling back to the generic builder-based sink. */
    def apply(canBuildFrom: Tree): StreamSink = {
      val ownerAndName =
        Option(canBuildFrom.symbol)
          .filter(_ != NoSymbol)
          .map(s => (s.owner.fullName, s.name.toString))

      ownerAndName match {
        case Some(("scala.Array", "canBuildFrom")) =>
          ArrayBuilderSink

        case Some(("scala.scalajs.js.Any", "canBuildFromArray")) =>
          JsArrayBuilderSink

        case _ =>
          new CanBuildFromSink(canBuildFrom)
      }
    }
  }
}
51 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/IteratorSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait IteratorSinks
    extends StreamComponents
    with UnusableSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Placeholder sink for iterator-producing stream endings.
    *
    * Not implemented: emitting an iterator requires a demand-driven ("pull")
    * emission mode for sources, instead of the current push-style loops.
    * Tracked by issue nativelibs4java/scalaxy-streams#3.
    */
  case object IteratorSink extends StreamSink
  {
    override def isImplemented = false

    override def describe = Some("Iterator")

    override def lambdaCount = 0

    override def subTrees = Nil

    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input._

      // Unreachable in practice: isImplemented == false keeps the optimizer
      // from selecting this emission path.
      ???
      // TODO(ochafik): Add a new emission mode for sources / invert responsibilities.
      // Sketch of the intended generated iterator:
      //
      // requireSinkInput(input, outputNeeds, nextOps)
      //
      // val next = fresh("next")
      // val hasNext = fresh("hasNext")
      // val computedHasNext = fresh("computedHasNext")
      // require(input.vars.alias.nonEmpty, s"input.vars = ${input.vars}")
      //
      // val Block(List(...), ...) = typed(q"""
      //   new Iterator[Int] {
      //     private[this] var $next: ${input.vars.tpe} = _
      //     private[this] var $hasNext: Boolean = _
      //     private[this] var $computedHasNext: Boolean = _
      //     override def hasNext = {
      //       if (!$computedHasNext) {
      //         $hasNext = false
      //         $computedHasNext = true
      //         if (true /* interruptor */) {
      //           $next = ${input.vars.alias}
      //           $hasNext = true
      //         }
      //       }
      //       $hasNext
      //     }
      //     override def next = {
      //       if (!hasNext)
      //         throw new java.util.NoSuchElementException("next on empty iterator")
      //       $computedHasNext = false
      //       $next
      //     }
      //   }
      // """)
      // StreamOutput(
      //   prelude = List(...),
      //   body = List(...),
      //   ending = List(...))
    }
  }
}
67 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/JsArrayBuilderSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.collection.generic.CanBuildFrom
4 |
private[streams] trait JsArrayBuilderSinks
    extends BuilderSinks
    with ScalaJsSymbols
{
  val global: scala.reflect.api.Universe
  import global._

  /** Sink that appends items to a JavaScript array via `push`. */
  case object JsArrayBuilderSink extends StreamSink
  {
    override def describe = Some("js.Array")

    override def lambdaCount = 0

    override def subTrees = Nil

    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input._

      // Note: unlike in ArrayStreamSource, we don't optimize the case
      // where the output size is known, for JavaScript arrays often perform
      // just as well (or better) with sequential append than with
      // fixed alloc + indexed updates.

      // Fixed interpolation: was `$input.vars`, which rendered `input`'s
      // toString followed by the literal text ".vars".
      require(input.vars.alias.nonEmpty, s"input.vars = ${input.vars}")

      val array = fresh("array")

      val componentTpe = normalize(input.vars.tpe)

      // NOTE(review): JsArraySymOpt.get assumes Scala.js symbols are on the
      // classpath; presumably this sink is only selected when they are — confirm.
      val Block(List(
        arrayDecl,
        append), arrayRef) = typed(q"""
        private[this] var $array = new ${JsArraySymOpt.get}[$componentTpe]();
        $array.push(${input.vars.alias.get});
        $array
      """)

      StreamOutput(
        prelude = List(arrayDecl),
        body = List(append),
        ending = List(arrayRef))
    }
  }
}
50 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/ListBufferSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.collection.generic.CanBuildFrom
4 |
private[streams] trait ListBufferSinks extends BuilderSinks {
  val global: scala.reflect.api.Universe
  import global._

  /** Sink producing a `List` by accumulating into a `ListBuffer`. */
  case object ListBufferSink extends BuilderSink
  {
    override def describe = Some("List")

    // This sink ignores size hints.
    override def usesSizeHint = false

    override def subTrees = Nil

    lazy val ListBufferModule =
      rootMirror.staticModule("scala.collection.mutable.ListBuffer")

    // Alternative kept for reference: build through List.newBuilder instead.
    // lazy val ListModule =
    //   rootMirror.staticModule("scala.collection.immutable.List")

    override def createBuilder(inputVars: TuploidValue[Tree], typed: Tree => Tree) =
      typed(q"$ListBufferModule[${inputVars.tpe}]()")
      // typed(q"$ListModule.newBuilder[${inputVars.tpe}]")
  }
}
29 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/OptionSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait OptionSinks extends StreamComponents {
  val global: scala.reflect.api.Universe
  import global._

  /** Sink that materializes at most one item into an `Option`:
    * the loop body overwrites `value` / sets `nonEmpty`, and the ending
    * rebuilds `Some(value)` or `None` from those two vars. */
  case object OptionSink extends StreamSink
  {
    override def lambdaCount = 0

    override def subTrees = Nil

    override def describe = Some("Option")

    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input._

      requireSinkInput(input, outputNeeds, nextOps)

      val value = fresh("value")
      val nonEmpty = fresh("nonEmpty")
      // Fixed interpolation: was `$input.vars`, which rendered `input`'s
      // toString followed by the literal text ".vars".
      require(input.vars.alias.nonEmpty, s"input.vars = ${input.vars}")

      val Block(List(
        valueDef,
        nonEmptyDef,
        assignment), result) = typed(q"""
        ${newVar(value, input.vars.tpe)};
        private[this] var $nonEmpty = false;
        {
          $value = ${input.vars.alias.get};
          $nonEmpty = true;
        };
        if ($nonEmpty) Some($value) else None
      """)

      StreamOutput(
        prelude = List(valueDef, nonEmptyDef),
        body = List(assignment),
        ending = List(result))
    }
  }

  /** Sink that assigns into externally-declared value/nonEmpty symbols
    * (state presumably staged by the caller — see uses of StagedOptionSink). */
  case class StagedOptionSink(valueSymbol: Symbol, nonEmptySymbol: Symbol) extends StreamSink
  {
    override def lambdaCount = 0

    override def subTrees = Nil

    override def describe = Some("Option")

    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input._

      requireSinkInput(input, outputNeeds, nextOps)

      StreamOutput(
        body = List(typed(q"""{
          ${valueSymbol} = ${input.vars.alias.get};
          ${nonEmptySymbol} = true;
        }"""))
      )
    }
  }
}
68 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/SetBuilderSink.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.collection.generic.CanBuildFrom
4 |
private[streams] trait SetBuilderSinks extends BuilderSinks {
  val global: scala.reflect.api.Universe
  import global._

  /** Sink producing an immutable `Set` via `Set.canBuildFrom`. */
  case object SetBuilderSink extends BuilderSink
  {
    override def describe = Some("Set")

    override def usesSizeHint = false

    override def subTrees = Nil

    override def isFinalOnly = true

    // Cached like ListBufferSink.ListBufferModule, instead of re-resolving
    // the module symbol on every createBuilder call.
    lazy val SetModule =
      rootMirror.staticModule("scala.collection.immutable.Set")

    // Applying the canBuildFrom evidence yields a fresh builder.
    override def createBuilder(inputVars: TuploidValue[Tree], typed: Tree => Tree) = {
      typed(q"$SetModule.canBuildFrom[${inputVars.tpe}]()")
    }
  }
}
26 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/StreamSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait StreamSinks
    extends StreamComponents
    with ArrayBuilderSinks
    with IteratorSinks
    with ListBufferSinks
    with SetBuilderSinks
    with VectorBuilderSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Recognizes explicit `.toXxx` conversion calls and pairs the receiver
    * with the sink that produces that collection type. */
  object SomeStreamSink extends Extractor[Tree, (Tree, StreamSink)] {
    def unapply(tree: Tree): Option[(Tree, StreamSink)] = Option(tree).flatMap {
      case q"$target.toList" =>
        Some((target, ListBufferSink))

      case q"$target.toIterator" =>
        Some((target, IteratorSink))

      case q"$target.toArray[${_}](${_})" =>
        Some((target, ArrayBuilderSink))

      case q"$target.toSet[${_}]" =>
        Some((target, SetBuilderSink))

      case q"$target.toVector" =>
        Some((target, VectorBuilderSink))

      case _ =>
        None
    }
  }
}
33 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/UnusableSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait UnusableSinks extends StreamComponents {
  val global: scala.reflect.api.Universe
  import global._

  /** Common base for sinks that never emit any code themselves. */
  trait UnusableSinkBase extends StreamSink
  {
    override def lambdaCount = 0

    override def subTrees = Nil

    override def describe = None

    override def outputNeeds = Set()

    // Reaching emit on an unusable sink is a programming error.
    override def emit(input: StreamInput, outputNeeds: OutputNeeds, nextOps: OpsAndOutputNeeds): StreamOutput =
      ???
  }

  /// Sink is explicitly invalid: a stream cannot end with it.
  case object InvalidSink extends UnusableSinkBase

  /// Sink that outputs a Unit (e.g. for a foreach).
  case object ScalarSink extends UnusableSinkBase {
    override def canBeElided = false
  }
}
29 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sinks/VectorBuilderSinks.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | import scala.collection.generic.CanBuildFrom
4 |
private[streams] trait VectorBuilderSinks extends BuilderSinks {
  val global: scala.reflect.api.Universe
  import global._

  /** Sink producing an immutable `Vector` through `Vector.newBuilder`. */
  case object VectorBuilderSink extends BuilderSink
  {
    override def describe = Some("Vector")

    override def subTrees = Nil

    override def lambdaCount = 0

    override def usesSizeHint = false

    // TODO build Vector of same size as source collection if it is known.
    override def createBuilder(inputVars: TuploidValue[Tree], typed: Tree => Tree) = {
      val vectorModule = rootMirror.staticModule("scala.collection.immutable.Vector")
      typed(q"$vectorModule.newBuilder[${inputVars.tpe.dealias}]")
    }
  }
}
26 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sources/ArrayStreamSources.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait ArrayStreamSources
    extends ArrayBuilderSinks
    with StreamInterruptors
{
  val global: scala.reflect.api.Universe
  import global._

  /** Matches trees whose computed type is `scala.Array[_]`. */
  object SomeArrayStreamSource {
    // Testing the type would be so much better, but yields an awkward MissingRequirementError.
    private[this] lazy val ArraySym = rootMirror.staticClass("scala.Array")

    private[this] def isArrayType(tree: Tree) =
      findType(tree).exists(_.typeSymbol == ArraySym)

    def unapply(tree: Tree): Option[ArrayStreamSource] =
      Option(tree).filter(isArrayType).map(ArrayStreamSource(_))
  }

  /** Source that drains an array with an indexed while loop: it caches the
    * array and its length, then reads `array(i)` and increments `i` around
    * the downstream ops' body. */
  case class ArrayStreamSource(
    array: Tree,
    describe: Option[String] = Some("Array"),
    sinkOption: Option[StreamSink] = Some(ArrayBuilderSink))
  extends StreamSource
  {
    override def lambdaCount = 0
    override def subTrees = List(array)

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.{ fresh, transform, typed }

      // Fresh names for the loop state: array ref, cached length, index, item.
      val arrayVal = fresh("array")
      val lengthVal = fresh("length")
      val iVar = fresh("i")
      val itemVal = fresh("item")

      // NOTE(review): may be null when the type can't be found; the quasiquote
      // below then gets a null type ascription — see the disabled error below.
      val arrayTpe = findType(array).orNull
      // getOrElse {
      //   sys.error(s"Failed to find type of $array")
      // }

      // Early typing / symbolization.
      val Block(List(
        arrayValDef,
        lengthValDef,
        iVarDef,
        itemValDef),
        TupleCreation(List(
          lengthValRef, iVarRef, itemValRef))) = typed(q"""
        private[this] val $arrayVal: $arrayTpe = ${transform(array)};
        private[this] val $lengthVal = $arrayVal.length;
        private[this] var $iVar = 0;
        private[this] val $itemVal = $arrayVal($iVar);
        ($lengthVal, $iVar, $itemVal)
      """)

      // Declarations extracting the tuploid paths needed by downstream ops.
      val TuploidPathsExtractionDecls(extractionCode, outputVars, coercionSuccessVarDefRef) =
        createTuploidPathsExtractionDecls(
          itemValRef.tpe, itemValRef, outputNeeds, fresh, typed,
          newCoercionSuccessVarDefRef(nextOps, fresh, typed))

      val interruptor = new StreamInterruptor(input, nextOps)

      // Emit the downstream ops with the loop vars; the cached length makes
      // the output size known to size-hinting sinks.
      val sub = emitSub(
        input.copy(
          vars = outputVars,
          loopInterruptor = interruptor.loopInterruptor,
          outputSize = Some(lengthValRef)),
        nextOps,
        coercionSuccessVarDefRef._2)

      // Wrap the downstream body in the indexed while loop.
      sub.copy(
        beforeBody = Nil,
        body = List(typed(q"""
          $arrayValDef;
          $lengthValDef;
          $iVarDef;
          ..${interruptor.defs}
          ..${sub.beforeBody};
          while (${interruptor.composeTest(q"$iVarRef < $lengthValRef")}) {
            $itemValDef;
            ..$extractionCode
            ..${sub.body};
            $iVarRef += 1
          }
          ..${sub.afterBody}
        """)),
        afterBody = Nil
      )
    }
  }
}
97 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sources/InlineSeqStreamSources.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait InlineSeqStreamSources
    extends ArrayStreamSources
    with ListBufferSinks
{
  val global: scala.reflect.api.Universe
  import global._

  /** Matches inline `Seq(...)` / `List(...)` literals and drains them as
    * `Array(...)`-backed sources that still sink into a List. */
  object SomeInlineSeqStreamSource {
    private[this] lazy val ArrayModuleSym = rootMirror.staticModule("scala.Array")
    private[this] lazy val SeqModuleSym = rootMirror.staticModule("scala.collection.Seq")
    private[this] lazy val ListModuleSym = rootMirror.staticModule("scala.collection.immutable.List")

    // Both shapes iterate an Array of the same elements; only the
    // user-facing description differs.
    private[this] def arrayBackedSource(tpe: Tree, elements: List[Tree], label: String) =
      ArrayStreamSource(
        q"${ArrayModuleSym}.apply[$tpe](..$elements)",
        describe = Some(label),
        sinkOption = Some(ListBufferSink))

    def unapply(tree: Tree): Option[StreamSource] = Option(tree) collect {
      case q"$companion.apply[$tpe](..$elements)" if companion.symbol == SeqModuleSym =>
        arrayBackedSource(tpe, elements, "Seq")

      case q"$companion.apply[$tpe](..$elements)" if companion.symbol == ListModuleSym =>
        arrayBackedSource(tpe, elements, "List")
    }
  }
}
30 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sources/IteratorStreamSources.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait IteratorStreamSources
    extends IteratorSinks
    with StreamInterruptors
{
  val global: scala.reflect.api.Universe
  import global._

  /** Matches trees whose type conforms to `Iterator[Any]`. */
  object SomeIteratorStreamSource {
    def unapply(tree: Tree): Option[IteratorStreamSource] =
      Option(tree)
        .flatMap(t => Option(t.tpe))
        .filter(_ <:< typeOf[Iterator[Any]])
        .map(tpe => IteratorStreamSource(tree))
  }

  /** Source that drains an iterator with a `while (it.hasNext)` loop calling
    * `it.next` once per iteration. */
  case class IteratorStreamSource(
    iterator: Tree)
      extends StreamSource
  {
    override def describe = Some("Iterator")
    override def sinkOption = Some(IteratorSink)

    override def lambdaCount = 0

    override def subTrees = List(iterator)

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.{ fresh, transform, typed }

      val itVal = fresh("it")
      val itemVal = fresh("item")

      // Early typing / symbolization. The `$itVal.hasNext` statement is typed
      // here and later reused as the loop condition.
      val Block(List(
        itValDef,
        itHasNext,
        itemValDef),
        itemValRef) = typed(q"""
        private[this] val $itVal = ${transform(iterator)};
        $itVal.hasNext;
        private[this] val $itemVal = $itVal.next;
        $itemVal
      """)
      // Declarations extracting the tuploid paths needed by downstream ops.
      val TuploidPathsExtractionDecls(extractionCode, outputVars, coercionSuccessVarDefRef) =
        createTuploidPathsExtractionDecls(
          itemValRef.tpe, itemValRef, outputNeeds, fresh, typed,
          newCoercionSuccessVarDefRef(nextOps, fresh, typed))

      val interruptor = new StreamInterruptor(input, nextOps)

      // outputSize = None: an iterator's length is unknown up front,
      // so no size hints can be given to downstream sinks.
      val sub = emitSub(
        input.copy(
          vars = outputVars,
          loopInterruptor = interruptor.loopInterruptor,
          outputSize = None),
        nextOps,
        coercionSuccessVarDefRef._2)
      sub.copy(
        beforeBody = Nil,
        body = List(typed(q"""
          $itValDef;
          ..${interruptor.defs}
          ..${sub.beforeBody};
          while (${interruptor.composeTest(itHasNext)}) {
            $itemValDef;
            ..$extractionCode
            ..${sub.body};
          }
          ..${sub.afterBody}
        """)),
        afterBody = Nil
      )
    }
  }
}
81 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sources/JsArrayStreamSources.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait JsArrayStreamSources
    extends ArrayStreamSources
    with JsArrayBuilderSinks
    with StreamInterruptors
    with ScalaJsSymbols
{
  val global: scala.reflect.api.Universe
  import global._

  /** Matches trees typed as Scala.js `js.Array`, reusing the plain array
    * source machinery but sinking back into a js.Array. */
  object SomeJsArrayStreamSource {
    // Always false when the Scala.js symbols aren't on the classpath.
    private[this] def isJsArrayType(tree: Tree) =
      JsArraySymOpt.exists(sym => findType(tree).exists(_.typeSymbol == sym))

    def unapply(tree: Tree): Option[ArrayStreamSource] =
      for (array <- Option(tree) if isJsArrayType(array)) yield
        ArrayStreamSource(
          array,
          describe = Some("js.Array"),
          sinkOption = Some(JsArrayBuilderSink))
  }
}
26 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sources/ListStreamSources.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait ListStreamSources
    extends ListBufferSinks
    with StreamInterruptors
{
  val global: scala.reflect.api.Universe
  import global._

  /** Matches trees whose type conforms to `List[Any]`. */
  object SomeListStreamSource {
    def unapply(tree: Tree): Option[ListStreamSource] = Option(tree) collect {
      case _ if Option(tree.tpe).exists(_ <:< typeOf[List[Any]]) =>
        ListStreamSource(tree)
    }
  }

  /** Source that drains a list with a `while (curr ne Nil)` loop reading
    * `curr.head` and advancing `curr = curr.tail`. */
  case class ListStreamSource(
    list: Tree,
    describe: Option[String] = Some("List"),
    sinkOption: Option[StreamSink] = Some(ListBufferSink))
      extends StreamSource
  {
    override def lambdaCount = 0

    override def subTrees = List(list)

    override def emit(input: StreamInput,
                      outputNeeds: OutputNeeds,
                      nextOps: OpsAndOutputNeeds): StreamOutput =
    {
      import input.{ fresh, transform, typed }

      val listVal = fresh("list")
      val listVar = fresh("currList")
      val itemVal = fresh("item")

      // Early typing / symbolization.
      // NOTE(review): `$listVal.size` walks the whole list up front
      // (List.size is linear) to expose a known output size — confirm this
      // cost is intended wherever size hints aren't actually used.
      val Block(List(
        listValDef,
        listVarDef,
        itemValDef,
        listVarUpdate),
        TupleCreation(List(
          listSize, nonEmptyListTest, itemValRef))) = typed(q"""
        private[this] val $listVal = ${transform(list)}
        private[this] var $listVar = $listVal;
        private[this] val $itemVal = $listVar.head;
        $listVar = $listVar.tail;
        ($listVal.size, $listVar ne Nil, $itemVal)
      """)
      // Declarations extracting the tuploid paths needed by downstream ops.
      val TuploidPathsExtractionDecls(extractionCode, outputVars, coercionSuccessVarDefRef) =
        createTuploidPathsExtractionDecls(
          itemValRef.tpe, itemValRef, outputNeeds, fresh, typed,
          newCoercionSuccessVarDefRef(nextOps, fresh, typed))

      val interruptor = new StreamInterruptor(input, nextOps)

      val sub = emitSub(
        input.copy(
          vars = outputVars,
          loopInterruptor = interruptor.loopInterruptor,
          outputSize = Some(listSize)),
        nextOps,
        coercionSuccessVarDefRef._2)
      // Wrap the downstream body in the head/tail while loop.
      sub.copy(
        beforeBody = Nil,
        body = List(typed(q"""
          $listValDef;
          $listVarDef;
          ..${interruptor.defs}
          ..${sub.beforeBody};
          while (${interruptor.composeTest(nonEmptyListTest)}) {
            $itemValDef;
            ..$extractionCode
            ..${sub.body};
            $listVarUpdate
          }
          ..${sub.afterBody}
        """)),
        afterBody = Nil
      )
    }
  }
}
85 |
--------------------------------------------------------------------------------
/src/main/scala/streams/sources/StreamSources.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
private[streams] trait StreamSources
    extends InlineRangeStreamSources
    with InlineSeqStreamSources
    with IteratorStreamSources
    with ListStreamSources
    with OptionStreamSources
    with ArrayStreamSources
    with JsArrayStreamSources
{
  val global: scala.reflect.api.Universe
  import global._

  /** Tries every known source extractor in the order listed below;
    * the first one that matches wins. */
  object SomeStreamSource extends Extractor[Tree, StreamSource] {
    def unapply(tree: Tree): Option[StreamSource] = Option(tree).flatMap {
      case SomeInlineRangeStreamSource(source) => Some(source)
      case SomeInlineSeqStreamSource(source) => Some(source)
      case SomeListStreamSource(source) => Some(source)
      case SomeIteratorStreamSource(source) => Some(source)
      case SomeOptionStreamSource(source) => Some(source)
      case SomeArrayStreamSource(source) => Some(source)
      case SomeJsArrayStreamSource(source) => Some(source)
      case _ => None
    }
  }
}
27 |
--------------------------------------------------------------------------------
/src/test/scala/LoopsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams.test
2 |
3 | import org.junit._
4 | import org.junit.Assert._
5 |
/** Scratch test for loop rewrites. The body of `optimize { ... }` is
  * rewritten by the macro, so the exact tree shape below matters —
  * do not restructure casually. Most scenarios are kept commented out. */
class LoopsTest {
  import scalaxy.streams.optimize

  @Test
  def test {
    val n = 10
    // val cols = for (i <- 0 to n) yield for (i <- 0)
    // val s: Seq[Int] = optimize {
    //   for (i <- 1 to n; j <- 0 to n; val prod = i * j; if prod < (i + j)) yield {
    //     (i - j) / (prod + 1)
    //   }
    // }

    optimize {
      // def ff {
      val n = 3;
      // Checks that an optimized `0 to n` yield still produces the same Vector.
      assertEquals(Vector(0, 1, 2, 3), for (v <- 0 to n) yield v)
      // }
      // for (i <- 0 to n) {
      //   println(i)
      // }

      // for (l <- 10L until 2L by -2) yield { l + 1 }


      // val a = Array(1, 2, 4)
      // a.map(_ + 2).map(_ * 10).filter(_ < 3)

      // val arrays = Array(Array(1, 2), Array(3, 4))

      // arrays.map(_.map(_ + 2).map(_ * 10).filter(_ < 3))

      // for ((a, i) <- Array(Array(1)).zipWithIndex; len = a.length; if len < i) {
      //   println(s"$a, $len, $i")
      // }



      // for (array <- arrays;
      //      length = array.length * 30;
      //      if length < 10;
      //      v <- array)
      //   yield
      //     (length, v)


      // Reference: the compiler's desugaring of the comprehension above.
      // scala.this.Predef.refArrayOps(scala.this.Predef.refArrayOps(arrays)
      //   .map(((array: Array[Int]) => {
      //     val length: Int = array.length;
      //     scala.Tuple2.apply[Array[Int], Int](array, length)
      //   }))
      //   .withFilter(((x$1: (Array[Int], Int)) =>
      //     (x$1: (Array[Int], Int) @unchecked) match {
      //       case ((array @ _), (length @ _)) =>
      //         length.<(10)
      //     }))
      //   .flatMap(((x$2: (Array[Int], Int)) =>
      //     (x$2: (Array[Int], Int) @unchecked) match {
      //       case ((array @ _), (length @ _)) =>

      //         scala.this.Predef.refArrayOps(scala.this.Predef.intArrayOps(array)
      //           .map(((v: Int) =>
      //             scala.Tuple2.apply(length, v))
      //       }
      //     ))

      // Reference: hand-written while-loop equivalent.
      // var i = 0
      // val length1 = arrays.length
      // val b = ArrayBuffer[(Int, Int)]()
      // while (i < length1) {
      //   val array = arrays(i)
      //   val length = array.length
      //   if (length < 10) {
      //     var j = 0
      //     val length2 = array.length
      //     while (j < length2) {
      //       val v = array(j)
      //       b += ((length, v))
      //       j += 2
      //     }
      //   }
      //   i += 1
      // }

      // Empty trailing block keeps the optimize body's result Unit.
      {}


    }
  }

}
98 |
--------------------------------------------------------------------------------
/src/test/scala/MacroIntegrationTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | package test
4 |
5 | import org.junit._
6 | import org.junit.runner.RunWith
7 | import org.junit.runners.Parameterized.Parameters
8 |
9 | import scala.collection.JavaConversions._
10 |
/** Runs each IntegrationTests snippet through the macro transform and checks
  * the emitted compiler messages; parameterized (one case per snippet) and
  * scheduled in parallel by the Parallelized runner. */
@RunWith(classOf[Parallelized])
class MacroIntegrationTest(
    name: String,
    source: String,
    expectedMessages: CompilerMessages)
{
  import MacroIntegrationTest._

  @Test
  def test = testMessages(source, expectedMessages)(strategy)
}
22 |
object MacroIntegrationTest
  extends StreamComponentsTestBase with StreamTransforms {
  // Global flag side effects, applied once when this object is initialized.
  scalaxy.streams.flags.logLevel = LogLevel.Verbose
  scalaxy.streams.flags.quietWarnings = true
  scalaxy.streams.flags.experimental = true

  implicit def strategy = scalaxy.streams.strategy.foolish

  // JUnit Parameterized data: (test name, source snippet, expected messages).
  @Parameters(name = "{0}")
  def data: java.util.Collection[Array[AnyRef]] =
    IntegrationTests.data.map(t =>
      Array[AnyRef](t.name, t.source, t.expectedMessages))
}
36 |
--------------------------------------------------------------------------------
/src/test/scala/OptionStreamsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | package test
4 |
5 | import org.junit._
6 | import org.junit.runner.RunWith
7 | import org.junit.runners.Parameterized.Parameters
8 |
9 | import scala.collection.JavaConversions._
10 |
11 | import IntegrationTests.{msgs, potentialSideEffectMsgs}
12 |
/** This is just a testbed for "fast" manual tests.
  *
  * Generates the cross product of Option-valued expressions and terminal
  * suffixes, builds one big source snippet exercising
  * `lhs.map(f1).orElse(rhs.map(f2)).map(f3)[.suffix]` for every combination
  * (each wrapped so thrown exceptions become values), and checks the macro
  * rewrite yields the same result as plain compilation. */
class OptionStreamsTest
    extends StreamComponentsTestBase
    with PerformanceTestBase
    with StreamTransforms
{
  import global._

  scalaxy.streams.flags.logLevel = LogLevel.Verbose

  @Test
  def testOptionCombinations {
    // (expression source, short label) pairs; labels are currently unused.
    val options = List(
      "None" -> "None",
      "(None: Option[Int])" -> "Option",
      "Option[Any](null)" -> "Option",
      "Option[String](null)" -> "Option",
      "Option[String](\"Y\")" -> "Option",
      "Some(0)" -> "Some",
      "Some(\"X\")" -> "Some")
    val suffixes = List(
      None,
      Some("orNull" -> "orNull"),
      Some("getOrElse(\"Z\")" -> "getOrElse"),
      Some("get" -> "get"),
      Some("find(_.contains(\"2\"))" -> "find"))

    val src = s"""
      def f1(x: Any) = x.toString + "1"
      def f2(x: Any) = x.toString + "2"
      def f3(x: Any) = x.toString + "3"
      def wrap[A](a: => A): Either[A, String] =
        try {
          Left(a)
        } catch {
          case ex: Throwable =>
            Right(ex.getMessage)
        }

      List(
        ${{
          for ((lhs, _) <- options; (rhs, _) <- options; suf <- suffixes) yield {
            val stream = s"$lhs.map(f1).orElse($rhs.map(f2)).map(f3)"
            suf.map({case (s, _) => stream + "." + s}).getOrElse(stream)
          }
        }.map("wrap(" + _ + ")").mkString(",\n        ")}
      )
    """
    // println(src)

    assertMacroCompilesToSameValue(
      src,
      strategy = scalaxy.streams.strategy.foolish)

    // {
    //   import scalaxy.streams.strategy.foolish
    //   testMessages(src, msgs("Some.orElse(Some.map).map -> Option"),
    //     expectWarningRegexp = Some(List("there were \\d+ inliner warnings; re-run with -Yinline-warnings for details")))
    // }
  }

}
75 |
--------------------------------------------------------------------------------
/src/test/scala/Parallelized.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams;
2 |
3 | import org.junit.runners.Parameterized
4 | import org.junit.runners.model.RunnerScheduler
5 | import java.util.concurrent.TimeUnit
6 | import java.util.concurrent.Executors
7 |
/** JUnit RunnerScheduler that runs test statements on a fixed thread pool.
  *
  * Pool size comes from -Djunit.parallel.threads, defaulting to
  * 2 * availableProcessors.
  */
private[streams] class ThreadPoolScheduler extends RunnerScheduler
{
  private[this] val numThreads = {
    val n = Integer.parseInt(System.getProperty(
      "junit.parallel.threads",
      (Runtime.getRuntime().availableProcessors * 2) + ""))
    println("scalaxy.streams.ThreadPoolScheduler.numThreads = " + n)

    n
  }
  private[this] val executor = Executors.newFixedThreadPool(numThreads)

  /** Called by JUnit once everything is scheduled: drains the pool. */
  override def finished() {
    executor.shutdown()
    try {
      // awaitTermination returns false on timeout; this was previously
      // ignored, silently hiding hung tests.
      if (!executor.awaitTermination(30, TimeUnit.MINUTES)) {
        throw new RuntimeException(
          "Timed out after 30 minutes waiting for tests to finish")
      }
    } catch {
      case ex: InterruptedException =>
        // Restore the interrupt flag before surfacing the failure.
        Thread.currentThread().interrupt()
        throw new RuntimeException(ex)
    }
  }

  override def schedule(statement: Runnable) = executor.submit(statement)
}
32 |
/** Parameterized JUnit runner whose children run on a thread-pool scheduler. */
class Parallelized(cls: Class[_]) extends Parameterized(cls) {
  setScheduler(new ThreadPoolScheduler())
}
36 |
--------------------------------------------------------------------------------
/src/test/scala/PluginCompilationTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | package test
4 |
5 | import org.junit._
6 | import org.junit.runner.RunWith
7 | import org.junit.runners.Parameterized.Parameters
8 |
9 | import scala.collection.JavaConversions._
10 |
// One parameterized test case per integration-test snippet: each snippet must
// compile cleanly through the compiler plugin. Cases run in parallel.
@RunWith(classOf[Parallelized])
class PluginCompilationTest(name: String, source: String)
  extends StreamComponentsTestBase with StreamTransforms {
  @Test def test = assertPluginCompilesSnippetFine(source)
}
16 |
object PluginCompilationTest {

  // Explicit converters instead of relying on the deprecated implicit
  // scala.collection.JavaConversions imported at the top of the file.
  import scala.collection.JavaConverters._

  implicit def strategy = scalaxy.streams.strategy.default

  /** JUnit @Parameters provider: one (name, source) pair per snippet. */
  @Parameters(name = "{0}")
  def data: java.util.Collection[Array[AnyRef]] =
    IntegrationTests.data.map(t =>
      Array[AnyRef](t.name, t.source)).asJava
}
26 |
--------------------------------------------------------------------------------
/src/test/scala/StreamsOutputNeedsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | package test
4 |
5 | import SideEffectSeverity._
6 |
7 | import org.junit._
8 | import org.junit.Assert._
9 |
class StreamsOutputNeedsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  // Backward propagation of output needs through a nested (flatMap) stream:
  // the nested stream reduces to a single MapOp, and the needs computed back
  // from the sink still contain the root tuploid path at the head.
  @Test
  def testArrayMapMapFilterMap {
    println("TODO(ochafik): Finish this test!!!")

    // for (o <- Some(Some((1, 2))); (a, b) <- o) yield a + b
    val SomeStream(stream) = typecheck(q"""
      Some(Some((1, 2))).flatMap(o => o.map(p => (p._1, p._2)))
    """)
    // assertEquals("Some.flatMap(Option.withFilter.withFilter.map) -> Option",
    assertEquals("Some.flatMap(Option.map) -> Option",
      stream.describe())

    // The outer stream has exactly one op: the flatMap carrying a nested stream.
    val List(fop @ FlatMapOp(_, _, _)) = stream.ops
    val Some(nestedStream) = fop.nestedStream

    // val List(f1 @ CoerceOp(_), f2 @ CoerceOp(_), m @ MapOp(_, _)) = nestedStream.ops
    val List(m @ MapOp(_, _)) = nestedStream.ops

    assertEquals(Set(RootTuploidPath), stream.sink.outputNeeds)
    val inputNeeds = stream.computeOutputNeedsBackwards(stream.sink.outputNeeds)

    // val List(Set())
    println(s"inputNeeds = $inputNeeds")
    assertTrue(inputNeeds.head.contains(RootTuploidPath))
  }
}
39 |
--------------------------------------------------------------------------------
/src/test/scala/StreamsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | package test
4 |
5 | import SideEffectSeverity._
6 |
7 | import org.junit._
8 | import org.junit.Assert._
9 |
// Extraction tests: each snippet is typechecked and matched against the
// expected stream shape (source, ops list, sink, faithfulness flag).
class StreamsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  // `List(1).toIterator` -> single ToIteratorOp with an iterator sink; the
  // inline List(...) is recognized as an array-backed source labeled "List".
  @Test
  def testListToIterator {
    val SomeStream(Stream(_, source, List(ToIteratorOp), IteratorSink, false)) = typecheck(q"""
      List(1).toIterator
    """)
    val ArrayStreamSource(_, Some("List"), _) = source
  }

  // findSink derives the sink from the trailing ops: the last explicit sink
  // wins, and a trailing FilterOp yields no sink at all.
  @Test
  def testFindSink {
    assertEquals(Some(ArrayOpsSink), SomeStream.findSink(List(ArrayOpsOp)))
    assertEquals(Some(VectorBuilderSink), SomeStream.findSink(List(ArrayOpsOp, VectorBuilderSink)))
    assertEquals(None, SomeStream.findSink(List(ArrayOpsOp, FilterOp(null))))
    // val Some(CanBuildFromSink(null)) = SomeStream.findSink(List(ListBufferSink, ZipWithIndexOp(null)))
    assertEquals(Some(ListBufferSink), SomeStream.findSink(List(ArrayBuilderSink, ListBufferSink)))
  }

  // Chained Array ops interleave an ArrayOpsOp wrapper before each map/filter.
  @Test
  def testArrayMapMapFilterMap {
    val SomeStream(Stream(_, ArrayStreamSource(_, _, _), ops, ArrayBuilderSink, false)) = typecheck(q"""
      Array(1).map(_ + 1).map(_ * 10).filter(_ < 10)
    """)
    val List(ArrayOpsOp, MapOp(_, _), ArrayOpsOp, MapOp(_, _), ArrayOpsOp, FilterOp(_)) = ops
  }

  // Same interleaved structure when the receiver is a typed null array.
  @Test
  def testArrayMapFilterMap {
    val SomeStream(Stream(_, ArrayStreamSource(_, _, _), ops, ArrayBuilderSink, false)) = typecheck(q"""
      (null: Array[Int]).map(_ + 2).filter(_ < 3).map(_.hashCode)
    """)
    val List(ArrayOpsOp, MapOp(_, _), ArrayOpsOp, FilterOp(_), ArrayOpsOp, MapOp(_, _)) = ops
  }

  @Test
  def testArrayMap {
    val SomeStream(Stream(_, ArrayStreamSource(_, _, _), ops, ArrayBuilderSink, false)) = typecheck(q"""
      Array(1).map(_ + 1)
    """)
    val List(ArrayOpsOp, MapOp(_, _)) = ops
  }

  // Unlike Array, a List source uses a CanBuildFrom sink and no ops wrappers.
  @Test
  def testListMap {
    val SomeStream(s) = typecheck(q"""
      List(1).map(_ + 1)
    """)
    // Inline list creation is rewritten to an array.
    val Stream(_, ArrayStreamSource(_, _, _), ops, CanBuildFromSink(_), false) = s
    val List(MapOp(_, _)) = ops
  }

  // Inline range source keeps its step (2) and inclusiveness (true).
  @Test
  def testRangeMapMapFilterMap {
    val SomeStream(Stream(_, InlineRangeStreamSource(_, _, 2, true, _), ops, CanBuildFromSink(_), false)) = typecheck(q"""
      (1 to 10 by 2).map(_ + 1).map(_ * 10).filter(_ < 10)
    """)
    val List(MapOp(_, _), MapOp(_, _), FilterOp(_)) = ops
  }

  // A for-comprehension with an intermediate `len = ...` binding still
  // extracts as a stream (extraction success is the only assertion here).
  @Test
  def testFlatMap {
    val SomeStream(Stream(_, source, ops, sink, false)) = typecheck(q"""
      for (a <- Array(Array(1)); len = a.length; v <- a) yield (a, len, v)
    """)
  }

  // `.toVector` terminates the stream with a VectorBuilderSink (matched via
  // the stable-identifier pattern on the last line).
  @Test
  def testToVector {
    val tree = typecheck(q"""
      Array(1, 2, 3).map(_ + 1).toVector
    """)
    val SomeStream(Stream(_, source, ops, sink, _)) = tree
    val List(ArrayOpsOp, MapOp(_, _), ArrayOpsOp) = ops
    val VectorBuilderSink = sink
  }

  // zipWithIndex + value binding + guard in a foreach comprehension extracts.
  @Test
  def testMaps {
    val SomeStream(Stream(_, source, ops, sink, _)) = typecheck(q"""
      for ((a, i) <- Array(Array(1)).zipWithIndex; len = a.length; if len < i) {
        println(a + ", " + len + ", " + i)
      }
    """)
  }

  // Closure side-effect analysis: println calls and `new Object().toString`
  // are both classified as Unsafe.
  @Test
  def testBasicSideEffects {
    val SomeStream(stream) = typecheck(q"""
      (0 to 10).map(i => { println(i); i }).map(println)
    """)
    assertEquals(List(Unsafe, Unsafe), stream.closureSideEffectss.flatten.map(_.severity))

    val SomeStream(stream2) = typecheck(q"""
      (0 to 10).map(i => { new Object().toString + i })
    """)
    assertEquals(List(Unsafe), stream2.closureSideEffectss.flatten.map(_.severity))
  }
}
111 |
--------------------------------------------------------------------------------
/src/test/scala/SubTreeEvaluationOrderTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 |
3 | package test
4 |
5 | import org.junit._
6 | import org.junit.Assert._
7 | import org.junit.runner.RunWith
8 | import org.junit.runners.Parameterized
9 | import org.junit.runners.Parameterized.Parameters
10 |
11 | import scala.collection.JavaConversions._
12 |
// Parameterized test: compiles `source` and checks the optimizer messages
// match `expectedMessages` under the companion's (safe) strategy.
@RunWith(classOf[Parallelized])
class SubTreeEvaluationOrderTest(
    name: String,
    source: String,
    expectedMessages: CompilerMessages)
{
  // NOTE(review): these assignments mutate process-global flags from a test
  // constructor; with the Parallelized runner several instances may be built
  // concurrently — confirm last-writer-wins is acceptable here.
  scalaxy.streams.flags.logLevel = LogLevel.Verbose
  scalaxy.streams.flags.quietWarnings = true

  import SubTreeEvaluationOrderTest._

  @Test
  def test = testMessages(source, expectedMessages)(strategy)
}
27 |
// Test-data provider: each snippet is wrapped in a harness that records the
// order in which `named(...)` sub-expressions are evaluated, so the test can
// verify side-effect evaluation order is preserved by the optimization.
object SubTreeEvaluationOrderTest
  extends StreamComponentsTestBase with StreamTransforms {

  import IntegrationTests.msgs

  // Explicit converters instead of relying on the deprecated implicit
  // scala.collection.JavaConversions imported at the top of the file.
  import scala.collection.JavaConverters._

  implicit def strategy = scalaxy.streams.strategy.safe

  @Parameters(name = "{0}")
  def data: java.util.Collection[Array[AnyRef]] = List[(String, CompilerMessages)](

    """
      List(named("a", 1))
        .map(_ + named("added", 1))
        .mkString(named("pre", "{{"), named("sep", "; "), named("suf", "}}"))
    """
      -> msgs(/* side effects + safe strategy */),

    """
      List(1)
        .map(_ + 1)
        .mkString("{{", "; ", "}}")
    """
      -> msgs("List.map.mkString"),

    """
      List(named("a", 1), named("b", 2)).map(_ + 1).toList
    """
      -> msgs("List.map -> List")

  ).map({
    case (src, msgs) =>
      Array[AnyRef](
        // Collapse whitespace so the JUnit display name fits on one line.
        src.replaceAll(raw"(?m)\s+", " ").trim,
        """
          var names = collection.mutable.ArrayBuffer[String]();
          def named[A](name: String, a: A): A = {
            names += name
            a
          }
          val value = {""" + src + """};
          (names.toList, value)
        """,
        msgs
      )
  }).asJava
}
74 |
--------------------------------------------------------------------------------
/src/test/scala/ops/ArrayOpsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
class ArrayOpsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  /** Every predef `*ArrayOps` wrapper (generic, ref and all primitives) must
   *  be recognized as an ArrayOpsOp by the extractor. */
  @Test
  def testExtraction {
    val wrappedArrays = Seq(
      q"genericArrayOps(Array[Any]())",
      q"refArrayOps(Array[AnyRef]())",
      q"intArrayOps(Array[Int]())",
      q"longArrayOps(Array[Long]())",
      q"byteArrayOps(Array[Byte]())",
      q"shortArrayOps(Array[Short]())",
      q"charArrayOps(Array[Char]())",
      q"booleanArrayOps(Array[Boolean]())",
      q"floatArrayOps(Array[Float]())",
      q"doubleArrayOps(Array[Double]())")

    // A failed extraction throws a MatchError, failing the test.
    for (tree <- wrappedArrays) {
      val SomeArrayOpsOp(_, ArrayOpsOp) = typecheck(tree)
    }
  }

  /** Same extraction check for the Scala.js array wrapper. */
  @Test
  def testJsExtraction {
    val SomeJsArrayOpsOp(_, JsArrayOpsOp) = typecheck(
      q"scala.scalajs.js.Any.jsArrayOps[Int](scala.scalajs.js.Array[Int]())")
  }
}
30 |
--------------------------------------------------------------------------------
/src/test/scala/ops/CoerceOpsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
class CoerceOpsTest extends StreamComponentsTestBase
    with StreamTransforms
    with CoerceOps
{
  import global._

  // The tuple-coercion pattern emitted by the compiler for
  // `for ((a, i) <- ...)` desugarings (withFilter over an @unchecked match)
  // must extract as a CoerceOp at the end of the ops list.
  @Test
  def testCoerceExtractor {
    val v @ SomeCoerceOp(_, CoerceOp(_)) = typecheck(q"""Array(Array(1)).zipWithIndex.withFilter(
      (item2: (Array[Int], Int)) => (item2: (Array[Int], Int) @unchecked) match {
        case ((a @ _), (i @ _)) => true
        case _ => false
      }
    )""")
    val SomeStreamOps(_, ops) = v
    // printOps(ops)
    val List(ArrayOpsOp, ZipWithIndexOp(_), ArrayOpsOp, CoerceOp(_)) = ops
  }
}
26 |
--------------------------------------------------------------------------------
/src/test/scala/ops/FilterOpsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
class FilterOpsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  // filter / filterNot / withFilter each extract as their dedicated op; in
  // every case the full ops list has two entries (the Array wrapper + filter).
  @Test
  def testFilterExtractor {
    val v1 @ SomeFilterOp(_, FilterOp(_)) = typecheck(q"Array(1).filter(_ == 0)")
    val SomeStreamOps(_, _ :: _ :: Nil) = v1

    val v2 @ SomeFilterOp(_, FilterNotOp(_)) = typecheck(q"Array(1).filterNot(_ == 0)")
    val SomeStreamOps(_, _ :: _ :: Nil) = v2

    val v3 @ SomeFilterOp(_, WithFilterOp(_)) = typecheck(q"Array(1).withFilter(_ == 0)")
    val SomeStreamOps(_, _ :: _ :: Nil) = v3
  }
}
22 |
--------------------------------------------------------------------------------
/src/test/scala/ops/FlatMapOpsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
class FlatMapOpsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  // `Array(1).flatMap(...)` extracts as a FlatMapOp; the ops list has two
  // entries (the Array wrapper + the flatMap itself).
  @Test
  def testFlatMapExtractor {
    val v @ SomeFlatMapOp(_, FlatMapOp(_, _, _)) = typecheck(q"""
      Array(1).flatMap(v => Seq(v + 1))
    """)
    val SomeStreamOps(_, _ :: _ :: Nil) = v
  }
}
18 |
--------------------------------------------------------------------------------
/src/test/scala/ops/ForeachOpsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
class ForeachOpsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  // `(1 to 10).foreach(...)` extracts as a ForeachOp; the range source adds
  // no wrapper op, so the ops list has exactly one entry.
  @Test
  def testForeachExtractor {
    val v @ SomeForeachOp(_, ForeachOp(_)) = typecheck(q"(1 to 10).foreach(println _)")
    val SomeStreamOps(_, _ :: Nil) = v
  }
}
16 |
--------------------------------------------------------------------------------
/src/test/scala/ops/MapOpsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
class MapOpsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  // `Array(1).map(...)` extracts as a MapOp; the ops list has two entries
  // (the Array wrapper + the map itself).
  @Test
  def testMapExtractor {
    val v @ SomeMapOp(_, MapOp(_, _)) = typecheck(q"Array(1).map(_ + 1)")
    val SomeStreamOps(_, _ :: _ :: Nil) = v
  }
}
16 |
--------------------------------------------------------------------------------
/src/test/scala/ops/ToCollectionOpsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
// Each to-collection conversion must extract as its dedicated op, both on its
// own and when immediately followed by a reduction (`.sum`).
class ToCollectionOpsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  @Test
  def testToIterator {
    val SomeToCollectionOp(_, ToIteratorOp) =
      typecheck(q"Array(1).toIterator")
    val SomeReductionOp(SomeToCollectionOp(_, ToIteratorOp), _) =
      typecheck(q"Array(1).toIterator.sum")
  }

  @Test
  def testToList {
    val SomeToCollectionOp(_, ToListOp) =
      typecheck(q"Array(1).toList")
    val SomeReductionOp(SomeToCollectionOp(_, ToListOp), _) =
      typecheck(q"Array(1).toList.sum")
  }

  // Unlike the other conversions, `.toArray.sum` goes through an extra
  // ArrayOps wrapper between the conversion and the reduction.
  @Test
  def testToArray {
    val SomeToCollectionOp(_, ToArrayOp) =
      typecheck(q"Array(1).toArray")
    val SomeReductionOp(SomeArrayOpsOp(SomeToCollectionOp(_, ToArrayOp), _), _) =
      typecheck(q"Array(1).toArray.sum")
  }

  @Test
  def testToVector {
    val SomeToCollectionOp(_, ToVectorOp) =
      typecheck(q"Array(1).toVector")
    val SomeReductionOp(SomeToCollectionOp(_, ToVectorOp), _) =
      typecheck(q"Array(1).toVector.sum")
  }
}
42 |
--------------------------------------------------------------------------------
/src/test/scala/ops/ZipWithIndexOpsTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
class ZipWithIndexOpsTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  // `Array(1).zipWithIndex` extracts as a ZipWithIndexOp; the ops list has
  // two entries (the Array wrapper + zipWithIndex).
  // Renamed from the copy-pasted `testMapExtractor` (see MapOpsTest); JUnit
  // discovers tests via @Test, so the method name is not load-bearing.
  @Test
  def testZipWithIndexExtractor {
    val v @ SomeZipWithIndexOp(_, ZipWithIndexOp(_)) = typecheck(q"Array(1).zipWithIndex")
    val SomeStreamOps(_, _ :: _ :: Nil) = v
  }
}
16 |
--------------------------------------------------------------------------------
/src/test/scala/performance/CollectionPerformanceTests.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
/** Mixin that rewrites a (definition, use) pair so the use-site expression is
 *  pre-chained through a filter+map before the op under test is appended. */
trait ChainedPerformanceTests {
  this: CollectionPerformanceTests =>
  def chain(du: (String, String)) = {
    val chainSuffix = ".filter(v => (v % 2) == 0).map(_ * 2)"
    (du._1, du._2 + chainSuffix)
  }
}
14 |
// Mixin that opts a collection out of the right-to-left traversal tests by
// overriding them with no-ops.
trait NoRightTests { //extends CollectionPerformanceTests {
  this: CollectionPerformanceTests =>
  override def simpleScanRight = {}
  override def simpleFoldRight = {}
  override def simpleReduceRight = {}
}
// Mixin that opts a collection out of the scalar-reduction tests
// (sum/product/min/max) by overriding them with no-ops.
trait NoScalarReductionTests {//extends CollectionPerformanceTests {
  this: CollectionPerformanceTests =>
  override def simpleSum = {}
  override def simpleProduct = {}
  override def simpleMin = {}
  override def simpleMax = {}
}
// Performance tests over a List[Int] source (right-to-left ops excluded).
class ListPerformanceTest extends CollectionPerformanceTests with NoRightTests {
  override def col = ("val col: List[Int] = (0 to n).toList", "col")//.filter(v => (v % 2) == 0).map(_ * 2)")
}
// Same List source, pre-chained through filter+map (see ChainedPerformanceTests).
class ListChainedPerformanceTest extends ListPerformanceTest with ChainedPerformanceTests {
  override def col = chain(super.col)
}
// Performance tests over a tabulated Array source, plus one extra case for
// Array.tabulate itself.
class ArrayPerformanceTest extends CollectionPerformanceTests {
  override def col = ("val col = Array.tabulate(n)(i => i)", "col")
  @Test def simpleArrayTabulate = if (!skip) ensureFasterCodeWithSameResult(null, "Array.tabulate(n)(i => i).toSeq")
}
// Same Array source, pre-chained through filter+map.
class ArrayChainedPerformanceTest extends ArrayPerformanceTest with ChainedPerformanceTests {
  override def col = chain(super.col)
}
// Performance tests over an inline Range (no definition statement needed);
// right-to-left and scalar-reduction tests are opted out via the mixins.
class RangePerformanceTest extends CollectionPerformanceTests with NoRightTests with NoScalarReductionTests {
  override def col = (null: String, "(0 until n)")
  // override def simpleToArray = {}
  // override def simpleToList = {}
  // override def simpleTakeWhile = {}
  // override def simpleDropWhile = {}
  // override def simpleSum = {}
  // override def simpleProduct = {}
  // override def simpleMin = {}
  // override def simpleMax = {}
}
// Same Range source, pre-chained through filter+map.
class RangeChainedPerformanceTest extends CollectionPerformanceTests with ChainedPerformanceTests with NoRightTests with NoScalarReductionTests {
  override def col = chain((null, "(0 until n)"))
}
55 |
// One @Test per stream operation; `col` supplies the (definition, use)
// strings a subclass plugs in for its collection type.
trait CollectionPerformanceTests extends PerformanceTests {
  val skip = PerformanceTests.skip
  def col: (String, String)

  /**************************
   * Collection conversions *
   **************************/
  @Test def simpleToArray = if (!skip) testToArray(col)
  @Test def simpleToList = if (!skip) testToList(col)
  // @Ignore @Test def simpleToVector = if (!skip) testToVector(col)

  // The test* helpers already check `skip` internally (see PerformanceTests),
  // so the explicit guards above are redundant and omitted from here on.
  @Test def simpleFilter = testFilter(col)
  @Test def simpleFilterNot = testFilterNot(col)
  @Test def simpleCount = testCount(col)
  @Test def simpleExists = testExists(col)
  @Test def simpleForall = testForall(col)
  @Test def simpleTakeWhile = testTakeWhile(col)
  @Test def simpleDropWhile = testDropWhile(col)
  @Test def simpleForeach = testForeach(col)
  @Test def simpleMap = testMap(col)
  @Test def simpleSum = testSum(col)
  @Test def simpleProduct = testProduct(col)
  @Ignore @Test def simpleMin = testMin(col)
  @Ignore @Test def simpleMax = testMax(col)
  @Ignore @Test def simpleScanLeft = testScanLeft(col)
  @Ignore @Test def simpleScanRight = testScanRight(col)
  @Ignore @Test def simpleFoldLeft = testFoldLeft(col)
  @Ignore @Test def simpleFoldRight = testFoldRight(col)
  @Ignore @Test def simpleReduceLeft = testReduceLeft(col)
  @Ignore @Test def simpleReduceRight = testReduceRight(col)

}
88 |
--------------------------------------------------------------------------------
/src/test/scala/performance/PerformanceTests.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
object PerformanceTests {
  /** True unless the SCALAXY_TEST_PERF=1 environment variable is set.
   *  Prints a hint on how to enable the perf tests the first time it is read. */
  lazy val skip = {
    val perfEnvVar = "SCALAXY_TEST_PERF"
    val enabled = System.getenv(perfEnvVar) == "1"
    if (!enabled)
      println(s"You can run ${getClass.getName} by setting the environment variable $perfEnvVar=1")
    !enabled
  }
}
16 |
// Helpers shared by the per-collection performance suites: each test* method
// appends an operation to the collection expression and verifies the
// optimized code is faster while producing the same result.
trait PerformanceTests extends PerformanceTestBase {
  val skip: Boolean

  val oddPred = "x => (x % 2) != 0"
  val firstHalfPred = "x => x < n / 2"
  val midPred = "x => x == n / 2"

  /** Appends `suffix` to the use-site expression of `cc` and runs the
   *  speed + same-result check; no-op when `skip` is set. */
  private def check(cc: (String, String), suffix: String): Unit =
    if (!skip) ensureFasterCodeWithSameResult(cc._1, cc._2 + suffix)

  def testToList(cc: (String, String)) = check(cc, ".toList")

  def testToArray(cc: (String, String)) = check(cc, ".toArray.toSeq")

  def testFilter(cc: (String, String)) = check(cc, s".filter($oddPred).toSeq")

  def testFilterNot(cc: (String, String)) = check(cc, s".filterNot($oddPred).toSeq")

  def testCount(cc: (String, String)) = check(cc, s".count($oddPred)")

  // Not suffix-shaped: the collection expression is spliced into a foreach loop.
  def testForeach(cc: (String, String)) = if (!skip)
    ensureFasterCodeWithSameResult(cc._1, s"var tot = 0L; for (v <- ${cc._2}) { tot += v }; tot")

  def testMap(cc: (String, String)) = check(cc, ".map(_ + 1).toSeq")

  def testTakeWhile(cc: (String, String)) = check(cc, s".takeWhile($firstHalfPred).toSeq")

  def testDropWhile(cc: (String, String)) = check(cc, s".dropWhile($firstHalfPred).toSeq")

  def testExists(cc: (String, String)) = check(cc, s".exists($midPred)")

  def testForall(cc: (String, String)) = check(cc, s".forall($firstHalfPred)")

  def testSum(cc: (String, String)) = check(cc, ".sum")

  def testProduct(cc: (String, String)) = check(cc, ".product")

  def testMin(cc: (String, String)) = check(cc, ".min")

  def testMax(cc: (String, String)) = check(cc, ".max")

  def testReduceLeft(cc: (String, String)) = check(cc, ".reduceLeft(_ + _)")

  def testReduceRight(cc: (String, String)) = check(cc, ".reduceRight(_ + _)")

  def testFoldLeft(cc: (String, String)) = check(cc, ".foldLeft(0)(_ + _)")

  def testFoldRight(cc: (String, String)) = check(cc, ".foldRight(0)(_ + _)")

  def testScanLeft(cc: (String, String)) = check(cc, ".scanLeft(0)(_ + _).toSeq")

  def testScanRight(cc: (String, String)) = check(cc, ".scanRight(0)(_ + _).toSeq")

}
88 |
--------------------------------------------------------------------------------
/src/test/scala/sources/StreamSourcesTest.scala:
--------------------------------------------------------------------------------
1 | package scalaxy.streams
2 | package test
3 |
4 | import org.junit._
5 | import org.junit.Assert._
6 |
// Source extraction tests: each kind of collection expression must be
// recognized both by its dedicated extractor and by the generic one.
class StreamSourcesTest extends StreamComponentsTestBase with StreamTransforms {
  import global._

  // Inline Array(...) and a typed null array both extract as array sources.
  @Test
  def testArrayExtractor {
    val v1 @ SomeArrayStreamSource(_) = typecheck(q"Array(1)")
    val SomeStreamSource(_) = v1

    val v2 @ SomeArrayStreamSource(_) = typecheck(q"(null: Array[Int])")
    val SomeStreamSource(_) = v2
  }

  // Same for Scala.js arrays.
  @Test
  def testJsArrayExtractor {
    val v1 @ SomeJsArrayStreamSource(_) = typecheck(
      q"scala.scalajs.js.Array(1)")
    val SomeStreamSource(_) = v1

    val v2 @ SomeJsArrayStreamSource(_) = typecheck(
      q"(null: scala.scalajs.js.Array[Int])")
    val SomeStreamSource(_) = v2
  }

  // Inline ranges keep their step (1 / -2) and inclusiveness (to vs until).
  @Test
  def testInlineRangeExtractor {
    val v1 @ SomeInlineRangeStreamSource(InlineRangeStreamSource(_, _, 1, true, _)) = typecheck(q"1 to 10")
    val SomeStreamSource(_) = v1

    val v2 @ SomeInlineRangeStreamSource(InlineRangeStreamSource(_, _, 1, false, _)) = typecheck(q"1 until 10")
    val SomeStreamSource(_) = v2

    val v3 @ SomeInlineRangeStreamSource(InlineRangeStreamSource(_, _, -2, true, _)) = typecheck(q"10 to 1 by -2")
    val SomeStreamSource(_) = v3

    val v4 @ SomeInlineRangeStreamSource(InlineRangeStreamSource(_, _, -2, false, _)) = typecheck(q"10 until 1 by -2")
    val SomeStreamSource(_) = v4
  }

  // Inline List(...) / Seq(...) literals extract as inline-seq sources.
  @Test
  def testInlineSeqExtractor {
    val v1 @ SomeInlineSeqStreamSource(_) = typecheck(q"List(1)")
    val SomeStreamSource(_) = v1

    val v2 @ SomeInlineSeqStreamSource(_) = typecheck(q"Seq(1)")
    val SomeStreamSource(_) = v2
  }

  // Typed null list, Nil, and cons-built lists all extract as list sources.
  @Test
  def testListExtractor {
    val v1 @ SomeListStreamSource(_) = typecheck(q"(null: List[Int])")
    val SomeStreamSource(_) = v1

    val v2 @ SomeListStreamSource(_) = typecheck(q"Nil")
    val SomeStreamSource(_) = v2

    val v3 @ SomeListStreamSource(_) = typecheck(q"1 :: Nil")
    val SomeStreamSource(_) = v3
  }
}
66 |
--------------------------------------------------------------------------------