├── src
├── main
│ └── scala
│ │ └── sperformance
│ │ ├── intelligence
│ │ ├── ClusteringTest.scala
│ │ ├── HistoricalResults.scala
│ │ └── Clustering.scala
│ │ ├── charting
│ │ ├── ChartGenerator.scala
│ │ ├── ChartingReporterTest.scala
│ │ └── SizeChartGenerator.scala
│ │ ├── Keys.scala
│ │ ├── generators
│ │ ├── SizeGenerator.scala
│ │ └── Generator.scala
│ │ ├── Main.scala
│ │ ├── util
│ │ ├── FileUtils.scala
│ │ └── PerformanceTestHelper.scala
│ │ ├── PerformanceTest.scala
│ │ ├── dsl
│ │ └── DSL.scala
│ │ ├── store
│ │ └── StoreResultStrategy.scala
│ │ └── RunContext.scala
└── test
│ └── scala
│ ├── PartialFunction.scala
│ ├── GoalTest.scala
│ ├── MyPerformanceTest.scala
│ ├── MapConcurrencyTest.scala
│ └── CollectionsShootout.scala
└── README
/src/main/scala/sperformance/intelligence/ClusteringTest.scala:
--------------------------------------------------------------------------------
package sperformance
package intelligence

/**
 * A test that automatically clusters results...
 */
trait ClusteringTest extends PerformanceTest {

}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/charting/ChartGenerator.scala:
--------------------------------------------------------------------------------
package sperformance
package charting

import intelligence.Cluster

/** Strategy interface: renders one cluster of performance results as a chart. */
trait ChartGenerator {
  //This method needs *far* more info!!!!
  /** Renders `cluster` into the given run context (implementations write the chart via the context). */
  def generateChart(cluster : Cluster, context : RunContext) : Unit
  /** Returns true if this generator knows how to chart the given cluster's axes/attributes. */
  def canHandleCluster(cluster : Cluster) : Boolean
}
--------------------------------------------------------------------------------
/README:
--------------------------------------------------------------------------------
1 | sperformance (under BSD License... TODO - Add that...)
2 |
3 | SPerformance is an attempt to create a performance testing framework for use with SBT and Scala projects. SPerformance is supposed to take performance-related tests and create pretty graphs displaying the results for you.
4 |
5 | The second goal of SPerformance is to save historical information about a project so that current performance can be compared to previous performance. This project exists to support the scala-io project and my own book writing.
6 |
--------------------------------------------------------------------------------
/src/main/scala/sperformance/charting/ChartingReporterTest.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 | package charting
3 |
4 | import intelligence._
5 |
6 |
7 |
//hide all our variables in this object
/** Entry point for turning clustered results into charts. */
object Charting {
  /** Generators tried against each cluster; the size-based generator is the only default. */
  val default_chartGenerators : Seq[ChartGenerator] = SizeChartGenerator :: Nil

  /**
   * Renders one chart per (cluster, capable generator) pair.
   *
   * @param clusters        all clusters of recorded results, keyed by their metadata
   * @param context         run context the generated charts are written into
   * @param chartGenerators generators to try; defaults to `default_chartGenerators`
   */
  def createReports(clusters : Map[ClusterMetaData, Cluster], context : RunContext)(implicit chartGenerators : Seq[ChartGenerator] = default_chartGenerators) : Unit = {
    //Find data...
    for {
      (_, cluster) <- clusters
      gen <- chartGenerators
      if gen.canHandleCluster(cluster)
    } {
      gen.generateChart(cluster, context)
    }
  }
}
23 |
--------------------------------------------------------------------------------
/src/main/scala/sperformance/intelligence/HistoricalResults.scala:
--------------------------------------------------------------------------------
1 | package sperformance.intelligence
2 | import sperformance.PerformanceTestResult
3 |
/**
 * Collects results across historical runs, bucketing each result into the
 * single cluster matching its full attribute set and axis keys.
 *
 * @author jeichar
 */
class HistoricalResults extends ClusterResults {
  override def reportResult(result: PerformanceTestResult): Unit = {
    //TODO - Add to appropriate clusters of information, adjust clusters as appropriate...
    val key = ClusterMetaData(result.attributes, result.axisData.keySet)
    // getOrElse's default is by-name, so a new cluster is only created on a miss.
    val target = clusters.getOrElse(key, addAndReturnCluster(new Cluster(key)))
    target.addResult(result)
  }

}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/Keys.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 |
/**
 * Lists some attribute keys that have special meaning.
 *
 * @note perhaps one day the keys should be typed to their value but could be overkill
 */
object Keys {
  /**
   * A key for Historical Run Context; all versions of the same module and method
   * will be compared together in a single graph
   */
  val Version = "version"
  /**
   * The number of warm-up runs used to warm up the JVM before running the test
   */
  val WarmupRuns = "warm-up runs"
  /**
   * The number of times to run the tests before combining them into a single
   * result (depending on how they are combined)
   */
  val TestRuns = "test-runs"
}
--------------------------------------------------------------------------------
/src/test/scala/PartialFunction.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 |
3 | import collection.Traversable
4 | import collection.generic.CanBuildFrom
5 |
6 |
/**
 * Benchmarks the built-in `collect` against a hand-rolled variant that guards
 * each application with both `isDefinedAt` and a `MatchError` handler.
 */
object PartialFunctionTest extends sperformance.dsl.PerformanceDSLTest {
  /** Like `Traversable.collect`, but additionally swallows MatchError thrown by `f`. */
  def collectWithTry[A,B, To](col : Traversable[A])(f : PartialFunction[A,B])(implicit cbf : CanBuildFrom[Traversable[A], B, To]) = {
    val builder = cbf()
    col foreach { elem =>
      if (f.isDefinedAt(elem)) {
        try builder += f(elem)
        catch { case _ : MatchError => () }
      }
    }
    builder.result
  }


  performance of "Traversable" in {
    measure method "collectWithTry" in {
      withSize upTo 1000 withSetup { size =>
        (1 to size) map (_ => math.random)
      } run { data =>
        collectWithTry(data)({
          case x if x < 0.5 => "Stringer"
        })(implicitly[CanBuildFrom[Traversable[Double],String,Traversable[String]]])
      }
    }
    measure method "collect" in {
      withSize upTo 1000 withSetup { size =>
        (1 to size) map (_ => math.random)
      } run { data =>
        data.collect({
          case x if x < 0.5 => "Stringer"
        })(implicitly[CanBuildFrom[Traversable[Double],String,Traversable[String]]])
      }
    }
  }
}
41 |
42 |
--------------------------------------------------------------------------------
/src/main/scala/sperformance/generators/SizeGenerator.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 | package generators
3 |
4 |
/**
 * A Generator of Integers. This currently works by generating a range of integers (with increment of one).
 *
 * TODO - Take Range[Int] in constructor...
 *
 * @param name      axis name reported with every generated size
 * @param startSize first size to test (inclusive)
 * @param endSize   last size to test (inclusive)
 * @param increment step between tested sizes
 */
case class IntGenerator(name : String, startSize : Int, endSize : Int, increment : Int) extends Generator[Int] with GeneratorOperations[Int] {
  /** Midpoint of [startSize, endSize], used as a representative size for JVM warm-up.
   *  BUG FIX: was `(endSize - startSize) / 2`, which is half the range WIDTH, not the
   *  midpoint — e.g. `from 900 upTo 1000` previously warmed up at size 50, outside the range.
   */
  private[sperformance] lazy val medianSize = startSize + (endSize - startSize) / 2


  /** One test run at a fixed size: builds state via setupFunc(size), then times testFunc on it. */
  private class SizeGeneratorTestRun[S](size : Int, setupFunc : Int => S, testFunc : S => Unit) extends AbstractPerformanceTestRun[S] {
    override def setup = () => setupFunc(size)
    override def test(s : S) : Unit = testFunc(s)
    /** Apply whatever mods we need to the context for this specific test... e.g. Adding attribtues */
    override def modifyContext(ctx : PerformanceTestRunContext) : PerformanceTestRunContext = {
      ctx addAxisValue (name -> size)
    }
  }

  override def genWarmUp[S](setup : Int => S)(test : S => Unit) : PerformanceTestRun[S] = new SizeGeneratorTestRun(medianSize, setup, test)
  override def genTests[S](setup : Int => S)(test : S => Unit) : Traversable[PerformanceTestRun[S]] =
    for(i <- startSize to endSize by increment) yield new SizeGeneratorTestRun(i, setup,test)


  override def toString : String = "IntGenerator(" + name + ", " + startSize + " to " + endSize + " by "+increment+")"
}
--------------------------------------------------------------------------------
/src/test/scala/GoalTest.scala:
--------------------------------------------------------------------------------
1 | import sperformance.store._
2 | import java.io.File
3 | import sperformance.Keys
4 |
/**
 * Example performance test exercising the DSL's attribute support
 * (warm-up-run count, test-run count, versioned variants) and recording
 * results historically for cross-run comparison.
 */
object GoalTest extends sperformance.dsl.PerformanceDSLTest {

  performance of "List" in {
    having attribute (Keys.WarmupRuns -> 3) in {
      having attribute (Keys.TestRuns -> 6) in {
        measure method "foreach" in {
          withSize from 100 upTo 1000 by 100 withSetup { size =>
            (1 to size).toList
          } run { collection =>
            var tmp = 0
            // NOTE(review): `tmp + x` discards its result (not `tmp += x`);
            // presumably here only to give the loop body work — confirm intent.
            collection.foreach(x => tmp + x)
          }
        }
        // Same method name as above; distinguished by the Version attribute so
        // the two foreach variants are compared together.
        measure method "foreach" in {
          having attribute (Keys.Version -> "with while") in {
            withSize from 100 upTo 1000 by 100 withSetup { size =>
              (1 to size).toList
            } run { collection =>
              var tmp = 0
              val iter = collection.iterator
              while (iter.hasNext) {
                val next = iter.next()
                tmp + next
              }
            }
          }
        }
        measure method "view foreach" in {
          withSize from 100 upTo 1000 by 100 withSetup { size =>
            (1 to size).toList
          } run { collection =>
            var tmp = 0
            collection.view.foreach(x => tmp + x)
          }
        }
      }
    }
  }

  // Runs the test with a historical context so this run's numbers are stored
  // under target/sperformance/historical and compared against previous runs.
  def main(args: Array[String]) {
    val context = new sperformance.HistoricalRunContext(new File("target/sperformance/historical"), new XmlStoreResults(_), new XmlLoadResults(_))
    runTest(context)

    context.generateResultsPage(name)

  }
}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/Main.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 |
3 | import java.io.{FileOutputStream, PrintStream, BufferedOutputStream, File}
4 |
/**
 * This object is meant to be the final runner of the SPerformance test framework.
 */
object Main {
  var outputDirectory = new File("target/sperformance")


  def runTestsReflectively(tests: Class[_ <: PerformanceTest]*) {
    //TODO - ClassLoader magic....
    runTests(tests.map(_.newInstance) : _* )
  }

  /** Runs each test in its own DefaultRunContext and writes its results page. */
  def runTests(tests : PerformanceTest*) {
    for(test <- tests) {
      val context = new DefaultRunContext(new File(outputDirectory,test.name), test.name)
      test.runTest(context)
      context.generateResultsPage()
    }
  }

  /**
   * Writes a top-level index.html that links to each test's results page.
   *
   * NOTE(review): the XML literal below was reconstructed from a corrupted
   * source dump — confirm the link target (name + "/index.html") matches where
   * DefaultRunContext actually writes each test's page.
   */
  def writeMainSite(tests: PerformanceTest*) {
    val names = tests.map(_.name)


    def content = (
      <html>
        <head><title>SPerformance Results</title></head>
        <body>
          <h1>SPerformance Results</h1>
          <ul>
            {
              for(name <- names) yield (<li>
                <a href={name + "/index.html"}>{name}</a>
              </li>)
            }
          </ul>
        </body>
      </html>
    )

    val index = new File(outputDirectory, "index.html")
    val output = new PrintStream(new BufferedOutputStream(new FileOutputStream(index)))
    try {
      output.println(content)
    } finally {
      output.close()
    }

  }


  def main(args : Array[String]) {
    //TODO - determine command line settings, and go do them!
    runTestsReflectively(args.map( arg => Class.forName(arg).asInstanceOf[Class[_ <: PerformanceTest]]) : _*)
  }
}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/charting/SizeChartGenerator.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 | package charting
3 |
4 | import intelligence.Cluster
5 | import org.jfree.data.xy.{XYSeries, XYSeriesCollection}
6 | import org.jfree.chart.{ChartFactory}
7 | import org.jfree.chart.plot.PlotOrientation
8 |
/**
 * Chart generator for clusters whose single axis is "size": plots time against
 * collection size, with one XY series per distinct attribute combination.
 */
object SizeChartGenerator extends ChartGenerator {
  val sizeAxisName = "size"

  /** Handles only clusters keyed by exactly the "size" axis. */
  override def canHandleCluster(cluster : Cluster) : Boolean = {
    cluster.metaData.axis.contains(sizeAxisName) && cluster.metaData.axis.size == 1
  }

  /** Series label: the result's attribute values joined with '-'. */
  private def makeSeriesName(result : PerformanceTestResult) = result.attributes.values.mkString("-")

  def makeChartName(cluster : Cluster) = {
    cluster.metaData.attributes.mkString(" & ").replace(">", "") + " By Size"
  }

  /** Builds one XYSeries per series name; each point is (size, time). */
  def makeSeries(cluster : Cluster) = {
    val dataset = new XYSeriesCollection()
    cluster.results.groupBy(makeSeriesName).foreach { case (seriesName, grouped) =>
      val series = new XYSeries(seriesName, true, true)
      grouped.foreach { r =>
        series.add(r.axisData(sizeAxisName).asInstanceOf[Int], r.time.asInstanceOf[Long])
      }
      dataset.addSeries(series)
    }
    dataset
  }

  override def generateChart(cluster : Cluster, context :RunContext) : Unit = {
    //Now we rip through cluster data...
    val chartName = makeChartName(cluster)
    val dataset = makeSeries(cluster)

    //TODO - Fix this!!!
    //First we should compare different modules with similar named methods
    //Second we should compare methods on the same module....
    val chart = ChartFactory.createXYLineChart(chartName, "size", "time", dataset, PlotOrientation.VERTICAL, true, true, false)
    context.writeResultingChart(List(sizeAxisName),chartName, chart)
  }

}
48 |
--------------------------------------------------------------------------------
/src/main/scala/sperformance/util/FileUtils.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 | package util
3 |
4 | import annotation.tailrec
5 | import java.io._
6 | import java.net.URL
7 |
/** File-system helpers for result pages and charts. */
object FileUtils {

  /** Ensures the PARENT directory of `file` exists, creating it if needed.
   *  NOTE: despite the name, this operates on `file.getParentFile`, not `file` itself.
   *  FIX: getParentFile returns null for path roots / bare names; guard against NPE.
   */
  def ensureDirectoryExists(file : File) : Unit = {
    val dir = file.getParentFile
    if(dir != null && !dir.exists()) {
      dir.mkdirs();
    }
  }

  /**
   * Obtains the relative path between a directory and a file
   *
   * @throws RuntimeException if `dir` is not a parent of `file`
   */
  def relativePath(dir : File, file : File) : String = {
    lazy val dirPath = {
      val path = dir.getAbsolutePath
      if(!path.endsWith(File.separator)) path + File.separator else path
    }
    if(file.getAbsolutePath.startsWith(dirPath)) {
      file.getAbsolutePath.drop(dirPath.size)
    } else error(dir + " is not a parent of " + file)
  }

  /** Finds all the index.html files in a directory... */
  def findIndexes(dir : File) : List[File] = {
    // FIX: File.listFiles returns null for non-directories and I/O errors;
    // treat that as an empty listing instead of throwing NPE on .toList.
    def getDirsOrIndexes(dir : File) : List[File] = Option(dir.listFiles(new FileFilter {
      def accept(path : File) = path.isDirectory || path.getName.endsWith("index.html")
    })).map(_.toList).getOrElse(Nil)
    def isIndexFile(f : File) = f.getPath.endsWith("index.html")

    // Explicit work-list traversal kept tail-recursive for deep trees.
    @tailrec
    def findIndexesHelper(curFile : Option[File], filesToSearch : List[File], indexFiles : List[File]) : List[File] = curFile match {
      case None if filesToSearch.isEmpty =>
        indexFiles
      case None =>
        findIndexesHelper(filesToSearch.headOption, filesToSearch.tail, indexFiles)
      case Some(f) if f.isDirectory =>
        val nextFiles = getDirsOrIndexes(f) ++ filesToSearch
        findIndexesHelper(nextFiles.headOption, nextFiles.tail, indexFiles)
      case Some(f) => //Has to be index file
        findIndexesHelper(None, filesToSearch, f :: indexFiles)
    }
    findIndexesHelper(None, getDirsOrIndexes(dir), Nil)
  }

  /** Copies the contents of a URL to a local file (commons-io under the hood). */
  val copy = org.apache.commons.io.FileUtils.copyURLToFile(_:URL,_:File)

  /** Opens a UTF-8 writer on `file`, passes it to `f`, and always closes it. */
  def writer[U](file:File)(f:Writer => U):U = manage(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"))(f)

  /** Opens an output stream on `file`, passes it to `f`, and always closes it. */
  def outputStream[U](file:File)(f:FileOutputStream => U):U = manage(new FileOutputStream(file))(f)

  /** Loan pattern: applies `f` to the resource and closes it even on exception. */
  def manage[U,R <: Closeable](c : R)(f:R => U):U = {
    try {
      f(c)
    } finally {
      c.close()
    }
  }
}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/intelligence/Clustering.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 | package intelligence
3 |
4 | import collection.mutable.ListBuffer
5 |
6 |
7 |
/** Metadata about clusters - used to ensure results go to appropriate cluster. */
case class ClusterMetaData(attributes : Map[String,Any], axis : Set[String]) {
  /** True when the result carries every axis key and every attribute (with equal value) of this cluster. */
  def matchesCluster(result : PerformanceTestResult) : Boolean = {
    val hasAllAxes = axis.forall(result.axisData.contains)
    val hasAllAttributes = attributes.forall { case (key, value) =>
      result.attributes.get(key).exists(_ == value)
    }
    hasAllAttributes && hasAllAxes
  }
}
19 |
object Cluster {
  /**
   * Builds a display name by joining the attribute values with '-', ordered by
   * their keys descending, case-insensitively.
   *
   * FIX: previously used `sortWith(_.toLowerCase >= _.toLowerCase)` — a
   * non-strict comparator violates the sort's comparator contract and can throw
   * "Comparison method violates its general contract!" when ties occur.
   */
  def makeName(attributes: Map[String, Any]) = {
    val sortedAttributeKeys = attributes.keySet.toList.sortBy(_.toLowerCase)(Ordering[String].reverse)
    sortedAttributeKeys.flatMap(attributes.get).mkString("", "-", "")
  }
}
/** A cluster of results sharing the same metadata. */
class Cluster(val metaData : ClusterMetaData) {

  /** All results reported for this cluster, in arrival order. */
  val results = new ListBuffer[PerformanceTestResult]

  /** Records another result in this cluster. */
  def addResult(result :PerformanceTestResult) {
    results += result
  }

  /** Display name derived from this cluster's attribute values. */
  def name = Cluster.makeName(metaData.attributes)

}
40 |
/**
 * This class is eventually supposed to cluster all results into packets of results.
 *
 * For now we implement just one method cuz we're lazy...
 */
class ClusterResults extends PerformanceTestRunContext {

  /** All known clusters, keyed by their metadata. */
  var clusters = Map[ClusterMetaData, Cluster]()
  override def attribute[U](key: String): Option[U] = None
  override def axisValue[U](key: String): Option[U] = None

  /** Registers a new cluster and returns it. */
  def addAndReturnCluster(cluster : Cluster) : Cluster = {
    Console.println("Creating cluster: " + cluster.metaData)
    clusters += ((cluster.metaData, cluster))
    cluster
  }

  /** Files the result into one single-attribute/single-axis cluster per (attribute, axis) pair. */
  def reportResult(result : PerformanceTestResult) : Unit = {
    //TODO - Add to appropriate clusters of information, adjust clusters as appropriate...
    result.attributes.foreach { attr =>
      result.axisData.keys.foreach { axisName =>
        val metaData = ClusterMetaData(Map(attr), Set(axisName))
        val cluster = clusters.getOrElse(metaData, addAndReturnCluster(new Cluster(metaData)))
        cluster.addResult(result)
      }
    }
  }

  override def toString = "ClusterResults()"
}
--------------------------------------------------------------------------------
/src/test/scala/MyPerformanceTest.scala:
--------------------------------------------------------------------------------
1 | import _root_.sperformance.dsl.PerformanceDSLTest
2 | import collection.mutable.ListBuffer
3 |
/**
 * Benchmarks `foreach` and `size` across List, ListBuffer, Array, and
 * java.util.ArrayList, each over collection sizes up to 1000.
 */
object MyPerformanceTest extends PerformanceDSLTest {
  performance of "List" in {
    measure method "foreach" in {
      withSize upTo 1000 withSetup { size =>
        (1 to size).toList
      } run { collection =>
        var tmp = 0
        // NOTE(review): `tmp + x` discards its result (not `tmp += x`);
        // presumably just loop-body work — confirm intent. Same pattern below.
        collection.foreach(x => tmp + x)
      }
    }
    measure method "size" in {
      withSize upTo 1000 withSetup { size =>
        (1 to size).toList
      } run { collection =>
        var tmp = 0
        tmp += collection.size
      }
    }
  }
  performance of "ListBuffer" in {
    measure method "foreach" in {
      withSize upTo 1000 withSetup { size =>
        val collection = new ListBuffer[Int]
        for( i <- 1 to size) collection += i
        collection
      } run { collection =>
        var tmp = 0
        collection.foreach(x => tmp + x)
      }
    }
    measure method "size" in {
      withSize upTo 1000 withSetup { size =>
        val collection = new ListBuffer[Int]
        for( i <- 1 to size) collection += i
        collection
      } run { collection =>
        var tmp = 0
        tmp += collection.size
      }
    }
  }
  performance of "Array" in {
    measure method "foreach" in {
      withSize upTo 1000 withSetup { size =>
        val collection = new Array[Int](size)
        for(i <- 1 to size) collection(i-1) = i
        collection
      } run { collection =>
        var tmp = 0
        // NOTE(review): Array/ArrayList loops use `tmp = x * 20`, unlike the
        // `tmp + x` used for List/ListBuffer — the measured work differs slightly.
        collection.foreach(x => tmp = x * 20)
      }
    }
    measure method "size" in {
      withSize upTo 1000 withSetup { size =>
        val collection = new Array[Int](size)
        for(i <- 1 to size) collection(i-1) = i
        collection
      } run { collection =>
        var tmp = 0
        tmp += collection.size
      }
    }
  }

  performance of "java.util.ArrayList" in {
    // Wraps the Java list so scala-style foreach is available.
    import scala.collection.JavaConversions._
    measure method "foreach" in {
      withSize upTo 1000 withSetup { size =>
        val collection = new java.util.ArrayList[Int](size+1)
        for(i <- 1 to size) collection.add(i)
        collection
      } run { collection =>
        var tmp = 0
        collection.foreach(x => tmp = x * 20)
      }
    }
    measure method "size" in {
      withSize upTo 1000 withSetup { size =>
        val collection = new java.util.ArrayList[Int](size+1)
        for(i <- 1 to size) collection.add(i)
        collection
      } run { collection =>
        var tmp = 0
        tmp += collection.size
      }
    }
  }
}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/util/PerformanceTestHelper.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 | package util
3 |
4 |
/**
 * Helper methods for running performance tests.
 */
private[sperformance] object PerformanceTestHelper {

  /** Currently the number of times we run the warm up test when warming up the JVM. */
  lazy val defaultWarmUpRuns = 100000
  /** Default number of measured runs when none is specified. */
  lazy val defaultRuns = 10

  /**
   * Aggregates test results by taking an average.
   */
  val AverageAggregator = { samples : Seq[Long] =>
    samples.sum / samples.size
  }

  /**
   * Aggregates test results by taking the minimum value.
   */
  val MinAggregator = { samples : Seq[Long] =>
    samples.min
  }

  /**
   * Aggregates test results by taking the maximum value.
   */
  val MaxAggregator = { samples : Seq[Long] =>
    samples.max
  }

  /**
   * Aggregates test results by taking the median value (the upper median when
   * the sample count is even).
   */
  val MedianAggregator = { samples : Seq[Long] =>
    val ordered = samples.sorted
    ordered(ordered.size / 2)
  }

  /**
   * Aggregates test results by averaging only the values lying within twice the
   * mean absolute deviation of the overall mean (a rough outlier filter).
   */
  val MeanWithoutOutliers = { samples : Seq[Long] =>
    val mean = AverageAggregator(samples)
    val meanAbsDeviation = AverageAggregator(samples.map(s => math.abs(s - mean)))
    val cutoff = meanAbsDeviation * 2
    AverageAggregator(samples.filter(s => math.abs(s - mean) < cutoff))
  }

  /** Runs a given function enough that the HotSpot JVM should optimize the method.
   * TODO - Do not warm up non JIT/HotSpot JVMs.
   */
  def warmUpJvm(method: Function0[Unit],warmUpRuns:Option[Int]) {
    val total = warmUpRuns getOrElse defaultWarmUpRuns
    (1 to total).foreach(_ => method.apply())
  }

  /**
   * Measures the execution of a function.
   * @param method The method being measured.
   * @return The nanosecond execution time of the function.
   */
  def measureOnce(method : Function0[Unit]) : Long = {
    val start = System.nanoTime
    method.apply()
    System.nanoTime - start
  }

  /**
   * Measures the execution time of a method several times, and returns the results.
   * @param runs   The number of times to run the method
   * @param method The method being measured
   * @return Sequence of execution times for a method.
   */
  def measureMultiple(runs: Int)(method: Function0[Unit]) = (1 to runs).map(_ => measureOnce(method))

  /**
   * Measures the execution time of a method and aggregates the results into one
   * number using a given aggregator.
   * @param method         The method under test
   * @param runs           Optional run count (defaults to defaultRuns)
   * @param combineResults The mechanism of aggregating results (defaults to minimum value)
   * @return The aggregated result of the performance test.
   */
  def measure(method : Function0[Unit],runs:Option[Int])(implicit combineResults : Seq[Long] => Long = MinAggregator) : Long = {
    combineResults(measureMultiple(runs getOrElse defaultRuns)(method))
  }



}
--------------------------------------------------------------------------------
/src/test/scala/MapConcurrencyTest.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 |
3 | import collection.immutable.{HashMap=>ImmutableHashMap}
4 | import collection.mutable.{HashMap=>MutableHashMap}
5 | import java.util.concurrent.{ExecutorService,Executors}
6 | import annotation.tailrec
7 |
// Abstract type for memoizing function values.
/** Minimal key/value store interface used to compare map implementations under concurrency. */
trait Service[Key,Value] {
  /** Returns the value stored under `k`, if any. */
  def lookUp(k : Key) : Option[Value]
  /** Stores `v` under `k`. */
  def insert(k : Key, v : Value) : Unit
}
13 |
14 |
/**
 * Service backed by an immutable HashMap held in a mutable reference.
 * Writers copy-and-swap the map under `synchronized`; readers are lock-free.
 */
class ImmutableService[Key, Value] extends Service[Key, Value] {
  // FIX: @volatile is required so lock-free readers observe the map reference
  // published by insert; without it, lookUp may see a stale map indefinitely.
  @volatile var currentIndex = new ImmutableHashMap[Key,Value]
  def lookUp(k : Key) : Option[Value] = currentIndex.get(k)
  def insert(k : Key, v: Value) : Unit = synchronized {
    currentIndex = currentIndex + ((k, v))
  }
}
22 |
23 |
/** Service backed by a mutable HashMap; every access takes the instance lock. */
class MutableService[Key, Value] extends Service[Key, Value] {
  val currentIndex = new MutableHashMap[Key, Value]
  def lookUp(k : Key) : Option[Value] = synchronized { currentIndex.get(k) }
  def insert(k : Key, v : Value) : Unit = synchronized {
    currentIndex(k) = v
  }
}
31 |
32 |
object TestLibs {
  // NOTE(review): this pool is never shut down and is unused by makeTest — confirm it's needed.
  var executor = Executors.newFixedThreadPool(2)

  // Adapts a no-arg function to Runnable (the name says Callable, but it builds a Runnable).
  implicit def functionToCallable[A](f : () => A) = new Runnable {
    override def run() {
      f()
    }
  }

  // TODO: unimplemented.
  def runTest(r : Stream[Runnable]) {

  }

  /** Thread-safe accumulator for boolean lookup outcomes. */
  class ListHolder {
    var list = List[Boolean]()

    def addValue(x : Boolean) : Unit = synchronized {
      list = x :: list
    }
  }

  /**
   * Builds a randomly interleaved workload of n inserts and n reads against
   * `service`, returning the runnables plus the holder collecting read outcomes
   * (whether each lookup found a value).
   */
  def makeTest(n : Int, service : Service[Int, Int]) : (List[Runnable], ListHolder) = {
    var results = new ListHolder()
    def makeInsertRunnable(idx : Int) : Runnable = new Runnable {
      override def run() {
        service.insert(idx,idx)
      }
    }
    def makeReadRunnable(idx : Int) : Runnable = new Runnable {
      override def run() {
        val result = service.lookUp(idx)
        //Thread.sleep(10L)
        results.addValue(result.isDefined)
      }
    }
    // Lazily builds the interleaving: insert index i and read index j advance
    // independently; a coin flip (k) picks which goes next, except an insert is
    // forced when i == j (so insert i precedes read i in stream order) and reads
    // are forced once every insert has been emitted.
    def makeStream(generateIdx : Int, readIdx : Int, max : Int) : Stream[Runnable] = {
      (generateIdx, readIdx, math.random) match {
        case (i, j, k) if j > max =>
          Stream()
        case (i, j, k) if i > max =>
          Stream.cons(makeReadRunnable(j), makeStream(i, j+1, max))
        case (i, j, k) if i == j =>
          Stream.cons(makeInsertRunnable(i), makeStream(i+1, j, max))
        case (i, j, k) if k > 0.5 =>
          Stream.cons(makeInsertRunnable(i), makeStream(i+1, j, max))
        case (i, j, k) =>
          Stream.cons(makeReadRunnable(j), makeStream(i, j+1, max))
      }
    }
    (makeStream(1,1,n).toList, results)
  }


}
87 |
88 | import _root_.java.util.concurrent._
89 |
/**
 * Compares lookup/insert throughput of the immutable-map service against the
 * fully-synchronized mutable-map service on a 2-thread pool.
 */
object MapConcurrencyTest extends sperformance.dsl.PerformanceDSLTest {

  performance of "ImmutableService" in {
    measure method "index-lookup" in {
      // NOTE(review): created once per measure block and never shut down — confirm acceptable.
      val x = Executors.newFixedThreadPool(2)
      withSize upTo 50 withSetup {
        size =>
          TestLibs.makeTest(size, new ImmutableService)
      } run {
        case (runnables, results) =>
          // Ensure we wait until all runnables are done
          runnables.map(x.submit(_, true)).foreach(_.get)
      }
    }
  }
  performance of "MutableService" in {
    measure method "index-lookup" in {
      val x = Executors.newFixedThreadPool(2)
      withSize upTo 50 withSetup {
        size =>
          TestLibs.makeTest(size, new MutableService)
      } run {
        case (runnables, results) =>
          // Ensure we wait until all runnables are done
          runnables.map(x.submit(_, true)).foreach(_.get)
      }
    }
  }
}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/PerformanceTest.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 |
3 | import generators._
4 |
/**
 * This class represents all state recorded from a given performance test. It is the quantum of communication of data in
 * the entire system.
 *
 * @param time The aggregated time (in nanoseconds) it took to run the test
 * @param axisData Data relating to alternative axes, such as size of collections. For checking algorithmic growth
 * @param attributes Arbitrary attributes relating to this run e.g. the class/method under test.
 */
case class PerformanceTestResult(time: Long, axisData: Map[String, Any], attributes: Map[String, Any])
14 |
/**
 * A Handler for specific performance test results. This is the means by which generators report testing
 * results. This can be modified with various attributes/axis values before being passed to a generator of tests.
 * Also, it allows users to define their own mechanisms of testing performance and reporting results.
 */
trait PerformanceTestRunContext { self =>

  /** Looks up an attribute previously added to this context, if any. */
  def attribute[U](key: String): Option[U]
  /** Looks up an axis value previously added to this context, if any. */
  def axisValue[U](key: String): Option[U]
  /** Reports a test result to this context. The current attributes/axis values are appended and sent to the performance reporting engine. */
  def reportResult(result: PerformanceTestResult): Unit

  /**
   * Creates a new PerformanceTestRunContext that will append the given attribute to tests.
   */
  def addAttribute[T, U](attr: (String, U)): PerformanceTestRunContext = new DelegatedPerformanceTestRunContext(self) {
    override def reportResult(result: PerformanceTestResult) = super.reportResult(result.copy(attributes = result.attributes + attr))
    override def attribute[U](key: String): Option[U] = if (attr._1 == key) Some(attr._2.asInstanceOf[U]) else self.attribute[U](key)
    override def toString: String = "AttributeDelegatingCtx(" + attr + " to " + self + ")"
  }
  /**
   * Creates a new PerformanceTestRunContext that will append the given axis value to tests.
   */
  def addAxisValue[T, U](axisVal: (String, U)): PerformanceTestRunContext = new DelegatedPerformanceTestRunContext(self) {
    override def reportResult(result: PerformanceTestResult) = super.reportResult(result.copy(axisData = result.axisData + axisVal))
    // BUG FIX: previously fell back to self.attribute, so axis values added
    // further up the decorator chain could never be found through axisValue.
    override def axisValue[U](key: String): Option[U] = if (axisVal._1 == key) Some(axisVal._2.asInstanceOf[U]) else self.axisValue[U](key)
    override def toString: String = "AxisDelegatingCtx(" + axisVal + " to " + self + ")"
  }
}
44 |
/**
 * Delegating version of the PerformanceTestRunContext. This class is used to make the decorator pattern (used in
 * the addAttribute and addAxisValue methods) easier to implement.
 */
abstract class DelegatedPerformanceTestRunContext(delegate: PerformanceTestRunContext) extends PerformanceTestRunContext {
  // Forwards everything to the wrapped context; subclasses override selectively.
  override def reportResult(result: PerformanceTestResult): Unit = delegate.reportResult(result)
  override def attribute[U](key: String): Option[U] = delegate.attribute[U](key)
  override def axisValue[U](key: String): Option[U] = delegate.axisValue[U](key)
}
54 |
/** A no-op context: discards all results and holds no attributes or axis values. */
object NullPerformanceTestRunContext extends PerformanceTestRunContext {
  override def reportResult(result: PerformanceTestResult): Unit = {}
  override def attribute[U](key: String): Option[U] = None
  override def axisValue[U](key: String): Option[U] = None

}
61 |
/**
 * This trait is mixed in when you want to define performance testing runs
 */
trait PerformanceTest {

  /**
   * Returns the name of the test being run...
   *
   * Default implementation is to look up reflectively the name of the class:
   * "Object-<name>" (trailing '$' stripped) for singleton objects, otherwise
   * "Class-<name>".
   */
  def name: String = {
    // FIX: getCanonicalName is null for anonymous/local classes; fall back to
    // getName rather than NPE-ing on the endsWith check below. The previous
    // isTrait/isClass helpers were dead code and have been removed.
    val className = Option(getClass.getCanonicalName).getOrElse(getClass.getName)

    def isObject = className.endsWith("$")

    if (isObject) "Object-" + className.dropRight(1) else "Class-" + className
  }

  /**
   * This method will execute a performance test.
   */
  def runTest(context: RunContext): Unit = { context.testFinished(this) }

}
88 |
89 |
--------------------------------------------------------------------------------
/src/main/scala/sperformance/dsl/DSL.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 | package dsl
3 |
4 | import scala.reflect.Manifest
5 | import collection.mutable.ListBuffer
6 | import collection.mutable.Stack
7 | import generators._
8 |
trait PerformanceDSLTest extends PerformanceTest {

  //DSL State variables... YUK
  // The context DSL blocks report into; temporarily rebound (and then
  // restored) by the with* helpers below while a block executes.
  private var _current_context : PerformanceTestRunContext = _
  //Console.println("Current context = " + _current_context)
  // Runs f against a modified context, restoring the previous context after.
  // NOTE(review): the restore is not in a finally block, so an exception
  // thrown by f leaves the modified context installed — confirm intent.
  private def withNewContext[A](modify : PerformanceTestRunContext => PerformanceTestRunContext)(f : => A) = {
    val tmp = _current_context
    _current_context = modify(_current_context)
    f
    _current_context = tmp
  }
  /**
   * Ensures correct module/class reported in tests
   */
  private def withCurrentModule[A](module : String)(f : => A) = withNewContext(_.addAttribute("module", module))(f)
  /**
   * Ensures correct method reported in tests
   */
  private def withCurrentMethod[A](method : String)(f : => A) = withNewContext(_.addAttribute("method", method))(f)

  //Current generator of tests...
  // Stack of generators built up by the withSize/withIndex clauses; nested
  // clauses are combined through flatMap so tests cover the cross product.
  private val _current_generator = new Stack[Generator[_]]
  private def addGenerator(g : Generator[_]) : Generator[_] = {
    if(!_current_generator.isEmpty) {
      // Combine with the top of the stack so nested DSL clauses nest their tests.
      val head = _current_generator.head
      val next = head.asInstanceOf[Generator[Any]].flatMap(ignore => g.asInstanceOf[Generator[Any]])
      _current_generator.push(next)
      next
    } else {
      _current_generator.push(g)
      g
    }
  }
  private def popGenerator() = {
    _current_generator.pop()
  }
  private def clearGenerators() = {
    _current_generator.clear()
  }
  // Applies f to the generator on top of the stack (unchecked cast to the element type the caller expects).
  private def withCurrentGenerator[A](f : Generator[A] => Unit) = f(_current_generator.head.asInstanceOf[Generator[A]])



  /**
   * Delays execution of some function until later
   *
   * TODO - Save generator stack too?
   */
  private final class DelayedRunner[A](ctx : PerformanceTestRunContext, function : () => A) {
    def execute() = {
      //TODO - Restore to state when delayed? Startup issues currently....
      //_current_context = ctx
      function()
    }
  }
  /** Stores delayed DSL-ish tasks until runTest is called */
  private val delayedTasks = new ListBuffer[DelayedRunner[_]]

  /**
   * Delays a given closure until runTest is called
   */
  private def delayUntilTest[A](f : => A) {
    delayedTasks append new DelayedRunner(_current_context, () => f)
  }
  private def executeDelayedTasks() : Unit = for(task <- delayedTasks) task.execute()
  /**
   * This method will execute a performance test.
   *
   * Installs the reporting context from the RunContext, then replays every DSL
   * block registered while the object was being constructed.
   */
  override def runTest(context : RunContext) : Unit = {
    _current_context = context.defaultTestContext
    executeDelayedTasks()
    super.runTest(context);
  }


  /**
   * Start of performance DSL
   */
  object performance {
    // performance of "module" in { ... } — tags nested tests with the module attribute.
    def of(module : String) = new {
      def in[A](f : => A) {
        delayUntilTest(withCurrentModule(module)(f))
      }
    }
    // performance of [SomeType] in { ... } — module name derived from the type's canonical name.
    def of[A : Manifest] = new {
      def in[B](f : => B) {
        delayUntilTest(withCurrentModule(implicitly[scala.reflect.Manifest[A]].erasure.getCanonicalName)(f))
      }
    }
  }

  /**
   * Start of method performance DSL. Must be wrapped in outer performance DSL
   */
  object measure {
    def method(methodName : String) = new {
      def in[A](f : => A) {
        withCurrentMethod(methodName)(f)
      }
    }
  }

  // having attribute/axis_value (key -> value) in { ... } — attaches extra metadata to nested tests.
  object having {
    def attribute[A](attr : (String, A)) = new {
      def in[A](f : => A) {
        withNewContext(_.addAttribute(attr))(f)
      }
    }
    def axis_value[A](axis : (String, A)) = new {
      def in[A](f : => A) {
        withNewContext(_.addAxisValue(axis))(f)
      }
    }
  }

  /**
   * This class is returned when the DSL is able to make use of the current performance test generator.
   */
  sealed class GeneratorUser[T] {
    // `by n` — replaces the top IntGenerator with one stepping by the given increment.
    def by(increment : Int) = {
      val gen = popGenerator().asInstanceOf[IntGenerator]
      addGenerator(gen.copy(increment = increment))
      this
    }
    // `withSetup(s) run (test)` — runs the generated tests against the current context.
    def withSetup[A](setup : T => A) = new {
      def run(test : A => Unit) : Unit = {
        withCurrentGenerator[T](_.runTests(setup)(test)(_current_context))
        clearGenerators()
      }
    }

    def and[T<: GeneratorDSLStarter](x : T) = x
  }
  /** Marker interface for DSL to continue using "and" */
  sealed trait GeneratorDSLStarter


  // Shared implementation for integer-driven generator clauses (withSize / withIndex).
  sealed trait IntGeneratorDSLStarter extends GeneratorDSLStarter {
    val name : String
    // `from start upTo max` — inclusive range [start, max], step 1.
    def from(start:Int) = new GeneratorDSLStarter {
      def upTo(max : Int) = {
        addGenerator(new IntGenerator(name,start,max,1))
        new GeneratorUser[Int]
      }
    }
    // `upTo max` — inclusive range [1, max], step 1.
    def upTo(max : Int) = {
      addGenerator(new IntGenerator(name,1,max,1))
      new GeneratorUser[Int]
    }
  }

  /** Creates generator for the "size" axis, 1 to max */
  object withSize extends IntGeneratorDSLStarter {
    override val name = "size"
  }
  /** Creates generator for the "index" axis, 1 to max */
  object withIndex extends IntGeneratorDSLStarter {
    override val name = "index"
  }



}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/store/StoreResultStrategy.scala:
--------------------------------------------------------------------------------
1 | package sperformance.store
2 |
3 | import sperformance.Keys
4 | import sperformance.PerformanceTestResult
5 | import java.net.URL
6 | import sperformance.intelligence.{Cluster, ClusterResults}
7 | import sperformance.util.FileUtils
8 | import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File}
9 | import java.beans.{XMLEncoder,XMLDecoder}
10 | import xml.{Node, XML, NodeSeq}
11 | import sperformance.PerformanceTestRunContext
/**
 * Write a [[sperformance.intelligence.ClusterResults]] to persistent storage
 */

trait StoreResultStrategy {
  /** Persists every cluster/result in the given collection. */
  def write(results:ClusterResults):Unit
}
/**
 * Reads previously stored results into the given context, tagging them with
 * the supplied version label.
 */
trait LoadResultStrategy {
  def read(version:String, testContext:PerformanceTestRunContext):PerformanceTestRunContext
}
22 |
23 |
24 |
class CsvLoadResults(source:URL) extends LoadResultStrategy {
  /** Parses "key->value" tokens into a Map. */
  private def toMap(data:Seq[String]) = {
    val parts = data.map(_.split("->").toSeq).map{case Seq(key,value) => (key,value)}
    Map(parts:_*)
  }
  /**
   * Reads each CSV row written by CsvStoreResults and reports it into testContext.
   *
   * Row layout: "&lt;cluster&gt;","&lt;time&gt;","&lt;axis k->v&gt;"...,"|","&lt;attribute k->v&gt;"...
   * (the `version` parameter is unused here; CSV rows carry no version info)
   */
  override def read(version:String, testContext:PerformanceTestRunContext):PerformanceTestRunContext = {
    val src = io.Source.fromURL(source)
    try {
      for ( line <- src.getLines() ) {
        // Bug fix: strip the row's outer quotes before splitting on the
        // quoted-field separator; previously the first field kept a leading
        // '"' and the last attribute kept a trailing '"'.
        val Seq(_, time, rest @ _*) = line.stripPrefix("\"").stripSuffix("\"").split("\",\"").toSeq
        // "|" marks the boundary between axis data and attributes.
        val (axisData,atts) = rest.span(_ != "|")
        val result = PerformanceTestResult(time = time.toLong, axisData = toMap(axisData), attributes = toMap(atts.drop(1)))
        testContext.reportResult(result)
      }
    } finally {
      src.close() // avoid leaking the underlying stream (was never closed)
    }
    testContext
  }
}
40 |
class CsvStoreResults(outputFile:File) extends StoreResultStrategy {
  /** Series identity is derived from the result's attributes. */
  private def makeSeriesName(cluster:Cluster)(result : PerformanceTestResult) = Cluster.makeName(result.attributes)
  /**
   * Wraps a field in double quotes, escaping embedded quotes as \".
   * Bug fix: replaceAll("\"","\\\"") was a no-op — in a regex replacement
   * string, \" denotes a literal quote, so nothing was escaped. Literal
   * String.replace does what was intended.
   */
  private def quote(string:String) = "\""+string.replace("\"","\\\"")+"\""
  def write(results: ClusterResults) = {
    outputFile.getParentFile.mkdirs
    FileUtils.writer(outputFile) {
      writer =>
        for {
          (cluster,i) <- results.clusters.values.zipWithIndex
          // renamed from `results` to stop shadowing the method parameter
          (moduleName,seriesResults) <- cluster.results.groupBy(makeSeriesName(cluster) _)
          result <- seriesResults
        } {
          // Serialize maps as "key->value" tokens; "|" separates axis data from attributes.
          val atts = result.attributes.toSeq map {case (key,value) => key.toString + "->" + value}
          val axisData = result.axisData.toSeq map {case (key,value) => key.toString + "->" + value}
          val rowData = (i +: result.time +: axisData) ++ ("|" +: atts)
          val rowAsStrings = rowData map {_.toString} map (quote)
          writer.write(rowAsStrings mkString ",")
          writer.write("\n")
        }
    }
  }
}
63 |
64 |
class XmlStoreResults(outFile:File) extends StoreResultStrategy {
  // Serializes an arbitrary value via java.beans.XMLEncoder, re-parses the
  // produced document, and returns the children of its root element.
  private def encode(elem:Any):NodeSeq = {
    val out = new ByteArrayOutputStream()
    val enc = new XMLEncoder(out)
    enc.writeObject(elem)
    enc.close()
    out.close()
    (XML loadString out.toString) \ "_"
  }
  // NOTE(review): the bodies below appear garbled — the enclosing Scala XML
  // literals (element/name/value markup) seem to have been stripped from this
  // copy of the file. Recover the original source before editing this class;
  // the code is preserved as-is.
  private def encMap(data:Map[String,Any]):NodeSeq =
    data.toSeq.map {
      elem =>

 {elem._1}
 {encode(elem._2)}

    }

  def write(results: ClusterResults) {
    val xml = {
      for { cluster <- results.clusters.values.toSeq } yield {



 {cluster.metaData.axis.toSeq.map (encode)}


 {encMap(cluster.metaData.attributes)}



 {
        cluster.results.toSeq.map {r =>


 {encMap(r.attributes)}


 {encMap(r.axisData)}


      }}


      }
    }
    outFile.getParentFile.mkdirs()
    FileUtils.writer(outFile) {
      writer=>
        XML.write(writer,xml,"UTF-8",true,null)
    }
  }
}
118 |
class XmlLoadResults(xmlFile:URL) extends LoadResultStrategy {
  // Rebuilds a Map from the "element" nodes beneath mapRoot.
  private def readMap(mapRoot:NodeSeq)= Map(mapRoot \\ "element" map{readObj} :_*)
  // Decodes a single name/value element; the value payload is fed back
  // through java.beans.XMLDecoder.
  // NOTE(review): the `""+...+""` wrapping looks like a stripped XML literal
  // (probably the <java> envelope XMLDecoder expects) — confirm against the
  // original source before relying on this method.
  private def readObj(e:Node) = {
    val name = (e \\ "name").text
    val value = ""+(e \\ "value" \ "_").toString+""
    val in = new ByteArrayInputStream(value.getBytes("UTF-8"))
    val dec = new XMLDecoder(in)
    val obj = dec.readObject()
    dec.close
    in.close

    name -> obj
  }
  /**
   * Loads every "result" element from the XML file and reports it into
   * testContext, prefixing the stored Keys.Version attribute (if any) with the
   * supplied version label.
   */
  override def read(version:String, testContext:PerformanceTestRunContext): PerformanceTestRunContext = {
    val xml = XML.load(xmlFile)
    (xml \\ "result") foreach { nextResult =>
      val time = (nextResult \\ "@time").text.toLong
      val nonVersionedAtts = readMap(nextResult \ "attributes")
      // Tag each result with the version it came from; append any existing version text.
      val versionInfo = nonVersionedAtts.get(Keys.Version) map {value => Keys.Version -> (version + value)} getOrElse (Keys.Version -> version)
      val atts = nonVersionedAtts + versionInfo
      val axisData = readMap(nextResult \ "axisData")
      val report = PerformanceTestResult(time,axisData = axisData, attributes = atts)
      testContext.reportResult(report)
    }
    testContext
  }
}
--------------------------------------------------------------------------------
/src/main/scala/sperformance/generators/Generator.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 | package generators
3 |
4 | import util.PerformanceTestHelper._
5 |
/**
 * This represents the smallest quantum of a performance test
 */
trait PerformanceTestRun[S] {
  /** Produces the state the test method runs against (built once, untimed). */
  def setup : () => S
  /** The code whose execution is measured. */
  def test(s : S) : Unit
  /** Apply whatever mods we need to the context for this specific test... e.g. Adding attributes */
  def modifyContext(ctx : PerformanceTestRunContext) : PerformanceTestRunContext

  /** Executes setup + measurement and reports the result into ctx. */
  def run(ctx : PerformanceTestRunContext) : Unit
}
17 |
/** Simplest implementation of a performance test */
trait AbstractPerformanceTestRun[S] extends PerformanceTestRun[S] {
  def run(ctx : PerformanceTestRunContext) : Unit = {
    // Build the test state once, outside the timed section.
    val s = setup()
    // Measure the test body; the repetition count comes from the Keys.TestRuns attribute.
    val result = measure(() => test(s),ctx.attribute(Keys.TestRuns))
    // Let the run decorate the context (e.g. add attributes/axis data) before reporting.
    modifyContext(ctx).reportResult(PerformanceTestResult(result, Map(), Map()));
  }
}
26 |
/**
 * Interface for performance test generators. A Generator is used to create any number of performance tests for a given type.
 * Generators also have a mechanism to generate a "warmup" test which is used in an attempt to get hotspot to optimize a
 * particular code path before measuring its execution.
 *
 * Generators are also required to modify the current test context with axis values relating to the generated types from
 * this generator. The information is used in clustering and graphing of performance results over a given variable.
 * The type of the generator is assumed to be an independent variable for graphing.
 *
 * Note: Generators are monadic in nature.
 */
trait Generator[T] {
  /**
   * Creates a single performance test run that can be used to "warm up" the JVM hotspot for warm performance testing.
   *
   * @param setup converts a generated value into the test's state
   * @param test  the code to exercise (results are discarded during warm-up)
   */
  def genWarmUp[S](setup : T => S)(test : S => Unit) : PerformanceTestRun[S]

  /**
   * Generates a Traversable of the performance tests that need to be executed.
   *
   * TODO - TraversableView?
   */
  def genTests[S](setup : T => S)(test : S => Unit) : Traversable[PerformanceTestRun[S]]


  /**
   * Runs tests with the given setup method and testing method in a given context.
   *
   * @param S The type the setup method returns, and the test method expects. Consider the "testing state type"
   * @param setup A method that will create initial state for the test
   * @param f The method to be measured for performance
   * @param ctx the context results are reported into
   */
  def runTests[S](setup : T => S)(f : S => Unit)(ctx : PerformanceTestRunContext) : Unit

  /**
   * Maps a generator of type T to a generator of type U. This mapping is done during the setup phase (and not quantumed)
   * @param U the new type to generate tests for
   */
  def map[U](f : T=>U) : Generator[U]

  /**
   * Takes a function converting this generator's type into another Generator and returns an flattened view of the new Generator type.
   */
  def flatMap[U](f : T => Generator[U]) : Generator[U]




}
76 |
/** Generic implementations of the Generator combinators and the test-running loop. */
trait GeneratorOperations[T] extends Generator[T]{

  /**
   * Warms up the JVM against a throwaway (null) context, then runs every
   * generated test against the real context, logging warm-up duration.
   */
  override def runTests[S](setup : T => S)(f : S => Unit)(ctx : PerformanceTestRunContext) : Unit = {
    val warmupRuns = ctx.attribute(Keys.WarmupRuns)
    Console.println(s"Warming up generator $this with $warmupRuns runs on ctx $ctx")
    val warmupStart = System.currentTimeMillis
    // Exercise the code path so hotspot can optimize it; results are discarded.
    val warmUpTest = genWarmUp(setup)(f)
    warmUpJvm(() => warmUpTest.run(NullPerformanceTestRunContext), warmupRuns)
    val warmupMillis = System.currentTimeMillis - warmupStart
    Console.println(s"Warmup done in ${warmupMillis / 1000.0} seconds")
    Console.println(s"Running generator $this on ctx $ctx")
    // Now execute the measured tests for real.
    for (testRun <- genTests(setup)(f)) testRun.run(ctx)
  }

  override def map[U](f : T=>U) : Generator[U] = new MappedGenerator(this,f)
  override def flatMap[U](f : T => Generator[U]) : Generator[U] = new NestedGenerator(this, f)
}
99 |
/** Maps a generator to another type using a transform function
 * @param T The new type generated
 * @param U The original generated type.
 * @param g The original generator
 * @param transform The function that modified the type generated
 */
private [generators] final class MappedGenerator[T,U](g : Generator[T], transform : T => U) extends GeneratorOperations[U] {
  // Delegate to the wrapped generator, composing the transform in front of the setup step.
  override def genWarmUp[S](setup : U => S)(test : S => Unit) : PerformanceTestRun[S] = g.genWarmUp( transform andThen setup)(test)
  override def genTests[S](setup : U => S)(test : S => Unit) : Traversable[PerformanceTestRun[S]] = g.genTests(transform andThen setup)(test)

  override def toString : String = "MappedGenerator(" + g + ")"
}
112 |
/** Nests one generator in another. Creates a new generator that will contain cross-product of all generated tests.
 * @param T The original Generated Type
 * @param U The new generated type
 * @param g The original generator
 * @param f The transformation function to take a generated type T and create a new Generator of U tests.
 */
private[generators] final class NestedGenerator[T,U](g : Generator[T], f : T=>Generator[U]) extends GeneratorOperations[U] {
  override def genWarmUp[S](setup : U => S)(test : S => Unit) : PerformanceTestRun[S] = {
    //Steal value of warmUp early: run the outer generator's identity warm-up
    //setup to obtain a T that can be fed to f.
    val warmUp = g.genWarmUp( (t : T) => t)( ignore => ())
    //Now delegate!
    f(warmUp.setup()).genWarmUp(setup)(test)
  }
  // Cross product: for each outer test, build the inner generator from its
  // setup value and pair every inner test with it. Context modifications from
  // both layers are composed (inner applied after outer).
  override def genTests[S](setup : U => S)(test : S => Unit) : Traversable[PerformanceTestRun[S]] = for {
    initialTest <- g.genTests(identity)(ignore => ())
    secondTest <- f(initialTest.setup()).genTests(setup)(test)
  } yield new AbstractPerformanceTestRun[S] {
    def setup : () => S = secondTest.setup
    def test(s : S) : Unit = secondTest.test(s)
    def modifyContext(ctx : PerformanceTestRunContext) : PerformanceTestRunContext = {
      secondTest.modifyContext(initialTest.modifyContext(ctx))
    }
  }

  override def toString : String = "NestedGenerator(" + g + " mapped by" + f + ")"

}
--------------------------------------------------------------------------------
/src/test/scala/CollectionsShootout.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 |
3 | import collection.immutable.List
4 | import collection.immutable.Vector
5 | import collection.{SeqView, Traversable}
6 | import collection.mutable.{IndexedSeqView, ArrayBuffer, ListBuffer, LinkedList}
7 |
trait TraverableTestHelper extends sperformance.dsl.PerformanceDSLTest {

  /**
   * Registers a standard battery of Traversable benchmarks (foreach, size,
   * head, last, folds, isEmpty, filter, collect, partition, groupBy,
   * filter+headOption) for the collection built by `setup`, on sizes 1 to 1000.
   *
   * @param setup builds a collection with the requested number of elements
   * @param ev0   manifest whose rendering labels the benchmark module
   * @param conv  evidence that the collection can be viewed as Traversable[Int]
   */
  def makeTraversableTest[T[X]](setup : (Int) => T[Int])(implicit ev0 : ClassManifest[T[Int]], conv : T[Int] => Traversable[Int]) = {
    val collectionName = ev0.toString //ev0.erasure.getName

    performance of collectionName in {
      measure method "foreach" in {
        withSize upTo 1000 withSetup setup run { collection =>
          var tmp = 0
          // Bug fix: accumulate so the loop body is not dead code — previously
          // `tmp + x` computed a value and silently discarded it.
          collection.foreach(x => tmp += x)
        }
      }
      measure method "size" in {
        withSize upTo 1000 withSetup setup run { collection =>
          var tmp = 0
          tmp += collection.size
        }
      }

      measure method "head" in {
        withSize upTo 1000 withSetup setup run { collection =>
          var tmp = 0
          tmp += collection.head
        }
      }

      measure method "last" in {
        withSize upTo 1000 withSetup setup run { collection =>
          var tmp = 0
          tmp += collection.last
        }
      }

      measure method "foldLeft" in {
        withSize upTo 1000 withSetup setup run { collection =>
          var tmp = collection.foldLeft(0)(_+_)
          tmp
        }
      }

      measure method "foldRight" in {
        withSize upTo 1000 withSetup setup run { collection =>
          var tmp = collection.foldRight(0)(_+_)
          tmp
        }
      }

      measure method "isEmpty" in {
        withSize upTo 1000 withSetup setup run { collection =>
          collection.isEmpty
        }
      }


      measure method "filter" in {
        withSize upTo 1000 withSetup setup run { collection =>
          collection.filter(_%2==0)
        }
      }

      measure method "collect" in {
        withSize upTo 1000 withSetup setup run { collection =>
          collection.collect {
            case x : Int if x % 2 == 0 => x
          }
        }
      }

      // Label fixed: was misspelled "parition", which mislabeled results in reports.
      measure method "partition" in {
        withSize upTo 1000 withSetup setup run { collection =>
          collection.partition(_%2==0)
        }
      }

      measure method "groupBy" in {
        withSize upTo 1000 withSetup setup run { collection =>
          collection.groupBy(_ % 3)
        }
      }

      measure method "filterAndHeadOption" in {
        withSize upTo 1000 withSetup setup run { collection =>
          collection.filter(_ % 2 == 0).headOption
        }
      }
    }
  }

}
/** DSL helper that registers benchmarks for java.util.Collection implementations. */
trait JavaCollectionTestHelper extends sperformance.dsl.PerformanceDSLTest {
  def makeJavaCollectionTest[T[X] <: java.util.Collection[X]](setup : (Int) => T[Int])(implicit ev0 : Manifest[T[Int]]) = {
    // Use the manifest's rendering of the type as the module label.
    val collectionName = ev0.toString //ev0.erasure.getName

    performance of collectionName in {

      measure method "size" in {
        withSize upTo 1000 withSetup setup run { collection =>
          var tmp = 0
          tmp += collection.size
        }
      }
    }
  }
}
114 |
/** Benchmark shoot-out across the standard collection implementations. */
object ColShootOutTest extends TraverableTestHelper {
  // NOTE(review): setups built with `1 to size` contain `size` elements, but
  // the `.range(1, size)`-based ones further down contain size-1 elements
  // (range's upper bound is exclusive) — confirm whether that skew is intended
  // before comparing their numbers directly.
  makeTraversableTest[Vector] { size =>
    var collection : Vector[Int] = Vector[Int]()
    for( i <- 1 to size) collection = collection :+ i //collection = collection.updated(i,i)
    collection
  }

  makeTraversableTest[LinkedList] { size =>
    var collection = LinkedList[Int]()
    collection = collection ++ (1 to size).toList
    collection
  }

  makeTraversableTest[List](size => (1 to size).toList)
  makeTraversableTest[ListBuffer] { size =>
    val collection = new ListBuffer[Int]
    for( i <- 1 to size) collection += i
    collection
  }

  makeTraversableTest[Array] { size =>
    val x = new Array[Int](size)
    for(i <- 1 to size) x(i-1)=i
    x
  }
  makeTraversableTest[Set] { size => (1 to size).toSet }
  makeTraversableTest[Traversable] { size =>
    val tmp = ArrayBuffer.range(1,size)
    // Anonymous wrapper so only the generic Traversable interface (not
    // ArrayBuffer's optimized overrides) is benchmarked.
    new Traversable[Int] {
      def foreach[U](f : Int => U) = tmp.foreach(f)
    }
  }
  // Type lambdas are required to benchmark the two-type-parameter view types.
  makeTraversableTest[({type X[A] = SeqView[A, Seq[A]]})#X] { size =>
    List.range(1, size).toList.view
  }
  makeTraversableTest[IndexedSeq] { size =>
    ArrayBuffer.range(1, size)
  }

  makeTraversableTest[({type X[A] = IndexedSeqView[A, IndexedSeq[A]]})#X] { size =>
    ArrayBuffer.range(1, size).view
  }


  //TODO - We need to test java collections directly... but use similar names for comparison...
  import collection.JavaConversions

  // makeTraversableTest[java.util.HashSet] { size =>
  //   val x = new java.util.HashSet[Int]
  //   for(i <- 1 to size) x.add(i)
  //   x
  // }(implicitly[Manifest[java.util.HashSet[Int]]], JavaConversions.asIterable(_ : java.util.Collection[Int]))
  //makeTraversableTest[Set] { size => (1 to size).toSet }


}
171 |
/**
 * Compares lazy (view-based) and strict pipelines over an opaque Iterable,
 * measuring map/filter/zipWithIndex chains followed by take(100).
 */
object ViewShootOut2 extends sperformance.dsl.PerformanceDSLTest {
  import collection.mutable.ArrayBuffer
  val maxSize = 1000
  // Wrap the buffer so only the generic Iterable interface is exercised.
  def setup(size : Int) = {
    val data : Iterable[Int] = ArrayBuffer.range(1,size)
    new Iterable[Int] {
      override def iterator : Iterator[Int] = data.iterator
    }
  }
  performance of "views" in {
    measure method "takeWithMap" in {
      withSize upTo maxSize withSetup setup run { col =>
        col.view.map(_ * 2).take(100).force
      }
    }
    measure method "takeWithFilter" in {
      withSize upTo maxSize withSetup setup run { col =>
        col.view.filter(_ % 2 == 0).take(100).force
      }
    }
    measure method "takeWithZipAndFilter" in {
      withSize upTo maxSize withSetup setup run { col =>
        col.view.zipWithIndex.filter(_._2 % 2 == 0).take(100).force
      }
    }
  }
  performance of "non-views" in {
    measure method "takeWithMap" in {
      withSize upTo maxSize withSetup setup run { col =>
        col.map(_ * 2).take(100)
      }
    }
    measure method "takeWithFilter" in {
      withSize upTo maxSize withSetup setup run { col =>
        col.filter(_ % 2 == 0).take(100)
      }
    }
    measure method "takeWithZipAndFilter" in {
      withSize upTo maxSize withSetup setup run { col =>
        col.zipWithIndex.filter(_._2 % 2 == 0).take(100)
      }
    }
  }
}
216 |
217 |
/**
 * Compares strict Vector/ArrayBuffer pipelines against their .view
 * counterparts on String data (zip/filter/map and filter/fold chains).
 */
object ViewShootOut extends sperformance.dsl.PerformanceDSLTest {
  import collection.mutable.ArrayBuffer
  import collection.generic.GenericCompanion
  // Builds a CC[String] of the given size via the collection's companion object.
  def setup[CC[X] <: Traversable[X]](gg : GenericCompanion[CC])(size : Int) : CC[String] = {
    gg( (1 to size).map(_.toString).toSeq : _* )
  }
  performance of "Vector" in {
    measure method "zipWithIndexFilterAndMap" in {
      withSize upTo 1000 withSetup setup(Vector) run { a =>
        a.zipWithIndex.filter(x => x._1.startsWith("1") && (x._2%3==0)).map(_._1)
      }
    }
    measure method "filterAndReduce" in {
      withSize upTo 1000 withSetup setup(Vector) run { a =>
        a.filter(_.startsWith("1")).foldLeft("")((prev, cur) => cur)
      }
    }
  }
  performance of "Vector.view" in {
    measure method "zipWithIndexFilterAndMap" in {
      withSize upTo 1000 withSetup setup(Vector) run { a =>
        a.view.zipWithIndex.filter(x => x._1.startsWith("1") && (x._2%3==0)).map(_._1).force
      }
    }
    measure method "filterAndReduce" in {
      withSize upTo 1000 withSetup setup(Vector) run { a =>
        a.view.filter(_.startsWith("1")).foldLeft("")( (prev, cur) => cur)
      }
    }
  }
  performance of "ArrayBuffer" in {
    measure method "zipWithIndexFilterAndMap" in {
      withSize upTo 1000 withSetup setup(ArrayBuffer) run { a =>
        a.zipWithIndex.filter(x => x._1.startsWith("1") && (x._2%3==0)).map(_._1)
      }
    }
    measure method "filterAndReduce" in {
      withSize upTo 1000 withSetup setup(ArrayBuffer) run { a =>
        a.filter(_.startsWith("1")).foldLeft("")( (prev, cur) => cur)
      }
    }
  }
  performance of "ArrayBuffer.view" in {
    measure method "zipWithIndexFilterAndMap" in {
      withSize upTo 1000 withSetup setup(ArrayBuffer) run { a =>
        a.view.zipWithIndex.filter(x => x._1.startsWith("1") && (x._2%3==0)).map(_._1).force
      }
    }
    measure method "filterAndReduce" in {
      withSize upTo 1000 withSetup setup(ArrayBuffer) run { a =>
        a.view.filter(_.startsWith("1")).foldLeft("")( (prev, cur) => cur)
      }
    }
  }
}
273 |
274 |
--------------------------------------------------------------------------------
/src/main/scala/sperformance/RunContext.scala:
--------------------------------------------------------------------------------
1 | package sperformance
2 |
3 | import charting.Charting
4 | import intelligence.Cluster
5 | import org.jfree.chart.{ChartUtilities, JFreeChart}
6 | import collection.mutable.ListBuffer
7 | import java.io.{FileOutputStream, BufferedOutputStream, PrintStream, File}
8 | import util.FileUtils
9 | import java.net.URL
10 | import store.StoreResultStrategy
11 | import store.LoadResultStrategy
12 | import sperformance.intelligence.ClusterMetaData
13 | import org.jfree.data.category.CategoryDataset
14 | import org.jfree.data.category.DefaultCategoryDataset
15 | import org.jfree.chart.ChartFactory
16 | import org.jfree.chart.plot.PlotOrientation
17 | import java.awt.Color
18 | import org.jfree.chart.plot.CategoryPlot
19 | import org.jfree.chart.axis.NumberAxis
20 | import org.jfree.chart.renderer.category.BarRenderer
21 | import java.awt.GradientPaint
22 | import org.jfree.chart.axis.CategoryLabelPositions
23 |
/**
 * Abstract interface designed to allow customize where reports go and how they are generated. (i.e. could be sent to a Swing UI).
 *
 * This interface is by no means complete.
 */
trait RunContext {
  /** The context to use when running tests */
  def testContext : PerformanceTestRunContext
  /** Persists or otherwise handles a chart produced for the given cluster. */
  def writeResultingChart(clusterName : List[String], chartName : String, chart : JFreeChart) : Unit
  /** Hook invoked once a test completes; no-op by default. */
  def testFinished(test:PerformanceTest):Unit = {}
  // testContext pre-populated with environment attributes (JVM, OS, cores) so
  // every reported result records where it was measured.
  lazy val defaultTestContext = testContext addAttribute
    ("jvm-version", System.getProperty("java.vm.version")) addAttribute
    ("jvm-vendor", System.getProperty("java.vm.vendor")) addAttribute
    ("jvm-name", System.getProperty("java.vm.name")) addAttribute
    ("os-name", System.getProperty("os.name")) addAttribute
    ("os-arch", System.getProperty("os.arch")) addAttribute
    ("cores", Runtime.getRuntime.availableProcessors)
}
42 |
43 |
//This just dumps charts into output directories...
class DefaultRunContext(val outputDirectory : File, testName : String) extends RunContext {

  val defaultChartHeight = 600
  val defaultChartWidth = 800

  //Funny how all our intelligence is embedded here for generating clusters...
  override val testContext = new intelligence.ClusterResults

  /** Record of a chart written to disk; used later to build the index page. */
  case class Chart(clusterName : List[String], chartName : String, chartFile: File)

  private val charts = new ListBuffer[Chart]

  /** Target PNG location: outputDirectory/<cluster path>/<chartName>.png */
  def getResultingFile(clusterName : List[String], chartName : String) : File = {
    def relativeFilename = clusterName.mkString(File.separator) + File.separator + chartName + ".png"
    new File(outputDirectory,relativeFilename)
  }

  def writeResultingChart(clusterName : List[String], chartName : String, chart : JFreeChart) {
    val file = getResultingFile(clusterName, chartName)
    //Ensure the parent directories exist before saving the PNG.
    if(!file.exists) {
      val parent = file.getParentFile
      if(!parent.exists) {
        parent.mkdirs
      }
    }
    ChartUtilities.saveChartAsPNG(file, chart,defaultChartWidth, defaultChartHeight)

    //TODO - Write information to cache...
    charts += Chart(clusterName, chartName, file)
  }

  import scala.xml._
  // NOTE(review): this expression appears garbled in this copy of the file —
  // the enclosing XHTML literals (html/head/body/img markup, plus original
  // source lines 81-93) seem to have been stripped during extraction. Recover
  // the original markup before editing; the remaining code is preserved as-is.
  def resultsPage : Node = (
{testName} Results

{
  for {
    (cluster, charts) <- charts.groupBy( c => c.clusterName)
  } yield

Graphed By {cluster}

{
  //TODO - Output Cluster attributes that lead to this chart?
}


{
  for(chart <- charts) yield


}


}

)


  /** Renders all charts, then writes index.html into the output directory. */
  def generateResultsPage() {
    //TODO - more cleverness about how we make charts?
    Charting.createReports(testContext.clusters, this)

    // Order matters: resultsPage reads the `charts` buffer, which is only
    // populated once createReports has written every chart.
    val content = resultsPage
    val index = new File(outputDirectory, "index.html")
    FileUtils.ensureDirectoryExists(index)
    val output = new PrintStream(new BufferedOutputStream(new FileOutputStream(index)))
    try {
      output.println(content)
    } finally {
      output.close()
    }
  }
}
135 |
136 | /**
137 | * Creates bar charts that compares the results of several versions for each test ran.
138 | *
139 | * Basically it runs the test and collects the results. When the test is finished it writes
140 | * the results of the test in the historyDir/versions/current/filename.xml (in reality it is not
141 | * dependent on an xml file but the only useful implementation at the moment is xml so I am hardcoding that
142 | * until we have other solutions.)
143 | *
144 | * Once the files is written the same directoy (historyDir/versions) is checked for other versions and
145 | * all others are loaded as well (but with the version information added).
146 | *
147 | * All versions of the same module,test and method are grouped together and drawn to a bar chart with the
148 | * versions drawn together as a category.
149 | *
150 | * Since the chart is a bar chart it can only represent data with a few data points. For example
151 | * a test of size 0 to 1000 is a bad candidate. On the other hand a test of size 100 to 1000 by 250 is fine
152 | * because it only has 4 data points per version.
153 | *
154 | * @param historyDir The directory containing the history of performance tests
155 | * @param newVersion if true then a new version will be create if false then
156 | * the latest version will be overridden
157 | * @param factory The factory function for creating a StoreResultStrategy
158 | */
class HistoricalRunContext(historyDir:File, storeFactory:File => StoreResultStrategy, loadFactory:URL => LoadResultStrategy) extends RunContext {

  // On-disk layout: historyDir/versions/<version>/<testName>.xml for results,
  // historyDir/graphs/<chartName>/<clusterName>.png for generated charts.
  val versionsDir = new File(historyDir, "versions")
  val graphsDir = new File(historyDir, "graphs") // probably also needs to be parameterized somehow
  graphsDir.mkdirs()

  // Dual-sink result collector:
  //  - `results`     holds ONLY the current run (persisted as the "current" version)
  //  - `allVersions` holds the current run PLUS every historical version loaded in
  //    testFinished, and is what chart generation reads from.
  val testContext = new PerformanceTestRunContext {
    val allVersions = new intelligence.HistoricalResults
    val results = new intelligence.HistoricalResults

    // Attribute/axis lookups are answered from the current run only.
    override def attribute[U](key:String):Option[U] = results.attribute[U](key)
    override def axisValue[U](key:String):Option[U] = results.axisValue[U](key)

    // Every reported result is fanned out to both sinks.
    def reportResult(result : PerformanceTestResult) = {
      allVersions.reportResult(result)
      results.reportResult(result)
    }
  }
  val currentVersionDir = new File(versionsDir, "current")

  /**
   * Writes a chart by delegating to a DefaultRunContext whose test context is
   * `allVersions`, so charts include historical data.
   *
   * NOTE(review): this writes under historyDir.getParentFile()/"graphs", i.e. a
   * SIBLING of historyDir, whereas generateResultsPage writes PNGs under
   * `graphsDir` (historyDir/"graphs"). Confirm the two output locations are
   * intentionally different.
   */
  def writeResultingChart(clusterName : List[String], chartName : String, chart : JFreeChart) : Unit = {
    val allVersions = testContext.allVersions

    val grapher = new DefaultRunContext(new File(historyDir.getParentFile(), "graphs"), "unknown"){
      override val testContext = allVersions
    };

    grapher.writeResultingChart(clusterName, chartName, chart)
  }

  /**
   * Renders one bar chart PNG per (module, method) group found in `allVersions`,
   * comparing all loaded versions side by side, into graphsDir/chartName/.
   */
  def generateResultsPage(chartName:String) {
    // Optionally-prefixed attribute key: "<version> %% <attribute>".
    val VersionExtractor = """(.+? %% )?(.*)""".r

    // Strips the version prefix from an attribute map's keys.
    // NOTE(review): this helper is never called in this method — dead code?
    def dropVersion(md: ClusterMetaData) = {
      md.attributes.map { case (VersionExtractor(version, att), value) => (att, value) }
    }

    // Group clusters that share the same "module" and "method" attributes;
    // each group becomes one chart. The map step discards the metadata keys,
    // keeping only the clusters themselves.
    val chartGrouping = testContext.allVersions.clusters.groupBy {
      case (md, cluster) =>
        val key = md.attributes.filter(n => n._1 == "module" || n._1 == "method")
        println(key) // NOTE(review): debug output — consider removing or logging
        key
    }.map { case (group, map) => (group,map.map { case (key, value) => value }) }

    // Version label of a result/cluster; results from the current run carry no
    // Keys.Version attribute and are labelled "current".
    def version(atts:Map[String,Any]) =
      atts.find(_._1 == Keys.Version).map(v => v._2.toString) getOrElse "current"

    // Orders clusters case-insensitively by version, with "current" always last
    // (so it appears as the rightmost/most recent series in the legend).
    def clusterSorterByVersion(r1:Cluster, r2:Cluster):Boolean = {
      def clusterVersion(c:Cluster) = version(c.metaData.attributes)

      val v1 = clusterVersion(r1)
      val v2 = clusterVersion(r2)

      if(v1 == "current") false
      else if(v2 == "current") true
      else v1.compareToIgnoreCase(v2) < 0
    }
    for ((grouping, value) <- chartGrouping) {
      val dataset = new DefaultCategoryDataset()
      // One bar per (version, axis-value) pair.
      // NOTE(review): axisData.head assumes every result has at least one axis
      // entry and that the first one is the interesting axis — confirm.
      for {cluster <- value.toSeq.sortWith(clusterSorterByVersion)
           result <- cluster.results} {

        dataset.addValue(result.time, version(result.attributes), result.axisData.head._2.toString)
      }

      val name = Cluster.makeName(grouping)
      // `value.head` is safe here: groupBy never produces an empty group.
      val chart = ChartFactory.createBarChart(name, // chart title
        value.head.metaData.axis.head, // domain axis label
        "time", // range axis label
        dataset, // data
        PlotOrientation.VERTICAL, // orientation
        true, // include legend
        true, // tooltips?
        false // URLs?
        );


      // Cosmetic styling of the chart and plot (white-on-gray theme).
      chart.setBackgroundPaint(Color.white);

      val plot = chart.getPlot().asInstanceOf[CategoryPlot];
      plot.setBackgroundPaint(Color.lightGray);
      plot.setDomainGridlinePaint(Color.white);
      plot.setDomainGridlinesVisible(true);
      plot.setRangeGridlinePaint(Color.white);


      // Force whole-number ticks on the time axis.
      val rangeAxis = plot.getRangeAxis().asInstanceOf[NumberAxis];
      rangeAxis.setStandardTickUnits(NumberAxis.createIntegerTickUnits());

      // disable bar outlines...
      val renderer = plot.getRenderer().asInstanceOf[BarRenderer];
      renderer.setDrawBarOutline(false);
      /*
      // set up gradient paints for series...
      val gp0 = new GradientPaint(0.0f, 0.0f, Color.blue,
          0.0f, 0.0f, new Color(0, 0, 64));
      val gp1 = new GradientPaint(0.0f, 0.0f, Color.green,
          0.0f, 0.0f, new Color(0, 64, 0));
      val gp2 = new GradientPaint(0.0f, 0.0f, Color.red,
          0.0f, 0.0f, new Color(64, 0, 0));
      renderer.setSeriesPaint(0, gp0);
      renderer.setSeriesPaint(1, gp1);
      renderer.setSeriesPaint(2, gp2);*/

      // Rotate category labels 30 degrees so long axis values stay readable.
      val domainAxis = plot.getDomainAxis();
      domainAxis.setCategoryLabelPositions(
        CategoryLabelPositions.createUpRotationLabelPositions(java.lang.Math.PI / 6.0));
      val chartDir = new File(graphsDir, chartName)
      chartDir.mkdirs
      // FileUtils.outputStream is assumed to close `out` for us — TODO confirm.
      FileUtils.outputStream(new File(chartDir,name+".png")){
        out =>
          ChartUtilities.writeChartAsPNG(out,chart,800, 600)
      }
    }

  }

  /**
   * Persists the current run's results (testContext.results) to
   * versions/current/<testName>.xml via the injected store strategy.
   */
  def writeVersion(testName:String) = {
    currentVersionDir.mkdirs()
    val testOutputFile = new File(currentVersionDir,testName+".xml")
    val strategy = storeFactory(testOutputFile)
    strategy.write(testContext.results)
  }

  /**
   * After a test completes: write the current run out as the "current" version,
   * then scan every OTHER version directory for a matching <testName>.xml and
   * merge it into `allVersions`, tagged with its directory name as the version.
   */
  override def testFinished(test: PerformanceTest): Unit = {
    // listFiles returns null for a missing/unreadable dir; normalize to empty.
    def list(f: File) = Option(f.listFiles) getOrElse Array[File]()

    writeVersion(test.name)
    val versions = list(versionsDir).filterNot { _.getName == currentVersionDir.getName() }
    for (file <- versions.flatMap { f => list(f).find(_.getName() == test.name + ".xml") }) {
      val version = file.getParentFile().getName()
      loadFactory(file.toURI.toURL).read(version, testContext.allVersions)
    }
  }
}
297 |
--------------------------------------------------------------------------------