├── .github └── workflows │ ├── ci.yml │ ├── cla.yml │ └── release.yml ├── .gitignore ├── .jvmopts ├── CODE_OF_CONDUCT.md ├── LICENSE ├── NOTICE ├── README.md ├── build.sbt ├── core └── src │ └── main │ └── scala │ └── scala │ └── collection │ ├── CustomParallelizable.scala │ ├── DebugUtils.scala │ ├── Parallel.scala │ ├── Parallelizable.scala │ ├── generic │ ├── CanCombineFrom.scala │ ├── GenericParCompanion.scala │ ├── GenericParTemplate.scala │ ├── GenericTraversableTemplate.scala │ ├── HasNewBuilder.scala │ ├── HasNewCombiner.scala │ ├── ParFactory.scala │ ├── ParMapFactory.scala │ ├── ParSetFactory.scala │ ├── Signalling.scala │ └── Sizing.scala │ ├── immutable │ ├── OldHashMap.scala │ ├── OldHashSet.scala │ └── TrieIterator.scala │ ├── mutable │ └── FlatHashTable.scala │ └── parallel │ ├── CollectionConverters.scala │ ├── Combiner.scala │ ├── ParIterable.scala │ ├── ParIterableLike.scala │ ├── ParMap.scala │ ├── ParMapLike.scala │ ├── ParSeq.scala │ ├── ParSeqLike.scala │ ├── ParSet.scala │ ├── ParSetLike.scala │ ├── PreciseSplitter.scala │ ├── RemainsIterator.scala │ ├── Splitter.scala │ ├── TaskSupport.scala │ ├── Tasks.scala │ ├── immutable │ ├── ParHashMap.scala │ ├── ParHashSet.scala │ ├── ParIterable.scala │ ├── ParMap.scala │ ├── ParRange.scala │ ├── ParSeq.scala │ ├── ParSet.scala │ ├── ParVector.scala │ └── package.scala │ ├── mutable │ ├── LazyCombiner.scala │ ├── ParArray.scala │ ├── ParFlatHashTable.scala │ ├── ParHashMap.scala │ ├── ParHashSet.scala │ ├── ParHashTable.scala │ ├── ParIterable.scala │ ├── ParMap.scala │ ├── ParMapLike.scala │ ├── ParSeq.scala │ ├── ParSet.scala │ ├── ParSetLike.scala │ ├── ParTrieMap.scala │ ├── ResizableParArrayCombiner.scala │ ├── UnrolledParArrayCombiner.scala │ └── package.scala │ └── package.scala ├── junit └── src │ └── test │ ├── scala-2 │ └── scala │ │ └── SerializationStabilityTest.scala │ ├── scala-3 │ └── scala │ │ └── SerializationStabilityTest.scala │ └── scala │ ├── MiscTest.scala │ ├── SerializationStabilityBase.scala │ └── scala │ ├── CollectTest.scala │ └── collection │ ├── CollectionConversionsTest.scala │ ├── NewBuilderTest.scala │ ├── concurrent │ ├── ctries_new │ │ ├── ConcurrentMapSpec.scala │ │ ├── DumbHash.scala │ │ ├── IteratorSpec.scala │ │ ├── LNodeSpec.scala │ │ ├── SnapshotSpec.scala │ │ ├── Spec.scala │ │ └── Wrap.scala │ └── ctries_old │ │ ├── ConcurrentMapSpec.scala │ │ ├── DumbHash.scala │ │ ├── IteratorSpec.scala │ │ ├── LNodeSpec.scala │ │ ├── SnapshotSpec.scala │ │ ├── Spec.scala │ │ └── Wrap.scala │ └── parallel │ ├── ParMapTest.scala │ ├── ParSeqConversionsTest.scala │ ├── SerializationTest.scala │ ├── TaskTest.scala │ ├── immutable │ └── ParRangeTest.scala │ └── mutable │ ├── ParArrayTest.scala │ └── ParHashSetTest.scala ├── project ├── GetScala3Next.scala ├── build.properties └── plugins.sbt ├── scalacheck └── src │ └── test │ └── scala │ ├── IntOperators.scala │ ├── IntValues.scala │ ├── Operators.scala │ ├── PairOperators.scala │ ├── PairValues.scala │ ├── ParallelArrayCheck.scala │ ├── ParallelCtrieCheck.scala │ ├── ParallelHashMapCheck.scala │ ├── ParallelHashSetCheck.scala │ ├── ParallelHashTrieCheck.scala │ ├── ParallelIterableCheck.scala │ ├── ParallelMapCheck1.scala │ ├── ParallelRangeCheck.scala │ ├── ParallelSeqCheck.scala │ ├── ParallelSetCheck.scala │ ├── ParallelVectorCheck.scala │ └── pc.scala └── testmacros └── src └── main ├── scala-2 └── testutil │ └── ShouldNotTypecheck.scala └── scala-3 └── testutil └── ShouldNotTypecheck.scala /.github/workflows/ci.yml: 
-------------------------------------------------------------------------------- 1 | name: test 2 | on: 3 | schedule: 4 | - cron: '0 0 * * *' 5 | push: 6 | branches: 7 | - main 8 | pull_request: 9 | jobs: 10 | test: 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | java: [8, 11, 17, 21] 15 | scala: [2.13.x, 3.x] 16 | runs-on: ubuntu-latest 17 | if: ${{ github.event_name != 'schedule' }} 18 | steps: 19 | - uses: actions/checkout@v4 20 | with: 21 | fetch-depth: 0 22 | - uses: coursier/cache-action@v6 23 | - uses: actions/setup-java@v4 24 | with: 25 | distribution: temurin 26 | java-version: ${{matrix.java}} 27 | - uses: sbt/setup-sbt@v1 28 | - name: Test JVM 29 | run: sbt "setScalaVersion ${{matrix.scala}}" testJVM core/headerCheck package 30 | - name: Test Native 31 | run: sbt "setScalaVersion ${{matrix.scala}}" testNative 32 | 33 | test-rc: 34 | strategy: 35 | fail-fast: false 36 | matrix: 37 | java: [8] 38 | scala: [3.next] 39 | runs-on: ubuntu-latest 40 | if: "github.event_name == 'schedule' && github.repository == 'scala/scala-parallel-collections' 41 | || github.event_name == 'push' 42 | || ( 43 | github.event_name == 'pull_request' 44 | && contains(github.event.pull_request.body, '[test-rc]') 45 | )" 46 | steps: 47 | - uses: actions/checkout@v4 48 | with: 49 | fetch-depth: 0 50 | - uses: coursier/cache-action@v6 51 | - uses: actions/setup-java@v4 52 | with: 53 | distribution: temurin 54 | java-version: ${{matrix.java}} 55 | - uses: sbt/setup-sbt@v1 56 | - name: Test 57 | run: sbt "setScalaVersion ${{matrix.scala}}" testJVM core/headerCheck package 58 | -------------------------------------------------------------------------------- /.github/workflows/cla.yml: -------------------------------------------------------------------------------- 1 | name: "Check Scala CLA" 2 | on: 3 | pull_request: 4 | jobs: 5 | cla-check: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Verify CLA 9 | uses: scala/cla-checker@v1 10 | with: 11 | author: ${{ github.event.pull_request.user.login }} 12 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | push: 4 | tags: ["*"] 5 | jobs: 6 | publish: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | with: 11 | fetch-depth: 0 12 | - uses: actions/setup-java@v4 13 | with: 14 | distribution: temurin 15 | java-version: 8 16 | - uses: sbt/setup-sbt@v1 17 | - run: sbt versionCheck ci-release 18 | env: 19 | PGP_PASSPHRASE: ${{secrets.PGP_PASSPHRASE}} 20 | PGP_SECRET: ${{secrets.PGP_SECRET}} 21 | SONATYPE_PASSWORD: ${{secrets.SONATYPE_PASSWORD}} 22 | SONATYPE_USERNAME: ${{secrets.SONATYPE_USERNAME}} 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # 2 | # Are you tempted to edit this file? 3 | # 4 | # First consider if the changes make sense for all, 5 | # or if they are specific to your workflow/system. 6 | # If it is the latter, you can augment this list with 7 | # entries in .git/info/excludes 8 | # 9 | 10 | *.jar 11 | *~ 12 | 13 | # eclipse, intellij 14 | /.classpath 15 | /.project 16 | /.cache 17 | /.idea 18 | /.settings 19 | 20 | # bak files produced by ./cleanup-commit 21 | *.bak 22 | 23 | # Mac specific, but that is common enough a dev platform to warrant inclusion. 
24 | .DS_Store 25 | 26 | target/ 27 | -------------------------------------------------------------------------------- /.jvmopts: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scala/scala-parallel-collections/f682519bda1c861ca28b77a3891deddd171739d8/.jvmopts -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | all repositories in these organizations: 2 | 3 | * [scala](https://github.com/scala) 4 | * [scalacenter](https://github.com/scalacenter) 5 | * [lampepfl](https://github.com/lampepfl) 6 | 7 | are covered by the Scala Code of Conduct: https://scala-lang.org/conduct/ 8 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Scala parallel collections 2 | Copyright (c) 2002-2025 EPFL 3 | Copyright (c) 2011-2025 Lightbend, Inc. dba Akka 4 | 5 | Scala includes software developed at 6 | LAMP/EPFL (https://lamp.epfl.ch/) and 7 | Akka (https://akka.io/). 8 | 9 | Licensed under the Apache License, Version 2.0 (the "License"). 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Scala parallel collections 2 | 3 | This Scala standard module contains the package 4 | `scala.collection.parallel`, with all of the parallel collections that 5 | used to be part of the Scala standard library (in Scala 2.10 through 2.12). 6 | 7 | For Scala 3 and Scala 2.13, this module is a separate JAR that can be 8 | omitted from projects that do not use parallel collections. 9 | 10 | ## Documentation 11 | 12 | * https://docs.scala-lang.org/overviews/parallel-collections/overview.html 13 | * https://javadoc.io/doc/org.scala-lang.modules/scala-parallel-collections_2.13 14 | 15 | ## Maintenance status 16 | 17 | This module is community-maintained, under the guidance of the Scala team at Akka. If you are 18 | interested in participating, please jump right in on issues and pull 19 | requests. 20 | 21 | ## Usage 22 | 23 | To depend on scala-parallel-collections in sbt, add this to your `build.sbt`: 24 | 25 | ```scala 26 | libraryDependencies += 27 | "org.scala-lang.modules" %% "scala-parallel-collections" % "<version>" 28 | ``` 29 | 30 | In your code, adding this import: 31 | 32 | ```scala 33 | import scala.collection.parallel.CollectionConverters._ 34 | ``` 35 | 36 | will enable use of the `.par` method as in earlier Scala versions.
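For example, a minimal end-to-end sketch (the data and operations here are illustrative, not taken from the README):

```scala
import scala.collection.parallel.CollectionConverters._

// `.par` copies the sequential vector into a parallel collection;
// `map` and `sum` are then executed by multiple worker threads.
val squares = (1 to 100000).toVector.par.map(n => n.toLong * n)
val total = squares.sum
```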
37 | 38 | ### Cross-building: dependency 39 | 40 | This module is published only for Scala 2.13 and 3, so in a 41 | cross-built project, the dependency should take this form: 42 | 43 | ```scala 44 | libraryDependencies ++= { 45 | CrossVersion.partialVersion(scalaVersion.value) match { 46 | case Some((2, major)) if major <= 12 => 47 | Seq() 48 | case _ => 49 | Seq("org.scala-lang.modules" %% "scala-parallel-collections" % "<version>") 50 | } 51 | } 52 | ``` 53 | 54 | This way of testing `scalaVersion` is robust across varying Scala 55 | version number formats (nightlies, milestones, release candidates, 56 | community build, etc.). 57 | 58 | ### Cross-building: source compatibility 59 | 60 | Using `.par` is problematic in a cross-built project, since in Scala 61 | 2.13+ the `CollectionConverters._` import shown above is necessary, but 62 | in earlier Scala versions, that import will not compile. 63 | 64 | You may be able to avoid the problem by directly constructing your 65 | parallel collections rather than going through `.par`. For other 66 | possible workarounds, see 67 | https://github.com/scala/scala-parallel-collections/issues/22, 68 | which is still under discussion. 69 | 70 | ## Releasing 71 | 72 | As with other Scala standard modules, build and release infrastructure 73 | is provided by the 74 | [sbt-scala-module](https://github.com/scala/sbt-scala-module/) sbt 75 | plugin. 76 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | val scalaVersions = Seq("2.13.16", "3.3.6") 2 | val defaultScalaVersion = scalaVersions.head 3 | 4 | // When defining JVM / Scala Native matrix we don't want duplicated projects for Scala 2/3 5 | val matrixScalaVersions = Seq(defaultScalaVersion) 6 | 7 | ThisBuild / crossScalaVersions := scalaVersions 8 | ThisBuild / scalaVersion := defaultScalaVersion 9 | 10 | Global / concurrentRestrictions += Tags.limit(NativeTags.Link, 1) 11 | Global / cancelable := true 12 | publish / skip := true // in root 13 | 14 | lazy val commonSettings: Seq[Setting[_]] = 15 | Seq(scalaModuleAutomaticModuleName := Some("scala.collection.parallel")) ++ 16 | ScalaModulePlugin.scalaModuleSettings ++ Seq( 17 | versionPolicyIntention := Compatibility.BinaryCompatible, 18 | crossScalaVersions := scalaVersions, 19 | Compile / compile / scalacOptions --= (CrossVersion.partialVersion(scalaVersion.value) match { 20 | case Some((3, _)) => Seq("-Xlint") 21 | case _ => Seq() 22 | }), 23 | Compile / compile / scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match { 24 | case Some((3, _)) => Seq() 25 | case _ => Seq("-Werror") 26 | }), 27 | ) 28 | 29 | lazy val testNativeSettings: Seq[Setting[_]] = Seq( 30 | // Required by Scala Native testing infrastructure 31 | Test / fork := false, 32 | ) 33 | 34 | lazy val core = projectMatrix.in(file("core")) 35 | .settings(commonSettings) 36 | .settings( 37 | name := "scala-parallel-collections", 38 | Compile / doc / autoAPIMappings := true, 39 | ) 40 | .jvmPlatform(matrixScalaVersions) 41 | .nativePlatform(matrixScalaVersions, settings = testNativeSettings ++ Seq( 42 | versionPolicyPreviousArtifacts := Nil, // TODO: not yet published 43 | mimaPreviousArtifacts := Set.empty 44 | )) 45 | 46 | lazy val junit = projectMatrix.in(file("junit")) 47 | .settings(commonSettings) 48 | .settings( 49 | testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), 50 | publish / skip := true, 51 | ).dependsOn(testmacros, core) 52 |
.jvmPlatform(matrixScalaVersions, 53 | settings = Seq( 54 | libraryDependencies += "com.github.sbt" % "junit-interface" % "0.13.3" % Test, 55 | libraryDependencies += "junit" % "junit" % "4.13.2" % Test, 56 | // for javax.xml.bind.DatatypeConverter, used in SerializationStabilityTest 57 | libraryDependencies += "javax.xml.bind" % "jaxb-api" % "2.3.1" % Test, 58 | Test / fork := true, 59 | ) 60 | ) 61 | .nativePlatform(matrixScalaVersions, 62 | axisValues = Nil, 63 | configure = _ 64 | .enablePlugins(ScalaNativeJUnitPlugin) 65 | .settings( 66 | Test/unmanagedSources/excludeFilter ~= { _ || 67 | "SerializationTest.scala" || // requires ObjectOutputStream 68 | "SerializationStability.scala" || // requires jaxb-api 69 | "SerializationStabilityBase.scala" || 70 | "SerializationStabilityTest.scala" 71 | }, 72 | Test / fork := false 73 | ) 74 | ) 75 | 76 | lazy val scalacheck = projectMatrix.in(file("scalacheck")) 77 | .settings(commonSettings) 78 | .settings( 79 | libraryDependencies += "org.scalacheck" %%% "scalacheck" % "1.18.1", 80 | Test / testOptions += Tests.Argument(TestFrameworks.ScalaCheck, "-workers", "1", "-minSize", "0", "-maxSize", "4000", "-minSuccessfulTests", "5"), 81 | publish / skip := true 82 | ) 83 | .dependsOn(core) 84 | .jvmPlatform(matrixScalaVersions, 85 | settings = Seq( 86 | Test / fork := true 87 | ) 88 | ) 89 | .nativePlatform(matrixScalaVersions, settings = testNativeSettings) 90 | 91 | lazy val testmacros = projectMatrix.in(file("testmacros")) 92 | .settings(commonSettings) 93 | .settings( 94 | libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { 95 | case Some((3, _)) => Nil 96 | case _ => List(scalaOrganization.value % "scala-compiler" % scalaVersion.value) 97 | }), 98 | publish / skip := true, 99 | ) 100 | .jvmPlatform(matrixScalaVersions) 101 | .nativePlatform(matrixScalaVersions, settings = testNativeSettings) 102 | 103 | commands += Command.single("setScalaVersion") { (state, arg) => 104 | val command = arg match { 105 | case "3.next" => s"++${GetScala3Next.get()}!" 106 | case _ => s"++$arg" 107 | } 108 | command :: state 109 | } 110 | 111 | import sbt.internal.{ProjectMatrix, ProjectFinder} 112 | def testPlatformCommand(name: String, selector: ProjectMatrix => ProjectFinder): Command = 113 | Command.command(name) { state => 114 | List(junit, scalacheck, testmacros) 115 | .flatMap(selector(_).get) 116 | .map{ project => s"${project.id}/test"} 117 | .toList ::: state 118 | } 119 | 120 | commands += testPlatformCommand("testNative", _.native) 121 | commands += testPlatformCommand("testJVM", _.jvm) 122 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/CustomParallelizable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala 14 | package collection 15 | 16 | trait CustomParallelizable[+A, +ParRepr <: Parallel] extends Any with Parallelizable[A, ParRepr] { 17 | override def par: ParRepr 18 | override protected[this] def parCombiner = throw new UnsupportedOperationException("") 19 | } 20 | 21 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/DebugUtils.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection 14 | 15 | private[collection] object DebugUtils { 16 | 17 | def buildString(closure: (Any => Unit) => Unit): String = { 18 | val output = new collection.mutable.StringBuilder 19 | closure { any => 20 | output ++= any.toString 21 | output += '\n' 22 | } 23 | 24 | output.result() 25 | } 26 | 27 | def arrayString[T](array: Array[T], from: Int, until: Int): String = { 28 | array.slice(from, until) map ({ 29 | case null => "n/a" 30 | case x => "" + x 31 | }: scala.PartialFunction[T, String]) mkString " | " 32 | } 33 | } -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/Parallel.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | 16 | /** A marker trait for collections which have their operations parallelised. 17 | */ 18 | trait Parallel 19 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/Parallelizable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | 16 | import parallel.Combiner 17 | import scala.annotation.unchecked.uncheckedVariance 18 | 19 | /** This trait describes collections which can be turned into parallel collections 20 | * by invoking the method `par`. Parallelizable collections may be parameterized with 21 | * a target type different from their own. 22 | * 23 | * @tparam A the type of the elements in the collection 24 | * @tparam ParRepr the actual type of the collection, which has to be parallel 25 | */ 26 | trait Parallelizable[+A, +ParRepr <: Parallel] extends Any { 27 | 28 | def seq: IterableOnce[A] 29 | 30 | /** Returns a parallel implementation of this collection. 31 | * 32 | * For most collection types, this method creates a new parallel collection by copying 33 | * all the elements. For these collections, `par` takes linear time.
Mutable collections 34 | * in this category do not produce a mutable parallel collection that has the same 35 | * underlying dataset, so changes in one collection will not be reflected in the other one. 36 | * 37 | * Specific collections (e.g. `ParArray` or `mutable.ParHashMap`) override this default 38 | * behaviour by creating a parallel collection which shares the same underlying dataset. 39 | * For these collections, `par` takes constant or sublinear time. 40 | * 41 | * All parallel collections return a reference to themselves. 42 | * 43 | * @return a parallel implementation of this collection 44 | */ 45 | def par: ParRepr = parCombiner.fromSequential(seq) 46 | 47 | /** The default `par` implementation uses the combiner provided by this method 48 | * to create a new parallel collection. 49 | * 50 | * @return a combiner for the parallel collection of type `ParRepr` 51 | */ 52 | protected[this] def parCombiner: Combiner[A @uncheckedVariance, ParRepr] 53 | } 54 | 55 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/CanCombineFrom.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | import scala.collection.parallel._ 18 | 19 | /** A base trait for parallel builder factories. 20 | * 21 | * @tparam From the type of the underlying collection that requests a 22 | * builder to be created. 23 | * @tparam Elem the element type of the collection to be created. 24 | * @tparam To the type of the collection to be created. 25 | */ 26 | trait CanCombineFrom[-From, -Elem, +To] extends Parallel { 27 | def apply(from: From): Combiner[Elem, To] 28 | def apply(): Combiner[Elem, To] 29 | } 30 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/GenericParCompanion.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | import scala.collection.parallel.Combiner 18 | import scala.collection.parallel.ParIterable 19 | import scala.collection.parallel.ParMap 20 | import scala.language.implicitConversions 21 | 22 | /** A template class for companion objects of parallel collection classes. 23 | * They should be mixed in together with `GenericCompanion` type. 24 | * 25 | * @define Coll `ParIterable` 26 | * @tparam CC the type constructor representing the collection class 27 | * @since 2.8 28 | */ 29 | trait GenericParCompanion[+CC[X] <: ParIterable[X]] { 30 | 31 | // `empty` and `apply` were previously inherited from `GenericCompanion` but this class 32 | // has been removed in 2.13. I’ve copied their old implementation here. 
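// For illustration, a hypothetical sketch of what these members enable at call
  // sites (not part of the original source):
  //   val xs = ParSeq(1, 2, 3)     // `apply`, built via `newBuilder`
  //   val none = ParSeq.empty[Int] // `empty`, also built via `newBuilder`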
33 | 34 | /** An empty collection of type `$Coll[A]` 35 | * @tparam A the type of the ${coll}'s elements 36 | */ 37 | def empty[A]: CC[A] = newBuilder[A].result() 38 | 39 | /** Creates a $coll with the specified elements. 40 | * @tparam A the type of the ${coll}'s elements 41 | * @param elems the elements of the created $coll 42 | * @return a new $coll with elements `elems` 43 | */ 44 | def apply[A](elems: A*): CC[A] = { 45 | if (elems.isEmpty) empty[A] 46 | else { 47 | val b = newBuilder[A] 48 | b ++= elems 49 | b.result() 50 | } 51 | } 52 | 53 | /** The default builder for $Coll objects. 54 | */ 55 | def newBuilder[A]: Combiner[A, CC[A]] 56 | 57 | /** The parallel builder for $Coll objects. 58 | */ 59 | def newCombiner[A]: Combiner[A, CC[A]] 60 | 61 | implicit def toFactory[A]: Factory[A, CC[A]] = GenericParCompanion.toFactory(this) 62 | 63 | } 64 | 65 | 66 | // TODO Specialize `Factory` with parallel collection creation methods so that the `xs.to(ParArray)` syntax 67 | // does build the resulting `ParArray` in parallel 68 | object GenericParCompanion { 69 | /** 70 | * Implicit conversion for converting any `ParFactory` into a sequential `Factory`. 71 | * This provides support for the `to` conversion method (e.g., `xs.to(ParArray)`). 72 | */ 73 | implicit def toFactory[A, CC[X] <: ParIterable[X]](parFactory: GenericParCompanion[CC]): Factory[A, CC[A]] = 74 | new ToFactory(parFactory) 75 | 76 | @SerialVersionUID(3L) 77 | private class ToFactory[A, CC[X] <: ParIterable[X]](parFactory: GenericParCompanion[CC]) 78 | extends Factory[A, CC[A]] with Serializable { 79 | def fromSpecific(it: IterableOnce[A]): CC[A] = (parFactory.newBuilder[A] ++= it).result() 80 | def newBuilder: mutable.Builder[A, CC[A]] = parFactory.newBuilder 81 | } 82 | 83 | } 84 | 85 | trait GenericParMapCompanion[+CC[P, Q] <: ParMap[P, Q]] { 86 | 87 | def newCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] 88 | 89 | implicit def toFactory[K, V]: Factory[(K, V), CC[K, V]] = GenericParMapCompanion.toFactory(this) 90 | 91 | } 92 | 93 | object GenericParMapCompanion { 94 | /** 95 | * Implicit conversion for converting any `ParFactory` into a sequential `Factory`. 96 | * This provides support for the `to` conversion method (e.g., `xs.to(ParMap)`). 97 | */ 98 | implicit def toFactory[K, V, CC[X, Y] <: ParMap[X, Y]]( 99 | parFactory: GenericParMapCompanion[CC] 100 | ): Factory[(K, V), CC[K, V]] = 101 | new ToFactory[K, V, CC](parFactory) 102 | 103 | @SerialVersionUID(3L) 104 | private class ToFactory[K, V, CC[X, Y] <: ParMap[X, Y]]( 105 | parFactory: GenericParMapCompanion[CC] 106 | ) extends Factory[(K, V), CC[K, V]] with Serializable { 107 | def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = (parFactory.newCombiner[K, V] ++= it).result() 108 | def newBuilder: mutable.Builder[(K, V), CC[K, V]] = parFactory.newCombiner 109 | } 110 | 111 | } 112 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/GenericParTemplate.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership.
11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | import scala.collection.parallel.Combiner 18 | import scala.collection.parallel.ParIterable 19 | import scala.collection.parallel.ParMap 20 | 21 | import scala.annotation.unchecked.uncheckedVariance 22 | 23 | /** A template trait for collections having a companion. 24 | * 25 | * @tparam A the element type of the collection 26 | * @tparam CC the type constructor representing the collection class 27 | */ 28 | trait GenericParTemplate[+A, +CC[X] <: ParIterable[X]] 29 | extends GenericTraversableTemplate[A, CC] 30 | with HasNewCombiner[A, CC[A] @uncheckedVariance] 31 | { 32 | def companion: GenericParCompanion[CC] 33 | 34 | protected[this] override def newBuilder = newCombiner 35 | 36 | protected[this] override def newCombiner = companion.newCombiner[A] 37 | 38 | override def genericBuilder[B]: Combiner[B, CC[B]] = genericCombiner[B] 39 | 40 | def genericCombiner[B]: Combiner[B, CC[B]] = { 41 | val cb = companion.newCombiner[B] 42 | cb 43 | } 44 | 45 | } 46 | 47 | 48 | trait GenericParMapTemplate[K, +V, +CC[X, Y] <: ParMap[X, Y]] extends GenericParTemplate[(K, V), ParIterable] 49 | { 50 | protected[this] override def newCombiner: Combiner[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = { 51 | val cb = mapCompanion.newCombiner[K, V] 52 | cb 53 | } 54 | 55 | def mapCompanion: GenericParMapCompanion[CC] 56 | 57 | def genericMapCombiner[P, Q]: Combiner[(P, Q), CC[P, Q]] = { 58 | val cb = mapCompanion.newCombiner[P, Q] 59 | cb 60 | } 61 | } 62 | 63 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/HasNewBuilder.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | import mutable.Builder 18 | import scala.annotation.unchecked.uncheckedVariance 19 | 20 | trait HasNewBuilder[+A, +Repr] extends Any { 21 | /** The builder that builds instances of Repr */ 22 | protected[this] def newBuilder: Builder[A @uncheckedVariance, Repr] 23 | } 24 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/HasNewCombiner.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | import scala.collection.parallel.Combiner 18 | import scala.annotation.unchecked.uncheckedVariance 19 | 20 | trait HasNewCombiner[+T, +Repr] { 21 | protected[this] def newCombiner: Combiner[T @uncheckedVariance, Repr] 22 | } 23 | 24 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/ParMapFactory.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | import scala.collection.parallel.ParMap 18 | import scala.collection.parallel.ParMapLike 19 | import scala.collection.parallel.Combiner 20 | 21 | /** A template class for companion objects of `ParMap` and subclasses thereof. 22 | * This class extends `GenericParMapCompanion` and provides a set of operations 23 | * to create `$Coll` objects. 24 | * 25 | * @define coll parallel map 26 | * @define Coll `ParMap` 27 | * @define factoryInfo 28 | * This object provides a set of operations needed to create `$Coll` values. 29 | */ 30 | abstract class ParMapFactory[CC[X, Y] <: ParMap[X, Y] with ParMapLike[X, Y, CC, CC[X, Y], Sequential[X, Y]], Sequential[X, Y] <: collection.Map[X, Y] with collection.MapOps[X, Y, Sequential, Sequential[X, Y]]] 31 | extends GenericParMapCompanion[CC] { 32 | 33 | // `apply` and `empty` methods were previously inherited from `GenMapFactory`, which 34 | // has been removed from the Scala library in 2.13 35 | 36 | /** A collection of type $Coll that contains the given key/value bindings. 37 | * @param elems the key/value pairs that make up the $coll 38 | * @tparam K the type of the keys 39 | * @tparam V the type of the associated values 40 | * @return a new $coll consisting of the key/value pairs given by `elems`. 41 | */ 42 | def apply[K, V](elems: (K, V)*): CC[K, V] = (newCombiner[K, V] ++= elems).result() 43 | 44 | def empty[K, V]: CC[K, V] 45 | 46 | /** The default builder for $Coll objects. 47 | * @tparam K the type of the keys 48 | * @tparam V the type of the associated values 49 | */ 50 | def newBuilder[K, V]: mutable.Builder[(K, V), CC[K, V]] = newCombiner[K, V] 51 | 52 | /** The default combiner for $Coll objects. 53 | * @tparam K the type of the keys 54 | * @tparam V the type of the associated values 55 | */ 56 | def newCombiner[K, V]: Combiner[(K, V), CC[K, V]] 57 | 58 | class CanCombineFromMap[FromK, FromV, K, V] extends CanCombineFrom[CC[FromK, FromV], (K, V), CC[K, V]] { 59 | def apply(from: CC[FromK, FromV]) = from.genericMapCombiner[K, V].asInstanceOf[Combiner[(K, V), CC[K, V]]] 60 | def apply() = newCombiner[K, V] 61 | } 62 | 63 | } 64 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/ParSetFactory.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0).
8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | import scala.collection.parallel.Combiner 18 | import scala.collection.parallel.ParSet 19 | import scala.collection.parallel.ParSetLike 20 | 21 | /** 22 | * @define factoryInfo 23 | * This object provides a set of operations needed to create `$Coll` values. 24 | */ 25 | abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC, CC[X], ?] with GenericParTemplate[X, CC]] 26 | extends GenericParCompanion[CC] { 27 | def newBuilder[A]: Combiner[A, CC[A]] = newCombiner[A] 28 | 29 | def newCombiner[A]: Combiner[A, CC[A]] 30 | 31 | class GenericCanCombineFrom[B, A] extends CanCombineFrom[CC[B], A, CC[A]] { 32 | override def apply(from: CC[B]) = from.genericCombiner[A] 33 | override def apply() = newCombiner[A] 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/Signalling.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | import java.util.concurrent.atomic.AtomicInteger 18 | 19 | /** 20 | * A message interface serves as a unique interface to the 21 | * part of the collection capable of receiving messages from 22 | * a different task. 23 | * 24 | * One example of use of this is the `find` method, which can use the 25 | * signalling interface to inform worker threads that an element has 26 | * been found and no further search is necessary. 27 | * 28 | * @define abortflag 29 | * Abort flag being true means that a worker can abort and produce whatever result, 30 | * since its result will not affect the final result of computation. An example 31 | * of operations using this are `find`, `forall` and `exists` methods. 32 | * 33 | * @define indexflag 34 | * The index flag holds an integer which carries some operation-specific meaning. For 35 | * instance, `takeWhile` operation sets the index flag to the position of the element 36 | * where the predicate fails. Other workers may check this index against the indices 37 | * they are working on and return if this index is smaller than their index. Examples 38 | * of operations using this are `takeWhile`, `dropWhile`, `span` and `indexOf`. 39 | */ 40 | trait Signalling { 41 | /** 42 | * Checks whether an abort signal has been issued. 43 | * 44 | * $abortflag 45 | * @return the state of the abort 46 | */ 47 | def isAborted: Boolean 48 | 49 | /** 50 | * Sends an abort signal to other workers. 51 | * 52 | * $abortflag 53 | */ 54 | def abort(): Unit 55 | 56 | /** 57 | * Returns the value of the index flag. 58 | * 59 | * $indexflag 60 | * @return the value of the index flag 61 | */ 62 | def indexFlag: Int 63 | 64 | /** 65 | * Sets the value of the index flag. 66 | * 67 | * $indexflag 68 | * @param f the value to which the index flag is set. 69 | */ 70 | def setIndexFlag(f: Int): Unit 71 | 72 | /** 73 | * Sets the value of the index flag if argument is greater than current value. 
74 | * This method does this atomically. 75 | * 76 | * $indexflag 77 | * @param f the value to which the index flag is set 78 | */ 79 | def setIndexFlagIfGreater(f: Int): Unit 80 | 81 | /** 82 | * Sets the value of the index flag if the argument is less than the current value. 83 | * This method does this atomically. 84 | * 85 | * $indexflag 86 | * @param f the value to which the index flag is set 87 | */ 88 | def setIndexFlagIfLesser(f: Int): Unit 89 | 90 | /** 91 | * A read-only tag specific to the signalling object. It is used to give 92 | * specific workers information on the part of the collection being operated on. 93 | */ 94 | def tag: Int 95 | } 96 | 97 | /** 98 | * This signalling implementation returns default values and ignores received signals. 99 | */ 100 | class DefaultSignalling extends Signalling with VolatileAbort { 101 | def indexFlag = -1 102 | def setIndexFlag(f: Int): Unit = () 103 | def setIndexFlagIfGreater(f: Int): Unit = () 104 | def setIndexFlagIfLesser(f: Int): Unit = () 105 | 106 | def tag = -1 107 | } 108 | 109 | /** 110 | * An object that returns default values and ignores received signals. 111 | */ 112 | object IdleSignalling extends DefaultSignalling 113 | 114 | /** 115 | * A mixin trait that implements abort flag behaviour using volatile variables. 116 | */ 117 | trait VolatileAbort extends Signalling { 118 | @volatile private var abortflag = false 119 | override def isAborted = abortflag 120 | override def abort() = abortflag = true 121 | } 122 | 123 | /** 124 | * A mixin trait that implements index flag behaviour using atomic integers. 125 | * The `setIndexFlag` operation is wait-free, while the conditional set operations `setIndexFlagIfGreater` 126 | * and `setIndexFlagIfLesser` are lock-free and support only monotonic changes. 127 | */ 128 | trait AtomicIndexFlag extends Signalling { 129 | private val intflag: AtomicInteger = new AtomicInteger(-1) 130 | abstract override def indexFlag = intflag.get 131 | abstract override def setIndexFlag(f: Int) = intflag.set(f) 132 | abstract override def setIndexFlagIfGreater(f: Int) = { 133 | var loop = true 134 | while (loop) { 135 | val old = intflag.get 136 | if (f <= old) loop = false 137 | else if (intflag.compareAndSet(old, f)) loop = false 138 | } 139 | } 140 | abstract override def setIndexFlagIfLesser(f: Int) = { 141 | var loop = true 142 | while (loop) { 143 | val old = intflag.get 144 | if (f >= old) loop = false 145 | else if (intflag.compareAndSet(old, f)) loop = false 146 | } 147 | } 148 | } 149 | 150 | /** 151 | * An implementation of the signalling interface using delegates. 152 | */ 153 | trait DelegatedSignalling extends Signalling { 154 | /** 155 | * A delegate that method calls are redirected to. 156 | */ 157 | var signalDelegate: Signalling 158 | 159 | def isAborted = signalDelegate.isAborted 160 | def abort() = signalDelegate.abort() 161 | 162 | def indexFlag = signalDelegate.indexFlag 163 | def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f) 164 | def setIndexFlagIfGreater(f: Int) = signalDelegate.setIndexFlagIfGreater(f) 165 | def setIndexFlagIfLesser(f: Int) = signalDelegate.setIndexFlagIfLesser(f) 166 | 167 | def tag = signalDelegate.tag 168 | } 169 | 170 | /** 171 | * Class implementing delegated signalling. 172 | */ 173 | class DelegatedContext(var signalDelegate: Signalling) extends DelegatedSignalling 174 | 175 | /** 176 | * Class implementing delegated signalling, but having its own distinct `tag`.
177 | */ 178 | class TaggedDelegatedContext(deleg: Signalling, override val tag: Int) extends DelegatedContext(deleg) 179 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/generic/Sizing.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package generic 16 | 17 | /** A trait for objects which have a size. 18 | */ 19 | trait Sizing { 20 | def size: Int 21 | } 22 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/Combiner.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.Parallel 17 | import scala.collection.mutable.Builder 18 | import scala.collection.generic.Sizing 19 | 20 | /** The base trait for all combiners. 21 | * A combiner supports incremental collection construction just like 22 | * a regular builder, but also implements an efficient merge operation of two builders 23 | * via the `combine` method. Once the collection is constructed, it may be obtained by invoking 24 | * the `result` method. 25 | * 26 | * The complexity of the `combine` method should be less than linear for best 27 | * performance. The `result` method doesn't have to be a constant time operation, 28 | * but may be performed in parallel. 29 | * 30 | * @tparam Elem the type of the elements added to the builder 31 | * @tparam To the type of the collection the builder produces 32 | */ 33 | trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { 34 | 35 | @transient 36 | @volatile 37 | var _combinerTaskSupport = defaultTaskSupport 38 | 39 | def combinerTaskSupport = { 40 | val cts = _combinerTaskSupport 41 | if (cts eq null) { 42 | _combinerTaskSupport = defaultTaskSupport 43 | defaultTaskSupport 44 | } else cts 45 | } 46 | 47 | def combinerTaskSupport_=(cts: TaskSupport) = _combinerTaskSupport = cts 48 | 49 | /** Combines the contents of the receiver builder and the `other` builder, 50 | * producing a new builder containing both their elements. 51 | * 52 | * This method may combine the two builders by copying them into a larger collection, 53 | * by producing a lazy view that gets evaluated once `result` is invoked, or by using 54 | * a merge operation specific to the data structure in question. 55 | * 56 | * Note that both the receiver builder and `other` builder become invalidated 57 | * after the invocation of this method, and should be cleared (see `clear`) 58 | * if they are to be used again.
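 * For example, a hypothetical usage sketch (`b1` and `b2` are combiners over the
 * same element type, built by separate worker tasks; not part of the original doc):
 * {{{
 *   val merged = b1.combine(b2) // b1 and b2 must not be reused afterwards
 *   val coll = merged.result()
 * }}}
 *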
59 | * 60 | * Also, combining two combiners `c1` and `c2` for which `c1 eq c2` is `true`, that is, 61 | * they are the same objects in memory: 62 | * 63 | * {{{ 64 | * c1.combine(c2) 65 | * }}} 66 | * 67 | * always does nothing and returns `c1`. 68 | * 69 | * @tparam N the type of elements contained by the `other` builder 70 | * @tparam NewTo the type of collection produced by the `other` builder 71 | * @param other the other builder 72 | * @return the parallel builder containing both the elements of this and the `other` builder 73 | */ 74 | def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] 75 | 76 | /** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared 77 | * across several threads constructing the collection. 78 | * 79 | * By default, this method returns `false`. 80 | */ 81 | def canBeShared: Boolean = false 82 | 83 | /** Constructs the result and sets the appropriate tasksupport object to the resulting collection 84 | * if this is applicable. 85 | */ 86 | def resultWithTaskSupport: To = { 87 | val res = result() 88 | setTaskSupport(res, combinerTaskSupport) 89 | } 90 | 91 | /** Add all elements from a sequential collection and return the result. 92 | */ 93 | def fromSequential(seq: IterableOnce[Elem]): To = { 94 | for (x <- seq.iterator) this += x 95 | result() 96 | } 97 | } 98 | 99 | /* 100 | private[collection] trait EnvironmentPassingCombiner[-Elem, +To] extends Combiner[Elem, To] { 101 | abstract override def result = { 102 | val res = super.result 103 | res 104 | } 105 | } 106 | */ 107 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/ParIterable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.generic._ 17 | import scala.collection.parallel.mutable.ParArrayCombiner 18 | 19 | /** A template trait for parallel iterable collections. 20 | * 21 | * $paralleliterableinfo 22 | * 23 | * $sideeffects 24 | * 25 | * @tparam T the element type of the collection 26 | */ 27 | trait ParIterable[+T] 28 | extends GenericParTemplate[T, ParIterable] 29 | with ParIterableLike[T, ParIterable, ParIterable[T], Iterable[T]] { 30 | def companion: GenericParCompanion[ParIterable] = ParIterable 31 | 32 | def stringPrefix = "ParIterable" 33 | } 34 | 35 | /** $factoryInfo 36 | */ 37 | object ParIterable extends ParFactory[ParIterable] { 38 | implicit def canBuildFrom[T, S]: CanCombineFrom[ParIterable[S], T, ParIterable[T]] = new GenericCanCombineFrom[S, T] 39 | 40 | def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]() 41 | 42 | def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]() 43 | } 44 | 45 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/ParMap.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. 
dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.Map 17 | import scala.collection.generic.ParMapFactory 18 | import scala.collection.generic.GenericParMapTemplate 19 | import scala.collection.generic.GenericParMapCompanion 20 | import scala.collection.generic.CanCombineFrom 21 | 22 | /** A template trait for parallel maps. 23 | * 24 | * $sideeffects 25 | * 26 | * @tparam K the key type of the map 27 | * @tparam V the value type of the map 28 | */ 29 | trait ParMap[K, +V] 30 | extends GenericParMapTemplate[K, V, ParMap] 31 | with ParIterable[(K, V)] 32 | with ParMapLike[K, V, ParMap, ParMap[K, V], Map[K, V]] 33 | { 34 | self => 35 | 36 | def mapCompanion: GenericParMapCompanion[ParMap] = ParMap 37 | 38 | //protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] 39 | 40 | def empty: ParMap[K, V] = new mutable.ParHashMap[K, V] 41 | 42 | override def stringPrefix = "ParMap" 43 | 44 | } 45 | 46 | 47 | 48 | object ParMap extends ParMapFactory[ParMap, collection.Map] { 49 | def empty[K, V]: ParMap[K, V] = new mutable.ParHashMap[K, V] 50 | 51 | def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = mutable.ParHashMapCombiner[K, V] 52 | 53 | implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParMap[FromK, FromV], (K, V), ParMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V] 54 | 55 | /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map 56 | * because of variance issues. 57 | */ 58 | abstract class WithDefault[A, +B](underlying: ParMap[A, B], d: A => B) extends ParMap[A, B] { 59 | def size = underlying.size 60 | def get(key: A) = underlying.get(key) 61 | def splitter = underlying.splitter 62 | override def default(key: A): B = d(key) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/ParSeq.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.generic.GenericParCompanion 17 | import scala.collection.generic.GenericParTemplate 18 | import scala.collection.generic.ParFactory 19 | import scala.collection.generic.CanCombineFrom 20 | import scala.collection.parallel.mutable.ParArrayCombiner 21 | 22 | /** A template trait for parallel sequences. 
23 | * 24 | * $parallelseqinfo 25 | * 26 | * $sideeffects 27 | * 28 | * @tparam T the type of the elements in this parallel sequence 29 | */ 30 | trait ParSeq[+T] extends ParIterable[T] 31 | with GenericParTemplate[T, ParSeq] 32 | with ParSeqLike[T, ParSeq, ParSeq[T], scala.collection.Seq[T]] 33 | { 34 | override def companion: GenericParCompanion[ParSeq] = ParSeq 35 | //protected[this] override def newBuilder = ParSeq.newBuilder[T] 36 | 37 | def apply(i: Int): T 38 | 39 | override def toString = super[ParIterable].toString 40 | 41 | override def stringPrefix = getClass.getSimpleName 42 | } 43 | 44 | object ParSeq extends ParFactory[ParSeq] { 45 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParSeq[S], T, ParSeq[T]] = new GenericCanCombineFrom[S, T] 46 | 47 | def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]() 48 | def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]() 49 | } 50 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/ParSet.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel 16 | 17 | import scala.collection.generic._ 18 | 19 | /** A template trait for parallel sets. 20 | * 21 | * $sideeffects 22 | * 23 | * @tparam T the element type of the set 24 | */ 25 | trait ParSet[T] 26 | extends GenericParTemplate[T, ParSet] 27 | with ParIterable[T] 28 | with ParSetLike[T, ParSet, ParSet[T], Set[T]] 29 | { self => 30 | 31 | override def empty: ParSet[T] = mutable.ParHashSet[T]() 32 | 33 | //protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T] 34 | 35 | override def companion: GenericParCompanion[ParSet] = ParSet 36 | 37 | override def stringPrefix = "ParSet" 38 | } 39 | 40 | object ParSet extends ParSetFactory[ParSet] { 41 | def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T] 42 | 43 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParSet[S], T, ParSet[T]] = new GenericCanCombineFrom[S, T] 44 | } 45 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/ParSetLike.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.{Set, SetOps} 17 | 18 | /** A template trait for parallel sets. This trait is mixed in with concrete 19 | * parallel sets to override the representation type. 
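 * For example, an illustrative sketch of the set algebra defined below (assuming a
 * concrete `ParSet` companion in scope; not taken from the original doc):
 * {{{
 *   ParSet(1, 2, 3) & ParSet(2, 3, 4)  // ParSet(2, 3)
 *   ParSet(1, 2, 3) &~ ParSet(2, 3, 4) // ParSet(1)
 * }}}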
20 | * 21 | * $sideeffects 22 | * 23 | * @tparam T the element type of the set 24 | * @define Coll `ParSet` 25 | * @define coll parallel set 26 | */ 27 | trait ParSetLike[T, 28 | +CC[X] <: ParIterable[X], 29 | +Repr <: ParSet[T], 30 | +Sequential <: Set[T] with SetOps[T, Set, Sequential]] 31 | extends ParIterableLike[T, CC, Repr, Sequential] 32 | with (T => Boolean) 33 | with Equals 34 | { self => 35 | 36 | // --- Members previously inherited from GenSetLike 37 | def contains(elem: T): Boolean 38 | final def apply(elem: T): Boolean = contains(elem) 39 | def +(elem: T): Repr 40 | def -(elem: T): Repr 41 | 42 | /** Computes the intersection between this set and another set. 43 | * 44 | * @param that the set to intersect with. 45 | * @return a new set consisting of all elements that are both in this 46 | * set and in the given set `that`. 47 | */ 48 | def intersect(that: ParSet[T]): Repr = this filter that 49 | def intersect(that: Set[T]): Repr = this filter that 50 | 51 | /** Computes the intersection between this set and another set. 52 | * 53 | * '''Note:''' Same as `intersect`. 54 | * @param that the set to intersect with. 55 | * @return a new set consisting of all elements that are both in this 56 | * set and in the given set `that`. 57 | */ 58 | def &(that: ParSet[T]): Repr = this intersect that 59 | def &(that: Set[T]): Repr = this intersect that 60 | 61 | /** Computes the union between this set and another set. 62 | * 63 | * '''Note:''' Same as `union`. 64 | * @param that the set to form the union with. 65 | * @return a new set consisting of all elements that are in this 66 | * set or in the given set `that`. 67 | */ 68 | def | (that: ParSet[T]): Repr = this union that 69 | def | (that: Set[T]): Repr = this union that 70 | 71 | /** The difference of this set and another set. 72 | * 73 | * '''Note:''' Same as `diff`. 74 | * @param that the set of elements to exclude. 75 | * @return a set containing those elements of this 76 | * set that are not also contained in the given set `that`. 77 | */ 78 | def &~(that: ParSet[T]): Repr = this diff that 79 | def &~(that: Set[T]): Repr = this diff that 80 | 81 | /** Tests whether this set is a subset of another set. 82 | * 83 | * @param that the set to test. 84 | * @return `true` if this set is a subset of `that`, i.e. if 85 | * every element of this set is also an element of `that`. 86 | */ 87 | def subsetOf(that: ParSet[T]): Boolean = this.forall(that) 88 | 89 | /** Compares this set with another object for equality. 90 | * 91 | * '''Note:''' This operation contains an unchecked cast: if `that` 92 | * is a set, it will assume with an unchecked cast 93 | * that it has the same element type as this set. 94 | * Any subsequent ClassCastException is treated as a `false` result. 95 | * @param that the other object 96 | * @return `true` if `that` is a set which contains the same elements 97 | * as this set. 98 | */ 99 | override def equals(that: Any): Boolean = that match { 100 | case that: ParSet[?] => 101 | (this eq that) || 102 | (that canEqual this) && 103 | (this.size == that.size) && 104 | (try this subsetOf that.asInstanceOf[ParSet[T]] 105 | catch { case ex: ClassCastException => false }) 106 | case _ => 107 | false 108 | } 109 | 110 | // Careful! Don't write a Set's hashCode like: 111 | // override def hashCode() = this map (_.hashCode) sum 112 | // Calling map on a set drops duplicates: any hashcode collisions would 113 | // then be dropped before they can be added. 
114 | // Hash should be symmetric in set entries, but without trivial collisions. 115 | override def hashCode() = scala.util.hashing.MurmurHash3.unorderedHash(this, "ParSet".hashCode) 116 | 117 | def canEqual(other: Any): Boolean = true 118 | // --- 119 | 120 | def empty: Repr 121 | 122 | // note: should not override toSet (could be mutable) 123 | 124 | def union(that: Set[T]): Repr = sequentially { 125 | _ union that 126 | } 127 | 128 | def union(that: ParSet[T]): Repr = sequentially { 129 | _ union that.seq 130 | } 131 | 132 | def diff(that: Set[T]): Repr = sequentially { 133 | _ diff that 134 | } 135 | 136 | def diff(that: ParSet[T]): Repr = sequentially { 137 | _ diff that.seq 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/PreciseSplitter.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.Seq 17 | 18 | /** A precise splitter (or a precise split iterator) can be split into an arbitrary number of splitters 19 | * that traverse disjoint subsets of arbitrary sizes. 20 | * 21 | * Implementors might want to override the parameterless `split` method for efficiency. 22 | * 23 | * @tparam T type of the elements this splitter traverses 24 | */ 25 | trait PreciseSplitter[+T] extends Splitter[T] { 26 | 27 | /** Splits the splitter into disjunct views. 28 | * 29 | * This sized variant of the `split` method is specific to precise splitters. 30 | * It returns a sequence of splitters, each iterating over some subset of the 31 | * elements in this splitter. The sizes of the subsplitters in the partition are equal to 32 | * the sizes given by the corresponding arguments, as long as there are enough elements in this 33 | * splitter to split it that way. 34 | * 35 | * If there aren't enough elements, a zero-element splitter is appended for each additional argument. 36 | * If there are additional elements, an additional splitter is appended at the end to compensate. 37 | * 38 | * For example, say we have a splitter `ps` with 100 elements. Invoking: 39 | * {{{ 40 | * ps.psplit(50, 25, 25, 10, 5) 41 | * }}} 42 | * will return a sequence of five splitters, the last two views being empty. On the other hand, calling: 43 | * {{{ 44 | * ps.psplit(50, 40) 45 | * }}} 46 | * will return a sequence of three splitters, the last of them containing ten elements. 47 | * 48 | * '''Note:''' this method actually invalidates the current splitter. 49 | * 50 | * Unlike the case with `split` found in splitters, views returned by this method can be empty.
51 | * 52 | * @param sizes the sizes used to split this split iterator into iterators that traverse disjunct subsets 53 | * @return a sequence of disjunct subsequence iterators of this parallel iterator 54 | */ 55 | def psplit(sizes: Int*): Seq[PreciseSplitter[T]] 56 | 57 | def split: Seq[PreciseSplitter[T]] 58 | } 59 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/Splitter.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.{ Seq, Iterator } 17 | 18 | /** A splitter (or a split iterator) can be split into more splitters that traverse over 19 | * disjoint subsets of elements. 20 | * 21 | * @tparam T type of the elements this splitter traverses 22 | */ 23 | trait Splitter[+T] extends Iterator[T] { 24 | 25 | /** Splits the iterator into a sequence of disjunct views. 26 | * 27 | * Returns a sequence of split iterators, each iterating over some subset of the 28 | * elements in the collection. These subsets are disjoint and should be approximately 29 | * equal in size. These subsets are not empty, unless the iterator is empty, in which 30 | * case this method returns a sequence with a single empty iterator. If the splitter has 31 | * more than two elements, this method will return two or more splitters. 32 | * 33 | * Implementors are advised to keep this partition relatively small - two splitters are 34 | * already enough when partitioning the collection, although there may be a few more. 35 | * 36 | * '''Note:''' this method actually invalidates the current splitter. 37 | * 38 | * @return a sequence of disjunct iterators of the collection 39 | */ 40 | def split: Seq[Splitter[T]] 41 | /* 42 | * '''Note:''' splitters in this sequence may actually be empty and it can contain a splitter 43 | * which iterates over the same elements as the original splitter AS LONG AS calling `split` 44 | * a finite number of times on the resulting splitters eventually returns a nontrivial partition. 45 | * 46 | * Note that the docs contract above yields implementations which are a subset of implementations 47 | * defined by this fine print. 48 | * 49 | * The rationale behind this is best given by the following example: 50 | * try splitting an iterator over a linear hash table. 51 | */ 52 | } 53 | 54 | object Splitter { 55 | def empty[T]: Splitter[T] = new Splitter[T] { 56 | def hasNext = false 57 | def next() = Iterator.empty.next() 58 | def split = Seq(this) 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/TaskSupport.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership.
11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import java.util.concurrent.ForkJoinPool 17 | import scala.concurrent.ExecutionContext 18 | 19 | /** A trait implementing the scheduling of a parallel collection operation. 20 | * 21 | * Parallel collections are modular in the way operations are scheduled. Each 22 | * parallel collection is parameterized with a task support object which is 23 | * responsible for scheduling and load-balancing tasks to processors. 24 | * 25 | * A task support object can be changed in a parallel collection after it has 26 | * been created, but only during a quiescent period, i.e. while there are no 27 | * concurrent invocations of parallel collection methods. 28 | * 29 | * There are currently a few task support implementations available for 30 | * parallel collections. The [[scala.collection.parallel.ForkJoinTaskSupport]] 31 | * uses a fork-join pool internally. 32 | * 33 | * The [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the 34 | * default execution context implementation found in scala.concurrent, and it 35 | * reuses the thread pool used in scala.concurrent. 36 | * 37 | * The execution context task support is set on each parallel collection by 38 | * default, so parallel collections reuse the same fork-join pool as the 39 | * future API. 40 | * 41 | * Here is a way to change the task support of a parallel collection: 42 | * 43 | * {{{ 44 | * import scala.collection.parallel._ 45 | * val pc = mutable.ParArray(1, 2, 3) 46 | * pc.tasksupport = new ForkJoinTaskSupport( 47 | * new java.util.concurrent.ForkJoinPool(2)) 48 | * }}} 49 | * 50 | * @see [[http://docs.scala-lang.org/overviews/parallel-collections/configuration.html Configuring Parallel Collections]] section 51 | * of the parallel collections guide for more information. 52 | */ 53 | trait TaskSupport extends Tasks 54 | 55 | /** A task support that uses a fork-join pool to schedule tasks. 56 | * 57 | * @see [[scala.collection.parallel.TaskSupport]] for more information. 58 | */ 59 | class ForkJoinTaskSupport(val environment: ForkJoinPool = ForkJoinTasks.defaultForkJoinPool) 60 | extends TaskSupport with AdaptiveWorkStealingForkJoinTasks 61 | 62 | /** A task support that uses an execution context to schedule tasks. 63 | * 64 | * It can be used with the default execution context implementation in the 65 | * `scala.concurrent` package. It internally forwards the call to either a 66 | * fork-join based task support or a thread pool executor one, depending on 67 | * what the execution context uses. 68 | * 69 | * By default, parallel collections are parameterized with this task support 70 | * object, so parallel collections share the same execution context backend 71 | * as the rest of the `scala.concurrent` package. 72 | * 73 | * @see [[scala.collection.parallel.TaskSupport]] for more information. 74 | */ 75 | class ExecutionContextTaskSupport(val environment: ExecutionContext = scala.concurrent.ExecutionContext.global) 76 | extends TaskSupport with ExecutionContextTasks 77 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/immutable/ParIterable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0).
8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.immutable 16 | 17 | import scala.collection.generic._ 18 | import scala.collection.parallel.ParIterableLike 19 | import scala.collection.parallel.Combiner 20 | 21 | /** A template trait for immutable parallel iterable collections. 22 | * 23 | * $paralleliterableinfo 24 | * 25 | * $sideeffects 26 | * 27 | * @tparam T the element type of the collection 28 | */ 29 | trait ParIterable[+T] 30 | extends scala.collection.parallel.ParIterable[T] 31 | with GenericParTemplate[T, ParIterable] 32 | with ParIterableLike[T, ParIterable, ParIterable[T], scala.collection.immutable.Iterable[T]] 33 | { 34 | override def companion: GenericParCompanion[ParIterable] = ParIterable 35 | // if `immutable.ParIterableLike` is introduced, please move these methods there 36 | override def toIterable: ParIterable[T] = this 37 | override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) 38 | } 39 | 40 | /** $factoryInfo 41 | */ 42 | object ParIterable extends ParFactory[ParIterable] { 43 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParIterable[S], T, ParIterable[T]] = 44 | new GenericCanCombineFrom[S, T] 45 | 46 | def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T] 47 | def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T] 48 | } 49 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/immutable/ParMap.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.immutable 16 | 17 | import scala.collection.generic.ParMapFactory 18 | import scala.collection.generic.GenericParMapTemplate 19 | import scala.collection.generic.GenericParMapCompanion 20 | import scala.collection.generic.CanCombineFrom 21 | import scala.collection.parallel.Combiner 22 | 23 | /** A template trait for immutable parallel maps. 24 | * 25 | * $sideeffects 26 | * 27 | * @tparam K the key type of the map 28 | * @tparam V the value type of the map 29 | * 30 | */ 31 | trait ParMap[K, +V] 32 | extends GenericParMapTemplate[K, V, ParMap] 33 | with parallel.ParMap[K, V] 34 | with ParIterable[(K, V)] 35 | with ParMapLike[K, V, ParMap, ParMap[K, V], scala.collection.immutable.Map[K, V]] 36 | { 37 | self => 38 | 39 | override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap 40 | 41 | override def empty: ParMap[K, V] = new ParHashMap[K, V] 42 | 43 | override def stringPrefix = "ParMap" 44 | 45 | /** The same map with a given default function. 46 | * Note: `get`, `contains`, `iterator`, `keys`, etc. are not affected by `withDefault`. 47 | * 48 | * Invoking transformer methods (e.g. `map`) will not preserve the default value.
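 *
 * A small usage sketch with hypothetical values:
 * {{{
 * import scala.collection.parallel.immutable.ParMap
 *
 * val m = ParMap("a" -> 1).withDefault(_.length)
 * m("a")       // 1: the key is present
 * m("abc")     // 3: computed by the default function
 * m.get("abc") // None: `get` ignores the default
 * }}}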
49 | * 50 | * @param d the function mapping keys to values, used for non-present keys 51 | * @return a wrapper of the map with a default value 52 | */ 53 | def withDefault[U >: V](d: K => U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, d) 54 | 55 | /** The same map with a given default value. 56 | * 57 | * Invoking transformer methods (e.g. `map`) will not preserve the default value. 58 | * 59 | * @param d default value used for non-present keys 60 | * @return a wrapper of the map with a default value 61 | */ 62 | def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d) 63 | 64 | } 65 | 66 | trait ParMapLike[ 67 | K, 68 | +V, 69 | +CC[X, Y] <: ParMap[X, Y], 70 | +Repr <: ParMapLike[K, V, ParMap, Repr, Sequential] with ParMap[K, V], 71 | +Sequential <: Map[K, V] with MapOps[K, V, Map, Sequential]] 72 | extends parallel.ParMapLike[K, V, CC, Repr, Sequential] 73 | with parallel.ParIterableLike[(K, V), ParIterable, Repr, Sequential] { 74 | 75 | def mapCompanion: GenericParMapCompanion[CC] 76 | 77 | def empty: Repr 78 | 79 | override def toMap[P, Q](implicit ev: (K, V) <:< (P, Q)): ParMap[P, Q] = this.asInstanceOf[ParMap[P, Q]] 80 | 81 | override def updated [U >: V](key: K, value: U): CC[K, U] = this + ((key, value)) 82 | 83 | def + [U >: V](kv: (K, U)): CC[K, U] 84 | 85 | def - (key: K): Repr 86 | 87 | } 88 | 89 | 90 | 91 | object ParMap extends ParMapFactory[ParMap, scala.collection.immutable.Map] { 92 | def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] 93 | 94 | def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = HashMapCombiner[K, V] 95 | 96 | implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParMap[FromK, FromV], (K, V), ParMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V] 97 | 98 | class WithDefault[K, +V](underlying: ParMap[K, V], d: K => V) 99 | extends scala.collection.parallel.ParMap.WithDefault[K, V](underlying, d) with ParMap[K, V] { 100 | override def knownSize = underlying.knownSize 101 | override def empty = new WithDefault(underlying.empty, d) 102 | override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) 103 | override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) 104 | override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) 105 | override def withDefault[U >: V](d: K => U): ParMap[K, U] = new WithDefault[K, U](underlying, d) 106 | override def withDefaultValue[U >: V](d: U): ParMap[K, U] = new WithDefault[K, U](underlying, x => d) 107 | override def seq = underlying.seq.withDefault(d) 108 | } 109 | 110 | } 111 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/immutable/ParRange.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala 14 | package collection.parallel.immutable 15 | 16 | import scala.collection.immutable.Range 17 | import scala.collection.parallel.Combiner 18 | import scala.collection.parallel.SeqSplitter 19 | import scala.collection.Iterator 20 | 21 | /** Parallel ranges. 22 | * 23 | * $paralleliterableinfo 24 | * 25 | * $sideeffects 26 | * 27 | * @param range the sequential range this parallel range was obtained from 28 | * 29 | * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_range Scala's Parallel Collections Library overview]] 30 | * section on `ParRange` for more information. 31 | * 32 | * @define Coll `immutable.ParRange` 33 | * @define coll immutable parallel range 34 | */ 35 | @SerialVersionUID(1L) 36 | class ParRange(val range: Range) 37 | extends ParSeq[Int] 38 | with Serializable 39 | { 40 | self => 41 | 42 | override def seq = range 43 | 44 | @inline final def length = range.length 45 | @inline final override def knownSize = range.knownSize 46 | 47 | @inline final def apply(idx: Int) = range.apply(idx) 48 | 49 | def splitter = new ParRangeIterator 50 | 51 | class ParRangeIterator(range: Range = self.range) 52 | extends SeqSplitter[Int] { 53 | override def toString = "ParRangeIterator(over: " + range + ")" 54 | private var ind = 0 55 | private val len = range.length 56 | 57 | final def remaining = len - ind 58 | 59 | final def hasNext = ind < len 60 | 61 | final def next() = if (hasNext) { 62 | val r = range.apply(ind) 63 | ind += 1 64 | r 65 | } else Iterator.empty.next() 66 | 67 | private def rangeleft = range.drop(ind) 68 | 69 | def dup = new ParRangeIterator(rangeleft) 70 | 71 | def split = { 72 | val rleft = rangeleft 73 | val elemleft = rleft.length 74 | if (elemleft < 2) Seq(new ParRangeIterator(rleft)) 75 | else Seq( 76 | new ParRangeIterator(rleft.take(elemleft / 2)), 77 | new ParRangeIterator(rleft.drop(elemleft / 2)) 78 | ) 79 | } 80 | 81 | def psplit(sizes: Int*) = { 82 | var rleft = rangeleft 83 | for (sz <- sizes) yield { 84 | val fronttaken = rleft.take(sz) 85 | rleft = rleft.drop(sz) 86 | new ParRangeIterator(fronttaken) 87 | } 88 | } 89 | 90 | /* accessors */ 91 | 92 | override def foreach[U](f: Int => U): Unit = { 93 | rangeleft.foreach(f.asInstanceOf[Int => Unit]) 94 | ind = len 95 | } 96 | 97 | override def reduce[U >: Int](op: (U, U) => U): U = { 98 | val r = rangeleft.reduceLeft(op) 99 | ind = len 100 | r 101 | } 102 | 103 | /* transformers */ 104 | 105 | override def map2combiner[S, That](f: Int => S, cb: Combiner[S, That]): Combiner[S, That] = { 106 | while (hasNext) { 107 | cb += f(next()) 108 | } 109 | cb 110 | } 111 | } 112 | 113 | override def toString = s"Par$range" 114 | } 115 | 116 | object ParRange { 117 | def apply(start: Int, end: Int, step: Int, inclusive: Boolean) = new ParRange( 118 | if (inclusive) Range.inclusive(start, end, step) 119 | else Range(start, end, step) 120 | ) 121 | } 122 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/immutable/ParSeq.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.immutable 16 | 17 | import scala.collection.generic.{CanCombineFrom, GenericParCompanion, GenericParTemplate, ParFactory} 18 | import scala.collection.parallel.ParSeqLike 19 | import scala.collection.parallel.Combiner 20 | 21 | /** An immutable variant of `ParSeq`. 22 | * 23 | * @define Coll `immutable.ParSeq` 24 | * @define coll immutable parallel sequence 25 | */ 26 | trait ParSeq[+T] 27 | extends scala.collection.parallel.ParSeq[T] 28 | with ParIterable[T] 29 | with GenericParTemplate[T, ParSeq] 30 | with ParSeqLike[T, ParSeq, ParSeq[T], scala.collection.immutable.Seq[T]] 31 | { 32 | override def companion: GenericParCompanion[ParSeq] = ParSeq 33 | override def toSeq: ParSeq[T] = this 34 | } 35 | 36 | /** $factoryInfo 37 | * @define Coll `immutable.ParSeq` 38 | * @define coll immutable parallel sequence 39 | */ 40 | object ParSeq extends ParFactory[ParSeq] { 41 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParSeq[S], T, ParSeq[T]] = new GenericCanCombineFrom[S, T] 42 | 43 | def newBuilder[T]: Combiner[T, ParSeq[T]] = ParVector.newBuilder[T] 44 | def newCombiner[T]: Combiner[T, ParSeq[T]] = ParVector.newCombiner[T] 45 | } 46 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/immutable/ParSet.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.immutable 16 | 17 | import scala.collection.generic._ 18 | import scala.collection.parallel.ParSetLike 19 | import scala.collection.parallel.Combiner 20 | 21 | /** An immutable variant of `ParSet`. 22 | * 23 | * @define Coll `immutable.ParSet` 24 | * @define coll immutable parallel set 25 | */ 26 | trait ParSet[T] 27 | extends GenericParTemplate[T, ParSet] 28 | with parallel.ParSet[T] 29 | with ParIterable[T] 30 | with ParSetLike[T, ParSet, ParSet[T], scala.collection.immutable.Set[T]] 31 | { 32 | self => 33 | override def empty: ParSet[T] = ParHashSet[T]() 34 | 35 | override def companion: GenericParCompanion[ParSet] = ParSet 36 | 37 | override def stringPrefix = "ParSet" 38 | 39 | // ok, because this could only violate `apply` and we can live with that 40 | override def toSet[U >: T]: ParSet[U] = this.asInstanceOf[ParSet[U]] 41 | } 42 | 43 | /** $factoryInfo 44 | * @define Coll `immutable.ParSet` 45 | * @define coll immutable parallel set 46 | */ 47 | object ParSet extends ParSetFactory[ParSet] { 48 | def newCombiner[T]: Combiner[T, ParSet[T]] = HashSetCombiner[T] 49 | 50 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParSet[S], T, ParSet[T]] = new GenericCanCombineFrom[S, T] 51 | } 52 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/immutable/ParVector.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0).
8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.immutable 16 | 17 | import scala.collection.generic.{GenericParTemplate, CanCombineFrom, ParFactory} 18 | import scala.collection.parallel.ParSeqLike 19 | import scala.collection.parallel.Combiner 20 | import scala.collection.parallel.SeqSplitter 21 | import mutable.ArrayBuffer 22 | import immutable.Vector 23 | import immutable.VectorBuilder 24 | import immutable.VectorIterator 25 | 26 | /** Immutable parallel vectors, based on vectors. 27 | * 28 | * $paralleliterableinfo 29 | * 30 | * $sideeffects 31 | * 32 | * @tparam T the element type of the vector 33 | * 34 | * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_vector Scala's Parallel Collections Library overview]] 35 | * section on `ParVector` for more information. 36 | * 37 | * @define Coll `immutable.ParVector` 38 | * @define coll immutable parallel vector 39 | */ 40 | class ParVector[+T](private[this] val vector: Vector[T]) 41 | extends ParSeq[T] 42 | with GenericParTemplate[T, ParVector] 43 | with ParSeqLike[T, ParVector, ParVector[T], Vector[T]] 44 | with Serializable 45 | { 46 | override def companion = ParVector 47 | 48 | def this() = this(Vector()) 49 | 50 | def apply(idx: Int) = vector.apply(idx) 51 | 52 | def length = vector.length 53 | override def knownSize = vector.knownSize 54 | 55 | def splitter: SeqSplitter[T] = { 56 | val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) 57 | vector.initIterator(pit) 58 | pit 59 | } 60 | 61 | override def seq: Vector[T] = vector 62 | 63 | override def toVector: Vector[T] = vector 64 | 65 | // TODO Implement ParVectorIterator without extending VectorIterator, which will eventually 66 | // become private final. Inlining the contents of the current VectorIterator is not as easy 67 | // as it seems because it relies a lot on Vector internals. 68 | // Duplicating the whole Vector data structure seems to be the safest way, but we will lose 69 | // interoperability with the standard Vector.
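  // Sketch of the scheme used below: element iteration is inherited from the
  // sequential VectorIterator, while `split`/`psplit` slice the not-yet-traversed
  // suffix (`remainingVector`) into smaller vectors, one per subiterator.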
70 | class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] { 71 | def remaining: Int = remainingElementCount 72 | def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter 73 | def split: scala.collection.immutable.Seq[ParVectorIterator] = { 74 | val rem = remaining 75 | if (rem >= 2) psplit(rem / 2, rem - rem / 2) 76 | else scala.collection.immutable.Seq(this) 77 | } 78 | def psplit(sizes: Int*): scala.Seq[ParVectorIterator] = { 79 | var remvector = remainingVector 80 | val splitted = List.newBuilder[Vector[T]] 81 | for (sz <- sizes) { 82 | splitted += remvector.take(sz) 83 | remvector = remvector.drop(sz) 84 | } 85 | splitted.result().map(v => new ParVector(v).splitter.asInstanceOf[ParVectorIterator]) 86 | } 87 | } 88 | } 89 | 90 | /** $factoryInfo 91 | * @define Coll `immutable.ParVector` 92 | * @define coll immutable parallel vector 93 | */ 94 | object ParVector extends ParFactory[ParVector] { 95 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParVector[S], T, ParVector[T]] = 96 | new GenericCanCombineFrom[S, T] 97 | 98 | def newBuilder[T]: Combiner[T, ParVector[T]] = newCombiner[T] 99 | 100 | def newCombiner[T]: Combiner[T, ParVector[T]] = new LazyParVectorCombiner[T] // was: with EPC[T, ParVector[T]] 101 | } 102 | 103 | private[immutable] class LazyParVectorCombiner[T] extends Combiner[T, ParVector[T]] { 104 | //self: EnvironmentPassingCombiner[T, ParVector[T]] => 105 | var sz = 0 106 | val vectors = new ArrayBuffer[VectorBuilder[T]] += new VectorBuilder[T] 107 | 108 | def size: Int = sz 109 | 110 | def addOne(elem: T): this.type = { 111 | vectors.last += elem 112 | sz += 1 113 | this 114 | } 115 | 116 | def clear() = { 117 | vectors.clear() 118 | vectors += new VectorBuilder[T] 119 | sz = 0 120 | } 121 | 122 | def result(): ParVector[T] = { 123 | val rvb = new VectorBuilder[T] 124 | for (vb <- vectors) { 125 | rvb ++= vb.result() 126 | } 127 | new ParVector(rvb.result()) 128 | } 129 | 130 | def combine[U <: T, NewTo >: ParVector[T]](other: Combiner[U, NewTo]) = if (other eq this) this else { 131 | val that = other.asInstanceOf[LazyParVectorCombiner[T]] 132 | sz += that.sz 133 | vectors ++= that.vectors 134 | this 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/immutable/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.CustomParallelizable 17 | 18 | package immutable { 19 | 20 | /** A (parallel) sequence consisting of `length` elements `elem`. Used in the `padTo` method. 
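 *
 * A sketch of its role in `padTo`, with hypothetical sizes:
 * {{{
 * // Padding a 3-element parallel sequence to length 5 conceptually appends a
 * // Repetition(elem = 0, length = 2) rather than materializing the padding.
 * ParSeq(1, 2, 3).padTo(5, 0) // ParSeq(1, 2, 3, 0, 0)
 * }}}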
21 | * 22 | * @tparam T type of the elements 23 | * @param elem the element in the repetition 24 | * @param length the length of the collection 25 | */ 26 | private[parallel] class Repetition[T](elem: T, val length: Int) extends ParSeq[T] { 27 | self => 28 | 29 | def apply(idx: Int) = if (0 <= idx && idx < length) elem else throw new IndexOutOfBoundsException("" + idx) 30 | override def knownSize = length 31 | override def seq: collection.immutable.Seq[T] = new collection.AbstractSeq[T] with collection.immutable.Seq[T] with CustomParallelizable[T, ParSeq[T]] { 32 | override def length: Int = self.length 33 | override def apply(idx: Int): T = self.apply(idx) 34 | override def iterator: Iterator[T] = Iterator.continually(elem).take(length) 35 | override def par: ParSeq[T] = self 36 | } 37 | def update(idx: Int, elem: T) = throw new UnsupportedOperationException 38 | 39 | class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] { 40 | def remaining = until - i 41 | def hasNext = i < until 42 | def next() = { i += 1; elem } 43 | def dup = new ParIterator(i, until, elem) 44 | def psplit(sizes: Int*) = { 45 | val incr = sizes.scanLeft(0)(_ + _) 46 | for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) 47 | } 48 | def split = psplit(remaining / 2, remaining - remaining / 2) 49 | } 50 | 51 | def splitter = new ParIterator 52 | } 53 | } 54 | 55 | package object immutable { 56 | /* package level methods */ 57 | def repetition[T](elem: T, len: Int) = new Repetition(elem, len) 58 | } 59 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel.mutable 15 | 16 | import scala.collection.mutable.Growable 17 | import scala.collection.generic.Sizing 18 | import scala.collection.mutable.ArrayBuffer 19 | import scala.collection.parallel.Combiner 20 | 21 | /** Implements combining the contents of two combiners 22 | * by postponing the operation until the `result` method is called. It chains 23 | * the leaf results together instead of evaluating the actual collection.
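 *
 * For example, combining two lazy combiners whose chains hold buffers
 * `[b1, b2]` and `[b3]` simply concatenates the chains into `[b1, b2, b3]`;
 * elements are copied into the final collection only once `result()`
 * (that is, `allocateAndCopy`) is invoked.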
24 | * 25 | * @tparam Elem the type of the elements in the combiner 26 | * @tparam To the type of the collection the combiner produces 27 | * @tparam Buff the type of the buffers that contain leaf results and this combiner chains together 28 | */ 29 | trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combiner[Elem, To] { 30 | //self: scala.collection.parallel.EnvironmentPassingCombiner[Elem, To] => 31 | val chain: ArrayBuffer[Buff] 32 | val lastbuff = chain.last 33 | def addOne(elem: Elem) = { lastbuff += elem; this } 34 | def result(): To = allocateAndCopy 35 | def clear() = { chain.clear() } 36 | def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) { 37 | if (other.isInstanceOf[LazyCombiner[?, ?, ?]]) { 38 | val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]] 39 | newLazyCombiner(chain ++= that.chain) 40 | } else throw new UnsupportedOperationException("Cannot combine with combiner of different type.") 41 | } else this 42 | def size = chain.foldLeft(0)(_ + _.size) 43 | 44 | /** Method that allocates the data structure and copies elements into it using 45 | * `size` and `chain` members. 46 | */ 47 | def allocateAndCopy: To 48 | def newLazyCombiner(buffchain: ArrayBuffer[Buff]): LazyCombiner[Elem, To, Buff] 49 | } 50 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParFlatHashTable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.mutable 16 | 17 | import scala.collection.parallel.IterableSplitter 18 | 19 | /** Parallel flat hash table. 20 | * 21 | * @tparam T type of the elements in the $coll. 
22 | * @define coll table 23 | * @define Coll `ParFlatHashTable` 24 | * 25 | */ 26 | trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] { 27 | 28 | override def alwaysInitSizeMap = true 29 | 30 | abstract class ParFlatHashTableIterator(var idx: Int, val until: Int, val totalsize: Int) 31 | extends IterableSplitter[T] with SizeMapUtils { 32 | 33 | private[this] var traversed = 0 34 | private[this] val itertable = table 35 | 36 | if (hasNext) scan() 37 | 38 | private[this] def scan(): Unit = { 39 | while (itertable(idx) eq null) { 40 | idx += 1 41 | } 42 | } 43 | 44 | def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T] 45 | 46 | def remaining = totalsize - traversed 47 | def hasNext = traversed < totalsize 48 | def next() = if (hasNext) { 49 | val r = entryToElem(itertable(idx)) 50 | traversed += 1 51 | idx += 1 52 | if (hasNext) scan() 53 | r 54 | } else Iterator.empty.next() 55 | def dup = newIterator(idx, until, totalsize) 56 | def split = if (remaining > 1) { 57 | val divpt = (until + idx) / 2 58 | 59 | val fstidx = idx 60 | val fstuntil = divpt 61 | val fsttotal = calcNumElems(idx, divpt, itertable.length, sizeMapBucketSize) 62 | val fstit = newIterator(fstidx, fstuntil, fsttotal) 63 | 64 | val sndidx = divpt 65 | val snduntil = until 66 | val sndtotal = remaining - fsttotal 67 | val sndit = newIterator(sndidx, snduntil, sndtotal) 68 | 69 | scala.Seq(fstit, sndit) 70 | } else scala.Seq(this) 71 | 72 | import DebugUtils._ 73 | override def debugInformation = buildString { 74 | append => 75 | append("Parallel flat hash table iterator") 76 | append("---------------------------------") 77 | append("Traversed/total: " + traversed + " / " + totalsize) 78 | append("Table idx/until: " + idx + " / " + until) 79 | append("Table length: " + itertable.length) 80 | append("Table: ") 81 | append(arrayString(itertable, 0, itertable.length)) 82 | append("Sizemap: ") 83 | append(arrayString(sizemap, 0, sizemap.length)) 84 | } 85 | 86 | protected def countElems(from: Int, until: Int) = { 87 | var count = 0 88 | var i = from 89 | while (i < until) { 90 | if (itertable(i) ne null) count += 1 91 | i += 1 92 | } 93 | count 94 | } 95 | 96 | protected def countBucketSizes(frombucket: Int, untilbucket: Int) = { 97 | var count = 0 98 | var i = frombucket 99 | while (i < untilbucket) { 100 | count += sizemap(i) 101 | i += 1 102 | } 103 | count 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParHashTable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.mutable 16 | 17 | import scala.collection.mutable.HashEntry 18 | import scala.collection.parallel.IterableSplitter 19 | 20 | /** Provides functionality for hash tables with linked list buckets, 21 | * enriching the data structure by fulfilling certain requirements 22 | * for their parallel construction and iteration. 
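 *
 * In particular, the size map (kept initialized because `alwaysInitSizeMap`
 * is overridden to `true` below) records element counts per group of buckets,
 * letting a splitter estimate the size of a table slice without traversing it.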
23 | */ 24 | trait ParHashTable[K, V, Entry >: Null <: HashEntry[K, Entry]] extends scala.collection.mutable.HashTable[K, V, Entry] 25 | with WithContents[K, V, Entry] { 26 | 27 | override def alwaysInitSizeMap = true 28 | 29 | /** A parallel iterator returning all the entries. 30 | */ 31 | abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]] 32 | (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry) 33 | extends IterableSplitter[T] with SizeMapUtils { 34 | private val itertable = table 35 | private var traversed = 0 36 | scan() 37 | 38 | def entry2item(e: Entry): T 39 | def newIterator(idxFrom: Int, idxUntil: Int, totalSize: Int, es: Entry): IterRepr 40 | 41 | def hasNext = { 42 | es ne null 43 | } 44 | 45 | def next(): T = { 46 | val res = es 47 | es = es.next 48 | scan() 49 | traversed += 1 50 | entry2item(res) 51 | } 52 | 53 | def scan(): Unit = { 54 | while (es == null && idx < until) { 55 | es = itertable(idx).asInstanceOf[Entry] 56 | idx = idx + 1 57 | } 58 | } 59 | 60 | def remaining = totalsize - traversed 61 | 62 | private[parallel] override def debugInformation = { 63 | buildString { 64 | append => 65 | append("/--------------------\\") 66 | append("Parallel hash table entry iterator") 67 | append("total hash table elements: " + tableSize) 68 | append("pos: " + idx) 69 | append("until: " + until) 70 | append("traversed: " + traversed) 71 | append("totalsize: " + totalsize) 72 | append("current entry: " + es) 73 | append("underlying from " + idx + " until " + until) 74 | append(itertable.slice(idx, until).map(x => if (x != null) x.toString else "n/a").mkString(" | ")) 75 | append("\\--------------------/") 76 | } 77 | } 78 | 79 | def dup = newIterator(idx, until, totalsize, es) 80 | 81 | def split: scala.Seq[IterableSplitter[T]] = if (remaining > 1) { 82 | if (until > idx) { 83 | // there is at least one more slot for the next iterator 84 | // divide the rest of the table 85 | val divsz = (until - idx) / 2 86 | 87 | // second iterator params 88 | val sidx = idx + divsz + 1 // + 1 preserves iteration invariant 89 | val suntil = until 90 | val ses = itertable(sidx - 1).asInstanceOf[Entry] // sidx - 1 ensures counting from the right spot 91 | val stotal = calcNumElems(sidx - 1, suntil, table.length, sizeMapBucketSize) 92 | 93 | // first iterator params 94 | val fidx = idx 95 | val funtil = idx + divsz 96 | val fes = es 97 | val ftotal = totalsize - stotal 98 | 99 | scala.Seq( 100 | newIterator(fidx, funtil, ftotal, fes), 101 | newIterator(sidx, suntil, stotal, ses) 102 | ) 103 | } else { 104 | // otherwise, this is the last entry in the table - all that remains is the chain 105 | // so split the rest of the chain 106 | val arr = convertToArrayBuffer(es) 107 | val arrpit = new scala.collection.parallel.BufferSplitter[T](arr, 0, arr.length, signalDelegate) 108 | arrpit.split 109 | } 110 | } else scala.Seq(this.asInstanceOf[IterRepr]) 111 | 112 | private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = { 113 | val buff = mutable.ArrayBuffer[Entry]() 114 | var curr = chainhead 115 | while (curr ne null) { 116 | buff += curr 117 | curr = curr.next 118 | } 119 | // println("converted " + remaining + " element iterator into buffer: " + buff) 120 | buff map { e => entry2item(e) } 121 | } 122 | 123 | protected def countElems(from: Int, until: Int) = { 124 | var c = 0 125 | var idx = from 126 | var es: Entry = null 127 | while (idx < until) { 128 | es = itertable(idx).asInstanceOf[Entry] 129 | while (es ne null) {
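          // walk this bucket's chain, counting every entry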
130 | c += 1 131 | es = es.next 132 | } 133 | idx += 1 134 | } 135 | c 136 | } 137 | 138 | protected def countBucketSizes(fromBucket: Int, untilBucket: Int) = { 139 | var c = 0 140 | var idx = fromBucket 141 | while (idx < untilBucket) { 142 | c += sizemap(idx) 143 | idx += 1 144 | } 145 | c 146 | } 147 | } 148 | 149 | } 150 | 151 | trait WithContents[K, V, Entry >: Null <: HashEntry[K, Entry]] { this: scala.collection.mutable.HashTable[K, V, Entry] => 152 | 153 | protected def initWithContents(c: ParHashTable.Contents[K, Entry]) = { 154 | if (c != null) { 155 | _loadFactor = c.loadFactor 156 | table = c.table 157 | tableSize = c.tableSize 158 | threshold = c.threshold 159 | seedvalue = c.seedvalue 160 | sizemap = c.sizemap 161 | } 162 | if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild() 163 | } 164 | 165 | private[collection] def hashTableContents = new ParHashTable.Contents( 166 | _loadFactor, 167 | table, 168 | tableSize, 169 | threshold, 170 | seedvalue, 171 | sizemap 172 | ) 173 | } 174 | 175 | private[collection] object ParHashTable { 176 | class Contents[A, Entry >: Null <: HashEntry[A, Entry]]( 177 | val loadFactor: Int, 178 | val table: Array[HashEntry[A, Entry]], 179 | val tableSize: Int, 180 | val threshold: Int, 181 | val seedvalue: Int, 182 | val sizemap: Array[Int] 183 | ) { 184 | import scala.collection.DebugUtils._ 185 | private[collection] def debugInformation = buildString { 186 | append => 187 | append("Hash table contents") 188 | append("-------------------") 189 | append("Table: [" + arrayString(table, 0, table.length) + "]") 190 | append("Table size: " + tableSize) 191 | append("Load factor: " + loadFactor) 192 | append("Seedvalue: " + seedvalue) 193 | append("Threshold: " + threshold) 194 | append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]") 195 | } 196 | } 197 | } -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParIterable.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.mutable 16 | 17 | import scala.collection.generic._ 18 | import scala.collection.parallel.{ ParIterableLike, Combiner } 19 | 20 | /** A template trait for mutable parallel iterable collections. 
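 *
 * A hypothetical usage sketch; `ParArray` is one concrete implementation:
 * {{{
 * import scala.collection.parallel.mutable.ParArray
 *
 * val xs = ParArray(1, 2, 3, 4)
 * xs.map(_ * 2) // computed in parallel: ParArray(2, 4, 6, 8)
 * }}}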
21 | * 22 | * $paralleliterableinfo 23 | * 24 | * $sideeffects 25 | * 26 | * @tparam T the element type of the collection 27 | */ 28 | trait ParIterable[T] extends scala.collection.parallel.ParIterable[T] 29 | with GenericParTemplate[T, ParIterable] 30 | with ParIterableLike[T, ParIterable, ParIterable[T], Iterable[T]] { 31 | override def companion: GenericParCompanion[ParIterable] = ParIterable 32 | //protected[this] override def newBuilder = ParIterable.newBuilder[T] 33 | 34 | // if `mutable.ParIterableLike` is introduced, please move these methods there 35 | override def toIterable: ParIterable[T] = this 36 | 37 | override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T]) 38 | 39 | def seq: scala.collection.mutable.Iterable[T] 40 | } 41 | 42 | /** $factoryInfo 43 | */ 44 | object ParIterable extends ParFactory[ParIterable] { 45 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParIterable[S], T, ParIterable[T]] = new GenericCanCombineFrom[S, T] 46 | 47 | def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]() 48 | def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]() 49 | } 50 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParMap.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.mutable 16 | 17 | import scala.collection.generic._ 18 | import scala.collection.parallel.Combiner 19 | 20 | /** A template trait for mutable parallel maps. 21 | * 22 | * $sideeffects 23 | * 24 | * @tparam K the key type of the map 25 | * @tparam V the value type of the map 26 | */ 27 | trait ParMap[K, V] 28 | extends parallel.ParMap[K, V] 29 | with ParIterable[(K, V)] 30 | with GenericParMapTemplate[K, V, ParMap] 31 | with ParMapLike[K, V, ParMap, ParMap[K, V], mutable.Map[K, V]] 32 | { 33 | override def knownSize: Int = -1 34 | 35 | protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V] 36 | 37 | override def mapCompanion: GenericParMapCompanion[ParMap] = ParMap 38 | 39 | override def empty: ParMap[K, V] = new ParHashMap[K, V] 40 | 41 | def seq: scala.collection.mutable.Map[K, V] 42 | 43 | /** The same map with a given default function. 44 | * Note: `get`, `contains`, `iterator`, `keys`, etc. are not affected by `withDefault`. 45 | * 46 | * Invoking transformer methods (e.g. `map`) will not preserve the default value. 47 | * 48 | * @param d the function mapping keys to values, used for non-present keys 49 | * @return a wrapper of the map with a default value 50 | */ 51 | def withDefault(d: K => V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, d) 52 | 53 | /** The same map with a given default value. 54 | * 55 | * Invoking transformer methods (e.g. `map`) will not preserve the default value.
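 *
 * A small usage sketch with hypothetical values:
 * {{{
 * import scala.collection.parallel.mutable.ParHashMap
 *
 * val counts = ParHashMap("a" -> 1).withDefaultValue(0)
 * counts("a")       // 1
 * counts("missing") // 0, from the default value
 * }}}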
56 | * 57 | * @param d default value used for non-present keys 58 | * @return a wrapper of the map with a default value 59 | */ 60 | def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d) 61 | } 62 | 63 | object ParMap extends ParMapFactory[ParMap, scala.collection.mutable.Map] { 64 | def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V] 65 | 66 | def newCombiner[K, V]: Combiner[(K, V), ParMap[K, V]] = ParHashMapCombiner.apply[K, V] 67 | 68 | implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParMap[FromK, FromV], (K, V), ParMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V] 69 | 70 | class WithDefault[K, V](underlying: ParMap[K, V], d: K => V) 71 | extends scala.collection.parallel.ParMap.WithDefault(underlying, d) with ParMap[K, V] { 72 | override def knownSize = underlying.knownSize 73 | def addOne(kv: (K, V)) = {underlying += kv; this} 74 | def subtractOne(key: K) = {underlying -= key; this} 75 | override def empty = new WithDefault(underlying.empty, d) 76 | override def updated[U >: V](key: K, value: U): WithDefault[K, U] = new WithDefault[K, U](underlying.updated[U](key, value), d) 77 | override def + [U >: V](kv: (K, U)): WithDefault[K, U] = updated(kv._1, kv._2) 78 | override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) 79 | override def seq = underlying.seq.withDefault(d) 80 | def clear() = underlying.clear() 81 | def put(key: K, value: V): Option[V] = underlying.put(key, value) 82 | 83 | /** If these methods aren't overridden to thread through the underlying map, 84 | * successive calls to withDefault* have no effect. 85 | */ 86 | override def withDefault(d: K => V): ParMap[K, V] = new WithDefault[K, V](underlying, d) 87 | override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d) 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParMapLike.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | package mutable 16 | 17 | import scala.collection.mutable.Cloneable 18 | 19 | /** A template trait for mutable parallel maps. This trait is to be mixed in 20 | * with concrete parallel maps to override the representation type. 
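 *
 * Besides the parallel bulk operations, maps mixing in this trait also get the
 * destructive single-entry operations of mutable maps. A sketch with
 * hypothetical values:
 * {{{
 * val m = scala.collection.parallel.mutable.ParHashMap[String, Int]()
 * m.put("a", 1)   // None: there was no previous binding for "a"
 * m += ("b" -> 2) // Growable-style addition
 * m -= "a"        // Shrinkable-style removal
 * }}}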
21 | * 22 | * $sideeffects 23 | * 24 | * @tparam K the key type of the map 25 | * @tparam V the value type of the map 26 | * @define Coll `ParMap` 27 | * @define coll parallel map 28 | */ 29 | trait ParMapLike[K, 30 | V, 31 | +CC[X, Y] <: ParMap[X, Y], 32 | +Repr <: ParMapLike[K, V, ParMap, Repr, Sequential] with ParMap[K, V], 33 | +Sequential <: scala.collection.mutable.Map[K, V] with scala.collection.mutable.MapOps[K, V, scala.collection.mutable.Map, Sequential]] 34 | extends scala.collection.parallel.ParIterableLike[(K, V), ParIterable, Repr, Sequential] 35 | with scala.collection.parallel.ParMapLike[K, V, CC, Repr, Sequential] 36 | with scala.collection.mutable.Growable[(K, V)] 37 | with scala.collection.mutable.Shrinkable[K] 38 | with Cloneable[Repr] 39 | { 40 | // note: should not override toMap 41 | 42 | override def knownSize: Int = -1 43 | 44 | def put(key: K, value: V): Option[V] 45 | 46 | def +[U >: V](kv: (K, U)) = this.clone().asInstanceOf[CC[K, U]] += kv 47 | 48 | def -(key: K) = this.clone() -= key 49 | 50 | def clear(): Unit 51 | 52 | override def clone(): Repr = empty ++= this 53 | } 54 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParSeq.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel.mutable 15 | 16 | import scala.collection.generic.GenericParTemplate 17 | import scala.collection.generic.GenericParCompanion 18 | import scala.collection.generic.CanCombineFrom 19 | import scala.collection.generic.ParFactory 20 | import scala.collection.parallel.ParSeqLike 21 | import scala.collection.parallel.Combiner 22 | 23 | /** A mutable variant of `ParSeq`. 24 | * 25 | * @define Coll `mutable.ParSeq` 26 | * @define coll mutable parallel sequence 27 | */ 28 | trait ParSeq[T] extends ParIterable[T] 29 | with scala.collection.parallel.ParSeq[T] 30 | with GenericParTemplate[T, ParSeq] 31 | with ParSeqLike[T, ParSeq, ParSeq[T], scala.collection.mutable.Seq[T]] { 32 | self => 33 | override def companion: GenericParCompanion[ParSeq] = ParSeq 34 | //protected[this] override def newBuilder = ParSeq.newBuilder[T] 35 | 36 | def update(i: Int, elem: T): Unit 37 | 38 | def seq: scala.collection.mutable.Seq[T] 39 | 40 | override def toSeq: ParSeq[T] = this 41 | } 42 | 43 | 44 | /** $factoryInfo 45 | * @define Coll `mutable.ParSeq` 46 | * @define coll mutable parallel sequence 47 | */ 48 | object ParSeq extends ParFactory[ParSeq] { 49 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParSeq[S], T, ParSeq[T]] = new GenericCanCombineFrom[S, T] 50 | 51 | def newBuilder[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]() 52 | 53 | def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]() 54 | } 55 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParSet.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. 
dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel.mutable 15 | 16 | import scala.collection.generic._ 17 | import scala.collection.parallel.Combiner 18 | 19 | /** A mutable variant of `ParSet`. 20 | */ 21 | trait ParSet[T] 22 | extends ParIterable[T] 23 | with scala.collection.parallel.ParSet[T] 24 | with GenericParTemplate[T, ParSet] 25 | with ParSetLike[T, ParSet, ParSet[T], scala.collection.mutable.Set[T]] 26 | { 27 | self => 28 | override def knownSize: Int = -1 29 | override def companion: GenericParCompanion[ParSet] = ParSet 30 | override def empty: ParSet[T] = ParHashSet() 31 | def seq: scala.collection.mutable.Set[T] 32 | } 33 | 34 | 35 | /** $factoryInfo 36 | * @define Coll `mutable.ParSet` 37 | * @define coll mutable parallel set 38 | */ 39 | object ParSet extends ParSetFactory[ParSet] { 40 | implicit def canBuildFrom[S, T]: CanCombineFrom[ParSet[S], T, ParSet[T]] = new GenericCanCombineFrom[S, T] 41 | 42 | override def newBuilder[T]: Combiner[T, ParSet[T]] = ParHashSet.newBuilder 43 | 44 | override def newCombiner[T]: Combiner[T, ParSet[T]] = ParHashSet.newCombiner 45 | } 46 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParSetLike.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection 15 | package parallel.mutable 16 | 17 | import scala.collection.mutable.Cloneable 18 | import scala.collection.mutable.Growable 19 | import scala.collection.mutable.Shrinkable 20 | 21 | /** A template trait for mutable parallel sets. This trait is mixed in with concrete 22 | * parallel sets to override the representation type. 
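 *
 * In addition to the parallel set algebra, mutable parallel sets are
 * `Growable` and `Shrinkable`. A sketch with hypothetical values:
 * {{{
 * val s = scala.collection.parallel.mutable.ParHashSet(1, 2)
 * s += 3 // destructively adds an element
 * s -= 1 // destructively removes an element
 * s + 4  // returns an updated clone, leaving s unchanged
 * }}}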
23 | * 24 | * $sideeffects 25 | * 26 | * @tparam T the element type of the set 27 | * @define Coll `mutable.ParSet` 28 | * @define coll mutable parallel set 29 | */ 30 | trait ParSetLike[T, 31 | +CC[X] <: ParIterable[X], 32 | +Repr <: ParSetLike[T, CC, Repr, Sequential] with ParSet[T], 33 | +Sequential <: mutable.Set[T] with mutable.SetOps[T, mutable.Set, Sequential]] 34 | extends scala.collection.parallel.ParIterableLike[T, CC, Repr, Sequential] 35 | with scala.collection.parallel.ParSetLike[T, CC, Repr, Sequential] 36 | with Growable[T] 37 | with Shrinkable[T] 38 | with Cloneable[Repr] 39 | { 40 | self => 41 | override def knownSize: Int = -1 42 | 43 | override def empty: Repr 44 | 45 | def addOne(elem: T): this.type 46 | 47 | def subtractOne(elem: T): this.type 48 | 49 | def +(elem: T) = this.clone() += elem 50 | 51 | def -(elem: T) = this.clone() -= elem 52 | 53 | override def clone(): Repr = empty ++= this 54 | // note: should not override toSet 55 | } 56 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel.mutable 15 | 16 | import scala.collection.generic._ 17 | import scala.collection.parallel.Combiner 18 | import scala.collection.parallel.IterableSplitter 19 | import scala.collection.parallel.Task 20 | import scala.collection.concurrent.BasicNode 21 | import scala.collection.concurrent.TNode 22 | import scala.collection.concurrent.LNode 23 | import scala.collection.concurrent.CNode 24 | import scala.collection.concurrent.SNode 25 | import scala.collection.concurrent.INode 26 | import scala.collection.concurrent.TrieMap 27 | import scala.collection.concurrent.TrieMapIterator 28 | 29 | /** Parallel TrieMap collection. 30 | * 31 | * It has its bulk operations parallelized, but uses the snapshot operation 32 | * to create the splitter. This means that parallel bulk operations can be 33 | * called concurrently with the modifications. 34 | * 35 | * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_concurrent_tries Scala's Parallel Collections Library overview]] 36 | * section on `ParTrieMap` for more information. 
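 *
 * Because the splitter works on a read-only snapshot, a sketch like the
 * following is safe: the parallel traversal sees a consistent view even if
 * other threads update the map while it runs.
 * {{{
 * import scala.collection.parallel.mutable.ParTrieMap
 *
 * val m = ParTrieMap(1 -> "one", 2 -> "two")
 * // Other threads may call m.put(...) or m.remove(...) concurrently...
 * m.map { case (k, v) => (k, v.toUpperCase) } // ...while this traverses the snapshot.
 * }}}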
37 | */ 38 | final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V]) 39 | extends ParMap[K, V] 40 | with GenericParMapTemplate[K, V, ParTrieMap] 41 | with ParMapLike[K, V, ParTrieMap, ParTrieMap[K, V], TrieMap[K, V]] 42 | with ParTrieMapCombiner[K, V] 43 | with Serializable 44 | { 45 | def this() = this(new TrieMap) 46 | 47 | override def mapCompanion: GenericParMapCompanion[ParTrieMap] = ParTrieMap 48 | 49 | override def empty: ParTrieMap[K, V] = ParTrieMap.empty 50 | 51 | protected[this] override def newCombiner = ParTrieMap.newCombiner 52 | 53 | override def seq = ctrie 54 | 55 | def splitter = new ParTrieMapSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[TrieMap[K, V]], mustInit = true) 56 | 57 | override def clear() = ctrie.clear() 58 | 59 | def result() = this 60 | 61 | def get(key: K): Option[V] = ctrie.get(key) 62 | 63 | def put(key: K, value: V): Option[V] = ctrie.put(key, value) 64 | 65 | def update(key: K, value: V): Unit = ctrie.update(key, value) 66 | 67 | def remove(key: K): Option[V] = ctrie.remove(key) 68 | 69 | def addOne(kv: (K, V)): this.type = { 70 | ctrie.+=(kv) 71 | this 72 | } 73 | 74 | def subtractOne(key: K): this.type = { 75 | ctrie.-=(key) 76 | this 77 | } 78 | 79 | override def size = { 80 | val in = ctrie.readRoot() 81 | val r = in.gcasRead(ctrie) 82 | (r: @unchecked) match { 83 | case tn: TNode[?, ?] => tn.cachedSize(ctrie) 84 | case ln: LNode[?, ?] => ln.cachedSize(ctrie) 85 | case cn: CNode[?, ?] => 86 | tasksupport.executeAndWaitResult(new Size(0, cn.array.length, cn.array)) 87 | cn.cachedSize(ctrie) 88 | } 89 | } 90 | 91 | override def knownSize = -1 92 | 93 | override def stringPrefix = "ParTrieMap" 94 | 95 | /* tasks */ 96 | 97 | /** Computes TrieMap size in parallel. */ 98 | class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] { 99 | var result = -1 100 | def leaf(prev: Option[Int]) = { 101 | var sz = 0 102 | var i = offset 103 | val until = offset + howmany 104 | while (i < until) { 105 | (array(i): @unchecked) match { 106 | case sn: SNode[?, ?] 
=> sz += 1 107 | case in: INode[K @unchecked, V @unchecked] => sz += in.cachedSize(ctrie) 108 | } 109 | i += 1 110 | } 111 | result = sz 112 | } 113 | def split = { 114 | val fp = howmany / 2 115 | Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array)) 116 | } 117 | def shouldSplitFurther = howmany > 1 118 | override def merge(that: Size) = result = result + that.result 119 | } 120 | } 121 | 122 | private[collection] class ParTrieMapSplitter[K, V](lev: Int, ct: TrieMap[K, V], mustInit: Boolean) 123 | extends TrieMapIterator[K, V](lev, ct, mustInit) 124 | with IterableSplitter[(K, V)] 125 | { 126 | // only evaluated if `remaining` is invoked (which is not used by most tasks) 127 | lazy val totalsize = new ParTrieMap(ct).size 128 | var iterated = 0 129 | 130 | protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): ParTrieMapSplitter[K, V] = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit) 131 | 132 | override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = { 133 | val maxsplits = 3 + Integer.highestOneBit(parallelismLevel) 134 | level < maxsplits 135 | } 136 | 137 | def dup = { 138 | val it = newIterator(0, ct, _mustInit = false) 139 | dupTo(it) 140 | it.iterated = this.iterated 141 | it 142 | } 143 | 144 | override def next() = { 145 | iterated += 1 146 | super.next() 147 | } 148 | 149 | def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]] 150 | 151 | override def isRemainingCheap = false 152 | 153 | def remaining: Int = totalsize - iterated 154 | } 155 | 156 | /** Only used within the `ParTrieMap`. */ 157 | private[mutable] trait ParTrieMapCombiner[K, V] extends Combiner[(K, V), ParTrieMap[K, V]] { 158 | 159 | def combine[N <: (K, V), NewTo >: ParTrieMap[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = 160 | if (this eq other) this 161 | else throw new UnsupportedOperationException("This shouldn't have been called in the first place.") 162 | 163 | override def canBeShared = true 164 | } 165 | 166 | object ParTrieMap extends ParMapFactory[ParTrieMap, TrieMap] { 167 | def empty[K, V]: ParTrieMap[K, V] = new ParTrieMap[K, V] 168 | def newCombiner[K, V]: Combiner[(K, V), ParTrieMap[K, V]] = new ParTrieMap[K, V] 169 | 170 | implicit def canBuildFrom[FromK, FromV, K, V]: CanCombineFrom[ParTrieMap[FromK, FromV], (K, V), ParTrieMap[K, V]] = new CanCombineFromMap[FromK, FromV, K, V] 171 | } 172 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel.mutable 15 | 16 | import scala.collection.mutable.ArraySeq 17 | import scala.collection.mutable.ArrayBuffer 18 | import scala.collection.parallel.Task 19 | 20 | /** An array combiner that uses a chain of arraybuffers to store elements. 
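 *
 * Appends go to the last buffer in the chain; `combine` (see `LazyCombiner`)
 * concatenates the chains of the two combiners rather than copying elements.
 * Only `allocateAndCopy` below flattens the chain into a single array, in
 * parallel when the chain holds more than one buffer.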
*/ 21 | trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedArrayBuffer[T]] { 22 | 23 | override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz) 24 | 25 | // public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden. 26 | final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c) 27 | 28 | def allocateAndCopy = if (chain.size > 1) { 29 | val array = new Array[Any](size) 30 | val arrayseq = ArraySeq.make(array).asInstanceOf[ArraySeq[T]] 31 | 32 | combinerTaskSupport.executeAndWaitResult(new CopyChainToArray(array, 0, size)) 33 | 34 | new ParArray(arrayseq) 35 | } else { // optimisation if there is only 1 array 36 | new ParArray(ArraySeq.make(chain(0).internalArray).asInstanceOf[ArraySeq[T]], size) 37 | } 38 | 39 | override def toString = "ResizableParArrayCombiner(" + size + "): " //+ chain 40 | 41 | /* tasks */ 42 | 43 | class CopyChainToArray(array: Array[Any], offset: Int, howmany: Int) extends Task[Unit, CopyChainToArray] { 44 | var result = () 45 | def leaf(prev: Option[Unit]) = if (howmany > 0) { 46 | var totalleft = howmany 47 | val (stbuff, stind) = findStart(offset) 48 | var buffind = stbuff 49 | var ind = stind 50 | var arrayIndex = offset 51 | while (totalleft > 0) { 52 | val currbuff = chain(buffind) 53 | val chunksize = if (totalleft < (currbuff.size - ind)) totalleft else currbuff.size - ind 54 | val until = ind + chunksize 55 | 56 | copyChunk(currbuff.internalArray, ind, array, arrayIndex, until) 57 | arrayIndex += chunksize 58 | ind += chunksize 59 | 60 | totalleft -= chunksize 61 | buffind += 1 62 | ind = 0 63 | } 64 | } 65 | private def copyChunk(buffarr: Array[AnyRef], buffStart: Int, ra: Array[Any], arrayStart: Int, until: Int): Unit = { 66 | Array.copy(buffarr, buffStart, ra, arrayStart, until - buffStart) 67 | } 68 | private def findStart(pos: Int) = { 69 | var left = pos 70 | var buffind = 0 71 | while (left >= chain(buffind).size) { 72 | left -= chain(buffind).size 73 | buffind += 1 74 | } 75 | (buffind, left) 76 | } 77 | def split = { 78 | val fp = howmany / 2 79 | List(new CopyChainToArray(array, offset, fp), new CopyChainToArray(array, offset + fp, howmany - fp)) 80 | } 81 | def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) 82 | } 83 | } 84 | 85 | object ResizableParArrayCombiner { 86 | def apply[T](c: ArrayBuffer[ExposedArrayBuffer[T]]): ResizableParArrayCombiner[T] = { 87 | class ResizableParArrayCombinerC[A](val chain: ArrayBuffer[ExposedArrayBuffer[A]]) extends ResizableParArrayCombiner[A] // was: with EnvironmentPassingCombiner[T, ParArray[T]] 88 | new ResizableParArrayCombinerC[T](c) 89 | } 90 | def apply[T](): ResizableParArrayCombiner[T] = apply(new ArrayBuffer[ExposedArrayBuffer[T]] += new ExposedArrayBuffer[T]) 91 | } 92 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala 14 | package collection.parallel.mutable 15 | 16 | import scala.collection.mutable.ArraySeq 17 | import scala.collection.mutable.DoublingUnrolledBuffer 18 | import scala.collection.mutable.UnrolledBuffer.Unrolled 19 | import scala.collection.parallel.Combiner 20 | import scala.collection.parallel.Task 21 | 22 | /** An array combiner that uses doubling unrolled buffers to store elements. */ 23 | trait UnrolledParArrayCombiner[T] 24 | extends Combiner[T, ParArray[T]] { 25 | //self: EnvironmentPassingCombiner[T, ParArray[T]] => 26 | // because size is doubling, random access is O(logn)! 27 | val buff = new DoublingUnrolledBuffer[Any] 28 | 29 | def addOne(elem: T) = { 30 | buff += elem 31 | this 32 | } 33 | 34 | def result() = { 35 | val array = new Array[Any](size) 36 | val arrayseq = ArraySeq.make(array).asInstanceOf[ArraySeq[T]] 37 | 38 | combinerTaskSupport.executeAndWaitResult(new CopyUnrolledToArray(array, 0, size)) 39 | 40 | new ParArray(arrayseq) 41 | } 42 | 43 | def clear(): Unit = { 44 | buff.clear() 45 | } 46 | 47 | override def sizeHint(sz: Int) = { 48 | buff.lastPtr.next = new Unrolled(0, new Array[Any](sz), null, buff) 49 | buff.lastPtr = buff.lastPtr.next 50 | } 51 | 52 | def combine[N <: T, NewTo >: ParArray[T]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = other match { 53 | case that if that eq this => this // just return this 54 | case that: UnrolledParArrayCombiner[t] => 55 | buff concat that.buff 56 | this 57 | case _ => throw new UnsupportedOperationException("Cannot combine with combiner of different type.") 58 | } 59 | 60 | def size = buff.size 61 | 62 | /* tasks */ 63 | 64 | class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int) 65 | extends Task[Unit, CopyUnrolledToArray] { 66 | var result = () 67 | 68 | def leaf(prev: Option[Unit]) = if (howmany > 0) { 69 | var totalleft = howmany 70 | val (startnode, startpos) = findStart(offset) 71 | var curr = startnode 72 | var pos = startpos 73 | var arroffset = offset 74 | while (totalleft > 0) { 75 | val lefthere = scala.math.min(totalleft, curr.size - pos) 76 | Array.copy(curr.array, pos, array, arroffset, lefthere) 77 | // println("from: " + arroffset + " elems " + lefthere + " - " + pos + ", " + curr + " -> " + array.toList + " by " + this + " !! 
" + buff.headPtr) 78 | totalleft -= lefthere 79 | arroffset += lefthere 80 | pos = 0 81 | curr = curr.next 82 | } 83 | } 84 | private def findStart(pos: Int) = { 85 | var left = pos 86 | var node = buff.headPtr 87 | while ((left - node.size) >= 0) { 88 | left -= node.size 89 | node = node.next 90 | } 91 | (node, left) 92 | } 93 | def split = { 94 | val fp = howmany / 2 95 | List(new CopyUnrolledToArray(array, offset, fp), new CopyUnrolledToArray(array, offset + fp, howmany - fp)) 96 | } 97 | def shouldSplitFurther = howmany > scala.collection.parallel.thresholdFromSize(size, combinerTaskSupport.parallelismLevel) 98 | override def toString = "CopyUnrolledToArray(" + offset + ", " + howmany + ")" 99 | } 100 | } 101 | 102 | object UnrolledParArrayCombiner { 103 | def apply[T](): UnrolledParArrayCombiner[T] = new UnrolledParArrayCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParArray[T]] 104 | } 105 | 106 | -------------------------------------------------------------------------------- /core/src/main/scala/scala/collection/parallel/mutable/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | package collection.parallel 15 | 16 | import scala.collection.mutable.ArrayBuffer 17 | import scala.collection.generic.Sizing 18 | 19 | package object mutable { 20 | /* aliases */ 21 | type ParArrayCombiner[T] = ResizableParArrayCombiner[T] 22 | val ParArrayCombiner = ResizableParArrayCombiner 23 | } 24 | 25 | package mutable { 26 | /* classes and traits */ 27 | private[mutable] trait SizeMapUtils { 28 | 29 | protected def calcNumElems(from: Int, until: Int, tableLength: Int, sizeMapBucketSize: Int) = { 30 | // find the first bucket 31 | val fbindex = from / sizeMapBucketSize 32 | 33 | // find the last bucket 34 | val lbindex = until / sizeMapBucketSize 35 | // note to self: FYI if you define lbindex as from / sizeMapBucketSize, the first branch 36 | // below always triggers and tests pass, so you spend a great day benchmarking and profiling 37 | 38 | if (fbindex == lbindex) { 39 | // if first and last are the same, just count between `from` and `until` 40 | // return this count 41 | countElems(from, until) 42 | } else { 43 | // otherwise count in first, then count in last 44 | val fbuntil = ((fbindex + 1) * sizeMapBucketSize) min tableLength 45 | val fbcount = countElems(from, fbuntil) 46 | val lbstart = lbindex * sizeMapBucketSize 47 | val lbcount = countElems(lbstart, until) 48 | 49 | // and finally count the elements in all the buckets between first and last using a sizemap 50 | val inbetween = countBucketSizes(fbindex + 1, lbindex) 51 | 52 | // return the sum 53 | fbcount + inbetween + lbcount 54 | } 55 | } 56 | 57 | protected def countElems(from: Int, until: Int): Int 58 | 59 | protected def countBucketSizes(fromBucket: Int, untilBucket: Int): Int 60 | } 61 | 62 | /* hack-arounds */ 63 | private[mutable] class ExposedArrayBuffer[T] extends ArrayBuffer[T] with Sizing { 64 | def internalArray = array 65 | def setInternalSize(s: Int) = size0 = s 66 | override def sizeHint(len: Int) = { 67 | if (len > size && len >= 1) { 68 | val newarray = new Array[AnyRef](len) 69 | Array.copy(array, 0, 
newarray, 0, size0) 70 | array = newarray 71 | } 72 | } 73 | } 74 | 75 | } 76 | -------------------------------------------------------------------------------- /junit/src/test/scala-2/scala/SerializationStabilityTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | 15 | import javax.xml.bind.DatatypeConverter._ 16 | import java.nio.file.{ Path, Paths, Files } 17 | import org.junit.Test 18 | 19 | // This test is self-modifying when run as follows: 20 | // 21 | // junit/testOnly scala.SerializationStabilityTest -- -Doverwrite.source=src/test/scala-2/scala/SerializationStabilityTest.scala 22 | // 23 | // Use this to re-establish a baseline for serialization compatibility. 24 | 25 | // based on run/t8549.scala partest 26 | object SerializationStability extends App with SerializationStabilityBase { 27 | 28 | def check[T <: AnyRef](instance: => T)(prevResult: String, f: T => AnyRef = (x: T) => x): Unit = { 29 | val result = serialize(instance) 30 | overwrite match { 31 | case Some(f) => 32 | val lineNumberOfLiteralString = Thread.currentThread.getStackTrace.apply(2).getLineNumber 33 | patch(f, lineNumberOfLiteralString, prevResult, result) 34 | case None => 35 | checkRoundTrip(instance)(f) 36 | assert(f(deserialize(prevResult).asInstanceOf[T]) == f(instance), s"$instance != f(deserialize(prevResult))") 37 | assert(prevResult == result, s"instance = $instance : ${instance.getClass}\n serialization unstable: ${prevResult}\n found: ${result}") 38 | } 39 | } 40 | 41 | // Generated on 20220711-14:22:40 with Scala version 2.13.8) 42 | overwrite.foreach(updateComment) 43 | 44 | // check(new collection.concurrent.TrieMap[Any, Any]())( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmNvbmN1cnJlbnQuVHJpZU1hcKckxpgOIYHPAwAETAALZXF1YWxpdHlvYmp0ABJMc2NhbGEvbWF0aC9FcXVpdjtMAApoYXNoaW5nb2JqdAAcTHNjYWxhL3V0aWwvaGFzaGluZy9IYXNoaW5nO0wABHJvb3R0ABJMamF2YS9sYW5nL09iamVjdDtMAAtyb290dXBkYXRlcnQAOUxqYXZhL3V0aWwvY29uY3VycmVudC9hdG9taWMvQXRvbWljUmVmZXJlbmNlRmllbGRVcGRhdGVyO3hwc3IAMnNjYWxhLmNvbGxlY3Rpb24uY29uY3VycmVudC5UcmllTWFwJE1hbmdsZWRIYXNoaW5nhTBoJQ/mgb0CAAB4cHNyABhzY2FsYS5tYXRoLkVxdWl2JCRhbm9uJDLBbyx4dy/qGwIAAHhwc3IANHNjYWxhLmNvbGxlY3Rpb24uY29uY3VycmVudC5UcmllTWFwU2VyaWFsaXphdGlvbkVuZCSbjdgbbGCt2gIAAHhweA==") 45 | // not sure why this one needs stable serialization. 
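// In the normal mode, each `check` call below round-trips the instance and
// compares its serialized form byte-for-byte against the recorded Base64
// baseline; with -Doverwrite.source set, it instead locates the calling line
// via the stack trace and patches the baseline literal in place (see `patch`
// in SerializationStabilityBase).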
46 | 47 | import collection.parallel 48 | check(parallel.immutable.ParHashMap(1 -> 2))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoTWFwAAAAAAAAAAMCAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJ0xzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9PbGRIYXNoTWFwO3hwcHBzcgAxc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaE1hcCRPbGRIYXNoTWFwMS7zjpoZzkeMAgAESQAEaGFzaEwAA2tleXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wAAmt2dAAOTHNjYWxhL1R1cGxlMjtMAAV2YWx1ZXEAfgAGeHIAJXNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLk9sZEhhc2hNYXDeVobyra7aQwIAAHhw/4PO53NyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3IADHNjYWxhLlR1cGxlMgH73c0i5zR6AgACTAACXzFxAH4ABkwAAl8ycQB+AAZ4cHEAfgAMc3EAfgAKAAAAAnEAfgAP") 49 | check(parallel.immutable.ParHashSet(1, 2, 3))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoU2V0AAAAAAAAAAECAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJ0xzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9PbGRIYXNoU2V0O3hwcHBzcgAxc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaFNldCRIYXNoVHJpZVNldOJNxSausJmiAgADSQAGYml0bWFwSQAFc2l6ZTBbAAVlbGVtc3QAKFtMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvT2xkSGFzaFNldDt4cgAlc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaFNldBh++KjntXtjAgAAeHAAEECAAAAAA3VyAChbTHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLk9sZEhhc2hTZXQ7CAD6Mv3ASA4CAAB4cAAAAANzcgAxc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaFNldCRPbGRIYXNoU2V0MR1AgCzq7c4OAgACSQAEaGFzaEwAA2tleXQAEkxqYXZhL2xhbmcvT2JqZWN0O3hyADRzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5PbGRIYXNoU2V0JExlYWZPbGRIYXNoU2V0XYS37WNapt0CAAB4cQB+AAf/g87nc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAv/g6zOc3EAfgAPAAAAAnNxAH4AC/+DitRzcQB+AA8AAAAD") 50 | // TODO SI-8576 Uninitialized field under -Xcheckinit 51 | // check(new parallel.immutable.ParRange(new Range(0, 1, 2)))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJSYW5nZQAAAAAAAAABAgAETAAXUGFyUmFuZ2VJdGVyYXRvciRtb2R1bGV0AEBMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9pbW11dGFibGUvUGFyUmFuZ2UkUGFyUmFuZ2VJdGVyYXRvciQ7TAAPU2NhbkxlYWYkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvUGFySXRlcmFibGVMaWtlJFNjYW5MZWFmJDtMAA9TY2FuTm9kZSRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2Nhbk5vZGUkO0wABXJhbmdldAAiTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL1JhbmdlO3hwcHBwc3IAIHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLlJhbmdlabujVKsVMg0CAAdJAANlbmRaAAdpc0VtcHR5SQALbGFzdEVsZW1lbnRJABBudW1SYW5nZUVsZW1lbnRzSQAFc3RhcnRJAARzdGVwSQAPdGVybWluYWxFbGVtZW50eHAAAAABAAAAAAAAAAABAAAAAAAAAAIAAAAC") 52 | // TODO SI-8576 unstable under -Xcheckinit 53 | // check(parallel.mutable.ParArray(1, 2, 3))( 
"rO0ABXNyACpzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFyQXJyYXkAAAAAAAAAAQMABEwAF1BhckFycmF5SXRlcmF0b3IkbW9kdWxldAA+THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvbXV0YWJsZS9QYXJBcnJheSRQYXJBcnJheUl0ZXJhdG9yJDtMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAIYXJyYXlzZXF0ACNMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL0FycmF5U2VxO3hwcHBwc3IAMXNjYWxhLmNvbGxlY3Rpb24ucGFyYWxsZWwubXV0YWJsZS5FeHBvc2VkQXJyYXlTZXGx2OTefAodSQIAAkkABmxlbmd0aFsABWFycmF5dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTZXEVPD3SKEkOcwIAAkkABmxlbmd0aFsABWFycmF5cQB+AAd4cAAAAAN1cgATW0xqYXZhLmxhbmcuT2JqZWN0O5DOWJ8QcylsAgAAeHAAAAADcHBwAAAAA3VxAH4ACgAAABBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ADQAAAAJzcQB+AA0AAAADcHBwcHBwcHBwcHBwcHg=") 54 | check(parallel.mutable.ParHashMap(1 -> 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaE1hcAAAAAAAAAADAwAISQALX2xvYWRGYWN0b3JJAAlzZWVkdmFsdWVJAAl0YWJsZVNpemVJAAl0aHJlc2hvbGRMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7WwAHc2l6ZW1hcHQAAltJWwAFdGFibGV0ACVbTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9IYXNoRW50cnk7eHB3DQAAAu4AAAABAAAABAFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABgAAAAJ4") 55 | check(parallel.mutable.ParHashSet(1, 2, 3))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaFNldAAAAAAAAAABAwAISQALX2xvYWRGYWN0b3JJAAlzZWVkdmFsdWVJAAl0YWJsZVNpemVJAAl0aHJlc2hvbGRMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7WwAHc2l6ZW1hcHQAAltJWwAFdGFibGV0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB3DQAAAcIAAAADAAAAGwFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABgAAAAJzcQB+AAYAAAADeA==") 56 | } 57 | 58 | class SerializationStabilityTest { 59 | @Test 60 | def testAll: Unit = SerializationStability.main(new Array[String](0)) 61 | } 62 | -------------------------------------------------------------------------------- /junit/src/test/scala-3/scala/SerializationStabilityTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | 15 | import javax.xml.bind.DatatypeConverter._ 16 | import java.nio.file.{ Path, Paths, Files } 17 | import org.junit.Test 18 | 19 | // This test is self-modifying when run as follows: 20 | // 21 | // junit/testOnly scala.SerializationStabilityTest -- -Doverwrite.source=src/test/scala-3/scala/SerializationStabilityTest.scala 22 | // 23 | // Use this to re-establish a baseline for serialization compatibility. 
24 | 25 | // based on run/t8549.scala partest 26 | object SerializationStability extends App with SerializationStabilityBase { 27 | 28 | def check[T <: AnyRef](instance: => T)(prevResult: String, f: T => AnyRef = (x: T) => x): Unit = { 29 | val result = serialize(instance) 30 | overwrite match { 31 | case Some(f) => 32 | val lineNumberOfLiteralString = Thread.currentThread.getStackTrace.apply(2).getLineNumber 33 | patch(f, lineNumberOfLiteralString, prevResult, result) 34 | case None => 35 | checkRoundTrip(instance)(f) 36 | assert(f(deserialize(prevResult).asInstanceOf[T]) == f(instance), s"$instance != f(deserialize(prevResult))") 37 | assert(prevResult == result, s"instance = $instance : ${instance.getClass}\n serialization unstable: ${prevResult}\n found: ${result}") 38 | } 39 | } 40 | 41 | // Generated on 20230524-12:12:24 with Scala version 2.13.10) 42 | overwrite.foreach(updateComment) 43 | 44 | // check(new collection.concurrent.TrieMap[Any, Any]())( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmNvbmN1cnJlbnQuVHJpZU1hcKckxpgOIYHPAwAETAALZXF1YWxpdHlvYmp0ABJMc2NhbGEvbWF0aC9FcXVpdjtMAApoYXNoaW5nb2JqdAAcTHNjYWxhL3V0aWwvaGFzaGluZy9IYXNoaW5nO0wABHJvb3R0ABJMamF2YS9sYW5nL09iamVjdDtMAAtyb290dXBkYXRlcnQAOUxqYXZhL3V0aWwvY29uY3VycmVudC9hdG9taWMvQXRvbWljUmVmZXJlbmNlRmllbGRVcGRhdGVyO3hwc3IAMnNjYWxhLmNvbGxlY3Rpb24uY29uY3VycmVudC5UcmllTWFwJE1hbmdsZWRIYXNoaW5nhTBoJQ/mgb0CAAB4cHNyABhzY2FsYS5tYXRoLkVxdWl2JCRhbm9uJDLBbyx4dy/qGwIAAHhwc3IANHNjYWxhLmNvbGxlY3Rpb24uY29uY3VycmVudC5UcmllTWFwU2VyaWFsaXphdGlvbkVuZCSbjdgbbGCt2gIAAHhweA==") 45 | // not sure why this one needs stable serialization. 46 | 47 | import collection.parallel 48 | check(parallel.immutable.ParHashMap(1 -> 2))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoTWFwAAAAAAAAAAMCAANMAA1TY2FuTGVhZiRsenkxdAASTGphdmEvbGFuZy9PYmplY3Q7TAANU2Nhbk5vZGUkbHp5MXEAfgABTAAEdHJpZXQAJ0xzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9PbGRIYXNoTWFwO3hwcHBzcgAxc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaE1hcCRPbGRIYXNoTWFwMS7zjpoZzkeMAgAESQAEaGFzaEwAA2tleXEAfgABTAACa3Z0AA5Mc2NhbGEvVHVwbGUyO0wABXZhbHVlcQB+AAF4cgAlc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaE1hcN5WhvKtrtpDAgAAeHD/g87nc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcgAMc2NhbGEuVHVwbGUyAfvdzSLnNHoCAAJMAAJfMXEAfgABTAACXzJxAH4AAXhwcQB+AApzcQB+AAgAAAACcQB+AA0=") 49 | check(parallel.immutable.ParHashSet(1, 2, 3))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoU2V0AAAAAAAAAAECAANMAA1TY2FuTGVhZiRsenkxdAASTGphdmEvbGFuZy9PYmplY3Q7TAANU2Nhbk5vZGUkbHp5MXEAfgABTAAEdHJpZXQAJ0xzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9PbGRIYXNoU2V0O3hwcHBzcgAxc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaFNldCRIYXNoVHJpZVNldOJNxSausJmiAgADSQAGYml0bWFwSQAFc2l6ZTBbAAVlbGVtc3QAKFtMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvT2xkSGFzaFNldDt4cgAlc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaFNldFjbkJLVO+YWAgAAeHAAEECAAAAAA3VyAChbTHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLk9sZEhhc2hTZXQ7CAD6Mv3ASA4CAAB4cAAAAANzcgAxc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuT2xkSGFzaFNldCRPbGRIYXNoU2V0MR1AgCzq7c4OAgACSQAEaGFzaEwAA2tleXEAfgABeHIANHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLk9sZEhhc2hTZXQkTGVhZk9sZEhhc2hTZXRdhLftY1qm3QIAAHhxAH4ABv+DzudzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ACv+DrM5zcQB+AA0AAAACc3EAfgAK/4OK1HNxAH4ADQAAAAM=") 50 | // TODO SI-8576 Uninitialized field under -Xcheckinit 51 | // check(new parallel.immutable.ParRange(new Range(0, 1, 2)))( 
"rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJSYW5nZQAAAAAAAAABAgAETAAXUGFyUmFuZ2VJdGVyYXRvciRtb2R1bGV0AEBMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9pbW11dGFibGUvUGFyUmFuZ2UkUGFyUmFuZ2VJdGVyYXRvciQ7TAAPU2NhbkxlYWYkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvUGFySXRlcmFibGVMaWtlJFNjYW5MZWFmJDtMAA9TY2FuTm9kZSRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2Nhbk5vZGUkO0wABXJhbmdldAAiTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL1JhbmdlO3hwcHBwc3IAIHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLlJhbmdlabujVKsVMg0CAAdJAANlbmRaAAdpc0VtcHR5SQALbGFzdEVsZW1lbnRJABBudW1SYW5nZUVsZW1lbnRzSQAFc3RhcnRJAARzdGVwSQAPdGVybWluYWxFbGVtZW50eHAAAAABAAAAAAAAAAABAAAAAAAAAAIAAAAC") 52 | // TODO SI-8576 unstable under -Xcheckinit 53 | // check(parallel.mutable.ParArray(1, 2, 3))( "rO0ABXNyACpzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFyQXJyYXkAAAAAAAAAAQMABEwAF1BhckFycmF5SXRlcmF0b3IkbW9kdWxldAA+THNjYWxhL2NvbGxlY3Rpb24vcGFyYWxsZWwvbXV0YWJsZS9QYXJBcnJheSRQYXJBcnJheUl0ZXJhdG9yJDtMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAIYXJyYXlzZXF0ACNMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL0FycmF5U2VxO3hwcHBwc3IAMXNjYWxhLmNvbGxlY3Rpb24ucGFyYWxsZWwubXV0YWJsZS5FeHBvc2VkQXJyYXlTZXGx2OTefAodSQIAAkkABmxlbmd0aFsABWFycmF5dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTZXEVPD3SKEkOcwIAAkkABmxlbmd0aFsABWFycmF5cQB+AAd4cAAAAAN1cgATW0xqYXZhLmxhbmcuT2JqZWN0O5DOWJ8QcylsAgAAeHAAAAADcHBwAAAAA3VxAH4ACgAAABBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ADQAAAAJzcQB+AA0AAAADcHBwcHBwcHBwcHBwcHg=") 54 | check(parallel.mutable.ParHashMap(1 -> 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaE1hcAAAAAAAAAADAwAISQALX2xvYWRGYWN0b3JJAAlzZWVkdmFsdWVJAAl0YWJsZVNpemVJAAl0aHJlc2hvbGRMAA1TY2FuTGVhZiRsenkxdAASTGphdmEvbGFuZy9PYmplY3Q7TAANU2Nhbk5vZGUkbHp5MXEAfgABWwAHc2l6ZW1hcHQAAltJWwAFdGFibGV0ACVbTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9IYXNoRW50cnk7eHB3DQAAAu4AAAABAAAABAFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABQAAAAJ4") 55 | check(parallel.mutable.ParHashSet(1, 2, 3))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLm11dGFibGUuUGFySGFzaFNldAAAAAAAAAABAwAISQALX2xvYWRGYWN0b3JJAAlzZWVkdmFsdWVJAAl0YWJsZVNpemVJAAl0aHJlc2hvbGRMAA1TY2FuTGVhZiRsenkxdAASTGphdmEvbGFuZy9PYmplY3Q7TAANU2Nhbk5vZGUkbHp5MXEAfgABWwAHc2l6ZW1hcHQAAltJWwAFdGFibGV0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB3DQAAAcIAAAADAAAAGwFzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABQAAAAJzcQB+AAUAAAADeA==") 56 | } 57 | 58 | class SerializationStabilityTest { 59 | @Test 60 | def testAll: Unit = SerializationStability.main(new Array[String](0)) 61 | } -------------------------------------------------------------------------------- /junit/src/test/scala/MiscTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | import collection._ 14 | import scala.collection.parallel.CollectionConverters._ 15 | import org.junit.Test 16 | import org.junit.Assert._ 17 | 18 | import scala.collection.parallel.ParSeq 19 | 20 | class MiscTest { 21 | @Test 22 | def si4459: Unit = { 23 | for (i <- 0 until 2000) { 24 | foo((0 until 10000).toSeq.par) 25 | } 26 | } 27 | 28 | def foo(arg: ParSeq[?]): String = arg.map(x => x).mkString(",") 29 | 30 | @Test 31 | def si4608: Unit = { 32 | ((1 to 100) sliding 10).toList.par.map{_.map{i => i * i}}.flatten 33 | } 34 | 35 | @Test 36 | def si4761: Unit = { 37 | val gs = for (x <- (1 to 5)) yield { if (x % 2 == 0) List(1) else List(1).par } 38 | assertEquals("Vector(1, 1, 1, 1, 1)", gs.flatten.toString) 39 | // Commented because `transpose` requires its argument to be convertible to an `Iterable` whereas 40 | // we only have an `IterableOnce` 41 | // assertEquals("Vector(Vector(1, 1, 1, 1, 1))", gs.transpose.toString) 42 | 43 | val s = LazyList(Vector(1).par, Vector(2).par) 44 | assertEquals("List(1, 2)", s.flatten.toList.toString) 45 | // assertEquals("List(List(1, 2))", s.transpose.map(_.toList).toList.toString) 46 | } 47 | 48 | @Test 49 | def si4894: Unit = { 50 | val phs = parallel.mutable.ParHashSet[Int]() 51 | phs ++= 1 to 10 52 | for (i <- 1 to 10) assertTrue(phs(i)) 53 | phs --= 1 to 10 54 | assertTrue(phs.isEmpty) 55 | 56 | val phm = parallel.mutable.ParHashMap[Int, Int]() 57 | phm ++= ((1 to 10) zip (1 to 10)) 58 | for (i <- 1 to 10) assertTrue(phm(i) == i) 59 | phm --= 1 to 10 60 | assertTrue(phm.isEmpty) 61 | } 62 | 63 | @Test 64 | def si4895: Unit = { 65 | def checkPar(sz: Int): Unit = { 66 | import collection._ 67 | val hs = mutable.HashSet[Int]() ++ (1 to sz) 68 | assertEquals((2 to (sz + 1)), hs.par.map(_ + 1).seq.toSeq.sorted) 69 | } 70 | 71 | for (i <- 0 until 100) checkPar(i) 72 | for (i <- 100 until 1000 by 50) checkPar(i) 73 | for (i <- 1000 until 10000 by 500) checkPar(i) 74 | for (i <- 10000 until 100000 by 5000) checkPar(i) 75 | } 76 | 77 | @Test 78 | def si5375: Unit = { 79 | val foos = (1 to 1000).toSeq 80 | try { 81 | foos.par.map(i => if (i % 37 == 0) throw new MultipleOf37Exception(i) else i) 82 | assert(false) 83 | } catch { 84 | case ex: MultipleOf37Exception => 85 | assert(ex.getSuppressed.size > 0) 86 | assert(ex.getSuppressed.forall(_.isInstanceOf[MultipleOf37Exception])) 87 | assert(ex.i == 37) 88 | assert(ex.getSuppressed.map(_.asInstanceOf[MultipleOf37Exception].i).forall(_ % 37 == 0)) 89 | case _: Throwable => 90 | assert(false) 91 | } 92 | class MultipleOf37Exception(val i: Int) extends RuntimeException 93 | } 94 | 95 | @Test 96 | def si6052: Unit = { 97 | def seqarr(i: Int) = Array[Int]() ++ (0 until i) 98 | def pararr(i: Int) = seqarr(i).par 99 | 100 | def check[T](i: Int, f: Int => T): Unit = { 101 | val gseq = seqarr(i).toSeq.groupBy(f) 102 | val gpar = pararr(i).groupBy(f) 103 | // Note: sequential and parallel collections cannot be compared with `==` 104 | assertTrue(gseq.forall { case (k, vs) => gpar.get(k).exists(_.sameElements(vs)) }) 105 | assertTrue(gpar.forall { case (k, vs) => gseq.get(k).exists(_.sameElements(vs)) }) 106 | } 107 | 108 | for (i <- 0 until 20) check(i, _ > 0) 109 | for (i <- 0 until 20) check(i, _ % 2) 110 | for (i <- 0 until 20) check(i, _ % 4) 111 | } 112 | 113 | @Test 114 | def si6510: Unit = { 115 | val x = collection.parallel.mutable.ParArray.range(1,10) groupBy { _ % 2 } mapValues { _.size } 116 | assertTrue(x.isInstanceOf[parallel.ParMap[?, ?]]) 117 | val y = 
collection.parallel.immutable.ParVector.range(1,10) groupBy { _ % 2 } mapValues { _.size } 118 | assertTrue(y.isInstanceOf[parallel.ParMap[?, ?]]) 119 | } 120 | 121 | @Test 122 | def si6467: Unit = { 123 | assertEquals(List(1, 2, 3, 4).foldLeft(new java.lang.StringBuffer)(_ append _).toString, "1234") 124 | assertEquals(List(1, 2, 3, 4).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, "1234") 125 | assertEquals(Seq(0 until 100: _*).foldLeft(new java.lang.StringBuffer)(_ append _).toString, (0 until 100).mkString) 126 | assertEquals(Seq(0 until 100: _*).par.aggregate(new java.lang.StringBuffer)(_ append _, _ append _).toString, (0 until 100).mkString) 127 | } 128 | 129 | @Test 130 | def si6908: Unit = { 131 | val set = collection.mutable.Set("1", null, "3").par 132 | assert( set exists (_ eq null) ) 133 | } 134 | 135 | @Test 136 | def si7498: Unit = { 137 | class Collision(val idx: Int) { 138 | override def hashCode = idx % 10 139 | } 140 | val tm = scala.collection.concurrent.TrieMap[Collision, Unit]() 141 | for (i <- 0 until 1000) tm(new Collision(i)) = () 142 | tm.par.foreach(kv => ()) 143 | } 144 | 145 | @Test 146 | def si8955: Unit = { 147 | def test(): Unit = 148 | scala.collection.parallel.immutable.ParSet[Int]((1 to 10000): _*) foreach (x => ()) // hangs non deterministically 149 | for (i <- 1 to 2000) test() 150 | } 151 | 152 | @Test 153 | def si8072: Unit = { 154 | testutil.ShouldNotTypecheck( 155 | """ 156 | import collection.parallel._ 157 | val x = List(1,2) 158 | val y = x.ifParSeq[Int](throw new Exception).otherwise(0) // Shouldn't compile 159 | val z = x.toParArray 160 | """, "value ifParSeq is not a member of List\\[Int\\]") 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /junit/src/test/scala/SerializationStabilityBase.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala 14 | 15 | import javax.xml.bind.DatatypeConverter._ 16 | import java.nio.file.{ Path, Paths, Files } 17 | import org.junit.Test 18 | 19 | trait SerializationStabilityBase { 20 | 21 | val overwrite: Option[Path] = 22 | sys.props.get("overwrite.source") 23 | .map(s => Paths.get(s).toAbsolutePath) 24 | 25 | def serialize(o: AnyRef): String = { 26 | val bos = new java.io.ByteArrayOutputStream() 27 | val out = new java.io.ObjectOutputStream(bos) 28 | out.writeObject(o) 29 | out.flush() 30 | printBase64Binary(bos.toByteArray()) 31 | } 32 | 33 | def amend(path: Path)(f: String => String): Unit = { 34 | val old = new String(java.nio.file.Files.readAllBytes(path)) 35 | Files.write(path, f(old).getBytes) 36 | } 37 | 38 | def quote(s: String) = List("\"", s, "\"").mkString 39 | 40 | def patch(path: Path, line: Int, prevResult: String, result: String): Unit = { 41 | amend(path) { 42 | content => 43 | content.linesIterator.toList.zipWithIndex.map { 44 | case (content, i) if i == line - 1 => 45 | val newContent = content.replace(quote(prevResult), quote(result)) 46 | if (newContent != content) 47 | println(s"- $content\n+ $newContent\n") 48 | newContent 49 | case (content, _) => content 50 | }.mkString("\n") 51 | } 52 | } 53 | 54 | def updateComment(path: Path): Unit = { 55 | val timestamp = { 56 | import java.text.SimpleDateFormat 57 | val sdf = new SimpleDateFormat("yyyyMMdd-HH:mm:ss") 58 | sdf.format(new java.util.Date) 59 | } 60 | val newComment = s" // Generated on $timestamp with Scala ${scala.util.Properties.versionString})" 61 | amend(path) { 62 | content => 63 | content.linesIterator.toList.map { 64 | f => f.replaceAll("""^ +// Generated on.*""", newComment) 65 | }.mkString("\n") 66 | } 67 | } 68 | 69 | def deserialize(string: String): AnyRef = { 70 | val bis = new java.io.ByteArrayInputStream(parseBase64Binary(string)) 71 | val in = new java.io.ObjectInputStream(bis) 72 | in.readObject() 73 | } 74 | 75 | def checkRoundTrip[T <: AnyRef](instance: T)(f: T => AnyRef): Unit = { 76 | val result = serialize(instance) 77 | val reconstituted = deserialize(result).asInstanceOf[T] 78 | assert(f(instance) == f(reconstituted), (f(instance), f(reconstituted))) 79 | } 80 | 81 | } 82 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/CollectTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala 14 | 15 | import org.junit.Assert._ 16 | import org.junit.{Ignore, Test} 17 | 18 | import scala.collection.mutable.ArrayBuffer 19 | 20 | // based on run/t6448.scala partest 21 | 22 | // Tests to show that various `collect` functions avoid calling 23 | // both `PartialFunction#isDefinedAt` and `PartialFunction#apply`. 
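// The guard of each partial function below routes through a shared `Counter`,
// so an implementation that called both `isDefinedAt` and `apply` would count
// a matching element twice. A sketch of the pattern under test (hypothetical
// session, for illustration only):
//
//   val c = new Counter()
//   collection.parallel.immutable.ParVector(1, 2) collect { case x if c(x) && x < 2 => x }
//   // c.count == 2: exactly one guard evaluation per element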
24 | // 25 | class CollectTest { 26 | class Counter { 27 | var count = 0 28 | def apply(i: Int) = synchronized {count += 1; true} 29 | } 30 | 31 | @Test 32 | def testParVectorCollect: Unit = { 33 | val counter = new Counter() 34 | val res = collection.parallel.immutable.ParVector(1, 2) collect { case x if counter(x) && x < 2 => x} 35 | assertEquals(collection.parallel.immutable.ParVector(1), res) 36 | assertEquals(2, counter.synchronized(counter.count)) 37 | } 38 | 39 | @Test 40 | def testParArrayCollect: Unit = { 41 | val counter = new Counter() 42 | val res = collection.parallel.mutable.ParArray(1, 2) collect { case x if counter(x) && x < 2 => x} 43 | assertEquals(collection.parallel.mutable.ParArray(1), res) 44 | assertEquals(2, counter.synchronized(counter.count)) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/concurrent/ctries_new/DumbHash.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.concurrent.ctries_new 14 | 15 | class DumbHash(val i: Int) { 16 | override def equals(other: Any) = other match { 17 | case that: DumbHash => that.i == this.i 18 | case _ => false 19 | } 20 | override def hashCode = i % 5 21 | override def toString = "DH(%s)".format(i) 22 | } 23 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/concurrent/ctries_new/LNodeSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.concurrent.ctries_new 14 | 15 | import collection.concurrent.TrieMap 16 | import org.junit.Test 17 | 18 | class LNodeSpec extends Spec { 19 | 20 | val initsz = 1500 21 | val secondsz = 1750 22 | 23 | @Test 24 | def test(): Unit = { 25 | "accept elements with the same hash codes" in { 26 | val ct = new TrieMap[DumbHash, Int] 27 | for (i <- 0 until initsz) ct.update(new DumbHash(i), i) 28 | } 29 | 30 | "lookup elements with the same hash codes" in { 31 | val ct = new TrieMap[DumbHash, Int] 32 | for (i <- 0 until initsz) ct.update(new DumbHash(i), i) 33 | for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i)) 34 | for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None) 35 | } 36 | 37 | "remove elements with the same hash codes" in { 38 | val ct = new TrieMap[DumbHash, Int] 39 | for (i <- 0 until initsz) ct.update(new DumbHash(i), i) 40 | for (i <- 0 until initsz) { 41 | val remelem = ct.remove(new DumbHash(i)) 42 | assert(remelem == Some(i), "removing " + i + " yields " + remelem) 43 | } 44 | for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None) 45 | } 46 | 47 | "put elements with the same hash codes if absent" in { 48 | val ct = new TrieMap[DumbHash, Int] 49 | for (i <- 0 until initsz) ct.put(new DumbHash(i), i) 50 | for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == i) 51 | for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i)) 52 | for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None) 53 | for (i <- initsz until secondsz) assert(ct.getOrElse(new DumbHash(i), null) == i) 54 | } 55 | 56 | "replace elements with the same hash codes" in { 57 | val ct = new TrieMap[DumbHash, Int] 58 | for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) 59 | for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == i) 60 | for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i)) 61 | for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == -i) 62 | for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true) 63 | } 64 | 65 | "remove elements with the same hash codes if mapped to a specific value" in { 66 | val ct = new TrieMap[DumbHash, Int] 67 | for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) 68 | for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true) 69 | } 70 | 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/concurrent/ctries_new/Spec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.concurrent.ctries_new 14 | 15 | import scala.reflect.{ClassTag, classTag} 16 | 17 | trait Spec { 18 | 19 | implicit class Str2ops(s: String) { 20 | def in[U](body: =>U): Unit = { 21 | // just execute body 22 | body 23 | } 24 | } 25 | 26 | implicit class Any2ops(a: Any) { 27 | def shouldEqual(other: Any) = assert(a == other) 28 | } 29 | 30 | trait HasShouldProduce[U] { def shouldProduce[T <: Throwable: ClassTag](): Unit } 31 | 32 | def evaluating[U](body: =>U): HasShouldProduce[U] = new HasShouldProduce[U] { 33 | override def shouldProduce[T <: Throwable: ClassTag]() = { 34 | var produced = false 35 | try body 36 | catch { 37 | case e: Throwable => if (e.getClass == implicitly[ClassTag[T]].runtimeClass) produced = true 38 | } finally { 39 | assert(produced, "Did not produce exception of type: " + implicitly[ClassTag[T]]) 40 | } 41 | } 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/concurrent/ctries_new/Wrap.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.concurrent.ctries_new 14 | 15 | case class Wrap(i: Int) { 16 | override def hashCode = i * 0x9e3775cd 17 | } 18 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/concurrent/ctries_old/DumbHash.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.concurrent.ctries_old 14 | 15 | class DumbHash(val i: Int) { 16 | override def equals(other: Any) = other match { 17 | case that: DumbHash => that.i == this.i 18 | case _ => false 19 | } 20 | override def hashCode = i % 5 21 | override def toString = "DH(%s)".format(i) 22 | } 23 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/concurrent/ctries_old/LNodeSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.concurrent.ctries_old 14 | 15 | import collection.concurrent.TrieMap 16 | 17 | import org.junit.Test 18 | 19 | class LNodeSpec extends Spec { 20 | 21 | val initsz = 1500 22 | val secondsz = 1750 23 | 24 | @Test 25 | def test(): Unit = { 26 | "accept elements with the same hash codes" in { 27 | val ct = new TrieMap[DumbHash, Int] 28 | for (i <- 0 until initsz) ct.update(new DumbHash(i), i) 29 | } 30 | 31 | "lookup elements with the same hash codes" in { 32 | val ct = new TrieMap[DumbHash, Int] 33 | for (i <- 0 until initsz) ct.update(new DumbHash(i), i) 34 | for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == Some(i)) 35 | for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None) 36 | } 37 | 38 | "remove elements with the same hash codes" in { 39 | val ct = new TrieMap[DumbHash, Int] 40 | for (i <- 0 until initsz) ct.update(new DumbHash(i), i) 41 | for (i <- 0 until initsz) { 42 | val remelem = ct.remove(new DumbHash(i)) 43 | assert(remelem == Some(i), "removing " + i + " yields " + remelem) 44 | } 45 | for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None) 46 | } 47 | 48 | "put elements with the same hash codes if absent" in { 49 | val ct = new TrieMap[DumbHash, Int] 50 | for (i <- 0 until initsz) ct.put(new DumbHash(i), i) 51 | for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == i) 52 | for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i)) 53 | for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None) 54 | for (i <- initsz until secondsz) assert(ct.getOrElse(new DumbHash(i), null) == i) 55 | } 56 | 57 | "replace elements with the same hash codes" in { 58 | val ct = new TrieMap[DumbHash, Int] 59 | for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) 60 | for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == i) 61 | for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i)) 62 | for (i <- 0 until initsz) assert(ct.getOrElse(new DumbHash(i), null) == -i) 63 | for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true) 64 | } 65 | 66 | "remove elements with the same hash codes if mapped to a specific value" in { 67 | val ct = new TrieMap[DumbHash, Int] 68 | for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) 69 | for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true) 70 | } 71 | 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/concurrent/ctries_old/Spec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.concurrent.ctries_old 14 | 15 | import scala.reflect.{ClassTag, classTag} 16 | 17 | trait Spec { 18 | 19 | implicit class Str2ops(s: String) { 20 | def in[U](body: =>U): Unit = { 21 | // just execute body 22 | body 23 | } 24 | } 25 | 26 | implicit class Any2ops(a: Any) { 27 | def shouldEqual(other: Any) = assert(a == other) 28 | } 29 | 30 | trait HasShouldProduce[U] { def shouldProduce[T <: Throwable: ClassTag](): Unit } 31 | 32 | def evaluating[U](body: =>U): HasShouldProduce[U] = new HasShouldProduce[U] { 33 | override def shouldProduce[T <: Throwable: ClassTag]() = { 34 | var produced = false 35 | try body 36 | catch { 37 | case e: Throwable => if (e.getClass == implicitly[ClassTag[T]].runtimeClass) produced = true 38 | } finally { 39 | assert(produced, "Did not produce exception of type: " + implicitly[ClassTag[T]]) 40 | } 41 | } 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/concurrent/ctries_old/Wrap.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.concurrent.ctries_old 14 | 15 | case class Wrap(i: Int) { 16 | override def hashCode = i * 0x9e3775cd 17 | } 18 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/parallel/ParMapTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | 15 | import scala.collection._ 16 | import scala.collection.parallel.CollectionConverters._ 17 | import org.junit.Test 18 | import org.junit.Assert._ 19 | 20 | // based on run/parmap-ops.scala partest 21 | class ParMapTest { 22 | 23 | @Test 24 | def test: Unit = { 25 | val gm: ParMap[Int, Int] = Map(0 -> 0, 1 -> 1).par 26 | 27 | // ops 28 | assertTrue(gm.isDefinedAt(1)) 29 | assertTrue(gm.contains(1)) 30 | assertTrue(gm.getOrElse(1, 2) == 1) 31 | assertTrue(gm.getOrElse(2, 3) == 3) 32 | assertTrue(gm.keysIterator.toSet == Set(0, 1)) 33 | assertTrue(gm.valuesIterator.toSet == Set(0, 1)) 34 | assertTrue(gm.keySet == ParSet(0, 1)) 35 | assertTrue(gm.keys.toSet == ParSet(0, 1)) 36 | assertTrue(gm.values.toSet == ParSet(0, 1)) 37 | try { 38 | gm.default(-1) 39 | assertTrue(false) 40 | } catch { 41 | case e: NoSuchElementException => // ok 42 | } 43 | 44 | assertTrue(gm.filterKeys(_ % 2 == 0)(0) == 0) 45 | assertTrue(gm.filterKeys(_ % 2 == 0).get(1) == None) 46 | assertTrue(gm.mapValues(_ + 1)(0) == 1) 47 | 48 | // with defaults 49 | val pm = parallel.mutable.ParMap(0 -> 0, 1 -> 1) 50 | val dm = pm.withDefault(x => -x) 51 | assertTrue(dm(0) == 0) 52 | assertTrue(dm(1) == 1) 53 | assertTrue(dm(2) == -2) 54 | assertTrue(dm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2)) 55 | dm.put(3, 3) 56 | assertTrue(dm(3) == 3) 57 | assertTrue(pm(3) == 3) 58 | assertTrue(dm(4) == -4) 59 | 60 | val imdm = parallel.immutable.ParMap(0 -> 0, 1 -> 1).withDefault(x => -x) 61 | assertTrue(imdm(0) == 0) 62 | assertTrue(imdm(1) == 1) 63 | assertTrue(imdm(2) == -2) 64 | assertTrue(imdm.updated(2, 2) == parallel.ParMap(0 -> 0, 1 -> 1, 2 -> 2)) 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/parallel/ParSeqConversionsTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | 15 | import CollectionConverters._ 16 | 17 | import org.junit.Test 18 | import org.junit.Assert._ 19 | 20 | // test conversions between collections 21 | // based on run/pc-conversions.scala partest 22 | class ParSeqConversionsTest { 23 | 24 | @Test 25 | def testConversions: Unit = { 26 | // par.to* and to*.par tests 27 | assertToPar(scala.collection.parallel.mutable.ParArray(1 -> 1, 2 -> 2, 3 -> 3)) 28 | assertToPar(scala.collection.parallel.immutable.ParVector(1 -> 1, 2 -> 2, 3 -> 3)) 29 | assertToPar(scala.collection.parallel.mutable.ParHashMap(1 -> 2)) 30 | assertToPar(scala.collection.parallel.mutable.ParHashSet(1 -> 2)) 31 | assertToPar(scala.collection.parallel.immutable.ParHashMap(1 -> 2)) 32 | assertToPar(scala.collection.parallel.immutable.ParHashSet(1 -> 3)) 33 | 34 | assertToParWoMap(scala.collection.parallel.immutable.ParRange(1, 10, 2, false)) 35 | assertToParWoMap(scala.collection.parallel.immutable.ParVector(1, 2, 3)) 36 | assertToParWoMap(scala.collection.parallel.mutable.ParArray(1, 2, 3)) 37 | 38 | // seq and par again conversions 39 | assertSeqPar(scala.collection.parallel.mutable.ParArray(1, 2, 3)) 40 | assertSeqPar(scala.collection.parallel.immutable.ParVector(1, 2, 3)) 41 | assertSeqPar(scala.collection.parallel.immutable.ParRange(1, 50, 1, false)) 42 | } 43 | 44 | def assertSeqPar[T](pc: scala.collection.parallel.ParIterable[T]): Unit = assertTrue(pc.seq.par == pc) 45 | 46 | def assertToPar[K, V](xs: scala.collection.parallel.ParIterable[(K, V)]): Unit = { 47 | assertTrue(xs.toSeq.par == xs.toSeq) 48 | assertTrue(xs.par.toSeq == xs.toSeq) 49 | 50 | // assertTrue(xs.toSet.par == xs.toSet) 51 | // assertTrue(xs.par.toSet == xs.toSet) 52 | 53 | // assertTrue(xs.toMap.par == xs.toMap) 54 | // assertTrue(xs.par.toMap == xs.toMap) 55 | } 56 | 57 | def assertToParWoMap[T](xs: scala.collection.parallel.ParSeq[T]): Unit = { 58 | // assertTrue(xs.toIterable.par == xs.toIterable) 59 | // assertTrue(xs.par.toIterable == xs.toIterable) 60 | 61 | assertTrue(xs.toSeq.par == xs.toSeq) 62 | assertTrue(xs.par.toSeq == xs.toSeq) 63 | 64 | // assertTrue(xs.toSet.par == xs.toSet) 65 | // assertTrue(xs.par.toSet == xs.toSet) 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/parallel/SerializationTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | 15 | import org.junit.Test 16 | import org.junit.Assert._ 17 | 18 | // based on jvm/serialization-new.scala partest 19 | class SerializationTest { 20 | 21 | @throws(classOf[java.io.IOException]) 22 | def write[A](o: A): Array[Byte] = { 23 | val ba = new java.io.ByteArrayOutputStream(512) 24 | val out = new java.io.ObjectOutputStream(ba) 25 | out.writeObject(o) 26 | out.close() 27 | ba.toByteArray() 28 | } 29 | @throws(classOf[java.io.IOException]) 30 | @throws(classOf[ClassNotFoundException]) 31 | def read[A](buffer: Array[Byte]): A = { 32 | val in = 33 | new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer)) 34 | in.readObject().asInstanceOf[A] 35 | } 36 | def check[A, B](x: A, y: B): Unit = { 37 | assertEquals(x, y) 38 | assertEquals(y, x) 39 | } 40 | 41 | @Test 42 | def testParallel: Unit = { 43 | import scala.collection.parallel._ 44 | // UnrolledBuffer 45 | val ub = new collection.mutable.UnrolledBuffer[String] 46 | ub ++= List("one", "two") 47 | val _ub: collection.mutable.UnrolledBuffer[String] = read(write(ub)) 48 | check(ub, _ub) 49 | 50 | // mutable.ParArray 51 | val pa = mutable.ParArray("abc", "def", "etc") 52 | val _pa: mutable.ParArray[String] = read(write(pa)) 53 | check(pa, _pa) 54 | 55 | // mutable.ParHashMap 56 | val mpm = mutable.ParHashMap(1 -> 2, 2 -> 4) 57 | val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm)) 58 | check(mpm, _mpm) 59 | 60 | // mutable.ParTrieMap 61 | val mpc = mutable.ParTrieMap(1 -> 2, 2 -> 4) 62 | val _mpc: mutable.ParTrieMap[Int, Int] = read(write(mpc)) 63 | check(mpc, _mpc) 64 | 65 | // mutable.ParHashSet 66 | val mps = mutable.ParHashSet(1, 2, 3) 67 | val _mps: mutable.ParHashSet[Int] = read(write(mps)) 68 | check(mps, _mps) 69 | 70 | // immutable.ParRange 71 | val pr1 = immutable.ParRange(0, 4, 1, true) 72 | val _pr1: immutable.ParRange = read(write(pr1)) 73 | check(pr1, _pr1) 74 | 75 | val pr2 = immutable.ParRange(0, 4, 1, false) 76 | val _pr2: immutable.ParRange = read(write(pr2)) 77 | check(pr2, _pr2) 78 | 79 | // immutable.ParHashMap 80 | val ipm = immutable.ParHashMap(5 -> 1, 10 -> 2) 81 | val _ipm: immutable.ParHashMap[Int, Int] = read(write(ipm)) 82 | check(ipm, _ipm) 83 | 84 | // immutable.ParHashSet 85 | val ips = immutable.ParHashSet("one", "two") 86 | val _ips: immutable.ParHashSet[String] = read(write(ips)) 87 | check(ips, _ips) 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/parallel/TaskTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | 15 | import org.junit.Test 16 | import org.junit.Assert._ 17 | 18 | import java.util.concurrent.{ForkJoinPool, ForkJoinWorkerThread}, ForkJoinPool._ 19 | 20 | import CollectionConverters._ 21 | 22 | class TaskTest { 23 | @Test 24 | def `t10577 task executes on foreign pool`(): Unit = { 25 | def mkFactory(name: String) = new ForkJoinWorkerThreadFactory { 26 | override def newThread(pool: ForkJoinPool) = { 27 | val t = new ForkJoinWorkerThread(pool) {} 28 | t.setName(name) 29 | t 30 | } 31 | } 32 | def mkPool(name: String) = { 33 | val parallelism = 1 34 | val handler: Thread.UncaughtExceptionHandler = null 35 | val asyncMode = false 36 | new ForkJoinPool(parallelism, mkFactory(name), handler, asyncMode) 37 | } 38 | 39 | val one = List(1).par 40 | val two = List(2).par 41 | 42 | one.tasksupport = new ForkJoinTaskSupport(mkPool("one")) 43 | two.tasksupport = new ForkJoinTaskSupport(mkPool("two")) 44 | 45 | for (x <- one ; y <- two) assertEquals("two", Thread.currentThread.getName) 46 | } 47 | 48 | @Test 49 | def `t152 pass on task support`(): Unit = { 50 | val myTs = new ExecutionContextTaskSupport() 51 | val c = List(1).par 52 | c.tasksupport = myTs 53 | val r = c.filter(_ != 0).map(_ + 1) 54 | assertSame(myTs, r.tasksupport) 55 | } 56 | 57 | // was: Wrong exception: expected scala.collection.parallel.TaskTest$SpecialControl$1 but was java.lang.IllegalArgumentException 58 | @Test 59 | def `t10276 exception does not suppress itself when merging`: Unit = { 60 | import TestSupport._ 61 | import scala.util.control.ControlThrowable 62 | class SpecialControl extends ControlThrowable("special") 63 | val SpecialExcept = new SpecialControl 64 | 65 | class Special { 66 | def add(other: Special): Special = throw SpecialExcept 67 | } 68 | 69 | def listed(n: Int) = List.fill(n)(new Special) 70 | val specials = listed(1000).par 71 | assertThrows[SpecialControl](_ eq SpecialExcept)(specials.reduce(_ add _)) 72 | } 73 | } 74 | object TestSupport { 75 | import scala.reflect.ClassTag 76 | import scala.util.control.{ControlThrowable, NonFatal} 77 | private val Unthrown = new ControlThrowable {} 78 | 79 | def assertThrows[T <: Throwable: ClassTag](checker: T => Boolean)(body: => Any): Unit = 80 | try { 81 | body 82 | throw Unthrown 83 | } catch { 84 | case Unthrown => fail("Expression did not throw!") 85 | case e: T if checker(e) => () 86 | case failed: T => 87 | val ae = new AssertionError(s"Exception failed check: $failed") 88 | ae.addSuppressed(failed) 89 | throw ae 90 | case NonFatal(other) => 91 | val ae = new AssertionError(s"Wrong exception: expected ${implicitly[ClassTag[T]]} but was ${other.getClass.getName}") 92 | ae.addSuppressed(other) 93 | throw ae 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/parallel/immutable/ParRangeTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel.immutable 14 | 15 | import org.junit.runner.RunWith 16 | import org.junit.runners.JUnit4 17 | import org.junit.Test 18 | 19 | @RunWith(classOf[JUnit4]) 20 | class ParRangeTest { 21 | 22 | @Test 23 | def buildParRangeString: Unit = { 24 | assert(ParRange(1, 5, 1, true).toString == "ParRange 1 to 5") 25 | } 26 | 27 | } 28 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/parallel/mutable/ParArrayTest.scala: 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.parallel.mutable 14 | 15 | import org.junit.Test 16 | import org.junit.Assert._ 17 | 18 | class ParArrayTest extends scala.collection.concurrent.ctries_old.Spec { 19 | 20 | @Test 21 | def `create new parallel array with a bad initial capacity`: Unit = { 22 | evaluating { new ParArray(-5) }.shouldProduce[IllegalArgumentException]() 23 | /** 24 | * this currently passes, but do we want it to? 25 | * is it meaningful to have an empty parallel array? 26 | */ 27 | new ParArray(0) 28 | () 29 | } 30 | 31 | @Test 32 | def `compare identical ParArrays`: Unit = { 33 | assert(new ParArray(5) == new ParArray(5)) 34 | assert(ParArray(1,2,3,4,5) == ParArray(1,2,3,4,5)) 35 | } 36 | 37 | /** 38 | * this test needs attention. how is equality defined on ParArrays? 39 | * Well, the same way it is for normal collections, I guess. For normal arrays it's reference equality. 40 | * I do not think it should be that way in the case of ParArray-s. I'll check this with Martin. 41 | */ 42 | @Test 43 | def `compare non-identical ParArrays`: Unit = { 44 | assert(ParArray(1,2,3,4,5) != ParArray(1,2,3,4), 45 | "compared PA's that I expect to not be identical, but they were!") 46 | } 47 | 48 | @Test 49 | def `creation via PA object [String]`: Unit = { 50 | val paFromApply: ParArray[String] = ParArray("x", "1", "true", "etrijwejiorwer") 51 | val paFromHandoff: ParArray[String] = ParArray.handoff(Array("x", "1", "true", "etrijwejiorwer")) 52 | val paFromCopy: ParArray[String] = ParArray.createFromCopy(Array("x", "1", "true", "etrijwejiorwer")) 53 | assert( paFromApply == paFromHandoff ) 54 | assert( paFromApply == paFromCopy ) 55 | } 56 | 57 | // // handoffs don't work for primitive types... 58 | // test("creation via PA object [Boolean]"){ 59 | // val paFromApply: ParArray[Boolean] = ParArray(true, false, true, false) 60 | // val paFromCopy: ParArray[Boolean] = ParArray.createFromCopy(Array(true, false, true, false)) 61 | // assert( paFromApply == paFromCopy ) 62 | // } 63 | // 64 | // // handoffs don't work for primitive types... 65 | // test("creation via PA object [Int]"){ 66 | // val paFromApply: ParArray[Int] = ParArray(1, 2, 4, 3) 67 | // val paFromCopy: ParArray[Int] = ParArray.createFromCopy(Array(1, 2, 4, 3)) 68 | // assert( paFromApply == paFromCopy ) 69 | // } 70 | 71 | /** 72 | * This fails because handoff is really doing a copy.
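 * If handoff truly shared the caller's array, the write arrayToHandOff(0) = "w"
 * in the test below would be visible through paFromHandoff and the assertion
 * would pass; a failure here means handoff made a defensive copy instead.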
73 | * TODO: look at handoff 74 | */ 75 | @Test 76 | def `Handoff Is Really A Handoff`: Unit = { 77 | val arrayToHandOff = Array("a", "x", "y", "z") 78 | val paFromHandoff: ParArray[String] = ParArray.handoff(arrayToHandOff) 79 | arrayToHandOff(0) = "w" 80 | assert(paFromHandoff(0) == "w") 81 | } 82 | 83 | @Test 84 | def `simple reduce`: Unit = { 85 | assert( ParArray(1,2,3,4,5).reduce(_+_) == 15 ) 86 | } 87 | 88 | @Test 89 | def `empty reduce`: Unit = { 90 | evaluating { ParArray.empty[Int].reduce(_+_) }.shouldProduce[UnsupportedOperationException]() 91 | } 92 | 93 | @Test 94 | def `simple count`: Unit = { 95 | assert( ParArray[Int]().count(_ > 7) == 0 ) 96 | assert( ParArray(1,2,3).count(_ > 7) == 0 ) 97 | assert( ParArray(1,2,3).count(_ <= 3) == 3 ) 98 | assert( ParArray(1,2,3,4,5,6,7,8,9,10).count(_ > 7 ) == 3 ) 99 | } 100 | 101 | @Test 102 | def `simple forall`: Unit = { 103 | assert( ParArray[Int]().forall(_ > 7) == true ) 104 | assert( ParArray(1,2,3).forall(_ > 3) == false ) 105 | assert( ParArray(1,2,3).forall(_ <= 3) == true ) 106 | assert( ParArray(1,2,3,4,5,6,7,8,9,10).forall(_ > 0) == true ) 107 | assert( ParArray(1,2,3,4,5,6,7,8,9,10).forall(_ < 5) == false ) 108 | } 109 | 110 | /** foreach must apply its side effect to every element, 111 | * observed here via a concurrent queue. */ 112 | @Test 113 | def `simple foreach`: Unit = { 114 | val buf = new java.util.concurrent.ArrayBlockingQueue[Int](10000) 115 | ParArray((1 to 10000):_*).foreach(buf add _) 116 | (1 to 10000).foreach(i => assert( buf contains i, "buf should have contained: " + i )) 117 | } 118 | 119 | @Test 120 | def `simple exists`: Unit = { 121 | assert( ParArray[Int]().exists(_ => true) == false ) 122 | assert( ParArray(1,2,3).exists(_ > 3) == false ) 123 | assert( ParArray(1,2,3,4,5,6,7,8,9,10).exists(_ > 7) == true ) 124 | } 125 | 126 | @Test 127 | def `simple filter`: Unit = { 128 | assert(ParArray(1,2,3,4,5).filter( _ < 4 ) == ParArray(1,2,3)) 129 | } 130 | 131 | @Test 132 | def `simple map test`: Unit = { 133 | assert(ParArray(1,2,3,4,5).map( (_:Int) * 10 ) == ParArray(10,20,30,40,50)) 134 | } 135 | 136 | @Test 137 | def `empty min`: Unit = { 138 | evaluating { ParArray.empty[Int].min }.shouldProduce[UnsupportedOperationException]() 139 | } 140 | 141 | @Test 142 | def `empty max`: Unit = { 143 | evaluating { ParArray.empty[Int].max }.shouldProduce[UnsupportedOperationException]() 144 | } 145 | 146 | @Test 147 | def `empty minBy`: Unit = { 148 | evaluating { ParArray.empty[String].minBy(_.length) }.shouldProduce[UnsupportedOperationException]() 149 | } 150 | 151 | @Test 152 | def `empty maxBy`: Unit = { 153 | evaluating { ParArray.empty[String].maxBy(_.length) }.shouldProduce[UnsupportedOperationException]() 154 | } 155 | 156 | } 157 | -------------------------------------------------------------------------------- /junit/src/test/scala/scala/collection/parallel/mutable/ParHashSetTest.scala: 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership.
11 | */ 12 | 13 | package scala.collection.parallel.mutable 14 | 15 | import org.junit.Test 16 | import org.junit.Assert._ 17 | 18 | // based on run/hashset.scala partest 19 | class ParHashSetTest { 20 | @Test 21 | def testPar: Unit = { 22 | val h1 = new ParHashSet[Int] 23 | for (i <- 0 until 20) h1 += i 24 | for (i <- 0 until 20) assertTrue(h1.contains(i)) 25 | for (i <- 20 until 40) assertFalse(h1.contains(i)) 26 | assertEquals((0 until 20).toList.sorted, h1.toList.sorted) 27 | 28 | val h2 = new ParHashSet[String] 29 | h2 += null 30 | for (i <- 0 until 20) h2 += "" + i 31 | assertTrue(h2 contains null) 32 | for (i <- 0 until 20) assertTrue(h2.contains("" + i)) 33 | for (i <- 20 until 40) assertFalse(h2.contains("" + i)) 34 | assertEquals((0 until 20).map("" + _).toList.sorted.mkString(",") + ",null", h2.toList.map("" + _).sorted.mkString(",")) 35 | 36 | h2 -= null 37 | h2 -= "" + 0 38 | assertFalse(h2 contains null) 39 | assertEquals((1 until 20).map("" + _).toList.sorted.mkString(","), h2.toList.map("" + _).sorted.mkString(",")) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /project/GetScala3Next.scala: -------------------------------------------------------------------------------- 1 | import java.nio.ByteBuffer 2 | 3 | import scala.concurrent._, duration._, ExecutionContext.Implicits._ 4 | 5 | import gigahorse._, support.okhttp.Gigahorse 6 | 7 | import sjsonnew.shaded.scalajson.ast.unsafe._ 8 | import sjsonnew.support.scalajson.unsafe.{ Converter, Parser } 9 | 10 | object GetScala3Next { 11 | val asJson = (r: FullResponse) => Parser.parseFromByteBuffer(r.bodyAsByteBuffer).get 12 | 13 | def get(): String = { 14 | val req = Gigahorse.url("https://api.github.com/repos/lampepfl/dotty/releases") 15 | .get.addQueryString("per_page" -> "1") 16 | 17 | val http = Gigahorse.http(Gigahorse.config) 18 | 19 | try { 20 | val f = http.run(req, asJson) 21 | 22 | val f2 = f.collect { 23 | case JArray(Array(JObject(fields))) => fields.collectFirst { 24 | case JField("tag_name", JString(version)) => version 25 | } 26 | }.map(_.getOrElse(sys.error(s"Expected an array of 1 string, got $f"))) 27 | 28 | Await.result(f2, 120.seconds) 29 | } finally http.close() 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.11.1 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("org.scala-lang.modules" % "sbt-scala-module" % "3.2.2") 2 | addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.11.0") 3 | addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.5.7") 4 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/IntOperators.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel.ops 14 | 15 | import scala.collection.parallel._ 16 | 17 | trait IntOperators extends Operators[Int] { 18 | def reduceOperators = List(_ + _, _ * _, math.min(_, _), math.max(_, _), _ ^ _) 19 | def countPredicates = List( 20 | x => true, 21 | _ >= 0, _ < 0, _ < 50, _ < 500, _ < 5000, _ < 50000, _ % 2 == 0, _ == 99, 22 | x => x > 50 && x < 150, 23 | x => x > 350 && x < 550, 24 | x => (x > 1000 && x < 1500) || (x > 400 && x < 500) 25 | ) 26 | def forallPredicates = List(_ >= 0, _ < 0, _ % 2 == 0, _ != 55, _ != 505, _ != 5005) 27 | def existsPredicates = List(_ >= 0, _ < 0, _ % 2 == 0, _ == 55, _ == 505, _ == 5005) 28 | def findPredicates = List(_ >= 0, _ % 2 == 0, _ < 0, _ == 50, _ == 500, _ == 5000) 29 | def mapFunctions = List(-_, math.abs(_), _ % 2, _ % 3, _ % 4, _ % 150, _ % 500) 30 | def partialMapFunctions = List({case x => -x}, { case 0 => -1; case x if x > 0 => x + 1}, {case x if x % 3 == 0 => x / 3}) 31 | def flatMapFunctions = List( 32 | (n: Int) => if (n < 0) List() else if (n % 2 == 0) List(1, 2, 3) else List(4, 5, 6), 33 | (n: Int) => List[Int](), 34 | (n: Int) => if (n == 0) List(1, 2, 3, 4, 5) else if (n < 0) List(1, 2, 3) else List() 35 | ) 36 | def filterPredicates = List( 37 | _ % 2 == 0, _ % 3 == 0, 38 | _ % 4 != 0, _ % 17 != 0, 39 | n => n > 50 && n < 100, 40 | _ >= 0, _ < 0, _ == 99, 41 | _ > 500, _ > 5000, _ > 50000, 42 | _ < 500, _ < 50, _ < -50, _ < -5e5, 43 | x => true, x => false, 44 | x => x % 53 == 0 && x % 17 == 0 45 | ) 46 | def filterNotPredicates = filterPredicates 47 | def partitionPredicates = filterPredicates 48 | def takeWhilePredicates = List( 49 | _ != 50, _ != 500, _ != 5000, _ != 50000, _ % 2 == 0, _ % 3 == 1, _ % 47 != 0, 50 | _ < 100, _ < 1000, _ < 10000, _ < 0, 51 | _ < -100, _ < -1000, _ > -200, _ > -50, 52 | n => -90 < n && n < -10, 53 | n => 50 < n && n < 550, 54 | n => 5000 < n && n < 7500, 55 | n => -50 < n && n < 450 56 | ) 57 | def dropWhilePredicates = takeWhilePredicates 58 | def spanPredicates = takeWhilePredicates 59 | def foldArguments = List( 60 | (0, _ + _), 61 | (1, _ * _), 62 | (Int.MinValue, math.max(_, _)), 63 | (Int.MaxValue, math.min(_, _)) 64 | ) 65 | def addAllIterables = List( 66 | List[Int](), 67 | List(1), 68 | List(1, 2), 69 | List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 70 | Array.fill(1000)(1).toSeq 71 | ) 72 | def newArray(sz: Int) = new Array[Int](sz) 73 | def groupByFunctions = List( 74 | _ % 2, _ % 3, _ % 5, _ % 10, _ % 154, _% 3217, 75 | _ * 2, _ + 1 76 | ) 77 | } 78 | 79 | trait IntSeqOperators extends IntOperators with SeqOperators[Int] { 80 | def segmentLengthPredicates = List( 81 | _ % 2 == 0, _ > 0, _ >= 0, _ < 0, _ <= 0, _ > -5000, _ > 5000, _ % 541 != 0, _ < -50, _ > 500, 82 | n => -90 < n && n < -10, n => 500 < n && n < 1500 83 | ) 84 | def indexWherePredicates = List( 85 | _ % 2 == 0, _ % 11 == 0, _ % 123 == 0, _ % 901 == 0, 86 | _ > 0, _ >= 0, _ < 0, _ <= 0, 87 | _ > 50, _ > 500, _ > 5000, 88 | _ < -10, _ < -100, _ < -1000, 89 | n => n > 50 && n < 100, 90 | n => n * n > 1000000 && n % 111 == 0 91 | ) 92 | def lastIndexWherePredicates = List( 93 | _ % 2 == 0, _ % 17 == 0, _ % 314 == 0, _ % 1017 == 0, 94 | _ > 0, _ >= 0, _ < 0, _ <= 0, 95 | _ > 50, _ > 500, _ > 5000, 96 | _ < -20, _ < -200, _ < -2000, 97 | _ == 0, 98 | n => n > -40 && n < 40, 99 | n => n > -80 && n < -10, 100 | n => n > 110 && n < 150 101 | ) 102 | def reverseMapFunctions = List(-_, n => n * n, _ + 1) 103 | def sameElementsSeqs = List( 104 | List[Int](), 105 | List(1), 106 | List(1, 2, 3, 4, 5, 6, 7, 
8, 9), 107 | Array.fill(150)(1).toSeq, 108 | Array.fill(1000)(1).toSeq 109 | ) 110 | def startEndSeqs = List( 111 | Nil, 112 | List(1), 113 | List(1, 2, 3, 4, 5), 114 | List(0, 1, 2, 3, 4, 5), 115 | List(4, 5, 6, 7, 8, 9, 10), 116 | List(4, 5, 6, 7, 8, 9, 0), 117 | List(-4, -3, -2, -1) 118 | ) 119 | } 120 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/IntValues.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.parallel.ops 14 | 15 | import org.scalacheck._ 16 | import org.scalacheck.Gen 17 | import org.scalacheck.Gen._ 18 | import org.scalacheck.Prop._ 19 | import org.scalacheck.Properties 20 | import org.scalacheck.Arbitrary._ 21 | 22 | trait IntValues { 23 | def values = Seq( 24 | arbitrary[Int], 25 | arbitrary[Int] suchThat (_ >= 0), 26 | arbitrary[Int] suchThat (_ < 0), 27 | choose(0, 0), 28 | choose(0, 10), 29 | choose(0, 100), 30 | choose(0, 1000) suchThat (_ % 2 == 0), 31 | choose(0, 1000) suchThat (_ % 2 != 0), 32 | choose(0, 1000) suchThat (n => (n % 2 == 0) || (n % 3 == 0)) 33 | ) 34 | } 35 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/Operators.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | 15 | trait Operators[T] { 16 | def reduceOperators: List[(T, T) => T] 17 | def countPredicates: List[T => Boolean] 18 | def forallPredicates: List[T => Boolean] 19 | def existsPredicates: List[T => Boolean] 20 | def findPredicates: List[T => Boolean] 21 | def mapFunctions: List[T => T] 22 | def partialMapFunctions: List[PartialFunction[T, T]] 23 | def flatMapFunctions: List[T => Iterable[T]] 24 | def filterPredicates: List[T => Boolean] 25 | def filterNotPredicates: List[T => Boolean] 26 | def partitionPredicates: List[T => Boolean] 27 | def takeWhilePredicates: List[T => Boolean] 28 | def dropWhilePredicates: List[T => Boolean] 29 | def spanPredicates: List[T => Boolean] 30 | def foldArguments: List[(T, (T, T) => T)] 31 | def addAllIterables: List[Iterable[T]] 32 | def newArray(sz: Int): Array[T] 33 | def groupByFunctions: List[T => T] 34 | } 35 | 36 | trait SeqOperators[T] extends Operators[T] { 37 | def segmentLengthPredicates: List[T => Boolean] 38 | def indexWherePredicates: List[T => Boolean] 39 | def lastIndexWherePredicates: List[T => Boolean] 40 | def reverseMapFunctions: List[T => T] 41 | def sameElementsSeqs: List[Seq[T]] 42 | def startEndSeqs: List[Seq[T]] 43 | } 44 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/PairOperators.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel.ops 14 | 15 | import scala.collection.parallel._ 16 | 17 | trait PairOperators[K, V] extends Operators[(K, V)] { 18 | def koperators: Operators[K] 19 | def voperators: Operators[V] 20 | 21 | private def zipPredicates(kps: List[K => Boolean], vps: List[V => Boolean]): List[((K, V)) => Boolean] = for { 22 | (kp, vp) <- kps zip vps 23 | } yield new Function1[(K, V), Boolean] { 24 | def apply(kv: (K, V)) = kp(kv._1) && vp(kv._2) 25 | } 26 | 27 | /* operators */ 28 | 29 | def reduceOperators = for { 30 | (kop, vop) <- koperators.reduceOperators zip voperators.reduceOperators 31 | } yield new Function2[(K, V), (K, V), (K, V)] { 32 | def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2)) 33 | } 34 | 35 | def countPredicates = zipPredicates(koperators.countPredicates, voperators.countPredicates) 36 | 37 | def forallPredicates = zipPredicates(koperators.forallPredicates, voperators.forallPredicates) 38 | 39 | def existsPredicates = zipPredicates(koperators.existsPredicates, voperators.existsPredicates) 40 | 41 | def findPredicates = zipPredicates(koperators.findPredicates, voperators.findPredicates) 42 | 43 | def mapFunctions = for { 44 | (km, vm) <- koperators.mapFunctions zip voperators.mapFunctions 45 | } yield new Function1[(K, V), (K, V)] { 46 | def apply(kv: (K, V)) = (km(kv._1), vm(kv._2)) 47 | } 48 | 49 | def partialMapFunctions = for { 50 | (kpm, vpm) <- koperators.partialMapFunctions zip voperators.partialMapFunctions 51 | } yield new PartialFunction[(K, V), (K, V)] { 52 | def isDefinedAt(kv: (K, V)) = kpm.isDefinedAt(kv._1) && vpm.isDefinedAt(kv._2) 53 | def apply(kv: (K, V)) = (kpm(kv._1), vpm(kv._2)) 54 | } 55 | 56 | def flatMapFunctions = for { 57 | (kfm, vfm) <- koperators.flatMapFunctions zip voperators.flatMapFunctions 58 | } yield new Function1[(K, V), Iterable[(K, V)]] { 59 | def apply(kv: (K, V)) = kfm(kv._1) zip vfm(kv._2) 60 | } 61 | 62 | def filterPredicates = zipPredicates(koperators.filterPredicates, voperators.filterPredicates) 63 | 64 | def filterNotPredicates = filterPredicates 65 | 66 | def partitionPredicates = filterPredicates 67 | 68 | def takeWhilePredicates = zipPredicates(koperators.takeWhilePredicates, voperators.takeWhilePredicates) 69 | 70 | def dropWhilePredicates = takeWhilePredicates 71 | 72 | def spanPredicates = takeWhilePredicates 73 | 74 | def foldArguments = for { 75 | ((kinit, kop), (vinit, vop)) <- koperators.foldArguments zip voperators.foldArguments 76 | } yield ((kinit, vinit), new Function2[(K, V), (K, V), (K, V)] { 77 | def apply(kv1: (K, V), kv2: (K, V)) = (kop(kv1._1, kv2._1), vop(kv1._2, kv2._2)) 78 | }) 79 | 80 | def addAllIterables = for { 81 | (kt, vt) <- koperators.addAllIterables zip voperators.addAllIterables 82 | } yield kt zip vt 83 | 84 | def newArray(sz: Int) = new Array[(K, V)](sz) 85 | 86 | def groupByFunctions = (koperators.groupByFunctions zip voperators.groupByFunctions) map { 87 | opt => { (p: (K, V)) => (opt._1(p._1), opt._2(p._2)) } 88 | } 89 | 90 | } 91 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/PairValues.scala: 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0).
8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.parallel.ops 14 | 15 | import org.scalacheck._ 16 | import org.scalacheck.Gen 17 | import org.scalacheck.Gen._ 18 | import org.scalacheck.Prop._ 19 | import org.scalacheck.Properties 20 | import org.scalacheck.Arbitrary._ 21 | 22 | trait PairValues[K, V] { 23 | def kvalues: Seq[Gen[K]] 24 | def vvalues: Seq[Gen[V]] 25 | 26 | def values = for { 27 | kg <- kvalues 28 | vg <- vvalues 29 | } yield for { 30 | k <- kg 31 | v <- vg 32 | } yield (k, v) 33 | } 34 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelArrayCheck.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.parallel 14 | package mutable 15 | 16 | import org.scalacheck._ 17 | import org.scalacheck.Gen 18 | import org.scalacheck.Gen._ 19 | import org.scalacheck.Prop._ 20 | import org.scalacheck.Properties 21 | import org.scalacheck.Arbitrary._ 22 | 23 | import scala.collection._ 24 | import scala.collection.parallel.ops._ 25 | 26 | abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("ParArray[" + tp + "]") { 27 | // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) 28 | // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) 29 | 30 | type CollType = ParArray[T] 31 | 32 | def hasStrictOrder = true 33 | 34 | def tasksupport: TaskSupport 35 | 36 | def ofSize(vals: Seq[Gen[T]], sz: Int) = { 37 | val a = new mutable.ArrayBuffer[T](sz) 38 | val gen = vals(rnd.nextInt(vals.size)) 39 | for (i <- 0 until sz) a += sample(gen) 40 | a 41 | } 42 | 43 | def fromSeq(a: Seq[T]) = { 44 | val pa = new ParArray[T](a.size) 45 | pa.tasksupport = tasksupport 46 | var i = 0 47 | for (elem <- a.toList) { 48 | pa(i) = elem 49 | i += 1 50 | } 51 | pa 52 | } 53 | 54 | property("array mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => 55 | val results = for ((f, ind) <- mapFunctions.zipWithIndex) 56 | yield ("op index: " + ind) |: t.map(f).sameElements(coll.map(f)) 57 | results.reduceLeft(_ && _) 58 | } 59 | 60 | } 61 | 62 | abstract class IntParallelArrayCheck(val tasksupport: TaskSupport) extends ParallelArrayCheck[Int]("Int") with IntSeqOperators with IntValues { 63 | override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz => 64 | (0 until sz).toArray.toSeq 65 | }, sized { sz => 66 | (-sz until 0).toArray.toSeq 67 | }) 68 | } 69 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelCtrieCheck.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | package mutable 15 | 16 | import org.scalacheck._ 17 | import org.scalacheck.Gen 18 | import org.scalacheck.Gen._ 19 | import org.scalacheck.Prop._ 20 | import org.scalacheck.Properties 21 | import org.scalacheck.Arbitrary._ 22 | 23 | import scala.collection._ 24 | import scala.collection.parallel.ops._ 25 | 26 | abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParConcurrentTrieMap[" + tp + "]") { 27 | // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) 28 | // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) 29 | 30 | type CollType = ParTrieMap[K, V] 31 | 32 | def hasStrictOrder = false 33 | 34 | def tasksupport: TaskSupport 35 | 36 | def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = { 37 | val ct = new concurrent.TrieMap[K, V] 38 | val gen = vals(rnd.nextInt(vals.size)) 39 | for (i <- 0 until sz) ct += sample(gen) 40 | ct 41 | } 42 | 43 | def fromIterable(t: Iterable[(K, V)]) = { 44 | val pct = new ParTrieMap[K, V] 45 | pct.tasksupport = tasksupport 46 | var i = 0 47 | for (kv <- t.toList) { 48 | pct += kv 49 | i += 1 50 | } 51 | pct 52 | } 53 | 54 | } 55 | 56 | abstract class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport) extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int") 57 | with PairOperators[Int, Int] 58 | with PairValues[Int, Int] 59 | { 60 | def intvalues = new IntValues {} 61 | def kvalues = intvalues.values 62 | def vvalues = intvalues.values 63 | 64 | val intoperators = new IntOperators {} 65 | def voperators = intoperators 66 | def koperators = intoperators 67 | 68 | override def printDataStructureDebugInfo(ds: AnyRef) = ds match { 69 | case pm: ParTrieMap[k, v] => 70 | println("Mutable parallel ctrie") 71 | case _ => 72 | println("could not match data structure type: " + ds.getClass) 73 | } 74 | 75 | override def checkDataStructureInvariants(orig: Iterable[(Int, Int)], ds: AnyRef) = ds match { 76 | // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster 77 | // val invs = pm.brokenInvariants 78 | 79 | // val containsall = (for ((k, v) <- orig) yield { 80 | // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true 81 | // else { 82 | // println("Does not contain original element: " + (k, v)) 83 | // false 84 | // } 85 | // }).foldLeft(true)(_ && _) 86 | 87 | 88 | // if (invs.isEmpty) containsall 89 | // else { 90 | // println("Invariants broken:\n" + invs.mkString("\n")) 91 | // false 92 | // } 93 | case _ => true 94 | } 95 | 96 | } 97 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelHashMapCheck.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | package mutable 15 | 16 | import org.scalacheck._ 17 | import org.scalacheck.Gen 18 | import org.scalacheck.Gen._ 19 | import org.scalacheck.Prop._ 20 | import org.scalacheck.Properties 21 | import org.scalacheck.Arbitrary._ 22 | 23 | import scala.collection._ 24 | import scala.collection.parallel.ops._ 25 | 26 | abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParHashMap[" + tp + "]") { 27 | // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) 28 | // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) 29 | 30 | type CollType = ParHashMap[K, V] 31 | 32 | def hasStrictOrder = false 33 | 34 | def tasksupport: TaskSupport 35 | 36 | def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = { 37 | val hm = new mutable.HashMap[K, V] 38 | val gen = vals(rnd.nextInt(vals.size)) 39 | for (i <- 0 until sz) hm += sample(gen) 40 | hm 41 | } 42 | 43 | def fromIterable(t: Iterable[(K, V)]) = { 44 | val phm = new ParHashMap[K, V] 45 | phm.tasksupport = tasksupport 46 | var i = 0 47 | for (kv <- t.toList) { 48 | phm += kv 49 | i += 1 50 | } 51 | phm 52 | } 53 | 54 | } 55 | 56 | abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int") 57 | with PairOperators[Int, Int] 58 | with PairValues[Int, Int] 59 | { 60 | def intvalues = new IntValues {} 61 | def kvalues = intvalues.values 62 | def vvalues = intvalues.values 63 | 64 | val intoperators = new IntOperators {} 65 | def voperators = intoperators 66 | def koperators = intoperators 67 | 68 | override def printDataStructureDebugInfo(ds: AnyRef) = ds match { 69 | case pm: ParHashMap[k, v] => 70 | println("Mutable parallel hash map\n" + pm.hashTableContents.debugInformation) 71 | case _ => 72 | println("could not match data structure type: " + ds.getClass) 73 | } 74 | 75 | override def checkDataStructureInvariants(orig: Iterable[(Int, Int)], ds: AnyRef) = ds match { 76 | // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster 77 | // val invs = pm.brokenInvariants 78 | 79 | // val containsall = (for ((k, v) <- orig) yield { 80 | // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true 81 | // else { 82 | // println("Does not contain original element: " + (k, v)) 83 | // false 84 | // } 85 | // }).foldLeft(true)(_ && _) 86 | 87 | 88 | // if (invs.isEmpty) containsall 89 | // else { 90 | // println("Invariants broken:\n" + invs.mkString("\n")) 91 | // false 92 | // } 93 | case _ => true 94 | } 95 | 96 | } 97 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelHashSetCheck.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | package mutable 15 | 16 | import org.scalacheck._ 17 | import org.scalacheck.Gen 18 | import org.scalacheck.Gen._ 19 | import org.scalacheck.Prop._ 20 | import org.scalacheck.Properties 21 | import org.scalacheck.Arbitrary._ 22 | 23 | import scala.collection._ 24 | import scala.collection.parallel.ops._ 25 | 26 | abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("mutable.ParHashSet[" + tp + "]") { 27 | // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) 28 | // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) 29 | 30 | type CollType = ParHashSet[T] 31 | 32 | def hasStrictOrder = false 33 | 34 | def tasksupport: TaskSupport 35 | 36 | def ofSize(vals: Seq[Gen[T]], sz: Int) = { 37 | val hm = new mutable.HashSet[T] 38 | val gen = vals(rnd.nextInt(vals.size)) 39 | for (i <- 0 until sz) hm += sample(gen) 40 | hm 41 | } 42 | 43 | def fromIterable(t: Iterable[T]) = { 44 | val phs = new ParHashSet[T] 45 | phs.tasksupport = tasksupport 46 | var i = 0 47 | for (kv <- t.toList) { 48 | phs += kv 49 | i += 1 50 | } 51 | phs 52 | } 53 | 54 | } 55 | 56 | abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int") 57 | with IntOperators 58 | with IntValues 59 | { 60 | override def printDataStructureDebugInfo(ds: AnyRef) = ds match { 61 | case pm: ParHashSet[t] => 62 | println("Mutable parallel hash set") 63 | case _ => 64 | println("could not match data structure type: " + ds.getClass) 65 | } 66 | 67 | override def checkDataStructureInvariants(orig: Iterable[Int], ds: AnyRef) = ds match { 68 | // case pm: ParHashSet[t] if 1 == 0 => 69 | // // for an example of how not to write code proceed below 70 | // val invs = pm.brokenInvariants 71 | 72 | // val containsall = (for (elem <- orig) yield { 73 | // if (pm.asInstanceOf[ParHashSet[Int]](elem) == true) true 74 | // else { 75 | // println("Does not contain original element: " + elem) 76 | // println(pm.hashTableContents.table.find(_ == elem)) 77 | // println(pm.hashTableContents.table.indexOf(elem)) 78 | // false 79 | // } 80 | // }).foldLeft(true)(_ && _) 81 | 82 | 83 | // if (invs.isEmpty) { 84 | // if (!containsall) println(pm.debugInformation) 85 | // containsall 86 | // } else { 87 | // println("Invariants broken:\n" + invs.mkString("\n")) 88 | // false 89 | // } 90 | case _ => true 91 | } 92 | 93 | } 94 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelHashTrieCheck.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 
11 | */ 12 | 13 | package scala.collection.parallel 14 | package immutable 15 | 16 | import org.scalacheck._ 17 | import org.scalacheck.Gen 18 | import org.scalacheck.Gen._ 19 | import org.scalacheck.Prop._ 20 | import org.scalacheck.Properties 21 | import org.scalacheck.Arbitrary._ 22 | 23 | import scala.collection._ 24 | import scala.collection.parallel.ops._ 25 | 26 | abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("immutable.ParHashMap[" + tp + "]") { 27 | // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) 28 | // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) 29 | 30 | type CollType = ParHashMap[K, V] 31 | 32 | def hasStrictOrder = false 33 | 34 | def tasksupport: TaskSupport 35 | 36 | def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = { 37 | var hm = new immutable.HashMap[K, V] 38 | val gen = vals(rnd.nextInt(vals.size)) 39 | for (i <- 0 until sz) hm += sample(gen) 40 | hm 41 | } 42 | 43 | def fromIterable(t: Iterable[(K, V)]) = { 44 | var phm = new ParHashMap[K, V] 45 | phm.tasksupport = tasksupport 46 | var i = 0 47 | for (kv <- t.toList) { 48 | phm += kv 49 | i += 1 50 | } 51 | phm 52 | } 53 | 54 | } 55 | 56 | abstract class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int") 57 | with PairOperators[Int, Int] 58 | with PairValues[Int, Int] 59 | { 60 | def intvalues = new IntValues {} 61 | def kvalues = intvalues.values 62 | def vvalues = intvalues.values 63 | 64 | val intoperators = new IntOperators {} 65 | def voperators = intoperators 66 | def koperators = intoperators 67 | 68 | override def printDataStructureDebugInfo(ds: AnyRef) = ds match { 69 | case pm: ParHashMap[k, v] => 70 | pm.printDebugInfo() 71 | case _ => 72 | println("could not match data structure type: " + ds.getClass) 73 | } 74 | } 75 | 76 | abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("immutable.ParHashSet[" + tp + "]") { 77 | // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) 78 | // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) 79 | 80 | type CollType = ParHashSet[T] 81 | 82 | def hasStrictOrder = false 83 | 84 | def tasksupport: TaskSupport 85 | 86 | def ofSize(vals: Seq[Gen[T]], sz: Int) = { 87 | var hm = immutable.OldHashSet.empty[T] 88 | val gen = vals(rnd.nextInt(vals.size)) 89 | for (i <- 0 until sz) hm += sample(gen) 90 | hm 91 | } 92 | 93 | def fromIterable(t: Iterable[T]) = { 94 | var phs = new ParHashSet[T] 95 | phs.tasksupport = tasksupport 96 | var i = 0 97 | for (kv <- t.toList) { 98 | phs += kv 99 | i += 1 100 | } 101 | phs 102 | } 103 | 104 | override def printDataStructureDebugInfo(ds: AnyRef) = ds match { 105 | case pm: ParHashSet[t] => 106 | println("Parallel hash set") 107 | case _ => 108 | println("could not match data structure type: " + ds.getClass) 109 | } 110 | 111 | } 112 | 113 | abstract class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int") 114 | with IntOperators 115 | with IntValues 116 | { 117 | def intvalues = new IntValues {} 118 | def kvalues = intvalues.values 119 | def vvalues = intvalues.values 120 | 121 | override def printDataStructureDebugInfo(ds: AnyRef) = ds match { 122 | case ps: ParHashSet[t] => 123 | println("Parallel hash set") 124 | case _ => 125 | println("could not match data structure type: " + ds.getClass) 126 | }
127 | } 128 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelMapCheck1.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.parallel 14 | 15 | import org.scalacheck._ 16 | import org.scalacheck.Gen 17 | import org.scalacheck.Gen._ 18 | import org.scalacheck.Prop._ 19 | import org.scalacheck.Properties 20 | 21 | import scala.collection._ 22 | import scala.collection.parallel._ 23 | 24 | abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterableCheck[(K, V)](collname) { 25 | type CollType <: ParMap[K, V] 26 | 27 | property("gets iterated keys") = forAllNoShrink(collectionPairs) { 28 | case (t, coll) => 29 | val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v)) 30 | val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) } 31 | ("Par contains elements of seq map" |: containsT.forall(_ == true)) && 32 | ("Par contains elements of itself" |: containsSelf.forall(_ == true)) 33 | } 34 | 35 | override def collectionPairs: Gen[(Map[K, V], CollType)] = 36 | super.collectionPairs.map { case (iterable, parmap) => 37 | (iterable.to(Map), parmap) 38 | } 39 | 40 | override def collectionTriplets: Gen[(Map[K, V], CollType, scala.Seq[(K, V)])] = 41 | super.collectionTriplets.map { case (iterable, parmap, seq) => 42 | (iterable.to(Map), parmap, seq) 43 | } 44 | 45 | // The following tests have been copied from `ParIterableCheck`, and adapted to test 46 | // overloads of the methods that return Map and ParMap collections 47 | // They are disabled for now because this behavior is unspecified and the tests fail. 
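// A minimal sketch of the ambiguity (illustrative only, not part of the suite):
// whether a pair-producing transformation yields a ParMap or just a ParIterable
// of pairs depends on which overload of map/collect/flatMap/++ is selected, e.g.
//   val pm = scala.collection.parallel.immutable.ParMap(1 -> 2)
//   val m = pm.map { case (k, v) => (k, v + 1) }  // ParMap via the pair overload
//   val i = pm.map(kv => kv._2)                   // ParIterable[Int]
// so equality with the corresponding sequential Map result is not guaranteed.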
48 | // property("mappings returning maps must be equal") = forAll/*NoShrink*/(collectionPairs) { case (t, coll) => 49 | // val results = for ((f, ind) <- mapFunctions.zipWithIndex.take(5)) yield { 50 | // val ms: Map[K, V] = t.map(f) 51 | // val mp: ParMap[K, V] = coll.map(f) 52 | // val invs = checkDataStructureInvariants(ms, mp) 53 | // if (!areEqual(ms, mp) || !invs) { 54 | // println(t) 55 | // println(coll) 56 | // println("mapped to: ") 57 | // println(ms) 58 | // println(mp) 59 | // println("sizes: ") 60 | // println(ms.size) 61 | // println(mp.size) 62 | // println("valid: " + invs) 63 | // } 64 | // ("op index: " + ind) |: (areEqual(ms, mp) && invs) 65 | // } 66 | // results.reduceLeft(_ && _) 67 | // } 68 | // 69 | // property("collects returning maps must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => 70 | // val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield { 71 | // val ps: Map[K, V] = t.collect(f) 72 | // val pp: ParMap[K, V] = coll.collect(f) 73 | // if (!areEqual(ps, pp)) { 74 | // println(t) 75 | // println(coll) 76 | // println("collected to: ") 77 | // println(ps) 78 | // println(pp) 79 | // } 80 | // ("op index: " + ind) |: areEqual(ps, pp) 81 | // } 82 | // results.reduceLeft(_ && _) 83 | // } 84 | // 85 | // property("flatMaps returning maps must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => 86 | // (for ((f, ind) <- flatMapFunctions.zipWithIndex) 87 | // yield ("op index: " + ind) |: { 88 | // val tf: Map[K, V] = t.flatMap(f) 89 | // val collf: ParMap[K, V] = coll.flatMap(f) 90 | // if (!areEqual(tf, collf)) { 91 | // println("----------------------") 92 | // println(s"t = $t") 93 | // println(s"coll = $coll") 94 | // println(s"tf = $tf") 95 | // println(s"collf = $collf") 96 | // } 97 | // areEqual(t.flatMap(f), coll.flatMap(f)) 98 | // }).reduceLeft(_ && _) 99 | // } 100 | // 101 | // property("++s returning maps must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) => 102 | // try { 103 | // val toadd = colltoadd 104 | // val tr: Map[K, V] = t ++ toadd.iterator 105 | // val cr: ParMap[K, V] = coll ++ toadd.iterator 106 | // if (!areEqual(tr, cr)) { 107 | // println("from: " + t) 108 | // println("and: " + coll.iterator.toList) 109 | // println("adding: " + toadd) 110 | // println(tr.toList) 111 | // println(cr.iterator.toList) 112 | // } 113 | // (s"adding " |: areEqual(tr, cr)) && 114 | // (for ((trav, ind) <- addAllIterables.zipWithIndex) yield { 115 | // val tadded: Map[K, V] = t ++ trav 116 | // val cadded: ParMap[K, V] = coll ++ trav 117 | // if (!areEqual(tadded, cadded)) { 118 | // println("----------------------") 119 | // println("from: " + t) 120 | // println("and: " + coll) 121 | // println("adding: " + trav) 122 | // println(tadded) 123 | // println(cadded) 124 | // } 125 | // ("traversable " + ind) |: areEqual(tadded, cadded) 126 | // }).reduceLeft(_ && _) 127 | // } catch { 128 | // case e: java.lang.Exception => 129 | // throw e 130 | // } 131 | // } 132 | 133 | } 134 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelRangeCheck.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 
8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.parallel 14 | package immutable 15 | 16 | import org.scalacheck._ 17 | import org.scalacheck.Gen 18 | import org.scalacheck.Gen._ 19 | import org.scalacheck.Prop._ 20 | import org.scalacheck.Properties 21 | import org.scalacheck.Arbitrary._ 22 | 23 | import scala.collection._ 24 | import scala.collection.parallel.ops._ 25 | 26 | abstract class ParallelRangeCheck(val tasksupport: TaskSupport) extends ParallelSeqCheck[Int]("ParallelRange[Int]") with ops.IntSeqOperators { 27 | // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) 28 | // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) 29 | 30 | type CollType = collection.parallel.ParSeq[Int] 31 | 32 | def hasStrictOrder = true 33 | 34 | def ofSize(vals: Seq[Gen[Int]], sz: Int) = throw new UnsupportedOperationException 35 | 36 | override def instances(vals: Seq[Gen[Int]]): Gen[Seq[Int]] = sized { start => 37 | sized { end => 38 | sized { step => 39 | Range(start, end, if (step != 0) step else 1) 40 | } 41 | } 42 | } 43 | 44 | def fromSeq(a: Seq[Int]) = a match { 45 | case r: Range => 46 | val pr = ParRange(r.start, r.end, r.step, false) 47 | pr.tasksupport = tasksupport 48 | pr 49 | case _ => 50 | val pa = new parallel.mutable.ParArray[Int](a.length) 51 | pa.tasksupport = tasksupport 52 | for (i <- 0 until a.length) pa(i) = a(i) 53 | pa 54 | } 55 | 56 | def values = Seq(choose(-100, 100)) 57 | 58 | } 59 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelSetCheck.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.parallel 14 | 15 | import org.scalacheck._ 16 | import org.scalacheck.Gen 17 | import org.scalacheck.Gen._ 18 | import org.scalacheck.Prop._ 19 | import org.scalacheck.Properties 20 | 21 | import scala.collection._ 22 | import scala.collection.parallel._ 23 | 24 | abstract class ParallelSetCheck[T](collname: String) extends ParallelIterableCheck[T](collname) { 25 | 26 | type CollType <: ParSet[T] 27 | 28 | property("gets iterated keys") = forAllNoShrink(collectionPairs) { 29 | case (t, coll) => 30 | val containsT = for (elem <- t) yield (coll.contains(elem)) 31 | val containsSelf = for (elem <- coll) yield (coll.contains(elem)) 32 | ("Par contains elements of seq map" |: containsT.forall(_ == true)) && 33 | ("Par contains elements of itself" |: containsSelf.forall(_ == true)) 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelVectorCheck.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 
8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package scala.collection.parallel 14 | 15 | import org.scalacheck._ 16 | import org.scalacheck.Gen 17 | import org.scalacheck.Gen._ 18 | import org.scalacheck.Prop._ 19 | import org.scalacheck.Properties 20 | 21 | import scala.collection._ 22 | import scala.collection.parallel._ 23 | 24 | abstract class ParallelSetCheck[T](collname: String) extends ParallelIterableCheck[T](collname) { 25 | 26 | type CollType <: ParSet[T] 27 | 28 | property("gets iterated elements") = forAllNoShrink(collectionPairs) { 29 | case (t, coll) => 30 | val containsT = for (elem <- t) yield (coll.contains(elem)) 31 | val containsSelf = for (elem <- coll) yield (coll.contains(elem)) 32 | ("Par contains elements of seq set" |: containsT.forall(_ == true)) && 33 | ("Par contains elements of itself" |: containsSelf.forall(_ == true)) 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /scalacheck/src/test/scala/ParallelVectorCheck.scala: 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0).
11 | */ 12 | 13 | import org.scalacheck._ 14 | import scala.collection.parallel._ 15 | 16 | // package here to be able to access the package-private implementation and shut down the pool 17 | package scala { 18 | 19 | abstract class ParCollProperties extends Properties("Parallel collections") { 20 | 21 | // Included tests have to be abstract classes, otherwise sbt tries to instantiate them on its own and fails 22 | def includeAllTestsWith(support: TaskSupport): Unit = { 23 | // parallel arrays with the given task support 24 | include(new mutable.IntParallelArrayCheck(support) {}) 25 | 26 | // parallel ranges 27 | include(new immutable.ParallelRangeCheck(support) {}) 28 | 29 | // parallel immutable hash maps (tries) 30 | include(new immutable.IntIntParallelHashMapCheck(support) {}) 31 | 32 | // parallel immutable hash sets (tries) 33 | include(new immutable.IntParallelHashSetCheck(support) {}) 34 | 35 | // parallel mutable hash maps (tables) 36 | include(new mutable.IntIntParallelHashMapCheck(support) {}) 37 | 38 | // parallel ctrie 39 | include(new mutable.IntIntParallelConcurrentTrieMapCheck(support) {}) 40 | 41 | // parallel mutable hash sets (tables) 42 | include(new mutable.IntParallelHashSetCheck(support) {}) 43 | 44 | // parallel vectors 45 | include(new immutable.IntParallelVectorCheck(support) {}) 46 | } 47 | 48 | includeAllTestsWith(defaultTaskSupport) 49 | 50 | val ec = scala.concurrent.ExecutionContext.fromExecutorService(java.util.concurrent.Executors.newFixedThreadPool(5)) 51 | val ectasks = new collection.parallel.ExecutionContextTaskSupport(ec) 52 | includeAllTestsWith(ectasks) 53 | 54 | // no post-test hooks in scalacheck, so cannot do: 55 | // ec.shutdown() 56 | 57 | } 58 | 59 | } 60 | 61 | object Test extends scala.ParCollProperties { 62 | /* 63 | def main(args: Array[String]) { 64 | val pc = new ParCollProperties 65 | org.scalacheck.Test.checkProperties( 66 | org.scalacheck.Test.Params( 67 | rng = new java.util.Random(5134L), 68 | testCallback = new ConsoleReporter(0), 69 | workers = 1, 70 | minSize = 0, 71 | maxSize = 4000, 72 | minSuccessfulTests = 5 73 | ), 74 | pc 75 | ) 76 | } 77 | */ 78 | } 79 | -------------------------------------------------------------------------------- /testmacros/src/main/scala-2/testutil/ShouldNotTypecheck.scala: 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. dba Akka 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package testutil 14 | 15 | import scala.language.experimental.macros 16 | import scala.reflect.macros.blackbox.Context 17 | import scala.reflect.macros.TypecheckException 18 | import java.util.regex.Pattern 19 | 20 | /** 21 | * A macro that ensures that a code snippet does not typecheck.
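 * A hypothetical usage sketch (snippets illustrative, not from this repository):
 *   ShouldNotTypecheck("""val i: Int = "foo"""")
 *   ShouldNotTypecheck("""val i: Int = "foo"""", "type mismatch.*")
 * The optional second argument is compiled as a case-insensitive, DOTALL regex
 * that must match the reported type error in full.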
22 | */ 23 | object ShouldNotTypecheck { 24 | def apply(code: String): Unit = macro applyImplNoExp 25 | def apply(code: String, expected: String): Unit = macro applyImpl 26 | 27 | def applyImplNoExp(ctx: Context)(code: ctx.Expr[String]) = applyImpl(ctx)(code, null) 28 | 29 | def applyImpl(ctx: Context)(code: ctx.Expr[String], expected: ctx.Expr[String]): ctx.Expr[Unit] = { 30 | import ctx.universe._ 31 | 32 | val Expr(Literal(Constant(codeStr: String))) = code: @unchecked 33 | val (expPat, expMsg) = (expected: @unchecked) match { 34 | case null => (null, "Expected some error.") 35 | case Expr(Literal(Constant(s: String))) => 36 | (Pattern.compile(s, Pattern.CASE_INSENSITIVE | Pattern.DOTALL), "Expected error matching: "+s) 37 | } 38 | 39 | try ctx.typecheck(ctx.parse("{ "+codeStr+" }")) catch { case e: TypecheckException => 40 | val msg = e.getMessage 41 | if((expected ne null) && !(expPat.matcher(msg)).matches) 42 | ctx.abort(ctx.enclosingPosition, "Type-checking failed in an unexpected way.\n"+ 43 | expMsg+"\nActual error: "+msg) 44 | else return reify(()) 45 | } 46 | 47 | ctx.abort(ctx.enclosingPosition, "Type-checking succeeded unexpectedly.\n"+expMsg) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /testmacros/src/main/scala-3/testutil/ShouldNotTypecheck.scala: 1 | /* 2 | * Scala (https://www.scala-lang.org) 3 | * 4 | * Copyright EPFL and Lightbend, Inc. 5 | * 6 | * Licensed under Apache License 2.0 7 | * (http://www.apache.org/licenses/LICENSE-2.0). 8 | * 9 | * See the NOTICE file distributed with this work for 10 | * additional information regarding copyright ownership. 11 | */ 12 | 13 | package testutil 14 | 15 | import scala.compiletime.testing._ 16 | 17 | /** 18 | * Ensures that a code snippet does not typecheck. 19 | */ 20 | object ShouldNotTypecheck { 21 | inline def apply(code: String): Unit = assert(!typeChecks(code)) 22 | inline def apply(code: String, expected: String): Unit = apply(code) // expected message is ignored on Scala 3: typeChecks only reports whether the snippet compiles 23 | } 24 | --------------------------------------------------------------------------------
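A brief sketch of the tasksupport plumbing the suites above exercise, assembled from TaskTest and pc.scala (illustrative only, not a file in this repository):

// Scala
import scala.collection.parallel._
import scala.collection.parallel.CollectionConverters._
import java.util.concurrent.ForkJoinPool

object TaskSupportSketch {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3).par
    // route this collection's operations through a dedicated two-thread pool
    xs.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(2))
    val ys = xs.map(_ + 1)
    // derived collections keep the assigned task support (see `t152 pass on task support`)
    assert(ys.tasksupport eq xs.tasksupport)
  }
}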