├── settings.gradle
├── scala-repl
│   ├── src
│   │   ├── main
│   │   │   └── scala
│   │   │       ├── org
│   │   │       │   └── jetbrains
│   │   │       │       └── ztools
│   │   │       │           └── scala
│   │   │       │               ├── reference
│   │   │       │               │   ├── ReferenceWrapper.scala
│   │   │       │               │   └── ReferenceManager.scala
│   │   │       │               ├── interpreter
│   │   │       │               │   ├── ScalaVariableInfo.scala
│   │   │       │               │   ├── InterpreterHandler.scala
│   │   │       │               │   └── ZtoolsInterpreterWrapper.scala
│   │   │       │               ├── handlers
│   │   │       │               │   ├── impls
│   │   │       │               │   │   ├── SpecialsHandler.scala
│   │   │       │               │   │   ├── NullHandler.scala
│   │   │       │               │   │   ├── SeqHandler.scala
│   │   │       │               │   │   ├── SetHandler.scala
│   │   │       │               │   │   ├── ArrayHandler.scala
│   │   │       │               │   │   ├── StringHandler.scala
│   │   │       │               │   │   ├── JavaCollectionHandler.scala
│   │   │       │               │   │   ├── ThrowableHandler.scala
│   │   │       │               │   │   ├── PrimitiveHandler.scala
│   │   │       │               │   │   ├── AbstractCollectionHandler.scala
│   │   │       │               │   │   ├── AbstractTypeHandler.scala
│   │   │       │               │   │   ├── MapHandler.scala
│   │   │       │               │   │   └── ObjectHandler.scala
│   │   │       │               │   ├── HandlerWrapper.scala
│   │   │       │               │   └── HandlerManager.scala
│   │   │       │               ├── core
│   │   │       │               │   ├── Loopback.scala
│   │   │       │               │   ├── ResNames.scala
│   │   │       │               │   ├── TypeHandler.scala
│   │   │       │               │   └── TrieMap.scala
│   │   │       │               └── VariablesView.scala
│   │   │       └── spark
│   │   │           ├── handlers
│   │   │           │   ├── SparkSessionHandler.scala
│   │   │           │   ├── SparkContextHandler.scala
│   │   │           │   ├── RDDHandler.scala
│   │   │           │   └── DatasetHandler.scala
│   │   │           ├── ztoolsbasic.sc
│   │   │           └── ztools.sc
│   │   └── test
│   │       └── scala
│   │           ├── org
│   │           │   └── jetbrains
│   │           │       └── ztools
│   │           │           └── scala
│   │           │               ├── TrieMapTest.scala
│   │           │               ├── ReplAware.scala
│   │           │               └── VariablesViewImplTest.scala
│   │           └── spark
│   │               └── SparkHandlersTest.scala
│   └── build.gradle
├── .gitignore
├── LICENSE_HEADER
├── spark-all
│   └── build.gradle
├── .run
│   ├── SparkHandlersTest (1).run.xml
│   └── VariablesViewImplTest.run.xml
├── README.md
└── LICENSE
/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'ztools'
2 | include(':ztools-scala-repl')
3 | include(':ztools-spark-all')
4 | project(':ztools-scala-repl').projectDir = file('scala-repl')
5 | project(':ztools-spark-all').projectDir = file('spark-all')
6 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/reference/ReferenceWrapper.scala:
--------------------------------------------------------------------------------
1 | package org.jetbrains.ztools.scala.reference
2 |
3 | class ReferenceWrapper(val ref: AnyRef) {
4 | override def hashCode(): Int = ref.hashCode()
5 |
6 | override def equals(obj: Any): Boolean = obj match {
7 | case value: ReferenceWrapper =>
8 | ref.eq(value.ref)
9 | case _ => false
10 | }
11 | }
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | !.idea/codeStyles/codeStyleConfig.xml
3 | *.iml
4 | */target/**
5 | .gradle
6 | build/
7 | gradle/wrapper
8 | gradlew
9 | gradlew.bat
10 | # Ignore Gradle GUI config
11 | gradle-app.setting
12 |
13 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
14 | !gradle-wrapper.jar
15 |
16 | # Cache of project
17 | .gradletasknamecache
18 |
19 | ### Gradle Patch ###
20 | **/build/
21 |
22 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/interpreter/ScalaVariableInfo.scala:
--------------------------------------------------------------------------------
1 | package org.jetbrains.ztools.scala.interpreter
2 |
3 | case class ScalaVariableInfo(isAccessible: Boolean,
4 | isLazy: Boolean,
5 | value: Any,
6 | tpe: String,
7 | path: String,
8 | ref: String) {
9 | val name: String = if (path != null)
10 | path.substring(path.lastIndexOf('.') + 1)
11 | else
12 | null
13 | }
14 |
--------------------------------------------------------------------------------
/LICENSE_HEADER:
--------------------------------------------------------------------------------
1 | Copyright 2020 Jetbrains s.r.o.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
--------------------------------------------------------------------------------
/scala-repl/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'scala'
3 | id 'java'
4 | id "com.github.hierynomus.license" version "0.15.0"
5 | }
6 | license {
7 | header rootProject.file('LICENSE_HEADER')
8 | }
9 | dependencies {
10 | implementation 'org.scala-lang:scala-compiler:2.11.8'
11 | testImplementation 'junit:junit:4.12'
12 | compileOnly 'org.apache.spark:spark-core_2.11:2.3.1'
13 | compileOnly 'org.apache.spark:spark-sql_2.11:2.3.1'
14 | testImplementation 'org.apache.spark:spark-core_2.11:2.3.1'
15 | testImplementation 'org.apache.spark:spark-sql_2.11:2.3.1'
16 | testImplementation 'org.apache.spark:spark-hive_2.11:2.3.1'
17 | }
18 |
19 | sourceCompatibility = '1.8'
20 |
21 | description = 'ztools-scala-repl'
22 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/SpecialsHandler.scala:
--------------------------------------------------------------------------------
1 | package org.jetbrains.ztools.scala.handlers.impls
2 |
3 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
4 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
5 |
6 | import scala.collection.mutable
7 |
8 | class SpecialsHandler(limit: Int) extends AbstractTypeHandler {
9 | override def accept(obj: Any): Boolean = obj.getClass.getCanonicalName != null && obj.getClass.getCanonicalName.startsWith("scala.")
10 |
11 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = withJsonObject {
12 | json =>
13 | json.put(ResNames.VALUE, scalaInfo.value.toString.take(limit))
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/interpreter/InterpreterHandler.scala:
--------------------------------------------------------------------------------
1 | package org.jetbrains.ztools.scala.interpreter
2 |
3 | import scala.collection.immutable
4 | import scala.tools.nsc.interpreter.IMain
5 |
6 | class InterpreterHandler(val interpreter: IMain) {
7 | val wrapper = new ZtoolsInterpreterWrapper(interpreter)
8 |
9 | def getVariableNames: immutable.Seq[String] =
10 | interpreter.definedSymbolList.filter { x => x.isGetter }.map(_.name.toString).distinct
11 |
12 | def getInfo(name: String, tpe: String): ScalaVariableInfo = {
13 | val obj = valueOfTerm(name).orNull
14 | ScalaVariableInfo(isAccessible = true, isLazy = false, obj, tpe, name, null)
15 | }
16 |
17 | def valueOfTerm(id: String): Option[Any] = wrapper.valueOfTerm(id)
18 | }
19 |
--------------------------------------------------------------------------------
/spark-all/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'java'
3 | id 'com.github.johnrengelman.shadow' version '5.2.0'
4 | id 'maven-publish'
5 | id "com.github.hierynomus.license" version "0.15.0"
6 | }
7 | dependencies {
8 | implementation project(':ztools-scala-repl')
9 | }
10 |
11 | test.dependsOn (':ztools-scala-repl:test')
12 | licenseFormat.dependsOn(':ztools-scala-repl:licenseFormat')
13 |
14 | tasks.jar.configure {
15 | classifier = 'default'
16 | }
17 |
18 | shadowJar {
19 | dependencies {
20 | exclude("org.scala*", "org.apache.spark:*", "scala*")
21 | }
22 | classifier = null
23 | }
24 |
25 | tasks.withType(AbstractArchiveTask) {
26 | preserveFileTimestamps = false
27 | reproducibleFileOrder = true
28 | }
29 |
30 | publishing {
31 | publications {
32 | shadow(MavenPublication) { publication ->
33 | project.shadow.component(publication)
34 | }
35 | }
36 | repositories {
37 | maven {
38 | url "https://packages.jetbrains.team/maven/p/bdt/bigdatatools"
39 | }
40 | }
41 | }
42 |
43 | build.finalizedBy(shadowJar)
--------------------------------------------------------------------------------
/.run/SparkHandlersTest (1).run.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 | false
21 | true
22 | false
23 |
24 |
25 |
--------------------------------------------------------------------------------
/.run/VariablesViewImplTest.run.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 | false
21 | true
22 | false
23 |
24 |
25 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/NullHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | import org.jetbrains.ztools.scala.core.Loopback
19 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
20 |
21 | import scala.collection.mutable
22 |
23 | class NullHandler extends AbstractTypeHandler {
24 | override def accept(obj: Any): Boolean = obj == null
25 |
26 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
27 | mutable.Map[String, Any]()
28 | }
29 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/SeqHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | class SeqHandler(limit: Int, timeout: Int) extends AbstractCollectionHandler(limit, timeout) {
19 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Seq[_]]
20 |
21 | override def iterator(obj: Any): Iterator = new Iterator {
22 | private val it = obj.asInstanceOf[Seq[_]].iterator
23 |
24 | override def hasNext: Boolean = it.hasNext
25 |
26 | override def next: Any = it.next()
27 | }
28 |
29 | override def length(obj: Any): Int = obj.asInstanceOf[Seq[_]].size
30 | }
31 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/SetHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
 18 | class SetHandler(limit: Int, timeout: Int) extends AbstractCollectionHandler(limit, timeout) {
19 | override def iterator(obj: Any): Iterator = new Iterator {
20 | private val it = obj.asInstanceOf[Set[_]].iterator
21 |
22 | override def hasNext: Boolean = it.hasNext
23 |
24 | override def next: Any = it.next()
25 | }
26 |
27 | override def length(obj: Any): Int = obj.asInstanceOf[Set[_]].size
28 |
29 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Set[_]]
30 | }
31 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/ArrayHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | class ArrayHandler(limit: Int, timeout: Int) extends AbstractCollectionHandler(limit, timeout) {
19 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Array[_]]
20 |
21 | override def length(obj: Any): Int = obj.asInstanceOf[Array[_]].length
22 |
23 | override def iterator(obj: Any): Iterator = new Iterator {
24 | private val it = obj.asInstanceOf[Array[_]].iterator
25 |
26 | override def hasNext: Boolean = it.hasNext
27 |
28 | override def next: Any = it.next
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/StringHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
19 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
20 |
21 | import scala.collection.mutable
22 |
23 | class StringHandler(limit: Int) extends AbstractTypeHandler {
24 | override def accept(obj: Any): Boolean = obj.isInstanceOf[String]
25 |
26 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
27 | mutable.Map(
28 | ResNames.VALUE -> scalaInfo.value.asInstanceOf[String].take(limit)
29 | )
30 | }
31 |
32 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/JavaCollectionHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | import java.util
19 |
 20 | class JavaCollectionHandler(limit: Int, timeout: Int) extends AbstractCollectionHandler(limit, timeout) {
21 | override def accept(obj: Any): Boolean = obj.isInstanceOf[util.Collection[_]]
22 |
23 | override def iterator(obj: Any): Iterator = new Iterator() {
24 | private val it = obj.asInstanceOf[util.Collection[_]].iterator()
25 |
26 | override def hasNext: Boolean = it.hasNext
27 |
28 | override def next: Any = it.next()
29 | }
30 |
31 | override def length(obj: Any): Int = obj.asInstanceOf[util.Collection[_]].size()
32 | }
33 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/core/Loopback.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
 15 |  */
30 | package org.jetbrains.ztools.scala.core
31 |
32 | trait Loopback {
33 | def pass(obj: Any, id: String): Any
34 | }
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/HandlerWrapper.scala:
--------------------------------------------------------------------------------
1 | package org.jetbrains.ztools.scala.handlers
2 |
3 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames, TypeHandler}
4 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
5 |
6 | import scala.collection.mutable
7 |
8 | class HandlerWrapper(val handler: TypeHandler, profile: Boolean) {
9 | def accept(info: ScalaVariableInfo): Boolean = info.isLazy || handler.accept(info.value)
10 |
11 | def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int, initStartTime: Long): Any = {
 12 |     // initStartTime is a primitive Long and can never be null; callers always supply
 13 |     // a start time (HandlerManager defaults it to System.currentTimeMillis()),
 14 |     // so it is used directly.
 15 |     val startTime = initStartTime
16 |
17 | val data = if (scalaInfo.isLazy) {
18 | mutable.Map[String, Any](ResNames.LAZY -> true)
19 | }
20 | else {
 21 |       val data = handler.handle(scalaInfo, loopback, depth)
 22 |       if (data.contains(ResNames.IS_PRIMITIVE)) {
23 | return data(ResNames.VALUE)
24 | }
25 | data
26 | }
27 |
28 | data.put(ResNames.TYPE, calculateType(scalaInfo))
29 | if (profile)
30 | data.put(ResNames.TIME, System.currentTimeMillis() - startTime)
31 |
32 | data
33 | }
34 |
35 | private def calculateType(scalaInfo: ScalaVariableInfo): String = {
36 | if (scalaInfo.tpe != null)
37 | return scalaInfo.tpe
38 |
39 | if (scalaInfo.value != null)
40 | scalaInfo.value.getClass.getCanonicalName
41 | else
42 | null
43 | }
44 | }
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/ThrowableHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
19 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
20 |
21 | import java.io.{PrintWriter, StringWriter}
22 | import scala.collection.mutable
23 |
24 | class ThrowableHandler extends AbstractTypeHandler {
25 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Throwable]
26 |
27 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = {
28 | val obj = scalaInfo.value
29 | val throwable = obj.asInstanceOf[Throwable]
30 | val writer = new StringWriter()
31 | val out = new PrintWriter(writer)
32 | throwable.printStackTrace(out)
33 |
34 | mutable.Map(
35 | ResNames.VALUE -> writer.toString
36 | )
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/core/ResNames.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
 15 |  */
30 | package org.jetbrains.ztools.scala.core
31 |
32 | object ResNames {
33 | val REF = "ref"
34 | val VALUE = "value"
35 | val IS_PRIMITIVE = "isPrimitive"
36 | val TYPE = "type"
37 | val TIME = "time"
38 | val LENGTH = "length"
39 | val LAZY = "lazy"
40 | }
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/PrimitiveHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
19 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
20 |
21 | import scala.collection.mutable
22 |
23 | class PrimitiveHandler extends AbstractTypeHandler {
24 | override def accept(obj: Any): Boolean =
25 | obj match {
26 | case _: Byte => true
27 | case _: Short => true
28 | case _: Boolean => true
29 | case _: Char => true
30 | case _: Int => true
31 | case _: Long => true
32 | case _: Float => true
33 | case _: Double => true
34 | case _ => false
35 | }
36 |
37 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
38 | mutable.Map[String, Any](
39 | ResNames.VALUE -> scalaInfo.value,
 40 |       ResNames.IS_PRIMITIVE -> true
41 | )
42 | }
43 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/reference/ReferenceManager.scala:
--------------------------------------------------------------------------------
1 | package org.jetbrains.ztools.scala.reference
2 |
3 | import org.jetbrains.ztools.scala.core.TrieMap
4 |
5 | import scala.collection.mutable
6 |
7 | class ReferenceManager {
8 | private val refMap = mutable.Map[ReferenceWrapper, String]()
9 | private val refInvMap = new TrieMap[ReferenceWrapper]()
10 |
 11 |   /**
 12 |    * Returns a reference (i.e. a valid path) to the object, or records the object in the reference maps and returns null.
 13 |    *
 14 |    * @param obj  an object we want to find a reference for (can be null)
 15 |    * @param path path of the object, e.g. myVar.myField.b
 16 |    * @return the reference path to the object obj. The method returns null if obj itself is null,
 17 |    *         if obj hasn't been seen earlier, or if obj is an AnyVal.
 18 |    */
19 | def getRef(obj: Any, path: String): String = obj match {
20 | case null | _: Unit =>
21 | clearRefIfPathExists(path)
22 | null
23 | case ref: AnyRef =>
24 | val wrapper = new ReferenceWrapper(ref)
25 | if (refMap.contains(wrapper)) {
26 | if (refInvMap.get(path).orNull != wrapper) clearRefIfPathExists(path)
27 | refMap(wrapper)
28 | } else {
29 | clearRefIfPathExists(path)
30 | refMap(wrapper) = path
31 | refInvMap.put(path, wrapper)
32 | null
33 | }
34 | case _ => null
35 | }
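  // Illustrative example of the contract described above:
  //   val refs = new ReferenceManager
  //   val xs   = List(1, 2, 3)
  //   refs.getRef(xs, "a.b")   // first sighting: the path "a.b" is recorded and null is returned
  //   refs.getRef(xs, "c.d")   // same object seen again: the previously recorded path "a.b" is returned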
36 |
37 |
38 | private def clearRefIfPathExists(path: String): Unit = {
39 | if (refInvMap.contains(path)) {
40 | val tree = refInvMap.subtree(path)
41 | tree.forEach(refMap.remove(_: ReferenceWrapper))
42 | }
43 | }
44 | }
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/core/TypeHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
 15 |  */
30 | package org.jetbrains.ztools.scala.core
31 |
32 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
33 |
34 | import scala.collection.mutable
35 |
36 | trait TypeHandler {
37 | def accept(obj: Any): Boolean
38 |
39 | def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any]
40 |
41 | def getErrors: List[String] = List[String]()
42 | }
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/spark/handlers/SparkSessionHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package spark.handlers
17 |
18 | import org.apache.spark.sql.SparkSession
19 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
20 | import org.jetbrains.ztools.scala.handlers.impls.AbstractTypeHandler
21 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
22 |
23 | import scala.collection.mutable
24 |
25 | class SparkSessionHandler extends AbstractTypeHandler {
26 | override def accept(obj: Any): Boolean = obj.isInstanceOf[SparkSession]
27 |
28 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = withJsonObject {
29 | json =>
30 | val obj = scalaInfo.value
31 | val id = scalaInfo.path
32 |
33 | val spark = obj.asInstanceOf[SparkSession]
34 | json += (ResNames.VALUE -> withJsonObject { json =>
35 | json += ("version()" -> spark.version)
36 | json += ("sparkContext" -> loopback.pass(spark.sparkContext, s"$id.sparkContext"))
37 | json += ("sharedState" -> loopback.pass(spark.sharedState, s"$id.sharedState"))
38 | })
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/spark/handlers/SparkContextHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package spark.handlers
17 |
18 | import org.apache.spark.SparkContext
19 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
20 | import org.jetbrains.ztools.scala.handlers.impls.AbstractTypeHandler
21 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
22 |
23 | import scala.collection.mutable
24 |
25 | class SparkContextHandler extends AbstractTypeHandler {
26 | override def accept(obj: Any): Boolean = obj.isInstanceOf[SparkContext]
27 |
28 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = withJsonObject {
29 | json =>
30 | val sc = scalaInfo.value.asInstanceOf[SparkContext]
31 | json += (ResNames.VALUE -> withJsonObject { json =>
32 | json += ("sparkUser" -> wrap(sc.sparkUser, "String"))
33 | json += ("sparkTime" -> wrap(sc.startTime, "Long"))
34 | json += ("applicationId()" -> wrap(sc.applicationId, "String"))
35 | json += ("applicationAttemptId()" -> wrap(sc.applicationAttemptId.toString, "Option[String]"))
36 | json += ("appName()" -> sc.appName)
37 | })
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/spark/handlers/RDDHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package spark.handlers
17 |
18 | import org.apache.spark.rdd.RDD
19 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
20 | import org.jetbrains.ztools.scala.handlers.impls.AbstractTypeHandler
21 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
22 |
23 | import scala.collection.mutable
24 |
25 | class RDDHandler extends AbstractTypeHandler {
26 | override def accept(obj: Any): Boolean = obj.isInstanceOf[RDD[_]]
27 |
28 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = withJsonObject {
29 | json =>
30 | val obj = scalaInfo.value
31 | val rdd = obj.asInstanceOf[RDD[_]]
32 | json += (ResNames.VALUE -> withJsonObject { value =>
33 | value += ("getNumPartitions()" -> wrap(rdd.getNumPartitions, "Int"))
34 | value += ("name" -> wrap(rdd.name, "String"))
35 | value += ("id" -> wrap(rdd.id, "Int"))
36 | value += ("partitioner" -> wrap(rdd.partitioner.toString, "Option[org.apache.spark.Partitioner]"))
37 | value += ("getStorageLevel()" -> wrap(rdd.getStorageLevel.toString, "org.apache.spark.storage.StorageLevel"))
38 | })
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/AbstractCollectionHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
19 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
20 |
 21 | import scala.collection.mutable
22 |
23 | abstract class AbstractCollectionHandler(limit: Int, timeout: Int) extends AbstractTypeHandler {
24 | trait Iterator {
25 | def hasNext: Boolean
26 |
27 | def next: Any
28 | }
29 |
30 | def iterator(obj: Any): Iterator
31 |
32 | def length(obj: Any): Int
33 |
34 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = mutable.Map[String, Any](
35 | ResNames.LENGTH -> length(scalaInfo.value),
36 | ResNames.VALUE -> withJsonArray { json =>
37 | val startTime = System.currentTimeMillis()
38 | val it = iterator(scalaInfo.value)
39 | var index = 0
40 | while (it.hasNext && index < limit && !checkTimeoutError(scalaInfo.path, startTime, timeout)) {
41 | val id = scalaInfo.path
42 | json += loopback.pass(it.next, s"$id[$index]")
43 | index += 1
44 | }
45 | }
46 | )
47 | }
48 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/HandlerManager.scala:
--------------------------------------------------------------------------------
1 | package org.jetbrains.ztools.scala.handlers
2 |
3 | import org.jetbrains.ztools.scala.core.Loopback
4 | import org.jetbrains.ztools.scala.handlers.impls._
5 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
6 | import org.jetbrains.ztools.scala.reference.ReferenceManager
7 | import spark.handlers.{DatasetHandler, RDDHandler, SparkContextHandler, SparkSessionHandler}
8 |
9 | import scala.collection.mutable
10 | import scala.collection.mutable.ListBuffer
11 |
12 | class HandlerManager(enableProfiling: Boolean,
13 | timeout: Int,
14 | stringSizeLimit: Int,
15 | collectionSizeLimit: Int,
16 | referenceManager: ReferenceManager) {
17 | private val handlerChain = ListBuffer[AbstractTypeHandler](
18 | new NullHandler(),
19 | new StringHandler(stringSizeLimit),
20 | new ArrayHandler(collectionSizeLimit, timeout),
21 | new JavaCollectionHandler(collectionSizeLimit, timeout),
22 | new SeqHandler(collectionSizeLimit, timeout),
23 | new SetHandler(collectionSizeLimit, timeout),
24 | new MapHandler(collectionSizeLimit, timeout),
25 | new ThrowableHandler(),
26 | new SpecialsHandler(stringSizeLimit),
27 | new PrimitiveHandler(),
28 | new DatasetHandler(),
29 | new RDDHandler(),
30 | new SparkContextHandler(),
31 | new SparkSessionHandler(),
32 | new ObjectHandler(stringSizeLimit, this, referenceManager, timeout)
33 | ).map(new HandlerWrapper(_, enableProfiling))
34 |
35 | def getErrors: mutable.Seq[String] = handlerChain.flatMap(x => x.handler.getErrors)
36 |
37 | def handleVariable(info: ScalaVariableInfo, loopback: Loopback, depth: Int, startTime: Long = System.currentTimeMillis()): Any = {
38 | handlerChain.find(_.accept(info)).map(_.handle(info, loopback, depth, startTime)).getOrElse(mutable.Map[String, Any]())
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/AbstractTypeHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | import org.jetbrains.ztools.scala.core.{ResNames, TypeHandler}
19 |
20 | import scala.collection.mutable
21 |
22 | abstract class AbstractTypeHandler extends TypeHandler {
23 | val timeoutErrors: mutable.MutableList[String] = mutable.MutableList()
24 |
25 | override def getErrors: List[String] = timeoutErrors.toList
26 |
27 | protected def withJsonArray(body: mutable.MutableList[Any] => Unit): mutable.MutableList[Any] = {
28 | val arr = mutable.MutableList[Any]()
29 | body(arr)
30 | arr
31 | }
32 |
33 | protected def withJsonObject(body: mutable.Map[String, Any] => Unit): mutable.Map[String, Any] = {
34 | val obj = mutable.Map[String, Any]()
35 | body(obj)
36 | obj
37 | }
38 |
39 | protected def wrap(obj: Any, tpe: String): mutable.Map[String, Any] = mutable.Map[String, Any](
40 | ResNames.VALUE -> Option(obj).orNull,
41 | ResNames.TYPE -> tpe
42 | )
43 |
44 | protected def checkTimeoutError(name: String, startTime: Long, timeout: Int): Boolean = {
45 | val isTimeout = System.currentTimeMillis() - startTime > timeout
46 | if (isTimeout)
 47 |       timeoutErrors += f"Variable $name collection exceeded the ${timeout}ms timeout."
48 | isTimeout
49 | }
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/MapHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.handlers.impls
17 |
18 | import org.jetbrains.ztools.scala.core.Loopback
19 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
20 |
21 | import scala.collection.mutable
22 |
23 | class MapHandler(limit: Int, timeout: Int) extends AbstractTypeHandler {
24 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
25 | withJsonObject {
26 | json =>
27 | val obj = scalaInfo.value
28 | val id = scalaInfo.path
29 | val map = obj.asInstanceOf[Map[_, _]]
30 | val keys = mutable.MutableList[Any]()
31 | val values = mutable.MutableList[Any]()
32 | json += ("jvm-type" -> obj.getClass.getCanonicalName)
33 | json += ("length" -> map.size)
34 | var index = 0
35 |
36 | json += ("key" -> keys)
37 | json += ("value" -> values)
38 |
39 | val startTime = System.currentTimeMillis()
40 | map.view.take(math.min(limit, map.size)).foreach {
41 | case (key, value) =>
42 | if (checkTimeoutError(scalaInfo.path, startTime, timeout))
43 | return json
44 | keys += loopback.pass(key, s"$id.key[$index]")
45 | values += loopback.pass(value, s"$id.value[$index]")
46 | index += 1
47 | }
48 | }
49 |
50 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Map[_, _]]
51 | }
52 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/spark/ztoolsbasic.sc:
--------------------------------------------------------------------------------
1 | import org.json4s.{Formats, NoTypeHints}
2 | import org.json4s.jackson.Serialization
3 |
4 | try {
5 | import org.apache.commons.lang.exception.ExceptionUtils
6 | import org.apache.spark.sql.SparkSession
7 |
8 | import java.io.{PrintWriter, StringWriter}
9 | import java.util
10 | import scala.collection.mutable.ListBuffer
11 | import scala.collection.{immutable, mutable}
12 | import scala.reflect.api.JavaUniverse
13 | import scala.tools.nsc.interpreter.IMain
14 |
15 | /**
16 | * Main section
17 | */
18 | val iMain: IMain = $intp
19 | val depth: Int = 0
20 | val filterUnitResults: Boolean = true
21 | val enableProfiling: Boolean = true
22 | val collectionSizeLimit = 100
23 | val stringSizeLimit = 400
24 | val timeout = 5000
25 | val variableTimeout = 2000
26 | val interpreterResCountLimit = 5
27 | val blackList = "$intp,z,engine".split(',').toList
28 | val whiteList: List[String] = null
29 |
30 |
31 | val variableView = new VariablesView(
32 | intp = iMain,
33 | timeout = timeout,
34 | variableTimeout = variableTimeout,
35 | collectionSizeLimit = collectionSizeLimit,
36 | stringSizeLimit = stringSizeLimit,
37 | blackList = blackList,
38 | whiteList = whiteList,
39 | filterUnitResults = filterUnitResults,
40 | enableProfiling = enableProfiling,
41 | depth = depth,
42 | interpreterResCountLimit = interpreterResCountLimit
43 | )
44 | implicit val ztoolsFormats: AnyRef with Formats = Serialization.formats(NoTypeHints)
45 | val variablesJson = variableView.getZtoolsJsonResult
46 | println("--ztools-scala--")
47 | println(variablesJson)
48 | println("--ztools-scala--")
49 | }
50 | catch {
51 | case t: Throwable =>
52 | import org.apache.commons.lang.exception.ExceptionUtils
53 | import org.json4s.jackson.Serialization
54 | import org.json4s.{Formats, NoTypeHints}
55 |
56 | implicit val ztoolsFormats: AnyRef with Formats = Serialization.formats(NoTypeHints)
57 | val result = Serialization.write(Map(
58 | "errors" -> Array(f"${ExceptionUtils.getMessage(t)}\n${ExceptionUtils.getStackTrace(t)}")
59 | ))
60 | println("--ztools-scala--")
61 | println(result)
62 | println("--ztools-scala--")
63 | }
64 |
65 |
66 |
67 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/spark/handlers/DatasetHandler.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package spark.handlers
17 |
18 | import org.apache.spark.sql.Dataset
19 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
20 | import org.jetbrains.ztools.scala.handlers.impls.AbstractTypeHandler
21 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
22 |
23 | import scala.collection.mutable
24 |
25 | class DatasetHandler extends AbstractTypeHandler {
26 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Dataset[_]]
27 |
28 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = {
29 | val obj = scalaInfo.value
30 | val df = obj.asInstanceOf[Dataset[_]]
31 |
32 |
33 | val schema = df.schema
34 | val jsonSchemaColumns = schema.fields.map(field => {
35 | val value = withJsonObject { jsonField =>
36 | jsonField += "name" -> wrap(field.name, null)
37 | jsonField += "nullable" -> wrap(field.nullable, null)
38 | jsonField += "dataType" -> wrap(field.dataType.typeName, null)
39 | }
40 | wrap(value, "org.apache.spark.sql.types.StructField")
41 | }
42 | )
43 |
44 | val jsonSchema = mutable.Map(
45 | ResNames.VALUE -> jsonSchemaColumns,
46 | ResNames.TYPE -> "org.apache.spark.sql.types.StructType",
47 | ResNames.LENGTH -> jsonSchemaColumns.length
48 | )
49 |
50 | val dfValue = mutable.Map(
51 | "schema()" -> jsonSchema,
52 | "getStorageLevel()" -> wrap(df.storageLevel.toString(), "org.apache.spark.storage.StorageLevel")
53 | )
54 |
55 | mutable.Map(
56 | ResNames.VALUE -> dfValue
57 | )
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/scala-repl/src/test/scala/org/jetbrains/ztools/scala/TrieMapTest.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
 15 |  */
30 | package org.jetbrains.ztools.scala
31 |
32 | import org.jetbrains.ztools.scala.core.TrieMap
33 | import org.junit.Assert.{assertEquals, assertNull}
34 | import org.junit.Test
35 |
36 | class TrieMapTest {
37 | @Test def testPutGet() = {
38 | val map = new TrieMap[String]
39 | map.put("a", "A")
40 | map.put("a.b", "A.B")
41 | assertEquals("A", map.get("a").orNull)
42 | assertEquals("A.B", map.get("a.b").orNull)
43 | assertNull(map.get("c").orNull)
44 | }
45 |
46 | @Test def testPutAgain() = {
47 | val map = new TrieMap[String]
48 | map.put("a", "A")
49 | map.put("a.b", "A.B")
50 | map.put("a.b", "Q")
51 | map.put("a", "X")
52 | assertEquals("X", map.get("a").orNull)
53 | assertNull(map.get("a.b").orNull)
54 | assertNull(map.get("c").orNull)
55 | }
56 |
57 | @Test def testSplit() = {
58 | var s = TrieMap.split("a.b.c")
59 | assertEquals(3, s.length)
60 | assertEquals("a", s(0))
61 | assertEquals("b", s(1))
62 | assertEquals("c", s(2))
63 | s = TrieMap.split("abc")
64 | assertEquals(1, s.length)
65 | assertEquals("abc", s(0))
66 | }
67 | }
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ZTools
2 |
 3 | [JetBrains on GitHub](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub)
4 |
 5 | A wrapper around the Scala interpreter, used with an Apache Zeppelin server to expose the values of variables across paragraph executions.
 6 | 
 7 | It provides the variable view functionality of the JetBrains Big Data Tools plugin.
8 |
9 | ## How it works
10 |
 11 | The library accesses the current state of the Scala REPL and saves the values of its variables as JSON.
 12 | 
 13 | For some types, additional computations are performed to capture state that is not readily accessible as a plain set of fields (for example, the number of partitions of an RDD).
 14 | 
 15 | These values can then be put to use: in particular, the Apache Zeppelin integration of the Big Data Tools plugin shows the collected values in its variables view panel, as an alternative to debugger evaluation in the Spark runtime.
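
As a rough illustration, a snapshot can be produced from inside a REPL session the same way `ztoolsbasic.sc` does it; the sketch below mirrors that script (`$intp` is the `IMain` instance that Zeppelin binds into the interpreter, and the limits are arbitrary example values):

```scala
import org.jetbrains.ztools.scala.VariablesView
import org.json4s.{Formats, NoTypeHints}
import org.json4s.jackson.Serialization

// $intp is provided by the Zeppelin REPL environment.
val variablesView = new VariablesView(
  intp = $intp,                 // the running Scala interpreter (IMain)
  timeout = 5000,               // overall timeout, ms
  variableTimeout = 2000,       // per-variable timeout, ms
  collectionSizeLimit = 100,    // max elements taken from each collection
  stringSizeLimit = 400,        // max characters kept from each string
  blackList = List("$intp", "z", "engine"),
  whiteList = null,
  filterUnitResults = true,
  enableProfiling = true,
  depth = 0,
  interpreterResCountLimit = 5
)

implicit val ztoolsFormats: Formats = Serialization.formats(NoTypeHints)
println(variablesView.getZtoolsJsonResult) // variables of the current session, serialized as JSON
```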
16 |
17 | ## Usage
18 |
 19 | To use ZTools with Big Data Tools, you need to do two things:
 20 | 
 21 | 1. In the Zeppelin connection settings of the BDT plugin, enable `Enable ZTools Integration (Experimental)` and save the settings.
 22 | 
 23 | 2. Install the ZTools jar on the Zeppelin server. This can be done automatically or manually. The automatic way is to click `Install ZTools...` in the bubble that appears after enabling ZTools integration in the settings. The library will be downloaded from the Maven repository `https://packages.jetbrains.team/maven/p/bdt/bigdatatools/`; during this operation, the repository and the dependency are added to the Zeppelin interpreter.
 24 | 
 25 | If you do not have permission to add repositories and dependencies to Zeppelin interpreters, ask the administrators to do it manually.
 26 | You can add the dependency as a jar file:
27 |
28 | for scala 2.11 -> https://packages.jetbrains.team/maven/p/bdt/bigdatatools/org/jetbrains/ztools/ztools-spark-all/0.211.2/ztools-spark-all-0.211.2.jar
29 |
30 | for scala 2.12 -> https://packages.jetbrains.team/maven/p/bdt/bigdatatools/org/jetbrains/ztools/ztools-spark-all/0.212.2/ztools-spark-all-0.212.2.jar
31 |
 32 | or add the Maven repository (`https://packages.jetbrains.team/maven/p/bdt/bigdatatools`) and the Maven artifact to Zeppelin:
33 |
34 | for scala 2.11 -> `org.jetbrains.ztools:ztools-spark-all:0.211.2`
35 |
36 | for scala 2.12 -> `org.jetbrains.ztools:ztools-spark-all:0.212.2`
37 |
 38 | After installation, run any paragraph in a Zeppelin notebook to collect data from the server.
 39 | 
 40 | For stand-alone usage, see the `org.jetbrains.ztools.spark.Tools.init` method for initialization and `Tools.getEnv` for retrieving the current state.
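
A hypothetical sketch of what that can look like (only the `Tools.init` and `Tools.getEnv` names come from this repository; their exact signatures are not shown here, so the call shapes below are assumptions):

```scala
import org.jetbrains.ztools.spark.Tools

// Assumed call shapes; adjust to the actual signatures of Tools.init and Tools.getEnv.
Tools.init()            // initialize ZTools for the current Spark/REPL session
val env = Tools.getEnv  // collect the current variable state
println(env)
```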
41 |
42 | ## Resources
43 |
44 | If you need support: [Our Slack channel](https://slack-bdt.mau.jetbrains.com/?_ga=2.181253743.913531920.1594027385-1936946878.1588841666)
45 |
46 | Apache Zeppelin (https://zeppelin.apache.org/)
47 |
48 | Big Data Tools (https://plugins.jetbrains.com/plugin/12494-big-data-tools)
49 |
50 | ## License
51 | [Apache v2](LICENSE.txt).
52 |
--------------------------------------------------------------------------------
/scala-repl/src/test/scala/org/jetbrains/ztools/scala/ReplAware.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala
17 |
18 | import scala.collection.mutable
19 | import scala.tools.nsc.Settings
20 | import scala.tools.nsc.interpreter.{ILoop, IMain, JPrintWriter}
21 |
22 | class ReplAware {
23 |
24 | trait Repl {
25 | def eval(code: String): Unit
26 |
27 | def getVariablesView(depth: Int = 3, enableProfiling: Boolean = false): VariablesView
28 | }
29 |
30 | def withRepl[T](body: Repl => T): T = {
31 | val classLoader = Thread.currentThread().getContextClassLoader
32 | beforeRepl()
33 |
34 | val iLoop = new ILoop(None, new JPrintWriter(Console.out, true))
35 | val settings = new Settings()
36 | settings.processArguments(List("-Yrepl-class-based"), processAll = true)
37 | settings.usejavacp.value = true
38 | iLoop.settings = settings
39 | iLoop.intp = new IMain(iLoop.settings)
40 | iLoop.initializeSynchronous()
41 |
42 | bindings(iLoop.intp)
43 |
44 | def env(depth: Int, isProfilingEnabled: Boolean): VariablesView = new VariablesView(
45 | intp = iLoop.intp,
46 | collectionSizeLimit = 100,
47 | variableTimeout = 100000,
48 | stringSizeLimit = 400,
49 | blackList = "$intp,sc,spark,sqlContext,z,engine".split(",").toList,
50 | filterUnitResults = true,
51 | enableProfiling = isProfilingEnabled,
52 | depth = depth,
53 | timeout = 100000)
54 |
55 |
56 | val result = body(new Repl {
57 | override def eval(code: String): Unit = {
58 | iLoop.intp.interpret(code)
59 | }
60 |
61 | override def getVariablesView(depth: Int, enableProfiling: Boolean): VariablesView = configure(env(depth, enableProfiling))
62 | })
63 |
64 | iLoop.closeInterpreter()
65 | afterRepl()
66 | Thread.currentThread().setContextClassLoader(classLoader)
67 | result
68 | }
69 |
70 | protected def configure(variablesView: VariablesView): VariablesView = variablesView
71 |
72 | protected def beforeRepl(): Unit = {}
73 |
74 | protected def afterRepl(): Unit = {}
75 |
76 | protected def bindings(intp: IMain): Unit = {}
77 |
78 | protected def getInPath[T](json: mutable.Map[String, Any], path: String): T = {
79 | val x :: xs = path.split('.').reverse.toList
80 | val data = xs.reverse.foldLeft(json) { (obj, key) =>
81 | obj(key).asInstanceOf[mutable.Map[String, Any]]
82 | }
83 | data(x).asInstanceOf[T]
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/interpreter/ZtoolsInterpreterWrapper.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala.interpreter
17 |
18 | import scala.tools.nsc.interpreter.IMain
19 | //noinspection TypeAnnotation
20 | class ZtoolsInterpreterWrapper(val iMain: IMain) {
21 | import scala.language.implicitConversions
22 | import scala.reflect.runtime.{universe => ru}
23 |
24 |
25 | import iMain.global._
26 |
27 | import scala.util.{Try => Trying}
28 |
29 | private lazy val importToGlobal = iMain.global mkImporter ru
30 | private lazy val importToRuntime = ru.internal createImporter iMain.global
31 |
32 | private implicit def importFromRu(sym: ru.Symbol) = importToGlobal importSymbol sym
33 |
34 | private implicit def importToRu(sym: Symbol): ru.Symbol = importToRuntime importSymbol sym
35 |
36 | // see https://github.com/scala/scala/pull/5852/commits/a9424205121f450dea2fe2aa281dd400a579a2b7
37 | def valueOfTerm(id: String): Option[Any] = exitingTyper {
38 | def fixClassBasedFullName(fullName: List[String]): List[String] = {
39 | if (settings.Yreplclassbased.value) {
40 | val line :: read :: rest = fullName
41 | line :: read :: "INSTANCE" :: rest
42 | } else fullName
43 | }
44 |
45 | def value(fullName: String) = {
46 | val universe = iMain.runtimeMirror.universe
47 | import universe.{InstanceMirror, Symbol, TermName}
48 | val pkg :: rest = fixClassBasedFullName((fullName split '.').toList)
49 | val top = iMain.runtimeMirror.staticPackage(pkg)
50 |
51 | @annotation.tailrec
52 | def loop(inst: InstanceMirror, cur: Symbol, path: List[String]): Option[Any] = {
53 | def mirrored =
54 | if (inst != null) inst
55 | else iMain.runtimeMirror reflect (iMain.runtimeMirror reflectModule cur.asModule).instance
56 |
57 | path match {
58 | case last :: Nil =>
59 | cur.typeSignature.decls find (x => x.name.toString == last && x.isAccessor) map { m =>
60 | (mirrored reflectMethod m.asMethod).apply()
61 | }
62 | case next :: rest =>
63 | val s = cur.typeSignature.member(TermName(next))
64 | val i =
65 | if (s.isModule) {
66 | if (inst == null) null
67 | else iMain.runtimeMirror reflect (inst reflectModule s.asModule).instance
68 | }
69 | else if (s.isAccessor) {
70 | iMain.runtimeMirror reflect (mirrored reflectMethod s.asMethod).apply()
71 | }
72 | else {
73 | assert(false, s.fullName)
74 | inst
75 | }
76 | loop(i, s, rest)
77 | case Nil => None
78 | }
79 | }
80 |
81 | loop(null, top, rest)
82 | }
83 |
84 | Option(iMain.symbolOfTerm(id)) filter (_.exists) flatMap (s => Trying(value(s.fullName)).toOption.flatten)
85 | }
86 | }
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/core/TrieMap.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 |  */
30 | package org.jetbrains.ztools.scala.core
31 |
32 | import scala.collection.mutable
33 |
34 | object TrieMap {
35 | class Node[T](var value: Option[T]) {
36 | var children: mutable.Map[String, TrieMap.Node[T]] = _
37 |
38 | def put(key: String, node: TrieMap.Node[T]): Option[Node[T]] = {
39 | if (children == null)
40 | children = mutable.Map[String, TrieMap.Node[T]]()
41 | children.put(key, node)
42 | }
43 |
44 | def del(key: String): Option[Node[T]] = children.remove(key)
45 |
46 | def forEach(func: Function[T, _]): Unit = {
47 | func.apply(value.get)
48 | if (children != null) children.foreach(t => t._2.forEach(func))
49 | }
50 | }
51 |
52 | def split(key: String): Array[String] = {
53 | var n = 0
54 | var j = 0
55 | for (i <- 0 until key.length) {
56 | if (key.charAt(i) == '.') n += 1
57 | }
58 | val k = new Array[String](n + 1)
59 | val sb = new mutable.StringBuilder(k.length)
60 | for (i <- 0 until key.length) {
61 | val ch = key.charAt(i)
62 | if (ch == '.') {
63 | k({
64 | j += 1;
65 | j - 1
66 | }) = sb.toString
67 | sb.setLength(0)
68 | }
69 | else sb.append(ch)
70 | }
71 | k(j) = sb.toString
72 | k
73 | }
74 | }
75 |
76 | class TrieMap[T] {
77 |   val root = new TrieMap.Node[T](None)
78 |
79 | def subtree(key: Array[String], length: Int): TrieMap.Node[T] = {
80 | var current = root
81 | var i = 0
82 | while ( {
83 | i < length && current != null
84 | }) {
85 | if (current.children == null) return null
86 | current = current.children.get(key(i)).orNull
87 | i += 1
88 | }
89 | current
90 | }
91 |
92 | def put(key: Array[String], value: T): Option[TrieMap.Node[T]] = {
93 | val node = subtree(key, key.length - 1)
94 | node.put(key(key.length - 1), new TrieMap.Node[T](Option.apply(value)))
95 | }
96 |
97 | def put(key: String, value: T): Option[TrieMap.Node[T]] = {
98 | val k = TrieMap.split(key)
99 | put(k, value)
100 | }
101 |
102 | def contains(key: String): Boolean = {
103 | val k = TrieMap.split(key)
104 | val node = subtree(k, k.length)
105 | node != null
106 | }
107 |
108 | def get(key: String): Option[T]= {
109 | val k = TrieMap.split(key)
110 | val node = subtree(k, k.length)
111 | if (node == null) return Option.empty
112 | node.value
113 | }
114 |
115 | def subtree(key: String): TrieMap.Node[T] = {
116 | val k = TrieMap.split(key)
117 | subtree(k, k.length)
118 | }
119 | }
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/handlers/impls/ObjectHandler.scala:
--------------------------------------------------------------------------------
1 | package org.jetbrains.ztools.scala.handlers.impls
2 |
3 | import org.apache.commons.lang.exception.ExceptionUtils
4 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
5 | import org.jetbrains.ztools.scala.handlers.HandlerManager
6 | import org.jetbrains.ztools.scala.interpreter.ScalaVariableInfo
7 | import org.jetbrains.ztools.scala.reference.ReferenceManager
8 |
9 | import scala.collection.mutable
10 | import scala.reflect.api.JavaUniverse
11 |
12 | class ObjectHandler(val stringSizeLimit: Int,
13 | val manager: HandlerManager,
14 | val referenceManager: ReferenceManager,
15 | val timeout: Int) extends AbstractTypeHandler {
16 | private val INACCESSIBLE = ScalaVariableInfo(isAccessible = false, isLazy = false, null, null, null, null)
17 | val ru: JavaUniverse = scala.reflect.runtime.universe
18 | val mirror: ru.Mirror = ru.runtimeMirror(getClass.getClassLoader)
19 |
20 | case class ReflectionProblem(e: Throwable, symbol: String, var count: Int)
21 |
22 | val problems: mutable.Map[String, ReflectionProblem] = mutable.Map[String, ReflectionProblem]()
23 |
24 | override def accept(obj: Any): Boolean = true
25 |
26 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
27 | withJsonObject { result =>
28 | val obj = scalaInfo.value
29 |
30 | if (obj == null) {
31 | return result
32 | }
33 | if (depth <= 0) {
34 | result += (ResNames.VALUE -> obj.toString.take(stringSizeLimit))
35 | return result
36 | }
37 |
38 | val startTime = System.currentTimeMillis()
39 | val fields = listAccessibleProperties(scalaInfo, startTime)
40 | if (fields.isEmpty) {
41 | result += (ResNames.VALUE -> obj.toString.take(stringSizeLimit))
42 | return result
43 | }
44 |
45 | val resolvedFields = mutable.Map[String, Any]()
46 | result += (ResNames.VALUE -> resolvedFields)
47 |
48 |
49 | fields.foreach { field =>
50 | if (checkTimeoutError(field.name, startTime, timeout)) {
51 | return result
52 | }
53 |
54 | if (field.ref != null && field.ref != field.path) {
55 | resolvedFields += (field.name -> (mutable.Map[String, Any]() += (ResNames.REF -> field.ref)))
56 | } else {
57 | resolvedFields += (field.name -> manager.handleVariable(field, loopback, depth - 1))
58 | }
59 | }
60 |
61 | result
62 | }
63 |
64 |
65 | override def getErrors: List[String] = problems.map(x =>
66 | f"Reflection error for ${x._2.symbol} counted ${x._2.count}.\n" +
67 | f"Error message: ${ExceptionUtils.getMessage(x._2.e)}\n " +
68 | f"Stacktrace:${ExceptionUtils.getStackTrace(x._2.e)}").toList ++ super.getErrors
69 |
70 | private def listAccessibleProperties(info: ScalaVariableInfo, startTime: Long): List[ScalaVariableInfo] = {
71 | val instanceMirror = mirror.reflect(info.value)
72 | val instanceSymbol = instanceMirror.symbol
73 | val members = instanceSymbol.toType.members
74 |
75 | val parsedMembers = mutable.MutableList[ScalaVariableInfo]()
76 | members.foreach { symbol =>
77 | if (checkTimeoutError(info.path, startTime, timeout))
78 | return parsedMembers.toList
79 | val variableInfo = get(instanceMirror, symbol, info.path)
80 | if (variableInfo.isAccessible)
81 | parsedMembers += variableInfo
82 | }
83 |
84 | parsedMembers.toList
85 | }
86 |
87 | private def get(instanceMirror: ru.InstanceMirror, symbol: ru.Symbol, path: String): ScalaVariableInfo = {
88 | if (!problems.contains(path))
89 | try {
90 | // is public property
91 | if (!symbol.isMethod && symbol.isTerm && symbol.asTerm.getter.isPublic) {
92 | val term = symbol.asTerm
93 | val f = instanceMirror.reflectField(term)
94 | val fieldPath = s"$path.${term.name.toString.trim}"
95 | val value = f.get
96 | val tpe = term.typeSignature.toString
97 | return ScalaVariableInfo(isAccessible = tpe != "", isLazy = term.isLazy, value, tpe,
98 | fieldPath, referenceManager.getRef(value, fieldPath))
99 | }
100 | } catch {
101 | case e: Throwable => problems(path) = ReflectionProblem(e, symbol.toString, 1)
102 | }
103 | else
104 | problems(path).count += 1
105 |
106 | INACCESSIBLE
107 | }
108 | }
--------------------------------------------------------------------------------
/scala-repl/src/test/scala/spark/SparkHandlersTest.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package spark
17 |
18 | import org.apache.spark.sql.SparkSession
19 | import org.jetbrains.ztools.scala.{ReplAware, VariablesView}
20 | import org.junit.Assert.assertEquals
21 | import org.junit.Test
22 |
23 | import scala.collection.mutable
24 | import scala.tools.nsc.interpreter.IMain
25 |
26 | class SparkHandlersTest extends ReplAware {
27 | @Test
28 | def simpleTest(): Unit = {
29 | withRepl {
30 | repl =>
31 | val view = repl.getVariablesView()
32 | repl.eval(
33 | """
34 | |import spark.implicits._
35 | |val df = sc.parallelize(List((1, "hello"), (2, "world"))).toDF("id", "name")
36 | |""".stripMargin)
37 | println(view.toJson)
38 | }
39 | }
40 |
41 | @Test
42 | def testRDDHandler(): Unit =
43 | withRepl {
44 | repl =>
45 | val view = repl.getVariablesView()
46 | repl.eval(
47 | """
48 | val rdd = sc.parallelize(List((1, "hello"), (2, "world")))
49 | """)
50 | val json = view.resolveVariables
51 | println(view.toJson)
52 | assertEquals("org.apache.spark.rdd.RDD", getInPath[String](json, "rdd.type"))
53 |         assertEquals("Int", getInPath[String](json, "rdd.value.id.type"))
54 |         assertEquals("Int", getInPath[String](json, "rdd.value.getNumPartitions().type"))
55 | assertEquals("org.apache.spark.storage.StorageLevel", getInPath[String](json, "rdd.value.getStorageLevel().type"))
56 | }
57 |
58 | @Test
59 | def testSparkSessionHandler(): Unit =
60 | withRepl {
61 | repl =>
62 | val view = repl.getVariablesView()
63 | repl.eval(
64 | """
65 | |import org.apache.spark.sql.SparkSession
66 | |class A(val spark: SparkSession)
67 | |val a = new A(spark)
68 | |""".stripMargin)
69 | val json = view.resolveVariables
70 | println(view.toJson)
71 | }
72 |
73 | @Test
74 | def testSparkContextHandler(): Unit = {
75 |
76 | }
77 |
78 | @Test
79 | def testDataFrame(): Unit = {
80 | withRepl { repl =>
81 | val view = repl.getVariablesView()
82 |
83 | repl.eval(
84 | """
85 | |val sqlContext= new org.apache.spark.sql.SQLContext(sc)
86 | |
87 | |import sqlContext.implicits._
88 | |val bankText = sc.parallelize(List("42, \"foo\", \"bar\", \"baz\", 69"))
89 | |
90 | |case class Bank(age: Integer, job: String, marital: String, education: String, balance: Integer)
91 | |
92 | |val bank = bankText.map(s => s.split(",")).filter(s => s(0) != "\"age\"").map(
93 | | s => Bank(s(0).toInt,
94 | | s(1).replaceAll("\"", ""),
95 | | s(2).replaceAll("\"", ""),
96 | | s(3).replaceAll("\"", ""),
97 | | s(4).toInt
98 | | )
99 | |).toDF()
100 | |""".stripMargin)
101 | val json = view.resolveVariables
102 | println(view.toJson)
103 | assertEquals("", view.errors.mkString(","))
104 | val schema = getInPath[mutable.Map[String, Any]](json, "bank.value.schema()")
105 | val schemaArray = schema("value").asInstanceOf[Array[Any]]
106 | checkStructField(schemaArray(0), true, "age", "integer")
107 | checkStructField(schemaArray(1), true, "job", "string")
108 | checkStructField(schemaArray(2), true, "marital", "string")
109 | checkStructField(schemaArray(3), true, "education", "string")
110 | checkStructField(schemaArray(4), true, "balance", "integer")
111 | }
112 | }
113 |
114 | @Test
115 | def testRddHandlerWithError(): Unit = withRepl {
116 | repl =>
117 | val view = repl.getVariablesView()
118 | repl.eval(
119 | """
120 | val rdd = sc.textFile("file:///home/nikita.pavlenko/big-data/online_retail.csv")
121 | """)
122 | val json = view.toJson
123 | assertEquals("{\"rdd\":{\"type\":\"org.apache.spark.rdd.RDD\",\"value\":\"NoSuchMethodException: org.apache.spark.io.LZ4CompressionCodec.(org.apache.spark.SparkConf)\"}}", json)
124 | }
125 |
126 | private def checkStructField(field: Any,
127 | nullable: Boolean,
128 | name: String,
129 | dataType: String): Unit = {
130 | val json = field.asInstanceOf[mutable.Map[String, Any]]
131 | assertEquals(nullable, json("nullable").asInstanceOf[mutable.Map[String, Any]]("value"))
132 | assertEquals(name, json("name").asInstanceOf[mutable.Map[String, Any]]("value"))
133 | assertEquals(dataType, json("dataType").asInstanceOf[mutable.Map[String, Any]]("value"))
134 | }
135 |
136 | override protected def configure(variablesView: VariablesView) =
137 | super.configure(variablesView)
138 |
139 |
140 | var spark: SparkSession = _
141 |
142 | override def beforeRepl(): Unit = {
143 | super.beforeRepl()
144 | spark = SparkSession
145 | .builder()
146 | .master("local[2]")
147 | .appName("Simple Application").getOrCreate()
148 | }
149 |
150 | override def afterRepl(): Unit = {
151 | spark.close()
152 | super.afterRepl()
153 | }
154 |
155 | override def bindings(intp: IMain): Unit = {
156 | super.bindings(intp)
157 | intp.bind("spark", spark)
158 | intp.bind("sc", spark.sparkContext)
159 | }
160 | }
161 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/org/jetbrains/ztools/scala/VariablesView.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala
17 |
18 | import org.apache.commons.lang.exception.ExceptionUtils
19 | import org.jetbrains.ztools.scala.core.{Loopback, ResNames}
20 | import org.jetbrains.ztools.scala.handlers._
21 | import org.jetbrains.ztools.scala.interpreter.{InterpreterHandler, ScalaVariableInfo}
22 | import org.jetbrains.ztools.scala.reference.ReferenceManager
23 | import org.json4s.jackson.Serialization
24 | import org.json4s.{Formats, NoTypeHints}
25 |
26 | import java.util.function.{Function => JFunction}
27 | import java.util.regex.Pattern
28 | import scala.collection.{immutable, mutable}
29 | import scala.language.implicitConversions
30 | import scala.tools.nsc.interpreter.IMain
31 | import scala.util.Try
32 |
33 | class VariablesView(val intp: IMain,
34 | val timeout: Int,
35 | val variableTimeout: Int,
36 | val collectionSizeLimit: Int,
37 | val stringSizeLimit: Int,
38 | val blackList: List[String],
39 | val whiteList: List[String] = null,
40 | val filterUnitResults: Boolean,
41 | val enableProfiling: Boolean,
42 | val depth: Int,
43 | val interpreterResCountLimit: Int = 5) {
44 | val errors: mutable.MutableList[String] = mutable.MutableList[String]()
45 | private val interpreterHandler = new InterpreterHandler(intp)
46 | private val referenceManager = new ReferenceManager()
47 |
48 | private val touched = mutable.Map[String, ScalaVariableInfo]()
49 |
50 | private val handlerManager = new HandlerManager(
51 | collectionSizeLimit = collectionSizeLimit,
52 | stringSizeLimit = stringSizeLimit,
53 | timeout = variableTimeout,
54 | referenceManager = referenceManager,
55 | enableProfiling = enableProfiling
56 | )
57 |
58 | //noinspection ScalaUnusedSymbol
59 | def getZtoolsJsonResult: String = {
60 | implicit val ztoolsFormats: AnyRef with Formats = Serialization.formats(NoTypeHints)
61 | Serialization.write(
62 | Map(
63 | "variables" -> resolveVariables,
64 | "errors" -> (errors ++ handlerManager.getErrors)
65 | )
66 | )
67 | }
68 |
69 | def toJson: String = {
70 | implicit val ztoolsFormats: AnyRef with Formats = Serialization.formats(NoTypeHints)
71 | Serialization.write(resolveVariables)
72 | }
73 |
74 | def resolveVariables: mutable.Map[String, Any] = {
75 | val result: mutable.Map[String, Any] = mutable.Map[String, Any]()
76 | val startTime = System.currentTimeMillis()
77 |
78 | val interpreterVariablesNames = interpreterHandler.getVariableNames
79 | val finalNames = filterVariableNames(interpreterVariablesNames)
80 |
81 | finalNames.foreach { name =>
82 | val varType = interpreterHandler.interpreter.typeOfTerm(name).toString().stripPrefix("()")
83 | val variable = mutable.Map[String, Any]()
84 |
85 | result += name -> variable
86 | variable += ResNames.TYPE -> varType
87 | if (!isUnitOrNullResult(result, name))
88 | variable += ResNames.VALUE -> ""
89 | }
90 |
91 | var passedVariablesCount = 0
92 | val totalVariablesCount = finalNames.size
93 |
94 | if (checkTimeout(startTime, passedVariablesCount, totalVariablesCount))
95 | return result
96 |
97 | finalNames.foreach { name =>
98 | if (checkTimeout(startTime, passedVariablesCount, totalVariablesCount))
99 | return result
100 | passedVariablesCount += 1
101 |
102 | if (!isUnitOrNullResult(result, name)) {
103 |
104 | calculateVariable(result, name)
105 | }
106 | }
107 | result
108 | }
109 |
110 | private def calculateVariable(result: mutable.Map[String, Any], name: String) = {
111 | val valMap = result(name).asInstanceOf[mutable.Map[String, Any]]
112 | try {
113 | val startTime = System.currentTimeMillis()
114 |
115 | val info = interpreterHandler.getInfo(name, valMap(ResNames.TYPE).asInstanceOf[String])
116 | val ref = referenceManager.getRef(info.value, name)
117 | touched(info.path) = info
118 |
119 | if (ref != null && ref != info.path) {
120 | result += (info.path -> mutable.Map[String, Any](ResNames.REF -> ref))
121 | } else {
122 | result += info.path -> parseInfo(info, depth, startTime)
123 | }
124 | } catch {
125 | case t: Throwable =>
126 | valMap += ResNames.VALUE -> ExceptionUtils.getRootCauseMessage(t)
127 | }
128 | }
129 |
130 | private def isUnitOrNullResult(result: mutable.Map[String, Any], name: String) = {
131 | val res = result(name).asInstanceOf[mutable.Map[String, Any]]
132 | val valType = res(ResNames.TYPE)
133 | valType == "Unit" || valType == "Null"
134 | }
135 |
136 | def resolveVariable(path: String): mutable.Map[String, Any] = {
137 | val result = mutable.Map[String, Any]()
138 | val obj = touched.get(path).orNull
139 | if (obj.ref != null) {
140 | result += (ResNames.VALUE -> mutable.Map[String, Any](ResNames.REF -> obj.ref))
141 | } else {
142 | result += (ResNames.VALUE -> parseInfo(obj, depth))
143 | }
144 | result
145 | }
146 |
147 | private def parseInfo(info: ScalaVariableInfo, depth: Int, startTime: Long = System.currentTimeMillis()): Any = {
148 | val loopback = new Loopback {
149 | override def pass(obj: Any, id: String): Any = {
150 | val si = ScalaVariableInfo(isAccessible = true, isLazy = false, obj, null, id, referenceManager.getRef(obj, id))
151 | parseInfo(si, depth - 1)
152 | }
153 | }
154 | handlerManager.handleVariable(info, loopback, depth, startTime)
155 | }
156 |
157 | private def filterVariableNames(interpreterVariablesNames: Seq[String]) = {
158 | val variablesNames = interpreterVariablesNames.seq
159 | .filter { name => !blackList.contains(name) }
160 | .filter { name => whiteList == null || whiteList.contains(name) }
161 |
162 |
163 | val p = Pattern.compile("res\\d*")
164 | val (resVariables, otherVariables: immutable.Seq[String]) = variablesNames.partition(x => p.matcher(x).matches())
165 | val sortedResVariables = resVariables
166 | .map(res => Try(res.stripPrefix("res").toInt))
167 | .filter(_.isSuccess)
168 | .map(_.get)
169 | .sortWith(_ > _)
170 | .take(interpreterResCountLimit)
171 | .map(num => "res" + num)
172 |
173 | val finalNames = otherVariables ++ sortedResVariables
174 | finalNames
175 | }
176 |
177 | //noinspection ScalaUnusedSymbol
178 | private implicit def toJavaFunction[A, B](f: A => B): JFunction[A, B] = new JFunction[A, B] {
179 | override def apply(a: A): B = f(a)
180 | }
181 |
182 | private def checkTimeout(startTimeout: Long, passed: Int, total: Int): Boolean = {
183 | val isTimeoutExceed = System.currentTimeMillis() - startTimeout > timeout
184 | if (isTimeoutExceed)
185 |       errors += s"Variable collection timed out after ${timeout}ms. Parsed $passed of $total."
186 | isTimeoutExceed
187 | }
188 | }
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/scala-repl/src/test/scala/org/jetbrains/ztools/scala/VariablesViewImplTest.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2020 Jetbrains s.r.o.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.jetbrains.ztools.scala
17 |
18 | import org.junit.Assert.{assertEquals, assertNotNull, assertTrue}
19 | import org.junit.Test
20 |
21 | import scala.collection.JavaConversions.asJavaCollection
22 | import scala.collection.mutable
23 |
24 | class VariablesViewImplTest extends ReplAware {
25 | val base = 0
26 |
27 | @Test
28 | def testSimpleVarsAndCollections(): Unit = {
29 | withRepl { intp =>
30 | intp.eval("val x = 1")
31 | val view = intp.getVariablesView()
32 | assertNotNull(view)
33 | var json = view.resolveVariables
34 | println(view.toJson)
35 | val x = json("x").asInstanceOf[Int]
36 | assertEquals(1, x)
37 | assertEquals(1, json.keys.size)
38 |
39 | intp.eval("val list = List(1,2,3,4)")
40 | json = view.resolveVariables
41 | println(view.toJson)
42 | val list = json("list").asInstanceOf[mutable.Map[String, Any]]
43 | assertEquals(3, list.keys.size)
44 | assertEquals(4, list("length"))
45 | assertEquals("List", list("type"))
46 | val values = list("value").asInstanceOf[mutable.MutableList[mutable.Map[String, Any]]]
47 | assertEquals((mutable.MutableList[Any]() += 1 += 2 += 3 += 4).toString, values.toString)
48 | assertEquals(2, json.keys.size)
49 |
50 | intp.eval("val map = Map(1 -> 2, 2 -> 3, 3 -> 4)")
51 | json = view.resolveVariables
52 | println(view.toJson)
53 | val map = json("map").asInstanceOf[mutable.Map[String, Any]]
54 | assertEquals(5, map.keys.size)
55 | assertEquals(3, map("length"))
56 | assertEquals("scala.collection.immutable.Map", map("type"))
57 | val m = Map(1 -> 2, 2 -> 3, 3 -> 4)
58 | val key = map("key").asInstanceOf[mutable.MutableList[Any]].head.asInstanceOf[Int]
59 | val value = map("value").asInstanceOf[mutable.MutableList[Any]].head.asInstanceOf[Int]
60 | assertEquals(value, m(key))
61 | assertEquals(3, json.keys.size)
62 |
63 | intp.eval("1 + 1")
64 | json = view.resolveVariables
65 | val result = json(s"res$base").asInstanceOf[Int]
66 | assertEquals(2, result)
67 | assertEquals(4, json.keys.size)
68 | }
69 | }
70 |
71 | //noinspection AssertBetweenInconvertibleTypes
72 | @Test
73 | def testObjects(): Unit = {
74 | withRepl { intp =>
75 | intp.eval("class A(val x: Int)")
76 | intp.eval("val a = new A(1)")
77 | val view = intp.getVariablesView()
78 | assertNotNull(view)
79 | println(view.toJson)
80 | var json = view.resolveVariables
81 | val a = json("a").asInstanceOf[mutable.Map[String, Any]]
82 | assertEquals(2, a.keys.size)
83 | assertEquals("$line1.iw$A", a("type"))
84 | val aObj = a("value").asInstanceOf[mutable.Map[String, Any]]
85 | assertEquals(1, aObj.keys.size)
86 | val ax = aObj("x").asInstanceOf[Int]
87 | assertEquals(1, ax)
88 | // scala 2.11 returns scala.Int instead of Int in scala 2.12
89 | assertEquals(1, json.keys.size)
90 |
91 | val qDef = "class Q {\n" + "val a = Array(1,2,3)\n" + "val b = List(\"hello\", \"world\")\n" + "val c: List[List[String]] = List()\n" + "var y = 10\n" + "def m(): Int = 10\n" + "}"
92 | intp.eval(qDef)
93 | intp.eval("val q = new Q()")
94 | json = view.resolveVariables
95 | assertEquals("", view.errors.mkString(","))
96 | val qObj = json("q").asInstanceOf[mutable.Map[String, Any]]("value").asInstanceOf[mutable.Map[String, Any]]
97 | assertEquals(4, qObj.keys.size)
98 | assertEquals(3, qObj("a").asInstanceOf[mutable.Map[String, Any]]("length"))
99 | val tpe = qObj("c").asInstanceOf[mutable.Map[String, Any]]("type")
100 | assertTrue("scala.List[scala.List[String]]" == tpe || "List[List[String]]" == tpe)
101 | }
102 | }
103 |
104 | @Test
105 | def testReferences(): Unit = {
106 | withRepl { intp =>
107 | val view = intp.getVariablesView()
108 | assertNotNull(view)
109 | intp.eval("class A(var x: Int)")
110 | intp.eval("val a = new A(10)")
111 | intp.eval("class B(var q: A)")
112 | intp.eval("val b = new B(a)")
113 | intp.eval("val c = new B(a)")
114 | val json = view.resolveVariables
115 | assertEquals("", view.errors.mkString(","))
116 | assertEquals(3, json.keys.size) // a, b, c
117 |
118 | assertEquals("a", getInPath(json, "b.value.q.ref"))
119 | assertEquals("a", getInPath(json, "c.value.q.ref"))
120 | }
121 | }
122 |
123 | @Test
124 | def testBrokenReference(): Unit = {
125 | withRepl { intp =>
126 | val view = intp.getVariablesView()
127 | assertNotNull(view)
128 | intp.eval("class A(var x: Int)")
129 | intp.eval("val a = new A(10)")
130 | intp.eval("class B(var q: A)")
131 | intp.eval("val b = new B(a)")
132 | view.toJson
133 | intp.eval("val a = new A(11)") // top level term has been changed but looks the same
134 |
135 | val json = view.resolveVariables
136 | assertEquals(2, json.keys.size)
137 | assertEquals(10, getInPath[Int](json, "b.value.q.value.x"))
138 | }
139 | }
140 |
141 | @Test
142 | def testNull(): Unit = {
143 | withRepl { intp =>
144 | val view = intp.getVariablesView()
145 | assertNotNull(view)
146 | intp.eval("class A(var x: String)")
147 | intp.eval("val a = new A(null)")
148 | val json = view.resolveVariables
149 | println(view.toJson)
150 | assertEquals("String", getInPath(json, "a.value.x.type"))
151 | assertEquals(false, json("a").asInstanceOf[mutable.Map[String, Any]]("value").asInstanceOf[mutable.Map[String, Any]]("x").asInstanceOf[mutable.Map[String, Any]].keys.contains("value"))
152 | }
153 | }
154 |
155 | @Test
156 | def testReferenceInsideTheSameObject(): Unit = {
157 | withRepl { intp =>
158 | val view = intp.getVariablesView()
159 | assertNotNull(view)
160 | intp.eval("class A(var x: Int)")
161 | intp.eval("class B(var q: A, var p: A)")
162 | intp.eval("val b = new B(new A(10), null)")
163 | intp.eval("b.p = b.q")
164 | var json = view.resolveVariables
165 | assertEquals("b.p", getInPath(json, "b.value.q.ref"))
166 |
167 | intp.eval("b.q.x = 11")
168 | json = view.resolveVariables
169 | assertEquals(1, json.keys.size)
170 | assertEquals(11, getInPath[Int](json, "b.value.p.value.x"))
171 | assertEquals("b.p", getInPath(json, "b.value.q.ref"))
172 |
173 | intp.eval("b.p = null")
174 | json = view.resolveVariables
175 | assertEquals(1, json
176 | ("b").asInstanceOf[mutable.Map[String, Any]]
177 | ("value").asInstanceOf[mutable.Map[String, Any]]
178 | ("p").asInstanceOf[mutable.Map[String, Any]]
179 | .keys.size) // type only
180 |
181 | assertEquals(11, getInPath[Int](json, "b.value.q.value.x"))
182 | }
183 | }
184 |
185 | @Test
186 | def testTopLevelCyclicReferences(): Unit = {
187 | val code =
188 | """
189 | |class A {
190 | | var strInA : String = _
191 | | var memberB : B = _
192 | |}
193 | |
194 | |class B {
195 | | var strInB : String = _
196 | | var memberA : A = _
197 | |}
198 | |
199 | |val a = new A()
200 | |a.strInA = "class A"
201 | |
202 | |val b = new B()
203 | |b.strInB = "class B"
204 | |
205 | |a.memberB = b
206 | |b.memberA = a
207 | |""".stripMargin
208 | withRepl { intp =>
209 | val view = intp.getVariablesView()
210 | assertNotNull(view)
211 | intp.eval(code)
212 | val json = view.resolveVariables
213 | println(view.toJson)
214 | assertEquals("a", getInPath[String](json, "a.value.memberB.value.memberA.ref"))
215 | assertEquals("a.memberB", getInPath[String](json, "b.ref"))
216 | // assertEquals("b", getInPath(json, "a.value.memberB.ref"))
217 | // assertEquals("a", getInPath(json, "b.value.memberA.ref"))
218 | // assertEquals("class A", getInPath(json, "a.value.strInA.value"))
219 | // assertEquals("class B", getInPath(json, "b.value.strInB.value"))
220 | }
221 | }
222 |
223 | @Test
224 | def testCyclicReferences(): Unit = {
225 | val code =
226 | """
227 | |class A {
228 | | var strInA : String = _
229 | | var memberB : B = _
230 | |}
231 | |
232 | |class B {
233 | | var strInB : String = _
234 | | var memberA : A = _
235 | |}
236 | |
237 | |class C {
238 | | val a = new A()
239 | | val b = new B()
240 | |}
241 | |
242 | |val c = new C()
243 | |c.a.strInA = "class A"
244 | |c.b.strInB = "class B"
245 | |
246 | |c.a.memberB = c.b
247 | |c.b.memberA = c.a
248 | |""".stripMargin
249 | withRepl { intp =>
250 | val view = intp.getVariablesView()
251 | assertNotNull(view)
252 | intp.eval(code)
253 | val json = view.resolveVariables
254 | // println(json.toString(2))
255 | assertEquals("c.b", getInPath(json, "c.value.a.value.memberB.ref"))
256 | assertEquals("c.a", getInPath(json, "c.value.b.value.memberA.ref"))
257 | assertEquals("class A", getInPath(json, "c.value.a.value.strInA.value"))
258 | assertEquals("class B", getInPath(json, "c.value.b.value.strInB.value"))
259 | }
260 | }
261 |
262 | // @Test
263 | // def testTraverseAlongThePath(): Unit = {
264 | // val code =
265 | // """
266 | // |class A(val id: Int, val text: String)
267 | // |class B(val a: A)
268 | // |val a = new A(10, "Hello")
269 | // |val b = new B(a)
270 | // |""".stripMargin
271 | // withRepl { intp =>
272 | // intp.eval(code)
273 | // val env = intp.getVariablesView
274 | // val json = env.toJsonObject("b.a" ,2)
275 | // println(json.toString(2))
276 | // // {"path":"b.a","value":{"type":"A","value":{"id":{"type":"scala.Int","value":"10"}}}}
277 | // assertEquals(2, json.keySet().size()) // path & value
278 | // assertEquals("A", getInPath(json, "value.type"))
279 | // assertEquals("10", getInPath(json, "value.value.id.value"))
280 | // }
281 | // }
282 | @Test
283 |   def testTraverseAlongThePath(): Unit = {
284 | val code =
285 | """
286 | |class A(val id: Int, val text: String)
287 | |class B(val a: A)
288 | |val a = new A(10, "Hello")
289 | |val b = new B(a)
290 | |""".stripMargin
291 | withRepl { repl =>
292 | repl.eval(code)
293 | val env = repl.getVariablesView(0)
294 | val j = env.resolveVariables
295 | println(env.toJson)
296 | assertTrue(getInPath[String](j, "a.value").startsWith("$line1.$"))
297 | // println(env.toJsonObject.toString(2))
298 | val json = env.resolveVariable("b")
299 | // // {"path":"b.a","value":{"type":"Line_1.A","value":{"id":{"type":"kotlin.Int","value":"10"}}}}
300 | // println(json.toString(2))
301 | assertEquals(1, json.keys.size) // path & value
302 | // println(getInPath(json, "value.type"))
303 | }
304 | }
305 |
306 | // @Test
307 | // def testTraverseAlongThePathRefOrdering(): Unit = {
308 | // val code =
309 | // """
310 | // |class A(val x: Int)
311 | // |class B(val a: A)
312 | // |class C(val b: B, val b1: B)
313 | // |val a = new A(10)
314 | // |val c = new C(new B(a), new B(a))
315 | // |""".stripMargin
316 | // withRepl { intp =>
317 | // val view = intp.getVariablesView()
318 | // assertNotNull(view)
319 | // intp.eval(code)
320 | // view.toJsonObject("c.b1.a", 2)
321 | // val json = view.toJsonObject("c.b.a", 2)
322 | // assertEquals("c.b1.a", getInPath(json, "value.ref"))
323 | // }
324 | // }
325 |
326 | @Test
327 | def testFunctions(): Unit = {
328 | withRepl { intp =>
329 | val view = intp.getVariablesView()
330 | assertNotNull(view)
331 | intp.eval("def method(): Int = 1")
332 | val json = view.resolveVariables
333 | // variables() must filter classes and methods
334 | // like that iLoop.intp.definedSymbolList.filter { x => x.isGetter }
335 | // println(json.toString())
336 | assertTrue(json.isEmpty)
337 | }
338 | }
339 |
340 | @Test
341 | def testMethods(): Unit = {
342 | val code =
343 | """
344 | |class A {
345 | |val x = 10
346 | |def method(): Int = 1
347 | |}
348 | |val a = new A()
349 | |""".stripMargin
350 | withRepl { intp =>
351 | val view = intp.getVariablesView()
352 | assertNotNull(view)
353 | intp.eval(code)
354 | val json = view.resolveVariables
355 | assertEquals(1, json("a").asInstanceOf[mutable.Map[String, Any]]("value").asInstanceOf[mutable.Map[String, Any]].keys.size)
356 | }
357 | }
358 |
359 | @Test
360 | def testTypeChanged(): Unit = {
361 | withRepl { intp =>
362 | val view = intp.getVariablesView()
363 | intp.eval("val a = 2")
364 | view.resolveVariables
365 | intp.eval("val a = 2.0")
366 | val json = view.resolveVariables
367 | // println(json.toString(2))
368 | assertEquals("2.0", getInPath[Double](json, "a").toString)
369 | }
370 | }
371 |
372 | @Test
373 | def testArray2D(): Unit = {
374 | withRepl { intp =>
375 | val view = intp.getVariablesView()
376 | intp.eval("val a: Array[Array[Int]] = Array(Array(1,2,3), Array(4,5,6))")
377 | var json = view.resolveVariables
378 | println(view.toJson)
379 | assertEquals("Array[Array[Int]]", getInPath(json, "a.type"))
380 | val arr = json("a").asInstanceOf[mutable.Map[String, Any]]("value").asInstanceOf[mutable.MutableList[Any]]
381 | assertEquals(2, arr.size)
382 | assertEquals(3, arr(0).asInstanceOf[mutable.Map[String, Any]]("length"))
383 | intp.eval("a(0)")
384 | json = view.resolveVariables
385 | assertEquals("a[0]", getInPath(json, "res0.ref"))
386 | }
387 | }
388 |
389 | @Test
390 | def testArrayOfObjects(): Unit = {
391 | withRepl { intp =>
392 | val view = intp.getVariablesView()
393 | intp.eval("class A(var x: Int)")
394 | intp.eval("val b = Array(new A(1), new A(2))")
395 | val json = view.resolveVariables
396 | println(view.toJson)
397 | assertEquals("Array[$line1.iw$A]", getInPath(json, "b.type"))
398 | val arr = json("b").asInstanceOf[mutable.Map[String, Any]]("value").asInstanceOf[mutable.MutableList[Any]]
399 | assertEquals(2, getInPath[Int](arr(1).asInstanceOf[mutable.Map[String, Any]], "value.x"))
400 | }
401 | }
402 |
403 | @Test
404 | def testLazy(): Unit =
405 | withRepl { intp =>
406 | val code =
407 | """
408 | class A {
409 | lazy val x: Int = throw new RuntimeException
410 | }
411 | val a = new A()
412 | """
413 | val view = intp.getVariablesView()
414 | intp.eval(code)
415 | val json = view.resolveVariables
416 | println(view.toJson)
417 | val obj = getInPath[mutable.Map[String, Any]](json, "a.value.x")
418 | assertEquals(2, obj.size)
419 | assertEquals("scala.Int", obj("type"))
420 | assertTrue(obj("lazy").asInstanceOf[Boolean])
421 | }
422 |
423 | @Test
424 | def testListOfAny(): Unit =
425 | withRepl { intp =>
426 | val view = intp.getVariablesView()
427 | val code =
428 | """
429 | class A(val x: Int)
430 | val a = List(1,Map(1->2),List(1,2,3), new A(10))
431 | val b = a(2) // inferred Any
432 | """
433 | intp.eval(code)
434 | val json = view.resolveVariables
435 | println(view.toJson)
436 | assertEquals("a[2]", getInPath[String](json, "b.ref"))
437 | }
438 |
439 | @Test
440 | def testBrokenRefInCollections(): Unit =
441 | withRepl { intp =>
442 | val view = intp.getVariablesView()
443 | val p1 =
444 | """
445 | |val a = List(Map("sd"->List(Set(0),2,3,"tttt")))
446 | |val b = a(0)("sd")
447 | |""".stripMargin
448 | intp.eval(p1)
449 | var json = view.resolveVariables
450 | assertEquals("a[0].value[0]", getInPath[String](json, "b.ref"))
451 | val p2 =
452 | """
453 | |val a = List(1,2,3)
454 | |""".stripMargin
455 | intp.eval(p2)
456 | json = view.resolveVariables
457 | assertEquals("", view.errors.mkString(","))
458 | // b isn't reference anymore
459 | assertEquals(4, getInPath[Int](json, "b.length"))
460 | }
461 |
462 | @Test
463 | def testBrokenRefInObject(): Unit =
464 | withRepl { intp =>
465 | val view = intp.getVariablesView()
466 | val code =
467 | """
468 | class C(z: Int)
469 | class B(val y: C)
470 | class A(var x: B)
471 | val a = new A(new B(new C(10)))
472 | val c = a.x.y
473 | """
474 | intp.eval(code)
475 | var json = view.resolveVariables
476 | // println(json.toString(2))
477 | assertEquals("a.x.y", getInPath[String](json, "c.ref"))
478 | intp.eval("a.x = null")
479 | json = view.resolveVariables
480 | // println(json.toString(2))
481 | assertEquals("$line1.iw$C", getInPath[String](json, "c.type"))
482 | }
483 |
484 | @Test
485 | def testBrokenRef(): Unit =
486 | withRepl { intp =>
487 | val view = intp.getVariablesView()
488 | val code1 =
489 | """
490 | import java.text.DateFormat
491 | import java.util.Date
492 | val a = List(Map("sd"->List(Set(0),2,3,DateFormat.getDateTimeInstance)))
493 | val c = Map("1"->Set(100))
494 | val d = c("1")
495 |
496 | """
497 | val code3 =
498 | """
499 | class A2(){
500 | val c = false
501 | val e = BigDecimal(2)
502 | }
503 | val t = new A2()
504 | val a = Map("5"->t)
505 | val b = a("5")
506 | val c = b
507 | """
508 | intp.eval(code1)
509 | var json = view.resolveVariables
510 | // println(json.toString(2))
511 | println(">----------------")
512 | intp.eval(code3)
513 | json = view.resolveVariables
514 | // println(json.toString(2))
515 | assertEquals("scala.collection.immutable.Set", getInPath[String](json, "d.type"))
516 | }
517 |
518 | // @Test
519 | // def testPerformance(): Unit = {
520 | // withRepl { intp =>
521 | // val view = intp.getVariablesView()
522 | // Range(1, 100).foreach { _ =>
523 | // intp.eval("val a = 2.0")
524 | // val time = System.currentTimeMillis()
525 | // val json = view.toJsonObject
526 | // val t = System.currentTimeMillis() - time
527 | //// println("time = " + t + " ms")
528 | // }
529 | // val json = view.toJsonObject
530 | // println(json.toString(2))
531 | // }
532 | // }
533 |
534 | // @Test
535 | // def testPerformance2(): Unit = {
536 | // withRepl { intp =>
537 | // val view = intp.getVariablesView()
538 | // intp.eval("val a = 2.0")
539 | // Range(1, 200).foreach { _ =>
540 | // intp.eval("println(\"hello\")")
541 | // val time = System.currentTimeMillis()
542 | // val json = view.toJsonObject
543 | // val t = System.currentTimeMillis() - time
544 | // println("time = " + t + " ms")
545 | // }
546 | // val json = view.toJsonObject
547 | // println(json.toString(2))
548 | // }
549 | // }
550 |
551 | // @Test
552 | // def testPerformance3(): Unit = {
553 | // withRepl { intp =>
554 | // val view = intp.getVariablesView()
555 | // var json: JSONObject = null
556 | // Range(1, 200).foreach { _ =>
557 | // intp.eval("val a = 2\nThread.sleep(5)")
558 | // intp.eval("val b = 3\nThread.sleep(5)")
559 | // val time = System.currentTimeMillis()
560 | // json = view.toJsonObject
561 | // val t = System.currentTimeMillis() - time
562 | // println("time = " + t + " ms")
563 | // }
564 | // println(json.toString(2))
565 | // }
566 | // }
567 | }
568 |
--------------------------------------------------------------------------------
/scala-repl/src/main/scala/spark/ztools.sc:
--------------------------------------------------------------------------------
1 | try {
2 | import org.apache.commons.lang.exception.ExceptionUtils
3 | import org.apache.spark.sql.SparkSession
4 |
5 | import java.io.{PrintWriter, StringWriter}
6 | import java.util
7 | import scala.collection.mutable.ListBuffer
8 | import scala.collection.{immutable, mutable}
9 | import scala.reflect.api.JavaUniverse
10 | import scala.tools.nsc.interpreter.IMain
11 | import org.json4s.jackson.Serialization
12 | import org.json4s.{Formats, NoTypeHints}
13 |
14 | import java.util.function.{Function => JFunction}
15 | import java.util.regex.Pattern
16 | import scala.language.implicitConversions
17 | import scala.util.Try
18 | import org.apache.spark.sql.Dataset
19 | import org.apache.spark.rdd.RDD
20 | import org.apache.spark.SparkContext
21 |
22 | trait Loopback {
23 | def pass(obj: Any, id: String): Any
24 | }
25 |
26 | object ResNames {
27 | val REF = "ref"
28 | val VALUE = "value"
29 | val IS_PRIMITIVE = "isPrimitive"
30 | val TYPE = "type"
31 | val TIME = "time"
32 | val LENGTH = "length"
33 | val LAZY = "lazy"
34 | }
35 |
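    |   // A small prefix tree keyed by dot-separated paths (e.g. "myVar.field.sub");
    |   // ReferenceManager uses it to remember which object reference was reported under each path.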
36 | object TrieMap {
37 | class Node[T](var value: Option[T]) {
38 | var children: mutable.Map[String, TrieMap.Node[T]] = _
39 |
40 | def put(key: String, node: TrieMap.Node[T]): Option[Node[T]] = {
41 | if (children == null)
42 | children = mutable.Map[String, TrieMap.Node[T]]()
43 | children.put(key, node)
44 | }
45 |
46 | def del(key: String): Option[Node[T]] = children.remove(key)
47 |
48 | def forEach(func: Function[T, _]): Unit = {
49 | func.apply(value.get)
50 | if (children != null) children.foreach(t => t._2.forEach(func))
51 | }
52 | }
53 |
54 | def split(key: String): Array[String] = {
55 | var n = 0
56 | var j = 0
57 | for (i <- 0 until key.length) {
58 | if (key.charAt(i) == '.') n += 1
59 | }
60 | val k = new Array[String](n + 1)
61 | val sb = new mutable.StringBuilder(k.length)
62 | for (i <- 0 until key.length) {
63 | val ch = key.charAt(i)
64 | if (ch == '.') {
65 |           // a segment ended: store it at the current write index,
66 |           // advance the index and reset the buffer
67 |           k(j) = sb.toString
68 |           j += 1
69 |           sb.setLength(0)
70 | }
71 | else sb.append(ch)
72 | }
73 | k(j) = sb.toString
74 | k
75 | }
76 | }
77 |
78 | class TrieMap[T] {
79 |     val root = new TrieMap.Node[T](None) // the root carries no value of its own
80 |
81 | def subtree(key: Array[String], length: Int): TrieMap.Node[T] = {
82 | var current = root
83 | var i = 0
84 | while ( {
85 | i < length && current != null
86 | }) {
87 | if (current.children == null) return null
88 | current = current.children.get(key(i)).orNull
89 | i += 1
90 | }
91 | current
92 | }
93 |
94 | def put(key: Array[String], value: T): Option[TrieMap.Node[T]] = {
95 | val node = subtree(key, key.length - 1)
96 | node.put(key(key.length - 1), new TrieMap.Node[T](Option.apply(value)))
97 | }
98 |
99 | def put(key: String, value: T): Option[TrieMap.Node[T]] = {
100 | val k = TrieMap.split(key)
101 | put(k, value)
102 | }
103 |
104 | def contains(key: String): Boolean = {
105 | val k = TrieMap.split(key)
106 | val node = subtree(k, k.length)
107 | node != null
108 | }
109 |
110 | def get(key: String): Option[T] = {
111 | val k = TrieMap.split(key)
112 | val node = subtree(k, k.length)
113 | if (node == null) return Option.empty
114 | node.value
115 | }
116 |
117 | def subtree(key: String): TrieMap.Node[T] = {
118 | val k = TrieMap.split(key)
119 | subtree(k, k.length)
120 | }
121 | }
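    |   // A minimal, commented usage sketch of the TrieMap above (illustrative only; not executed):
    |   //   val paths = new TrieMap[Int]
    |   //   paths.put("a", 1)                    // parent paths must be inserted before their children
    |   //   paths.put("a.b", 2)
    |   //   paths.get("a.b")                     // Some(2)
    |   //   paths.contains("a.c")                // false
    |   //   paths.subtree("a").forEach(println)  // prints 1 and 2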
122 |
123 | trait TypeHandler {
124 | def accept(obj: Any): Boolean
125 |
126 | def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any]
127 |
128 | def getErrors: List[String] = List[String]()
129 | }
130 |
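    |   // Base class for sequence-like values: subclasses supply length() and iterator(), and
    |   // handle() renders at most `limit` elements, stopping early once the per-variable timeout
    |   // is exceeded.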
131 | abstract class AbstractCollectionHandler(limit: Int, timeout: Int) extends AbstractTypeHandler {
132 | trait Iterator {
133 | def hasNext: Boolean
134 |
135 | def next: Any
136 | }
137 |
138 | def iterator(obj: Any): Iterator
139 |
140 | def length(obj: Any): Int
141 |
142 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = mutable.Map[String, Any](
143 | ResNames.LENGTH -> length(scalaInfo.value),
144 | ResNames.VALUE -> withJsonArray { json =>
145 | val startTime = System.currentTimeMillis()
146 | val it = iterator(scalaInfo.value)
147 | var index = 0
148 | while (it.hasNext && index < limit && !checkTimeoutError(scalaInfo.path, startTime, timeout)) {
149 | val id = scalaInfo.path
150 | json += loopback.pass(it.next, s"$id[$index]")
151 | index += 1
152 | }
153 | }
154 | )
155 | }
156 |
157 | abstract class AbstractTypeHandler extends TypeHandler {
158 | val timeoutErrors: mutable.MutableList[String] = mutable.MutableList()
159 |
160 | override def getErrors: List[String] = timeoutErrors.toList
161 |
162 | protected def withJsonArray(body: mutable.MutableList[Any] => Unit): mutable.MutableList[Any] = {
163 | val arr = mutable.MutableList[Any]()
164 | body(arr)
165 | arr
166 | }
167 |
168 | protected def withJsonObject(body: mutable.Map[String, Any] => Unit): mutable.Map[String, Any] = {
169 | val obj = mutable.Map[String, Any]()
170 | body(obj)
171 | obj
172 | }
173 |
174 | protected def wrap(obj: Any, tpe: String): mutable.Map[String, Any] = mutable.Map[String, Any](
175 | ResNames.VALUE -> Option(obj).orNull,
176 | ResNames.TYPE -> tpe
177 | )
178 |
179 | protected def checkTimeoutError(name: String, startTime: Long, timeout: Int): Boolean = {
180 | val isTimeout = System.currentTimeMillis() - startTime > timeout
181 | if (isTimeout)
182 |           timeoutErrors += f"Variable $name collection timed out (limit ${timeout}ms)."
183 | isTimeout
184 | }
185 |
186 | }
187 |
188 | class ArrayHandler(limit: Int, timeout: Int) extends AbstractCollectionHandler(limit, timeout) {
189 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Array[_]]
190 |
191 | override def length(obj: Any): Int = obj.asInstanceOf[Array[_]].length
192 |
193 | override def iterator(obj: Any): Iterator = new Iterator {
194 | private val it = obj.asInstanceOf[Array[_]].iterator
195 |
196 | override def hasNext: Boolean = it.hasNext
197 |
198 | override def next: Any = it.next
199 | }
200 | }
201 |
202 | class JavaCollectionHandler(limit: Int, timeout: Int) extends AbstractCollectionHandler(limit, timeout) {
203 | override def accept(obj: Any): Boolean = obj.isInstanceOf[util.Collection[_]]
204 |
205 | override def iterator(obj: Any): Iterator = new Iterator() {
206 | private val it = obj.asInstanceOf[util.Collection[_]].iterator()
207 |
208 | override def hasNext: Boolean = it.hasNext
209 |
210 | override def next: Any = it.next()
211 | }
212 |
213 | override def length(obj: Any): Int = obj.asInstanceOf[util.Collection[_]].size()
214 | }
215 | class MapHandler(limit: Int, timeout: Int) extends AbstractTypeHandler {
216 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
217 | withJsonObject {
218 | json =>
219 | val obj = scalaInfo.value
220 | val id = scalaInfo.path
221 | val map = obj.asInstanceOf[Map[_, _]]
222 | val keys = mutable.MutableList[Any]()
223 | val values = mutable.MutableList[Any]()
224 | json += ("jvm-type" -> obj.getClass.getCanonicalName)
225 | json += ("length" -> map.size)
226 | var index = 0
227 |
228 | json += ("key" -> keys)
229 | json += ("value" -> values)
230 |
231 | val startTime = System.currentTimeMillis()
232 | map.view.take(math.min(limit, map.size)).foreach {
233 | case (key, value) =>
234 | if (checkTimeoutError(scalaInfo.path, startTime, timeout))
235 | return json
236 | keys += loopback.pass(key, s"$id.key[$index]")
237 | values += loopback.pass(value, s"$id.value[$index]")
238 | index += 1
239 | }
240 | }
241 |
242 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Map[_, _]]
243 | }
244 |
245 | class NullHandler extends AbstractTypeHandler {
246 | override def accept(obj: Any): Boolean = obj == null
247 |
248 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
249 | mutable.Map[String, Any]()
250 | }
251 |
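    |   // Catch-all handler (accept always returns true): lists the public fields of an object via
    |   // Scala runtime reflection and delegates each one back to the HandlerManager with depth - 1.
    |   // Reflection failures are accumulated in `problems` and surfaced through getErrors.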
252 | class ObjectHandler(val stringSizeLimit: Int,
253 | val manager: HandlerManager,
254 | val referenceManager: ReferenceManager,
255 | val timeout: Int) extends AbstractTypeHandler {
256 | private val INACCESSIBLE = ScalaVariableInfo(isAccessible = false, isLazy = false, null, null, null, null)
257 | val ru: JavaUniverse = scala.reflect.runtime.universe
258 | val mirror: ru.Mirror = ru.runtimeMirror(getClass.getClassLoader)
259 |
260 | case class ReflectionProblem(e: Throwable, symbol: String, var count: Int)
261 |
262 | val problems: mutable.Map[String, ReflectionProblem] = mutable.Map[String, ReflectionProblem]()
263 |
264 | override def accept(obj: Any): Boolean = true
265 |
266 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
267 | withJsonObject { result =>
268 | val obj = scalaInfo.value
269 |
270 | if (obj == null) {
271 | return result
272 | }
273 | if (depth <= 0) {
274 | result += (ResNames.VALUE -> obj.toString.take(stringSizeLimit))
275 | return result
276 | }
277 |
278 | val startTime = System.currentTimeMillis()
279 | val fields = listAccessibleProperties(scalaInfo, startTime)
280 | if (fields.isEmpty) {
281 | result += (ResNames.VALUE -> obj.toString.take(stringSizeLimit))
282 | return result
283 | }
284 |
285 | val resolvedFields = mutable.Map[String, Any]()
286 | result += (ResNames.VALUE -> resolvedFields)
287 |
288 |
289 | fields.foreach { field =>
290 | if (checkTimeoutError(field.name, startTime, timeout)) {
291 | return result
292 | }
293 |
294 | if (field.ref != null && field.ref != field.path) {
295 | resolvedFields += (field.name -> (mutable.Map[String, Any]() += (ResNames.REF -> field.ref)))
296 | } else {
297 | resolvedFields += (field.name -> manager.handleVariable(field, loopback, depth - 1))
298 | }
299 | }
300 |
301 | result
302 | }
303 |
304 |
305 | override def getErrors: List[String] = problems.map(x =>
306 |         f"Reflection error for ${x._2.symbol} occurred ${x._2.count} time(s).\n" +
307 |           f"Error message: ${ExceptionUtils.getMessage(x._2.e)}\n" +
308 |           f"Stacktrace: ${ExceptionUtils.getStackTrace(x._2.e)}").toList ++ super.getErrors
309 |
310 | private def listAccessibleProperties(info: ScalaVariableInfo, startTime: Long): List[ScalaVariableInfo] = {
311 | val instanceMirror = mirror.reflect(info.value)
312 | val instanceSymbol = instanceMirror.symbol
313 | val members = instanceSymbol.toType.members
314 |
315 | val parsedMembers = mutable.MutableList[ScalaVariableInfo]()
316 | members.foreach { symbol =>
317 | if (checkTimeoutError(info.path, startTime, timeout))
318 | return parsedMembers.toList
319 | val variableInfo = get(instanceMirror, symbol, info.path)
320 | if (variableInfo.isAccessible)
321 | parsedMembers += variableInfo
322 | }
323 |
324 | parsedMembers.toList
325 | }
326 |
327 | private def get(instanceMirror: ru.InstanceMirror, symbol: ru.Symbol, path: String): ScalaVariableInfo = {
328 | if (!problems.contains(path))
329 | try {
330 | // is public property
331 | if (!symbol.isMethod && symbol.isTerm && symbol.asTerm.getter.isPublic) {
332 | val term = symbol.asTerm
333 | val f = instanceMirror.reflectField(term)
334 | val fieldPath = s"$path.${term.name.toString.trim}"
335 | val value = f.get
336 | val tpe = term.typeSignature.toString
337 | return ScalaVariableInfo(isAccessible = tpe != "", isLazy = term.isLazy, value, tpe,
338 | fieldPath, referenceManager.getRef(value, fieldPath))
339 | }
340 | } catch {
341 | case e: Throwable => problems(path) = ReflectionProblem(e, symbol.toString, 1)
342 | }
343 | else
344 | problems(path).count += 1
345 |
346 | INACCESSIBLE
347 | }
348 | }
349 |
350 | class PrimitiveHandler extends AbstractTypeHandler {
351 | override def accept(obj: Any): Boolean =
352 | obj match {
353 | case _: Byte => true
354 | case _: Short => true
355 | case _: Boolean => true
356 | case _: Char => true
357 | case _: Int => true
358 | case _: Long => true
359 | case _: Float => true
360 | case _: Double => true
361 | case _ => false
362 | }
363 |
364 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
365 | mutable.Map[String, Any](
366 | ResNames.VALUE -> scalaInfo.value,
367 |           ResNames.IS_PRIMITIVE -> true // presence of this key tells HandlerWrapper to return the bare value
368 | )
369 | }
370 |
371 | class SeqHandler(limit: Int, timeout: Int) extends AbstractCollectionHandler(limit, timeout) {
372 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Seq[_]]
373 |
374 | override def iterator(obj: Any): Iterator = new Iterator {
375 | private val it = obj.asInstanceOf[Seq[_]].iterator
376 |
377 | override def hasNext: Boolean = it.hasNext
378 |
379 | override def next: Any = it.next()
380 | }
381 |
382 | override def length(obj: Any): Int = obj.asInstanceOf[Seq[_]].size
383 | }
384 |
385 | class SetHandler(limit: Int, timeout: Int) extends AbstractCollectionHandler(limit, timeout) {
386 | override def iterator(obj: Any): Iterator = new Iterator {
387 | private val it = obj.asInstanceOf[Set[_]].iterator
388 |
389 | override def hasNext: Boolean = it.hasNext
390 |
391 | override def next: Any = it.next()
392 | }
393 |
394 | override def length(obj: Any): Int = obj.asInstanceOf[Set[_]].size
395 |
396 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Set[_]]
397 | }
398 |
399 | class SpecialsHandler(limit: Int) extends AbstractTypeHandler {
400 | override def accept(obj: Any): Boolean = obj.getClass.getCanonicalName != null && obj.getClass.getCanonicalName.startsWith("scala.")
401 |
402 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = withJsonObject {
403 | json =>
404 | json.put(ResNames.VALUE, scalaInfo.value.toString.take(limit))
405 | }
406 | }
407 |
408 | class StringHandler(limit: Int) extends AbstractTypeHandler {
409 | override def accept(obj: Any): Boolean = obj.isInstanceOf[String]
410 |
411 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] =
412 | mutable.Map(
413 | ResNames.VALUE -> scalaInfo.value.asInstanceOf[String].take(limit)
414 | )
415 | }
416 |
417 | class ThrowableHandler extends AbstractTypeHandler {
418 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Throwable]
419 |
420 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = {
421 | val obj = scalaInfo.value
422 | val throwable = obj.asInstanceOf[Throwable]
423 | val writer = new StringWriter()
424 | val out = new PrintWriter(writer)
425 | throwable.printStackTrace(out)
426 |
427 | mutable.Map(
428 | ResNames.VALUE -> writer.toString
429 | )
430 | }
431 | }
432 |
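    |   // The ordered handler chain: the first handler whose accept() matches a value handles it,
    |   // so the specific handlers are listed before ObjectHandler, which accepts everything.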
433 | class HandlerManager(enableProfiling: Boolean,
434 | timeout: Int,
435 | stringSizeLimit: Int,
436 | collectionSizeLimit: Int,
437 | referenceManager: ReferenceManager) {
438 | private val handlerChain = ListBuffer[AbstractTypeHandler](
439 | new NullHandler(),
440 | new StringHandler(stringSizeLimit),
441 | new ArrayHandler(collectionSizeLimit, timeout),
442 | new JavaCollectionHandler(collectionSizeLimit, timeout),
443 | new SeqHandler(collectionSizeLimit, timeout),
444 | new SetHandler(collectionSizeLimit, timeout),
445 | new MapHandler(collectionSizeLimit, timeout),
446 | new ThrowableHandler(),
447 | new SpecialsHandler(stringSizeLimit),
448 | new PrimitiveHandler(),
449 | new DatasetHandler(),
450 | new RDDHandler(),
451 | new SparkContextHandler(),
452 | new SparkSessionHandler(),
453 | new ObjectHandler(stringSizeLimit, this, referenceManager, timeout)
454 | ).map(new HandlerWrapper(_, enableProfiling))
455 |
456 | def getErrors: mutable.Seq[String] = handlerChain.flatMap(x => x.handler.getErrors)
457 |
458 | def handleVariable(info: ScalaVariableInfo, loopback: Loopback, depth: Int, startTime: Long = System.currentTimeMillis()): Any = {
459 | handlerChain.find(_.accept(info)).map(_.handle(info, loopback, depth, startTime)).getOrElse(mutable.Map[String, Any]())
460 | }
461 | }
462 |
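    |   // Decorates a TypeHandler: lazy values are flagged as `lazy` instead of being passed to the
    |   // handler, primitive results are unwrapped to the bare value, and every other result is
    |   // annotated with the resolved type and, when profiling is enabled, the handling time.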
463 | class HandlerWrapper(val handler: TypeHandler, profile: Boolean) {
464 | def accept(info: ScalaVariableInfo): Boolean = info.isLazy || handler.accept(info.value)
465 |
466 | def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int, initStartTime: Long): Any = {
467 |         // initStartTime is a primitive Long and can never be null; callers already
468 |         // default it to System.currentTimeMillis() when no explicit start time is
469 |         // supplied, so it can be used directly.
470 |         val startTime = initStartTime
471 |
472 | val data = if (scalaInfo.isLazy) {
473 | mutable.Map[String, Any](ResNames.LAZY -> true)
474 | }
475 | else {
476 | val data = handler.handle(scalaInfo, loopback, depth: Int)
477 | if (data.keys.count(_ == ResNames.IS_PRIMITIVE) > 0) {
478 | return data(ResNames.VALUE)
479 | }
480 | data
481 | }
482 |
483 | data.put(ResNames.TYPE, calculateType(scalaInfo))
484 | if (profile)
485 | data.put(ResNames.TIME, System.currentTimeMillis() - startTime)
486 |
487 | data
488 | }
489 |
490 | private def calculateType(scalaInfo: ScalaVariableInfo): String = {
491 | if (scalaInfo.tpe != null)
492 | return scalaInfo.tpe
493 |
494 | if (scalaInfo.value != null)
495 | scalaInfo.value.getClass.getCanonicalName
496 | else
497 | null
498 | }
499 | }
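    | // Bridges the Scala REPL: getVariableNames lists the getter symbols defined in the session,
    | // and getInfo resolves a variable's current value through ZtoolsInterpreterWrapper.valueOfTerm.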
500 | class InterpreterHandler(val interpreter: IMain) {
501 | val wrapper = new ZtoolsInterpreterWrapper(interpreter)
502 |
503 | def getVariableNames: immutable.Seq[String] =
504 | interpreter.definedSymbolList.filter { x => x.isGetter }.map(_.name.toString).distinct
505 |
506 | def getInfo(name: String, tpe: String): ScalaVariableInfo = {
507 | val obj = valueOfTerm(name).orNull
508 | ScalaVariableInfo(isAccessible = true, isLazy = false, obj, tpe, name, null)
509 | }
510 |
511 | def valueOfTerm(id: String): Option[Any] = wrapper.valueOfTerm(id)
512 | }
513 |
514 | case class ScalaVariableInfo(isAccessible: Boolean,
515 | isLazy: Boolean,
516 | value: Any,
517 | tpe: String,
518 | path: String,
519 | ref: String) {
520 | val name: String = if (path != null)
521 | path.substring(path.lastIndexOf('.') + 1)
522 | else
523 | null
524 | }
525 |
526 |
527 |
528 | //noinspection TypeAnnotation
529 | class ZtoolsInterpreterWrapper(val iMain: IMain) {
530 | import scala.language.implicitConversions
531 | import scala.reflect.runtime.{universe => ru}
532 | import iMain.global._
533 |
534 | import scala.util.{Try => Trying}
535 |
536 | private lazy val importToGlobal = iMain.global mkImporter ru
537 | private lazy val importToRuntime = ru.internal createImporter iMain.global
538 |
539 | private implicit def importFromRu(sym: ru.Symbol) = importToGlobal importSymbol sym
540 |
541 | private implicit def importToRu(sym: Symbol): ru.Symbol = importToRuntime importSymbol sym
542 |
543 | // see https://github.com/scala/scala/pull/5852/commits/a9424205121f450dea2fe2aa281dd400a579a2b7
544 | def valueOfTerm(id: String): Option[Any] = exitingTyper {
545 | def fixClassBasedFullName(fullName: List[String]): List[String] = {
546 | if (settings.Yreplclassbased.value) {
547 | val line :: read :: rest = fullName
548 | line :: read :: "INSTANCE" :: rest
549 | } else fullName
550 | }
551 |
552 | def value(fullName: String) = {
553 | val universe = iMain.runtimeMirror.universe
554 | import universe.{InstanceMirror, Symbol, TermName}
555 | val pkg :: rest = fixClassBasedFullName((fullName split '.').toList)
556 | val top = iMain.runtimeMirror.staticPackage(pkg)
557 |
558 | @annotation.tailrec
559 | def loop(inst: InstanceMirror, cur: Symbol, path: List[String]): Option[Any] = {
560 | def mirrored =
561 | if (inst != null) inst
562 | else iMain.runtimeMirror reflect (iMain.runtimeMirror reflectModule cur.asModule).instance
563 |
564 | path match {
565 | case last :: Nil =>
566 | cur.typeSignature.decls find (x => x.name.toString == last && x.isAccessor) map { m =>
567 | (mirrored reflectMethod m.asMethod).apply()
568 | }
569 | case next :: rest =>
570 | val s = cur.typeSignature.member(TermName(next))
571 | val i =
572 | if (s.isModule) {
573 | if (inst == null) null
574 | else iMain.runtimeMirror reflect (inst reflectModule s.asModule).instance
575 | }
576 | else if (s.isAccessor) {
577 | iMain.runtimeMirror reflect (mirrored reflectMethod s.asMethod).apply()
578 | }
579 | else {
580 | assert(false, s.fullName)
581 | inst
582 | }
583 | loop(i, s, rest)
584 | case Nil => None
585 | }
586 | }
587 |
588 | loop(null, top, rest)
589 | }
590 |
591 | Option(iMain.symbolOfTerm(id)) filter (_.exists) flatMap (s => Trying(value(s.fullName)).toOption.flatten)
592 | }
593 | }
594 |
595 | class ReferenceManager {
596 | private val refMap = mutable.Map[ReferenceWrapper, String]()
597 | private val refInvMap = new TrieMap[ReferenceWrapper]()
598 |
599 | /**
600 |      * Returns a reference (i.e. an already registered path) to the object, or records the object
601 |      * in the reference maps and returns null. (A commented usage sketch follows this class.)
602 |      *
603 |      * @param obj  the object to find a reference for (may be null)
604 |      * @param path the path of the object, e.g. myVar.myField.b
605 |      * @return the reference path to obj, or null if obj is null, has not been seen before, or is an AnyVal
606 | */
607 | def getRef(obj: Any, path: String): String = obj match {
608 | case null | _: Unit =>
609 | clearRefIfPathExists(path)
610 | null
611 | case ref: AnyRef =>
612 | val wrapper = new ReferenceWrapper(ref)
613 | if (refMap.contains(wrapper)) {
614 | if (refInvMap.get(path).orNull != wrapper) clearRefIfPathExists(path)
615 | refMap(wrapper)
616 | } else {
617 | clearRefIfPathExists(path)
618 | refMap(wrapper) = path
619 | refInvMap.put(path, wrapper)
620 | null
621 | }
622 | case _ => null
623 | }
624 |
625 |
626 | private def clearRefIfPathExists(path: String): Unit = {
627 | if (refInvMap.contains(path)) {
628 | val tree = refInvMap.subtree(path)
629 | tree.forEach(refMap.remove(_: ReferenceWrapper))
630 | }
631 | }
632 | }
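    |   // A minimal, commented usage sketch of ReferenceManager (illustrative only; not executed):
    |   //   val refs   = new ReferenceManager
    |   //   val shared = new Object
    |   //   refs.getRef(shared, "a")   // null  -> first sighting, path "a" is recorded
    |   //   refs.getRef(shared, "b")   // "a"   -> the same instance resolves to its first recorded path
    |   //   refs.getRef(null, "c")     // null  -> null and Unit values are never tracked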
633 |
634 | class ReferenceWrapper(val ref: AnyRef) {
635 | override def hashCode(): Int = ref.hashCode()
636 |
637 | override def equals(obj: Any): Boolean = obj match {
638 | case value: ReferenceWrapper =>
639 | ref.eq(value.ref)
640 | case _ => false
641 | }
642 | }
643 |
644 |
645 |
646 |
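    |   // Drives the collection: takes the variable names defined in the interpreter, applies the
    |   // black/white lists and the resN history limit, then renders each variable through the
    |   // handler chain while respecting the global and per-variable timeouts.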
647 | class VariablesView(val intp: IMain,
648 | val timeout: Int,
649 | val variableTimeout: Int,
650 | val collectionSizeLimit: Int,
651 | val stringSizeLimit: Int,
652 | val blackList: List[String],
653 | val whiteList: List[String] = null,
654 | val filterUnitResults: Boolean,
655 | val enableProfiling: Boolean,
656 | val depth: Int,
657 | val interpreterResCountLimit: Int = 5) {
658 | val errors: mutable.MutableList[String] = mutable.MutableList[String]()
659 | private val interpreterHandler = new InterpreterHandler(intp)
660 | private val referenceManager = new ReferenceManager()
661 |
662 | private val touched = mutable.Map[String, ScalaVariableInfo]()
663 |
664 | private val handlerManager = new HandlerManager(
665 | collectionSizeLimit = collectionSizeLimit,
666 | stringSizeLimit = stringSizeLimit,
667 | timeout = variableTimeout,
668 | referenceManager = referenceManager,
669 | enableProfiling = enableProfiling
670 | )
671 |
672 | //noinspection ScalaUnusedSymbol
673 | def getZtoolsJsonResult: String = {
674 | implicit val ztoolsFormats: AnyRef with Formats = Serialization.formats(NoTypeHints)
675 | Serialization.write(
676 | Map(
677 | "variables" -> resolveVariables,
678 | "errors" -> (errors ++ handlerManager.getErrors)
679 | )
680 | )
681 | }
682 |
683 | def toJson: String = {
684 | implicit val ztoolsFormats: AnyRef with Formats = Serialization.formats(NoTypeHints)
685 | Serialization.write(resolveVariables)
686 | }
687 |
688 | def resolveVariables: mutable.Map[String, Any] = {
689 | val result: mutable.Map[String, Any] = mutable.Map[String, Any]()
690 | val startTime = System.currentTimeMillis()
691 |
692 | val interpreterVariablesNames = interpreterHandler.getVariableNames
693 | val finalNames = filterVariableNames(interpreterVariablesNames)
694 |
695 | finalNames.foreach { name =>
696 | val varType = interpreterHandler.interpreter.typeOfTerm(name).toString().stripPrefix("()")
697 | val variable = mutable.Map[String, Any]()
698 |
699 | result += name -> variable
700 | variable += ResNames.TYPE -> varType
701 | if (!isUnitOrNullResult(result, name))
702 | variable += ResNames.VALUE -> ""
703 | }
704 |
705 | var passedVariablesCount = 0
706 | val totalVariablesCount = finalNames.size
707 |
708 | if (checkTimeout(startTime, passedVariablesCount, totalVariablesCount))
709 | return result
710 |
711 | finalNames.foreach { name =>
712 | if (checkTimeout(startTime, passedVariablesCount, totalVariablesCount))
713 | return result
714 | passedVariablesCount += 1
715 |
716 | if (!isUnitOrNullResult(result, name)) {
717 |
718 | calculateVariable(result, name)
719 | }
720 | }
721 | result
722 | }
723 |
724 | private def calculateVariable(result: mutable.Map[String, Any], name: String) = {
725 | val valMap = result(name).asInstanceOf[mutable.Map[String, Any]]
726 | try {
727 | val startTime = System.currentTimeMillis()
728 |
729 | val info = interpreterHandler.getInfo(name, valMap(ResNames.TYPE).asInstanceOf[String])
730 | val ref = referenceManager.getRef(info.value, name)
731 | touched(info.path) = info
732 |
733 | if (ref != null && ref != info.path) {
734 | result += (info.path -> mutable.Map[String, Any](ResNames.REF -> ref))
735 | } else {
736 | result += info.path -> parseInfo(info, depth, startTime)
737 | }
738 | } catch {
739 | case t: Throwable =>
740 | valMap += ResNames.VALUE -> ExceptionUtils.getRootCauseMessage(t)
741 | }
742 | }
743 |
744 | private def isUnitOrNullResult(result: mutable.Map[String, Any], name: String) = {
745 | val res = result(name).asInstanceOf[mutable.Map[String, Any]]
746 | val valType = res(ResNames.TYPE)
747 | valType == "Unit" || valType == "Null"
748 | }
749 |
750 | def resolveVariable(path: String): mutable.Map[String, Any] = {
751 | val result = mutable.Map[String, Any]()
752 | val obj = touched.get(path).orNull
753 | if (obj.ref != null) {
754 | result += (ResNames.VALUE -> mutable.Map[String, Any](ResNames.REF -> obj.ref))
755 | } else {
756 | result += (ResNames.VALUE -> parseInfo(obj, depth))
757 | }
758 | result
759 | }
760 |
761 | private def parseInfo(info: ScalaVariableInfo, depth: Int, startTime: Long = System.currentTimeMillis()): Any = {
762 | val loopback = new Loopback {
763 | override def pass(obj: Any, id: String): Any = {
764 | val si = ScalaVariableInfo(isAccessible = true, isLazy = false, obj, null, id, referenceManager.getRef(obj, id))
765 | parseInfo(si, depth - 1)
766 | }
767 | }
768 | handlerManager.handleVariable(info, loopback, depth, startTime)
769 | }
770 |
771 | private def filterVariableNames(interpreterVariablesNames: Seq[String]) = {
772 | val variablesNames = interpreterVariablesNames.seq
773 | .filter { name => !blackList.contains(name) }
774 | .filter { name => whiteList == null || whiteList.contains(name) }
775 |
776 |
777 | val p = Pattern.compile("res\\d*")
778 | val (resVariables, otherVariables: immutable.Seq[String]) = variablesNames.partition(x => p.matcher(x).matches())
779 | val sortedResVariables = resVariables
780 | .map(res => Try(res.stripPrefix("res").toInt))
781 | .filter(_.isSuccess)
782 | .map(_.get)
783 | .sortWith(_ > _)
784 | .take(interpreterResCountLimit)
785 | .map(num => "res" + num)
786 |
787 | val finalNames = otherVariables ++ sortedResVariables
788 | finalNames
789 | }
790 |
791 | //noinspection ScalaUnusedSymbol
792 | private implicit def toJavaFunction[A, B](f: A => B): JFunction[A, B] = new JFunction[A, B] {
793 | override def apply(a: A): B = f(a)
794 | }
795 |
796 | private def checkTimeout(startTimeout: Long, passed: Int, total: Int): Boolean = {
797 | val isTimeoutExceed = System.currentTimeMillis() - startTimeout > timeout
798 | if (isTimeoutExceed)
799 |         errors += s"Variables collection timed out (limit ${timeout}ms). Parsed $passed of $total."
800 | isTimeoutExceed
801 | }
802 | }
803 |
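    |   // Spark-specific handlers: they report lightweight metadata (schema, storage level,
    |   // partition counts, session info) instead of materializing any data on the driver.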
804 | class DatasetHandler extends AbstractTypeHandler {
805 | override def accept(obj: Any): Boolean = obj.isInstanceOf[Dataset[_]]
806 |
807 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = {
808 | val obj = scalaInfo.value
809 | val df = obj.asInstanceOf[Dataset[_]]
810 |
811 |
812 | val schema = df.schema
813 | val jsonSchemaColumns = schema.fields.map(field => {
814 | val value = withJsonObject { jsonField =>
815 | jsonField += "name" -> wrap(field.name, null)
816 | jsonField += "nullable" -> wrap(field.nullable, null)
817 | jsonField += "dataType" -> wrap(field.dataType.typeName, null)
818 | }
819 | wrap(value, "org.apache.spark.sql.types.StructField")
820 | }
821 | )
822 |
823 | val jsonSchema = mutable.Map(
824 | ResNames.VALUE -> jsonSchemaColumns,
825 | ResNames.TYPE -> "org.apache.spark.sql.types.StructType",
826 | ResNames.LENGTH -> jsonSchemaColumns.length
827 | )
828 |
829 | val dfValue = mutable.Map(
830 | "schema()" -> jsonSchema,
831 | "getStorageLevel()" -> wrap(df.storageLevel.toString(), "org.apache.spark.storage.StorageLevel")
832 | )
833 |
834 | mutable.Map(
835 | ResNames.VALUE -> dfValue
836 | )
837 | }
838 | }
839 |
840 |
841 |
842 | class RDDHandler extends AbstractTypeHandler {
843 | override def accept(obj: Any): Boolean = obj.isInstanceOf[RDD[_]]
844 |
845 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = withJsonObject {
846 | json =>
847 | val obj = scalaInfo.value
848 | val rdd = obj.asInstanceOf[RDD[_]]
849 | json += (ResNames.VALUE -> withJsonObject { value =>
850 | value += ("getNumPartitions()" -> wrap(rdd.getNumPartitions, "Int"))
851 | value += ("name" -> wrap(rdd.name, "String"))
852 | value += ("id" -> wrap(rdd.id, "Int"))
853 | value += ("partitioner" -> wrap(rdd.partitioner.toString, "Option[org.apache.spark.Partitioner]"))
854 | value += ("getStorageLevel()" -> wrap(rdd.getStorageLevel.toString, "org.apache.spark.storage.StorageLevel"))
855 | })
856 | }
857 | }
858 |
859 | class SparkContextHandler extends AbstractTypeHandler {
860 | override def accept(obj: Any): Boolean = obj.isInstanceOf[SparkContext]
861 |
862 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = withJsonObject {
863 | json =>
864 | val sc = scalaInfo.value.asInstanceOf[SparkContext]
865 | json += (ResNames.VALUE -> withJsonObject { json =>
866 | json += ("sparkUser" -> wrap(sc.sparkUser, "String"))
867 | json += ("sparkTime" -> wrap(sc.startTime, "Long"))
868 | json += ("applicationId()" -> wrap(sc.applicationId, "String"))
869 | json += ("applicationAttemptId()" -> wrap(sc.applicationAttemptId.toString, "Option[String]"))
870 | json += ("appName()" -> sc.appName)
871 | })
872 | }
873 | }
874 |
875 | class SparkSessionHandler extends AbstractTypeHandler {
876 | override def accept(obj: Any): Boolean = obj.isInstanceOf[SparkSession]
877 |
878 | override def handle(scalaInfo: ScalaVariableInfo, loopback: Loopback, depth: Int): mutable.Map[String, Any] = withJsonObject {
879 | json =>
880 | val obj = scalaInfo.value
881 | val id = scalaInfo.path
882 |
883 | val spark = obj.asInstanceOf[SparkSession]
884 | json += (ResNames.VALUE -> withJsonObject { json =>
885 | json += ("version()" -> spark.version)
886 | json += ("sparkContext" -> loopback.pass(spark.sparkContext, s"$id.sparkContext"))
887 | json += ("sharedState" -> loopback.pass(spark.sharedState, s"$id.sharedState"))
888 | })
889 | }
890 | }
891 |
892 |
893 | /**
894 | * Main section
895 | */
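    |   // Configuration for this run: $intp is the interpreter instance bound by the hosting
    |   // environment; depth controls how deeply object graphs are expanded, the two timeouts (ms)
    |   // bound total and per-variable collection time, and the size limits cap rendered strings
    |   // and collection elements.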
896 | val iMain: IMain = $intp
897 | val depth: Int = 0
898 | val filterUnitResults: Boolean = true
899 | val enableProfiling: Boolean = true
900 | val collectionSizeLimit = 100
901 | val stringSizeLimit = 400
902 | val timeout = 5000
903 | val variableTimeout = 2000
904 | val interpreterResCountLimit = 5
905 | val blackList = "$intp,sc,spark,sqlContext,z,engine".split(',').toList
906 | val whiteList: List[String] = null
907 |
908 |
909 | val variableView = new VariablesView(
910 | intp = iMain,
911 | timeout = timeout,
912 | variableTimeout = variableTimeout,
913 | collectionSizeLimit = collectionSizeLimit,
914 | stringSizeLimit = stringSizeLimit,
915 | blackList = blackList,
916 | whiteList = whiteList,
917 | filterUnitResults = filterUnitResults,
918 | enableProfiling = enableProfiling,
919 | depth = depth,
920 | interpreterResCountLimit = interpreterResCountLimit
921 | )
922 |
923 | implicit val ztoolsFormats: AnyRef with Formats = Serialization.formats(NoTypeHints)
924 | val variablesJson = variableView.getZtoolsJsonResult
925 | println("---ztools-scala---")
926 | println(variablesJson)
927 | println("---ztools-scala---")
928 | }
929 | catch {
930 | case t: Throwable =>
931 | import org.apache.commons.lang.exception.ExceptionUtils
932 | import org.json4s.jackson.Serialization
933 | import org.json4s.{Formats, NoTypeHints}
934 |
935 | implicit val ztoolsFormats: AnyRef with Formats = Serialization.formats(NoTypeHints)
936 | val result = Serialization.write(Map(
937 | "errors" -> Array(f"${ExceptionUtils.getMessage(t)}\n${ExceptionUtils.getStackTrace(t)}")
938 | ))
939 | println("---ztools-scala---")
940 | println(result)
941 | println("---ztools-scala---")
942 | }
--------------------------------------------------------------------------------