├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── beautify-js.py ├── beautify-js.sh ├── build.sbt ├── build.sh ├── build ├── dds.typesafe-conf ├── jarjar-1.4.jar └── rules.txt ├── core ├── build.sbt └── src │ ├── main │ ├── java │ │ └── de │ │ │ └── frosner │ │ │ └── dds │ │ │ └── core │ │ │ └── ManifestMetaData.java │ └── scala │ │ ├── de │ │ └── frosner │ │ │ └── dds │ │ │ ├── analytics │ │ │ ├── ColumnsStatisticsAggregator.scala │ │ │ ├── CorrelationAggregator.scala │ │ │ ├── DateColumnStatisticsAggregator.scala │ │ │ ├── MutualInformationAggregator.scala │ │ │ ├── NominalColumnStatisticsAggregator.scala │ │ │ └── NumericColumnStatisticsAggregator.scala │ │ │ ├── core │ │ │ ├── DDS.scala │ │ │ ├── ScalaFunctions.scala │ │ │ ├── Server.scala │ │ │ ├── SparkCoreFunctions.scala │ │ │ ├── SparkGraphxFunctions.scala │ │ │ └── SparkSqlFunctions.scala │ │ │ ├── servables │ │ │ └── package.scala │ │ │ └── util │ │ │ ├── DataFrameUtils.scala │ │ │ └── ServableUtils.scala │ │ └── org │ │ └── apache │ │ └── spark │ │ └── sql │ │ └── catalyst │ │ └── CatalystTypeConvertersAdapter.scala │ └── test │ ├── resources │ ├── log4j.properties │ └── manual-tests.txt │ └── scala │ └── de │ └── frosner │ └── dds │ ├── analytics │ ├── ColumnsStatisticsAggregatorTest.scala │ ├── CorrelationAggregatorTest.scala │ ├── DateColumnStatisticsAggregatorTest.scala │ ├── MutualInformationAggregatorTest.scala │ ├── NominalColumnStatisticsAggregatorTest.scala │ └── NumericColumnStatisticsAggregatorTest.scala │ ├── core │ └── DDSTest.scala │ └── util │ ├── DataFrameUtilsTest.scala │ └── ServableUtilsTest.scala ├── datasets ├── build.sbt └── src │ ├── main │ ├── resources │ │ └── de │ │ │ └── frosner │ │ │ └── dds │ │ │ └── datasets │ │ │ ├── enron.csv │ │ │ ├── flights.csv │ │ │ └── golf.csv │ └── scala │ │ └── de │ │ └── frosner │ │ └── dds │ │ └── datasets │ │ ├── FlightsRow.scala │ │ ├── GolfRow.scala │ │ └── package.scala │ └── test │ ├── resources │ └── log4j.properties │ └── scala │ └── de │ └── frosner │ └── dds │ └── datasets │ └── DatasetsTest.scala ├── project ├── Dependencies.scala ├── build.properties └── plugin.sbt └── web-ui ├── build.sbt └── src ├── main ├── resources │ ├── src │ │ └── img │ │ │ ├── PRGn.xcf │ │ │ ├── YlOrRd.xcf │ │ │ ├── arrowHead.xcf │ │ │ ├── edgeLabel.xcf │ │ │ ├── jitter.xcf │ │ │ ├── letters.xcf │ │ │ ├── lock.xcf │ │ │ └── nodeLabel.xcf │ └── ui │ │ ├── app │ │ ├── C3Chart.js │ │ ├── Cache.js │ │ ├── Composite.js │ │ ├── Empty.js │ │ ├── Graph.js │ │ ├── Histogram.js │ │ ├── KeyValueSequence.js │ │ ├── Matrix.js │ │ ├── Scatter2D.js │ │ ├── Table.js │ │ ├── Visualization.js │ │ ├── draw.js │ │ ├── init.js │ │ ├── main.js │ │ ├── require.config.js │ │ └── util.js │ │ ├── css │ │ ├── bootstrap.min.css │ │ ├── c3.css │ │ ├── d3.parcoords.css │ │ ├── graph.css │ │ ├── index.css │ │ ├── keyValue.css │ │ ├── matrix.css │ │ ├── scatter.css │ │ └── table.css │ │ ├── img │ │ ├── PRGn.png │ │ ├── YlOrRd.png │ │ ├── arrowHead.png │ │ ├── edgeLabel.png │ │ ├── jitter.png │ │ ├── letters.png │ │ ├── lock.png │ │ ├── logo.svg │ │ ├── nodeLabel.png │ │ └── watermark.svg │ │ ├── index.html │ │ └── lib │ │ ├── c3.min.js │ │ ├── chroma.min.js │ │ ├── d3.parcoords.min.js │ │ ├── d3.v3.min.js │ │ ├── divgrid.min.js │ │ ├── jquery-1.7.min.js │ │ ├── jquery.event.drag-2.2.min.js │ │ ├── require.js │ │ ├── slickgrid │ │ ├── dir │ │ ├── examples.css │ │ ├── images │ │ │ ├── actions.gif │ │ │ ├── ajax-loader-small.gif │ │ │ ├── arrow_redo.png │ │ │ ├── arrow_right_peppermint.png │ │ │ ├── 
arrow_right_spearmint.png │ │ │ ├── arrow_undo.png │ │ │ ├── bullet_blue.png │ │ │ ├── bullet_star.png │ │ │ ├── bullet_toggle_minus.png │ │ │ ├── bullet_toggle_plus.png │ │ │ ├── calendar.gif │ │ │ ├── collapse.gif │ │ │ ├── comment_yellow.gif │ │ │ ├── down.gif │ │ │ ├── drag-handle.png │ │ │ ├── editor-helper-bg.gif │ │ │ ├── expand.gif │ │ │ ├── header-bg.gif │ │ │ ├── header-columns-bg.gif │ │ │ ├── header-columns-over-bg.gif │ │ │ ├── help.png │ │ │ ├── info.gif │ │ │ ├── listview.gif │ │ │ ├── pencil.gif │ │ │ ├── row-over-bg.gif │ │ │ ├── sort-asc.gif │ │ │ ├── sort-asc.png │ │ │ ├── sort-desc.gif │ │ │ ├── sort-desc.png │ │ │ ├── stripes.png │ │ │ ├── tag_red.png │ │ │ ├── tick.png │ │ │ ├── ui-bg_glass_75_dadada_1x400.png │ │ │ ├── ui-bg_glass_75_e6e6e6_1x400.png │ │ │ ├── ui-icons_888888_256x240.png │ │ │ ├── user_identity.gif │ │ │ └── user_identity_plus.gif │ │ ├── jquery-ui-1.8.16.custom.css │ │ ├── jquery.event.drag-2.0.min.js │ │ ├── slick-default-theme.css │ │ ├── slick.core.min.js │ │ ├── slick.dataview.min.js │ │ ├── slick.grid.css │ │ ├── slick.grid.min.js │ │ ├── slick.pager.css │ │ └── slick.pager.min.js │ │ ├── underscore-min.map │ │ └── underscore.min.js └── scala │ └── de │ └── frosner │ └── dds │ └── webui │ ├── servables │ └── ServableJsonProtocol.scala │ └── server │ └── SprayServer.scala └── test ├── resources ├── dds.typesafe-conf └── ui │ ├── mocks │ └── .nothing │ ├── require.config.js │ ├── specs │ ├── Visualization.spec.js │ └── util.spec.js │ └── test.dependencies.js └── scala └── de └── frosner └── dds └── webui ├── servables └── ServableJsonProtocolTest.scala └── server └── SprayServerTest.scala /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | *.bak 4 | 5 | .DS_Store 6 | 7 | # sbt specific 8 | .cache/ 9 | .history/ 10 | .lib/ 11 | dist/* 12 | target/ 13 | lib_managed/ 14 | src_managed/ 15 | project/boot/ 16 | project/plugins/project/ 17 | 18 | # Scala-IDE specific 19 | .scala_dependencies 20 | .worksheet 21 | .idea 22 | 23 | # Metastore 24 | metastore_db 25 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: scala 2 | scala: 3 | - 2.10.5 4 | - 2.11.6 5 | script: 6 | - sbt ++$TRAVIS_SCALA_VERSION build 7 | - sbt ++$TRAVIS_SCALA_VERSION coverage test 8 | - '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && sbt ++$TRAVIS_SCALA_VERSION publish-snapshot || true' 9 | cache: 10 | directories: 11 | - $HOME/.m2 12 | - $HOME/.ivy2 13 | after_success: 14 | - pip install --user codecov && codecov 15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Frank Rosner 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Data-Driven Spark [![Build Status](https://travis-ci.org/FRosner/spawncamping-dds.svg?branch=master)](https://travis-ci.org/FRosner/spawncamping-dds) [![Codacy Badge](https://api.codacy.com/project/badge/grade/0a6362a32754458f83a160eef13de7ae)](https://www.codacy.com/app/frank_7/spawncamping-dds) [![codecov.io](https://codecov.io/github/FRosner/spawncamping-dds/coverage.svg?branch=master)](https://codecov.io/github/FRosner/spawncamping-dds?branch=master) 2 | ### Description 3 | 4 | This library provides a comprehensible and simple interface for quick data exploration based on 5 | [Apache Spark](https://spark.apache.org/) and [D3.js/SVG](http://d3js.org/). The target audience is 6 | data scientists who miss functions like `summary()` and `plot()` from [R](http://www.r-project.org/) 7 | when working on the cluster with the Spark REPL. It does not offer a fully flexible plotting mechanism like [ggplot2](http://ggplot2.org/) but focuses on giving you quick insights into your data. 8 | 9 | ### Usage 10 | 11 | 1. Add spawncamping-dds jar to Spark classpath 12 | 13 | ```sh 14 | ./bin/spark-shell --jars spawncamping-dds-_.jar 15 | ``` 16 | 2. Import core functions and web UI 17 | 18 | ```scala 19 | import de.frosner.dds.core.DDS._ 20 | import de.frosner.dds.webui.server.SprayServer._ 21 | ``` 22 | 23 | 3. Start the web server + user interface 24 | 25 | ```scala 26 | start() 27 | ``` 28 | 29 | 4. Explore your data 30 | 31 | ```scala 32 | // load example data set 33 | val sql = new org.apache.spark.sql.SQLContext(sc) 34 | val golf = de.frosner.dds.datasets.golf(sql) 35 | 36 | // look at a sample of your data set 37 | show(golf) 38 | 39 | // compute column statistics 40 | summarize(golf) 41 | 42 | // visualize column dependencies 43 | mutualInformation(golf) 44 | ``` 45 | 46 | 5. Stop the server once you are done 47 | 48 | ```scala 49 | stop() 50 | ``` 51 | 52 | See the [User Guide](https://github.com/FRosner/spawncamping-dds/wiki/User-Guide) for a detailed explanation of the provided functionality. 53 | To achieve the best user experience for the web UI, we recommend using [Google Chrome](https://www.google.com/chrome/). 54 | 55 | ### Get Data-Driven Spark 56 | 57 | You can either grab the [latest release artifact](https://github.com/FRosner/spawncamping-dds/releases), use the most recent [SNAPSHOT](http://spawncamping-dds-snapshots.s3-website-us-east-1.amazonaws.com/) or build from source (`sbt build`). Data-Driven Spark (DDS) 4.x.y is currently developed and built against Spark 1.5. It can be cross built against Scala version 2.10 and 2.11, depending on which version was used to build your Spark. For older versions of Spark, please refer to the following table: 58 | 59 | | DDS Versions | Spark Versions | 60 | | --- | --- | 61 | | 4.x.y | 1.5.x | 62 | | 3.x.y | 1.4.x | 63 | | 2.x.y | 1.3.x | 64 | | 1.x.y | 1.2.x | 65 | 66 | 67 | ### Contribution 68 | 69 | Any contribution, e.g. in form of feature requests, comments, code reviews, pull requests are very welcome. Pull requests will be reviewed before they are merged and it makes sense to coordinate with one of the main committers before starting to work on something big. 70 | 71 | Please follow the general code style convention of Scala. It is advised to stick to the formatting / code style of the surrounding code when making changes to existing files. 
Reformatting should be done in separate commits. 72 | 73 | All (most of the) code committed should be covered by some automated unit tests. All existing tests need to pass before committing changes. 74 | 75 | Please view the [Developer Guide](https://github.com/FRosner/spawncamping-dds/wiki/Developer-Guide) for additional information about extending DDS. 76 | 77 | ### Authors 78 | 79 | - [Frank Rosner](https://github.com/FRosner) (Creator) 80 | - [Aleksandr Sorokoumov](https://github.com/Gerrrr) (Contributor) 81 | - [Rick Moritz](https://github.com/RPCMoritz) (Contributor) 82 | - [Milos Krstajic](https://github.com/milosk) (Contributor) 83 | - [Z. Chen](https://github.com/zhdchen) (Contributor) 84 | - [Basil Komboz](https://github.com/bkomboz) (Contributor) 85 | 86 | ### Licensing 87 | 88 | This project is licensed under the Apache License Version 2.0. For details please see the file called LICENSE. 89 | 90 | ### Included Libraries 91 | 92 | | Library | License | 93 | | ------------ | -------------- | 94 | | [spray](http://spray.io/) | Apache 2 | 95 | | [scalaj-http](https://github.com/scalaj/scalaj-http) | Apache 2 | 96 | | [D3.js](http://d3js.org/) | Custom | 97 | | [C3.js](http://c3js.org/) | MIT | 98 | | [Parallel Coordinates](https://github.com/syntagmatic/parallel-coordinates) | Custom | 99 | | [jQuery](http://jquery.com/) | Custom (MITish) | 100 | | [SlickGrid](https://github.com/mleibman/SlickGrid) | MIT | 101 | | [Chroma.js](https://github.com/gka/chroma.js) | BSD | 102 | | [Underscore.js](http://underscorejs.org/) | MIT | 103 | | [Bootstrap CSS](http://getbootstrap.com) | MIT | 104 | | [Scalaz](https://github.com/scalaz/scalaz) | Custom | 105 | -------------------------------------------------------------------------------- /beautify-js.py: -------------------------------------------------------------------------------- 1 | # pip install jsbeautifier 2 | import jsbeautifier 3 | import sys 4 | 5 | if len(sys.argv) < 2: 6 | print "Please specify the js file to beautify as a command line argument!" 7 | sys.exit(1) 8 | 9 | fileName = sys.argv[1] 10 | print "Beautifying " + fileName 11 | 12 | opts = jsbeautifier.default_options() 13 | opts.indent_size = 2 14 | opts.break_chained_methods = True 15 | opts.wrap_line_length = 120 16 | opts.end_with_newline = True 17 | 18 | beautified = jsbeautifier.beautify_file(fileName, opts) 19 | f = open(fileName, 'w') 20 | f.write(beautified) 21 | f.close() 22 | -------------------------------------------------------------------------------- /beautify-js.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | hash python 2>/dev/null || { echo >&2 "Python executable needed but not installed. Aborting."; exit 1; } 4 | 5 | ugly=$1 6 | if [ -d "${ugly}" ] ; then 7 | for f in $ugly/*.js 8 | do 9 | python beautify-js.py $f 10 | done 11 | else 12 | if [ -f "${ugly}" ]; then 13 | python beautify-js.py $ugly 14 | else 15 | echo "$ugly is neither a file nor a directory. Please specify the file or a directory to beautify." 
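# For reference, valid invocations look roughly like the following (paths are
# illustrative examples taken from the repository layout, not tested command lines):
#   ./beautify-js.sh web-ui/src/main/resources/ui/app            # beautify every *.js file in a directory
#   ./beautify-js.sh web-ui/src/main/resources/ui/app/main.js    # beautify a single file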
16 | fi 17 | fi 18 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | ///////////// 2 | // Imports // 3 | ///////////// 4 | import uk.gov.hmrc.GitStampPlugin._ 5 | 6 | import S3._ 7 | 8 | import Dependencies._ 9 | 10 | //////////////////////////////////////////////// 11 | // Common Settings Shared Across Sub-Projects // 12 | //////////////////////////////////////////////// 13 | lazy val rootProjectName = settingKey[String]("Name of the root project") 14 | 15 | lazy val commonMetaInformationSettings = Seq( 16 | organization := "de.frosner", 17 | version := "4.0.0-gamma-SNAPSHOT", 18 | scalaVersion := "2.10.6", 19 | rootProjectName := "spawncamping-dds" 20 | ) 21 | 22 | lazy val commonCompileSettings = Seq( 23 | fork in Compile := true, 24 | scalacOptions := Seq("-unchecked", "-deprecation", "-encoding", "utf8"), 25 | resolvers += "jitpack" at "https://jitpack.io" 26 | ) 27 | 28 | lazy val commonSettings = commonMetaInformationSettings ++ commonCompileSettings 29 | 30 | commonSettings 31 | 32 | //////////////////////////////// 33 | // Root Project Only Settings // 34 | //////////////////////////////// 35 | lazy val shortScalaVersion = settingKey[String]("Scala major and minor version.") 36 | 37 | lazy val finalArtifactName = settingKey[String]("Name of the final artifact.") 38 | 39 | lazy val rootMetaInformationSettings = Seq( 40 | name := rootProjectName.value 41 | ) 42 | 43 | lazy val rootAssemblySettings = Seq(gitStampSettings:_*) ++ Seq( 44 | shortScalaVersion := scalaVersion.value.split("\\.").take(2).mkString("."), 45 | finalArtifactName := s"${name.value}-${version.value}_${shortScalaVersion.value}.jar", 46 | test in assembly := {}, 47 | assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeScala = false), 48 | assemblyJarName in assembly := finalArtifactName.value 49 | ) 50 | 51 | lazy val rootSettings = rootMetaInformationSettings ++ rootAssemblySettings 52 | 53 | rootSettings // TODO check where we need these settings and if it makes sense to include them into common settings? 54 | 55 | /////////////////////// 56 | // Custom Build Task // 57 | /////////////////////// 58 | lazy val build = taskKey[Unit]("Jarjar link the assembly jar!") 59 | 60 | build <<= assembly map { (asm) => s"./build.sh ${asm.getAbsolutePath()}" ! 
} 61 | 62 | ///////////////////////////////// 63 | // Custom Artficat Upload Task // 64 | ///////////////////////////////// 65 | lazy val currentBranch = System.getenv("TRAVIS_BRANCH") 66 | 67 | val isSnapshotBranch = settingKey[Boolean]("Snapshot branch is active") 68 | 69 | isSnapshotBranch := (currentBranch != null) && (currentBranch == "master" || currentBranch.startsWith("release/")) 70 | 71 | val dontPublishTask = TaskKey[Unit]("dont-publish-to-s3", "Don't publish branch SNAPSHOT to S3.") 72 | 73 | dontPublishTask <<= (streams) map { (s) => { 74 | s.log.info(s"""Not publishing artifact to S3 (on branch $currentBranch)""") 75 | } 76 | } 77 | 78 | val publishOrDontPublishTask = TaskKey[Unit]("publish-snapshot", "Publish depending on the current branch.") 79 | 80 | publishOrDontPublishTask := Def.taskDyn({ 81 | if(isSnapshotBranch.value) S3.upload.toTask 82 | else dontPublishTask.toTask 83 | }).value 84 | 85 | s3Settings 86 | 87 | mappings in upload := Seq((new java.io.File(s"${System.getProperty("user.dir")}/target/scala-${shortScalaVersion.value}/${finalArtifactName.value}"),finalArtifactName.value)) 88 | 89 | host in upload := "spawncamping-dds-snapshots.s3.amazonaws.com" 90 | 91 | credentials += Credentials("Amazon S3", "spawncamping-dds-snapshots.s3.amazonaws.com", System.getenv("ARTIFACTS_KEY"), System.getenv("ARTIFACTS_SECRET")) 92 | 93 | /////////////////////// 94 | // Project Structure // 95 | /////////////////////// 96 | lazy val root = (project in file(".")). 97 | settings((commonSettings ++ rootSettings): _*). 98 | settings( 99 | name := rootProjectName.value 100 | ). 101 | aggregate(core, datasets, webUi). 102 | dependsOn(core, datasets, webUi) 103 | 104 | lazy val core = (project in file("core")). 105 | settings((commonSettings ++ rootSettings): _*). 106 | settings( 107 | name := rootProjectName.value + "-core", 108 | libraryDependencies ++= coreDependencies 109 | ) 110 | 111 | lazy val datasets = (project in file("datasets")). 112 | settings((commonSettings ++ rootSettings): _*). 113 | settings( 114 | name := rootProjectName.value + "-datasets", 115 | libraryDependencies ++= datasetsDependencies 116 | ) 117 | 118 | lazy val webUi = (project in file("web-ui")). 119 | dependsOn(core). 120 | settings((commonSettings ++ rootSettings): _*). 121 | settings( 122 | name := rootProjectName.value + "-web-ui", 123 | libraryDependencies ++= webUiDependencies 124 | ) 125 | -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | assemblyJar=$1 2 | jarjarjar=$assemblyJar-jarjared.jar 3 | 4 | echo "[info] Jarjaring $assemblyJar into $jarjarjar" 5 | java -jar build/jarjar-1.4.jar process build/rules.txt $assemblyJar $jarjarjar 6 | 7 | oldSprayConf=reference.conf 8 | echo "[info] Deleting $oldSprayConf from $jarjarjar" 9 | zip -d $jarjarjar $oldSprayConf >> build.log 10 | 11 | newSprayConf=dds.typesafe-conf 12 | echo "[info] Including $newSprayConf into $jarjarjar" 13 | cd build 14 | zip -g $jarjarjar $newSprayConf >> build.log 15 | cd .. 
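# At this point $jarjarjar should contain the classes relocated according to
# build/rules.txt (akka.** and com.typesafe.** moved under de.frosner.dds.*) plus the
# renamed Typesafe config. A quick, optional sanity check (illustrative only, not part
# of the build) could list a relocated package to confirm the rewrite worked:
#   unzip -l "$jarjarjar" | grep "de/frosner/dds/akka" | head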
16 | 17 | echo "[info] Replacing $assemblyJar by $jarjarjar" 18 | mv $jarjarjar $assemblyJar 19 | -------------------------------------------------------------------------------- /build/jarjar-1.4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/build/jarjar-1.4.jar -------------------------------------------------------------------------------- /build/rules.txt: -------------------------------------------------------------------------------- 1 | rule akka.** de.frosner.dds.akka.@1 2 | rule com.typesafe.** de.frosner.dds.typesafe.@1 3 | -------------------------------------------------------------------------------- /core/build.sbt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/core/build.sbt -------------------------------------------------------------------------------- /core/src/main/java/de/frosner/dds/core/ManifestMetaData.java: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.core; 2 | 3 | import org.apache.log4j.Logger; 4 | 5 | import java.io.IOException; 6 | import java.net.MalformedURLException; 7 | import java.net.URL; 8 | import java.util.jar.Attributes; 9 | import java.util.jar.Manifest; 10 | 11 | // TODO move this to root project or keep here? is it even working? 12 | public class ManifestMetaData { 13 | 14 | private static Logger logger = Logger.getLogger("DDS"); 15 | 16 | public static void logWelcomeMessage() { 17 | try { 18 | Class clazz = ManifestMetaData.class; 19 | String className = clazz.getSimpleName() + ".class"; 20 | String classPath = clazz.getResource(className).toString(); 21 | if (classPath.startsWith("jar")) { 22 | String manifestPath = classPath.substring(0, classPath.lastIndexOf("!") + 1) + "/META-INF/MANIFEST.MF"; 23 | Manifest manifest; 24 | manifest = new Manifest(new URL(manifestPath).openStream()); 25 | Attributes attr = manifest.getMainAttributes(); 26 | 27 | logger.info("Initializing " + attr.getValue("Implementation-Title") + "-" + 28 | attr.getValue("Implementation-Version") + " (" + attr.getValue("Git-Head-Rev") + ", " + 29 | attr.getValue("Git-Branch") + ", " + attr.getValue("Build-Date") + ")"); 30 | } 31 | } catch (MalformedURLException e) { 32 | logger.error(e.getMessage()); 33 | } catch (IOException e) { 34 | logger.error(e.getMessage()); 35 | } 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/analytics/ColumnsStatisticsAggregator.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.analytics 2 | 3 | import org.apache.spark.sql.Row 4 | import org.apache.spark.sql.types._ 5 | import de.frosner.dds.util.DataFrameUtils.{getDateFields, getNumericFields, getNominalFields, numericAsDouble, dateOrTimeAsTimestamp, anyAsAny} 6 | import scala.collection.mutable 7 | 8 | case class ColumnsStatisticsAggregator(schema: StructType) extends Serializable { 9 | 10 | def totalCount = { 11 | // This assumes that all aggregators have the same count (which should be the case) 12 | val allAggregators = numericAggregators.map{ case (idx, (agg, field)) => agg } ++ 13 | dateAggregators.map{ case (idx, (agg, field)) => agg } ++ 14 | nominalAggregators.map{ case (idx, (agg, field)) => agg } 15 | 
allAggregators.headOption.map(_.asInstanceOf[{ def totalCount: Long }].totalCount).getOrElse(0L) 16 | } 17 | 18 | private var numericAggregators: mutable.Map[Int, (NumericColumnStatisticsAggregator, StructField)] = { 19 | val numericFields = getNumericFields(schema) 20 | val numericAggregators = numericFields.map{ case (index, field) => (index, (new NumericColumnStatisticsAggregator(), field))} 21 | new mutable.HashMap() ++ numericAggregators 22 | } 23 | 24 | private var dateAggregators: mutable.Map[Int, (DateColumnStatisticsAggregator, StructField)] = { 25 | val dateFields = getDateFields(schema) 26 | val dateAggregators = dateFields.map{ case (index, field) => (index, (new DateColumnStatisticsAggregator(), field))} 27 | new mutable.HashMap() ++ dateAggregators 28 | } 29 | 30 | private var nominalAggregators: mutable.Map[Int, (NominalColumnStatisticsAggregator, StructField)] = { 31 | val nominalFields = getNominalFields(schema) 32 | val nominalAggregators = nominalFields.map{ case (index, field) => (index, (new NominalColumnStatisticsAggregator(), field))} 33 | new mutable.HashMap() ++ nominalAggregators 34 | } 35 | 36 | def iterate(row: Row): ColumnsStatisticsAggregator = { 37 | for ((idx, (agg, field)) <- numericAggregators) { 38 | agg.iterate(numericAsDouble(row, idx, field)) 39 | } 40 | for ((idx, (agg, field)) <- dateAggregators) { 41 | agg.iterate(dateOrTimeAsTimestamp(row, idx, field)) 42 | } 43 | for ((idx, (agg, field)) <- nominalAggregators) { 44 | agg.iterate(anyAsAny(row, idx, field)) 45 | } 46 | this 47 | } 48 | 49 | def merge(intermediateAggregator: ColumnsStatisticsAggregator): ColumnsStatisticsAggregator = { 50 | require(schema == intermediateAggregator.schema, "The schemas of two aggregators to be merged must match") 51 | 52 | val intermediateNumericAggregators = intermediateAggregator.numericAggregators 53 | numericAggregators = numericAggregators.map{ case (idx, (agg, field)) => { 54 | val (intermediateAgg, _) = intermediateNumericAggregators(idx) 55 | (idx, (agg.merge(intermediateAgg), field)) 56 | }} 57 | 58 | val intermediateDateAggregators = intermediateAggregator.dateAggregators 59 | dateAggregators = dateAggregators.map{ case (idx, (agg, field)) => { 60 | val (intermediateAgg, _) = intermediateDateAggregators(idx) 61 | (idx, (agg.merge(intermediateAgg), field)) 62 | }} 63 | 64 | val intermediateNominalAggregators = intermediateAggregator.nominalAggregators 65 | nominalAggregators = nominalAggregators.map{ case (idx, (agg, field)) => { 66 | val (intermediateAgg, _) = intermediateNominalAggregators(idx) 67 | (idx, (agg.merge(intermediateAgg), field)) 68 | }} 69 | 70 | this 71 | } 72 | 73 | def numericColumns = numericAggregators.toMap 74 | 75 | def dateColumns = dateAggregators.toMap 76 | 77 | def nominalColumns = nominalAggregators.toMap 78 | 79 | } 80 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/analytics/CorrelationAggregator.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.analytics 2 | 3 | import scala.collection.mutable 4 | 5 | class CorrelationAggregator(val numColumns: Int) extends Serializable { 6 | 7 | require(numColumns > 0, "You need to pass a positive number of columns to use the aggregator.") 8 | 9 | def isEmpty: Boolean = !iteratedOnce 10 | private[this] var iteratedOnce = false 11 | 12 | private[analytics] var aggregators: mutable.Map[(Int, Int), (NumericColumnStatisticsAggregator, 
NumericColumnStatisticsAggregator)] = 13 | initializeMapWith(numColumns)((new NumericColumnStatisticsAggregator(), new NumericColumnStatisticsAggregator())) 14 | 15 | private[analytics] var runningCov: mutable.Map[(Int, Int), (Double)] = initializeMapWith(numColumns)(0d) 16 | 17 | def iterateWithoutNulls(columns: Seq[Double]): CorrelationAggregator = { 18 | iterate(columns.map(d => Option(d))) 19 | } 20 | 21 | def iterate(columns: Seq[Option[Double]]): CorrelationAggregator = { 22 | require(columns.size == numColumns) 23 | iteratedOnce = true 24 | val columnsWithIndex = columns.zipWithIndex 25 | for ((column1, idx1) <- columnsWithIndex; (column2, idx2) <- columnsWithIndex; if idx1 < idx2) 26 | if (column1.isDefined && column2.isDefined) { 27 | val value1 = column1.get 28 | val value2 = column2.get 29 | 30 | val (agg1, agg2) = aggregators(idx1, idx2) 31 | val prevCount = agg1.nonMissingCount.toDouble 32 | val (prevMean1, prevMean2) = (agg1.mean, agg2.mean) 33 | agg1.iterate(Option(value1)) 34 | agg2.iterate(Option(value2)) 35 | val newCount = agg1.nonMissingCount.toDouble 36 | val prevCov = runningCov((idx1, idx2)) 37 | val newCov = if (prevCount != 0d) 38 | (prevCov * prevCount + prevCount / newCount * (value1 - prevMean1) * (value2 - prevMean2)) / newCount 39 | else 40 | 0d 41 | runningCov.update((idx1, idx2), newCov) 42 | } 43 | this 44 | } 45 | 46 | def merge(intermediateAggregator: CorrelationAggregator): CorrelationAggregator = { 47 | require(numColumns == intermediateAggregator.numColumns) 48 | iteratedOnce = iteratedOnce || !intermediateAggregator.isEmpty 49 | 50 | for (((i, j), (agg1, agg2)) <- aggregators) { 51 | val (intermediateAgg1, intermediateAgg2) = intermediateAggregator.aggregators((i, j)) 52 | val count = agg1.nonMissingCount.toDouble 53 | val intermediateCount = intermediateAgg1.nonMissingCount.toDouble 54 | val coMoment = runningCov((i, j)) * count 55 | val intermediateCoMoment = intermediateAggregator.runningCov((i, j)) * intermediateCount 56 | val mergedCov = if (agg1.totalCount == 0) { 57 | intermediateCoMoment / intermediateCount 58 | } else if (intermediateAgg1.totalCount == 0) { 59 | coMoment / count 60 | } else { 61 | val totalCount = (agg1.nonMissingCount + intermediateAgg1.nonMissingCount).toDouble 62 | (coMoment + intermediateCoMoment + (agg1.mean - intermediateAgg1.mean) * (agg2.mean - intermediateAgg2.mean) * 63 | (agg1.nonMissingCount * intermediateAgg1.nonMissingCount / totalCount)) / totalCount 64 | } 65 | runningCov.update((i, j), mergedCov) 66 | aggregators.update((i, j), (agg1.merge(intermediateAgg1), agg2.merge(intermediateAgg2))) 67 | } 68 | this 69 | } 70 | 71 | private def means: Map[(Int, Int), (Double, Double)] = aggregators.map{ 72 | case ((i, j), (aggI, aggJ)) => (i, j) -> (aggI.mean, aggJ.mean) 73 | }.toMap 74 | 75 | def pearsonCorrelations: Map[(Int, Int), Double] = { 76 | val curMeans = means 77 | val halfOfTheMatrix = aggregators.map{ case ((idx1, idx2), (agg1, agg2)) => { 78 | ((idx1, idx2), runningCov((idx1, idx2)) / (agg1.stdev * agg2.stdev)) 79 | }} 80 | val diagonalOfTheMatrix = (0 to numColumns - 1).map(idx => ((idx, idx), 1d)) 81 | val otherHalfOfTheMatrix = halfOfTheMatrix.map { case ((idx1, idx2), correlation) => ((idx2, idx1), correlation) } 82 | (halfOfTheMatrix ++ diagonalOfTheMatrix ++ otherHalfOfTheMatrix).toMap 83 | } 84 | 85 | private def initializeMapWith[V](size: Int)(zeroValue: => V) = 86 | mutable.HashMap.empty[(Int, Int), V].++( 87 | for (i <- 0 to size - 1; j <- 0 to size - 1; if i < j) yield ((i, j), zeroValue) 88 | ) 
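  // Hedged usage sketch (illustrative only, not part of the class): the aggregator is
  // meant to be driven in a fold/aggregate style over rows of optional doubles, e.g.
  // with a hypothetical RDD[Seq[Option[Double]]] called `rows` and a known `numColumns`:
  //
  //   val agg = rows.aggregate(new CorrelationAggregator(numColumns))(
  //     (acc, row) => acc.iterate(row),      // per-partition accumulation
  //     (acc1, acc2) => acc1.merge(acc2)     // combine partial aggregators
  //   )
  //   val correlations: Map[(Int, Int), Double] = agg.pearsonCorrelations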
89 | 90 | } 91 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/analytics/DateColumnStatisticsAggregator.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.analytics 2 | 3 | import java.sql.Timestamp 4 | import java.util.Calendar 5 | 6 | import scalaz._ 7 | import Scalaz._ 8 | 9 | import scala.collection.mutable 10 | 11 | import DateColumnStatisticsAggregator._ 12 | 13 | class DateColumnStatisticsAggregator extends Serializable { 14 | 15 | private var counts: NominalColumnStatisticsAggregator = new NominalColumnStatisticsAggregator() 16 | private var runningYearFrequencies: mutable.Map[Int, Long] = mutable.HashMap.empty 17 | private var runningMonthFrequencies: mutable.Map[Int, Long] = mutable.HashMap.empty 18 | private var runningDayOfWeekFrequencies: mutable.Map[Int, Long] = mutable.HashMap.empty 19 | 20 | def iterate(value: Option[Timestamp]): DateColumnStatisticsAggregator = { 21 | counts = counts.iterate(value) 22 | if (value.isDefined) { 23 | val calendar = Calendar.getInstance() 24 | calendar.setTime(value.get) 25 | val year = calendar.get(Calendar.YEAR) 26 | val month = calendar.get(Calendar.MONTH) 27 | val day = calendar.get(Calendar.DAY_OF_WEEK) 28 | runningYearFrequencies.update(year, runningYearFrequencies.getOrElse(year, 0l) + 1l) 29 | runningMonthFrequencies.update(month, runningMonthFrequencies.getOrElse(month, 0l) + 1l) 30 | runningDayOfWeekFrequencies.update(day, runningDayOfWeekFrequencies.getOrElse(day, 0l) + 1l) 31 | } else { 32 | runningYearFrequencies.update(NULL_YEAR, runningYearFrequencies.getOrElse(NULL_YEAR, 0l) + 1l) 33 | runningMonthFrequencies.update(NULL_MONTH, runningMonthFrequencies.getOrElse(NULL_MONTH, 0l) + 1l) 34 | runningDayOfWeekFrequencies.update(NULL_DAY, runningDayOfWeekFrequencies.getOrElse(NULL_DAY, 0l) + 1l) 35 | } 36 | this 37 | } 38 | 39 | def merge(that: DateColumnStatisticsAggregator): DateColumnStatisticsAggregator = { 40 | counts = counts.merge(that.counts) 41 | runningYearFrequencies = mutable.HashMap.empty ++ 42 | (runningYearFrequencies.toMap |+| that.runningYearFrequencies.toMap) 43 | runningMonthFrequencies = mutable.HashMap.empty ++ 44 | (runningMonthFrequencies.toMap |+| that.runningMonthFrequencies.toMap) 45 | runningDayOfWeekFrequencies = mutable.HashMap.empty ++ 46 | (runningDayOfWeekFrequencies.toMap |+| that.runningDayOfWeekFrequencies.toMap) 47 | this 48 | } 49 | 50 | def totalCount = counts.totalCount 51 | 52 | def missingCount = counts.missingCount 53 | 54 | def nonMissingCount = counts.nonMissingCount 55 | 56 | def yearFrequencies = runningYearFrequencies.toMap 57 | 58 | def monthFrequencies = runningMonthFrequencies.toMap 59 | 60 | def dayOfWeekFrequencies = runningDayOfWeekFrequencies.toMap 61 | 62 | def topYear = yearFrequencies.maxBy{ case (year, freq) => freq } 63 | 64 | def topMonth = monthFrequencies.maxBy{ case (month, freq) => freq } 65 | 66 | def topDayOfWeek = dayOfWeekFrequencies.maxBy{ case (day, freq) => freq } 67 | 68 | } 69 | 70 | object DateColumnStatisticsAggregator { 71 | 72 | val NULL_YEAR = Integer.MAX_VALUE 73 | 74 | val NULL_MONTH = Integer.MAX_VALUE 75 | 76 | val NULL_DAY = Integer.MAX_VALUE 77 | 78 | def calendarYearToString(year: Int) = year match { 79 | case DateColumnStatisticsAggregator.NULL_YEAR => "NULL" 80 | case normalYear => normalYear.toString 81 | } 82 | 83 | def calendarMonthToString(month: Int) = month match { 84 | case Calendar.JANUARY => "Jan" 85 | case 
Calendar.FEBRUARY => "Feb" 86 | case Calendar.MARCH => "Mar" 87 | case Calendar.APRIL => "Apr" 88 | case Calendar.MAY => "May" 89 | case Calendar.JUNE => "Jun" 90 | case Calendar.JULY => "Jul" 91 | case Calendar.AUGUST => "Aug" 92 | case Calendar.SEPTEMBER => "Sep" 93 | case Calendar.OCTOBER => "Oct" 94 | case Calendar.NOVEMBER => "Nov" 95 | case Calendar.DECEMBER => "Dec" 96 | case DateColumnStatisticsAggregator.NULL_MONTH => "NULL" 97 | } 98 | 99 | def calendarDayToString(day: Int) = day match { 100 | case Calendar.MONDAY => "Mon" 101 | case Calendar.TUESDAY => "Tue" 102 | case Calendar.WEDNESDAY => "Wed" 103 | case Calendar.THURSDAY => "Thu" 104 | case Calendar.FRIDAY => "Fri" 105 | case Calendar.SATURDAY => "Sat" 106 | case Calendar.SUNDAY => "Sun" 107 | case DateColumnStatisticsAggregator.NULL_DAY => "NULL" 108 | } 109 | 110 | } 111 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/analytics/MutualInformationAggregator.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.analytics 2 | 3 | import scala.collection.mutable 4 | 5 | class MutualInformationAggregator(val numColumns: Int) extends Serializable { 6 | 7 | require(numColumns > 0, "You need to pass a positive number of columns to use the aggregator.") 8 | 9 | private[analytics] var columnCounts: mutable.ArrayBuffer[mutable.Map[Any, Long]] = 10 | mutable.ArrayBuffer.empty ++ List.fill(numColumns)(mutable.HashMap.empty[Any, Long]) 11 | 12 | private[analytics] var crossColumnCounts: mutable.Map[(Int, Int), mutable.Map[(Any, Any), Long]] = { 13 | var pXY = mutable.HashMap.empty[(Int, Int), mutable.Map[(Any, Any), Long]] 14 | for (i <- 0 to numColumns - 1; j <- 0 to numColumns - 1; if i <= j) { 15 | pXY.put((i, j), mutable.HashMap.empty) 16 | } 17 | pXY 18 | } 19 | 20 | def isEmpty: Boolean = !iteratedOnce 21 | private[this] var iteratedOnce = false 22 | 23 | def iterate(columns: Seq[Any]): MutualInformationAggregator = { 24 | require(columns.size == numColumns) 25 | iteratedOnce = true 26 | for ((value, counts) <- columns.zip(columnCounts)) { 27 | counts.get(value) match { 28 | case Some(count) => counts.update(value, count + 1) 29 | case None => counts.put(value, 1) 30 | } 31 | } 32 | val columnsWithIndex = columns.zipWithIndex 33 | for ((value1, idx1) <- columnsWithIndex; (value2, idx2) <- columnsWithIndex; if idx1 <= idx2) { 34 | val crossCounts = crossColumnCounts((idx1, idx2)) 35 | crossCounts.get((value1, value2)) match { 36 | case Some(crossCount) => crossCounts.update((value1, value2), crossCount + 1) 37 | case None => crossCounts.put((value1, value2), 1) 38 | } 39 | } 40 | this 41 | } 42 | 43 | def merge(intermediateAggregator: MutualInformationAggregator): MutualInformationAggregator = { 44 | require(numColumns == intermediateAggregator.numColumns) 45 | iteratedOnce = iteratedOnce || !intermediateAggregator.isEmpty 46 | for ((counts, intermediateCounts) <- columnCounts.zip(intermediateAggregator.columnCounts); 47 | (intermediateValue, intermediateCount) <- intermediateCounts) { 48 | counts.get(intermediateValue) match { 49 | case Some(count) => counts.update(intermediateValue, count + intermediateCount) 50 | case None => counts.put(intermediateValue, intermediateCount) 51 | } 52 | } 53 | for (((idx1, idx2), intermediateCrossCounts) <- intermediateAggregator.crossColumnCounts; 54 | crossCounts = crossColumnCounts((idx1, idx2)); 55 | ((value1, value2), intermediateCrossCount) <- 
intermediateCrossCounts) { 56 | crossCounts.get((value1, value2)) match { 57 | case Some(crossCount) => crossCounts.update((value1, value2), crossCount + intermediateCrossCount) 58 | case None => crossCounts.put((value1, value2), intermediateCrossCount) 59 | } 60 | } 61 | this 62 | } 63 | 64 | def mutualInformation: Map[(Int, Int), Double] = { 65 | val totalCount = columnCounts.head.map{ case (key, value) => value }.sum.toDouble 66 | val columnProbabilities = columnCounts.map(counts => { 67 | counts.mapValues(_ / totalCount) 68 | }) 69 | val mutualColumnInformation = crossColumnCounts.map{ case ((idx1, idx2), crossCounts) => { 70 | val crossInformationCounts = crossCounts.map{ case ((value1, value2), crossCount) => { 71 | val column1Probabilities = columnProbabilities(idx1) 72 | val column2Probabilities = columnProbabilities(idx2) 73 | val crossProbability = crossCount / totalCount 74 | val crossInformation = crossProbability * 75 | (Math.log(crossProbability) - Math.log(column1Probabilities(value1)) - Math.log(column2Probabilities(value2))) 76 | ((value1, value2), crossInformation) 77 | }} 78 | val mutualInformation = crossInformationCounts.map{ case (key, value) => value }.sum 79 | ((idx1, idx2), mutualInformation) 80 | }} 81 | val otherHalfMutualColumnInfo = for (((idx1, idx2), mutualInformation) <- mutualColumnInformation; if idx1 != idx2) 82 | yield ((idx2, idx1), mutualInformation) 83 | (mutualColumnInformation ++ otherHalfMutualColumnInfo).toMap 84 | } 85 | 86 | def mutualInformationMetric: Map[(Int, Int), Double] = { 87 | val mutualInformationMatrix = mutualInformation 88 | for (((i, j), mi) <- mutualInformationMatrix) 89 | yield ((i, j), (mi / Math.max(mutualInformationMatrix(i,i), mutualInformationMatrix(j,j)))) 90 | } 91 | 92 | } 93 | 94 | object MutualInformationAggregator { 95 | val NO_NORMALIZATION = "no normalization" 96 | val METRIC_NORMALIZATION = "metric" 97 | val DEFAULT_NORMALIZATION = METRIC_NORMALIZATION 98 | 99 | def isValidNormalization(normalization: String) = Set(NO_NORMALIZATION, METRIC_NORMALIZATION).contains(normalization) 100 | } 101 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/analytics/NominalColumnStatisticsAggregator.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.analytics 2 | 3 | class NominalColumnStatisticsAggregator extends Serializable { 4 | 5 | private[analytics] var runningTotalCount = 0l 6 | private[analytics] var runningMissingCount = 0l 7 | 8 | def iterate(value: Option[Any]): NominalColumnStatisticsAggregator = { 9 | runningTotalCount = runningTotalCount + 1 10 | if (value.isEmpty) { 11 | runningMissingCount = runningMissingCount + 1 12 | } 13 | this 14 | } 15 | 16 | def merge(intermediateAggregator: NominalColumnStatisticsAggregator): NominalColumnStatisticsAggregator = { 17 | runningTotalCount = runningTotalCount + intermediateAggregator.runningTotalCount 18 | runningMissingCount = runningMissingCount + intermediateAggregator.runningMissingCount 19 | this 20 | } 21 | 22 | def totalCount = runningTotalCount 23 | 24 | def missingCount = runningMissingCount 25 | 26 | def nonMissingCount = totalCount - missingCount 27 | 28 | } 29 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/analytics/NumericColumnStatisticsAggregator.scala: -------------------------------------------------------------------------------- 1 | package 
de.frosner.dds.analytics 2 | 3 | class NumericColumnStatisticsAggregator extends Serializable { 4 | 5 | private var counts: NominalColumnStatisticsAggregator = new NominalColumnStatisticsAggregator() 6 | private var runningMin = Double.PositiveInfinity 7 | private var runningMax = Double.NegativeInfinity 8 | private var runningSum = 0d 9 | private var runningMean = 0d 10 | private var runningMeanSquareDif = 0d 11 | 12 | def iterate(value: Option[Double]): NumericColumnStatisticsAggregator = { 13 | counts = counts.iterate(value) 14 | if (value.isDefined) { 15 | val actualValue = value.get 16 | val delta = actualValue - runningMean 17 | runningSum = runningSum + actualValue 18 | runningMean = runningMean + delta / nonMissingCount 19 | runningMeanSquareDif = runningMeanSquareDif + delta * (actualValue - runningMean) 20 | runningMin = Math.min(runningMin, actualValue) 21 | runningMax = Math.max(runningMax, actualValue) 22 | } 23 | this 24 | } 25 | 26 | def merge(that: NumericColumnStatisticsAggregator): NumericColumnStatisticsAggregator = { 27 | runningSum = runningSum + that.runningSum 28 | val delta = that.runningMean - runningMean 29 | runningMin = Math.min(runningMin, that.runningMin) 30 | runningMax = Math.max(runningMax, that.runningMax) 31 | runningMean = (runningMean * nonMissingCount + that.runningMean * that.nonMissingCount) / 32 | (nonMissingCount + that.nonMissingCount) 33 | runningMeanSquareDif = runningMeanSquareDif + that.runningMeanSquareDif + 34 | (delta * delta * nonMissingCount * that.nonMissingCount) / 35 | (nonMissingCount + that.nonMissingCount) 36 | counts = counts.merge(that.counts) 37 | this 38 | } 39 | 40 | def totalCount = counts.totalCount 41 | 42 | def missingCount = counts.missingCount 43 | 44 | def nonMissingCount = counts.nonMissingCount 45 | 46 | def sum = runningSum 47 | 48 | def min = runningMin 49 | 50 | def max = runningMax 51 | 52 | def mean = if (nonMissingCount > 0) runningMean else Double.NaN 53 | 54 | def variance = if (nonMissingCount > 1) runningMeanSquareDif / nonMissingCount else Double.NaN 55 | 56 | def stdev = Math.sqrt(variance) 57 | 58 | } 59 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/core/ScalaFunctions.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.core 2 | 3 | import de.frosner.dds.servables._ 4 | import org.apache.spark.sql.Row 5 | import org.apache.spark.sql.catalyst.ScalaReflection.Schema 6 | import org.apache.spark.sql.catalyst.{CatalystTypeConverters, CatalystTypeConvertersAdapter, ScalaReflection} 7 | import org.apache.spark.sql.catalyst.expressions.GenericMutableRow 8 | import org.apache.spark.sql.types.{StringType, StructField, StructType} 9 | import org.apache.spark.unsafe.types.UTF8String 10 | 11 | import scala.collection.mutable.ArrayBuffer 12 | import scala.reflect.runtime.universe._ 13 | import scala.util.{Failure, Success} 14 | 15 | object ScalaFunctions { 16 | 17 | private[core] def createBar(values: Seq[Double], 18 | categories: Seq[String], 19 | seriesName: String, 20 | title: String): Option[Servable] = { 21 | createBars(List(seriesName), List(values), categories, title) 22 | } 23 | 24 | private[core] def createBar(values: Seq[Double], 25 | seriesName: String, 26 | title: String): Option[Servable] = { 27 | createBars(List(seriesName), List(values), title) 28 | } 29 | 30 | private[core] def createBars(labels: Seq[String], 31 | values: Seq[Seq[Double]], 32 | categories: Seq[String], 
33 | title: String): Option[Servable] = { 34 | Option(BarChart(title, categories, values, labels)) 35 | } 36 | 37 | private[core] def createBars(labels: Seq[String], values: Seq[Seq[Double]], title: String): Option[Servable] = { 38 | val indexedCategories = (1 to values.map(_.size).max).map(_.toString).toSeq 39 | createBars(labels, values, indexedCategories, title) 40 | } 41 | 42 | private[core] def createPie(keyValuePairs: Iterable[(String, Double)], title: String): Option[Servable] = { 43 | Option(PieChart(title, keyValuePairs)) 44 | } 45 | 46 | // TODO unit tests for different number and values of zColorZeroes 47 | private[core] def createHeatmap(values: Seq[Seq[Double]], 48 | rowNames: Seq[String] = null, 49 | colNames: Seq[String] = null, 50 | zColorZeroes: Seq[Double] = Seq.empty, 51 | title: String): Option[Servable] = { 52 | if (values.size == 0 || values.head.size == 0) { 53 | println("Can't show empty heatmap!") 54 | Option.empty 55 | } else { 56 | val actualRowNames: Seq[String] = if (rowNames != null) rowNames else (1 to values.size).map(_.toString) 57 | val actualColNames: Seq[String] = if (colNames != null) colNames else (1 to values.head.size).map(_.toString) 58 | val flattenedZ = values.flatten.filter(!_.isNaN) 59 | val actualZColorZeroes = if (flattenedZ.isEmpty) { 60 | Seq(Double.NaN, Double.NaN) 61 | } else if (zColorZeroes.isEmpty) { 62 | Seq(flattenedZ.min, flattenedZ.max) 63 | } else if (zColorZeroes.size == 1) { 64 | val zero = zColorZeroes(0) 65 | val min = flattenedZ.min 66 | val max = flattenedZ.max 67 | if (zero <= min) { 68 | Seq(zero, max) 69 | } else if (zero >= max) { 70 | Seq(min, zero) 71 | } else { 72 | Seq(min, zero, max) 73 | } 74 | } else { 75 | zColorZeroes 76 | } 77 | Option(Heatmap(title, values, actualRowNames, actualColNames, actualZColorZeroes)) 78 | } 79 | } 80 | 81 | private[core] def createGraph[ID](vertices: Seq[(ID, String)], 82 | edges: Iterable[(ID, ID, String)], 83 | title: String): Option[Servable] = { 84 | val indexMap = vertices.map{ case (id, label) => id }.zip(0 to vertices.size).toMap 85 | val graph = Graph( 86 | title, 87 | vertices.map{ case (id, label) => label}, 88 | edges.map{ case (sourceId, targetId, label) => (indexMap(sourceId), indexMap(targetId), label)} 89 | ) 90 | Option(graph) 91 | } 92 | 93 | private[core] def createKeyValuePairs(pairs: List[(String, Any)], title: String): Option[Servable] = { 94 | if (pairs.isEmpty) { 95 | println("Cannot print empty key-value pairs.") 96 | Option.empty 97 | } else { 98 | Option(KeyValueSequence( 99 | title = title, 100 | keyValuePairs = pairs.map{ case (key, value) => (key, value.toString) } 101 | )) 102 | } 103 | } 104 | 105 | private[core] def createHistogram(bins: Seq[Double], frequencies: Seq[Long], title: String): Option[Servable] = { 106 | Option(Histogram(title, bins, frequencies)) 107 | } 108 | 109 | private[core] def createScatter[X, Y](values: Seq[(X, Y)], title: String) 110 | (implicit numX: Numeric[X] = null, numY: Numeric[Y] = null): Option[Servable] = { 111 | def toStringOrDouble[T](t: T, num: Numeric[T]) = if (num != null) num.toDouble(t) else t.toString 112 | Option(ScatterPlot( 113 | title = title, 114 | points = values.map{ case (x, y) => (toStringOrDouble(x, numX), toStringOrDouble(y, numY)) }, 115 | xIsNumeric = numX != null, 116 | yIsNumeric = numY != null 117 | )) 118 | } 119 | 120 | private[core] def createTable(schema: StructType, 121 | rows: Seq[Row], 122 | title: String): Option[Servable] = 123 | Option(Table(title, schema, rows)) 124 | 125 | private[core] 
def createShow[V](sequence: Seq[V], title: String)(implicit tag: TypeTag[V]): Option[Servable] = { 126 | val vType = tag.tpe 127 | if (sequence.isEmpty) { 128 | println("Sequence is empty!") 129 | Option.empty 130 | } else { 131 | // TODO check Option type (=> convert to nullable and what is inside?) 132 | val inferredSchema = scala.util.Try(ScalaReflection.schemaFor(tag)) 133 | val result = inferredSchema match { 134 | case Success(Schema(struct: StructType, nullable)) => { 135 | // convert product to row like in RDDConversions.productToRowRdd, but imitate a conversion back to Row (like 136 | // in DataFrame.take) to get the Scala types in th end 137 | val dataTypes = struct.fields.map(_.dataType) 138 | val catalystConverters = dataTypes.map(CatalystTypeConvertersAdapter.createToCatalystConverter) 139 | val scalaConverters = dataTypes.map(CatalystTypeConvertersAdapter.createToScalaConverter) 140 | val rows = sequence.asInstanceOf[Seq[Product]].map { product => 141 | val rowSeq = for (i <- struct.indices) yield 142 | scalaConverters(i)(catalystConverters(i)(product.productElement(i))) 143 | Row.fromSeq(rowSeq) 144 | } 145 | Table(title, struct, rows) 146 | } 147 | case Success(Schema(other, nullable)) => { 148 | // RDD of a different type that fits into the catalyst type structure 149 | val structType = StructType(List(StructField("1", other, nullable))) 150 | Table(title, structType, sequence.map(c => Row(c))) 151 | } 152 | case Failure(exception) => { 153 | // RDD of a type that does not fit into the catalyst type structure so we convert it to String 154 | val structType = StructType(List(StructField("1", StringType, true))) 155 | Table(title, structType, sequence.map(c => Row(if (c != null) c.toString else null))) 156 | } 157 | } 158 | Option(result) 159 | } 160 | } 161 | 162 | } 163 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/core/Server.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.core 2 | 3 | import de.frosner.dds.servables.Servable 4 | import Server.TransformedServable 5 | 6 | trait Server { 7 | 8 | def init(): Unit 9 | 10 | def tearDown(): Unit 11 | 12 | def serve(servable: Servable): TransformedServable 13 | 14 | } 15 | 16 | object Server { 17 | 18 | type TransformedServable = Any 19 | 20 | } 21 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/core/SparkCoreFunctions.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.core 2 | 3 | import de.frosner.dds.servables.{KeyValueSequence, Histogram, Servable} 4 | import de.frosner.dds.util.ServableUtils 5 | import org.apache.spark.rdd.RDD 6 | import org.apache.spark.sql.Row 7 | import org.apache.spark.sql.types.{DoubleType, StructField, StructType} 8 | import org.apache.spark.util.StatCounter 9 | 10 | import scala.reflect.ClassTag 11 | import scala.reflect.runtime.universe._ 12 | 13 | object SparkCoreFunctions { 14 | 15 | private[core] def createBar[V: ClassTag](values: RDD[V], seriesName: String, title: String): Option[Servable] = { 16 | val (distinctValues, distinctCounts) = 17 | values.map((_, 1)).reduceByKey(_ + _).collect.sortBy{ case (value, count) => count }.reverse.unzip 18 | ScalaFunctions.createBar(distinctCounts.map(_.toDouble), distinctValues.map(_.toString), seriesName, title) 19 | } 20 | 21 | private[core] def createPie[V: ClassTag](values: RDD[V], 
title: String): Option[Servable] = { 22 | val keyCounts = values.map((_, 1)).reduceByKey(_ + _) 23 | val collectedKeyCounts = keyCounts.map{ case (segment, count) => (segment.toString, count.toDouble)}.collect 24 | ScalaFunctions.createPie(collectedKeyCounts, title) 25 | } 26 | 27 | private[core] def createHistogram[N: ClassTag](values: RDD[N], numBuckets: Option[Int], title: String) 28 | (implicit num: Numeric[N]): Option[Servable] = { 29 | if (numBuckets.isDefined && numBuckets.get < 2) { 30 | println("Number of buckets must be greater than or equal to 2") 31 | Option.empty 32 | } else { 33 | val localNumBuckets = if (numBuckets.isEmpty) ServableUtils.optimalNumberOfBins(values.count) else numBuckets.get 34 | val tryHist = util.Try(values.map(v => num.toDouble(v)).histogram(localNumBuckets)) 35 | if (tryHist.isSuccess) { 36 | val (buckets, frequencies) = tryHist.get 37 | ScalaFunctions.createHistogram(buckets, frequencies, title) 38 | } else { 39 | println("Could not create histogram: " + tryHist.failed.get) 40 | Option.empty 41 | } 42 | } 43 | } 44 | 45 | private[core] def createHistogram[N1, N2](values: RDD[N1], buckets: Seq[N2], title: String) 46 | (implicit num1: Numeric[N1], num2: Numeric[N2]): Option[Servable] = { 47 | val doubleBuckets = buckets.map(num2.toDouble(_)).toArray 48 | val frequencies = values.map(v => num1.toLong(v)).histogram(doubleBuckets, false) 49 | ScalaFunctions.createHistogram( 50 | bins = doubleBuckets, 51 | frequencies = frequencies, 52 | title = title 53 | ) 54 | } 55 | 56 | private[core] def createPieGroups[K, N](groupValues: RDD[(K, Iterable[N])], title: String) 57 | (reduceFunction: (N, N) => N) 58 | (implicit num: Numeric[N]): Option[Servable] = { 59 | ScalaFunctions.createPie(groupValues.map { case (key, values) => { 60 | (key.toString, num.toDouble(values.reduce(reduceFunction))) 61 | } }.collect, title) 62 | } 63 | 64 | private[core] def createGroupAndPie[K: ClassTag, N: ClassTag](toBeGroupedValues: RDD[(K, N)], title: String) 65 | (reduceFunction: (N, N) => N) 66 | (implicit num: Numeric[N]): Option[Servable] = { 67 | val keyValuePairs = toBeGroupedValues.reduceByKey(reduceFunction).map { 68 | case (key, value) => (key.toString, num.toDouble(value)) 69 | }.collect 70 | val sortedKeyValuePairs = keyValuePairs.sortBy { case (value, count) => count } 71 | ScalaFunctions.createPie(keyValuePairs, title) 72 | } 73 | 74 | private[core] def createShow[V](rdd: RDD[V], sampleSize: Int, title: String) 75 | (implicit tag: TypeTag[V]): Option[Servable] = { 76 | ScalaFunctions.createShow(rdd.take(sampleSize), title)(tag) 77 | } 78 | 79 | private[core] def createMedian[N: ClassTag](values: RDD[N], title: String) 80 | (implicit num: Numeric[N]): Option[Servable] = { 81 | val sorted = values.sortBy(identity).zipWithIndex().map{ 82 | case (v, idx) => (idx, v) 83 | } 84 | val count = sorted.count 85 | if (count > 0) { 86 | val median: Double = if (count % 2 == 0) { 87 | val r = count / 2 88 | val l = r - 1 89 | num.toDouble(num.plus(sorted.lookup(l).head, sorted.lookup(r).head)) * 0.5 90 | } else { 91 | num.toDouble(sorted.lookup(count / 2).head) 92 | } 93 | ScalaFunctions.createTable(StructType(List(StructField("median", DoubleType, false))), List(Row(median)), title) 94 | } else { 95 | println("Median is not defined on an empty RDD!") 96 | Option.empty 97 | } 98 | } 99 | 100 | private[core] def createSummarize[N: ClassTag](values: RDD[N], title: String) 101 | (implicit num: Numeric[N] = null): Option[Servable] = { 102 | if (num != null) { 103 | 
Option(ServableUtils.statCounterToKeyValueSequence(values.stats(), title)) 104 | } else { 105 | val cardinality = values.distinct.count 106 | if (cardinality > 0) { 107 | val valueCounts = values.map((_, 1)).reduceByKey(_ + _) 108 | val (mode, modeCount) = valueCounts.max()(Ordering.by { case (value, count) => count}) 109 | ScalaFunctions.createKeyValuePairs( 110 | List( 111 | ("Mode", mode), 112 | ("Cardinality", cardinality) 113 | ), title 114 | ) 115 | } else { 116 | println("Summarize function requires a non-empty RDD!") 117 | Option.empty 118 | } 119 | } 120 | } 121 | 122 | private[core] def createSummarizeGroups[K, N](groupValues: RDD[(K, Iterable[N])], title: String) 123 | (implicit num: Numeric[N]): Option[Servable] = { 124 | val statCounters = groupValues.map{ case (key, values) => 125 | (key, StatCounter(values.map(num.toDouble(_)))) 126 | }.map{ case (key, stat) => 127 | (key.toString, stat) 128 | }.collect 129 | val (labels, stats) = statCounters.unzip 130 | Option(ServableUtils.statCountersToTable(labels, stats, title)) 131 | } 132 | 133 | private[core] def createGroupAndSummarize[K: ClassTag, N: ClassTag](toBeGroupedValues: RDD[(K, N)], title: String) 134 | (implicit num: Numeric[N]): Option[Servable] = { 135 | createSummarizeGroups(toBeGroupedValues.groupByKey(), title) 136 | } 137 | 138 | } 139 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/core/SparkGraphxFunctions.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.core 2 | 3 | import de.frosner.dds.servables.Servable 4 | import org.apache.spark.graphx 5 | import org.apache.spark.graphx._ 6 | import org.apache.spark.sql.Row 7 | import org.apache.spark.sql.types.{StructType, LongType, StructField, IntegerType} 8 | 9 | import scala.reflect.ClassTag 10 | 11 | object SparkGraphxFunctions { 12 | 13 | private[core] def createShowVertexSample[VD, ED](graph: graphx.Graph[VD, ED], 14 | sampleSize: Int, 15 | vertexFilter: (VertexId, VD) => Boolean): Option[Servable] = { 16 | val vertexSample = graph.vertices.filter{ 17 | case (id, attr) => vertexFilter(id, attr) 18 | }.take(sampleSize).map{ case (id, attr) => id }.toSet 19 | val sampledGraph = graph.subgraph( 20 | edge => vertexSample.contains(edge.srcId) && vertexSample.contains(edge.dstId), 21 | (vertexId, vertexAttr) => vertexSample.contains(vertexId) 22 | ) 23 | val vertices = sampledGraph.vertices.collect 24 | val edges = sampledGraph.edges.collect 25 | ScalaFunctions.createGraph( 26 | vertices = vertices.map{ case (id, label) => (id, label.toString) }, 27 | edges = edges.map(edge => (edge.srcId, edge.dstId, edge.attr.toString)), 28 | title = s"Vertex sample of $graph" 29 | ) 30 | } 31 | 32 | private[core] def createShowEdgeSample[VD, ED](graph: graphx.Graph[VD, ED], 33 | sampleSize: Int, 34 | edgeFilter: (Edge[ED]) => Boolean): Option[Servable] = { 35 | val edgeSample = graph.edges.filter(edgeFilter).take(sampleSize) 36 | val verticesToKeep = edgeSample.map(_.srcId).toSet ++ edgeSample.map(_.dstId).toSet 37 | val vertices = graph.vertices.filter{ case (id, attr) => verticesToKeep.contains(id) }.collect 38 | ScalaFunctions.createGraph( 39 | vertices = vertices.map{ case (id, label) => (id, label.toString) }, 40 | edges = edgeSample.map(edge => (edge.srcId, edge.dstId, edge.attr.toString)), 41 | title = s"Edge sample of $graph" 42 | ) 43 | } 44 | 45 | private[core] def createConnectedComponents[VD: ClassTag, ED: ClassTag](graph: graphx.Graph[VD, 
ED]): Option[Servable] = { 46 | val connectedComponents = graph.connectedComponents() 47 | val vertexCounts = connectedComponents.vertices.map{ 48 | case (id, connectedComponent) => (connectedComponent, 1) 49 | }.reduceByKey(_ + _) 50 | val edgeCounts = connectedComponents.edges.map(e => (e.srcId, 1)).join( 51 | connectedComponents.vertices 52 | ).map{ 53 | case (id, (count, connectedComponent)) => (connectedComponent, count) 54 | }.reduceByKey(_ + _) 55 | val counts = vertexCounts.leftOuterJoin(edgeCounts) 56 | val schema = StructType(List( 57 | StructField("Connected Component", LongType, false), 58 | StructField("#Vertices", IntegerType, false), 59 | StructField("#Edges", IntegerType, false) 60 | )) 61 | ScalaFunctions.createTable( 62 | schema, 63 | counts.map{ case (connectedComponent, (numVertices, numEdges)) => 64 | Row(connectedComponent, numVertices, numEdges.getOrElse(0)) 65 | }.collect, 66 | s"Connected Components of $graph" 67 | ) 68 | } 69 | 70 | 71 | } 72 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/servables/package.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds 2 | 3 | import org.apache.spark.sql.Row 4 | import org.apache.spark.sql.types.StructType 5 | 6 | package object servables { 7 | 8 | sealed trait Servable { 9 | val title: String 10 | } 11 | 12 | // TODO serialize function that takes a serializer 13 | 14 | case class BarChart(title: String, 15 | xDomain: Seq[String], 16 | heights: Seq[Seq[Double]], 17 | series: Seq[String]) extends Servable 18 | 19 | case class PieChart(title: String, categoryCountPairs: Iterable[(String, Double)]) extends Servable 20 | 21 | case class Histogram(title: String, bins: Seq[Double], frequencies: Seq[Long]) extends Servable 22 | 23 | case class Table(title: String, schema: StructType, content: Seq[Row]) extends Servable 24 | 25 | case class Heatmap(title: String, 26 | content: Seq[Seq[Double]], 27 | rowNames: Seq[String], 28 | colNames: Seq[String], 29 | zColorZeroes: Seq[Double]) extends Servable 30 | 31 | case class Graph(title: String, vertices: Seq[String], edges: Iterable[(Int, Int, String)]) extends Servable 32 | 33 | case class ScatterPlot(title: String, 34 | points: Seq[(Any, Any)], 35 | xIsNumeric: Boolean, 36 | yIsNumeric: Boolean) extends Servable 37 | 38 | case class KeyValueSequence(title: String, keyValuePairs: Seq[(String, String)]) extends Servable 39 | 40 | // TODO document that the 2D layout is only a recommendation and does not have to be taken into account 41 | // TODO if it is not appropriate (e.g. 
on mobile devices) 42 | case class Composite(title: String, servables: Seq[Seq[Servable]]) extends Servable 43 | 44 | object Blank extends Servable { 45 | val title = "" 46 | } 47 | 48 | } 49 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/util/DataFrameUtils.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.util 2 | 3 | import java.sql.Timestamp 4 | import java.sql.Date 5 | 6 | import de.frosner.dds.core.DDS 7 | import org.apache.spark.sql.types._ 8 | import org.apache.spark.sql.{DataFrame, Row} 9 | import org.apache.spark.sql.functions._ 10 | 11 | object DataFrameUtils { 12 | 13 | private def filterFields(schema: StructType)(filter: DataType => Boolean): Iterable[(Int, StructField)] = 14 | schema.fields.zipWithIndex.filter{ case (field, index) => filter(field.dataType) }.map(_.swap) 15 | 16 | def isNumeric(dataType: DataType) = dataType == DoubleType || dataType == FloatType || dataType == IntegerType || 17 | dataType == LongType || dataType == ShortType 18 | 19 | def getNumericFields(dataFrame: DataFrame): Iterable[(Int, StructField)] = getNumericFields(dataFrame.schema) 20 | 21 | def getNumericFields(schema: StructType): Iterable[(Int, StructField)] = filterFields(schema)(isNumeric) 22 | 23 | def isDateOrTime(dataType: DataType) = dataType == DateType || dataType == TimestampType 24 | 25 | def getDateFields(dataFrame: DataFrame): Iterable[(Int, StructField)] = getDateFields(dataFrame.schema) 26 | 27 | def getDateFields(schema: StructType): Iterable[(Int, StructField)] = filterFields(schema)(isDateOrTime) 28 | 29 | def isNominal(dataType: DataType) = !isNumeric(dataType) && !isDateOrTime(dataType) 30 | 31 | def getNominalFields(dataFrame: DataFrame): Iterable[(Int, StructField)] = getNominalFields(dataFrame.schema) 32 | 33 | def getNominalFields(schema: StructType): Iterable[(Int, StructField)] = filterFields(schema)(isNominal) 34 | 35 | def numericAsDouble(row: Row, index: Int, field: StructField): Option[Double] = { 36 | val dataType = field.dataType 37 | val nullable = field.nullable 38 | (dataType, nullable) match { 39 | case (DoubleType, true) => if (row.isNullAt(index)) Option.empty[Double] else Option(row.getDouble(index)) 40 | case (DoubleType, false) => Option(row.getDouble(index)) 41 | case (IntegerType, true) => if (row.isNullAt(index)) Option.empty[Double] else Option(row.getInt(index).toDouble) 42 | case (IntegerType, false) => Option(row.getInt(index).toDouble) 43 | case (FloatType, true) => if (row.isNullAt(index)) Option.empty[Double] else Option(row.getFloat(index).toDouble) 44 | case (FloatType, false) => Option(row.getFloat(index).toDouble) 45 | case (LongType, true) => if (row.isNullAt(index)) Option.empty[Double] else Option(row.getLong(index).toDouble) 46 | case (LongType, false) => Option(row.getLong(index).toDouble) 47 | case (ShortType, true) => if (row.isNullAt(index)) Option.empty[Double] else Option(row.getShort(index).toDouble) 48 | case (ShortType, false) => Option(row.getShort(index).toDouble) 49 | case (_, _) => throw new IllegalArgumentException(s"Column cannot be converted to double: $index ($field)") 50 | } 51 | } 52 | 53 | def dateOrTimeAsTimestamp(row: Row, index: Int, field: StructField): Option[Timestamp] = { 54 | val dataType = field.dataType 55 | val nullable = field.nullable 56 | (dataType, nullable) match { 57 | case (DateType, true) => if (row.isNullAt(index)) Option.empty[Timestamp] else Option(new 
Timestamp(row.getAs[Date](index).getTime)) 58 | case (DateType, false) => Option(new Timestamp(row.getAs[Date](index).getTime)) 59 | case (TimestampType, true) => if (row.isNullAt(index)) Option.empty[Timestamp] else Option(row.getAs[Timestamp](index)) 60 | case (TimestampType, false) => Option(row.getAs[Timestamp](index)) 61 | case (_, _) => throw new IllegalArgumentException(s"Column cannot be converted to timestamp: $index ($field)") 62 | } 63 | } 64 | 65 | def anyAsAny(row: Row, index: Int, field: StructField): Option[Any] = { 66 | if (field.nullable) { 67 | if (row.isNullAt(index)) Option.empty else Option(row.get(index)) 68 | } else { 69 | Option(row.get(index)) 70 | } 71 | } 72 | 73 | def requireSingleColumned[R](dataFrame: DataFrame, function: String)(toDo: => Option[R]): Option[R] = 74 | requireSingleColumned(dataFrame.schema, function)(toDo) 75 | 76 | def requireSingleColumned[R](schema: StructType, function: String)(toDo: => Option[R]): Option[R] = { 77 | if (schema.fields.size != 1) { 78 | println(function + " function only supported on single columns.") 79 | println 80 | DDS.help(function) 81 | Option.empty[R] 82 | } else { 83 | toDo 84 | } 85 | } 86 | 87 | def binDoubleUdf(numBins: Int, min: Double, max: Double) = { 88 | require(numBins > 0, "The number of bins must be greater than 0") 89 | require(min <= max, "The minimum value must not be greater than the maximum") 90 | require(!min.isInfinite, "Having an infinite minimum is not supported") 91 | require(!max.isInfinite, "Having an infinite maximum is not supported") 92 | udf((value: java.lang.Double) => { 93 | if (value == null || value.isNaN) { 94 | Option(value).toString 95 | } else { 96 | require(value <= max, s" Value ($value) must be less than max ($max)") 97 | require(value >= min, s"Value ($value) must be greater than min ($min)") 98 | val bin = (((value - min) / (max - min)) * numBins).toInt 99 | Math.min(bin, numBins - 1).toString 100 | } 101 | }) 102 | } 103 | 104 | def dfToString(dataFrame: DataFrame) = dataFrame.columns.mkString(", ") 105 | 106 | } 107 | -------------------------------------------------------------------------------- /core/src/main/scala/de/frosner/dds/util/ServableUtils.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.util 2 | 3 | import de.frosner.dds.core.ScalaFunctions 4 | import de.frosner.dds.servables.{Table, KeyValueSequence} 5 | import org.apache.spark.sql.Row 6 | import org.apache.spark.sql.types._ 7 | import org.apache.spark.util.StatCounter 8 | 9 | object ServableUtils { 10 | 11 | private def lg2IntCeil(number: Long): Int = { 12 | //integer lg_2 can be calculated by bitshifting 13 | var log = 0 14 | var countDown = number 15 | while (countDown > 1) { 16 | countDown = countDown >> 1 17 | log += 1 18 | } 19 | //ceiling by checking whether each LSB was zero, adding 1 if not. 20 | if (number == (1 << log) || number == 0) 21 | log 22 | else 23 | log + 1 24 | } 25 | 26 | /** 27 | * Compute the "optimal" number of bins for a histogram using the Sturge's formula. This assumes that the data 28 | * is generated by a Gaussian distribution. 
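* For example, 1,000 data points yield ceil(lg2(1000)) + 1 = 10 + 1 = 11 bins.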
29 | * 30 | * @param count Number of data points in the sample 31 | * @return Optimal number of bins 32 | */ 33 | def optimalNumberOfBins(count: Long): Int = lg2IntCeil(count) + 1 34 | 35 | def statCounterToKeyValueSequence(stats: StatCounter, title: String): KeyValueSequence = { 36 | KeyValueSequence( 37 | title, List( 38 | ("Count", stats.count.toString), 39 | ("Sum", stats.sum.toString), 40 | ("Min", stats.min.toString), 41 | ("Max", stats.max.toString), 42 | ("Mean", stats.mean.toString), 43 | ("Stdev", stats.stdev.toString), 44 | ("Variance", stats.variance.toString) 45 | ) 46 | ) 47 | } 48 | 49 | def statCounterToTable(stat: StatCounter, title: String): Table = 50 | statCountersToTable(List.empty, List(stat), title) 51 | 52 | def statCountersToTable(labels: Seq[String], stats: Seq[StatCounter], title: String): Table = { 53 | val optionalLabelHead = if (labels.size > 0) List(StructField("label", StringType, false)) else List.empty 54 | val head = optionalLabelHead ++ List( 55 | StructField("count", LongType, false), 56 | StructField("sum", DoubleType, false), 57 | StructField("min", DoubleType, false), 58 | StructField("max", DoubleType, false), 59 | StructField("mean", DoubleType, false), 60 | StructField("stdev", DoubleType, false), 61 | StructField("variance", DoubleType, false) 62 | ) 63 | val schema = StructType(head) 64 | val rows = if (labels.size > 0) { 65 | labels.zip(stats).map{ case (label, stats) => 66 | Row(label, stats.count, stats.sum, stats.min, stats.max, stats.mean, stats.stdev, stats.variance) 67 | } 68 | } else { 69 | stats.map(stats => Row(stats.count, stats.sum, stats.min, stats.max, stats.mean, stats.stdev, stats.variance)) 70 | } 71 | Table(title, schema, rows) 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /core/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConvertersAdapter.scala: -------------------------------------------------------------------------------- 1 | package org.apache.spark.sql.catalyst 2 | 3 | import org.apache.spark.sql.types.DataType 4 | 5 | object CatalystTypeConvertersAdapter { 6 | 7 | def createToCatalystConverter(dataType: DataType): Any => Any = 8 | CatalystTypeConverters.createToCatalystConverter(dataType) 9 | 10 | def createToScalaConverter(dataType: DataType): Any => Any = 11 | CatalystTypeConverters.createToScalaConverter(dataType) 12 | 13 | } 14 | -------------------------------------------------------------------------------- /core/src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Set everything to be logged to the console 2 | log4j.rootCategory=ERROR, console 3 | log4j.appender.console=org.apache.log4j.ConsoleAppender 4 | log4j.appender.console.target=System.err 5 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 7 | -------------------------------------------------------------------------------- /core/src/test/resources/manual-tests.txt: -------------------------------------------------------------------------------- 1 | import de.frosner.dds.core.DDS._ 2 | import de.frosner.dds.webui.server.SprayServer._ 3 | 4 | start 5 | 6 | val sql = new org.apache.spark.sql.SQLContext(sc) 7 | val golf = de.frosner.dds.datasets.golf(sc) 8 | val sqlGolf = de.frosner.dds.datasets.golf(sql) 9 | val flights = de.frosner.dds.datasets.flights(sc) 10 | val sqlFlights = de.frosner.dds.datasets.flights(sql) 11 
| 12 | bar(sqlGolf.select("play")) 13 | bars(List("Group A", "Group B", "Group C"), List(List(1,2,3), List(2,3,2), List(4,1,2)), List("Product A", "Product B", "Product C")) 14 | graph(List((1, "Frank"), (2, "Daniel"), (3, "Lucas"), (4, "Pietro"), (5, "Marco")), List((1,2,"Friend"), (1,3,"Friend"), (4,5,"Colleague"))) 15 | show(sqlGolf) 16 | show(List((2, 3), (3, 4), (3, 1))) 17 | show(sqlFlights.select("carrier")) 18 | median(golf.map(_.humidity)) 19 | dashboard(sqlFlights.select("flightDate", "carrier", "carrierDelay")) 20 | summarize(sqlFlights.select("flightDate", "carrier", "carrierDelay")) 21 | heatmap(List(List(1d,2d), List(3d,Double.NaN))) 22 | correlation(sqlGolf) 23 | mutualInformation(sqlGolf) 24 | histogram(sqlGolf.select("humidity")) 25 | pie(sqlGolf.select(sqlGolf("outlook"))) 26 | scatter(List(("1",1), ("2",5), (3,5), (10,1))) 27 | keyValuePairs(List((1,2), (3,4))) 28 | 29 | import org.apache.spark.sql.Row 30 | import org.apache.spark.sql.types._ 31 | 32 | val schema = StructType(List( 33 | StructField("0", ByteType, false), 34 | StructField("1", ByteType, true), 35 | StructField("2", ShortType, false), 36 | StructField("3", ShortType, true), 37 | StructField("4", IntegerType, false), 38 | StructField("5", IntegerType, true), 39 | StructField("6", LongType, false), 40 | StructField("7", LongType, true), 41 | StructField("8", FloatType, false), 42 | StructField("9", FloatType, true), 43 | StructField("10", DoubleType, false), 44 | StructField("11", DoubleType, true), 45 | StructField("12", DecimalType.Unlimited, true), 46 | StructField("13", StringType, true), 47 | StructField("14", BinaryType, true), 48 | StructField("15", BooleanType, false), 49 | StructField("16", BooleanType, true), 50 | StructField("17", TimestampType, true), 51 | StructField("18", DateType, true), 52 | StructField("19", ArrayType(StringType), true), 53 | StructField("20", MapType(StringType, IntegerType, valueContainsNull = false), true), 54 | StructField("21", StructType(List( 55 | StructField("a", IntegerType, false), 56 | StructField("b", IntegerType, false), 57 | StructField("c", IntegerType, false) 58 | )), true) 59 | )) 60 | val content = sc.parallelize(Seq( 61 | Row( 62 | 0.toByte, new java.lang.Byte(0.toByte), 63 | 1.toShort, new java.lang.Short(1.toShort), 64 | 2, new java.lang.Integer(2), 65 | 3l, new java.lang.Long(3l), 66 | 4f, new java.lang.Float(4f), 67 | 5d, new java.lang.Double(5d), 68 | new java.math.BigDecimal(6d), 69 | "abc", 70 | Array(8.toByte, 8.toByte, 8.toByte), 71 | true, new java.lang.Boolean(true), 72 | new java.sql.Timestamp(10000), 73 | new java.sql.Date(10000), 74 | Seq("a", "b", "c"), 75 | Map("a" -> 1, "b" -> 2, "c" -> 3), 76 | Row(1, 2, 3) 77 | ), 78 | Row( 79 | 0.toByte, null, 80 | 1.toShort, null, 81 | 2, null, 82 | 3l, null, 83 | 4f, null, 84 | 5d, null, 85 | null, 86 | null, 87 | null, 88 | true, null, 89 | null, 90 | null, 91 | null, 92 | null, 93 | null 94 | ) 95 | )) 96 | val df = sql.createDataFrame(content, schema) 97 | show(df) 98 | -------------------------------------------------------------------------------- /core/src/test/scala/de/frosner/dds/analytics/ColumnsStatisticsAggregatorTest.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.analytics 2 | 3 | import java.sql.Date 4 | 5 | import org.apache.spark.sql.Row 6 | import org.apache.spark.sql.types._ 7 | import org.scalatest.{FlatSpec, Matchers} 8 | 9 | class ColumnsStatisticsAggregatorTest extends FlatSpec with Matchers { 10 | 11 | "A column 
statistics aggregator" should "be initialized properly" in { 12 | val dateColumn = StructField("1", DateType, false) 13 | val intColumn = StructField("2", IntegerType, false) 14 | val stringColumn = StructField("3", StringType, false) 15 | val schema = StructType(List(dateColumn, intColumn, stringColumn)) 16 | val agg = ColumnsStatisticsAggregator(schema) 17 | 18 | val actualNumericColumns = agg.numericColumns 19 | actualNumericColumns.size shouldBe 1 20 | val (actualNumericAggregator, actualNumericColumn) = actualNumericColumns(1) 21 | actualNumericAggregator.totalCount shouldBe 0l 22 | actualNumericColumn shouldBe intColumn 23 | 24 | val actualDateColumns = agg.dateColumns 25 | actualDateColumns.size shouldBe 1 26 | val (actualDateAggregator, actualDateColumn) = actualDateColumns(0) 27 | actualDateAggregator.totalCount shouldBe 0l 28 | actualDateColumn shouldBe dateColumn 29 | 30 | val actualNominalColumns = agg.nominalColumns 31 | actualNominalColumns.size shouldBe 1 32 | val (actualNominalAggregator, actualNominalColumn) = actualNominalColumns(2) 33 | actualNominalAggregator.totalCount shouldBe 0l 34 | actualNominalColumn shouldBe stringColumn 35 | 36 | agg.totalCount shouldBe 0l 37 | } 38 | 39 | it should "update the aggregators when iterating" in { 40 | val dateColumn = StructField("1", DateType, false) 41 | val intColumn = StructField("2", IntegerType, false) 42 | val stringColumn = StructField("3", StringType, false) 43 | val schema = StructType(List(dateColumn, intColumn, stringColumn)) 44 | val agg = ColumnsStatisticsAggregator(schema) 45 | 46 | for (expectedCount <- List(1l,2l)) { 47 | agg.iterate(Row(new Date(500), 10, "b")) 48 | 49 | val (actualNumericAggregator, _) = agg.numericColumns(1) 50 | actualNumericAggregator.totalCount shouldBe expectedCount 51 | 52 | val (actualDateAggregator, _) = agg.dateColumns(0) 53 | actualDateAggregator.totalCount shouldBe expectedCount 54 | 55 | val (actualNominalAggregator, _) = agg.nominalColumns(2) 56 | actualNominalAggregator.totalCount shouldBe expectedCount 57 | 58 | agg.totalCount shouldBe expectedCount 59 | } 60 | } 61 | 62 | it should "update the aggregators when merging" in { 63 | val dateColumn = StructField("1", DateType, false) 64 | val intColumn = StructField("2", IntegerType, false) 65 | val stringColumn = StructField("3", StringType, false) 66 | val schema = StructType(List(dateColumn, intColumn, stringColumn)) 67 | val agg1 = ColumnsStatisticsAggregator(schema) 68 | val agg2 = ColumnsStatisticsAggregator(schema) 69 | 70 | agg1.iterate(Row(new Date(500), 10, "b")) 71 | agg1.iterate(Row(new Date(500), 10, "b")) 72 | agg2.iterate(Row(new Date(500), 10, "b")) 73 | 74 | val merged = agg1.merge(agg2) 75 | 76 | val (actualNumericAggregator, _) = merged.numericColumns(1) 77 | actualNumericAggregator.totalCount shouldBe 3l 78 | 79 | val (actualDateAggregator, _) = merged.dateColumns(0) 80 | actualDateAggregator.totalCount shouldBe 3l 81 | 82 | val (actualNominalAggregator, _) = merged.nominalColumns(2) 83 | actualNominalAggregator.totalCount shouldBe 3l 84 | 85 | merged.totalCount shouldBe 3l 86 | } 87 | 88 | it should "only merge two aggregators having the same schema" in { 89 | val dateColumn = StructField("1", DateType, false) 90 | val intColumn = StructField("2", IntegerType, false) 91 | val stringColumn = StructField("3", StringType, false) 92 | val schema1 = StructType(List(intColumn, stringColumn)) 93 | val schema2 = StructType(List(dateColumn, stringColumn)) 94 | val agg1 = ColumnsStatisticsAggregator(schema1) 95 | val agg2 = 
ColumnsStatisticsAggregator(schema2) 96 | intercept[IllegalArgumentException] { 97 | agg1.merge(agg2) 98 | } 99 | } 100 | 101 | it should "use the total count of the first aggregator it finds" in { 102 | def testTotalCount(types: Seq[DataType]) = { 103 | val fields = types.zipWithIndex.map{ case (dataType, index) => 104 | StructField( 105 | name = index.toString, 106 | dataType = dataType, 107 | nullable = true 108 | ) 109 | } 110 | val schema = StructType(fields) 111 | val agg = ColumnsStatisticsAggregator(schema) 112 | agg.totalCount shouldBe 0L 113 | } 114 | Set(DoubleType, StringType, DateType).subsets.map(_.toSeq).foreach(testTotalCount) 115 | } 116 | 117 | } 118 | -------------------------------------------------------------------------------- /core/src/test/scala/de/frosner/dds/analytics/NominalColumnStatisticsAggregatorTest.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.analytics 2 | 3 | import org.scalatest.{Matchers, FlatSpec} 4 | 5 | class NominalColumnStatisticsAggregatorTest extends FlatSpec with Matchers { 6 | 7 | "A nominal column statistics aggregator" should "be initialized correctly" in { 8 | val agg = new NominalColumnStatisticsAggregator 9 | agg.totalCount shouldBe 0l 10 | agg.missingCount shouldBe 0l 11 | agg.nonMissingCount shouldBe 0l 12 | } 13 | 14 | it should "compute the total count correctly" in { 15 | val agg = new NominalColumnStatisticsAggregator 16 | agg.iterate(Option.empty) 17 | agg.totalCount shouldBe 1l 18 | agg.iterate(Option(5)) 19 | agg.totalCount shouldBe 2l 20 | } 21 | 22 | it should "compute the missing count correctly" in { 23 | val agg = new NominalColumnStatisticsAggregator 24 | agg.iterate(Option.empty) 25 | agg.missingCount shouldBe 1l 26 | agg.iterate(Option(5)) 27 | agg.missingCount shouldBe 1l 28 | } 29 | 30 | it should "compute the non-missing count correctly" in { 31 | val agg = new NominalColumnStatisticsAggregator 32 | agg.iterate(Option.empty) 33 | agg.nonMissingCount shouldBe 0l 34 | agg.iterate(Option(5)) 35 | agg.nonMissingCount shouldBe 1l 36 | } 37 | 38 | it should "merge two total counts correctly" in { 39 | val agg1 = new NominalColumnStatisticsAggregator 40 | agg1.iterate(Option("a")) 41 | val agg2 = new NominalColumnStatisticsAggregator 42 | agg2.iterate(Option.empty) 43 | agg1.merge(agg2).totalCount shouldBe 2l 44 | } 45 | 46 | it should "merge two missing counts correctly" in { 47 | val agg1 = new NominalColumnStatisticsAggregator 48 | agg1.iterate(Option("a")) 49 | agg1.iterate(Option.empty) 50 | val agg2 = new NominalColumnStatisticsAggregator 51 | agg2.iterate(Option.empty) 52 | agg1.merge(agg2).missingCount shouldBe 2l 53 | } 54 | 55 | it should "merge two non-missing counts correctly" in { 56 | val agg1 = new NominalColumnStatisticsAggregator 57 | agg1.iterate(Option("a")) 58 | val agg2 = new NominalColumnStatisticsAggregator 59 | agg2.iterate(Option.empty) 60 | agg2.iterate(Option(5)) 61 | agg1.merge(agg2).nonMissingCount shouldBe 2l 62 | } 63 | 64 | } 65 | -------------------------------------------------------------------------------- /core/src/test/scala/de/frosner/dds/analytics/NumericColumnStatisticsAggregatorTest.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.analytics 2 | 3 | import org.scalatest.{FlatSpec, Matchers} 4 | 5 | class NumericColumnStatisticsAggregatorTest extends FlatSpec with Matchers { 6 | 7 | val epsilon = 0.000001 8 | 9 | "A numeric column statistics aggregator" 
should "be initialized properly" in { 10 | val agg = new NumericColumnStatisticsAggregator 11 | agg.totalCount shouldBe 0l 12 | agg.missingCount shouldBe 0l 13 | agg.sum shouldBe 0d 14 | agg.min shouldBe Double.PositiveInfinity 15 | agg.max shouldBe Double.NegativeInfinity 16 | agg.mean.isNaN shouldBe true 17 | agg.variance.isNaN shouldBe true 18 | agg.stdev.isNaN shouldBe true 19 | } 20 | 21 | it should "compute the correct total count" in { 22 | val agg = new NumericColumnStatisticsAggregator 23 | agg.iterate(Option(5d)) 24 | agg.totalCount shouldBe 1l 25 | agg.iterate(Option(2d)) 26 | agg.totalCount shouldBe 2l 27 | agg.iterate(Option.empty) 28 | agg.totalCount shouldBe 3l 29 | } 30 | 31 | it should "compute the missing value count correctly" in { 32 | val agg = new NumericColumnStatisticsAggregator 33 | agg.iterate(Option(5d)) 34 | agg.missingCount shouldBe 0l 35 | agg.iterate(Option.empty) 36 | agg.missingCount shouldBe 1l 37 | agg.iterate(Option(2d)) 38 | agg.missingCount shouldBe 1l 39 | } 40 | 41 | it should "compute the correct sums" in { 42 | val agg = new NumericColumnStatisticsAggregator 43 | agg.iterate(Option(5d)) 44 | agg.iterate(Option.empty) 45 | agg.sum shouldBe 5d +- epsilon 46 | agg.iterate(Option(1d)) 47 | agg.sum shouldBe 6d +- epsilon 48 | } 49 | 50 | it should "compute the min correctly" in { 51 | val agg = new NumericColumnStatisticsAggregator 52 | agg.iterate(Option.empty) 53 | agg.min shouldBe Double.PositiveInfinity 54 | agg.iterate(Option(5d)) 55 | agg.min shouldBe 5d 56 | agg.iterate(Option(2d)) 57 | agg.min shouldBe 2d 58 | agg.iterate(Option(10d)) 59 | agg.min shouldBe 2d 60 | } 61 | 62 | it should "compute the max correctly" in { 63 | val agg = new NumericColumnStatisticsAggregator 64 | agg.iterate(Option.empty) 65 | agg.max shouldBe Double.NegativeInfinity 66 | agg.iterate(Option(5d)) 67 | agg.max shouldBe 5d 68 | agg.iterate(Option(2d)) 69 | agg.max shouldBe 5d 70 | agg.iterate(Option(10d)) 71 | agg.max shouldBe 10d 72 | } 73 | 74 | it should "compute the mean correctly" in { 75 | val agg = new NumericColumnStatisticsAggregator 76 | agg.iterate(Option.empty) 77 | agg.mean.isNaN shouldBe true 78 | agg.iterate(Option(5d)) 79 | agg.mean shouldBe 5d +- epsilon 80 | agg.iterate(Option(0d)) 81 | agg.mean shouldBe 2.5 +- epsilon 82 | agg.iterate(Option(10)) 83 | agg.mean shouldBe 5d +- epsilon 84 | agg.iterate(Option.empty) 85 | agg.mean shouldBe 5d +- epsilon 86 | } 87 | 88 | it should "compute the variance correctly" in { 89 | val agg = new NumericColumnStatisticsAggregator 90 | agg.iterate(Option.empty) 91 | agg.variance.isNaN shouldBe true 92 | agg.iterate(Option(5d)) 93 | agg.variance.isNaN shouldBe true 94 | agg.iterate(Option(0d)) 95 | agg.variance shouldBe 6.25 +- epsilon 96 | agg.iterate(Option(10)) 97 | agg.variance shouldBe 16.6666667 +- epsilon 98 | agg.iterate(Option.empty) 99 | agg.variance shouldBe 16.6666667 +- epsilon 100 | } 101 | 102 | it should "compute the stdev correctly" in { 103 | val agg = new NumericColumnStatisticsAggregator 104 | agg.iterate(Option.empty) 105 | agg.stdev.isNaN shouldBe true 106 | agg.iterate(Option(5d)) 107 | agg.stdev.isNaN shouldBe true 108 | agg.iterate(Option(0d)) 109 | agg.stdev shouldBe 2.5 +- epsilon 110 | agg.iterate(Option.empty) 111 | agg.stdev shouldBe 2.5 +- epsilon 112 | } 113 | 114 | it should "merge two total counts correctly" in { 115 | val agg1 = new NumericColumnStatisticsAggregator 116 | agg1.iterate(Option(1)) 117 | val agg2 = new NumericColumnStatisticsAggregator 118 | agg2.iterate(Option(2)) 119 
| agg1.merge(agg2).totalCount shouldBe 2l 120 | } 121 | 122 | it should "merge two missing value counts correctly" in { 123 | val agg1 = new NumericColumnStatisticsAggregator 124 | agg1.iterate(Option(1)) 125 | agg1.iterate(Option.empty) 126 | val agg2 = new NumericColumnStatisticsAggregator 127 | agg2.iterate(Option.empty) 128 | agg1.merge(agg2).missingCount shouldBe 2l 129 | } 130 | 131 | it should "merge two sums correctly" in { 132 | val agg1 = new NumericColumnStatisticsAggregator 133 | agg1.iterate(Option(1)) 134 | val agg2 = new NumericColumnStatisticsAggregator 135 | agg2.iterate(Option(2)) 136 | agg1.merge(agg2).sum shouldBe 3d +- epsilon 137 | } 138 | 139 | it should "merge two mins correctly" in { 140 | val agg1 = new NumericColumnStatisticsAggregator 141 | agg1.iterate(Option(1)) 142 | val agg2 = new NumericColumnStatisticsAggregator 143 | agg2.iterate(Option(2)) 144 | agg1.merge(agg2).min shouldBe 1d 145 | } 146 | 147 | it should "merge two maxs correctly" in { 148 | val agg1 = new NumericColumnStatisticsAggregator 149 | agg1.iterate(Option(1)) 150 | val agg2 = new NumericColumnStatisticsAggregator 151 | agg2.iterate(Option(2)) 152 | agg1.merge(agg2).max shouldBe 2d 153 | } 154 | 155 | it should "merge two means correctly" in { 156 | val agg1 = new NumericColumnStatisticsAggregator 157 | agg1.iterate(Option(1)) 158 | val agg2 = new NumericColumnStatisticsAggregator 159 | agg2.iterate(Option(2)) 160 | agg1.merge(agg2).mean shouldBe 1.5d +- epsilon 161 | } 162 | 163 | it should "merge two variances correctly" in { 164 | val agg1 = new NumericColumnStatisticsAggregator 165 | agg1.iterate(Option(5)) 166 | agg1.iterate(Option(0)) 167 | val agg2 = new NumericColumnStatisticsAggregator 168 | agg2.iterate(Option(10)) 169 | agg1.merge(agg2).variance shouldBe 16.6666667 +- epsilon 170 | } 171 | 172 | it should "merge two standard deviations correctly" in { 173 | val agg1 = new NumericColumnStatisticsAggregator 174 | agg1.iterate(Option(5)) 175 | val agg2 = new NumericColumnStatisticsAggregator 176 | agg2.iterate(Option(0)) 177 | agg1.merge(agg2).stdev shouldBe 2.5 +- epsilon 178 | } 179 | 180 | } 181 | -------------------------------------------------------------------------------- /core/src/test/scala/de/frosner/dds/util/ServableUtilsTest.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.util 2 | 3 | import de.frosner.dds.servables.{KeyValueSequence, Table} 4 | import org.apache.spark.sql.Row 5 | import org.apache.spark.sql.types._ 6 | import org.apache.spark.util.StatCounter 7 | import org.scalatest.{FlatSpec, Matchers} 8 | 9 | // TODO check all test names (not only here), whether they still make sense (e.g. 
there is no JSON in core anymore) 10 | class ServableUtilsTest extends FlatSpec with Matchers { 11 | 12 | "An optimal number of bins" should "be computed correctly" in { 13 | val testValues = List(0,1,3,5,8,12,16) 14 | val expectedValues = List(1,1,3,4,4,5,5) 15 | testValues.map(ServableUtils.optimalNumberOfBins(_)) shouldBe expectedValues 16 | } 17 | 18 | "A stat table" should "be constructed correctly from a stat counter" in { 19 | val statCounter = StatCounter(1D, 2D, 3D) 20 | val tableTitle = "Stats Table" 21 | val statsTable = ServableUtils.statCounterToTable(statCounter, tableTitle) 22 | 23 | statsTable.title shouldBe tableTitle 24 | statsTable.schema shouldBe StructType(List( 25 | StructField("count", LongType, false), 26 | StructField("sum", DoubleType, false), 27 | StructField("min", DoubleType, false), 28 | StructField("max", DoubleType, false), 29 | StructField("mean", DoubleType, false), 30 | StructField("stdev", DoubleType, false), 31 | StructField("variance", DoubleType, false) 32 | )) 33 | statsTable.content shouldBe Seq(Row( 34 | statCounter.count, 35 | statCounter.sum, 36 | statCounter.min, 37 | statCounter.max, 38 | statCounter.mean, 39 | statCounter.stdev, 40 | statCounter.variance 41 | )) 42 | } 43 | 44 | it should "be constructed correctly from multiple stat counters" in { 45 | val statCounter1 = StatCounter(1D, 2D, 3D) 46 | val statCounter2 = StatCounter(0D, 5D) 47 | val tableTitle = "table" 48 | val statsTable = ServableUtils.statCountersToTable(List("label1", "label2"), List(statCounter1, statCounter2), tableTitle) 49 | 50 | statsTable.title shouldBe tableTitle 51 | statsTable.schema shouldBe StructType(List( 52 | StructField("label", StringType, false), 53 | StructField("count", LongType, false), 54 | StructField("sum", DoubleType, false), 55 | StructField("min", DoubleType, false), 56 | StructField("max", DoubleType, false), 57 | StructField("mean", DoubleType, false), 58 | StructField("stdev", DoubleType, false), 59 | StructField("variance", DoubleType, false) 60 | )) 61 | statsTable.content shouldBe Seq( 62 | Row( 63 | "label1", 64 | statCounter1.count, 65 | statCounter1.sum, 66 | statCounter1.min, 67 | statCounter1.max, 68 | statCounter1.mean, 69 | statCounter1.stdev, 70 | statCounter1.variance 71 | ), Row( 72 | "label2", 73 | statCounter2.count, 74 | statCounter2.sum, 75 | statCounter2.min, 76 | statCounter2.max, 77 | statCounter2.mean, 78 | statCounter2.stdev, 79 | statCounter2.variance 80 | ) 81 | ) 82 | } 83 | 84 | "A key value sequence" should "have the correct format when constructed from single stat counter" in { 85 | val statCounter = StatCounter(1D, 2D, 3D) 86 | val sequenceTitle = "sequence" 87 | val statsSequence = ServableUtils.statCounterToKeyValueSequence(statCounter, sequenceTitle) 88 | 89 | statsSequence.title shouldBe sequenceTitle 90 | statsSequence.keyValuePairs.toList shouldBe List( 91 | ("Count", statCounter.count.toString), 92 | ("Sum", statCounter.sum.toString), 93 | ("Min", statCounter.min.toString), 94 | ("Max", statCounter.max.toString), 95 | ("Mean", statCounter.mean.toString), 96 | ("Stdev", statCounter.stdev.toString), 97 | ("Variance", statCounter.variance.toString) 98 | ) 99 | } 100 | 101 | } 102 | -------------------------------------------------------------------------------- /datasets/build.sbt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/datasets/build.sbt 
-------------------------------------------------------------------------------- /datasets/src/main/resources/de/frosner/dds/datasets/golf.csv: -------------------------------------------------------------------------------- 1 | Outlook,Temperature,Humidity,Wind,Play 2 | sunny,85,85,false,no 3 | sunny,80,90,true,no 4 | overcast,83,78,false,yes 5 | rain,70,96,false,yes 6 | rain,68,80,false,yes 7 | rain,65,70,true,no 8 | overcast,64,65,true,yes 9 | sunny,72,95,false,no 10 | sunny,69,70,false,yes 11 | rain,75,80,false,yes 12 | sunny,75,70,true,yes 13 | overcast,72,90,true,yes 14 | overcast,81,75,false,yes 15 | rain,71,80,true,no 16 | -------------------------------------------------------------------------------- /datasets/src/main/scala/de/frosner/dds/datasets/FlightsRow.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.datasets 2 | 3 | import java.sql.Timestamp 4 | 5 | case class FlightsRow(flightDate: Timestamp, 6 | carrier: String, 7 | tailNumber: Option[String], 8 | flightNumber: String, 9 | originAirport: String, 10 | destinationAirport: String, 11 | crsDepartureTime: Timestamp, 12 | departureTime: Option[Timestamp], 13 | departureDelay: Option[Double], 14 | wheelsOffTime: Option[Timestamp], 15 | wheelsOnTime: Option[Timestamp], 16 | crsArrivalTime: Timestamp, 17 | arrivalTime: Option[Timestamp], 18 | arrivalDelay: Option[Double], 19 | airTime: Option[Double], 20 | carrierDelay: Option[Double], 21 | weatherDelay: Option[Double], 22 | nasDelay: Option[Double], 23 | securityDelay: Option[Double], 24 | lateAircraftDelay: Option[Double]) { 25 | // override equals because default implementation using pattern matching does not work in Spark REPL 26 | override def equals(thatAny: Any): Boolean = { 27 | if (thatAny.isInstanceOf[FlightsRow]) { 28 | val that = thatAny.asInstanceOf[FlightsRow] 29 | this.flightDate == that.flightDate && 30 | this.carrier == that.carrier && 31 | this.tailNumber == that.tailNumber && 32 | this.flightNumber == that.flightNumber && 33 | this.originAirport == that.originAirport && 34 | this.destinationAirport == that.destinationAirport && 35 | this.crsDepartureTime == that.crsDepartureTime && 36 | this.departureTime == that.departureTime && 37 | this.departureDelay == that.departureDelay && 38 | this.wheelsOffTime == that.wheelsOffTime && 39 | this.wheelsOnTime == that.wheelsOnTime && 40 | this.crsArrivalTime == that.crsArrivalTime && 41 | this.arrivalTime == that.arrivalTime && 42 | this.arrivalDelay == that.arrivalDelay && 43 | this.airTime == that.airTime && 44 | this.carrierDelay == that.carrierDelay && 45 | this.weatherDelay == that.weatherDelay && 46 | this.nasDelay == that.nasDelay && 47 | this.securityDelay == that.securityDelay && 48 | this.lateAircraftDelay == that.lateAircraftDelay 49 | } else { 50 | false 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /datasets/src/main/scala/de/frosner/dds/datasets/GolfRow.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.datasets 2 | 3 | case class GolfRow(outlook: String, 4 | temperature: Double, 5 | humidity: Double, 6 | wind: Boolean, 7 | play: Boolean) { 8 | // override equals because default implementation using pattern matching does not work in Spark REPL 9 | override def equals(thatAny: Any): Boolean = { 10 | if (thatAny.isInstanceOf[GolfRow]) { 11 | val that = thatAny.asInstanceOf[GolfRow] 12 | this.outlook == that.outlook && 13 
| this.temperature == that.temperature && 14 | this.humidity == that.humidity && 15 | this.wind == that.wind && 16 | this.play == that.play 17 | } else { 18 | false 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /datasets/src/main/scala/de/frosner/dds/datasets/package.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds 2 | 3 | import java.text.SimpleDateFormat 4 | 5 | import org.apache.spark.SparkContext 6 | import org.apache.spark.rdd.RDD 7 | import org.apache.spark.sql.{DataFrame, SQLContext} 8 | import org.apache.spark.sql.Row 9 | import org.apache.spark.sql.types._ 10 | import org.apache.spark.graphx._ 11 | 12 | import scala.io.Source 13 | import scala.util.Try 14 | 15 | package object datasets { 16 | 17 | private def readCsvWithHeader(location: String) = { 18 | val raw = Source.fromInputStream(this.getClass.getResourceAsStream(location)).getLines().toSeq 19 | val (Seq(rawHead), rawBody) = raw.splitAt(1) 20 | (rawHead, rawBody) 21 | } 22 | 23 | private lazy val readGolf = readCsvWithHeader("/de/frosner/dds/datasets/golf.csv") 24 | 25 | def golf(sc: SparkContext): RDD[GolfRow] = { 26 | val (rawHead, rawBody) = readGolf 27 | sc.parallelize(rawBody).map(line => { 28 | val split = line.split(",", -1) 29 | GolfRow( 30 | outlook = split(0), 31 | temperature = split(1).toDouble, 32 | humidity = split(2).toDouble, 33 | wind = split(3).toBoolean, 34 | play = split(4) == "yes" 35 | ) 36 | }) 37 | } 38 | 39 | private lazy val readNetwork = readCsvWithHeader("/de/frosner/dds/datasets/enron.csv") 40 | 41 | def enron(sc: SparkContext): Graph[Int, String] = { 42 | val (rawHead, rawBody) = readNetwork 43 | 44 | val edgeRdd = sc.parallelize(rawBody).map(line => { 45 | val split = line.replaceAll("\"", "").split(",", -1) 46 | Edge(split(0).toLong, split(1).toLong, "") 47 | }) 48 | 49 | val allVertices = rawBody.flatMap(line => { 50 | val leftId = line.replaceAll("\"", "").split(",", -1)(0).toInt 51 | val rightId = line.replaceAll("\"", "").split(",", -1)(1).toInt 52 | List((leftId.toLong, leftId), (rightId.toLong, rightId)) 53 | }).toSet.toSeq 54 | val vertexRdd = sc.parallelize(allVertices) 55 | 56 | Graph(vertexRdd, edgeRdd) 57 | } 58 | 59 | def golf(sql: SQLContext): DataFrame = { 60 | import sql.implicits._ 61 | golf(sql.sparkContext).toDF 62 | } 63 | 64 | lazy val readFlights = readCsvWithHeader("/de/frosner/dds/datasets/flights.csv") 65 | 66 | private lazy val flightDateFormat = new SimpleDateFormat("yyyy-MM-dd") 67 | private lazy val hourMinuteDateFormat = new SimpleDateFormat("HHmm") 68 | 69 | def flights(sc: SparkContext): RDD[FlightsRow] = { 70 | val (rawHead, rawBody) = readFlights 71 | sc.parallelize(rawBody.map(line => { 72 | val split = line.split(",", -1).map(_.replace("\"", "")) 73 | FlightsRow( 74 | flightDate = new java.sql.Timestamp(flightDateFormat.parse(split(0)).getTime), 75 | carrier = split(1), 76 | tailNumber = if (split(2).isEmpty) Option.empty else Option(split(2)), 77 | flightNumber = split(3), 78 | originAirport = split(4), 79 | destinationAirport = split(5), 80 | crsDepartureTime = new java.sql.Timestamp(hourMinuteDateFormat.parse(split(6)).getTime), 81 | departureTime = Try(new java.sql.Timestamp(hourMinuteDateFormat.parse(split(7)).getTime)).toOption, 82 | departureDelay = Try(split(8).toDouble).toOption, 83 | wheelsOffTime = Try(new java.sql.Timestamp(hourMinuteDateFormat.parse(split(9)).getTime)).toOption, 84 | wheelsOnTime = Try(new 
java.sql.Timestamp(hourMinuteDateFormat.parse(split(10)).getTime)).toOption, 85 | crsArrivalTime = new java.sql.Timestamp(hourMinuteDateFormat.parse(split(11)).getTime), 86 | arrivalTime = Try(new java.sql.Timestamp(hourMinuteDateFormat.parse(split(12)).getTime)).toOption, 87 | arrivalDelay = Try(split(13).toDouble).toOption, 88 | airTime = Try(split(14).toDouble).toOption, 89 | carrierDelay = Try(split(15).toDouble).toOption, 90 | weatherDelay = Try(split(16).toDouble).toOption, 91 | nasDelay = Try(split(17).toDouble).toOption, 92 | securityDelay = Try(split(18).toDouble).toOption, 93 | lateAircraftDelay = Try(split(19).toDouble).toOption 94 | ) 95 | }), 100) 96 | } 97 | 98 | def flights(sql: SQLContext): DataFrame = { 99 | import sql.implicits._ 100 | flights(sql.sparkContext).toDF 101 | } 102 | 103 | } 104 | 105 | 106 | 107 | 108 | 109 | -------------------------------------------------------------------------------- /datasets/src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Set everything to be logged to the console 2 | log4j.rootCategory=ERROR, console 3 | log4j.appender.console=org.apache.log4j.ConsoleAppender 4 | log4j.appender.console.target=System.err 5 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 7 | -------------------------------------------------------------------------------- /project/Dependencies.scala: -------------------------------------------------------------------------------- 1 | import sbt._ 2 | 3 | object Dependencies { 4 | 5 | // Versions 6 | lazy val sparkVersion = "1.5.0" 7 | lazy val sprayVersion = "1.3.2" 8 | lazy val replHelperVersion = "2.0.0" 9 | 10 | // Dependencies 11 | val sparkDependencies = Seq( 12 | "org.apache.spark" %% "spark-core" % sparkVersion % "provided", 13 | "org.apache.spark" %% "spark-graphx" % sparkVersion % "provided", 14 | "org.apache.spark" %% "spark-sql" % sparkVersion % "provided" 15 | ) 16 | val scalaTestDependencies = Seq( 17 | "org.scalatest" %% "scalatest" % "2.2.4" % "test", 18 | "org.scalamock" %% "scalamock-scalatest-support" % "3.2.1" % "test" 19 | ) 20 | 21 | // Projects 22 | val coreDependencies = sparkDependencies ++ scalaTestDependencies ++ Seq( 23 | "org.scalaz" %% "scalaz-core" % "7.1.3", 24 | "com.github.FRosner" %% "repl-helper" % replHelperVersion 25 | ) 26 | val datasetsDependencies = sparkDependencies ++ scalaTestDependencies 27 | val webUiDependencies = scalaTestDependencies ++ sparkDependencies ++ Seq( 28 | "io.spray" %% "spray-can" % sprayVersion, 29 | "io.spray" %% "spray-routing" % sprayVersion, 30 | "io.spray" %% "spray-caching" % sprayVersion, 31 | "io.spray" %% "spray-json" % "1.3.1", 32 | "com.typesafe.akka" %% "akka-actor" % "2.3.6", 33 | "org.scalaj" %% "scalaj-http" % "1.1.4", 34 | "com.github.FRosner" %% "repl-helper" % replHelperVersion 35 | ) 36 | 37 | } 38 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.9 2 | -------------------------------------------------------------------------------- /project/plugin.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.typesafe.sbt" % "sbt-s3" % "0.8") 2 | 3 | resolvers += Resolver.url("hmrc-sbt-plugin-releases", 4 | 
url("https://dl.bintray.com/hmrc/sbt-plugin-releases"))(Resolver.ivyStylePatterns) 5 | 6 | addSbtPlugin("com.joescii" % "sbt-jasmine-plugin" % "1.3.0") 7 | 8 | addSbtPlugin("uk.gov.hmrc" % "sbt-git-stamp" % "4.5.0") 9 | 10 | resolvers += Classpaths.sbtPluginReleases 11 | 12 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.0.4") 13 | 14 | addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.0.0") 15 | 16 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.1") 17 | 18 | run in Compile <<= Defaults.runTask(fullClasspath in Compile, mainClass in (Compile, run), runner in (Compile, run)) 19 | -------------------------------------------------------------------------------- /web-ui/build.sbt: -------------------------------------------------------------------------------- 1 | ////////////////// 2 | // Test Options // 3 | ////////////////// 4 | Seq(jasmineSettings: _*) 5 | 6 | (test in Test) <<= (test in Test) dependsOn (jasmine) 7 | 8 | appJsDir <+= sourceDirectory { src => src / "main" / "resources" / "ui" / "app" } 9 | 10 | appJsLibDir <+= sourceDirectory { src => src / "main" / "resources" / "ui" / "lib" } 11 | 12 | jasmineTestDir <+= sourceDirectory { src => src / "test" / "resources" / "ui" } 13 | 14 | jasmineConfFile <+= sourceDirectory { src => src / "test" / "resources" / "ui" / "test.dependencies.js" } 15 | 16 | jasmineRequireJsFile <+= sourceDirectory { src => src / "main" / "resources" / "ui" / "lib" / "require.js" } 17 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/src/img/PRGn.xcf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/src/img/PRGn.xcf -------------------------------------------------------------------------------- /web-ui/src/main/resources/src/img/YlOrRd.xcf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/src/img/YlOrRd.xcf -------------------------------------------------------------------------------- /web-ui/src/main/resources/src/img/arrowHead.xcf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/src/img/arrowHead.xcf -------------------------------------------------------------------------------- /web-ui/src/main/resources/src/img/edgeLabel.xcf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/src/img/edgeLabel.xcf -------------------------------------------------------------------------------- /web-ui/src/main/resources/src/img/jitter.xcf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/src/img/jitter.xcf -------------------------------------------------------------------------------- /web-ui/src/main/resources/src/img/letters.xcf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/src/img/letters.xcf -------------------------------------------------------------------------------- /web-ui/src/main/resources/src/img/lock.xcf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/src/img/lock.xcf -------------------------------------------------------------------------------- /web-ui/src/main/resources/src/img/nodeLabel.xcf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/src/img/nodeLabel.xcf -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/C3Chart.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var Visualization = require("Visualization"); 4 | 5 | function C3Chart() {} 6 | 7 | C3Chart.prototype = new Visualization(); 8 | C3Chart.prototype.constructor = Visualization; 9 | C3Chart.prototype.parent = Visualization.prototype; 10 | 11 | C3Chart.prototype._draw = function(servable) { 12 | var Util = require("util"), 13 | c3 = require("c3"); 14 | 15 | var chart = servable.c3; 16 | this._chartDiv = Util.generateDiv(this._content, "chart-" + this._content.id); 17 | chart.bindto = "#" + this._chartDiv.id; 18 | var marginForLegend = 40; 19 | chart.size = { 20 | width: this._width, 21 | height: this._height - marginForLegend 22 | }; 23 | chart.padding = this._margin; 24 | c3.generate(chart); 25 | } 26 | 27 | C3Chart.prototype._clear = function() {} 28 | 29 | return C3Chart; 30 | 31 | }); 32 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/Cache.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | document.servablesCache = {}; 4 | 5 | return { 6 | existsConfig: function(id) { 7 | return !(document.servablesCache[id] == null); 8 | }, 9 | 10 | getConfig: function(id) { 11 | if (!document.servablesCache[id]) { 12 | // lazy initialization 13 | // TODO remove this lazy initialization 14 | document.servablesCache[id] = {}; 15 | } 16 | return document.servablesCache[id]; 17 | }, 18 | 19 | setConfig: function(id, config) { 20 | document.servablesCache[id] = config; 21 | } 22 | }; 23 | 24 | }); 25 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/Composite.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var Visualization = require("Visualization"); 4 | 5 | function Composite(id) { 6 | this.id = id; 7 | } 8 | 9 | Composite.prototype = new Visualization(); 10 | Composite.prototype.constructor = Visualization; 11 | Composite.prototype.parent = Visualization.prototype; 12 | 13 | Composite.prototype._draw = function(compositeServable) { 14 | var $ = require("jquery"), 15 | Util = require("util"), 16 | d3 = require("d3"), 17 | Drawer = require("draw"); 18 | 19 | var id = this.id; 20 | 21 | var servedComponents = []; 22 | var thisContent = this._content; 23 | var composite = compositeServable.servables.map(function(row, rowIdx) { 24 | return 
row.map(function(cell, cellIdx) { 25 | cell.containerId = "container-" + thisContent.id + "-" + rowIdx + "-" + cellIdx; 26 | cell.contentId = "content-" + thisContent.id + "-" + rowIdx + "-" + cellIdx; 27 | cell.headerId = "header-" + thisContent.id + "-" + rowIdx + "-" + cellIdx; 28 | cell.contentWidth = $(thisContent) 29 | .width(); 30 | return cell; 31 | }); 32 | }); 33 | var container = d3.select(this._content) 34 | .append("div") 35 | .attr("class", "container-fluid"); 36 | var rows = container.selectAll("div") 37 | .data(composite) 38 | .enter() 39 | .append("div") 40 | .attr("class", "bootstrap-div row"); 41 | var cells = rows.selectAll("div") 42 | .data(function(row) { 43 | if (row.length > 12) { 44 | console.warn("The following row has more than 12 cells which is not properly supported by grid layout: " + 45 | JSON.stringify(row)); 46 | } 47 | var enhancedRow = row.map(function(cell) { 48 | var enhancedCell = cell; 49 | var columnLayout = "bootstrap-div col-lg-" 50 | if (row.length == 1) { 51 | enhancedCell.cssClass = columnLayout + "12"; 52 | } else if (row.length == 2) { 53 | enhancedCell.cssClass = columnLayout + "6"; 54 | enhancedCell.contentWidth = enhancedCell.contentWidth / 2 55 | } else if (row.length == 3) { 56 | enhancedCell.cssClass = columnLayout + "4"; 57 | enhancedCell.contentWidth = enhancedCell.contentWidth / 3 58 | } else if (row.length == 4) { 59 | enhancedCell.cssClass = columnLayout + "3"; 60 | enhancedCell.contentWidth = enhancedCell.contentWidth / 4 61 | } else if (row.length <= 6) { 62 | enhancedCell.cssClass = columnLayout + "2"; 63 | enhancedCell.contentWidth = enhancedCell.contentWidth / 6 64 | } else { 65 | enhancedCell.cssClass = columnLayout + "1"; 66 | enhancedCell.contentWidth = enhancedCell.contentWidth / 12 67 | } 68 | return enhancedCell; 69 | }); 70 | return enhancedRow; 71 | }) 72 | .enter(); 73 | var container = cells.append("div") 74 | .attr("class", function(cell) { 75 | return cell.cssClass; 76 | }) 77 | .attr("id", function(cell) { 78 | return cell.containerId; 79 | }); 80 | var header = container.append("div") 81 | .attr("class", "header") 82 | .attr("id", function(cell) { 83 | return cell.headerId; 84 | }); 85 | var content = container.append("div") 86 | .attr("id", function(cell) { 87 | return cell.contentId; 88 | }) 89 | .attr("style", function(cell) { 90 | return "height: " + Math.ceil(cell.contentWidth / 16 * 9) + "px" 91 | }) 92 | .each(function(cell) { 93 | var servable = cell; 94 | // TODO this will not be required anymore once the id is part of the servable 95 | // TODO also then we don't need to hack the id hopefully... 
96 | servable.id = id + cell.contentId; 97 | var contentId = cell.contentId; 98 | var headerId = cell.headerId; 99 | servedComponents.push(Drawer.drawServable(servable, headerId, contentId)); 100 | }); 101 | this._servedComponents = servedComponents; 102 | } 103 | 104 | Composite.prototype._clear = function() {} 105 | 106 | return Composite; 107 | 108 | }); 109 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/Empty.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var Visualization = require("Visualization"); 4 | 5 | function Empty() {} 6 | 7 | Empty.prototype = new Visualization(); 8 | Empty.prototype.constructor = Visualization; 9 | Empty.prototype.parent = Visualization.prototype; 10 | 11 | Empty.prototype._draw = function() {} 12 | 13 | Empty.prototype._clear = function() {} 14 | 15 | return Empty; 16 | 17 | }); 18 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/Histogram.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var Visualization = require("Visualization"); 4 | 5 | function Histogram() {} 6 | 7 | Histogram.prototype = new Visualization(); 8 | Histogram.prototype.constructor = Visualization; 9 | Histogram.prototype.parent = Visualization.prototype; 10 | 11 | Histogram.prototype._draw = function(histogram) { 12 | var Util = require("util"), 13 | d3 = require("d3"); 14 | 15 | var bins = histogram.frequencies.map(function(frequency, idx) { 16 | return { 17 | start : histogram.bins[idx], 18 | end : histogram.bins[idx + 1], 19 | y : frequency 20 | }; 21 | }); 22 | 23 | var divId = "histogram-" + this._content.id; 24 | 25 | this._chartDiv = Util.generateDiv(this._content, divId); 26 | this._chartDiv.className = "c3"; 27 | 28 | var margin = this._margin; 29 | var width = this._width - margin.left - margin.right, 30 | height = this._height - margin.top - margin.bottom; 31 | 32 | var svg = d3.select("#" + divId) 33 | .append("svg") 34 | .attr("width", width + margin.left + margin.right) 35 | .attr("height", height + margin.top + margin.bottom) 36 | .append("g") 37 | .attr("transform", "translate(" + margin.left + "," + margin.top + ")"); 38 | 39 | var x = d3.scale.linear() 40 | .range([0, width]); 41 | 42 | var y = d3.scale.linear() 43 | .range([height, 0]); 44 | 45 | x.domain([ 46 | d3.min(bins.map(function(bin) { 47 | return bin.start; 48 | })), 49 | d3.max(bins.map(function(bin) { 50 | return bin.end; 51 | })) 52 | ]); 53 | 54 | bins = bins.map(function(bin) { 55 | bin.width = x(bin.end) - x(bin.start); 56 | bin.height = bin.y / (bin.end - bin.start); 57 | return bin; 58 | }); 59 | 60 | y.domain([ 61 | 0, 62 | d3.max(bins.map(function(bin) { 63 | return bin.height; 64 | })) 65 | ]); 66 | 67 | bins = bins.map(function(bin) { 68 | bin.height = y(bin.height); 69 | return bin; 70 | }); 71 | 72 | svg.selectAll(".bin") 73 | .data(bins) 74 | .enter() 75 | .append("rect") 76 | .attr("fill", "steelblue") 77 | .attr("class", "bin") 78 | .attr("x", function(bin) { 79 | return x(bin.start); 80 | }) 81 | .attr("width", function(bin) { 82 | return bin.width - 1; 83 | }) 84 | .attr("y", function(bin) { 85 | return bin.height; 86 | }) 87 | .attr("height", function(bin) { 88 | return height - bin.height; 89 | }); 90 | 91 | svg.append("g") 92 | .attr("class", "x axis") 93 | .attr("transform", "translate(0," + height + 
")") 94 | .call(d3.svg.axis() 95 | .scale(x) 96 | .orient("bottom")); 97 | 98 | svg.append("g") 99 | .attr("class", "y axis") 100 | .call(d3.svg.axis() 101 | .scale(y) 102 | .orient("left")); 103 | } 104 | 105 | Histogram.prototype._clear = function() {} 106 | 107 | return Histogram; 108 | 109 | }); 110 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/KeyValueSequence.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var Visualization = require("Visualization"); 4 | 5 | function KeyValueSequence() {} 6 | 7 | KeyValueSequence.prototype = new Visualization(); 8 | KeyValueSequence.prototype.constructor = Visualization; 9 | KeyValueSequence.prototype.parent = Visualization.prototype; 10 | 11 | KeyValueSequence.prototype._draw = function(servable) { 12 | var Util = require("util"), 13 | d3 = require("d3"); 14 | 15 | var contentId = this._content.id; 16 | var table = Util.generateElement(this._content, contentId + "-listing", "table"); 17 | this._content.style.display = "table-cell"; 18 | this._content.style.verticalAlign = "middle"; 19 | this._content.style.textAlign = "center"; 20 | this._content.style.width = "5000px"; 21 | table.setAttribute("class", "keyValueTable"); 22 | var keyValueArray = servable.keyValuePairs 23 | .map(function(keyValuePair) { 24 | return [{ 25 | entry: keyValuePair.key, 26 | class: "key" 27 | }, { 28 | entry: keyValuePair.val, 29 | class: "value" 30 | }]; 31 | }); 32 | var rows = d3.select(table) 33 | .selectAll("tr") 34 | .data(keyValueArray) 35 | .enter() 36 | .append("tr"); 37 | rows.selectAll("td") 38 | .data(function(row) { 39 | return row; 40 | }) 41 | .enter() 42 | .append("td") 43 | .text(function(cell) { 44 | return cell.entry; 45 | }) 46 | .attr("class", function(cell) { 47 | return cell.class; 48 | }); 49 | } 50 | 51 | KeyValueSequence.prototype._clear = function() {} 52 | 53 | return KeyValueSequence; 54 | 55 | }); 56 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/Matrix.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var Visualization = require("Visualization"), 4 | Util = require("util"), 5 | Cache = require("Cache"); 6 | 7 | function Matrix(id) { 8 | } 9 | 10 | Matrix.prototype = new Visualization(); 11 | Matrix.prototype.constructor = Visualization; 12 | Matrix.prototype.parent = Visualization.prototype; 13 | 14 | // TODO implement a true multi-zero color scale that does not just take the number of zeroes but also the values 15 | Matrix.prototype._draw = function(matrixAndNames) { 16 | var d3 = require("d3"), 17 | chroma = require("chroma"); 18 | 19 | var vizId = this._content.id; 20 | var config = this.config; 21 | var matrix = Util.flatMap(matrixAndNames.content, function(row, i) { 22 | return row.map(function(entry, j) { 23 | return { 24 | x: j, 25 | y: i, 26 | z: entry 27 | }; 28 | }); 29 | }); 30 | var rowNames = matrixAndNames.rowNames; 31 | var colNames = matrixAndNames.colNames; 32 | var zColorZeroes = matrixAndNames.zColorZeroes; 33 | if (zColorZeroes.length < 2) { 34 | console.error("zColorZeroes must have at least min and max but has only: " + zColorZeroes); 35 | } 36 | 37 | this._chartDiv = Util.generateDiv(this._content, "chart-" + vizId); 38 | this._chartDiv.className = "c3"; 39 | 40 | var margin = this._margin; 41 | var width = this._width - margin.left - 
margin.right; 42 | var height = this._height - margin.top - margin.bottom; 43 | 44 | var x = d3.scale.ordinal() 45 | .domain(colNames) 46 | .rangeBands([0, width]); 47 | 48 | var y = d3.scale.ordinal() 49 | .domain(rowNames) 50 | .rangeBands([height, 0]); 51 | 52 | var zDomain = [ 53 | matrixAndNames.zColorZeroes[0], 54 | matrixAndNames.zColorZeroes[zColorZeroes.length - 1] 55 | ]; 56 | var zScaleString = "YlOrRd"; 57 | if (zColorZeroes.length == 3) { 58 | zScaleString = "PRGn"; 59 | } else if (zColorZeroes.length > 3) { 60 | console.warn("Currently only up to three colors are supported, but given: " + zColorZeroes); 61 | } 62 | var z = chroma.scale(zScaleString) 63 | .domain(zDomain); 64 | 65 | var chart = d3.select("#chart-" + vizId) 66 | .append('svg:svg') 67 | .attr('width', width + margin.right + margin.left) 68 | .attr('height', height + margin.top + margin.bottom) 69 | .attr('class', 'c3') 70 | 71 | var main = chart.append('g') 72 | .attr('transform', 'translate(' + margin.left + ',' + margin.top + ')') 73 | .attr('width', width) 74 | .attr('height', height) 75 | .attr('class', 'main') 76 | 77 | var xAxis = d3.svg.axis() 78 | .scale(x) 79 | .orient('bottom'); 80 | 81 | main.append('g') 82 | .attr('transform', 'translate(0,' + height + ')') 83 | .attr('class', 'x axis') 84 | .call(xAxis); 85 | 86 | var yAxis = d3.svg.axis() 87 | .scale(y) 88 | .orient('left'); 89 | 90 | main.append('g') 91 | .attr('transform', 'translate(0,0)') 92 | .attr('class', 'y axis') 93 | .call(yAxis); 94 | 95 | var g = main.append("svg:g"); 96 | 97 | var rects = g.selectAll("matrix-rects") 98 | .data(matrix) 99 | .enter() 100 | .append("rect") 101 | .attr("class", "cell") 102 | .attr("x", function(p) { 103 | return x(colNames[p.x]) + 1; 104 | }) 105 | .attr("y", function(p) { 106 | return y(rowNames[p.y]); 107 | }) 108 | .attr("width", x.rangeBand() - 1) 109 | .attr("height", y.rangeBand() - 1) 110 | .attr("fill", function(value) { 111 | return (value.z != null) ? 
z(value.z) : "#000000"; 112 | }) 113 | .attr("class", "matrix-cell") 114 | rects.append("svg:title") 115 | .text(function(value) { 116 | return value.z; 117 | }); 118 | } 119 | 120 | Matrix.prototype._clear = function() { 121 | } 122 | 123 | return Matrix; 124 | 125 | }); 126 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/Scatter2D.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var Visualization = require("Visualization"), 4 | Util = require("util"), 5 | Cache = require("Cache"); 6 | 7 | function Scatter2D(id) { 8 | if (Cache.existsConfig(id)) { 9 | this.config = Cache.getConfig(id); 10 | } else { 11 | this.config = { 12 | jitterEnabled: false 13 | }; 14 | Cache.setConfig(id, this.config); 15 | } 16 | } 17 | 18 | Scatter2D.prototype = new Visualization(); 19 | Scatter2D.prototype.constructor = Visualization; 20 | Scatter2D.prototype.parent = Visualization.prototype; 21 | 22 | Scatter2D.prototype._draw = function(pointsWithTypes) { 23 | var d3 = require("d3"); 24 | 25 | var scatterVis = this; 26 | var divId = "scatter-" + this._content.id; 27 | var config = this.config; 28 | 29 | function drawScatter() { 30 | var points = pointsWithTypes.points; 31 | var xIsNumeric = pointsWithTypes.xIsNumeric; 32 | var yIsNumeric = pointsWithTypes.yIsNumeric; 33 | 34 | scatterVis._chartDiv = Util.generateDiv(scatterVis._content, divId); 35 | scatterVis._chartDiv.className = "c3"; 36 | 37 | var margin = scatterVis._margin; 38 | var width = scatterVis._width - margin.left - margin.right; 39 | var height = scatterVis._height - margin.top - margin.bottom; 40 | 41 | var x; 42 | if (xIsNumeric) { 43 | var minX = d3.min(points, function(p) { 44 | return p.x; 45 | }); 46 | var maxX = d3.max(points, function(p) { 47 | return p.x; 48 | }); 49 | var dX = maxX - minX; 50 | x = d3.scale.linear() 51 | .domain([minX - dX * 0.01, maxX + dX * 0.01]) 52 | .range([0, width]); 53 | } else { 54 | x = d3.scale.ordinal() 55 | .domain(_.uniq(points.map(function(p) { 56 | return p.x 57 | }))) 58 | .rangeBands([0, width]); 59 | } 60 | 61 | var y; 62 | if (yIsNumeric) { 63 | var minY = d3.min(points, function(p) { 64 | return p.y; 65 | }); 66 | var maxY = d3.max(points, function(p) { 67 | return p.y; 68 | }); 69 | var dY = maxY - minY; 70 | y = d3.scale.linear() 71 | .domain([minY - dY * 0.02, maxY + dY * 0.02]) 72 | .range([height, 0]); 73 | } else { 74 | y = d3.scale.ordinal() 75 | .domain(_.uniq(points.map(function(p) { 76 | return p.y 77 | }))) 78 | .rangeBands([height, 0]); 79 | } 80 | 81 | var chart = d3.select("#" + divId) 82 | .append('svg:svg') 83 | .attr('width', width + margin.right + margin.left) 84 | .attr('height', height + margin.top + margin.bottom) 85 | .attr('class', 'c3') 86 | 87 | var main = chart.append('g') 88 | .attr('transform', 'translate(' + margin.left + ',' + margin.top + ')') 89 | .attr('width', width) 90 | .attr('height', height) 91 | .attr('class', 'main') 92 | 93 | var xAxis = d3.svg.axis() 94 | .scale(x) 95 | .orient('bottom'); 96 | 97 | main.append('g') 98 | .attr('transform', 'translate(0,' + height + ')') 99 | .attr('class', 'x axis') 100 | .call(xAxis); 101 | 102 | var yAxis = d3.svg.axis() 103 | .scale(y) 104 | .orient('left'); 105 | 106 | main.append('g') 107 | .attr('transform', 'translate(0,0)') 108 | .attr('class', 'y axis') 109 | .call(yAxis); 110 | 111 | var g = main.append("svg:g"); 112 | 113 | g.selectAll("scatter-dots") 114 | .data(points) 
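// One circle is appended per point; for ordinal (non-numeric) axes the point is centred in its band and, when jitter is enabled, offset randomly by up to +/-20% of the band width (see the cx/cy callbacks below).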
115 | .enter() 116 | .append("svg:circle") 117 | .attr("cx", function(p) { 118 | if (xIsNumeric) { 119 | return x(p.x) 120 | } else { 121 | var jitter = (config.jitterEnabled) ? (x.rangeBand() * (Math.random(1) - 0.5) * 0.4) : 0; 122 | return x(p.x) + (x.rangeBand() / 2) + jitter; 123 | } 124 | }) 125 | .attr("cy", function(p) { 126 | if (yIsNumeric) { 127 | return y(p.y) 128 | } else { 129 | var jitter = (config.jitterEnabled) ? (y.rangeBand() * (Math.random(1) - 0.5) * 0.4) : 0; 130 | return y(p.y) + (y.rangeBand() / 2) + jitter; 131 | } 132 | }) 133 | .attr("r", 3); 134 | } 135 | 136 | var enableJitterButton = document.createElement('div'); 137 | enableJitterButton.setAttribute("id", "enableJitterButton"); 138 | this._header.appendChild(enableJitterButton); 139 | var contentId = this._content.id; 140 | enableJitterButton.onclick = function() { 141 | if (config.jitterEnabled) { 142 | config.jitterEnabled = false; 143 | enableJitterButton.setAttribute("class", "headerButton disabled"); 144 | enableJitterButton.setAttribute("title", "Enable Jitter"); 145 | } else { 146 | config.jitterEnabled = true; 147 | enableJitterButton.setAttribute("class", "headerButton enabled"); 148 | enableJitterButton.setAttribute("title", "Disable Jitter"); 149 | } 150 | document.getElementById(contentId) 151 | .innerHTML = ""; 152 | drawScatter(); 153 | }; 154 | if (config.jitterEnabled) { 155 | enableJitterButton.setAttribute("class", "headerButton enabled"); 156 | enableJitterButton.setAttribute("title", "Disable Jitter"); 157 | } else { 158 | enableJitterButton.setAttribute("class", "headerButton disabled"); 159 | enableJitterButton.setAttribute("title", "Enable Jitter"); 160 | } 161 | this._enableJitterButton = enableJitterButton; 162 | drawScatter(); 163 | } 164 | 165 | Scatter2D.prototype._clear = function() { 166 | Util.removeElementIfExists(this._enableJitterButton); 167 | } 168 | 169 | return Scatter2D; 170 | 171 | }); 172 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/Visualization.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Base prototype for visualizations of DDS. Each visualization can use two parts of the DOM: 3 | * 1. Header to add navigation and configuration elements like buttons or sliders 4 | * 2. Content to add SVG and canvas elements for the visualization itself 5 | * 6 | * When implementing a new visualization, one needs to specify how to draw the visualization 7 | * given a data object (_draw). Each visualization also needs to be able to remove all elements 8 | * it has constructed (clear). 
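 *
 * A minimal subclass therefore looks like the existing ones in this folder (illustrative
 * sketch only; "MyViz" is a made-up name, the prototype wiring mirrors Empty.js and
 * Histogram.js, and the fluent call mirrors how draw.js uses the visualizations):
 *
 *   function MyViz() {}
 *   MyViz.prototype = new Visualization();
 *   MyViz.prototype.constructor = Visualization;
 *   MyViz.prototype.parent = Visualization.prototype;
 *   MyViz.prototype._draw = function(data) { ... };  // render data into this._content
 *   MyViz.prototype._clear = function() { ... };     // remove everything _draw added
 *
 *   new MyViz()
 *     .header("header")
 *     .content("content")
 *     .title("My visualization")
 *     .data(data)
 *     .draw();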
9 | * 10 | * 11 | * Methods: 12 | * 13 | * - title(String): Set title of the visualization 14 | * - title(): Get title of the visualization 15 | * 16 | * - header(String): Set ID of DOM element to use as header; w/o '#' 17 | * - header(): Get DOM element to use as header 18 | * 19 | * - content(String): Set ID of DOM element to use for drawing; w/o '#' 20 | * - content(): Get DOM element to use for drawing 21 | * 22 | * - margin(Object): Set margins to add between content div borders and drawing; 23 | * Needs to have Integer values for margin.top, margin.bottom, margin.left and margin.right; 24 | * If no margin is specified, default values of 0 will be used for all four dimensions 25 | * - margin(): Get margins 26 | * 27 | * - height(Number): Set height of the visualization 28 | * - height(): Get height of the visualization 29 | * 30 | * - width(Number): Set width of the visualization 31 | * - width(): Get width of the visualization 32 | * 33 | * - data(Object): Set data object to be used when drawing 34 | * - data(): Get data 35 | * 36 | * - draw(): Draw the current visualization 37 | * - _draw(): Subclass implementation of drawing method (NEEDS TO BE IMPLEMENTED BY SUBCLASS) 38 | * 39 | * - _verify(): Checks whether the visualization has been initialized correctly 40 | * 41 | * - clear(): Removes all header elements generated by the visualization 42 | * - _clear() Subclass implementation of clear method (NEEDS TO BE IMPLEMENTED BY SUBCLASS) 43 | * 44 | */ 45 | define(function(require) { 46 | 47 | var Util = require("util") 48 | 49 | function Visualization() { 50 | this._defaultMargin = { 51 | top: 0, 52 | bottom: 0, 53 | left: 0, 54 | right: 0 55 | }; 56 | this._margin = this._defaultMargin; 57 | this._title = ""; 58 | } 59 | 60 | Visualization.prototype.title = function(newTitle) { 61 | if (newTitle != null) { 62 | this._title = newTitle; 63 | return this; 64 | } else { 65 | return this._title; 66 | } 67 | } 68 | 69 | Visualization.prototype.header = function(newHeaderId) { 70 | if (newHeaderId != null) { 71 | this._header = document.getElementById(newHeaderId); 72 | return this; 73 | } else { 74 | return this._header; 75 | } 76 | } 77 | 78 | Visualization.prototype.content = function(newContentId) { 79 | if (newContentId != null) { 80 | this._content = document.getElementById(newContentId); 81 | if (!this._width) { 82 | this._width = $(this._content) 83 | .width(); 84 | } 85 | if (!this._height) { 86 | this._height = $(this._content) 87 | .height(); 88 | } 89 | return this; 90 | } else { 91 | return this._content; 92 | } 93 | } 94 | 95 | Visualization.prototype.margin = function(newMargin) { 96 | if (newMargin != null) { 97 | for (marginType in this._defaultMargin) { 98 | if (!newMargin[marginType]) { 99 | newMargin[marginType] = this._defaultMargin[marginType]; 100 | } 101 | } 102 | this._margin = newMargin; 103 | return this; 104 | } else { 105 | return this._margin; 106 | } 107 | } 108 | 109 | Visualization.prototype.width = function(newWidth) { 110 | if (newWidth != null) { 111 | this._width = newWidth; 112 | return this; 113 | } else { 114 | return this._width; 115 | } 116 | } 117 | 118 | Visualization.prototype.height = function(newHeight) { 119 | if (newHeight != null) { 120 | this._height = newHeight; 121 | return this; 122 | } else { 123 | return this._height; 124 | } 125 | } 126 | 127 | Visualization.prototype.data = function(newData) { 128 | if (newData != null) { 129 | this._data = newData; 130 | return this; 131 | } else { 132 | return this._data; 133 | } 134 | } 135 | 136 
| Visualization.prototype._verify = function() { 137 | if (this._header == null) { 138 | console.error("Header element not specified."); 139 | } 140 | if (this._content == null) { 141 | console.error("Content element not specified."); 142 | } 143 | if (this._margin == null) { 144 | console.error("Margin not specified."); 145 | } 146 | if (this._width == null) { 147 | console.error("Width not specified."); 148 | } 149 | if (this._height == null) { 150 | console.error("Height not specified."); 151 | } 152 | if (this._data == null) { 153 | console.error("Data not specified."); 154 | } 155 | } 156 | 157 | Visualization.prototype._draw = function() { 158 | console.error("Protected _draw method needs to be implemented when using the Visualization prototype."); 159 | } 160 | 161 | Visualization.prototype.draw = function() { 162 | this._verify(); 163 | var titleSpan = Util.generateSpan(this._header, this._content.id + "-title") 164 | titleSpan.innerHTML = this._title; 165 | titleSpan.className = "servable-title"; 166 | this._titleSpan = titleSpan; 167 | this._draw(this._data); 168 | return this; 169 | } 170 | 171 | Visualization.prototype.clear = function() { 172 | Util.removeElementIfExists(this._titleSpan); 173 | this._clear(); 174 | } 175 | 176 | Visualization.prototype._clear = function() { 177 | console.error("_clear method needs to be implemented when using the Visualization prototype."); 178 | } 179 | 180 | return Visualization; 181 | 182 | }); 183 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/draw.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | function drawServable(servableIdAndType, headerId, contentId) { 4 | var Visualization = require("Visualization"), 5 | C3Chart = require("C3Chart"), 6 | Empty = require("Empty"), 7 | Composite = require("Composite"), 8 | Graph = require("Graph"), 9 | Histogram = require("Histogram"), 10 | KeyValueSequence = require("KeyValueSequence"), 11 | Matrix = require("Matrix"), 12 | Scatter2D = require("Scatter2D"), 13 | Table = require("Table"); 14 | 15 | var toDraw; 16 | var id = servableIdAndType.id; 17 | var servable = servableIdAndType.servable; 18 | var servableType = servableIdAndType.type; 19 | 20 | if (servableType == "composite") { 21 | toDraw = new Composite(id) 22 | .margin({ 23 | top: 30, 24 | right: 0, 25 | bottom: 0, 26 | left: 0 27 | }) 28 | .data(servable); 29 | } else if (servableType == "blank") { 30 | toDraw = new Empty(); 31 | } else if (servableType == "keyValueSequence") { 32 | toDraw = new KeyValueSequence() 33 | .data(servable); 34 | } else if (servableType == "bar") { 35 | var c3Chart = { 36 | data : { 37 | columns : servable.series.map(function(label, idx) { 38 | return [label].concat(servable.heights[idx]); 39 | }), 40 | types : servable.series.reduce(function(agg, label) { 41 | agg[label] = "bar"; 42 | return agg; 43 | }, {}) 44 | }, 45 | axis : { 46 | x : { 47 | type : "category", 48 | categories : servable.xDomain 49 | } 50 | } 51 | } 52 | servable.c3 = c3Chart; 53 | toDraw = new C3Chart() 54 | .margin({ 55 | top: 5, 56 | right: 15, 57 | left: 60 58 | }) 59 | .data(servable); 60 | } else if (servableType == "pie") { 61 | var c3Chart = { 62 | data : { 63 | columns : servable.categoryCountPairs.map(function(categoryCountPair) { 64 | var category = categoryCountPair[0]; 65 | var count = categoryCountPair[1]; 66 | return [category, count]; 67 | }), 68 | type : "pie" 69 | } 70 | } 71 | 
servable.c3 = c3Chart; 72 | toDraw = new C3Chart() 73 | .margin({ 74 | top: 5, 75 | right: 15, 76 | left: 60 77 | }) 78 | .data(servable); 79 | } else if (servableType == "table") { 80 | toDraw = new Table(id) 81 | .margin({ 82 | top: 30, 83 | right: 0, 84 | bottom: 0, 85 | left: 0 86 | }) 87 | .data(servable); 88 | } else if (servableType == "histogram") { 89 | toDraw = new Histogram() 90 | .margin({ 91 | top: 20, 92 | right: 60, 93 | bottom: 60, 94 | left: 60 95 | }) 96 | .data(servable); 97 | } else if (servableType == "graph") { 98 | toDraw = new Graph(id) 99 | .data(servable); 100 | } else if (servableType == "scatter") { 101 | toDraw = new Scatter2D(id) 102 | .margin({ 103 | top: 10, 104 | right: 15, 105 | bottom: 60, 106 | left: 60 107 | }) 108 | .data(servable); 109 | } else if (servableType == "heatmap") { 110 | toDraw = new Matrix(id) 111 | .margin({ 112 | top: 10, 113 | right: 15, 114 | bottom: 60, 115 | left: 60 116 | }) 117 | .data(servable); 118 | } else { 119 | console.error("Unrecognized response: " + response); 120 | } 121 | if (toDraw != null) { 122 | toDraw = toDraw.header(headerId) 123 | .content(contentId) 124 | .title(servable.title) 125 | .draw(); 126 | return toDraw; 127 | } 128 | } 129 | 130 | return { 131 | drawServable : drawServable 132 | }; 133 | 134 | }); 135 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/init.js: -------------------------------------------------------------------------------- 1 | require(['main'], function(Main){ 2 | Main.start(); 3 | }); 4 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/main.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var $ = require("jquery"), 4 | Util = require("util"), 5 | Cache = require("Cache"), 6 | d3 = require("d3"), 7 | Drawer = require("draw"); 8 | 9 | function updateServableSelector() { 10 | $.get('/servables', function(servablesString) { 11 | var servables = JSON.parse(servablesString).concat([{id: "latest"}]).reverse(); 12 | // TODO factor this out to Utils 13 | function pad(num) { 14 | return ("0" + num).substr(-2); 15 | }; 16 | d3.select("#servableSelector") 17 | .selectAll("option") 18 | .data(servables, function(servable) {return (servable) ? 
servable.id : d3.select(this).attr("value");}) 19 | .enter() 20 | .append("option") 21 | .text(function(servable) { 22 | if (servable.id == "latest") { 23 | return "Latest"; 24 | } else { 25 | var date = new Date(servable.time); 26 | var dateString = pad(date.getHours()) + ":" + pad(date.getMinutes()) + ":" + pad(date.getSeconds()); 27 | return servable.id + " - " + servable.title + " (" + dateString + ")"; 28 | } 29 | }).attr("value", function(servable) { 30 | return servable.id; 31 | }); 32 | }); 33 | } 34 | 35 | function handleServableResponse(response) { 36 | if (response != "{}") { 37 | updateServableSelector(); 38 | var servableAndId = JSON.parse(response); 39 | var servable = servableAndId.servable; 40 | // TODO put the ID in also in the back-end 41 | servable.id = servableAndId.id; 42 | document.lastServedId = servableAndId.id; 43 | document.isNewVisualization = true; 44 | Util.doAndRedoOnResizeOf(window, function() { 45 | var contentId = "content"; 46 | var headerId = "header"; 47 | if (document.lastServed) { 48 | document.lastServed.clear(); 49 | } 50 | var previousContentDiv = document.getElementById(contentId); 51 | var contentParent = previousContentDiv.parentNode; 52 | Util.removeElementIfExists(previousContentDiv); 53 | Util.generateDiv(contentParent, contentId); 54 | document.lastServed = Drawer.drawServable(servable, headerId, contentId); 55 | document.isNewVisualization = false; 56 | }); 57 | } 58 | } 59 | 60 | function checkForUpdate() { 61 | $.ajax({ 62 | url: "/servables/latest" + ((document.lastServedId != null) ? "?current=" + document.lastServedId : ""), 63 | success: handleServableResponse 64 | }); 65 | } 66 | 67 | function toggleUpdating() { 68 | var lockButton = document.getElementById("lockButton"); 69 | if (document.checkingForUpdate == true) { 70 | lockButton.className = "headerButton locked"; 71 | lockButton.title = "Unlock Vizboard"; 72 | document.checkingForUpdate = false; 73 | clearInterval(document.updater); 74 | document.updater = null; 75 | } else { 76 | lockButton.className = "headerButton unlocked"; 77 | lockButton.title = "Lock Vizboard"; 78 | document.getElementById("servableSelector").value = "latest"; 79 | document.updater = setInterval(checkForUpdate, 100); 80 | document.checkingForUpdate = true; 81 | } 82 | } 83 | 84 | return { 85 | start: function() { 86 | $(document) 87 | .ready(toggleUpdating); 88 | $(document).ready(function() { 89 | $("#lockButton").click(toggleUpdating); 90 | updateServableSelector(); 91 | $("#servableSelector").change(function(x) { 92 | var selectedServable = document.getElementById("servableSelector").value; 93 | if (selectedServable == "latest" && document.checkingForUpdate == false) { 94 | toggleUpdating(); 95 | } else { 96 | if (document.checkingForUpdate == true) { 97 | toggleUpdating(); 98 | } 99 | $.ajax({ 100 | url: "/servables/" + selectedServable, 101 | success: handleServableResponse 102 | }); 103 | } 104 | }); 105 | }); 106 | } 107 | } 108 | 109 | }); 110 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/require.config.js: -------------------------------------------------------------------------------- 1 | var libFolder = "../lib/"; 2 | var appFolder = "../app/"; 3 | var slickGridFolder = "slickgrid/"; 4 | 5 | require.config({ 6 | paths: { 7 | jquery: libFolder + "jquery-1.7.min", 8 | dragevent: libFolder + "jquery.event.drag-2.2.min", 9 | 10 | d3: libFolder + "d3.v3.min", 11 | parcoords: libFolder + "d3.parcoords.min", 12 | c3: libFolder + 
"c3.min", 13 | 14 | chroma: libFolder + "chroma.min", 15 | 16 | underscore: libFolder + "underscore.min", 17 | 18 | slickcore: libFolder + slickGridFolder + "slick.core.min", 19 | slickgrid: libFolder + slickGridFolder + "slick.grid.min", 20 | slickdataview: libFolder + slickGridFolder + "slick.dataview.min", 21 | slickpager: libFolder + slickGridFolder + "slick.pager.min" 22 | } 23 | }); 24 | 25 | require(['d3'], function(d3) { 26 | console.debug("Loading d3 v" + d3.version); 27 | }); 28 | 29 | require.config({ 30 | shim: { 31 | dragevent: ['jquery'], 32 | "d3.parcoords": { 33 | exports: "d3.parcoords" 34 | }, 35 | 36 | slickcore: ['dragevent'], 37 | slickgrid: ['slickcore', 'dragevent'], 38 | slickdataview: ['slickgrid'], 39 | slickpager: ['slickgrid'] 40 | } 41 | }); 42 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/app/util.js: -------------------------------------------------------------------------------- 1 | define(function(require) { 2 | 3 | var _ = require("underscore"); 4 | 5 | function Util() {}; 6 | 7 | Util.prototype.doAndRedoOnResizeOf = function(w, f) { 8 | f(); 9 | w.onresize = f; 10 | }; 11 | 12 | Util.prototype.flatMap = function(seq, f) { 13 | return _.flatten(seq.map(f), true); 14 | }; 15 | 16 | Util.prototype.removeElementIfExists = function(element) { 17 | if (element != null) { 18 | var parent = element.parentNode; 19 | if (parent == null) { 20 | console.warn("Trying to remove " + element.tagName + " (" + element.id + 21 | ") but parent node does not exist anymore."); 22 | } else { 23 | parent.removeChild(element); 24 | } 25 | } 26 | }; 27 | 28 | Util.prototype.removeElementByIdIfExists = function(elementId) { 29 | this.removeElementIfExists(document.getElementById(elementId)); 30 | }; 31 | 32 | Util.prototype.generateElement = function(root, id, type) { 33 | var element = document.createElement(type); 34 | element.setAttribute("id", id); 35 | root.appendChild(element); 36 | return element; 37 | } 38 | 39 | Util.prototype.generateDiv = function(root, id) { 40 | return this.generateElement(root, id, "div"); 41 | } 42 | 43 | Util.prototype.generateSpan = function(root, id) { 44 | return this.generateElement(root, id, "span"); 45 | } 46 | 47 | Util.prototype.generateTextInput = function(root, id) { 48 | var input = this.generateElement(root, id, "input"); 49 | input.type = "text"; 50 | return input; 51 | } 52 | 53 | return new Util(); 54 | 55 | }); 56 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/css/c3.css: -------------------------------------------------------------------------------- 1 | /*-- Chart --*/ 2 | 3 | .c3 svg { 4 | font: 10px sans-serif; 5 | } 6 | .c3 path, .c3 line { 7 | fill: none; 8 | stroke: #000; 9 | } 10 | .c3 text { 11 | -webkit-user-select: none; 12 | -moz-user-select: none; 13 | user-select: none; 14 | } 15 | 16 | .c3-legend-item-tile, 17 | .c3-xgrid-focus, 18 | .c3-ygrid, 19 | .c3-event-rect, 20 | .c3-bars path { 21 | shape-rendering: crispEdges; 22 | } 23 | 24 | .c3-chart-arc path { 25 | stroke: #fff; 26 | 27 | } 28 | .c3-chart-arc text { 29 | fill: #fff; 30 | font-size: 13px; 31 | } 32 | 33 | /*-- Axis --*/ 34 | 35 | .c3-axis-x .tick { 36 | } 37 | .c3-axis-x-label { 38 | } 39 | 40 | .c3-axis-y .tick { 41 | } 42 | .c3-axis-y-label { 43 | } 44 | 45 | .c3-axis-y2 .tick { 46 | } 47 | .c3-axis-y2-label { 48 | } 49 | 50 | /*-- Grid --*/ 51 | 52 | .c3-grid line { 53 | stroke: #aaa; 54 | } 55 | .c3-grid text { 56 | fill: 
#aaa; 57 | } 58 | .c3-xgrid, .c3-ygrid { 59 | stroke-dasharray: 3 3; 60 | } 61 | .c3-xgrid-focus { 62 | } 63 | 64 | /*-- Text on Chart --*/ 65 | 66 | .c3-text { 67 | } 68 | 69 | .c3-text.c3-empty { 70 | fill: #808080; 71 | font-size: 2em; 72 | } 73 | 74 | /*-- Line --*/ 75 | 76 | .c3-line { 77 | stroke-width: 1px; 78 | } 79 | /*-- Point --*/ 80 | 81 | .c3-circle._expanded_ { 82 | stroke-width: 1px; 83 | stroke: white; 84 | } 85 | .c3-selected-circle { 86 | fill: white; 87 | stroke-width: 2px; 88 | } 89 | 90 | /*-- Bar --*/ 91 | 92 | .c3-bar { 93 | stroke-width: 0; 94 | } 95 | .c3-bar._expanded_ { 96 | fill-opacity: 0.75; 97 | } 98 | 99 | /*-- Arc --*/ 100 | 101 | .c3-chart-arcs-title { 102 | dominant-baseline: middle; 103 | font-size: 1.3em; 104 | } 105 | 106 | /*-- Focus --*/ 107 | 108 | .c3-target.c3-focused { 109 | opacity: 1; 110 | } 111 | .c3-target.c3-focused path.c3-line, .c3-target.c3-focused path.c3-step { 112 | stroke-width: 2px; 113 | } 114 | .c3-target.c3-defocused { 115 | opacity: 0.3 !important; 116 | } 117 | 118 | 119 | /*-- Region --*/ 120 | 121 | .c3-region { 122 | fill: steelblue; 123 | fill-opacity: .1; 124 | } 125 | 126 | /*-- Brush --*/ 127 | 128 | .c3-brush .extent { 129 | fill-opacity: .1; 130 | } 131 | 132 | /*-- Select - Drag --*/ 133 | 134 | .c3-dragarea { 135 | } 136 | 137 | /*-- Legend --*/ 138 | 139 | .c3-legend-item { 140 | font-size: 12px; 141 | } 142 | .c3-legend-item-hidden { 143 | opacity: 0.15; 144 | } 145 | 146 | .c3-legend-background { 147 | opacity: 0.75; 148 | fill: white; 149 | stroke: lightgray; 150 | stroke-width: 1 151 | } 152 | 153 | /*-- Tooltip --*/ 154 | 155 | .c3-tooltip-container { 156 | z-index: 10; 157 | } 158 | .c3-tooltip { 159 | border-collapse:collapse; 160 | border-spacing:0; 161 | background-color:#fff; 162 | empty-cells:show; 163 | width:auto; 164 | -webkit-box-shadow: 7px 7px 12px -9px rgb(119,119,119); 165 | -moz-box-shadow: 7px 7px 12px -9px rgb(119,119,119); 166 | box-shadow: 7px 7px 12px -9px rgb(119,119,119); 167 | opacity: 0.9; 168 | } 169 | .c3-tooltip tr { 170 | border:1px solid #CCC; 171 | } 172 | .c3-tooltip th { 173 | background-color: #aaa; 174 | font-size:14px; 175 | padding:2px 5px; 176 | text-align:left; 177 | color:#FFF; 178 | } 179 | .c3-tooltip td { 180 | font-size:13px; 181 | padding: 3px 6px; 182 | background-color:#fff; 183 | border-left:1px dotted #999; 184 | color: #000; 185 | } 186 | .c3-tooltip td > span { 187 | display: inline-block; 188 | width: 10px; 189 | height: 10px; 190 | margin-right: 6px; 191 | } 192 | .c3-tooltip td.value{ 193 | text-align: right; 194 | } 195 | 196 | .c3-area { 197 | stroke-width: 0; 198 | opacity: 0.2; 199 | } 200 | 201 | .c3-chart-arcs .c3-chart-arcs-background { 202 | fill: #e0e0e0; 203 | stroke: none; 204 | } 205 | .c3-chart-arcs .c3-chart-arcs-gauge-unit { 206 | fill: #000; 207 | font-size: 16px; 208 | } 209 | .c3-chart-arcs .c3-chart-arcs-gauge-max { 210 | fill: #777; 211 | } 212 | .c3-chart-arcs .c3-chart-arcs-gauge-min { 213 | fill: #777; 214 | } 215 | 216 | .c3-chart-arc .c3-gauge-value { 217 | fill: #000; 218 | /* font-size: 28px !important;*/ 219 | } 220 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/css/d3.parcoords.css: -------------------------------------------------------------------------------- 1 | .parcoords > svg, .parcoords > canvas { 2 | font: 12px sans-serif; 3 | position: absolute; 4 | } 5 | .parcoords > canvas { 6 | pointer-events: none; 7 | } 8 | 9 | .parcoords text.label { 10 | cursor: 
default; 11 | } 12 | 13 | .parcoords rect.background { 14 | fill: transparent; 15 | } 16 | .parcoords rect.background:hover { 17 | fill: rgba(120,120,120,0.2); 18 | } 19 | .parcoords .resize rect { 20 | fill: rgba(0,0,0,0.1); 21 | } 22 | .parcoords rect.extent { 23 | fill: rgba(255,255,255,0.25); 24 | stroke: rgba(0,0,0,0.6); 25 | } 26 | .parcoords .axis line, .parcoords .axis path { 27 | fill: none; 28 | stroke: #222; 29 | shape-rendering: crispEdges; 30 | } 31 | .parcoords canvas { 32 | opacity: 1; 33 | -moz-transition: opacity 0.3s; 34 | -webkit-transition: opacity 0.3s; 35 | -o-transition: opacity 0.3s; 36 | } 37 | .parcoords canvas.faded { 38 | opacity: 0.25; 39 | } 40 | .parcoords { 41 | -webkit-touch-callout: none; 42 | -webkit-user-select: none; 43 | -khtml-user-select: none; 44 | -moz-user-select: none; 45 | -ms-user-select: none; 46 | user-select: none; 47 | } 48 | 49 | .colorSelector, .colorSelector-selected { 50 | stroke: #777; 51 | stroke-width: 1px; 52 | cursor: pointer; 53 | } 54 | 55 | .colorSelector { 56 | fill: #ccc; 57 | } 58 | 59 | .colorSelector-selected { 60 | fill: url(#gradient); 61 | } 62 | 63 | .hideLabelButton { 64 | background-image:url("/ui/img/letters.png"); 65 | } 66 | 67 | .hideLabelButton.hidden { 68 | background-position:14px; 69 | } 70 | 71 | .hideLabelButton.unhidden { 72 | background-position:0px; 73 | } 74 | 75 | .hideLabelButton.unhidden:hover { 76 | background-position:14px; 77 | } 78 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/css/graph.css: -------------------------------------------------------------------------------- 1 | .node { 2 | fill: #ccc; 3 | stroke: #777; 4 | stroke-width: 1px; 5 | } 6 | 7 | .link { 8 | stroke: #777; 9 | stroke-width: 1px; 10 | } 11 | 12 | .arrowHead { 13 | fill: #555; 14 | } 15 | 16 | .triggerNodeLabelsButton { 17 | background-image:url("/ui/img/nodeLabel.png"); 18 | } 19 | 20 | .triggerNodeLabelsButton.visible { 21 | background-position:0px; 22 | } 23 | 24 | .triggerNodeLabelsButton.hidden { 25 | background-position:14px; 26 | } 27 | 28 | .triggerNodeLabelsButton.visible:hover { 29 | background-position:14px; 30 | } 31 | 32 | .triggerEdgeLabelsButton { 33 | margin-top:1px; 34 | background-image:url("/ui/img/edgeLabel.png"); 35 | } 36 | 37 | .triggerEdgeLabelsButton.visible { 38 | background-position:0px; 39 | } 40 | 41 | .triggerEdgeLabelsButton.hidden { 42 | background-position:14px; 43 | } 44 | 45 | .triggerEdgeLabelsButton.visible:hover { 46 | background-position:14px; 47 | } 48 | 49 | .triggerDirectionsButton { 50 | margin-top:1px; 51 | background-image:url("/ui/img/arrowHead.png"); 52 | } 53 | 54 | .triggerDirectionsButton.visible { 55 | background-position:0px; 56 | } 57 | 58 | .triggerDirectionsButton.hidden { 59 | background-position:14px; 60 | } 61 | 62 | .triggerDirectionsButton.visible:hover { 63 | background-position:14px; 64 | } 65 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/css/index.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin-left:0; 3 | margin-right:0; 4 | margin-bottom:0; 5 | overflow: hidden; 6 | font: 10px sans-serif; 7 | } 8 | 9 | .header { 10 | width:100%; 11 | height:18px; 12 | } 13 | 14 | #vizTitle { 15 | border:0px; 16 | margin-left:10px; 17 | margin-top:2px; 18 | width:30%; 19 | } 20 | 21 | .servable-title { 22 | font: 11px sans-serif; 23 | } 24 | 25 | .headerElement { 26 | margin-right:10px; 27 | 
float:right; 28 | height:14px; 29 | } 30 | 31 | .headerButton { 32 | margin-right:10px; 33 | margin-top:2px; 34 | float:right; 35 | height:14px; 36 | 37 | cursor:pointer; 38 | width:14px; 39 | } 40 | 41 | #servableSelector { 42 | margin-right:10px; 43 | float:right; 44 | 45 | cursor:pointer; 46 | width:200px; 47 | } 48 | 49 | #lockButton { 50 | background-image:url("/ui/img/lock.png"); 51 | margin-top:3px; 52 | } 53 | 54 | #lockButton.unlocked { 55 | background-position:14px; 56 | } 57 | 58 | #lockButton.locked { 59 | background-position:0px; 60 | } 61 | 62 | #lockButton.unlocked:hover { 63 | background-position:0px; 64 | } 65 | 66 | #content { 67 | margin-top:10px; 68 | height: 100%; 69 | width: 100%; 70 | overflow-y: scroll; 71 | } 72 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/css/keyValue.css: -------------------------------------------------------------------------------- 1 | .keyValueTable { 2 | display: inline-block; 3 | } 4 | 5 | .keyValueTable td { 6 | font: 11px sans-serif; 7 | text-align: right; 8 | padding: 2px; 9 | } 10 | 11 | .keyValueTable .value { 12 | padding-left: 10px; 13 | } 14 | 15 | .keyValueTable .key { 16 | border-right: 1px solid #ccc; 17 | padding-right:10px; 18 | } 19 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/css/matrix.css: -------------------------------------------------------------------------------- 1 | .boundButton { 2 | width: 50px; 3 | height: 14px; 4 | text-align:right; 5 | font-size: 10px; 6 | border: 1px solid #ccc; 7 | 8 | } 9 | 10 | .boundArea { 11 | float: right; 12 | margin-right: 10px; 13 | margin-top:2px; 14 | } 15 | 16 | .ylOrRdButton { 17 | background-image:url("/ui/img/YlOrRd.png"); 18 | } 19 | 20 | .pRGnButton { 21 | background-image:url("/ui/img/PRGn.png"); 22 | } 23 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/css/scatter.css: -------------------------------------------------------------------------------- 1 | #enableJitterButton { 2 | background-image:url("/ui/img/jitter.png"); 3 | } 4 | 5 | #enableJitterButton.disabled { 6 | background-position:14px; 7 | } 8 | 9 | #enableJitterButton.enabled { 10 | background-position:0px; 11 | } 12 | 13 | #enableJitterButton.disabled:hover { 14 | background-position:0px; 15 | } 16 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/css/table.css: -------------------------------------------------------------------------------- 1 | #grid, #pager { 2 | position: static; 3 | width: 100%; 4 | } 5 | 6 | #pager { 7 | height: 20px; 8 | } 9 | 10 | .slick-row:hover { 11 | font-weight: bold; 12 | color: #069; 13 | } 14 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/img/PRGn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/img/PRGn.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/img/YlOrRd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/img/YlOrRd.png 
-------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/img/arrowHead.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/img/arrowHead.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/img/edgeLabel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/img/edgeLabel.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/img/jitter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/img/jitter.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/img/letters.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/img/letters.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/img/lock.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/img/lock.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/img/nodeLabel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/img/nodeLabel.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 27 |
28 | 29 |
30 | 31 | 32 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/divgrid.min.js: -------------------------------------------------------------------------------- 1 | d3.divgrid=function(){var e=[],t=function(l){0==e.length&&(e=d3.keys(l.data()[0][0])),l.selectAll(".header").data([!0]).enter().append("div").attr("class","header");var r=l.select(".header").selectAll(".cell").data(e);r.enter().append("div").attr("class",function(e,t){return"col-"+t}).classed("cell",!0),l.selectAll(".header .cell").text(function(e){return e}),r.exit().remove();var n=l.selectAll(".row").data(function(e){return e});n.enter().append("div").attr("class","row"),n.exit().remove();var a=l.selectAll(".row").selectAll(".cell").data(function(t){return e.map(function(e){return t[e]})});return a.enter().append("div").attr("class",function(e,t){return"col-"+t}).classed("cell",!0),a.exit().remove(),l.selectAll(".cell").text(function(e){return e}),t};return t.columns=function(t){return arguments.length?(e=t,this):e},t}; 2 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/jquery.event.drag-2.2.min.js: -------------------------------------------------------------------------------- 1 | !function(t){t.fn.drag=function(e,a,n){var r="string"==typeof e?e:"",o=t.isFunction(e)?e:t.isFunction(a)?a:null;return 0!==r.indexOf("drag")&&(r="drag"+r),n=(e==o?a:n)||{},o?this.bind(r,n,o):this.trigger(r)};var e=t.event,a=e.special,n=a.drag={defaults:{which:1,distance:0,not:":input",handle:null,relative:!1,drop:!0,click:!1},datakey:"dragdata",noBubble:!0,add:function(e){var a=t.data(this,n.datakey),r=e.data||{};a.related+=1,t.each(n.defaults,function(t){void 0!==r[t]&&(a[t]=r[t])})},remove:function(){t.data(this,n.datakey).related-=1},setup:function(){if(!t.data(this,n.datakey)){var a=t.extend({related:0},n.defaults);t.data(this,n.datakey,a),e.add(this,"touchstart mousedown",n.init,a),this.attachEvent&&this.attachEvent("ondragstart",n.dontstart)}},teardown:function(){var a=t.data(this,n.datakey)||{};a.related||(t.removeData(this,n.datakey),e.remove(this,"touchstart mousedown",n.init),n.textselect(!0),this.detachEvent&&this.detachEvent("ondragstart",n.dontstart))},init:function(r){if(!n.touched){var o,i=r.data;if(!(0!=r.which&&i.which>0&&r.which!=i.which||t(r.target).is(i.not)||i.handle&&!t(r.target).closest(i.handle,r.currentTarget).length||(n.touched="touchstart"==r.type?this:null,i.propagates=1,i.mousedown=this,i.interactions=[n.interaction(this,i)],i.target=r.target,i.pageX=r.pageX,i.pageY=r.pageY,i.dragging=null,o=n.hijack(r,"draginit",i),!i.propagates)))return o=n.flatten(o),o&&o.length&&(i.interactions=[],t.each(o,function(){i.interactions.push(n.interaction(this,i))})),i.propagates=i.interactions.length,i.drop!==!1&&a.drop&&a.drop.handler(r,i),n.textselect(!1),n.touched?e.add(n.touched,"touchmove touchend",n.handler,i):e.add(document,"mousemove mouseup",n.handler,i),!n.touched||i.live?!1:void 0}},interaction:function(e,a){var r=t(e)[a.relative?"position":"offset"]()||{top:0,left:0};return{drag:e,callback:new n.callback,droppable:[],offset:r}},handler:function(r){var o=r.data;switch(r.type){case!o.dragging&&"touchmove":r.preventDefault();case!o.dragging&&"mousemove":if(Math.pow(r.pageX-o.pageX,2)+Math.pow(r.pageY-o.pageY,2)0?void t.removeData(this,"suppress."+e.type):r.apply(this,arguments)};var o=e.fixHooks.touchstart=e.fixHooks.touchmove=e.fixHooks.touchend=e.fixHooks.touchcancel={props:"clientX clientY 
pageX pageY screenX screenY".split(" "),filter:function(e,a){if(a){var n=a.touches&&a.touches[0]||a.changedTouches&&a.changedTouches[0]||null;n&&t.each(o.props,function(t,a){e[a]=n[a]})}return e}};a.draginit=a.dragstart=a.dragend=n}(jQuery); 2 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/dir: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/dir -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/examples.css: -------------------------------------------------------------------------------- 1 | @import url('slick-default-theme.css'); 2 | 3 | ul { 4 | margin-left: 0; 5 | padding: 0; 6 | cursor: default; 7 | } 8 | 9 | li { 10 | background: url("../images/arrow_right_spearmint.png") no-repeat center left; 11 | padding: 0 0 0 14px; 12 | 13 | list-style: none; 14 | margin: 0; 15 | } 16 | 17 | #grid { 18 | background: white; 19 | outline: 0; 20 | } 21 | 22 | .grid-header { 23 | border: 1px solid gray; 24 | border-bottom: 0; 25 | border-top: 0; 26 | color: black; 27 | height: 24px; 28 | line-height: 24px; 29 | } 30 | 31 | .grid-header label { 32 | display: inline-block; 33 | font-weight: bold; 34 | margin: auto auto auto 6px; 35 | } 36 | 37 | .grid-header .ui-icon { 38 | margin: 4px 4px auto 6px; 39 | background-color: transparent; 40 | border-color: transparent; 41 | } 42 | 43 | .grid-header .ui-icon.ui-state-hover { 44 | background-color: white; 45 | } 46 | 47 | .grid-header #txtSearch { 48 | margin: 0 4px 0 4px; 49 | padding: 2px 2px; 50 | -moz-border-radius: 2px; 51 | -webkit-border-radius: 2px; 52 | border: 1px solid silver; 53 | } 54 | 55 | .options-panel { 56 | -moz-border-radius: 6px; 57 | -webkit-border-radius: 6px; 58 | border: 1px solid silver; 59 | background: #f0f0f0; 60 | padding: 4px; 61 | margin-bottom: 20px; 62 | width: 320px; 63 | position: absolute; 64 | top: 0px; 65 | left: 650px; 66 | } 67 | 68 | /* Individual cell styles */ 69 | .slick-cell.task-name { 70 | font-weight: bold; 71 | text-align: right; 72 | } 73 | 74 | .slick-cell.task-percent { 75 | text-align: right; 76 | } 77 | 78 | .slick-cell.cell-move-handle { 79 | font-weight: bold; 80 | text-align: right; 81 | border-right: solid gray; 82 | 83 | background: #efefef; 84 | cursor: move; 85 | } 86 | 87 | .cell-move-handle:hover { 88 | background: #b6b9bd; 89 | } 90 | 91 | .slick-row.selected .cell-move-handle { 92 | background: #D5DC8D; 93 | } 94 | 95 | .slick-row .cell-actions { 96 | text-align: left; 97 | } 98 | 99 | .slick-row.complete { 100 | background-color: #DFD; 101 | color: #555; 102 | } 103 | 104 | .percent-complete-bar { 105 | display: inline-block; 106 | height: 6px; 107 | -moz-border-radius: 3px; 108 | -webkit-border-radius: 3px; 109 | } 110 | 111 | /* Slick.Editors.Text, Slick.Editors.Date */ 112 | input.editor-text { 113 | width: 100%; 114 | height: 100%; 115 | border: 0; 116 | margin: 0; 117 | background: transparent; 118 | outline: 0; 119 | padding: 0; 120 | 121 | } 122 | 123 | .ui-datepicker-trigger { 124 | margin-top: 2px; 125 | padding: 0; 126 | vertical-align: top; 127 | } 128 | 129 | /* Slick.Editors.PercentComplete */ 130 | input.editor-percentcomplete { 131 | width: 100%; 132 | height: 100%; 133 | border: 0; 134 | margin: 0; 135 | background: transparent; 136 | outline: 0; 137 
| padding: 0; 138 | 139 | float: left; 140 | } 141 | 142 | .editor-percentcomplete-picker { 143 | position: relative; 144 | display: inline-block; 145 | width: 16px; 146 | height: 100%; 147 | overflow: visible; 148 | z-index: 1000; 149 | float: right; 150 | } 151 | 152 | .editor-percentcomplete-helper { 153 | border: 0 solid gray; 154 | position: absolute; 155 | top: -2px; 156 | left: -9px; 157 | padding-left: 9px; 158 | 159 | width: 120px; 160 | height: 140px; 161 | display: none; 162 | overflow: visible; 163 | } 164 | 165 | .editor-percentcomplete-wrapper { 166 | background: beige; 167 | padding: 20px 8px; 168 | 169 | width: 100%; 170 | height: 98px; 171 | border: 1px solid gray; 172 | border-left: 0; 173 | } 174 | 175 | .editor-percentcomplete-buttons { 176 | float: right; 177 | } 178 | 179 | .editor-percentcomplete-buttons button { 180 | width: 80px; 181 | } 182 | 183 | .editor-percentcomplete-slider { 184 | float: left; 185 | } 186 | 187 | .editor-percentcomplete-picker:hover .editor-percentcomplete-helper { 188 | display: block; 189 | } 190 | 191 | .editor-percentcomplete-helper:hover { 192 | display: block; 193 | } 194 | 195 | /* Slick.Editors.YesNoSelect */ 196 | select.editor-yesno { 197 | width: 100%; 198 | margin: 0; 199 | vertical-align: middle; 200 | } 201 | 202 | /* Slick.Editors.Checkbox */ 203 | input.editor-checkbox { 204 | margin: 0; 205 | height: 100%; 206 | padding: 0; 207 | border: 0; 208 | } 209 | 210 | 211 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/actions.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/actions.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/ajax-loader-small.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/ajax-loader-small.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/arrow_redo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/arrow_redo.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/arrow_right_peppermint.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/arrow_right_peppermint.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/arrow_right_spearmint.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/arrow_right_spearmint.png 
-------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/arrow_undo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/arrow_undo.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/bullet_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/bullet_blue.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/bullet_star.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/bullet_star.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/bullet_toggle_minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/bullet_toggle_minus.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/bullet_toggle_plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/bullet_toggle_plus.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/calendar.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/calendar.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/collapse.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/collapse.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/comment_yellow.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/comment_yellow.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/down.gif: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/down.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/drag-handle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/drag-handle.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/editor-helper-bg.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/editor-helper-bg.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/expand.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/expand.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/header-bg.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/header-bg.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/header-columns-bg.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/header-columns-bg.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/header-columns-over-bg.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/header-columns-over-bg.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/help.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/help.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/info.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/info.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/listview.gif: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/listview.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/pencil.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/pencil.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/row-over-bg.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/row-over-bg.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/sort-asc.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/sort-asc.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/sort-asc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/sort-asc.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/sort-desc.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/sort-desc.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/sort-desc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/sort-desc.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/stripes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/stripes.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/tag_red.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/tag_red.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/tick.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/tick.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/ui-bg_glass_75_dadada_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/ui-bg_glass_75_dadada_1x400.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/ui-bg_glass_75_e6e6e6_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/ui-bg_glass_75_e6e6e6_1x400.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/ui-icons_888888_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/ui-icons_888888_256x240.png -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/user_identity.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/user_identity.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/images/user_identity_plus.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FRosner/spawncamping-dds/bdf84f5dd62f1b24573094c6dff8818957656dbd/web-ui/src/main/resources/ui/lib/slickgrid/images/user_identity_plus.gif -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/jquery.event.drag-2.0.min.js: -------------------------------------------------------------------------------- 1 | /*! 
2 | * jquery.event.drag - v 2.0.0 3 | * Copyright (c) 2010 Three Dub Media - http://threedubmedia.com 4 | * Open Source MIT License - http://threedubmedia.com/code/license 5 | */ 6 | ;(function(f){f.fn.drag=function(b,a,d){var e=typeof b=="string"?b:"",k=f.isFunction(b)?b:f.isFunction(a)?a:null;if(e.indexOf("drag")!==0)e="drag"+e;d=(b==k?a:d)||{};return k?this.bind(e,d,k):this.trigger(e)};var i=f.event,h=i.special,c=h.drag={defaults:{which:1,distance:0,not:":input",handle:null,relative:false,drop:true,click:false},datakey:"dragdata",livekey:"livedrag",add:function(b){var a=f.data(this,c.datakey),d=b.data||{};a.related+=1;if(!a.live&&b.selector){a.live=true;i.add(this,"draginit."+ c.livekey,c.delegate)}f.each(c.defaults,function(e){if(d[e]!==undefined)a[e]=d[e]})},remove:function(){f.data(this,c.datakey).related-=1},setup:function(){if(!f.data(this,c.datakey)){var b=f.extend({related:0},c.defaults);f.data(this,c.datakey,b);i.add(this,"mousedown",c.init,b);this.attachEvent&&this.attachEvent("ondragstart",c.dontstart)}},teardown:function(){if(!f.data(this,c.datakey).related){f.removeData(this,c.datakey);i.remove(this,"mousedown",c.init);i.remove(this,"draginit",c.delegate);c.textselect(true); this.detachEvent&&this.detachEvent("ondragstart",c.dontstart)}},init:function(b){var a=b.data,d;if(!(a.which>0&&b.which!=a.which))if(!f(b.target).is(a.not))if(!(a.handle&&!f(b.target).closest(a.handle,b.currentTarget).length)){a.propagates=1;a.interactions=[c.interaction(this,a)];a.target=b.target;a.pageX=b.pageX;a.pageY=b.pageY;a.dragging=null;d=c.hijack(b,"draginit",a);if(a.propagates){if((d=c.flatten(d))&&d.length){a.interactions=[];f.each(d,function(){a.interactions.push(c.interaction(this,a))})}a.propagates= a.interactions.length;a.drop!==false&&h.drop&&h.drop.handler(b,a);c.textselect(false);i.add(document,"mousemove mouseup",c.handler,a);return false}}},interaction:function(b,a){return{drag:b,callback:new c.callback,droppable:[],offset:f(b)[a.relative?"position":"offset"]()||{top:0,left:0}}},handler:function(b){var a=b.data;switch(b.type){case !a.dragging&&"mousemove":if(Math.pow(b.pageX-a.pageX,2)+Math.pow(b.pageY-a.pageY,2)=0;n--)t[n]===i&&t.splice(n,1)},this.notify=function(n,o,e){o=o||new i,e=e||this;for(var r,s=0;s=this.fromRow&&t<=this.toRow&&i>=this.fromCell&&i<=this.toCell},this.toString=function(){return this.isSingleCell()?"("+this.fromRow+":"+this.fromCell+")":"("+this.fromRow+":"+this.fromCell+" - "+this.toRow+":"+this.toCell+")"}}function r(){this.__nonDataRow=!0}function s(){this.__group=!0,this.__updated=!1,this.count=0,this.value=null,this.title=null,this.collapsed=!1,this.totals=null}function l(){this.__groupTotals=!0,this.group=null}function u(){var t=null;this.isActive=function(i){return i?t===i:null!==t},this.activate=function(i){if(i!==t){if(null!==t)throw"SlickGrid.EditorLock.activate: an editController is still active, can't activate another editController";if(!i.commitCurrentEdit)throw"SlickGrid.EditorLock.activate: editController must implement .commitCurrentEdit()";if(!i.cancelCurrentEdit)throw"SlickGrid.EditorLock.activate: editController must implement .cancelCurrentEdit()";t=i}},this.deactivate=function(i){if(t!==i)throw"SlickGrid.EditorLock.deactivate: specified editController is not the currently active one";t=null},this.commitCurrentEdit=function(){return t?t.commitCurrentEdit():!0},this.cancelCurrentEdit=function(){return 
t?t.cancelCurrentEdit():!0}}t.extend(!0,window,{Slick:{Event:n,EventData:i,EventHandler:o,Range:e,NonDataRow:r,Group:s,GroupTotals:l,EditorLock:u,GlobalEditorLock:new u}}),s.prototype=new r,s.prototype.equals=function(t){return this.value===t.value&&this.count===t.count&&this.collapsed===t.collapsed},l.prototype=new r}(jQuery); 2 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/slick.dataview.min.js: -------------------------------------------------------------------------------- 1 | !function(t){function e(e){function n(){ve=!0}function i(){ve=!1,J()}function r(t){$e=t}function o(t){te=t}function a(t){t=t||0;for(var e,n=t,i=ge.length;i>n;n++){if(e=ge[n][fe],void 0===e)throw"Each data element must implement a unique 'id' property";_e[e]=n}}function l(){for(var t,e=0,n=ge.length;n>e;e++)if(t=ge[e][fe],void 0===t||_e[t]!==e)throw"Each data element must implement a unique 'id' property"}function u(){return ge}function s(t,e){void 0!==e&&(fe=e),ge=Ie=t,_e={},a(),l(),J()}function c(t){void 0!=t.pageSize&&(Ce=t.pageSize,Fe=Ce?Math.min(Fe,Math.max(0,Math.ceil(be/Ce)-1)):0),void 0!=t.pageNum&&(Fe=Math.min(t.pageNum,Math.max(0,Math.ceil(be/Ce)-1))),Pe.notify(f(),null,se),J()}function f(){var t=Ce?Math.max(1,Math.ceil(be/Ce)):1;return{pageSize:Ce,pageNum:Fe,totalRows:be,totalPages:t}}function g(t,e){we=e,Z=t,Y=null,e===!1&&ge.reverse(),ge.sort(t),e===!1&&ge.reverse(),_e={},a(),J()}function h(t,e){we=e,Y=t,Z=null;var n=Object.prototype.toString;Object.prototype.toString="function"==typeof t?t:function(){return this[t]},e===!1&&ge.reverse(),ge.sort(),Object.prototype.toString=n,e===!1&&ge.reverse(),_e={},a(),J()}function _(){Z?g(Z,we):Y&&h(Y,we)}function m(t){de=t,e.inlineFilters&&(ee=z(),ne=U()),J()}function d(t,n,i){e.groupItemMetadataProvider||(e.groupItemMetadataProvider=new Slick.Data.GroupItemMetadataProvider),ie=t,re="function"==typeof ie,oe=n,ae=i,Se={},Re=[],J()}function p(t,e){le=t,Me=void 0!==e?e:Me,ue=[];for(var n=le.length;n--;)ue[n]=q(le[n]);J()}function v(t){return ge[t]}function w(t){return _e[t]}function $(){if(!me){me={};for(var t=0,e=he.length;e>t;t++)me[he[t][fe]]=t}}function y(t){return $(),me[t]}function I(t){return ge[_e[t]]}function x(t){var e=[];$();for(var n=0;na;a++)i=t[a],n=re?ie(i):i[ie],n=n||0,e=o[n],e||(e=new Slick.Group,e.count=0,e.value=n,e.rows=[],r[r.length]=e,o[n]=e),e.rows[e.count++]=i;return r}function E(t){if(!t.collapsed||Me){for(var e,n=new Slick.GroupTotals,i=le.length;i--;)e=le[i],e.init(),ue[i].call(e,t.rows),e.storeResult(n);n.group=t,t.totals=n}}function A(t){for(var e=t.length;e--;)E(t[e])}function B(t){for(var e,n=t.length;n--;)e=t[n],e.collapsed=e.value in Se,e.title=oe?oe(e):e.value}function j(t){for(var e,n=[],i=0,r=0,o=t.length;o>r;r++){if(e=t[r],n[i++]=e,!e.collapsed)for(var a=0,l=e.rows.length;l>a;a++)n[i++]=e.rows[a];!e.totals||e.collapsed&&!Me||(n[i++]=e.totals)}return n}function O(t){var e=/^function[^(]*\(([^)]*)\)\s*{([\s\S]*)}$/,n=t.toString().match(e);return{params:n[1].split(","),body:n[2]}}function q(t){var e=O(t.accumulate),n=new Function("_items","for (var "+e.params[0]+", _i=0, _il=_items.length; _i<_il; _i++) {"+e.params[0]+" = _items[_i]; "+e.body+"}");return n.displayName=n.name="compiledAccumulatorLoop",n}function z(){var t=O(de),e=t.body.replace(/return false[;}]/gi,"{ continue _coreloop; }").replace(/return true[;}]/gi,"{ _retval[_idx++] = $item$; continue _coreloop; }").replace(/return ([^;}]+?);/gi,"{ if ($1) { _retval[_idx++] = $item$; }; continue 
_coreloop; }"),n=["var _retval = [], _idx = 0; ","var $item$, $args$ = _args; ","_coreloop: ","for (var _i = 0, _il = _items.length; _i < _il; _i++) { ","$item$ = _items[_i]; ","$filter$; ","} ","return _retval; "].join("");n=n.replace(/\$filter\$/gi,e),n=n.replace(/\$item\$/gi,t.params[0]),n=n.replace(/\$args\$/gi,t.params[1]);var i=new Function("_items,_args",n);return i.displayName=i.name="compiledFilter",i}function U(){var t=O(de),e=t.body.replace(/return false[;}]/gi,"{ continue _coreloop; }").replace(/return true[;}]/gi,"{ _cache[_i] = true;_retval[_idx++] = $item$; continue _coreloop; }").replace(/return ([^;}]+?);/gi,"{ if ((_cache[_i] = $1)) { _retval[_idx++] = $item$; }; continue _coreloop; }"),n=["var _retval = [], _idx = 0; ","var $item$, $args$ = _args; ","_coreloop: ","for (var _i = 0, _il = _items.length; _i < _il; _i++) { ","$item$ = _items[_i]; ","if (_cache[_i]) { ","_retval[_idx++] = $item$; ","continue _coreloop; ","} ","$filter$; ","} ","return _retval; "].join("");n=n.replace(/\$filter\$/gi,e),n=n.replace(/\$item\$/gi,t.params[0]),n=n.replace(/\$args\$/gi,t.params[1]);var i=new Function("_items,_args,_cache",n);return i.displayName=i.name="compiledFilterWithCaching",i}function L(t,e){for(var n=[],i=0,r=0,o=t.length;o>r;r++)de(t[r],e)&&(n[i++]=t[r]);return n}function H(t,e,n){for(var i,r=[],o=0,a=0,l=t.length;l>a;a++)i=t[a],n[a]?r[o++]=i:de(i,e)&&(r[o++]=i,n[a]=!0);return r}function Q(t){if(de){var n=e.inlineFilters?ee:L,i=e.inlineFilters?ne:H;$e.isFilterNarrowing?Ie=n(Ie,te):$e.isFilterExpanding?Ie=i(t,te,xe):$e.isFilterUnchanged||(Ie=n(t,te))}else Ie=Ce?t:t.concat();var r;return Ce?(Ie.lengthu;u++)u>=s?o[o.length]=u:(n=e[u],i=t[u],(ie&&(r=n.__nonDataRow||i.__nonDataRow)&&n.__group!==i.__group||n.__updated||n.__group&&!n.equals(i)||le&&r&&(n.__groupTotals||i.__groupTotals)||n[fe]!=i[fe]||pe&&pe[n[fe]])&&(o[o.length]=u));return o}function W(t){me=null,($e.isFilterNarrowing!=ye.isFilterNarrowing||$e.isFilterExpanding!=ye.isFilterExpanding)&&(xe=[]);var e=Q(t);be=e.totalRows;var n=e.rows;Re=[],null!=ie&&(Re=k(n),Re.length&&(B(Re),le&&A(Re),Re.sort(ae),n=j(Re)));var i=V(he,n);return he=n,i}function J(){if(!ve){var t=he.length,e=be,n=W(ge,de);Ce&&Fe*Ce>be&&(Fe=Math.max(0,Math.ceil(be/Ce)-1),n=W(ge,de)),pe=null,ye=$e,$e={},e!=be&&Pe.notify(f(),null,se),t!=he.length&&Ne.notify({previous:t,current:he.length},null,se),n.length>0&&Te.notify({rows:n},null,se)}}function K(t,e){var n,i=this,r=i.mapRowsToIds(t.getSelectedRows());t.onSelectedRowsChanged.subscribe(function(){n||(r=i.mapRowsToIds(t.getSelectedRows()))}),this.onRowsChanged.subscribe(function(){if(r.length>0){n=!0;var o=i.mapIdsToRows(r);e||(r=i.mapRowsToIds(o)),t.setSelectedRows(o),n=!1}})}function X(t,e){function n(t){i={};for(var e in t){var n=he[e][fe];i[n]=t[e]}}var i,r;n(t.getCellCssStyles(e)),t.onCellCssStylesChanged.subscribe(function(t,i){r||e==i.key&&i.hash&&n(i.hash)}),this.onRowsChanged.subscribe(function(){if(i){r=!0,$();var n={};for(var o in i){var a=me[o];void 0!=a&&(n[a]=i[o])}t.setCellCssStyles(e,n),r=!1}})}var Y,Z,te,ee,ne,ie,re,oe,ae,le,ue,se=this,ce={groupItemMetadataProvider:null,inlineFilters:!1},fe="id",ge=[],he=[],_e={},me=null,de=null,pe=null,ve=!1,we=!0,$e={},ye={},Ie=[],xe=[],Re=[],Se={},Me=!1,Ce=0,Fe=0,be=0,Ne=new Slick.Event,Te=new Slick.Event,Pe=new Slick.Event;return 
e=t.extend(!0,{},ce,e),{beginUpdate:n,endUpdate:i,setPagingOptions:c,getPagingInfo:f,getItems:u,setItems:s,setFilter:m,sort:g,fastSort:h,reSort:_,groupBy:d,setAggregators:p,collapseGroup:P,expandGroup:D,getGroups:G,getIdxById:w,getRowById:y,getItemById:I,getItemByIdx:v,mapRowsToIds:R,mapIdsToRows:x,setRefreshHints:r,setFilterArgs:o,refresh:J,updateItem:S,insertItem:M,addItem:C,deleteItem:F,syncGridSelection:K,syncGridCellCssStyles:X,getLength:b,getItem:N,getItemMetadata:T,onRowCountChanged:Ne,onRowsChanged:Te,onPagingInfoChanged:Pe}}function n(t){this.field_=t,this.init=function(){this.count_=0,this.nonNullCount_=0,this.sum_=0},this.accumulate=function(t){var e=t[this.field_];this.count_++,null!=e&&""!=e&&0/0!=e&&(this.nonNullCount_++,this.sum_+=parseFloat(e))},this.storeResult=function(t){t.avg||(t.avg={}),0!=this.nonNullCount_&&(t.avg[this.field_]=this.sum_/this.nonNullCount_)}}function i(t){this.field_=t,this.init=function(){this.min_=null},this.accumulate=function(t){var e=t[this.field_];null!=e&&""!=e&&0/0!=e&&(null==this.min_||ethis.max_)&&(this.max_=e)},this.storeResult=function(t){t.max||(t.max={}),t.max[this.field_]=this.max_}}function o(t){this.field_=t,this.init=function(){this.sum_=null},this.accumulate=function(t){var e=t[this.field_];null!=e&&""!=e&&0/0!=e&&(this.sum_+=parseFloat(e))},this.storeResult=function(t){t.sum||(t.sum={}),t.sum[this.field_]=this.sum_}}t.extend(!0,window,{Slick:{Data:{DataView:e,Aggregators:{Avg:n,Min:i,Max:r,Sum:o}}}})}(jQuery); 2 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/slick.grid.css: -------------------------------------------------------------------------------- 1 | /* 2 | IMPORTANT: 3 | In order to preserve the uniform grid appearance, all cell styles need to have padding, margin and border sizes. 4 | No built-in (selected, editable, highlight, flashing, invalid, loading, :focus) or user-specified CSS 5 | classes should alter those! 
6 | */ 7 | 8 | .slick-header.ui-state-default, .slick-headerrow.ui-state-default { 9 | width: 100%; 10 | overflow: hidden; 11 | border-left: 0px; 12 | } 13 | 14 | .slick-header-columns, .slick-headerrow-columns { 15 | position: relative; 16 | white-space: nowrap; 17 | cursor: default; 18 | overflow: hidden; 19 | } 20 | 21 | .slick-header-column.ui-state-default { 22 | position: relative; 23 | display: inline-block; 24 | overflow: hidden; 25 | text-overflow: ellipsis; 26 | height: 16px; 27 | line-height: 16px; 28 | margin: 0; 29 | padding: 4px; 30 | border-right: 1px solid silver; 31 | border-left: 0px; 32 | border-top: 0px; 33 | border-bottom: 0px; 34 | float: left; 35 | } 36 | 37 | .slick-headerrow-column.ui-state-default { 38 | padding: 4px; 39 | } 40 | 41 | .slick-header-column-sorted { 42 | font-style: italic; 43 | } 44 | 45 | .slick-sort-indicator { 46 | display: inline-block; 47 | width: 8px; 48 | height: 5px; 49 | margin-left: 4px; 50 | } 51 | 52 | .slick-sort-indicator-desc { 53 | background: url(images/sort-desc.gif); 54 | } 55 | 56 | .slick-sort-indicator-asc { 57 | background: url(images/sort-asc.gif); 58 | } 59 | 60 | .slick-resizable-handle { 61 | position: absolute; 62 | font-size: 0.1px; 63 | display: block; 64 | cursor: col-resize; 65 | width: 4px; 66 | right: 0px; 67 | top: 0; 68 | height: 100%; 69 | } 70 | 71 | .slick-sortable-placeholder { 72 | background: silver; 73 | } 74 | 75 | .grid-canvas { 76 | position: relative; 77 | outline: 0; 78 | } 79 | 80 | .slick-row.ui-widget-content, .slick-row.ui-state-active { 81 | position: absolute; 82 | border: 0px; 83 | width: 100%; 84 | } 85 | 86 | .slick-cell, .slick-headerrow-column { 87 | position: absolute; 88 | 89 | border: 1px solid transparent; 90 | border-right: 1px dotted silver; 91 | border-bottom-color: silver; 92 | 93 | overflow: hidden; 94 | text-overflow: ellipsis; 95 | white-space: nowrap; 96 | vertical-align: middle; 97 | z-index: 1; 98 | padding: 1px 2px 2px 1px; 99 | margin: 0; 100 | 101 | white-space: nowrap; 102 | 103 | cursor: default; 104 | } 105 | 106 | .slick-group { 107 | } 108 | 109 | .slick-group-toggle { 110 | display: inline-block; 111 | } 112 | 113 | .slick-cell.highlighted { 114 | background: lightskyblue; 115 | background: rgba(0, 0, 255, 0.2); 116 | -webkit-transition: all 0.5s; 117 | -moz-transition: all 0.5s; 118 | transition: all 0.5s; 119 | } 120 | 121 | .slick-cell.flashing { 122 | border: 1px solid red !important; 123 | } 124 | 125 | .slick-cell.editable { 126 | z-index: 11; 127 | overflow: visible; 128 | background: white; 129 | border-color: black; 130 | border-style: solid; 131 | } 132 | 133 | .slick-cell:focus { 134 | outline: none; 135 | } 136 | 137 | .slick-reorder-proxy { 138 | display: inline-block; 139 | background: blue; 140 | opacity: 0.15; 141 | filter: alpha(opacity = 15); 142 | cursor: move; 143 | } 144 | 145 | .slick-reorder-guide { 146 | display: inline-block; 147 | height: 2px; 148 | background: blue; 149 | opacity: 0.7; 150 | filter: alpha(opacity = 70); 151 | } 152 | 153 | .slick-selection { 154 | z-index: 10; 155 | position: absolute; 156 | border: 2px dashed black; 157 | } -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/slick.pager.css: -------------------------------------------------------------------------------- 1 | .slick-pager { 2 | width: 100%; 3 | height: 26px; 4 | border: 1px solid gray; 5 | border-top: 0; 6 | vertical-align: middle; 7 | } 8 | 9 | .slick-pager .slick-pager-status { 10 | 
display: inline-block; 11 | padding: 6px; 12 | } 13 | 14 | .slick-pager .ui-icon-container { 15 | display: inline-block; 16 | margin: 2px; 17 | border-color: gray; 18 | } 19 | 20 | .slick-pager .slick-pager-nav { 21 | display: inline-block; 22 | float: left; 23 | padding: 2px; 24 | } 25 | 26 | .slick-pager .slick-pager-settings { 27 | display: block; 28 | float: right; 29 | padding: 2px; 30 | } 31 | 32 | .slick-pager .slick-pager-settings * { 33 | vertical-align: middle; 34 | } 35 | 36 | .slick-pager .slick-pager-settings a { 37 | padding: 2px; 38 | text-decoration: underline; 39 | cursor: pointer; 40 | } 41 | -------------------------------------------------------------------------------- /web-ui/src/main/resources/ui/lib/slickgrid/slick.pager.min.js: -------------------------------------------------------------------------------- 1 | !function(a){function n(n,e,i){function t(){n.onPagingInfoChanged.subscribe(function(a,n){u(n)}),l(),u(n.getPagingInfo())}function s(){var a=!Slick.GlobalEditorLock.commitCurrentEdit(),e=n.getPagingInfo(),i=e.totalPages-1;return{canGotoFirst:!a&&0!=e.pageSize&&e.pageNum>0,canGotoLast:!a&&0!=e.pageSize&&e.pageNum!=i,canGotoPrev:!a&&0!=e.pageSize&&e.pageNum>0,canGotoNext:!a&&0!=e.pageSize&&e.pageNum").appendTo(i),t=a("").appendTo(i);r=a("").appendTo(i),t.append(""),t.find("a[data]").click(function(n){var i=a(n.target).attr("data");if(void 0!=i)if(-1==i){var t=e.getViewport();o(t.bottom-t.top)}else o(parseInt(i))});var s="";a(s+"ui-icon-lightbulb"+l).click(function(){a(".slick-pager-settings-expanded").toggle()}).appendTo(t),a(s+"ui-icon-seek-first"+l).click(c).appendTo(n),a(s+"ui-icon-seek-prev"+l).click(g).appendTo(n),a(s+"ui-icon-seek-next"+l).click(d).appendTo(n),a(s+"ui-icon-seek-end"+l).click(p).appendTo(n),i.find(".ui-icon-container").hover(function(){a(this).toggleClass("ui-state-hover")}),i.children().wrapAll("
")}function u(a){var n=s();i.find(".slick-pager-nav span").removeClass("ui-state-disabled"),n.canGotoFirst||i.find(".ui-icon-seek-first").addClass("ui-state-disabled"),n.canGotoLast||i.find(".ui-icon-seek-end").addClass("ui-state-disabled"),n.canGotoNext||i.find(".ui-icon-seek-next").addClass("ui-state-disabled"),n.canGotoPrev||i.find(".ui-icon-seek-prev").addClass("ui-state-disabled"),r.text(0==a.pageSize?"Showing all "+a.totalRows+" rows":"Showing page "+(a.pageNum+1)+" of "+a.totalPages)}var r;t()}a.extend(!0,window,{Slick:{Controls:{Pager:n}}})}(jQuery); 2 | -------------------------------------------------------------------------------- /web-ui/src/main/scala/de/frosner/dds/webui/server/SprayServer.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.webui.server 2 | 3 | import java.awt.Desktop 4 | import java.net.URI 5 | import java.util.Date 6 | 7 | import akka.actor.ActorSystem 8 | import com.typesafe.config.ConfigFactory 9 | import de.frosner.dds.core.{DDS, Server} 10 | import de.frosner.dds.servables.Servable 11 | import de.frosner.dds.webui.servables.ServableJsonProtocol 12 | import de.frosner.replhelper.Help 13 | import spray.json._ 14 | import spray.routing.authentication._ 15 | import spray.routing.directives.AuthMagnet 16 | import spray.routing.{Route, SimpleRoutingApp} 17 | 18 | import scala.concurrent.ExecutionContext.Implicits.global 19 | import scala.concurrent.Future 20 | import scala.concurrent.duration._ 21 | import scala.util.Try 22 | 23 | /** 24 | * [[de.frosner.dds.core.Server]] based on spray-can HTTP server. If multiple servers shall be used, they need to have different names. 25 | * 26 | * @param name of the server 27 | * @param launchBrowser indicating whether a browser window pointing to the web UI should be launched 28 | * when the server is started 29 | * @param interface to bind the server to 30 | * @param port to bind the server to 31 | */ 32 | case class SprayServer(name: String, 33 | launchBrowser: Boolean = true, 34 | interface: String, 35 | port: Int, 36 | password: Option[String] = Option.empty, 37 | enableHistory: Boolean = true) 38 | extends SimpleRoutingApp with Server { 39 | 40 | private var servables: Seq[(Servable, Date)] = Vector.empty 41 | 42 | private implicit val system = ActorSystem(name + "-system", { 43 | val conf = ConfigFactory.parseResources("dds.typesafe-conf") 44 | conf.resolve() 45 | }) 46 | 47 | private val actorName = "chart-server-" + name + "-actor" 48 | 49 | private def withAuthentication(innerRoute: Route) = 50 | if (password.isDefined) { 51 | authenticate(AuthMagnet.fromContextAuthenticator( 52 | new BasicHttpAuthenticator( 53 | "DDS Web UI has been password protected", 54 | (userPass: Option[UserPass]) => Future( 55 | if (userPass.exists(_.pass == password.get)) Some(true) 56 | else None 57 | ) 58 | ) 59 | ))(authenticated => innerRoute) 60 | } else { 61 | innerRoute 62 | } 63 | 64 | def init(): Unit = { 65 | val tryToConnectToSocket = Try(scalaj.http.Http(s"http://$interface:$port").asString) 66 | if (tryToConnectToSocket.isSuccess) { 67 | println(s"""$interface:$port is already in use. Server started already? Another server blocking the socket?""") 68 | println() 69 | DDS.help("start") 70 | } else { 71 | println(s"""Starting server on $interface:$port""") 72 | if (password.isDefined) println(s"""Basic HTTP authentication enabled (password = ${password.get}). """ + 73 | s"""Password will be transmitted unencrypted. 
Do not reuse it somewhere else!""") 74 | val server = startServer(interface, port, actorName) { 75 | path("") { 76 | withAuthentication { 77 | getFromResource("ui/index.html") 78 | } 79 | } ~ 80 | path("servables") { 81 | withAuthentication { 82 | get { 83 | complete { 84 | val servableObjects = servables.zipWithIndex.map{ 85 | case ((servable, date), index) => JsObject(Map( 86 | ("id", JsNumber(index)), 87 | ("title", JsString(servable.title)), // TODO I changed this from type to title (check JS) 88 | ("time", JsNumber(date.getTime)) 89 | )) 90 | } 91 | JsArray(servableObjects.toVector).compactPrint 92 | } 93 | } 94 | } 95 | } ~ 96 | path("servables" / IntNumber) { id => 97 | withAuthentication { 98 | get { 99 | if (id < servables.size) 100 | complete { 101 | val (servable, date) = servables(id) 102 | SprayServer.wrapIdAndServable(id, servable) 103 | } 104 | else 105 | failWith(new IllegalArgumentException( 106 | s"There is no servable with id $id. Id needs to be within (0, ${servables.size})" 107 | )) 108 | } 109 | } 110 | } ~ 111 | (path("servables" / "latest") & parameter('current ?)) { currentServableIdParameter => 112 | withAuthentication { 113 | get { 114 | complete { 115 | val currentServableIndex = currentServableIdParameter.map(_.toInt) 116 | val lastServableIndex = servables.size - 1 117 | if (lastServableIndex >= 0 && currentServableIndex.forall(_ != lastServableIndex)) { 118 | val (servable, date) = servables(lastServableIndex) 119 | SprayServer.wrapIdAndServable(lastServableIndex, servable) 120 | } else { 121 | "{}" 122 | } 123 | } 124 | } 125 | } 126 | } ~ 127 | pathPrefix("ui") { 128 | withAuthentication { 129 | getFromResourceDirectory("ui") 130 | } 131 | } 132 | } 133 | 134 | Thread.sleep(1000) 135 | if (launchBrowser && Desktop.isDesktopSupported()) { 136 | println("Opening browser") 137 | Desktop.getDesktop().browse(new URI( s"""http://$interface:$port/""")) 138 | } 139 | } 140 | } 141 | 142 | def tearDown(): Unit = { 143 | println("Stopping server") 144 | servables = Seq.empty 145 | system.scheduler.scheduleOnce(1.milli)(system.shutdown())(system.dispatcher) 146 | } 147 | 148 | def serve(servable: Servable) = { 149 | val toAdd = (servable, new Date()) 150 | if (enableHistory) 151 | servables = servables :+ toAdd 152 | else 153 | servables = Vector(toAdd) 154 | } 155 | 156 | } 157 | 158 | object SprayServer { 159 | 160 | val DEFAULT_INTERFACE = "localhost" 161 | val DEFAULT_PORT = 23080 162 | 163 | private var serverNumber = 0 164 | 165 | private var serverInstance = Option.empty[SprayServer] 166 | 167 | @Help( 168 | category = "Web UI", 169 | shortDescription = "Starts the DDS Web UI", 170 | longDescription = "Starts the DDS Web UI bound to the default interface and port. You can stop it by calling stop()." 171 | ) 172 | def start(): Unit = start(DEFAULT_INTERFACE, DEFAULT_PORT) 173 | 174 | @Help( 175 | category = "Web UI", 176 | shortDescription = "Starts the DDS Web UI bound to the given interface and port with an optional authentication mechanism", 177 | longDescription = "Starts the DDS Web UI bound to the given interface and port. You can also specify a password " + 178 | "which will be used for a simple HTTP authentication. Note however, that this is transmitting the password " + 179 | "unencrypted and you should not reuse this password somewhere else. 
You can stop it by calling stop().", 180 | parameters = "interface: String, port: Int, (optional) password: String" 181 | ) 182 | def start(interface: String, port: Int, password: String = null): Unit = 183 | DDS.setServer(SprayServer( 184 | name = "dds-" + serverNumber, 185 | interface = interface, 186 | port = port, 187 | launchBrowser = true, 188 | password = Option(password) 189 | )) 190 | 191 | @Help( 192 | category = "Web UI", 193 | shortDescription = "Stops the DDS Web UI", 194 | longDescription = "Stops the DDS Web UI. You can restart it again by calling start()." 195 | ) 196 | def stop(): Unit = DDS.unsetServer() 197 | 198 | private def wrapIdAndServable(id: Int, servable: Servable): String = { 199 | JsObject( 200 | ("servable", servable.toJson(ServableJsonProtocol.ServableJsonFormat)), 201 | ("id", JsNumber(id)) 202 | ).compactPrint 203 | } 204 | 205 | } 206 | -------------------------------------------------------------------------------- /web-ui/src/test/resources/dds.typesafe-conf: -------------------------------------------------------------------------------- 1 | akka { 2 | loglevel = "WARNING" 3 | } 4 | -------------------------------------------------------------------------------- /web-ui/src/test/resources/ui/mocks/.nothing: -------------------------------------------------------------------------------- 1 | This is just a placeholder for mocks because sbt-jasmine-plugin is looking for that folder. 2 | -------------------------------------------------------------------------------- /web-ui/src/test/resources/ui/require.config.js: -------------------------------------------------------------------------------- 1 | require.config({ 2 | baseUrl: EnvJasmine.rootDir 3 | }); 4 | -------------------------------------------------------------------------------- /web-ui/src/test/resources/ui/specs/Visualization.spec.js: -------------------------------------------------------------------------------- 1 | define(['Visualization'], function(Visualization) { 2 | 3 | describe('Visualization', function() { 4 | 5 | it('has a working title setter and getter', function() { 6 | var vis = new Visualization(); 7 | vis.title("test"); 8 | expect(vis.title()).toEqual("test"); 9 | }); 10 | 11 | it('has a working header setter and getter', function() { 12 | // TODO: refactoring; not testable because of dependency to global document variable 13 | }); 14 | 15 | it('has a working content setter and getter', function() { 16 | // TODO: refactoring; not testable because of dependency to global document variable 17 | }); 18 | 19 | it('has a working margin setter and getter', function() { 20 | var vis = new Visualization(); 21 | vis.margin({ 22 | top: 1, 23 | bottom: 3, 24 | left: 3, 25 | right: 7 26 | }); 27 | expect(vis.margin()).toEqual({ 28 | top: 1, 29 | bottom: 3, 30 | left: 3, 31 | right: 7 32 | }); 33 | }); 34 | 35 | it('its margin setter fills missing margin values with default', function() { 36 | var vis = new Visualization(); 37 | vis.margin({ 38 | bottom: 3, 39 | left: 3, 40 | right: 7 41 | }); 42 | expect(vis.margin()).toEqual({ 43 | top: 0, 44 | bottom: 3, 45 | left: 3, 46 | right: 7 47 | }); 48 | }); 49 | 50 | it('has a working width setter and getter', function() { 51 | var vis = new Visualization(); 52 | vis.width(5); 53 | expect(vis.width()).toEqual(5); 54 | }); 55 | 56 | it('has a working height setter and getter', function() { 57 | var vis = new Visualization(); 58 | vis.height(10); 59 | expect(vis.height()).toEqual(10); 60 | }); 61 | 62 | it('has a working data setter and getter', 
function() { 63 | var vis = new Visualization(); 64 | vis.data({val : "test"}); 65 | expect(vis.data()).toEqual({val : "test"}); 66 | }); 67 | 68 | it('has a working draw method', function() { 69 | // TODO: refactoring; not testable because of not working content setter 70 | }); 71 | 72 | it('has a working clear method', function() { 73 | // TODO: refactoring; not testable because of not working header setter 74 | }); 75 | 76 | }); 77 | 78 | }); 79 | -------------------------------------------------------------------------------- /web-ui/src/test/resources/ui/specs/util.spec.js: -------------------------------------------------------------------------------- 1 | define(["util"], function(Util) { 2 | 3 | describe('util', function() { 4 | 5 | it('flatMaps one level correctly', function() { 6 | expect(Util.flatMap([1,2,3], function(element) { 7 | return [element - 1, element, element + 1] 8 | })).toEqual([0,1,2,1,2,3,2,3,4]); 9 | }); 10 | 11 | it('flatMaps two levels correctly', function() { 12 | expect(Util.flatMap([1,2,3], function(element) { 13 | return [[element - 1, element, element + 1]] 14 | })).toEqual([[0,1,2],[1,2,3],[2,3,4]]); 15 | }); 16 | 17 | it('does and redoes on resize correctly', function() { 18 | var x = 0 19 | var f = function() { 20 | x = x + 1; 21 | }; 22 | var w = {}; 23 | Util.doAndRedoOnResizeOf(w, f) 24 | expect(x).toEqual(1); 25 | w.onresize(); 26 | expect(x).toEqual(2); 27 | }); 28 | 29 | it('removes an existing element', function() { 30 | var removedChild = {}; 31 | var parent = { 32 | id : 0, 33 | removeChild : function(child) { 34 | removedChild = child; 35 | } 36 | }; 37 | var child = { 38 | id : 1, 39 | parentNode: parent 40 | }; 41 | Util.removeElementIfExists(child); 42 | expect(removedChild).toEqual(child); 43 | }); 44 | 45 | }); 46 | 47 | }); 48 | -------------------------------------------------------------------------------- /web-ui/src/test/resources/ui/test.dependencies.js: -------------------------------------------------------------------------------- 1 | var mainRequireConf = EnvJasmine.rootDir + "require.config.js"; 2 | console.log("[info] Loading " + mainRequireConf); 3 | EnvJasmine.loadGlobal(mainRequireConf); 4 | 5 | var testRequireConf = EnvJasmine.testDir + "require.config.js"; 6 | console.log("[info] Loading " + testRequireConf); 7 | EnvJasmine.loadGlobal(testRequireConf); 8 | -------------------------------------------------------------------------------- /web-ui/src/test/scala/de/frosner/dds/webui/servables/ServableJsonProtocolTest.scala: -------------------------------------------------------------------------------- 1 | package de.frosner.dds.webui.servables 2 | 3 | import de.frosner.dds.servables._ 4 | import org.apache.spark.sql.Row 5 | import org.apache.spark.sql.types._ 6 | import org.scalatest.{Matchers, FlatSpec} 7 | import spray.json._ 8 | 9 | class ServableJsonProtocolTest extends FlatSpec with Matchers { 10 | 11 | private def checkSerDeEquality(servable: Servable) = { 12 | val servableJs = servable.toJson(ServableJsonProtocol.ServableJsonFormat).asJsObject 13 | servableJs.convertTo[Servable](ServableJsonProtocol.ServableJsonFormat) shouldBe servable 14 | } 15 | 16 | // TODO check serialization of null values for all servables (put nulls wherever possible) 17 | 18 | "A bar chart" should "be serialized and deserialized correctly" in { 19 | checkSerDeEquality { 20 | BarChart( 21 | title = "bar", 22 | xDomain = Seq("a", "b", "c"), 23 | heights = Seq(Seq(1, 2, 3), Seq(4, 5, 6)), 24 | series = Seq("1", "2") 25 | ) 26 | } 27 | } 28 | 29 
| "A pie chart" should "be serialized and deserialized correctly" in { 30 | checkSerDeEquality { 31 | PieChart( 32 | title = "pie", 33 | categoryCountPairs = List(("a", 1d), ("b", 2d)) 34 | ) 35 | } 36 | } 37 | 38 | "A histogram" should "be serialized and deserialized correctly" in { 39 | checkSerDeEquality { 40 | Histogram( 41 | title = "hist", 42 | bins = List(1d, 2d, 3d), 43 | frequencies = List(2, 5) 44 | ) 45 | } 46 | } 47 | 48 | "A table" should "be serialized and deserialized correctly (no bytearray)" in { 49 | checkSerDeEquality { 50 | Table( 51 | title = "table", 52 | schema = StructType(List( 53 | StructField("0", ByteType, false), 54 | StructField("1", ByteType, true), 55 | StructField("2", ShortType, false), 56 | StructField("3", ShortType, true), 57 | StructField("4", IntegerType, false), 58 | StructField("5", IntegerType, true), 59 | StructField("6", LongType, false), 60 | StructField("7", LongType, true), 61 | StructField("8", FloatType, false), 62 | StructField("9", FloatType, true), 63 | StructField("10", DoubleType, false), 64 | StructField("11", DoubleType, true), 65 | StructField("12", DecimalType.Unlimited, true), 66 | StructField("13", StringType, true), 67 | StructField("15", BooleanType, false), 68 | StructField("16", BooleanType, true), 69 | StructField("17", TimestampType, true), 70 | StructField("18", DateType, true), 71 | StructField("19", ArrayType(StringType), true), 72 | StructField("20", MapType(StringType, IntegerType, valueContainsNull = false), true), 73 | StructField("21", StructType(List( 74 | StructField("a", IntegerType, false), 75 | StructField("b", IntegerType, false), 76 | StructField("c", IntegerType, false) 77 | )), true) 78 | )), 79 | content = Seq( 80 | Row( 81 | 0.toByte, new java.lang.Byte(0.toByte), 82 | 1.toShort, new java.lang.Short(1.toShort), 83 | 2, new java.lang.Integer(2), 84 | 3l, new java.lang.Long(3l), 85 | 4f, new java.lang.Float(4f), 86 | 5d, new java.lang.Double(5d), 87 | new java.math.BigDecimal(6d), 88 | "abc", 89 | true, new java.lang.Boolean(true), 90 | new java.sql.Timestamp(10000), 91 | new java.sql.Date(10000), 92 | Seq("a", "b", "c"), 93 | Map("a" -> 1, "b" -> 2, "c" -> 3), 94 | Row(1, 2, 3) 95 | ), 96 | Row( 97 | 0.toByte, null, 98 | 1.toShort, null, 99 | 2, null, 100 | 3l, null, 101 | 4f, null, 102 | 5d, null, 103 | null, 104 | null, 105 | true, null, 106 | null, 107 | null, 108 | null, 109 | null, 110 | null 111 | ) 112 | ) 113 | ) 114 | } 115 | } 116 | 117 | it should "be serialized and deserialized correctly (bytearray)" in { 118 | // this needs to be tested in a special way because Array[Byte] does not implement equals 119 | val table: Servable = Table( 120 | title = "table", 121 | schema = StructType(List(StructField("14", BinaryType, true))), 122 | content = Seq(Row(Array(8.toByte, 8.toByte, 8.toByte)), Row(null)) 123 | ) 124 | val serialized = table.toJson(ServableJsonProtocol.ServableJsonFormat).asJsObject 125 | val deserialized = serialized.convertTo[Servable](ServableJsonProtocol.ServableJsonFormat).asInstanceOf[Table] 126 | deserialized.content should have length 2 127 | deserialized.content(0).toSeq should have length 1 128 | deserialized.content(0).getAs[Array[Byte]](0).toSeq shouldBe Seq(8.toByte, 8.toByte, 8.toByte) 129 | deserialized.content(1).toSeq should have length 1 130 | deserialized.content(1).isNullAt(0) shouldBe true 131 | } 132 | 133 | "A heatmap" should "be serialized and deserialized correctly" in { 134 | checkSerDeEquality { 135 | Heatmap( 136 | title = "heatmap", 137 | content = Seq( 138 | 
Seq(1d, 2d, 3d), 139 | Seq(4d, 5d, 6d), 140 | Seq(7d, 8d, 9d) 141 | ), 142 | rowNames = Seq("1", "2", "3"), 143 | colNames = Seq("a", "b", "c"), 144 | zColorZeroes = Seq(0d, 9d) 145 | ) 146 | } 147 | } 148 | 149 | "A graph" should "be serialized and deserialized correctly" in { 150 | checkSerDeEquality { 151 | Graph( 152 | title = "graph", 153 | vertices = Seq("v1", "v2", "v3"), 154 | edges = Seq( 155 | (0, 1, "v1-v2"), 156 | (1, 2, "v2-v3") 157 | ) 158 | ) 159 | } 160 | } 161 | 162 | "A scatter plot" should "be serialized and deserialized correctly" in { 163 | checkSerDeEquality { 164 | ScatterPlot( 165 | title = "scatter", 166 | points = Seq( 167 | (5d, "a"), 168 | (2d, "b"), 169 | (3d, "c") 170 | ), 171 | xIsNumeric = true, 172 | yIsNumeric = false 173 | ) 174 | } 175 | } 176 | 177 | "A key value sequence" should "be serialized and deserialized correctly" in { 178 | checkSerDeEquality { 179 | KeyValueSequence( 180 | title = "Key Value Sequence", 181 | keyValuePairs = Seq( 182 | ("a", "5"), 183 | ("b", "3") 184 | ) 185 | ) 186 | } 187 | } 188 | 189 | "A composite servable" should "be serialized and deserialized correctly" in { 190 | checkSerDeEquality { 191 | Composite( 192 | title = "Composite", 193 | servables = Seq( 194 | Seq(Blank, KeyValueSequence("kvs", Seq(("a", "b")))), 195 | Seq(Histogram("hist", List(1d, 2d, 3d), List(2, 5))) 196 | ) 197 | ) 198 | } 199 | } 200 | 201 | "A blank servable" should "be serialized and deserialized correctly" in { 202 | checkSerDeEquality { 203 | Blank 204 | } 205 | } 206 | 207 | } 208 | --------------------------------------------------------------------------------
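
A minimal usage sketch for the web UI server defined in SprayServer.scala above. It only uses the calls and defaults visible in that file (SprayServer.start/stop, DEFAULT_INTERFACE = "localhost", DEFAULT_PORT = 23080); the REPL context and the "secret" password value are illustrative assumptions, not part of the repository.

    // Sketch, assuming a Scala/Spark REPL with the dds web-ui jar on the classpath.
    import de.frosner.dds.webui.server.SprayServer

    // Start the DDS Web UI on the defaults (localhost:23080); this registers the
    // server with DDS via DDS.setServer and opens a browser window if supported.
    SprayServer.start()

    // ... call DDS visualization functions here; each produced servable is pushed
    // to the web UI through Server.serve ...

    // Stop the server again (DDS.unsetServer).
    SprayServer.stop()

    // Alternatively, bind to an explicit interface and port with basic HTTP
    // authentication. Note: as the help text above states, the password is
    // transmitted unencrypted, so do not reuse it elsewhere.
    SprayServer.start("0.0.0.0", 8080, "secret")
    SprayServer.stop()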