├── .gitignore ├── .travis.yml ├── CONTRIBUTING.md ├── LICENSE ├── NOTICE ├── README.md ├── build.sbt ├── js-engine-tester ├── build.sbt └── src │ └── main │ ├── resources │ ├── application.conf │ └── com │ │ └── typesafe │ │ └── jse │ │ └── tester │ │ └── test.js │ └── scala │ └── com │ └── typesafe │ └── jse │ └── tester │ └── Main.scala ├── project ├── build.properties └── plugins.sbt ├── src ├── main │ └── scala │ │ ├── akka │ │ └── contrib │ │ │ └── process │ │ │ └── BlockingProcess.scala │ │ └── com │ │ └── typesafe │ │ └── jse │ │ ├── Engine.scala │ │ ├── JavaxEngine.scala │ │ ├── LocalEngine.scala │ │ ├── Rhino.scala │ │ └── Trireme.scala └── test │ ├── resources │ ├── application.conf │ └── com │ │ └── typesafe │ │ └── jse │ │ ├── hello.js │ │ ├── test-javax.js │ │ ├── test-node.js │ │ └── test-rhino.js │ ├── scala-2.10 │ └── com │ │ └── typesafe │ │ └── jse │ │ └── AkkaCompat.scala │ ├── scala-2.11 │ └── com │ │ └── typesafe │ │ └── jse │ │ └── AkkaCompat.scala │ ├── scala-2.12 │ └── com │ │ └── typesafe │ │ └── jse │ │ └── AkkaCompat.scala │ └── scala │ └── com │ └── typesafe │ └── jse │ ├── JavaxEngineSpec.scala │ ├── RhinoSpec.scala │ ├── TestActorSystem.scala │ └── TriremeSpec.scala └── version.sbt /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | 4 | # sbt 5 | dist/* 6 | target/ 7 | lib_managed/ 8 | src_managed/ 9 | project/boot/ 10 | project/plugins/project/ 11 | 12 | # sbt-web 13 | node_modules/ 14 | 15 | # Eclipse / Scala-IDE 16 | bin/ 17 | .scala_dependencies 18 | .classpath 19 | .project 20 | .settings/ 21 | 22 | # Idea 23 | .idea/ 24 | .idea_modules/ 25 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: scala 3 | 4 | jdk: 5 | - oraclejdk8 6 | 7 | script: 8 | - sbt +test 9 | - sbt +publishLocal 10 | 11 | cache: 12 | directories: 13 | - $HOME/.ivy2/cache 14 | 15 | before_cache: 16 | # Delete all ivydata files 17 | - find $HOME/.ivy2/cache -name "ivydata-*.properties" | xargs rm 18 | 19 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Typesafe Project & Developer Guidelines 2 | 3 | These guidelines are meant to be a living document that should be changed and adapted as needed. We encourage changes that makes it easier to achieve our goals in an efficient way. 4 | 5 | These guidelines mainly applies to Typesafe’s “mature” projects - not necessarily to projects of the type ‘collection of scripts’ etc. 6 | 7 | ## General Workflow 8 | 9 | This is the process for committing code into master. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc. 10 | 11 | 1. Make sure you have signed the [Typesafe CLA](http://www.typesafe.com/contribute/cla), if not, sign it online. 12 | 2. Before starting to work on a feature or a fix, you have to make sure that: 13 | 1. There is a ticket for your work in the project's issue tracker. If not, create it first. 14 | 2. The ticket has been scheduled for the current milestone. 15 | 3. The ticket is estimated by the team. 16 | 4. The ticket have been discussed and prioritized by the team. 17 | 3. You should always perform your work in a Git feature branch. 
The branch should be given a descriptive name that explains its intent. Some teams also like adding the ticket number and/or the [GitHub](http://github.com) user ID to the branch name, these details is up to each of the individual teams. 18 | 4. When the feature or fix is completed you should open a [Pull Request](https://help.github.com/articles/using-pull-requests) on GitHub. 19 | 5. The Pull Request should be reviewed by other maintainers (as many as feasible/practical). Note that the maintainers can consist of outside contributors, both within and outside Typesafe. Outside contributors (for example from EPFL or independent committers) are encouraged to participate in the review process, it is not a closed process. 20 | 6. After the review you should fix the issues as needed (pushing a new commit for new review etc.), iterating until the reviewers give their thumbs up. 21 | 7. Once the code has passed review the Pull Request can be merged into the master branch. 22 | 23 | ## Pull Request Requirements 24 | 25 | For a Pull Request to be considered at all it has to meet these requirements: 26 | 27 | 1. Live up to the current code standard: 28 | - Not violate [DRY](http://programmer.97things.oreilly.com/wiki/index.php/Don%27t_Repeat_Yourself). 29 | - [Boy Scout Rule](http://programmer.97things.oreilly.com/wiki/index.php/The_Boy_Scout_Rule) needs to have been applied. 30 | 2. Regardless if the code introduces new features or fixes bugs or regressions, it must have comprehensive tests. 31 | 3. The code must be well documented in the Typesafe's standard documentation format (see the ‘Documentation’ section below). 32 | 4. Copyright: 33 | All Typesafe projects must include Typesafe copyright notices. Each project can choose between one of two approaches: 34 | 1. All source files in the project must have a Typesafe copyright notice in the file header. 35 | 2. The Notices file for the project includes the Typesafe copyright notice and no other files contain copyright notices. See http://www.apache.org/legal/src-headers.html for instructions for managing this approach for copyrights. 36 | 37 | Other guidelines to follow for copyright notices: 38 | - Use a form of ``Copyright (C) 2011-2014 Typesafe Inc. ``, where the start year is when the project or file was first created and the end year is the last time the project or file was modified. 39 | - Never delete or change existing copyright notices, just add additional info. 40 | - Do not use ``@author`` tags since it does not encourage [Collective Code Ownership](http://www.extremeprogramming.org/rules/collective.html). However, each project should make sure that the contributors gets the credit they deserve—in a text file or page on the project website and in the release notes etc. 41 | 42 | If these requirements are not met then the code should **not** be merged into master, or even reviewed - regardless of how good or important it is. No exceptions. 43 | 44 | ## Continuous Integration 45 | 46 | Each project should be configured to use a continuous integration (CI) tool (i.e. a build server ala Jenkins). Typesafe has a Jenkins server farm that can be used. The CI tool should, on each push to master, build the **full** distribution and run **all** tests, and if something fails it should email out a notification with the failure report to the committer and the core team. The CI tool should also be used in conjunction with Typesafe’s Pull Request Validator (discussed below). 
47 | 48 | ## Documentation 49 | 50 | All documentation should be generated using the sbt-site-plugin, *or* publish artifacts to a repository that can be consumed by the typesafe stack. 51 | 52 | All documentation must abide by the following maxims: 53 | 54 | - Example code should be run as part of an automated test suite. 55 | - Version should be **programmatically** specifiable to the build. 56 | - Generation should be **completely automated** and available for scripting. 57 | - Artifacts that must be included in the Typesafe Stack should be published to a maven “documentation” repository as documentation artifacts. 58 | 59 | All documentation is preferred to be in Typesafe's standard documentation format [reStructuredText](http://doc.akka.io/docs/akka/snapshot/dev/documentation.html) compiled using Typesafe's customized [Sphinx](http://sphinx.pocoo.org/) based documentation generation system, which among other things allows all code in the documentation to be externalized into compiled files and imported into the documentation. 60 | 61 | For more info, or for a starting point for new projects, look at the [Typesafe Documentation Template project](https://github.com/typesafehub/doc-template) 62 | 63 | For larger projects that have invested a lot of time and resources into their current documentation and samples scheme (like for example Play), it is understandable that it will take some time to migrate to this new model. In these cases someone from the project needs to take the responsibility of manual QA and verifier for the documentation and samples. 64 | 65 | ## External Dependencies 66 | 67 | All the external runtime dependencies for the project, including transitive dependencies, must have an open source license that is equal to, or compatible with, [Apache 2](http://www.apache.org/licenses/LICENSE-2.0). 68 | 69 | This must be ensured by manually verifying the license for all the dependencies for the project: 70 | 71 | 1. Whenever a committer to the project changes a version of a dependency (including Scala) in the build file. 72 | 2. Whenever a committer to the project adds a new dependency. 73 | 3. Whenever a new release is cut (public or private for a customer). 74 | 75 | Which licenses that are compatible with Apache 2 are defined in [this doc](http://www.apache.org/legal/3party.html#category-a), where you can see that the licenses that are listed under ``Category A`` automatically compatible with Apache 2, while the ones listed under ``Category B`` needs additional action: 76 | > “Each license in this category requires some degree of [reciprocity](http://www.apache.org/legal/3party.html#define-reciprocal); therefore, additional action must be taken in order to minimize the chance that a user of an Apache product will create a derivative work of a reciprocally-licensed portion of an Apache product without being aware of the applicable requirements.” 77 | 78 | Each project must also create and maintain a list of all dependencies and their licenses, including all their transitive dependencies. This can be done in either in the documentation or in the build file next to each dependency. 79 | 80 | ## Work In Progress 81 | 82 | It is ok to work on a public feature branch in the GitHub repository. Something that can sometimes be useful for early feedback etc. If so then it is preferable to name the branch accordingly. This can be done by either prefix the name with ``wip-`` as in ‘Work In Progress’, or use hierarchical names like ``wip/..``, ``feature/..`` or ``topic/..``. 
Either way is fine as long as it is clear that it is work in progress and not ready for merge. This work can temporarily have a lower standard. However, to be merged into master it will have to go through the regular process outlined above, with Pull Request, review etc.. 83 | 84 | Also, to facilitate both well-formed commits and working together, the ``wip`` and ``feature``/``topic`` identifiers also have special meaning. Any branch labelled with ``wip`` is considered “git-unstable” and may be rebased and have its history rewritten. Any branch with ``feature``/``topic`` in the name is considered “stable” enough for others to depend on when a group is working on a feature. 85 | 86 | ## Creating Commits And Writing Commit Messages 87 | 88 | Follow these guidelines when creating public commits and writing commit messages. 89 | 90 | 1. If your work spans multiple local commits (for example; if you do safe point commits while working in a feature branch or work in a branch for long time doing merges/rebases etc.) then please do not commit it all but rewrite the history by squashing the commits into a single big commit which you write a good commit message for (like discussed in the following sections). For more info read this article: [Git Workflow](http://sandofsky.com/blog/git-workflow.html). Every commit should be able to be used in isolation, cherry picked etc. 91 | 2. First line should be a descriptive sentence what the commit is doing. It should be possible to fully understand what the commit does by just reading this single line. It is **not ok** to only list the ticket number, type "minor fix" or similar. Include reference to ticket number, prefixed with #, at the end of the first line. If the commit is a small fix, then you are done. If not, go to 3. 92 | 3. Following the single line description should be a blank line followed by an enumerated list with the details of the commit. 93 | 4. Add keywords for your commit (depending on the degree of automation we reach, the list may change over time): 94 | * ``Review by @gituser`` - if you want to notify someone on the team. The others can, and are encouraged to participate. 95 | * ``Fix/Fixing/Fixes/Close/Closing/Refs #ticket`` - if you want to mark the ticket as fixed in the issue tracker (Assembla understands this). 96 | * ``backport to _branch name_`` - if the fix needs to be cherry-picked to another branch (like 2.9.x, 2.10.x, etc) 97 | 98 | Example: 99 | 100 | Adding monadic API to Future. Fixes #2731 101 | 102 | * Details 1 103 | * Details 2 104 | * Details 3 105 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This software is licensed under the Apache 2 license, quoted below. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this project except in compliance with 4 | the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. 5 | 6 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 7 | "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific 8 | language governing permissions and limitations under the License. 
-------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright (C) 2013-2014 Typesafe Inc. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | JavaScript Engine 2 | ================= 3 | 4 | [![Build Status](https://api.travis-ci.org/typesafehub/js-engine.png?branch=master)](https://travis-ci.org/typesafehub/js-engine) 5 | 6 | The JavaScript Engine library (jse) provides an [Actor](http://en.wikipedia.org/wiki/Actor_model) based abstraction so that JavaScript code can be 7 | executed in a browser-less fashion. In-jvm support is provided in the form of [Trireme](https://github.com/apigee/trireme#trireme), 8 | a Node API for [Rhino](https://developer.mozilla.org/en/docs/Rhino). Standalone Rhino is also supported with a RhinoShell environment. 9 | Native JavaScript performance is provided by using 10 | [Common Node](http://olegp.github.io/common-node/), 11 | [node.js](http://nodejs.org/) and 12 | [PhantomJS](http://phantomjs.org/) (these latter 3 are required to be installed separately). 13 | 14 | While multiple engines are provided, plugin authors are encouraged to target the Node API. Doing so means that the 15 | engine options generally come down to Trireme and Node, depending on whether in-JVM or native support is required. Trireme 16 | is therefore provided as a default as there should be no JS coding differences between Trireme and Node, and Trireme 17 | requires no manual installation. 18 | 19 | Sample usage can be obtained by inspecting the js-engine-tester sub-project. There's a main class that 20 | illustrates essential interactions. Here is a snippet of it: 21 | 22 | val engine = system.actorOf(Node.props(), "engine") 23 | val f = new File(Main.getClass.getResource("test.js").toURI) 24 | for ( 25 | result <- (engine ? Engine.ExecuteJs(f, Seq("999"))).mapTo[JsExecutionResult] 26 | ) yield { 27 | println(new String(result.output.toArray, "UTF-8")) 28 | ... 29 | 30 | An additional sbt-js-engine sub-project is provided that declares a base for sbt plugins that use the engine. 31 | This sub-project has a separate release cycle to jse itself and could be spun off into its own repo at a later 32 | point in time e.g. if/when Maven/Gradle support is required. The main point here is that the core JavaScript engine 33 | library is not related to sbt at all and should be usable from other build tools. 34 | 35 | The library is entirely [reactive](http://www.reactivemanifesto.org/) and uses [Akka](http://akka.io/). 
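The usage snippet shown earlier is abridged: in the current API `Engine.ExecuteJs` also takes a required execution timeout. A fuller sketch, closely following the js-engine-tester main class and using the default Trireme engine (`Main` and `test.js` are the tester's own; shut the system down afterwards with `system.shutdown()` on Akka 2.3 or `system.terminate()` on later versions):

    import java.io.File
    import akka.actor.ActorSystem
    import akka.pattern.ask
    import akka.util.Timeout
    import com.typesafe.jse.{Engine, Trireme}
    import com.typesafe.jse.Engine.JsExecutionResult
    import scala.collection.immutable
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._

    implicit val system = ActorSystem("jse-system")
    implicit val timeout = Timeout(15.seconds)

    val engine = system.actorOf(Trireme.props(), "engine")
    val f = new File(Main.getClass.getResource("test.js").toURI)
    for (
      result <- (engine ? Engine.ExecuteJs(f, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult]
    ) yield {
      // The output and error fields aggregate the process's stdout and stderr respectively.
      println(new String(result.output.toArray, "UTF-8"))
      println(new String(result.error.toArray, "UTF-8"))
    }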
36 | 37 | © Typesafe Inc., 2013, 2014 38 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | organization := "com.typesafe" 2 | name := "jse" 3 | 4 | scalaVersion := "2.10.7" 5 | crossScalaVersions := Seq(scalaVersion.value, "2.11.12", "2.12.7") 6 | 7 | libraryDependencies ++= { 8 | val akkaVersion = scalaBinaryVersion.value match { 9 | case "2.10" => "2.3.16" 10 | case "2.11" => "2.3.16" 11 | case "2.12" => "2.5.18" 12 | } 13 | Seq( 14 | "com.typesafe.akka" %% "akka-actor" % akkaVersion, 15 | "io.apigee.trireme" % "trireme-core" % "0.9.4", 16 | "io.apigee.trireme" % "trireme-node10src" % "0.9.4", 17 | "io.spray" %% "spray-json" % "1.3.5", 18 | "org.slf4j" % "slf4j-api" % "1.7.25", 19 | "com.typesafe.akka" %% "akka-testkit" % akkaVersion % "test", 20 | "junit" % "junit" % "4.12" % "test", 21 | "org.slf4j" % "slf4j-simple" % "1.7.25" % "test", 22 | "org.specs2" %% "specs2-core" % "3.10.0" % "test" 23 | ) 24 | } 25 | 26 | lazy val root = project in file(".") 27 | 28 | lazy val `js-engine-tester` = project.dependsOn(root) 29 | 30 | // Somehow required to get a js engine in tests (https://github.com/sbt/sbt/issues/1214) 31 | fork in Test := true 32 | parallelExecution in Test := false 33 | 34 | // Publish settings 35 | publishTo := { 36 | if (isSnapshot.value) Some(Opts.resolver.sonatypeSnapshots) 37 | else Some(Opts.resolver.sonatypeStaging) 38 | } 39 | 40 | homepage := Some(url("https://github.com/typesafehub/js-engine")) 41 | licenses := Seq("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0.html")) 42 | pomExtra := { 43 | 44 | git@github.com:typesafehub/js-engine.git 45 | scm:git:git@github.com:typesafehub/js-engine.git 46 | 47 | 48 | 49 | playframework 50 | Play Framework Team 51 | https://github.com/playframework 52 | 53 | 54 | } 55 | pomIncludeRepository := { _ => false } 56 | 57 | // Release settings 58 | sonatypeProfileName := "com.typesafe" 59 | releaseCrossBuild := true 60 | releasePublishArtifactsAction := PgpKeys.publishSigned.value 61 | releaseTagName := (version in ThisBuild).value 62 | releaseProcess := { 63 | import ReleaseTransformations._ 64 | 65 | Seq[ReleaseStep]( 66 | checkSnapshotDependencies, 67 | inquireVersions, 68 | runTest, 69 | setReleaseVersion, 70 | commitReleaseVersion, 71 | tagRelease, 72 | publishArtifacts, 73 | releaseStepCommand("sonatypeRelease"), 74 | setNextVersion, 75 | commitNextVersion, 76 | pushChanges 77 | ) 78 | } 79 | 80 | -------------------------------------------------------------------------------- /js-engine-tester/build.sbt: -------------------------------------------------------------------------------- 1 | resolvers += Resolver.sonatypeRepo("snapshots") 2 | 3 | fork in run := true 4 | 5 | libraryDependencies ++= Seq( 6 | "org.slf4j" % "slf4j-simple" % "1.7.6" 7 | ) 8 | 9 | javaOptions ++= Seq("-Dorg.slf4j.simpleLogger.defaultLogLevel=trace") -------------------------------------------------------------------------------- /js-engine-tester/src/main/resources/application.conf: -------------------------------------------------------------------------------- 1 | blocking-process-io-dispatcher { 2 | type = Dispatcher 3 | executor = "thread-pool-executor" 4 | thread-pool-executor { 5 | core-pool-size-min = 3 6 | core-pool-size-factor = 1.0 7 | core-pool-size-max = 100 8 | } 9 | } 10 | -------------------------------------------------------------------------------- 
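The `blocking-process-io-dispatcher` block above is the thread-pool dispatcher on which the engine and process actors perform their blocking IO; its name matches the default `ioDispatcherId` parameter of the various props factories (`Trireme.props`, `Rhino.props`, `JavaxEngine.props`, `BlockingProcess.props`). An application that wants a differently named dispatcher must both pass that id and define an equivalent block in its configuration, e.g. (a sketch with a hypothetical id):

    // "my-blocking-io-dispatcher" is hypothetical and would need a dispatcher block
    // like the one above, under that name, in the application's configuration.
    val engine = system.actorOf(Trireme.props(ioDispatcherId = "my-blocking-io-dispatcher"), "engine")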
/js-engine-tester/src/main/resources/com/typesafe/jse/tester/test.js: -------------------------------------------------------------------------------- 1 | var log; 2 | try { 3 | log = require("console").log; 4 | } catch (e) { 5 | log = print; 6 | } 7 | 8 | log("Hi there"); 9 | log("and again!"); 10 | -------------------------------------------------------------------------------- /js-engine-tester/src/main/scala/com/typesafe/jse/tester/Main.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse.tester 2 | 3 | import akka.actor.ActorSystem 4 | import akka.pattern.ask 5 | 6 | import com.typesafe.jse.{Trireme, Rhino, CommonNode, Engine, Node} 7 | import akka.util.Timeout 8 | import scala.concurrent.duration._ 9 | import scala.concurrent.ExecutionContext.Implicits.global 10 | import java.io.File 11 | import com.typesafe.jse.Engine.JsExecutionResult 12 | import scala.collection.immutable 13 | 14 | object Main { 15 | def main(args: Array[String]) { 16 | implicit val system = ActorSystem("jse-system") 17 | implicit val timeout = Timeout(5.seconds) 18 | 19 | system.scheduler.scheduleOnce(7.seconds) { 20 | system.shutdown() 21 | System.exit(1) 22 | } 23 | 24 | val engine = system.actorOf(Trireme.props(), "engine") 25 | val f = new File(Main.getClass.getResource("test.js").toURI) 26 | for ( 27 | result <- (engine ? Engine.ExecuteJs(f, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult] 28 | ) yield { 29 | println(s"output\n======\n${new String(result.output.toArray, "UTF-8")}\n") 30 | println(s"error\n=====\n${new String(result.error.toArray, "UTF-8")}\n") 31 | 32 | try { 33 | system.shutdown() 34 | System.exit(0) 35 | } catch { 36 | case _: Throwable => 37 | } 38 | 39 | } 40 | 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.17 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | libraryDependencies ++= Seq( 2 | "org.scala-sbt" % "scripted-plugin" % sbtVersion.value 3 | ) 4 | 5 | addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.10") 6 | addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.1") 7 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.3") 8 | addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.4") 9 | -------------------------------------------------------------------------------- /src/main/scala/akka/contrib/process/BlockingProcess.scala: -------------------------------------------------------------------------------- 1 | package akka.contrib.process 2 | 3 | import akka.actor._ 4 | import akka.util.{Timeout, ByteString} 5 | import java.io._ 6 | import scala.collection.JavaConverters._ 7 | import scala.collection.immutable 8 | import scala.concurrent.{Await, blocking} 9 | import java.lang.{ProcessBuilder => JdkProcessBuilder} 10 | import akka.contrib.process.BlockingProcess.Started 11 | import akka.contrib.process.StreamEvents.{Read, Done, Ack} 12 | import akka.pattern.ask 13 | import scala.concurrent.duration.{FiniteDuration, Duration} 14 | import scala.annotation.tailrec 15 | import java.util.concurrent.atomic.AtomicBoolean 16 | 17 | /** 18 | * Process encapsulates an operating system process and its ability to be communicated with 19 | * via stdio i.e. stdin, stdout and stderr. 
The sink for stdin and the sources for stdout 20 | * and stderr are communicated in a Started event upon the actor being established. The 21 | * receiving actor (passed in as a constructor arg) is then subsequently sent stdout and 22 | * stderr events. When there are no more stdout or stderr events then the process's exit 23 | * code is communicated to the receiver as an int value. The exit code will always be 24 | * the last event communicated by the process unless the process is a detached one. 25 | * 26 | * The actor is expected to be associated with a blocking dispatcher as various calls are made 27 | * to input and output streams which can block. 28 | */ 29 | class BlockingProcess(args: immutable.Seq[String], environment: Map[String, String], receiver: ActorRef, detached: Boolean) 30 | extends Actor { 31 | 32 | // This quoting functionality is as recommended per http://bugs.java.com/view_bug.do?bug_id=6511002 33 | // The JDK can't change due to its backward compatibility requirements, but we have no such constraint 34 | // here. Args should be able to be expressed consistently by the user of our API no matter whether 35 | // execution is on Windows or not. 36 | 37 | def needsQuoting(s: String): Boolean = 38 | if (s.isEmpty) true else s.exists(c => c == ' ' || c == '\t' || c == '\\' || c == '"') 39 | 40 | def winQuote(s: String): String = { 41 | if (!needsQuoting(s)) { 42 | s 43 | } else { 44 | "\"" + s.replaceAll("([\\\\]*)\"", "$1$1\\\\\"").replaceAll("([\\\\]*)\\z", "$1$1") + "\"" 45 | } 46 | } 47 | 48 | val isWindows: Boolean = System.getProperty("os.name").toLowerCase.contains("win") 49 | 50 | def prepareArgs(args: immutable.Seq[String]): immutable.Seq[String] = 51 | if (isWindows) args.map(winQuote) else args 52 | 53 | val pb = new JdkProcessBuilder(prepareArgs(args).asJava) 54 | pb.environment().putAll(environment.asJava) 55 | val p = pb.start() 56 | 57 | val stdinSink = context.actorOf(OutputStreamSink.props(p.getOutputStream), "stdin") 58 | val stdoutSource = context.watch(context.actorOf(InputStreamSource.props(p.getInputStream, receiver), "stdout")) 59 | val stderrSource = context.watch(context.actorOf(InputStreamSource.props(p.getErrorStream, receiver), "stderr")) 60 | 61 | var openStreams = 2 62 | 63 | def receive = { 64 | case Terminated(`stdoutSource` | `stderrSource`) => 65 | openStreams -= 1 66 | if (openStreams == 0 && !detached) { 67 | val exitValue = blocking { 68 | p.waitFor() 69 | p.exitValue() 70 | } 71 | receiver ! exitValue 72 | context.stop(self) 73 | } 74 | } 75 | 76 | override def postStop() { 77 | if (!detached) p.destroy() 78 | } 79 | 80 | override def preStart() { 81 | receiver ! Started(stdinSink, stdoutSource, stderrSource) 82 | } 83 | } 84 | 85 | object BlockingProcess { 86 | 87 | /** 88 | * Return the props required to create a process actor. 89 | * @param args The sequence of string arguments to pass to the process. 90 | * @param receiver The actor to receive output and error events. 91 | * @param detached Whether the process will be daemonic. 92 | * @param ioDispatcherId The name given to the dispatcher configuration that will be used to manage blocking IO. 93 | * @return a props object that can be used to create the process actor. 
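   * @param environment The environment variables to supply to the started process.
   *
   * A minimal usage sketch, mirroring how LocalEngine uses this factory (the `node` command
   * and `file` are illustrative; `self` is the actor that will receive the process events):
   * {{{
   * context.actorOf(BlockingProcess.props(
   *   immutable.Seq("node", file.getCanonicalPath),
   *   Map.empty,
   *   self
   * ), "process")
   * }}}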
94 | */ 95 | def props( 96 | args: immutable.Seq[String], 97 | environment: Map[String, String], 98 | receiver: ActorRef, 99 | detached: Boolean = false, 100 | ioDispatcherId: String = "blocking-process-io-dispatcher" 101 | ): Props = Props(classOf[BlockingProcess], args, environment, receiver, detached) 102 | .withDispatcher(ioDispatcherId) 103 | 104 | /** 105 | * Sent on startup to the receiver - specifies the actors used for managing input, output and 106 | * error respectively. 107 | */ 108 | case class Started(stdinSink: ActorRef, stdoutSource: ActorRef, stderrSource: ActorRef) 109 | 110 | } 111 | 112 | 113 | /** 114 | * Declares the types of event that are involved with streaming. 115 | */ 116 | object StreamEvents { 117 | 118 | /** 119 | * Sent in response to an Output even. 120 | */ 121 | case object Ack 122 | 123 | /** 124 | * Sent when no more Output events are expected. 125 | */ 126 | case object Done 127 | 128 | /** 129 | * Read n bytes from an input. 130 | */ 131 | case class Read(size: Int) 132 | 133 | } 134 | 135 | /** 136 | * A target to stream bytes to. Flow control is provided i.e. every message of bytes sent is acknowledged with an 137 | * Ack. A Done event is expected when there is no more data to be written. 138 | */ 139 | abstract class Sink extends Actor 140 | 141 | /** 142 | * A sink of data given an output stream. 143 | */ 144 | class OutputStreamSink(os: OutputStream) extends Sink { 145 | def receive = { 146 | case bytes: ByteString => 147 | blocking { 148 | os.write(bytes.toArray) 149 | } 150 | sender() ! Ack 151 | case Done => context.stop(self) 152 | } 153 | 154 | override def postStop() { 155 | os.close() 156 | } 157 | } 158 | 159 | object OutputStreamSink { 160 | def props( 161 | os: OutputStream, 162 | ioDispatcherId: String = "blocking-process-io-dispatcher" 163 | ): Props = Props(classOf[OutputStreamSink], os) 164 | .withDispatcher(ioDispatcherId) 165 | } 166 | 167 | /** 168 | * A buffering sink. The present implementation is quite limited in that the buffer can grow indefinitely. 169 | */ 170 | class BufferingSink extends Sink { 171 | var buffer = ByteString() 172 | 173 | def receive = { 174 | case bytes: ByteString => 175 | buffer = buffer.concat(bytes) 176 | sender() ! Ack 177 | case Read(size) => 178 | val (readBytes, remainingBytes) = buffer.splitAt(size) 179 | buffer = remainingBytes 180 | sender() ! readBytes 181 | case Done => context.stop(self) 182 | } 183 | } 184 | 185 | object BufferingSink { 186 | def props( 187 | ioDispatcherId: String = "blocking-process-io-dispatcher" 188 | ): Props = Props(classOf[BufferingSink]) 189 | .withDispatcher(ioDispatcherId) 190 | } 191 | 192 | 193 | /** 194 | * A holder of data received and forwarded on to a receiver with flow control. There is only one sender expected and 195 | * that sender should not send again until an Ack from the previous send. 196 | * @param receiver the receiver of data from the source. 197 | */ 198 | abstract class Source(receiver: ActorRef) extends Actor 199 | 200 | /** 201 | * A source of data given an input stream. Flow control is implemented and for each ByteString event received by the receiver, 202 | * an Ack is expected in return. At the end of the source, a Done event will be sent to the receiver and its associated 203 | * input stream is closed. 
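 *
 * For example, BlockingProcess (earlier in this file) attaches a source to a process's stdout with:
 * {{{
 * context.watch(context.actorOf(InputStreamSource.props(p.getInputStream, receiver), "stdout"))
 * }}}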
204 | */ 205 | class InputStreamSource(is: InputStream, receiver: ActorRef, pipeSize: Int) extends Source(receiver) { 206 | require(pipeSize > 0) 207 | 208 | val buffer = new Array[Byte](pipeSize) 209 | 210 | def receive = { 211 | case Ack => 212 | val len = blocking { 213 | is.read(buffer) 214 | } 215 | if (len > -1) { 216 | receiver ! ByteString.fromArray(buffer, 0, len) 217 | } else { 218 | receiver ! Done 219 | context.stop(self) 220 | } 221 | } 222 | 223 | override def postStop() { 224 | is.close() 225 | } 226 | 227 | override def preStart() { 228 | self ! Ack // Start reading 229 | } 230 | } 231 | 232 | object InputStreamSource { 233 | def props( 234 | is: InputStream, 235 | receiver: ActorRef, 236 | pipeSize: Int = 1024, 237 | ioDispatcherId: String = "blocking-process-io-dispatcher" 238 | ): Props = Props(classOf[InputStreamSource], is, receiver, pipeSize) 239 | .withDispatcher(ioDispatcherId) 240 | } 241 | 242 | /** 243 | * A source of data that simply forwards on to the receiver. 244 | */ 245 | class ForwardingSource(receiver: ActorRef) extends Source(receiver) { 246 | def receive = { 247 | case bytes: ByteString => 248 | val origSender = sender() 249 | receiver ! bytes 250 | context.become { 251 | case Ack => 252 | origSender ! Ack 253 | context.unbecome() 254 | case Done => context.stop(self) 255 | } 256 | case Done => context.stop(self) 257 | } 258 | } 259 | 260 | object ForwardingSource { 261 | def props( 262 | receiver: ActorRef, 263 | ioDispatcherId: String = "blocking-process-io-dispatcher" 264 | ): Props = Props(classOf[ForwardingSource], receiver) 265 | .withDispatcher(ioDispatcherId) 266 | } 267 | 268 | 269 | /** 270 | * Forwards messages on to a Source in a blocking manner conforming to the JDK OutputStream. 271 | */ 272 | class SinkStream(val source: ActorRef, timeout: FiniteDuration) extends OutputStream { 273 | implicit val akkaTimeout = new Timeout(timeout) 274 | 275 | var isClosed = new AtomicBoolean(false) 276 | 277 | override def close(): Unit = if (isClosed.compareAndSet(false, true)) source ! Done 278 | 279 | override def write(b: Int): Unit = { 280 | try { 281 | Await.result(source ? ByteString(b), timeout) 282 | } catch { 283 | case e: RuntimeException => 284 | throw new IOException("While writing source stream", e) 285 | } 286 | } 287 | 288 | override def write(bytes: Array[Byte]): Unit = { 289 | try { 290 | Await.result(source ? ByteString.fromArray(bytes), timeout) 291 | } catch { 292 | case e: RuntimeException => 293 | throw new IOException("While writing to the source. Closing stream.", e) 294 | } 295 | } 296 | } 297 | 298 | /** 299 | * Reads from a sink in a blocking manner conforming to the JDK InputStream 300 | */ 301 | class SourceStream(val sink: ActorRef, timeout: FiniteDuration) extends InputStream { 302 | implicit val akkaTimeout = new Timeout(timeout) 303 | 304 | var isClosed = new AtomicBoolean(false) 305 | 306 | override def close(): Unit = if (isClosed.compareAndSet(false, true)) sink ! Done 307 | 308 | private def getBytes(size: Int): ByteString = { 309 | try { 310 | val bytes = Await.result((sink ? Read).mapTo[ByteString], timeout: Duration) 311 | sink ! Ack 312 | bytes 313 | } catch { 314 | case e: RuntimeException => 315 | throw new IOException("Problem when reading bytes from the sink. 
Closing stream.", e) 316 | } 317 | } 318 | 319 | @tailrec 320 | override final def read(): Int = { 321 | val bs = getBytes(1) 322 | if (!bs.isEmpty) { 323 | bs(0) 324 | } else { 325 | Thread.sleep(100) 326 | read() 327 | } 328 | } 329 | 330 | @tailrec 331 | override final def read(bytes: Array[Byte]): Int = { 332 | val bs = getBytes(bytes.size) 333 | if (!bs.isEmpty) { 334 | bs.copyToArray(bytes, 0, bs.size) 335 | bs.size 336 | } else { 337 | Thread.sleep(100) 338 | read(bytes) 339 | } 340 | } 341 | } -------------------------------------------------------------------------------- /src/main/scala/com/typesafe/jse/Engine.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import java.util.concurrent.TimeUnit 4 | 5 | import akka.actor.{Terminated, ActorRef, Actor} 6 | import com.typesafe.config.Config 7 | import scala.concurrent.duration._ 8 | import akka.util.ByteString 9 | import scala.collection.immutable 10 | import com.typesafe.jse.Engine.JsExecutionResult 11 | 12 | /** 13 | * A JavaScript engine. JavaScript engines are intended to be short-lived and will terminate themselves on 14 | * completion of executing some JavaScript. 15 | */ 16 | abstract class Engine(stdArgs: immutable.Seq[String], stdEnvironment: Map[String, String]) extends Actor { 17 | 18 | /* 19 | * An engineIOHandler is a receiver that aggregates stdout and stderr from JavaScript execution. 20 | * Execution may also be timed out. The contract is that an exit value is always 21 | * only ever sent after all stdio has completed. 22 | */ 23 | def engineIOHandler( 24 | stdinSink: ActorRef, 25 | stdoutSource: ActorRef, 26 | stderrSource: ActorRef, 27 | receiver: ActorRef, 28 | ack: => Any, 29 | timeout: FiniteDuration, 30 | timeoutExitValue: Int 31 | ): Receive = { 32 | 33 | val errorBuilder = ByteString.newBuilder 34 | val outputBuilder = ByteString.newBuilder 35 | 36 | def handleStdioBytes(sender: ActorRef, bytes: ByteString): Unit = { 37 | sender match { 38 | case `stderrSource` => errorBuilder ++= bytes 39 | case `stdoutSource` => outputBuilder ++= bytes 40 | } 41 | sender ! ack 42 | } 43 | 44 | def sendExecutionResult(exitValue: Int): Unit = { 45 | receiver ! JsExecutionResult(exitValue, outputBuilder.result(), errorBuilder.result()) 46 | } 47 | 48 | context.watch(stdinSink) 49 | context.watch(stdoutSource) 50 | context.watch(stderrSource) 51 | 52 | val timeoutTimer = context.system.scheduler.scheduleOnce(timeout, self, timeoutExitValue)(context.dispatcher) 53 | 54 | var openStreams = 3 55 | 56 | def stopContext(): Unit = { 57 | timeoutTimer.cancel() 58 | context.stop(self) 59 | } 60 | 61 | { 62 | case bytes: ByteString => handleStdioBytes(sender(), bytes) 63 | case exitValue: Int => 64 | if (exitValue != timeoutExitValue) { 65 | context.become { 66 | case bytes: ByteString => handleStdioBytes(sender(), bytes) 67 | case Terminated(`stdinSink` | `stdoutSource` | `stderrSource`) => { 68 | openStreams -= 1 69 | if (openStreams == 0) { 70 | sendExecutionResult(exitValue) 71 | stopContext() 72 | } 73 | } 74 | } 75 | } else { 76 | stopContext() 77 | } 78 | case Terminated(`stdinSink` | `stdoutSource` | `stderrSource`) => 79 | openStreams -= 1 80 | if (openStreams == 0) { 81 | context.become { 82 | case exitValue: Int => 83 | sendExecutionResult(exitValue) 84 | stopContext() 85 | } 86 | } 87 | } 88 | } 89 | 90 | } 91 | 92 | object Engine { 93 | 94 | /** 95 | * Execute JS. Execution will result in a JsExecutionResult being replied to the sender. 
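 *
 * A sketch of asking one of the engine actors in this package to run a script (`engine` is an
 * assumed ActorRef to such an actor, and an implicit akka.util.Timeout is required for the ask):
 * {{{
 * (engine ? ExecuteJs(new File("some.js"), immutable.Seq("an-arg"), timeout = 1.minute)).mapTo[JsExecutionResult]
 * }}}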
96 | * @param source The source file to execute. 97 | * @param args The sequence of arguments to pass to the js source. 98 | * @param timeout The amount of time to wait for the js to execute. Recommend at least 1 minute given slow CI servers in particular. 99 | * @param timeoutExitValue The exit value to receive if the above timeout occurs. 100 | * @param environment A mapping of environment variables to use. 101 | */ 102 | case class ExecuteJs( 103 | source: java.io.File, 104 | args: immutable.Seq[String], 105 | timeout: FiniteDuration, 106 | timeoutExitValue: Int = Int.MinValue, 107 | environment: Map[String, String] = Map.empty 108 | ) 109 | 110 | /** 111 | * The response of JS execution in the cases where it has been aggregated. A non-zero exit value 112 | * indicates failure as per the convention of stdio processes. The output and error fields are 113 | * aggregated from any respective output and error streams from the process. 114 | */ 115 | case class JsExecutionResult(exitValue: Int, output: ByteString, error: ByteString) 116 | 117 | case object IsNode 118 | 119 | // Internal types 120 | 121 | private[jse] case object FinishProcessing 122 | 123 | /** 124 | * Get an "infinite" timeout for Akka's default scheduler. 125 | * 126 | * Of course, there's no such thing as an infinite timeout, so this value is the maximum timeout that the scheduler 127 | * will accept, which is equal to the maximum value of an integer multiplied by the tick duration. 128 | * 129 | * @param config The configuration to read the tick duration from. 130 | */ 131 | def infiniteSchedulerTimeout(config: Config): FiniteDuration = { 132 | val tickNanos = config.getDuration("akka.scheduler.tick-duration", TimeUnit.NANOSECONDS) 133 | 134 | // we subtract tickNanos here because of this bug: 135 | // https://github.com/akka/akka/issues/15598 136 | (tickNanos * Int.MaxValue - tickNanos).nanos 137 | } 138 | 139 | } 140 | -------------------------------------------------------------------------------- /src/main/scala/com/typesafe/jse/JavaxEngine.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import akka.actor._ 4 | import akka.contrib.process.StreamEvents.Ack 5 | import akka.contrib.process._ 6 | import java.io._ 7 | import javax.script._ 8 | import scala.collection.immutable 9 | import scala.concurrent.blocking 10 | import scala.concurrent.duration._ 11 | import scala.util.Try 12 | 13 | import com.typesafe.jse.Engine.{ExecuteJs,IsNode} 14 | 15 | /** 16 | * Declares an in-JVM JavaScript engine. The actor is expected to be associated with a blocking dispatcher as the 17 | * javax.script API is synchronous. 
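 *
 * A minimal usage sketch, as exercised by JavaxEngineSpec (`system`, `file` and `timeout` are
 * assumed to be in scope; the default engine name "js" typically resolves to Nashorn on JDK 8):
 * {{{
 * val engine = system.actorOf(JavaxEngine.props(engineName = "js"))
 * val result = (engine ? Engine.ExecuteJs(file, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult]
 * }}}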
18 | */ 19 | class JavaxEngine( 20 | stdArgs: immutable.Seq[String], 21 | ioDispatcherId: String, 22 | engineName: String 23 | ) extends Engine(stdArgs, Map.empty) { 24 | 25 | val StdioTimeout = Engine.infiniteSchedulerTimeout(context.system.settings.config) 26 | 27 | def receive = { 28 | case ExecuteJs(source, args, timeout, timeoutExitValue, environment) => 29 | val requester = sender() 30 | 31 | val stdinSink = context.actorOf(BufferingSink.props(ioDispatcherId = ioDispatcherId), "stdin") 32 | val stdinIs = new SourceStream(stdinSink, StdioTimeout) 33 | val stdoutSource = context.actorOf(ForwardingSource.props(self, ioDispatcherId = ioDispatcherId), "stdout") 34 | val stdoutOs = new SinkStream(stdoutSource, StdioTimeout) 35 | val stderrSource = context.actorOf(ForwardingSource.props(self, ioDispatcherId = ioDispatcherId), "stderr") 36 | val stderrOs = new SinkStream(stderrSource, StdioTimeout) 37 | 38 | try { 39 | context.become(engineIOHandler( 40 | stdinSink, stdoutSource, stderrSource, 41 | requester, 42 | Ack, 43 | timeout, timeoutExitValue 44 | )) 45 | 46 | context.actorOf(JavaxEngineShell.props( 47 | source.getCanonicalFile, 48 | stdArgs ++ args, 49 | stdinIs, stdoutOs, stderrOs, 50 | engineName 51 | ), "javax-engine-shell") ! JavaxEngineShell.Execute 52 | 53 | } finally { 54 | // We don't need stdin 55 | blocking(Try(stdinIs.close())) 56 | } 57 | case IsNode => 58 | sender ! false 59 | } 60 | } 61 | 62 | object JavaxEngine { 63 | 64 | /** 65 | * Creates the Props of a JavaxEngine 66 | * 67 | * @param stdArgs 68 | * @param ioDispatcherId 69 | * @param engineName The name of the engine to load. Defaults to "js". 70 | * @return 71 | */ 72 | def props( 73 | stdArgs: immutable.Seq[String] = Nil, 74 | ioDispatcherId: String = "blocking-process-io-dispatcher", 75 | engineName: String = "js") = 76 | Props(new JavaxEngine(stdArgs, ioDispatcherId, engineName)).withDispatcher(ioDispatcherId) 77 | 78 | } 79 | 80 | private[jse] class JavaxEngineShell( 81 | script: File, 82 | args: immutable.Seq[String], 83 | stdinIs: InputStream, 84 | stdoutOs: OutputStream, 85 | stderrOs: OutputStream, 86 | engineName: String 87 | ) extends Actor with ActorLogging { 88 | 89 | import JavaxEngineShell._ 90 | 91 | val engine = new ScriptEngineManager(null).getEngineByName(engineName) 92 | 93 | if (engine == null) throw new Exception(s"Javascript engine '$engineName' not found") 94 | 95 | def receive = { 96 | 97 | case Execute => 98 | 99 | val scriptReader = new FileReader(script) 100 | 101 | val reader = new InputStreamReader(stdinIs) 102 | val writer = new PrintWriter(stdoutOs, true) 103 | val errorWriter = new PrintWriter(stderrOs, true) 104 | 105 | val context = { 106 | val c: ScriptContext = new SimpleScriptContext() 107 | c.setReader(reader) 108 | c.setWriter(writer) 109 | c.setErrorWriter(errorWriter) 110 | // If you create a new ScriptContext object and use it to evaluate scripts, then 111 | // ENGINE_SCOPE of that context has to be associated with a nashorn Global object somehow. 112 | // See https://wiki.openjdk.java.net/display/Nashorn/Nashorn+jsr223+engine+notes 113 | c.setBindings(engine.getContext().getBindings(ScriptContext.ENGINE_SCOPE), ScriptContext.ENGINE_SCOPE) 114 | c.setAttribute("arguments", args.toArray, ScriptContext.ENGINE_SCOPE) 115 | c.setAttribute(ScriptEngine.FILENAME, script.getName, ScriptContext.ENGINE_SCOPE) 116 | c 117 | } 118 | 119 | try { 120 | blocking(engine.eval(scriptReader, context)) 121 | sender() ! 
0 122 | } catch { 123 | case e: ScriptException => 124 | e.printStackTrace(new PrintStream(stderrOs)) 125 | sender() ! 1 126 | } finally { 127 | // Will close the underlying stdoutOs and stderrOs 128 | Try(writer.close()) 129 | Try(errorWriter.close()) 130 | } 131 | 132 | } 133 | 134 | } 135 | 136 | private[jse] object JavaxEngineShell { 137 | 138 | def props( 139 | source: File, 140 | args: immutable.Seq[String], 141 | stdinIs: InputStream, 142 | stdoutOs: OutputStream, 143 | stderrOs: OutputStream, 144 | engineName: String) = 145 | Props(new JavaxEngineShell(source, args, stdinIs, stdoutOs, stderrOs, engineName)) 146 | 147 | case object Execute 148 | 149 | } 150 | -------------------------------------------------------------------------------- /src/main/scala/com/typesafe/jse/LocalEngine.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import akka.actor._ 4 | import akka.contrib.process.BlockingProcess 5 | import akka.contrib.process.BlockingProcess.Started 6 | import akka.contrib.process.StreamEvents.Ack 7 | import com.typesafe.jse.Engine.{ExecuteJs,IsNode} 8 | import java.io.File 9 | import scala.collection.immutable 10 | 11 | /** 12 | * Provides an Actor on behalf of a JavaScript Engine. Engines are represented as operating system processes and are 13 | * communicated with by launching with arguments and returning a status code. 14 | * @param stdArgs a sequence of standard command line arguments used to launch the engine from the command line. 15 | * @param stdEnvironment a sequence of standard module paths. 16 | */ 17 | class LocalEngine(stdArgs: immutable.Seq[String], stdEnvironment: Map[String, String], isNode: Boolean) extends Engine(stdArgs, stdEnvironment) { 18 | 19 | def receive = { 20 | case ExecuteJs(f, args, timeout, timeoutExitValue, environment) => 21 | val requester = sender() 22 | 23 | context.actorOf(BlockingProcess.props( 24 | (stdArgs :+ f.getCanonicalPath) ++ args, 25 | stdEnvironment ++ environment, self), 26 | "process" 27 | ) 28 | context.become { 29 | case Started(i, o, e) => 30 | context.become(engineIOHandler(i, o, e, requester, Ack, timeout, timeoutExitValue)) 31 | i ! PoisonPill // We don't need an input stream so close it out straight away. 32 | } 33 | case IsNode => 34 | sender() ! isNode 35 | } 36 | } 37 | 38 | 39 | /** 40 | * Local engine utilities. 41 | */ 42 | object LocalEngine { 43 | 44 | def path(path: Option[File], command: String): String = path.fold(command)(_.getCanonicalPath) 45 | 46 | val nodePathDelim = if (System.getProperty("os.name").toLowerCase.contains("win")) ";" else ":" 47 | 48 | def nodePathEnv(modulePaths: immutable.Seq[String]): Map[String, String] = { 49 | val nodePath = modulePaths.mkString(nodePathDelim) 50 | val newNodePath = Option(System.getenv("NODE_PATH")).fold(nodePath)(_ + nodePathDelim + nodePath) 51 | if (newNodePath.isEmpty) Map.empty[String, String] else Map("NODE_PATH" -> newNodePath) 52 | } 53 | } 54 | 55 | /** 56 | * Used to manage a local instance of Node.js with CommonJs support. common-node is assumed to be on the path. 57 | */ 58 | object CommonNode { 59 | 60 | import LocalEngine._ 61 | 62 | def props(command: Option[File] = None, stdArgs: immutable.Seq[String] = Nil, stdEnvironment: Map[String, String] = Map.empty): Props = { 63 | val args = Seq(path(command, "common-node")) ++ stdArgs 64 | Props(classOf[LocalEngine], args, stdEnvironment, true) 65 | } 66 | } 67 | 68 | /** 69 | * Used to manage a local instance of Node.js. 
Node is assumed to be on the path. 70 | */ 71 | object Node { 72 | 73 | import LocalEngine._ 74 | 75 | def props(command: Option[File] = None, stdArgs: immutable.Seq[String] = Nil, stdEnvironment: Map[String, String] = Map.empty): Props = { 76 | val args = Seq(path(command, "node")) ++ stdArgs 77 | Props(classOf[LocalEngine], args, stdEnvironment, true) 78 | } 79 | } 80 | 81 | /** 82 | * Used to manage a local instance of PhantomJS. PhantomJS is assumed to be on the path. 83 | */ 84 | object PhantomJs { 85 | 86 | import LocalEngine._ 87 | 88 | def props(command: Option[File] = None, stdArgs: immutable.Seq[String] = Nil, stdEnvironment: Map[String, String] = Map.empty): Props = { 89 | val args = Seq(path(command, "phantomjs")) ++ stdArgs 90 | Props(classOf[LocalEngine], args, stdEnvironment, false) 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/main/scala/com/typesafe/jse/Rhino.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import akka.actor._ 4 | import akka.contrib.process._ 5 | import akka.contrib.process.StreamEvents.Ack 6 | import java.io._ 7 | import java.net.URI 8 | import scala.collection.immutable 9 | import scala.concurrent.blocking 10 | import scala.concurrent.duration._ 11 | import scala.util.Try 12 | 13 | import org.mozilla.javascript._ 14 | import org.mozilla.javascript.commonjs.module.RequireBuilder 15 | import org.mozilla.javascript.commonjs.module.provider.{UrlModuleSourceProvider, SoftCachingModuleScriptProvider} 16 | import org.mozilla.javascript.tools.shell.Global 17 | 18 | import com.typesafe.jse.Engine.{ExecuteJs,IsNode} 19 | 20 | /** 21 | * Declares an in-JVM Rhino based JavaScript engine. The actor is expected to be 22 | * associated with a blocking dispatcher as calls to Rhino and its use of Jdk streams 23 | * are blocking. 24 | */ 25 | class Rhino( 26 | stdArgs: immutable.Seq[String], 27 | stdModulePaths: immutable.Seq[String], 28 | ioDispatcherId: String 29 | ) extends Engine(stdArgs, Map.empty) { 30 | 31 | // The main objective of this actor implementation is to establish actors for both the execution of 32 | // Rhino code (Rhino's execution is blocking), and actors for the source of stdio (which is also blocking). 33 | // This actor is then a conduit of the IO as a result of execution. 34 | 35 | val StdioTimeout = Engine.infiniteSchedulerTimeout(context.system.settings.config) 36 | 37 | def receive = { 38 | case ExecuteJs(source, args, timeout, timeoutExitValue, environment) => 39 | val requester = sender() 40 | 41 | val stdinSink = context.actorOf(BufferingSink.props(ioDispatcherId = ioDispatcherId), "stdin") 42 | val stdinIs = new SourceStream(stdinSink, StdioTimeout) 43 | val stdoutSource = context.actorOf(ForwardingSource.props(self, ioDispatcherId = ioDispatcherId), "stdout") 44 | val stdoutOs = new SinkStream(stdoutSource, StdioTimeout) 45 | val stderrSource = context.actorOf(ForwardingSource.props(self, ioDispatcherId = ioDispatcherId), "stderr") 46 | val stderrOs = new SinkStream(stderrSource, StdioTimeout) 47 | 48 | try { 49 | context.become(engineIOHandler( 50 | stdinSink, stdoutSource, stderrSource, 51 | requester, 52 | Ack, 53 | timeout, timeoutExitValue 54 | )) 55 | 56 | context.actorOf(RhinoShell.props( 57 | source.getCanonicalFile, 58 | stdArgs ++ args, 59 | stdModulePaths, 60 | stdinIs, stdoutOs, stderrOs 61 | ), "rhino-shell") ! 
RhinoShell.Execute 62 | 63 | } finally { 64 | // We don't need stdin 65 | blocking(Try(stdinIs.close())) 66 | } 67 | case IsNode => 68 | sender() ! false 69 | } 70 | } 71 | 72 | object Rhino { 73 | /** 74 | * Give me a Rhino props. 75 | */ 76 | def props( 77 | stdArgs: immutable.Seq[String] = Nil, 78 | stdModulePaths: immutable.Seq[String] = Nil, 79 | ioDispatcherId: String = "blocking-process-io-dispatcher" 80 | ): Props = { 81 | Props(classOf[Rhino], stdArgs, stdModulePaths, ioDispatcherId) 82 | .withDispatcher(ioDispatcherId) 83 | } 84 | 85 | } 86 | 87 | 88 | /** 89 | * Manage the execution of the Rhino shell setting up its environment, running the main entry point 90 | * and sending its parent the exit code when it can see that the stdio sources have closed. 91 | */ 92 | private[jse] class RhinoShell( 93 | script: File, 94 | args: immutable.Seq[String], 95 | modulePaths: immutable.Seq[String], 96 | stdinIs: InputStream, 97 | stdoutOs: OutputStream, 98 | stderrOs: OutputStream 99 | ) extends Actor with ActorLogging { 100 | 101 | import RhinoShell._ 102 | 103 | // Some doc to help understanding this code 104 | // https://groups.google.com/d/msg/envjs/Tnvpvvzu_9Q/F-g9MoJ8nNgJ 105 | // https://groups.google.com/forum/#!msg/mozilla-rhino/HCMh_lAKiI4/P1MA3sFsNKQJ 106 | // http://stackoverflow.com/questions/11080037/java-7-rhino-1-7r3-support-for-commonjs-modules (on why we can't use javax API) 107 | 108 | val requireBuilder = { 109 | import scala.collection.JavaConversions 110 | val paths = script.getParentFile.toURI +: modulePaths.map(new URI(_)) 111 | val sourceProvider = new UrlModuleSourceProvider(JavaConversions.asJavaIterable(paths), null) 112 | val scriptProvider = new SoftCachingModuleScriptProvider(sourceProvider) 113 | new RequireBuilder().setModuleScriptProvider(scriptProvider) 114 | } 115 | 116 | def receive = { 117 | 118 | case Execute => 119 | 120 | val ctx = Context.enter() 121 | 122 | try { 123 | 124 | // Create a global object so that we have Rhino shell functions in scope (e.g. load, print, ...) 125 | val global = { 126 | val g = new Global() 127 | g.init(ctx) 128 | g.setIn(stdinIs) 129 | g.setErr(new PrintStream(stderrOs)) 130 | g.setOut(new PrintStream(stdoutOs)) 131 | g 132 | } 133 | 134 | // Prepare a scope by passing the arguments and adding CommonJS support 135 | val scope = { 136 | val s = ctx.initStandardObjects(global, false) 137 | s.defineProperty("arguments", args.toArray, ScriptableObject.READONLY) 138 | val require = requireBuilder.createRequire(ctx, s) 139 | require.install(s) 140 | s 141 | } 142 | 143 | // Evaluate 144 | val reader = new FileReader(script) 145 | blocking(ctx.evaluateReader(scope, reader, script.getName, 0, null)) 146 | sender() ! 0 147 | 148 | } catch { 149 | 150 | case e: RhinoException => 151 | stderrOs.write(e.getLocalizedMessage.getBytes("UTF-8")) 152 | stderrOs.write(e.getScriptStackTrace.getBytes("UTF-8")) 153 | sender() ! 1 154 | 155 | case t: Exception => 156 | t.printStackTrace(new PrintStream(stderrOs)) 157 | sender() ! 
1 158 | 159 | } finally { 160 | 161 | Try(stdoutOs.close()) 162 | Try(stderrOs.close()) 163 | Context.exit() 164 | } 165 | 166 | } 167 | 168 | } 169 | 170 | private[jse] object RhinoShell { 171 | def props( 172 | moduleBase: File, 173 | args: immutable.Seq[String], 174 | modulePaths: immutable.Seq[String], 175 | stdinIs: InputStream, 176 | stdoutOs: OutputStream, 177 | stderrOs: OutputStream 178 | ): Props = { 179 | Props(classOf[RhinoShell], moduleBase, args, modulePaths, stdinIs, stdoutOs, stderrOs) 180 | } 181 | 182 | case object Execute 183 | 184 | } 185 | -------------------------------------------------------------------------------- /src/main/scala/com/typesafe/jse/Trireme.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import java.io._ 4 | import java.util.concurrent.{TimeUnit, AbstractExecutorService} 5 | 6 | import scala.concurrent.blocking 7 | import scala.collection.immutable 8 | import scala.collection.JavaConverters._ 9 | import scala.util.Try 10 | import scala.concurrent.duration._ 11 | 12 | import akka.actor._ 13 | import akka.contrib.process.StreamEvents.Ack 14 | import akka.contrib.process._ 15 | import akka.pattern.AskTimeoutException 16 | import com.typesafe.jse.Engine.{ExecuteJs,IsNode} 17 | import io.apigee.trireme.core._ 18 | import io.apigee.trireme.kernel.streams.{NoCloseOutputStream, NoCloseInputStream} 19 | import org.mozilla.javascript.RhinoException 20 | 21 | /** 22 | * Declares an in-JVM Rhino based JavaScript engine supporting the Node API. 23 | * The Trireme project provides this capability. 24 | * The actor is expected to be associated with a blocking dispatcher as its use of Jdk streams are blocking. 25 | */ 26 | class Trireme( 27 | stdArgs: immutable.Seq[String], 28 | stdEnvironment: Map[String, String], 29 | ioDispatcherId: String 30 | ) extends Engine(stdArgs, stdEnvironment) { 31 | 32 | // The main objective of this actor implementation is to establish actors for both the execution of 33 | // Trireme code (Trireme's execution is blocking), and actors for the source of stdio (which is also blocking). 34 | // This actor is then a conduit of the IO as a result of execution. 35 | 36 | val StdioTimeout = Engine.infiniteSchedulerTimeout(context.system.settings.config) 37 | 38 | def receive = { 39 | case ExecuteJs(source, args, timeout, timeoutExitValue, environment) => 40 | val requester = sender() 41 | 42 | val stdinSink = context.actorOf(BufferingSink.props(ioDispatcherId = ioDispatcherId), "stdin") 43 | val stdinIs = new SourceStream(stdinSink, StdioTimeout) 44 | val stdoutSource = context.actorOf(ForwardingSource.props(self, ioDispatcherId = ioDispatcherId), "stdout") 45 | val stdoutOs = new SinkStream(stdoutSource, StdioTimeout) 46 | val stderrSource = context.actorOf(ForwardingSource.props(self, ioDispatcherId = ioDispatcherId), "stderr") 47 | val stderrOs = new SinkStream(stderrSource, StdioTimeout) 48 | 49 | try { 50 | context.become(engineIOHandler( 51 | stdinSink, stdoutSource, stderrSource, 52 | requester, 53 | Ack, 54 | timeout, timeoutExitValue 55 | )) 56 | 57 | context.actorOf(TriremeShell.props( 58 | source.getCanonicalFile, 59 | stdArgs ++ args, 60 | stdEnvironment ++ environment, 61 | ioDispatcherId, 62 | stdinIs, stdoutOs, stderrOs 63 | ), "trireme-shell") ! TriremeShell.Execute 64 | 65 | } finally { 66 | // We don't need stdin 67 | blocking(Try(stdinIs.close())) 68 | } 69 | case IsNode => 70 | sender() ! 
false 71 | } 72 | } 73 | 74 | object Trireme { 75 | /** 76 | * Give me a Trireme props. 77 | */ 78 | def props( 79 | stdArgs: immutable.Seq[String] = Nil, 80 | stdEnvironment: Map[String, String] = Map.empty, 81 | ioDispatcherId: String = "blocking-process-io-dispatcher" 82 | ): Props = { 83 | Props(classOf[Trireme], stdArgs, stdEnvironment, ioDispatcherId) 84 | .withDispatcher(ioDispatcherId) 85 | } 86 | 87 | } 88 | 89 | 90 | /** 91 | * Manage the execution of the Trireme shell setting up its environment, running the main entry point 92 | * and sending its parent the exit code when we're done. 93 | */ 94 | private[jse] class TriremeShell( 95 | source: File, 96 | args: immutable.Seq[String], 97 | environment: Map[String, String], 98 | ioDispatcherId: String, 99 | stdinIs: InputStream, 100 | stdoutOs: OutputStream, 101 | stderrOs: OutputStream 102 | ) extends Actor with ActorLogging { 103 | 104 | import TriremeShell._ 105 | 106 | val AwaitTerminationTimeout = 1.second 107 | 108 | val blockingDispatcher = context.system.dispatchers.lookup(ioDispatcherId) 109 | val executorService = new AbstractExecutorService { 110 | def shutdown() = throw new UnsupportedOperationException 111 | def isTerminated = false 112 | def awaitTermination(l: Long, timeUnit: TimeUnit) = throw new UnsupportedOperationException 113 | def shutdownNow() = throw new UnsupportedOperationException 114 | def isShutdown = false 115 | def execute(runnable: Runnable) = blockingDispatcher.execute(runnable) 116 | } 117 | 118 | val env = (sys.env ++ environment).asJava 119 | val sandbox = new Sandbox() 120 | sandbox.setAsyncThreadPool(executorService) 121 | val nodeEnv = new NodeEnvironment() 122 | nodeEnv.setSandbox(sandbox) 123 | sandbox.setStdin(new NoCloseInputStream(stdinIs)) 124 | sandbox.setStdout(new NoCloseOutputStream(stdoutOs)) 125 | sandbox.setStderr(new NoCloseOutputStream(stderrOs)) 126 | 127 | def receive = { 128 | case Execute => 129 | 130 | if (log.isDebugEnabled) { 131 | log.debug("Invoking Trireme with {}", args) 132 | } 133 | 134 | val script = nodeEnv.createScript(source.getName, source, args.toArray) 135 | script.setEnvironment(env) 136 | 137 | val senderSel = sender().path 138 | val senderSys = context.system 139 | script.execute.setListener(new ScriptStatusListener { 140 | def onComplete(script: NodeScript, status: ScriptStatus): Unit = { 141 | if (status.hasCause) { 142 | try { 143 | status.getCause match { 144 | case e: RhinoException => 145 | stderrOs.write(e.getLocalizedMessage.getBytes("UTF-8")) 146 | stderrOs.write(e.getScriptStackTrace.getBytes("UTF-8")) 147 | case t => 148 | t.printStackTrace(new PrintStream(stderrOs)) 149 | } 150 | } catch { 151 | case e: Throwable => 152 | if (e.isInstanceOf[AskTimeoutException] || status.getCause.isInstanceOf[AskTimeoutException]) { 153 | log.error(e, "Received a timeout probably because stdio sinks and sources were closed early given a timeout waiting for the JS to execute. Increase the timeout.") 154 | } else { 155 | log.error(status.getCause, "Problem completing Trireme. Throwing exception, meanwhile here's the Trireme problem") 156 | throw e 157 | } 158 | } 159 | } 160 | // The script holds an NIO selector that needs to be closed, otherwise it leaks. 161 | script.close() 162 | stdoutOs.close() 163 | stderrOs.close() 164 | senderSys.actorSelection(senderSel) ! 
status.getExitCode 165 | } 166 | }) 167 | } 168 | 169 | override def postStop() = { 170 | // The script pool is a cached thread pool so it should shut itself down, but it's better to clean up immediately, 171 | // and this means that our tests work. 172 | nodeEnv.getScriptPool.shutdown() 173 | nodeEnv.getScriptPool.awaitTermination(AwaitTerminationTimeout.toMillis, TimeUnit.MILLISECONDS) 174 | } 175 | } 176 | 177 | private[jse] object TriremeShell { 178 | def props( 179 | moduleBase: File, 180 | args: immutable.Seq[String], 181 | environment: Map[String, String], 182 | ioDispatcherId: String = "blocking-process-io-dispatcher", 183 | stdinIs: InputStream, 184 | stdoutOs: OutputStream, 185 | stderrOs: OutputStream 186 | ): Props = { 187 | Props(classOf[TriremeShell], moduleBase, args, environment, ioDispatcherId, stdinIs, stdoutOs, stderrOs) 188 | } 189 | 190 | case object Execute 191 | 192 | } -------------------------------------------------------------------------------- /src/test/resources/application.conf: -------------------------------------------------------------------------------- 1 | blocking-process-io-dispatcher { 2 | type = Dispatcher 3 | executor = "thread-pool-executor" 4 | thread-pool-executor { 5 | core-pool-size-min = 3 6 | core-pool-size-factor = 1.0 7 | core-pool-size-max = 100 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/test/resources/com/typesafe/jse/hello.js: -------------------------------------------------------------------------------- 1 | // A simple CommonJS module 2 | exports.sayHello = function (you) { 3 | return "Hello " + you; 4 | }; 5 | 6 | -------------------------------------------------------------------------------- /src/test/resources/com/typesafe/jse/test-javax.js: -------------------------------------------------------------------------------- 1 | print(arguments[0]); -------------------------------------------------------------------------------- /src/test/resources/com/typesafe/jse/test-node.js: -------------------------------------------------------------------------------- 1 | var console = require("console"); 2 | 3 | console.log(process.argv[2]); 4 | -------------------------------------------------------------------------------- /src/test/resources/com/typesafe/jse/test-rhino.js: -------------------------------------------------------------------------------- 1 | // Check that we have Rhino shell methods in scope 2 | readFile("src/test/resources/com/typesafe/jse/hello.js"); 3 | 4 | // Check CommonJS support 5 | var sayHello = require("hello").sayHello; 6 | 7 | print(sayHello(arguments[0])); -------------------------------------------------------------------------------- /src/test/scala-2.10/com/typesafe/jse/AkkaCompat.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import akka.actor.ActorSystem 4 | 5 | object AkkaCompat { 6 | def terminate(system: ActorSystem): Unit = { 7 | system.shutdown() 8 | system.awaitTermination() 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/test/scala-2.11/com/typesafe/jse/AkkaCompat.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import akka.actor.ActorSystem 4 | 5 | object AkkaCompat { 6 | def terminate(system: ActorSystem): Unit = { 7 | system.shutdown() 8 | system.awaitTermination() 9 | } 10 | } 11 | 
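The per-Scala-version AkkaCompat shims above (and the scala-2.12 variant that follows) exist because actor system shutdown changed across Akka releases: older Akka exposes shutdown()/awaitTermination(), while newer Akka replaces them with terminate(). The project's build.sbt is not reproduced here, so the following is only an illustrative sketch of how version-specific source directories are typically wired into a cross-built sbt project; recent sbt releases also pick up src/test/scala-<binary version> directories automatically, in which case no explicit setting is needed.

    // Hypothetical build.sbt fragment (not taken from this repository): include the
    // test sources under src/test/scala-<Scala binary version>, e.g. src/test/scala-2.11.
    unmanagedSourceDirectories in Test +=
      (sourceDirectory in Test).value / s"scala-${scalaBinaryVersion.value}"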
-------------------------------------------------------------------------------- /src/test/scala-2.12/com/typesafe/jse/AkkaCompat.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import akka.actor.ActorSystem 4 | import scala.concurrent.Await 5 | import scala.concurrent.duration._ 6 | 7 | object AkkaCompat { 8 | def terminate(system: ActorSystem): Unit = { 9 | Await.ready(system.terminate(), 10.seconds) 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/test/scala/com/typesafe/jse/JavaxEngineSpec.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import org.specs2.mutable.Specification 4 | import com.typesafe.jse.Engine.JsExecutionResult 5 | import java.io.File 6 | import scala.collection.immutable 7 | import akka.pattern.ask 8 | import org.specs2.time.NoTimeConversions 9 | import akka.util.Timeout 10 | import akka.actor.{ActorRef, ActorSystem} 11 | import scala.concurrent.Await 12 | import java.util.concurrent.TimeUnit 13 | 14 | class JavaxEngineSpec extends Specification { 15 | 16 | def withEngine[T](block: ActorRef => T): T = { 17 | val system = ActorSystem() 18 | val engine = system.actorOf(JavaxEngine.props(engineName = "js")) 19 | try block(engine) finally { 20 | AkkaCompat.terminate(system) 21 | } 22 | } 23 | 24 | "A JavaxEngine" should { 25 | 26 | "execute some javascript by passing in a string arg and comparing its return value" in { 27 | withEngine { 28 | engine => 29 | val f = new File(classOf[JavaxEngineSpec].getResource("test-javax.js").toURI) 30 | implicit val timeout = Timeout(5000L, TimeUnit.MILLISECONDS) 31 | 32 | val futureResult = (engine ? Engine.ExecuteJs(f, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult] 33 | val result = Await.result(futureResult, timeout.duration) 34 | new String(result.error.toArray, "UTF-8").trim must_== "" 35 | new String(result.output.toArray, "UTF-8").trim must_== "999" 36 | } 37 | } 38 | 39 | "execute some javascript by passing in a string arg and comparing its return value expecting an error" in { 40 | withEngine { 41 | engine => 42 | val f = new File(classOf[JavaxEngineSpec].getResource("test-node.js").toURI) 43 | implicit val timeout = Timeout(5000L, TimeUnit.MILLISECONDS) 44 | 45 | val futureResult = (engine ? 
Engine.ExecuteJs(f, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult] 46 | val result = Await.result(futureResult, timeout.duration) 47 | new String(result.output.toArray, "UTF-8").trim must_== "" 48 | new String(result.error.toArray, "UTF-8").trim must contain("""ReferenceError: "require" is not defined""") 49 | } 50 | } 51 | } 52 | 53 | } 54 | -------------------------------------------------------------------------------- /src/test/scala/com/typesafe/jse/RhinoSpec.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import org.specs2.mutable.Specification 4 | import com.typesafe.jse.Engine.JsExecutionResult 5 | import java.io.File 6 | import scala.collection.immutable 7 | import akka.pattern.ask 8 | import akka.util.Timeout 9 | import akka.actor.{ActorRef, ActorSystem} 10 | import scala.concurrent.Await 11 | import java.util.concurrent.TimeUnit 12 | 13 | class RhinoSpec extends Specification { 14 | 15 | //sequential 16 | 17 | def withEngine[T](block: ActorRef => T): T = { 18 | val system = ActorSystem() 19 | val engine = system.actorOf(Rhino.props(), "engine") 20 | try { 21 | block(engine) 22 | } finally { 23 | AkkaCompat.terminate(system) 24 | } 25 | } 26 | 27 | "The Rhino engine" should { 28 | 29 | "execute some javascript by passing in a string arg and comparing its return value" in { 30 | withEngine { 31 | engine => 32 | val f = new File(classOf[RhinoSpec].getResource("test-rhino.js").toURI) 33 | implicit val timeout = Timeout(5000L, TimeUnit.MILLISECONDS) 34 | 35 | val futureResult = (engine ? Engine.ExecuteJs(f, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult] 36 | val result = Await.result(futureResult, timeout.duration) 37 | 38 | new String(result.error.toArray, "UTF-8").trim mustEqual "" 39 | new String(result.output.toArray, "UTF-8").trim mustEqual "Hello 999" 40 | } 41 | } 42 | 43 | "execute some javascript by passing in a string arg and comparing its return value expecting an error" in { 44 | withEngine { 45 | engine => 46 | val f = new File(classOf[RhinoSpec].getResource("test-node.js").toURI) 47 | implicit val timeout = Timeout(5000L, TimeUnit.MILLISECONDS) 48 | 49 | val futureResult = (engine ? Engine.ExecuteJs(f, immutable.Seq("888"), timeout.duration)).mapTo[JsExecutionResult] 50 | val result = Await.result(futureResult, timeout.duration) 51 | 52 | new String(result.output.toArray, "UTF-8").trim mustEqual "" 53 | new String(result.error.toArray, "UTF-8").trim must contain("""Error: Module "console" not found""") 54 | } 55 | } 56 | 57 | "not be reported as node" in { 58 | withEngine { 59 | engine => 60 | implicit val timeout = Timeout(1000L, TimeUnit.MILLISECONDS) 61 | 62 | val futureResult = (engine ? Engine.IsNode).mapTo[Boolean] 63 | val result = Await.result(futureResult, timeout.duration) 64 | 65 | result mustEqual false 66 | } 67 | } 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /src/test/scala/com/typesafe/jse/TestActorSystem.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import akka.testkit.{ImplicitSender, TestKit} 4 | import akka.actor.ActorSystem 5 | import org.specs2.mutable.After 6 | 7 | /** 8 | * A utility that assists with Specs2 testing for Akka. Sets up a test actor system and then 9 | * brings it down after the test. Each test therefore gets its own actor system. 
10 | */ 11 | abstract class TestActorSystem 12 | extends TestKit(ActorSystem()) 13 | with After 14 | with ImplicitSender { 15 | 16 | def after = AkkaCompat.terminate(system) 17 | } -------------------------------------------------------------------------------- /src/test/scala/com/typesafe/jse/TriremeSpec.scala: -------------------------------------------------------------------------------- 1 | package com.typesafe.jse 2 | 3 | import org.specs2.mutable.Specification 4 | import com.typesafe.jse.Engine.JsExecutionResult 5 | import java.io.File 6 | import scala.collection.immutable 7 | import akka.pattern.ask 8 | import org.specs2.time.NoTimeConversions 9 | import akka.util.Timeout 10 | import akka.actor.{ActorRef, ActorSystem} 11 | import scala.concurrent.Await 12 | import java.util.concurrent.TimeUnit 13 | 14 | class TriremeSpec extends Specification { 15 | 16 | sequential 17 | 18 | def withActorSystem[T](block: ActorSystem => T): T = { 19 | val system = ActorSystem() 20 | try block(system) finally { 21 | AkkaCompat.terminate(system) 22 | } 23 | } 24 | 25 | def withEngine[T](block: ActorRef => T): T = withActorSystem { system => 26 | val engine = system.actorOf(Trireme.props(), "trireme-spec") 27 | block(engine) 28 | } 29 | 30 | "The Trireme engine" should { 31 | "execute some javascript by passing in a string arg and comparing its return value" in { 32 | withEngine { 33 | engine => 34 | val f = new File(classOf[TriremeSpec].getResource("test-node.js").toURI) 35 | implicit val timeout = Timeout(5000L, TimeUnit.MILLISECONDS) 36 | 37 | val futureResult = (engine ? Engine.ExecuteJs(f, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult] 38 | val result = Await.result(futureResult, timeout.duration) 39 | new String(result.output.toArray, "UTF-8").trim must_== "999" 40 | new String(result.error.toArray, "UTF-8").trim must_== "" 41 | } 42 | } 43 | 44 | "execute some javascript by passing in a string arg and comparing its return value expecting an error" in { 45 | withEngine { 46 | engine => 47 | val f = new File(classOf[TriremeSpec].getResource("test-rhino.js").toURI) 48 | implicit val timeout = Timeout(5000L, TimeUnit.MILLISECONDS) 49 | 50 | val futureResult = (engine ? Engine.ExecuteJs(f, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult] 51 | val result = Await.result(futureResult, timeout.duration) 52 | new String(result.output.toArray, "UTF-8").trim must_== "" 53 | new String(result.error.toArray, "UTF-8").trim must startWith("""ReferenceError: "readFile" is not defined""") 54 | } 55 | } 56 | 57 | def runSimpleTest(system: ActorSystem) = { 58 | implicit val timeout = Timeout(5000L, TimeUnit.MILLISECONDS) 59 | val f = new File(classOf[TriremeSpec].getResource("test-node.js").toURI) 60 | val engine = system.actorOf(Trireme.props(), "not-leak-threads-test") 61 | val futureResult = (engine ? 
Engine.ExecuteJs(f, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult] 62 | val result = Await.result(futureResult, timeout.duration) 63 | new String(result.output.toArray, "UTF-8").trim must_== "999" 64 | new String(result.error.toArray, "UTF-8").trim must_== "" 65 | } 66 | 67 | "not leak threads" in withActorSystem { system => 68 | // This test assumes that no other Trireme tests are running concurrently; if there are, the Trireme thread 69 | // count will be non-zero 70 | runSimpleTest(system) 71 | 72 | Thread.sleep(1) 73 | 74 | import scala.collection.JavaConverters._ 75 | val triremeThreads = Thread.getAllStackTraces.keySet.asScala 76 | .filter(_.getName.contains("Trireme")) 77 | 78 | ("trireme threads: " + triremeThreads) <==> (triremeThreads.size === 0) 79 | ok 80 | } 81 | 82 | 83 | "not leak file descriptors" in withActorSystem { system => 84 | import java.lang.management._ 85 | val os = ManagementFactory.getOperatingSystemMXBean 86 | // To get the open file descriptor count, you need to use non-portable APIs, so use reflection 87 | try { 88 | val method = os.getClass.getMethod("getOpenFileDescriptorCount") 89 | // The method is public native; it needs to be made accessible before it can be invoked via reflection 90 | method.setAccessible(true) 91 | def getCount = method.invoke(os).asInstanceOf[Long] 92 | 93 | val openFds = getCount 94 | runSimpleTest(system) 95 | getCount must_== openFds 96 | } catch { 97 | case nse: NoSuchMethodException => 98 | println("Skipping file descriptor leak test because OS mbean doesn't have getOpenFileDescriptorCount") 99 | ok 100 | } 101 | } 102 | } 103 | 104 | } 105 | -------------------------------------------------------------------------------- /version.sbt: -------------------------------------------------------------------------------- 1 | version in ThisBuild := "1.2.5-SNAPSHOT" 2 | --------------------------------------------------------------------------------
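Taken together, the specs above illustrate the client-side contract of the library: create an ActorSystem, create one of the engine actors (Trireme, Rhino or JavaxEngine), send it Engine.ExecuteJs via the ask pattern and read back a JsExecutionResult carrying the captured stdout and stderr bytes. The following is a condensed usage sketch distilled from TriremeSpec rather than code taken from the repository; the object name, actor-system name and script path are placeholders.

    import java.io.File
    import java.util.concurrent.TimeUnit
    import scala.collection.immutable
    import scala.concurrent.Await
    import akka.actor.ActorSystem
    import akka.pattern.ask
    import akka.util.Timeout
    import com.typesafe.jse.{Engine, Trireme}
    import com.typesafe.jse.Engine.JsExecutionResult

    object RunJs extends App {
      val system = ActorSystem("js-engine-example") // placeholder name
      implicit val timeout = Timeout(5000L, TimeUnit.MILLISECONDS)
      try {
        // Trireme runs Node-style scripts in-JVM; swap in Rhino.props() or JavaxEngine.props()
        // to use the other engines exercised by the specs.
        val engine = system.actorOf(Trireme.props(), "engine")
        val script = new File("some-script.js") // placeholder path
        val futureResult =
          (engine ? Engine.ExecuteJs(script, immutable.Seq("999"), timeout.duration)).mapTo[JsExecutionResult]
        val result = Await.result(futureResult, timeout.duration)
        println(new String(result.output.toArray, "UTF-8"))
        System.err.println(new String(result.error.toArray, "UTF-8"))
      } finally {
        // On Akka 2.3.x use system.shutdown()/system.awaitTermination(), as in AkkaCompat above.
        Await.ready(system.terminate(), timeout.duration)
      }
    }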