├── .bsp
└── sbt.json
├── .gitignore
├── README.md
├── _config.yml
├── akka
└── src
│ └── main
│ └── scala
│ └── streams
│ ├── SourceSinkRunnableGraph.scala
│ ├── Step1_SourceAndSinck.scala
│ ├── Step2_SourceReuse_Flow.scala
│ └── Step3_Timing.scala
├── algorithms
├── README.md
└── src
│ └── test
│ └── scala
│ ├── tests
│ ├── AggregateSpec.scala
│ └── WordsSpec.scala
│ └── tests99
│ ├── test01_last
│ └── FindLastElementSpec.scala
│ ├── test02_lastButOne
│ └── FindLastButOneElementSpec.scala
│ ├── test03_getByIndex
│ └── FindKthSpec.scala
│ ├── test05_reverse
│ └── ReverseSpec.scala
│ └── test06_palindrome
│ └── PalindromeSpec.scala
├── build.sbt
├── project
├── BuildProject.scala
├── build.properties
├── build.sbt~
└── plugins.sbt
├── scalajs
├── README.md
├── index.html
├── screen.css
└── src
│ └── main
│ └── scala
│ ├── bench_rx
│ └── ScalaRxSample.scala
│ └── webapp
│ ├── PostClient.scala
│ ├── SamplePage.scala
│ ├── ScalaJsSample1.scala
│ └── model
│ └── Post.scala
├── spark
├── README.md
└── src
│ ├── main
│ ├── resources
│ │ ├── alice-in_wonder_land.txt
│ │ └── step1
│ │ │ └── sample-data.txt
│ └── scala
│ │ ├── step1
│ │ └── Step1.scala
│ │ └── step2
│ │ └── Step2.scala
│ └── test
│ └── scala
│ └── mainfunctions
│ ├── SparkFunctions1Spec.scala
│ ├── SparkFunctions2Spec.scala
│ ├── SparkFunctions3Spec.scala
│ └── model
│ ├── Book.scala
│ └── Event.scala
└── src
├── main
├── resources
│ ├── application.conf
│ └── logback.xml
└── scala
│ ├── algorithms
│ └── combinations.sc
│ ├── annotations
│ └── Service.scala
│ ├── apply_method
│ ├── ClassWithApplyMethod.scala
│ ├── FactoryApplyMethod.scala
│ ├── FunctionAsObject.scala
│ └── PrivateConstructor.scala
│ ├── calling_by_name
│ ├── CallingByNameTest.scala
│ └── CallingFunctionByNameTest.scala
│ ├── caseclasses
│ ├── CaseClassesTest.scala
│ ├── ProblemCopy.scala
│ └── copy.scala
│ ├── cast
│ └── CastTest.scala
│ ├── cats
│ ├── cats_retryable_case.scala
│ ├── effects
│ │ └── CustomThreadPoolAppEffect.scala
│ ├── step2_cats_combine_1_2.scala
│ ├── step3_cats_monoid_1.scala
│ └── step_1_cats_combine_1.scala
│ ├── closure
│ ├── ClosureSample.scala
│ └── ClosureTest.scala
│ ├── collections
│ ├── SeqFun.scala
│ ├── lists
│ │ ├── FlatMapExample.scala
│ │ ├── FoldExample.scala
│ │ ├── GroupByExample.scala
│ │ ├── ListComprehension.scala
│ │ ├── ListConcatenations.scala
│ │ ├── ListMethods.scala
│ │ ├── ReduceExamples.scala
│ │ ├── Sorting.scala
│ │ ├── Sorting2.scala
│ │ ├── ZipSamples.scala
│ │ ├── aggregate.scala
│ │ └── todo.txt
│ ├── mutable
│ │ └── ListBuffer_CRUD_Sample.scala
│ └── streams
│ │ ├── StreamCreations.scala
│ │ └── StreamSample.scala
│ ├── concurrency
│ ├── MapFlatMapFor.scala
│ ├── NestedFutures.scala
│ ├── NestedFutures2.scala
│ ├── NestedFutures3.scala
│ ├── SeveralExContexts.scala
│ ├── ThreadReuseInConnectionPool.scala
│ ├── ThreadSpawn.scala
│ ├── ThreadStarvation.scala
│ ├── antipatterns
│ │ ├── ExecutionContextTrap.scala
│ │ └── OneBasketExContext.scala
│ ├── apptmpl
│ │ ├── Client.scala
│ │ ├── controllers
│ │ │ ├── Controller1.scala
│ │ │ └── Controllers.scala
│ │ ├── managers
│ │ │ ├── Manager1.scala
│ │ │ └── Managers.scala
│ │ └── services
│ │ │ ├── Service.scala
│ │ │ └── Services.scala
│ ├── impl
│ │ └── ImplicitFuture.scala
│ ├── logging
│ │ ├── LogSample.scala
│ │ ├── SchedulingPoolStats.scala
│ │ └── threadfactory
│ │ │ └── CustomThreadFactory.scala
│ ├── performance
│ │ └── MyBench.scala
│ └── tips.txt
│ ├── currying
│ └── CurryingTest.scala
│ ├── design_patterns
│ └── decorator
│ │ └── DecoratorSample.scala
│ ├── dp_study
│ └── Fib.scala
│ ├── error_hadling
│ └── ExceptionSample.scala
│ ├── extractors
│ └── ExtractorsTest.scala
│ ├── fp_deep
│ ├── FunctorDemo.scala
│ ├── MonoidDemo.scala
│ └── TypeConstructorDemo.scala
│ ├── fp_study
│ └── recursion
│ │ ├── step1
│ │ ├── Factorial.scala
│ │ ├── Factorial2.scala
│ │ └── Factorial3_NoRecursion.scala
│ │ └── step2_list
│ │ └── ListAlgorithms.scala
│ ├── function_literal
│ └── FunctionLiteral.scala
│ ├── futures
│ └── FutureTest.scala
│ ├── generics
│ ├── CovariantBasket.scala
│ ├── CovariantBasketTest.scala
│ ├── InvariantBasket.scala
│ ├── InvariantBasketTest.scala
│ ├── LowerTypeBoundsTest.scala
│ ├── PlusMinusFruitBox.scala
│ └── model
│ │ ├── Apple.scala
│ │ ├── Fruit.scala
│ │ └── Orange.scala
│ ├── higher_order_functions
│ └── HigherOrderFunctionTest.scala
│ ├── implicits
│ ├── ImplicitArgs.scala
│ ├── ImplicitClassTest.scala
│ ├── ImplicitObjectTest.scala
│ ├── ImplicitlyLookupSample.scala
│ ├── SimpleSample.scala
│ ├── implicit_methods.sc
│ └── usecases
│ │ └── RequestResponseTest.scala
│ ├── json
│ ├── jackson
│ │ ├── AnySample.scala
│ │ ├── ClassToJson.scala
│ │ ├── JsonToClass.scala
│ │ └── SubClasses.scala
│ └── playjson
│ │ ├── JsValueToAnotherValue.scala
│ │ ├── ReadJsonToObject.scala
│ │ ├── StringToJsValue.scala
│ │ ├── WriteObjectToJson.scala
│ │ ├── models
│ │ ├── LisOfObjFormat.scala
│ │ └── Models.scala
│ │ └── readme.md
│ ├── lazy_test
│ └── LazyVal.scala
│ ├── lenses
│ ├── CommonData.scala
│ ├── Employee.scala
│ ├── NoLensesSample.scala
│ ├── monocle
│ │ └── LensesSample.scala
│ └── shapless
│ │ └── LensesSample.scala
│ ├── looping
│ ├── ForAsMap.scala
│ ├── ForLoop.scala
│ ├── ForeachLoop.scala
│ └── listComprehension.scala
│ ├── overriding
│ ├── OverrideBasics.scala
│ └── OverridePlus.scala
│ ├── partial_function
│ ├── PartialFunctionTest.scala
│ └── PartialToReal.scala
│ ├── partially_applied_functions
│ ├── PartialAppliedFunction_And_Currying.scala
│ └── PartiallyAppliedFunction.scala
│ ├── pattern_matching
│ ├── PatterMatching_CaseClasses.scala
│ ├── PatternMatchingTest.scala
│ ├── PatternMatching_AnonymousFunction.scala
│ ├── WithTuples.scala
│ ├── extractors
│ │ └── PatternValDefinitionExample.scala
│ └── map
│ │ └── MapMatching.scala
│ ├── performance
│ ├── BenchmarksCombine.scala
│ └── BenchmarksSimple.scala
│ ├── scala_99
│ ├── t4_length_of_list.scala
│ ├── t7_flatten_list.scala
│ ├── t8_duplicate_list.scala
│ └── t9_pack_duplicates.scala
│ ├── scalaz
│ └── 01_basic.scala
│ ├── strings
│ └── StringSamples.scala
│ ├── unapply_method
│ └── see_extractor
│ └── using_sample
│ └── UsingSample.scala
└── test
└── scala
├── mockito
└── MockitoTest.scala
├── scalacheck
└── ScalaCheckTest.scala
└── weaver
├── HelloWorldTest.scala
├── MyParallelSuite.scala
└── WeaverTest.scala
/.bsp/sbt.json:
--------------------------------------------------------------------------------
1 | {"name":"sbt","version":"1.10.7","bspVersion":"2.1.0-M1","languages":["scala"],"argv":["/home/sergii/java/amazon-corretto-21/bin/java","-Xms100m","-Xmx100m","-classpath","/home/sergii/.local/share/JetBrains/IntelliJIdea2025.1/Scala/launcher/sbt-launch.jar","-Dsbt.script=/home/sergii/.local/share/coursier/bin/sbt","xsbt.boot.Boot","-bsp"]}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | *.iml
3 | target
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | scala-samples
2 | =============
3 |
4 |
Preface
5 | Sometimes we have to read some articles, forums, blogs to understand the thing.
6 |
7 | But if we just want to remind ourselves of something we already knew, then it is better to just look at a code sample.
8 | You could easily change it and play with it and see what's going on.
9 |
10 | In this case "the thing" is:
11 |
12 | Scala Language (lang folder)
13 | and its satellites technologies, such as ScalaJS, Apache Spark, Akka and more.
14 |
15 |
16 | There are samples of these in the form of code that explains how to use it all.
17 |
18 | This code is supposed to be as isolated as possible - if we wish to explain/remember one feature of Scala, then we are not
19 | going to use 5 more extra features for that (which the reader of this code might not have learned yet).
20 |
21 | Each example has no more than 100 lines of code. Also, tagging is used in order to show the reader what feature is involved
22 | in a particular example. Like #feature-1 #feature-3.
23 |
24 |
25 | HOW TO INSTALL & RUN
26 |
27 | Make sure your build properties set the same sbt version as your global sbt version you use to build this project.
28 | see: [build.properties](project/build.properties) (especially if run from the console)
29 |
30 | sbt & nodejs (to work with ScalaJS samples) should be installed
31 |
32 | NOTE: as SBT project, the project/Build.scala defines the dependencies and sub-projects.
33 | So, to build run a particular project, say ScalaJS:
34 |
35 | Start sbt-console:
36 |
37 | ```sbt```
38 |
39 | Switch to that project, for example:
40 |
41 | ```project ScalaJS```
42 |
43 | Run:
44 |
45 | ```run```
46 |
47 | --
48 | Spec: http://www.scala-lang.org/files/archive/spec/2.11/
49 |
50 |
51 | ### NOTE: after 12 years of this project.
52 |
53 | Even with GPT, it still makes sense to have this project,
54 | since it gives you a chance to play with Scala.
55 |
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-minimal
--------------------------------------------------------------------------------
/akka/src/main/scala/streams/SourceSinkRunnableGraph.scala:
--------------------------------------------------------------------------------
1 | package streams
2 |
3 | import akka.NotUsed
4 | import akka.actor.ActorSystem
5 | import akka.stream.ActorMaterializer
6 | import akka.stream.scaladsl.{Keep, RunnableGraph, Sink, Source}
7 |
8 | import scala.concurrent.{Await, Future}
9 | import scala.concurrent.duration._
10 |
11 | /*
12 |
13 | Flow - A processing stage which has exactly one input and output,
14 | which connects its up- and downstreams by transforming the data elements flowing through it.
15 |
16 | RunnableGraph - A Flow that has both ends “attached” to a Source and Sink respectively,
17 | and is ready to be run().
18 | */
19 |
20 | object SourceSinkRunnableGraph extends App {
21 |
22 | implicit val system = ActorSystem("QuickStart") // the Actor to run streams
23 | implicit val materializer = ActorMaterializer() // a factory for stream execution engines, it is the thing that makes streams run
24 |
25 | // --
26 |
27 | val source = Source(1 to 10)
28 | val sink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _)
29 |
30 | // connect the Source to the Sink, obtaining a RunnableGraph
31 | val runnableGraph: RunnableGraph[Future[Int]] = source.toMat(sink)(Keep.right)
32 |
33 | // Materialization - is the process of allocating all resources needed to run the computation described by a Graph
34 |
35 | val futureResult: Future[Int] = runnableGraph.run() // running = materializing
36 | // get back the materialized value of type T of RunnableGraph[T].
37 |
38 | val result = Await.result(futureResult, 10.seconds) //
39 |
40 | println(result)
41 | }
42 |
--------------------------------------------------------------------------------
/akka/src/main/scala/streams/Step2_SourceReuse_Flow.scala:
--------------------------------------------------------------------------------
1 | package streams
2 |
3 | import java.io.File
4 |
5 | import akka.NotUsed
6 | import akka.actor.ActorSystem
7 | import akka.stream._
8 | import akka.stream.scaladsl._
9 | import akka.util.ByteString
10 |
11 | import scala.concurrent.{ExecutionContext, Future}
12 |
13 | // shows that we can reuse Sinks
14 | // shows how to use Flow
15 |
16 | object Step2_SourceReuse_Flow extends App {
17 |
18 | implicit val system = ActorSystem("QuickStart")
19 |
20 | // ActorMaterializer is a factory for stream execution engines, it is the thing that makes streams run
21 | implicit val materializer = ActorMaterializer() // the Actor to run streams
22 |
23 | //
24 | //val ec = ExecutionContext.global
25 |
26 |
27 | //converts a Sink of ByteStrings to a Sink of Strings. Uses the Flow
28 | def lineSink(sink:Sink[ByteString, Future[IOResult]]): Sink[String, Future[IOResult]] =
29 | Flow[String] // #Flow of strings.
30 | .map(str => ByteString(str + "\n")) // converts each str to ByteString
31 | .toMat(sink)(Keep.right) // then feed to the file-writing Sink
32 |
33 | // --
34 |
35 | val numSource: Source[Int, NotUsed] = Source(1 to 100) // to emit the integers 1 to 100:
36 | val fileWritingSink: Sink[ByteString, Future[IOResult]] = FileIO.toFile(new File("factorial2.txt"))
37 |
38 | // converts source of number to the source of factorials of each number from those numbers
39 | val factorialsSource: Source[BigInt, NotUsed] = numSource.scan(BigInt(1))((acc, next) => acc * next) // "scan" like foldLeft ??
40 |
41 | // converts to file Sink into Sink of strings
42 | val sinkOfStrings: Sink[String, Future[IOResult]] = lineSink(sink = fileWritingSink)
43 |
44 | val ioResult: Future[IOResult] = factorialsSource.map(num => num.toString).runWith(sinkOfStrings)(materializer)
45 | }
46 |
--------------------------------------------------------------------------------
/algorithms/README.md:
--------------------------------------------------------------------------------
1 | S-99: Ninety-Nine Scala Problems
2 | =============
3 |
4 | Sometimes it is nice to solve simple problems like these.
5 |
6 | **I found it useful. Especially when you prepare yourself for an interview.**
7 |
8 | So I will keep putting them here, one by one as I go.
9 |
10 | All code will be put in a `test` folder as a Scala Code with Scala Test + Scala Check tests.
11 |
12 | To run and add new ones:
13 | - sbt
14 | - project algorithms
15 | - ~test
16 | - add new code along with tests in the same file, so your code will be auto-compiled as you work
17 |
18 | Feel free to use this project for your own purposes.
19 | Please contribute here if you want.
20 |
21 |
--------------------------------------------------------------------------------
/algorithms/src/test/scala/tests/AggregateSpec.scala:
--------------------------------------------------------------------------------
1 | package tests
2 |
3 | /**
4 | * #aggregate #scalatest #scalamock #mockFunction
5 | *
6 | * Explains in test manner what is `aggregate` function is about.
7 | * Also demonstrate the test-technique like `mockFunction` with MockFactory - scalamock integration
8 | */
9 |
10 | import org.scalatest.{FlatSpec, Matchers}
11 | import org.scalamock.scalatest.MockFactory
12 |
13 | // https://duckduckgo.com/?q=scala+aggregate+function&t=canonical&ia=qa&iax=1
14 |
15 | class AggregateSpec extends FlatSpec with Matchers with MockFactory {
16 |
17 | // how many characters are in it
18 | val data = Seq("This", "is", "something", "I", "would", "like", "to", "know")
19 |
20 | "combop function" should "NOT be called for non-par collection" in {
21 |
22 | val mockCombop = mockFunction[Int, Int, Int]
23 |
24 | mockCombop.expects(*, *).never // should not be called
25 |
26 | val result: Int = data.aggregate(0)(
27 | seqop = (acc, next) => acc + next.length,
28 | combop = mockCombop
29 | )
30 |
31 | }
32 |
33 | "combop function" should "BE called for par collection" in {
34 |
35 | val mockCombop = mockFunction[Int, Int, Int]
36 | mockCombop.expects(*, *).atLeastOnce() // should be called
37 |
38 | val parData = data.par
39 |
40 | val result: Int = parData.aggregate(0)(
41 | seqop = (acc, next) => acc + next.length,
42 | combop = mockCombop
43 | )
44 |
45 | result should === (0) // that's fine, because 'mockCombop' is mocked
46 |
47 | }
48 |
49 | "aggregate" should "calculate with combop for non-par collection" in {
50 |
51 | val myCombop: (Int, Int) => Int = (a, b) => {
52 | a + b
53 | }
54 |
55 | val result: Int = data.aggregate(0)(
56 | seqop = (acc, next) => acc + next.length,
57 | combop = myCombop
58 | )
59 |
60 | result === 31
61 |
62 | }
63 |
64 | "aggregate" should "calculate with combop for par collection" in {
65 |
66 | val myCombop: (Int, Int) => Int = (a, b) => {
67 | a + b
68 | }
69 |
70 | val parData = data.par
71 |
72 | val result: Int = parData.aggregate(0)(
73 | seqop = (acc, next) => acc + next.length,
74 | combop = myCombop
75 | )
76 |
77 | result === 31
78 |
79 | }
80 |
81 | }
--------------------------------------------------------------------------------
/algorithms/src/test/scala/tests/WordsSpec.scala:
--------------------------------------------------------------------------------
1 | package tests
2 |
3 | import org.scalamock.scalatest.MockFactory
4 | import org.scalatest.{FlatSpec, Matchers}
5 |
6 | // https://duckduckgo.com/?q=scala+aggregate+function&t=canonical&ia=qa&iax=1
7 |
8 | class WordsSpec extends FlatSpec with Matchers with MockFactory {
9 |
10 | val text =
11 | """Hello,
12 | | how are you?
13 | | I'm good, how about you?
14 | | Me too - good good!
15 | """.stripMargin
16 |
17 | type Tag = (String, String)
18 | sealed trait TextElem
19 | case class Word(text: String, tags: Seq[Tag] = Seq()) extends TextElem
20 | case class Sentence(words: Seq[Word]) extends TextElem
21 | case class Chapter(title: String) extends TextElem
22 | case class Book(title: String, sentences: Seq[Sentence]) extends TextElem
23 |
24 | def words(text: String): Seq[Word] = {
25 | text.replaceAll("(\r\n)|\r|\n", "").
26 | split("\\s+|\\,").
27 | map(w => Word(w))
28 | }
29 |
30 | "words" should "extract words by..." in {
31 | val result = words(text)
32 |
33 | result.head should equal ( Word("Hello", Seq()) )
34 | result.last should equal ( Word("good!", Seq()) )
35 |
36 | result.size should equal(16)
37 | }
38 |
39 |
40 | }
--------------------------------------------------------------------------------
/algorithms/src/test/scala/tests99/test01_last/FindLastElementSpec.scala:
--------------------------------------------------------------------------------
1 | package tests99.test01_last
2 |
3 | import org.scalatest.prop.GeneratorDrivenPropertyChecks
4 | import org.scalatest.{FlatSpec, Matchers}
5 |
6 | import scala.annotation.tailrec
7 |
8 | class FindLastElementSpec extends FlatSpec with GeneratorDrivenPropertyChecks with Matchers {
9 |
10 | // code
11 |
12 | @tailrec
13 | final def last[T](list: List[T]): Option[T] = {
14 | list match {
15 | case Nil => None
16 | case head :: Nil => Some(head)
17 | case _ :: tail => last(tail)
18 | }
19 | }
20 |
21 | // test
22 |
23 | forAll { list: List[Int] => {
24 | if (list.nonEmpty)
25 | last(list) should equal(Some(list.last))
26 | if (list.isEmpty)
27 | last(list) should equal(None)
28 | }
29 | }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/algorithms/src/test/scala/tests99/test02_lastButOne/FindLastButOneElementSpec.scala:
--------------------------------------------------------------------------------
1 | package tests99.test02_lastButOne
2 |
3 | import org.scalatest.{FlatSpec, Matchers}
4 | import org.scalatest.prop.GeneratorDrivenPropertyChecks
5 |
6 | import scala.annotation.tailrec
7 |
8 | class FindLastButOneElementSpec extends FlatSpec with GeneratorDrivenPropertyChecks with Matchers {
9 |
10 | // code
11 |
12 | @tailrec
13 | final def lastButOne[T](list: List[T]): Option[T] = {
14 | list match {
15 | case Nil => None
16 | case _ :: Nil => None
17 | case head :: tail if tail.size == 1 => Some(head)
18 | case _ :: tail => lastButOne(tail)
19 | }
20 | }
21 |
22 | // test
23 |
24 | forAll { list: List[Int] => {
25 | if (list.size > 1)
26 | lastButOne(list) should equal(Some(list.reverse(1)))
27 | else
28 | lastButOne(list) should equal(None)
29 | }
30 | }
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/algorithms/src/test/scala/tests99/test03_getByIndex/FindKthSpec.scala:
--------------------------------------------------------------------------------
1 | package tests99.test03_getByIndex
2 |
3 | import org.scalatest.{FlatSpec, Matchers}
4 | import org.scalatest.prop.GeneratorDrivenPropertyChecks
5 |
6 | class FindKthSpec extends FlatSpec with GeneratorDrivenPropertyChecks with Matchers {
7 |
8 | def kth[T](index: Int, list: List[T]): Option[T] = {
9 |
10 | def searchElement(list: List[T], currentIndex: Int = 0): Option[T] = {
11 | currentIndex match {
12 | case i if i == index => Some(list.head)
13 | case _ if list.isEmpty=> None
14 | case _ => searchElement(list.tail, currentIndex + 1)
15 | }
16 | }
17 |
18 | list match {
19 | case Nil => None
20 | case _ => searchElement(list)
21 | }
22 | }
23 |
24 | // test
25 |
26 |
27 | kth(2, List("2", "1", "3")) should equal (Some("3"))
28 | kth(1, List("2", "1", "3")) should equal (Some("1"))
29 | kth(0, List("2", "1", "3")) should equal (Some("2"))
30 | kth(2, List()) should equal (None)
31 | kth(99, List("2", "1", "3")) should equal (None)
32 |
33 | }
34 |
--------------------------------------------------------------------------------
/algorithms/src/test/scala/tests99/test05_reverse/ReverseSpec.scala:
--------------------------------------------------------------------------------
1 | package tests99.test05_reverse
2 |
3 | import org.scalatest.prop.GeneratorDrivenPropertyChecks
4 | import org.scalatest.{FlatSpec, Matchers}
5 |
6 | import scala.annotation.tailrec
7 |
8 | class ReverseSpec extends FlatSpec with GeneratorDrivenPropertyChecks with Matchers{
9 |
10 | // code
11 |
12 | def reverse[T](list: List[T], newList: List[T] = List()): List[T] = {
13 |
14 | @tailrec
15 | def toNewList(ls: List[T], newList: List[T] = List()): List[T] = {
16 | ls match {
17 | case Nil => newList
18 | case head :: tail => toNewList(tail, head :: newList)
19 | }
20 | }
21 |
22 | list match {
23 | case Nil => list
24 | case _ :: Nil => list
25 | case _ => toNewList(list)
26 | }
27 | }
28 |
29 | // this is a test
30 |
31 | forAll { list: List[Int] => {
32 | reverse(list) should equal (list.reverse)
33 | }
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/algorithms/src/test/scala/tests99/test06_palindrome/PalindromeSpec.scala:
--------------------------------------------------------------------------------
1 | package tests99.test06_palindrome
2 |
3 | import org.scalatest.{FlatSpec, Matchers}
4 | import org.scalatest.prop.GeneratorDrivenPropertyChecks
5 | import tests99.test05_reverse.ReverseSpec
6 |
7 | class PalindromeSpec extends FlatSpec with GeneratorDrivenPropertyChecks with Matchers {
8 |
9 | def palindrome[T](list: List[T]): Boolean = {
10 | new ReverseSpec().reverse(list) == list
11 | }
12 |
13 | assert ( palindrome(List()) === true)
14 | assert ( palindrome(List(1)) === true)
15 | assert ( palindrome(List(1,1)) === true)
16 | assert ( palindrome(List(1,2,3,2,1)) === true)
17 | //
18 | assert ( palindrome(List(1,2)) === false)
19 | assert ( palindrome(List(1,2,2)) === false)
20 |
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | ThisBuild / version := "0.1.0-SNAPSHOT"
2 | ThisBuild / scalaVersion := "3.6.3"
3 |
4 | //libraryDependencies += "com.typesafe.play" %% "play-json" % "3.0.4"
5 |
6 | val scalatestVersion = "3.3.0-SNAP4"
7 | val monocleVersion = "3.0.0-M6"
8 |
9 | lazy val root = (project in file("."))
10 | .settings(
11 | name := "scala_samples",
12 | libraryDependencies ++= Seq(
13 | "org.scalactic" %% "scalactic" % scalatestVersion,
14 | "org.scalatest" %% "scalatest" % scalatestVersion % Test,
15 | "org.scalatestplus" %% "mockito-4-6" % "3.2.14.0" % Test,
16 | "org.scalatestplus" %% "junit-4-13" % "3.2.14.0" % Test,
17 | "org.scalatestplus" %% "scalacheck-1-16" % "3.2.14.0" % Test,
18 | "com.disneystreaming" %% "weaver-cats" % "0.8.3" % Test,
19 |
20 |
21 | // "com.chuusai" %% "shapeless" % "2.3.12",
22 | "org.scala-lang.modules" %% "scala-parallel-collections" % "1.0.4",
23 | "joda-time" % "joda-time" % "2.12.5",
24 | // "com.storm-enroute" %% "scalameter-core" % "0.21",
25 | "com.typesafe.play" %% "play-json" % "2.10.0",
26 | // "com.typesafe.akka" %% "akka-actor-typed" % "2.10.0",
27 | "org.scalaz" %% "scalaz-core" % "7.4.0-M15",
28 | "com.github.julien-truffaut" %% "monocle-core" % monocleVersion,
29 | "com.github.julien-truffaut" %% "monocle-macro" % monocleVersion,
30 | // to replace with circle:
31 | "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.18.2",
32 |
33 | // cats
34 | "org.typelevel" %% "cats-core" % "2.10.0",
35 | "org.typelevel" %% "cats-effect" % "3.5.1",
36 | "com.github.cb372" %% "cats-retry" % "3.1.0",
37 |
38 | )
39 | )
40 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version = 1.10.7
--------------------------------------------------------------------------------
/project/build.sbt~:
--------------------------------------------------------------------------------
1 | sbt.version=0.13.0
2 |
3 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | //logLevel := Level.Warn
2 | //
3 | //addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.8")
4 | //
5 | //addCompilerPlugin("org.scalamacros" %% "paradise" % "2.1.0" cross CrossVersion.full)
--------------------------------------------------------------------------------
/scalajs/README.md:
--------------------------------------------------------------------------------
1 | scala-js-samples
2 | =============
3 |
4 | "Use the source, Luke!" (R)
5 |
6 | HOW TO INSTALL & RUN
7 |
8 | sbt & nodejs should be installed
9 |
10 | NOTE: as SBT project, the project/Build.scala defines the dependencies and sub-projects.
11 | So, to build run a particular project, say ScalaJS:
12 |
13 | Start sbt-console:
14 |
15 | ```sbt```
16 |
17 | Switch to that project:
18 |
19 | ```project ScalaJS```
20 |
21 | Run (will use NodeJS to run it):
22 |
23 | ```run```
24 |
25 | To compile to JS:
26 |
27 | ```fastOptJS```
28 |
29 | The result will be in: ../scalajs/target/scala-2.11/scalajs-fastopt.js
30 |
31 | --
32 | Spec: http://www.scala-lang.org/files/archive/spec/2.11/
33 |
34 | See the doc for more details: https://www.scala-js.org/tutorial/basic/
35 |
36 | Then run index.html in the browser, which will run/include scalajs-fastopt.js,
37 | so you will see the result of your generated js in your browser.
--------------------------------------------------------------------------------
/scalajs/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | The Scala.js Tutorial
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | Click
22 |
23 | Got from the service call (from downstream) :
24 |
27 |
28 |
29 | Scala tags
30 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/scalajs/screen.css:
--------------------------------------------------------------------------------
1 | .data-list ul{height:300px; width:400px;}
2 | .data-list ul{overflow:hidden; overflow-y:scroll;}
--------------------------------------------------------------------------------
/scalajs/src/main/scala/bench_rx/ScalaRxSample.scala:
--------------------------------------------------------------------------------
1 | package bench_rx
2 |
3 | import rx._
4 |
5 | // https://vimeo.com/98477272
6 | // https://github.com/lihaoyi/scala.rx (up to date)
7 |
8 | object ScalaRxSample extends App {
9 |
10 | val a = Var(1)
11 | val b = Var(2)
12 | val c = Rx{ a() + b() }
13 | println(c.now) // 3
14 |
15 | a() = 4
16 |
17 | println(c.now) // 6
18 |
19 | var count = 0
20 |
21 | // Observers (Obs s can be created from Rx s or Var s and be used to perform side effects when they change)
22 |
23 | val cObs1: Obs = c.trigger {
24 | count = c.now + 1
25 | }
26 | // same as:
27 | val cObs2: Obs = c.foreach { x =>
28 | count = x + 1
29 | }
30 |
31 | println("count: " + count)
32 |
33 | }
34 |
--------------------------------------------------------------------------------
/scalajs/src/main/scala/webapp/PostClient.scala:
--------------------------------------------------------------------------------
1 | package webapp
2 |
3 | import org.scalajs.dom.XMLHttpRequest
4 | import org.scalajs.dom.ext.Ajax
5 | import webapp.model.Post
6 |
7 | import scala.concurrent.Future
8 | import scala.scalajs.js.annotation.JSExport
9 |
10 | import upickle.default._
11 |
12 | import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
13 |
14 |
15 | /**
16 | * Created by User on 4/25/2016.
17 | */
18 | @JSExport
19 | object PostClient {
20 |
21 | val searchUrl = "http://jsonplaceholder.typicode.com/posts"
22 |
23 | @JSExport
24 | def posts() : Future[Seq[Post]] = {
25 |
26 | Ajax.get(searchUrl) map { (xhr: XMLHttpRequest) =>
27 |
28 | val posts = read[Seq[Post]](xhr.responseText)
29 |
30 | posts
31 |
32 | }
33 |
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
/scalajs/src/main/scala/webapp/SamplePage.scala:
--------------------------------------------------------------------------------
1 | package webapp
2 |
3 | import scala.scalajs.js.annotation.JSExport
4 |
5 | @JSExport
6 | class SamplePage[Builder, Output <: FragT, FragT](val bundle: scalatags.generic.Bundle[Builder, Output, FragT]) {
7 |
8 | val htmlFrag = {
9 | import bundle.all._
10 |
11 | val inputBox = input(
12 | id:="new-todo",
13 | placeholder:="What needs to be done?",
14 | autofocus:=true
15 | )
16 |
17 | div(
18 | h1("ScalaTag's h1!"),
19 | form(
20 | inputBox
21 | ),
22 | label("label"),input(readonly)
23 | )
24 | }
25 | }
26 |
27 |
--------------------------------------------------------------------------------
/scalajs/src/main/scala/webapp/ScalaJsSample1.scala:
--------------------------------------------------------------------------------
1 | package webapp
2 |
3 | import webapp.all._
4 |
5 | // https://github.com/scala-js/scalajs-tutorial
6 |
7 | import org.scalajs.dom.raw.Event
8 | import rx._
9 |
10 | import scala.concurrent.Await
11 | import scala.scalajs.js
12 | import scala.scalajs.js.JSApp
13 | import scala.scalajs.js.annotation.JSExport
14 | import scalatags.JsDom.all._
15 |
16 | // DOM library:
17 | import org.scalajs.dom
18 | // is the root of the JavaScript DOM and corresponds to the global scope of JavaScript (aka the window object)
19 | import org.scalajs.dom.document
20 |
21 | // JQuery
22 | import org.scalajs.jquery.jQuery
23 |
24 | //import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
25 |
26 | //import scala.concurrent.ExecutionContext.Implicits.global
27 | import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
28 |
29 |
30 | object ScalaJsSample1 extends JSApp {
31 |
32 | def main(): Unit = {
33 |
34 | // 1.
35 | println("Hello world!") //
36 |
37 | // 2. DOM
38 | /*
39 | def appendPar(targetNode: dom.Node, text: String): Unit = {
40 | val parNode = document.createElement("p")
41 | val textNode = document.createTextNode(text)
42 | parNode.appendChild(textNode)
43 | targetNode.appendChild(parNode)
44 | }
45 | appendPar(document.body, "Hello World")
46 | */
47 |
48 | // 3. JQuery
49 |
50 | def addClickedMessage(): Unit = {
51 | jQuery("body").append("<p>Hello World</p>") // reconstructed: the text export stripped the HTML tags and broke this string literal across two lines
52 | }
53 |
54 | def setupUI(): Unit = {
55 | jQuery("#click-me-button").click(addClickedMessage _)
56 | }
57 |
58 | // 4. Calling a service
59 |
60 | def callClient(): Unit = {
61 |
62 | val futurePosts = PostClient.posts()
63 | futurePosts foreach { posts =>
64 | posts foreach {post =>
65 | jQuery("#posts").append(s"${post.title} ") // NOTE(review): the export likely stripped <p> tags from this literal too — confirm against the original
66 | }
67 | }
68 |
69 | }
70 |
71 | jQuery(setupUI _)
72 | callClient()
73 |
74 | // 6. ScalaTag (+ TODO: ScalaRX)
75 |
76 | val samplePage = new SamplePage(scalatags.Text)
77 | dom.document.getElementById("scalatags").innerHTML = samplePage.htmlFrag.render
78 | //.bindOption(..
79 | //jQuery("scalatags").append(samplePage.htmlFrag.render)
80 |
81 |
82 |
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/scalajs/src/main/scala/webapp/model/Post.scala:
--------------------------------------------------------------------------------
1 | package webapp.model
2 |
3 | // http://stackoverflow.com/questions/36879109/upickle-read-from-scalajs-upickle-invaliddata-string-data-1
4 |
5 | case class Post(userId: Int, id: Int, title: String, body: String)
--------------------------------------------------------------------------------
/spark/README.md:
--------------------------------------------------------------------------------
1 | Docs:
2 |
3 | * http://spark.apache.org/docs/latest/programming-guide.html
4 | * https://trongkhoanguyenblog.wordpress.com/2014/11/27/understand-rdd-operations-transformations-and-actions/
5 | * http://homepage.cs.latrobe.edu.au/zhe/ZhenHeSparkRDDAPIExamples.html
6 |
7 | Steps:
8 | 1. sbt
9 | 2. project spark
10 | 3. console
11 | 4. .. TODO:
--------------------------------------------------------------------------------
/spark/src/main/resources/step1/sample-data.txt:
--------------------------------------------------------------------------------
1 | One line
2 | Second line
3 | Third line
--------------------------------------------------------------------------------
/spark/src/main/scala/step1/Step1.scala:
--------------------------------------------------------------------------------
1 | package step1
2 |
3 | // #first #spark #config #file #rdd #collect #transformation #action
4 | import java.io.File
5 |
6 | import org.apache.spark.rdd.RDD
7 | import org.apache.spark.{SparkConf, SparkContext}
8 |
9 | // How to start the Apache Spark
10 |
11 | case class DataLine(line: String)
12 |
13 | object Step1 extends App {
14 |
15 | val conf: SparkConf = new SparkConf()
16 | .setMaster("local[2]") // two threads - which represents “minimal” parallelism, which can help detect bugs that only exist when we run in a distributed context.
17 | .setAppName("fist-thing-first")
18 |
19 | val sc: SparkContext = new SparkContext(conf)
20 |
21 | // --
22 |
23 | val file = new File(this.getClass.getClassLoader.getResource("step1/sample-data.txt").toURI).getPath
24 |
25 | val dataLineRdd: RDD[DataLine] = sc.textFile(file).map { (line: String) =>
26 | //println(line)// just to print what we are getting
27 | DataLine(line)
28 | }
29 |
30 | // #transformation (lazy)
31 |
32 | val wordsRdd: RDD[String] = dataLineRdd.flatMap { dataLine => // in case of dataLineRdd.map - we would have had: RDD[Array[String]], so we flatting it out up to the RDD[String]
33 | dataLine.line.split(" ")
34 | }
35 |
36 | // rdd is a lazy thing, we have to apply an #action to start evaluation
37 |
38 | val count = wordsRdd.count() // count-Action
39 |
40 | println("Word Count:" + count) // 6
41 |
42 | sc.stop()
43 |
44 | }
45 |
46 | // Check the log when you start it.
--------------------------------------------------------------------------------
/spark/src/main/scala/step2/Step2.scala:
--------------------------------------------------------------------------------
1 | // #alice #filtering #cache #word-count
2 | package step1
3 |
4 | import java.io.File
5 | import org.apache.spark.rdd.RDD
6 | import org.apache.spark.{SparkConf, SparkContext}
7 |
8 | // A meaningful example, analysing Alice In Wonder Land Book
9 |
10 | object Step2 extends App {
11 |
12 | val conf: SparkConf = new SparkConf()
13 | .setMaster("local[2]") // two threads
14 | .setAppName("step2")
15 |
16 | val sc: SparkContext = new SparkContext(conf)
17 |
18 | // -- operations on RDD: filter, map, count
19 |
20 | val file = new File(this.getClass.getClassLoader.getResource("alice-in_wonder_land.txt").toURI).getPath
21 |
22 |
23 | val allTextLines: RDD[String] = sc.textFile(file)
24 |
25 | val readableChars: Set[Char] = (('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')).toSet
26 | def isReadable(s:String) = s.exists(readableChars.contains)
27 |
28 | val readableTextLines = allTextLines.filter(isReadable)
29 |
30 | // val emptyLines = text.filter(line => !isOrdinary(line)) // empty lines, or lines with no readable content
31 |
32 | val words = readableTextLines.flatMap(line => line.split(" "))
33 |
34 | words.cache() // a bit of optimization (cache is a synonym of persist with MEMORY_ONLY storage level.)
35 |
36 | val distinctWords = words.distinct() // from the cache
37 |
38 | println("text: " + allTextLines.count()) // 1071
39 | println("textLines: " + readableTextLines.count()) // 971
40 | println("words: " + words.count()) // 27354
41 |
42 | println("distinct words: " + distinctWords.count()) // 5194 - this is how many words we have to know to be able to read this book
43 |
44 | // --
45 |
46 | // TODO:
47 | }
--------------------------------------------------------------------------------
/spark/src/test/scala/mainfunctions/SparkFunctions2Spec.scala:
--------------------------------------------------------------------------------
1 | package mainfunctions
2 |
3 | import org.apache.spark.{SparkConf, SparkContext}
4 | import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
5 |
6 | /**
7 | * There is an essential set of functions that one should learn and know like the alphabet.
8 | * Below are some simple examples with explanations of how to use them.
9 | *
10 | * Here we cover Joins
11 | *
12 | * Also, here in the doc: https://spark.apache.org/docs/latest/api/scala/index.html#org.apache.spark.rdd.PairRDDFunctions - it is all about pairs.
13 | */
14 |
15 | class SparkFunctions2Spec extends FlatSpec with Matchers with BeforeAndAfter {
16 |
17 | val conf: SparkConf = new SparkConf()
18 | .setMaster("local[2]") // two threads
19 | .setAppName("step2")
20 |
21 | var sc: SparkContext = new SparkContext(conf)
22 |
23 | val userSubscriptionList = List(
24 | (1, ("user1", "subscription1")),
25 | (2, ("user2", "subscription2")),
26 | (3, ("user3", "subscription1")),
27 | (4, ("user4", "subscription3")),
28 | (5, ("user5", "subscription3")) // this user does not exist in userCity
29 | )
30 | val userSubscriptionRDD = sc.parallelize(userSubscriptionList)
31 |
32 | val userCityList = List(
33 | (1, ("user1", "Montreal")),
34 | (2, ("user2", "Ottawa")),
35 | (3, ("user3", "Montreal")),
36 | (4, ("user4", "Sherbrook"))
37 | )
38 |
39 | before {
40 |
41 | }
42 |
43 | after {
44 | //sc.stop()
45 | }
46 |
47 | "join" should "show how it works - inner join" in {
48 |
49 | val userCityRDD = sc.parallelize(userCityList)
50 |
51 | val result = userSubscriptionRDD.join(userCityRDD) // join assumes that both RDD has the same-type keys
52 |
53 | result.collect().length should equal(4) // not 5 (because it is inner join. there is no user5 with id = 5)
54 |
55 | }
56 |
57 | "joinLeft vs Right" should "show how it works - left join" in {
58 |
59 | val userCityRDD = sc.parallelize(userCityList)
60 |
61 | val resultLeft = userSubscriptionRDD/*5 records*/.leftOuterJoin(userCityRDD /* 4 records */)
62 | val resultRight = userSubscriptionRDD/*5 records*/.rightOuterJoin(userCityRDD/* 4 records */)
63 |
64 | resultLeft.collect().length should equal(5) // 5: user5 (id = 5) exists on the left side, so leftOuterJoin keeps all 5 left records
65 | resultRight.collect().length should equal(4) // 4: there is no user with id = 5 on the right side, so rightOuterJoin drops user5
66 |
67 | // resultRight: (Note: the left side is optional, that's why we see Some(..) here)
68 | // (4, (Some((user4, subscription3)), (user4, Sherbrook)
69 | // (2, (Some((user2, subscription2)), (user2, Ottawa)
70 | // (1, (Some((user1, subscription1)), (user1, Montreal)
71 | // (3, (Some((user3, subscription1)), (user3, Montreal)
72 |
73 | //
74 | val rightOuterJoinReverse = userCityRDD/*4 records*/.rightOuterJoin(userSubscriptionRDD/*5 records*/).collect()
75 | rightOuterJoinReverse.length should equal(5) // 5, since if we do rightJoin then it is guaranteed to have 5 records from the right hand side
76 |
77 | // (5,(None,(user5,subscription3)))
78 | }
79 |
80 |
81 | }
--------------------------------------------------------------------------------
/spark/src/test/scala/mainfunctions/SparkFunctions3Spec.scala:
--------------------------------------------------------------------------------
1 | package mainfunctions
2 |
3 | import org.apache.spark.rdd.RDD
4 | import org.apache.spark.{SparkConf, SparkContext}
5 | import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
6 |
7 | /**
8 | * There is an essential set of functions that one should learn and know like the alphabet.
9 | * Below are some simple examples with explanations of how to use them.
10 | *
11 | * Also, here in the doc: https://spark.apache.org/docs/latest/api/scala/index.html#org.apache.spark.rdd.PairRDDFunctions - it is all about pairs.
12 | */
13 |
14 | class SparkFunctions3Spec extends FlatSpec with Matchers with BeforeAndAfter {
15 |
16 | val conf: SparkConf = new SparkConf()
17 | .setMaster("local[2]") // two threads
18 | .setAppName("step2")
19 |
20 | var sc: SparkContext = new SparkContext(conf)
21 |
22 | before {
23 |
24 | }
25 |
26 | after {
27 | //sc.stop()
28 | }
29 |
30 | "cartesian" should "show how it works" in {
31 |
32 | val rdd: RDD[Int] = sc.parallelize(1 to 5)
33 | val cartesian = rdd.cartesian(rdd)
34 | val combinations = cartesian.filter { case (a,b) => a < b }
35 | val combinationsResult: Array[(Int, Int)] = combinations.collect()
36 |
37 | val cartesianResult = cartesian.collect()
38 | cartesianResult.length should equal(25)
39 | combinations.collect().length should equal(10)
40 |
41 | println("=== cartesianResult == ")
42 | cartesianResult.foreach(println)
43 |
44 | println("=== combinationsResult (filtered) == ")
45 | combinationsResult.foreach(println)
46 |
47 | /*
48 | === cartesianResult ==
49 | (1,1)
50 | (1,2)
51 | (2,1)
52 | ...
53 | (5,5)
54 | */
55 | }
56 | // we may use 'cartesian' also for query-optimization purposes,
57 | // to rewrite the `join-filter` type of queries that are way slower
58 | // compared to the `cartesian-filter` equivalent (sometimes 100x slower!)
59 |
60 | // But actually, it's OK to go first with the join-filter solution because it is more natural / readable.
61 | // With some extra structural information for spark we may let spark do this optimization for us (see Spark SQL)
62 |
63 | // TODO: show `cartesian-filter` vs `join-filter`
64 |
65 |
66 | }
--------------------------------------------------------------------------------
/spark/src/test/scala/mainfunctions/model/Book.scala:
--------------------------------------------------------------------------------
1 | package mainfunctions.model
2 |
3 | import org.apache.spark.rdd.RDD
4 | import step1.Step2.readableTextLines
5 |
6 | object Book {
7 |
8 | private val readableChars: Set[Char] = (('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')).toSet
9 |
10 | private def isReadableText(text: String): Boolean = { // true if the line contains at least one alphanumeric character
11 | text.exists(readableChars.contains)
12 | }
13 |
14 | def words(rddLine: RDD[String]): RDD[String] = { // splits the readable lines of the given RDD into words
15 | rddLine.filter(isReadableText).flatMap(line => line.split(" ")) // bug fix: was ignoring the rddLine parameter and reading step1.Step2.readableTextLines; isReadableText was never used
16 | }
17 |
18 |
19 | }
20 |
--------------------------------------------------------------------------------
/spark/src/test/scala/mainfunctions/model/Event.scala:
--------------------------------------------------------------------------------
1 | package mainfunctions.model
2 |
3 | /**
4 | * Created by sergey on 5/15/17.
5 | */
6 | case class Event(organizer: String, budget: Int)
7 |
--------------------------------------------------------------------------------
/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka {
2 | loggers = ["akka.event.slf4j.Slf4jLogger"]
3 | loglevel = "INFO"
4 | logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
5 | }
--------------------------------------------------------------------------------
/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | UTF-8
6 | [%5level] %date{ISO8601} %logger{36} %X{sourceThread} - %msg%n -->
7 |
8 |
9 |
10 |
11 | logs/app.log
12 |
13 | [%5level] %date{ISO8601} %logger{36} %X{sourceThread} - %msg%n -->
14 |
15 |
16 |
17 |
19 | logs/app_%d{yyyy-MM-dd}.%i.log
20 |
21 |
22 |
23 | 1MB
24 |
25 |
26 | 30
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/src/main/scala/algorithms/combinations.sc:
--------------------------------------------------------------------------------
1 | // #combinations, #subset, #powerset
2 | val data = List(1,2,3)
3 | // 1
4 | Set(1,2,3).subsets.map(_.toList).toList
5 | // 2
6 | val res = for(i <- 1 to data.size) yield List(1, 2, 3).combinations(i).toList
7 | // 3
8 | def powerset[A](s: Set[A]) = s.foldLeft(
9 | Set(Set.empty[A]))
10 | { case (acc, v) => acc ++ acc.map(_ + v) }
11 | powerset(data.toSet) // Set(Set(), Set(1, 3), Set(2), Set(1, 2), Set(2, 3), Set(3), Set(1, 2, 3), Set(1))
12 | // if list has unique values:
13 | data.foldLeft(List(List.empty[Int]) )
14 | { case (acc, x) => acc ++ acc.map(x :: _) }
15 |
16 |
--------------------------------------------------------------------------------
/src/main/scala/annotations/Service.scala:
--------------------------------------------------------------------------------
1 | //package annotations
2 | //
3 | //// TODO: TODO
4 | //
5 | //import scala.reflect.runtime.{universe => ru}
6 | //
7 | //case class RoleSecured(allowed: Seq[String], denied: Seq[String]) extends scala.annotation.StaticAnnotation
8 | //
9 | //
10 | //
11 | //class Service {
12 | //
13 | // @RoleSecured(allowed = Seq("role1"), denied = Seq("role2"))
14 | // def doService(): String = {
15 | // ""
16 | // }
17 | //
18 | //}
19 | //
20 | //object Service extends App {
21 | //
22 | // //
23 | // // Use reflection to extract the parameter details
24 | // // 1. Get the ClassSymbol for the class we want to check
25 | // // 2. Get the list of annotations from the ClassSymbol
26 | // // 3. Get the expected annotation type to match
27 | // // 4. Find the annotation
28 | // // 5. Retrieve the args. These are returned as a list of Tree.
29 | //
30 | ////
31 | // // val serviceSymbol: ru.ClassSymbol = ru.typeOf[Service].typeSymbol.asClass
32 | //
33 | //
34 | //
35 | //// val serviceSymbol: ru.MethodSymbol = ru.typeOf[Service].typeSymbol.asMethod
36 | //// val serviceAnnotations = serviceSymbol.annotations
37 | //// val roleSecuredAnnotationType = ru.typeOf[RoleSecured]
38 | //
39 | // val serviceSymbol: ru.MethodSymbol = ru.typeOf[Service].decl(ru.TermName("doService")).asMethod
40 | // val serviceAnnotations = serviceSymbol.annotations
41 | // val roleSecuredAnnotationType: ru.Type = getTypeTag(serviceSymbol).tpe
42 | //
43 | // serviceAnnotations foreach { x =>
44 | // println (x.tree.tpe + " == " + roleSecuredAnnotationType)
45 | // }
46 | //
47 | // val roleSecuredAnnotation = serviceAnnotations.find(a => a.tree.tpe == roleSecuredAnnotationType)
48 | //
49 | // val roleSecuredAnnotationArgs = roleSecuredAnnotation.get.tree.children.tail
50 | //
51 | // val a = 1
52 | //
53 | //// roleSecuredAnnotationArgs.foreach(a => println(ru.showRaw(a)))
54 | //
55 | // /*
56 | //
57 | // import scala.reflect.runtime.universe._
58 | //
59 | // def methodAnnotations[T: TypeTag]: Map[String, Map[String, Map[String, Any]]] = {
60 | // typeOf[T].decls.collect { case m: MethodSymbol =>
61 | // m
62 | // }.withFilter {
63 | // _.annotations.nonEmpty
64 | // }.map { m =>
65 | // m.name.toString -> m.annotations.map { a =>
66 | // a.tree.tpe.typeSymbol.name.toString -> a.tree.children.withFilter {
67 | // _.productPrefix eq "AssignOrNamedArg"
68 | // }.map { tree =>
69 | // tree.productElement(0).toString -> tree.productElement(1)
70 | // }.toMap
71 | // }.toMap
72 | // }.toMap
73 | // }
74 | //
75 | // import scala.reflect.runtime.universe
76 | // def annotationsOf[T: universe.TypeTag](obj: T) = {
77 | // universe.typeOf[T].members.foldLeft(Nil: List[universe.type#Annotation]) {
78 | // (xs, x) => x.annotations ::: xs
79 | // }
80 | // }
81 | //*/
82 | //
83 | // def getTypeTag[T: ru.TypeTag](obj: T) = ru.typeTag[T]
84 | //
85 | ///*
86 | // val service = new Service
87 | // val methodX = ru.typeOf[Service].decl(ru.TermName("doService")).asMethod
88 | // val theType: ru.Type = getTypeTag(methodX).tpe
89 | // println(theType)
90 | //*/
91 | //
92 | // //println(annotationsOf(testObj))
93 | //
94 | //}
95 |
--------------------------------------------------------------------------------
/src/main/scala/apply_method/ClassWithApplyMethod.scala:
--------------------------------------------------------------------------------
1 | package apply_method
2 |
3 | /*
4 | #apply-method
5 | */
6 |
7 | // sequence: #1 (look at #1, #2 after)
8 |
9 | class ClassWithApplyMethod(x:Int) {
10 |
11 | def apply(): Int = {println("apply method is calling with no arguments"); 1}
12 | def apply(x: Int): Unit = {println("apply method is calling with '" + x + "' argument")}
13 |
14 | }
15 |
16 | object Starter extends App {
17 |
18 | val obj = new ClassWithApplyMethod(1) // no any apply() method will be invoked, because the object is not yet created at this moment
19 |
20 |
21 | println("-1-")
22 |
23 | val y = obj(2) // equivalent to x.apply(2)
24 |
25 |
26 | println("-2-")
27 |
28 | //val z:Int = x // this will not lead to apply() evaluation, because we do not use "()" to evaluate it
29 |
30 | val z:Int = obj() // equivalent to x.apply()
31 |
32 | // in this sense apply() method is just default 'empty-name' function which we could call when we applying "()" on a object
33 |
34 | // so apply is default function if we want to evaluate an object as a function
35 |
36 | }
37 |
38 | /* Output:
39 | -1-
40 | apply method is calling with '2' argument
41 | -2-
42 | apply method is calling with no arguments
43 | */
44 |
--------------------------------------------------------------------------------
/src/main/scala/apply_method/FactoryApplyMethod.scala:
--------------------------------------------------------------------------------
1 | package apply_method
2 |
3 | /*
4 | #apply-method
5 | related: #factory-method #companion-object
6 | */
7 | // sequence: #2 (look at #1 first)
8 |
9 | // Shows general use case:how to use apply method as factory method
10 |
11 | class A(x:Int)
12 |
13 | class B(x:Int) // this class does not have companion object
14 |
15 | // companion object
16 | object A { // why do we use "object" here, but not "class"? because we want to use "()" method as class' constructor has
17 |
18 | def apply(x:Int) = new A(x) // #factory-method related
19 |
20 | }
21 |
22 | object FactoryApplyMethod extends App {
23 |
24 | val a1 = A(1) // create instance of class A by calling apply method on A object
25 |
26 | //val b1 = B(2) // this will not able to compile, we can NOT create an object without using "new"
27 |
28 | val a2 = new A(1) // same result by calling constructor
29 |
30 | // why the difference ?
31 |
32 | // by using A(1) we do not use method "new" to create an object (it make code more precise/short). And an object looks like a function call
33 |
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/scala/apply_method/FunctionAsObject.scala:
--------------------------------------------------------------------------------
1 | package apply_method
2 |
3 | /*
4 | #apply-method
5 | related: #lazy-evaluation #anonymous-function
6 | */
7 | // sequence: #3 (look at #1, #2 first)
8 |
9 | object FunctionAsObject extends App {
10 |
11 | def sum = (x:Int, y:Int) => x + y // #anonymous-function related
12 |
13 | println(sum) // // #lazy-evaluation related
14 |
15 | val v1 = sum.apply(1, 2) // demonstrates that function "sum" is an instance of Function2 that has method apply(_,_)
16 |
17 | println(v1) // 3
18 |
19 |
20 | // only because Function has apply() method we can do this:
21 | val v2 = sum(2,2) // this is apply method !
22 | println(v2) // 4
23 |
24 | // That means that in Scala everything tends to be a function (apply methods helps to get this feeling), but all functions are objects.
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/src/main/scala/apply_method/PrivateConstructor.scala:
--------------------------------------------------------------------------------
1 | package apply_method
2 |
3 | case class Person (name:String)
4 |
5 | class Person007 private (val name:String)
6 |
7 | object Person007 {
8 | def apply(name:String) : Person007 = {
9 | new Person007(name) // companion object has access to private constructor
10 | }
11 | }
12 |
13 | object PrivateConstructor extends App {
14 |
15 | val p1 = Person(name = "bob")
16 | val p2 = Person007(name = "007") // will nto compile if we did not have Person007 companion object
17 |
18 |
19 | print(p2.name)
20 | }
21 |
--------------------------------------------------------------------------------
/src/main/scala/calling_by_name/CallingByNameTest.scala:
--------------------------------------------------------------------------------
1 | package calling_by_name
2 |
3 | // Calling parameter "By Name"
4 | // See also: CallingFunctionByName
5 |
6 | /*
7 | #calling-by-name
8 | related: #calling-by-value #lazy-evaluation
9 | */
10 | // sequence: #1 (look at #2 after)
11 |
12 | object CallingByNameTest extends App {
13 |
14 | def fByValue(x:Int, y:Int) = {
15 |
16 | if (x < 0) y else x // if x < 0 we are not going to use "y"
17 |
18 | }
19 |
20 | def fByName(x:Int, y: => Int) = {
21 |
22 | if (x < 0) y else x // if we are not going to use "y", not need to trigger it to evaluate
23 |
24 | }
25 |
26 | def y() = {println ("y is calling"); 1} // will print the text by calling
27 |
28 | val result1 = fByValue( x=0, y=y() ) // y() is evaluating by passing .. (#calling-by-value related) but nobody is going to use its value though
29 | val result2 = fByName ( x=0, y=y() ) // y() will not be evaluated (#lazy-evaluation related)
30 |
31 | println (result1)
32 | println (result2)
33 |
34 | /*
35 | * Output:
36 | * y is calling
37 | * 0
38 | * 0
39 | * */
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/src/main/scala/calling_by_name/CallingFunctionByNameTest.scala:
--------------------------------------------------------------------------------
1 | package calling_by_name
2 |
3 | // #calling by name - arguments that are evaluated at the moment of use, not at the moment of being passed to the function.
4 | // #partially applied function - use "_" to omit some arguments and return a new function instead that expects rest of not-yet-applied arguments to be passed to that new function
5 | // #higher order function - take function as parameter
6 |
7 | // Calling function By Name
8 |
9 | /*
10 | #calling-by-name
11 | related: #calling-by-value #lazy-evaluation #anonymous-function #partially-applied-function #higher-order-function
12 | */
13 |
14 | // sequence: #2 (look at #1 first)
15 | object CallingFunctionByNameTest extends App {
16 |
17 | var var1 = 0
18 | var var2 = 0
19 |
20 | // 1.
21 | // the function that accepts arg-function with: two int params and returning String
22 | // the function passing v1 & v2 as parameters to arg-function, invoking arg-function 2 times, connecting the result to one string
23 | def takeFunction1(f: (Int, Int) => String, v1:Int, v2:Int ): String = {
24 | f(v1, v2) + f(v1, v2)
25 | }
26 |
27 | // 2. same as #1 but calling arg-function by-name
28 | def takeFunction2(f: => ((Int, Int) => String), v1:Int, v2:Int): String = {
29 | f(v1, v2) + f(v1, v2)
30 | }
31 |
32 |
33 | def aFun(v1:Int, v2:Int) : String = {
34 | var1 += 1
35 | (v1 + v2).toString
36 | }
37 |
38 |
39 | /* nGen does not have any parameters defined (on first glance),
40 | * but since fnGen returns 'partially applied function' which made by using "_" on aFun,
41 | * then it makes fnGen returns the function that's able to apply parameters
42 | */
43 | def fnGen() = {
44 |
45 | var2 += 1
46 |
47 | aFun _
48 |
49 | }
50 |
51 | // --
52 | // first try - aFun will be evaluated immediately, once by passing it
53 | println( takeFunction1( aFun, 2, 2) ) // btw.: we can not use 'aFun()' with brackets here !
54 | println("var1 = " + var1) // "aFun" interpreted as: (Int, Int) => String; (as expected by 'takeFunction1')
55 | // "aFun()" interpreted as: String
56 |
57 | println( takeFunction2( aFun, 2, 2) ) // evaluated immediately anyhow, because passing a reference to aFun leads it its initialization (it can not exist without its params)
58 | println("var1 = " + var1 + "\n")
59 |
60 |
61 | // second try
62 | println( takeFunction1( fnGen(), 2, 2) )
63 | println("var2 = " + var2)
64 |
65 | println( takeFunction2( fnGen(), 2, 2) ) // fnGen() will be evaluated afterwards, each time when on the moment of calling it
66 | println("var2 = " + var2) // fGen() - a reference to partial function, that does not require params to be passed to create it
67 |
68 | /* Output:
69 | 44
70 | var1 = 2
71 | 44
72 | var1 = 4
73 |
74 | 44
75 | var2 = 1
76 | 44
77 | var2 = 3
78 | */
79 |
80 |
81 | }
82 |
83 |
84 |
--------------------------------------------------------------------------------
/src/main/scala/caseclasses/CaseClassesTest.scala:
--------------------------------------------------------------------------------
1 | package caseclasses
2 |
3 | /*
4 | #case-classes
5 | related: #apply-method #unapply-method #extractor
6 | */
7 | object CaseClassesTest extends App {
8 |
9 | case class A(a:Int, b:Int)
10 |
11 | // 1. may omit using "new"
12 | val a1 = A(1,2) // same as A.apply(1,2)
13 |
14 |
15 | // 2. toString() is defined by default
16 | println(a1) // prints: A(1,2)
17 |
18 |
19 | // 3. public read-only(getters) properties by default
20 | println(a1.a) // so no need to put "val a:int" in arguments like for general classes
21 |
22 | //a1.a = 1 // can no do it (read only)
23 |
24 |
25 | // 4. equals() defined by default
26 | val a2 = A(1,2)
27 | if (a1 == a2) println ("equal!") // method '==' uses default built-in 'equals()'
28 |
29 | // 5. you we want setters to be defined
30 | case class B(var a:Int, var b:Int) // we need to put 'var' (same as for general classes)
31 | val b1 = B(1,2)
32 | b1.a=2 // that's ok, because it was defined as VAR
33 |
34 | // --
35 |
36 | // 6. works with "pattern matching" - #unapply-method #extractor related
37 |
38 | val b2 = B(1,2)
39 | var str = b2 match {
40 | case B(1,_) => "yes, first param is '1'" // it works because of unapply-method (extractor) method is defined by default
41 | }
42 | println(str)
43 |
44 | // 7. inheritance
45 | {
46 | case class A(a:Int)
47 | //case class A2(a:Int) extends A(1) // does allow us to extend, asks for 'val/val' and 'override'
48 | //case class A2(override val a:Int) extends A(1) // but it still does not allow. It is prohibited to use inheritance !
49 |
50 |
51 | // but
52 | class GeneralClass(val a:Int) // if we have general class (not case class)
53 | case class AA(override val a:Int) extends GeneralClass(1) // then we can extend our case class from it
54 |
55 | val g:GeneralClass = AA(2)
56 |
57 | println ("inherited, and overridden: " + g.a) // 2
58 | }
59 |
60 | // 8. a case class that accepts a function (what would happen ? )
61 | {
62 | case class F( f: Int => Int) // the case class that expects a function as parameter
63 |
64 | def f(a:Int) = {a + a} // function that returns back in two time more than it gets
65 |
66 | val obj = F(f)
67 | val f_ref = obj.f // actually.. it is a getter that returns a function. works like expected.
68 | println("result:" + f_ref(2))
69 |
70 | // and how will pattern matching work with it?
71 | obj match {
72 | case F( f:(Int=>Int) ) => println(" f:(Int=>Int) matched") // works as expected
73 | }
74 | obj match {
75 | case F( f:(Any=>Any) ) => println(" f:(Any=>Any) matched") // works as expected
76 | }
77 | obj match {
78 | case F( _ ) => println("_ matched") // works as expected
79 | }
80 | /*
81 | obj match {
82 | case F( 4 ) => println("will not work") // it expects a function, no a Int value (or function result)
83 | }*/
84 |
85 | }
86 |
87 | }
88 |
--------------------------------------------------------------------------------
/src/main/scala/caseclasses/ProblemCopy.scala:
--------------------------------------------------------------------------------
1 | //// #case-class #copy-method #shapless
2 | //package caseclasses
3 | //
4 | //
5 | //sealed trait Thing {
6 | // val id : String
7 | //}
8 | //case class User(id : String, name : String) extends Thing
9 | //case class Item(id : String, description : String) extends Thing
10 | //
11 | //object ProblemCopy extends App {
12 | //
13 | // // summoning the shapeless gods
14 | // import copySyntax._
15 | //
16 | // val thing1 : Thing = User("unknown", "dave")
17 | //
18 | // // the following would not compile as copy method does not exist on the Thing trait
19 | // // but because of "copySyntax._" and shapless lib it works
20 | // val copy1 = thing1.copy(id = "1")
21 | //
22 | // println(copy1) // User(1, dave)
23 | //
24 | //
25 | //}
26 |
--------------------------------------------------------------------------------
/src/main/scala/cast/CastTest.scala:
--------------------------------------------------------------------------------
1 | package cast
2 |
3 | import scala.concurrent._
4 | import scala.concurrent.duration._
5 |
6 | import ExecutionContext.Implicits.global
7 |
8 | trait Order {
9 | var aType:String = _
10 | }
11 |
12 | class OrderB extends Order {
13 | aType = "b"
14 | }
15 | class OrderC extends Order {
16 | aType = "c"
17 | }
18 |
19 | class Client {
20 | def send(payload:String): Future[String] = {
21 | Future { "bright future" }
22 | }
23 | }
24 |
25 | object CastTest extends App {
26 |
27 | // there is some 'interesting' stuff about casting and JVM type erasure
28 |
29 | val l: List[Int] = List[String]("a").asInstanceOf[List[Int]]
30 |
31 | println(l) // List[Int] = List(a) -- the cast succeeds because the generic element type is erased at runtime
32 |
33 | println(l.head) // a
34 |
35 | // --
36 |
37 | val client = new Client()
38 |
39 | val fResult = fun(new OrderB())
40 |
41 | val result = Await.result(fResult, 10.seconds)
42 |
43 | println(result)
44 |
45 | // --
46 | def fun(a:Order)(implicit executionContext: ExecutionContext) : Future[String] = {
47 |
48 | val x = a.asInstanceOf[OrderC] // throws ClassCastException at runtime: an OrderB is not an OrderC
49 |
50 | client.send("the payload")
51 |
52 | }
53 |
54 |
55 | }
56 |
--------------------------------------------------------------------------------
/src/main/scala/cats/effects/CustomThreadPoolAppEffect.scala:
--------------------------------------------------------------------------------
1 | import cats.effect.{IO, IOApp, Resource}
2 | import java.util.concurrent.Executors
3 | import scala.concurrent.ExecutionContext
4 |
object FullThreadControlApp extends IOApp.Simple {

  // A dedicated 4-thread pool for blocking work, managed as a Resource so the
  // underlying executor is always shut down when the program finishes.
  val blockingEc: Resource[IO, ExecutionContext] =
    Resource.make { // safely acquires and releases resources like files, database connections, network sockets, etc..
      IO(ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(4)))
    } { ec => IO(ec.shutdown()) }

  // Blocking task: IO.blocking marks the region as thread-blocking and
  // evalOn pins its execution to the custom pool passed in.
  def blockingTask(ec: ExecutionContext): IO[Unit] =
    IO.blocking {
      println(s"[BLOCKING] Running on ${Thread.currentThread().getName}")
      Thread.sleep(2000)
      println(s"[BLOCKING] Finished on ${Thread.currentThread().getName}")
    }.evalOn(ec)

  // CPU-bound task
  import scala.concurrent.duration.DurationInt

  // FIX: the original used IO.println(s"...${Thread.currentThread().getName}").
  // The interpolated string (the ARGUMENT of IO.println) is built eagerly when
  // this val is initialised -- on the constructing thread -- so the printed
  // name never reflected the fiber that actually runs the task (the recorded
  // output showed "main" for both lines). IO(println(...)) suspends the whole
  // side effect, so the thread name is read at execution time.
  val computeTask: IO[Unit] =
    IO(println(s"[COMPUTE] Running on ${Thread.currentThread().getName}")) *>
      IO.sleep(1.second) *>
      IO(println(s"[COMPUTE] Finished on ${Thread.currentThread().getName}"))

  // Run everything: start both tasks as fibers and wait for both to complete.
  val program: IO[Unit] = blockingEc.use { ec =>
    for {
      fiber1 <- computeTask.start // Uses default compute pool
      fiber2 <- blockingTask(ec).start // Uses our custom blocking pool
      _ <- fiber1.join
      _ <- fiber2.join
    } yield ()
  }

  override def run: IO[Unit] = program
}
41 |
42 | /**
43 | * Result:
44 | * [COMPUTE] Running on main
45 | * [BLOCKING] Running on io-blocking-0
46 | * [COMPUTE] Finished on main
47 | * [BLOCKING] Finished on io-blocking-0
48 | */
49 |
50 | // what the diff between this IO.blocking { and say execution context, no cats effect code?
51 | /* import scala.concurrent.{ExecutionContext, Future}
52 | import java.util.concurrent.Executors
53 |
54 | // Define a fixed thread pool
55 | val ec: ExecutionContext = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(4))
56 |
57 | // Run blocking work manually
58 | val futureTask: Future[String] = Future {
59 | Thread.sleep(2000) // Blocking!
60 | s"Completed on thread: ${Thread.currentThread().getName}"
61 | }(ec)
62 |
63 | futureTask.foreach(println)(scala.concurrent.ExecutionContext.global) // Print result
64 | */
65 |
 66 | // ✅ The non-Cats-Effect approach above:
67 | //
68 | // + Runs on a separate thread pool
69 | // ❌ Still blocking the assigned thread—it’s just on a different pool.
70 | // ❌ No structured concurrency—you must manage cancellation yourself.
71 | // ❌ Not composable—you can't easily mix blocking + async safely.
72 |
--------------------------------------------------------------------------------
/src/main/scala/cats/step2_cats_combine_1_2.scala:
--------------------------------------------------------------------------------
package cats

import cats.*
import cats.implicits.*

// Demo value type for Semigroup.
case class Score(points: Int)

// Define a Semigroup -- it is all about the single method `combine` that we define here.
// In this case we say that combining a and b means adding their points with `+`.
implicit val scoreSemigroup: Semigroup[Score] = (a, b) => Score(a.points + b.points)

/*
 You do not see a method named `combine` here, but that is what the lambda above
 implements -- Semigroup declares it as:

 `def combine(x: A, y: A): A`

 and here we implement that combine method with the `+` operator.

 Nothing fancy: conceptually it is like an OOP interface with one method,
 supplied implicitly instead of through inheritance.
*/

@main
def main(): Unit = {

  // Now we can use |+| - Cats' operator syntax for `combine`.
  val total1 = Score(10) |+| Score(20)
  val total2 = Score(10) combine Score(20) // same
  println(total1) // Score(30)
  println(total2) // Score(30)

  /**
   A Semigroup in Cats is like injecting a method into a type,
   much like defining a common trait or interface in OOP,
   but without a superclass or interface.

   So we are implicitly attaching plus (+) behaviour to the Score type.
   Unlike interfaces in OOP there is no hierarchy to wire up -- everything
   is flat and resolved implicitly.
   **/
}
38 |
39 | /**
40 | Even without Cats, you can achieve the same effect using Scala’s implicit mechanics.
41 | Cats just provides a structured and standardized way to use these concepts.
 42 |  I mean Cats just uses what Scala can already do; it simply goes with standard naming
43 | - from abstract algebra.
44 | **/
45 |
46 |
--------------------------------------------------------------------------------
/src/main/scala/cats/step3_cats_monoid_1.scala:
--------------------------------------------------------------------------------
1 |
2 | import cats.Monoid
3 | import cats.implicits._
4 |
5 | // Monoid - extends Semigroup, so it has `combine` defined
6 | // But it also has `empty` (identity) -it defines what it means to be empty
7 | // So, Monoid is something that combines and know how it is to be empty.
8 |
@main
def monoid_1(): Unit = {
  // Monoid = Semigroup (combine) + an identity element (empty).
  val sum = Monoid[Int].combine(10, 20) // 30

  val zero = Monoid[Int].empty // 0 -- the identity for Int addition

  // With that you can, for example, fold collections using monoidal operations:

  println(sum) // 30 (uses combine)
  println(zero) // 0

  // ----------

  val list = List(1, 2, 3, 4, 5)

  // foldMap maps each element (here with `identity`, i.e. unchanged -- Predef's
  // identity function) and then folds them together using the Monoid's combine,
  // starting from its empty value.
  val sum2 = list.foldMap(identity)(Monoid[Int]) // 15
  // We pass Monoid[Int] explicitly: it knows how to combine Ints and what it
  // means to be empty (0 -- "nobody is here") once inside a boxed structure
  // such as a list.

  // Monoid[Int] is predefined in Cats, which is why we can use it
  // _without_ defining anything manually.

  println(sum2) // 15


  // --- Life without Cats:

  val list1 = List(1, 2, 3, 4, 5)
  val sum1 = list1.foldLeft(0)(_ + _) // FIX: previously folded `list`, leaving `list1` unused
  // No Monoid is passed here -- we have to be explicit and use + directly
  // (no combine is called, just a function built on the + operator, so this
  // form only works for types where + is defined).
  println(sum1) // 15 -- explicit and readable: + over every element gives the sum

  // while list.foldMap would ask for a Monoid

  // going back to foldMap and Monoid:

  // It is not obvious from the name that foldMap(identity)(Monoid[Int]) calls combine!
  // This is one of the downsides of abstract-algebra-based APIs:
  // they hide the actual implementation details behind abstractions like Monoid.
  // Rule of thumb: when you see Monoid, think "combine, plus an empty value".

  // even foldLeft looks rather verbose here;

  // a simple foreach would do too:

  var sum3 = 0
  List(1, 2, 3, 4, 5).foreach(sum3 += _)
  println(sum3) // 15

  // We should sometimes ask ourselves the practical question: why go abstract algebra at all?

  // For a simple AWS Lambda function, you don't need Cats or even foldLeft.

  // Thinking in terms of monads, functors, and type classes requires extra brain power.

  // If you already know how to solve a problem using simpler,
  // explicit methods, weigh that abstraction overhead carefully.
}
74 |
--------------------------------------------------------------------------------
/src/main/scala/cats/step_1_cats_combine_1.scala:
--------------------------------------------------------------------------------
1 | import cats._
2 | import cats.implicits._
3 |
4 | import cats.Monoid
5 | import cats.implicits._
6 |
@main
def main(): Unit = {

  // |+| is Cats' operator syntax for Semigroup#combine; for String it concatenates.
  val result = "Hello " |+| "Cats!"
  println(result) // "Hello Cats!"

  // why bother?
  /**
   * In Scala, + works only for types that have built-in support for addition, such as:
   *
   * Int, Double, etc.
   * String (concatenation)
   * List (concatenation of elements)
   */

  // But what if you want a generic way to combine values without knowing their
  // type upfront? This is where |+| and Semigroup (from Cats) become useful.
}
25 |
26 |
--------------------------------------------------------------------------------
/src/main/scala/closure/ClosureTest.scala:
--------------------------------------------------------------------------------
1 | package closure
2 |
/**
 * Placeholder for closure examples -- no content yet.
 * (Original header: created by sstarodu on 23/02/14.)
 */
object ClosureTest {

}
9 |
--------------------------------------------------------------------------------
/src/main/scala/collections/SeqFun.scala:
--------------------------------------------------------------------------------
1 | package collections
2 |
3 | // Interesting to know that Seq[T] is a function Function1[Int, T]
4 |
object SeqFun extends App {

  // Applies the given index->value function at index 0.
  def first(f: Int => Int): Int = f(0)

  // Applies the given index->value function at index 1.
  def second(f: Int => Int): Int = f(1)

  val s = Seq[Int](1, 2, 3)

  // A Seq can be passed wherever an Int => Int function is expected:
  val restul_1 = first(s)
  val restul_2 = second(s)

  println(restul_1)
  println(restul_2)

  // more precisely, it behaves as a PARTIAL function -- defined only for valid indices:

  println(s"isDefined: ${s.isDefinedAt(2)}") // true
  println(s"isDefined: ${s.isDefinedAt(3)}") // false
}
28 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/FlatMapExample.scala:
--------------------------------------------------------------------------------
1 | package collections.lists
2 |
3 | // #flatmap (#flatmap-method)
4 | // related: #map-method #flatten #product-trait (#product)
5 |
object FlatMapExample extends App {

  // trying to be simple.

  val fruits = List("apple", "banana", "orange") // a sequence of strings -- or: a sequence of sequences of chars

  // map

  // # 1 -- map transforms each element, keeping the outer structure
  {
    val bigFruits = fruits map (_.toUpperCase)

    println (bigFruits) // List(APPLE, BANANA, ORANGE)
  }

  // #2 flatten
  {

    val result = fruits.flatten // flatten - expands each item (a String is a sequence of chars) into the outer collection -
    // giving a List of Chars

    println ("fruits flatten: " + result) // List(a, p, p, l, e, b, a, n, a, n, a, o, r, a, n, g, e)

  }

  // flatMap = flatten + map

  // #3
  {
    val bigLetters = fruits flatMap(_.toUpperCase)

    println ("bigLetters:" + bigLetters) // List(A, P, P, L, E, B, A, N, A, N, A, O, R, A, N, G, E)

    // Note: bigLetters is List[Char]! Each mapped string was expanded into the outer List

    // so, this is the same as:

    val bigLetters2 = fruits.flatten map(_.toUpper) // 'toUpper' here is a method of Char (not 'toUpperCase' now)

    println ("bigLetters2: " + bigLetters2)


  }

  // so flatMap is flatten + map


  // #4 closer to reality -- a commonly used case..

  // 2dArray - for vs flatMap

  {

    val matrix = Array.ofDim[Int](2,2)
    matrix(0)(0) = 1; matrix(0)(1) = 2
    matrix(1)(0) = 3; matrix(1)(1) = 4 // Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))

    // for:
    {
      //matrix: Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))

      val elements = for ( // elements: Array[Int] = Array(1, 2, 3, 4)
        row <- matrix; // row is an Array
        elem <- row
      ) yield elem

      println ("elements1: ")
      elements foreach ( print (_) ) // 1234

    }

    // flatMap:
    {
      val elements:Array[Int] = matrix flatMap( row => for (elements <-row) yield elements )

      println ("\nelements2: ")
      elements foreach ( print (_) ) // 1234
    }

    // so, you decide what is better for you - 'for' or 'flatMap'. But at least now you know what flatMap is

  }

  // #5 and of course an example with Option(s)

  val results = List(None, Some(1), Some(2), None) // List[Option[Int]]

  val flatResult1 = results flatMap(x=>x) // List[Int] = List(1, 2) .
  // Option is treated as a collection of at most one element: Some or None

  // Related to Product trait:
  // http://stackoverflow.com/questions/1301907/how-should-i-think-about-scalas-product-classes
  // http://en.wiktionary.org/wiki/Cartesian_product

  // same as (in this case)

  val flatResult2 = results.flatten // List[Int] = List(1, 2)


}
106 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/FoldExample.scala:
--------------------------------------------------------------------------------
1 | package collections.lists
2 |
3 | // #fold #list
4 | // related: #reduce (see: ReduceExample) #template-method-pattern #strategy-pattern #decorator-pattern
5 |
6 | // In nutshell: Fold is sophisticated version of 'reduce'
7 |
object FoldExample extends App {

  println("#1")

  // # 1 - simple
  {

    val result1 = List(1,2,3).foldLeft(0)(_ + _)

    // an empty list still works, because the initial value is set to 0
    // ('reduceLeft' would throw here [#reduce related])

    val result2 = List[Int]().foldLeft(0)(_ + _)

    println (result1) // 6
    println (result2) // 0

  }

  // traversing the list by foldRight: prepending while going right-to-left
  // rebuilds the list in its original order
  {
    val list = List(1,2,3)
    val rebuilt = list.foldRight(List[Int]()) { (right, result) =>
      right :: result
    }
    // FIX: previously the fold result was discarded and the untouched `list` printed
    println ("traversing by foldRight: " + rebuilt) // List(1, 2, 3)
  }

  // traversing the list by foldLeft: prepending while going left-to-right
  // reverses the list
  {
    val list = List(1,2,3)
    val reversed = list.foldLeft(List[Int]()) { (result, left) =>
      left :: result
    }
    // FIX: previously the fold result was discarded and the untouched `list` printed
    println ("traversing by foldLeft: " + reversed) // List(3, 2, 1)
  }

  // #2 similar to: #template-method-pattern #strategy-pattern #decorator-pattern
  println("#2")

  // there are two operations ..

  // uppercases the input (and traces the call)
  def upperCaseOP (str:String) : String = {
    println(s"upperCaseOp($str)")
    str.toUpperCase
  }

  // appends "bar" to the input (and traces the call)
  def addBarOP (str:String) : String = {
    println(s"addBarOP($str)")
    str + "bar"
  }

  // Applies each operation in order, feeding the previous result into the next.
  // Note the two sides of each fold step have different types:
  //   1. currentResult: String
  //   2. op: String => String
  def applyTransformations(initial: String, ops: Seq[String => String]) : String =
    ops.foldLeft(initial) {
      (currentResult, op) => op(currentResult)
    }

  // the order in which operations are applied matters for us, like in the #decorator-pattern

  val result = applyTransformations("hello", Seq( upperCaseOP, addBarOP))

  println ("result: " + result) // HELLObar

}
73 |
74 | /*
75 | Full output is:
76 |
77 | #1
78 | 6
79 | 0
80 | #2
81 | upperCaseOp(hello)
82 | addBarOP(HELLO)
83 | result: HELLObar
84 |
85 | */
86 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/GroupByExample.scala:
--------------------------------------------------------------------------------
1 | package lists
2 |
3 | // #list #group-by
4 |
object GroupByExample extends App {

  type Word = String

  type Occurrences = List[(Char, Int)]

  // Counts how many times each character appears in the word.
  def wordOccurrences(word:Word):Occurrences = {
    val grouped = word.groupBy(identity)                              // e.g. Map(e -> "e", ! -> "!", l -> "ll", h -> "h", o -> "o")
    val counted = grouped.map { case (ch, run) => (ch, run.length) }  // Map(e -> 1, ! -> 1, l -> 2, h -> 1, o -> 1)
    counted.toList
  }

  val result = wordOccurrences("hello!") // List((e,1), (!,1), (l,2), (h,1), (o,1)) -- map iteration order, not insertion order

  println("occurrences: " + result)

}
28 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/ListComprehension.scala:
--------------------------------------------------------------------------------
1 | // #list-comprehension #haskell #scala #for
2 |
3 | package collections.lists
4 |
5 | // List comprehension - Haskell comparing to Scala
// List comprehension - Haskell compared to Scala
object ListComprehension extends App{

  // Haskell: variants = [ (x,y) | x <- [1,2,3], y <- ["a","b","c"] ]
  // Scala's for-comprehension desugars to exactly this flatMap/map chain:
  val variants = List(1, 2, 3).flatMap(x => List("a", "b", "c").map(y => (x, y)))

  println(variants)
}
15 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/ListConcatenations.scala:
--------------------------------------------------------------------------------
1 | package lists
2 |
3 | // #list #concatenation
4 |
object ListConcatenations extends App {

  val list1 = List(1,2,3)
  val list2 = List(4,5,6)

  // ':::' prepends the whole left list; being right-associative, it is a method on list2.
  val result1 = list1 ::: list2
  val result11 = list2.:::(list1) // explicit-receiver spelling of the line above

  // '++' appends to the end of list1; it is a method on list1.
  val result2 = list1 ++ list2

  println(s"list1 ::: list2 : $result1")
  println(s"list2.:::(list1) : $result11")

  println(s"list1 ++ list2 : $result2")

  // what about "::" ?

  // '::' prepends a single ELEMENT, so the whole of list1 becomes one nested element of list2.
  val result3 = list1 :: list2

  println(s"list1 :: list2 : $result3") // List(List(1, 2, 3), 4, 5, 6)

}
27 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/ReduceExamples.scala:
--------------------------------------------------------------------------------
1 | package collections.lists
2 |
3 | // #list #reduce
4 |
5 | // Reduce - Simplified version of foldLeft (because we do not need pass seed-value, like we do in fold(seed)(fn)).
6 |
7 | // First argument is where result is passed
8 | // Second argument is always the current item in the collection.
9 |
object ReduceExamples extends App {

  val theList = List(1,2,3,4,5)

  // 1. verbose form, tracing each step
  val sum1 = theList.reduceLeft { (total, current) =>
    println("total: " + total + " current: " + current)
    total + current
  }

  // 2. short version
  val sum2 = theList.reduceLeft(_ + _)

  // 3. passing a named function
  def sumOp(total: Int, current:Int) = total + current

  val sum3 = theList.reduceLeft(sumOp)


  println("sum1: " + sum1) // 15

  println("sum2: " + sum2) // 15

  println("sum3:" + sum3) // 15


  // 4. reduceLeft on an empty list fails, unlike foldLeft

  val emptyList = List[Int]()

  //val sum4 = emptyList reduceLeft( sumOp ) // UnsupportedOperationException: empty.reduceLeft

  // prepending an initial value instead -- same idea as #fold : foldLeft(0)(sumOp)

  val sum4 = (0 :: emptyList).reduceLeft(sumOp) // 0

  println(sum4)


}
51 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/Sorting.scala:
--------------------------------------------------------------------------------
1 | package collections.lists
2 |
3 | // #sort Sorting by name and by position
4 |
object Sorting extends App {

  // Primary ordering: by name; ties broken by position.
  val sortByNameAndPosition = new Ordering[Person] {
    def compare(p1: Person, p2: Person) = {
      val comparedByName =p1.name.compare(p2.name)
      if (comparedByName == 0) {
        p1.position.compare(p2.position)
      } else {
        comparedByName
      }
    }
  }

  // Person is Ordered, so plain `sorted` (1. below) works without an explicit Ordering.
  case class Person(name:String, position:String) extends Ordered [Person] {
    override def compare(that: Person): Int = {
      sortByNameAndPosition.compare(this, that)
    }
  }


  val persons = Seq( Person("bob", "b"), Person("bob", "a"), Person("anh", "a") )

  // 1. uses Person's own Ordered compare (name, then position)
  val result1 = persons.sorted // List(Person(anh,a), Person(bob,a), Person(bob,b))

  // 2. explicit Ordering passed in
  val result2 = persons.sorted(new Ordering[Person] {
    override def compare(x: Person, y: Person): Int = {
      x.name.compare(y.name) // only by name
    }
  })

  // 3.
  val result3 = persons.sortWith(_.name < _.name) // same as 2, but less verbose and less 'centric'

  val result4 = persons.sortBy(_.name) // same as 2 and 3, but maybe less specific/clear about sort direction

  val result5 = persons.sortBy(_.name) // duplicate of 4, kept for comparison

  val result6 = persons.sortBy( p => (p.name, p.position) ) // same as 1 (tuple ordering: name, then position) but much less verbose, though less specific about sorting direction

  val result7 = persons.sorted(sortByNameAndPosition) // same as 1 but more explicit (a named Ordering is passed)

  val result8 = persons.groupBy(_.name).toList.
    sortWith(_._1 < _._1/*name*/).
    flatMap(_._2/*position*/.sortWith(_.position < _.position)) // same as 1: a bit verbose, but step-by-step-clear and customizable

  println("1." + result1) // List(Person(anh,a), Person(bob,a), Person(bob,b))
  println("2." + result2) // List(Person(anh,a), Person(bob,b), Person(bob,a))
  println("3." + result3) // List(Person(anh,a), Person(bob,b), Person(bob,a))
  println("4." + result4) // List(Person(anh,a), Person(bob,b), Person(bob,a))
  println("5." + result5) // List(Person(anh,a), Person(bob,b), Person(bob,a))
  println("6." + result6) // List(Person(anh,a), Person(bob,a), Person(bob,b)) // same as 1. (comment fixed: tuple sort orders positions too)
  println("7." + result7) // List(Person(anh,a), Person(bob,a), Person(bob,b)) // same as 1. (comment fixed: this IS the name+position Ordering)
  println("8." + result8) // List(Person(anh,a), Person(bob,a), Person(bob,b)) // same as 1

}
62 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/Sorting2.scala:
--------------------------------------------------------------------------------
1 | package collections.lists
2 |
3 |
// A priced, quote-like record; `isQoute`/`quoteType` drive Sorting2's custom ordering.
// NOTE(review): "isQoute" is a typo for "isQuote" -- kept, since renaming would break the public field.
case class Model(price: BigDecimal, source: String, isQoute: Boolean, quoteType: String )
5 |
object Sorting2 extends App {

  val all = List(
    Model(price = 1, source = "a-source1", isQoute = true, quoteType = "subject"),
    Model(price = 1, source = "b-source1", isQoute = true, quoteType = "fix"),
    Model(price = 1, source = "b-source1", isQoute = false, quoteType = "fix"),
    Model(price = 1, source = "c-source1", isQoute = false, quoteType = "."),
    Model(price = 2, source = "d-source1", isQoute = true, quoteType = "subject"),
    Model(price = 5, source = "e-source1", isQoute = false, quoteType = "subject")
  )

  // Weight per quote type; unknown types default to 1 (see getOrElse below).
  // The comparison uses `>`, so HIGHER weight sorts first.
  val weightsQuoteTypeMap: Map[String, Int] = Map[String, Int](
    "subject" -> 0,
    "fix" -> 2
  )

  // Group by price, order the groups by price descending, keep only the group contents.
  val groupedAndSortedByPrice: List[List[Model]] = all.groupBy(_.price).toList.sortWith(_._1 > _._1).map(x => x._2)

  // Within each price group, sort three times in a row. sortWith is a stable
  // sort, so the LAST sortWith (quote-type weight, descending) becomes the
  // primary key, with source (ascending) and then isQoute as tie-breakers.
  val result = groupedAndSortedByPrice.flatMap(list =>
    list.sortWith((m1, m2) => m1.isQoute > m2.isQoute)
      .sortWith((m1, m2) => m1.source < m2.source)
      .sortWith { (m1, m2) => weightsQuoteTypeMap.getOrElse(m1.quoteType, 1) > weightsQuoteTypeMap.getOrElse(m2.quoteType, 1) })


  result foreach(println)

}
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/ZipSamples.scala:
--------------------------------------------------------------------------------
1 | package collections.lists
2 |
3 | // #list #zip #index
4 |
object ZipSamples extends App {

  val list = List("a", "b", "c")

  // #1 zip truncates to the shorter of the two collections
  val zipped1 = list.zip(List(0, 1, 2))       // List[(String, Int)] = List((a,0), (b,1), (c,2))
  val zipped2 = list.zip(List(0, 1, 2, 3, 4)) // identical result -- the extra elements are dropped

  println("zipped1: " + zipped1)
  println("zipped2: " + zipped2)

  // #2 zipWithIndex pairs each element with its position
  val zippedIWithIndex = list.zipWithIndex // List((a,0), (b,1), (c,2))

  // equivalent to: list zip List(0, 1, 2, ...)

  println("zippedIWithIndex:" + zippedIWithIndex)

}
24 |
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/aggregate.scala:
--------------------------------------------------------------------------------
1 | package collections.lists
2 |
3 | import scala.collection.parallel.CollectionConverters._
4 |
object aggregate extends App {

  val ss = Seq(1,2,3,4) // 0 + 1 + 2 + 3 + 4 = 10

  // aggregate takes 3 parameters:
  // 1. a seed value,
  // 2. a computation function
  // 3. a combination function

  // folds one element into a running partial result (with tracing)
  val compFn = (x:Int, y:Int) => {
    println(s"compFn: x+y=$x+$y")
    x + y
  }
  // merges two partial results (with tracing)
  val combFn1 = (x:Int, y:Int) => {
    println(s"combFn1: x+y=$x+$y")
    x + y
  }


  // it splits the collection over a number of threads,
  // 1. computes partial results using the computation function
  // 2. and then combines these partial results using the combination function.

  // in comparison to fold

  val result0 = ss.foldLeft( /*seed=*/0 ) ( compFn) // 10
  val result1 = ss.aggregate( /*seed=*/0 ) ( compFn, combFn1) // 10 - same !

  println("result0: " + result0) // 10
  println("result1: " + result1) // 10

  // 8 trace lines total: 4 from the foldLeft above, then 4 from the
  // SEQUENTIAL aggregate (which behaves just like a foldLeft):
  // compFn: x+y=0+1
  // compFn: x+y=1+2
  // compFn: x+y=3+3
  // compFn: x+y=6+4
  // compFn: x+y=0+1
  // compFn: x+y=1+2
  // compFn: x+y=3+3
  // compFn: x+y=6+4

  // So combFn is not used inside the sequential aggregate() at all.
  //
  // Q. Why is it required to pass it then?
  // A. The default (sequential) implementation does NOT create threads,
  //    so there are no partial results to combine.

  // Let's go with the parallelized implementation of it

  // The par-version DOES use the second, combine function

  println("par:")

  val resultPar1 = ss.par.aggregate( /*seed=*/0 ) ( compFn, combFn1)

  println("resultPar1: " + resultPar1) // 10 - same result, but in par

  // One of the possible outputs (7 lines -- interleaving varies run to run):
  // compFn: x+y=0+1
  // compFn: x+y=0+2
  // combFn1: x+y=1+2
  // compFn: x+y=0+3
  // compFn: x+y=0+4
  // combFn1: x+y=3+4
  // combFn1: x+y=3+7

  // --

  // a combine function that is NOT addition -- shows the combine step really runs
  val combFn2 = (x:Int, y:Int) => {
    println(s"combFn2: x,y=$x,$y")
    x * 2
  }

  println("par:")

  val resultPar2 = ss.par.aggregate( /*seed=*/0 ) ( compFn, combFn2)

  // One of the possible outputs (7 lines):
  // compFn: x+y=0+2
  // compFn: x+y=0+3
  // compFn: x+y=0+1
  // compFn: x+y=0+4
  // combFn2: x,y=1,2
  // combFn2: x,y=3,4
  // combFn2: x,y=2,6

  println("resultPar2: " + resultPar2) // 4 (depends on how the partitions were combined)

}
--------------------------------------------------------------------------------
/src/main/scala/collections/lists/todo.txt:
--------------------------------------------------------------------------------
1 | I'm going to add even more examples for each method by creating one file for each method.
2 |
3 | Each example is supposed to be more or less complicated than the examples presented in the ListMethods file.
--------------------------------------------------------------------------------
/src/main/scala/collections/mutable/ListBuffer_CRUD_Sample.scala:
--------------------------------------------------------------------------------
1 | package collections.mutable
2 |
3 | import scala.collection.mutable.ListBuffer
4 |
5 | // typical example where mutable List like ListBuffer might be useful
6 |
7 | // #ListBuffer
8 | // related: #zipWithIndex #mutable
9 |
object ListBuffer_CRUD_Sample extends App {

  case class User(id:Long, name:String, email:String)

  /** Tiny in-memory CRUD store backed by a mutable ListBuffer. */
  object Users {

    val users = ListBuffer[User]() // mutable list

    def exists(id:Long) = users.exists (_.id == id)

    def add(user:User) = {
      users += user
    }

    // Replaces the stored user with the same id; no-op when absent.
    def update(user:User) = {
      val index = users.indexWhere(_.id == user.id) // -1 when not found
      if (index >= 0) {
        users.update(index, user) // update expects an index
      }
    }

    // Removes by value (case-class equality); no-op when absent.
    def delete(user:User) = {
      val index = users.indexOf(user) // -1 when not found
      if (index >= 0) {
        users.remove(index) // remove() expects an index
      }
    }

  }

  // test

  // add:
  Users.add( User(1L, "bob1", "bob1@bobs.com") )
  Users.add( User(2L, "bob2", "bob2@bobs.com") ) // FIX: name was "bob1", contradicting the bob2@ email

  // exists:
  println ( "user with id = 2 exists?: " + Users.exists(2L) )

  println ("Users.users.size: " + Users.users.size)
  println ("All: " + Users.users)

  // update:

  Users.update( User(1, "bob1", "new-email-bob@bobs.com") ) // update User that has id = 1

  println ("All after update: " + Users.users )

  // remove:

  Users.delete( User(1, "bob1", "new-email-bob@bobs.com") ) // bob1 will be deleted

  println ("All after delete: " + Users.users )


}
84 |
--------------------------------------------------------------------------------
/src/main/scala/collections/streams/StreamCreations.scala:
--------------------------------------------------------------------------------
1 | package collections.streams
2 |
3 | // please read "StreamSample" first.
4 |
5 | object StreamCreations extends App {
6 | // A very common way to construct a Stream is to define a recursive method.
7 | // Each recursive call constructs a new element in the stream.
8 |
9 | // #1
10 | {
11 | def makeStream : Stream[Int] = Stream.cons(util.Random.nextInt(10), makeStream) // never ending loop
12 |
13 | val infinate = makeStream
14 |
15 | println (infinate) // Stream(4, ?) 1,2,3,4,5
37 |
38 | }
39 |
40 | println ("concat: " + concat(list1,list2))
41 |
42 | // --- reverse --- (recursive)
43 | {
44 | val theList = List(1,2,3,4,5)
45 |
46 | // to come up with that algorithm as a pattern (that you will repeat recursively),
47 | // try to think on simplest case, as if you have list of two elements [1,2]
48 | // what should you dot then?
49 | // you should put last element to be first one, so basically:
50 |
51 | // pattern is: tail + header
52 |
53 | // where tail is decreasing recursively
54 |
55 | def reverse[T](list:List[T]): List[T] = list match {
56 |
57 | case List() => list
58 | case head :: tail => reverse(tail) ::: List(head) // should "++" be deprecated then ???
59 | // reverse (2,3,4,5) ::: = 5,4,3,2 + 1
60 | // reverse (3,4,5) ::: 2 = 5,4,3 + 2
61 | // reverse (4,5) ::: 3 = 5.4 + 3
62 | // reverse (5) ::: 4 = 5 + 4
63 | // reverse (empty) = 5
64 |
65 | }
66 |
67 | println ("reverse of [1,2,3,4,5]: " + reverse(theList) )
68 |
69 | } // complexity is: n*n. Not nice !
70 |
71 |
72 | }
73 |
--------------------------------------------------------------------------------
/src/main/scala/futures/FutureTest.scala:
--------------------------------------------------------------------------------
1 | package futures
2 |
3 | import scala.concurrent._
4 | import ExecutionContext.Implicits.global
5 | import scala.collection.mutable
6 |
7 | /*
8 | #future
9 | related:
10 | */
object FutureTest extends App {

  /**
   * Builds the numbers 1..10, sleeping between elements to emulate a slow
   * data source.
   *
   * @param delayMs per-element delay in milliseconds; the default of 100
   *                preserves the original behaviour
   */
  def getData(delayMs: Long = 100): Iterable[Int] = {
    val data: mutable.ArrayDeque[Int] = mutable.ArrayDeque[Int]()

    for (i <- 1 to 10) {
      Thread.sleep(delayMs) // emulating some delay in getting the data
      data += i
    }

    data
  }

  val f = Future {
    getData()
  }

  // Two 'onComplete' callbacks registered on the same Future: both will
  // eventually be invoked, but their relative order is NOT guaranteed.

  // #1
  f.onComplete {
    case scala.util.Success(data) => for (item <- data) println(item)
    case scala.util.Failure(ex) => println(s"An error occurred: ${ex.getMessage}")
  }

  // #2
  f.onComplete {
    case scala.util.Success(data) => println("the amount of items received from data: " + data.size)
    case scala.util.Failure(ex) => println(s"An error occurred: ${ex.getMessage}")
  }

  println("hello!") // printed first: the Future is still running on another thread

  // block until done
  // FIX: replaces the previous poll-and-sleep busy wait (while (!f.isCompleted) Thread.sleep(1000))
  import scala.concurrent.duration.Duration
  Await.ready(f, Duration.Inf)
}
--------------------------------------------------------------------------------
/src/main/scala/generics/CovariantBasket.scala:
--------------------------------------------------------------------------------
1 | package generics
2 |
3 | // #lower-type-bounds
4 |
// Covariant container: CovariantBasket[Fruit] is a supertype of CovariantBasket[Apple].
class CovariantBasket[+T] {

  //def add(x: T): T = {x} // will not compile when +T / covariance is on

  // Lower type bound: U must be a SUPERTYPE of T. While upper bounds limit a
  // type to a subtype of another type, lower bounds declare a supertype --
  // the standard trick for accepting values into a covariant container.
  def add2[U >: T](x: U): U = x

}
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/src/main/scala/generics/CovariantBasketTest.scala:
--------------------------------------------------------------------------------
1 | package generics
2 |
3 | // #lower-type-bounds
4 |
5 | import generics.model.{Orange, Fruit, Apple}
6 |
7 | // class CovariantBasket[+T]
8 |
/** Demonstrates CovariantBasket: the lower-bounded add2 and covariant assignment. */
object CovariantBasketTest extends App {

  val q1 = new CovariantBasket[Apple]

  // add2's signature: def add2[S >: T](x: S): S -- S is inferred as the
  // closest common supertype of the argument's type and the basket's T

  val a: Fruit = q1.add2(new Orange) // S = Fruit: common supertype of Orange and Apple
  val a2: Fruit = q1.add2(new Apple) // - so we can even put an Orange through an Apple basket

  // --

  val q2 = new CovariantBasket[Fruit]

  val b1: Fruit = q2.add2(new Orange) // here S = Fruit directly
  val b2: Fruit = q2.add2(new Apple)

  // --

  // covariance in action: a Basket[Apple] may be used where a Basket[Fruit]
  // is expected; this line would not compile without +T on the class
  val basketOfFruit: CovariantBasket[Fruit] = new CovariantBasket[Apple]()

}
30 |
--------------------------------------------------------------------------------
/src/main/scala/generics/InvariantBasket.scala:
--------------------------------------------------------------------------------
1 | package generics
2 |
/**
 * An invariant container: InvariantBasket[Apple] and InvariantBasket[Fruit]
 * are unrelated types, so neither may stand in for the other.
 */
class InvariantBasket[T] {

  // with invariant T the parameter may appear in both argument and result
  // position, so no lower bound is needed (contrast CovariantBasket)
  def add2(x: T): T = x
}
8 |
--------------------------------------------------------------------------------
/src/main/scala/generics/InvariantBasketTest.scala:
--------------------------------------------------------------------------------
1 | package generics
2 |
3 | import generics.model.{Apple, Fruit}
4 |
/** Demonstrates that InvariantBasket[T] permits no variance between baskets. */
object InvariantBasketTest extends App {

  val b1:InvariantBasket[Fruit] = new InvariantBasket[Fruit]

  // InvariantBasket is invariant, so the line below would not compile:
  // the compiler does not treat InvariantBasket[Apple] as a subtype of
  // InvariantBasket[Fruit], even though Apple extends Fruit

  //val b2:InvariantBasket[Fruit] = new InvariantBasket[Apple]


}
16 |
--------------------------------------------------------------------------------
/src/main/scala/generics/LowerTypeBoundsTest.scala:
--------------------------------------------------------------------------------
1 | package generics
2 |
3 | import generics.model.{Apple, Fruit}
4 |
/** Shows how a lower type bound [S >: T] relaxes what a generic method accepts. */
object LowerTypeBoundsTest extends App {

  class Basket[T] {

    // plain method: argument and result are fixed to T
    def add(x: T) = x
    // lower-bounded method: S may be T itself or any supertype of T
    def add2[S >: T](x: S) = x

    def addList(xs: List[T]) = xs
    def addList2[S >: T](xs: List[S]) = xs

  }

  val fruitBasket = new Basket[Apple]()

  // add

  val result1: Apple = fruitBasket.add( new Apple() )
  val result11: Fruit = fruitBasket.add( new Apple() ) // widening the result to Fruit still works

  // add2 infers S = Fruit (a supertype of Apple), so the result is typed Fruit
  val result2: Fruit = fruitBasket.add2( new Apple() )

  // addList

  val fruitList = List[Fruit](new Apple(), new Apple())

  // will not compile: "addList" expects List[Apple], not List[Fruit]
  // val apples1: List[Apple] = fruitBasket.addList( fruitList )
  // val apples2: List[Fruit] = fruitBasket.addList( fruitList )

  // works: addList2 binds S = Fruit, the supertype of Apple

  val fruits = fruitBasket.addList2( fruitList ) // result type is inferred as List[Fruit]
}
39 |
--------------------------------------------------------------------------------
/src/main/scala/generics/PlusMinusFruitBox.scala:
--------------------------------------------------------------------------------
1 | package generics
2 |
3 | // #covariance, #contravariance
4 |
// #covariance, #contravariance

class BoxPlus[+A]  // covariant: BoxPlus[Apple] is a subtype of BoxPlus[Fruit]

class BoxMinus[-A] // contravariant: BoxMinus[Thing] is a subtype of BoxMinus[Fruit]

trait Thing
trait Fruit extends Thing
class Orange extends Fruit
class Apple extends Fruit
class BigApple extends Apple

object PlusMinusFruitBox extends App {

  // Both helpers simply hand their argument back; they exist so the compiler
  // checks which box types are accepted where a Box...[Fruit] is expected.
  def fooFruitPlus(x: BoxPlus[Fruit]) : BoxPlus[Fruit] = x
  def fooFruitMinus(x: BoxMinus[Fruit]) : BoxMinus[Fruit] = x

  // covariant box: any A that is Fruit or a SUBtype of Fruit fits
  fooFruitPlus( new BoxPlus[Fruit])    // Fruit itself
  fooFruitPlus( new BoxPlus[Apple])    // subtype - accepted only because of "+"
  fooFruitPlus( new BoxPlus[BigApple]) // deeper subtype - accepted only because of "+"
  // fooFruitPlus( new BoxPlus[Thing]) // Thing is a SUPERtype of Fruit - rejected

  // contravariant box: any A that is Fruit or a SUPERtype of Fruit fits
  fooFruitMinus( new BoxMinus[Fruit] ) // Fruit itself
  fooFruitMinus( new BoxMinus[Thing] ) // supertype - accepted only because of "-"
  // fooFruitMinus( new BoxMinus[Apple]) // Apple is a SUBtype of Fruit - rejected

}
30 |
31 |
--------------------------------------------------------------------------------
/src/main/scala/generics/model/Apple.scala:
--------------------------------------------------------------------------------
package generics.model

import generics.model.Fruit // NOTE(review): redundant - this file is already inside generics.model

/** Concrete Fruit used by the variance examples in the generics package. */
class Apple extends Fruit
9 |
--------------------------------------------------------------------------------
/src/main/scala/generics/model/Fruit.scala:
--------------------------------------------------------------------------------
package generics.model

/** Marker base type for the variance examples (extended by Apple and Orange). */
trait Fruit
7 |
--------------------------------------------------------------------------------
/src/main/scala/generics/model/Orange.scala:
--------------------------------------------------------------------------------
package generics.model

/** Concrete Fruit used by the variance examples in the generics package. */
class Orange extends Fruit
7 |
--------------------------------------------------------------------------------
/src/main/scala/higher_order_functions/HigherOrderFunctionTest.scala:
--------------------------------------------------------------------------------
1 | package higher_order_functions
2 |
3 | // Higher Order Function example.
4 |
object HigherOrderFunctionTest extends App {

  // #1
  // takes a function and applies it itself; nothing is evaluated at passing time
  def takeFunction(f: (Int=>Int) ) = {
    println(f) // prints the function object (e.g. <function1>) - f is NOT evaluated here
    println( f(1) ) // prints: 1 - the evaluation takes place here!
  }

  // #2
  // takes a function together with the single argument 'p' to apply it to
  def takeFunction2(f: (Int=>Int), p:Int ) = {
    println(f(p)) // evaluation takes place here!
  }

  // #3 taking two (Int=>Int) function references
  def takeTwoFunctions1(f1: (Int=>Int), f2:(Int=>Int)) = {
    // empty: the references are never applied, so neither function runs
  }

  def takeTwoFunctions2(f1: (Int=>Int), f2:(Int=>Int), p1:Int, p2:Int) = {
    f1(p1)
    f2(p2)
  }


  def f1(p1:Int) = {println ("f1 is invoking"); p1}
  def f2(p1:Int) = {println ("f2 is invoking"); p1}

  // takeFunction( f(x) ) // we can not do it. Worth remembering the syntax!

  // 1--

  println ("#1 try:")

  takeFunction( f1 ) // passing a reference to the function without passing any parameter to it.
                     // So the function "f1" is not evaluated at the moment of passing;
                     // "f1 is invoking" appears only once (from the f(1) call inside takeFunction)

  // 2--

  println ("\n#2 try:")

  takeFunction2( f1, 1) // this is how a higher-order function passes a parameter to the function it received
                        // - and that application is what triggers the evaluation

  // 3-- two function references, never applied

  println ("\n#3 try:")

  takeTwoFunctions1(f1, f2) // nothing is printed: f1, f2 are just references and are never applied;
                            // evaluation only happens at the moment a parameter is passed to them

  takeTwoFunctions2(f1, f2, p1=1, p2=2) // nothing special here - both are applied, so both print

  /* Output:
  #1 try:

  f1 is invoking
  1

  #2 try:
  f1 is invoking
  1

  #3 try:
  f1 is invoking
  f2 is invoking
  */

}
76 |
--------------------------------------------------------------------------------
/src/main/scala/implicits/ImplicitArgs.scala:
--------------------------------------------------------------------------------
1 | // #implicitly #implicit
2 |
3 | package implicits
4 |
object ImplicitArgs extends App {

  // version without implicit
  def multiply2(f: Int => Int) = f(2) // applies f to the hard-coded value 2

  // version with implicit
  def multiply2_v2(f: Int => Int) = f(implicitly[Int])
  // implicitly[Int] = "fetch the Int value from the implicit scope"

  val result1 = multiply2( x => x * 2) // 2 is the 'hardcoded' value here

  implicit val integer: Int = 3 // registering an Int value in the implicit scope
  // implicit val integer2 = 4 // error: ambiguous implicit values

  val result2 = multiply2_v2(x => x * 2)

  println (result1) // output: 2 * 2 = 4
  println (result2) // output: 2 * 3 = 6

  // so, the advantage of using 'implicitly':
  // - we expect that somewhere in the implicit scope an Int value is defined,
  //   so it works like configuration by type

  // --

  // implicit parameter list: x is filled from the implicit scope
  // unless a value is passed explicitly
  def function3(f: Int => Int) (implicit x:Int) = {
    f(x) // x=3 when resolved implicitly
  }

  // NOTE(review): `val` would suffice here - nothing reassigns these
  var result3_1 = function3(x => x * 2) // second parameter list omitted: it is injected implicitly
  var result3_2 = function3(x => x * 2)(4)

  println( result3_1 ) // 3 * 2 = 6
  println( result3_2 ) // 3 * 4 = 8

  // an implicit parameter works ALMOST like a default argument:
  def function4(implicit i:Int) = i
  // nearly the same as:
  def function4_1(i:Int = implicitly[Int]) = i // implicit scope has Int = 3

  // ...but the call syntax differs:
  val result4 = function4 // must be called without ()
  val result4_1 = function4_1() // requires ()

  println("result4: " +result4) // 3
  println("result4_1: " +result4_1) // 3


}
53 |
--------------------------------------------------------------------------------
/src/main/scala/implicits/ImplicitClassTest.scala:
--------------------------------------------------------------------------------
1 | package implicits
2 |
3 | /*
4 | #implicit
5 | related: #implicit-class #implicit-parameters
6 | */
7 |
8 | /*
9 | * Add new method to exiting Int class, That works in particular scope.
10 | */
11 |
/**
 * Holds an implicit class that enriches Int with a `times` method.
 * The enrichment is only visible where `ImplicitsScope._` is imported;
 * the enclosing object merely provides an importable scope.
 */
object ImplicitsScope {

  // enrich-my-library pattern: wraps an Int and adds `times` to it
  implicit class ExtendedInt(x: Int) {

    /** Evaluates the by-name argument `f` exactly x times, discarding results. */
    def times[A]( f: => A ): Unit = {
      var remaining = x
      while (remaining > 0) {
        f
        remaining -= 1
      }
    }

  }

}
26 |
27 |
/** Entry point showing that the Int enrichment exists only where it is imported. */
object Starter extends App {

  foo1

  def foo1 = {
    import implicits.ImplicitsScope._
    5 times println("hello") // works: 'times' is added to Int for this scope only!
  }

  // this would not compile: the scope where "times" is defined is not imported here
  /*
  def foo2 = {
    5 times println("this should not work")
  }*/

}
44 |
--------------------------------------------------------------------------------
/src/main/scala/implicits/ImplicitObjectTest.scala:
--------------------------------------------------------------------------------
1 | package implicits
2 |
3 | /*
4 | #implicit
5 | related: #implicit-objects
6 | */
7 |
/** Type class: a Worker[T] knows how to process a value of type T. */
trait Worker[T] {
  def doIt(it:T): Unit
}
11 |
object ImplicitObjectTest extends App {

  // implicit Worker instances: one per supported type, picked by the compiler

  implicit object StringWorker extends Worker[String] {

    def doIt(it: String) = println(it.charAt(0))

  }

  implicit object IntWorker extends Worker[Int] {

    def doIt(it: Int) = println(it.toString.charAt(0))

  }

  object Boss {
    // the matching Worker[T] is supplied from the implicit scope
    def passWorkToDo[T](work: T)(implicit worker: Worker[T]): Unit = {
      worker.doIt(work)
    }

  }

  // MAIN - the boss passes some work to do, without naming a worker

  Boss.passWorkToDo(work = "take the first letter from this string") // prints "t" (StringWorker selected)

  Boss.passWorkToDo(work = 123456789) // prints "1" - the caller never says which worker does the work

  //Boss.passWorkToDo(work = new Object) // with no Worker[Object] in scope, this fails at COMPILE time

  // So, this is quite close to OO polymorphism, but a bit smarter because:
  // - we use the same method "passWorkToDo", no need to override anything
  // - we may omit params, that makes the function call even shorter

}
46 |
--------------------------------------------------------------------------------
/src/main/scala/implicits/ImplicitlyLookupSample.scala:
--------------------------------------------------------------------------------
1 | package implicits
2 |
3 | // https://issues.scala-lang.org/browse/SI-8849
4 |
5 | /**
6 | * " Nasty gotcha.
7 | * Here's the general implicit search behaviour in question: "
8 | */
9 |
// a three-level hierarchy used to probe which implicit value wins
class A
class B extends A
class C extends B

//class C extends A // NOTE: that would not work, because we could NOT register
//                     two same-level subtypes of A in the implicit scope unambiguously
16 |
object ImplicitlyLookupSample extends App {

  // registering values in the implicit scope:
  // declared type (the KEY) -> instance (the VALUE)
  implicit val a: A = new A
  implicit val b: A = new B
  implicit val c: B = new C

  // So C sits at the bottom of the hierarchy:
  // A
  //  - B
  //    - C

  {

    // we ask the implicit scope for an instance of type A

    // so who wins - the value whose declared key is closer to A, or the one further away?

    val who = implicitly[A]/*(c)*/ // implicitly = look up a value of type [..] in the implicit scope

    println ("who: " + who.getClass) // class implicits.C

    // The answer is "C"!

    // So, whenever someone asks for type A as the KEY - the search leads to C:
    // A
    //  - B
    //    - C <- [A]

    // 1. Client:
    //    "give me the value whose key type is A"

    // 2. Implicit scope:
    //    candidates: `a` and `b` (both keyed A) and `c` (keyed B);
    //    which declared key is MORE SPECIFIC (less abstract)? B, because B extends A;
    //    the B-keyed entry holds -> C
    //    so you get the C instance


    // -- note: 'implicitly' is just syntactic sugar

    // without it we would have to write our own accessor, like:
    def getA(implicit a:A) = a

    val who2 = getA

    println ("who2: " + who2.getClass) // class implicits.C
  }

}
68 |
69 |
--------------------------------------------------------------------------------
/src/main/scala/implicits/SimpleSample.scala:
--------------------------------------------------------------------------------
1 | package implicits
2 |
object SimpleSample extends App {

  // exactly one implicit Int may live in this scope; the variable name is
  // irrelevant - implicit resolution happens purely by type
  implicit val a: Int = 2:Int
  //implicit val b = 2:Int // would fail: two implicit Ints are ambiguous

  // defaulted explicit parameter plus a defaulted implicit one; the value
  // from the implicit scope (a = 2) wins over the declared default of 1
  // whenever the second argument list is omitted
  def fun1(p1:Int = 1)(implicit p2:Int = 1): Int = p1 + p2

  //def fun2(implicit p1:Int = 1)(implicit p2:Int = 1): Int // illegal: only one implicit parameter list is allowed

  println ( fun1() )     // 1 + 2 = 3
  println ( fun1(2) )    // 2 + 2 = 4
  println ( fun1(2)(3) ) // 2 + 3 = 5

}
19 |
--------------------------------------------------------------------------------
/src/main/scala/implicits/implicit_methods.sc:
--------------------------------------------------------------------------------
// #implicit #implicit-methods

case class A(n: Int) // FIX: dropped redundant `val` - case-class parameters are vals already

object A {
  // just grouping implicits here: the companion object is part of A's implicit scope
  implicit def aToString(a: A) : String = "A: %d" format a.n
}

val a = A(5)

// The compiler cannot type `a` as String directly, so it searches the implicit
// scope (A's companion) for a conversion A => String and inserts aToString:
val s1: String = a // s1 == "A: 5"  (FIX: comment previously claimed "A: 2")
// same as calling the conversion explicitly:
val s2:String = A.aToString(a)

// "aToString" is called in the attempt to cast A to String.
// This method is called implicitly.
20 |
--------------------------------------------------------------------------------
/src/main/scala/json/jackson/AnySample.scala:
--------------------------------------------------------------------------------
1 | package json.jackson
2 |
3 | // # json jackson any
4 |
5 | import com.fasterxml.jackson.databind.ObjectMapper
6 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
7 |
8 | /*
9 | class Model {
10 | var name: String = _
11 | var anyObject: Any = _ // anything (will be transformed to Map type)
12 | } */
13 |
// A loosely-typed payload: 'anyObject' accepts any JSON value.
case class Model(
                  name: String,
                  anyObject: Any // anything - Jackson materializes a JSON object here as a Map
                )
18 |
object AnySample extends App {

  // 1. create a mapper; registering DefaultScalaModule teaches Jackson about Scala types
  val mapper = new ObjectMapper
  mapper.registerModule(DefaultScalaModule)

  // 2. given a json string whose "anyObject" field is an arbitrary object
  val json =
    """
      | {
      |  "name": "fred",
      |  "anyObject": {
      |    "prop": "prop-value"
      |  }
      | }
    """.stripMargin


  // 3.
  // convert to object; the Any field comes back as a Scala Map
  val model = mapper.readValue(json, classOf[Model])

  print(model.anyObject.getClass.getSimpleName) // Map1 (an immutable Map implementation)
}
43 |
--------------------------------------------------------------------------------
/src/main/scala/json/jackson/ClassToJson.scala:
--------------------------------------------------------------------------------
1 | package json.jackson
2 |
3 | import java.io.StringWriter
4 |
5 | import com.fasterxml.jackson.databind.ObjectMapper
6 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
7 |
8 | /**
9 | * jackson-module-scala
10 | *
11 | * The Scala Module supports serialization and limited deserialization of:
12 | * Scala Case Classes, Sequences, Maps, Tuples, Options, and Enumerations.
13 | *
14 | * + some info: https://github.com/FasterXML/jackson-module-scala/wiki/FAQ
15 | */
object ClassToJson extends App {

  // 1. create a mapper; DefaultScalaModule teaches Jackson about Scala types
  val mapper = new ObjectMapper
  mapper.registerModule(DefaultScalaModule)

  // 2. given a case class instance

  case class Person(name:String, age:Int)
  val person = Person("fred", 25)

  // 3. serialize the person into a writer
  val out = new StringWriter
  mapper.writeValue(out, person)

  // 4. get the result as a string
  val json = out.toString
  println(json) // {"name":"fred","age":25}


  // from a Map: values may be of mixed types
  {
    val map = Map("a" -> person, "b" -> "not person")
    val mapOut = new StringWriter
    mapper.writeValue(mapOut, map)
    val mapJson = mapOut.toString
    println("mapJson: " + mapJson) // {"a":{"name":"fred","age":25},"b":"not person"}

  }

  // from Options: None is written as JSON null (see printed output below)
  val options = List(Option(1), None)
  val optionOut = new StringWriter
  mapper.writeValue(optionOut, options)
  val optionJson = optionOut.toString

  println("optionJson: " + optionJson) // [1,null] - well, it is JSON's null

}
55 |
--------------------------------------------------------------------------------
/src/main/scala/json/jackson/JsonToClass.scala:
--------------------------------------------------------------------------------
1 | package json.jackson
2 |
3 | import com.fasterxml.jackson.databind.ObjectMapper
4 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
5 |
6 | /**
7 | * jackson-module-scala
8 | *
9 | * The Scala Module supports serialization and limited deserialization of:
10 | * Scala Case Classes, Sequences, Maps, Tuples, Options, and Enumerations.
11 | *
12 | * + some info: https://github.com/FasterXML/jackson-module-scala/wiki/FAQ
13 | */
object JsonToClass extends App {

  // 1. create a mapper; DefaultScalaModule teaches Jackson about Scala types
  val mapper = new ObjectMapper
  mapper.registerModule(DefaultScalaModule)

  // 2. given a json string (note: "age" is a json STRING here)
  val json = """{"name":"fred","age":"25"}"""

  // 3. target case class - "age" is declared Int, and the string "25" still
  //    deserializes into 25 (see the assert below)
  case class Person(name:String, age:Int)

  // convert to object
  val person = mapper.readValue(json, classOf[Person])

  println(person) // Person(fred,25)

  println ("person's age type: " + person.age.getClass.getName ) // int - the JVM primitive, not a boxed Int
  println ("person's age type package: " + person.age.getClass.getPackage ) // null - primitives have no package

  val intAge:Int = 25
  assert(person.age == intAge)

}
38 |
--------------------------------------------------------------------------------
/src/main/scala/json/jackson/SubClasses.scala:
--------------------------------------------------------------------------------
1 | package json.jackson
2 |
3 | // shows how to use subclasses when it comes to de/serialization to/from json
4 |
5 | import java.io.StringWriter
6 | import com.fasterxml.jackson.annotation.JsonSubTypes.Type
7 | import com.fasterxml.jackson.annotation.{JsonSubTypes, JsonTypeInfo}
8 | import com.fasterxml.jackson.databind.ObjectMapper
9 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
10 |
11 | // TODO: https://github.com/FasterXML/jackson-module-scala/issues/199
12 | // at some point I had (not sure how I could reproduce it):
13 | // com.fasterxml.jackson.databind.JsonMappingException: Argument #0 of constructor [constructor for A$A0$A$A0$Group, annotations: [null]] has no property name annotation; must have name when multiple-parameter constructor annotated as Creator
14 |
// Polymorphic (de)serialization: the concrete subclass is encoded in, and
// read back from, the extra JSON property "aType".
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "aType")
@JsonSubTypes(Array(
  new Type(value = classOf[ModelA], name = "ModelA"),
  new Type(value = classOf[ModelB], name = "ModelB")
))
trait BaseModel{
  val commonData: String // field shared by every subtype
}
23 |
// two concrete shapes, distinguished in JSON only by the "aType" discriminator
case class ModelA(a:String, b:String, c:String, commonData:String) extends BaseModel
case class ModelB(a:String, b:String, c:String, commonData:String) extends BaseModel
26 |
object Subclasses extends App {

  // 0. create a mapper; DefaultScalaModule teaches Jackson about Scala types
  val mapper = new ObjectMapper
  mapper.registerModule(DefaultScalaModule)

  // from json to Model
  {

    // 1. given a json string carrying the "aType" discriminator (json omits "b")
    val jsonA =
      """
        | {
        |  "aType" : "ModelA",
        |  "commonData" : "commonData-value",
        |  "a" : "a-value",
        |  "c" : "c-value"
        | }""".stripMargin


    // 2. convert to object
    val model = mapper.readValue(jsonA, classOf[ModelA])

    println("""model read from json (determines class immpl by "aType" field): """ + model) //

  }

  // from model to json --------------

  {

    // 1. models
    val modelA = ModelA("1", "2", "3", "4")
    val modelB = ModelB("1", "2", "3", "5")

    // 2. writers
    val stringWriterA = new StringWriter
    val stringWriterB = new StringWriter
    mapper.writeValue(stringWriterA, modelA)
    mapper.writeValue(stringWriterB, modelB)

    // 3. to json - note the "aType" property written back automatically
    val jsonA = stringWriterA.toString
    val jsonB = stringWriterB.toString

    println("backToJsonA: " + jsonA) // {"aType":"ModelA","a":"1","b":"2","c":"3","commonData":"4"}
    println("backToJsonB: " + jsonB) // {"aType":"ModelB","a":"1","b":"2","c":"3","commonData":"5"}
  }
}
76 |
--------------------------------------------------------------------------------
/src/main/scala/json/playjson/JsValueToAnotherValue.scala:
--------------------------------------------------------------------------------
1 | package json.playjson
2 |
3 | import play.api.libs.json._
4 |
object JsValueToAnotherValue extends App {

  val json = Json.parse(
    s"""{
  "name" : "Watership Down",
  "location" : {
    "lat" : 51.235685,
    "long" : -1.309197
  }
}""")

  val minifiedString: String = Json.stringify(json)
  val readableString: String = Json.prettyPrint(json)


  // The simplest way to convert a JsValue to another type is JsValue.as[T](implicit fjs: Reads[T])
  val name = (json \ "name").as[String]

  println(s"name: $name")

  // asOpt is the non-throwing variant: a missing path yields None
  val nameOption = (json \ "name").asOpt[String]
  val nameOption1 = (json \ "name1").asOpt[String]

  println(nameOption1.toString + "\n") // None



  println("----- Using validation --------")


  val nameResult: JsResult[String] = (json \ "name").validate[String]
  nameResult match {
    // FIX: use the extractor instead of `case s: JsSuccess[String]` -
    // a type pattern on JsSuccess[String] is unchecked (type-argument
    // erasure), so it would also match a JsSuccess of any other element
    // type; the extractor binds the typed value directly.
    case JsSuccess(value, _) => println("Result Name: " + value)
    case e: JsError => println("Result Errors: " + JsError.toJson(e).toString())
  }

  // JsResult is mappable: map transforms the success value, keeps errors as-is
  val nameUpperResult: JsResult[String] = nameResult.map(_.toUpperCase)


  println(nameUpperResult) // JsSuccess(WATERSHIP DOWN,)


  // To convert from JsValue to a model, you must define implicit Reads[T] where T is the type of your model.


}
51 |
--------------------------------------------------------------------------------
/src/main/scala/json/playjson/ReadJsonToObject.scala:
--------------------------------------------------------------------------------
1 | package json.playjson
2 |
3 | import play.api.libs.json._
4 |
5 | import json.playjson.models._
6 |
object ReadJsonToObject extends App {

  // case class Location(lat: Double, long: Double)

  // or a one-line reader: implicit val personReads = Json.reads[Location]
  // that would include all fields

  val jsonValue = Json.parse(""" {
  "lat" : 51.235685,
  "long" : -1.309197
  }""")

  jsonValue.validate[Location] match { // 'validate' converts and validates to Location
    // FIX: use the extractor instead of `case s: JsSuccess[Location]` -
    // a type pattern on JsSuccess[Location] is unchecked (type-argument
    // erasure); the extractor binds the typed value directly, so the
    // intermediate `s.get` is no longer needed.
    case JsSuccess(location, _) => {
      // location == Location(51.235685,-1.309197)

      print(location)
      // do something with place
    }
    case e: JsError => {
      // error handling flow
    }
  }


}
33 |
--------------------------------------------------------------------------------
/src/main/scala/json/playjson/StringToJsValue.scala:
--------------------------------------------------------------------------------
1 | package json.playjson
2 |
3 | import play.api.libs.json._
4 |
5 | // The Play JSON library
6 |
7 | // https://www.playframework.com/documentation/2.4.x/ScalaJson
8 |
9 |
object StringToJsValue extends App {


  val jsonString = s"""{
  "name" : "Watership Down",
  "location" : {
    "lat" : 51.235685,
    "long" : -1.309197
  },
  "residents" : [ {
    "name" : "Fiver",
    "age" : 4,
    "role" : null
  }, {
    "name" : "Bigwig",
    "age" : 6,
    "role" : "Owsla"
  } ]
}"""

  // parse the raw string into the JsValue tree
  val json: JsValue = Json.parse(jsonString)

  // Traversing a JsValue structure

  val lat = (json \ "location" \ "lat").as[Double]
  val lat2 = (json \\ "lat").head.as[Double] // \\ is the recursive path: it collects every "lat" found at any depth, hence .head

  println(s"lat1: $lat")
  println(s"lat2: $lat2")


  println(json)

  /*
  JsValue has one subtype per valid JSON type:

  JsString
  JsNumber
  JsBoolean
  JsObject
  JsArray
  JsNull
  */

}
55 |
--------------------------------------------------------------------------------
/src/main/scala/json/playjson/WriteObjectToJson.scala:
--------------------------------------------------------------------------------
1 | package json.playjson
2 |
3 | import json.playjson.models.{Resident, Location, Place}
4 | import play.api.libs.json._
5 | /*
6 | The Play JSON API provides implicit Writes for most basic types,
7 | such as Int, Double, String, and Boolean.
8 | It also supports Writes for collections of any type T that a Writes[T]
9 | */
10 |
11 | /*
12 | * To convert your own models to JsValues,
13 | * you must define implicit Writes converters and provide them in scope.
14 | */
15 |
16 | // https://www.playframework.com/documentation/2.4.x/ScalaJsonCombinators
17 |
// writer: writes an object to json
19 |
20 |
object WriteObjectToJson extends App {

  // build the model; the Writes for Place/Location/Resident are the implicits
  // defined in each companion object (see models/Models.scala)
  val place = Place(
    "Watership Down",
    Location(51.235685, -1.309197),
    Seq(
      Resident("Fiver", 4, None),
      Resident("Bigwig", 6, Some("Owsla"))
    )
  )
  val jsonValue = Json.toJson(place)


  println(jsonValue) // minified form

  println( Json.prettyPrint(jsonValue) ) // human-readable form

}
39 |
--------------------------------------------------------------------------------
/src/main/scala/json/playjson/models/LisOfObjFormat.scala:
--------------------------------------------------------------------------------
1 | package json.playjson.models
2 |
3 | // Shows how to convert to list of JSON with minimal effort. Using Json.format[T]
4 | // https://www.playframework.com/documentation/2.5.x/api/scala/index.html#play.api.libs.json.Format
5 |
case class MyObject(name: String, num: Double) // sample payload (de)serialized via the Json.format below
7 |
object LisOfObjFormat extends App {
  import play.api.libs.json._
  // Json.format derives both the Reads and the Writes for MyObject in one line
  implicit val myObjectFormat: Format[MyObject] = Json.format[MyObject]

  val objectList = Seq(
    MyObject("1", 1),
    MyObject("2", 2),
    MyObject("3", 3)
  )

  // wrap the serialized list in an enclosing JSON object under the key "objects"
  val jsObjects: JsObject = Json.obj("objects" -> Json.toJson(objectList))

  println(jsObjects) // {"objects":[{"name":"1","num":1},{"name":"2","num":2},{"name":"3","num":3}]}

}
23 |
--------------------------------------------------------------------------------
/src/main/scala/json/playjson/models/Models.scala:
--------------------------------------------------------------------------------
1 | package json.playjson.models
2 |
3 | import play.api.libs.json.{JsPath, Reads, Json, Writes}
4 |
5 | import play.api.libs.functional.syntax._
6 |
7 |
// the three model types used across the play-json examples
case class Location(lat: Double, long: Double)
case class Resident(name: String, age: Int, role: Option[String]) // role is optional
case class Place(name: String, location: Location, residents: Seq[Resident])
11 |
12 | /*
13 | Set of companion objects.
14 |
So all implicits defined in the object's scope will be available for the case classes,
so that

Json.toJson(classInstance) will be able to find the proper writer

and

jsonValue.validate[Class] will be able to find the proper reader
23 | */
24 |
object Location {

  // minimalistic: one-line derived writer covering all fields
  implicit val locationWrites: Writes[Location] = Json.writes[Location]

  // equivalent hand-written form:
  /*
  implicit val locationWrites = new Writes[Location] {
    def writes(location: Location) = Json.obj(
      "lat" -> location.lat,
      "long" -> location.long
    )
  }*/


  // minimalistic: one-line derived reader covering all fields
  implicit val locationReads: Reads[Location] = Json.reads[Location]

  // equivalent hand-written form:
  /*
  implicit val locationReads: Reads[Location] = (
    (JsPath \ "lat").read[Double] and
    (JsPath \ "long").read[Double]
  )(Location.apply _)
  */

}
52 |
object Resident {
  // minimalistic: one-line derived writer covering all fields
  implicit val residentWrites: Writes[Resident] = Json.writes[Resident]

  // equivalent hand-written form:
  /*
  implicit val residentWrites = new Writes[Resident] {
    def writes(resident: Resident) = Json.obj(
      "name" -> resident.name,
      "age" -> resident.age,
      "role" -> resident.role
    )
  }*/

}
68 |
object Place {

  // one-line derived writer; the nested Location/Resident writers are found implicitly
  implicit val placeWrites: Writes[Place] = Json.writes[Place]
  // equivalent hand-written form:
  /*
  implicit val placeWrites = new Writes[Place] {
    def writes(place: Place) = Json.obj(
      "name" -> place.name,
      "location" -> place.location,
      "residents" -> place.residents)
  }
  */


}
84 |
85 |
86 |
87 |
--------------------------------------------------------------------------------
/src/main/scala/json/playjson/readme.md:
--------------------------------------------------------------------------------
1 | 1. Say annotations "NO"
2 | 2. Say filterable serialization "NO"
3. Think: maybe you don't need to create case classes at all - just use JsValue
4 | (
5 | basically we want objects for:
6 | 1. better ide support - to be able to access fields by dot (.)
7 | 2. to be able to use equals and other methods without converting json to object all the time
8 | )
9 | // --
10 |
I do not see a good reason to use annotations on model classes that are intended
to be serialized to JSON, the way we usually do for Java + Jackson:
13 |
14 | ```
15 | public class Name {
16 | @JsonProperty("firstName")
17 | public String _first_name;
18 | }
19 | ```
20 |
All of that makes things overcomplicated at once.
22 |
It is much nicer to use case classes dedicated to being 'transfer objects', if you will, with all
fields ready to be converted into JSON. Like this:
25 |
26 | ```
27 | case class Name(firstName:String)
28 | ```
29 |
30 |
then convert/write it to JSON:

35 | ```
36 | Json.toJson(name)
37 | ```
38 |
where the 'writer' is defined in the companion object and picked up implicitly for the toJson() function to operate with
40 |
41 | ```
42 | object Name {
43 | implicit val nameWriter = Json.writes[Name]
44 | }
45 | ```
46 |
47 | or even
48 |
49 | ```
50 | object Name {
51 | implicit val nameFormat = Json.format[Name]
52 | }
53 | ```
54 |
55 |
If we find ourselves needing to filter which fields get serialized - just do not do it.
Create a new case class for that purpose instead, or use JsValue directly.
58 |
59 | About the json request/responses standards:
60 | http://stackoverflow.com/questions/12806386/standard-json-api-response-format
61 |
--------------------------------------------------------------------------------
/src/main/scala/lazy_test/LazyVal.scala:
--------------------------------------------------------------------------------
1 | package lazy_test
2 |
3 | /*
4 | #lazy-val
5 | */
6 | // sequence: #1
7 |
object LazyVal extends App {

  def fn(x:Int) = {println(x)}

  val v1 = fn(1) // eager: evaluated immediately, so this prints "1"
  lazy val v2 = fn(2) // prints nothing here! A lazy val is not evaluated until it is first accessed

  println("here we are")

  // but if I try to use v2..
  v2 // first access forces the evaluation, so '2' is printed now

  v2 // second access prints nothing - the result was cached after the first evaluation
}
22 |
--------------------------------------------------------------------------------
/src/main/scala/lenses/CommonData.scala:
--------------------------------------------------------------------------------
1 | package lenses
2 |
/**
 * Shared fixture for the lens examples in this package: one sample Employee
 * with the full nested Company -> Address -> Street structure populated.
 */
object CommonData {

  val theEmployee = Employee("john", Company("awesome inc", Address("london", Street(23, "high street"))))

}
11 |
--------------------------------------------------------------------------------
/src/main/scala/lenses/Employee.scala:
--------------------------------------------------------------------------------
1 | package lenses
2 |
// The nested immutable model used by the lens examples:
// an Employee holds a Company, which holds an Address, which holds a Street.
case class Street(number: Int, name: String)
case class Address(city: String, street: Street)
case class Company(name: String, address: Address)

case class Employee(name: String, company: Company) // Aggregate Root (if you will)
8 |
--------------------------------------------------------------------------------
/src/main/scala/lenses/NoLensesSample.scala:
--------------------------------------------------------------------------------
1 | package lenses
2 |
3 | // Sample a problem, that we would solve with either Moncole, Shapless or ScalaZ ' lenses.
4 |
object NoLensesSample extends App {

  import CommonData.theEmployee // see CommonData for the details of that object

  // The problem: to update one deeply nested field of an immutable structure
  // we have to copy() every level on the way down.
  // Naming each level first makes the nesting readable, but the boilerplate
  // is still there - exactly what lenses are meant to remove.
  val company = theEmployee.company
  val address = company.address
  val street  = address.street

  val newEmployee = theEmployee.copy(
    company = company.copy(
      address = address.copy(
        street = street.copy(
          name = street.name.capitalize // capitalize !
        )
      )
    )
  )
  // still too much code for a one-field change!

  println(newEmployee) // ... ,High street

}
24 |
--------------------------------------------------------------------------------
/src/main/scala/lenses/monocle/LensesSample.scala:
--------------------------------------------------------------------------------
1 | //package lenses.monocle
2 | //
3 | //// http://julien-truffaut.github.io/Monocle/
4 | //
5 | //import lenses.{Address, Company, Employee, Street}
6 | //import monocle.{Lens, PLens}
7 | //import monocle.macros.GenLens
8 | //
9 | //object LensesSample extends App {
10 | //
11 | // import lenses.CommonData.theEmployee
12 | //
13 | // println(theEmployee)
14 | //
15 | // {
16 | //
17 | // // we have to create "lenses" for each employee' field.
18 | // // So then we can move/hide is somewhere (as configuration details)
19 | // val company: Lens[Employee, Company] = GenLens[Employee](_.company) // Company of Employee
20 | // val address: Lens[Company, Address] = GenLens[Company](_.address) // Address of Company
21 | // val street: Lens[Address, Street] = GenLens[Address](_.street) // Street of Address
22 | // val streetName: Lens[Street, String] = GenLens[Street](_.name) // String's street name of Street
23 | //
24 | // // Now, "zoom in" to the street name
25 | // val streetNameLense = company composeLens address composeLens street composeLens streetName
26 | //
//    // read it as: for the company's street-name field, modify the employee object by applying the capitalize function
//    val newEmployee1 = streetNameLense.modify(_.capitalize)(theEmployee) // much simpler - one line of code
//
//    import monocle.function.Cons.headOption // to be able to zoom in more precisely - a sort of partial lens
//    val newEmployee2 = streetNameLense.composeOptional(headOption).modify(_.toUpper)(theEmployee) // much simpler - one line of code
//
//    // How does it know that capitalize should be applied for the Street name ?
34 | //
35 | // println(newEmployee1) // ~ ... ,High street
36 | // println(newEmployee2) // ~ ... ,High street
37 | // }
38 | //
39 | // // with macro it is even simpler
40 | //
41 | // {
42 | // import monocle.function.Cons.headOption // to able to zoom in more precisely
43 | // import monocle.macros.syntax.lens._ // macro !
44 | //
45 | // // use lens-macro - very clear now !
46 | //
47 | // val streetNameLense = theEmployee.lens(_.company.address.street.name).composeOptional(headOption)
48 | //
49 | // val newEmployee = streetNameLense.modify(_.toUpper)
50 | //
51 | // println(newEmployee) // ~ ... ,High street
52 | // }
53 | //
54 | //
55 | //}
56 | //
57 |
--------------------------------------------------------------------------------
/src/main/scala/lenses/shapless/LensesSample.scala:
--------------------------------------------------------------------------------
1 | package lenses.shapless
2 |
3 | import lenses.{Address, Company, Employee, Street}
4 |
object LensesSample extends App {

  import lenses.CommonData.theEmployee

  // TODO: both shapless and monocle (as well as scalaz) have lenses
  // NOTE(review): placeholder only - no shapeless lens code has been written yet,
  // so this import (and the file-level one) is currently unused.

}
12 |
--------------------------------------------------------------------------------
/src/main/scala/looping/ForAsMap.scala:
--------------------------------------------------------------------------------
1 | package looping
2 |
3 | // #for #map-method #filter #flatMap #2d-array
4 |
// Demonstrates that a for-comprehension is sugar for map / withFilter / flatMap.
object ForAsMap extends App {

  // #1 for() and map()

  // for-comprehension version:
  {
    val list = for (x <- List(1,2,3)) yield x+1

    println (list) // List(2, 3, 4)
  }

  // same thing using map:
  {
    val list = List(1,2,3).map(_+1)

    println (list) // List(2,3,4)
  }

  // so, map() reads better in this simple case.
  // Even more - the compiler desugars 'for ... yield' into a map() call


  // #2 for and filter()

  {

    val list = for (x <- List(1,2,3) if x<3 ) yield x

    println ("list1: " + list) // List(1,2)

  }

  {

    val list = for (x <- List(1,2,3).withFilter(_<3) ) yield x

    println ("list2: " + list) // same result - an 'if' guard in a for desugars to withFilter

  }


  // #3 2dArray - for vs flatMap

  {

    val matrix = Array.ofDim[Int](2,2)
    // filling the 2D array element by element:
    matrix(0)(0) = 1; matrix(0)(1) = 2
    matrix(1)(0) = 3; matrix(1)(1) = 4 // Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))

    // for:
    {
      //matrix: Array[Array[Int]] = Array(Array(1, 2), Array(3, 4))

      // traversing the 2D array with two generators (a nested loop that flattens):

      val elements = for ( // elements: Array[Int] = Array(1, 2, 3, 4)
        row <- matrix;
        elem <- row
      ) yield elem

      println ("elements1: ")
      elements foreach ( print (_) ) // 1234

    }

    // flatMap:
    {
      //val elements = matrix flatMap( row => for (elements <-row) yield elements ) // Array[Int]
      // simpler:
      val elements = matrix flatMap( for (elements <-_) yield elements ) // Array[Int]

      println ("\nelements2: ")
      elements foreach ( print (_) ) // 1234
    }

    // so, you decide what is better for you - to use 'for' or 'flatMap'

  }



}
88 |
--------------------------------------------------------------------------------
/src/main/scala/looping/ForLoop.scala:
--------------------------------------------------------------------------------
1 | package looping
2 |
3 | /*
4 | #loop #foor-loop #list-comprehension
5 | related: #filtering #yield #tuple
6 | */
object ForLoop extends App {

  //1. C-style indexed loops do not exist in Scala
  //for (int i=0; i<10; i++) // no-no-no, even do not try

  //2. iterate over an inclusive Range instead
  for (i <- 0 to 3) print(i) // 0123

  //2. '3 to 0' is an EMPTY Range (the default step is +1), so this prints nothing
  for (i <- 3 to 0) print(i) // nope.. but good try

  //2.1 counting down needs an explicit negative step
  println()
  for (i <- 3 to 0 by -1) print(i) // 3210

  // 2.2 any step works
  println()
  for (i <- 0 to 3 by 2) print(i) // 02

  // what about loop in the loop?
  // 3.1 multiple generators separated by ";" form a nested loop
  println()
  for (i <- 0 to 3; j <- 0 to 3) print(i, j)

  // 3.2 or maybe you prefer this syntax, without using ";"
  println()
  for {
    i <- 0 to 3
    j <- 0 to 3
  } print(i, j)


  // 4. you can collect (yield) values while looping into a collection
  println()
  val list1 = for (i <- 0 to 3) yield i

  println (list1) // Vector(0, 1, 2, 3)

  // 5. you can filter while looping with a guard

  val list2 = for (i <- 0 to 3 if i%2==0 ) yield i // list-comprehension

  println (list2) // Vector(0, 2)

  // 5.1 you also can filter like this.
  for (i <- 0 to 3)
    if (i%2==0) // you may omit "{..}". But maybe it's obvious
      print (i)
  // println (i) // no you can not continue like that. "i" is not visible outside the "for" scope

  // 6. '=' definitions inside the for block work like vals
  // NOTE(review): the 'k' generator on the last line SHADOWS the 'k' defined one
  // line above - legal but confusing; the yielded 'k' is the generator's one
  val result = for { i <- 1 to 3 ;
  j = i * 2; // same as val j
  k = j * 2; // same as val k
  k <- j to k } yield k

  println("\n6. result: " + result)

  // 7. how to traverse a map: destructure each entry into a (key, value) tuple
  val mapResult = for { (key, value) <- Map(1->"One", 2->"two") } yield (key, value) // #tuple related

  println( "7. " + mapResult )
}
71 |
--------------------------------------------------------------------------------
/src/main/scala/looping/ForeachLoop.scala:
--------------------------------------------------------------------------------
1 | package looping
2 |
3 | /*
4 | #loop #foreach
5 | */
6 |
object ForeachLoop extends App {

  // 1. a Range prints all of its elements

  println ((0 to 3)) // Range(0, 1, 2, 3)

  (0 to 3).foreach(print) // 0123

  // 2.


}
19 |
--------------------------------------------------------------------------------
/src/main/scala/looping/listComprehension.scala:
--------------------------------------------------------------------------------
1 | // #list-comprehension #haskell
2 | // http://en.wikipedia.org/wiki/List_comprehension
3 |
4 | package looping
5 |
6 | /*
7 | While in Haskell:
8 |
9 | contact :: [[a]] -> [a]
10 | contact xss = [x | xs <- xss, x <- xs] -- 'hungarian-notation' for lists. meaning xss - list of lists; xs - list of x; x - end value
result = contact [[1],[3,4,5],[6]] -- [1,3,4,5,6]
12 |
13 | */
14 |
object ListComprehension extends App {

  // flattens a list of lists via a for-comprehension - the Scala analogue of the
  // Haskell list comprehension above (name kept as 'contact' to mirror the Haskell sample)
  def contact(xss: List[List[Int]]) : List[Int] = for ( xs <- xss ; x <- xs ) yield x
  val result = contact( List( List(1), List(3,4,5), List(6)) )

  println("result: " + result) // result: List(1, 3, 4, 5, 6)

}
23 |
// As you can see the Haskell code is more readable. Two reasons: 1. 'hungarian-notation' is used in Haskell 2. square-brackets for lists
25 | // About Scala vs Haskell list syntax: http://stackoverflow.com/questions/6171955/scala-alternative-list-syntax-with-square-brackets-if-possible
26 |
27 |
--------------------------------------------------------------------------------
/src/main/scala/overriding/OverrideBasics.scala:
--------------------------------------------------------------------------------
1 | package overriding
2 |
3 | // note: overriding & overloading happens by name and arguments
4 | // like a() == a(), but a(p:Int) != a() != a(p:String)
5 |
object OverrideBasics extends App {

  class A {
    def foo():String = ""
  }

  // --- override

  class B extends A {
    override def foo() = "" // same name and same (empty) parameter list: 'override' is required
    def foo(a:Int) = "" // different parameter list makes it an overload - no 'override' needed: () != (_:Int)
  }

  // -- overloading

  def foo(a:String):String = "" // no name collision _:Int != _:String
  //def foo(a:String):Int = 1 // collision: the return type alone does not distinguish overloads

  def foo(a:A) = "a"
  def foo(a:B) = "b" // no name collision _:A != _:B

  println ( foo(new A) ) // a
  println ( foo(new B) ) // b - overload resolution picks the most specific static type

}
31 |
--------------------------------------------------------------------------------
/src/main/scala/overriding/OverridePlus.scala:
--------------------------------------------------------------------------------
1 | package overriding
2 |
3 | // #generic-method-overriding
4 |
5 | // http://www.scala-lang.org/files/archive/spec/2.11/05-classes-and-objects.html
6 | // 5.1.4 - Overriding
7 |
8 | // TODO:move it another package and rename
9 |
object OverridePlus extends App {

  class A {
    def aa(a:A):String = "boo"
  }
  class B() extends A {
    // override def aa(a:B):String = "boo" // ! you can NOT do this - narrowing a parameter type is not a valid override
  }

  // but there is a way: an abstract type member

  trait A1 {
    type ChildOfA <: A // abstract type member bounded by A (this is what makes the trait abstract)
    def aa(a:ChildOfA):A = new B
  }

  class B1 extends A1 {
    type ChildOfA = B // fix the abstract type to the concrete B
    override def aa(b:B):B = new B // ! now you CAN do it - the signature matches aa(ChildOfA) once ChildOfA = B
  }

  val result2 = (new B1).aa(new B)

  println(result2.getClass.getSimpleName) // B

}
36 |
--------------------------------------------------------------------------------
/src/main/scala/partial_function/PartialToReal.scala:
--------------------------------------------------------------------------------
1 | package partial_function
2 |
3 | // please look at 'PartialFunctionTest' first
4 |
5 | // #partial-function #lift
6 |
object PartialToReal extends App {

  // # 1 lifting a PartialFunction into a total, Option-returning function
  {
    type PF = PartialFunction[Int, Int] // just to make it shorter

    val pf1 : PF = { case 1 => 2 } // defined only at 1

    println ( "pf2: " + pf1 )

    // just check that we are on the right track
    val defined = pf1.isDefinedAt(1)
    println ("defined: " + defined) // true

    val realFunction = pf1.lift // Int => Option[Int]: total, None where pf1 is undefined

    println ( "realFunction(1): " + realFunction(1) ) // Some(2)

  }

  // #2 using Seq.lift for safe indexing
  {

    // ASCII level map: '-' is void, everything else is terrain
    val level =
      """ooo-------
        |oSoooo----
        |ooooooooo-
        |-ooooooooo
        |-----ooToo
        |------ooo-""".stripMargin

    lazy val vector: Vector[Vector[Char]] = Vector(level.split("\n").map(str => Vector(str: _*)): _*)

    case class Pos(x: Int, y: Int) {
      def dx(d: Int) = copy(x = x + d)
      def dy(d: Int) = copy(y = y + d)
    }

    // Builds a total Pos => Boolean telling whether a position is inside the level
    // AND not '-'. Vector's lift gives Option-returning indexing, so out-of-bounds
    // positions yield None instead of throwing.
    def terrainFunction(levelVector: Vector[Vector[Char]]): Pos => Boolean = {

      case Pos(x, y) =>
        (
          for {
            row <- levelVector lift (x)
            ch  <- row lift (y)
            if ch != '-'
          } yield ch
        ).isDefined

    }

    lazy val terrain = terrainFunction(vector)

    // FIX: the original line was
    //   println( "terrain: " + terrain lift (1) )   // printed Some(e) ???
    // Because '+' binds tighter than the letter-named infix method 'lift', that parsed as
    //   ("terrain: " + terrain).lift(1)
    // i.e. it indexed the concatenated STRING and printed Some('e') - the char at index 1.
    // 'terrain' is a plain Pos => Boolean (not a PartialFunction), so call it with a Pos:
    println( "terrain: " + terrain(Pos(1, 1)) ) // true - row 1, col 1 is 'S', which is terrain

  }

}
78 |
--------------------------------------------------------------------------------
/src/main/scala/partially_applied_functions/PartialAppliedFunction_And_Currying.scala:
--------------------------------------------------------------------------------
1 | package partially_applied_functions
2 |
3 | /*
4 | #partially-applied-function
5 | related: #currying
6 | */
7 |
8 | /*
9 |
10 | Q1: So, what are the differences exactly between partially applied functions and curried functions in Scala?
11 |
12 | A: Short answer: Partially applied function (PAF) & Curried don't conflict with each others.
13 | Partially applied function is a tool to make General FUnction or Currying function to be partial.
14 |
15 | */
16 |
object PartialAppliedFunction_And_Currying extends App {


  // Partially applied function (PAF) & Curried don't conflict with each other.
  // PAF can be applied to general functions as well as to curried functions.

  def f1(x:Int, y:Int) = {x+y}
  def f2(x:Int)(y:Int) = {x+y} // #currying related

  // So, a "partially applied function" is just a not-yet-fully-evaluated function.
  // We can convert either a "general function" or a "curried function" to a partially applied (partially evaluated) one

  // the syntax for making them partial differs
  def ff1 = f1(1, _:Int) // "_:Int" stands for the still-missing Int argument
  def ff2 = f2(1)_ // trailing "_" turns the remaining parameter list into a function value

  println(ff1) // prints the function object's toString, not a result
  println(ff2) // prints the function object's toString, not a result

  println(ff1(2)) // 3
  println(ff2(2)) // 3

  /*
  Output (the first two lines are function-object references, e.g. <function1>):
  <function1>
  <function1>
  3
  3
  */

}
48 |
--------------------------------------------------------------------------------
/src/main/scala/partially_applied_functions/PartiallyAppliedFunction.scala:
--------------------------------------------------------------------------------
1 | package partially_applied_functions
2 |
3 | /*
4 | #partially-applied-function
5 | related: #currying
6 | */
7 |
object PartiallyAppliedFunction extends App {

  // #1
  def f1(a:Int, b:Int) = a + b // a regular two-argument function - not born to be partial (no "_" in its definition)

  //def partFun = f1(2)_ // does not compile: a trailing "_" works per parameter LIST, not per argument

  // #1.1
  // but we can still turn it into a partially applied function explicitly:
  val x = f1(1, _:Int) // first argument fixed; "_:Int" leaves a one-argument function
  println("x: " + x ) // prints the function object's toString
  println("x(2): " + x(2)) // 3

  // #2 "currying"
  def f2(a:Int)(b:Int) = a + b // curried: two parameter lists (#currying related)

  def partFun = f2(2)_ // apply the first list, leave the second - a partially applied function

  println("partFun: " + partFun)
  println("partFun: " + partFun(2)) // 4

  // thus, converting a curried function to a partial one is easier than doing the same with a general function..

  /*
  Output (function objects print as references like <function1>):
  x: <function1>
  x(2): 3
  partFun: <function1>
  partFun: 4
  */
}
39 |
--------------------------------------------------------------------------------
/src/main/scala/pattern_matching/PatterMatching_CaseClasses.scala:
--------------------------------------------------------------------------------
1 | package pattern_matching
2 |
3 | /*
4 | * #pattern-matching #case-classes
5 | * related: #unapply-method #extractor #companion-object
6 | */
object PatterMatching_CaseClasses extends App {

  // 1. case classes pattern-match out of the box
  case class A(a:Int, b:Int)

  val a = A(1,2)

  a match {
    case A(_,_) => println("_,_") // possible because case classes get a compiler-generated "unapply"
  }


  // 2. To understand how pattern-matching works..
  // Let's define our own unapply method for a regular class - reinventing what case classes provide by default

  object MyA { // #companion-object related

    def apply(a:Int, b:Int) = new MyA(a, b)

    def unapply(myA:MyA) : Option[(Int, Int)] = { // invoked every time a 'case MyA(...)' pattern is tried against a value
      // the body of unapply decides whether the argument matched (Some) or not (None)
      Some(myA.a, myA.b) // in our case it matches all the time
    }
  }
  class MyA(val a:Int, val b:Int) // born to be able to participate in pattern-matching

  val myA = MyA(1, 2) // same as MyA.apply(1,2)

  myA match {
    case MyA(1, 2) => println ("got (1,2)") // 'case MyA(1, 2)' invokes MyA.unapply(myA) and compares the extracted pair to (1, 2). Worth remembering!
  }

  // (that says: a class without an unapply method can not be used in pattern-matching)

  // let's check whether "_"-style patterns still work for our regular (non-case) class
  myA match {
    case MyA(_, _) => println ("got it (_,_)") // all "_"-magic still works even for regular (non-case) classes
                                               // provided that they have an unapply method defined
  }

  // 3. a case class that accepts a function as a parameter (what would happen ? )
  {
    case class F( f: Int => Int) // the case class that expects a function as parameter

    def f(a:Int) = {a + a} // function that returns twice what it gets

    val obj = F(f)
    val f_ref = obj.f // a getter that returns the stored function. works as expected.
    println("result:" + f_ref(2)) // result:4

    // and how does pattern-matching work with it?
    obj match {
      case F( f:(Int=>Int) ) => println(" f:(Int=>Int) matched") // works as expected
    }
    obj match {
      case F( f: (Any=>Any) ) => println(" f:(Any=>Any) matched") // matches too - function type arguments are erased at runtime
    }
    obj match {
      case F( _ ) => println("_ matched") // works as expected
    }
    /*
    obj match {
      case F( 4 ) => println("will not work") // it expects a function, not an Int value (or function result)
    }*/

  }

}
75 |
--------------------------------------------------------------------------------
/src/main/scala/pattern_matching/PatternMatchingTest.scala:
--------------------------------------------------------------------------------
1 | package pattern_matching
2 |
3 | /**
4 | * #pattern-matching
5 | * related: #pattern-overlaps #pattern-guards #variable-binding
6 | */
object PatternMatchingTest extends App {

  // 1. a match is an expression - it yields a value
  val x1 = 1

  val str1 = x1 match {
    case 1 => "one"
    case 2 => "two"
  }
  println (str1) // "one"

  // 2.
  val x2 = 2

  val str2 = x2 match {
    case x2 if (x2>1) => "more than one" // a guard (#pattern-guards related); NB: this 'x2' is a NEW binding shadowing the outer x2
    case _ => "default" // without this case a RUNTIME "scala.MatchError" is thrown when x2 <= 1 !!!
  } // so, the rule is: all cases should be covered !

  println (str2) // prints: "more than one" if x2=2. And prints: "default" when x2=1

  // 3. Nested cases. !!
  {
    val x1 = 2
    val str = x1 match { // NOTE: if x1 > 1 but "x"+x were not "x2", the INNER match would throw scala.MatchError
      case x if(x > 1) => "x"+x match {case "x2" => "yes"}
      case _ => "nope"
    }
    println (str) // yes
  }

  // 4. how it works with lists
  val list = List(1,2,3)
  list match {
    case List(_,_,3) => println("yes, there is '3' as a last element")
  }
  list match {
    case List(_,_) => println("yes, there are two element defined") // will NOT be printed - matches only a 2-element list
    case List(_) => println("could not find what to match 1") // will NOT be printed - matches only a 1-element list
    case List(_*) => println("could not find what to match 2") // will be printed - '_*' matches any number of elements
  }


  // 5. how it matches by type
  def function(x:Any):Unit = x match {

    case _:String => println("yes, this is string")

    case x:Number => println("no, this is NOT string, this is number..")

    // #pattern-overlaps related
    // unreachable: an Int is boxed to java.lang.Integer (a Number), so the case above
    // always wins. Ordering overlapping cases is the developer's responsibility
    case _:Int => println("no, this is NOT string, this is Int..")

    case _ => println("who knows..")
  }
  function("123")
  function(123)

  // 6. you can define a variable binding inside 'case' ( #variable-binding related )
  {
    case class AA(a:Int, b:Int)
    val a = AA(1,2)
    a match {
      case AA(x @ myX, y) => println("myX: " + myX) // myX is binding to x (in this case myX is an alias to x)
    }
  }

  // 6.1 but '@' sign is more than just aliasing. It is binding .. in wider sense
  // TODO: See: ExtractorsTest.scala that explains it

}
79 |
--------------------------------------------------------------------------------
/src/main/scala/pattern_matching/PatternMatching_AnonymousFunction.scala:
--------------------------------------------------------------------------------
1 | package pattern_matching
2 |
3 |
4 | /*
5 | * #pattern-matching #anonymous-function #partial-function
6 | * related:
7 | */
object PatternMatching_AnonymousFunction extends App {

  // 1. use pattern-matching as anonymous function

  // working with map, where type of Key and Value is defined

  val map = Map[Int,String](1->"A", 2->"B")

  map foreach { case(k,v) => println("k="+k+";v="+v) } // a pattern-matching anonymous function destructures each (key, value) tuple

  // just reminding that foreach() is defined as: "def foreach[U](f: A => U):Unit "

  // without pattern matching, we would access the tuple fields via _1/_2:
  map.foreach(x => {
    if (x._1 == 1) println("k=" + x._1 + ";v=" + x._2)
    else println("k=" + x._1 + ";v=" + x._2)
  })

  // so, foreach expects a function

  // But what kind of function have we provided with " case(k,v) => println("k="+k+";v="+v) " ?
  // It is:
  // (Int,String) => Unit i.e. Tuple2[Int, String] => Unit ?
  // TODO: http://stackoverflow.com/questions/18807890/to-see-anonymous-function-declaration


  // 2.
  val list = List("a", "b", "c", 1, 2, 3) // strings and integers mixed in one list

  // will not compile :

  // list map { case (x:Int) => println(x+1) } // try to increase all Int values

  // because map() needs a TOTAL function, and our case handles only Int - not the String elements.
  // I.e. the data we have (integers and strings) deprives our anonymous function of sense.

  // but this will work
  list collect{ case (x:Int) => print(x+1) } // "234" - works, because collect accepts a PartialFunction
                                             // and simply skips elements it is not defined at

  // @see PartialFunctionTest ! to get explanation

}
50 |
--------------------------------------------------------------------------------
/src/main/scala/pattern_matching/WithTuples.scala:
--------------------------------------------------------------------------------
1 | package pattern_matching
2 |
3 | //#pattern-matching #tuples
4 |
object WithTuples extends App {

  // matching on a single argument (arg2 is intentionally unused here):
  def function(arg1:String, arg2:String) : String = arg1 match {
    case "a" => "first letter"
    case _ => "not first letter"
  }

  // but what if we need to match on several arguments? Pack them into a tuple:
  def function2(arg1:String, arg2:String) : String = (arg1, arg2) match {
    case ("val1", "val2") => "val1, val2"
    case (a, b) if a == b => "equal!" // use tuples to hit the case
    case _ => "not equal" // FIX: the original match was non-exhaustive - any unequal pair
                          // other than ("val1","val2") threw scala.MatchError at runtime
  }

  val result = function2("a", "a") // equal!


  println(result)

}
25 |
--------------------------------------------------------------------------------
/src/main/scala/pattern_matching/extractors/PatternValDefinitionExample.scala:
--------------------------------------------------------------------------------
1 | package pattern_matching.extractors
2 |
3 | import scala.util.matching.Regex
4 | import scala.collection.SeqFactory
5 |
6 |
7 | // #pattern-definition, #PatVarDef #pattern-matching #extractor #unapply
8 |
object PatternValDefinitionExample extends App {

  // #1 a pattern on the left-hand side of a 'val' runs the extractor
  {
    val someone = Some(1)
    val Some(one) = someone // a pattern definition: Some.unapply extracts the value into 'one'

    println(one) // 1
  }

  // #2
  {

    case class Example(a:String, b: Int)
    val x = Example("hello", 42)
    val Example(s, i) = x // the case class' extractor/unapply is invoked

    println(s"$s, $i") // hello, 42

  }

  // #3
  {
    val foo = List(1,2,3,4)
    val List(one, two, three, four) = foo // extractor/unapplySeq is invoked (MatchError if the length differs)

    println(s"$one, $two, $three, $four") // 1, 2, 3, 4
  }

  // #3.1 even like that:
  {
    val Seq(x, y, _, xx @ _* )= 1 to 10 // x=1, y=2, the third element is dropped, 'xx' binds the rest

    println(s"Seq(x, y, _, xx @ _* ): $x, $y, $xx" ) // 1, 2, Range(4, 5, 6, 7, 8, 9, 10)
  }

  // # 4
  {

    val Pattern = "([ 0-9]+) ([ A-Za-z]+)". r // a Regex; its unapplySeq yields the capture groups

    // 4.1 pattern definition against the regex
    val Pattern(count0, fruit0) = "100 Cars"

    // same as
    // 4.2 calling unapplySeq by hand
    val res = Pattern.unapplySeq("100 Cars")
    val count1 = res.get(0) // 100
    val fruit1 = res.get(1) // Cars

    // same as

    // 4.3 an ordinary match expression
    val (count2, fruit2) = "100 Cars" match { case Pattern( count, fruit) => (count, fruit) }

    // So, 4.1 is just sugar for 4.3. It is all just pattern matching.

    println (s"count0, fruit0: $count0, $fruit0") // .. 100, Cars
    println (s"count1, fruit1: $count1, $fruit1") // .. 100, Cars
    println (s"count1, fruit2: $count2, $fruit2") // .. 100, Cars

  }

}
73 |
--------------------------------------------------------------------------------
/src/main/scala/pattern_matching/map/MapMatching.scala:
--------------------------------------------------------------------------------
1 | package pattern_matching.map
2 |
3 | //# pattern-matching #map
4 |
object MapMatching extends App {

  val theMap = Map(
    "name1"->"value1",
    "name2"->"value2",
    "name3"->"value3"
  )

  // #1 map over the entries with a pattern-matching anonymous function,
  // turning each (String, String) entry into an (Int, Int) one.
  // Both unmatched entries collapse onto the same (0, 0) key.

  val intMaps = theMap.map {
    case ("name1", "value1") => (1, 2)
    case _                   => (0, 0)
  }

  println( intMaps ) // Map(1 -> 2, 0 -> 0)


}
26 |
--------------------------------------------------------------------------------
/src/main/scala/performance/BenchmarksCombine.scala:
--------------------------------------------------------------------------------
1 | //package performance
2 | //
3 | ////import org.scalameter.Key
4 | //
5 | ///**
6 | // * the way to use/combine several measures at on(c)e
7 | // */
8 | //
9 | //object BenchmarksCombine extends App {
10 | //
11 | //// import org.scalameter._
12 | //
13 | // /**
14 | // * @param measuresMap - map of name -> measure
15 | // * @param block - a block of code to execute,
16 | // * @param confs - (optional) configuration, if not set default will be used
17 | // * @return a name->Quantity map back
18 | // */
19 | //
20 | // def measures[S](measuresMap: Map[String, Measurer[Double]], confs: KeyValue* )
21 | // (block: ()=> S ): Map[String, Quantity[Double]] = {
22 | //
23 | // measuresMap.map {
24 | //
25 | // case(name, aMeasure) => {
26 | //
27 | // val cs: Seq[KeyValue] = if(confs.nonEmpty) confs else {
//          Array[KeyValue] { // default conf
29 | // Key.exec.benchRuns -> 100
30 | // Key.verbose -> false
31 | // }
32 | // }
33 | //
34 | // val quantity: Quantity[Double] = config (cs: _*).
35 | //
36 | // withWarmer {
37 | //
38 | // new Warmer.Default // hardcoded
39 | //
40 | // } withMeasurer {
41 | //
42 | // aMeasure
43 | //
44 | // } measure {
45 | //
46 | // block()
47 | // }
48 | //
49 | // (name , quantity)
50 | //
51 | // }
52 | // }
53 | // }
54 | //
55 | // val mm = measures (
56 | //
57 | // measuresMap = Map(
58 | // "time" -> new Measurer.Default,
59 | // "memory" -> new Measurer.MemoryFootprint
60 | // )
61 | // /*, confs = { // uncomment to pass specific params to replace default ones
62 | // Key.exec.benchRuns -> 20
63 | // Key.verbose -> true
64 | // }*/
65 | //
66 | // )(
67 | //
68 | // block = { () =>
69 | //
70 | // for {
71 | // i <- 0 to 20000
72 | // } yield new java.text.SimpleDateFormat("yyyy/MM/dd") // btw never use that SimpleDateFormat, just don't
73 | //
74 | // }
75 | //
76 | // )
77 | //
78 | // println(s"Used quantities: ${mm}") // depends on 'withMeasurer'
79 | //
80 | // // Output like:
81 | // // Used quantities: Map(time -> 27.582114 ms, memory -> 18651.976 kB)
82 | //}
83 |
--------------------------------------------------------------------------------
/src/main/scala/performance/BenchmarksSimple.scala:
--------------------------------------------------------------------------------
1 | //package performance
2 | //
3 | //// https://scalameter.github.io/home/gettingstarted/0.7/executors/
4 | //
5 | //object BenchmarksSimple extends App {
6 | //
7 | //// import org.scalameter._
8 | //
9 | // //
10 | //
11 | // val quantity: Quantity[Double] = config {
12 | //
13 | // Key.exec.benchRuns -> 100
14 | // Key.verbose -> false
15 | //
16 | // } withWarmer {
17 | //
18 | // new Warmer.Default
19 | //
20 | // } withMeasurer {
21 | //
22 | // //new Measurer.Default // time
23 | // //new Measurer.IgnoringGC // time without GC cycles
24 | // new Measurer.MemoryFootprint // memory
25 | //
26 | // } measure {
27 | // // creating many SimpleDateFormat objects
28 | // for {
29 | // i <- 0 to 20000
30 | // } yield new java.text.SimpleDateFormat("yyyy/MM/dd")
31 | //
32 | // }
33 | //
34 | // println(s"Used quantity: ${quantity}") // depends on 'withMeasurer'
35 | //
36 | // // Outputs like:
37 | // // Used quantity: 24.727337 ms
38 | // // Used quantity: 18599.848 kB
39 | //
40 | //}
--------------------------------------------------------------------------------
/src/main/scala/scala_99/t4_length_of_list.scala:
--------------------------------------------------------------------------------
1 | package scala_99
2 |
3 | // #list #tail #tail #recursive
object t4_length_of_list extends App {

  // 1. looping / tail-recursive solution, generalized to any element type.

  /** Returns the number of elements in `list` using an inner tail-recursive loop. */
  def length[T](list: List[T]): Int = {
    // 'count' is threaded through the calls so the recursion stays in tail position
    @scala.annotation.tailrec
    def loopToNil(rest: List[T], count: Int): Int = rest match {
      case Nil       => count
      case _ :: tail => loopToNil(tail, count + 1)
    }
    loopToNil(list, 0)
  }

  println ( length(List(1, 1, 2, 3, 5, 8)) ) //6

  // 2. plain recursive solution (not tail-recursive: builds 1 + 1 + ... + 0 on the stack).

  def length2[T](list: List[T]): Int = list match {
    case Nil       => 0
    case _ :: tail => 1 + length2(tail) // use the pattern-bound tail rather than list.tail
  }

  println ( length2( List(1,2,3,4,5,6) ) ) // 6

}
28 |
--------------------------------------------------------------------------------
/src/main/scala/scala_99/t7_flatten_list.scala:
--------------------------------------------------------------------------------
1 | package scala_99
2 |
3 |
object t7_flatten_list extends App {

  { // 1. pure recursion

    println("pure recursion: ")

    def flatten(xs: List[Any]): List[Any] = xs match {
      case Nil => Nil
      case (headList: List[Any]) :: tail => flatten(headList) ::: flatten(tail) // recurse into both the nested list and the rest
      case headElem :: tail => headElem :: flatten(tail) // plain element: keep it and continue
    }

    val list = List(1, List(2, 3), 4)
    println(s"$list -> ${flatten(list)}")

  }

  { // 2. recursion + accumulation
    println("recursion + accumulation: ")

    // FIX: the original returned Nil at the end (never yielding `acc`) and the
    // nested-list case was `flatten(head_list) ::: acc`, discarding `tail` —
    // so List(1, List(2, 3), 4) flattened to List(1, 2, 3), losing the 4.
    // The accumulator is now threaded through every case.
    def flatten(xs: List[Any], acc: List[Any] = List()): List[Any] = xs match {
      case Nil => acc
      case (headList: List[Any]) :: tail => flatten(headList, flatten(tail, acc)) // flatten the rest first, then put the nested list's elements in front
      case headElem :: tail => headElem :: flatten(tail, acc)
    }

    val list = List(1, List(2, 3), 4)
    println(s"$list -> ${flatten(list)}")


  }

  { // 3. with flatMap
    println("with flatMap: ")

    def flatten(xs: List[Any]) : List[Any] = xs flatMap {
      case list: List[Any] => list        // a nested list contributes its elements
      case elem => List[Any](elem)        // a lone element is wrapped so flatMap can splice it in
    }

    val list = List(1, List(2, 3), 4)
    println(s"$list -> ${flatten(list)}")

  }

  // 4. if we use the standard flatten method then:
  {
    println("trying to use build-in flatten() method:")

    val list = List(1, List(2, 3), 4)
    // Does not compile on mixed content:
    // No implicit view available from Any => scala.collection.GenTraversableOnce[B].
    // println ( list flatten )

    // But this (a uniformly-typed list of lists) will work out
    val list2 = List(List(1), List(2, 3), List(4) )
    println ( list2.flatten )


  }


}
66 |
--------------------------------------------------------------------------------
/src/main/scala/scala_99/t8_duplicate_list.scala:
--------------------------------------------------------------------------------
1 | package scala_99
2 |
3 | import scala.annotation.tailrec
4 |
5 | //# duplicates #list #consecutive #tailrec #dropWhile
6 |
object t8_duplicate_list extends App {

  // 1. use accumulator & tailrec
  //
  // FIX: the original signature was `previous: T = ""`, assigning a String
  // default to an unconstrained type parameter T — that default does not
  // typecheck for a generic T. Option[T] with None ("no previous element yet")
  // is the type-safe equivalent and keeps `compress(list)` calls unchanged.

  /** Removes consecutive duplicates, preserving first occurrence order. */
  @tailrec
  def compress[T](list: List[T], previous: Option[T] = None, acc: List[T] = List()): List[T] = list match {
    case Nil => acc.reverse
    case head :: tail if previous.contains(head) => compress(tail, Some(head), acc) // not adding to accumulator if equal
    case head :: tail => compress(tail, Some(head), head :: acc) // or: acc ++ List(head), then no need to use "reverse'
  }


  val result = compress(List("a", "a", "a", "a", "b", "c", "c", "a", "a", "d", "e", "e", "e", "e"))

  println (result) // List(a, b, c, a, d, e)


  // 2. make use of existing method: #dropWhile. recursion

  {
    def compress[T](list: List[T]): List[T] = list match {
      case Nil => Nil
      case head :: tail => head :: compress(tail.dropWhile(_ == head)) // drop the whole run of 'head' duplicates at once
    }

    val result = compress(List("a", "a", "a", "a", "b", "c", "c", "a", "a", "d", "e", "e", "e", "e"))
    println(result)
  }


  // 3. using fold
  {
    def compress[T](list:List[T]): List[T] =
      list.foldRight(List[T]()) { // folding from the right into an empty list
        (right, result) => if (result.isEmpty || result.head != right) right :: result
                           else result
      }

    // just a reminder that we may use 'case' patterns in the fold
    def compress2[T](list:List[T]): List[T] =
      list.foldRight(List[T]()) {
        case (right, Nil) => List(right)
        case (right, result) if (result.head != right) => right :: result
        case (_, result) => result
      }


    val result1 = compress(List("a", "a", "a", "a", "b", "c", "c", "a", "a", "d", "e", "e", "e", "e"))
    val result2 = compress2(List("a", "a", "a", "a", "b", "c", "c", "a", "a", "d", "e", "e", "e", "e"))
    println (result1)
    println (result2)

  }

}
--------------------------------------------------------------------------------
/src/main/scala/scala_99/t9_pack_duplicates.scala:
--------------------------------------------------------------------------------
1 | package scala_99
2 |
3 | // #tail-recursion #accumulating #if-else-expression #case #list-concatenation
4 |
object t9_pack_duplicates extends App {

  /** Packs consecutive duplicates into sub-lists. The result groups come back
    * in reverse order (matching the original contract — callers do `.reverse`).
    *
    * FIX 1: the Nil case previously prepended `prior` to a pack that already
    * contained it, so the last group gained one extra element
    * (e.g. pack(List("a")) returned List(List("a", "a"))).
    * FIX 2: an empty input crashed evaluating the `list.head` default; it now
    * yields Nil.
    */
  def pack[A] (list:List[A]): List[List[A]] = {

    def loop( list:List[A], prior: A = list.head, packAcc:List[A] = List(), resultAcc:List[List[A]] = List() ) : List[List[A]] = list match {

      case Nil => packAcc :: resultAcc // flush the last group: every element is already in packAcc

      case head :: tail => loop(
        list = tail,
        prior = head,
        packAcc = if (head == prior) head :: packAcc else List[A](head),   // extend the run or start a new one
        resultAcc = if (head != prior) packAcc :: resultAcc else resultAcc // a new run pushes the finished group
      )
    }

    if (list.isEmpty) List() else loop(list)

  }

  val result = pack(List[Symbol](Symbol("a"), Symbol("a"), Symbol("a"), Symbol("a"), Symbol("b"), Symbol("c"), Symbol("c"), Symbol("a"), Symbol("a"), Symbol("d"), Symbol("e"), Symbol("e"), Symbol("e"), Symbol("e"))).reverse

  println(result) // List(List(Symbol("a"), Symbol("a"), Symbol("a"), Symbol("a")), List(Symbol("b")), List(Symbol("c"), Symbol("c")), List(Symbol("a"), Symbol("a")), List(Symbol("d")), List(Symbol("e"), Symbol("e"), Symbol("e"), Symbol("e")))

}
29 |
--------------------------------------------------------------------------------
/src/main/scala/scalaz/01_basic.scala:
--------------------------------------------------------------------------------
1 | import scalaz._
2 | import Scalaz._
3 |
// A small tour of scalaz typeclass syntax: Equal, Order, Show, Enum, Functor.
// Expressions are evaluated for their values only; nothing is asserted.
object ScalazDemo extends App {
  // Equal: === is type-safe equality

  1 === 1

  //1 === "abc" // scalaz: will NOT compile (different types)

  // 1 == "abc" // plain scala: compiles, evaluates to false
  1.some // Some(1), via scalaz Option syntax
  1.some =/= 2.some // type-safe inequality: true


  // Order
  1 > 2.0 // plain scala: false (Int widened to Double)

  1 gt 2 // false
  //1 gt 2.0 // scalaz Order: will NOT compile (different types)

  1 max 2 // 2


  // Show - converts a value to its string form

  3.show // returns a Cord - apparently a purely functional data structure for potentially long Strings
  3.shows // returns a plain String
  "hello".println() // scalaz helper that prints the value

  // ranges (Enum syntax)

  'a' to 'e' // plain scala: NumericRange(a, b, c, d, e)
  'a' |-> 'e' // scalaz: List(a, b, c, d, e)

  'b'.succ // c
  1.succ
  //2

  // Functor syntax for tuples: maps over the last element only
  var result = (1, 2, 3) map { _ + 1 } // (1,2,4)

}
--------------------------------------------------------------------------------
/src/main/scala/strings/StringSamples.scala:
--------------------------------------------------------------------------------
1 | package strings
2 |
3 | // #string #string-context
4 |
object StringSamples extends App {

  // #1 String interpolation: prefer an s-string over manual concatenation.
  {
    val name = "Bob"

    // much nicer than "Hello" + name + "!"
    val someString = s"Hello, $name!"

    println(someString)
  }

}
18 |
--------------------------------------------------------------------------------
/src/main/scala/unapply_method/see_extractor:
--------------------------------------------------------------------------------
1 | Look at extractors/* files. (unapply method = extractor)
--------------------------------------------------------------------------------
/src/main/scala/using_sample/UsingSample.scala:
--------------------------------------------------------------------------------
1 | import cats.effect.{IO, IOApp}
2 |
3 | // Demonstrating `using` and `given` in Scala 3 with Cats Effect
4 |
// Demonstrates Scala 3 context parameters (`using`) and context instances
// (`given`) inside a Cats Effect IOApp.
object UsingSample extends IOApp.Simple {

  // sample1: a context parameter (`using`) filled in by an in-scope `given` value
  def sample1(): IO[Unit] = {
    // `language` is a context parameter: callers do not pass it explicitly
    def greet(name: String)(using language: String): String =
      s"Hello, $name! (Language: $language)"

    given String = "English" // supplies the `using language: String` parameter below

    for {
      message <- IO.pure(greet("Alice")) // the given String is resolved at this call site; result kept inside IO
      _ <- IO.println(message) // printing as an IO effect rather than a bare side effect
    } yield ()
  }

  // sample2: a `given` instance of a trait, consumed through `using`
  def sample2(): IO[Unit] = {
    trait Logger:
      def log(msg: String): IO[Unit] // logging is described as an IO effect, not performed immediately

    given ConsoleLogger: Logger with
      def log(msg: String): IO[Unit] = IO.println(s"[LOG] $msg") // effectful println via IO

    def debug(msg: String)(using logger: Logger): IO[Unit] =
      logger.log(msg) // returns the IO; nothing happens until the program is run

    debug("Something happened!") // resolves ConsoleLogger implicitly and returns its IO[Unit]
  }

  // Sequence both samples into the single program IOApp.Simple will run.
  val program: IO[Unit] = for {
    _ <- sample1()
    _ <- sample2()
  } yield ()

  override def run: IO[Unit] = program
}
42 |
--------------------------------------------------------------------------------
/src/test/scala/mockito/MockitoTest.scala:
--------------------------------------------------------------------------------
1 | package mockito
2 |
3 | import org.junit.Test
4 | import org.scalatestplus.mockito.MockitoSugar;
5 | import org.mockito.Mockito._
6 | import org.mockito.ArgumentMatchers.{eq => eqTo, _} // have to use alias (because eq)
7 |
8 | // Shows how to use mocks with "MockitoSugar" from scalatest
9 |
// Tiny collaborator class that the test below replaces with a Mockito mock.
class Service {

  // Returns the given arguments in reverse order.
  def call(args: List[String]): List[String] =
    args.foldLeft(List.empty[String])((acc, a) => a :: acc)

}
17 |
// Shows the given/when/then flow with a ScalaTest MockitoSugar mock.
class MockitoTest extends MockitoSugar {

  // Mockito-generated stand-in for a real Service.
  val serviceMock = mock[Service]

  @Test
  def test(): Unit = {

    // GIVEN: stub the call so this exact argument list returns a canned value
    when( serviceMock. call(List("one", "two")) ).
      thenReturn(List("123"))

    // WHEN: invoke the stubbed method
    val result = serviceMock.call(List("one", "two"))

    // THEN 1: the mock was called exactly once with these arguments
    // (eqTo is ArgumentMatchers.eq aliased in the import above, since `eq` clashes with AnyRef.eq)
    verify( serviceMock, times(1) ).
      call( eqTo(List("one", "two")) )

    // THEN 2: the stubbed return value came back
    assert(result == List("123"))
  }


}
42 |
43 |
44 |
--------------------------------------------------------------------------------
/src/test/scala/weaver/HelloWorldTest.scala:
--------------------------------------------------------------------------------
1 | import weaver.SimpleIOSuite
2 | import cats.effect.IO
3 | import cats.implicits._
4 |
object HelloWorldTest extends SimpleIOSuite {
  test("basic test") { _ =>
    IO.pure(expect(1 + 1 == 2))
  }

  /***
   * How this differs from ScalaTest:
   * - effectful code lives in IO instead of Future / Await.result(...)
   * - tests run in parallel out of the box (ScalaTest needs explicit configuration)
   * - Cats Effect support is built in rather than an extra setup step
   * - tests run on fibers, avoiding blocking and race-prone setups
   */
}
20 |
--------------------------------------------------------------------------------
/src/test/scala/weaver/MyParallelSuite.scala:
--------------------------------------------------------------------------------
1 | import weaver._
2 | import cats.effect.IO
3 |
4 | import weaver._
5 | import cats.effect.{IO}
6 | import scala.concurrent.duration._
7 |
// Two sleeping tests; weaver runs them in parallel on separate fibers.
object ParallelTest extends SimpleIOSuite {
  test("test 1") { _ =>
    for {
      _ <- IO.println("Starting test 1")
      _ <- IO.sleep(2.seconds)
      _ <- IO.println("Finished test 1")
    } yield expect(1 + 1 == 2)
  }

  test("test 2") { _ =>
    for {
      _ <- IO.println("Starting test 2")
      _ <- IO.sleep(2.seconds)
      _ <- IO.println("Finished test 2")
    } yield expect(2 + 2 == 4)
  }
}
23 |
24 |
25 |
--------------------------------------------------------------------------------
/src/test/scala/weaver/WeaverTest.scala:
--------------------------------------------------------------------------------
1 | import weaver._
2 | import cats.effect.IO
3 |
// Contrasts bare println (runs at suite construction) with IO.println
// (runs in sequence when the test's IO executes).
object WeaverTest extends SimpleIOSuite {
  println("This runs immediately!") // side effect at object construction, before any test runs

  test("test with println") { _ =>
    println("Test println!") // unsuspended side effect: fires while the test is being built
    IO.pure(expect(true))
  }

  test("test with IO.println") { _ =>
    for {
      _ <- IO.println("Test IO.println!") // suspended: prints only when the test runs
    } yield expect(true)
  }

  test("test with IO") { _ =>
    import scala.concurrent.duration.DurationInt

    // With IO.println the print happens in the correct order, after the sleep,
    // and composes safely with other IO effects.
    for {
      _ <- IO.sleep(2.seconds)
      _ <- IO.println("Done sleeping!")
    } yield expect(true)
  }

  test("test with IO 2") { _ => // still runs in parallel, respecting its own sleep pause
    import scala.concurrent.duration.DurationInt

    // Same pattern as above: sleep, then an ordered, composable IO print.
    for {
      _ <- IO.sleep(2.seconds)
      _ <- IO.println("Done sleeping! 2")
    } yield expect(true)
  }

}
39 |
--------------------------------------------------------------------------------