├── README.md
├── asset-enrichment
└── src
│ ├── test
│ ├── resources
│ │ ├── test_files
│ │ │ ├── test_audio.mp3
│ │ │ ├── test_other.pdf
│ │ │ └── test_video.mp4
│ │ ├── logback-test.xml
│ │ └── test.conf
│ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ ├── spec
│ │ ├── AssetFileUtilsSpec.scala
│ │ └── ThumbnailSpec.scala
│ │ └── fixture
│ │ └── EventFixture.scala
│ └── main
│ ├── resources
│ ├── log4j.properties
│ └── asset-enrichment.conf
│ └── scala
│ └── org
│ └── sunbird
│ └── job
│ └── assetenricment
│ ├── util
│ ├── AssetFileUtils.scala
│ ├── ImageResizerUtil.scala
│ └── ThumbnailUtil.scala
│ ├── models
│ └── Asset.scala
│ ├── domain
│ └── Event.scala
│ ├── helpers
│ └── OptimizerHelper.scala
│ └── functions
│ └── AssetEnrichmentEventRouter.scala
├── jobs-core
└── src
│ ├── main
│ ├── scala
│ │ └── org
│ │ │ └── sunbird
│ │ │ └── job
│ │ │ ├── domain
│ │ │ └── reader
│ │ │ │ ├── EventsPath.scala
│ │ │ │ ├── ParentType.scala
│ │ │ │ └── ParentMap.scala
│ │ │ ├── exception
│ │ │ ├── ExceptionCases.scala
│ │ │ ├── InvalidEventException.scala
│ │ │ └── InvalidInputException.scala
│ │ │ ├── cache
│ │ │ ├── local
│ │ │ │ └── FrameworkMasterCategoryMap.scala
│ │ │ └── RedisConnect.scala
│ │ │ ├── helper
│ │ │ └── FailedEventHelper.scala
│ │ │ ├── util
│ │ │ ├── ScalaJsonUtil.scala
│ │ │ ├── JSONUtil.scala
│ │ │ ├── FlinkUtil.scala
│ │ │ └── CassandraUtil.scala
│ │ │ ├── serde
│ │ │ ├── StringSerde.scala
│ │ │ └── MapSerde.scala
│ │ │ └── dedup
│ │ │ └── DeDupEngine.scala
│ └── resources
│ │ └── base-config.conf
│ └── test
│ ├── scala
│ └── org
│ │ └── sunbird
│ │ └── spec
│ │ ├── BaseTestSpec.scala
│ │ ├── BaseSpec.scala
│ │ ├── FileUtilsSpec.scala
│ │ ├── TestStringStreamFunc.scala
│ │ ├── TestMapStreamFunc.scala
│ │ ├── TestJobRequestStreamFunc.scala
│ │ ├── BaseMetricsReporter.scala
│ │ └── SlugSpec.scala
│ └── resources
│ └── base-test.conf
├── credential-generator
├── certificate-processor
│ ├── src
│ │ ├── main
│ │ │ ├── resources
│ │ │ │ └── Verdana.ttf
│ │ │ └── scala
│ │ │ │ └── org
│ │ │ │ └── sunbird
│ │ │ │ └── incredible
│ │ │ │ ├── pojos
│ │ │ │ ├── Gender.scala
│ │ │ │ ├── exceptions
│ │ │ │ │ └── InvalidDateFormatException.scala
│ │ │ │ └── valuator
│ │ │ │ │ ├── IEvaluator.scala
│ │ │ │ │ └── IssuedDateValuator.scala
│ │ │ │ ├── processor
│ │ │ │ ├── qrcode
│ │ │ │ │ ├── QRCodeImageGeneratorParams.scala
│ │ │ │ │ └── QRCodeGenerationModel.scala
│ │ │ │ ├── CertModel.scala
│ │ │ │ └── signature
│ │ │ │ │ └── Exceptions.scala
│ │ │ │ ├── CertificateConfig.scala
│ │ │ │ ├── HttpUtil.scala
│ │ │ │ ├── ScalaModuleJsonUtils.scala
│ │ │ │ └── UrlManager.scala
│ │ └── test
│ │ │ └── scala
│ │ │ └── org
│ │ │ └── sunbird
│ │ │ └── incredible
│ │ │ ├── processor
│ │ │ └── qrcode
│ │ │ │ ├── AccessCodeGeneratorTest.scala
│ │ │ │ └── QRCodeImageGeneratorTest.scala
│ │ │ ├── BaseTestSpec.scala
│ │ │ └── valuator
│ │ │ └── IssuedDateValuatorTest.scala
│ └── certificates
│ │ └── 8e57723e-4541-11eb-b378-0242ac130002.png
├── collection-certificate-generator
│ └── src
│ │ ├── main
│ │ ├── scala
│ │ │ └── org
│ │ │ │ └── sunbird
│ │ │ │ └── job
│ │ │ │ └── certgen
│ │ │ │ └── exceptions
│ │ │ │ ├── ValidationException.scala
│ │ │ │ └── ErrorMessages.scala
│ │ └── resources
│ │ │ ├── log4j.properties
│ │ │ └── collection-certificate-generator.conf
│ │ └── test
│ │ └── resources
│ │ ├── logback-test.xml
│ │ └── test.conf
├── collection-cert-pre-processor
│ └── src
│ │ ├── test
│ │ └── resources
│ │ │ ├── logback-test.xml
│ │ │ └── test.conf
│ │ └── main
│ │ ├── resources
│ │ ├── log4j.properties
│ │ └── collection-cert-pre-processor.conf
│ │ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── collectioncert
│ │ └── domain
│ │ ├── Models.scala
│ │ └── Event.scala
└── pom.xml
├── .gitignore
├── video-stream-generator
└── src
│ ├── main
│ ├── scala
│ │ └── org
│ │ │ └── sunbird
│ │ │ └── job
│ │ │ └── videostream
│ │ │ ├── exception
│ │ │ └── MediaServiceException.scala
│ │ │ ├── helpers
│ │ │ ├── Result.scala
│ │ │ ├── AzureRequestBody.scala
│ │ │ └── CaseClasses.scala
│ │ │ ├── service
│ │ │ ├── impl
│ │ │ │ └── MediaServiceFactory.scala
│ │ │ └── IMediaService.scala
│ │ │ └── domain
│ │ │ └── Event.scala
│ └── resources
│ │ ├── log4j.properties
│ │ └── video-stream-generator.conf
│ └── test
│ ├── resources
│ ├── logback-test.xml
│ ├── job_request.cql
│ ├── test.cql
│ └── test.conf
│ └── scala
│ └── org
│ └── sunbird
│ └── job
│ └── fixture
│ └── EventFixture.scala
├── publish-pipeline
├── questionset-publish
│ └── src
│ │ ├── main
│ │ ├── scala
│ │ │ └── org
│ │ │ │ └── sunbird
│ │ │ │ └── job
│ │ │ │ └── questionset
│ │ │ │ └── publish
│ │ │ │ └── domain
│ │ │ │ ├── Models.scala
│ │ │ │ └── Event.scala
│ │ └── resources
│ │ │ ├── log4j.properties
│ │ │ ├── questionset-publish.conf
│ │ │ └── questionSetTemplate.vm
│ │ └── test
│ │ ├── resources
│ │ ├── logback-test.xml
│ │ └── test.conf
│ │ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── fixture
│ │ └── EventFixture.scala
├── publish-core
│ └── src
│ │ ├── main
│ │ └── scala
│ │ │ └── org
│ │ │ └── sunbird
│ │ │ └── job
│ │ │ └── publish
│ │ │ ├── helpers
│ │ │ ├── EcarPackageType.scala
│ │ │ ├── ObjectEnrichment.scala
│ │ │ ├── ObjectTemplateGenerator.scala
│ │ │ └── ObjectValidator.scala
│ │ │ ├── handler
│ │ │ └── QuestionTypeHandler.scala
│ │ │ ├── config
│ │ │ └── PublishConfig.scala
│ │ │ └── core
│ │ │ └── Models.scala
│ │ └── test
│ │ ├── resources
│ │ ├── test.conf
│ │ └── questionSetTemplate.vm
│ │ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── publish
│ │ └── spec
│ │ └── ObjectValidatorTestSpec.scala
├── content-publish
│ └── src
│ │ ├── main
│ │ ├── scala
│ │ │ └── org
│ │ │ │ └── sunbird
│ │ │ │ └── job
│ │ │ │ └── content
│ │ │ │ └── publish
│ │ │ │ ├── processor
│ │ │ │ ├── BaseProcessor.scala
│ │ │ │ ├── IProcessor.scala
│ │ │ │ ├── EcrfObject.scala
│ │ │ │ └── XMLLoaderWithCData.scala
│ │ │ │ ├── helpers
│ │ │ │ └── ECMLExtractor.scala
│ │ │ │ └── domain
│ │ │ │ └── Event.scala
│ │ └── resources
│ │ │ ├── log4j.properties
│ │ │ └── content-publish.conf
│ │ └── test
│ │ ├── resources
│ │ ├── logback-test.xml
│ │ └── test.conf
│ │ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── fixture
│ │ └── EventFixture.scala
└── pom.xml
├── kubernets
└── job-cluster
│ ├── flink-configuration-configmap.yaml
│ ├── relation-cache-updater.sh
│ ├── activity-aggregate-updater.sh
│ ├── job-cluster-restservice.yaml
│ ├── job-cluster-service.yaml
│ ├── clean.sh
│ ├── k8s_job_deployment.sh
│ ├── job-cluster-taskmanager.yaml
│ └── job-cluster-jobmanager.yaml
├── audit-history-indexer
└── src
│ ├── test
│ └── resources
│ │ ├── test.conf
│ │ └── logback-test.xml
│ └── main
│ ├── resources
│ ├── audit-history-indexer.conf
│ └── log4j.properties
│ └── scala
│ └── org
│ └── sunbird
│ └── job
│ └── audithistory
│ ├── domain
│ ├── CaseClasses.scala
│ └── Event.scala
│ ├── task
│ └── AuditHistoryIndexerConfig.scala
│ └── functions
│ └── AuditHistoryIndexer.scala
├── jobs-distribution
├── Dockerfile
└── src
│ └── main
│ └── assembly
│ └── src.xml
├── auto-creator-v2
└── src
│ ├── main
│ ├── scala
│ │ └── org
│ │ │ └── sunbird
│ │ │ └── job
│ │ │ └── autocreatorv2
│ │ │ ├── model
│ │ │ └── Models.scala
│ │ │ ├── functions
│ │ │ └── LinkCollectionFunction.scala
│ │ │ └── domain
│ │ │ └── Event.scala
│ └── resources
│ │ ├── log4j.properties
│ │ └── auto-creator-v2.conf
│ └── test
│ ├── resources
│ ├── logback-test.xml
│ ├── test.cql
│ └── test.conf
│ └── scala
│ └── org
│ └── sunbird
│ └── job
│ └── autocreatorv2
│ └── spec
│ └── service
│ └── AutoCreatorFunctionSpec.scala
├── activity-aggregate-updater
└── src
│ ├── main
│ ├── scala
│ │ └── org
│ │ │ └── sunbird
│ │ │ └── job
│ │ │ └── aggregate
│ │ │ └── common
│ │ │ └── DeDupHelper.scala
│ └── resources
│ │ ├── log4j.properties
│ │ └── activity-aggregate-updater.conf
│ └── test
│ ├── resources
│ ├── logback-test.xml
│ └── test.conf
│ └── scala
│ └── org
│ └── sunbird
│ └── job
│ └── spec
│ └── BaseActivityAggregateTestSpec.scala
├── audit-event-generator
└── src
│ ├── test
│ └── resources
│ │ ├── test.conf
│ │ └── logback-test.xml
│ └── main
│ ├── resources
│ ├── audit-event-generator.conf
│ └── log4j.properties
│ └── scala
│ └── org
│ └── sunbird
│ └── job
│ └── auditevent
│ ├── domain
│ └── Event.scala
│ └── task
│ └── AuditEventGeneratorConfig.scala
├── relation-cache-updater
└── src
│ ├── test
│ ├── resources
│ │ ├── test.conf
│ │ └── logback-test.xml
│ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── fixture
│ │ └── EventFixture.scala
│ └── main
│ ├── resources
│ ├── relation-cache-updater.conf
│ └── log4j.properties
│ └── scala
│ └── org
│ └── sunbird
│ └── job
│ └── relationcache
│ ├── domain
│ └── Event.scala
│ └── task
│ └── RelationCacheUpdaterConfig.scala
├── post-publish-processor
└── src
│ ├── main
│ ├── scala
│ │ └── org
│ │ │ └── sunbird
│ │ │ └── job
│ │ │ └── postpublish
│ │ │ ├── models
│ │ │ └── Models.scala
│ │ │ ├── domain
│ │ │ └── Event.scala
│ │ │ └── functions
│ │ │ └── ShallowCopyPublishFunction.scala
│ └── resources
│ │ ├── log4j.properties
│ │ └── post-publish-processor.conf
│ └── test
│ └── resources
│ ├── logback-test.xml
│ ├── test.conf
│ └── test.cql
├── .github
├── workflows
│ └── jira-description-action.yml
└── pull_request_template.md
├── mvc-indexer
└── src
│ ├── main
│ ├── resources
│ │ ├── log4j.properties
│ │ └── mvc-indexer.conf
│ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── mvcindexer
│ │ ├── domain
│ │ └── Event.scala
│ │ └── util
│ │ └── ContentUtil.scala
│ └── test
│ └── resources
│ ├── logback-test.xml
│ ├── test.conf
│ └── test.cql
├── enrolment-reconciliation
└── src
│ ├── test
│ ├── resources
│ │ ├── logback-test.xml
│ │ └── test.conf
│ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── spec
│ │ └── BaseActivityAggregateTestSpec.scala
│ └── main
│ ├── resources
│ ├── log4j.properties
│ └── enrolment-reconciliation.conf
│ └── scala
│ └── org
│ └── sunbird
│ └── job
│ └── recounciliation
│ └── domain
│ └── Event.scala
├── search-indexer
└── src
│ ├── main
│ ├── resources
│ │ ├── log4j.properties
│ │ └── search-indexer.conf
│ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── searchindexer
│ │ ├── models
│ │ └── Models.scala
│ │ ├── compositesearch
│ │ └── domain
│ │ │ └── Event.scala
│ │ └── functions
│ │ └── TransactionEventRouter.scala
│ └── test
│ └── resources
│ └── test.conf
├── metrics-data-transformer
└── src
│ ├── main
│ ├── resources
│ │ ├── log4j.properties
│ │ └── metrics-data-transformer.conf
│ └── scala
│ │ └── org
│ │ └── sunbird
│ │ └── job
│ │ └── metricstransformer
│ │ ├── domain
│ │ └── Event.scala
│ │ └── task
│ │ └── MetricsDataTransformerConfig.scala
│ └── test
│ └── resources
│ └── test.conf
├── LICENSE
└── .circleci
└── config.yml
/README.md:
--------------------------------------------------------------------------------
1 | # sunbird-knowledge-platform-jobs
2 | Background and pipeline jobs of Knowledge Platform
3 |
--------------------------------------------------------------------------------
/asset-enrichment/src/test/resources/test_files/test_audio.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sphere/knowledge-platform-jobs/master/asset-enrichment/src/test/resources/test_files/test_audio.mp3
--------------------------------------------------------------------------------
/asset-enrichment/src/test/resources/test_files/test_other.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sphere/knowledge-platform-jobs/master/asset-enrichment/src/test/resources/test_files/test_other.pdf
--------------------------------------------------------------------------------
/asset-enrichment/src/test/resources/test_files/test_video.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sphere/knowledge-platform-jobs/master/asset-enrichment/src/test/resources/test_files/test_video.mp4
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/domain/reader/EventsPath.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.domain.reader
2 |
3 | object EventsPath { // JSON path constants used by event readers
4 |
5 |   val MID_PATH = "mid" // Path to the event's message-id field
6 |
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/resources/Verdana.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sphere/knowledge-platform-jobs/master/credential-generator/certificate-processor/src/main/resources/Verdana.ttf
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/domain/reader/ParentType.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.domain.reader
2 |
3 | trait ParentType { // Abstraction over a container holding a single named child value
4 |   def readChild[T]: Option[T] // None when the child is absent; caller supplies the expected type T
5 |
6 |   def addChild(value: Any): Unit // Stores/overwrites the child value in the underlying container
7 | }
8 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/pojos/Gender.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.pojos
2 |
3 | // Was `class Gender extends Enumeration`: a class-based Enumeration creates a
4 | // distinct value set per instance and its members are not statically reachable.
5 | // Scala Enumerations are meant to be singletons, so this must be an object.
6 | object Gender extends Enumeration {
7 |   val OTHER, MALE, FEMALE = Value
8 | }
9 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/certificates/8e57723e-4541-11eb-b378-0242ac130002.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sphere/knowledge-platform-jobs/master/credential-generator/certificate-processor/certificates/8e57723e-4541-11eb-b378-0242ac130002.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | target/
2 | *.class
3 | *.jar
4 | *.war
5 | *.ear
6 | *.logs
7 | *.iml
8 | .idea/
9 | .eclipse
10 | dependency-reduced-pom.xml
11 | .DS_Store
12 | *.log
13 | kubernets/config
14 | .project
15 | .classpath
16 | .factorypath
17 | bin/
18 | .settings/
--------------------------------------------------------------------------------
/video-stream-generator/src/main/scala/org/sunbird/job/videostream/exception/MediaServiceException.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.videostream.exception
2 |
3 | class MediaServiceException(var errorCode: String = null, msg: String, ex: Exception = null) extends Exception(msg, ex) // Carries an optional provider error code alongside message/cause
4 |
--------------------------------------------------------------------------------
/publish-pipeline/questionset-publish/src/main/scala/org/sunbird/job/questionset/publish/domain/Models.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.questionset.publish.domain
2 |
3 | case class PublishMetadata(identifier: String, objectType: String, mimeType: String, pkgVersion: Double, publishType: String) // Minimal descriptor of a publish request routed through the questionset publish job
4 |
--------------------------------------------------------------------------------
/kubernets/job-cluster/flink-configuration-configmap.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: ConfigMap
3 | metadata:
4 | name: kp-jobs-config
5 | labels:
6 | app: flink
7 | data:
8 | user-cache-config.yaml: |+
9 | kafka.broker-servers: "localhost:9092"
10 | kafka.zookeeper: "localhost:2181"
11 |
12 |
--------------------------------------------------------------------------------
/audit-history-indexer/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.learning.graph.events"
5 | groupId = "sunbirddev-audit-history-indexer-group"
6 | }
7 |
8 | task {
9 | consumer.parallelism = 1
10 | parallelism = 1
11 | window.time = 60
12 | }
13 |
14 | timezone = "IST"
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/pojos/exceptions/InvalidDateFormatException.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.pojos.exceptions
2 |
3 | class InvalidDateFormatException(msg: String) extends Exception(msg) {} // Thrown by date valuators when an input date string cannot be parsed
4 |
5 |
6 | class ServerException(code: String, msg: String) extends Exception(msg) {} // NOTE(review): `code` is a plain ctor param, not a field — confirm no caller needs to read it
7 |
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/main/scala/org/sunbird/job/publish/helpers/EcarPackageType.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.publish.helpers
2 |
3 | /** Kinds of ECAR bundles produced while publishing an object. */
4 | object EcarPackageType extends Enumeration {
5 |   // Value derives each member's name from its val identifier, yielding the
6 |   // same labels as the previous explicit Value("...") calls, in the same order.
7 |   val FULL, SPINE, OPTIMIZED, ONLINE = Value
8 | }
9 |
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/main/scala/org/sunbird/job/publish/handler/QuestionTypeHandler.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.publish.handler
2 |
3 | trait QuestionTypeHandler { // Extracts question parts from an object's external data
4 |
5 |   def getQuestion(extData: Option[Map[String, AnyRef]]): String // The question body text
6 |
7 |   def getAnswers(extData: Option[Map[String, AnyRef]]): List[String] // All answer texts — presumably empty when absent; confirm with implementations
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/audit-history-indexer/src/main/resources/audit-history-indexer.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.learning.graph.events"
5 | groupId = "sunbirddev-audit-history-indexer-group"
6 | }
7 |
8 | task {
9 | consumer.parallelism = 1
10 | parallelism = 1
11 | window.time = 60
12 | }
13 |
14 | timezone = "IST"
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/pojos/valuator/IEvaluator.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.pojos.valuator
2 |
3 | import org.sunbird.incredible.pojos.exceptions.InvalidDateFormatException
4 |
5 | trait IEvaluator { // Transforms a raw input string into its evaluated/normalized form
6 |   @throws[InvalidDateFormatException]
7 |   def evaluates(inputVal: String): String // Throws InvalidDateFormatException for unparseable date input
8 | }
9 |
9 |
--------------------------------------------------------------------------------
/kubernets/job-cluster/relation-cache-updater.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | export SUNBIRD_DATAPIPELINE_IMAGE=manjudr/dev-kp-stream-jobs:1.0.1 # NOTE(review): personal Docker Hub image — consider an org-owned registry
3 | export JOB_NAME=relation-cache-updater-stream-job
4 | export JOB_CLASSNAME=org.sunbird.job.task.RelationCacheUpdaterStreamTask
5 | export AZURE_STORAGE_ACCOUNT= # intentionally blank — presumably injected at deploy time; confirm
6 | export AZURE_STORAGE_SECRET= # intentionally blank — presumably injected at deploy time; confirm
7 | export REST_SERVICE_PORT=30701 # NodePort used by job-cluster-restservice.yaml
8 |
--------------------------------------------------------------------------------
/kubernets/job-cluster/activity-aggregate-updater.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | export SUNBIRD_DATAPIPELINE_IMAGE=manjudr/dev-kp-stream-jobs:1.0.2 # NOTE(review): personal Docker Hub image — consider an org-owned registry
3 | export JOB_NAME=activity-aggregate-updater-stream-job
4 | export JOB_CLASSNAME=org.sunbird.job.task.ActivityAggregateUpdaterStreamTask
5 | export AZURE_STORAGE_ACCOUNT= # intentionally blank — presumably injected at deploy time; confirm
6 | export AZURE_STORAGE_SECRET= # intentionally blank — presumably injected at deploy time; confirm
7 | export REST_SERVICE_PORT=30702 # NodePort used by job-cluster-restservice.yaml
8 |
--------------------------------------------------------------------------------
/jobs-distribution/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM anandp504/flink:1.12.0-scala_2.12-java11
2 |
3 | USER root
4 | RUN apt-get update # refresh package index for the install below
5 | RUN apt-get install -y imagemagick # presumably needed by asset-enrichment image processing — confirm
6 |
7 | COPY target/jobs-distribution-1.0.tar.gz /tmp
8 | USER flink
9 | RUN tar -xvf /tmp/jobs-distribution-1.0.tar.gz -C $FLINK_HOME/lib/ # unpack job jars into Flink's classpath as the flink user
10 | USER root
11 | RUN rm -f /tmp/jobs-distribution-1.0.tar.gz # root needed to delete the root-copied archive
12 | USER flink
13 |
--------------------------------------------------------------------------------
/jobs-distribution/src/main/assembly/src.xml:
--------------------------------------------------------------------------------
1 |
2 | false
3 |
4 | tar.gz
5 |
6 |
7 |
8 | false
9 | false
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/main/scala/org/sunbird/job/publish/config/PublishConfig.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.publish.config
2 |
3 | import com.typesafe.config.Config
4 | import org.sunbird.job.BaseJobConfig
5 |
6 | /** Job configuration wrapper shared across the publish-pipeline jobs. */
7 | class PublishConfig(override val config: Config, override val jobName: String) extends BaseJobConfig(config, jobName) {
8 |
9 |   // Explicit return type on a public member (was inferred); the 0-arity
10 |   // parens are kept for source compatibility with existing call sites.
11 |   def getConfig(): Config = config
12 |
13 | }
14 |
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/exception/ExceptionCases.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.exception
2 |
3 | class APIException(message: String, cause: Throwable) extends Exception(message, cause) // Failure while calling an external HTTP/API service
4 |
5 | class CassandraException(message: String, cause: Throwable) extends Exception(message, cause) // Failure during a Cassandra read/write
6 |
7 | class ElasticSearchException(message: String, cause: Throwable) extends Exception(message, cause) // Failure during an Elasticsearch operation
8 |
--------------------------------------------------------------------------------
/credential-generator/collection-certificate-generator/src/main/scala/org/sunbird/job/certgen/exceptions/ValidationException.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.certgen.exceptions
2 |
3 | case class ValidationException(errorCode: String, msg: String, ex: Exception = null) extends Exception(msg, ex) { // Input/validation failure in certificate generation
4 |
5 | }
6 |
7 | case class ServerException (errorCode: String, msg: String, ex: Exception = null) extends Exception(msg, ex) { // NOTE(review): case classes extending Exception get value-based equals/hashCode — confirm that is intended
8 | }
9 |
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/main/scala/org/sunbird/job/content/publish/processor/BaseProcessor.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.content.publish.processor
2 |
3 | import org.sunbird.job.util.CloudStorageUtil
4 |
5 | class BaseProcessor(basePath: String, identifier: String)(implicit cloudStorageUtil: CloudStorageUtil) extends IProcessor(basePath, identifier) { // No-op base of the ECRF processor chain
6 |   override def process(ecrf: Plugin): Plugin = {
7 |     ecrf // Identity transform: returns the ECRF object unchanged
8 |   }
9 | }
10 |
--------------------------------------------------------------------------------
/audit-history-indexer/src/main/scala/org/sunbird/job/audithistory/domain/CaseClasses.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.audithistory.domain
2 |
3 | import java.io.Serializable
4 | import java.util.Date
5 |
6 | @SerialVersionUID(-5779950964487302125L)
7 | case class AuditHistoryRecord(var objectId: String, objectType: String, label: String, graphId: String, var userId: String, requestId: String, logRecord: String, operation: String, createdOn: Date) extends Serializable // NOTE(review): objectId/userId are vars — presumably rewritten during enrichment before indexing; confirm
--------------------------------------------------------------------------------
/kubernets/job-cluster/job-cluster-restservice.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | name: ${JOB_NAME}-jobmanager-rest
5 | annotations:
6 | service.beta.kubernetes.io/azure-load-balancer-internal: "true"
7 | spec:
8 | type: LoadBalancer
9 | ports:
10 | - name: rest
11 | nodePort: ${REST_SERVICE_PORT}
12 | port: 80
13 | protocol: TCP
14 | targetPort: 8081
15 | selector:
16 | app: flink
17 | component: ${JOB_NAME}-jobmanager
--------------------------------------------------------------------------------
/video-stream-generator/src/main/scala/org/sunbird/job/videostream/helpers/Result.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.videostream.helpers
2 |
3 |
4 | trait Result { // Shapes provider MediaResponse payloads into result maps
5 |
6 |   def getSubmitJobResult(response: MediaResponse): Map[String, AnyRef] // Outcome of a streaming-job submission
7 |
8 |   def getJobResult(response: MediaResponse): Map[String, AnyRef] // Status/details of a single job
9 |
10 |   def getCancelJobResult(response: MediaResponse): Map[String, AnyRef] // Outcome of a cancel request
11 |
12 |   def getListJobResult(response: MediaResponse): Map[String, AnyRef] // Outcome of a job-list request
13 | }
14 |
--------------------------------------------------------------------------------
/kubernets/job-cluster/job-cluster-service.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | name: ${JOB_NAME}-jobmanager
5 | labels:
6 | app: flink
7 | component: ${JOB_NAME}-jobmanager
8 | spec:
9 | type: ClusterIP
10 | ports:
11 | - name: rpc
12 | port: 6123
13 | - name: blob
14 | port: 6124
15 | - name: query
16 | port: 6125
17 | - name: ui
18 | port: 8081
19 | selector:
20 | app: flink
21 | component: ${JOB_NAME}-jobmanager
22 |
--------------------------------------------------------------------------------
/kubernets/job-cluster/clean.sh:
--------------------------------------------------------------------------------
1 | kubectl delete pod/course-metrics-aggregator-jobmanager-tc6fc # NOTE(review): hard-coded pod suffix — stale outside the original cluster
2 | kubectl delete pod/course-metrics-aggregator-taskmanager-855cc88959-gmttk # NOTE(review): hard-coded replicaset/pod hash — stale outside the original cluster
3 | kubectl delete deployment.apps/course-metrics-aggregator-taskmanager
4 | kubectl delete job.batch/course-metrics-aggregator-jobmanager
5 | kubectl delete replicaset.apps/course-metrics-aggregator-taskmanager-855cc88959 # NOTE(review): hard-coded replicaset hash
6 | kubectl delete service/course-agg-job-jobmanager
7 | kubectl delete service/course-agg-job-jobmanager-rest
8 |
9 |
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/main/scala/org/sunbird/job/content/publish/helpers/ECMLExtractor.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.content.publish.helpers
2 |
3 | import org.sunbird.job.content.publish.processor.{BaseProcessor, MissingAssetValidatorProcessor}
4 | import org.sunbird.job.util.CloudStorageUtil
5 |
6 | class ECMLExtractor(basePath: String, identifier: String)(implicit cloudStorageUtil: CloudStorageUtil) extends BaseProcessor(basePath, identifier) with MissingAssetValidatorProcessor { // BaseProcessor is a no-op; behavior comes from the mixed-in MissingAssetValidatorProcessor
7 |
8 | }
9 |
--------------------------------------------------------------------------------
/auto-creator-v2/src/main/scala/org/sunbird/job/autocreatorv2/model/Models.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.autocreatorv2.model
2 |
3 | case class ExtDataConfig(keyspace: String, table: String, primaryKey:List[String], propsMapping: Map[String, AnyRef]) // Descriptor of the external-data table (presumably Cassandra — confirm) for an object type
4 |
5 | case class ObjectParent(identifier: String, parents: List[Map[String, String]]) // An object's identifier with its parent links
6 |
7 | class ObjectData(val identifier: String, val objectType: String, val metadata: Map[String, AnyRef], val extData: Option[Map[String, AnyRef]] = None, val hierarchy: Option[Map[String, AnyRef]] = None) // Immutable snapshot of an object: metadata plus optional external data and hierarchy
/activity-aggregate-updater/src/main/scala/org/sunbird/job/aggregate/common/DeDupHelper.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.aggregate.common
2 |
3 | import java.security.MessageDigest
4 |
5 | object DeDupHelper {
6 |
7 |   /** Builds a stable, deterministic dedup id for an enrolment progress event
8 |     * by MD5-hashing the pipe-joined attributes (upper-case hex output). */
9 |   def getMessageId(collectionId: String, batchId: String, userId: String, contentId: String, status: Int): String = {
10 |     val dedupKey = s"$collectionId|$batchId|$userId|$contentId|$status"
11 |     val digestBytes = MessageDigest.getInstance("MD5").digest(dedupKey.getBytes)
12 |     digestBytes.map(b => "%02X".format(b)).mkString
13 |   }
14 |
15 | }
16 |
--------------------------------------------------------------------------------
/audit-event-generator/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.learning.graph.events"
5 | output.topic = "sunbirddev.telemetry.raw"
6 | groupId = "sunbirddev-audit-event-generator-group"
7 | }
8 |
9 | task {
10 | consumer.parallelism = 1
11 | parallelism = 1
12 | producer.parallelism = 1
13 | window.time = 60
14 | }
15 |
16 | schema {
17 | basePath = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/local"
18 | }
19 |
20 | channel.default = "org.sunbird"
--------------------------------------------------------------------------------
/relation-cache-updater/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | kafka {
4 | input.topic = "flink.relation.cache.input"
5 | groupId = "flink-relation-cache-updater-group"
6 | }
7 |
8 | task {
9 | consumer.parallelism = 1
10 | }
11 |
12 | lms-cassandra {
13 | keyspace = "hierarchy_store"
14 | table = "content_hierarchy"
15 | host = "localhost"
16 | port = "9142"
17 | }
18 |
19 | redis {
20 | database.index = 10
21 | }
22 |
23 | dp-redis {
24 | host = localhost
25 | port = 6340
26 | database.index = 5
27 | }
28 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/test/scala/org/sunbird/incredible/processor/qrcode/AccessCodeGeneratorTest.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.processor.qrcode
2 |
3 | import org.sunbird.incredible.BaseTestSpec
4 |
5 | class AccessCodeGeneratorTest extends BaseTestSpec {
6 |
7 |   "check accessCode generator" should "should return accessCode" in {
8 |     // A freshly generated access code must have exactly the requested length.
9 |     val generator = new AccessCodeGenerator
10 |     val generated: String = generator.generate(6)
11 |     generated.length should be(6)
12 |   }
13 |
14 | }
--------------------------------------------------------------------------------
/audit-event-generator/src/main/resources/audit-event-generator.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.learning.graph.events"
5 | output.topic = "sunbirddev.telemetry.raw"
6 | groupId = "sunbirddev-audit-event-generator-group"
7 | }
8 |
9 | task {
10 | consumer.parallelism = 1
11 | parallelism = 1
12 | producer.parallelism = 1
13 | window.time = 60
14 | }
15 |
16 | schema {
17 | basePath = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/local"
18 | }
19 |
20 | channel.default = "org.sunbird"
21 |
--------------------------------------------------------------------------------
/relation-cache-updater/src/main/resources/relation-cache-updater.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.content.postpublish.request"
5 | groupId = "sunbirddev-relation-cache-updater-group"
6 | }
7 |
8 | task {
9 | consumer.parallelism = 1
10 | parallelism = 2
11 | }
12 |
13 | lms-cassandra {
14 | keyspace = "dev_hierarchy_store"
15 | table = "content_hierarchy"
16 | }
17 |
18 | redis {
19 | database.index = 10
20 | }
21 |
22 | dp-redis {
23 | host = 11.2.4.22
24 | port = 6379
25 | database.index = 5
26 | }
27 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/processor/qrcode/QRCodeImageGeneratorParams.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.processor.qrcode
2 |
// Enumerates the keys used when reading/writing QR-code image generation event payloads.
// NOTE(review): declared as a `class` extending Enumeration, so each `new` creates an
// independent Enumeration instance — confirm whether a singleton `object` was intended.
// NOTE(review): `qrCodeMargi` looks like a truncated `qrCodeMargin` — do not rename without
// checking the event payloads/callers that reference this value by name.
class QRCodeImageGeneratorParams extends Enumeration {
  val eid, processId, objectId, dialcodes, data, text, id, location, storage, container, path, config, errorCorrectionLevel, pixelsPerBlock, qrCodeMargi, textFontSize, textCharacterSpacing, imageFormat, colourModel, imageBorderSize, qrCodeMarginBottom, BE_QR_IMAGE_GENERATOR, fileName, imageMargin, qr_image_margin_bottom, qr_image_margin = Value
}
6 |
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/domain/reader/ParentMap.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.domain.reader
2 |
/**
 * ParentType backed by a mutable java.util.Map, addressing a single child entry by key.
 *
 * @param map      underlying map holding the child value (may be null; all operations no-op/None then)
 * @param childKey key under which the child value is stored
 */
class ParentMap private[reader](var map: java.util.Map[String, Any], var childKey: String) extends ParentType {

  /**
   * Reads the child value, cast to the requested type.
   * Returns None when the map is null, the key is absent, or the stored value is null.
   * (Idiom fix: replaced explicit `return` with an expression; Option(...) maps null to None,
   * which also covers the previous containsKey + null check.)
   */
  override def readChild[T]: Option[T] = {
    if (map != null) Option(map.get(childKey)).map(_.asInstanceOf[T])
    else None
  }

  /** Stores the value under the child key; silently ignored when the map is null. */
  override def addChild(value: Any): Unit = {
    if (map != null) map.put(childKey, value)
  }
}
16 |
--------------------------------------------------------------------------------
/post-publish-processor/src/main/scala/org/sunbird/job/postpublish/models/Models.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.postpublish.models
2 |
/**
 * Immutable view of a content object used by the post-publish processor.
 *
 * @param identifier object identifier
 * @param metadata   object metadata read from the store
 * @param extData    optional external data payload
 * @param hierarchy  optional hierarchy payload
 */
class ObjectData(val identifier: String, val metadata: Map[String, AnyRef], val extData: Option[Map[String, AnyRef]] = None, val hierarchy: Option[Map[String, AnyRef]] = None) {

  // Database identifier: prefer the internal "IL_UNIQUE_ID" field when present, else the public identifier.
  val dbId: String = metadata.getOrElse("IL_UNIQUE_ID", identifier).asInstanceOf[String]

  // Package version as an Int, defaulting to 0 when absent.
  // (Idiom fix: removed the redundant `0.0.asInstanceOf[Number]` pre-cast — the single
  // asInstanceOf[Number] after getOrElse covers both the stored value and the default.)
  val pkgVersion: Int = metadata.getOrElse("pkgVersion", 0.0).asInstanceOf[Number].intValue()

}
11 |
// Keyspace/table coordinates for external-data reads (naming suggests Cassandra — confirm with callers).
case class ExtDataConfig(keyspace: String, table: String)
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/main/scala/org/sunbird/job/content/publish/processor/IProcessor.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.content.publish.processor
2 |
3 | import org.sunbird.job.util.CloudStorageUtil
4 |
/**
 * Base contract for publish-pipeline processors that transform an ECRF plugin tree.
 *
 * @param basePath         working directory for this processing run
 * @param identifier       identifier of the object being processed
 * @param cloudStorageUtil injected cloud storage access, exposed via `ss` below
 */
abstract class IProcessor(basePath: String, identifier: String)(implicit cloudStorageUtil: CloudStorageUtil) {

  // Storage service handle, implicit so concrete processors can use it without explicit threading.
  implicit val ss = cloudStorageUtil.getService

  // File types treated as "widget" assets — presumably checked by concrete processors; confirm usage.
  val widgetTypeAssets: List[String] = List("js", "css", "json", "plugin")

  /** Transforms the given ECRF plugin tree and returns the (possibly modified) tree. */
  def process(ecrf: Plugin): Plugin

  /** Returns the working base path supplied at construction. */
  def getBasePath(): String = basePath

  /** Returns the identifier of the object being processed. */
  def getIdentifier(): String = identifier
}
17 |
--------------------------------------------------------------------------------
/.github/workflows/jira-description-action.yml:
--------------------------------------------------------------------------------
1 | name: jira-description-action
2 | on:
3 | pull_request_target:
4 | types: [opened, labeled]
5 | jobs:
6 | add-jira-description:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: project-sunbird/jira-description-action@v0.4.0
10 | name: jira-description-action
11 | with:
12 | github-token: ${{ secrets.GITHUB_TOKEN }}
13 | jira-token: ${{ secrets.JIRA_TOKEN }}
14 | jira-base-url: ${{ secrets.JIRA_BASE_URL }}
15 | fail-when-jira-issue-not-found: ${{ secrets.FAIL_WHEN_JIRA_ISSUE_NOT_FOUND }}
16 | use: both
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/CertificateConfig.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible
2 |
/**
 * Static configuration for certificate generation: service endpoints, context/evidence/issuer
 * URLs, signatory extension, and the access-code length (defaults to 6).
 */
case class CertificateConfig(basePath: String,
                             encryptionServiceUrl: String,
                             contextUrl: String,
                             evidenceUrl: String,
                             issuerUrl: String,
                             signatoryExtension: String,
                             accessCodeLength: Double = 6)

/**
 * Cloud storage credentials. Azure fields are mandatory; the AWS fields are optional and
 * default to None when an AWS backend is not configured.
 */
case class StorageParams(cloudStorageType: String,
                         azureStorageKey: String,
                         azureStorageSecret: String,
                         azureContainerName: String,
                         awsStorageKey: Option[String] = None,
                         awsStorageSecret: Option[String] = None,
                         awsContainerName: Option[String] = None)
--------------------------------------------------------------------------------
/mvc-indexer/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=mvc-indexer.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/auto-creator-v2/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/processor/qrcode/QRCodeGenerationModel.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.processor.qrcode
2 |
// Rendering parameters for a single QR-code image (error correction, sizing, text styling, margins).
// NOTE(review): field names here use `colorModel` and `fileFormat`, while QRCodeImageGeneratorParams
// declares `colourModel` and `imageFormat` — confirm how event keys are mapped onto this model.
case class QRCodeGenerationModel(data: String, errorCorrectionLevel: String = "H", pixelsPerBlock: Int = 2, qrCodeMargin: Int = 3,
                                 text: String, textFontName: String = "Verdana", textFontSize: Int = 16, textCharacterSpacing: Double = 0.2,
                                 imageBorderSize: Int = 0, colorModel: String = "Grayscale", fileName: String, fileFormat: String = "png",
                                 qrCodeMarginBottom: Int = 1, imageMargin: Int = 1)
--------------------------------------------------------------------------------
/mvc-indexer/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/asset-enrichment/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=asset-enrichment.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/asset-enrichment/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/auto-creator-v2/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=auto-creator-v2.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/audit-event-generator/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/audit-history-indexer/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/enrolment-reconciliation/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/post-publish-processor/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/relation-cache-updater/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/search-indexer/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
log4j.appender.file.file=search-indexer.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/video-stream-generator/src/main/scala/org/sunbird/job/videostream/helpers/AzureRequestBody.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.videostream.helpers
2 |
object AzureRequestBody {

  // JSON request-body templates for Azure Media Services REST calls.
  // NOTE(review): tokens such as assetDescription, assetId, baseInputUrl, inputVideoFile and
  // policyName appear to be placeholders substituted by callers before sending — confirm usage
  // before editing; the JSON must remain byte-exact otherwise.
  val create_asset = " {\"properties\": {\"description\": \"assetDescription\",\"alternateId\" : \"assetId\"}}"
  val submit_job = "{\"properties\": {\"input\": {\"@odata.type\": \"#Microsoft.Media.JobInputHttp\",\"baseUri\": \"baseInputUrl\",\"files\": [\"inputVideoFile\"]},\"outputs\": [{\"@odata.type\": \"#Microsoft.Media.JobOutputAsset\",\"assetName\": \"assetId\"}]}}"
  val create_stream_locator="{\"properties\":{\"assetName\": \"assetId\",\"streamingPolicyName\": \"policyName\"}}"
}
9 |
--------------------------------------------------------------------------------
/video-stream-generator/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/audit-event-generator/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=audit-event-generator.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/audit-history-indexer/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=audit-history-indexer.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/activity-aggregate-updater/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
log4j.appender.file.file=activity-aggregate-updater.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/enrolment-reconciliation/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
log4j.appender.file.file=enrolment-reconciliation.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/exception/InvalidEventException.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.exception
2 |
3 | import org.slf4j.LoggerFactory
4 |
/**
 * Exception raised when an incoming event is invalid or cannot be processed.
 *
 * Fix: the underlying cause is now attached to the exception (Exception(message, cause))
 * instead of only being logged, so stack traces are preserved for callers — this matches
 * the sibling InvalidInputException in this package. Adding `cause` with a default keeps
 * the existing single-argument construction source-compatible.
 *
 * @param message description of the failure
 * @param cause   underlying error, if any
 */
class InvalidEventException(message: String, cause: Throwable = null) extends Exception(message, cause) {
  private[this] val logger = LoggerFactory.getLogger(classOf[InvalidEventException])

  /**
   * Convenience constructor that logs the failed event's partition/offset context
   * (read from the event map, null when absent) and propagates the cause.
   */
  def this(message: String, event: Map[String, Any], cause: Throwable) = {
    this(message, cause)
    val partitionNum = event.getOrElse("partition", null)
    val offset = event.getOrElse("offset", null)
    logger.error(s"Error while processing message for Partition: ${partitionNum} and Offset: ${offset}. Error : $message", cause)
  }
}
--------------------------------------------------------------------------------
/metrics-data-transformer/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=metrics-data-transformer.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/post-publish-processor/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=post-publish-processor.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
log4j.appender.file.file=content-publish.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/relation-cache-updater/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=relation-cache-updater.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/video-stream-generator/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=video-stream-generator.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/publish-pipeline/questionset-publish/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/test/scala/org/sunbird/incredible/BaseTestSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible
2 |
3 |
4 | import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
5 | import org.scalatestplus.mockito.MockitoSugar
6 |
/** Base spec for certificate-processor tests: FlatSpec + Matchers with Mockito sugar and a shared config. */
class BaseTestSpec extends FlatSpec with Matchers with BeforeAndAfterAll with MockitoSugar {


  // Shared certificate configuration for tests, pointing at local endpoints
  // (cert service on :9000, encryption service on :8013); URL constants come from JsonKeys.
  implicit val certificateConfig: CertificateConfig = CertificateConfig(basePath = "http://localhost:9000", encryptionServiceUrl = "http://localhost:8013", contextUrl = "context.json", evidenceUrl = JsonKeys.EVIDENCE_URL, issuerUrl = JsonKeys.ISSUER_URL, signatoryExtension = JsonKeys.SIGNATORY_EXTENSION)



}
15 |
--------------------------------------------------------------------------------
/publish-pipeline/questionset-publish/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
log4j.appender.file.file=questionset-publish.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/credential-generator/collection-cert-pre-processor/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/credential-generator/collection-certificate-generator/src/main/scala/org/sunbird/job/certgen/exceptions/ErrorMessages.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.certgen.exceptions
2 |
/** Human-readable error message templates; {0}/{1} are positional placeholders filled by callers. */
object ErrorMessages {

  val INVALID_REQUESTED_DATA: String = "Invalid Request! Please Provide Valid Request."
  val INVALID_PARAM_VALUE: String = "Invalid value {0} for parameter {1}."
  val MANDATORY_PARAMETER_MISSING: String = "Mandatory parameter {0} is missing."
}

/** Stable machine-readable error codes used when reporting failures. */
object ErrorCodes {
  val MANDATORY_PARAMETER_MISSING: String = "MANDATORY_PARAMETER_MISSING"
  val INVALID_PARAM_VALUE: String = "INVALID_PARAM_VALUE"
  val SYSTEM_ERROR: String = "SYSTEM_ERROR"
}
15 |
16 |
17 |
--------------------------------------------------------------------------------
/mvc-indexer/src/main/resources/mvc-indexer.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.mvc.processor.job.request"
5 | groupId = "sunbirddev-mvc-indexer-group"
6 | output.failed.topic = "sunbirddev.mvc.events.failed"
7 | }
8 |
9 | task {
10 | consumer.parallelism = 1
11 | parallelism = 1
12 | }
13 |
14 | lms-cassandra.keyspace = "dev_content_store"
15 | lms-cassandra.table = "content_data"
16 |
17 | nested.fields="trackable,credentials"
18 |
19 | service.content.basePath = "http://11.2.6.6/content"
20 | es.indexAlias = "mvc-content"
21 |
22 | ml.vector.host="11.2.4.22"
23 | ml.vector.port=1729
24 | ml.keyword.host="11.2.4.22"
25 | ml.keyword.port=3579
26 |
--------------------------------------------------------------------------------
/credential-generator/collection-certificate-generator/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/mvc-indexer/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.mvc.processor.job.request"
5 | groupId = "sunbirddev-mvc-indexer-group"
6 | output.failed.topic = "sunbirddev.mvc.events.failed"
7 | }
8 |
9 | task {
10 | consumer.parallelism = 1
11 | parallelism = 1
12 | }
13 |
14 | lms-cassandra.keyspace = "local_content_store"
15 | lms-cassandra.table = "content_data"
16 |
17 | nested.fields="trackable,credentials"
18 |
19 | service.content.basePath = "http://localhost:8080/content"
20 | es.indexAlias = "mvc-content"
21 |
22 | ml.vector.host="localhost"
23 | ml.vector.port=1729
24 | ml.keyword.host="localhost"
25 | ml.keyword.port=3579
26 |
27 |
--------------------------------------------------------------------------------
/credential-generator/collection-cert-pre-processor/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
log4j.appender.file.file=collection-cert-pre-processor.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/jobs-core/src/test/scala/org/sunbird/spec/BaseTestSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.spec
2 |
3 | import org.apache.flink.configuration.Configuration
4 | import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FlatSpec, Matchers}
5 | import org.scalatestplus.mockito.MockitoSugar
6 |
/** Base spec for jobs-core tests; supplies a Flink Configuration wired to the in-memory metrics reporter. */
class BaseTestSpec extends FlatSpec with Matchers with BeforeAndAfterAll with MockitoSugar with BeforeAndAfterEach {

  /** Builds a Flink Configuration that routes job metrics through [[BaseMetricsReporter]]. */
  def testConfiguration(): Configuration = {
    val conf = new Configuration()
    conf.setString("metrics.reporter", "job_metrics_reporter")
    conf.setString("metrics.reporter.job_metrics_reporter.class", classOf[BaseMetricsReporter].getName)
    conf
  }

}
17 |
--------------------------------------------------------------------------------
/credential-generator/collection-certificate-generator/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # log4j.appender.file=org.apache.log4j.FileAppender
2 | log4j.appender.file=org.apache.log4j.RollingFileAppender
3 | log4j.appender.file.file=collection-certificate-generator.log
4 | log4j.appender.file.append=true
5 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.file.MaxFileSize=256KB
7 | log4j.appender.file.MaxBackupIndex=4
8 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS} %-5p %-60c %x - %m%n
9 |
10 | # Suppress the irrelevant (wrong) warnings from the Netty channel handler
11 | log4j.logger.org.apache.flink.shaded.akka.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, file
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/exception/InvalidInputException.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.exception
2 |
3 | import org.slf4j.LoggerFactory
4 |
5 | class InvalidInputException(message: String, cause: Throwable = null) extends Exception(message, cause) {
6 |
7 | private[this] val logger = LoggerFactory.getLogger(classOf[InvalidInputException])
8 |
9 | def this(message: String, event: Map[String, Any], cause: Throwable) = {
10 | this(message, cause)
11 | val partitionNum = event.getOrElse("partition", null)
12 | val offset = event.getOrElse("offset", null)
13 | logger.error(s"Error while processing event for Partition: $partitionNum and Offset: $offset. Error : $message", cause)
14 | }
15 |
16 | }
17 |
--------------------------------------------------------------------------------
/activity-aggregate-updater/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/jobs-core/src/test/scala/org/sunbird/spec/BaseSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.spec
2 |
3 | import com.opentable.db.postgres.embedded.EmbeddedPostgres
4 | import org.scalatest.{BeforeAndAfterAll, FlatSpec}
5 | import redis.embedded.RedisServer
6 |
/**
 * Base spec that starts an embedded Redis (port 6340) and embedded Postgres (port 5430,
 * matching base-test.conf) before the suite and shuts both down afterwards.
 *
 * Fix: the EmbeddedPostgres instance was previously started but its handle discarded,
 * so it was never closed — leaking the process/port across suites. The handle is now
 * kept and closed in afterAll.
 */
class BaseSpec extends FlatSpec with BeforeAndAfterAll {
  var redisServer: RedisServer = _
  // Embedded Postgres handle, retained so it can be shut down in afterAll.
  var embeddedPostgres: EmbeddedPostgres = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    redisServer = new RedisServer(6340)
    redisServer.start()
    embeddedPostgres = EmbeddedPostgres.builder.setPort(5430).start() // Use the same port 5430 which is defined in the base-test.conf
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    redisServer.stop()
    if (embeddedPostgres != null) embeddedPostgres.close()
  }

}
23 |
--------------------------------------------------------------------------------
/auto-creator-v2/src/main/resources/auto-creator-v2.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.object.import.request"
5 | failed.topic = "sunbirddev.auto.creation.job.request.failed"
6 | groupId = "sunbirddev-auto-creator-v2-group"
7 | }
8 |
9 | task {
10 | consumer.parallelism = 1
11 | parallelism = 1
12 | }
13 |
14 | redis {
15 | database {
16 | relationCache.id = 10
17 | collectionCache.id = 5
18 | }
19 | }
20 |
21 | questionset {
22 | keyspace = "dev_hierarchy_store"
23 | }
24 | question {
25 | keyspace = "dev_question_store"
26 | }
27 |
28 | service {
29 | content.basePath = "http://11.2.6.6/content"
30 | }
31 |
32 | source {
33 | baseUrl = "https://dev.sunbirded.org/api"
34 | }
35 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/HttpUtil.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible
2 |
3 | import kong.unirest.Unirest
4 |
/** Lightweight HTTP response wrapper: numeric status code plus raw body text. */
case class HTTPResponse(status: Int, body: String)
6 |
/**
 * Thin, serializable wrapper over Unirest for JSON HTTP calls
 * (Serializable so it can be shipped inside Flink operators).
 */
class HttpUtil extends java.io.Serializable {

  /** Executes an HTTP GET against `url`, sending a JSON content-type header. */
  def get(url: String): HTTPResponse = {
    val res = Unirest.get(url).header("Content-Type", "application/json").asString()
    HTTPResponse(res.getStatus, res.getBody)
  }

  /** Executes an HTTP POST of `requestBody` to `url`, sending a JSON content-type header. */
  def post(url: String, requestBody: String): HTTPResponse = {
    val res = Unirest.post(url).header("Content-Type", "application/json").body(requestBody).asString()
    HTTPResponse(res.getStatus, res.getBody)
  }

}
20 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/test/scala/org/sunbird/incredible/processor/qrcode/QRCodeImageGeneratorTest.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.processor.qrcode
2 |
3 | import org.sunbird.incredible.{BaseTestSpec, CertificateGenerator, QrCodeModel}
class QRCodeImageGeneratorTest extends BaseTestSpec {



  // Verifies that QR code generation produces a 6-character access code and
  // writes the QR image file to disk.
  // NOTE(review): assumes "certificates/" is writable relative to the working
  // directory and leaves the generated file behind — confirm cleanup is handled.
  "check qrCode generator" should "should return qrCode file" in {
    val certificateGenerator = new CertificateGenerator()
    val qrCodeModel: QrCodeModel = certificateGenerator.generateQrCode("8e57723e-4541-11eb-b378-0242ac130002", "certificates/","http://localhost:9000")
    qrCodeModel.accessCode.length should be(6)
    qrCodeModel.qrFile.exists() should be(true)
  }


}
17 |
--------------------------------------------------------------------------------
/publish-pipeline/questionset-publish/src/test/scala/org/sunbird/job/fixture/EventFixture.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.fixture
2 |
object EventFixture {

  // Sample BE_JOB_REQUEST publish event for a Question object
  // (mimeType "application/vnd.sunbird.question", action "publish", iteration 1).
  val QUESTION_EVENT1: String =
    """
      |{"eid":"BE_JOB_REQUEST","ets":1609926636251,"mid":"LP.1609926636251.b93d8562-537e-4e52-bcf5-b9175a550391","actor":{"id":"question-publish","type":"System"},"context":{"pdata":{"ver":"1.0","id":"org.sunbird.platform"},"channel":""},"object":{"ver":"1609926299686","id":"do_113188615625731"},"edata":{"publish_type":"public","metadata":{"identifier":"do_113188615625731","mimeType":"application/vnd.sunbird.question","lastPublishedBy":null,"pkgVersion":1,"objectType":"Question"},"action":"publish","iteration":1}}
      |""".stripMargin

}
11 |
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/test/scala/org/sunbird/job/fixture/EventFixture.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.fixture
2 |
object EventFixture {

  // Sample BE_JOB_REQUEST publish event for a PDF Content object
  // (mimeType "application/pdf", action "publish", iteration 1).
  val PDF_EVENT1: String =
    """
      |{"eid":"BE_JOB_REQUEST","ets":1619527882745,"mid":"LP.1619527882745.32dc378a-430f-49f6-83b5-bd73b767ad36","actor":{"id":"content-publish","type":"System"},"context":{"channel":"","pdata":{"id":"org.sunbird.platform","ver":"1.0"}},"object":{"id":"do_11329603741667328018","ver":"1619153418829"},"edata":{"publish_type":"public","metadata":{"identifier":"do_11329603741667328018","mimeType":"application/pdf","objectType":"Content","lastPublishedBy":"sample-last-published-by","pkgVersion":1},"action":"publish","iteration":1}}
      |""".stripMargin
}
10 |
--------------------------------------------------------------------------------
/metrics-data-transformer/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.learning.graph.events"
5 | groupId = "sunbirddev-metrics-data-transformer-group"
6 | }
7 |
8 | task {
9 | consumer.parallelism = 1
10 | parallelism = 1
11 | window.time = 60
12 | }
13 |
14 | timezone = "IST"
15 |
16 | service {
17 | content.basePath = "https://localhost:9000/action"
18 | sourcing.content.basePath = "http://localhost/content"
19 | }
20 |
21 | content_read_api = "/content/v3/read"
22 | content_update_api= "/content/v4/system/update"
23 |
24 | sourcing.update.api.response.error.code = ["404"]
25 |
26 | data.metrics = ["me_totalRatingsCount","me_averageRating","me_totalTimeSpentInSec","me_totalPlaySessionCount"]
--------------------------------------------------------------------------------
/auto-creator-v2/src/test/resources/test.cql:
--------------------------------------------------------------------------------
1 | CREATE KEYSPACE IF NOT EXISTS dev_hierarchy_store with replication = {'class':'SimpleStrategy','replication_factor':1};
2 | CREATE TABLE IF NOT EXISTS dev_hierarchy_store.questionset_hierarchy (
3 | identifier text,
4 | hierarchy text,
5 | PRIMARY KEY (identifier)
6 | );
7 |
8 | CREATE KEYSPACE IF NOT EXISTS dev_question_store with replication = {'class':'SimpleStrategy','replication_factor':1};
9 | CREATE TABLE IF NOT EXISTS dev_question_store.question_data (
10 | identifier text,
11 | body blob,
12 | editorstate text,
13 | answer blob,
14 | solutions text,
15 | instructions text,
16 | hints text,
17 | media text,
18 | responsedeclaration text,
19 | interactions text,
20 | PRIMARY KEY (identifier)
21 | );
--------------------------------------------------------------------------------
/metrics-data-transformer/src/main/resources/metrics-data-transformer.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.learning.graph.events"
5 | groupId = "sunbirddev-metrics-data-transformer-group"
6 | }
7 |
8 | task {
9 | consumer.parallelism = 1
10 | parallelism = 1
11 | window.time = 60
12 | }
13 |
14 | timezone = "IST"
15 |
16 | service {
17 | content.basePath = "https://localhost:9000/action"
18 | sourcing.content.basePath = "http://localhost/content"
19 | }
20 |
21 | content_read_api = "/content/v3/read"
22 | content_update_api= "/content/v4/system/update"
23 |
24 | sourcing.update.api.response.error.code = ["404"]
25 |
26 | data.metrics = ["me_totalRatingsCount","me_averageRating","me_totalTimeSpentInSec","me_totalPlaySessionCount"]
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/cache/local/FrameworkMasterCategoryMap.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.cache.local
2 |
3 | import com.twitter.storehaus.cache.Cache
4 | import com.twitter.util.Duration
5 |
/**
 * Process-local TTL cache of framework master-category maps, keyed by id.
 * Backed by an immutable storehaus cache that is swapped on every write.
 */
object FrameworkMasterCategoryMap {

  // Cache entry time-to-live in milliseconds (hard-coded; the commented code
  // shows it was previously read from platform config).
  val ttlMS = 100000l//Platform.getLong("master.category.cache.ttl", 10000l)
  // NOTE(review): `cache` is a plain var replaced on each put; concurrent
  // put/get are not synchronized and lost updates are possible — confirm this
  // is only touched from a single Flink task thread.
  var cache = Cache.ttl[String, Map[String, AnyRef]](Duration.fromMilliseconds(ttlMS))

  /** Returns the non-expired entry for `id`, or null when absent/expired. */
  def get(id: String):Map[String, AnyRef] = {
    cache.getNonExpired(id).getOrElse(null)
  }

  /** Stores `data` under `id` by swapping in the updated immutable cache. */
  def put(id: String, data: Map[String, AnyRef]): Unit = {
    val updated = cache.putClocked(id, data)._2
    cache = updated
  }

  /** True when `id` is present in the cache. */
  // NOTE(review): unlike get(), this does not appear to filter expired
  // entries — verify Cache.contains semantics before relying on it.
  def containsKey(id: String): Boolean = {
    cache.contains(id)
  }
}
24 |
--------------------------------------------------------------------------------
/publish-pipeline/questionset-publish/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.learning.job.request"
9 | post_publish.topic = "sunbirddev.content.postpublish.request"
10 | groupId = "local-questionset-publish-group"
11 | }
12 |
13 | task {
14 | consumer.parallelism = 1
15 | parallelism = 1
16 | router.parallelism = 1
17 | }
18 |
19 | question {
20 | keyspace = "dev_question_store"
21 | table = "question_data"
22 | }
23 |
24 | questionset {
25 | keyspace = "dev_hierarchy_store"
26 | table = "questionset_hierarchy"
27 | }
28 |
29 | neo4j {
30 | routePath = "bolt://localhost:7687"
31 | graph = "domain"
32 | }
33 |
34 |
35 | print_service.base_url="http://11.2.6.6/print"
36 |
--------------------------------------------------------------------------------
/kubernets/job-cluster/k8s_job_deployment.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Source the env properties script for each job
3 | # Comment or Uncomment depending upon the job you are deploying
4 | # Create another script if you are working on new job
5 | source ./activity-aggregate-updater.sh
6 |
7 | envsubst AWSMediaServiceImpl
16 | "azure" match {
17 | case "azure" => AzureMediaServiceImpl
18 | case _ => throw new MediaServiceException("ERR_INVALID_SERVICE_TYPE", "Please Provide Valid Media Service Name")
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/metrics-data-transformer/src/main/scala/org/sunbird/job/metricstransformer/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.metricstransformer.domain
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
/**
 * Typed accessor wrapper over a raw learning-graph event consumed by the
 * MetricsDataTransformer job.
 */
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  private val jobName = "MetricsDataTransformer"

  /** Unique node identifier of the changed object ("" when absent). */
  def nodeUniqueId: String = readOrDefault("nodeUniqueId", "")

  /** Originating channel of the event ("" when absent). */
  def channel: String = readOrDefault("channel","")

  /** Transaction payload of the graph event (empty map when absent). */
  def transactionData: Map[String, AnyRef] = readOrDefault("transactionData", Map())

  /**
   * A content is eligible for metrics transfer when it carries a non-blank
   * origin identifier and non-empty origin data.
   */
  def isValidContent(originId: String, originData: Map[String,AnyRef]): Boolean = {
    StringUtils.isNotBlank(originId) && originData.nonEmpty
  }
}
--------------------------------------------------------------------------------
/auto-creator-v2/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.learning.graph.events"
5 | failed.topic = "sunbirddev.auto.creation.job.request.failed"
6 | groupId = "sunbirddev-auto-creator-v2-group"
7 | output.metrics.topic = "sunbirddev.pipeline_metrics"
8 | }
9 |
10 | task {
11 | consumer.parallelism = 1
12 | parallelism = 1
13 | producer.parallelism = 1
14 | window.time = 60
15 | }
16 |
17 | redis {
18 | database {
19 | relationCache.id = 10
20 | collectionCache.id = 5
21 | }
22 | }
23 |
24 | questionset {
25 | keyspace = "dev_hierarchy_store"
26 | }
27 | question {
28 | keyspace = "dev_question_store"
29 | }
30 |
31 | service {
32 | content.basePath = "http://11.2.6.6/content"
33 | }
34 |
35 | source {
36 | baseUrl = "https://dev.sunbirded.org/api"
37 | }
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/processor/CertModel.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.processor
2 |
3 | import org.sunbird.incredible.pojos.ob.{Criteria, Issuer, SignatoryExtension}
4 |
/**
 * Immutable value object holding everything needed to render one certificate:
 * recipient details, certificate metadata, issuer, validity window,
 * signatories, assessment info, and signing key/tag.
 */
case class CertModel(courseName: String, recipientName: String, recipientId: Option[String] = None, recipientEmail: Option[String] = None, recipientPhone: Option[String] = None
                     , certificateName: String, certificateDescription: Option[String] = None, certificateLogo: Option[String] = None, issuedDate: String, issuer: Issuer,
                     validFrom: Option[String] = None, expiry: Option[String] = None, signatoryList: Array[SignatoryExtension], assessedOn: Option[String] = None, identifier: String, criteria: Criteria,
                     keyId: String = "", tag: String = "")
9 |
--------------------------------------------------------------------------------
/jobs-core/src/test/scala/org/sunbird/spec/FileUtilsSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.spec
2 |
3 | import org.scalatest.{FlatSpec, Matchers}
4 | import org.sunbird.job.util.FileUtils
5 |
6 | import java.io.File
7 |
class FileUtilsSpec extends FlatSpec with Matchers {

  // Even an empty identifier should yield a usable (non-empty) base path.
  "getBasePath with empty identifier" should "return the path" in {
    val result = FileUtils.getBasePath("")
    result.nonEmpty shouldBe (true)
  }

  // NOTE(review): this test downloads from a live Azure blob URL; it will fail
  // offline or if the remote asset is removed — consider a local fixture or
  // embedded HTTP server instead.
  "downloadFile " should " download the media source file starting with http or https " in {
    val fileUrl: String = "https://preprodall.blob.core.windows.net/ntp-content-preprod/content/do_21273718766395392014320/artifact/book-image_1554832478631.jpg"
    val downloadedFile: File = FileUtils.downloadFile(fileUrl, "/tmp/contentBundle")
    assert(downloadedFile.exists())
  }
}
21 |
--------------------------------------------------------------------------------
/credential-generator/collection-certificate-generator/src/main/resources/collection-certificate-generator.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.generate.certificate.request"
5 | output.failed.topic = "sunbirddev.generate.certificate.failed"
6 | groupId = "certificate-generator-group"
7 | output.audit.topic = "sunbirddev.telemetry.raw"
8 | }
9 |
10 | task {
11 | consumer.parallelism = 1
12 | parallelism = 1
13 | notifier.parallelism = 1
14 | userfeed.parallelism = 1
15 | }
16 |
17 | service {
18 | certreg.basePath = "http://localhost:9000/certreg"
19 | learner.basePath = "http://localhost:9000/learner"
20 | enc.basePath = "http://localhost:9000/enc"
21 | }
22 |
23 | lms-cassandra {
24 | keyspace = "sunbird_courses"
25 | user_enrolments.table = "user_enrolments"
26 | course_batch.table = "course_batch"
27 | }
--------------------------------------------------------------------------------
/publish-pipeline/questionset-publish/src/main/resources/questionset-publish.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.assessment.publish.request"
9 | post_publish.topic = "sunbirddev.content.postpublish.request"
10 | groupId = "local-questionset-publish-group"
11 | }
12 |
13 | task {
14 | consumer.parallelism = 1
15 | parallelism = 1
16 | router.parallelism = 1
17 | }
18 |
19 | question {
20 | keyspace = "dev_question_store"
21 | table = "question_data"
22 | }
23 |
24 | questionset {
25 | keyspace = "dev_hierarchy_store"
26 | table = "questionset_hierarchy"
27 | tmp_file_location = "/tmp"
28 | template_name = "questionSetTemplate.vm"
29 | cloud_storage.itemset.folder = ""
30 | cloud_storage.content.folder = ""
31 | }
32 |
33 | print_service.base_url="http://11.2.6.6/print"
--------------------------------------------------------------------------------
/video-stream-generator/src/main/scala/org/sunbird/job/videostream/service/IMediaService.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.videostream.service
2 |
3 | import org.sunbird.job.videostream.task.VideoStreamGeneratorConfig
4 | import org.sunbird.job.util.HttpUtil
5 | import org.sunbird.job.videostream.helpers.{MediaRequest, MediaResponse}
6 |
7 |
/**
 * Provider-agnostic contract for media streaming job operations
 * (implemented per cloud media service).
 */
trait IMediaService {

  /** Submits a new media job described by `request` to the provider. */
  def submitJob(request: MediaRequest)(implicit config: VideoStreamGeneratorConfig, httpUtil: HttpUtil): MediaResponse

  /** Fetches the current state of the job identified by `jobId`. */
  def getJob(jobId: String)(implicit config: VideoStreamGeneratorConfig, httpUtil: HttpUtil): MediaResponse

  /** Resolves the streaming output paths for the job identified by `jobId`. */
  def getStreamingPaths(jobId: String)(implicit config: VideoStreamGeneratorConfig, httpUtil: HttpUtil): MediaResponse

  /** Lists jobs matching `listJobsRequest`. */
  def listJobs(listJobsRequest: MediaRequest): MediaResponse

  /** Cancels the job described by `cancelJobRequest`. */
  def cancelJob(cancelJobRequest: MediaRequest): MediaResponse

}
21 |
--------------------------------------------------------------------------------
/asset-enrichment/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.learning.job.request"
9 | groupId = "sunbirddev-asset-enrichment-group"
10 | video_stream.topic = "sunbirddev.content.postpublish.request"
11 | }
12 |
13 | task {
14 | consumer.parallelism = 1
15 | router.parallelism = 1
16 | videoEnrichment.parallelism = 1
17 | imageEnrichment.parallelism = 1
18 | }
19 |
20 | content {
21 | stream {
22 | enabled = true
23 | mimeType = ["video/mp4", "video/webm"]
24 | }
25 | youtube {
26 | applicationName = "fetch-youtube-license"
27 | regexPattern = ["\\?vi?=([^&]*)", "watch\\?.*v=([^&]*)", "(?:embed|vi?)/([^/?]*)", "^([A-Za-z0-9\\-\\_]*)"]
28 | }
29 | upload.context.driven = true
30 | }
31 |
32 | thumbnail.max {
33 | sample = 5
34 | size.pixel = 150
35 | }
--------------------------------------------------------------------------------
/search-indexer/src/main/scala/org/sunbird/job/searchindexer/models/Models.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.searchindexer.models
2 |
3 | import org.sunbird.job.searchindexer.task.SearchIndexerConfig
4 |
5 | import java.util
6 | import scala.collection.JavaConverters._
7 |
/**
 * Carries one message destined for the composite search index together with
 * the job config, exposing the config-derived values the indexer needs.
 */
case class CompositeIndexer(graphId: String, objectType: String, identifier: String, messageId: String, message: util.Map[String, Any], config: SearchIndexerConfig) {
  // Fields to be indexed as nested documents.
  def getNestedFields(): List[String] = config.nestedFields.asScala.toList
  def getDefinitionBasePath(): String = config.definitionBasePath
  // Schema version for this objectType; defaults to "1.0" when unmapped.
  def getVersionAsString(): String = config.schemaSupportVersionMap.getOrElse(objectType.toLowerCase(), "1.0").asInstanceOf[String]
  def getRestrictMetadataObjectTypes(): List[String] = config.restrictMetadataObjectTypes.asScala.toList
  def getIgnoredFields(): List[String] = config.ignoredFields
}
15 |
--------------------------------------------------------------------------------
/jobs-core/src/test/scala/org/sunbird/spec/TestStringStreamFunc.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.spec
2 |
3 | import org.apache.flink.api.common.typeinfo.TypeInformation
4 | import org.apache.flink.streaming.api.functions.ProcessFunction
5 | import org.sunbird.job.{BaseProcessFunction, Metrics}
6 |
/**
 * Pass-through test function for string streams: forwards every event to the
 * string output tag and counts it against the configured metric.
 */
class TestStringStreamFunc(config: BaseProcessTestConfig)(implicit val stringTypeInfo: TypeInformation[String])
  extends BaseProcessFunction[String, String](config) {

  /** Single metric tracked by this function. */
  override def metricsList(): List[String] = List(config.stringEventCount)

  override def processElement(event: String,
                              context: ProcessFunction[String, String]#Context,
                              metrics: Metrics): Unit = {
    context.output(config.stringOutputTag, event)
    metrics.incCounter(config.stringEventCount)
  }
}
21 |
--------------------------------------------------------------------------------
/asset-enrichment/src/main/resources/asset-enrichment.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.learning.job.request"
9 | groupId = "sunbirddev-asset-enrichment-group"
10 | video_stream.topic = "sunbirddev.content.postpublish.request"
11 | }
12 |
13 | task {
14 | consumer.parallelism = 1
15 | router.parallelism = 1
16 | videoEnrichment.parallelism = 1
17 | imageEnrichment.parallelism = 1
18 | }
19 |
20 | content {
21 | stream {
22 | enabled = true
23 | mimeType = ["video/mp4", "video/webm"]
24 | }
25 | youtube {
26 | applicationName = "fetch-youtube-license"
27 | regexPattern = ["\\?vi?=([^&]*)", "watch\\?.*v=([^&]*)", "(?:embed|vi?)/([^/?]*)", "^([A-Za-z0-9\\-\\_]*)"]
28 | }
29 | upload.context.driven = true
30 | max.iteration.count = 2
31 | }
32 |
33 | thumbnail.max {
34 | sample = 5
35 | size.pixel = 150
36 | }
--------------------------------------------------------------------------------
/video-stream-generator/src/test/resources/test.cql:
--------------------------------------------------------------------------------
1 | CREATE KEYSPACE IF NOT EXISTS local_platform_db WITH replication = {
2 | 'class': 'SimpleStrategy',
3 | 'replication_factor': '1'
4 | };
5 |
6 | CREATE TABLE IF NOT EXISTS local_platform_db.job_request (
7 | client_key text,
8 | request_id text,
9 | job_id text,
10 | status text,
11 | request_data text,
12 | location text,
13 | dt_file_created timestamp,
14 | dt_first_event timestamp,
15 | dt_last_event timestamp,
16 | dt_expiration timestamp,
17 | iteration int,
18 | dt_job_submitted timestamp,
19 | dt_job_processing timestamp,
20 | dt_job_completed timestamp,
21 | input_events int,
22 | output_events int,
23 | file_size bigint,
24 | latency int,
25 | execution_time bigint,
26 | err_message text,
27 | stage text,
28 | stage_status text,
29 | job_name text,
30 | PRIMARY KEY (client_key, request_id)
31 | );
--------------------------------------------------------------------------------
/credential-generator/collection-certificate-generator/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | kafka {
4 | input.topic = "generate.certificate.request"
5 | output.failed.topic = "generate.certificate.failed"
6 | output.audit.topic = "generate.certificate.audit"
7 | groupId = "certificate-generator-group"
8 | }
9 |
10 | task {
11 | consumer.parallelism = 1
12 | }
13 |
14 | service {
15 | certreg.basePath = "http://localhost:9000/certreg"
16 | learner.basePath = "http://localhost:9000/learner"
17 | enc.basePath = "http://localhost:9000/enc"
18 | }
19 |
20 | cert_domain_url="https://dev.sunbirded.org"
21 | cert_cloud_storage_type="azure"
22 | cert_azure_storage_secret="secret"
23 | cert_container_name="credential"
24 | cert_azure_storage_key="key"
25 |
26 | lms-cassandra {
27 | keyspace = "sunbird_courses"
28 | user_enrolments.table = "user_enrolments"
29 | course_batch.table = "course_batch"
30 | host = "localhost"
31 | port = "9142"
32 | }
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.learning.job.request"
9 | post_publish.topic = "sunbirddev.content.postpublish.request"
10 | groupId = "local-questionset-publish-group"
11 | }
12 |
13 | task {
14 | consumer.parallelism = 1
15 | parallelism = 1
16 | router.parallelism = 1
17 | }
18 |
19 | question {
20 | keyspace = "dev_question_store"
21 | table = "question_data"
22 | }
23 |
24 | questionset {
25 | keyspace = "dev_hierarchy_store"
26 | table = "questionset_hierarchy"
27 | }
28 |
29 | neo4j {
30 | routePath = "bolt://localhost:7687"
31 | graph = "domain"
32 | }
33 |
34 | //cloud_storage_type="azure"
35 | //azure_storage_key="dev"
36 | //azure_storage_container="dev"
//azure_storage_secret="dummy_secret"
38 | //aws_storage_key="dev"
39 | //aws_storage_secret="dummy_secret"
40 | //aws_storage_container="dev"
41 |
--------------------------------------------------------------------------------
/post-publish-processor/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.content.postpublish.request"
9 | groupId = "local-post-publish-processor-group"
10 | publish.topic = "sunbirddev.learning.job.request"
11 | qrimage.topic = "sunbirddev.qrimage.request"
12 | }
13 |
14 | task {
15 | consumer.parallelism = 1
16 | router.parallelism = 1
17 | shallow_copy.parallelism = 1
18 | link_dialcode.parallelism = 1
19 | batch_create.parallelism = 1
20 | }
21 |
22 | lms-cassandra {
23 | keyspace = "sunbird_courses"
24 | batchTable = "course_batch"
25 | }
26 |
27 | dialcode-cassandra {
28 | keyspace = "dialcodes"
29 | imageTable = "dialcode_images"
30 | }
31 |
32 | service {
33 | search.basePath = "http://localhost/search"
34 | lms.basePath = "http://localhost/lms"
35 | learning_service.basePath = "https://localhost"
36 | dial.basePath = "https://localhost/dial/"
37 | }
38 |
--------------------------------------------------------------------------------
/search-indexer/src/main/resources/search-indexer.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.learning.graph.events"
9 | error.topic = "sunbirddev.learning.events.failed"
10 | groupId = "local-search-indexer-group"
11 | }
12 |
13 | task {
14 | consumer.parallelism = 1
15 | router.parallelism = 1
16 | compositeSearch.parallelism = 1
17 | dialcodeIndexer.parallelism = 1
18 | dialcodemetricsIndexer.parallelism = 1
19 | }
20 |
21 | compositesearch.index.name = "compositesearch"
22 | nested.fields = ["badgeAssertions", "targets", "badgeAssociations", "plugins", "me_totalTimeSpent", "me_totalPlaySessionCount", "me_totalTimeSpentInSec", "batches", "trackable", "credentials", "discussionForum", "provider", "osMetadata", "actions"]
23 | schema.definition_cache.expiry = 14400
24 | restrict {
25 | metadata.objectTypes = []
26 | objectTypes = ["EventSet", "Questionnaire", "Misconception", "FrameworkType", "EventSet", "Event"]
27 | }
--------------------------------------------------------------------------------
/post-publish-processor/src/main/scala/org/sunbird/job/postpublish/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.postpublish.domain
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
6 | import java.util
7 |
/**
 * Typed accessor wrapper over a post-publish event consumed by the
 * PostPublishProcessor job.
 */
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  private val jobName = "PostPublishProcessor"
  /** Requested action ("" when absent). */
  def action: String = readOrDefault[String]("edata.action", "")

  /** Mime type of the published object ("" when absent). */
  def mimeType: String = readOrDefault[String]("edata.mimeType", "")

  /** Identifier of the published collection ("" when absent). */
  def collectionId: String = readOrDefault[String]("edata.identifier", "")

  // NOTE(review): the default is a java.util.HashMap but the result is cast to
  // a scala Map — if "edata" is absent (or readOrDefault returns a Java map)
  // this asInstanceOf will throw ClassCastException at runtime; verify
  // readOrDefault's return type or use a scala Map default.
  def eData: Map[String, AnyRef] = readOrDefault("edata", new util.HashMap[String, AnyRef]()).asInstanceOf[Map[String, AnyRef]]

  /** True only for the post-publish action on a collection mime type. */
  def validEvent(): Boolean = {
    StringUtils.equals("post-publish-process", action) &&
      StringUtils.equals("application/vnd.ekstep.content-collection", mimeType)
  }

}
25 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/processor/signature/Exceptions.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.processor.signature
2 |
3 |
4 | case class CustomException(msg: String) extends Exception(msg) {}
5 |
6 | @SerialVersionUID(-6315798195661762882L)
7 | object SignatureException extends Exception {
8 |
9 | @SerialVersionUID(6174717850058203376L)
10 | class CreationException(msg: String)
11 | extends CustomException("Unable to create signature: " + msg)
12 |
13 | @SerialVersionUID(4996784337180620650L)
14 | class VerificationException(message: String)
15 | extends CustomException("Unable to verify signature " + message)
16 |
17 | @SerialVersionUID(5384120386096139083L)
18 | class UnreachableException(message: String)
19 | extends CustomException("Unable to reach service: " + message)
20 |
21 | @SerialVersionUID(8311355815972497247L)
22 | class KeyNotFoundException(message: String)
23 | extends CustomException("Unable to get key: " + message)
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/jobs-core/src/test/scala/org/sunbird/spec/TestMapStreamFunc.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.spec
2 |
3 | import java.util
4 |
5 | import org.apache.flink.api.common.typeinfo.TypeInformation
6 | import org.apache.flink.streaming.api.functions.ProcessFunction
7 | import org.sunbird.job.{BaseProcessFunction, Metrics}
8 |
9 |
/**
 * Pass-through test function for map streams; deliberately exercises the
 * Metrics API (get, reset, increment) before forwarding each event.
 */
class TestMapStreamFunc(config: BaseProcessTestConfig)(implicit val stringTypeInfo: TypeInformation[String])
  extends BaseProcessFunction[util.Map[String, AnyRef], util.Map[String, AnyRef]](config) {

  override def metricsList(): List[String] = {
    List(config.mapEventCount)
  }

  override def processElement(event: util.Map[String, AnyRef],
                              context: ProcessFunction[util.Map[String, AnyRef], util.Map[String, AnyRef]]#Context,
                              metrics: Metrics): Unit = {
    // Read and reset first so the subsequent increment is the counter's only
    // contribution from this element (covers all three Metrics operations).
    metrics.get(config.mapEventCount)
    metrics.reset(config.mapEventCount)
    metrics.incCounter(config.mapEventCount)
    context.output(config.mapOutputTag, event)
  }
}
26 |
--------------------------------------------------------------------------------
/relation-cache-updater/src/main/scala/org/sunbird/job/relationcache/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.relationcache.domain
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
6 | import java.util
7 |
/**
 * Typed accessor wrapper over a collection-publish event consumed by the
 * RelationCacheUpdater job.
 */
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  val jobName = "RelationCacheUpdater"

  /** Identifier of the published collection ("" when absent). */
  def identifier: String = readOrDefault[String]("edata.identifier", "")

  /** Requested action ("" when absent). */
  def action: String = readOrDefault[String]("edata.action", "")

  /** Mime type of the published object ("" when absent). */
  def mimeType: String = readOrDefault[String]("edata.mimeType", "")

  // NOTE(review): the default is a java.util.HashMap but the result is cast to
  // a scala Map — if "edata" is absent this asInstanceOf will throw
  // ClassCastException at runtime; verify readOrDefault's return type.
  def eData: Map[String, AnyRef] = readOrDefault("edata", new util.HashMap[String, AnyRef]()).asInstanceOf[Map[String, AnyRef]]


  /** Valid when the action is allowed, the object is a collection, and an identifier is present. */
  def isValidEvent(allowedActions:List[String]): Boolean = {

    allowedActions.contains(action) && StringUtils.equalsIgnoreCase(mimeType, "application/vnd.ekstep.content-collection") &&
      StringUtils.isNotBlank(identifier)
  }

}
--------------------------------------------------------------------------------
/post-publish-processor/src/main/resources/post-publish-processor.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.content.postpublish.request"
9 | groupId = "local-post-publish-processor-group"
10 | publish.topic = "sunbirddev.learning.job.request"
11 | qrimage.topic = "sunbirddev.qrimage.request"
12 | }
13 |
14 | task {
15 | consumer.parallelism = 1
16 | router.parallelism = 1
17 | shallow_copy.parallelism = 1
18 | link_dialcode.parallelism = 1
19 | batch_create.parallelism = 1
20 | }
21 |
22 | lms-cassandra {
23 | keyspace = "sunbird_courses"
24 | batchTable = "course_batch"
25 | }
26 |
27 | dialcode-cassandra {
28 | keyspace = "dialcodes"
29 | imageTable = "dialcode_images"
30 | }
31 |
32 | service {
33 | search.basePath = "http://11.2.6.6/search"
34 | lms.basePath = "http://11.2.6.6/lms"
35 | learning_service.basePath = "http://11.2.4.22:8080/learning-service"
36 | dial.basePath = "https://dev.sunbirded.org/dial/"
37 | }
38 |
39 | dialcode {
40 | linkable.primaryCategory = ["Course"]
41 | }
--------------------------------------------------------------------------------
/enrolment-reconciliation/src/main/scala/org/sunbird/job/recounciliation/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.recounciliation.domain
2 | import org.apache.commons.lang3.StringUtils
3 | import org.sunbird.job.domain.reader.JobRequest
4 |
5 | import java.util
6 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  val jobName = "EnrolmentReconciliation"

  // Fixed: the previous default (`new util.HashMap[String, AnyRef]()`) was cast to a scala
  // Map, which throws ClassCastException whenever "edata" is absent. Uses the scala-Map
  // default form already used by the collection-cert-pre-processor Event for the same key.
  def eData: Map[String, AnyRef] = readOrDefault[Map[String, AnyRef]]("edata", Map[String, AnyRef]())

  def action: String = readOrDefault[String]("edata.action", "")

  def courseId: String = readOrDefault[String]("edata.courseId", "")

  def batchId: String = readOrDefault[String]("edata.batchId", "")

  def userId: String = readOrDefault[String]("edata.userId", "")

  // NOTE(review): eType previously duplicated `action` (both read "edata.action"); aliased
  // here to make that explicit. Confirm "edata.action" — and not e.g. "edata.type" — is the
  // intended key for the event type.
  def eType: String = action

  /** Valid when the event's type matches the single supported type (case-insensitive). */
  def isValidEvent(supportedEventType: String): Boolean = {
    StringUtils.equalsIgnoreCase(eType, supportedEventType)
  }

}
--------------------------------------------------------------------------------
/asset-enrichment/src/main/scala/org/sunbird/job/assetenricment/util/AssetFileUtils.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.assetenricment.util
2 |
3 | import org.slf4j.LoggerFactory
4 |
5 | import java.io.File
6 | import javax.activation.MimetypesFileTypeMap
7 |
object AssetFileUtils {

  private[this] val logger = LoggerFactory.getLogger("org.sunbird.job.util.AssetFileUtils")

  // Extension -> MIME mappings used to classify asset files by coarse media kind.
  val mimeTypesMap: MimetypesFileTypeMap = initializeMimeTypes()

  /** Builds the extension map covering the image/audio/video extensions this job handles. */
  def initializeMimeTypes(): MimetypesFileTypeMap = {
    val typeMap = new MimetypesFileTypeMap
    Seq("image png jpg jpeg", "audio mp3 ogg wav", "video mp4").foreach(typeMap.addMimeTypes)
    typeMap
  }

  /**
   * Classifies `file` as "Directory", "Image", "Audio", "Video" or "Other" based on the
   * registered MIME type of its extension.
   */
  def getFileType(file: File): String = {
    if (file.isDirectory) "Directory"
    else mimeTypesMap.getContentType(file).split("/")(0) match {
      case "image" => "Image"
      case "audio" => "Audio"
      case "video" => "Video"
      case _ => "Other"
    }
  }

}
34 |
--------------------------------------------------------------------------------
/credential-generator/collection-cert-pre-processor/src/main/resources/collection-cert-pre-processor.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.issue.certificate.request"
5 | output.topic = "sunbirddev.generate.certificate.request"
6 | output.failed.topic = "sunbirddev.issue.certificate.failed"
7 | groupId = "collection-cert-pre-processor-group"
8 | }
9 |
10 | task {
11 | consumer.parallelism = 1
12 | parallelism = 1
13 | generate_certificate.parallelism = 1
14 | }
15 |
16 | lms-cassandra {
17 | keyspace = "sunbird_courses"
18 | user_enrolments.table = "user_enrolments"
19 | course_batch.table = "course_batch"
20 | assessment_aggregator.table = "assessment_aggregator"
21 | }
22 |
23 | cert_domain_url="https://dev.sunbirded.org"
24 | user_read_api = "/private/user/v1/read"
25 | content_read_api = "/content/v3/read"
26 |
27 | service {
28 | content.basePath = "http://localhost:9000"
29 | learner.basePath = "http://localhost:9000"
30 | }
31 |
32 | redis-meta {
33 | host = localhost
34 | port = 6379
35 | }
36 | assessment.metrics.supported.contenttype = ["selfAssess"]
37 |
--------------------------------------------------------------------------------
/jobs-core/src/test/scala/org/sunbird/spec/TestJobRequestStreamFunc.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.spec
2 |
3 | import org.apache.flink.api.common.typeinfo.TypeInformation
4 | import org.apache.flink.streaming.api.functions.ProcessFunction
5 | import org.sunbird.job.{BaseProcessFunction, Metrics}
6 | import org.sunbird.job.domain.reader.JobRequest
7 |
class TestJobRequestStreamFunc(config: BaseProcessTestConfig) extends BaseProcessFunction[TestJobRequest, TestJobRequest](config) {

  /** Metrics registered by this test function: just the job-request event counter. */
  override def metricsList(): List[String] = List(config.jobRequestEventCount)

  /**
   * Exercises the full counter API (get, reset, increment) on the event counter, then
   * forwards the event unchanged to the job-request output tag.
   */
  override def processElement(event: TestJobRequest,
                              context: ProcessFunction[TestJobRequest, TestJobRequest]#Context,
                              metrics: Metrics): Unit = {
    metrics.get(config.jobRequestEventCount)
    metrics.reset(config.jobRequestEventCount)
    metrics.incCounter(config.jobRequestEventCount)
    context.output(config.jobRequestOutputTag, event)
  }
}
22 |
// Minimal concrete JobRequest used as the stream payload type in jobs-core tests.
class TestJobRequest(map: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(map, partition, offset) {

}
--------------------------------------------------------------------------------
/search-indexer/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.learning.graph.events"
9 | error.topic = "sunbirddev.learning.events.failed"
10 | groupId = "local-composite-search-indexer-group"
11 | }
12 |
13 | task {
14 | consumer.parallelism = 1
15 | router.parallelism = 1
16 | compositeSearch.parallelism = 1
17 | dialcodeIndexer.parallelism = 1
18 | dialcodemetricsIndexer.parallelism = 1
19 | }
20 |
21 | compositesearch.index.name = "compositesearch"
22 | nested.fields = ["badgeAssertions", "targets", "badgeAssociations", "plugins", "me_totalTimeSpent", "me_totalPlaySessionCount", "me_totalTimeSpentInSec", "batches", "trackable", "credentials"]
23 |
24 | restrict {
25 | metadata.objectTypes = []
26 | objectTypes = ["EventSet", "Questionnaire", "Misconception", "FrameworkType", "EventSet", "Event"]
27 | }
28 |
29 | schema {
30 | basePath = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/local"
31 | supportedVersion = {
32 | itemset = "2.0"
33 | }
34 | }
35 | schema.definition_cache.expiry = 14400
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Project Sunbird
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/main/scala/org/sunbird/job/content/publish/processor/EcrfObject.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.content.publish.processor
2 |
// ECRF node: a plugin with nested child plugins, a manifest, controllers and events.
// The no-arg constructor yields null collections (kept for deserialization defaults).
case class Plugin(id: String, data: Map[String, AnyRef], innerText: String, cData: String, childrenPlugin: List[Plugin], manifest: Manifest, controllers: List[Controller], events: List[Event]) {
  def this() = this("", null, "", "", null, null, null, null)
}
// ECRF manifest: holds the list of media entries referenced by the content.
case class Manifest(id: String, data: Map[String, AnyRef], innerText: String, cData: String, medias: List[Media]) {
  def this() = this("", null, "", "", null)
}
// ECRF controller node (data/innerText/cData only, no children).
case class Controller(id: String, data: Map[String, AnyRef], innerText: String, cData: String) {
  def this() = this("", null, "", "")
}
// ECRF media entry: a `src`/`type` pair plus any nested plugins it carries.
case class Media(id: String, data: Map[String, AnyRef], innerText: String, cData: String, src: String, `type`: String, childrenPlugin: List[Plugin]) {
  def this() = this("", null, "", "", "", "", null)
}
// ECRF event node with its nested plugins.
case class Event(id: String, data: Map[String, AnyRef], innerText: String, cData: String, childrenPlugin: List[Plugin]) {
  def this() = this("", null, "", "", null)
}
18 |
19 |
20 |
--------------------------------------------------------------------------------
/enrolment-reconciliation/src/main/resources/enrolment-reconciliation.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.batch.enrolment.sync.request"
5 | output.audit.topic = "sunbirddev.telemetry.raw"
6 | output.failed.topic = "sunbirddev.enrolment.reconciliation.failed"
7 | output.certissue.topic = "sunbirddev.issue.certificate.request"
8 | groupId = "sunbirddev-enrolment-reconciliation-group"
9 | }
10 |
11 | task {
12 | window.shards = 1
13 | consumer.parallelism = 1
14 | enrolment.reconciliation.parallelism = 1
15 | enrolment.complete.parallelism = 1
16 | }
17 |
18 | lms-cassandra {
19 | keyspace = "sunbird_courses"
20 | consumption.table = "user_content_consumption"
21 | user_activity_agg.table = "user_activity_agg"
22 | user_enrolments.table = "user_enrolments"
23 | }
24 |
25 | redis {
26 | database {
27 | relationCache.id = 10
28 | }
29 | }
30 |
31 | threshold.batch.write.size = 10
32 |
33 | activity {
34 | module.aggs.enabled = true
35 | filter.processed.enrolments = true
36 | collection.status.cache.expiry = 3600
37 | }
38 |
39 | service {
40 | search {
41 | basePath = "http://11.2.6.6/search"
42 | }
43 | }
--------------------------------------------------------------------------------
/jobs-core/src/test/scala/org/sunbird/spec/BaseMetricsReporter.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.spec
2 |
3 | import org.apache.flink.api.scala.metrics.ScalaGauge
4 | import org.apache.flink.metrics.reporter.MetricReporter
5 | import org.apache.flink.metrics.{Metric, MetricConfig, MetricGroup}
6 |
7 | import scala.collection.mutable
8 |
class BaseMetricsReporter extends MetricReporter {

  override def open(config: MetricConfig): Unit = {}

  override def close(): Unit = {}

  /**
   * Captures every ScalaGauge registered with Flink into the shared companion map so
   * tests can assert on metric values; all other metric types are ignored.
   */
  override def notifyOfAddedMetric(metric: Metric, metricName: String, group: MetricGroup): Unit = metric match {
    case gauge: ScalaGauge[_] =>
      // Key = scope components past the 6 runtime-specific prefixes, joined with '.', plus the metric name.
      val gaugeKey = s"${group.getScopeComponents.toSeq.drop(6).mkString(".")}.$metricName"
      BaseMetricsReporter.gaugeMetrics(gaugeKey) = gauge.asInstanceOf[ScalaGauge[Long]]
    case _ => // not a gauge — do nothing
  }

  override def notifyOfRemovedMetric(metric: Metric, metricName: String, group: MetricGroup): Unit = {}
}
26 |
/** Shared registry of captured gauges, keyed by scoped metric name. */
object BaseMetricsReporter {
  val gaugeMetrics: mutable.Map[String, ScalaGauge[Long]] = mutable.Map.empty[String, ScalaGauge[Long]]
}
30 |
--------------------------------------------------------------------------------
/credential-generator/collection-cert-pre-processor/src/main/scala/org/sunbird/job/collectioncert/domain/Models.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.collectioncert.domain
2 |
3 | import java.util.{Date, UUID}
4 |
// Enrolled user for certificate issue; oldId/issuedOn default to null when absent.
case class EnrolledUser(userId: String, oldId: String = null, issuedOn: Date = null)
6 |
7 |
// Telemetry actor; defaults identify the certificate-generator system.
case class ActorObject(id: String = "Certificate Generator", `type`: String = "System")
9 |
// Telemetry context; pdata identifies the producer (learning platform, ver 1.0).
case class EventContext(pdata: Map[String, String] = Map("ver" -> "1.0", "id" -> "org.sunbird.learning.platform"))
11 |
12 |
// Telemetry object; `type` defaults to "GenerateCertificate".
case class EventObject(id: String, `type`: String = "GenerateCertificate")
14 |
// BE_JOB_REQUEST event envelope pushed to the generate-certificate topic.
// `mid` is a unique "LMS.<uuid>" message id; `ets` the creation time in epoch millis.
case class BEJobRequestEvent(actor: ActorObject= ActorObject(),
                             eid: String = "BE_JOB_REQUEST",
                             edata: Map[String, AnyRef],
                             ets: Long = System.currentTimeMillis(),
                             context: EventContext = EventContext(),
                             mid: String = s"LMS.${UUID.randomUUID().toString}",
                             `object`: EventObject
                            )
23 |
// One assessment attempt: score obtained vs. total score for a content id.
case class AssessmentUserAttempt(contentId: String, score: Double, totalScore: Double)
--------------------------------------------------------------------------------
/search-indexer/src/main/scala/org/sunbird/job/searchindexer/compositesearch/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.searchindexer.compositesearch.domain
2 |
3 | import org.apache.commons.lang3.BooleanUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
6 | import java.util
7 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  val jobName = "SearchIndexer"

  /**
   * Indexing flag for the event. Absent (null) means true; a Boolean value is used as-is;
   * anything else is parsed leniently via BooleanUtils.toBoolean on its string form.
   */
  def index: Boolean = Option(eventMap.get("index")).forall {
    case bool: Boolean => bool
    case other => BooleanUtils.toBoolean(other.toString)
  }

  def operationType: String = readOrDefault("operationType", "")

  def id: String = readOrDefault("nodeUniqueId", "")

  def nodeType: String = readOrDefault("nodeType", "")

  def objectType: String = readOrDefault("objectType", "")

  /** Event is processable when indexable and its objectType is not restricted. */
  def validEvent(restrictObjectTypes: util.List[String]): Boolean = {
    // NOTE(review): operationType defaults to "", so this null check is vacuous; kept for behavioral parity.
    (operationType != null) && index && !restrictObjectTypes.contains(objectType)
  }

}
35 |
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/helper/FailedEventHelper.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.helper
2 |
3 | import org.sunbird.job.util.ScalaJsonUtil
4 |
5 | import java.io.{PrintWriter, StringWriter}
6 |
trait FailedEventHelper {

  /**
   * Builds the failed-event JSON for a Throwable: the message plus at most the trailing
   * 20 stack-trace frames (frames are split on "\n\t").
   */
  def getFailedEvent(jobName: String, eventMap: java.util.Map[String, Any], error: Throwable): String = {
    val frames = getStackTrace(error).split("\\n\\t")
    // Keep frames [length-21, length-1) when the trace is long; otherwise keep all of them.
    val trimmed =
      if (frames.length > 21) frames.toList.slice(frames.length - 21, frames.length - 1)
      else frames.toList
    getFailedEvent(jobName, eventMap, s"${error.getMessage} : : $trimmed")
  }

  /**
   * Adds jobName/failInfo to `eventMap` (mutated in place) and returns its JSON form.
   */
  def getFailedEvent(jobName: String, eventMap: java.util.Map[String, Any], errorString: String): String = {
    eventMap.put("jobName", jobName)
    eventMap.put("failInfo", Map("error" -> s"$errorString"))
    ScalaJsonUtil.serialize(eventMap)
  }

  /** Renders the full stack trace of `throwable` as a String. */
  def getStackTrace(throwable: Throwable): String = {
    val writer = new StringWriter
    throwable.printStackTrace(new PrintWriter(writer, true))
    writer.getBuffer.toString
  }
}
29 |
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/main/scala/org/sunbird/job/publish/core/Models.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.publish.core
2 |
class ObjectData(val identifier: String, val metadata: Map[String, AnyRef], val extData: Option[Map[String, AnyRef]] = None, val hierarchy: Option[Map[String, AnyRef]] = None) {

  // Identifier as stored in the DB; falls back to the constructor identifier.
  val dbId: String = getString("identifier", identifier)

  // Object type from metadata ("" when absent).
  val dbObjType: String = getString("objectType", "")

  // Package version as an Int; metadata may hold it as any Number (default 0.0 -> 0).
  val pkgVersion: Int = metadata.getOrElse("pkgVersion", Double.box(0.0)).asInstanceOf[Number].intValue()

  // MIME type from metadata ("" when absent).
  val mimeType: String = getString("mimeType", "")

  /** Reads a String metadata value, returning `defaultVal` when the key is absent. */
  def getString(key: String, defaultVal: String): String = metadata.getOrElse(key, defaultVal).asInstanceOf[String]

}
16 |
// Cassandra external-data location: keyspace/table, primary-key columns and property type mappings.
case class ExtDataConfig(keyspace: String, table: String, primaryKey: List[String] = List(), propsMapping: Map[String, AnyRef] = Map())
18 |
// Schema-definition lookup config: supported schema versions per object type plus the base path.
case class DefinitionConfig(supportedVersion: Map[String, AnyRef], basePath: String)
20 |
// Optional external data and hierarchy fetched for an object (either may be absent).
case class ObjectExtData(data: Option[Map[String, AnyRef]] = None, hierarchy: Option[Map[String, AnyRef]] = None)
22 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/ScalaModuleJsonUtils.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible
2 |
3 |
4 | import java.io.File
5 |
6 | import com.fasterxml.jackson.annotation.JsonInclude.Include
7 | import com.fasterxml.jackson.core.JsonGenerator.Feature
8 | import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper, SerializationFeature}
9 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
10 |
object ScalaModuleJsonUtils {

  // Shared Jackson mapper for Scala types: lenient on unknown/empty properties, plain
  // BigDecimal output, and null/absent fields omitted (NON_ABSENT set last, so it wins).
  @transient val mapper = new ObjectMapper()
  mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
  mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
  mapper.configure(Feature.WRITE_BIGDECIMAL_AS_PLAIN, true)
  mapper.setSerializationInclusion(Include.NON_NULL)
  mapper.setSerializationInclusion(Include.NON_ABSENT)
  mapper.registerModule(DefaultScalaModule)

  /** Writes `obj` as JSON into `file`. */
  @throws(classOf[Exception])
  def writeToJsonFile(file: File, obj: AnyRef): Unit = mapper.writeValue(file, obj)

  /** Serializes `obj` to a JSON string. */
  @throws(classOf[Exception])
  def serialize(obj: AnyRef): String = mapper.writeValueAsString(obj)

}
33 |
--------------------------------------------------------------------------------
/jobs-core/src/test/scala/org/sunbird/spec/SlugSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.spec
2 |
3 | import org.scalatest.{FlatSpec, Matchers}
4 | import org.sunbird.job.util.Slug
5 |
6 | import java.io.File
7 |
// Unit tests for the Slug utility: slug generation, transliteration, duplicate-char
// removal and slugged file names.
// NOTE(review): several test names below say "should throw IllegalArgumentException"
// but actually assert a normal result — the names look copy-pasted. Renaming would
// change runtime strings, so the mismatch is only flagged here.
class SlugSpec extends FlatSpec with Matchers {

  // Special chars and leading/trailing separators are stripped; case is lowered.
  "test makeSlug" should "return make slug successfully" in {
    val sluggified = Slug.makeSlug(" -Cov -e*r+I/ αma.ge.png-- ")
    assert("cov-er-i-ma.ge.png" == sluggified)
  }
  // Null input must be rejected.
  "test makeSlug with null" should "throw IllegalArgumentException" in {
    intercept[IllegalArgumentException] {
      Slug.makeSlug(null)
    }
  }
  // With transliteration enabled, α is mapped to the Latin 'a'.
  "test makeSlug with Transliterate" should "throw IllegalArgumentException" in {
    val sluggified = Slug.makeSlug(" Cov -e*r+I/ αma.ge.png ", true)
    assert("cov-er-i-ama.ge.png" == sluggified)
  }
  // Consecutive duplicate characters collapse to one.
  "test makeSlug with duplicates" should "throw IllegalArgumentException" in {
    val sluggified = Slug.removeDuplicateChars("akssaaklla")
    assert("aksakla" == sluggified)
  }
  // createSlugFile renames based on the slugged file name.
  "test create Slug file" should "throw IllegalArgumentException" in {
    val file = new File("-αimage.jpg")
    val slugFile = Slug.createSlugFile(file)
    assert("aimage.jpg" == slugFile.getName)
  }
}
33 |
--------------------------------------------------------------------------------
/credential-generator/collection-cert-pre-processor/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | kafka {
4 | input.topic = "flink.issue.certificate.request"
5 | output.topic = "flink.generate.certificate.request"
6 | output.failed.topic = "flink.issue.certificate.failed"
7 | groupId = "flink-collection-cert-pre-processor-group"
8 | }
9 |
10 | task {
11 | consumer.parallelism = 1
12 | parallelism = 1
13 | generate_certificate.parallelism = 1
14 | }
15 |
16 | lms-cassandra {
17 | keyspace = "sunbird_courses"
18 | user_enrolments.table = "user_enrolments"
19 | course_batch.table = "course_batch"
20 | assessment_aggregator.table = "assessment_aggregator"
21 | host = "localhost"
22 | port = "9142"
23 | }
24 |
25 | dp-redis {
26 | host = localhost
27 | port = 6340
28 | database.index = 5
29 | }
30 |
31 | cert_domain_url="https://dev.sunbirded.org"
32 | user_read_api = "/private/user/v1/read"
33 | content_read_api = "/content/v3/read"
34 |
35 | service {
36 | content.basePath = "http://localhost:9000/content"
37 | learner.basePath = "http://localhost:9000/learner"
38 | }
39 |
40 | redis-meta {
41 | host = localhost
42 | port = 6379
43 | }
44 | assessment.metrics.supported.contenttype = ["selfAssess"]
45 |
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/main/scala/org/sunbird/job/publish/helpers/ObjectEnrichment.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.publish.helpers
2 |
3 | import org.sunbird.job.domain.`object`.DefinitionCache
4 | import org.sunbird.job.publish.config.PublishConfig
5 | import org.sunbird.job.publish.core.{DefinitionConfig, ExtDataConfig, ObjectData}
6 | import org.sunbird.job.util.{CassandraUtil, CloudStorageUtil, Neo4JUtil}
7 |
trait ObjectEnrichment extends FrameworkDataEnrichment with ThumbnailGenerator {

  /**
   * Enrichment pipeline for a publishable object: framework-data enrichment first,
   * then the implementation-specific metadata enrichment, then thumbnail generation.
   * Each optional stage falls back to the previous object when it returns None.
   */
  def enrichObject(obj: ObjectData)(implicit neo4JUtil: Neo4JUtil, cassandraUtil: CassandraUtil, readerConfig: ExtDataConfig, cloudStorageUtil: CloudStorageUtil, config: PublishConfig, definitionCache: DefinitionCache, definitionConfig: DefinitionConfig): ObjectData = {
    val newObj = enrichFrameworkData(obj)
    val enObj = enrichObjectMetadata(newObj).getOrElse(newObj)
    generateThumbnail(enObj).getOrElse(enObj)
  }

  /** Implemented by concrete publishers to enrich object-type-specific metadata; None means "no change". */
  def enrichObjectMetadata(obj: ObjectData)(implicit neo4JUtil: Neo4JUtil, cassandraUtil: CassandraUtil, readerConfig: ExtDataConfig, cloudStorageUtil: CloudStorageUtil, config: PublishConfig, definitionCache: DefinitionCache, definitionConfig: DefinitionConfig): Option[ObjectData]

}
19 |
--------------------------------------------------------------------------------
/asset-enrichment/src/main/scala/org/sunbird/job/assetenricment/models/Asset.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.assetenricment.models
2 |
3 | import java.util
4 | import scala.collection.mutable
5 |
case class Asset(eventMap: util.Map[String, Any]) {

  // Mutable working copy of the asset's metadata, accumulated during enrichment.
  val metadata: mutable.Map[String, AnyRef] = mutable.Map[String, AnyRef]()

  /** Merges all entries of `data` into the metadata map. */
  def putAll(data: Map[String, AnyRef]): Unit = metadata ++= data

  // Fixed: dropped the redundant `value.asInstanceOf[AnyRef]` — `value` is already AnyRef.
  def put(key: String, value: AnyRef): Unit = metadata.put(key, value)

  /** Returns the metadata value for `key`, or `defaultValue` when absent. */
  def get(key: String, defaultValue: AnyRef): AnyRef = metadata.getOrElse(key, defaultValue)

  /** Immutable snapshot of the current metadata. */
  def getMetadata: Map[String, AnyRef] = metadata.toMap

  def artifactBasePath: String = metadata.getOrElse("artifactBasePath", "").asInstanceOf[String]

  def artifactUrl: String = metadata.getOrElse("artifactUrl", "").asInstanceOf[String]

  def identifier: String = metadata.getOrElse("IL_UNIQUE_ID", "").asInstanceOf[String]

  def mimeType: String = metadata.getOrElse("mimeType", "").asInstanceOf[String]

  /**
   * True only when upload is context-driven AND both artifact paths are set AND the
   * artifact URL lies under the artifact base path.
   */
  def validate(contentUploadContextDriven: Boolean): Boolean = {
    contentUploadContextDriven && artifactBasePath.nonEmpty && artifactUrl.nonEmpty && artifactUrl.contains(artifactBasePath)
  }
}
30 |
--------------------------------------------------------------------------------
/audit-event-generator/src/main/scala/org/sunbird/job/auditevent/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.auditevent.domain
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  // Suffix carried by image-node ids; stripped to resolve the original object id.
  private val IMAGE_SUFFIX = ".img"
  private val jobName = "AuditEventGenerator"

  def id: String = readOrDefault("nodeUniqueId", "")

  def operationType: String = readOrDefault("operationType", "")

  // May be null when "nodeUniqueId" is absent — callers must handle.
  def nodeUniqueId: String = readOrDefault("nodeUniqueId", null)

  // NOTE(review): `.get` throws NoSuchElementException when "createdOn" is missing — confirm upstream always sets it.
  def createdOn: String = read("createdOn").get

  // The event's channel, falling back to the supplied default.
  def channelId(channel: String): String = readOrDefault("channel", channel)

  // Node id with the image suffix removed; passes null through unchanged.
  // NOTE(review): replaceAll treats ".img" as a regex (the dot matches any char) — consider quoting.
  def objectId: String = if (null != nodeUniqueId) nodeUniqueId.replaceAll(IMAGE_SUFFIX, "") else nodeUniqueId

  def objectType: String = readOrDefault[String]("objectType", null)

  def userId: String = readOrDefault[String]("userId", "")

  def transactionData: Map[String, AnyRef] = readOrDefault("transactionData", Map[String, AnyRef]())

  // An event is processable only when it carries a non-blank objectType.
  def isValid: Boolean = {
    StringUtils.isNotBlank(objectType)
  }

}
34 |
--------------------------------------------------------------------------------
/credential-generator/collection-cert-pre-processor/src/main/scala/org/sunbird/job/collectioncert/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.collectioncert.domain
2 |
3 | import org.sunbird.job.collectioncert.task.CollectionCertPreProcessorConfig
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  def action: String = readOrDefault[String]("edata.action", "")

  def batchId: String = readOrDefault[String]("edata.batchId", "")

  def courseId: String = readOrDefault[String]("edata.courseId", "")

  /** First entry of edata.userIds, or "" when the list is empty/absent. */
  def userId: String = readOrDefault[List[String]]("edata.userIds", List[String]()).headOption.getOrElse("")

  def reIssue: Boolean = readOrDefault[Boolean]("edata.reIssue", false)

  def eData: Map[String, AnyRef] = readOrDefault[Map[String, AnyRef]]("edata", Map[String, AnyRef]())

  /**
   * Valid when the action matches the configured issue-certificate action and the
   * batch, course and user ids are all present.
   */
  def isValid()(config: CollectionCertPreProcessorConfig): Boolean = {
    config.issueCertificate.equalsIgnoreCase(action) &&
      batchId.nonEmpty && courseId.nonEmpty && userId.nonEmpty
  }

}
30 |
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/test/resources/test.conf:
--------------------------------------------------------------------------------
1 | include "base-test.conf"
2 |
3 | job {
4 | env = "sunbirddev"
5 | }
6 |
7 | kafka {
8 | input.topic = "sunbirddev.publish.job.request"
9 | post_publish.topic = "sunbirddev.content.postpublish.request"
10 | error.topic = "sunbirddev.learning.events.failed"
11 | groupId = "local-content-publish-group"
12 | }
13 |
14 | task {
15 | consumer.parallelism = 1
16 | parallelism = 1
17 | router.parallelism = 1
18 | }
19 |
20 | redis {
21 | database {
22 | contentCache.id = 0
23 | }
24 | }
25 |
26 | content {
27 | bundleLocation = "/tmp/contentBundle"
28 | isECARExtractionEnabled = true
29 | retry_asset_download_count = 1
30 | keyspace = "dev_content_store"
31 | table = "content_data"
32 | tmp_file_location = "/tmp"
33 | objectType = ["Content", "ContentImage"]
34 | mimeType = ["application/pdf", "video/avi", "video/mpeg", "video/quicktime", "video/3gpp", "video/mpeg", "video/mp4", "video/ogg", "video/webm", "application/vnd.ekstep.html-archive","application/vnd.ekstep.ecml-archive"]
35 | stream {
36 | enabled = true
37 | mimeType = ["video/mp4", "video/webm"]
38 | }
39 | artifact.size.for_online=209715200
40 | }
41 |
42 | service {
43 | print.basePath = "http://11.2.6.6/print"
44 | }
45 |
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/main/scala/org/sunbird/job/publish/helpers/ObjectTemplateGenerator.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.publish.helpers
2 |
3 | import org.apache.velocity.VelocityContext
4 | import org.apache.velocity.app.Velocity
5 |
6 | import java.io.StringWriter
7 | import java.util.Properties
8 |
trait ObjectTemplateGenerator {

  /**
   * Renders the named Velocity template with the entries of `context` and returns
   * the merged output as a String (UTF-8 encoding).
   */
  def handleHtmlTemplate(templateName: String, context: Map[String, AnyRef]): String = {
    initVelocityEngine(templateName)
    val vc = new VelocityContext()
    context.foreach { case (key, value) => vc.put(key, value) }
    val out = new StringWriter()
    Velocity.mergeTemplate(templateName, "UTF-8", vc, out)
    out.toString
  }

  /**
   * Initializes Velocity. Classpath resource loading is configured only for bare
   * template names (neither an http URL nor an absolute path).
   */
  private def initVelocityEngine(templateName: String): Unit = {
    val props = new Properties()
    if (!(templateName.startsWith("http") || templateName.startsWith("/"))) {
      props.setProperty("resource.loader", "class")
      props.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader")
    }
    Velocity.init(props)
  }
}
30 |
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/main/scala/org/sunbird/job/content/publish/processor/XMLLoaderWithCData.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.content.publish.processor
2 |
3 | import org.xml.sax.InputSource
4 | import org.xml.sax.ext.{DefaultHandler2, LexicalHandler}
5 |
6 | import scala.xml.factory.XMLLoader
7 | import scala.xml.parsing.FactoryAdapter
8 | import scala.xml.{Elem, PCData, SAXParser, TopScope}
9 |
// XMLLoader variant that preserves CDATA sections as PCData nodes instead of
// letting the default SAX flow merge them into plain text.
object XMLLoaderWithCData extends XMLLoader[Elem] {
  // Lexical handler that flushes buffered text when a CDATA block starts and
  // pushes the CDATA content onto the adapter's node stack when it ends.
  def lexicalHandler(adapter: FactoryAdapter): LexicalHandler =
    new DefaultHandler2 {
      // Wrap the buffered characters in a PCData node and clear the buffer.
      def captureCData(): Unit = {
        adapter.hStack push PCData(adapter.buffer.toString)
        adapter.buffer.clear()
      }

      // Flush any plain text accumulated before the CDATA section begins.
      override def startCDATA(): Unit = adapter.captureText()
      override def endCDATA(): Unit = captureCData()
    }

  // Parses `source` with `parser`, installing the CDATA-aware lexical handler
  // before delegating to the standard FactoryAdapter parse flow.
  override def loadXML(source: InputSource, parser: SAXParser): Elem = {
    val newAdapter = adapter

    val xmlReader = parser.getXMLReader
    // Register the handler so the parser reports CDATA boundaries.
    xmlReader.setProperty(
      "http://xml.org/sax/properties/lexical-handler",
      lexicalHandler(newAdapter))

    newAdapter.scopeStack push TopScope
    parser.parse(source, newAdapter)
    newAdapter.scopeStack.pop()

    newAdapter.rootElem.asInstanceOf[Elem]
  }
}
37 |
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/util/ScalaJsonUtil.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.util
2 |
3 | import java.lang.reflect.{ParameterizedType, Type}
4 |
5 | import com.fasterxml.jackson.core.`type`.TypeReference
6 | import com.fasterxml.jackson.databind.ObjectMapper
7 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
8 |
object ScalaJsonUtil {

  /** Shared Jackson mapper configured to understand Scala collections and case classes. */
  @transient val mapper = new ObjectMapper()
  mapper.registerModule(DefaultScalaModule)

  /** Serializes the given object to its JSON string representation. */
  @throws(classOf[Exception])
  def serialize(obj: AnyRef): String = mapper.writeValueAsString(obj)

  /** Deserializes a JSON string into an instance of T, preserving generic type arguments via Manifest. */
  @throws(classOf[Exception])
  def deserialize[T: Manifest](value: String): T = mapper.readValue(value, typeReference[T])

  // Builds a Jackson TypeReference that carries the full (possibly parameterized) runtime type of T.
  private[this] def typeReference[T: Manifest] = new TypeReference[T] {
    override def getType = typeFromManifest(manifest[T])
  }

  // Translates a Scala Manifest into a java.lang.reflect.Type, recursing into type arguments.
  private[this] def typeFromManifest(m: Manifest[_]): Type =
    if (m.typeArguments.isEmpty) m.runtimeClass
    // $COVERAGE-OFF$Disabling scoverage as this code is impossible to test
    else
      new ParameterizedType {
        def getRawType = m.runtimeClass
        def getActualTypeArguments = m.typeArguments.map(typeFromManifest).toArray
        def getOwnerType = null
      }
  // $COVERAGE-ON$
}
38 |
--------------------------------------------------------------------------------
/post-publish-processor/src/test/resources/test.cql:
--------------------------------------------------------------------------------
-- Test schema for post-publish-processor.
-- Fix: the collection column types had lost their angle-bracket type
-- parameters (e.g. "map>>", bare "list"), which is invalid CQL; restored to
-- the standard sunbird course_batch / dialcode_images DDL -- confirm against
-- the production schema.

CREATE KEYSPACE IF NOT EXISTS sunbird_courses with replication = {'class':'SimpleStrategy','replication_factor':1};

CREATE TABLE IF NOT EXISTS sunbird_courses.course_batch (
    courseid text,
    batchid text,
    cert_templates map<text, frozen<map<text, text>>>,
    createdby text,
    createddate text,
    createdfor list<text>,
    description text,
    enddate text,
    enrollmentenddate text,
    enrollmenttype text,
    mentors list<text>,
    name text,
    startdate text,
    status int,
    updateddate text,
    PRIMARY KEY (courseid, batchid)
);

CREATE KEYSPACE IF NOT EXISTS dialcodes with replication = {'class':'SimpleStrategy','replication_factor':1};

CREATE TABLE IF NOT EXISTS dialcodes.dialcode_images (
    filename text PRIMARY KEY,
    channel text,
    config map<text, text>,
    created_on timestamp,
    dialcode text,
    publisher text,
    status int,
    url text
);

INSERT INTO dialcodes.dialcode_images(filename, channel, dialcode, url) VALUES ('0_Q1I5I3', 'b00bc992ef25f1a9a8d63291e20efc8d', 'Q1I5I3', 'https://sunbirddev.blob.core.windows.net/sunbird-content-dev/in.ekstep/0_Q1I5I3.png') ;
36 |
--------------------------------------------------------------------------------
/activity-aggregate-updater/src/test/resources/test.conf:
--------------------------------------------------------------------------------
include "base-test.conf"

# Kafka topics consumed/produced by the activity-aggregate-updater job under test.
kafka {
  input.topic = "sunbirddev.coursebatch.job.request"
  output.audit.topic = "sunbirddev.telemetry.raw"
  output.failed.topic = "sunbirddev.activity.agg.failed"
  output.certissue.topic = "sunbirddev.issue.certificate.request"
  groupId = "sunbirddev-activity-aggregate-updater-group"
}

# Operator parallelism (all 1 for the test harness).
task {
  window.shards = 1
  consumer.parallelism = 1
  dedup.parallelism = 1
  activity.agg.parallelism = 1
  enrolment.complete.parallelism = 1
}

# Cassandra keyspace/tables holding consumption and enrolment state.
lms-cassandra {
  keyspace = "sunbird_courses"
  consumption.table = "user_content_consumption"
  user_activity_agg.table = "user_activity_agg"
  user_enrolments.table = "user_enrolments"
}

redis {
  database {
    # Redis DB index used for the collection relation cache.
    relationCache.id = 10
  }
}

# Small batch thresholds so writes flush quickly during tests.
threshold.batch.read.interval = 60 // In sec
threshold.batch.read.size = 1
threshold.batch.write.size = 5

# Redis instance used for input-event de-duplication.
dedup-redis {
  host = localhost
  port = 6340
  database.index = 13
  database.expiry = 600
}

activity {
  module.aggs.enabled = true
  input.dedup.enabled = true
  # Expiry (seconds) for the cached collection status.
  collection.status.cache.expiry = 3600
}

# Downstream service endpoints.
service {
  search {
    basePath = "http://search-service:9000"
  }
}
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/main/scala/org/sunbird/job/publish/helpers/ObjectValidator.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.publish.helpers
2 |
3 | import org.slf4j.LoggerFactory
4 | import org.sunbird.job.publish.core.ObjectData
5 |
6 | import scala.collection.mutable.ListBuffer
7 |
trait ObjectValidator {

  private[this] val logger = LoggerFactory.getLogger(classOf[ObjectValidator])

  /**
   * Runs the standard validation and appends the messages produced by the
   * supplied custom validation function.
   */
  def validate(obj: ObjectData, identifier: String, customFn: (ObjectData, String) => List[String]): List[String] =
    validate(obj, identifier) ++ customFn(obj, identifier)

  /**
   * Checks the mandatory metadata of the object and returns one message per
   * missing property; an empty list means the object passed validation.
   */
  def validate(obj: ObjectData, identifier: String): List[String] = {
    logger.info(s"Validating object with id: ${obj.identifier}")
    val noMetadataMsg =
      if (obj.metadata.isEmpty) List(s"""There is no metadata available for : $identifier""") else Nil
    // Properties every publishable object must define.
    val requiredProps = List("mimeType", "primaryCategory", "name", "code")
    val missingPropMsgs = requiredProps.collect {
      case prop if obj.metadata.get(prop).isEmpty => s"""There is no $prop defined for : $identifier"""
    }
    noMetadataMsg ++ missingPropMsgs
  }
}
27 |
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/test/scala/org/sunbird/job/publish/spec/ObjectValidatorTestSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.publish.spec
2 |
3 | import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
4 | import org.scalatestplus.mockito.MockitoSugar
5 | import org.sunbird.job.publish.core.ObjectData
6 | import org.sunbird.job.publish.helpers.ObjectValidator
7 |
class ObjectValidatorTestSpec extends FlatSpec with BeforeAndAfterAll with Matchers with MockitoSugar {

  override protected def beforeAll(): Unit = {
    super.beforeAll()
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
  }

  // The metadata below deliberately omits mimeType, primaryCategory and code,
  // so exactly three validation messages are expected.
  "Object Validator " should " validate the object and return messages" in {
    val objValidator = new TestObjectValidator()
    val obj = new ObjectData("do_123", Map[String, AnyRef]("name" -> "Content Name", "identifier" -> "do_123.img", "IL_UNIQUE_ID" -> "do_123.img", "pkgVersion" -> 2.0.asInstanceOf[AnyRef]))
    val messages = objValidator.validate(obj, "do_123")
    messages should have length(3)
    messages should contain ("There is no mimeType defined for : do_123")
    messages should contain ("There is no primaryCategory defined for : do_123")
    messages should contain ("There is no code defined for : do_123")
  }

}

// Concrete instantiation of the ObjectValidator trait for testing.
class TestObjectValidator extends ObjectValidator {

}
33 |
--------------------------------------------------------------------------------
/jobs-core/src/test/resources/base-test.conf:
--------------------------------------------------------------------------------
# Kafka connection and topic names used by the jobs-core test harness.
kafka {
  broker-servers = "localhost:9093"
  zookeeper = "localhost:2183"
  map.input.topic = "local.telemetry.map.input"
  map.output.topic = "local.telemetry.map.output"
  string.input.topic = "local.telemetry.string.input"
  string.output.topic = "local.telemetry.string.output"
  jobRequest.input.topic = "local.jobrequest.input"
  jobRequest.output.topic = "local.jobrequest.output"
  groupId = "test-consumer-group"
  auto.offset.reset = "earliest"
  producer {
    max-request-size = 102400
  }
}

# Flink task/checkpointing settings for tests.
task {
  checkpointing.compressed = true
  checkpointing.pause.between.seconds = 30000
  checkpointing.interval = 60000
  restart-strategy.attempts = 1
  restart-strategy.delay = 10000
  parallelism = 1
  consumer.parallelism = 1
}

# Redis client connection timeout (milliseconds).
redisdb.connection.timeout = 30000

redis {
  host = localhost
  port = 6340
  database {
    key.expiry.seconds = 3600
  }
}

# Test Cassandra endpoint (embedded server port).
lms-cassandra {
  host = "localhost"
  port = 9142
}

neo4j {
  routePath = "bolt://localhost:7687"
  graph = "domain"
}

es {
  basePath = "localhost:9200"
}

# Location of object schema definitions and the schema versions supported.
schema {
  basePath = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/local"
  supportedVersion = {"itemset": "2.0"}
}
--------------------------------------------------------------------------------
/kubernets/job-cluster/job-cluster-taskmanager.yaml:
--------------------------------------------------------------------------------
# Flink TaskManager deployment for a per-job cluster.
# ${...} placeholders are substituted at deploy time.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: ${JOB_NAME}-taskmanager
spec:
  replicas: 1
  selector:
    matchLabels:
      app: flink
      component: ${JOB_NAME}-taskmanager
  template:
    metadata:
      labels:
        app: flink
        component: ${JOB_NAME}-taskmanager
    spec:
      containers:
      - name: ${JOB_NAME}-taskmanager
        image: ${SUNBIRD_DATAPIPELINE_IMAGE}
        imagePullPolicy: Always
        workingDir: /opt/flink
        command: ["/opt/flink/bin/taskmanager.sh"]
        # Flink settings passed as -D overrides: Azure blob credentials (state
        # backend), Prometheus metrics reporter, and JobManager RPC address.
        args: ["start-foreground",
        "-Dfs.azure.account.key.${AZURE_STORAGE_ACCOUNT}.blob.core.windows.net=${AZURE_STORAGE_SECRET}",
        "-Dweb.submit.enable=false",
        "-Dmetrics.reporter.prom.class=org.apache.flink.metrics.prometheus.PrometheusReporter",
        "-Dmetrics.reporter.prom.port=9251-9260",
        "-Djobmanager.rpc.address=${JOB_NAME}-jobmanager",
        "-Dtaskmanager.rpc.port=6122",
        "-Dtaskmanager.heap.size=1024m",
        "-Dtaskmanager.numberOfTaskSlots=2"]
        ports:
        - containerPort: 6122
          name: rpc
        # Liveness check against the TaskManager RPC port.
        livenessProbe:
          tcpSocket:
            port: 6122
          initialDelaySeconds: 30
          periodSeconds: 60
40 |
--------------------------------------------------------------------------------
/asset-enrichment/src/test/scala/org/sunbird/job/spec/AssetFileUtilsSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.spec
2 |
3 | import org.sunbird.job.assetenricment.util.AssetFileUtils
4 | import org.sunbird.spec.BaseTestSpec
5 |
6 | import java.io.File
7 |
class AssetFileUtilsSpec extends BaseTestSpec {

  // Each case feeds a bundled test resource to AssetFileUtils.getFileType and
  // checks the detected file-type category.

  "getFileType" should "return file type for the provided audio file" in {
    val file = new File(getClass.getClassLoader.getResource("test_files/test_audio.mp3").getFile)
    val fileType = AssetFileUtils.getFileType(file)
    assert(fileType == "Audio")
  }

  "getFileType" should "return file type for the provided video file" in {
    val file = new File(getClass.getClassLoader.getResource("test_files/test_video.mp4").getFile)
    val fileType = AssetFileUtils.getFileType(file)
    assert(fileType == "Video")
  }

  // Non-media types fall back to the "Other" category.
  "getFileType" should "return file type for the provided pdf file" in {
    val file = new File(getClass.getClassLoader.getResource("test_files/test_other.pdf").getFile)
    val fileType = AssetFileUtils.getFileType(file)
    assert(fileType == "Other")
  }

  "getFileType" should "return file type for the provided directory" in {
    val file = new File(getClass.getClassLoader.getResource("test_files").getFile)
    val fileType = AssetFileUtils.getFileType(file)
    assert(fileType == "Directory")
  }

}
35 |
--------------------------------------------------------------------------------
/activity-aggregate-updater/src/main/resources/activity-aggregate-updater.conf:
--------------------------------------------------------------------------------
include "base-config.conf"

# Kafka topics consumed/produced by the activity-aggregate-updater job.
kafka {
  input.topic = "sunbirddev.coursebatch.job.request"
  output.audit.topic = "sunbirddev.telemetry.raw"
  output.failed.topic = "sunbirddev.activity.agg.failed"
  output.certissue.topic = "sunbirddev.issue.certificate.request"
  groupId = "sunbirddev-activity-aggregate-updater-group"
}

# Per-operator parallelism.
task {
  window.shards = 1
  consumer.parallelism = 1
  dedup.parallelism = 1
  activity.agg.parallelism = 1
  enrolment.complete.parallelism = 1
}

# Cassandra keyspace/tables holding consumption and enrolment state.
lms-cassandra {
  keyspace = "sunbird_courses"
  consumption.table = "user_content_consumption"
  user_activity_agg.table = "user_activity_agg"
  user_enrolments.table = "user_enrolments"
}

redis {
  database {
    # Redis DB index used for the collection relation cache.
    relationCache.id = 10
  }
}

# Redis instance used for input-event de-duplication.
dedup-redis {
  host = 11.2.4.22
  port = 6379
  database.index = 3
  database.expiry = 604800
}

# Batch thresholds for reading/writing aggregates.
threshold.batch.read.interval = 60 // In sec
threshold.batch.read.size = 1000
threshold.batch.write.size = 10

activity {
  module.aggs.enabled = true
  input.dedup.enabled = true
  filter.processed.enrolments = true
  # Expiry (seconds) for the cached collection status.
  collection.status.cache.expiry = 3600
}

# Downstream service endpoints.
service {
  search {
    basePath = "http://11.2.6.6/search"
  }
}
--------------------------------------------------------------------------------
/enrolment-reconciliation/src/test/resources/test.conf:
--------------------------------------------------------------------------------
include "base-test.conf"

# Kafka topics consumed/produced by the enrolment-reconciliation job under test.
kafka {
  input.topic = "sunbirddev.batch.enrolment.sync.request"
  output.audit.topic = "sunbirddev.telemetry.raw"
  output.failed.topic = "sunbirddev.enrolment.reconciliation.failed"
  output.certissue.topic = "sunbirddev.issue.certificate.request"
  groupId = "sunbirddev-enrolment-reconciliation-group"
}

# Per-operator parallelism (all 1 for the test harness).
task {
  window.shards = 1
  consumer.parallelism = 1
  dedup.parallelism = 1
  enrolment.reconciliation.parallelism = 1
  enrolment.complete.parallelism = 1
}

# Cassandra keyspace/tables holding consumption and enrolment state.
lms-cassandra {
  keyspace = "sunbird_courses"
  consumption.table = "user_content_consumption"
  user_activity_agg.table = "user_activity_agg"
  user_enrolments.table = "user_enrolments"
}

redis {
  database {
    # Redis DB index used for the collection relation cache.
    relationCache.id = 10
  }
}

# Redis instance used for input-event de-duplication.
dedup-redis {
  host = localhost
  port = 6379
  database.index = 3
  database.expiry = 604800
}

# Batch thresholds for reading/writing aggregates.
threshold.batch.read.interval = 60 // In sec
threshold.batch.read.size = 1000
threshold.batch.write.size = 10

activity {
  module.aggs.enabled = true
  input.dedup.enabled = true
  filter.processed.enrolments = true
  # Expiry (seconds) for the cached collection status.
  collection.status.cache.expiry = 3600
}

# Downstream service endpoints.
service {
  search {
    basePath = "http://localhost:9000"
  }
}
--------------------------------------------------------------------------------
/video-stream-generator/src/main/scala/org/sunbird/job/videostream/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.videostream.domain
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
6 | import java.util
7 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  // Job name for this event type. NOTE(review): currently unused in this class.
  private val jobName = "VideoStreamGenerator"

  /** Requested action, e.g. "post-publish-process". */
  def action: String = readOrDefault[String]("edata.action", "")

  /** MIME type of the artifact; only mp4/webm are streamable (see isValid). */
  def mimeType: String = readOrDefault[String]("edata.mimeType", "")

  /** Originating channel from the event context. */
  def channel: String = readOrDefault[String]("context.channel", "")

  /** Event id, e.g. "BE_JOB_REQUEST". */
  def eid: String = readOrDefault[String]("eid", "")

  /** URL of the media file to generate streams for. */
  def artifactUrl: String = readOrDefault[String]("edata.artifactUrl", "")

  /** Content identifier the stream belongs to. */
  def identifier: String = readOrDefault[String]("edata.identifier", "")

  /**
   * Full edata payload.
   *
   * Fix: the default value used to be a `new java.util.HashMap` that was then
   * cast with `asInstanceOf[Map[String, AnyRef]]`, which throws
   * ClassCastException whenever the "edata" key is absent (a java HashMap is
   * not a scala Map). Use an immutable Scala Map default instead, matching the
   * pattern used by the other jobs' Event classes (e.g. mvc-indexer).
   */
  def eData: Map[String, AnyRef] = readOrDefault[Map[String, AnyRef]]("edata", Map[String, AnyRef]())

  /**
   * True when the artifact URL, identifier and channel are present and the
   * mimeType is one of the streamable video formats (mp4/webm).
   */
  def isValid: Boolean = {
    StringUtils.isNotBlank(artifactUrl) &&
      StringUtils.isNotBlank(mimeType) &&
      (
        StringUtils.equalsIgnoreCase(mimeType, "video/mp4") ||
          StringUtils.equalsIgnoreCase(mimeType, "video/webm")
      ) &&
      StringUtils.isNotBlank(identifier) &&
      StringUtils.isNotBlank(channel)
  }

}
38 |
--------------------------------------------------------------------------------
/asset-enrichment/src/main/scala/org/sunbird/job/assetenricment/util/ImageResizerUtil.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.assetenricment.util
2 |
3 | import org.im4java.core.{ConvertCmd, IMOperation}
4 |
5 | import java.io.File
6 |
class ImageResizerUtil {

  /**
   * Resizes (and optionally resamples) the image via ImageMagick's convert
   * command and returns the resulting file. When the suffix is "out" the
   * original file is replaced in place.
   *
   * Fix: the output name was previously built with `replaceAll("\\.", ...)`
   * over the whole absolute path, which inserted the suffix at EVERY dot —
   * corrupting paths with dots in directory names or multi-dot file names.
   * The suffix is now inserted before the final extension only.
   *
   * @param file                 source image file
   * @param targetResolution     resample DPI; ignored when <= 0
   * @param width                target width
   * @param height               target height
   * @param outputFileNameSuffix suffix inserted before the extension
   */
  def process(file: File, targetResolution: Double, width: Int, height: Int, outputFileNameSuffix: String): File = {
    val inputFileName = file.getAbsolutePath
    val outputFileName = insertSuffix(inputFileName, outputFileNameSuffix)
    // build the ImageMagick operation: read, resize, optionally resample, write
    val cmd = new ConvertCmd
    val op = new IMOperation
    op.addImage(inputFileName)
    op.resize(width, height)
    if (targetResolution.toInt > 0) op.resample(targetResolution.toInt)
    op.addImage(outputFileName)
    cmd.run(op)
    // "out" means overwrite the original; otherwise keep the suffixed copy
    if (outputFileNameSuffix.equalsIgnoreCase("out")) replace(file, new File(outputFileName)) else new File(outputFileName)
  }

  // Inserts the suffix before the last extension: "/a/b/img.png" + "out" -> "/a/b/img.out.png".
  private def insertSuffix(path: String, suffix: String): String = {
    val dotIndex = path.lastIndexOf('.')
    if (dotIndex > -1) s"${path.substring(0, dotIndex)}.$suffix${path.substring(dotIndex)}"
    else s"$path.$suffix"
  }

  /**
   * Replaces `input` with `output` by deleting `input` and renaming `output`
   * to its path.
   *
   * Fix: previously returned the stale `output` handle, whose path no longer
   * exists after the rename; now returns a File pointing at the final location.
   */
  def replace(input: File, output: File): File = {
    val targetPath = input.getAbsolutePath
    input.delete()
    val target = new File(targetPath)
    output.renameTo(target)
    target
  }

  /** Resizing applies only to image assets. */
  def isApplicable(fileType: String): Boolean = fileType.equalsIgnoreCase("image")

}
39 |
--------------------------------------------------------------------------------
/asset-enrichment/src/main/scala/org/sunbird/job/assetenricment/util/ThumbnailUtil.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.assetenricment.util
2 |
3 | import org.imgscalr.Scalr
4 | import org.slf4j.LoggerFactory
5 | import org.sunbird.job.util.FileUtils
6 |
7 | import java.awt.image.BufferedImage
8 | import java.io.File
9 | import javax.imageio.ImageIO
10 |
trait ThumbnailUtil {

  private[this] val logger = LoggerFactory.getLogger(classOf[ThumbnailUtil])

  /**
   * Generates a PNG thumbnail (largest dimension = thumbnailSize) next to the
   * input file, named "<name>.thumb.<ext>".
   *
   * Fix: ImageIO.read returns null for unreadable/unsupported content, which
   * previously surfaced as a NullPointerException swallowed by the generic
   * catch; it is now guarded explicitly.
   *
   * @return Some(thumbnail file), or None when the input is null, already
   *         small enough, unreadable, or any error occurs.
   */
  def generateOutFile(inFile: File, thumbnailSize: Int): Option[File] = {
    if (inFile != null) {
      try {
        val srcImage = ImageIO.read(inFile)
        if (srcImage != null && ((srcImage.getHeight > thumbnailSize) || (srcImage.getWidth > thumbnailSize))) {
          val scaledImage: BufferedImage = Scalr.resize(srcImage, thumbnailSize)
          val thumbFile = getThumbnailFileName(inFile)
          val outFile = FileUtils.createFile(thumbFile)
          ImageIO.write(scaledImage, "png", outFile)
          Some(outFile)
        } else None
      } catch {
        case ex: Exception =>
          logger.error("Please Provide Valid File Url!", ex)
          None
      }
    } else None
  }

  /**
   * Derives the thumbnail path by inserting ".thumb" before the extension.
   *
   * Fix: previously used `replaceAll("\\.", "\\.thumb\\.")`, which inserted
   * ".thumb" at every dot and mangled multi-dot file names
   * ("a.b.png" -> "a.thumb.b.thumb.png"); now only the last extension dot is used.
   */
  def getThumbnailFileName(input: File): String = {
    val name = input.getName
    val dotIndex = name.lastIndexOf('.')
    val outputFileName =
      if (dotIndex > -1) s"${name.substring(0, dotIndex)}.thumb${name.substring(dotIndex)}"
      else s"$name.thumb"
    val outputFolder = input.getParent
    s"$outputFolder/$outputFileName"
  }

}
41 |
--------------------------------------------------------------------------------
/auto-creator-v2/src/main/scala/org/sunbird/job/autocreatorv2/functions/LinkCollectionFunction.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.autocreatorv2.functions
2 |
3 | import org.apache.flink.configuration.Configuration
4 | import org.apache.flink.streaming.api.functions.ProcessFunction
5 | import org.slf4j.LoggerFactory
6 | import org.sunbird.job.autocreatorv2.helpers.CollectionUpdater
7 | import org.sunbird.job.{BaseProcessFunction, Metrics}
8 | import org.sunbird.job.autocreatorv2.model.ObjectParent
9 | import org.sunbird.job.task.AutoCreatorV2Config
10 | import org.sunbird.job.util.HttpUtil
11 |
class LinkCollectionFunction(config: AutoCreatorV2Config, httpUtil: HttpUtil)
  extends BaseProcessFunction[ObjectParent, String](config) with CollectionUpdater {

  private[this] lazy val logger = LoggerFactory.getLogger(classOf[LinkCollectionFunction])

  override def open(parameters: Configuration): Unit = {
    super.open(parameters)
  }

  override def close(): Unit = {
    super.close()
  }

  // No custom metrics are reported by this function.
  override def metricsList(): List[String] = {
    List()
  }

  /**
   * Links the incoming object to its parent collections via
   * CollectionUpdater.linkCollection, after a fixed 5-second delay.
   */
  override def processElement(event: ObjectParent, context: ProcessFunction[ObjectParent, String]#Context, metrics: Metrics): Unit = {
    logger.info(s"""Waiting 5sec before adding to hierarchy for ${event.identifier}""")
    // NOTE(review): Thread.sleep blocks the Flink task thread for every event;
    // acceptable only at low throughput — consider a timer-based delay instead.
    Thread.sleep(5000)
    linkCollection(event.identifier, event.parents)(config, httpUtil)
  }
}
35 |
--------------------------------------------------------------------------------
/mvc-indexer/src/main/scala/org/sunbird/job/mvcindexer/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.mvcindexer.domain
2 |
3 | import org.apache.commons.lang3.BooleanUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  // Job name for this event type. NOTE(review): currently unused in this class.
  private val jobName = "MVCIndexer"

  // Raw "index" flag from the event; null when absent (treated as true by isValid).
  def index: AnyRef = readOrDefault("index", null)

  // Identifier of the object this event refers to.
  def identifier: String = readOrDefault("object.id", "")

  // Read once at construction. NOTE(review): declared var — presumably
  // reassigned by a downstream processor; confirm before converting to val.
  var eventData: Map[String, AnyRef] = readOrDefault[Map[String, AnyRef]]("eventData", Map[String, AnyRef]())

  def action: String = readOrDefault("eventData.action", "")

  // ML-extracted content text; null when not provided.
  def mlContentText: String = readOrDefault("eventData.ml_contentText", null)

  // ML-extracted keywords; null when not provided.
  def mlKeywords: List[String] = readOrDefault("eventData.ml_Keywords", null)

  def metadata: Map[String, AnyRef] = readOrDefault[Map[String, AnyRef]]("eventData.metadata", Map[String, AnyRef]())

  // First vector of the nested ml_contentTextVector payload, or null when absent.
  def mlContentTextVector: List[Double] = {
    val mlContentTextVectorList = readOrDefault[List[List[Double]]]("eventData.ml_contentTextVector", null)

    if (mlContentTextVectorList != null) mlContentTextVectorList.head else null
  }

  // Valid when the index flag is true (or absent), the event carries no legacy
  // "edata" section, and eventData is non-empty.
  def isValid: Boolean = {
    BooleanUtils.toBoolean(if (null == index) "true" else index.toString) && !eventMap.containsKey("edata") && eventData.nonEmpty
  }

}
35 |
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/main/resources/content-publish.conf:
--------------------------------------------------------------------------------
include "base-config.conf"

# Deployment environment name.
job {
  env = "sunbirddev"
}

# Kafka topics for the content-publish job.
kafka {
  input.topic = "sunbirddev.publish.job.request"
  post_publish.topic = "sunbirddev.content.postpublish.request"
  error.topic = "sunbirddev.learning.events.failed"
  groupId = "local-content-publish-group"
}

task {
  consumer.parallelism = 1
  parallelism = 1
  router.parallelism = 1
}

redis {
  database {
    # Redis DB index for the content cache.
    contentCache.id = 0
  }
}

# Content publish behaviour: bundle/ECAR locations, storage tables and the
# object/mime types the job handles.
content {
  bundleLocation = "/tmp/contentBundle"
  isECARExtractionEnabled = true
  retry_asset_download_count = 1
  keyspace = "content_store"
  table = "content_data"
  tmp_file_location = "/tmp"
  objectType = ["Content", "ContentImage"]
  mimeType = ["application/pdf", "video/avi", "video/mpeg", "video/quicktime", "video/3gpp", "video/mpeg", "video/mp4", "video/ogg", "video/webm", "application/vnd.ekstep.html-archive","application/vnd.ekstep.ecml-archive"]
  asset_download_duration = "60 seconds"

  stream {
    # Mime types for which streaming is triggered.
    enabled = true
    mimeType = ["video/mp4", "video/webm"]
  }
  # Artifact size threshold for online availability (bytes, presumably — confirm).
  artifact.size.for_online=209715200
}

# Cloud storage folder names for published content and artifacts.
cloud_storage {
  folder {
    content = "content"
    artifact = "artifact"
  }
}

service {
  # Print service base path.
  print.basePath = "http://11.2.6.6/print"
}
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/serde/StringSerde.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.serde
2 |
3 | import java.nio.charset.StandardCharsets
4 |
5 | import org.apache.flink.api.common.typeinfo.TypeInformation
6 | import org.apache.flink.api.java.typeutils.TypeExtractor
7 | import org.apache.flink.streaming.connectors.kafka.{KafkaDeserializationSchema, KafkaSerializationSchema}
8 | import org.apache.kafka.clients.consumer.ConsumerRecord
9 | import org.apache.kafka.clients.producer.ProducerRecord
10 |
class StringDeserializationSchema extends KafkaDeserializationSchema[String] {

  /** A Kafka stream never signals end-of-stream. */
  override def isEndOfStream(nextElement: String): Boolean = false

  /** Decodes the record's value bytes as a UTF-8 string. */
  override def deserialize(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]]): String =
    new String(consumerRecord.value(), StandardCharsets.UTF_8)

  /** Element type produced by this schema, for Flink's type system. */
  override def getProducedType: TypeInformation[String] = TypeExtractor.getForClass(classOf[String])
}
21 |
class StringSerializationSchema(topic: String, key: Option[String] = None) extends KafkaSerializationSchema[String] {

  /** Builds a ProducerRecord for `topic`; keyed when an optional key was supplied. */
  override def serialize(element: String, timestamp: java.lang.Long): ProducerRecord[Array[Byte], Array[Byte]] = {
    val valueBytes = element.getBytes(StandardCharsets.UTF_8)
    key match {
      case Some(kafkaKey) => new ProducerRecord[Array[Byte], Array[Byte]](topic, kafkaKey.getBytes(StandardCharsets.UTF_8), valueBytes)
      case None => new ProducerRecord[Array[Byte], Array[Byte]](topic, valueBytes)
    }
  }
}
30 |
31 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
2 |
3 | ### Type of change
4 |
5 | Please choose appropriate options.
6 |
7 | - [ ] Bug fix (non-breaking change which fixes an issue)
8 | - [ ] New feature (non-breaking change which adds functionality)
9 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
10 | - [ ] This change requires a documentation update
11 |
12 | ### How Has This Been Tested?
13 |
Please describe the tests that you ran to verify your changes in the checkboxes below. Provide instructions so we can reproduce them, and list any relevant details of your test configuration.
15 |
16 | - [ ] Ran Test A
17 | - [ ] Ran Test B
18 |
19 | **Test Configuration**:
20 | * Software versions:
21 | * Hardware versions:
22 |
23 | ### Checklist:
24 |
25 | - [ ] My code follows the style guidelines of this project
26 | - [ ] I have performed a self-review of my own code
27 | - [ ] I have commented my code, particularly in hard-to-understand areas
28 | - [ ] I have made corresponding changes to the documentation
29 | - [ ] My changes generate no new warnings
30 | - [ ] I have added tests that prove my fix is effective or that my feature works
31 | - [ ] New and existing unit tests pass locally with my changes
32 | - [ ] Any dependent changes have been merged and published in downstream modules
--------------------------------------------------------------------------------
/video-stream-generator/src/test/scala/org/sunbird/job/fixture/EventFixture.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.fixture
2 |
object EventFixture {

  // Publish request with streamable mimeType "video/mp4" and all mandatory
  // fields (artifactUrl, identifier, channel) populated.
  val EVENT_1: String =
    """
      |{"eid":"BE_JOB_REQUEST","ets":1598956686981,"mid":"LP.1598956686981.a260af12-cd9b-4ffd-a525-1d944df47c61","actor":{"id":"Post Publish Processor","type":"System"},"context":{"pdata":{"ver":"1.0","id":"org.ekstep.platform"},"channel":"01254290140407398431","env":"sunbirddev"},"object":{"ver":"1587632475439","id":"do_3126597193576939521910"},"edata":{"action":"post-publish-process","iteration":1,"identifier":"do_3126597193576939521910","artifactUrl":"https://sunbirddev.blob.core.windows.net/sunbird-content-dev/content/assets/do_1126980548391075841140/ariel-view-of-earth.mp4","mimeType":"video/mp4","contentType":"Resource","pkgVersion":1,"status":"Live"}}
      |""".stripMargin

  // Same shape but with unsupported mimeType "video" (not mp4/webm) —
  // presumably exercises the invalid-event path; confirm against the spec using it.
  val EVENT_2: String =
    """
      |{"eid":"BE_JOB_REQUEST","ets":1598956686981,"mid":"LP.1598956686981.a260af12-cd9b-4ffd-a525-1d944df47c61","actor":{"id":"Post Publish Processor","type":"System"},"context":{"pdata":{"ver":"1.0","id":"org.ekstep.platform"},"channel":"01254290140407398431","env":"sunbirddev"},"object":{"ver":"1587632475439","id":"do_3126597193576939521910"},"edata":{"action":"post-publish-process","iteration":1,"identifier":"do_3126597193576939521910","artifactUrl":"https://sunbirded.com/test.mp4","mimeType":"video","contentType":"Resource","pkgVersion":1,"status":"Live"}}
      |""".stripMargin
}
--------------------------------------------------------------------------------
/activity-aggregate-updater/src/test/scala/org/sunbird/job/spec/BaseActivityAggregateTestSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.spec
2 |
3 | import java.util
4 |
5 | import org.apache.flink.streaming.api.functions.sink.SinkFunction
6 |
7 |
// In-memory sinks used by the specs to capture the job's output streams; each
// sink appends into a static list that test assertions read after the
// pipeline run completes.

class AuditEventSink extends SinkFunction[String] {

  // synchronized guards the shared static list against concurrent writes.
  override def invoke(value: String): Unit = {
    synchronized {
      AuditEventSink.values.add(value)
    }
  }
}

object AuditEventSink {
  // Captured audit events.
  val values: util.List[String] = new util.ArrayList()
}

class FailedEventSink extends SinkFunction[String] {

  override def invoke(value: String): Unit = {
    synchronized {
      FailedEventSink.values.add(value)
    }
  }
}

object FailedEventSink {
  // Captured failed events.
  val values: util.List[String] = new util.ArrayList()
}

// NOTE(review): class is named SuccessEvent but writes to SuccessEventSink —
// consider renaming to SuccessEventSink for consistency with the other sinks.
class SuccessEvent extends SinkFunction[String] {

  override def invoke(value: String): Unit = {
    synchronized {
      SuccessEventSink.values.add(value)
    }
  }
}

object SuccessEventSink {
  // Captured success events.
  val values: util.List[String] = new util.ArrayList()
}


class CertificateIssuedEventsSink extends SinkFunction[String] {

  override def invoke(value: String): Unit = {
    synchronized {
      CertificateIssuedEvents.values.add(value)
    }
  }
}

object CertificateIssuedEvents {
  // Captured certificate-issue events.
  val values: util.List[String] = new util.ArrayList()
}
60 |
--------------------------------------------------------------------------------
/enrolment-reconciliation/src/test/scala/org/sunbird/job/spec/BaseActivityAggregateTestSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.spec
2 |
3 | import org.apache.flink.streaming.api.functions.sink.SinkFunction
4 |
5 | import java.util
6 |
7 |
8 | class AuditEventSink extends SinkFunction[String] {
9 |
10 | override def invoke(value: String): Unit = {
11 | synchronized {
12 | AuditEventSink.values.add(value)
13 | }
14 | }
15 | }
16 |
17 | object AuditEventSink {
18 | val values: util.List[String] = new util.ArrayList()
19 | }
20 |
21 | class FailedEventSink extends SinkFunction[String] {
22 |
23 | override def invoke(value: String): Unit = {
24 | synchronized {
25 | FailedEventSink.values.add(value)
26 | }
27 | }
28 | }
29 |
30 | object FailedEventSink {
31 | val values: util.List[String] = new util.ArrayList()
32 | }
33 |
class SuccessEvent extends SinkFunction[String] {

  /**
    * Captures a success event into the shared [[SuccessEventSink.values]] buffer.
    * Locks on the shared list rather than on `this`, so concurrent sink
    * instances cannot corrupt the one static buffer.
    */
  override def invoke(value: String): Unit = {
    SuccessEventSink.values.synchronized {
      SuccessEventSink.values.add(value)
    }
  }
}

// Test-scoped accumulator holding every value emitted to the success-event sink.
object SuccessEventSink {
  val values: util.List[String] = new util.ArrayList()
}
46 |
47 |
class CertificateIssuedEventsSink extends SinkFunction[String] {

  /**
    * Captures a certificate-issued event into the shared
    * [[CertificateIssuedEvents.values]] buffer. Locks on the shared list rather
    * than on `this`, so concurrent sink instances cannot corrupt it.
    */
  override def invoke(value: String): Unit = {
    CertificateIssuedEvents.values.synchronized {
      CertificateIssuedEvents.values.add(value)
    }
  }
}

// Test-scoped accumulator holding every certificate-issued event emitted.
object CertificateIssuedEvents {
  val values: util.List[String] = new util.ArrayList()
}
60 |
--------------------------------------------------------------------------------
/jobs-core/src/main/resources/base-config.conf:
--------------------------------------------------------------------------------
1 | kafka {
2 | broker-servers = "localhost:9092"
3 | zookeeper = "localhost:2181"
4 | }
5 |
6 | job {
7 | enable.distributed.checkpointing = false
8 | statebackend {
9 | blob {
10 | storage {
11 | account = "blob.storage.account"
12 | container = "kp-checkpoints"
13 | checkpointing.dir = "flink-jobs"
14 | }
15 | }
16 | base.url = "wasbs://"${job.statebackend.blob.storage.container}"@"${job.statebackend.blob.storage.account}"/"${job.statebackend.blob.storage.checkpointing.dir}
17 | }
18 | }
19 |
20 | task {
21 | checkpointing.compressed = true
22 | checkpointing.pause.between.seconds = 30000
23 | parallelism = 1
24 | checkpointing.interval = 60000
25 | restart-strategy.attempts = 3
26 | restart-strategy.delay = 30000 # in milli-seconds
27 | }
28 |
29 | redis {
30 | host = localhost
31 | port = 6379
32 | connection {
33 | max = 2
34 | idle.min = 1
35 | idle.max = 2
36 | minEvictableIdleTimeSeconds = 120
37 | timeBetweenEvictionRunsSeconds = 300
38 | }
39 | }
40 | lms-cassandra {
41 | host = "localhost"
42 | port = "9042"
43 | }
44 |
45 | neo4j {
46 | routePath = "bolt://localhost:7687"
47 | graph = "domain"
48 | }
49 |
50 | es {
51 | basePath = "localhost:9200"
52 | }
53 |
54 | schema {
55 | basePath = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/schemas/local"
56 | supportedVersion = {"itemset": "2.0"}
57 | }
58 |
59 | media_download_duration = "300 seconds"
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/dedup/DeDupEngine.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.dedup
2 |
import org.slf4j.LoggerFactory
import org.sunbird.job.BaseJobConfig
import org.sunbird.job.cache.{DataCache, RedisConnect}
import redis.clients.jedis.Jedis
import redis.clients.jedis.exceptions.JedisException
7 |
/**
  * Redis-backed de-duplication: events are considered duplicates when their
  * checksum is already present in the given Redis store (database index).
  * Entries expire after `expirySeconds`.
  */
class DeDupEngine(val config: BaseJobConfig, val redisConnect: RedisConnect, val store: Int, val expirySeconds: Int) {

  // Fixed: logger was previously tagged with classOf[DataCache], misattributing
  // every log line from this class.
  private[this] val logger = LoggerFactory.getLogger(classOf[DeDupEngine])
  private var redisConnection: Jedis = _

  /** Opens the Redis connection on the configured store. */
  def init(): Unit = {
    this.redisConnection = redisConnect.getConnection(store)
  }

  /** Closes the underlying Redis connection. */
  def close(): Unit = {
    this.redisConnection.close()
  }

  /**
    * True when no entry exists for the checksum. On connection failure the
    * connection is re-established once (10s back-off) and the check retried.
    */
  def isUniqueEvent(checksum: String): Boolean = {
    try !redisConnection.exists(checksum)
    catch {
      case ex: JedisException =>
        logger.error("Redis connection failed while checking checksum; reconnecting", ex)
        this.redisConnection.close()
        this.redisConnection = redisConnect.getConnection(store, 10000)
        !redisConnection.exists(checksum)
    }
  }

  /**
    * Records the checksum with the configured TTL so subsequent duplicates are
    * detected. Retries once on connection failure. getConnection(store, ...)
    * already selects the store, so the previous extra select(store) was removed.
    */
  def storeChecksum(checksum: String): Unit = {
    try redisConnection.setex(checksum, expirySeconds, "")
    catch {
      case ex: JedisException =>
        logger.error("Redis connection failed while storing checksum; reconnecting", ex)
        this.redisConnection.close()
        this.redisConnection = redisConnect.getConnection(store, 10000)
        redisConnection.setex(checksum, expirySeconds, "")
    }
  }

}
45 |
--------------------------------------------------------------------------------
/kubernets/job-cluster/job-cluster-jobmanager.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: batch/v1
2 | kind: Job
3 | metadata:
4 | name: ${JOB_NAME}-jobmanager
5 | spec:
6 | template:
7 | metadata:
8 | labels:
9 | app: flink
10 | component: ${JOB_NAME}-jobmanager
11 | spec:
12 | restartPolicy: OnFailure
13 | containers:
14 | - name: ${JOB_NAME}-jobmanager
15 | image: ${SUNBIRD_DATAPIPELINE_IMAGE}
16 | imagePullPolicy: Always
17 | workingDir: /opt/flink
18 | command: ["/opt/flink/bin/standalone-job.sh"]
19 | args: ["start-foreground",
20 | "--job-classname=${JOB_CLASSNAME}",
21 | "-Dfs.azure.account.key.${AZURE_STORAGE_ACCOUNT}.blob.core.windows.net=${AZURE_STORAGE_SECRET}",
22 | "-Dweb.submit.enable=false",
23 | "-Dmetrics.reporter.prom.class=org.apache.flink.metrics.prometheus.PrometheusReporter",
24 | "-Dmetrics.reporter.prom.port=9250",
25 | "-Djobmanager.rpc.address=${JOB_NAME}-jobmanager",
26 | "-Djobmanager.rpc.port=6123",
27 | "-Dparallelism.default=1",
28 | "-Dblob.server.port=6124",
29 | "-Dqueryable-state.server.ports=6125",
30 | "-Djobmanager.heap.size=1024m"]
31 | ports:
32 | - containerPort: 6123
33 | name: rpc
34 | - containerPort: 6124
35 | name: blob
36 | - containerPort: 6125
37 | name: query
38 | - containerPort: 8081
39 | name: ui
40 |
--------------------------------------------------------------------------------
/mvc-indexer/src/test/resources/test.cql:
--------------------------------------------------------------------------------
1 | CREATE KEYSPACE IF NOT EXISTS local_content_store WITH replication = {
2 | 'class': 'SimpleStrategy',
3 | 'replication_factor': '1'
4 | };
5 |
6 | CREATE TABLE IF NOT EXISTS local_content_store.content_data (
7 | content_id text PRIMARY KEY,
8 | body blob,
9 | externallink text,
10 | last_updated_on timestamp,
11 |   level1_concept list<text>,
12 |   level1_name list<text>,
13 |   level2_concept list<text>,
14 |   level2_name list<text>,
15 |   level3_concept list<text>,
16 |   level3_name list<text>,
17 |   ml_content_text text,
18 |   ml_content_text_vector frozen<list<double>>,
19 |   ml_keywords list<text>,
20 |   oldbody blob,
21 |   screenshots blob,
22 |   source text,
23 |   sourceurl text,
24 |   stageicons blob,
25 |   textbook_name list<text>
26 | ) WITH bloom_filter_fp_chance = 0.01
27 | AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'}
28 | AND comment = ''
29 | AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'}
30 | AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}
31 | AND crc_check_chance = 1.0
32 | AND dclocal_read_repair_chance = 0.1
33 | AND default_time_to_live = 0
34 | AND gc_grace_seconds = 864000
35 | AND max_index_interval = 2048
36 | AND memtable_flush_period_in_ms = 0
37 | AND min_index_interval = 128
38 | AND read_repair_chance = 0.0
39 | AND speculative_retry = '99PERCENTILE';
--------------------------------------------------------------------------------
/publish-pipeline/questionset-publish/src/main/scala/org/sunbird/job/questionset/publish/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.questionset.publish.domain
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
6 | import java.util
7 | import scala.collection.JavaConverters._
8 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  private val jobName = "questionset-publish"

  // Object/mime types this publish job accepts.
  private val objectTypes = List("Question", "QuestionImage", "QuestionSet", "QuestionSetImage")
  private val mimeTypes = List("application/vnd.sunbird.question", "application/vnd.sunbird.questionset")

  /** Full `edata` payload as an immutable Scala map. */
  def eData: Map[String, AnyRef] = readOrDefault("edata", new util.HashMap[String, AnyRef]()).asScala.toMap

  def action: String = readOrDefault[String]("edata.action", "")

  def publishType: String = readOrDefault[String]("edata.publish_type", "")

  def mimeType: String = readOrDefault[String]("edata.metadata.mimeType", "")

  def objectId: String = readOrDefault[String]("edata.metadata.identifier", "")

  def objectType: String = readOrDefault[String]("edata.metadata.objectType", "")

  /**
    * Package version as a Double. Read as Number (consistent with the
    * content-publish Event): events carry pkgVersion as either an integral or a
    * decimal value (e.g. 7.0), and the previous readOrDefault[Int] would throw
    * ClassCastException on decimal payloads.
    */
  def pkgVersion: Double = {
    val pkgVersion: Number = readOrDefault[Number]("edata.metadata.pkgVersion", 0)
    pkgVersion.doubleValue()
  }

  /** Processable: action "publish", non-blank id, and a supported object/mime type. */
  def validEvent(): Boolean = {
    (StringUtils.equals("publish", action) && StringUtils.isNotBlank(objectId)) && (objectTypes.contains(objectType) && mimeTypes.contains(mimeType))
  }
}
37 |
--------------------------------------------------------------------------------
/publish-pipeline/content-publish/src/main/scala/org/sunbird/job/content/publish/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.content.publish.domain
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.sunbird.job.content.task.ContentPublishConfig
5 | import org.sunbird.job.domain.reader.JobRequest
6 |
7 | import java.util
8 | import scala.collection.JavaConverters._
9 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  val jobName = "content-publish"

  /** Full `edata` payload as an immutable Scala map. */
  def eData: Map[String, AnyRef] = readOrDefault("edata", new util.HashMap[String, AnyRef]()).asScala.toMap

  /** Requested action, e.g. "publish". */
  def action: String = readOrDefault[String]("edata.action", "")

  /** Mime type of the object being published. */
  def mimeType: String = readOrDefault[String]("edata.metadata.mimeType", "")

  /** Identifier of the content object. */
  def identifier: String = readOrDefault[String]("edata.metadata.identifier", "")

  /** Object type of the content, e.g. "Content". */
  def objectType: String = readOrDefault[String]("edata.metadata.objectType", "")

  /** Requested publish type. */
  def publishType: String = readOrDefault[String]("edata.publish_type", "")

  /** User who last published this object. */
  def lastPublishedBy: String = readOrDefault[String]("edata.metadata.lastPublishedBy", "")

  /** Package version; read as Number since events carry it as Int or Double. */
  def pkgVersion: Double = readOrDefault[Number]("edata.metadata.pkgVersion", 0).doubleValue()

  /** True for a publish request with a non-blank id and a supported object/mime type. */
  def validEvent(config: ContentPublishConfig): Boolean =
    StringUtils.equals("publish", action) &&
      StringUtils.isNotBlank(identifier) &&
      config.supportedObjectType.contains(objectType) &&
      config.supportedMimeType.contains(mimeType)
}
37 |
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/cache/RedisConnect.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.cache
2 |
3 | import com.typesafe.config.Config
4 | import org.slf4j.LoggerFactory
5 | import org.sunbird.job.BaseJobConfig
6 | import redis.clients.jedis.Jedis
7 |
/**
  * Factory for Jedis connections. Host/port come from the explicit constructor
  * arguments when given, otherwise from `redis.host` / `redis.port` config.
  */
class RedisConnect(jobConfig: BaseJobConfig, host: Option[String] = None, port: Option[Int] = None) extends java.io.Serializable {

  private val serialVersionUID = -396824011996012513L

  val config: Config = jobConfig.config
  // Explicit host/port win over config; local defaults as last resort.
  // NOTE(review): config.getString throws when the path is absent, so the
  // Option(...) fallback only covers an explicit null — confirm intent.
  val redisHost: String = host.getOrElse(Option(config.getString("redis.host")).getOrElse("localhost"))
  val redisPort: Int = port.getOrElse(Option(config.getInt("redis.port")).getOrElse(6379))
  private val logger = LoggerFactory.getLogger(classOf[RedisConnect])

  /**
    * Opens a fresh connection, optionally sleeping first (used as retry back-off).
    */
  private def getConnection(backoffTimeInMillis: Long): Jedis = {
    val defaultTimeOut = 30000
    if (backoffTimeInMillis > 0) try Thread.sleep(backoffTimeInMillis)
    catch {
      case e: InterruptedException =>
        e.printStackTrace()
    }
    logger.info("Obtaining new Redis connection... : for host :" + redisHost + " and port: " + redisPort)
    new Jedis(redisHost, redisPort, defaultTimeOut)
  }

  /** Connection on database `db`, after an optional back-off sleep. */
  def getConnection(db: Int, backoffTimeInMillis: Long): Jedis = {
    val jedis: Jedis = getConnection(backoffTimeInMillis)
    jedis.select(db)
    jedis
  }

  /**
    * Connection on database `db` with no back-off. The two-arg overload already
    * selects `db`; the previous second select(db) round-trip was redundant and
    * has been removed.
    */
  def getConnection(db: Int): Jedis =
    getConnection(db, backoffTimeInMillis = 0)

  /** Connection on the default database 0. */
  def getConnection: Jedis = getConnection(db = 0)
}
43 |
--------------------------------------------------------------------------------
/auto-creator-v2/src/test/scala/org/sunbird/job/autocreatorv2/spec/service/AutoCreatorFunctionSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.autocreatorv2.spec.service
2 |
3 | import com.typesafe.config.{Config, ConfigFactory}
4 | import org.apache.flink.api.common.typeinfo.TypeInformation
5 | import org.apache.flink.api.java.typeutils.TypeExtractor
6 | import org.mockito.Mockito
7 | import org.sunbird.job.autocreatorv2.fixture.EventFixture
8 | import org.sunbird.job.autocreatorv2.functions.AutoCreatorFunction
9 | import org.sunbird.job.task.AutoCreatorV2Config
10 | import org.sunbird.job.util.{HttpUtil, JSONUtil}
11 | import org.sunbird.spec.BaseTestSpec
12 |
13 | import java.util
14 |
class AutoCreatorFunctionSpec extends BaseTestSpec {
  // Flink type information needed to construct/operate on the function under test.
  implicit val mapTypeInfo: TypeInformation[util.Map[String, AnyRef]] = TypeExtractor.getForClass(classOf[util.Map[String, AnyRef]])
  implicit val stringTypeInfo: TypeInformation[String] = TypeExtractor.getForClass(classOf[String])

  val config: Config = ConfigFactory.load("test.conf")
  // Serializable mock so Flink can ship it across operator boundaries if needed.
  val mockHttpUtil = mock[HttpUtil](Mockito.withSettings().serializable())
  lazy val jobConfig: AutoCreatorV2Config = new AutoCreatorV2Config(config)
  lazy val autoCreatorV2: AutoCreatorFunction = new AutoCreatorFunction(jobConfig, mockHttpUtil)

  override protected def beforeAll(): Unit = {
    super.beforeAll()
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
  }

  // NOTE(review): this test only deserializes the fixture and makes no
  // assertions — it passes vacuously and exercises nothing of
  // AutoCreatorFunction; add assertions on the generated event.
  "AutoCreatorV2Service" should "generate event" in {
    val inputEvent:util.Map[String, Any] = JSONUtil.deserialize[util.Map[String, Any]](EventFixture.EVENT_1)
  }
}
36 |
--------------------------------------------------------------------------------
/video-stream-generator/src/main/resources/video-stream-generator.conf:
--------------------------------------------------------------------------------
1 | include "base-config.conf"
2 |
3 | kafka {
4 | input.topic = "sunbirddev.content.postpublish.request"
5 | groupId = "sunbirddev-video-stream-generator-group"
6 | }
7 |
8 | task {
9 | consumer.parallelism = 1
10 | parallelism = 1
11 | timer.duration = 60
12 | max.retries = 10
13 | }
14 |
15 | lms-cassandra {
16 | keyspace = "sunbirddev_platform_db"
17 | table = "job_request"
18 | }
19 |
20 | service {
21 | content {
22 | basePath = "http://11.2.6.6/content"
23 | }
24 | }
25 |
26 | # Azure Media Service Config
27 | azure {
28 | location = "centralindia"
29 | tenant = "tenant"
30 |   subscription_id = "subscription id"
31 |
32 | login {
33 | endpoint="https://login.microsoftonline.com"
34 | }
35 |
36 | api {
37 | endpoint="https://management.azure.com"
38 | version = "2018-07-01"
39 | }
40 |
41 | account_name = "account name"
42 | resource_group_name = "group name"
43 |
44 | transform {
45 | default = "media_transform_default"
46 | hls = "media_transform_hls"
47 | }
48 |
49 | stream {
50 | base_url = "https://sunbirdspikemedia-inct.streaming.media.azure.net"
51 | endpoint_name = "default"
52 | protocol = "Hls"
53 | policy_name = "Predefined_ClearStreamingOnly"
54 | }
55 |
56 | token {
57 | client_key = "client key"
58 | client_secret = "client secret"
59 | }
60 | }
61 |
62 | azure_tenant="tenant"
63 | azure_subscription_id="subscription id"
64 | azure_account_name="account name"
65 | azure_resource_group_name="group name"
66 | azure_token_client_key="client key"
67 | azure_token_client_secret="client secret"
--------------------------------------------------------------------------------
/relation-cache-updater/src/test/scala/org/sunbird/job/fixture/EventFixture.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.fixture
2 |
// Kafka event fixtures for the relation-cache-updater job specs.
object EventFixture {

  // BE_JOB_REQUEST post-publish event for a Live course collection
  // (identifier do_11305855864948326411234).
  val EVENT_1: String =
    """
      |{"actor":{"id":"Post Publish Processor", "type":"System"}, "eid":"BE_JOB_REQUEST", "edata":{"createdFor":["ORG_001"], "createdBy":"95e4942d-cbe8-477d-aebd-ad8e6de4bfc8", "name":"Untitled Course", "action":"post-publish-process", "iteration":1.0, "identifier":"do_11305855864948326411234", "mimeType":"application/vnd.ekstep.content-collection", "contentType":"Course", "pkgVersion":7.0, "status":"Live"}, "partition":0, "ets":"1.593769627322E12", "context":{"pdata":{"ver":1.0, "id":"org.ekstep.platform"}, "channel":"b00bc992ef25f1a9a8d63291e20efc8d", "env":"sunbirddev"}, "mid":"LP.1593769627322.459a018c-5ec3-4c11-96c1-cd84d3786b85", "object":{"ver":"1593769626118", "id":"do_11305855864948326411234"}}
      |""".stripMargin
  // Same event shape with a functional-test identifier (KP_FT_1594149835504).
  val EVENT_2: String =
    """
      |{"actor":{"id":"Post Publish Processor", "type":"System"}, "eid":"BE_JOB_REQUEST", "edata":{"createdFor":["ORG_001"], "createdBy":"95e4942d-cbe8-477d-aebd-ad8e6de4bfc8", "name":"Untitled Course", "action":"post-publish-process", "iteration":1.0, "identifier":"KP_FT_1594149835504", "mimeType":"application/vnd.ekstep.content-collection", "contentType":"Course", "pkgVersion":7.0, "status":"Live"}, "partition":0, "ets":"1.593769627322E12", "context":{"pdata":{"ver":1.0, "id":"org.ekstep.platform"}, "channel":"b00bc992ef25f1a9a8d63291e20efc8d", "env":"sunbirddev"}, "mid":"LP.1593769627322.459a018c-5ec3-4c11-96c1-cd84d3786b85", "object":{"ver":"1593769626118", "id":"KP_FT_1594149835504"}}
      |""".stripMargin
}
--------------------------------------------------------------------------------
/publish-pipeline/publish-core/src/test/resources/questionSetTemplate.vm:
--------------------------------------------------------------------------------
1 |
65 |
66 |
67 |
68 |
69 |
$questions
70 |
71 |
75 |
76 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/test/scala/org/sunbird/incredible/valuator/IssuedDateValuatorTest.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.valuator
2 |
3 | import java.text.SimpleDateFormat
4 | import java.util.Calendar
5 |
6 | import org.junit.Assert.assertEquals
7 | import org.sunbird.incredible.BaseTestSpec
8 | import org.sunbird.incredible.pojos.exceptions.InvalidDateFormatException
9 | import org.sunbird.incredible.pojos.valuator.IssuedDateValuator
10 |
class IssuedDateValuatorTest extends BaseTestSpec {

  private val issuedDateValuator = new IssuedDateValuator
  // Target format the valuator is expected to normalise into.
  private val simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
  private val cal = Calendar.getInstance

  // Fixed test names: FlatSpec already inserts "should", so the previous
  // bodies read "… should should parse …" in reports.
  "evaluate date in format1" should "parse date in correct format" in {
    val date = issuedDateValuator.convertToDate("2019-01-20")
    cal.setTime(date)
    assertEquals("2019-01-20T00:00:00Z", simpleDateFormat.format(cal.getTime))
  }

  "evaluate date in format2" should "parse date in correct format" in {
    val date = issuedDateValuator.convertToDate("2019-02-12T10:11:11Z")
    cal.setTime(date)
    assertEquals("2019-02-12T10:11:11Z", simpleDateFormat.format(cal.getTime))
  }

  "evaluate date which has null value" should "throw exception" in {
    intercept[InvalidDateFormatException] {
      issuedDateValuator.convertToDate(null)
    }
  }

  "evaluate date for different formats" should "throw exception" in {
    intercept[InvalidDateFormatException] {
      issuedDateValuator.convertToDate("2019-02")
    }
  }

}
49 |
--------------------------------------------------------------------------------
/publish-pipeline/questionset-publish/src/main/resources/questionSetTemplate.vm:
--------------------------------------------------------------------------------
1 |
65 |
66 |
67 |
68 |
69 |
$questions
70 |
71 |
75 |
76 |
--------------------------------------------------------------------------------
/credential-generator/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | org.sunbird
4 | knowledge-platform-jobs
5 | 1.0
6 |
7 | 4.0.0
8 |
9 | credential-generator
10 | pom
11 | credential-generator
12 |
13 |
14 | collection-cert-pre-processor
15 | certificate-processor
16 | collection-certificate-generator
17 |
18 |
19 |
20 |
21 |
22 |
23 | org.apache.maven.plugins
24 | maven-compiler-plugin
25 | 3.8.1
26 |
27 | 11
28 | 11
29 |
30 |
31 |
32 | org.scoverage
33 | scoverage-maven-plugin
34 | ${scoverage.plugin.version}
35 |
36 | ${scala.version}
37 | true
38 | true
39 | org.sunbird.incredible
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/auto-creator-v2/src/main/scala/org/sunbird/job/autocreatorv2/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.autocreatorv2.domain
2 |
3 | import org.apache.commons.lang3.StringUtils
4 | import org.sunbird.job.domain.reader.JobRequest
5 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  private val jobName = "auto-creator-v2"

  // Object types this job is allowed to auto-create.
  private val objectTypes = List("Question", "QuestionSet")

  // NOTE(review): these casts assume readOrDefault yields Scala maps/lists for
  // nested structures — confirm against JobRequest's reader implementation.
  def eData: Map[String, AnyRef] = readOrDefault("edata", Map()).asInstanceOf[Map[String, AnyRef]]

  def metadata: Map[String, AnyRef] = readOrDefault("edata.metadata", Map())

  def collection: List[Map[String, String]] = readOrDefault("edata.collection", List(Map())).asInstanceOf[List[Map[String, String]]]

  def action: String = readOrDefault[String]("edata.action", "")

  def mimeType: String = readOrDefault[String]("edata.metadata.mimeType", "")

  def objectId: String = readOrDefault[String]("edata.metadata.identifier", "")

  def objectType: String = readOrDefault[String]("edata.objectType", "")

  def repository: Option[String] = read[String]("edata.repository")

  def downloadUrl: String = readOrDefault[String]("edata.metadata.downloadUrl", "")

  /**
    * Package version as a Double. Read as Number (consistent with the
    * content-publish Event): events carry pkgVersion as either an integral or a
    * decimal value (e.g. 7.0), and the previous readOrDefault[Int] would throw
    * ClassCastException on decimal payloads.
    */
  def pkgVersion: Double = {
    val pkgVersion: Number = readOrDefault[Number]("edata.metadata.pkgVersion", 0)
    pkgVersion.doubleValue()
  }

  /** Processable: auto-create action, known objectType, repository + metadata present, and a non-blank .ecar downloadUrl. */
  def isValid(): Boolean = {
    (StringUtils.equals("auto-create", action) && StringUtils.isNotBlank(objectId)) && (objectTypes.contains(objectType)
      && repository.nonEmpty && metadata.nonEmpty) && (StringUtils.isNotBlank(downloadUrl) && StringUtils.endsWith(downloadUrl, ".ecar"))
  }
}
--------------------------------------------------------------------------------
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2.0
2 |
3 | jobs:
4 | unit-tests:
5 | docker:
6 | - image: circleci/openjdk:stretch
7 | resource_class: medium
8 | working_directory: ~/kp
9 | steps:
10 | - checkout
11 | - restore_cache:
12 | keys:
13 |           - kp-jobs-dependency-cache-{{ checksum "pom.xml" }} # appends cache key with a hash of pom.xml file
14 |           - kp-jobs-dependency-cache- # fallback in case previous cache key is not found
15 | - run:
16 | name: Installation of imagemagick
17 | command: |
18 | sudo apt-get update || sudo apt-get update
19 | sudo apt-get install -y imagemagick
20 | - run:
21 | name: Execute coverage report
22 | command: |
23 | mvn clean scoverage:report
24 | - run:
25 | name: Save test results
26 | command: |
27 | mkdir -p ~/test-results/junit/
28 | find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp {} ~/test-results/junit/ \;
29 | when: always
30 | - save_cache:
31 | paths:
32 | - ~/.m2
33 | key: kp-jobs-dependency-cache-{{ checksum "pom.xml" }}
34 | - store_test_results:
35 | path: ~/test-results
36 | - run:
37 | name: sonar
38 | command: |
39 | mvn -X sonar:sonar -Dlog4j.configuration=./logs sonar:sonar -Dsonar.projectKey=project-sunbird_knowledge-platform-jobs -Dsonar.organization=project-sunbird -Dsonar.host.url=https://sonarcloud.io -Dsonar.exclusions=**/cert-processor/** -Dsonar.scala.coverage.reportPaths=/home/circleci/kp/target/scoverage.xml
40 |
41 | workflows:
42 | version: 2
43 | build-and-test:
44 | jobs:
45 | - unit-tests
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/pojos/valuator/IssuedDateValuator.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible.pojos.valuator
2 |
3 | import java.text.{ParseException, SimpleDateFormat}
4 | import java.util.{Calendar, Date}
5 |
6 | import org.apache.commons.lang.StringUtils
7 | import org.sunbird.incredible.pojos.exceptions.InvalidDateFormatException
8 |
9 | import scala.util.control.Breaks
10 |
class IssuedDateValuator extends IEvaluator {
  // Accepted input formats, most specific first.
  // NOTE(review): SimpleDateFormat is not thread-safe; these shared instances
  // assume single-threaded use of this valuator — confirm with callers.
  private val dateFormats = List(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"), new SimpleDateFormat("yyyy-MM-dd"))

  /**
    * Normalises an issued-date string into "yyyy-MM-dd'T'HH:mm:ss'Z'".
    * (Removed the redundant inputVal.asInstanceOf[String] — the parameter is
    * already a String.)
    *
    * @throws InvalidDateFormatException if the input is empty or matches no accepted format
    */
  @throws[InvalidDateFormatException]
  override def evaluates(inputVal: String): String = {
    val simpleDateFormat: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
    val cal: Calendar = Calendar.getInstance
    cal.setTime(convertToDate(inputVal))
    simpleDateFormat.format(cal.getTime)
  }

  /**
    * Parses the input against each accepted format in order and returns the
    * first successful parse. A lazy view replaces the previous Breaks-based
    * loop and mutable `date` var, with identical semantics.
    */
  def convertToDate(input: String): Date = {
    if (StringUtils.isEmpty(input)) {
      throw new InvalidDateFormatException("issued date cannot be null")
    }
    val parsed = dateFormats.view.flatMap { format =>
      try {
        format.setLenient(false) // reject impossible dates instead of rolling them over
        Option(format.parse(input))
      } catch {
        case _: ParseException => None
      }
    }.headOption
    parsed.getOrElse(throw new InvalidDateFormatException("issued date is not in valid format"))
  }
}
49 |
--------------------------------------------------------------------------------
/asset-enrichment/src/test/scala/org/sunbird/job/fixture/EventFixture.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.fixture
2 |
// Kafka event fixtures for the asset-enrichment job specs.
object EventFixture {

  // assetenrichment request for an image Asset (do_113233716442578944194).
  val IMAGE_ASSET: String =
    """
      |{"eid":"BE_JOB_REQUEST","ets":1615443994411,"mid":"LP.1615443994411.c94524f2-0d45-4da7-93d8-6d4059f6f999","actor":{"id":"Asset Enrichment Samza Job","type":"System"},"context":{"pdata":{"ver":"1.0","id":"org.sunbird.platform"},"channel":"b00bc992ef25f1a9a8d63291e20efc8d","env":"dev"},"object":{"ver":"1615443994322","id":"do_113233716442578944194"},"edata":{"action":"assetenrichment","iteration":1,"mediaType":"image","status":"Processing","objectType":"Asset"}}
      |""".stripMargin

  // assetenrichment request for an MP4 video Asset (do_1127129845261680641588).
  val VIDEO_MP4_ASSET: String =
    """
      |{"eid":"BE_JOB_REQUEST","ets":1615444029534,"mid":"LP.1615444029534.a4588b0e-50e9-4af7-8a73-2efa8efc68aa","actor":{"id":"Asset Enrichment Samza Job","type":"System"},"context":{"pdata":{"ver":"1.0","id":"org.sunbird.platform"},"channel":"b00bc992ef25f1a9a8d63291e20efc8d","env":"dev"},"object":{"ver":"1551877993917","id":"do_1127129845261680641588"},"edata":{"action":"assetenrichment","iteration":1,"mediaType":"video","status":"Processing","objectType":"Asset"}}
      |""".stripMargin

  // assetenrichment request for a YouTube video Asset (do_1127129845261680641599).
  val VIDEO_YOUTUBE_ASSET: String =
    """
      |{"eid":"BE_JOB_REQUEST","ets":1615444029534,"mid":"LP.1615444029534.a4588b0e-50e9-4af7-8a73-2efa8efc68aa","actor":{"id":"Asset Enrichment Samza Job","type":"System"},"context":{"pdata":{"ver":"1.0","id":"org.sunbird.platform"},"channel":"b00bc992ef25f1a9a8d63291e20efc8d","env":"dev"},"object":{"ver":"1551877993917","id":"do_1127129845261680641599"},"edata":{"action":"assetenrichment","iteration":1,"mediaType":"video","status":"Processing","objectType":"Asset"}}
      |""".stripMargin

}
21 |
--------------------------------------------------------------------------------
/metrics-data-transformer/src/main/scala/org/sunbird/job/metricstransformer/task/MetricsDataTransformerConfig.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.metricstransformer.task
2 |
3 | import com.typesafe.config.Config
4 | import org.sunbird.job.BaseJobConfig
5 | import java.util.{List => JList}
6 |
class MetricsDataTransformerConfig(override val config: Config) extends BaseJobConfig(config, "metrics-data-transformer") {

  // Kafka Topics Configuration
  val kafkaInputTopic: String = config.getString("kafka.input.topic")
  override val kafkaConsumerParallelism: Int = config.getInt("task.consumer.parallelism")
  override val parallelism: Int = config.getInt("task.parallelism")

  // Metric List
  val totalEventsCount = "total-events-count"
  val successEventCount = "success-events-count"
  val failedEventCount = "failed-events-count"
  val skippedEventCount = "skipped-events-count"

  val contentServiceBaseUrl: String = config.getString("service.content.basePath")
  val contentReadApi: String = config.getString("content_read_api")

  val lpURL: String = config.getString("service.sourcing.content.basePath")
  val contentUpdate: String = config.getString("content_update_api")

  val defaultHeaders = Map[String, String]("Content-Type" -> "application/json")
  // Error codes from the sourcing update API that should be tolerated.
  // Bug fix: the previous fallback cast a scala.List to java.util.List via
  // asInstanceOf, which throws ClassCastException at runtime; build a real
  // java.util.List instead.
  val updateAPIErrorCodeList: JList[String] =
    if (config.hasPath("sourcing.update.api.response.error.code")) config.getStringList("sourcing.update.api.response.error.code")
    else java.util.Arrays.asList("404")

  // Consumers
  val eventConsumer = "metrics-data-transformer-consumer"
  val metricsDataTransformerFunction = "metrics-data-transformer-function"

  val metrics: JList[String] = config.getStringList("data.metrics")
}
35 |
--------------------------------------------------------------------------------
/audit-history-indexer/src/main/scala/org/sunbird/job/audithistory/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.audithistory.domain
2 |
import org.sunbird.job.domain.reader.JobRequest

import java.text.{DateFormat, SimpleDateFormat}
import java.util
import java.util.Date

import scala.collection.JavaConverters._
8 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  // NOTE(review): SimpleDateFormat is not thread-safe; this shared instance
  // assumes each Event is confined to a single thread — confirm with the
  // Flink operator model.
  private val df: DateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")

  private val jobName = "AuditHistoryIndexer"

  def id: String = readOrDefault("nodeUniqueId", "")

  def nodeType: String = readOrDefault("nodeType", "")

  def ets: Long = readOrDefault("ets", 0L)

  def userId: String = readOrDefault("userId", "")

  def nodeUniqueId: String = readOrDefault("nodeUniqueId", "")

  def objectType: String = readOrDefault("objectType", "")

  def label: String = readOrDefault("label", "")

  def graphId: String = readOrDefault("graphId", "")

  def requestId: String = readOrDefault("requestId", "")

  /**
    * Transaction payload as an immutable Scala map. Converted with asScala
    * (consistent with the other Event readers): the reader yields java.util.Map
    * for nested objects, and the previous asInstanceOf[Map[...]] cast to a
    * Scala Map would throw ClassCastException at runtime.
    */
  def transactionData: Map[String, AnyRef] = {
    readOrDefault("transactionData", new util.HashMap[String, AnyRef]()).asScala.toMap
  }

  def operationType: String = readOrDefault("operationType", null)

  def syncMessage: String = readOrDefault("syncMessage", null)

  def createdOn: String = readOrDefault("createdOn", "")

  /** Parsed createdOn timestamp, or the current time when absent. */
  def createdOnDate: Date = if (createdOn.isEmpty) new Date else df.parse(createdOn)

  def audit: Boolean = readOrDefault("audit", true)

  /** Indexable: has an operationType, is not a sync message, and the audit flag is on. */
  def isValid: Boolean = {
    operationType != null && null == syncMessage && audit
  }

}
54 |
--------------------------------------------------------------------------------
/mvc-indexer/src/main/scala/org/sunbird/job/mvcindexer/util/ContentUtil.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.mvcindexer.util
2 |
3 | import org.sunbird.job.exception.APIException
4 | import org.sunbird.job.mvcindexer.task.MVCIndexerConfig
5 | import org.sunbird.job.util.{HTTPResponse, HttpUtil, JSONUtil}
6 |
object ContentUtil {

  /**
   * Fetches the content metadata from the content-read API and merges the
   * configured search fields into the event metadata.
   *
   * @param newmap    Content metadata from the event envelope
   * @param identifer Content identifier
   * @param httpUtil  HTTP client instance
   * @param config    Job configuration
   * @return merged metadata map
   */
  @throws[Exception]
  def getContentMetaData(newmap: Map[String, AnyRef], identifer: String, httpUtil: HttpUtil, config: MVCIndexerConfig): Map[String, AnyRef] = {
    try {
      val response: HTTPResponse = httpUtil.get(config.contentReadURL + identifer)
      val parsedBody = JSONUtil.deserialize[Map[String, AnyRef]](response.body)
      val contentMeta = parsedBody("result").asInstanceOf[Map[String, AnyRef]]("content").asInstanceOf[Map[String, AnyRef]]
      filterData(newmap, contentMeta, config)
    } catch {
      case e: Exception =>
        throw new APIException(s"Error in getContentMetaData for $identifer - ${e.getLocalizedMessage}", e)
    }
  }

  /**
   * Copies the configured elastic-search fields from `content` onto a deep
   * copy of `obj`; fields absent (or null) in `content` are left untouched.
   */
  def filterData(obj: Map[String, AnyRef], content: Map[String, AnyRef], config: MVCIndexerConfig): Map[String, AnyRef] = {
    // serialize/deserialize round-trip produces an independent deep copy of obj
    val baseCopy = JSONUtil.deserialize[Map[String, AnyRef]](JSONUtil.serialize(obj))
    config.elasticSearchParamSet.foldLeft(baseCopy) { (acc, param) =>
      content.get(param) match {
        case Some(value) if value != null => acc + (param -> value)
        case _ => acc
      }
    }
  }
}
43 |
--------------------------------------------------------------------------------
/audit-history-indexer/src/main/scala/org/sunbird/job/audithistory/task/AuditHistoryIndexerConfig.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.audithistory.task
2 |
3 | import com.typesafe.config.Config
4 | import org.apache.flink.api.common.typeinfo.TypeInformation
5 | import org.apache.flink.api.java.typeutils.TypeExtractor
6 | import org.sunbird.job.BaseJobConfig
7 |
8 | import java.util
9 |
class AuditHistoryIndexerConfig(override val config: Config) extends BaseJobConfig(config, "audit-history-indexer") {

  private val serialVersionUID = 2905979434303791379L

  // Flink type hints for the event payload and string streams.
  implicit val mapTypeInfo: TypeInformation[util.Map[String, AnyRef]] = TypeExtractor.getForClass(classOf[util.Map[String, AnyRef]])
  implicit val stringTypeInfo: TypeInformation[String] = TypeExtractor.getForClass(classOf[String])

  // Kafka source configuration.
  val kafkaInputTopic: String = config.getString("kafka.input.topic")
  override val kafkaConsumerParallelism: Int = config.getInt("task.consumer.parallelism")
  override val parallelism: Int = config.getInt("task.parallelism")

  // Metric names reported by this job.
  val totalEventsCount: String = "total-events-count"
  val successEventCount: String = "success-events-count"
  val failedEventCount: String = "failed-events-count"
  val esFailedEventCount: String = "elasticsearch-error-events-count"
  val skippedEventCount: String = "skipped-events-count"

  // Operator / consumer identifiers.
  val eventConsumer: String = "audit-history-indexer-consumer"
  val auditHistoryIndexerFunction: String = "audit-history-indexer-function"

  val configVersion: String = "1.0"

  // Elasticsearch connection settings.
  val esConnectionInfo: String = config.getString("es.basePath")

  // Falls back to IST when no explicit timezone is configured.
  val timeZone: String = if (config.hasPath("timezone")) config.getString("timezone") else "IST"
  val auditHistoryIndex: String = "kp_audit_log"
  val operationCreate: String = "CREATE"
  val auditHistoryIndexType: String = "ah"
}
43 |
--------------------------------------------------------------------------------
/asset-enrichment/src/main/scala/org/sunbird/job/assetenricment/domain/Event.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.assetenricment.domain
2 |
3 | import org.sunbird.job.domain.reader.JobRequest
4 |
5 | import java.util
6 |
class Event(eventMap: java.util.Map[String, Any], partition: Int, offset: Long) extends JobRequest(eventMap, partition, offset) {

  val jobName = "AssetEnrichment"

  val data: util.Map[String, Any] = getMap()

  /**
   * `edata` envelope of the event; empty map when absent.
   * Fix: the previous default (`new java.util.HashMap`) was a java.util.Map,
   * so the cast to a Scala Map threw ClassCastException when "edata" was missing.
   */
  def eData: Map[String, AnyRef] = eventMap.getOrDefault("edata", Map.empty[String, AnyRef]).asInstanceOf[Map[String, AnyRef]]

  /** `object` envelope of the event; empty map when absent (same fix as eData). */
  def objectData: Map[String, AnyRef] = eventMap.getOrDefault("object", Map.empty[String, AnyRef]).asInstanceOf[Map[String, AnyRef]]

  def id: String = objectData.getOrElse("id", "").asInstanceOf[String]

  def objectType: String = eData.getOrElse("objectType", "").asInstanceOf[String]

  def mediaType: String = eData.getOrElse("mediaType", "").asInstanceOf[String]

  def status: String = eData.getOrElse("status", "").asInstanceOf[String]

  /**
   * Returns an empty string when the event should be processed, otherwise a
   * human-readable reason for skipping it. Valid cases: first iteration in
   * "processing" status, or a retry (up to maxIterationCount) in "failed" status.
   */
  def validate(maxIterationCount: Int): String = {
    val iteration = eData.getOrElse("iteration", 0).asInstanceOf[Int]
    if (id.isEmpty) s"Invalid ID present in the Event for the Object Data: ${objectData}."
    else if (!objectType.equalsIgnoreCase("asset")) s"Ignoring Event due to ObjectType : ${objectType} for ID : ${id}."
    else if (!mediaType.equalsIgnoreCase("image") && !mediaType.equalsIgnoreCase("video")) s"Ignoring Event due to MediaType: ${mediaType} for ID : ${id}."
    else if (iteration == 1 && status.equalsIgnoreCase("processing")) ""
    else if (iteration > 1 && iteration <= maxIterationCount && status.equalsIgnoreCase("failed")) ""
    else s"Ignoring Event due to Iteration Limit Exceed. Iteration Count : ${iteration} for ID : ${id}."
  }
}
35 |
--------------------------------------------------------------------------------
/video-stream-generator/src/test/resources/test.conf:
--------------------------------------------------------------------------------
# Test configuration for the video-stream-generator job; extends the shared base test config.
include "base-test.conf"

kafka {
  input.topic = "sunbirddev.content.postpublish.request"
  groupId = "sunbirddev-video-stream-generator-group"
}

task {
  consumer.parallelism = 1
  timer.duration = 10
  max.retries = 10
}

# Cassandra keyspace/table used for job-request bookkeeping.
lms-cassandra {
  keyspace = "local_platform_db"
  table = "job_request"
}

service {
  content {
    basePath = "http://dev.sunbirded.org/content"
  }
}

threshold.batch.read.interval = 60 // In sec
threshold.batch.read.size = 1000
threshold.batch.write.size = 4

# Azure Media Services settings (placeholder values for tests).
azure {
  location = "centralindia"
  tenant = "tenant name"
  subscription_id = "subscription id"

  login {
    endpoint="https://login.microsoftonline.com"
  }

  api {
    endpoint="https://management.azure.com"
    version = "2018-07-01"
  }

  account_name = "account name"
  resource_group_name = "Resource Group Name"

  transform {
    default = "media_transform_default"
    hls = "media_transform_hls"
  }

  stream {
    base_url = "https://sunbirdspikemedia-inct.streaming.media.azure.net"
    endpoint_name = "default"
    protocol = "Hls"
    policy_name = "Predefined_ClearStreamingOnly"
  }

  token {
    client_key = "client key"
    client_secret = "client secret"
  }
}

# Flat test keys — NOTE(review): presumably read directly by the job config; confirm usage.
azure_tenant="test_tenant"
azure_subscription_id="test_id"
azure_account_name="test_account_name"
azure_resource_group_name="test_resource_group_name"
azure_token_client_key="test_client_key"
azure_token_client_secret="test_client_secret"
elasticsearch.service.endpoint="test_service_endpoint"
elasticsearch.index.compositesearch.name="test_compositesearch_name"

--------------------------------------------------------------------------------
/asset-enrichment/src/main/scala/org/sunbird/job/assetenricment/helpers/OptimizerHelper.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.assetenricment.helpers
2 |
3 | import org.slf4j.LoggerFactory
4 | import org.sunbird.job.assetenricment.models.Asset
5 | import org.sunbird.job.util.CloudStorageUtil
6 |
7 | import java.io.File
8 |
trait OptimizerHelper {

  private[this] val logger = LoggerFactory.getLogger(classOf[OptimizerHelper])

  /**
   * Moves the asset's artefacts out of the temporary artifact base path in
   * cloud storage and rewrites the URL/key fields on the asset accordingly
   * (artifactUrl, downloadUrl, cloudStorageKey, s3Key).
   *
   * @param asset            asset whose artifact fields are rewritten in place
   * @param cloudStorageUtil storage client used for the prefix copy
   */
  def replaceArtifactUrl(asset: Asset)(cloudStorageUtil: CloudStorageUtil): Unit = {
    logger.info(s"Replacing ArtifactUrl for Asset Id : ${asset.identifier}")
    // Hoisted: this prefix was previously rebuilt at every replace() call site.
    val basePrefix = asset.artifactBasePath + File.separator
    val sourcePath = asset.artifactUrl.substring(asset.artifactUrl.indexOf(asset.artifactBasePath))
    val destinationPath = sourcePath.replace(basePrefix, "")
    try {
      cloudStorageUtil.copyObjectsByPrefix(sourcePath, destinationPath, false)
      logger.info(s"Copying Objects...DONE | Under: ${destinationPath} for identifier : ${asset.identifier}")
      val newArtifactUrl = asset.artifactUrl.replace(sourcePath, destinationPath)
      asset.put("artifactUrl", newArtifactUrl)
      asset.put("downloadUrl", newArtifactUrl)
      stripBasePathFromKey(asset, "cloudStorageKey", basePrefix)
      stripBasePathFromKey(asset, "s3Key", basePrefix)
    } catch {
      case e: Exception =>
        logger.error(s"Error while copying object by prefix for identifier : ${asset.identifier}", e)
        throw e
    }
  }

  /** Removes the artifact base-path prefix from a stored key field, if present and non-empty. */
  private def stripBasePathFromKey(asset: Asset, field: String, basePrefix: String): Unit = {
    val current = asset.get(field, "").asInstanceOf[String]
    if (current.nonEmpty) asset.put(field, current.replace(basePrefix, ""))
  }

}
41 |
--------------------------------------------------------------------------------
/credential-generator/certificate-processor/src/main/scala/org/sunbird/incredible/UrlManager.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.incredible
2 |
3 | import java.net.{MalformedURLException, URL}
4 |
5 | import org.apache.commons.lang3.StringUtils
6 | import org.slf4j.{Logger, LoggerFactory}
7 |
object UrlManager {
  private val logger: Logger = LoggerFactory.getLogger(getClass.getName)

  /**
   * Builds a shareable URL: strips query parameters, reduces the URL to its
   * file/path component and removes the leading storage-container segment.
   */
  def getSharableUrl(url: String, containerName: String): String = {
    // val-chained instead of a reassigned null var
    val withoutQuery = removeQueryParams(url)
    val filePath = fetchFileFromUrl(withoutQuery)
    removeContainerName(filePath, containerName)
  }

  /** Drops everything after the first '?'; blank/null input is returned unchanged. */
  def removeQueryParams(url: String): String = if (StringUtils.isNotBlank(url)) url.split("\\?")(0) else url

  /** Returns the file part of the URL, or "" when the URL cannot be parsed. */
  private def fetchFileFromUrl(url: String): String = try {
    new URL(url).getFile
  } catch {
    case e: Exception =>
      // s-interpolation: the previous String.concat(url) threw NPE when url was null.
      logger.error(s"UrlManager:getUriFromUrl:some error occurred in fetch fileName from Url:$url")
      StringUtils.EMPTY
  }

  /** Removes the "/<containerName>" segment from the given path. */
  private def removeContainerName(url: String, containerName: String): String = {
    val containerNameStr = "/".concat(containerName)
    logger.info("UrlManager:removeContainerName:container string formed:".concat(containerNameStr))
    url.replace(containerNameStr, "")
  }

  /**
   * getting substring from url after domainUrl/slug
   * for example for the url domainUrl/slug/tagId/uuid.pdf then return tagId/uuid.pdf
   *
   * Fix: paths with fewer than two segments previously produced a negative
   * start index for StringUtils.join; such paths are now returned as-is.
   *
   * @param url
   * @return
   * @throws MalformedURLException
   */
  @throws[MalformedURLException]
  def getContainerRelativePath(url: String): String =
    if (url.startsWith("http")) {
      val uri = StringUtils.substringAfter(new URL(url).getPath, "/")
      val path = uri.split("/")
      if (path.length < 2) uri
      else StringUtils.join(path, "/", path.length - 2, path.length)
    }
    else url
}
51 |
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/util/JSONUtil.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.util
2 |
3 | import java.lang.reflect.{ParameterizedType, Type}
4 | import com.fasterxml.jackson.annotation.JsonInclude.Include
5 | import com.fasterxml.jackson.core.JsonGenerator.Feature
6 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
7 | import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper, SerializationFeature}
8 | import com.fasterxml.jackson.core.`type`.TypeReference
9 | import com.fasterxml.jackson.core.json.JsonReadFeature
10 | import com.fasterxml.jackson.databind.json.JsonMapper
11 |
object JSONUtil {

  /** Shared Jackson mapper: Scala module, lenient on unknown fields, nulls omitted on write. */
  @transient val mapper = JsonMapper.builder().enable(JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER).build()
  mapper.registerModule(DefaultScalaModule)
  mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
  mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
  mapper.configure(Feature.WRITE_BIGDECIMAL_AS_PLAIN, true)
  mapper.setSerializationInclusion(Include.NON_NULL)

  /** Serializes any object graph to a JSON string. */
  @throws(classOf[Exception])
  def serialize(obj: AnyRef): String = mapper.writeValueAsString(obj)

  /** Deserializes a JSON string into the requested (possibly generic) type. */
  def deserialize[T: Manifest](json: String): T = mapper.readValue(json, typeReference[T])

  /** Deserializes raw JSON bytes into the requested (possibly generic) type. */
  def deserialize[T: Manifest](json: Array[Byte]): T = mapper.readValue(json, typeReference[T])

  // Bridges a Scala Manifest to Jackson's TypeReference so generics survive erasure.
  private[this] def typeReference[T: Manifest] = new TypeReference[T] {
    override def getType = typeFromManifest(manifest[T])
  }

  // Rebuilds a full java.lang.reflect.Type (including type arguments) from a Manifest.
  private[this] def typeFromManifest(m: Manifest[_]): Type =
    if (m.typeArguments.isEmpty) m.runtimeClass
    else new ParameterizedType {
      def getRawType = m.runtimeClass
      def getActualTypeArguments = m.typeArguments.map(typeFromManifest).toArray
      def getOwnerType = null
    }

}
49 |
--------------------------------------------------------------------------------
/publish-pipeline/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | knowledge-platform-jobs
7 | org.sunbird
8 | 1.0
9 |
10 | 4.0.0
11 |
12 | publish-pipeline
13 | pom
14 | publish-pipeline
15 |
16 |
17 | publish-core
18 | questionset-publish
19 | content-publish
20 |
21 |
22 |
23 |
24 |
25 |
26 | org.apache.maven.plugins
27 | maven-compiler-plugin
28 | 3.8.1
29 |
30 | 11
31 |
32 |
33 |
34 | org.scoverage
35 | scoverage-maven-plugin
36 | ${scoverage.plugin.version}
37 |
38 | ${scala.version}
39 | true
40 | true
41 | org.sunbird.incredible
42 |
43 |
44 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/video-stream-generator/src/main/scala/org/sunbird/job/videostream/helpers/CaseClasses.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.videostream.helpers
2 |
3 | import org.joda.time.DateTime
4 | import scala.collection.immutable.HashMap
5 |
6 |
/** Generic request envelope sent to the media service. */
case class MediaRequest(id: String, params: Map[String, AnyRef] = Map.empty[String, AnyRef], request: Map[String, AnyRef] = Map.empty[String, AnyRef])

/** Generic response envelope returned by the media service. */
case class MediaResponse(id: String, ts: String, params: Map[String, Any] = Map.empty[String, AnyRef], responseCode: String, result: Map[String, AnyRef] = Map.empty[String, AnyRef])

/** HTTP-style response codes used across the media helpers. */
object ResponseCode extends Enumeration {
  type Code = Value
  val OK = Value(200)
  val CLIENT_ERROR = Value(400)
  val SERVER_ERROR = Value(500)
  val RESOURCE_NOT_FOUND = Value(404)
}

trait AlgoOutput extends AnyRef

/** Full job-request row as stored in the job_request table. */
case class JobRequest(client_key: String, request_id: String, job_id: Option[String], status: String, request_data: String, iteration: Int, dt_job_submitted: Option[DateTime] = None, location: Option[String] = None, dt_file_created: Option[DateTime] = None, dt_first_event: Option[DateTime] = None, dt_last_event: Option[DateTime] = None, dt_expiration: Option[DateTime] = None, dt_job_processing: Option[DateTime] = None, dt_job_completed: Option[DateTime] = None, input_events: Option[Int] = None, output_events: Option[Int] = None, file_size: Option[Long] = None, latency: Option[Int] = None, execution_time: Option[Long] = None, err_message: Option[String] = None, stage: Option[String] = None, stage_status: Option[String] = None, job_name: Option[String] = None) extends AlgoOutput

/** Stage-progress snapshot for a job request. */
case class JobStage(request_id: String, client_key: String, stage: String, stage_status: String, status: String, err_message: String = "", dt_job_processing: Option[DateTime] = Option(new DateTime()))

/** Streaming-specific stage snapshot (carries the media job id and retry iteration). */
case class StreamingStage(request_id: String, client_key: String, job_id: String, stage: String, stage_status: String, status: String, iteration: Int, err_message: String = "")
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/util/FlinkUtil.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.util
2 |
3 | import org.apache.flink.api.common.restartstrategy.RestartStrategies
4 | import org.apache.flink.runtime.state.StateBackend
5 | import org.apache.flink.runtime.state.filesystem.FsStateBackend
6 | import org.apache.flink.streaming.api.TimeCharacteristic
7 | import org.apache.flink.streaming.api.environment.CheckpointConfig
8 | import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
9 | import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
10 | import org.sunbird.job.BaseJobConfig
11 |
object FlinkUtil {

  /**
   * Builds a StreamExecutionEnvironment with checkpointing, a fixed-delay
   * restart strategy and (optionally) a filesystem-backed state backend.
   */
  def getExecutionContext(config: BaseJobConfig): StreamExecutionEnvironment = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.getConfig.setUseSnapshotCompression(config.enableCompressedCheckpointing)
    env.enableCheckpointing(config.checkpointingInterval)
    env.getCheckpointConfig.setCheckpointTimeout(config.checkpointingTimeout)

    // Use blob storage as the distributed state backend when explicitly enabled.
    if (config.enableDistributedCheckpointing.contains(true)) {
      val stateBackend: StateBackend = new FsStateBackend(s"${config.checkpointingBaseUrl.getOrElse("")}/${config.jobName}", true)
      env.setStateBackend(stateBackend)
      val checkpointConfig: CheckpointConfig = env.getCheckpointConfig
      checkpointConfig.enableExternalizedCheckpoints(ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)
      checkpointConfig.setMinPauseBetweenCheckpoints(config.checkpointingPauseSeconds)
    }

    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(config.restartAttempts, config.delayBetweenAttempts))
    env
  }
}
--------------------------------------------------------------------------------
/asset-enrichment/src/main/scala/org/sunbird/job/assetenricment/functions/AssetEnrichmentEventRouter.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.assetenricment.functions
2 |
3 | import org.apache.flink.configuration.Configuration
4 | import org.apache.flink.streaming.api.functions.ProcessFunction
5 | import org.slf4j.LoggerFactory
6 | import org.sunbird.job.assetenricment.domain.Event
7 | import org.sunbird.job.assetenricment.task.AssetEnrichmentConfig
8 | import org.sunbird.job.{BaseProcessFunction, Metrics}
9 |
class AssetEnrichmentEventRouter(config: AssetEnrichmentConfig)
  extends BaseProcessFunction[Event, String](config) {

  private[this] val logger = LoggerFactory.getLogger(classOf[AssetEnrichmentEventRouter])

  override def open(parameters: Configuration): Unit = super.open(parameters)

  override def close(): Unit = super.close()

  /**
   * Routes a validated asset event to the image or video enrichment stream;
   * invalid events and unknown media types are counted as skipped.
   */
  override def processElement(event: Event, context: ProcessFunction[Event, String]#Context, metrics: Metrics): Unit = {
    logger.info(s"Processing event for AssetEnrichment for identifier : ${event.id}")
    metrics.incCounter(config.totalEventsCount)
    val validationMessage = event.validate(config.maxIterationCount)
    if (validationMessage.nonEmpty) {
      logSkippedEvent(validationMessage)(metrics)
    } else event.mediaType.toLowerCase match {
      case "image" => context.output(config.imageEnrichmentDataOutTag, event)
      case "video" => context.output(config.videoEnrichmentDataOutTag, event)
      case _ => logSkippedEvent(s"Media Type UNKNOWN. Identifier: ${event.id} & mediaType: ${event.mediaType}.")(metrics)
    }
  }

  override def metricsList(): List[String] = List(config.totalEventsCount, config.skippedEventCount)

  /** Logs the skip reason and bumps the skipped-events metric. */
  def logSkippedEvent(message: String)(metrics: Metrics): Unit = {
    logger.info(message)
    metrics.incCounter(config.skippedEventCount)
  }
}
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/serde/MapSerde.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.serde
2 |
3 | import java.nio.charset.StandardCharsets
4 | import java.util
5 |
6 | import com.google.gson.Gson
7 | import org.apache.flink.api.common.typeinfo.TypeInformation
8 | import org.apache.flink.api.java.typeutils.TypeExtractor
9 | import org.apache.flink.streaming.connectors.kafka.{KafkaDeserializationSchema, KafkaSerializationSchema}
10 | import org.apache.kafka.clients.consumer.ConsumerRecord
11 | import org.apache.kafka.clients.producer.ProducerRecord
12 |
13 | import scala.collection.JavaConverters._
14 |
class MapDeserializationSchema extends KafkaDeserializationSchema[util.Map[String, AnyRef]] {

  override def isEndOfStream(nextElement: util.Map[String, AnyRef]): Boolean = false

  /**
   * Parses the record value as a JSON object and attaches the source kafka
   * partition under the "partition" key.
   */
  override def deserialize(record: ConsumerRecord[Array[Byte], Array[Byte]]): util.Map[String, AnyRef] = {
    // Integer.valueOf instead of the deprecated `new Integer(...)` constructor.
    val partition = Integer.valueOf(record.partition())
    val parsedString = new String(record.value(), StandardCharsets.UTF_8)
    val recordMap = new Gson().fromJson(parsedString, new util.HashMap[String, AnyRef]().getClass).asScala ++ Map("partition" -> partition.asInstanceOf[AnyRef])
    recordMap.asJava
  }

  override def getProducedType: TypeInformation[util.Map[String, AnyRef]] = TypeExtractor.getForClass(classOf[util.Map[String, AnyRef]])
}
28 |
class MapSerializationSchema(topic: String, key: Option[String] = None) extends KafkaSerializationSchema[util.Map[String, AnyRef]] {

  /** Serializes the map as JSON; the optional kafka key is UTF-8 encoded when present. */
  override def serialize(element: util.Map[String, AnyRef], timestamp: java.lang.Long): ProducerRecord[Array[Byte], Array[Byte]] = {
    val payload = new Gson().toJson(element).getBytes(StandardCharsets.UTF_8)
    key match {
      case Some(kafkaKey) => new ProducerRecord[Array[Byte], Array[Byte]](topic, kafkaKey.getBytes(StandardCharsets.UTF_8), payload)
      case None => new ProducerRecord[Array[Byte], Array[Byte]](topic, payload)
    }
  }
}
38 |
--------------------------------------------------------------------------------
/post-publish-processor/src/main/scala/org/sunbird/job/postpublish/functions/ShallowCopyPublishFunction.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.postpublish.functions
2 |
3 | import org.apache.flink.api.common.typeinfo.TypeInformation
4 | import org.apache.flink.configuration.Configuration
5 | import org.apache.flink.streaming.api.functions.ProcessFunction
6 | import org.slf4j.LoggerFactory
7 | import org.sunbird.job.postpublish.task.PostPublishProcessorConfig
8 | import org.sunbird.job.{BaseProcessFunction, Metrics}
9 |
10 | import java.util.UUID
11 |
class ShallowCopyPublishFunction(config: PostPublishProcessorConfig)
                                (implicit val stringTypeInfo: TypeInformation[String])
  extends BaseProcessFunction[PublishMetadata, String](config) {

  private[this] val logger = LoggerFactory.getLogger(classOf[ShallowCopyPublishFunction])

  override def open(parameters: Configuration): Unit = super.open(parameters)

  override def close(): Unit = super.close()

  /**
   * Emits a BE_JOB_REQUEST publish event for the shallow-copied content so the
   * publish pipeline re-publishes it, then bumps the shallow-copy metric.
   */
  override def processElement(metadata: PublishMetadata, context: ProcessFunction[PublishMetadata, String]#Context, metrics: Metrics): Unit = {
    val ets = System.currentTimeMillis
    val publishEvent = s"""{"eid":"BE_JOB_REQUEST","ets":${ets},"mid":"LP.${ets}.${UUID.randomUUID()}","actor":{"id":"Publish Samza Job","type":"System"},"context":{"pdata":{"ver":"1.0","id":"org.ekstep.platform"},"channel":"sunbird","env":"sunbirddev"},"object":{"ver":"${metadata.pkgVersion}","id":"${metadata.identifier}"},"edata":{"publish_type":"public","metadata":{"mimeType":"${metadata.mimeType}","lastPublishedBy":"System","pkgVersion":${metadata.pkgVersion}},"action":"publish","iteration":1,"contentType":"${metadata.contentType}"}}"""
    context.output(config.publishEventOutTag, publishEvent)
    metrics.incCounter(config.shallowCopyCount)
    logger.info(s"Shallow copy content publish triggered for ${metadata.identifier}")
  }

  override def metricsList(): List[String] = List(config.shallowCopyCount)
}
38 |
--------------------------------------------------------------------------------
/audit-history-indexer/src/main/scala/org/sunbird/job/audithistory/functions/AuditHistoryIndexer.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.audithistory.functions
2 |
3 | import org.apache.flink.api.common.typeinfo.TypeInformation
4 | import org.apache.flink.configuration.Configuration
5 | import org.apache.flink.streaming.api.functions.KeyedProcessFunction
6 | import org.sunbird.job.audithistory.domain.Event
7 | import org.sunbird.job.audithistory.service.AuditHistoryIndexerService
8 | import org.sunbird.job.audithistory.task.AuditHistoryIndexerConfig
9 | import org.sunbird.job.util.ElasticSearchUtil
10 | import org.sunbird.job.{BaseProcessKeyedFunction, Metrics}
11 |
12 | import java.util
13 |
class AuditHistoryIndexer(config: AuditHistoryIndexerConfig, var esUtil: ElasticSearchUtil)
                         (implicit mapTypeInfo: TypeInformation[util.Map[String, Any]],
                          stringTypeInfo: TypeInformation[String])
  extends BaseProcessKeyedFunction[String, Event, String](config) with AuditHistoryIndexerService {

  override def open(parameters: Configuration): Unit = {
    super.open(parameters)
    // Build the ES client only when one was not injected via the constructor.
    if (esUtil == null)
      esUtil = new ElasticSearchUtil(config.esConnectionInfo, config.auditHistoryIndex, config.auditHistoryIndexType)
  }

  override def close(): Unit = {
    esUtil.close()
    super.close()
  }

  /** Indexes valid audit events; everything else is counted as skipped. */
  override def processElement(event: Event,
                              context: KeyedProcessFunction[String, Event, String]#Context,
                              metrics: Metrics): Unit = {
    metrics.incCounter(config.totalEventsCount)
    if (!event.isValid) metrics.incCounter(config.skippedEventCount)
    else processEvent(event, metrics)(esUtil, config)
  }

  override def metricsList(): List[String] =
    List(config.totalEventsCount, config.successEventCount, config.failedEventCount, config.esFailedEventCount, config.skippedEventCount)
}
44 |
--------------------------------------------------------------------------------
/relation-cache-updater/src/main/scala/org/sunbird/job/relationcache/task/RelationCacheUpdaterConfig.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.relationcache.task
2 |
3 | import java.util
4 |
5 | import com.typesafe.config.Config
6 | import org.apache.flink.api.common.typeinfo.TypeInformation
7 | import org.apache.flink.api.java.typeutils.TypeExtractor
8 | import org.sunbird.job.BaseJobConfig
9 |
class RelationCacheUpdaterConfig(override val config: Config) extends BaseJobConfig(config, "relation-cache-updater") {

  private val serialVersionUID = 2905979434303791379L

  // Flink type hints for the event payload and string streams.
  implicit val mapTypeInfo: TypeInformation[util.Map[String, AnyRef]] = TypeExtractor.getForClass(classOf[util.Map[String, AnyRef]])
  implicit val stringTypeInfo: TypeInformation[String] = TypeExtractor.getForClass(classOf[String])

  // Kafka source configuration.
  val kafkaInputTopic: String = config.getString("kafka.input.topic")
  override val kafkaConsumerParallelism: Int = config.getInt("task.consumer.parallelism")

  // Metric names reported by this job.
  val totalEventsCount: String = "total-events-count"
  val successEventCount: String = "success-events-count"
  val failedEventCount: String = "failed-events-count"
  val skippedEventCount: String = "skipped-event-count"
  val cacheWrite: String = "cache-write-count"
  val dbReadCount: String = "db-read-count"

  // Operator / consumer identifiers.
  val relationCacheConsumer: String = "relation-cache-updater-consumer"

  // Cassandra (hierarchy store) settings.
  val dbTable: String = config.getString("lms-cassandra.table")
  val dbKeyspace: String = config.getString("lms-cassandra.keyspace")
  val dbHost: String = config.getString("lms-cassandra.host")
  val dbPort: Int = config.getInt("lms-cassandra.port")
  val hierarchyPrimaryKey: List[String] = List("identifier")

  // Redis settings: relation cache plus the data-pipeline collection cache.
  val relationCacheStore: Int = config.getInt("redis.database.index")
  val dpRedisHost: String = config.getString("dp-redis.host")
  val dpRedisPort: Int = config.getInt("dp-redis.port")
  val collectionCacheStore: Int = config.getInt("dp-redis.database.index")

}
46 |
--------------------------------------------------------------------------------
/audit-event-generator/src/main/scala/org/sunbird/job/auditevent/task/AuditEventGeneratorConfig.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.auditevent.task
2 |
3 | import java.util
4 | import com.typesafe.config.Config
5 | import org.apache.flink.api.common.typeinfo.TypeInformation
6 | import org.apache.flink.api.java.typeutils.TypeExtractor
7 | import org.apache.flink.streaming.api.scala.OutputTag
8 | import org.sunbird.job.BaseJobConfig
9 |
class AuditEventGeneratorConfig(override val config: Config) extends BaseJobConfig(config, "audit-event-generator") {

  private val serialVersionUID = 2905979434303791379L

  // Flink type hints for the event payload and string streams.
  implicit val mapTypeInfo: TypeInformation[util.Map[String, AnyRef]] = TypeExtractor.getForClass(classOf[util.Map[String, AnyRef]])
  implicit val stringTypeInfo: TypeInformation[String] = TypeExtractor.getForClass(classOf[String])

  // Kafka source/sink configuration.
  val kafkaInputTopic: String = config.getString("kafka.input.topic")
  val kafkaOutputTopic: String = config.getString("kafka.output.topic")
  override val kafkaConsumerParallelism: Int = config.getInt("task.consumer.parallelism")
  override val parallelism: Int = config.getInt("task.parallelism")
  val kafkaProducerParallelism: Int = config.getInt("task.producer.parallelism")

  // Side-output carrying generated audit events to the producer.
  val auditOutputTag: OutputTag[String] = OutputTag[String]("audit-event-tag")

  val defaultChannel: String = config.getString("channel.default")

  // Metric names reported by this job.
  val totalEventsCount: String = "total-events-count"
  val successEventCount: String = "success-events-count"
  val failedEventCount: String = "failed-events-count"
  val skippedEventCount: String = "skipped-events-count"
  val emptySchemaEventCount: String = "empty-schema-events-count"
  val emptyPropsEventCount: String = "empty-props-events-count"

  // Operator / consumer / producer identifiers.
  val auditEventConsumer: String = "audit-event-generator-consumer"
  val auditEventGeneratorFunction: String = "audit-event-generator-function"
  val auditEventProducer: String = "audit-event-generator-producer"

  val basePath: String = config.getString("schema.basePath")
  val configVersion: String = "1.0"
}
44 |
--------------------------------------------------------------------------------
/jobs-core/src/main/scala/org/sunbird/job/util/CassandraUtil.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.util
2 |
3 | import com.datastax.driver.core._
4 | import com.datastax.driver.core.exceptions.DriverException
5 | import org.slf4j.LoggerFactory
6 |
7 | import java.util
8 |
/**
 * Thin wrapper around the DataStax driver providing query helpers with a
 * single bounded reconnect-and-retry on driver failures.
 *
 * Fixes over the previous revision:
 *  - reconnect() used to build a new Cluster WITHOUT withoutJMXReporting()
 *    (inconsistent with the primary cluster) and leaked the old Cluster.
 *  - close() closed only the session, leaking the cluster's connection
 *    pools and background threads.
 *  - findOne()/find() retried via unbounded self-recursion: a persistent
 *    outage caused infinite reconnect attempts / stack overflow. Retry is
 *    now attempted exactly once; a second failure propagates to the caller.
 */
class CassandraUtil(host: String, port: Int) {

  private[this] val logger = LoggerFactory.getLogger("CassandraUtil")

  // Single place that encodes the cluster settings, so reconnect() cannot
  // drift from the initial configuration.
  private def buildCluster(): Cluster =
    Cluster.builder()
      .addContactPoints(host)
      .withPort(port)
      .withoutJMXReporting()
      .build()

  val cluster: Cluster = buildCluster()
  // Tracks whichever cluster currently backs `session`, so it can be closed
  // on reconnect/close instead of being leaked.
  private var currentCluster: Cluster = cluster
  var session: Session = cluster.connect()

  /**
   * Executes the query and returns the first row (null if the result set is
   * empty). On a DriverException, reconnects and retries once; a failure of
   * the retry propagates to the caller.
   */
  def findOne(query: String): Row = {
    try {
      session.execute(query).one
    } catch {
      case ex: DriverException =>
        logger.error(s"findOne - Error while executing query $query :: ", ex)
        this.reconnect()
        session.execute(query).one
    }
  }

  /**
   * Executes the query and returns all rows. On a DriverException,
   * reconnects and retries once; a failure of the retry propagates.
   */
  def find(query: String): util.List[Row] = {
    try {
      session.execute(query).all
    } catch {
      case ex: DriverException =>
        logger.error(s"find - Error while executing query $query :: ", ex)
        this.reconnect()
        session.execute(query).all
    }
  }

  /** Executes an insert/update query; returns whether it was applied. */
  def upsert(query: String): Boolean = {
    val rs: ResultSet = session.execute(query)
    rs.wasApplied
  }

  /** Looks up a user-defined type from the cluster metadata. */
  def getUDTType(keyspace: String, typeName: String): UserType = session.getCluster.getMetadata.getKeyspace(keyspace).getUserType(typeName)

  /**
   * Tears down the current session AND its cluster, then connects a fresh
   * cluster built with the same settings as the original.
   */
  def reconnect(): Unit = {
    this.session.close()
    this.currentCluster.close()
    this.currentCluster = buildCluster()
    this.session = currentCluster.connect()
  }

  /** Releases the session and the backing cluster (pools, threads). */
  def close(): Unit = {
    this.session.close()
    this.currentCluster.close()
  }

  /** Executes a pre-built statement; returns whether it was applied. */
  def update(query: Statement): Boolean = {
    val rs: ResultSet = session.execute(query)
    rs.wasApplied
  }

  /** Prepares the query, binds the given params, and returns all rows. */
  def executePreparedStatement(query: String, params: Object*): util.List[Row] = {
    val rs: ResultSet = session.execute(session.prepare(query).bind(params: _*))
    rs.all()
  }

}
73 |
--------------------------------------------------------------------------------
/search-indexer/src/main/scala/org/sunbird/job/searchindexer/functions/TransactionEventRouter.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.searchindexer.functions
2 |
3 | import com.google.gson.reflect.TypeToken
4 | import org.apache.flink.configuration.Configuration
5 | import org.apache.flink.streaming.api.functions.KeyedProcessFunction
6 | import org.slf4j.LoggerFactory
7 | import org.sunbird.job.searchindexer.compositesearch.domain.Event
8 | import org.sunbird.job.searchindexer.task.SearchIndexerConfig
9 | import org.sunbird.job.{BaseProcessKeyedFunction, Metrics}
10 |
11 | import java.lang.reflect.Type
12 |
/**
 * Routes incoming transaction events to the side output matching their
 * nodeType; events that fail validation or carry an unknown nodeType are
 * counted as skipped.
 */
class TransactionEventRouter(config: SearchIndexerConfig)
  extends BaseProcessKeyedFunction[String, Event, String](config) {

  private[this] val logger = LoggerFactory.getLogger(classOf[TransactionEventRouter])

  // Gson type token for deserialising event payloads into a java Map.
  val mapType: Type = new TypeToken[java.util.Map[String, AnyRef]]() {}.getType

  override def open(parameters: Configuration): Unit = super.open(parameters)

  override def close(): Unit = super.close()

  override def processElement(event: Event, context: KeyedProcessFunction[String, Event, String]#Context, metrics: Metrics): Unit = {
    metrics.incCounter(config.totalEventsCount)
    // Guard: drop events that do not qualify for indexing.
    if (!event.validEvent(config.restrictObjectTypes)) {
      metrics.incCounter(config.skippedEventCount)
      logger.info(s"Event not qualified for indexing for Identifier : ${event.id}.")
    } else {
      event.nodeType match {
        case "SET" | "DATA_NODE" => context.output(config.compositeSearchDataOutTag, event)
        case "EXTERNAL" => context.output(config.dialCodeExternalOutTag, event)
        case "DIALCODE_METRICS" => context.output(config.dialCodeMetricOutTag, event)
        case other =>
          logger.info(s"UNKNOWN EVENT NODETYPE : $other for Identifier : ${event.id}.")
          metrics.incCounter(config.skippedEventCount)
      }
    }
  }

  override def metricsList(): List[String] =
    List(config.totalEventsCount, config.skippedEventCount)

}
--------------------------------------------------------------------------------
/asset-enrichment/src/test/scala/org/sunbird/job/spec/ThumbnailSpec.scala:
--------------------------------------------------------------------------------
1 | package org.sunbird.job.spec
2 |
3 | import com.typesafe.config.{Config, ConfigFactory}
4 | import org.sunbird.job.assetenricment.task.AssetEnrichmentConfig
5 | import org.sunbird.job.assetenricment.util.ThumbnailUtil
6 | import org.sunbird.job.util.FileUtils
7 | import org.sunbird.spec.BaseTestSpec
8 |
9 | import java.io.File
10 |
// NOTE(review): class name is missing the 'm' ("Thubnail" vs "Thumbnail") and
// differs from the file name ThumbnailSpec.scala — consider renaming.
class ThubnailUtilSpec extends BaseTestSpec {

  val config: Config = ConfigFactory.load("test.conf").withFallback(ConfigFactory.systemEnvironment())
  val jobConfig = new AssetEnrichmentConfig(config)

  // NOTE(review): description says "return null" but the assertion checks None.
  "ThumbnailUtil.generateOutFile" should " return null for no file" in {
    val generated = new ThumbnailUtilTest().generateOutFile(null, 150)
    generated should be(None)
  }

  "ThumbnailUtil.generateOutFile" should " return None for the file" in {
    val imageUrl = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/content/do_113233717480390656195/artifact/bitcoin-4_1545114579639.jpg"
    try {
      // Download the source image, then attempt thumbnail generation.
      val downloaded = FileUtils.copyURLToFile("do_113233717480390656195", imageUrl, imageUrl.substring(imageUrl.lastIndexOf("/") + 1))
      val thumbnail = new ThumbnailUtilTest().generateOutFile(downloaded.get, 15000)
      thumbnail should be(None)
    } finally {
      // Always clean up the working directory, even if the assertion fails.
      FileUtils.deleteDirectory(new File(s"/tmp/do_113233717480390656195"))
    }
  }

  "ThumbnailUtil.generateOutFile" should " return None for file" in {
    val contentId = "do_1127129845261680641588"
    val originalURL = "https://sunbirddev.blob.core.windows.net/sunbird-content-dev/content/kp_ft_1563562323128/artifact/sample_1563562323191.mp4"
    try {
      // A video input should not yield a thumbnail from generateOutFile.
      val downloaded = FileUtils.copyURLToFile(contentId, originalURL, originalURL.substring(originalURL.lastIndexOf("/") + 1, originalURL.length))
      val thumbnail = new ThumbnailUtilTest().generateOutFile(downloaded.get, 150)
      thumbnail should be(None)
    } finally {
      FileUtils.deleteDirectory(new File(s"/tmp/$contentId"))
    }
  }
}
44 |
45 |
// Bare concrete subclass used by the spec above to exercise the
// ThumbnailUtil trait's default method implementations.
class ThumbnailUtilTest extends ThumbnailUtil {

}
--------------------------------------------------------------------------------