├── README.md
└── sparkutils
    ├── .DS_Store
    ├── .cache-main
    ├── .classpath
    ├── .project
    ├── .settings
    │   ├── org.eclipse.core.resources.prefs
    │   ├── org.eclipse.jdt.core.prefs
    │   ├── org.eclipse.m2e.core.prefs
    │   └── org.scala-ide.sdt.core.prefs
    ├── pom.xml
    ├── src
    │   ├── main
    │   │   └── scala
    │   │       └── com
    │   │           └── saurav
    │   │               └── Utilities
    │   │                   ├── OrderedProperties.java
    │   │                   └── Utilities.scala
    │   └── test
    │       ├── resources
    │       │   ├── incr1.csv
    │       │   ├── regexReplaceTestData.csv
    │       │   ├── res.csv
    │       │   ├── res1.csv
    │       │   ├── src.csv
    │       │   ├── tgt.csv
    │       │   └── tgt_blank.csv
    │       └── scala
    │           └── com
    │               └── saurav
    │                   └── Utilities
    │                       └── UtilitiesTest.scala
    └── target
        ├── .DS_Store
        ├── classes.2061037500.timestamp
        ├── classes
        │   └── com
        │       └── saurav
        │           └── Utilities
        │               ├── Utilities$$anonfun$1.class
        │               ├── Utilities$$anonfun$2.class
        │               ├── Utilities$$anonfun$3.class
        │               ├── Utilities$$anonfun$4.class
        │               ├── Utilities$$anonfun$5.class
        │               ├── Utilities$$anonfun$6.class
        │               ├── Utilities$$anonfun$7.class
        │               ├── Utilities$$anonfun$convertDateTime$1.class
        │               ├── Utilities$$anonfun$extractTimeComponent$1.class
        │               ├── Utilities$$anonfun$generateSequenceId$1.class
        │               ├── Utilities$$anonfun$generateSequenceId$2.class
        │               ├── Utilities$$anonfun$getSortedFileObjects$1.class
        │               ├── Utilities$$anonfun$getSortedFileObjects$2.class
        │               ├── Utilities$$anonfun$regexReplacePlatformService$1.class
        │               ├── Utilities$$anonfun$sequenceIDGenerator$1.class
        │               ├── Utilities$$anonfun$sequenceIDGenerator$2.class
        │               ├── Utilities$$anonfun$sortedFiles$1.class
        │               ├── Utilities$$anonfun$sortedFiles$2.class
        │               ├── Utilities$$anonfun$sortedFiles$3.class
        │               ├── Utilities$$anonfun$trimDF$1.class
        │               ├── Utilities$$anonfun$validateColumnLength$1.class
        │               ├── Utilities$$anonfun$validateNotNull$1.class
        │               ├── Utilities$$typecreator1$1.class
        │               ├── Utilities$$typecreator2$1.class
        │               ├── Utilities$$typecreator3$1.class
        │               ├── Utilities$$typecreator4$1.class
        │               ├── Utilities$.class
        │               ├── Utilities$DataList$.class
        │               ├── Utilities$DataList.class
        │               └── Utilities.class
        ├── maven-archiver
        │   └── pom.properties
        ├── surefire-reports
        │   ├── TEST-com.saurav.Utilities.UtilitiesTest.xml
        │   ├── TEST-org.scalatest.tools.DiscoverySuite-ca7fc693-2775-4b5e-a5ed-ee204c47d473.xml
        │   ├── TestSuite.txt
        │   └── com.saurav.Utilities.UtilitiesTest.txt
        ├── surefire
        │   ├── surefire4033596394733709221tmp
        │   └── surefire4217104294633344082tmp
        ├── test-classes.-971118635.timestamp
        └── test-classes
            ├── com
            │   └── saurav
            │       └── Utilities
            │           ├── UtilitiesTest$$anonfun$1.class
            │           ├── UtilitiesTest$$anonfun$2.class
            │           └── UtilitiesTest.class
            ├── incr1.csv
            ├── regexReplaceTestData.csv
            ├── res.csv
            ├── res1.csv
            ├── src.csv
            ├── tgt.csv
            └── tgt_blank.csv
/README.md:
--------------------------------------------------------------------------------
1 | # SparkUtils
2 | This Scala Maven project contains various utility methods that can be reused across Spark processing code.
3 |
4 | The set of utilities includes logging, trimming, regex replacement, time-related functions, file/directory handling functions, a sequence generator, and data quality checks such as validate-not-null and validate-column-length. You can also perform SCD Type 2 CDC loads using the CDC utility.
5 |
6 | ## Usage
7 |
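8 | A minimal usage sketch (the `spark` session, file paths, and pipe-delimited metadata strings below are illustrative assumptions drawn from the bundled tests, not a fixed contract; see `Utilities.scala` for the full signatures):
9 |
10 | ```scala
11 | import org.apache.spark.sql.SparkSession
12 | import org.apache.spark.sql.functions.lit
13 | import com.saurav.Utilities.Utilities
14 |
15 | val spark = SparkSession.builder().master("local[*]").appName("sparkutils-demo").getOrCreate()
16 |
17 | // Trim the Name column, then strip '@' characters ("column|regex" metadata format)
18 | val raw = spark.read.option("header", "true").csv("src/test/resources/regexReplaceTestData.csv")
19 | val cleaned = Utilities.regexReplacePlatformService(Utilities.trimDF(raw, Array("Name")), Array("Name|@"))
20 |
21 | // SCD Type 2 CDC: the target needs dl_load_flag, start_date and end_date columns;
22 | // passing null dates falls back to the current timestamp / open-ended end date
23 | val incr = spark.read.option("header", "true").csv("src/test/resources/incr1.csv")
24 | val tgt = spark.read.option("header", "true").csv("src/test/resources/tgt.csv")
25 |   .withColumn("start_date", lit("2018-01-01 00:00:00")).withColumn("end_date", lit("9999-12-31 00:00:00"))
26 | val merged = Utilities.IncrementalType2NonMD5(spark, incr, tgt, Array("PK_Column"), "Partition_Column", null, null)
27 | merged.show()
28 | ```
29 |
30 | The data quality checks each return the valid and the invalid record sets (again a sketch; the column names come from the test CSVs):
31 |
32 | ```scala
33 | // validateNotNull returns List(validDF, invalidDF) for the given primary key columns
34 | val List(valid, invalid) = Utilities.validateNotNull(spark, raw, List("Id"))
35 |
36 | // validateColumnLength enforces a "column|maxLength" metadata rule
37 | val List(okLen, badLen) = Utilities.validateColumnLength(raw, valid, Array("Name|20"))
38 | ```
39 |
40 | Happy Coding!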
--------------------------------------------------------------------------------
/sparkutils/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/.DS_Store
--------------------------------------------------------------------------------
/sparkutils/.cache-main:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/.cache-main
--------------------------------------------------------------------------------
/sparkutils/.classpath:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/sparkutils/.project:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <projectDescription>
3 | 	<name>sparkutils</name>
4 | 	<comment></comment>
5 | 	<projects>
6 | 	</projects>
7 | 	<buildSpec>
8 | 		<buildCommand>
9 | 			<name>org.scala-ide.sdt.core.scalabuilder</name>
10 | 			<arguments>
11 | 			</arguments>
12 | 		</buildCommand>
13 | 		<buildCommand>
14 | 			<name>org.eclipse.m2e.core.maven2Builder</name>
15 | 			<arguments>
16 | 			</arguments>
17 | 		</buildCommand>
18 | 	</buildSpec>
19 | 	<natures>
20 | 		<nature>org.scala-ide.sdt.core.scalanature</nature>
21 | 		<nature>org.eclipse.jdt.core.javanature</nature>
22 | 		<nature>org.eclipse.m2e.core.maven2Nature</nature>
23 | 	</natures>
24 | </projectDescription>
25 |
--------------------------------------------------------------------------------
/sparkutils/.settings/org.eclipse.core.resources.prefs:
--------------------------------------------------------------------------------
1 | eclipse.preferences.version=1
2 | encoding//src/main/resources=UTF-8
3 | encoding//src/main/scala=UTF-8
4 | encoding//src/test/resources=UTF-8
5 | encoding/=UTF-8
6 |
--------------------------------------------------------------------------------
/sparkutils/.settings/org.eclipse.jdt.core.prefs:
--------------------------------------------------------------------------------
1 | eclipse.preferences.version=1
2 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
3 | org.eclipse.jdt.core.compiler.compliance=1.7
4 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
5 | org.eclipse.jdt.core.compiler.source=1.7
6 |
--------------------------------------------------------------------------------
/sparkutils/.settings/org.eclipse.m2e.core.prefs:
--------------------------------------------------------------------------------
1 | activeProfiles=
2 | eclipse.preferences.version=1
3 | resolveWorkspaceProjects=true
4 | version=1
5 |
--------------------------------------------------------------------------------
/sparkutils/.settings/org.scala-ide.sdt.core.prefs:
--------------------------------------------------------------------------------
1 | eclipse.preferences.version=1
2 | scala.compiler.additionalParams=\ -Xsource\:2.11 -Ymacro-expand\:none
3 | scala.compiler.installation=2.11
4 | scala.compiler.sourceLevel=2.11
5 | scala.compiler.useProjectSettings=true
6 |
--------------------------------------------------------------------------------
/sparkutils/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 | 	<modelVersion>4.0.0</modelVersion>
4 | 	<groupId>com.saurav</groupId>
5 | 	<artifactId>sparkutils</artifactId>
6 | 	<version>0.0.1-SNAPSHOT</version>
7 | 	<name>com.saurav.sparkutils</name>
8 | 	<description>sparkutils</description>
9 | 	<properties>
10 | 		<maven.compiler.source>1.8</maven.compiler.source>
11 | 		<maven.compiler.target>1.8</maven.compiler.target>
12 | 		<encoding>UTF-8</encoding>
13 | 		<scala.version>2.11.11</scala.version>
14 | 		<spark.scala.version>2.11</spark.scala.version>
15 | 		<spark.version>2.3.0</spark.version>
16 | 	</properties>
17 |
18 | 	<dependencies>
19 | 		<dependency>
20 | 			<groupId>org.scala-lang</groupId>
21 | 			<artifactId>scala-library</artifactId>
22 | 			<version>${scala.version}</version>
23 | 			<scope>provided</scope>
24 | 		</dependency>
25 | 		<dependency>
26 | 			<groupId>org.scalactic</groupId>
27 | 			<artifactId>scalactic_2.11</artifactId>
28 | 			<version>3.0.1</version>
29 | 		</dependency>
30 | 		<dependency>
31 | 			<groupId>org.scalatest</groupId>
32 | 			<artifactId>scalatest_2.11</artifactId>
33 | 			<version>3.0.1</version>
34 | 			<scope>test</scope>
35 | 		</dependency>
36 | 		<dependency>
37 | 			<groupId>org.apache.spark</groupId>
38 | 			<artifactId>spark-core_${spark.scala.version}</artifactId>
39 | 			<scope>provided</scope>
40 | 			<version>${spark.version}</version>
41 | 		</dependency>
42 | 		<dependency>
43 | 			<groupId>com.microsoft.azure</groupId>
44 | 			<artifactId>azure-sqldb-spark</artifactId>
45 | 			<version>1.0.2</version>
46 | 		</dependency>
47 | 		<dependency>
48 | 			<groupId>org.apache.spark</groupId>
49 | 			<artifactId>spark-sql_${spark.scala.version}</artifactId>
50 | 			<version>${spark.version}</version>
51 | 			<scope>provided</scope>
52 | 		</dependency>
53 | 		<dependency>
54 | 			<groupId>org.apache.spark</groupId>
55 | 			<artifactId>spark-hive_${spark.scala.version}</artifactId>
56 | 			<version>${spark.version}</version>
57 | 			<scope>provided</scope>
58 | 		</dependency>
59 | 		<dependency>
60 | 			<groupId>com.databricks</groupId>
61 | 			<artifactId>spark-avro_${spark.scala.version}</artifactId>
62 | 			<version>3.2.0</version>
63 | 		</dependency>
64 | 		<dependency>
65 | 			<groupId>org.apache.spark</groupId>
66 | 			<artifactId>spark-avro_${spark.scala.version}</artifactId>
67 | 			<version>2.4.0</version>
68 | 		</dependency>
69 | 		<dependency>
70 | 			<groupId>org.apache.spark</groupId>
71 | 			<artifactId>spark-streaming_${spark.scala.version}</artifactId>
72 | 			<version>${spark.version}</version>
73 | 			<scope>provided</scope>
74 | 		</dependency>
75 | 		<dependency>
76 | 			<groupId>com.typesafe</groupId>
77 | 			<artifactId>config</artifactId>
78 | 			<version>1.2.1</version>
79 | 		</dependency>
80 | 		<dependency>
81 | 			<groupId>org.joda</groupId>
82 | 			<artifactId>joda-convert</artifactId>
83 | 			<version>1.8.1</version>
84 | 		</dependency>
85 | 		<dependency>
86 | 			<groupId>com.databricks</groupId>
87 | 			<artifactId>spark-xml_${spark.scala.version}</artifactId>
88 | 			<version>0.4.1</version>
89 | 		</dependency>
90 | 		<dependency>
91 | 			<groupId>org.scalaj</groupId>
92 | 			<artifactId>scalaj-http_2.11</artifactId>
93 | 			<version>2.2.1</version>
94 | 		</dependency>
95 |
96 |
103 |
104 |
109 |
110 |
111 | 		<dependency>
112 | 			<groupId>com.microsoft.windowsazure</groupId>
113 | 			<artifactId>microsoft-windowsazure-api</artifactId>
114 | 			<version>0.4.6</version>
115 | 		</dependency>
116 |
117 |
119 |
126 | 		<dependency>
127 | 			<groupId>org.mockito</groupId>
128 | 			<artifactId>mockito-core</artifactId>
129 | 			<version>2.0.73-beta</version>
130 | 		</dependency>
131 | 		<dependency>
132 | 			<groupId>com.microsoft.sqlserver</groupId>
133 | 			<artifactId>mssql-jdbc</artifactId>
134 | 			<version>6.4.0.jre8</version>
135 | 		</dependency>
136 |
137 | 		<dependency>
138 | 			<groupId>com.github.scopt</groupId>
139 | 			<artifactId>scopt_2.11</artifactId>
140 | 			<version>3.3.0</version>
141 | 		</dependency>
142 | 		<dependency>
143 | 			<groupId>com.microsoft.azure</groupId>
144 | 			<artifactId>azure-eventhubs-spark_2.11</artifactId>
145 | 			<version>2.2.4</version>
146 | 		</dependency>
147 | 		<dependency>
148 | 			<groupId>org.apache.kafka</groupId>
149 | 			<artifactId>kafka_2.11</artifactId>
150 | 			<version>0.10.2.1</version>
151 | 		</dependency>
152 | 		<dependency>
153 | 			<groupId>org.apache.spark</groupId>
154 | 			<artifactId>spark-sql-kafka-0-10_2.11</artifactId>
155 | 			<version>2.2.0</version>
156 | 		</dependency>
157 | 		<dependency>
158 | 			<groupId>org.apache.spark</groupId>
159 | 			<artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
160 | 			<version>2.2.0</version>
161 | 		</dependency>
162 | 		<dependency>
163 | 			<groupId>org.reflections</groupId>
164 | 			<artifactId>reflections</artifactId>
165 | 			<version>0.9.11</version>
166 | 		</dependency>
167 |
168 | 		<dependency>
169 | 			<groupId>io.spray</groupId>
170 | 			<artifactId>spray-json_2.11</artifactId>
171 | 			<version>1.3.3</version>
172 | 		</dependency>
173 |
174 | 		<dependency>
175 | 			<groupId>org.mockito</groupId>
176 | 			<artifactId>mockito-core</artifactId>
177 | 			<version>2.24.0</version>
178 | 			<scope>test</scope>
179 | 		</dependency>
180 |
181 | 	</dependencies>
182 |
183 | 	<build>
184 | 		<plugins>
185 | 			<plugin>
186 | 				<groupId>net.alchim31.maven</groupId>
187 | 				<artifactId>scala-maven-plugin</artifactId>
188 | 				<version>3.1.3</version>
189 | 				<executions>
190 | 					<execution>
191 | 						<goals>
192 | 							<goal>compile</goal>
193 | 							<goal>testCompile</goal>
194 | 						</goals>
195 | 						<configuration>
196 | 							<args>
197 |
198 | 								<arg>-dependencyfile</arg>
199 | 								<arg>${project.build.directory}/.scala_dependencies</arg>
200 | 							</args>
201 | 						</configuration>
202 | 					</execution>
203 | 				</executions>
204 | 			</plugin>
205 | 			<plugin>
206 | 				<groupId>org.apache.maven.plugins</groupId>
207 | 				<artifactId>maven-surefire-plugin</artifactId>
208 | 				<version>2.7</version>
209 | 				<configuration>
210 | 					<skipTests>false</skipTests>
211 | 				</configuration>
212 | 			</plugin>
213 |
214 |
215 | 			<plugin>
216 | 				<groupId>org.scalatest</groupId>
217 | 				<artifactId>scalatest-maven-plugin</artifactId>
218 | 				<version>1.0</version>
219 | 				<configuration>
220 | 					<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
221 | 					<junitxml>.</junitxml>
222 | 					<filereports>WDF TestSuite.txt</filereports>
223 | 				</configuration>
224 | 				<executions>
225 | 					<execution>
226 | 						<id>test</id>
227 | 						<goals>
228 | 							<goal>test</goal>
229 | 						</goals>
230 | 					</execution>
231 | 				</executions>
232 | 			</plugin>
233 |
234 |
235 | 			<plugin>
236 | 				<artifactId>maven-assembly-plugin</artifactId>
237 | 				<version>2.4.1</version>
238 | 				<configuration>
239 | 					<descriptorRefs>
240 | 						<descriptorRef>jar-with-dependencies</descriptorRef>
241 | 					</descriptorRefs>
242 | 				</configuration>
243 | 				<executions>
244 | 					<execution>
245 | 						<id>make-assembly</id>
246 | 						<phase>package</phase>
247 | 						<goals>
248 | 							<goal>single</goal>
249 | 						</goals>
250 | 					</execution>
251 | 				</executions>
252 | 			</plugin>
253 | 		</plugins>
254 | 	</build>
255 | </project>
--------------------------------------------------------------------------------
/sparkutils/src/main/scala/com/saurav/Utilities/OrderedProperties.java:
--------------------------------------------------------------------------------
1 | package com.saurav.Utilities;
2 |
3 | import java.util.Enumeration;
4 | import java.util.Properties;
5 | import java.util.Vector;
6 | /** Properties subclass that preserves the insertion order of keys (re-putting a key moves it to the end). */
7 | @SuppressWarnings("serial")
8 | public class OrderedProperties extends Properties {
9 |
10 | @SuppressWarnings("rawtypes")
11 | public OrderedProperties() {
12 | super();
13 | _names = new Vector();
14 | }
15 |
16 | @SuppressWarnings({ "unchecked", "rawtypes" })
17 | public Enumeration propertyNames() {
18 | return _names.elements();
19 | }
20 |
21 | @SuppressWarnings("unchecked")
22 | public Object put(Object key, Object value) {
23 | if (_names.contains(key)) {
24 | _names.remove(key);
25 | }
26 |
27 | _names.add(key);
28 |
29 | return super.put(key, value);
30 | }
31 |
32 | public Object remove(Object key) {
33 | _names.remove(key);
34 |
35 | return super.remove(key);
36 | }
37 |
38 | @SuppressWarnings("rawtypes")
39 | private Vector _names;
40 |
41 | }
--------------------------------------------------------------------------------
/sparkutils/src/main/scala/com/saurav/Utilities/Utilities.scala:
--------------------------------------------------------------------------------
1 | package com.saurav.Utilities
2 |
3 | import java.net.URI
4 |
5 | import scala.util.Failure
6 | import scala.util.Success
7 | import scala.util.Try
8 | import org.apache.hadoop.conf.Configuration
9 | import org.apache.hadoop.fs.FileSystem
10 | import org.apache.hadoop.fs.Path
11 | import org.apache.log4j.Logger
12 | import org.apache.spark.SparkEnv
13 | import org.apache.spark.sql.DataFrame
14 | import org.apache.spark.sql.SparkSession
15 | import org.apache.spark.sql.functions.col
16 | import org.apache.spark.sql.functions.length
17 | import org.apache.spark.sql.functions.lit
18 | import org.apache.spark.sql.functions.regexp_replace
19 | import org.apache.spark.sql.functions.trim
20 | import org.apache.spark.sql.functions.udf
21 | import org.joda.time.DateTime
22 | import org.joda.time.format.DateTimeFormat
23 | import org.joda.time.format.DateTimeFormatter
24 | import org.apache.spark.sql.functions.broadcast
25 |
26 | object Utilities {
27 |
28 | /**
29 | *
30 | * Logger will be used for application specific logs
31 | *
32 | */
33 | val logger = Logger.getLogger(getClass.getName.replaceAll("\\$$", ""))
34 | def caller = Thread.currentThread.getStackTrace()(3).getClassName.replaceAll("\\$$", "")
35 |
36 | //Trim Service
37 |
38 | /**
39 | * Trimming Service will trim data based on Metadata Table
40 | * @param inputDF : Input Dataframe
41 | * @param schemaList : Schema based on the metadata table for trimming
42 | * @return Trimmed Dataframe
43 | */
44 | def trimDF(inputDF: DataFrame, schemaList: Array[String]): DataFrame = {
45 | var trimmedDf: DataFrame = inputDF
46 | try {
47 | for (columnDetails <- schemaList) {
48 | val columnName = columnDetails.split('|')(0)
49 | trimmedDf = trimmedDf.withColumn(columnName, trim(col(columnName)))
50 | }
51 | } catch {
52 | case e: Exception =>
53 | logger.error("Unable to trim DataFrame, please check yarn logs with applicationId " + SparkEnv.get.conf.getAppId)
54 | e.printStackTrace()
55 | throw e
56 |
57 | }
58 | return trimmedDf
59 |
60 | }
61 |
62 | /**
63 | * Service replaces regex matches in the columns based on the regex specified in the metadata table
64 | * @param inputDF : Input Dataframe
65 | * @param columnRegexList : Columns and Regex which need to be replaced
66 | * @return : Dataframe
67 | */
68 | def regexReplacePlatformService(inputDF: DataFrame, columnRegexList: Array[String]): DataFrame = {
69 | var regexReplaceDF: DataFrame = inputDF
70 | try {
71 | for (columnRegexDetails <- columnRegexList) {
72 |
73 | val columnName = columnRegexDetails.split('|')(0)
74 | val colRegex = columnRegexDetails.split('|')(1)
75 | regexReplaceDF = regexReplaceDF.withColumn(columnName, regexp_replace(col(columnName), colRegex, ""))
76 |
77 | }
78 | } catch {
79 | case e: Exception =>
80 | logger.error("please check yarn logs with applicationId " + SparkEnv.get.conf.getAppId)
81 | e.printStackTrace()
82 | throw e
83 |
84 | }
85 | return regexReplaceDF
86 |
87 | }
88 |
89 | /**
90 | * Extracts the time component from the most recently modified partition directory under the input path
91 | * @param inputPath : Input Path
92 | * @param hadoopConfiguration : Hadoop Configuration
93 | * @return : Value after '=' in the latest directory name, as String
94 | */
95 | def extractTimeComponent(inputPath: String, hadoopConfiguration: Configuration): String = {
96 | val map = scala.collection.mutable.Map[String, Long]()
97 | val fs = FileSystem.get(new URI(inputPath), hadoopConfiguration)
98 | val status = fs.listStatus(new Path(inputPath))
99 | status.foreach(x => if (x.isDirectory()) (map(x.getPath.toString()) = x.getModificationTime))
100 | val lstMap = map.toList.sortWith((x, y) => x._2 > y._2)
101 | val hd = lstMap.head
102 | val key = hd._1
103 | val splitStr = key.split("/").last
104 | splitStr.split("=").last
105 | }
106 |
107 | /**
108 | * getCurrentTimestamp function returns the current timestamp in yyyy-MM-dd HH:mm:ss format
109 | * @return Time stamp in string format
110 | */
111 | def getCurrentTimestamp(): String = {
112 | val ISOFormatGeneration: DateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
113 | val now: DateTime = new org.joda.time.DateTime()
114 | ISOFormatGeneration.print(now)
115 | }
116 |
117 | /**
118 | * Utility method to read a properties file from HDFS, preserving key order
119 | * @param inputPath - Input path of the Properties file
120 | * @param hdfsUri - HDFS Uri
121 | * @return - OrderedProperties loaded from the file
122 | */
123 |
124 | def readProperties(inputPath: String, hdfsUri: Option[String] = None): OrderedProperties = {
125 | val logger = Logger.getLogger(getClass.getName)
126 | logger.info("READING PROPERTIES FILE @@@@@@@@@@@@@@@@@@@@@@")
127 | val hdpConf = new Configuration
128 | hdfsUri match {
129 | case Some(hdfsUri) =>
130 | hdpConf.set("fs.defaultFS", hdfsUri)
131 | case _ =>
132 | }
133 | val fs = FileSystem.get(hdpConf)
134 | Try {
135 | logger.info("LOADING PROPERTIES FILE @@@@@@@@@@@@@@@@@@@@@@" + inputPath)
136 | val propFileInputStream = fs.open(new Path(inputPath))
137 | logger.info("READ PROPERTIES FILE @@@@@@@@@@@@@@@@@@@@@@")
138 | var properties = new OrderedProperties()
139 | logger.info("CREATED PROPERTIES OBJECT @@@@@@@@@@@@@@@@@@@@@@")
140 | properties.load(propFileInputStream)
141 | logger.info("LOADED PROPERTIES FILE @@@@@@@@@@@@@@@@@@@@@@")
142 | properties
143 | } match {
144 | case Failure(fail) => { throw new Exception(fail.getCause) }
145 | case Success(succ) => succ
146 | }
147 | }
148 |
149 | /**
150 | * convertDateTime function takes all the dateFields which are needed to be converted to ISO format
151 | * @param validDF : Valid records which passed the validateColumnLength Check
152 | * @param dateFields : List of date columns with their formats ("column|format") to be converted
153 | * @return DataFrame with the date columns converted
154 | */
155 | def convertDateTime(validDF: DataFrame, dateFields: Array[String]): DataFrame = {
156 | var validDFDate: DataFrame = validDF;
157 | try {
158 | println("Inside convertDateTime")
159 | logger.info(" LOGGER se : Inside convertDateTime")
160 | val mydate_udfdd = udf(convertDateTimeUDF _)
161 | for (columns <- dateFields) {
162 | var dateField = columns.split('|')(0)
163 | var dateFormat = columns.split('|')(1)
164 | validDFDate = validDFDate.withColumn(dateField, mydate_udfdd(validDF { dateField }, lit(dateFormat)))
165 | }
166 |
167 | } catch {
168 | case e: Exception =>
169 | logger.error("Unable to CovertDateTime please check yarn logs for exact errror with applicationId " + SparkEnv.get.conf.getAppId)
170 | e.printStackTrace()
171 | throw e
172 | }
173 |
174 | return validDFDate
175 | }
176 |
177 | /**
178 | * Utility method converts an input date string from the given input format to yyyy-MM-dd HH:mm:ss
179 | * @param inputDate : Input date which needs to be converted
180 | * @param inputDateFormat: Format of the input date
181 | * @return : Converted date string
182 | */
183 | def convertDateTimeUDF(inputDate: String, inputDateFormat: String): String = {
184 | try {
185 | val isoFormatGeneration: DateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
186 | val dateFormatGeneration: DateTimeFormatter = DateTimeFormat.forPattern(inputDateFormat)
187 | val jodatime: DateTime = dateFormatGeneration.parseDateTime(inputDate);
188 | val dateString: String = isoFormatGeneration.print(jodatime);
189 | return dateString
190 | } catch {
191 | case e: Exception =>
192 | logger.error("Unable to CovertDateTime please check yarn logs for exact errror with applicationId " + SparkEnv.get.conf.getAppId)
193 | e.printStackTrace()
194 | throw e
195 | return ""
196 | }
197 |
198 | }
199 |
200 | //Case class to hold file path and its last modified timestamp
201 | case class DataList(filePath: String, mod_ts: Long)
202 |
203 | /**
204 | * getSortedFileObjects method lists the files in an HDFS folder
205 | * @param path : Input path of the folder
206 | * @param fs : Hadoop FileSystem
207 | * @return : List of DataList entries sorted by modification time
208 | */
209 |
210 | def getSortedFileObjects(spark: SparkSession, path: String, fs: FileSystem): List[DataList] = {
211 | fs.listStatus(new Path(path)).map { x => DataList(x.getPath.toString(), x.getModificationTime.toLong) }.toList.sortBy { files => files.mod_ts }
212 | }
213 |
214 | /**
215 | * sortedFiles method sorts files based on their modification timestamps.
216 | * @param path : Input path of the folder
217 | * @param last_modified_ts : Only files modified after this timestamp are returned
218 | * @return : List of file paths sorted by modification time
219 | */
220 |
221 | def sortedFiles(spark: SparkSession, path: String, last_modified_ts: Long, hdfsUri: String): List[String] = {
222 | val hdpConf = new Configuration
223 | if (hdfsUri != null && !hdfsUri.trim().isEmpty()) { hdpConf.set("fs.defaultFS", hdfsUri) }
224 | val fs = FileSystem.get(hdpConf)
225 | var finalList: List[DataList] = List[DataList]()
226 | var returnList: List[DataList] = List[DataList]()
227 | val listOfDirectories = fs.listStatus(new Path(path)).filter(_.isDirectory()).map { x => x.getPath.toString() }.toList
228 | if (listOfDirectories.length > 0) {
229 | listOfDirectories.foreach { directories =>
230 | var newList = getSortedFileObjects(spark, directories, fs)
231 | finalList = finalList ::: newList
232 | }
233 | returnList = finalList.sortBy(files => files.mod_ts).filter(_.mod_ts > last_modified_ts)
234 | } else {
235 | returnList = getSortedFileObjects(spark, path, fs)
236 | }
237 | val r = returnList.map { x => x.filePath }: List[String]
238 | return r
239 | }
240 |
241 | /**
242 | * @param spark : Spark session
243 | * @param incremental_file_list : List of files
244 | * @param hdfsUri : HDFS Uri
245 | * @return : Modification timestamp of the latest file
246 | */
247 | def getlatestfilets(spark: SparkSession, incremental_file_list: List[String], hdfsUri: String): Long = {
248 | val latestFile = incremental_file_list.last
249 | val hdpConf = new Configuration
250 | if (hdfsUri != null && !hdfsUri.trim().isEmpty()) { hdpConf.set("fs.defaultFS", hdfsUri) }
251 | val fs = FileSystem.get(hdpConf)
252 | val dirPath = new Path(latestFile)
253 | val filestatus = fs.listStatus(dirPath)
254 | val lst = filestatus.map(i => i.getModificationTime())
255 | if (lst.isEmpty) 0L else lst(0)
256 | }
257 |
258 |
259 |
260 | /** Derives a numeric sequence ID from the natural key string: each character is mapped to a digit via asDigit, concatenated, and truncated to a Long. */
261 | def sequenceIDGenerator(str: String): Long = {
262 | import java.math.BigInteger
263 | var number = ""
264 | str.map(_.asDigit).foreach { i =>
265 | number = number + i
266 | }
267 | new BigInteger(number).longValue()
268 | }
269 |
270 | /**
271 | * generateSequenceId method adds a surrogate sequence_ID column derived from the natural key columns
272 | * @param df : DataFrame
273 | * @param list : List[String]
274 | * @return : DataFrame
275 | */
276 | def generateSequenceId(df: DataFrame, list: List[String], spark: SparkSession): DataFrame = {
277 | var natural_key_columns = ""
278 | list.foreach { x =>
279 | natural_key_columns = natural_key_columns + "," + x
280 | }
281 | natural_key_columns = natural_key_columns.drop(1)
282 | spark.udf.register("udf_sequenceIDGenerator", sequenceIDGenerator _)
283 | df.createOrReplaceTempView("raw_table")
284 | val sequenceIDGenerator_sql = s"""select udf_sequenceIDGenerator($natural_key_columns) as sequence_ID,* from raw_table"""
285 | println("Sequence ID Query:" + sequenceIDGenerator_sql)
286 | val seqId_df = spark.sql(sequenceIDGenerator_sql)
287 | spark.catalog.dropTempView("raw_table")
288 | return seqId_df
289 | }
290 |
291 | /**
292 | * validateNotNull function checks whether the primary key columns contain
293 | * null values and, if any, routes those rows to the invalid DataFrame
294 | *
295 | * @param spark : Spark session
296 | * @param df : DataFrame which contains the input table data
297 | * @param primary_key_col_list : List of primary key columns read from the yaml file
298 | * @return List of DataFrames: valid records, then invalid records
299 | */
300 |
301 | def validateNotNull(spark: SparkSession, df: DataFrame, primary_key_col_list: List[String]): List[DataFrame] = {
302 | var primary_correct_col = ""
303 | var primary_incorrect_col = ""
304 |
305 | for (z <- primary_key_col_list) {
306 | primary_correct_col = primary_correct_col + "and length(trim(" + z + "))>0 and trim(" + z + ")<>'(null)' and trim(" + z + ") not like '%?'"
307 | primary_incorrect_col = primary_incorrect_col + "OR " + z + " is null OR length(trim(" + z + "))=0 OR trim(" + z + ")='(null)' OR trim(" + z + ") like '%?'"
308 | }
309 | df.show
310 | df.createOrReplaceTempView("null_data")
311 | val valid_select_query = "select * from null_data where " + (primary_correct_col.drop(3))
312 | val invalid_select_query = "select * from null_data where " + (primary_incorrect_col.drop(2))
313 |
314 | val validDF = spark.sql(valid_select_query)
315 | val invalidDF = spark.sql(invalid_select_query)
316 | List(validDF, invalidDF)
317 | }
318 |
319 | /**
320 | * validateColumnLength function checks length of a column.
321 | * For valid record it will store data in valid record
322 | * and for invalid records it will add two columns, "err_desc" and "err_col_name", describing the error, then union the invalid records across columns.
323 | *
324 | * @param inputDF : Input Dataframe from HIVE table
325 | * @param validDF : Valid records which passed the validateNotNull Check
326 | * @param schemaList : List of Hive table columns read from YAML file on which validation is to be performed
327 | * @return List[DataFrame] : List of dataframe of valid and invalid records.
328 | */
329 | def validateColumnLength(inputDF: DataFrame, validDF: DataFrame, schemaList: Array[String]): List[DataFrame] = {
330 | var invalidRecords: DataFrame = (inputDF.withColumn("err_desc", lit(""))).withColumn("err_col_name", lit("")).limit(0)
331 | var validRecords: DataFrame = validDF
332 | var invalidRecordColumnWise: DataFrame = null
333 | try {
334 | for (columnDetails <- schemaList) {
335 | var columnName = inputDF { columnDetails.split('|')(0) }
336 | if ((columnDetails.split('|')(1)).contains(",")) {
337 | validRecords = validRecords.filter(length(columnName) <= (columnDetails.split('|')(1).split(',')(0).toInt + 1))
338 | invalidRecordColumnWise = (inputDF.filter(length(columnName) > (columnDetails.split('|')(1).split(',')(0).toInt + 1)).withColumn("err_desc", lit("Length check failed"))).withColumn("err_col_name", lit(columnName.toString()))
339 | } else {
340 | validRecords = validRecords.filter(length(columnName) <= columnDetails.split('|')(1).toInt)
341 | invalidRecordColumnWise = (inputDF.filter(length(columnName) > columnDetails.split('|')(1).toInt).withColumn("err_desc", lit("Length check failed"))).withColumn("err_col_name", lit(columnName.toString()))
342 | }
343 | invalidRecords = invalidRecords.union(invalidRecordColumnWise)
344 | invalidRecordColumnWise = null
345 | }
346 | } catch {
347 | case e: Exception =>
348 | logger.error("Unable to validate column length, please check yarn logs with applicationId " + SparkEnv.get.conf.getAppId)
349 | e.printStackTrace()
350 | throw e
351 | }
352 | return List(validRecords, invalidRecords)
353 | }
354 |
355 | /**
356 | * This method performs SCD Type 2 CDC on incremental data without using MD5
357 | * @param spark : Spark session
358 | * @param incremental_df : DataFrame of delta (incremental) data to be processed
359 | * @param target_df : Historical data on which CDC needs to be processed
360 | * @param primary_key_col_list : Primary key column list
361 | * @param partition_col : Column used to identify the partitions of the target (historical) data on which CDC is performed
362 | * @param start_date : Optional Start Date (pass null to use the current timestamp)
363 | * @param end_date : Optional End Date (pass null to use the open-ended 9999-12-31 marker)
364 | * @return : DataFrame of processed data post CDC operation; exceptions are rethrown
365 | */
366 | def IncrementalType2NonMD5(
367 | spark: SparkSession,
368 | incremental_df: DataFrame,
369 | target_df: DataFrame,
370 | primary_key_col_list: Array[String],
371 | partition_col: String,
372 | start_date: String,
373 | end_date: String): DataFrame = {
374 | try {
375 | logger.info("Starting CDC")
376 | target_df.createOrReplaceTempView("TGT_DF")
377 | incremental_df.createOrReplaceTempView("INC_DF")
378 | val history_df = if (partition_col != "") spark.sql("select * from TGT_DF where " + partition_col + " in (select distinct " + partition_col + " from INC_DF)") else target_df
379 | logger.info("Identified target partitions for CDC")
380 | val dataSchema = history_df.columns
381 | val dfInnerJoin = history_df.filter(col("dl_load_flag")
382 | .eqNullSafe("Y")).as("L1").join(broadcast(incremental_df), primary_key_col_list)
383 | .select("L1.*").select(dataSchema.head, dataSchema.tail: _*)
384 | history_df.printSchema
385 | dfInnerJoin.printSchema
386 | val unchangedData = history_df.except(dfInnerJoin)
387 | if ((null != start_date) && (null != end_date)) {
388 | logger.info("Using custom start_date and end_date")
389 | val changedData = dfInnerJoin.drop("dl_load_flag", "end_date").withColumn("dl_load_flag", lit("N")).withColumn("end_date", lit(end_date)).select(dataSchema.head, dataSchema.tail: _*)
390 | val finalData = unchangedData.union(incremental_df.withColumn("dl_load_flag", lit("Y")).withColumn("start_date", lit(start_date)).withColumn("end_date", lit(end_date)).select(dataSchema.head, dataSchema.tail: _*))
391 | .union(changedData)
392 | logger.info("Completed CDC for Incremental Feed Type 2 Non-MD5 !!!")
393 | finalData
394 | } else {
395 | println("In else")
396 | val changedData = dfInnerJoin.drop("dl_load_flag", "end_date").withColumn("dl_load_flag", lit("N")).withColumn("end_date", lit(getCurrentTimestamp)).select(dataSchema.head, dataSchema.tail: _*)
397 | val finalData = unchangedData.union(incremental_df.withColumn("dl_load_flag", lit("Y")).withColumn("start_date", lit(getCurrentTimestamp)).withColumn("end_date", lit("9999-12-31 00:00:00")).select(dataSchema.head, dataSchema.tail: _*))
398 | .union(changedData)
399 | logger.info("Completed CDC for Incremental Feed Type 2 Non-MD5 !!!")
400 | finalData
401 | }
402 | } catch {
403 | case e: Exception =>
404 | logger.error("Please check yarn logs for exact error with applicationId " + SparkEnv.get.conf.getAppId)
405 | e.printStackTrace()
406 | throw e
407 |
408 | }
409 | }
410 |
411 | }
--------------------------------------------------------------------------------
/sparkutils/src/test/resources/incr1.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column
2 | 1,saurav,200,2018
3 | 4,priyanshu,400,2018
4 |
--------------------------------------------------------------------------------
/sparkutils/src/test/resources/regexReplaceTestData.csv:
--------------------------------------------------------------------------------
1 | Name,Id,Sal,JoiningDate,Dept
2 | Adit@ya,10\\\\1,100,01-09-2017,102
3 | Saroj,102,200,09-06-2015,103
4 | Veena,103,300,03-07-2011,104
5 | Saurabh,104,400,07-04-2010,103
6 | Priya@nshu,105,500,09-11-1990,109
7 | Priyanka,106,600,08-03-1997,105
8 | Bala,107,700,04-01-2019,102
9 | Gaurav,108,800,03-02-1994,109
10 | Shas@hankar,109,900,02-06-1993,106
11 | Aditya,101,700,08-03-1997,102
12 | Saroj,102,800,04-01-2019,103
13 | Veena,1\\\\03,100,08-02-1994,104
14 | Saurabh,104,200,02-06-1993,103
15 | Priya@nshu,105,300,01-09-2017,109
16 | Veena,115,1000,07-08-2015,104
17 | Sau@rabh,1\\\\1,1100,18-02-2012,103
18 | Priyanshu,12,1200,04-02-2017,109
19 | Priya@nshu,12,1200,04-02-2017,109
20 |
--------------------------------------------------------------------------------
/sparkutils/src/test/resources/res.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column,dl_load_flag
2 | 1,saurav,100,2018,N
3 | 2,gaurav,200,2018,Y
4 | 3,saroj,300,2018,N
5 | 1,saurav,200,2018,Y
6 | 4,priyanshu,400,2018,Y
7 |
--------------------------------------------------------------------------------
/sparkutils/src/test/resources/res1.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column,dl_load_flag
2 | 1,saurav,100,2018,Y
3 | 2,gaurav,200,2018,Y
4 | 3,saroj,300,2018,Y
5 |
--------------------------------------------------------------------------------
/sparkutils/src/test/resources/src.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column
2 | 1,saurav,100,2018
3 | 2,gaurav,200,2018
4 | 3,saroj,300,2018
--------------------------------------------------------------------------------
/sparkutils/src/test/resources/tgt.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column,dl_load_flag
2 | 1,saurav,100,2018,Y
3 | 2,gaurav,200,2018,Y
4 | 3,saroj,300,2018,Y
--------------------------------------------------------------------------------
/sparkutils/src/test/resources/tgt_blank.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column,dl_load_flag
--------------------------------------------------------------------------------
/sparkutils/src/test/scala/com/saurav/Utilities/UtilitiesTest.scala:
--------------------------------------------------------------------------------
1 | package com.saurav.Utilities
2 |
3 | import org.apache.spark.sql.SparkSession
4 | import org.apache.spark.sql.functions._
5 | import org.scalatest.{ FunSuite, Matchers }
14 |
15 | class UtilitiesTest extends FunSuite with Matchers {
16 |
17 | test("check CDC for Incr Feed SCD Type2 without Marker") {
18 |
19 | val spark = SparkSession.builder()
20 | .master("local[*]")
21 | .appName("SparkUtilityTests")
22 | .getOrCreate()
23 |
24 | val sourcedf = spark.read.format("csv").option("header", "true").load("src/test/resources/src.csv")
25 | val targetdf = spark.read.format("csv").option("header", "true").load("src/test/resources/tgt.csv")
26 | val targetdfblank = spark.read.format("csv").option("header", "true").load("src/test/resources/tgt_blank.csv")
27 | val incrdf = spark.read.format("csv").option("header", "true").load("src/test/resources/incr1.csv")
28 | val resdf = spark.read.format("csv").option("header", "true").load("src/test/resources/res.csv")
29 | val resdf1 = spark.read.format("csv").option("header", "true").load("src/test/resources/res1.csv")
30 |
31 | val primaryColumnList = Array("PK_Column")
32 | val partition_col = "Partition_Column"
33 | import org.apache.spark.sql.functions.lit
34 | val targetdf1blank = targetdfblank.withColumn("start_date", lit("X")).withColumn("end_date", lit("Y"))
35 | val targetdf1 = targetdf.withColumn("start_date", lit("X")).withColumn("end_date", lit("Y"))
36 | val targetdf1md5blank = targetdfblank.withColumn("md5value", lit("X")).withColumn("start_date", lit("X")).withColumn("end_date", lit("Y"))
37 | val cdcschema = sourcedf.drop("Partition_Column").columns
38 | val finalData = Utilities.IncrementalType2NonMD5(spark, incremental_df = sourcedf, target_df = targetdf1, primary_key_col_list = primaryColumnList, partition_col, start_date = "", end_date = "")
39 | val finalData1 = Utilities.IncrementalType2NonMD5(spark, incremental_df = incrdf, target_df = targetdf1, primary_key_col_list = primaryColumnList, partition_col = partition_col, start_date = "", end_date = "")
40 | // Check for first day feed: result should match res.csv (order-insensitive)
41 | finalData.drop("start_date", "end_date").collect().toSet shouldBe
42 | resdf.collect().toSet
43 | // Check for 2nd day feed: result should match res1.csv
44 | finalData1.drop("start_date", "end_date").collect().toSet shouldBe
45 | resdf1.collect().toSet
46 | }
47 |
48 | test("Regex Replace Test") {
49 |
50 | val spark = SparkSession
51 | .builder()
52 | .master("local[*]")
53 | .getOrCreate()
54 | val inputDF = spark.read.format("com.databricks.spark.csv")
55 | .option("header", "true")
56 | .option("inferSchema", "true").
57 | load("src/test/resources/regexReplaceTestData.csv")
58 |
59 | val readDF = spark.read.format("csv").option("header", "true").load("src/test/resources/regexReplaceTestData.csv")
60 | import spark.implicits._
61 | var regexReplacedDF = Utilities.regexReplacePlatformService(inputDF, Array("Name|@"))
62 | regexReplacedDF.filter(col("Name").contains("@")).count() shouldBe 0
63 |
64 | }
65 |
66 | }
--------------------------------------------------------------------------------
/sparkutils/target/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/.DS_Store
--------------------------------------------------------------------------------
/sparkutils/target/classes.2061037500.timestamp:
--------------------------------------------------------------------------------
1 | .
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$2.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$3.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$3.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$4.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$4.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$5.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$5.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$6.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$6.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$7.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$7.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$convertDateTime$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$convertDateTime$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$extractTimeComponent$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$extractTimeComponent$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$generateSequenceId$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$generateSequenceId$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$generateSequenceId$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$generateSequenceId$2.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$getSortedFileObjects$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$getSortedFileObjects$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$getSortedFileObjects$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$getSortedFileObjects$2.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$regexReplacePlatformService$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$regexReplacePlatformService$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sequenceIDGenerator$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sequenceIDGenerator$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sequenceIDGenerator$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sequenceIDGenerator$2.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sortedFiles$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sortedFiles$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sortedFiles$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sortedFiles$2.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sortedFiles$3.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$sortedFiles$3.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$trimDF$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$trimDF$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$validateColumnLength$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$validateColumnLength$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$validateNotNull$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$anonfun$validateNotNull$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$typecreator1$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$typecreator1$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$typecreator2$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$typecreator2$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$typecreator3$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$typecreator3$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$$typecreator4$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$$typecreator4$1.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$DataList$.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$DataList$.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities$DataList.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities$DataList.class
--------------------------------------------------------------------------------
/sparkutils/target/classes/com/saurav/Utilities/Utilities.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/classes/com/saurav/Utilities/Utilities.class
--------------------------------------------------------------------------------
/sparkutils/target/maven-archiver/pom.properties:
--------------------------------------------------------------------------------
1 | #Generated by Maven
2 | #Sun Jun 14 20:13:24 IST 2020
3 | version=0.0.1-SNAPSHOT
4 | groupId=com.saurav
5 | artifactId=sparkutils
6 |
--------------------------------------------------------------------------------
/sparkutils/target/surefire-reports/TEST-com.saurav.Utilities.UtilitiesTest.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/sparkutils/target/surefire-reports/TEST-org.scalatest.tools.DiscoverySuite-ca7fc693-2775-4b5e-a5ed-ee204c47d473.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/sparkutils/target/surefire-reports/TestSuite.txt:
--------------------------------------------------------------------------------
1 | Discovery starting.
2 | Discovery completed in 140 milliseconds.
3 | Run starting. Expected test count is: 2
4 | UtilitiesTest:
5 | - check CDC for Incr Feed SCD Type2 without Marker (7 seconds, 498 milliseconds)
6 | - Regex Replace Test (329 milliseconds)
7 | Run completed in 7 seconds, 995 milliseconds.
8 | Total number of tests run: 2
9 | Suites: completed 2, aborted 0
10 | Tests: succeeded 2, failed 0, canceled 0, ignored 0, pending 0
11 | All tests passed.
12 |
--------------------------------------------------------------------------------
/sparkutils/target/surefire-reports/com.saurav.Utilities.UtilitiesTest.txt:
--------------------------------------------------------------------------------
1 | -------------------------------------------------------------------------------
2 | Test set: com.saurav.Utilities.UtilitiesTest
3 | -------------------------------------------------------------------------------
4 | Tests run: 0, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.018 sec
5 |
--------------------------------------------------------------------------------
/sparkutils/target/surefire/surefire4033596394733709221tmp:
--------------------------------------------------------------------------------
1 | #surefire
2 | #Sun Jun 14 20:13:15 IST 2020
3 | user.dir=/Users/saurav/workspace1/sparkutils
4 | localRepository=/Users/saurav/.m2/repository
5 | basedir=/Users/saurav/workspace1/sparkutils
6 |
--------------------------------------------------------------------------------
/sparkutils/target/surefire/surefire4217104294633344082tmp:
--------------------------------------------------------------------------------
1 | #surefire
2 | #Sun Jun 14 20:13:15 IST 2020
3 | classPathUrl.88=/Users/saurav/.m2/repository/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar
4 | dirscanner.0.types=java.io.File|java.util.ArrayList|java.util.ArrayList
5 | classPathUrl.87=/Users/saurav/.m2/repository/org/glassfish/jersey/core/jersey-client/2.22.2/jersey-client-2.22.2.jar
6 | classPathUrl.86=/Users/saurav/.m2/repository/org/scala-lang/scala-compiler/2.11.0/scala-compiler-2.11.0.jar
7 | childDelegation=false
8 | classPathUrl.251=/Users/saurav/.m2/repository/io/spray/spray-json_2.11/1.3.3/spray-json_2.11-1.3.3.jar
9 | classPathUrl.85=/Users/saurav/.m2/repository/org/scala-lang/scalap/2.11.0/scalap-2.11.0.jar
10 | classPathUrl.250=/Users/saurav/.m2/repository/org/javassist/javassist/3.21.0-GA/javassist-3.21.0-GA.jar
11 | classPathUrl.84=/Users/saurav/.m2/repository/org/json4s/json4s-ast_2.11/3.2.11/json4s-ast_2.11-3.2.11.jar
12 | classPathUrl.83=/Users/saurav/.m2/repository/org/json4s/json4s-core_2.11/3.2.11/json4s-core_2.11-3.2.11.jar
13 | classPathUrl.82=/Users/saurav/.m2/repository/org/json4s/json4s-jackson_2.11/3.2.11/json4s-jackson_2.11-3.2.11.jar
14 | classPathUrl.81=/Users/saurav/.m2/repository/commons-net/commons-net/2.2/commons-net-2.2.jar
15 | classPathUrl.80=/Users/saurav/.m2/repository/org/roaringbitmap/RoaringBitmap/0.5.11/RoaringBitmap-0.5.11.jar
16 | classPathUrl.249=/Users/saurav/.m2/repository/com/google/guava/guava/20.0/guava-20.0.jar
17 | classPathUrl.248=/Users/saurav/.m2/repository/org/reflections/reflections/0.9.11/reflections-0.9.11.jar
18 | classPathUrl.247=/Users/saurav/.m2/repository/org/apache/spark/spark-streaming-kafka-0-10_2.11/2.2.0/spark-streaming-kafka-0-10_2.11-2.2.0.jar
19 | classPathUrl.246=/Users/saurav/.m2/repository/org/apache/spark/spark-sql-kafka-0-10_2.11/2.2.0/spark-sql-kafka-0-10_2.11-2.2.0.jar
20 | classPathUrl.245=/Users/saurav/.m2/repository/org/apache/zookeeper/zookeeper/3.4.9/zookeeper-3.4.9.jar
21 | classPathUrl.79=/Users/saurav/.m2/repository/com/github/luben/zstd-jni/1.3.2-2/zstd-jni-1.3.2-2.jar
22 | classPathUrl.244=/Users/saurav/.m2/repository/com/101tec/zkclient/0.10/zkclient-0.10.jar
23 | classPathUrl.78=/Users/saurav/.m2/repository/org/lz4/lz4-java/1.4.0/lz4-java-1.4.0.jar
24 | classPathUrl.243=/Users/saurav/.m2/repository/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar
25 | classPathUrl.77=/Users/saurav/.m2/repository/org/xerial/snappy/snappy-java/1.1.2.6/snappy-java-1.1.2.6.jar
26 | classPathUrl.242=/Users/saurav/.m2/repository/net/sf/jopt-simple/jopt-simple/5.0.3/jopt-simple-5.0.3.jar
27 | classPathUrl.76=/Users/saurav/.m2/repository/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar
28 | classPathUrl.241=/Users/saurav/.m2/repository/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar
29 | classPathUrl.75=/Users/saurav/.m2/repository/org/slf4j/slf4j-log4j12/1.7.16/slf4j-log4j12-1.7.16.jar
30 | classPathUrl.240=/Users/saurav/.m2/repository/org/apache/kafka/kafka-clients/0.10.2.1/kafka-clients-0.10.2.1.jar
31 | classPathUrl.74=/Users/saurav/.m2/repository/log4j/log4j/1.2.17/log4j-1.2.17.jar
32 | classPathUrl.73=/Users/saurav/.m2/repository/org/slf4j/jcl-over-slf4j/1.7.16/jcl-over-slf4j-1.7.16.jar
33 | classPathUrl.72=/Users/saurav/.m2/repository/org/slf4j/jul-to-slf4j/1.7.16/jul-to-slf4j-1.7.16.jar
34 | classPathUrl.71=/Users/saurav/.m2/repository/org/slf4j/slf4j-api/1.7.16/slf4j-api-1.7.16.jar
35 | classPathUrl.70=/Users/saurav/.m2/repository/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar
36 | classPathUrl.239=/Users/saurav/.m2/repository/org/apache/kafka/kafka_2.11/0.10.2.1/kafka_2.11-0.10.2.1.jar
37 | classPathUrl.238=/Users/saurav/.m2/repository/org/scala-lang/modules/scala-java8-compat_2.11/0.9.0/scala-java8-compat_2.11-0.9.0.jar
38 | classPathUrl.237=/Users/saurav/.m2/repository/com/microsoft/azure/qpid-proton-j-extensions/1.0.0/qpid-proton-j-extensions-1.0.0.jar
39 | classPathUrl.236=/Users/saurav/.m2/repository/org/apache/qpid/proton-j/0.28.1/proton-j-0.28.1.jar
40 | classPathUrl.235=/Users/saurav/.m2/repository/com/microsoft/azure/azure-eventhubs/1.1.0/azure-eventhubs-1.1.0.jar
41 | classPathUrl.69=/Users/saurav/.m2/repository/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar
42 | classPathUrl.234=/Users/saurav/.m2/repository/com/microsoft/azure/azure-eventhubs-spark_2.11/2.2.4/azure-eventhubs-spark_2.11-2.2.4.jar
43 | classPathUrl.68=/Users/saurav/.m2/repository/org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.jar
44 | classPathUrl.233=/Users/saurav/.m2/repository/com/github/scopt/scopt_2.11/3.3.0/scopt_2.11-3.3.0.jar
45 | classPathUrl.67=/Users/saurav/.m2/repository/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar
46 | classPathUrl.232=/Users/saurav/.m2/repository/com/microsoft/sqlserver/mssql-jdbc/6.4.0.jre8/mssql-jdbc-6.4.0.jre8.jar
47 | classPathUrl.66=/Users/saurav/.m2/repository/org/apache/curator/curator-framework/2.6.0/curator-framework-2.6.0.jar
48 | classPathUrl.231=/Users/saurav/.m2/repository/org/objenesis/objenesis/2.6/objenesis-2.6.jar
49 | classPathUrl.65=/Users/saurav/.m2/repository/org/apache/curator/curator-recipes/2.6.0/curator-recipes-2.6.0.jar
50 | classPathUrl.230=/Users/saurav/.m2/repository/net/bytebuddy/byte-buddy-agent/1.9.7/byte-buddy-agent-1.9.7.jar
51 | classPathUrl.199=/Users/saurav/.m2/repository/org/apache/httpcomponents/httpclient/4.5.4/httpclient-4.5.4.jar
52 | classPathUrl.64=/Users/saurav/.m2/repository/net/iharder/base64/2.3.8/base64-2.3.8.jar
53 | testClassesDirectory=/Users/saurav/workspace1/sparkutils/target/test-classes
54 | classPathUrl.198=/Users/saurav/.m2/repository/net/hydromatic/eigenbase-properties/1.1.5/eigenbase-properties-1.1.5.jar
55 | classPathUrl.63=/Users/saurav/.m2/repository/com/jamesmurty/utils/java-xmlbuilder/1.1/java-xmlbuilder-1.1.jar
56 | classPathUrl.197=/Users/saurav/.m2/repository/org/apache/calcite/calcite-linq4j/1.2.0-incubating/calcite-linq4j-1.2.0-incubating.jar
57 | classPathUrl.62=/Users/saurav/.m2/repository/org/bouncycastle/bcprov-jdk15on/1.52/bcprov-jdk15on-1.52.jar
58 | classPathUrl.196=/Users/saurav/.m2/repository/org/apache/calcite/calcite-core/1.2.0-incubating/calcite-core-1.2.0-incubating.jar
59 | classPathUrl.61=/Users/saurav/.m2/repository/javax/activation/activation/1.1.1/activation-1.1.1.jar
60 | classPathUrl.195=/Users/saurav/.m2/repository/org/apache/calcite/calcite-avatica/1.2.0-incubating/calcite-avatica-1.2.0-incubating.jar
61 | classPathUrl.60=/Users/saurav/.m2/repository/org/apache/httpcomponents/httpcore/4.4.1/httpcore-4.4.1.jar
62 | classPathUrl.194=/Users/saurav/.m2/repository/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar
63 | classPathUrl.193=/Users/saurav/.m2/repository/javax/transaction/jta/1.1/jta-1.1.jar
64 | classPathUrl.192=/Users/saurav/.m2/repository/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar
65 | classPathUrl.191=/Users/saurav/.m2/repository/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar
66 | classPathUrl.190=/Users/saurav/.m2/repository/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar
67 | classPathUrl.229=/Users/saurav/.m2/repository/net/bytebuddy/byte-buddy/1.9.7/byte-buddy-1.9.7.jar
68 | classPathUrl.228=/Users/saurav/.m2/repository/org/mockito/mockito-core/2.24.0/mockito-core-2.24.0.jar
69 | classPathUrl.227=/Users/saurav/.m2/repository/javax/mail/mail/1.4.5/mail-1.4.5.jar
70 | classPathUrl.226=/Users/saurav/.m2/repository/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1.jar
71 | classPathUrl.225=/Users/saurav/.m2/repository/org/codehaus/jackson/jackson-xc/1.9.2/jackson-xc-1.9.2.jar
72 | classPathUrl.59=/Users/saurav/.m2/repository/net/java/dev/jets3t/jets3t/0.9.4/jets3t-0.9.4.jar
73 | classPathUrl.224=/Users/saurav/.m2/repository/org/codehaus/jackson/jackson-jaxrs/1.9.2/jackson-jaxrs-1.9.2.jar
74 | classPathUrl.58=/Users/saurav/.m2/repository/org/apache/spark/spark-unsafe_2.11/2.3.0/spark-unsafe_2.11-2.3.0.jar
75 | classPathUrl.223=/Users/saurav/.m2/repository/javax/xml/stream/stax-api/1.0-2/stax-api-1.0-2.jar
76 | classPathUrl.57=/Users/saurav/.m2/repository/org/apache/spark/spark-network-shuffle_2.11/2.3.0/spark-network-shuffle_2.11-2.3.0.jar
77 | classPathUrl.222=/Users/saurav/.m2/repository/javax/xml/bind/jaxb-api/2.2.2/jaxb-api-2.2.2.jar
78 | classPathUrl.56=/Users/saurav/.m2/repository/org/apache/spark/spark-network-common_2.11/2.3.0/spark-network-common_2.11-2.3.0.jar
79 | classPathUrl.221=/Users/saurav/.m2/repository/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar
80 | classPathUrl.55=/Users/saurav/.m2/repository/com/fasterxml/jackson/core/jackson-annotations/2.6.7/jackson-annotations-2.6.7.jar
81 | classPathUrl.220=/Users/saurav/.m2/repository/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar
82 | classPathUrl.189=/Users/saurav/.m2/repository/org/datanucleus/datanucleus-rdbms/3.2.9/datanucleus-rdbms-3.2.9.jar
83 | classPathUrl.54=/Users/saurav/.m2/repository/com/fasterxml/jackson/core/jackson-core/2.6.7/jackson-core-2.6.7.jar
84 | classPathUrl.188=/Users/saurav/.m2/repository/org/datanucleus/datanucleus-api-jdo/3.2.6/datanucleus-api-jdo-3.2.6.jar
85 | classPathUrl.53=/Users/saurav/.m2/repository/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar
86 | classPathUrl.187=/Users/saurav/.m2/repository/commons-cli/commons-cli/1.2/commons-cli-1.2.jar
87 | classPathUrl.52=/Users/saurav/.m2/repository/org/apache/spark/spark-kvstore_2.11/2.3.0/spark-kvstore_2.11-2.3.0.jar
88 | classPathUrl.186=/Users/saurav/.m2/repository/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar
89 | classPathUrl.51=/Users/saurav/.m2/repository/org/apache/spark/spark-launcher_2.11/2.3.0/spark-launcher_2.11-2.3.0.jar
90 | classPathUrl.185=/Users/saurav/.m2/repository/org/spark-project/hive/hive-metastore/1.2.1.spark2/hive-metastore-1.2.1.spark2.jar
91 | classPathUrl.50=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-annotations/2.6.5/hadoop-annotations-2.6.5.jar
92 | classPathUrl.184=/Users/saurav/.m2/repository/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar
93 | classPathUrl.183=/Users/saurav/.m2/repository/stax/stax-api/1.0.1/stax-api-1.0.1.jar
94 | classPathUrl.182=/Users/saurav/.m2/repository/org/iq80/snappy/snappy/0.2/snappy-0.2.jar
95 | classPathUrl.181=/Users/saurav/.m2/repository/com/googlecode/javaewah/JavaEWAH/0.3.2/JavaEWAH-0.3.2.jar
96 | classPathUrl.180=/Users/saurav/.m2/repository/org/antlr/ST4/4.0.4/ST4-4.0.4.jar
97 | classPathUrl.219=/Users/saurav/.m2/repository/com/sun/jersey/jersey-json/1.13/jersey-json-1.13.jar
98 | classPathUrl.218=/Users/saurav/.m2/repository/javax/inject/javax.inject/1/javax.inject-1.jar
99 | classPathUrl.217=/Users/saurav/.m2/repository/com/sun/jersey/jersey-core/1.13/jersey-core-1.13.jar
100 | classPathUrl.216=/Users/saurav/.m2/repository/com/sun/jersey/jersey-client/1.13/jersey-client-1.13.jar
101 | classPathUrl.215=/Users/saurav/.m2/repository/com/microsoft/windowsazure/microsoft-windowsazure-api/0.4.6/microsoft-windowsazure-api-0.4.6.jar
102 | classPathUrl.49=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-jobclient/2.6.5/hadoop-mapreduce-client-jobclient-2.6.5.jar
103 | classPathUrl.214=/Users/saurav/.m2/repository/org/scalaj/scalaj-http_2.11/2.2.1/scalaj-http_2.11-2.2.1.jar
104 | classPathUrl.48=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-yarn-common/2.6.5/hadoop-yarn-common-2.6.5.jar
105 | classPathUrl.213=/Users/saurav/.m2/repository/com/databricks/spark-xml_2.11/0.4.1/spark-xml_2.11-0.4.1.jar
106 | classPathUrl.47=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-core/2.6.5/hadoop-mapreduce-client-core-2.6.5.jar
107 | classPathUrl.212=/Users/saurav/.m2/repository/org/joda/joda-convert/1.8.1/joda-convert-1.8.1.jar
108 | classPathUrl.46=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-yarn-api/2.6.5/hadoop-yarn-api-2.6.5.jar
109 | classPathUrl.211=/Users/saurav/.m2/repository/com/typesafe/config/1.2.1/config-1.2.1.jar
110 | classPathUrl.45=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-shuffle/2.6.5/hadoop-mapreduce-client-shuffle-2.6.5.jar
111 | classPathUrl.210=/Users/saurav/.m2/repository/org/apache/spark/spark-streaming_2.11/2.3.0/spark-streaming_2.11-2.3.0.jar
112 | classPathUrl.179=/Users/saurav/.m2/repository/antlr/antlr/2.7.7/antlr-2.7.7.jar
113 | classPathUrl.44=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-yarn-server-common/2.6.5/hadoop-yarn-server-common-2.6.5.jar
114 | classPathUrl.178=/Users/saurav/.m2/repository/org/antlr/stringtemplate/3.2.1/stringtemplate-3.2.1.jar
115 | classPathUrl.43=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-yarn-client/2.6.5/hadoop-yarn-client-2.6.5.jar
116 | classPathUrl.177=/Users/saurav/.m2/repository/org/antlr/antlr-runtime/3.4/antlr-runtime-3.4.jar
117 | classPathUrl.42=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-common/2.6.5/hadoop-mapreduce-client-common-2.6.5.jar
118 | classPathUrl.176=/Users/saurav/.m2/repository/log4j/apache-log4j-extras/1.2.17/apache-log4j-extras-1.2.17.jar
119 | classPathUrl.41=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-mapreduce-client-app/2.6.5/hadoop-mapreduce-client-app-2.6.5.jar
120 | classPathUrl.175=/Users/saurav/.m2/repository/javolution/javolution/5.5.1/javolution-5.5.1.jar
121 | classPathUrl.40=/Users/saurav/.m2/repository/xml-apis/xml-apis/1.3.04/xml-apis-1.3.04.jar
122 | classPathUrl.174=/Users/saurav/.m2/repository/commons-io/commons-io/2.4/commons-io-2.4.jar
123 | classPathUrl.173=/Users/saurav/.m2/repository/org/spark-project/hive/hive-exec/1.2.1.spark2/hive-exec-1.2.1.spark2.jar
124 | classPathUrl.172=/Users/saurav/.m2/repository/com/twitter/parquet-hadoop-bundle/1.6.0/parquet-hadoop-bundle-1.6.0.jar
125 | classPathUrl.171=/Users/saurav/.m2/repository/org/apache/spark/spark-hive_2.11/2.3.0/spark-hive_2.11-2.3.0.jar
126 | classPathUrl.170=/Users/saurav/.m2/repository/com/vlkan/flatbuffers/1.2.0-3f79e055/flatbuffers-1.2.0-3f79e055.jar
127 | report.2=org.apache.maven.surefire.report.XMLReporter
128 | enableAssertions=true
129 | classPathUrl.209=/Users/saurav/.m2/repository/org/apache/spark/spark-avro_2.11/2.4.0/spark-avro_2.11-2.4.0.jar
130 | report.1=org.apache.maven.surefire.report.BriefFileReporter
131 | classPathUrl.208=/Users/saurav/.m2/repository/com/databricks/spark-avro_2.11/3.2.0/spark-avro_2.11-3.2.0.jar
132 | report.0=org.apache.maven.surefire.report.ForkingConsoleReporter
133 | classPathUrl.207=/Users/saurav/.m2/repository/org/apache/derby/derby/10.12.1.1/derby-10.12.1.1.jar
134 | classPathUrl.206=/Users/saurav/.m2/repository/org/apache/thrift/libfb303/0.9.3/libfb303-0.9.3.jar
135 | classPathUrl.205=/Users/saurav/.m2/repository/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar
136 | classPathUrl.39=/Users/saurav/.m2/repository/xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.jar
137 | classPathUrl.204=/Users/saurav/.m2/repository/org/datanucleus/datanucleus-core/3.2.10/datanucleus-core-3.2.10.jar
138 | classPathUrl.38=/Users/saurav/.m2/repository/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar
139 | classPathUrl.203=/Users/saurav/.m2/repository/org/jodd/jodd-core/3.5.2/jodd-core-3.5.2.jar
140 | classPathUrl.37=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-hdfs/2.6.5/hadoop-hdfs-2.6.5.jar
141 | classPathUrl.202=/Users/saurav/.m2/repository/joda-time/joda-time/2.9.3/joda-time-2.9.3.jar
142 | classPathUrl.36=/Users/saurav/.m2/repository/org/htrace/htrace-core/3.0.4/htrace-core-3.0.4.jar
143 | classPathUrl.201=/Users/saurav/.m2/repository/commons-codec/commons-codec/1.10/commons-codec-1.10.jar
144 | classPathUrl.35=/Users/saurav/.m2/repository/org/apache/curator/curator-client/2.6.0/curator-client-2.6.0.jar
145 | classPathUrl.200=/Users/saurav/.m2/repository/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar
146 | classPathUrl.169=/Users/saurav/.m2/repository/com/carrotsearch/hppc/0.7.2/hppc-0.7.2.jar
147 | classPathUrl.34=/Users/saurav/.m2/repository/org/apache/directory/api/api-util/1.0.0-M20/api-util-1.0.0-M20.jar
148 | isTrimStackTrace=true
149 | classPathUrl.168=/Users/saurav/.m2/repository/org/apache/arrow/arrow-memory/0.8.0/arrow-memory-0.8.0.jar
150 | classPathUrl.33=/Users/saurav/.m2/repository/org/apache/directory/api/api-asn1-api/1.0.0-M20/api-asn1-api-1.0.0-M20.jar
151 | classPathUrl.167=/Users/saurav/.m2/repository/org/apache/arrow/arrow-format/0.8.0/arrow-format-0.8.0.jar
152 | classPathUrl.32=/Users/saurav/.m2/repository/org/apache/directory/server/apacheds-i18n/2.0.0-M15/apacheds-i18n-2.0.0-M15.jar
153 | classPathUrl.166=/Users/saurav/.m2/repository/org/apache/arrow/arrow-vector/0.8.0/arrow-vector-0.8.0.jar
154 | classPathUrl.31=/Users/saurav/.m2/repository/org/apache/directory/server/apacheds-kerberos-codec/2.0.0-M15/apacheds-kerberos-codec-2.0.0-M15.jar
155 | classPathUrl.165=/Users/saurav/.m2/repository/org/apache/parquet/parquet-jackson/1.8.2/parquet-jackson-1.8.2.jar
156 | classPathUrl.30=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-auth/2.6.5/hadoop-auth-2.6.5.jar
157 | classPathUrl.164=/Users/saurav/.m2/repository/org/apache/parquet/parquet-format/2.3.1/parquet-format-2.3.1.jar
158 | classPathUrl.163=/Users/saurav/.m2/repository/org/apache/parquet/parquet-hadoop/1.8.2/parquet-hadoop-1.8.2.jar
159 | classPathUrl.162=/Users/saurav/.m2/repository/org/apache/parquet/parquet-encoding/1.8.2/parquet-encoding-1.8.2.jar
160 | classPathUrl.161=/Users/saurav/.m2/repository/org/apache/parquet/parquet-common/1.8.2/parquet-common-1.8.2.jar
161 | classPathUrl.160=/Users/saurav/.m2/repository/org/apache/parquet/parquet-column/1.8.2/parquet-column-1.8.2.jar
162 | excludes0=**/*$*
163 | classPathUrl.29=/Users/saurav/.m2/repository/commons-beanutils/commons-beanutils-core/1.8.0/commons-beanutils-core-1.8.0.jar
164 | classPathUrl.28=/Users/saurav/.m2/repository/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar
165 | classPathUrl.27=/Users/saurav/.m2/repository/commons-digester/commons-digester/1.8/commons-digester-1.8.jar
166 | classPathUrl.26=/Users/saurav/.m2/repository/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar
167 | classPathUrl.25=/Users/saurav/.m2/repository/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar
168 | classPathUrl.159=/Users/saurav/.m2/repository/org/apache/orc/orc-mapreduce/1.4.1/orc-mapreduce-1.4.1-nohive.jar
169 | classPathUrl.24=/Users/saurav/.m2/repository/xmlenc/xmlenc/0.52/xmlenc-0.52.jar
170 | classPathUrl.158=/Users/saurav/.m2/repository/io/airlift/aircompressor/0.8/aircompressor-0.8.jar
171 | classPathUrl.23=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-common/2.6.5/hadoop-common-2.6.5.jar
172 | classPathUrl.157=/Users/saurav/.m2/repository/commons-lang/commons-lang/2.6/commons-lang-2.6.jar
173 | classPathUrl.22=/Users/saurav/.m2/repository/org/apache/hadoop/hadoop-client/2.6.5/hadoop-client-2.6.5.jar
174 | classPathUrl.156=/Users/saurav/.m2/repository/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar
175 | classPathUrl.21=/Users/saurav/.m2/repository/org/apache/xbean/xbean-asm5-shaded/4.4/xbean-asm5-shaded-4.4.jar
176 | classPathUrl.155=/Users/saurav/.m2/repository/org/apache/orc/orc-core/1.4.1/orc-core-1.4.1-nohive.jar
177 | classPathUrl.20=/Users/saurav/.m2/repository/com/twitter/chill-java/0.8.4/chill-java-0.8.4.jar
178 | classPathUrl.154=/Users/saurav/.m2/repository/org/antlr/antlr4-runtime/4.7/antlr4-runtime-4.7.jar
179 | classPathUrl.153=/Users/saurav/.m2/repository/org/codehaus/janino/commons-compiler/3.0.8/commons-compiler-3.0.8.jar
180 | classPathUrl.152=/Users/saurav/.m2/repository/org/codehaus/janino/janino/3.0.8/janino-3.0.8.jar
181 | classPathUrl.151=/Users/saurav/.m2/repository/org/apache/spark/spark-catalyst_2.11/2.3.0/spark-catalyst_2.11-2.3.0.jar
182 | classPathUrl.150=/Users/saurav/.m2/repository/org/apache/spark/spark-sketch_2.11/2.3.0/spark-sketch_2.11-2.3.0.jar
183 | dirscanner.0=directoryScannerOptions
184 | dirscanner.0.params=/Users/saurav/workspace1/sparkutils/target/test-classes|[**/Test*.java, **/*Test.java, **/*TestCase.java]|[**/*$*]
185 | classPathUrl.19=/Users/saurav/.m2/repository/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar
186 | classPathUrl.18=/Users/saurav/.m2/repository/com/esotericsoftware/kryo-shaded/3.0.3/kryo-shaded-3.0.3.jar
187 | classPathUrl.17=/Users/saurav/.m2/repository/com/twitter/chill_2.11/0.8.4/chill_2.11-0.8.4.jar
188 | classPathUrl.16=/Users/saurav/.m2/repository/org/apache/avro/avro-ipc/1.7.7/avro-ipc-1.7.7-tests.jar
189 | classPathUrl.15=/Users/saurav/.m2/repository/org/apache/avro/avro-ipc/1.7.7/avro-ipc-1.7.7.jar
190 | classPathUrl.149=/Users/saurav/.m2/repository/com/univocity/univocity-parsers/2.5.9/univocity-parsers-2.5.9.jar
191 | classPathUrl.14=/Users/saurav/.m2/repository/org/apache/avro/avro-mapred/1.7.7/avro-mapred-1.7.7-hadoop2.jar
192 | classPathUrl.148=/Users/saurav/.m2/repository/org/apache/spark/spark-sql_2.11/2.3.0/spark-sql_2.11-2.3.0.jar
193 | classPathUrl.13=/Users/saurav/.m2/repository/org/tukaani/xz/1.0/xz-1.0.jar
194 | classPathUrl.9=/Users/saurav/.m2/repository/org/apache/avro/avro/1.7.7/avro-1.7.7.jar
195 | classPathUrl.147=/Users/saurav/.m2/repository/org/sonatype/plexus/plexus-cipher/1.4/plexus-cipher-1.4.jar
196 | classPathUrl.12=/Users/saurav/.m2/repository/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1.jar
197 | classPathUrl.8=/Users/saurav/.m2/repository/org/apache/spark/spark-core_2.11/2.3.0/spark-core_2.11-2.3.0.jar
198 | classPathUrl.146=/Users/saurav/.m2/repository/org/sonatype/plexus/plexus-sec-dispatcher/1.4/plexus-sec-dispatcher-1.4.jar
199 | classPathUrl.11=/Users/saurav/.m2/repository/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3.jar
200 | classPathUrl.7=/Users/saurav/.m2/repository/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar
201 | classPathUrl.145=/Users/saurav/.m2/repository/org/codehaus/plexus/plexus-utils/3.0.20/plexus-utils-3.0.20.jar
202 | classPathUrl.10=/Users/saurav/.m2/repository/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar
203 | classPathUrl.6=/Users/saurav/.m2/repository/org/scala-lang/modules/scala-xml_2.11/1.0.5/scala-xml_2.11-1.0.5.jar
204 | classPathUrl.144=/Users/saurav/.m2/repository/org/apache/maven/maven-model/2.2.1/maven-model-2.2.1.jar
205 | classPathUrl.5=/Users/saurav/.m2/repository/org/scalatest/scalatest_2.11/3.0.1/scalatest_2.11-3.0.1.jar
206 | classPathUrl.143=/Users/saurav/.m2/repository/org/apache/maven/maven-repository-metadata/2.2.1/maven-repository-metadata-2.2.1.jar
207 | classPathUrl.4=/Users/saurav/.m2/repository/org/scala-lang/scala-reflect/2.11.8/scala-reflect-2.11.8.jar
208 | classPathUrl.142=/Users/saurav/.m2/repository/org/apache/maven/maven-artifact/2.2.1/maven-artifact-2.2.1.jar
209 | classPathUrl.3=/Users/saurav/.m2/repository/org/scalactic/scalactic_2.11/3.0.1/scalactic_2.11-3.0.1.jar
210 | classPathUrl.141=/Users/saurav/.m2/repository/classworlds/classworlds/1.1-alpha-2/classworlds-1.1-alpha-2.jar
211 | classPathUrl.2=/Users/saurav/.m2/repository/org/scala-lang/scala-library/2.11.11/scala-library-2.11.11.jar
212 | classPathUrl.140=/Users/saurav/.m2/repository/junit/junit/3.8.1/junit-3.8.1.jar
213 | classPathUrl.1=/Users/saurav/workspace1/sparkutils/target/classes
214 | classPathUrl.0=/Users/saurav/workspace1/sparkutils/target/test-classes
215 | failIfNoTests=false
216 | reportsDirectory=/Users/saurav/workspace1/sparkutils/target/surefire-reports
217 | classPathUrl.139=/Users/saurav/.m2/repository/org/codehaus/plexus/plexus-container-default/1.0-alpha-9-stable-1/plexus-container-default-1.0-alpha-9-stable-1.jar
218 | classPathUrl.138=/Users/saurav/.m2/repository/org/codehaus/plexus/plexus-interpolation/1.11/plexus-interpolation-1.11.jar
219 | classPathUrl.137=/Users/saurav/.m2/repository/org/apache/maven/maven-plugin-registry/2.2.1/maven-plugin-registry-2.2.1.jar
220 | classPathUrl.136=/Users/saurav/.m2/repository/backport-util-concurrent/backport-util-concurrent/3.1/backport-util-concurrent-3.1.jar
221 | classPathUrl.135=/Users/saurav/.m2/repository/org/apache/maven/wagon/wagon-provider-api/1.0-beta-6/wagon-provider-api-1.0-beta-6.jar
222 | classPathUrl.134=/Users/saurav/.m2/repository/org/apache/maven/maven-artifact-manager/2.2.1/maven-artifact-manager-2.2.1.jar
223 | classPathUrl.133=/Users/saurav/.m2/repository/org/apache/maven/maven-profile/2.2.1/maven-profile-2.2.1.jar
224 | classPathUrl.132=/Users/saurav/.m2/repository/org/apache/maven/maven-settings/2.2.1/maven-settings-2.2.1.jar
225 | classPathUrl.131=/Users/saurav/.m2/repository/org/apache/maven/maven-project/2.2.1/maven-project-2.2.1.jar
226 | classPathUrl.130=/Users/saurav/.m2/repository/org/apache/maven/maven-plugin-api/2.2.1/maven-plugin-api-2.2.1.jar
227 | includes2=**/*TestCase.java
228 | includes1=**/*Test.java
229 | includes0=**/Test*.java
230 | providerConfiguration=org.apache.maven.surefire.junit.JUnit3Provider
231 | classPathUrl.129=/Users/saurav/.m2/repository/org/apache/maven/plugins/maven-gpg-plugin/1.6/maven-gpg-plugin-1.6.jar
232 | classPathUrl.128=/Users/saurav/.m2/repository/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar
233 | classPathUrl.127=/Users/saurav/.m2/repository/com/nimbusds/nimbus-jose-jwt/8.19/nimbus-jose-jwt-8.19.jar
234 | classPathUrl.126=/Users/saurav/.m2/repository/com/nimbusds/lang-tag/1.5/lang-tag-1.5.jar
235 | classPathUrl.125=/Users/saurav/.m2/repository/net/minidev/json-smart/1.3.1/json-smart-1.3.1.jar
236 | classPathUrl.124=/Users/saurav/.m2/repository/org/apache/commons/commons-collections4/4.1/commons-collections4-4.1.jar
237 | classPathUrl.123=/Users/saurav/.m2/repository/com/github/stephenc/jcip/jcip-annotations/1.0-1/jcip-annotations-1.0-1.jar
238 | classPathUrl.122=/Users/saurav/.m2/repository/com/nimbusds/oauth2-oidc-sdk/5.18.1/oauth2-oidc-sdk-5.18.1.jar
239 | classPathUrl.121=/Users/saurav/.m2/repository/com/microsoft/azure/adal4j/1.2.0/adal4j-1.2.0.jar
240 | classPathUrl.120=/Users/saurav/.m2/repository/com/microsoft/azure/azure-sqldb-spark/1.0.2/azure-sqldb-spark-1.0.2.jar
241 | useManifestOnlyJar=true
242 | surefireClassPathUrl.1=/Users/saurav/.m2/repository/org/apache/maven/surefire/surefire-api/2.7/surefire-api-2.7.jar
243 | surefireClassPathUrl.0=/Users/saurav/.m2/repository/org/apache/maven/surefire/surefire-junit3/2.7/surefire-junit3-2.7.jar
244 | classPathUrl.119=/Users/saurav/.m2/repository/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar
245 | classPathUrl.118=/Users/saurav/.m2/repository/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar
246 | classPathUrl.117=/Users/saurav/.m2/repository/org/apache/spark/spark-tags_2.11/2.3.0/spark-tags_2.11-2.3.0.jar
247 | classPathUrl.116=/Users/saurav/.m2/repository/net/sf/py4j/py4j/0.10.6/py4j-0.10.6.jar
248 | classPathUrl.115=/Users/saurav/.m2/repository/net/razorvine/pyrolite/4.13/pyrolite-4.13.jar
249 | classPathUrl.114=/Users/saurav/.m2/repository/oro/oro/2.0.8/oro-2.0.8.jar
250 | classPathUrl.113=/Users/saurav/.m2/repository/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar
251 | classPathUrl.112=/Users/saurav/.m2/repository/com/fasterxml/jackson/module/jackson-module-paranamer/2.7.9/jackson-module-paranamer-2.7.9.jar
252 | classPathUrl.111=/Users/saurav/.m2/repository/com/fasterxml/jackson/module/jackson-module-scala_2.11/2.6.7.1/jackson-module-scala_2.11-2.6.7.1.jar
253 | classPathUrl.110=/Users/saurav/.m2/repository/com/fasterxml/jackson/core/jackson-databind/2.6.7.1/jackson-databind-2.6.7.1.jar
254 | testSuiteDefinitionTestSourceDirectory=/Users/saurav/workspace1/sparkutils/src/test/java
255 | classPathUrl.109=/Users/saurav/.m2/repository/io/dropwizard/metrics/metrics-graphite/3.1.5/metrics-graphite-3.1.5.jar
256 | classPathUrl.108=/Users/saurav/.m2/repository/io/dropwizard/metrics/metrics-json/3.1.5/metrics-json-3.1.5.jar
257 | classPathUrl.107=/Users/saurav/.m2/repository/io/dropwizard/metrics/metrics-jvm/3.1.5/metrics-jvm-3.1.5.jar
258 | classPathUrl.106=/Users/saurav/.m2/repository/io/dropwizard/metrics/metrics-core/3.1.5/metrics-core-3.1.5.jar
259 | classPathUrl.105=/Users/saurav/.m2/repository/com/clearspring/analytics/stream/2.7.0/stream-2.7.0.jar
260 | classPathUrl.104=/Users/saurav/.m2/repository/io/netty/netty/3.9.9.Final/netty-3.9.9.Final.jar
261 | classPathUrl.99=/Users/saurav/.m2/repository/org/glassfish/jersey/media/jersey-media-jaxb/2.22.2/jersey-media-jaxb-2.22.2.jar
262 | classPathUrl.103=/Users/saurav/.m2/repository/io/netty/netty-all/4.1.17.Final/netty-all-4.1.17.Final.jar
263 | classPathUrl.98=/Users/saurav/.m2/repository/org/glassfish/jersey/core/jersey-server/2.22.2/jersey-server-2.22.2.jar
264 | classPathUrl.102=/Users/saurav/.m2/repository/org/glassfish/jersey/containers/jersey-container-servlet-core/2.22.2/jersey-container-servlet-core-2.22.2.jar
265 | classPathUrl.97=/Users/saurav/.m2/repository/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar
266 | classPathUrl.101=/Users/saurav/.m2/repository/org/glassfish/jersey/containers/jersey-container-servlet/2.22.2/jersey-container-servlet-2.22.2.jar
267 | classPathUrl.96=/Users/saurav/.m2/repository/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.22.2/jersey-guava-2.22.2.jar
268 | classPathUrl.100=/Users/saurav/.m2/repository/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar
269 | classPathUrl.95=/Users/saurav/.m2/repository/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar
270 | classPathUrl.94=/Users/saurav/.m2/repository/org/glassfish/jersey/core/jersey-common/2.22.2/jersey-common-2.22.2.jar
271 | classPathUrl.93=/Users/saurav/.m2/repository/org/glassfish/hk2/hk2-locator/2.4.0-b34/hk2-locator-2.4.0-b34.jar
272 | classPathUrl.92=/Users/saurav/.m2/repository/org/glassfish/hk2/external/javax.inject/2.4.0-b34/javax.inject-2.4.0-b34.jar
273 | classPathUrl.91=/Users/saurav/.m2/repository/org/glassfish/hk2/external/aopalliance-repackaged/2.4.0-b34/aopalliance-repackaged-2.4.0-b34.jar
274 | classPathUrl.90=/Users/saurav/.m2/repository/org/glassfish/hk2/hk2-utils/2.4.0-b34/hk2-utils-2.4.0-b34.jar
275 | useSystemClassLoader=true
276 | classPathUrl.89=/Users/saurav/.m2/repository/org/glassfish/hk2/hk2-api/2.4.0-b34/hk2-api-2.4.0-b34.jar
277 |
--------------------------------------------------------------------------------
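The file above is one of surefire's auto-generated booter property files: the classPathUrl.N entries encode the forked test JVM's classpath (ordering lives in the numeric index, not in file order), includes0..2 and excludes0 mirror the plugin's default test-name patterns, and providerConfiguration names the JUnit 3 provider used to run the discovered classes. As a minimal sketch of decoding those indexed keys (the object name and command-line argument are illustrative, not part of this repo):

import java.io.FileInputStream
import java.util.Properties
import scala.collection.JavaConverters._

object BooterClasspathSketch {
  def main(args: Array[String]): Unit = {
    // args(0): path to one of the surefire*tmp booter files under target/surefire/
    val props = new Properties()
    val in = new FileInputStream(args(0))
    try props.load(in) finally in.close()

    // Property files are unordered, so the classpath order is recovered
    // from the numeric suffix of each classPathUrl.N key
    props.stringPropertyNames().asScala.toSeq
      .filter(_.startsWith("classPathUrl."))
      .sortBy(_.stripPrefix("classPathUrl.").toInt)
      .map(props.getProperty)
      .foreach(println)
  }
}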
/sparkutils/target/test-classes.-971118635.timestamp:
--------------------------------------------------------------------------------
1 | .
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/com/saurav/Utilities/UtilitiesTest$$anonfun$1.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/test-classes/com/saurav/Utilities/UtilitiesTest$$anonfun$1.class
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/com/saurav/Utilities/UtilitiesTest$$anonfun$2.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/test-classes/com/saurav/Utilities/UtilitiesTest$$anonfun$2.class
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/com/saurav/Utilities/UtilitiesTest.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sauravdev/SparkUtils/f8ae69f07175cb16288f48b6cc25ad2a71d0e742/sparkutils/target/test-classes/com/saurav/Utilities/UtilitiesTest.class
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/incr1.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column
2 | 1,saurav,200,2018
3 | 4,priyanshu,400,2018
4 |
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/regexReplaceTestData.csv:
--------------------------------------------------------------------------------
1 | Name,Id,Sal,JoiningDate,Dept
2 | Adit@ya,10\\\\1,100,01-09-2017,102
3 | Saroj,102,200,09-06-2015,103
4 | Veena,103,300,03-07-2011,104
5 | Saurabh,104,400,07-04-2010,103
6 | Priya@nshu,105,500,09-11-1990,109
7 | Priyanka,106,600,08-03-1997,105
8 | Bala,107,700,04-01-2019,102
9 | Gaurav,108,800,03-02-1994,109
10 | Shas@hankar,109,900,02-06-1993,106
11 | Aditya,101,700,08-03-1997,102
12 | Saroj,102,800,04-01-2019,103
13 | Veena,1\\\\03,100,08-02-1994,104
14 | Saurabh,104,200,02-06-1993,103
15 | Priya@nshu,105,300,01-09-2017,109
16 | Veena,115,1000,07-08-2015,104
17 | Sau@rabh,1\\\\1,1100,18-02-2012,103
18 | Priyanshu,12,1200,04-02-2017,109
19 | Priya@nshu,12,1200,04-02-2017,109
20 |
--------------------------------------------------------------------------------
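This fixture seeds deliberately dirty values, '@' characters inside Name and runs of backslashes inside Id, for the regex-replace utility's tests. A minimal Spark sketch of the kind of cleanup it appears to exercise (the object name is illustrative; only the column names come from the header above):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, regexp_replace}

object RegexReplaceSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("regexReplaceSketch")
      .master("local[*]")
      .getOrCreate()

    // Schema comes from the header row: Name,Id,Sal,JoiningDate,Dept
    val df = spark.read.option("header", "true")
      .csv("src/test/resources/regexReplaceTestData.csv")

    // Strip the '@' noise from Name and the backslash runs from Id
    val cleaned = df
      .withColumn("Name", regexp_replace(col("Name"), "@", ""))
      .withColumn("Id", regexp_replace(col("Id"), "\\\\", ""))

    cleaned.show(false)
    spark.stop()
  }
}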
/sparkutils/target/test-classes/res.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column,dl_load_flag
2 | 1,saurav,100,2018,N
3 | 2,gaurav,200,2018,Y
4 | 3,saroj,300,2018,N
5 | 1,saurav,200,2018,Y
6 | 4,priyanshu,400,2018,Y
7 |
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/res1.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column,dl_load_flag
2 | 1,saurav,100,2018,Y
3 | 2,gaurav,200,2018,Y
4 | 3,saroj,300,2018,Y
5 |
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/src.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column
2 | 1,saurav,100,2018
3 | 2,gaurav,200,2018
4 | 3,saroj,300,2018
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/tgt.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column,dl_load_flag
2 | 1,saurav,100,2018,Y
3 | 2,gaurav,200,2018,Y
4 | 3,saroj,300,2018,Y
--------------------------------------------------------------------------------
/sparkutils/target/test-classes/tgt_blank.csv:
--------------------------------------------------------------------------------
1 | PK_Column,Data_Col1,Data_Col2,Partition_Column,dl_load_flag
--------------------------------------------------------------------------------
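Read together, the remaining fixtures trace a flag-driven CDC merge: tgt.csv is the current target (every row flagged Y), incr1.csv is the incremental feed, and res.csv is the expected result, in which the superseded version of PK 1 is re-flagged N while the incoming rows land with Y. res1.csv and tgt_blank.csv cover the initial-load case, where the same merge against an empty target simply flags every source row Y. The utility's real signature is not visible from the compiled classes, so the following is only a sketch that reproduces res.csv from tgt.csv and incr1.csv (all names are illustrative):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, lit, when}

object CdcFlagSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("cdcFlagSketch")
      .master("local[*]")
      .getOrCreate()

    def read(path: String) =
      spark.read.option("header", "true").csv(path)

    val tgt  = read("src/test/resources/tgt.csv")   // current target, all flagged Y
    val incr = read("src/test/resources/incr1.csv") // incremental feed, no flag yet

    // Target rows whose key reappears in the feed are superseded: flip their
    // flag to N; untouched rows keep whatever flag they already carry
    val incomingKeys = incr.select("PK_Column").distinct()
      .withColumnRenamed("PK_Column", "incoming_pk")

    val expiredOrKept = tgt
      .join(incomingKeys, tgt("PK_Column") === col("incoming_pk"), "left_outer")
      .withColumn("dl_load_flag",
        when(col("incoming_pk").isNotNull, lit("N")).otherwise(col("dl_load_flag")))
      .drop("incoming_pk")

    // Incoming rows always enter as the active version
    val result = expiredOrKept.union(incr.withColumn("dl_load_flag", lit("Y")))

    result.show(false) // matches res.csv row-for-row, ordering aside
    spark.stop()
  }
}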