├── .gitignore ├── examples ├── scalding │ ├── data │ │ ├── .gitignore │ │ └── PackedAvroOutput.avro │ ├── project │ │ ├── build.properties │ │ ├── plugins.sbt │ │ ├── Build.scala │ │ ├── Dependencies.scala │ │ └── BuildSettings.scala │ ├── src │ │ └── main │ │ │ └── scala │ │ │ └── tutorial │ │ │ ├── models │ │ │ └── MyRecord.scala │ │ │ ├── JobRunner.scala │ │ │ ├── PackedAvroWriteJob.scala │ │ │ └── PackedAvroReadJob.scala │ └── README.md ├── spark │ ├── .gitignore │ ├── project │ │ ├── build.properties │ │ └── assembly.sbt │ ├── output │ │ └── twitter-wordcount-scala-spark-specific.tsv │ │ │ ├── _SUCCESS │ │ │ ├── ._SUCCESS.crc │ │ │ ├── part-00000 │ │ │ └── .part-00000.crc │ ├── twitter.avro │ ├── src │ │ └── main │ │ │ └── scala │ │ │ ├── twitter_schema.scala │ │ │ ├── MyKryoRegistrator.scala │ │ │ └── AvroSparkScala.scala │ ├── simple.sbt │ └── README.md ├── avro │ ├── project │ │ └── build.properties │ ├── src │ │ └── main │ │ │ ├── avro │ │ │ ├── AvroTypeProviderTest00.avro │ │ │ └── AvroTypeProviderTestNestedSchemaFile.avsc │ │ │ └── scala │ │ │ ├── AvroRecordExample.scala │ │ │ └── AvroTypeProviderExample.scala │ ├── README.md │ └── build.sbt ├── scavro │ ├── project │ │ └── build.properties │ ├── src │ │ └── main │ │ │ ├── avro │ │ │ ├── AvroTypeProviderTest00.avro │ │ │ └── AvroTypeProviderTestNestedSchemaFile.avsc │ │ │ └── scala │ │ │ ├── AvroRecordExample.scala │ │ │ └── AvroTypeProviderExample.scala │ ├── README.md │ └── build.sbt ├── scalavro │ ├── project │ │ └── build.properties │ ├── README.md │ ├── src │ │ └── main │ │ │ ├── avro │ │ │ └── twitter_schema.avsc │ │ │ └── scala │ │ │ └── Example.scala │ └── build.sbt ├── .gitignore └── README.md ├── project ├── build.properties └── Build.scala ├── tests └── src │ └── test │ ├── resources │ ├── AvroTypeProviderTest00.avro │ ├── AvroTypeProviderTest01.avro │ ├── AvroTypeProviderTest02.avro │ ├── AvroTypeProviderTest03.avro │ ├── AvroTypeProviderTest04.avro │ ├── AvroTypeProviderTest05.avro │ ├── 
AvroTypeProviderTest06.avro │ ├── AvroTypeProviderTest07.avro │ ├── AvroTypeProviderTest08.avro │ ├── AvroTypeProviderTest10.avro │ ├── AvroTypeProviderTest11.avro │ ├── AvroTypeProviderTest12.avro │ ├── AvroTypeProviderTest13.avro │ ├── AvroTypeProviderTest14.avro │ ├── AvroTypeProviderTest15.avro │ ├── AvroTypeProviderTest16.avro │ ├── AvroTypeProviderTest17.avro │ ├── AvroTypeProviderTest18.avro │ ├── AvroTypeProviderTest19.avro │ ├── AvroTypeProviderTest20.avro │ ├── AvroTypeProviderTest24.avro │ ├── AvroTypeProviderTest25.avro │ ├── AvroTypeProviderTest26.avro │ ├── AvroTypeProviderTest27.avro │ ├── AvroTypeProviderTest28.avro │ ├── AvroTypeProviderTest29.avro │ ├── AvroTypeProviderTest30.avro │ ├── AvroTypeProviderTest31.avro │ ├── AvroTypeProviderTest32.avro │ ├── AvroTypeProviderTest33.avro │ ├── AvroTypeProviderTest34.avro │ ├── AvroTypeProviderTest35.avro │ ├── AvroTypeProviderTest36.avro │ ├── AvroTypeProviderTest37.avro │ ├── AvroTypeProviderTest38.avro │ ├── AvroTypeProviderTest39.avro │ ├── AvroTypeProviderTest40.avro │ ├── AvroTypeProviderTest41.avro │ ├── AvroTypeProviderTest42.avro │ ├── AvroTypeProviderTest43.avro │ ├── AvroTypeProviderTest44.avro │ ├── AvroTypeProviderTest45.avro │ ├── AvroTypeProviderTest46.avro │ ├── AvroTypeProviderTest47.avro │ ├── AvroTypeProviderTest48.avro │ ├── AvroTypeProviderTest49.avro │ ├── AvroTypeProviderTest50.avro │ ├── AvroTypeProviderTest51.avro │ ├── AvroTypeProviderTest52.avro │ ├── AvroTypeProviderTest53.avro │ ├── AvroTypeProviderTest54.avro │ ├── AvroTypeProviderTest55.avro │ ├── AvroTypeProviderTest56.avro │ ├── AvroTypeProviderTest57.avro │ ├── AvroTypeProviderTest58.avro │ ├── AvroTypeProviderTest59.avro │ ├── AvroTypeProviderTest60.avro │ ├── AvroTypeProviderTest61.avro │ ├── AvroTypeProviderTest62.avro │ ├── AvroTypeProviderTest64.avro │ ├── AvroTypeProviderTest65.avro │ ├── AvroTypeProviderTest66.avro │ ├── AvroTypeProviderTest67.avro │ ├── AvroTypeProviderTest68.avro │ ├── 
AvroTypeProviderTestMap01.avro │ ├── AvroTypeProviderTestMap02.avro │ ├── AvroTypeProviderTestMap03.avro │ ├── AvroTypeProviderTestMap04.avro │ ├── AvroTypeProviderTestMap05.avro │ ├── AvroTypeProviderTestMap06.avro │ ├── AvroTypeProviderTestMap07.avro │ ├── AvroTypeProviderTestMap08.avro │ ├── AvroTypeProviderTestMap09.avro │ ├── AvroTypeProviderTestMap10.avro │ ├── AvroTypeProviderTestMap11.avro │ ├── AvroTypeProviderTestMap12.avro │ ├── AvroTypeProviderExtendedTest00.avro │ ├── AvroTypeProviderTestNoNamespace.avro │ ├── evolution │ │ ├── AvroRecordTestEvolution00.avro │ │ ├── AvroRecordTestEvolution01.avro │ │ └── AvroTypeProviderTestEvolution00.avro │ ├── AvroTypeProviderTestNoNamespaceNested.avro │ ├── AvroTypeProviderTestNoNamespaceOption.avro │ ├── AvroTypeProviderPreexistingCompanionTest00.avro │ ├── AvroTypeProviderTestNoNamespaceDeeplyNested.avro │ ├── AvroTypeProviderTestNoNamespaceDoubleNested.avro │ ├── default │ │ ├── AvroTypeProviderTestDefaultValue00.avsc │ │ ├── AvroTypeProviderTestDefaultValue02.avsc │ │ ├── AvroTypeProviderTestDefaultValue01.avsc │ │ ├── AvroTypeProviderTestDefaultValue03.avsc │ │ ├── AvroTypeProviderTestDefaultValue04.avsc │ │ ├── AvroTypeProviderTestDefaultValue05.avsc │ │ ├── AvroTypeProviderTestDefaultValue06.avsc │ │ ├── AvroTypeProviderTestDefaultValue08.avsc │ │ ├── AvroTypeProviderTestDefaultValue07.avsc │ │ ├── AvroTypeProviderTestDefaultValue10.avsc │ │ ├── AvroTypeProviderTestDefaultValue11.avsc │ │ ├── AvroTypeProviderTestDefaultValue14.avsc │ │ ├── AvroTypeProviderTestDefaultValue15.avsc │ │ ├── AvroTypeProviderTestDefaultValue09.avsc │ │ ├── AvroTypeProviderTestDefaultValue17.avsc │ │ ├── AvroTypeProviderTestDefaultValue16.avsc │ │ └── AvroTypeProviderTestDefaultValue18.avsc │ ├── AvroTypeProviderTestSchemaFile.avsc │ ├── AvroTypeProviderTestNestedSchemaFile.avsc │ ├── AvroTypeProviderExtendedCompanionTest00.avsc │ ├── AvroTypeProviderTestRelativeSchemaFilePath.avsc │ ├── AvroTypeProviderTestDifferentNamespace.avsc 
│ └── AvroTypeProviderTestSchemaUnion.avsc │ └── scala │ ├── AvroTypeProviderTests │ ├── AvroTypeProviderNoNamespaceTest.scala │ ├── AvroTypeProviderNestedSchemaFileTest.scala │ ├── AvroTypeProviderSchemaFileTest.scala │ ├── AvroTypeProviderRelativeSchemaFilePathTest.scala │ ├── AvroTypeProviderDifferentNamespaceTest.scala │ ├── AvroTypeProviderEvolutionTest.scala │ ├── AvroTypeProviderSchemaUnionTest.scala │ ├── AvroTypeProviderPreexistingCompanionTest.scala │ ├── datatypetests │ │ ├── AvroTypeProviderNestedPrimitivesTest.scala │ │ ├── AvroTypeProvider2ArityHomoTest.scala │ │ ├── AvroTypeProvider2ArityHeteroTest.scala │ │ ├── AvroTypeProvider2ArityHomoNestedTest.scala │ │ ├── AvroTypeProviderUserDefinedTypesTest.scala │ │ └── AvroTypeProviderPrimitivesTest.scala │ └── AvroTypeProviderProvideNamespaceTest.scala │ ├── AvroRecordTests │ ├── AvroRecordNoNamespaceTest.scala │ ├── AvroRecordDataFileStreamTest.scala │ ├── AvroRecordAlreadyExtendedTest.scala │ ├── AvroRecordEvolutionTest.scala │ ├── datatypetests │ │ ├── AvroRecordPrimitivesTest.scala │ │ ├── AvroRecordComplexTest.scala │ │ ├── AvroRecord2ArityHeteroTest.scala │ │ ├── AvroRecordNestedTest.scala │ │ ├── AvroRecord2ArityHomoTest.scala │ │ ├── AvroRecord2ArityHomoNestedTest.scala │ │ └── AvroRecordUserDefinedTypesTest.scala │ ├── AvroRecordEncoderTest.scala │ └── AvroRecordPreexistingCompanionTest.scala │ └── TestUtil.scala ├── macros └── src │ └── main │ └── scala │ └── avro │ └── scala │ └── macro │ └── annotations │ ├── record │ ├── schemagen │ │ ├── SchemaStore.scala │ │ ├── RecordSchemaGenerator.scala │ │ ├── ToJsonMatcher.scala │ │ └── FieldSchemaGenerator.scala │ ├── ctorgen │ │ ├── CtorGenerator.scala │ │ └── DefaultCtorParamMatcher.scala │ ├── namespacegen │ │ └── NamespaceGenerator.scala │ └── methodgen │ │ ├── GetDefCaseGenerator.scala │ │ └── PutDefCaseGenerator.scala │ ├── provider │ ├── ValDefGenerator.scala │ ├── NamespaceProbe.scala │ ├── FilePathProbe.scala │ ├── FileParser.scala │ ├── 
NestedSchemaExtractor.scala │ └── matchers │ │ ├── FromJsonMatcher.scala │ │ └── AvroTypeMatcher.scala │ ├── AvroTypeProviderMacro.scala │ └── AvroRecordMacro.scala └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | 3 | -------------------------------------------------------------------------------- /examples/scalding/data/.gitignore: -------------------------------------------------------------------------------- 1 | *.tsv -------------------------------------------------------------------------------- /examples/spark/.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.8 -------------------------------------------------------------------------------- /examples/avro/project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.8 2 | -------------------------------------------------------------------------------- /examples/scavro/project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.8 2 | -------------------------------------------------------------------------------- /examples/spark/project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.8 2 | -------------------------------------------------------------------------------- /examples/scalavro/project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=0.13.2 2 | -------------------------------------------------------------------------------- /examples/scalding/project/build.properties: 
-------------------------------------------------------------------------------- 1 | sbt.version=0.13.9 2 | -------------------------------------------------------------------------------- /examples/spark/output/twitter-wordcount-scala-spark-specific.tsv/_SUCCESS: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/spark/output/twitter-wordcount-scala-spark-specific.tsv/._SUCCESS.crc: -------------------------------------------------------------------------------- 1 | crc -------------------------------------------------------------------------------- /examples/spark/project/assembly.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2") 2 | -------------------------------------------------------------------------------- /examples/spark/twitter.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/examples/spark/twitter.avro -------------------------------------------------------------------------------- /examples/scalding/data/PackedAvroOutput.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/examples/scalding/data/PackedAvroOutput.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest00.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest00.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest01.avro: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest01.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest02.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest02.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest03.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest03.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest04.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest04.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest05.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest05.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest06.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest06.avro 
-------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest07.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest07.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest08.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest08.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest10.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest10.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest11.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest11.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest12.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest12.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest13.avro: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest13.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest14.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest14.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest15.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest15.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest16.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest16.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest17.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest17.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest18.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest18.avro -------------------------------------------------------------------------------- 
/tests/src/test/resources/AvroTypeProviderTest19.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest19.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest20.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest20.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest24.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest24.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest25.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest25.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest26.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest26.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest27.avro: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest27.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest28.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest28.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest29.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest29.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest30.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest30.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest31.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest31.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest32.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest32.avro -------------------------------------------------------------------------------- 
/tests/src/test/resources/AvroTypeProviderTest33.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest33.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest34.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest34.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest35.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest35.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest36.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest36.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest37.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest37.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest38.avro: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest38.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest39.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest39.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest40.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest40.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest41.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest41.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest42.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest42.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest43.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest43.avro -------------------------------------------------------------------------------- 
/tests/src/test/resources/AvroTypeProviderTest44.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest44.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest45.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest45.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest46.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest46.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest47.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest47.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest48.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest48.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest49.avro: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest49.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest50.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest50.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest51.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest51.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest52.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest52.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest53.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest53.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest54.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest54.avro -------------------------------------------------------------------------------- 
/tests/src/test/resources/AvroTypeProviderTest55.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest55.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest56.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest56.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest57.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest57.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest58.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest58.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest59.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest59.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest60.avro: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest60.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest61.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest61.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest62.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest62.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest64.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest64.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest65.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest65.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest66.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest66.avro -------------------------------------------------------------------------------- 
/tests/src/test/resources/AvroTypeProviderTest67.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest67.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTest68.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTest68.avro -------------------------------------------------------------------------------- /examples/avro/src/main/avro/AvroTypeProviderTest00.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/examples/avro/src/main/avro/AvroTypeProviderTest00.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap01.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap01.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap02.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap02.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap03.avro: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap03.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap04.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap04.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap05.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap05.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap06.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap06.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap07.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap07.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap08.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap08.avro 
-------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap09.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap09.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap10.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap10.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap11.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap11.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestMap12.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestMap12.avro -------------------------------------------------------------------------------- /examples/scavro/src/main/avro/AvroTypeProviderTest00.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/examples/scavro/src/main/avro/AvroTypeProviderTest00.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderExtendedTest00.avro: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderExtendedTest00.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestNoNamespace.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestNoNamespace.avro -------------------------------------------------------------------------------- /tests/src/test/resources/evolution/AvroRecordTestEvolution00.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/evolution/AvroRecordTestEvolution00.avro -------------------------------------------------------------------------------- /tests/src/test/resources/evolution/AvroRecordTestEvolution01.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/evolution/AvroRecordTestEvolution01.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestNoNamespaceNested.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestNoNamespaceNested.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestNoNamespaceOption.avro: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestNoNamespaceOption.avro -------------------------------------------------------------------------------- /examples/scalding/src/main/scala/tutorial/models/MyRecord.scala: -------------------------------------------------------------------------------- 1 | package tutorial 2 | 3 | import com.julianpeeters.avro.annotations.AvroRecord 4 | 5 | @AvroRecord 6 | case class Person(var name: String, var age: Int) 7 | -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderPreexistingCompanionTest00.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderPreexistingCompanionTest00.avro -------------------------------------------------------------------------------- /tests/src/test/resources/evolution/AvroTypeProviderTestEvolution00.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/evolution/AvroTypeProviderTestEvolution00.avro -------------------------------------------------------------------------------- /examples/avro/README.md: -------------------------------------------------------------------------------- 1 | avro-scala-macro-annotation-examples 2 | ==================================== 3 | 4 | Example of how to use the @AvroRecord and @AvroTypeProvider annotations to make a Scala case class Avro serializable. 
-------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestNoNamespaceDeeplyNested.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestNoNamespaceDeeplyNested.avro -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestNoNamespaceDoubleNested.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/tests/src/test/resources/AvroTypeProviderTestNoNamespaceDoubleNested.avro -------------------------------------------------------------------------------- /examples/scavro/README.md: -------------------------------------------------------------------------------- 1 | avro-scala-macro-annotation-examples 2 | ==================================== 3 | 4 | Example of how to use the @AvroRecord and @AvroTypeProvider annotations to make a Scala case class Avro serializable. 
-------------------------------------------------------------------------------- /examples/spark/output/twitter-wordcount-scala-spark-specific.tsv/part-00000: -------------------------------------------------------------------------------- 1 | intended.,1 2 | is,2 3 | as,1 4 | "paper,",1 5 | ,1 6 | Works,1 7 | fine.,1 8 | Rock:,1 9 | Nerf,1 10 | scissors,1 11 | Terran,1 12 | IMBA.,1 13 | -------------------------------------------------------------------------------- /examples/spark/output/twitter-wordcount-scala-spark-specific.tsv/.part-00000.crc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/julianpeeters/avro-scala-macro-annotations/HEAD/examples/spark/output/twitter-wordcount-scala-spark-specific.tsv/.part-00000.crc -------------------------------------------------------------------------------- /examples/scalding/project/plugins.sbt: -------------------------------------------------------------------------------- 1 | resolvers += Resolver.url("plugins-artifactory", url("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns) 2 | 3 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2") 4 | -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue00.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue00","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"int","doc":"Auto-Generated Field","default":0}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue02.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue02","namespace":"test","doc":"Auto-Generated 
Schema","fields":[{"name":"x","type":"long","doc":"Auto-Generated Field","default":2}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue01.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue01","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"float","doc":"Auto-Generated Field","default":7.0}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue03.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue03","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"double","doc":"Auto-Generated Field","default":2.0}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue04.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue04","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"boolean","doc":"Auto-Generated Field","default":false}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue05.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue05","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"string","doc":"Auto-Generated Field","default":""}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue06.avsc: 
-------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue06","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"null","doc":"Auto-Generated Field","default":null}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue08.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue08","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":["null","int"],"doc":"Auto-Generated Field","default":1}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue07.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue07","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":["null","string"],"doc":"Auto-Generated Field","default":null}]} -------------------------------------------------------------------------------- /examples/.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | 4 | # sbt specific 5 | .cache/ 6 | .history/ 7 | .lib/ 8 | dist/* 9 | target/ 10 | lib_managed/ 11 | src_managed/ 12 | project/boot/ 13 | project/plugins/project/ 14 | 15 | # Scala-IDE specific 16 | .scala_dependencies 17 | .worksheet 18 | -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue10.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue10","namespace":"test","doc":"Auto-Generated 
Schema","fields":[{"name":"x","type":{"type":"array","items":"string"},"doc":"Auto-Generated Field","default":null}]} -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue11.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue11","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":{"type":"array","items":"int"},"doc":"Auto-Generated Field","default":[1,2]}]} -------------------------------------------------------------------------------- /examples/scalavro/README.md: -------------------------------------------------------------------------------- 1 | avro-scala-macro-annotation-examples 2 | ==================================== 3 | 4 | Example of how to use the @AvroRecord annotation to make a Scala case class Avro serializable. 5 | 6 | //TODO: Examples in Apache Avro, Scalding, Spark, Scalavro, Salat-Avro 7 | -------------------------------------------------------------------------------- /examples/spark/src/main/scala/twitter_schema.scala: -------------------------------------------------------------------------------- 1 | package com.miguno.avro 2 | 3 | import com.julianpeeters.avro.annotations._ 4 | 5 | // generates code for case class twitter_schema(var username: String, var tweet: String, var timestamp: Long) 6 | @AvroTypeProvider("twitter.avro") 7 | @AvroRecord 8 | case class twitter_schema() -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue14.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue14","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":{"type":"record","name":"AvroTypeProviderTest00","doc":"Auto-Generated 
Schema","fields":[{"name":"x","type":"int","doc":"Auto-Generated Field"}]},"doc":"Auto-Generated Field","default":{"x":4}}]} -------------------------------------------------------------------------------- /examples/scalavro/src/main/avro/twitter_schema.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"twitter_schema","namespace":"com.miguno.avro","fields":[{"name":"username","type":"string","doc":"Name of the user account on Twitter.com"},{"name":"tweet","type":"string","doc":"The content of the user's Twitter message"},{"name":"timestamp","type":"long","doc":"Unix epoch time in milliseconds"}],"doc:":"A basic schema for storing Twitter messages"} 2 | -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue15.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue15","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":{"type":"record","name":"AvroTypeProviderTestDefaultValue00","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"int","doc":"Auto-Generated Field","default":0}]},"doc":"Auto-Generated Field","default":{"x":4}}]} -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestSchemaFile.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"twitter_schema","namespace":"com.miguno.avro","fields":[{"name":"username","type":"string","doc":"Name of the user account on Twitter.com"},{"name":"tweet","type":"string","doc":"The content of the user's Twitter message"},{"name":"timestamp","type":"long","doc":"Unix epoch time in milliseconds"}],"doc:":"A basic schema for storing Twitter messages"} 2 | 
-------------------------------------------------------------------------------- /examples/scalavro/build.sbt: -------------------------------------------------------------------------------- 1 | name := "avro-example" 2 | 3 | version := "0.0.1-SNAPSHOT" 4 | 5 | organization := "com.julianpeeters" 6 | 7 | scalaVersion := "2.10.5" 8 | 9 | libraryDependencies += "com.julianpeeters" %% "avro-scala-macro-annotations" % "0.4.5" 10 | 11 | libraryDependencies += "com.gensler" %% "scalavro" % "0.6.2" 12 | 13 | addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0-M5" cross CrossVersion.full) 14 | -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue09.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue09","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":{"type":"map","values":{"type":"map","values":"int"}},"doc":"Auto-Generated Field","default":{"glory":{"kitty":3}}},{"name":"y","type":{"type":"map","values":{"type":"map","values":"int"}},"doc":"Auto-Generated Field","default":{"pride":{"doggy":4}}}]} 2 | -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue17.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue17","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":{"type":"array","items":["null",{"type":"record","name":"AvroTypeProviderTestDefaultValue00","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"int","doc":"Auto-Generated Field","default":0}]}]},"doc":"Auto-Generated Field","default":[null,{"x":8}]}]} -------------------------------------------------------------------------------- /examples/avro/build.sbt: 
-------------------------------------------------------------------------------- 1 | name := "avro-example" 2 | 3 | version := "0.0.1-SNAPSHOT" 4 | 5 | organization := "com.julianpeeters" 6 | 7 | scalaVersion := "2.11.7" 8 | 9 | libraryDependencies += "com.julianpeeters" %% "avro-scala-macro-annotations" % "0.10.2" 10 | 11 | libraryDependencies += "org.apache.avro" % "avro" % "1.7.7" 12 | 13 | libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.1.2" 14 | 15 | addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0-M5" cross CrossVersion.full) 16 | -------------------------------------------------------------------------------- /examples/scavro/build.sbt: -------------------------------------------------------------------------------- 1 | name := "avro-example" 2 | 3 | version := "0.0.1-SNAPSHOT" 4 | 5 | organization := "com.julianpeeters" 6 | 7 | scalaVersion := "2.11.7" 8 | 9 | libraryDependencies += "com.julianpeeters" %% "avro-scala-macro-annotations" % "0.10.2" 10 | 11 | libraryDependencies += "org.apache.avro" % "avro" % "1.7.7" 12 | 13 | libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.1.2" 14 | 15 | addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0-M5" cross CrossVersion.full) 16 | -------------------------------------------------------------------------------- /examples/avro/src/main/avro/AvroTypeProviderTestNestedSchemaFile.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "TestMessage", 4 | "namespace": "", 5 | "fields": [ 6 | {"name": "message", "type": "string"}, 7 | { 8 | "name": "metaData", 9 | "type": { 10 | "type": "record", 11 | "name": "MetaData", 12 | "fields": [ 13 | {"name": "source", "type": "string"}, 14 | {"name": "timestamp", "type": "string"} 15 | ] 16 | } 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- 
/examples/scavro/src/main/avro/AvroTypeProviderTestNestedSchemaFile.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "TestMessage", 4 | "namespace": "", 5 | "fields": [ 6 | {"name": "message", "type": "string"}, 7 | { 8 | "name": "metaData", 9 | "type": { 10 | "type": "record", 11 | "name": "MetaData", 12 | "fields": [ 13 | {"name": "source", "type": "string"}, 14 | {"name": "timestamp", "type": "string"} 15 | ] 16 | } 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestNestedSchemaFile.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "TestMessage", 4 | "namespace": "test", 5 | "fields": [ 6 | {"name": "message", "type": "string"}, 7 | { 8 | "name": "metaData", 9 | "type": { 10 | "type": "record", 11 | "name": "MetaData", 12 | "fields": [ 13 | {"name": "source", "type": "string"}, 14 | {"name": "timestamp", "type": "string"} 15 | ] 16 | } 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderExtendedCompanionTest00.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "AvroTypeProviderPreexistingCompanionTest01", 4 | "namespace": "test", 5 | "fields": [ 6 | {"name": "message", "type": "string"}, 7 | { 8 | "name": "metaData", 9 | "type": { 10 | "type": "record", 11 | "name": "MetaData", 12 | "fields": [ 13 | {"name": "source", "type": "string"}, 14 | {"name": "timestamp", "type": "string"} 15 | ] 16 | } 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | avro-scala-macro-annotation-examples 
2 | ==================================== 3 | 4 | Example of how to use the @AvroRecord annotation to make a Scala case class Avro serializable. 5 | 6 | Annotated case classes that implement `SpecificRecord` are compatible with any Avro tool that takes care to not use the reflective constructors of the SpecificRecord API: 7 | 8 | Examples of compatibility with the following: 9 | 10 | - Avro* 11 | - Scalding 12 | - Spark* 13 | - Scalavro 14 | 15 | *requires a schema be passed in order to avoid reflection 16 | -------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue16.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue16","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":{"type":"record","name":"AvroTypeProviderTestDefaultValue14","doc":"Auto-Generated Schema","fields":[{"name":"x","type":{"type":"record","name":"AvroTypeProviderTest00","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"int","doc":"Auto-Generated Field"}]},"doc":"Auto-Generated Field","default":{"x":4}}]},"doc":"Auto-Generated Field","default":{"x":{"x":5}}}]} -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestRelativeSchemaFilePath.avsc: -------------------------------------------------------------------------------- 1 | { "type":"record", 2 | "name":"Tweet", 3 | "namespace":"com.miguno.avro.relative", 4 | "fields":[ 5 | { 6 | "name":"username", 7 | "type":"string","doc":"Name of the user account on Twitter.com" 8 | }, 9 | { 10 | "name":"tweet","type":"string","doc":"The content of the user's Twitter message" 11 | }, 12 | {"name":"timestamp","type":"long","doc":"Unix epoch time in milliseconds"} 13 | ], 14 | "doc:":"A basic schema for storing Twitter messages" 15 | } 16 | 
-------------------------------------------------------------------------------- /tests/src/test/resources/default/AvroTypeProviderTestDefaultValue18.avsc: -------------------------------------------------------------------------------- 1 | {"type":"record","name":"AvroTypeProviderTestDefaultValue18","namespace":"test","doc":"Auto-Generated Schema","fields":[{"name":"x","type":{"type":"record","name":"AvroTypeProviderTest00","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"int","doc":"Auto-Generated Field"}]},"doc":"Auto-Generated Field","default":{"x":4}},{"name":"y","type":{"type":"record","name":"AvroTypeProviderTest01","doc":"Auto-Generated Schema","fields":[{"name":"x","type":"float","doc":"Auto-Generated Field"}]},"doc":"Auto-Generated Field","default":{"x":3.0}}]} -------------------------------------------------------------------------------- /tests/src/test/scala/AvroTypeProviderTests/AvroTypeProviderNoNamespaceTest.scala: -------------------------------------------------------------------------------- 1 | // no package 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | import com.julianpeeters.avro.annotations._ 6 | 7 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestNoNamespace.avro") 8 | @AvroRecord 9 | case class AvroTypeProviderTestNoNamespace() 10 | 11 | class AvroTypeProviderNoNamespaceTest extends Specification { 12 | 13 | "A case class with in the default package (i.e. 
without a namespace)" should { 14 | "serialize and deserialize correctly" in { 15 | val record = AvroTypeProviderTestNoNamespace(1) 16 | test.TestUtil.verifyRead(record) 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /macros/src/main/scala/avro/scala/macro/annotations/record/schemagen/SchemaStore.scala: -------------------------------------------------------------------------------- 1 | package com.julianpeeters.avro.annotations 2 | package record 3 | package schemagen 4 | 5 | import java.util.concurrent.ConcurrentHashMap 6 | import collection.JavaConversions._ 7 | 8 | import org.apache.avro.Schema 9 | 10 | object SchemaStore { 11 | 12 | val schemas: scala.collection.concurrent.Map[String, Schema] = scala.collection.convert.Wrappers.JConcurrentMapWrapper(new ConcurrentHashMap[String, Schema]()) 13 | 14 | def accept(schema: Schema) { 15 | val fullName = schema.getFullName 16 | if (!schemas.contains(fullName)) { 17 | val _ = schemas += (fullName -> schema) 18 | } 19 | } 20 | 21 | } -------------------------------------------------------------------------------- /tests/src/test/scala/AvroRecordTests/AvroRecordNoNamespaceTest.scala: -------------------------------------------------------------------------------- 1 | import org.specs2.mutable.Specification 2 | 3 | import test.TestUtil 4 | 5 | import com.julianpeeters.avro.annotations.AvroRecord 6 | 7 | @AvroRecord 8 | case class AvroRecordTestNoNamespace(var x: Int) 9 | 10 | class AvroRecordNoNamespaceTest extends Specification { 11 | 12 | "A case class with in the default package (i.e. 
without a namespace)" should { 13 | "serialize and deserialize correctly" in { 14 | val record1 = AvroRecordTestNoNamespace(1) 15 | val record2 = AvroRecordTestNoNamespace(2) 16 | val records = List(record1, record2) 17 | TestUtil.verifyWriteAndRead(records) 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestDifferentNamespace.avsc: -------------------------------------------------------------------------------- 1 | [ 2 | { "type":"record", 3 | "name":"Person", 4 | "namespace":"com.miguno.avro.differentns", 5 | "fields":[ 6 | {"name":"id", "type":"long"}, 7 | {"name":"name","type":"string"} 8 | ], 9 | "doc:":"A basic schema for a user, to be reused across namespaces." 10 | }, 11 | { "type":"record", 12 | "name":"Tweet", 13 | "namespace":"com.miguno.avro.differentns.twitter", 14 | "fields":[ 15 | { 16 | "name":"author", 17 | "type":"com.miguno.avro.differentns.Person" 18 | }, 19 | { 20 | "name":"text", "type":"string" 21 | } 22 | ], 23 | "doc:":"A basic schema for storing Twitter messages" 24 | } 25 | ] 26 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroTypeProviderTests/AvroTypeProviderNestedSchemaFileTest.scala: -------------------------------------------------------------------------------- 1 | 2 | package test 3 | // Specs2 4 | import org.specs2.mutable.Specification 5 | 6 | import java.io.File 7 | 8 | import com.julianpeeters.avro.annotations._ 9 | 10 | class AvroTypeProviderNestedSchemaFileTest extends Specification { 11 | 12 | "A case class with types provided from a .avsc avro schema file" should { 13 | "serialize and deserialize correctly" in { 14 | val record1 = TestMessage("Achilles", MetaData("ow", "12345")) 15 | val record2 = TestMessage("Tortoise", MetaData("ho", "67890")) 16 | val records = List(record1, record2) 17 | TestUtil.verifyWriteAndRead(records) 18 | } 19 | } 20 | } 21 | 
package com.julianpeeters.avro.annotations
package record
package schemagen

import collection.JavaConversions._
import org.apache.avro.Schema
import org.apache.avro.Schema.Field

/**
 * Builds Avro RECORD schemas for annotated case classes and registers them
 * with the [[SchemaStore]] so other expansions can reference them by name.
 */
object RecordSchemaGenerator {

  /**
   * Creates a RECORD schema named `className` in `namespace` with the given
   * `avroFields`, registers it with the SchemaStore, and returns it.
   */
  def createSchema(className: String, namespace: String, avroFields: List[Field]): Schema = {
    val avroSchema = Schema.createRecord(className, "Auto-Generated Schema", namespace, false)
    // JavaConversions supplies the implicit List -> java.util.List conversion,
    // replacing the original toArray/Arrays.asList roundtrip.
    avroSchema.setFields(avroFields)
    SchemaStore.accept(avroSchema)
    avroSchema
  }

}
/tests/src/test/scala/AvroTypeProviderTests/AvroTypeProviderRelativeSchemaFilePathTest.scala: -------------------------------------------------------------------------------- 1 | package com.miguno.avro.relative 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | import test.TestUtil 6 | 7 | import com.julianpeeters.avro.annotations._ 8 | 9 | @AvroTypeProvider("../../resources/AvroTypeProviderTestRelativeSchemaFilePath.avsc") 10 | @AvroRecord 11 | case class Tweet() 12 | 13 | class AvroTypeProviderRelativeSchemaFilePathTest extends Specification { 14 | 15 | "A case class with types provided from a .avsc avro schema file with path relative to the class' position" should { 16 | "serialize and deserialize correctly" in { 17 | val record1 = Tweet("Achilles", "ow", 2L) 18 | val record2 = Tweet("Tortoise", "ho", 3L) 19 | val records = List(record1, record2) 20 | TestUtil.verifyWriteAndRead(records) 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /examples/spark/simple.sbt: -------------------------------------------------------------------------------- 1 | import AssemblyKeys._ 2 | 3 | assemblySettings 4 | 5 | name := "Avro Spark Examples" 6 | 7 | version := "0.1-SNAPSHOT" 8 | 9 | scalaVersion := "2.10.5" 10 | 11 | libraryDependencies += "org.apache.spark" %% "spark-core" % "1.5.0" 12 | 13 | libraryDependencies += "org.apache.avro" % "avro-mapred" % "1.7.6" classifier "hadoop2" 14 | 15 | libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "2.4.0" 16 | 17 | // see: http://apache-spark-user-list.1001560.n3.nabble.com/SparkContext-startup-time-out-td1753.html 18 | 19 | // libraryDependencies += "com.typesafe.akka" % "akka-cluster_2.11" % "2.2.4" 20 | 21 | // see: https://issues.apache.org/jira/browse/SPARK-1138 22 | 23 | libraryDependencies += "com.julianpeeters" % "avro-scala-macro-annotations_2.10" % "0.4.5" 24 | 25 | addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0-M5" cross 
CrossVersion.full) 26 | -------------------------------------------------------------------------------- /tests/src/test/resources/AvroTypeProviderTestSchemaUnion.avsc: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type":"record", 4 | "name":"User", 5 | "namespace":"com.miguno.avro", 6 | "fields": [ 7 | {"name":"name", "type":"string"}, 8 | {"name":"id", "type":"long"} 9 | ] 10 | }, 11 | { 12 | "type":"record", 13 | "name":"PictureSize", 14 | "namespace":"com.miguno.avro", 15 | "fields": [ 16 | {"name":"w", "type":"double"}, 17 | {"name":"h", "type":"double"} 18 | ], 19 | "doc":"Width and height of a photo, currently not referenced in the Tweet." 20 | }, 21 | { 22 | "type":"record", 23 | "name":"Tweet", 24 | "namespace":"com.miguno.avro", 25 | "fields": [ 26 | {"name":"text", "type":"string"}, 27 | {"name":"user", "type":"User", "doc":"The author of the tweet."}, 28 | {"name":"user_mentions", "type": {"type":"array", "items":"User"}, "doc":" Users mentioned in the tweet."} 29 | ], 30 | "doc:":"A basic schema for Tweets with repeating elements." 
31 | } 32 | ] 33 | -------------------------------------------------------------------------------- /macros/src/main/scala/avro/scala/macro/annotations/record/ctorgen/CtorGenerator.scala: -------------------------------------------------------------------------------- 1 | package com.julianpeeters.avro.annotations 2 | package record 3 | package ctorgen 4 | 5 | import scala.reflect.macros.blackbox.Context 6 | 7 | import collection.JavaConversions._ 8 | 9 | abstract class CtorGenerator { 10 | 11 | //necessary for type refinement when trying to pass dependent types 12 | val context: Context 13 | 14 | import context.universe._ 15 | import Flag._ 16 | 17 | def toZeroArg(defaultParams: List[Tree]) = { 18 | val newCtorDef = q"""def this() = this(..$defaultParams)""" 19 | val defaultCtorPos = context.enclosingPosition //thanks to Eugene Burmako for the workaround to position the ctor correctly 20 | val newCtorPos = defaultCtorPos 21 | .withEnd(defaultCtorPos.endOrPoint + 1) 22 | .withStart(defaultCtorPos.startOrPoint + 1) 23 | .withPoint(defaultCtorPos.point + 1) 24 | List( atPos(newCtorPos)(newCtorDef) ) 25 | } 26 | } -------------------------------------------------------------------------------- /examples/spark/README.md: -------------------------------------------------------------------------------- 1 | Introduction 2 | ------------ 3 | 4 | This project is an example of computing the classic WordCount from a 5 | corpus of [Apache Avro](http://avro.apache.org/)-encoded records using Spark. There are examples using the Avro Specific records represented by Scala case classes made serializable by avro-scala-macro-annotations. 6 | 7 | Adapted from: Joe Crobak's [AvroSparkScala](https://github.com/jcrobak/avro-examples/blob/master/avro-spark/src/main/scala/AvroSparkScala.scala) and Sandy Ryza's [simplesparkavroapp](https://github.com/sryza/simplesparkavroapp). 
8 | 9 | 10 | Spark 11 | -------- 12 | 13 | Requirements 14 | ============ 15 | 16 | * Java 1.7+ 17 | * sbt 18 | //* Hadoop (tested with Hadoop 1.2.1) installed and `hadoop` on the `PATH`. 19 | 20 | Reading Avro Files 21 | ================== 22 | Run in local mode with `$ sbt run`, then choose number `1` 23 | 24 | 25 | Writing Avro Files 26 | ================== 27 | Run in local mode with `$ sbt run`, then choose number `2` 28 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroTypeProviderTests/AvroTypeProviderDifferentNamespaceTest.scala: -------------------------------------------------------------------------------- 1 | package com.miguno.avro.differentns 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | import test.TestUtil 6 | 7 | import com.julianpeeters.avro.annotations._ 8 | 9 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestDifferentNamespace.avsc") 10 | @AvroRecord 11 | case class Person() 12 | 13 | // Nested namespace 14 | package twitter { 15 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestDifferentNamespace.avsc") 16 | @AvroRecord 17 | case class Tweet() 18 | } 19 | 20 | class AvroTypeProviderDifferentNamespaceTest extends Specification { 21 | 22 | "A case class with types provided from a .avsc avro schema file referencing another class in a different namespace" should { 23 | "serialize and deserialize correctly" in { 24 | val record = twitter.Tweet(Person(id = 1, name = "John"), "Yo!") 25 | TestUtil.verifyWriteAndRead(List(record)) 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /macros/src/main/scala/avro/scala/macro/annotations/provider/ValDefGenerator.scala: -------------------------------------------------------------------------------- 1 | package com.julianpeeters.avro.annotations 2 | package provider 3 | import matchers._ 4 | 5 | import org.apache.avro.Schema 6 | 7 | import 
package com.julianpeeters.avro.annotations
package provider

import scala.reflect.macros.blackbox.Context

object NamespaceProbe {

  /**
   * Derives the namespace from the annottee's enclosing Scala package rather
   * than from the avro schema; this lets namespace-less avros be imported
   * into the annottee's package instead of being stuck in the default package.
   *
   * Returns `null` when the annottee sits in the default package (callers
   * treat `null` as "no namespace").
   */
  def getNamespace(c: Context): String = {
    import c.universe._

    // Typecheck a throwaway class and inspect its symbol's full name to learn
    // the enclosing package path. Thanks again to Eugene Burmako.
    val freshName = c.freshName(TypeName("Probe$"))
    val probe = c.typecheck(q""" {class $freshName; ()} """)
    val freshSymbol = probe match {
      case Block(List(t), _) => t.symbol
    }
    val fullFreshName = freshSymbol.fullName

    // The original wrapped this in a pointless `c.prefix.tree match { case _ => }`
    // and used `replace`, which substitutes every occurrence; `stripSuffix`
    // removes exactly the trailing ".Probe$N" (dot plus probe class name).
    if (fullFreshName.contains('.')) fullFreshName.stripSuffix("." + freshName.toString)
    else null
  }

}
package com.julianpeeters.avro.annotations
package record
package namespacegen

import scala.reflect.macros.blackbox.Context

object NamespaceGenerator {

  /**
   * Derives the record's namespace from the annottee's enclosing Scala
   * package, or returns `null` (meaning "omit the namespace") when either
   * the annotation was given an argument or the annottee sits in the
   * default package.
   */
  def probeNamespace(c: Context): String = {
    import c.universe._

    // Typecheck a throwaway class and inspect its symbol's full name to learn
    // the enclosing package path. Thanks again to Eugene Burmako.
    val freshName = c.freshName(TypeName("Probe$"))
    val probe = c.typecheck(q""" {class $freshName; ()} """)
    val freshSymbol = probe match {
      case Block(List(t), _) => t.symbol
    }
    val fullFreshName = freshSymbol.fullName

    c.prefix.tree match {
      // An annotation argument forces the omission of a namespace in the schema.
      case Apply(_, List(Literal(Constant(_)))) => null
      case _ =>
        // `stripSuffix` removes exactly the trailing ".Probe$N"; the original
        // `replace` would have substituted every occurrence of that substring.
        if (fullFreshName.contains('.')) fullFreshName.stripSuffix("." + freshName.toString)
        else null
    }
  }

}
6 | * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. 7 | * 8 | * Unless required by applicable law or agreed to in writing, 9 | * software distributed under the Apache License Version 2.0 is distributed on an 10 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 12 | */ 13 | package tutorial 14 | 15 | // Hadoop 16 | 17 | // Scalding 18 | import com.twitter.scalding.Tool 19 | import org.apache.hadoop.util.ToolRunner 20 | import org.apache.hadoop.conf.Configuration 21 | 22 | /** 23 | * Entrypoint for Hadoop to kick off the job. 24 | * 25 | * Borrowed from com.twitter.scalding.Tool 26 | */ 27 | object JobRunner { 28 | def main(args : Array[String]) { 29 | ToolRunner.run(new Configuration, new Tool, args.head +: "--local" +: args.tail) 30 | } 31 | } -------------------------------------------------------------------------------- /tests/src/test/scala/AvroRecordTests/AvroRecordDataFileStreamTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | import org.specs2.mutable.Specification 3 | 4 | import java.io._ 5 | 6 | import org.apache.avro.generic._ 7 | import org.apache.avro.specific._ 8 | import org.apache.avro.Schema 9 | import org.apache.avro.file._ 10 | 11 | import collection.JavaConversions._ 12 | 13 | class AvroRecordDataFileStreamTest extends Specification { 14 | 15 | "A case class with in the default package (i.e. 
without a namespace)" should { 16 | "serialize and deserialize correctly" in { 17 | 18 | val record = AvroRecordTest00(1) 19 | val schema = AvroRecordTest00.SCHEMA$ 20 | 21 | val baos = new ByteArrayOutputStream() 22 | 23 | 24 | val userDatumWriter = new SpecificDatumWriter[AvroRecordTest00] 25 | val dataFileWriter = new DataFileWriter[AvroRecordTest00](userDatumWriter) 26 | dataFileWriter.create(schema, baos) 27 | dataFileWriter.append(record) 28 | dataFileWriter.close 29 | 30 | val bais = new ByteArrayInputStream( baos.toByteArray ) 31 | val userDatumReader = new SpecificDatumReader[AvroRecordTest00](schema) 32 | val dataFileReader = new DataFileStream[AvroRecordTest00](bais, userDatumReader) 33 | val sameRecord = dataFileReader.next() 34 | 35 | sameRecord must ===(record) 36 | } 37 | } 38 | } 39 | 40 | -------------------------------------------------------------------------------- /examples/avro/src/main/scala/AvroRecordExample.scala: -------------------------------------------------------------------------------- 1 | 2 | package test 3 | 4 | import com.julianpeeters.avro.annotations._ 5 | 6 | import org.apache.avro.specific._ 7 | import org.apache.avro.generic._ 8 | import org.apache.avro.file._ 9 | 10 | import java.io.File 11 | 12 | @AvroRecord 13 | case class AvroTypeProviderPreexistingCompanionTest00(var x: Int) 14 | 15 | object AvroRecordExample extends App { 16 | val record = AvroTypeProviderPreexistingCompanionTest00(4) 17 | 18 | val file = File.createTempFile("record", "avro") 19 | file.deleteOnExit() 20 | 21 | val userDatumWriter = new SpecificDatumWriter[AvroTypeProviderPreexistingCompanionTest00] 22 | val dataFileWriter = new DataFileWriter[AvroTypeProviderPreexistingCompanionTest00](userDatumWriter) 23 | dataFileWriter.create(AvroTypeProviderPreexistingCompanionTest00.SCHEMA$, file); 24 | dataFileWriter.append(record); 25 | dataFileWriter.close(); 26 | 27 | val schema = AvroTypeProviderPreexistingCompanionTest00.SCHEMA$ 28 | val userDatumReader = 
package test

import com.julianpeeters.avro.annotations._

import org.apache.avro.specific._
import org.apache.avro.generic._
import org.apache.avro.file._

import java.io.File

@AvroRecord
case class AvroTypeProviderPreexistingCompanionTest00(var x: Int)

/**
 * Round-trip example: writes one annotated record to a temp Avro data file,
 * reads it back, and prints whether the deserialized record equals the original.
 */
object AvroRecordExample extends App {
  val record = AvroTypeProviderPreexistingCompanionTest00(4)

  val file = File.createTempFile("record", "avro")
  file.deleteOnExit()

  // Write the record; close the writer even if create/append throws.
  val userDatumWriter = new SpecificDatumWriter[AvroTypeProviderPreexistingCompanionTest00]
  val dataFileWriter = new DataFileWriter[AvroTypeProviderPreexistingCompanionTest00](userDatumWriter)
  try {
    dataFileWriter.create(AvroTypeProviderPreexistingCompanionTest00.SCHEMA$, file)
    dataFileWriter.append(record)
  } finally {
    dataFileWriter.close()
  }

  // Read it back; the original never closed this reader (file-handle leak).
  val schema = AvroTypeProviderPreexistingCompanionTest00.SCHEMA$
  val userDatumReader = new SpecificDatumReader[AvroTypeProviderPreexistingCompanionTest00](schema)
  val dataFileReader = new DataFileReader[AvroTypeProviderPreexistingCompanionTest00](file, userDatumReader)
  try {
    val sameRecord = dataFileReader.next()
    println("deserialized record is the same as the pre-serialized record?: " + (sameRecord == record))
  } finally {
    dataFileReader.close()
  }

}
package com.julianpeeters.avro.annotations
package provider

import org.apache.avro.file.DataFileReader
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.apache.avro.Schema
import org.apache.avro.Schema.Parser
import org.apache.avro.Schema.Type._

import scala.collection.JavaConverters._

object FileParser {

  /**
   * Extracts the record schema(s) from an Avro container file (`.avro`) or a
   * plain-text JSON schema file (`.avsc`).
   *
   * @return the single RECORD schema, or — for a UNION schema — every RECORD
   *         member of the union.
   */
  def getSchemas(infile: java.io.File): List[Schema] = {
    val schema = infile.getName.split("\\.").last match {
      case "avro" =>
        val gdr = new GenericDatumReader[GenericRecord]
        val dfr = new DataFileReader(infile, gdr)
        // Close the reader once the schema is extracted; the original left
        // the file handle open (resource leak).
        try dfr.getSchema finally dfr.close()
      case "avsc" =>
        new Parser().parse(infile)
      case _ =>
        throw new Exception("Invalid file ending. Must be .avsc for plain text json files and .avro for binary files.")
    }
    schema.getType match {
      case UNION =>
        val recordSchemas = schema.getTypes.asScala.toList.filter(_.getType == RECORD)
        if (recordSchemas.nonEmpty) recordSchemas
        else sys.error("no record type found in the union from " + infile)
      case RECORD => List(schema)
      case _ => sys.error("The Schema in the datafile is neither a record nor a union of a record type, nothing to map to case class.")
    }
  }

}
/*
 * Copyright (c) 2012 Twitter, Inc.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
package tutorial

import com.twitter.scalding._
import com.twitter.scalding.avro.PackedAvroSource
import TDsl._

/**
 * Writes a small list of dummy Person records (with a "Dr. " name prefix
 * applied) to a packed-Avro output file.
 */
class PackedAvroWriteJob(args: Args) extends Job(args) {

  /**
   * Dummy data
   */
  val testList = List(
    Person("Oberon", 425),
    Person("Miranda", 419),
    Person("Titania", 424) )

  /**
   * Write dummy data to PackedAvro
   */
  val persons: TypedPipe[Person] = TypedPipe.from(testList)
  val writeToPackedAvro =
    persons
      // The original named this lambda parameter `Person`, shadowing the
      // Person type inside the closure; renamed to `person` for clarity.
      .map { person => person.copy(name = "Dr. " + person.name) }
      .debug
      .write(PackedAvroSource[Person]("data/PackedAvroOutput.avro"))

}
17 | */ 18 | 19 | package registrator 20 | import com.miguno.avro._ 21 | 22 | import com.esotericsoftware.kryo.Kryo 23 | 24 | import org.apache.spark.SparkConf 25 | import org.apache.spark.serializer.{KryoSerializer, KryoRegistrator} 26 | 27 | class MyKryoRegistrator extends KryoRegistrator { 28 | override def registerClasses(kryo: Kryo) { 29 | kryo.register(classOf[twitter_schema]) 30 | } 31 | } 32 | 33 | object MyKryoRegistrator { 34 | def register(conf: SparkConf) { 35 | conf.set("spark.serializer", classOf[KryoSerializer].getName) 36 | conf.set("spark.kryo.registrator", classOf[MyKryoRegistrator].getName) 37 | } 38 | } -------------------------------------------------------------------------------- /tests/src/test/scala/AvroTypeProviderTests/AvroTypeProviderEvolutionTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.Specification 4 | import com.julianpeeters.avro.annotations._ 5 | 6 | import java.io.File 7 | 8 | import org.apache.avro.io.{DecoderFactory, EncoderFactory} 9 | import org.apache.avro.generic.GenericData.Record 10 | import org.apache.avro.generic._ 11 | import org.apache.avro.specific._ 12 | import org.apache.avro.Schema 13 | import org.apache.avro.Schema.{Type => AvroType} 14 | import org.apache.avro.file._ 15 | 16 | @AvroTypeProvider("tests/src/test/resources/evolution/AvroTypeProviderTestEvolution00.avro") 17 | @AvroRecord 18 | case class AvroTypeProviderTestEvolution00(var y: String = "NONE") //field x: Int is provided 19 | 20 | class AvroTypeProviderEvolutionTest extends Specification { 21 | 22 | "A case class that was serialized with a single field" should { 23 | "deserialize correctly if an additional field is added with a default value" in { 24 | 25 | val record = AvroTypeProviderTestEvolution00(1, "NONE") 26 | 27 | val file = new File("tests/src/test/resources/evolution/AvroTypeProviderTestEvolution00.avro") 28 | 29 | val schema = 
AvroTypeProviderTestEvolution00.SCHEMA$ 30 | val userDatumReader = new SpecificDatumReader[AvroTypeProviderTestEvolution00](schema) 31 | val dataFileReader = new DataFileReader[AvroTypeProviderTestEvolution00](file, userDatumReader) 32 | val sameRecord = dataFileReader.next() 33 | 34 | sameRecord must ===(record) 35 | 36 | } 37 | } 38 | 39 | 40 | } -------------------------------------------------------------------------------- /examples/scalding/src/main/scala/tutorial/PackedAvroReadJob.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 Twitter, Inc. 3 | * 4 | * This program is licensed to you under the Apache License Version 2.0, 5 | * and you may not use this file except in compliance with the Apache License Version 2.0. 6 | * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. 7 | * 8 | * Unless required by applicable law or agreed to in writing, 9 | * software distributed under the Apache License Version 2.0 is distributed on an 10 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 12 | */ 13 | package tutorial 14 | 15 | 16 | import com.twitter.scalding._ 17 | import com.twitter.scalding.avro.{PackedAvroSource, UnpackedAvroSource} 18 | import TDsl._ 19 | 20 | 21 | class PackedAvroReadJob(args: Args) extends Job(args) { 22 | 23 | /** 24 | * Read data from PackedAvro 25 | */ 26 | PackedAvroSource[Person]("data/PackedAvroOutput.avro") 27 | .flatMap{rec: Person => tokenize(rec.name)} 28 | .groupBy(identity) 29 | .size 30 | .write(TypedTsv[(String, Long)]("data/AvroWordcountOutput.tsv")) 31 | 32 | // Split a piece of text into individual words. 33 | def tokenize(text : String) : Array[String] = { 34 | // Lowercase each word and remove punctuation. 
35 | text.toLowerCase.replaceAll("[^a-zA-Z0-9\\s]", "").split("\\s+").filter(_.length > 0) 36 | } 37 | 38 | 39 | } 40 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroTypeProviderTests/AvroTypeProviderSchemaUnionTest.scala: -------------------------------------------------------------------------------- 1 | package com.miguno.avro 2 | 3 | import test.TestUtil 4 | 5 | import org.specs2.mutable.Specification 6 | 7 | import com.julianpeeters.avro.annotations._ 8 | 9 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestSchemaUnion.avsc") 10 | @AvroRecord 11 | case class User() 12 | 13 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestSchemaUnion.avsc") 14 | @AvroRecord 15 | case class PictureSize() 16 | 17 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestSchemaUnion.avsc") 18 | @AvroRecord 19 | case class Tweet() 20 | 21 | class AvroTypeProviderSchemaUnionTest extends Specification { 22 | 23 | "Case classes generated from an .avsc file containing a union of schemas" should { 24 | 25 | "serialize and deserialize types referencing other items in the union" in { 26 | val record1 = Tweet("This is Hector. 
The fool who thought he killed Achilles.", 27 | user = User("Achilles", 1), user_mentions = List(User("Hector", 2), User("Achilles", 1))) 28 | val record2 = Tweet("I must have made 500 errors.", 29 | user = User("Douglas", 2), user_mentions = List(User("James", 3), User("Banister", 2))) 30 | val records = List(record1, record2) 31 | TestUtil.verifyWriteAndRead(records) 32 | } 33 | 34 | "serialize and deserialize types which aren't referenced by any other item in the union" in { 35 | val record1 = PictureSize(300.0, 172.0) 36 | val record2 = PictureSize(500.0, 100.0) 37 | val records = List(record1, record2) 38 | TestUtil.verifyWriteAndRead(records) 39 | } 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /macros/src/main/scala/avro/scala/macro/annotations/provider/NestedSchemaExtractor.scala: -------------------------------------------------------------------------------- 1 | package com.julianpeeters.avro.annotations 2 | package provider 3 | 4 | import org.apache.avro.Schema 5 | import org.apache.avro.Schema.Type.{ARRAY, ENUM, MAP, RECORD, UNION} 6 | 7 | import scala.collection.JavaConverters._ 8 | 9 | object NestedSchemaExtractor { 10 | // if a record is found, extract nested RECORDs and ENUMS (i.e. 
package com.julianpeeters.avro.annotations
package provider

import org.apache.avro.Schema
import org.apache.avro.Schema.Type.{ARRAY, ENUM, MAP, RECORD, UNION}

import scala.collection.JavaConverters._

object NestedSchemaExtractor {

  /** Returns the given schema followed by every RECORD or ENUM schema nested
    * within it (i.e. the top-level types a code generator must emit), found by
    * recursively walking record fields through arrays, maps, and unions.
    *
    * NOTE(review): a schema that references itself (recursive record) would
    * make this walk loop forever — presumably such schemas never reach here;
    * confirm against callers.
    */
  def getNestedSchemas(schema: Schema): List[Schema] = {

    def extract(schema: Schema): List[Schema] = {
      schema.getType match {

        case RECORD =>
          val fieldSchemas: List[Schema] =
            schema.getFields.asScala.toList.map(_.schema())

          // Unwrap container types down to their member schemas. A nested
          // RECORD contributes itself plus its own nested top-level types.
          def flattenSchema(fieldSchema: Schema): List[Schema] = {
            fieldSchema.getType match {
              case ARRAY  => flattenSchema(fieldSchema.getElementType)
              case MAP    => flattenSchema(fieldSchema.getValueType)
              case RECORD => fieldSchema :: extract(fieldSchema)
              case UNION  => fieldSchema.getTypes.asScala.toList.flatMap(flattenSchema)
              case _      => List(fieldSchema) // ENUM and primitives alike; filtered below
            }
          }

          val flatSchemas = fieldSchemas.flatMap(flattenSchema)

          // Keep only RECORDs and ENUMs; primitives are dropped.
          // (Was the non-short-circuiting `|`; `||` is the idiomatic boolean or.)
          flatSchemas.filter(s => s.getType == RECORD || s.getType == ENUM)

        case ENUM => List(schema)
        case _    => Nil
      }
    }

    schema :: extract(schema)
  }
}
/*
 * Copyright (c) 2012 SnowPlow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
import sbt._
import Keys._

object Strata2014ScaldingTutorialBuild extends Build {

  import Dependencies._
  import BuildSettings._

  // Configure prompt to show current project
  override lazy val settings = super.settings :+ {
    shellPrompt := { s => Project.extract(s).currentProject.id + " > " }
  }

  // Define our project, with basic project information and library dependencies
  lazy val project = Project("Tutorial", file("."))
    .settings(buildSettings: _*)
    .settings(
      libraryDependencies ++= Seq(
        Libraries.scaldingCore,
        Libraries.scaldingAvro,
        Libraries.hadoopCore,
        Libraries.specs2,
        // Add your additional libraries here (comma-separated)...
        // `%%` appends the Scala binary version automatically, so this stays
        // correct if scalaVersion changes (was `% "..._2.11"` hard-coded).
        "com.julianpeeters" %% "avro-scala-macro-annotations" % "0.10.3"
      ),
      // Required for the @AvroTypeProvider / @AvroRecord macro annotations.
      addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0-M5" cross CrossVersion.full)
    )
}
/*
 * Copyright (c) 2012 SnowPlow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
import sbt._

object Dependencies {
  // Extra resolvers needed beyond Maven Central.
  val resolutionRepos = Seq(
    "ScalaTools snapshots at Sonatype" at "https://oss.sonatype.org/content/repositories/snapshots/",
    "Concurrent Maven Repo" at "http://conjars.org/repo" // For Scalding, Cascading etc
  )

  // Dependency versions, grouped for easy bumping.
  object V {
    val scalding = "0.15.0"
    val hadoop   = "1.2.1"
    val specs2   = "2.3.12"
    // Add versions for your additional libraries here...
  }

  object Libraries {
    // `%%` resolves the `_<scalaBinaryVersion>` artifact automatically,
    // keeping these correct if scalaVersion changes (was `% "..._2.11"`).
    val scaldingCore = "com.twitter" %% "scalding-core" % V.scalding
    val scaldingAvro = "com.twitter" %% "scalding-avro" % V.scalding
    val hadoopCore   = "org.apache.hadoop" % "hadoop-core" % V.hadoop
    // to run on Hadoop cluster you would want hadoopCore marked % "provided"
    // Add additional libraries from mvnrepository.com (SBT syntax) here...

    // Scala (test only)
    val specs2 = "org.specs2" %% "specs2" % V.specs2 % "test"
  }
}
import com.julianpeeters.avro.annotations._

import org.apache.avro.specific._
import org.apache.avro.generic._
import org.apache.avro.file._

import java.io.File

@AvroTypeProvider("src/main/avro/AvroTypeProviderTestNestedSchemaFile.avsc")
@AvroRecord
case class TestMessage()

@AvroTypeProvider("src/main/avro/AvroTypeProviderTestNestedSchemaFile.avsc")
@AvroRecord
case class MetaData()

/** Example: round-trips a macro-generated record through an Avro data file
  * and prints whether the deserialized record equals the original.
  */
object AvroTypeProviderExample extends App {
  val record = TestMessage("Achilles", MetaData("ow", "12345"))

  // `createTempFile` does not insert a dot itself; pass ".avro" so the file
  // gets a real extension (was "avro", yielding a "...avro"-fused name).
  val file = File.createTempFile("AvroTypeProviderNestedSchemaFileTest", ".avro")
  file.deleteOnExit()

  // Write the record using the schema the macro generated for it.
  val userDatumWriter = new SpecificDatumWriter[TestMessage]
  val dataFileWriter = new DataFileWriter[TestMessage](userDatumWriter)
  dataFileWriter.create(record.getSchema(), file)
  dataFileWriter.append(record)
  dataFileWriter.close()

  // Read it back using the writer schema embedded in the file.
  val schema = new DataFileReader(file, new GenericDatumReader[GenericRecord]).getSchema
  val userDatumReader = new SpecificDatumReader[TestMessage](schema)
  val dataFileReader = new DataFileReader[TestMessage](file, userDatumReader)
  val sameRecord = dataFileReader.next()
  dataFileReader.close() // was leaked

  println("deserialized record is the same as a new record based on the schema in the file?: " + (sameRecord == record) )

}
/*
 * Copyright 2014 Julian Peeters
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import com.julianpeeters.avro.annotations._

import org.apache.avro.specific._
import org.apache.avro.generic._
import org.apache.avro.file._

import java.io.File

@AvroTypeProvider("src/main/avro/AvroTypeProviderTestNestedSchemaFile.avsc")
@AvroRecord
case class TestMessage()

@AvroTypeProvider("src/main/avro/AvroTypeProviderTestNestedSchemaFile.avsc")
@AvroRecord
case class MetaData()

/** Example: round-trips a macro-generated record through an Avro data file
  * and prints whether the deserialized record equals the original.
  */
object AvroTypeProviderExample extends App {
  val record = TestMessage("Achilles", MetaData("ow", "12345"))

  // `createTempFile` does not insert a dot itself; pass ".avro" so the file
  // gets a real extension (was "avro", yielding a "...avro"-fused name).
  val file = File.createTempFile("AvroTypeProviderNestedSchemaFileTest", ".avro")
  file.deleteOnExit()

  // Write the record using the schema the macro generated for it.
  val userDatumWriter = new SpecificDatumWriter[TestMessage]
  val dataFileWriter = new DataFileWriter[TestMessage](userDatumWriter)
  dataFileWriter.create(record.getSchema(), file)
  dataFileWriter.append(record)
  dataFileWriter.close()

  // Read it back using the writer schema embedded in the file.
  val schema = new DataFileReader(file, new GenericDatumReader[GenericRecord]).getSchema
  val userDatumReader = new SpecificDatumReader[TestMessage](schema)
  val dataFileReader = new DataFileReader[TestMessage](file, userDatumReader)
  val sameRecord = dataFileReader.next()
  dataFileReader.close() // was leaked

  println("deserialized record is the same as a new record based on the schema in the file?: " + (sameRecord == record) )

}
package test

import org.specs2.mutable.Specification
import com.julianpeeters.avro.annotations._

import java.io.File

import org.apache.avro.generic.GenericData.Record
import org.apache.avro.generic._
import org.apache.avro.specific._
import org.apache.avro.Schema
import org.apache.avro.Schema.{Type => AvroType}
import org.apache.avro.file._

@AvroRecord
case class AvroRecordTestEvolution00(var x: Int, var y: String = "NONE")

@AvroRecord
case class AvroRecordTestEvolution01(var x: Int)

/** Schema-evolution tests: reads a fixture written by an older version of a
  * record with a reader schema that has gained or lost a field.
  */
class AvroRecordEvolutionTest extends Specification {

  "A case class that was serialized with a single field" should {
    "deserialize correctly if an additional field is added with a default value" in {

      // The added field `y` should be filled from its default value.
      val record = AvroRecordTestEvolution00(1, "NONE")

      // Fixture written by an earlier, differently-shaped record model.
      val file = new File("tests/src/test/resources/evolution/AvroRecordTestEvolution00.avro")

      val schema = AvroRecordTestEvolution00.SCHEMA$
      val userDatumReader = new SpecificDatumReader[AvroRecordTestEvolution00](schema)
      val dataFileReader = new DataFileReader[AvroRecordTestEvolution00](file, userDatumReader)
      val sameRecord = dataFileReader.next()
      dataFileReader.close() // was leaked

      sameRecord must ===(record)

    }
  }

  "A case class that was serialized with two fields with default values" should {
    "deserialize correctly if a field is removed from the record model" in {

      // The removed field is simply ignored on read.
      val record = AvroRecordTestEvolution01(1)

      val file = new File("tests/src/test/resources/evolution/AvroRecordTestEvolution00.avro")

      val schema = AvroRecordTestEvolution01.SCHEMA$
      val userDatumReader = new SpecificDatumReader[AvroRecordTestEvolution01](schema)
      val dataFileReader = new DataFileReader[AvroRecordTestEvolution01](file, userDatumReader)
      val sameRecord = dataFileReader.next()
      dataFileReader.close() // was leaked

      sameRecord must ===(record)

    }
  }

}
package com.julianpeeters.avro.annotations
package record
package methodgen

import scala.reflect.macros.blackbox.Context

import collection.JavaConversions._

import org.apache.avro.Schema

// Generates the individual `case` clauses of the `get(field: Int)` method that
// the @AvroRecord macro adds to an annotated case class: each clause converts
// one Scala field value to the Java representation Avro's runtime expects.
abstract class GetDefCaseGenerator {


  //necessary for type refinement when trying to pass dependent types
  val context: Context

  import context.universe._
  import Flag._

  // Builds `case pos if (pos == idx) => <field converted to Java>.asInstanceOf[AnyRef]`
  // for the field named `nme` of Scala type `tpe` at schema position `idx`.
  def asGetCase(nme: TermName, tpe: Type, idx: Int) = {
    // Recursively rewrites `convertable` (a tree yielding a Scala value of
    // type `typeTree`) into a tree yielding the Avro/Java equivalent.
    def convertToJava(typeTree: Type, convertable: Tree): Tree = {
      typeTree match {
        // Option[T] -> the converted inner value, or null for None.
        case o @ TypeRef(pre, symbol, args) if (o <:< typeOf[Option[Any]] && args.length == 1) => {
          // Option[Option[_]] is explicitly unsupported by this macro.
          if (args.head <:< typeOf[Option[Any]]) {
            throw new UnsupportedOperationException("Implementation limitation: Cannot immediately nest Option types")
          }
          else q"""
            $convertable match {
              case Some(x) => ${convertToJava(args.head, q"x")}
              case None => null
            }"""
        }
        // List[T] -> java.util.List with each element converted.
        case x @ TypeRef(pre, symbol, args) if (x <:< typeOf[List[Any]] && args.length == 1) => {
          q"""scala.collection.JavaConversions.bufferAsJavaList($convertable.map(x => ${convertToJava(args.head, q"x")}).toBuffer)"""
        }
        // Map[String, T] -> java.util.HashMap with each value converted.
        case x @ TypeRef(pre, symbol, args) if (x <:< typeOf[Map[String, Any]] && args.length == 2) => {
          q"""
          val map = new java.util.HashMap[String, Any]()
          $convertable.foreach(x => {
            val key = x._1
            val value = x._2
            map.put(key, ${convertToJava(args(1), q"value")})
          })
          map
          """
        }
        // Anything else (primitives, nested records) passes through unchanged.
        case x => convertable
      }
    }
    val convertedToJava = convertToJava(tpe, q"${nme}")
    cq"""pos if (pos == ${idx}) => $convertedToJava.asInstanceOf[AnyRef]"""
  }




}
package test

import org.specs2.mutable.Specification

/** Round-trip tests for @AvroRecord case classes with one primitive field each. */
class AvroRecordPrimitivesTest extends Specification {


  "A case class with an `Int` field" should {
    "deserialize correctly" in {
      TestUtil.verifyWriteAndRead(List(AvroRecordTest00(1), AvroRecordTest00(2)))
    }
  }

  "A case class with an `Float` field" should {
    "deserialize correctly" in {
      TestUtil.verifyWriteAndRead(List(AvroRecordTest01(1F), AvroRecordTest01(2F)))
    }
  }

  "A case class with an `Long` field" should {
    "deserialize correctly" in {
      TestUtil.verifyWriteAndRead(List(AvroRecordTest02(1L), AvroRecordTest02(2L)))
    }
  }

  "A case class with an `Double` field" should {
    "deserialize correctly" in {
      TestUtil.verifyWriteAndRead(List(AvroRecordTest03(1D), AvroRecordTest03(2D)))
    }
  }

  "A case class with an `Boolean` field" should {
    "deserialize correctly" in {
      TestUtil.verifyWriteAndRead(List(AvroRecordTest04(true), AvroRecordTest04(false)))
    }
  }

  "A case class with an `String` field" should {
    "deserialize correctly" in {
      TestUtil.verifyWriteAndRead(List(AvroRecordTest05("hello world"), AvroRecordTest05("hello galaxy")))
    }
  }

  "A case class with an `Null` field" should {
    "deserialize correctly" in {
      TestUtil.verifyWriteAndRead(List(AvroRecordTest06(null), AvroRecordTest06(null)))
    }
  }


}
package test

import org.specs2.mutable.Specification
import com.julianpeeters.avro.annotations._

import java.io.File

import org.apache.avro.io.{DecoderFactory, EncoderFactory}
import org.apache.avro.generic.GenericData.Record
import org.apache.avro.generic._
import org.apache.avro.specific._
import org.apache.avro.Schema
import org.apache.avro.Schema.{Type => AvroType}
import org.apache.avro.file._

@AvroRecord
case class AvroRecordTestEncoder01(var i: Int, var j: Option[Int])

// Round-trips records through Avro's raw binary encoder/decoder (no data-file
// container) and pins the exact bytes the encoder is expected to produce.
class AvroRecordEncoderTest extends Specification {


  "A case class " should {
    "serialize and deserialize correctly via encoder and decoder" in {

      val record = AvroRecordTest00(0)
      val schema = AvroRecordTest00.SCHEMA$

      val w = new SpecificDatumWriter[AvroRecordTest00](schema)

      val out = new java.io.ByteArrayOutputStream()
      val encoder = EncoderFactory.get().binaryEncoder(out, null)

      w.write(record, encoder)

      // binaryEncoder buffers; flush before inspecting the bytes.
      encoder.flush

      val ba = out.toByteArray

      // A single Int field of 0 is expected to encode to exactly one zero byte.
      ba.size must ===(1)
      ba(0) must ===(0)

      out.close

      val reader = new SpecificDatumReader[AvroRecordTest00](schema)

      val decoder = DecoderFactory.get().binaryDecoder(ba, null)
      val decoded = reader.read(null, decoder)

      decoded must ===(record)

    }
  }


  "A case class with two fields, Int and Option[Int]" should {
    "serialize and deserialize correctly via encoder and decoder" in {

      val record = AvroRecordTestEncoder01(0, None)
      val schema = AvroRecordTestEncoder01.SCHEMA$

      val w = new SpecificDatumWriter[AvroRecordTestEncoder01](schema)

      val out = new java.io.ByteArrayOutputStream()
      val encoder = EncoderFactory.get().binaryEncoder(out, null)

      w.write(record, encoder)

      encoder.flush

      val ba = out.toByteArray

      // Expected two bytes: one for i = 0 and one for j = None
      // (presumably the null branch of the union — confirm against the schema).
      ba.size must ===(2)
      ba(0) must ===(0)
      ba(1) must ===(0)


      out.close

      val reader = new SpecificDatumReader[AvroRecordTestEncoder01](schema)

      val decoder = DecoderFactory.get().binaryDecoder(ba, null)
      val decoded = reader.read(null, decoder)

      decoded must ===(record)
    }
  }



}
package test

import org.specs2.mutable.Specification
import java.io.File

import org.apache.avro.generic._
import org.apache.avro.specific._
import org.apache.avro.Schema
import org.apache.avro.Schema.{Type => AvroType}
import org.apache.avro.file._

/** Verifies that @AvroRecord still works when the annotated case class
  * already has a hand-written companion object (with or without mixins).
  */
class AvroTypeProviderCompanionTest extends Specification {

  "A case class that has a preexisting companion object" should {
    "serialize and deserialize correctly" in {

      val record = AvroRecordPreexistingCompanionTest00(1)

      val file = File.createTempFile("AvroRecordPreexistingCompanionTest00", "avro")
      file.deleteOnExit()

      val userDatumWriter = new SpecificDatumWriter[AvroRecordPreexistingCompanionTest00]
      val dataFileWriter = new DataFileWriter[AvroRecordPreexistingCompanionTest00](userDatumWriter)
      dataFileWriter.create(record.getSchema(), file)
      dataFileWriter.append(record)
      dataFileWriter.close()

      val schema = new DataFileReader(file, new GenericDatumReader[GenericRecord]).getSchema
      val userDatumReader = new SpecificDatumReader[AvroRecordPreexistingCompanionTest00](schema)
      val dataFileReader = new DataFileReader[AvroRecordPreexistingCompanionTest00](file, userDatumReader)
      val sameRecord = dataFileReader.next()
      dataFileReader.close() // was leaked

      // The preexisting companion's own member must be untouched by the macro.
      AvroRecordPreexistingCompanionTest00.o must ===(5)

      sameRecord must ===(record)
    }
  }

  "A case class that has a preexisting companion object with mixins" should {
    "serialize and deserialize correctly" in {

      val record = AvroRecordPreexistingCompanionTest01(1)

      // Suffix was "avsc" (schema extension) by mistake; every sibling test
      // writes its Avro data file with an "avro" suffix.
      val file = File.createTempFile("AvroRecordPreexistingCompanionTest01", "avro")
      file.deleteOnExit()

      val userDatumWriter = new SpecificDatumWriter[AvroRecordPreexistingCompanionTest01]
      val dataFileWriter = new DataFileWriter[AvroRecordPreexistingCompanionTest01](userDatumWriter)
      dataFileWriter.create(record.getSchema(), file)
      dataFileWriter.append(record)
      dataFileWriter.close()

      val schema = new DataFileReader(file, new GenericDatumReader[GenericRecord]).getSchema
      val userDatumReader = new SpecificDatumReader[AvroRecordPreexistingCompanionTest01](schema)
      val dataFileReader = new DataFileReader[AvroRecordPreexistingCompanionTest01](file, userDatumReader)
      val sameRecord = dataFileReader.next()
      dataFileReader.close() // was leaked

      AvroRecordPreexistingCompanionTest01.o must ===(6)

      sameRecord must ===(record)
    }
  }
}
package test

import org.specs2.mutable.Specification

/** Reads fixture .avro files for records with nested container types
  * (lists of lists, options of lists, and combinations thereof).
  */
class AvroTypeProvider28Test extends Specification {

  "A case class with a `List[List[String]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest28(List(List("blackbird.grackle"))))
    }
  }

  "A case class with a `List[List[Int]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest29(List(List(1, 2))))
    }
  }

  "A case class with an `Option[List[String]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest30(Some(List("starling.oriole"))))
    }
  }

  "A case class with an `Option[List[Int]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest31(Some(List(5, 6))))
    }
  }

  "A case class with a `List[Option[String]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest32(List(Some("cowbird"))))
    }
  }

  "A case class with a `List[Option[Int]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest33(List(Some(1))))
    }
  }

  "A case class with a `Option[List[Option[String]]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest34(Some(List(Some("cowbird"), None))))
    }
  }

  "A case class with a `Option[List[Option[Int]]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest35(Some(List(Some(1), None))))
    }
  }

  "A case class with a `List[Option[List[Option[String]]]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest36(List(None, Some(List(Some("cowbird"), None)))))
    }
  }

  "A case class with a `List[Option[List[Option[Int]]]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest37(List(None, Some(List(Some(1), None)))))
    }
  }
}
package com.julianpeeters.avro.annotations
package record
package methodgen

import scala.reflect.macros.blackbox.Context

import collection.JavaConversions._

import org.apache.avro.Schema

// Generates the individual `case` clauses of the `put(field: Int, value: Any)`
// method that the @AvroRecord macro adds to an annotated case class: each
// clause converts the incoming Avro/Java value back into the Scala field type.
abstract class PutDefCaseGenerator {


  //necessary for type refinement when trying to pass dependent types
  val context: Context

  import context.universe._
  import Flag._

  //expands to cases used in a pattern match, e.g. case 1 => this.username = value.asInstanceOf[String]
  def asPutCase(nme: TermName, tpe: Type, idx: Int) = {
    // Recursively rewrites `tree` (an Avro/Java value) into the Scala value
    // expected for a field of type `fieldType`.
    def convertToScala(fieldType: Type, tree: Tree): Tree = {
      fieldType match {
        // Avro hands strings back as org.apache.avro.util.Utf8; normalize to String.
        case s @ TypeRef(pre, symbol, args) if (s =:= typeOf[String]) => {
          q"""$tree match {
            case x: org.apache.avro.util.Utf8 => $tree.toString
            case _ => $tree
          } """
        }
        // Option[T]: wrap the converted value; Option(null) yields None.
        case o @ TypeRef(pre, symbol, args) if (o <:< typeOf[Option[Any]] && args.length == 1) => {
          // Option[Option[_]] is explicitly unsupported by this macro.
          if (args.head <:< typeOf[Option[Any]]) {
            throw new UnsupportedOperationException("Implementation limitation: Cannot immediately nest Option types")
          }
          else q"""Option(${convertToScala(args.head, tree)})"""
        }
        // java.util.List -> immutable Scala List with converted elements
        // (null passes through unchanged).
        case o @ TypeRef(pre, symbol, args) if (o <:< typeOf[List[Any]] && args.length == 1) => {
          q"""$tree match {
            case null => null
            case array: java.util.List[_] => {
              scala.collection.JavaConversions.asScalaIterator(array.iterator).toList.map(e => ${convertToScala(args.head, q"e")})
            }
          }"""
        }
        // java.util.Map -> immutable Scala Map, keys stringified, values converted
        // (null passes through unchanged).
        case o @ TypeRef(pre, symbol, args) if (o <:< typeOf[Map[String,Any]] && args.length == 2) => {
          q"""$tree match {
            case null => null
            case map: java.util.Map[_,_] => {
              scala.collection.JavaConversions.mapAsScalaMap(map).toMap.map(kvp => {
                val key = kvp._1.toString
                val value = kvp._2
                (key, ${convertToScala(args(1), q"value")})
              })
            }
          }"""
        }
        // Anything else (primitives, nested records) passes through unchanged.
        case _ => tree
      }
    }
    cq"""pos if (pos == ${idx}) => this.${nme} = ${convertToScala(tpe, q"value")}.asInstanceOf[${tpe}] """
  }




  
}
package test

import java.io.File

import org.apache.avro.generic.{ GenericDatumReader, GenericRecord}
import org.apache.avro.specific.{
  SpecificDatumReader,
  SpecificDatumWriter,
  SpecificRecordBase
}
import org.apache.avro.Schema
import org.apache.avro.file.{ DataFileReader, DataFileWriter }

import org.specs2.mutable.Specification

/** Shared helpers for the round-trip tests: write records to an Avro data
  * file, read them back, and assert equality with specs2 matchers.
  */
object TestUtil extends Specification {

  /** Writes all `records` to `file` using the schema of the first record. */
  def write[T <: SpecificRecordBase](file: File, records: List[T]) = {
    val userDatumWriter = new SpecificDatumWriter[T]
    val dataFileWriter = new DataFileWriter[T](userDatumWriter)
    dataFileWriter.create(records.head.getSchema, file)
    records.foreach(record => dataFileWriter.append(record))
    dataFileWriter.close()
  }

  /** Reads `file` back and asserts each deserialized record equals the
    * corresponding entry of `records`.
    */
  def read[T <: SpecificRecordBase](file: File, records: List[T]) = {
    val dummyRecord = new GenericDatumReader[GenericRecord]
    val schema = new DataFileReader(file, dummyRecord).getSchema
    val userDatumReader = new SpecificDatumReader[T](schema)
    val dataFileReader = new DataFileReader[T](file, userDatumReader)
    // Adapted from: https://github.com/tackley/avrohugger-list-issue/blob/master/src/main/scala/net/tackley/Reader.scala
    // This isn't great scala, but represents how org.apache.avro.mapred.AvroInputFormat
    // (via org.apache.avro.file.DataFileStream) interacts with the SpecificDatumReader.
    var record: T = null.asInstanceOf[T]
    var sameRecord: T = null.asInstanceOf[T]
    val recordIter = records.iterator
    while (dataFileReader.hasNext) {
      sameRecord = dataFileReader.next(sameRecord)
      record = recordIter.next
      // Was asserted only once after the loop, so a mismatch in any record
      // but the last went undetected; check every record as it is read.
      sameRecord must ===(record)
    }
    dataFileReader.close()
    sameRecord must ===(record)
  }

  /** Writes `records` to a temp file and verifies they read back intact. */
  def verifyWriteAndRead[T <: SpecificRecordBase](records: List[T]) = {
    val fileName = s"${records.head.getClass.getName}"
    val fileEnding = "avro"
    val file = File.createTempFile(fileName, fileEnding)
    file.deleteOnExit()
    write(file, records)
    read(file, records)
  }

  /** Reads the checked-in fixture named after `record`'s class and verifies
    * it deserializes to `record`.
    */
  def verifyRead[T <: SpecificRecordBase](record: T) = {
    val className = record.getClass.getName.split('.').last
    val fileName = s"tests/src/test/resources/${className}.avro"
    val file = new File(fileName)

    val dummyRecord = new GenericDatumReader[GenericRecord]
    val schema = new DataFileReader(file, dummyRecord).getSchema
    val userDatumReader = new SpecificDatumReader[T](schema)
    val dataFileReader = new DataFileReader[T](file, userDatumReader)
    val sameRecord = dataFileReader.next()
    dataFileReader.close() // was leaked

    sameRecord must ===(record)
  }

}
package test

import org.specs2.mutable.Specification

import java.io.File

import org.apache.avro.generic.{ GenericDatumReader, GenericRecord}
import org.apache.avro.specific.{
  SpecificDatumReader,
  SpecificDatumWriter,
  SpecificRecordBase
}
import org.apache.avro.Schema
import org.apache.avro.file.{ DataFileReader, DataFileWriter }

/** Verifies that @AvroRecord coexists with a hand-written companion object,
  * leaving the companion's own members intact.
  */
class AvroRecordCompanionTest extends Specification {

  "A case class that has a preexisting companion object" should {
    "serialize and deserialize correctly" in {
      val record = AvroRecordPreexistingCompanionTest00(1)

      val tempFile = File.createTempFile("AvroRecordPreexistingCompanionTest00", "avro")
      tempFile.deleteOnExit()

      // Write the single record out to the temp file.
      val writer = new DataFileWriter[AvroRecordPreexistingCompanionTest00](
        new SpecificDatumWriter[AvroRecordPreexistingCompanionTest00])
      writer.create(record.getSchema(), tempFile)
      writer.append(record)
      writer.close()

      // Read it back using the writer schema embedded in the file.
      val embeddedSchema =
        new DataFileReader(tempFile, new GenericDatumReader[GenericRecord]).getSchema
      val specificReader =
        new SpecificDatumReader[AvroRecordPreexistingCompanionTest00](embeddedSchema)
      val roundTripped =
        new DataFileReader[AvroRecordPreexistingCompanionTest00](tempFile, specificReader).next()

      // The companion's own member survives macro expansion.
      AvroRecordPreexistingCompanionTest00.o must ===(5)
      roundTripped must ===(record)
    }
  }

  "A case class that has a preexisting companion object with a mixin" should {
    "serialize and deserialize correctly" in {
      val record = AvroRecordPreexistingCompanionTest01(1)

      val tempFile = File.createTempFile("AvroRecordPreexistingCompanionTest01", "avro")
      tempFile.deleteOnExit()

      val writer = new DataFileWriter[AvroRecordPreexistingCompanionTest01](
        new SpecificDatumWriter[AvroRecordPreexistingCompanionTest01])
      writer.create(record.getSchema(), tempFile)
      writer.append(record)
      writer.close()

      val embeddedSchema =
        new DataFileReader(tempFile, new GenericDatumReader[GenericRecord]).getSchema
      val specificReader =
        new SpecificDatumReader[AvroRecordPreexistingCompanionTest01](embeddedSchema)
      val roundTripped =
        new DataFileReader[AvroRecordPreexistingCompanionTest01](tempFile, specificReader).next()

      AvroRecordPreexistingCompanionTest01.o must ===(6)
      roundTripped must ===(record)
    }
  }
}
/*
 * Copyright (c) 2012 SnowPlow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
import sbt._
import Keys._

/**
 * Build settings for the Scalding example project: basic project metadata
 * plus sbt-assembly configuration for producing an EMR-ready fat jar.
 */
object BuildSettings {

  // Basic settings for our app
  lazy val basicSettings = Seq[Setting[_]](
    organization  := "SnowPlow Analytics Ltd",
    version       := "0.0.5-SNAPSHOT",
    description   := "The Scalding WordCountJob example as a standalone SBT project, ready for Amazon EMR",
    scalaVersion  := "2.11.7",
    scalacOptions := Seq("-deprecation", "-encoding", "utf8"),
    resolvers    ++= Dependencies.resolutionRepos
  )

  // sbt-assembly settings for building a fat jar
  import sbtassembly.Plugin._
  import AssemblyKeys._
  lazy val sbtAssemblySettings = assemblySettings ++ Seq(

    // Slightly cleaner jar name
    jarName in assembly := {
      name.value + "-" + version.value + ".jar"
    },

    // Drop these jars.
    // Migrated from the deprecated `<<= ... map` operator to the sbt 0.13
    // `:=`/`.value` macro form, consistent with `jarName` above.
    excludedJars in assembly := {
      val cp = (fullClasspath in assembly).value
      val excludes = Set(
        "jsp-api-2.1-6.1.14.jar",
        "jsp-2.1-6.1.14.jar",
        "jasper-compiler-5.5.12.jar",
        "minlog-1.2.jar",                   // Otherwise causes conflicts with Kyro (which bundles it)
        "janino-2.5.16.jar",                // Janino includes a broken signature, and is not needed anyway
        "commons-beanutils-core-1.8.0.jar", // Clash with each other and with commons-collections
        "commons-beanutils-1.7.0.jar"       // "
        // "hadoop-core-0.20.2.jar",        // Provided by Amazon EMR. Delete this line if you're not on EMR
        // "hadoop-tools-0.20.2.jar"        // "
      )
      cp filter { jar => excludes(jar.data.getName) }
    },

    // Keep the default merge strategy except for Leiningen build files.
    // `:=` with a self-reference replaces the deprecated `<<=` transformation.
    mergeStrategy in assembly := {
      val old = (mergeStrategy in assembly).value
      {
        case "project.clj" => MergeStrategy.discard // Leiningen build files
        case x             => old(x)
      }
    }
  )

  lazy val buildSettings = basicSettings ++ sbtAssemblySettings
}
package test

import org.specs2.mutable.Specification

/**
 * Round-trip serialization specs for 2-arity case classes whose two fields
 * share the same type ("homogeneous" arity-2). Each spec constructs a single
 * record and hands it to TestUtil.verifyRead, which performs the Avro
 * write/read cycle and compares the result against the original.
 */
class AvroTypeProvider2ArityHomoTest extends Specification {

  "A case class with an `Int` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest14(1, 2))
    }
  }

  "A case class with an `Float` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest15(1F, 2F))
    }
  }

  "A case class with an `Long` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest16(1L, 2L))
    }
  }

  "A case class with an `Double` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest17(1D, 2D))
    }
  }

  "A case class with an `Boolean` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest18(true, false))
    }
  }

  "A case class with an `String` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest19("1", "2"))
    }
  }

  "A case class with an `Null` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest20(null, null))
    }
  }

  "A case class with an `List[String]` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest24(List("mekka.lekka.hi"), List("mekka.hiney.ho")))
    }
  }

  "A case class with an `List[Int]` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest25(List(1, 2), List(3, 4)))
    }
  }

  "A case class with an `Option[String]` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest26(Some("sun"), Some("moon")))
    }
  }

  "A case class with an `Option[Int]` field in the second position" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest27(Some(1), Some(2)))
    }
  }
}
package test

import org.specs2.mutable.Specification

/**
 * Round-trip serialization specs for 2-arity case classes whose two fields
 * have differing ("hetero") types. Each spec builds one record and delegates
 * the Avro write/read/compare cycle to TestUtil.verifyRead.
 */
class AvroTypeProvider2ArityHeteroTest extends Specification {

  "A case class with an `Int` field coexisting with a non-`Int` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest48(1, "bonjour")
      TestUtil.verifyRead(record)
    }
  }

  // Description fixed: the second field of Test49 *is* an `Int`, so the
  // previous title ("non-`Int` field") was a copy-paste error from the spec above.
  "A case class with an `String` field coexisting with a non-`String` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest49("bueno", 2)
      TestUtil.verifyRead(record)
    }
  }

  "A case class with an `Option[String]` field coexisting with an `Option[Int]` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest50(Some("tropics"), Some(3))
      TestUtil.verifyRead(record)
    }
  }

  "A case class with an `Option[Int]` field coexisting with an `Option[String]` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest51(Some(4), Some("level"))
      TestUtil.verifyRead(record)
    }
  }

  "A case class with a `List[String]` field coexisting with a `List[Int]` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest52(List("am.pm"), List(5, 6))
      TestUtil.verifyRead(record)
    }
  }

  "A case class with an `List[Int]` field coexisting with a `List[String]` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest53(List(7, 8), List("bon.sois"))
      TestUtil.verifyRead(record)
    }
  }

  "A case class with an `Option[List[Option[String]]]` field coexisting with a `Option[List[Option[Int]]]` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest54(Some(List(Some("bronco"), None)), Some(List(Some(9), None)))
      TestUtil.verifyRead(record)
    }
  }

  "A case class with an `Option[List[Option[Int]]]` field coexisting with a `Option[List[Option[String]]]` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest55(Some(List(Some(10), None)), Some(List(Some("bronca"), None)))
      TestUtil.verifyRead(record)
    }
  }

  "A case class with an `List[Option[List[Option[String]]]]` field coexisting with a `List[Option[List[Option[Int]]]]` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest56(List(Some(List(Some("tibetan"), None)), None), List(Some(List(Some(11), None)), None))
      TestUtil.verifyRead(record)
    }
  }

  // Description fixed: this spec previously duplicated the first spec's title
  // verbatim, which both misdescribed the field types under test and produced
  // two identically-named specs in the report.
  "A case class with a `List[Option[List[Option[Int]]]]` field coexisting with a `List[Option[List[Option[String]]]]` field" should {
    "serialize and deserialize correctly" in {
      val record = AvroTypeProviderTest57(List(Some(List(Some(12), None)), None), List(Some(List(Some("fire"), None)), None))
      TestUtil.verifyRead(record)
    }
  }
}
package com.julianpeeters.avro.annotations
package record
package schemagen

import scala.reflect.macros.blackbox.Context

import org.codehaus.jackson.JsonNode
import org.codehaus.jackson.node._

import collection.JavaConversions._

/**
 * Translates a case-class field's default-value `Tree` into the Jackson
 * `JsonNode` that Avro's `Schema.Field` constructor expects as a default.
 */
abstract class ToJsonMatcher {

  // Abstract members so the concrete macro Context (and the record's
  // namespace) can be refined at the mix-in site; necessary for type
  // refinement when trying to pass dependent types.
  val c: Context
  val ns: String

  import c.universe._
  import Flag._

  /**
   * Converts a default-value tree to a JsonNode.
   *
   * Returns a Java `null` (not `nullNode`) for `EmptyTree`: the Field
   * constructor interprets a null JsonNode as "no default value", which is
   * distinct from an explicit `default = null`.
   */
  def toJsonNode(dv: Tree): JsonNode = {
    lazy val jsonNodeFactory = JsonNodeFactory.instance
    dv match {
      // use of null here is for Java interop, builds Avro FieldConstructor w/o default value
      case EmptyTree                     => null
      case Literal(Constant(x: Unit))    => jsonNodeFactory.nullNode
      case Literal(Constant(x: Boolean)) => jsonNodeFactory.booleanNode(x)
      case Literal(Constant(x: Int))     => jsonNodeFactory.numberNode(x)
      case Literal(Constant(x: Long))    => jsonNodeFactory.numberNode(x)
      case Literal(Constant(x: Float))   => jsonNodeFactory.numberNode(x)
      case Literal(Constant(x: Double))  => jsonNodeFactory.numberNode(x)
      case Literal(Constant(x: String))  => jsonNodeFactory.textNode(x)
      case Literal(Constant(null))       => jsonNodeFactory.nullNode
      case Ident(TermName("None"))       => jsonNodeFactory.nullNode
      case Apply(Ident(TermName("Some")), List(x)) => toJsonNode(x)
      case Apply(Ident(TermName("List")), xs) => {
        val jsonArray = jsonNodeFactory.arrayNode
        // foreach, not map: `add` is invoked purely for its side effect
        xs.foreach(x => jsonArray.add(toJsonNode(x)))
        jsonArray
      }
      case Apply(Ident(TermName("Map")), kvps) => {
        val jsonObject = jsonNodeFactory.objectNode
        kvps.foreach {
          // each entry is a `"key" -> value` pair in the default Map literal;
          // a non-conforming entry raises a MatchError, as before
          case Apply(Select(Literal(Constant(key: String)), TermName(tn)), List(x)) =>
            jsonObject.put(key, toJsonNode(x))
        }
        jsonObject
      }
      // if the default value is another (i.e. nested) record/case class
      case Apply(Ident(TermName(name)), xs) if SchemaStore.schemas.contains(ns + "." + name) => {
        val jsonObject = jsonNodeFactory.objectNode
        // hoisted out of the loop: the schema lookup is invariant across fields
        val nestedSchema = SchemaStore.schemas(ns + "." + name)
        // values from the tree, field names from cross referencing tree's pos with schema field pos
        // (they always correspond since the schema is defined based on the fields in a class def)
        xs.zipWithIndex.foreach { case (value, index) =>
          jsonObject.put(nestedSchema.getFields()(index).name, toJsonNode(value))
        }
        jsonObject
      }
      case x => sys.error("Could not extract default value. Found: " + x + ", " + showRaw(x))
    }
  }
}
new SparkContext(sparkConf) 32 | val c = new Configuration() 33 | val job = new Job(c) 34 | val conf = job.getConfiguration 35 | val outPath = "output/twitter.avro" 36 | 37 | val user1 = new twitter_schema("Alyssa", "I'm the boss", 4L) 38 | val user2 = new twitter_schema("Ben", "win my money", 5L) 39 | 40 | val records = sc.parallelize(Array(user1, user2)) 41 | val withValues = records.map((x) => (new AvroKey(x), NullWritable.get)) 42 | 43 | FileOutputFormat.setOutputPath(job, new Path(outPath)) 44 | val schema = twitter_schema.SCHEMA$ 45 | AvroJob.setOutputKeySchema(job, schema) 46 | job.setOutputFormatClass(classOf[AvroKeyOutputFormat[twitter_schema]]) 47 | 48 | withValues.saveAsNewAPIHadoopDataset(conf) 49 | } 50 | } 51 | 52 | 53 | object AvroSpecificReadJob { 54 | def main(args: Array[String]) { 55 | val sparkConf = new SparkConf().setAppName("Spark Avro").setMaster("local") 56 | MyKryoRegistrator.register(sparkConf) 57 | val sc = new SparkContext(sparkConf) 58 | val c = new Configuration() 59 | val job = new Job(c) 60 | val conf = job.getConfiguration 61 | val outPath = "output/twitter-wordcount-scala-spark-specific.tsv" 62 | 63 | // A Schema must be specified (avro.mapreduce tries to make one reflectively but fails since Scala fields are private) 64 | val schema = twitter_schema.SCHEMA$ 65 | AvroJob.setInputKeySchema(job, schema) 66 | 67 | val avroRdd = sc.newAPIHadoopFile("twitter.avro", 68 | classOf[AvroKeyInputFormat[twitter_schema]], 69 | classOf[AvroKey[twitter_schema]], 70 | classOf[NullWritable], 71 | conf) 72 | 73 | val specificRecords = avroRdd.map{case (ak, _) => ak.datum()} 74 | 75 | val wordCounts = specificRecords.map((sr: twitter_schema) => sr.get(1).asInstanceOf[String]) 76 | .flatMap{tweet: String => tweet.split(" ")} 77 | .map(word => (word, 1)) 78 | .reduceByKey((a, b) => a + b) 79 | 80 | val wordCountsFormatted = wordCounts.map{case (word, count) => (escapeCsv(word), count)} 81 | .map{case (word, count) => s"$word,$count"} 82 | 83 | 
package com.julianpeeters.avro.annotations
package record
package schemagen

import scala.reflect.macros.blackbox.Context

import collection.JavaConversions._
import java.util.{Arrays => JArrays}
import org.apache.avro.Schema
import org.apache.avro.Schema.Field
import org.apache.avro.Schema.{Type => AvroType}
import org.apache.avro.util.Utf8

/**
 * Derives Avro schemas and fields from the Scala types of an annotated
 * case class's constructor parameters.
 */
abstract class FieldSchemaGenerator {

  // Abstract so the concrete Context can be refined at the mix-in site;
  // necessary for type refinement when trying to pass dependent types.
  val context: Context

  import context.universe._
  import Flag._

  // Scala-type -> Avro-schema lookup table for primitives; adapted from
  // https://github.com/radlab/avro-scala-compiler-plugin/blob/master/src/main/scala/plugin/SchemaGen.scala
  val primitiveClasses: Map[Type, Schema] = Map(
    /** Primitives in the Scala and Avro sense */
    typeOf[Int]     -> Schema.create(AvroType.INT),
    typeOf[Float]   -> Schema.create(AvroType.FLOAT),
    typeOf[Long]    -> Schema.create(AvroType.LONG),
    typeOf[Double]  -> Schema.create(AvroType.DOUBLE),
    typeOf[Boolean] -> Schema.create(AvroType.BOOLEAN),
    typeOf[String]  -> Schema.create(AvroType.STRING),
    typeOf[Null]    -> Schema.create(AvroType.NULL),
    /** Primitives in the Avro sense */
    typeOf[java.nio.ByteBuffer] -> Schema.create(AvroType.BYTES),
    typeOf[Utf8]                -> Schema.create(AvroType.STRING)
  )

  /** Recursively maps a Scala type to its Avro schema. */
  def createSchema(tpe: context.universe.Type): Schema = tpe match {
    case t if primitiveClasses.contains(t) =>
      primitiveClasses(t)
    case t @ TypeRef(_, _, args) if t <:< typeOf[List[Any]] && args.length == 1 =>
      Schema.createArray(createSchema(args.head))
    case t @ TypeRef(_, _, args) if t <:< typeOf[Option[Any]] && args.length == 1 =>
      if (args.head <:< typeOf[Option[Any]]) {
        throw new UnsupportedOperationException("Implementation limitation: Cannot immediately nest Option types")
      }
      else {
        // Option[T] becomes the Avro union [null, T]
        val members = Array(createSchema(typeOf[Null]), createSchema(args.head))
        Schema.createUnion(JArrays.asList(members: _*))
      }
    case t @ TypeRef(_, _, args) if t <:< typeOf[Map[String, Any]] && args.length == 2 =>
      Schema.createMap(createSchema(args(1)))
    case t @ TypeRef(_, _, _) if t <:< typeOf[Product with Serializable] =>
      // if a case class (a nested record) is found, reuse the schema that was made and stored when its macro was expanded.
      // unsuccessful alternatives: reflectively getting the schema from its companion (can't get a tree from a Symbol),
      // or regenerating the schema (no way to get default param values from outside the current at compile time).
      SchemaStore.schemas(t.toString)
    case t =>
      throw new UnsupportedOperationException("Could not generate schema. Cannot support yet: " + t)
  }

  /** Builds an Avro Field for one case-class parameter, including its JSON default. */
  def toAvroField(namespace: String, nme: TermName, tpe: Type, dv: Tree) = {
    val toJsonMatcher = new { val c: context.type = context; val ns: String = namespace } with ToJsonMatcher

    new Field(
      nme.toString.trim,
      createSchema(tpe),
      "Auto-Generated Field",
      toJsonMatcher.toJsonNode(dv)
    )
  }
}
the type, can be different. 39 | val ns = Seq(schema.getNamespace, namespace). 40 | map(Option(_)).flatten.filterNot(_.isEmpty).headOption 41 | 42 | val fullName = ns match { 43 | case None => recordName 44 | case Some(ns) => (ns + "." + recordName) 45 | } 46 | try { 47 | c.mirror.staticClass(fullName).toType 48 | } 49 | catch { 50 | case scala.ScalaReflectionException(_) => { 51 | sys.error("no case class " + fullName + " corresponds to field type: " + recordName + ": record") 52 | } 53 | } 54 | } 55 | } 56 | } 57 | case Schema.Type.STRING => typeOf[String] 58 | case Schema.Type.UNION => { 59 | val unionSchemas = schema.getTypes.toList 60 | if (unionSchemas.length == 2 && 61 | unionSchemas.exists(schema => schema.getType == Schema.Type.NULL) && 62 | unionSchemas.exists(schema => schema.getType != Schema.Type.NULL)) { 63 | val maybeSchema = unionSchemas.find(schema => schema.getType != Schema.Type.NULL) 64 | val typeTree = tq"Option[${toScala(namespace, maybeSchema.get, c)}]" 65 | if (maybeSchema.isDefined) expandNestedTypes(typeTree) 66 | else sys.error("no avro type found in this union") 67 | } 68 | else sys.error("not a union field") 69 | } 70 | case Schema.Type.MAP => { 71 | val typeTree = tq"Map[String, ${toScala(namespace, schema.getValueType, c)}]" 72 | expandNestedTypes(typeTree) 73 | } 74 | case Schema.Type.BYTES => sys.error("BYTES is not yet supported") 75 | case Schema.Type.FIXED => sys.error("FIXED is not yet supported") 76 | case Schema.Type.ENUM => sys.error("ENUM is not yet supported") 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /examples/scalding/README.md: -------------------------------------------------------------------------------- 1 | Adapted from: 2 | 3 | Strata2014ScaldingTutorial 4 | ========================== 5 | 6 | This is a template project for a tutorial on [Effective Data Science With Scalding](http://strataconf.com/strata2014/public/schedule/detail/31957) 7 | at [Strata 
2014 Conference](http://strataconf.com/strata2014). 8 | 9 | This project is based on amazingly simple [Scalding Example Project by Snowplow Analytics](https://github.com/snowplow/scalding-example-project). 10 | 11 | To make best use of your time at the tutorial, please do the following preparation steps: 12 | 13 | 14 | 1. Install SBT 15 | -------------- 16 | 17 | We use [SBT build tool](http://www.scala-sbt.org/) version `0.13.9` 18 | 19 | If you don't have it installed on your machine, one way to install it on Mac OS X would be 20 | by using [Homebrew](http://brew.sh/): 21 | 22 | > brew install sbt 23 | 24 | [The SBT documentation](http://www.scala-sbt.org/0.12.3/docs/Getting-Started/Setup.html) contains 25 | detailed information about install on other systems. 26 | 27 | 2. Clone this project 28 | --------------------- 29 | 30 | > git clone https://github.com/julianpeeters/avro-scala-macro-annotation-examples.git 31 | > cd avro-scala-macro-annotation-examples/scalding 32 | 33 | 34 | 3. Enter SBT Command Line 35 | ------------------------- 36 | 37 | > sbt 38 | ... 39 | Tutorial > 40 | 41 | This step may take quite a bit of time downloading all the parts and dependencies so it 42 | makes so much sense to run it in advance. 43 | 44 | 4. Compile and Run the Write Example 45 | ---------------------------------------- 46 | 47 | Tutorial > run tutorial.PackedAvroWriteJob 48 | ... 49 | [success] Total time: 2 s, completed Feb 3, 2014 9:15:12 PM 50 | Tutorial > 51 | 52 | This step can also take quite a bit of time. 53 | 54 | 55 | 5. See the Results 56 | ------------------ 57 | 58 | The file `PackedAvroOutput.avro` should be created in the `data` directory. 59 | 60 | 61 | 6. Compile and Run the Read Example 62 | ---------------------------------------- 63 | 64 | Tutorial > run tutorial.PackedAvroReadJob 65 | ... 66 | [success] Total time: 2 s, completed Feb 3, 2014 9:15:12 PM 67 | Tutorial > 68 | 69 | This step can also take quite a bit of time. 70 | 71 | 72 | 7. 
See the Results 73 | ------------------ 74 | 75 | The file `AvroWordcountOutput.tsv` should be created in the `data` directory. 76 | 77 | FAQ 78 | --- 79 | 80 | ### I am getting java.lang.OutOfMemoryError ### 81 | 82 | You will need to set JVM runtime parameters. Depending on the type of error you will need to increase 83 | heap size (`-Xmx`) or PermGen space (`-XX:MaxPermSize`). 84 | 85 | You can set up options in the SBT command or in a file located at `~/.sbtconfig`, for example: 86 | 87 | > cat > ~/.sbtconfig 88 | export SBT_OPTS="-Xmx1024m -XX:MaxPermSize=256m" 89 | D 90 | > 91 | 92 | 93 | Copyright and license 94 | --------------------- 95 | 96 | Copyright 2012-2013 Snowplow Analytics Ltd, with significant portions copyright 2012 Twitter, Inc. 97 | 98 | Licensed under the [Apache License, Version 2.0] [license] (the "License"); 99 | you may not use this software except in compliance with the License. 100 | 101 | Unless required by applicable law or agreed to in writing, software 102 | distributed under the License is distributed on an "AS IS" BASIS, 103 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 104 | See the License for the specific language governing permissions and 105 | limitations under the License. 

[license]: http://www.apache.org/licenses/LICENSE-2.0


--------------------------------------------------------------------------------
/tests/src/test/scala/AvroTypeProviderTests/datatypetests/AvroTypeProvider2ArityHomoNestedTest.scala:
--------------------------------------------------------------------------------
package test

import org.specs2.mutable.Specification

// Round-trip (read-side) checks for case classes whose two fields share the
// same nested collection type, with fields provided by @AvroTypeProvider.
class AvroTypeProvider2ArityHomoNestedTest extends Specification {

  "A case class with more than one `List[List[String]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(
        AvroTypeProviderTest38(
          List(List("hi.bye"), List("yay.nay")),
          List(List("one.two"), List("three.four"))))
    }
  }

  "A case class with more than one `List[List[String]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(
        AvroTypeProviderTest39(
          List(List(1, 2), List(3, 4)),
          List(List(5, 6), List(7, 8))))
    }
  }

  "A case class with more than one `Option[List[String]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest40(Some(List("up.down")), Some(List("left.right"))))
    }
  }

  "A case class with more than one `Option[List[Int]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest41(Some(List(1, 2)), Some(List(3, 4))))
    }
  }

  "A case class with more than one `List[Option[String]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest42(List(None, Some("red")), List(Some("blue"), None)))
    }
  }

  "A case class with more than one `List[Option[Int]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest43(List(Some(1), None), List(Some(3), None)))
    }
  }

  "A case class with more than one `Option[List[Option[String]]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest44(Some(List(Some("gold"), None)), Some(List(Some("silver"), None))))
    }
  }

  "A case class with more than one `Option[List[Option[Int]]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(AvroTypeProviderTest45(Some(List(Some(8), None)), Some(List(Some(10), None))))
    }
  }

  "A case class with more than one `List[Option[List[String]]]` field" should {
    "serialize and deserialize correctly" in {
      TestUtil.verifyRead(
        AvroTypeProviderTest46(
          List(None, Some(List(Some("green"), None))),
          List(None, Some(List(None, Some("yellow"))))))
    }
  }

  /* //TODO make readable file for this class - not very urgent since this field type is tested in other contexts also
  class AvroTypeProvider47Test extends Specification {

    "A case class with more than one `List[Option[List[Int]]]` field" should {
      "serialize and deserialize correctly" in {
        val record = AvroTypeProviderTest47(List(None, Some(List(Some(2), None))), List(None, Some(List(None, Some(4)))) )
        TestUtil.verifyRead(record)
      }
    }
  */
}
--------------------------------------------------------------------------------
/project/Build.scala:
--------------------------------------------------------------------------------
import sbt._
import Keys._

object BuildSettings {
  val buildSettings = Defaults.defaultSettings ++ Seq(
    organization := "com.julianpeeters",
    version := "0.11.1",
    scalaVersion := 
"2.11.8", 9 | crossScalaVersions := Seq("2.11.6", "2.11.7", "2.11.8", "2.12.1"), 10 | scalacOptions ++= Seq("-unchecked", "-deprecation", "-feature", "-Ywarn-value-discard"), 11 | resolvers += "scalaz-bintray" at "http://dl.bintray.com/scalaz/releases", 12 | resolvers += Resolver.sonatypeRepo("releases"), 13 | addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full), 14 | libraryDependencies += "org.apache.avro" % "avro" % "1.7.6", 15 | libraryDependencies := { 16 | CrossVersion.partialVersion(scalaVersion.value) match { 17 | case Some((2, scalaMajor)) if scalaMajor >= 11 => 18 | libraryDependencies.value ++ Seq ( 19 | "org.specs2" %% "specs2-core" % "3.8.6" % "test") 20 | case Some((2, 12)) => 21 | libraryDependencies.value ++ Seq() 22 | } 23 | }, 24 | // publishing 25 | publishMavenStyle := true, 26 | publishArtifact in Test := false, 27 | publishTo <<= version { (v: String) => 28 | val nexus = "https://oss.sonatype.org/" 29 | if (v.trim.endsWith("SNAPSHOT")) 30 | Some("snapshots" at nexus + "content/repositories/snapshots") 31 | else 32 | Some("releases" at nexus + "service/local/staging/deploy/maven2") 33 | }, 34 | pomIncludeRepository := { _ => false }, 35 | licenses := Seq("Apache 2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0")), 36 | homepage := Some(url("https://github.com/julianpeeters/avro-scala-macro-annotations")), 37 | pomExtra := ( 38 | 39 | git://github.com/julianpeeters/avro-scala-macro-annotations.git 40 | scm:git://github.com/julianpeeters/avro-scala-macro-annotations.git 41 | 42 | 43 | 44 | julianpeeters 45 | Julian Peeters 46 | http://github.com/julianpeeters 47 | 48 | ) 49 | ) 50 | 51 | } 52 | 53 | object MyBuild extends Build { 54 | import BuildSettings._ 55 | 56 | // Configure prompt to show current project 57 | override lazy val settings = super.settings :+ { 58 | shellPrompt := { s => Project.extract(s).currentProject.id + " > " } 59 | } 60 | 61 | lazy val root: Project = Project( 62 | "root", 63 | 
file("."), 64 | settings = buildSettings ++ Seq( 65 | publishArtifact := false, 66 | run <<= run in Compile in tests 67 | ) 68 | ) aggregate(macros, tests) 69 | 70 | lazy val macros: Project = Project( 71 | "avro-scala-macro-annotations", 72 | file("macros"), 73 | settings = buildSettings ++ Seq( 74 | libraryDependencies <+= (scalaVersion)("org.scala-lang" % "scala-reflect" % _), 75 | libraryDependencies += "org.codehaus.jackson" % "jackson-core-asl" % "1.9.13", 76 | libraryDependencies += "com.typesafe.scala-logging" %% "scala-logging" % "3.5.0", 77 | libraryDependencies += "com.typesafe.scala-logging" % "scala-logging-slf4j_2.11" % "2.1.2") 78 | ) 79 | 80 | lazy val tests: Project = Project( 81 | "tests", 82 | file("tests"), 83 | settings = buildSettings) 84 | .settings( 85 | publishArtifact := false 86 | // Add your additional libraries here (comma-separated)... 87 | ) dependsOn(macros) settings( 88 | // include the macro classes and resources in the main jar 89 | mappings in (Compile, packageBin) ++= mappings.in(macros, Compile, packageBin).value, 90 | // include the macro sources in the main source jar 91 | mappings in (Compile, packageSrc) ++= mappings.in(macros, Compile, packageSrc).value 92 | ) 93 | } 94 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroTypeProviderTests/datatypetests/AvroTypeProviderUserDefinedTypesTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | class AvroTypeProvider58Test extends Specification { 6 | 7 | "A case class with another record as a field" should { 8 | "serialize and deserialize correctly" in { 9 | val record = AvroTypeProviderTest58(AvroTypeProviderTest00(1)) 10 | TestUtil.verifyRead(record) 11 | } 12 | } 13 | 14 | "A case class with an `Float` field" should { 15 | "serialize and deserialize correctly" in { 16 | val record = 
AvroTypeProviderTest59(AvroTypeProviderTest58(AvroTypeProviderTest00(1))) 17 | TestUtil.verifyRead(record) 18 | } 19 | } 20 | 21 | "A case class with an `Long` field" should { 22 | "serialize and deserialize correctly" in { 23 | val record = AvroTypeProviderTest60(AvroTypeProviderTest00(1), AvroTypeProviderTest58(AvroTypeProviderTest00(2))) 24 | TestUtil.verifyRead(record) 25 | } 26 | } 27 | 28 | "A case class with a field that is list of a user-defined type" should { 29 | "serialize and deserialize correctly" in { 30 | val record = AvroTypeProviderTest61(List(AvroTypeProviderTest00(1), AvroTypeProviderTest00(2))) 31 | TestUtil.verifyRead(record) 32 | } 33 | } 34 | 35 | "A case class with a field that is list of a nested user-defined type" should { 36 | "serialize and deserialize correctly" in { 37 | val record = AvroTypeProviderTest62(List(AvroTypeProviderTest58(AvroTypeProviderTest00(1)), AvroTypeProviderTest58(AvroTypeProviderTest00(2)))) 38 | TestUtil.verifyRead(record) 39 | } 40 | } 41 | 42 | 43 | 44 | /* //TODO make readable file for this class - not very urgent since this field type is tested in other contexts also 45 | 46 | "A case class with a field that is list of a nested user-defined type in the second position" should { 47 | "serialize and deserialize correctly" in { 48 | val record = AvroTypeProviderTest63(List(AvroTypeProviderTest00(1), AvroTypeProviderTest00(2)), List(AvroTypeProviderTest60(AvroTypeProviderTest00(3), AvroTypeProviderTest58(AvroTypeProviderTest00(2))))) 49 | TestUtil.verifyRead(record) 50 | } 51 | } 52 | 53 | */ 54 | 55 | "A case class with a field that is list of a nested user-defined type in the second position" should { 56 | "serialize and deserialize correctly" in { 57 | val record = AvroTypeProviderTest64(Some(AvroTypeProviderTest00(1))) 58 | TestUtil.verifyRead(record) 59 | } 60 | } 61 | 62 | "A case class with a field that is list of a nested user-defined type in the second position" should { 63 | "serialize and deserialize 
correctly" in { 64 | val record = AvroTypeProviderTest65(None) 65 | TestUtil.verifyRead(record) 66 | } 67 | } 68 | 69 | "A case class with a field that is list of a nested user-defined type in the second position" should { 70 | "serialize and deserialize correctly" in { 71 | val record = AvroTypeProviderTest66(Some(AvroTypeProviderTest58(AvroTypeProviderTest00(1)))) 72 | TestUtil.verifyRead(record) 73 | } 74 | } 75 | 76 | "A case class with a field that is list of a nested user-defined type in the second position" should { 77 | "serialize and deserialize correctly" in { 78 | val record = AvroTypeProviderTest67(Some(AvroTypeProviderTest00(1)), Some(AvroTypeProviderTest60(AvroTypeProviderTest00(4), AvroTypeProviderTest58(AvroTypeProviderTest00(1))))) 79 | TestUtil.verifyRead(record) 80 | } 81 | } 82 | 83 | "A case class with a field that is list of a nested user-defined type in the second position" should { 84 | "serialize and deserialize correctly" in { 85 | val record = AvroTypeProviderTest68(Some(List(Some(AvroTypeProviderTest00(1)), None)), List(None, Some(List(AvroTypeProviderTest01(1F), AvroTypeProviderTest01(2F))))) 86 | TestUtil.verifyRead(record) 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /macros/src/main/scala/avro/scala/macro/annotations/AvroTypeProviderMacro.scala: -------------------------------------------------------------------------------- 1 | package com.julianpeeters.avro.annotations 2 | 3 | import provider._ 4 | 5 | import scala.reflect.macros.blackbox.Context 6 | import scala.language.experimental.macros 7 | import scala.annotation.{ compileTimeOnly, StaticAnnotation } 8 | 9 | import collection.JavaConversions._ 10 | import java.io.File 11 | 12 | import com.typesafe.scalalogging._ 13 | 14 | object AvroTypeProviderMacro extends LazyLogging { 15 | 16 | def impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { 17 | import c.universe._ 18 | import Flag._ 19 | 20 | val result = { 21 
annottees.map(_.tree).toList match {
        case q"$mods class $name[..$tparams](..$first)(...$rest) extends ..$parents { $self => ..$body }" :: tail => {

          // Get the namespace from the context and pass it around instead of
          // using schema.getNamespace, in order to read schemas that omit the
          // namespace (e.g. nested schemas or python's avrostorage default).
          val namespace = NamespaceProbe.getNamespace(c)

          // Fully-qualified record name; a null namespace means "no namespace".
          val fullName: String = {
            if (namespace == null) name.toString
            else s"$namespace.$name"
          }

          // Currently, having an `@AvroRecord` annotation is the only thing
          // that will trigger the writing of vars instead of vals.
          // FIX: `||` replaces the original non-short-circuit bitwise `|`;
          // the result is identical for Booleans, but `||` is the idiomatic
          // (and lazily evaluated) Boolean operator.
          val isImmutable: Boolean = {
            !mods.annotations.exists(mod => mod.toString == "new AvroRecord()" || mod.toString == "new AvroRecord(null)")
          }

          // Helpful for IDE users who may not easily see where their files live.
          logger.info(s"Current path: ${new File(".").getAbsolutePath}")

          // Get the schema for the record that this class represents.
          val avroFilePath = FilePathProbe.getPath(c)
          val infile = new File(avroFilePath)
          val fileSchemas = FileParser.getSchemas(infile)
          val nestedSchemas = fileSchemas.flatMap(NestedSchemaExtractor.getNestedSchemas)
          // First try matching the schema record's full name to the class'
          // full name, then fall back to the simple name in case we're reading
          // from a non-namespaced schema.
          val classSchema = nestedSchemas.find(s => s.getFullName == fullName)
            .getOrElse(nestedSchemas.find(s => s.getName == name.toString && s.getNamespace == null)
            .getOrElse(sys.error("no record found with name " + name)))

          // Wraps each schema field in a quasiquote, returning immutable val
          // defs if the immutable flag is true.
          val newFields: List[ValDef] = ValDefGenerator.asScalaFields(classSchema, namespace, isImmutable, c)

          tail match {
            // If there is no preexisting companion.
            case Nil => q"$mods class $name[..$tparams](..${newFields:::first})(...$rest) extends ..$parents { $self => ..$body }"
            // If there is a preexisting companion, include it with the updated classDef.
            case moduleDef @ q"object $moduleName extends ..$companionParents { ..$moduleBody }" :: Nil => {
              q"""$mods class $name[..$tparams](..${newFields:::first})(...$rest) extends ..$parents { $self => ..$body };
                  object ${name.toTermName} extends ..$companionParents { ..$moduleBody }"""
            }
            // FIX: previously non-exhaustive — any other companion shape
            // produced an opaque MatchError. Report it with a clear message.
            case _ => c.abort(c.enclosingPosition, s"@AvroTypeProvider: unsupported companion definition for $name")
          }
        }
        // FIX: previously non-exhaustive — annotating anything other than a
        // class (def, val, lone object) produced an opaque MatchError.
        case _ => c.abort(c.enclosingPosition, "@AvroTypeProvider can only be applied to class definitions")
      }
    }

    c.Expr[Any](result)
  }
}

/**
 * From the Macro Paradise Docs...
 *
 * note the @compileTimeOnly annotation. It is not mandatory, but is recommended to avoid confusion.
 * Macro annotations look like normal annotations to the vanilla Scala compiler, so if you forget
 * to enable the macro paradise plugin in your build, your annotations will silently fail to expand.
 * The @compileTimeOnly annotation makes sure that no reference to the underlying definition is
 * present in the program code after typer, so it will prevent the aforementioned situation
 * from happening.
81 | */ 82 | @compileTimeOnly("Enable Macro Paradise for Expansion of Annotations via Macros.") 83 | class AvroTypeProvider(inputPath: String) extends StaticAnnotation { 84 | def macroTransform(annottees: Any*): Any = macro AvroTypeProviderMacro.impl 85 | } 86 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroRecordTests/datatypetests/AvroRecord2ArityHeteroTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | class AvroRecord2ArityHeteroTest extends Specification { 6 | 7 | "A case class with an `Int` field coexisting with a non-`Int` field" should { 8 | "serialize and deserialize correctly" in { 9 | val record1 = AvroRecordTest48(1, "bonjour") 10 | val record2 = AvroRecordTest48(2, "moshi") 11 | val records = List(record1, record2) 12 | TestUtil.verifyWriteAndRead(records) 13 | } 14 | } 15 | 16 | "A case class with an `String` field coexisting with a non-`Int` field" should { 17 | "serialize and deserialize correctly" in { 18 | val record1 = AvroRecordTest49("bueno", 2) 19 | val record2 = AvroRecordTest49("hola", 3) 20 | val records = List(record1, record2) 21 | TestUtil.verifyWriteAndRead(records) 22 | } 23 | } 24 | 25 | "A case class with an `Option[String]` field coexisting with an `Option[Int]` field" should { 26 | "serialize and deserialize correctly" in { 27 | val record1 = AvroRecordTest50(Some("tropics"), Some(3)) 28 | val record2 = AvroRecordTest50(Some("equator"), Some(4)) 29 | val records = List(record1, record2) 30 | TestUtil.verifyWriteAndRead(records) 31 | } 32 | } 33 | 34 | "A case class with an `Option[Int]` field coexisting with an `Option[String]` field" should { 35 | "serialize and deserialize correctly" in { 36 | val record1 = AvroRecordTest51(Some(4), Some("level")) 37 | val record2 = AvroRecordTest51(Some(5), Some("inclined")) 38 | val records = List(record1, record2) 39 | 
TestUtil.verifyWriteAndRead(records) 40 | } 41 | } 42 | 43 | "A case class with a `List[String]` field coexisting with a `List[Int]` field" should { 44 | "serialize and deserialize correctly" in { 45 | val record1 = AvroRecordTest52(List("am.pm"), List(5,6)) 46 | val record2 = AvroRecordTest52(List("time"), List(7,8)) 47 | val records = List(record1, record2) 48 | TestUtil.verifyWriteAndRead(records) 49 | } 50 | } 51 | 52 | "A case class with an `List[Int]` field coexisting with a `List[String]` field" should { 53 | "serialize and deserialize correctly" in { 54 | val record1 = AvroRecordTest53(List(7, 8), List("bon.sois")) 55 | val record2 = AvroRecordTest53(List(9, 10), List("mon.amis")) 56 | val records = List(record1, record2) 57 | TestUtil.verifyWriteAndRead(records) 58 | } 59 | } 60 | 61 | "A case class with an `Option[List[Option[String]]]` field coexisting with a `Option[List[Option[Int]]]` field" should { 62 | "serialize and deserialize correctly" in { 63 | val record1 = AvroRecordTest54(Some(List(Some("bronco"), None)), Some(List(Some(9), None))) 64 | val record2 = AvroRecordTest54(Some(List(Some("bull"), None)), Some(List(Some(11), None))) 65 | val records = List(record1, record2) 66 | TestUtil.verifyWriteAndRead(records) 67 | } 68 | } 69 | 70 | "A case class with an `Option[List[Option[Int]]]` field coexisting with a `Option[List[Option[String]]]` field" should { 71 | "serialize and deserialize correctly" in { 72 | val record1 = AvroRecordTest55(Some(List(Some(10), None)), Some(List(Some("bronca"), None))) 73 | val record2 = AvroRecordTest55(Some(List(Some(12), None)), Some(List(Some("cow"), None))) 74 | val records = List(record1, record2) 75 | TestUtil.verifyWriteAndRead(records) 76 | } 77 | } 78 | 79 | "A case class with an `List[Option[List[Option[String]]]]` field coexisting with a `List[Option[List[Option[Int]]]]` field" should { 80 | "serialize and deserialize correctly" in { 81 | val record1 = AvroRecordTest56(List(Some(List(Some("tibetan"), 
None)), None), List(Some(List(Some(11), None)), None)) 82 | val record2 = AvroRecordTest56(List(Some(List(Some("nepalese"), None)), None), List(Some(List(Some(13), None)), None)) 83 | val records = List(record1, record2) 84 | TestUtil.verifyWriteAndRead(records) 85 | } 86 | } 87 | 88 | "A case class with an `Int` field coexisting with a non-`Int` field" should { 89 | "serialize and deserialize correctly" in { 90 | val record1 = AvroRecordTest57(List(Some(List(Some(12), None)), None), List(Some(List(Some("fire"), None)), None)) 91 | val record2 = AvroRecordTest57(List(Some(List(Some(15), None)), None), List(Some(List(Some("ice"), None)), None)) 92 | val records = List(record1, record2) 93 | TestUtil.verifyWriteAndRead(records) 94 | } 95 | } 96 | 97 | "A case class with two differing nested Map fields" should { 98 | "serialize and deserialize correctly" in { 99 | val record1 = AvroRecordTestMap11(Map("one"->Map("two"->3)), List(Map("state"->Map("knowledge"->"power")))) 100 | val record2 = AvroRecordTestMap11(Map("four"->Map("five"->6)), List(Map("country"->Map("truth"->"beauty")))) 101 | val records = List(record1, record2) 102 | TestUtil.verifyWriteAndRead(records) 103 | } 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroRecordTests/datatypetests/AvroRecordNestedTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | class AvroRecordNestedTest extends Specification { 6 | 7 | "A case class with a `List[List[String]]` field" should { 8 | "serialize and deserialize correctly" in { 9 | val record1 = AvroRecordTest28(List(List("blackbird.grackle"))) 10 | val record2 = AvroRecordTest28(List(List("pheasant.turkey"))) 11 | val records = List(record1, record2) 12 | TestUtil.verifyWriteAndRead(records) 13 | } 14 | } 15 | 16 | "A case class with a `List[List[Int]]` field" should { 17 | 
"serialize and deserialize correctly" in { 18 | val record1 = AvroRecordTest29(List(List(1, 2))) 19 | val record2 = AvroRecordTest29(List(List(3, 4))) 20 | val records = List(record1, record2) 21 | TestUtil.verifyWriteAndRead(records) 22 | } 23 | } 24 | 25 | "A case class with an `Option[List[String]]` field" should { 26 | "serialize and deserialize correctly" in { 27 | val record1 = AvroRecordTest30(Some(List("starling.oriole"))) 28 | val record2 = AvroRecordTest30(Some(List("buzzard.hawk"))) 29 | val records = List(record1, record2) 30 | TestUtil.verifyWriteAndRead(records) 31 | } 32 | } 33 | 34 | "A case class with an `Option[List[Int]]` field" should { 35 | "serialize and deserialize correctly" in { 36 | val record1 = AvroRecordTest31(Some(List(5, 6))) 37 | val record2 = AvroRecordTest31(Some(List(7, 8))) 38 | val records = List(record1, record2) 39 | TestUtil.verifyWriteAndRead(records) 40 | } 41 | } 42 | 43 | "A case class with a `List[Option[String]]` field" should { 44 | "serialize and deserialize correctly" in { 45 | val record1 = AvroRecordTest32(List(Some("cowbird"))) 46 | val record2 = AvroRecordTest32(List(Some("cuckoo"))) 47 | val records = List(record1, record2) 48 | TestUtil.verifyWriteAndRead(records) 49 | } 50 | } 51 | 52 | "A case class with a `List[Option[Int]]` field" should { 53 | "serialize and deserialize correctly" in { 54 | val record1 = AvroRecordTest33(List(Some(1))) 55 | val record2 = AvroRecordTest33(List(Some(2))) 56 | val records = List(record1, record2) 57 | TestUtil.verifyWriteAndRead(records) 58 | } 59 | } 60 | 61 | "A case class with a `Option[List[Option[String]]]` field" should { 62 | "serialize and deserialize correctly" in { 63 | val record1 = AvroRecordTest34(Some(List(Some("cowbird"), None))) 64 | val record2 = AvroRecordTest34(Some(List(Some("lark"), None))) 65 | val records = List(record1, record2) 66 | TestUtil.verifyWriteAndRead(records) 67 | } 68 | } 69 | 70 | "A case class with a `Option[List[Option[Int]]]` field" 
should { 71 | "serialize and deserialize correctly" in { 72 | val record1 = AvroRecordTest35(Some(List(Some(1), None))) 73 | val record2 = AvroRecordTest35(Some(List(Some(2), None))) 74 | val records = List(record1, record2) 75 | TestUtil.verifyWriteAndRead(records) 76 | } 77 | } 78 | 79 | "A case class with a `List[Option[List[Option[String]]]]` field" should { 80 | "serialize and deserialize correctly" in { 81 | val record1 = AvroRecordTest36(List(None, Some(List(Some("cowbird"), None)))) 82 | val record2 = AvroRecordTest36(List(None, Some(List(Some("goldfinch"), None)))) 83 | val records = List(record1, record2) 84 | TestUtil.verifyWriteAndRead(records) 85 | } 86 | } 87 | 88 | "A case class with a `List[Option[List[Option[Int]]]]` field" should { 89 | "serialize and deserialize correctly" in { 90 | val record1 = AvroRecordTest37(List(None, Some(List(Some(1), None)))) 91 | val record2 = AvroRecordTest37(List(None, Some(List(Some(2), None)))) 92 | val records = List(record1, record2) 93 | TestUtil.verifyWriteAndRead(records) 94 | } 95 | } 96 | 97 | "A case class with a Map[Int, Map[Int, Int]] field" should { 98 | "serialize and deserialize correctly" in { 99 | val record1 = AvroRecordTestMap07(Map("art"->Map("explode"->4))) 100 | val record2 = AvroRecordTestMap07(Map("science"->Map("define"->4))) 101 | val records = List(record1, record2) 102 | TestUtil.verifyWriteAndRead(records) 103 | } 104 | } 105 | 106 | "A case class with a List[Map[String, Map[Int, String]]] field" should { 107 | "serialize and deserialize correctly" in { 108 | val record1 = AvroRecordTestMap08(List(Map("hare"->Map("serpent"->"eagle")))) 109 | val record2 = AvroRecordTestMap08(List(Map("snake"->Map("bear"->"deer")))) 110 | val records = List(record1, record2) 111 | TestUtil.verifyWriteAndRead(records) 112 | } 113 | } 114 | 115 | "A case class with a Option[Map[String, Option[List[String]]]] field" should { 116 | "serialize and deserialize correctly" in { 117 | val record1 = 
AvroRecordTestMap09(Some(Map("Eje"->None))) 118 | val record2 = AvroRecordTestMap09(Some(Map("Rayo"->None))) 119 | val records = List(record1, record2) 120 | TestUtil.verifyWriteAndRead(records) 121 | } 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroRecordTests/datatypetests/AvroRecord2ArityHomoTest.scala: -------------------------------------------------------------------------------- 1 | 2 | package test 3 | 4 | import org.specs2.mutable.Specification 5 | 6 | class AvroRecord2ArityHomoTest extends Specification { 7 | 8 | "A case class with an `Int` field in the second position" should { 9 | "serialize and deserialize correctly" in { 10 | val record1 = AvroRecordTest14(1, 2) 11 | val record2 = AvroRecordTest14(3, 4) 12 | val records = List(record1, record2) 13 | TestUtil.verifyWriteAndRead(records) 14 | } 15 | } 16 | 17 | "A case class with an `Float` field in the second position" should { 18 | "serialize and deserialize correctly" in { 19 | val record1 = AvroRecordTest15(1F, 2F) 20 | val record2 = AvroRecordTest15(3F, 4F) 21 | val records = List(record1, record2) 22 | TestUtil.verifyWriteAndRead(records) 23 | } 24 | } 25 | 26 | "A case class with an `Long` field in the second position" should { 27 | "serialize and deserialize correctly" in { 28 | val record1 = AvroRecordTest16(1L, 2L) 29 | val record2 = AvroRecordTest16(3L, 4L) 30 | val records = List(record1, record2) 31 | TestUtil.verifyWriteAndRead(records) 32 | } 33 | } 34 | 35 | "A case class with an `Double` field in the second position" should { 36 | "serialize and deserialize correctly" in { 37 | val record1 = AvroRecordTest17(1D, 2D) 38 | val record2 = AvroRecordTest17(3D, 4D) 39 | val records = List(record1, record2) 40 | TestUtil.verifyWriteAndRead(records) 41 | } 42 | } 43 | 44 | "A case class with an `Boolean` field in the second position" should { 45 | "serialize and deserialize correctly" in { 46 | val record1 = 
AvroRecordTest18(true, false) 47 | val record2 = AvroRecordTest18(false, true) 48 | val records = List(record1, record2) 49 | TestUtil.verifyWriteAndRead(records) 50 | } 51 | } 52 | 53 | "A case class with an `String` field in the second position" should { 54 | "serialize and deserialize correctly" in { 55 | val record1 = AvroRecordTest19("1", "2") 56 | val record2 = AvroRecordTest19("1", "2") 57 | val records = List(record1, record2) 58 | TestUtil.verifyWriteAndRead(records) 59 | } 60 | } 61 | 62 | "A case class with an `Null` field in the second position" should { 63 | "serialize and deserialize correctly" in { 64 | val record1 = AvroRecordTest20(null, null) 65 | val record2 = AvroRecordTest20(null, null) 66 | val records = List(record1, record2) 67 | TestUtil.verifyWriteAndRead(records) 68 | } 69 | } 70 | 71 | "A case class with an `List[String]` field in the second position" should { 72 | "serialize and deserialize correctly" in { 73 | val record1 = AvroRecordTest24(List("mekka.lekka.hi"), List("mekka.hiney.ho")) 74 | val record2 = AvroRecordTest24(List("time"), List("travel")) 75 | val records = List(record1, record2) 76 | TestUtil.verifyWriteAndRead(records) 77 | } 78 | } 79 | 80 | "A case class with an `List[Int]` field in the second position" should { 81 | "serialize and deserialize correctly" in { 82 | val record1 = AvroRecordTest25(List(1, 2), List(3,4)) 83 | val record2 = AvroRecordTest25(List(5, 6), List(7,8)) 84 | val records = List(record1, record2) 85 | TestUtil.verifyWriteAndRead(records) 86 | } 87 | } 88 | 89 | "A case class with an `Option[String]` field in the second position" should { 90 | "serialize and deserialize correctly" in { 91 | val record1 = AvroRecordTest26(Some("sun"), Some("moon")) 92 | val record2 = AvroRecordTest26(Some("day"), Some("night")) 93 | val records = List(record1, record2) 94 | TestUtil.verifyWriteAndRead(records) 95 | } 96 | } 97 | 98 | "A case class with an `Option[Int]` field in the second position" should { 99 | 
"serialize and deserialize correctly" in { 100 | val record1 = AvroRecordTest27(Some(1), Some(2)) 101 | val record2 = AvroRecordTest27(Some(3), Some(4)) 102 | val records = List(record1, record2) 103 | TestUtil.verifyWriteAndRead(records) 104 | } 105 | } 106 | 107 | "A case class with two Map[Int, Int] fields" should { 108 | "serialize and deserialize correctly" in { 109 | val record1 = AvroRecordTestMap04(Map("Gorgonzola"->2), Map("Cheddar"->4)) 110 | val record2 = AvroRecordTestMap04(Map("Gouda"->5), Map("Swiss"->6)) 111 | val records = List(record1, record2) 112 | TestUtil.verifyWriteAndRead(records) 113 | } 114 | } 115 | 116 | "A case class with two Map[Int, String] fields" should { 117 | "serialize and deserialize correctly" in { 118 | val record1 = AvroRecordTestMap05(Map("Havana"->"Cuba"), Map("World"->"series")) 119 | val record2 = AvroRecordTestMap05(Map("Bogota"->"Colombia"), Map("time"->"series")) 120 | val records = List(record1, record2) 121 | TestUtil.verifyWriteAndRead(records) 122 | } 123 | } 124 | 125 | "A case class with two Map[String, Option[List[Int]]] fields" should { 126 | "serialize and deserialize correctly" in { 127 | val record1 = AvroRecordTestMap06(Map("Olala"->Some(List(1,4))), Map("Rumpole"->None)) 128 | val record2 = AvroRecordTestMap06(Map("Cran"->Some(List(3,5))), Map("Doc"->None)) 129 | val records = List(record1, record2) 130 | TestUtil.verifyWriteAndRead(records) 131 | } 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroRecordTests/datatypetests/AvroRecord2ArityHomoNestedTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | class AvroRecord2ArityHomoNestedTest extends Specification { 6 | 7 | "A case class with more than one `List[List[String]]` field" should { 8 | "serialize and deserialize correctly" in { 9 | 10 | val record1 = 
AvroRecordTest38( 11 | List( 12 | List("hi.bye"), 13 | List("yay.nay") 14 | ), 15 | List( 16 | List("one.two"), 17 | List("three.four") 18 | ) 19 | ) 20 | val record2 = AvroRecordTest38( 21 | List( 22 | List("goo.ga"), 23 | List("caloo.calay") 24 | ), 25 | List( 26 | List("one.two"), 27 | List("three.four") 28 | ) 29 | ) 30 | val records = List(record1, record2) 31 | TestUtil.verifyWriteAndRead(records) 32 | 33 | } 34 | } 35 | 36 | "A case class with more than one `List[List[Int]]` field" should { 37 | "serialize and deserialize correctly" in { 38 | 39 | val record1 = AvroRecordTest39( 40 | List( 41 | List(1, 2), 42 | List(3, 4) 43 | ), 44 | List( 45 | List(5, 6), 46 | List(7, 8) 47 | ) 48 | ) 49 | val record2 = AvroRecordTest39( 50 | List( 51 | List(11, 12), 52 | List(13, 14) 53 | ), 54 | List( 55 | List(15, 16), 56 | List(17, 18) 57 | ) 58 | ) 59 | val records = List(record1, record2) 60 | TestUtil.verifyWriteAndRead(records) 61 | 62 | } 63 | } 64 | 65 | "A case class with more than one `Option[List[String]]` field" should { 66 | "serialize and deserialize correctly" in { 67 | val record1 = AvroRecordTest40(Some(List("up.down")), Some(List("left.right"))) 68 | val record2 = AvroRecordTest40(Some(List("b.a")), Some(List("select.start"))) 69 | val records = List(record1, record2) 70 | TestUtil.verifyWriteAndRead(records) 71 | } 72 | } 73 | 74 | "A case class with more than one `Option[List[Int]]` field" should { 75 | "serialize and deserialize correctly" in { 76 | val record1 = AvroRecordTest41(Some(List(1, 2)), Some(List(3, 4))) 77 | val record2 = AvroRecordTest41(Some(List(11, 12)), Some(List(13, 14))) 78 | val records = List(record1, record2) 79 | TestUtil.verifyWriteAndRead(records) 80 | } 81 | } 82 | 83 | "A case class with more than one `List[Option[String]]` field" should { 84 | "serialize and deserialize correctly" in { 85 | val record1 = AvroRecordTest42(List(None, Some("red")), List(Some("blue"), None)) 86 | val record2 = AvroRecordTest42(List(None, 
Some("green")), List(Some("yellow"), None)) 87 | val records = List(record1, record2) 88 | TestUtil.verifyWriteAndRead(records) 89 | } 90 | } 91 | 92 | "A case class with more than one `List[Option[Int]]` field" should { 93 | "serialize and deserialize correctly" in { 94 | val record1 = AvroRecordTest43(List(Some(1), None), List(Some(3), None)) 95 | val record2 = AvroRecordTest43(List(Some(6), None), List(Some(8), None)) 96 | val records = List(record1, record2) 97 | TestUtil.verifyWriteAndRead(records) 98 | } 99 | } 100 | 101 | "A case class with more than one `Option[List[Option[String]]]` field" should { 102 | "serialize and deserialize correctly" in { 103 | val record1 = AvroRecordTest44(Some(List(Some("gold"), None)), Some(List(Some("silver"), None))) 104 | val record2 = AvroRecordTest44(Some(List(Some("copper"), None)), Some(List(Some("bronze"), None))) 105 | val records = List(record1, record2) 106 | TestUtil.verifyWriteAndRead(records) 107 | } 108 | } 109 | 110 | "A case class with more than one `Option[List[Option[Int]]]` field" should { 111 | "serialize and deserialize correctly" in { 112 | val record1 = AvroRecordTest45(Some(List(Some(8), None)), Some(List(Some(10), None))) 113 | val record2 = AvroRecordTest45(Some(List(Some(9), None)), Some(List(Some(11), None))) 114 | val records = List(record1, record2) 115 | TestUtil.verifyWriteAndRead(records) 116 | } 117 | } 118 | 119 | "A case class with more than one `List[Option[List[String]]]` field" should { 120 | "serialize and deserialize correctly" in { 121 | val record1 = AvroRecordTest46(List(None, Some(List(Some("green"), None))), List(None, Some(List(None, Some("yellow")))) ) 122 | val record2 = AvroRecordTest46(List(None, Some(List(Some("orange"), None))), List(None, Some(List(None, Some("purple")))) ) 123 | val records = List(record1, record2) 124 | TestUtil.verifyWriteAndRead(records) 125 | } 126 | } 127 | 128 | "A case class with more than one `List[Option[List[Int]]]` field" should { 129 | 
"serialize and deserialize correctly" in { 130 | val record1 = AvroRecordTest47(List(None, Some(List(Some(2), None))), List(None, Some(List(None, Some(4)))) ) 131 | val record2 = AvroRecordTest47(List(None, Some(List(Some(7), None))), List(None, Some(List(None, Some(9)))) ) 132 | val records = List(record1, record2) 133 | TestUtil.verifyWriteAndRead(records) 134 | } 135 | } 136 | 137 | "A case class with two Map[String, Map[String, Int]] fields" should { 138 | "serialize and deserialize correctly" in { 139 | val record1 = AvroRecordTestMap10(Map("glory"->Map("kitty"->3)), Map("pride"->Map("doggy"->4))) 140 | val record2 = AvroRecordTestMap10(Map("sweet"->Map("horsey"->3)), Map("sour"->Map("piggy"->4))) 141 | val records = List(record1, record2) 142 | TestUtil.verifyWriteAndRead(records) 143 | } 144 | } 145 | 146 | } 147 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroRecordTests/datatypetests/AvroRecordUserDefinedTypesTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | class AvroRecordUserDefinedTypesTest extends Specification { 6 | 7 | "A case class with another record as a field" should { 8 | "serialize and deserialize correctly" in { 9 | val record1 = AvroRecordTest58(AvroRecordTest00(1)) 10 | val record2 = AvroRecordTest58(AvroRecordTest00(2)) 11 | val records = List(record1, record2) 12 | TestUtil.verifyWriteAndRead(records) 13 | } 14 | } 15 | 16 | "A case class with a nested record as a field" should { 17 | "serialize and deserialize correctly" in { 18 | val record1 = AvroRecordTest59(AvroRecordTest58(AvroRecordTest00(1))) 19 | val record2 = AvroRecordTest59(AvroRecordTest58(AvroRecordTest00(2))) 20 | val records = List(record1, record2) 21 | TestUtil.verifyWriteAndRead(records) 22 | } 23 | } 24 | 25 | "A case class with two record fields" should { 26 | "serialize and deserialize correctly" in { 27 
| val record1 = AvroRecordTest60(AvroRecordTest00(1), AvroRecordTest58(AvroRecordTest00(2))) 28 | val record2 = AvroRecordTest60(AvroRecordTest00(3), AvroRecordTest58(AvroRecordTest00(4))) 29 | val records = List(record1, record2) 30 | TestUtil.verifyWriteAndRead(records) 31 | } 32 | } 33 | 34 | "A case class with a field that is list of a user-defined type" should { 35 | "serialize and deserialize correctly" in { 36 | val record1 = AvroRecordTest61(List(AvroRecordTest00(1), AvroRecordTest00(2))) 37 | val record2 = AvroRecordTest61(List(AvroRecordTest00(3), AvroRecordTest00(4))) 38 | val records = List(record1, record2) 39 | TestUtil.verifyWriteAndRead(records) 40 | } 41 | } 42 | 43 | "A case class with a field that is list of a nested user-defined type" should { 44 | "serialize and deserialize correctly" in { 45 | val record1 = AvroRecordTest62(List(AvroRecordTest58(AvroRecordTest00(1)), AvroRecordTest58(AvroRecordTest00(2)))) 46 | val record2 = AvroRecordTest62(List(AvroRecordTest58(AvroRecordTest00(3)), AvroRecordTest58(AvroRecordTest00(4)))) 47 | val records = List(record1, record2) 48 | TestUtil.verifyWriteAndRead(records) 49 | } 50 | } 51 | 52 | 53 | 54 | /* //TODO make readable file for this class - not very urgent since this field type is tested in other contexts also 55 | "A case class with a field that is list of a nested user-defined type in the second position" should { 56 | "serialize and deserialize correctly" in { 57 | val record1 = AvroRecordTest63(List(AvroRecordTest00(1), AvroRecordTest00(2)), List(AvroRecordTest60(AvroRecordTest00(3), AvroRecordTest58(AvroRecordTest00(2))))) 58 | val record2 = AvroRecordTest63(List(AvroRecordTest00(3), AvroRecordTest00(2)), List(AvroRecordTest60(AvroRecordTest00(3), AvroRecordTest58(AvroRecordTest00(2))))) 59 | val records = List(record1, record2) 60 | TestUtil.verifyWriteAndRead(records) 61 | 62 | } 63 | } 64 | */ 65 | 66 | "A case class with a field that is list of a nested user-defined type in the second 
position" should { 67 | "serialize and deserialize correctly" in { 68 | val record1 = AvroRecordTest64(Some(AvroRecordTest00(1))) 69 | val record2 = AvroRecordTest64(Some(AvroRecordTest00(2))) 70 | val records = List(record1, record2) 71 | TestUtil.verifyWriteAndRead(records) 72 | } 73 | } 74 | 75 | "A case class with an empty `Option` field of a user-defined type" should { 76 | "serialize and deserialize correctly" in { 77 | val record1 = AvroRecordTest65(None) 78 | val record2 = AvroRecordTest65(None) 79 | val records = List(record1, record2) 80 | TestUtil.verifyWriteAndRead(records) 81 | } 82 | } 83 | 84 | "A case class with an `Option` field of a nested user-defined type" should { 85 | "serialize and deserialize correctly" in { 86 | val record1 = AvroRecordTest66(Some(AvroRecordTest58(AvroRecordTest00(1)))) 87 | val record2 = AvroRecordTest66(Some(AvroRecordTest58(AvroRecordTest00(2)))) 88 | val records = List(record1, record2) 89 | TestUtil.verifyWriteAndRead(records) 90 | } 91 | } 92 | 93 | "A case class with two `Option` fields of user-defined types" should { 94 | "serialize and deserialize correctly" in { 95 | val record1 = AvroRecordTest67(Some(AvroRecordTest00(1)), Some(AvroRecordTest60(AvroRecordTest00(4), AvroRecordTest58(AvroRecordTest00(1))))) 96 | val record2 = AvroRecordTest67(Some(AvroRecordTest00(7)), Some(AvroRecordTest60(AvroRecordTest00(8), AvroRecordTest58(AvroRecordTest00(7))))) 97 | val records = List(record1, record2) 98 | TestUtil.verifyWriteAndRead(records) 99 | } 100 | } 101 | 102 | "A case class with nested optional and list fields of user-defined types" should { 103 | "serialize and deserialize correctly" in { 104 | val record1 = AvroRecordTest68(Some(List(Some(AvroRecordTest00(1)), None)), List(None, Some(List(AvroRecordTest01(1F), AvroRecordTest01(2F))))) 105 | val record2 = AvroRecordTest68(Some(List(Some(AvroRecordTest00(3)), 
None)), List(None, Some(List(AvroRecordTest01(3F), AvroRecordTest01(4F))))) 106 | val records = List(record1, record2) 107 | TestUtil.verifyWriteAndRead(records) 108 | } 109 | } 110 | 111 | "A case class with two differing Map fields that contain user-defined types" should { 112 | "serialize and deserialize correctly" in { 113 | val record1 = AvroRecordTestMap12( 114 | Map("socialist"->Map("capitalist"->AvroRecordTest00(1))), 115 | Map("private"->AvroRecordTest58(AvroRecordTest00(1))) 116 | ) 117 | val record2 = AvroRecordTestMap12( 118 | Map("mixed"->Map("communist"->AvroRecordTest00(2))), 119 | Map("public"->AvroRecordTest58(AvroRecordTest00(2))) 120 | ) 121 | val records = List(record1, record2) 122 | TestUtil.verifyWriteAndRead(records) 123 | } 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroTypeProviderTests/AvroTypeProviderProvideNamespaceTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | import java.io.File 6 | 7 | import org.apache.avro.generic._ 8 | import org.apache.avro.specific._ 9 | import org.apache.avro.Schema 10 | import org.apache.avro.file._ 11 | 12 | import collection.JavaConversions._ 13 | 14 | import com.julianpeeters.avro.annotations._ 15 | 16 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestNoNamespaceOption.avro") 17 | @AvroRecord 18 | case class AvroTypeProviderTestNoNamespaceOption() 19 | 20 | 21 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestNoNamespaceDeeplyNested.avro") 22 | @AvroRecord 23 | case class AvroTypeProviderTestNoNamespaceDeeplyNested() 24 | 25 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestNoNamespaceDoubleNested.avro") 26 | @AvroRecord 27 | case class AvroTypeProviderTestNoNamespaceDoubleNested() 28 | 29 | 
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestNoNamespace.avro") 30 | @AvroRecord 31 | case class AvroTypeProviderTestNoNamespace() 32 | 33 | @AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestNoNamespaceNested.avro") 34 | @AvroRecord 35 | case class AvroTypeProviderTestNoNamespaceNested() 36 | 37 | 38 | class AvroTypeProviderProvideNamespaceTest extends Specification { 39 | 40 | "An avro file without a namespace" should { 41 | "deserialize into a non-default package" in { 42 | 43 | val record = AvroTypeProviderTestNoNamespace(1) 44 | 45 | val file = new File("tests/src/test/resources/AvroTypeProviderTestNoNamespace.avro") 46 | 47 | val schema = AvroTypeProviderTestNoNamespace.SCHEMA$ 48 | 49 | val userDatumReader = new SpecificDatumReader[AvroTypeProviderTestNoNamespace](schema) 50 | val dataFileReader = new DataFileReader[AvroTypeProviderTestNoNamespace](file, userDatumReader) 51 | val sameRecord = dataFileReader.next() 52 | 53 | 54 | sameRecord must ===(record) 55 | } 56 | } 57 | 58 | 59 | "A record without a namespace as a field's type" should { 60 | "deserialize into a non-default package" in { 61 | 62 | 63 | val record = AvroTypeProviderTestNoNamespaceNested(AvroTypeProviderTestNoNamespace(2)) 64 | 65 | val file = new File("tests/src/test/resources/AvroTypeProviderTestNoNamespaceNested.avro") 66 | val schema = AvroTypeProviderTestNoNamespaceNested.SCHEMA$ 67 | 68 | val userDatumReader = new SpecificDatumReader[AvroTypeProviderTestNoNamespaceNested](schema) 69 | 70 | val dataFileReader = new DataFileReader[AvroTypeProviderTestNoNamespaceNested](file, userDatumReader) 71 | val sameRecord = dataFileReader.next() 72 | 73 | sameRecord must ===(record) 74 | } 75 | } 76 | 77 | 78 | 79 | /* 80 | 81 | ##################################################################################################### 82 | 3 FAILING TESTS: 83 | Reading and writing succeeds for simple records whose fields are primitive types, 84 | and also succeeds for 
"nested" records whose fields are `record` types, 85 | however reading and writing fails for records whose fields are unions of `record` types. 86 | 87 | 88 | The issues seem to be due to the mismatch between a) the expected and actual schemas, and b) the full names of records vs specific classes. 89 | Thus, I believe this is an Avro issue, but so far no response on the users mailing list: 90 | http://apache-avro.679487.n3.nabble.com/Issues-reading-and-writing-namespace-less-schemas-from-namespaced-Specific-Records-tc4032092.html 91 | 92 | ###################################################################################################### 93 | 94 | 95 | "A case class with in the default package (i.e. without a namespace) as a field's 2-level nested type" should { 96 | "serialize and deserialize correctly" in { 97 | 98 | val record = AvroTypeProviderTestNoNamespaceDoubleNested(List(Some(AvroTypeProviderTestNoNamespace(5)))) 99 | 100 | val file = new File("tests/src/test/resources/AvroTypeProviderTestNoNamespaceDoubleNested.avro") 101 | 102 | val schema = AvroTypeProviderTestNoNamespaceDoubleNested.SCHEMA$ 103 | val userDatumReader = new SpecificDatumReader[AvroTypeProviderTestNoNamespaceDoubleNested](schema) 104 | val dataFileReader = new DataFileReader[AvroTypeProviderTestNoNamespaceDoubleNested](file, userDatumReader) 105 | val sameRecord = dataFileReader.next() 106 | 107 | sameRecord must ===(record) 108 | } 109 | } 110 | 111 | 112 | "A case class with in the default package (i.e. 
without a namespace) as a field's deeply nested type" should { 113 | "serialize and deserialize correctly" in { 114 | 115 | val record = AvroTypeProviderTestNoNamespaceDeeplyNested(Some(List(Some(AvroTypeProviderTestNoNamespace(4))))) 116 | 117 | val file = new File("tests/src/test/resources/AvroTypeProviderTestNoNamespaceDeeplyNested.avro") 118 | 119 | val schema = AvroTypeProviderTestNoNamespaceDeeplyNested.SCHEMA$ 120 | 121 | val userDatumReader = new SpecificDatumReader[AvroTypeProviderTestNoNamespaceDeeplyNested](schema) 122 | val dataFileReader = new DataFileReader[AvroTypeProviderTestNoNamespaceDeeplyNested](file, userDatumReader) 123 | val sameRecord = dataFileReader.next() 124 | 125 | sameRecord must ===(record) 126 | } 127 | } 128 | 129 | 130 | 131 | 132 | "A case class with in the default package (i.e. without a namespace) as an optional field's type" should { 133 | "serialize and deserialize correctly" in { 134 | 135 | val record = AvroTypeProviderTestNoNamespaceOption(Some(AvroTypeProviderTestNoNamespace(6))) 136 | 137 | val file = new File("tests/src/test/resources/AvroTypeProviderTestNoNamespaceOption.avro") 138 | val schema = AvroTypeProviderTestNoNamespaceOption.SCHEMA$ 139 | val userDatumReader = new GenericDatumReader[AvroTypeProviderTestNoNamespaceOption](schema, schema, SpecificData.get) 140 | val dataFileReader = new DataFileReader[AvroTypeProviderTestNoNamespaceOption](file, userDatumReader) 141 | val sameRecord = dataFileReader.next() 142 | 143 | sameRecord must ===(record) 144 | 145 | } 146 | } 147 | 148 | 149 | */ 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | } 158 | -------------------------------------------------------------------------------- /tests/src/test/scala/AvroTypeProviderTests/datatypetests/AvroTypeProviderPrimitivesTest.scala: -------------------------------------------------------------------------------- 1 | package test 2 | 3 | import org.specs2.mutable.Specification 4 | 5 | class AvroTypeProvider00Test extends 
Specification { 6 | 7 | "A case class with an `Int` field" should { 8 | "deserialize correctly" in { 9 | val record = AvroTypeProviderTest00(1) 10 | TestUtil.verifyRead(record) 11 | } 12 | } 13 | 14 | "A case class with an `Float` field" should { 15 | "deserialize correctly" in { 16 | val record = AvroTypeProviderTest01(1F) 17 | TestUtil.verifyRead(record) 18 | } 19 | } 20 | 21 | "A case class with an `Long` field" should { 22 | "deserialize correctly" in { 23 | val record = AvroTypeProviderTest02(1L) 24 | TestUtil.verifyRead(record) 25 | } 26 | } 27 | 28 | "A case class with an `Double` field" should { 29 | "deserialize correctly" in { 30 | val record = AvroTypeProviderTest03(1D) 31 | TestUtil.verifyRead(record) 32 | } 33 | } 34 | 35 | "A case class with an `Boolean` field" should { 36 | "deserialize correctly" in { 37 | val record = AvroTypeProviderTest04(true) 38 | TestUtil.verifyRead(record) 39 | } 40 | } 41 | 42 | "A case class with an `String` field" should { 43 | "deserialize correctly" in { 44 | val record = AvroTypeProviderTest05("hello world") 45 | TestUtil.verifyRead(record) 46 | } 47 | } 48 | 49 | "A case class with an `Null` field" should { 50 | "deserialize correctly" in { 51 | val record = AvroTypeProviderTest06(null) 52 | TestUtil.verifyRead(record) 53 | } 54 | } 55 | 56 | "A case class with an empty `Option[String]` field" should { 57 | "deserialize correctly" in { 58 | val record = AvroTypeProviderTest07(None) 59 | TestUtil.verifyRead(record) 60 | } 61 | } 62 | 63 | "A case class with an empty `Option[Int]` field" should { 64 | "deserialize correctly" in { 65 | val record = AvroTypeProviderTest08(None) 66 | TestUtil.verifyRead(record) 67 | } 68 | } 69 | 70 | "A case class with an `List[String]` field" should { 71 | "deserialize correctly" in { 72 | val record = AvroTypeProviderTest10(List("head.tail")) 73 | TestUtil.verifyRead(record) 74 | } 75 | } 76 | 77 | "A case class with an `List[Int]` field" should { 78 | "deserialize correctly" in { 79 | 
val record = AvroTypeProviderTest11(List(1, 2)) 80 | TestUtil.verifyRead(record) 81 | } 82 | } 83 | 84 | "A case class with an `Option[String]` field" should { 85 | "deserialize correctly" in { 86 | val record = AvroTypeProviderTest12(Some("I'm here")) 87 | TestUtil.verifyRead(record) 88 | } 89 | } 90 | 91 | "A case class with an `Option[Int]` field" should { 92 | "deserialize correctly" in { 93 | val record = AvroTypeProviderTest13(Some(1)) 94 | TestUtil.verifyRead(record) 95 | } 96 | } 97 | 98 | "A case class with an `Map[String,Int]` field" should { 99 | "deserialize correctly" in { 100 | val record = AvroTypeProviderTestMap01(Map("justice"->1)) 101 | TestUtil.verifyRead(record) 102 | } 103 | } 104 | 105 | "A case class with an `Map[String,String]` field" should { 106 | "deserialize correctly" in { 107 | val record = AvroTypeProviderTestMap02(Map("justice"->"law")) 108 | TestUtil.verifyRead(record) 109 | } 110 | } 111 | 112 | "A case class with an `Map[String,String]` field" should { 113 | "deserialize correctly" in { 114 | val record = AvroTypeProviderTestMap03(Map("justice"->Some(List(1,2)))) 115 | TestUtil.verifyRead(record) 116 | } 117 | } 118 | 119 | "A case class with two `Map[String,Int]` fields" should { 120 | "deserialize correctly" in { 121 | val record = AvroTypeProviderTestMap04(Map("justice"->2, "law"->4), Map("sweet"->1)) 122 | TestUtil.verifyRead(record) 123 | } 124 | } 125 | 126 | 127 | "A case class with two `Map[String,String]` fields" should { 128 | "deserialize correctly" in { 129 | val record = AvroTypeProviderTestMap05(Map("justice"->"crime", "law"->"order"), Map("sweet"->"sour")) 130 | TestUtil.verifyRead(record) 131 | } 132 | } 133 | 134 | 135 | "A case class with fields `x: Map[String, Option[List[Int]]], y: Map[String, Option[List[Int]]]`" should { 136 | "deserialize correctly" in { 137 | val record = AvroTypeProviderTestMap06(Map("justice"->None, "law"->Some(List(1,2))), Map("sweet"->Some(List(3,4)))) 138 | TestUtil.verifyRead(record) 
139 | } 140 | } 141 | 142 | 143 | "A case class with a `Map[String, Map[String, Int]]` field" should { 144 | "deserialize correctly" in { 145 | val record = AvroTypeProviderTestMap07(Map("pepper"->Map("onion"->6))) 146 | TestUtil.verifyRead(record) 147 | } 148 | } 149 | 150 | "A case class with a `List[Map[String, Map[String, String]]]` field" should { 151 | "deserialize correctly" in { 152 | val record = AvroTypeProviderTestMap08(List(Map("pepper"->Map("onion"->"garlic")), Map("bongo"->Map("tabla"->"conga")))) 153 | TestUtil.verifyRead(record) 154 | } 155 | } 156 | 157 | "A case class with a `Option[Map[String, Option[List[String]]]]` field" should { 158 | "deserialize correctly" in { 159 | val record = AvroTypeProviderTestMap09(Some(Map("pepper"->Some(List("howdy", "doody"))))) 160 | TestUtil.verifyRead(record) 161 | } 162 | } 163 | 164 | "A case class with `x: Map[String, Map[String, Int]], y: Map[String, Map[String, Int]]` fields" should { 165 | "deserialize correctly" in { 166 | val record = AvroTypeProviderTestMap10(Map("pepper"->Map("onion"->6)), Map("salt"->Map("garlic"->7))) 167 | TestUtil.verifyRead(record) 168 | } 169 | } 170 | 171 | "A case class with `x: Map[String, Map[String, Int]], y: List[Map[String, Map[String, String]]]` fields" should { 172 | "deserialize correctly" in { 173 | val record = AvroTypeProviderTestMap11(Map("pepper"->Map("onion"->6)), List(Map("salt"->Map("garlic"->"oil")), Map("sriracha"->Map("chili"->"oil")))) 174 | TestUtil.verifyRead(record) 175 | } 176 | } 177 | 178 | "A case class with `x: Map[String, Map[String, AvroTypeProviderTest00]], y: Map[String, AvroTypeProviderTest58]` fields" should { 179 | "deserialize correctly" in { 180 | val record = AvroTypeProviderTestMap12(Map("c1"->Map("00"-> AvroTypeProviderTest00(2))), Map("58"->AvroTypeProviderTest58(AvroTypeProviderTest00(4)))) 181 | TestUtil.verifyRead(record) 182 | } 183 | } 184 | 185 | } 186 | 
-------------------------------------------------------------------------------- /macros/src/main/scala/avro/scala/macro/annotations/AvroRecordMacro.scala: -------------------------------------------------------------------------------- 1 | package com.julianpeeters.avro.annotations 2 | 3 | import record._ 4 | import ctorgen._ 5 | import namespacegen._ 6 | import methodgen._ 7 | import schemagen._ 8 | 9 | import scala.reflect.macros.blackbox.Context 10 | import scala.language.experimental.macros 11 | import scala.annotation.{ compileTimeOnly, StaticAnnotation } 12 | 13 | import org.apache.avro.Schema 14 | import org.apache.avro.Schema.Field 15 | 16 | import collection.JavaConversions._ 17 | 18 | object AvroRecordMacro { 19 | 20 | def impl(c: Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { 21 | import c.universe._ 22 | import Flag._ 23 | 24 | //holds info about the fields of the annotee 25 | case class IndexedField(nme: TermName, tpe: Type, dv: Tree, idx: Int) 26 | 27 | //Extender 28 | def generateNewBaseTypes = List( tq"org.apache.avro.specific.SpecificRecordBase") 29 | 30 | //CtorGen 31 | def generateNewCtors(indexedFields: List[IndexedField]): List[c.universe.DefDef] = { 32 | val dcpm = new { val context: c.type = c } with DefaultCtorParamMatcher 33 | val defaultParams = indexedFields.map(field => dcpm.matchDefaultParams(field.tpe, field.dv)) 34 | val ctorGenerator = new {val context: c.type = c} with CtorGenerator 35 | ctorGenerator.toZeroArg(defaultParams) //Return a list of new CtorDefs 36 | } 37 | 38 | //NamespaceGen - getting namespace from the scala code enable reading from namespace-less schemas 39 | def generateNamespace = NamespaceGenerator.probeNamespace(c) 40 | 41 | //SchemaGen - generates schemas and stores them 42 | def generateSchema(className: String, namespace: String, indexedFields: List[IndexedField]): Schema = { 43 | val fieldSchemaGenerator = new {val context: c.type = c} with FieldSchemaGenerator 44 | val avroFields = indexedFields.map(v 
=> fieldSchemaGenerator.toAvroField(namespace, v.nme, v.tpe, v.dv)) 45 | RecordSchemaGenerator.createSchema(className, namespace, avroFields) 46 | } 47 | 48 | //MethodGen - generates put, get, and getSchema needed to implement SpecificRecord for serialization 49 | def generateNewMethods(name: TypeName, indexedFields: List[IndexedField]) = { 50 | val exceptionCase = cq"""_ => new org.apache.avro.AvroRuntimeException("Bad index")""" 51 | 52 | val getDefCaseGenerator = new { val context: c.type = c } with GetDefCaseGenerator 53 | val getCases = indexedFields.map(f => getDefCaseGenerator.asGetCase(f.nme, f.tpe, f.idx)) :+ exceptionCase 54 | val getDef = q"""def get(field$$: Int): AnyRef = field$$ match {case ..$getCases}""" 55 | 56 | val getSchemaDef = q""" def getSchema: Schema = ${name.toTermName}.SCHEMA$$ """ 57 | 58 | val putDefCaseGenerator = new { val context: c.type = c } with PutDefCaseGenerator 59 | val putCases = indexedFields.map(f => putDefCaseGenerator.asPutCase(f.nme, f.tpe, f.idx)) :+ exceptionCase 60 | val putDef = q"""def put(field$$: Int, value: scala.Any): Unit = { field$$ match {case ..$putCases}; () }""" 61 | 62 | List(getDef, getSchemaDef, putDef) 63 | } 64 | 65 | // Update ClassDef and Add Companion Object 66 | val result = { 67 | // match the annotated class 68 | annottees.map(_.tree).toList match { 69 | // Update ClassDef and add companion object 70 | case classDef @ q"$mods class $className[..$tparams](..$first)(...$rest) extends ..$parents { $self => ..$body };" :: tail => { 71 | 72 | //add an index field to the fields defined in the case class 73 | def indexField(f: ValDef) = { 74 | val fieldName = f.name 75 | val fieldType = c.typecheck(q"type T = ${f.tpt}") match { 76 | case x @ TypeDef(mods, name, tparams, rhs) => rhs.tpe 77 | } 78 | val defaultValue = f.rhs 79 | val position = first.indexWhere(f => f.name == fieldName) 80 | IndexedField(fieldName, fieldType, defaultValue, position) 81 | } 82 | 83 | //prep fields from annotee 84 | val 
indexedFields = first.map(f => indexField(f)) 85 | 86 | // updates to the annotated class 87 | val newImports = List(q"import org.apache.avro.Schema") 88 | val newCtors = generateNewCtors(indexedFields) // a no-arg ctor so `newInstance()` can be used 89 | val newDefs = generateNewMethods(className, indexedFields) // `get`, `put`, and `getSchema` methods 90 | val newParents = generateNewBaseTypes ::: parents // extend SpecificRecordBase 91 | val newBody = body ::: newImports ::: newCtors ::: newDefs // add new members to the body 92 | 93 | // updates to the companion object 94 | val schema = q"${generateSchema(className.toString, generateNamespace, indexedFields).toString}" 95 | val schemaVal = q"lazy val SCHEMA$$ = new org.apache.avro.Schema.Parser().parse($schema)" 96 | 97 | val companionDef = tail match { 98 | // if there is no preexisiting companion then make one with a SCHEMA$ field 99 | case Nil => q"object ${className.toTermName} {$schemaVal}" 100 | // if there is a preexisting companion, add a SCHEMA$ field 101 | case List( moduleDef @ q"object $moduleName extends ..$companionParents { ..$moduleBody }") => { 102 | val newModuleBody = List(schemaVal) ::: moduleBody 103 | q"object ${className.toTermName} extends ..$companionParents { ..$newModuleBody }" 104 | } 105 | } 106 | 107 | // return an updated class def and companion def 108 | q"""$mods class $className[..$tparams](..$first)(...$rest) extends ..$newParents { $self => ..$newBody}; 109 | $companionDef""" 110 | } 111 | } 112 | } 113 | c.Expr[Any](result) 114 | } 115 | } 116 | 117 | /** 118 | * From the Macro Paradise Docs... 119 | * 120 | * note the @compileTimeOnly annotation. It is not mandatory, but is recommended to avoid confusion. 121 | * Macro annotations look like normal annotations to the vanilla Scala compiler, so if you forget 122 | * to enable the macro paradise plugin in your build, your annotations will silently fail to expand. 
123 | * The @compileTimeOnly annotation makes sure that no reference to the underlying definition is 124 | * present in the program code after typer, so it will prevent the aforementioned situation 125 | * from happening. 126 | */ 127 | @compileTimeOnly("Enable Macro Paradise for Expansion of Annotations via Macros.") 128 | class AvroRecord extends StaticAnnotation { 129 | def macroTransform(annottees: Any*): Any = macro AvroRecordMacro.impl 130 | } 131 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Herein lie assorted macro annotations for working with Avro in Scala: 2 | 3 | 1) `@AvroTypeProvider("path/to/schema")` - Convert Avro Schemas to Scala case class definitions for use in your favorite Scala Avro runtime. 4 | 5 | 6 | 2) `@AvroRecord` - Use Scala case classes to represent your Avro SpecificRecords, serializable by the Apache Avro runtime (a port of [Avro-Scala-Compiler-Plugin](https://code.google.com/p/avro-scala-compiler-plugin/)). 7 | 8 | Macros are an experimental feature of Scala. [Avrohugger](https://github.com/julianpeeters/avrohugger) is a more traditional alternative. 9 | 10 | #### Get the dependency: 11 | For Scala 2.11.x and 2.12.x ([for Scala 2.10.x](https://github.com/julianpeeters/avro-scala-macro-annotations/issues/6#issuecomment-77973333) please use version 0.4.9 with sbt 0.13.8+): 12 | 13 | 14 | libraryDependencies += "com.julianpeeters" % "avro-scala-macro-annotations_2.11" % "0.11.1" 15 | 16 | 17 | Macro annotations are only available in Scala 2.10.x, 2.11.x, and 2.12.x with the macro paradise plugin. Their inclusion in official Scala might happen in Scala 2.13 - [official docs](http://docs.scala-lang.org/overviews/macros/annotations.html). 
To use the plugin, add the following to `build.sbt`: 18 | 19 | addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full) 20 | 21 | In your IDE of choice you may have to explicitly load this compiler plugin. In Eclipse for example, you can do so by providing the full path under the `Xplugin`, found in the advanced Scala compiler preferences; you should have the jar in a path like `~/.ivy2/cache/org.scalamacros/paradise_2.10.4/jars/paradise_2.10.4-2.0.1.jar`. 22 | 23 | 24 | #### Usage: 25 | Use the annotations separately, or together like this: 26 | 27 | ```scala 28 | package sample 29 | 30 | import com.julianpeeters.avro.annotations._ 31 | 32 | @AvroTypeProvider("data/input.avro") 33 | @AvroRecord 34 | case class MyRecord() 35 | ``` 36 | 37 | First the fields are added automatically from an Avro Schema in a file, then the methods necessary for de/serialization are generated for you, all at compile time. Please see warnings below. 38 | 39 | 40 | #### Supported data types: 41 | 42 | `int` 43 | 44 | `float` 45 | 46 | `long` 47 | 48 | `double` 49 | 50 | `boolean` 51 | 52 | `string` 53 | 54 | `null` 55 | 56 | `array`* 57 | 58 | `map` 59 | 60 | `record` 61 | 62 | `union`** 63 | 64 | *Arrays are represented by `List[T]`, where T is any other supported type. 65 | 66 | **Optional fields of type `[null, t]` are represented by `Option[T]` 67 | 68 | The remaining avro types, `fixed`, `enum`, and `union` (beyond nullable fields), are not yet supported. 69 | 70 | 71 | ## 1) Avro-Type-Provider 72 | If your use-case is "data-first" and you're using an Avro runtime library that allows you to use Scala case classes to represent your Avro records, then you are probably a little weary of transcribing Avro Schemas into their Scala case class equivalents. 
73 | 74 | Annotate an "empty" case class, and its members will be generated automatically at compile time using the data found in the Schema of a given file: 75 | 76 | given the schema automatically found in `input.avro` or `input.avsc`: 77 | 78 | ``` 79 | {"type":"record","name":"MyRecord","namespace":"tutorial","doc":"Auto-generated schema","fields":[{"name":"x","type":{"type":"record","name":"Rec","doc":"Auto-generated schema","fields":[{"name":"i","type":"int","doc":"Auto-Generated Field"}]},"doc":"Auto-Generated Field","default":{"i":4}}]} 80 | ``` 81 | 82 | annotated empty case classes: 83 | 84 | ```scala 85 | import com.julianpeeters.avro.annotations._ 86 | 87 | @AvroTypeProvider("data/input.avro") 88 | case class Rec() 89 | 90 | @AvroTypeProvider("data/input.avro") 91 | case class MyRecord() 92 | ``` 93 | 94 | expand to: 95 | 96 | ```scala 97 | package tutorial 98 | 99 | import com.julianpeeters.avro.annotations._ 100 | 101 | @AvroTypeProvider("data/input.avro") 102 | case class Rec(i: Int) 103 | 104 | @AvroTypeProvider("data/input.avro") 105 | case class MyRecord(x: Rec = Rec(4)) 106 | ``` 107 | 108 | #### Please note: 109 | 1) The datafile must be available at compile time. 110 | 111 | 2) The filepath must be a String literal. 112 | 113 | 3) The name of the empty case class must match the record name exactly (peek at the schema in the file, if needed). 114 | 115 | 4) The order of class definition must be such that the classes that represent the most-nested records are expanded first. 116 | 117 | 5) A class that is doubly annotated with `@AvroTypeProvider` and `@AvroRecord` will be updated with vars instead of vals.
118 | 119 | 120 | 121 | ## 2) Avro-Record: 122 | Implements `SpecificRecord` at compile time so you can use Scala case classes to represent Avro records (like [Scalavro](https://github.com/GenslerAppsPod/scalavro) or [Salat-Avro](https://github.com/julianpeeters/salat-avro/tree/master), but for the Apache Avro runtime so that it runs on your cluster). Since Avro-Scala-Compiler-Plugin doesn't work with Scala 2.10+ and the compiler still stumps me, I ported the serialization essentials over to use [Scala Macro Annotations](http://docs.scala-lang.org/overviews/macros/annotations.html) instead. 123 | 124 | Now you can annotate a case class that you'd like to have serve as your Avro record: 125 | 126 | ```scala 127 | package sample 128 | 129 | @AvroRecord 130 | case class A(var i: Int) 131 | 132 | @AvroRecord 133 | case class B(var a: Option[A] = None) 134 | ``` 135 | 136 | expands to implement `SpecificRecord`, adding `put`, `get`, and `getSchema` methods, and a static `lazy val SCHEMA$` with the schema: 137 | 138 | ``` 139 | {"type":"record","name":"B","namespace":"sample","doc":"Auto-generated schema","fields":[{"name":"a","type":["null",{"type":"record","name":"A","doc":"Auto-generated schema","fields":[{"name":"i","type":"int","doc":"Auto-Generated Field"}]}],"doc":"Auto-Generated Field","default":null}]} 140 | ``` 141 | 142 | Use the expanded class as you would a code-gen'd class with any `SpecificRecord` API. e.g.: 143 | 144 | ```scala 145 | //Writing avros 146 | val datumWriter = new SpecificDatumWriter[B](B.SCHEMA$) 147 | val dataFileWriter = new DataFileWriter[B](datumWriter) 148 | 149 | 150 | //Reading avros 151 | val userDatumReader = new SpecificDatumReader[B](B.SCHEMA$) 152 | val dataFileReader = new DataFileReader[B](file, userDatumReader) 153 | ``` 154 | 155 | #### Please note: 156 | 1) If your framework is one that relies on reflection to get the Schema, it will fail since Scala fields are private.
Therefore preempt it by passing in a Schema to DatumReaders and DatumWriters (as in the Avro example above). 157 | 158 | 2) Fields must be `var`s in order to be compatible with the SpecificRecord API. 159 | 160 | 3) A class that is doubly annotated with `@AvroTypeProvider` and `@AvroRecord` will automatically be updated with vars instead of vals. 161 | 162 | 4) An annotatee may extend a trait (to become a mixin after expansion) but not a class, since SpecificRecordBase will need to occupy that position. 163 | --------------------------------------------------------------------------------