├── .github └── workflows │ ├── packaging.yml │ ├── push_rockspec.yml │ ├── reusable_testing.yml │ └── testing.yml ├── .gitignore ├── CHANGELOG.md ├── CMakeLists.txt ├── FindTarantool.cmake ├── README.md ├── arch └── PKGBUILD ├── avro-schema-scm-1.rockspec ├── avro_schema ├── backend.lua ├── compiler.lua ├── fingerprint.lua ├── frontend.lua ├── il.lua ├── init.lua ├── runtime.lua ├── utils.lua └── version.lua ├── benchmark.lua ├── debian ├── .gitignore ├── changelog ├── compat ├── control ├── copyright ├── docs ├── rules └── source │ └── format ├── exports ├── exports_osx ├── gen_il_filt.sh ├── il_filt.sh ├── lib └── phf │ ├── GNUmakefile │ ├── LICENSE │ ├── README.md │ ├── mk │ └── luapath │ ├── phf.cc │ └── phf.h ├── lua_indent.pl ├── rpm └── tarantool-avro-schema.spec ├── runtime ├── hash.c ├── misc.c └── pipeline.c ├── test ├── .ddt_cache ├── README.md ├── api_tests │ ├── evolution.lua │ ├── export.lua │ ├── reload.lua │ └── var.lua ├── buf_grow_test.lua ├── ddt_suite │ ├── array.lua │ ├── boolean.lua │ ├── bug1.lua │ ├── bytes.lua │ ├── bytes_promo.lua │ ├── compile_large.lua │ ├── double.lua │ ├── enum.lua │ ├── enum_large.lua │ ├── enum_versions.lua │ ├── fixed.lua │ ├── float.lua │ ├── float_promo.lua │ ├── incompatible.lua │ ├── incompatible_array.lua │ ├── incompatible_enum.lua │ ├── incompatible_fixed.lua │ ├── incompatible_map.lua │ ├── incompatible_record.lua │ ├── int.lua │ ├── int_promo.lua │ ├── invalid.lua │ ├── long.lua │ ├── long_promo.lua │ ├── map.lua │ ├── namespace.lua │ ├── null.lua │ ├── record.lua │ ├── record_array.lua │ ├── record_default.lua │ ├── record_hidden.lua │ ├── record_large.lua │ ├── record_nested.lua │ ├── record_union.lua │ ├── record_version.lua │ ├── record_vlo.lua │ ├── recursive.lua │ ├── service_fields.lua │ ├── string.lua │ ├── string_promo.lua │ ├── union.lua │ ├── union_versions.lua │ └── validate.lua ├── msgpack_helper.py └── run_ddt_tests.lua └── web ├── avro_online.lua ├── index.html ├── media └── avro.js └── nginx.conf.sample /.github/workflows/packaging.yml: -------------------------------------------------------------------------------- 1 | name: packaging 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | # Run not only on tags, otherwise dependent job will skip. 7 | version-check: 8 | # Skip pull request job when the source branch is in the same 9 | # repository. 10 | if: | 11 | github.event_name == 'push' || 12 | github.event.pull_request.head.repo.full_name != github.repository 13 | runs-on: ubuntu-24.04 14 | steps: 15 | - name: Check module version 16 | # We need this step to run only on push with tag. 17 | if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }} 18 | uses: tarantool/actions/check-module-version@master 19 | with: 20 | module-name: "avro_schema" 21 | 22 | package: 23 | # Skip pull request jobs when the source branch is in the same 24 | # repository. 
25 | if: | 26 | github.event_name == 'push' || 27 | github.event.pull_request.head.repo.full_name != github.repository 28 | # Packaging for CentOS 7 does not work with other versions, see: 29 | # https://github.com/packpack/packpack/issues/145 30 | runs-on: ubuntu-24.04 31 | needs: version-check 32 | 33 | strategy: 34 | fail-fast: false 35 | matrix: 36 | platform: 37 | - { os: 'debian', dist: 'buster' } 38 | - { os: 'debian', dist: 'bullseye' } 39 | - { os: 'el', dist: '7' } 40 | - { os: 'el', dist: '8' } 41 | - { os: 'fedora', dist: '32' } 42 | - { os: 'fedora', dist: '33' } 43 | - { os: 'fedora', dist: '34' } 44 | - { os: 'fedora', dist: '35' } 45 | - { os: 'fedora', dist: '36' } 46 | - { os: 'ubuntu', dist: 'focal' } 47 | - { os: 'ubuntu', dist: 'groovy' } 48 | - { os: 'ubuntu', dist: 'jammy' } 49 | 50 | env: 51 | OS: ${{ matrix.platform.os }} 52 | DIST: ${{ matrix.platform.dist }} 53 | 54 | steps: 55 | - name: Clone the module 56 | uses: actions/checkout@v4 57 | with: 58 | fetch-depth: 0 59 | 60 | - name: Clone the packpack tool 61 | uses: actions/checkout@v4 62 | with: 63 | repository: packpack/packpack 64 | path: packpack 65 | 66 | - name: Fetch tags 67 | # The GitHub checkout action pulls all the tags, but then it 68 | # deannotates the testing tag, see: 69 | # https://github.com/actions/checkout/issues/290 70 | # Since we use 'git describe ..' calls w/o the '--tags' flag, this 71 | # prevents us from getting the tag needed for the package version 72 | # setup. To avoid this, fetch the tags manually, to be sure 73 | # that all tags always exist. 74 | run: git fetch --tags -f 75 | 76 | - name: Create packages 77 | run: ./packpack/packpack 78 | 79 | - name: Deploy packages 80 | # We need this step to run only on push with tag. 81 | if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }} 82 | env: 83 | RWS_URL_PART: https://rws.tarantool.org/tarantool-modules 84 | RWS_AUTH: ${{ secrets.RWS_AUTH }} 85 | PRODUCT_NAME: tarantool-avro-schema 86 | run: | 87 | CURL_CMD="curl -LfsS \ 88 | -X PUT ${RWS_URL_PART}/${OS}/${DIST} \ 89 | -u ${RWS_AUTH} \ 90 | -F product=${PRODUCT_NAME}" 91 | # We don't want to print secrets to the log, but we do want 92 | # to print the "curl" command to see what's going on. 93 | CURL_CMD_ECHO="curl -LfsS \ 94 | -X PUT ${RWS_URL_PART}/${OS}/${DIST} \ 95 | -u '***' \ 96 | -F product=${PRODUCT_NAME}" 97 | for f in $(ls -I '*build*' -I '*.changes' ./build); do 98 | CURL_CMD+=" -F $(basename ${f})=@./build/${f}" 99 | CURL_CMD_ECHO+=" -F $(basename ${f})=@./build/${f}" 100 | done 101 | echo ${CURL_CMD_ECHO} 102 | ${CURL_CMD} 103 | -------------------------------------------------------------------------------- /.github/workflows/push_rockspec.yml: -------------------------------------------------------------------------------- 1 | name: Push rockspec 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | tags: 8 | - '*' 9 | 10 | env: 11 | ROCK_NAME: "avro-schema" 12 | 13 | jobs: 14 | version-check: 15 | # We need this job to run only on push with tag.
16 | if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }} 17 | runs-on: ubuntu-24.04 18 | steps: 19 | - name: Check module version 20 | uses: tarantool/actions/check-module-version@master 21 | with: 22 | module-name: "avro_schema" 23 | 24 | push-scm-rockspec: 25 | runs-on: ubuntu-24.04 26 | if: github.ref == 'refs/heads/master' 27 | steps: 28 | - uses: actions/checkout@master 29 | 30 | - uses: tarantool/rocks.tarantool.org/github-action@master 31 | with: 32 | auth: ${{ secrets.ROCKS_AUTH }} 33 | files: ${{ env.ROCK_NAME }}-scm-1.rockspec 34 | 35 | push-tagged-rockspec: 36 | runs-on: ubuntu-24.04 37 | if: startsWith(github.ref, 'refs/tags') 38 | needs: version-check 39 | steps: 40 | - uses: actions/checkout@master 41 | 42 | # https://stackoverflow.com/questions/58177786/get-the-current-pushed-tag-in-github-actions 43 | - name: Set env 44 | run: echo "GIT_TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV 45 | 46 | - name: Create release rockspec 47 | run: | 48 | sed \ 49 | -e "s/branch = '.\+'/tag = '${GIT_TAG}'/g" \ 50 | -e "s/version = '.\+'/version = '${GIT_TAG}-1'/g" \ 51 | ${{ env.ROCK_NAME }}-scm-1.rockspec > ${{ env.ROCK_NAME }}-${GIT_TAG}-1.rockspec 52 | - uses: tarantool/rocks.tarantool.org/github-action@master 53 | with: 54 | auth: ${{ secrets.ROCKS_AUTH }} 55 | files: ${{ env.ROCK_NAME }}-${GIT_TAG}-1.rockspec 56 | -------------------------------------------------------------------------------- /.github/workflows/reusable_testing.yml: -------------------------------------------------------------------------------- 1 | name: reusable_testing 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | artifact_name: 7 | description: 'The name of the tarantool build artifact' 8 | default: ubuntu-focal 9 | required: false 10 | type: string 11 | 12 | jobs: 13 | run_tests: 14 | runs-on: ubuntu-24.04 15 | steps: 16 | - name: 'Clone the avro-schema module' 17 | uses: actions/checkout@v4 18 | with: 19 | repository: ${{ github.repository_owner }}/avro-schema 20 | 21 | - name: 'Download the tarantool build artifact' 22 | uses: actions/download-artifact@v4 23 | with: 24 | name: ${{ inputs.artifact_name }} 25 | 26 | - name: 'Install tarantool' 27 | # Now we're lucky: all dependencies are already installed. Check package 28 | # dependencies when migrating to other OS version. 29 | run: sudo dpkg -i tarantool*.deb 30 | 31 | - run: cmake . && make 32 | - run: make check 33 | -------------------------------------------------------------------------------- /.github/workflows/testing.yml: -------------------------------------------------------------------------------- 1 | name: testing 2 | 3 | on: 4 | push: 5 | pull_request: 6 | workflow_dispatch: 7 | 8 | jobs: 9 | linux: 10 | # We want to run on external PRs, but not on our own internal 11 | # PRs as they'll be run by the push to the branch. 12 | # 13 | # The main trick is described here: 14 | # https://github.com/Dart-Code/Dart-Code/pull/2375 15 | if: github.event_name == 'push' || 16 | github.event.pull_request.head.repo.full_name != github.repository 17 | 18 | runs-on: ubuntu-24.04 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | tarantool: 24 | - '2.11' 25 | 26 | steps: 27 | - name: Clone the module 28 | uses: actions/checkout@v4 29 | 30 | - name: Setup tarantool ${{ matrix.tarantool }} 31 | uses: tarantool/setup-tarantool@v3 32 | with: 33 | tarantool-version: ${{ matrix.tarantool }} 34 | 35 | - run: cmake . 
&& make 35 | - run: make check 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | Makefile 2 | CMakeFiles 3 | CMakeCache.txt 4 | CPackConfig.cmake 5 | CPackSourceConfig.cmake 6 | CTestTestfile.cmake 7 | Testing/ 8 | cmake_install.cmake 9 | 10 | .*.swp 11 | *.so 12 | 13 | # autogenerated files 14 | backend.lua 15 | il.lua 16 | il_filt 17 | 18 | build/ 19 | packpack/ -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) 5 | and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 6 | 7 | ## [3.1.0] - 2023-03-20 8 | ### Added 9 | - [Add versioning support.](https://github.com/tarantool/avro-schema/pull/147) 10 | ### Changed 11 | - [Use git+https:// for git repository in rockspec](https://github.com/tarantool/avro-schema/pull/143) 12 | - [RPM build to support Fedora 33+](https://github.com/tarantool/avro-schema/issues/148) 13 | 14 | 15 | ## [3.0.6] - 2021-09-24 16 | ### Changed 17 | - [Fixed compatibility with tarantool-2.5.1](https://github.com/tarantool/avro-schema/issues/135), 18 | where `table.clear()` must be required explicitly (it is a Tarantool bug, but we work around it anyway) 19 | - [Don't reconfigure default JSON serializer](https://github.com/tarantool/avro-schema/issues/140) 20 | - [Don't accept a string value for a field of 'long' type in a schema](https://github.com/tarantool/avro-schema/issues/133) 21 | - [Improved `validate()` performance.](https://github.com/tarantool/avro-schema/pull/134) 22 | (It speeds up by about 6% on our benchmark.)
23 | 24 | 25 | ## [3.0.5] - 2020-02-27 26 | ### Changed 27 | - [Full hotreload support](https://github.com/tarantool/avro-schema/issues/34) 28 | 29 | 30 | ## [3.0.4] - 2019-11-12 31 | ### Changed 32 | - [Support hotreload (partially)](https://github.com/tarantool/avro-schema/issues/34) 33 | - [Fix compilation of a large schema](https://github.com/tarantool/avro-schema/issues/124) 34 | 35 | 36 | ## [3.0.3] - 2018-11-12 37 | ### Changed 38 | - [Fix wrong array validation](https://github.com/tarantool/avro-schema/issues/117) 39 | 40 | 41 | ## [3.0.2] - 2018-10-14 42 | ### Changed 43 | - [Treat nil as box.NULL when validate a union](https://github.com/tarantool/avro-schema/issues/113) 44 | 45 | 46 | ## [3.0.1] - 2018-09-28 47 | ### Added 48 | - Allow default values for records, nullable records, unions 49 | ([#99](https://github.com/tarantool/avro-schema/issues/99), 50 | [595fe703](https://github.com/tarantool/avro-schema/commit/595fe703b5c2ce6624e8f3dfa752e787c97d0462), 51 | [9d853a79](https://github.com/tarantool/avro-schema/commit/9d853a795c78259d27986db2fb16a349f2457b7c)) 52 | - Comments to codebase 53 | ### Changed 54 | - [Fix temp_msgpack_gc/flatten race](https://github.com/tarantool/avro-schema/issues/109) 55 | - [Fix stack restore for `validate` error handling](https://github.com/tarantool/avro-schema/issues/11) 56 | - [Fix schema evolution for nullable fields](https://github.com/tarantool/avro-schema/issues/76) 57 | - [Fix installation via luarocks](https://github.com/tarantool/avro-schema/commit/6fbd4d6092f96a2dfad254a89eb85d829d89938d) 58 | - Code refactoring 59 | - Deleted unused register (ripv) 60 | - Extend documentation 61 | 62 | 63 | ## [3.0.0] - 2018-05-08 64 | ### Added 65 | - Error opcode for runtime exceptions 66 | ### Changed 67 | - [Change nullable flatten/unflatten/xflatten](https://github.com/tarantool/avro-schema/issues/63) 68 | - scalar nullable types are encoded with null or value 69 | - nullable record encoded with null or array of field values 70 | - xflatten for nullable record is in alpha stage 71 | - `get_names`, `get_types` changed 72 | ([#58](https://github.com/tarantool/avro-schema/issues/58), 73 | [#56](https://github.com/tarantool/avro-schema/issues/56)) 74 | - add nullable type support 75 | - scalars are exported as `string*` 76 | - nullable record is exported just as `record*` string 77 | - api changes 78 | - fixed field is exported as `fixed` (instead of its name) 79 | - union is exported as `union_type`, `union_value` 80 | - support `service_fields` 81 | - add `get_*` methods to `compiled` object 82 | - Give variables the same names in IR and in resulting Lua 83 | - Fix nullable field is not mandatory in flatten 84 | - Fix flatten for variable size types 85 | 86 | 87 | ## [2.3.2] - 2018-05-04 88 | ### Changed 89 | - Fix boolean field validation 90 | 91 | 92 | ## [2.3.1] - 2018-04-19 93 | ### Changed 94 | - [Fix one of null/non-null type tables is not initialized](https://github.com/tarantool/avro-schema/issues/77) 95 | - [Fix `preserve_in_ast` for record fields](https://github.com/tarantool/avro-schema/issues/78) 96 | - [Fix collapse nullable scalar on export](https://github.com/tarantool/avro-schema/issues/74) 97 | 98 | 99 | ## [2.3.0] - 2018-04-19 100 | ### Added 101 | - [Forward type reference](https://github.com/tarantool/avro-schema/issues/48) 102 | ### Changed 103 | - Improve benchmark script 104 | - [Fix nullable type reference export](https://github.com/tarantool/avro-schema/issues/49) 105 | - [Fix nullable type name 
export](https://github.com/tarantool/avro-schema/issues/38) 106 | - [Fix fingerprints for type references](https://github.com/tarantool/avro-schema/issues/52) 107 | - [Make `preserve_in_ast` work at any place of a schema](https://github.com/tarantool/avro-schema/issues/73) 108 | 109 | 110 | ## [2.2.3] - 2018-04-16 111 | ### Changed 112 | - [Fix nullability for `fixed` type](https://github.com/tarantool/avro-schema/issues/55) 113 | - [Treat lack of value for union field as null for `validate`](https://github.com/tarantool/avro-schema/issues/64) 114 | - [Make float validation rules stricter](https://github.com/tarantool/avro-schema/issues/60) 115 | 116 | 117 | ## [2.2.2] - 2018-04-06 118 | ### Added 119 | - [Tests for any type](https://github.com/tarantool/avro-schema/issues/47) 120 | ### Changed 121 | - [Fix nullable float type validation](https://github.com/tarantool/avro-schema/issues/50) 122 | 123 | 124 | ## [2.2.1] - 2018-03-26 125 | ### Changed 126 | - Fixed OSX support 127 | 128 | 129 | ## [2.2.0] - 2018-03-26 130 | ### Added 131 | - Support for UTF-8 Enum value 132 | ### Changed 133 | - [Fix nullable types inside of arrays](https://github.com/tarantool/avro-schema/issues/37) 134 | 135 | 136 | ## [2.1.0] - 2018-02-24 137 | ### Added 138 | - Allow NIL values for nullable types 139 | - Introduced model fingerprint 140 | - Allow preserving extra fields in AST and fingerprint 141 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8 FATAL_ERROR) 2 | 3 | project(avro_schema) 4 | 5 | set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}" ${CMAKE_MODULE_PATH}) 6 | 7 | # Find Tarantool and Lua dependencies 8 | set(TARANTOOL_FIND_REQUIRED ON) 9 | find_package(Tarantool) 10 | include_directories(${TARANTOOL_INCLUDE_DIRS}) 11 | 12 | # Check if __builtin_bswap16 is present 13 | include(CheckCSourceCompiles) 14 | check_c_source_compiles("int main() { return __builtin_bswap16(0); }" C_HAVE_BSWAP16) 15 | if(C_HAVE_BSWAP16) 16 | add_definitions(-DC_HAVE_BSWAP16=1) 17 | endif() 18 | 19 | # Set CFLAGS/CXXFLAGS 20 | 21 | # We need more warnings! 22 | set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wall -Wextra") 23 | set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -Wall -Wextra") 24 | 25 | # MsgPack parser violates strict aliasing rules 26 | set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99 -fno-strict-aliasing") 27 | 28 | # lib/phf is the only C++ user; it needs a bare minimum of C++ features 29 | # Note: we use a fraction of libphf features and we hate dead cruft! 30 | # Also we'd like to avoid changing the source too much. We rely on 31 | # the linker to drop unused code and, since the entire libphf 32 | # comes in a single object file, we have to arrange for each 33 | # function to live in a separate section (-ffunction-sections).
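# (The corresponding --gc-sections / -dead_strip linker flags are configured below.)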
34 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -fno-rtti -fno-exceptions -fdata-sections -ffunction-sections") 35 | 36 | # libphf doesn't really require libstdc++ 37 | add_definitions(-DPHF_NO_LIBCXX=1) 38 | 39 | # libphf assumes UINT32_C macro definitions are available 40 | add_definitions(-D__STDC_LIMIT_MACROS -D__STDC_CONSTANT_MACROS) 41 | 42 | # 1) list exported symbols explicitly, since only a fraction 43 | # of libphf is actually needed (--version-script) 44 | # 2) enable linker to drop unused parts (--gc-sections) 45 | # 3) don't link default libs, since libstdc++ is unnecessary 46 | if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") 47 | set (CMAKE_SHARED_LINKER_FLAGS 48 | "-Wl,-exported_symbols_list,${CMAKE_SOURCE_DIR}/exports_osx -Wl,-dead_strip -nodefaultlibs") 49 | else() 50 | set (CMAKE_SHARED_LINKER_FLAGS 51 | "-Wl,--version-script,${CMAKE_SOURCE_DIR}/exports -Wl,--gc-sections -nodefaultlibs") 52 | endif() 53 | 54 | add_library(avro_schema_rt_c SHARED 55 | runtime/pipeline.c 56 | runtime/hash.c 57 | runtime/misc.c 58 | lib/phf/phf.cc) 59 | set_target_properties(avro_schema_rt_c PROPERTIES PREFIX "" OUTPUT_NAME 60 | "avro_schema_rt_c" SUFFIX ".so" MACOSX_RPATH 0) 61 | 62 | # link with libc explicitly (-nodefaultlibs earlier) 63 | target_link_libraries(avro_schema_rt_c c) 64 | 65 | # postprocess Lua file, replacing opcode.X named constants with values 66 | add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/il_filt 67 | DEPENDS avro_schema/il.lua 68 | COMMAND ${CMAKE_SOURCE_DIR}/gen_il_filt.sh 69 | ${CMAKE_SOURCE_DIR}/avro_schema/il.lua) 70 | 71 | add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/il.lua 72 | DEPENDS avro_schema/il.lua ${CMAKE_BINARY_DIR}/il_filt 73 | COMMAND ${CMAKE_SOURCE_DIR}/il_filt.sh 74 | ${CMAKE_SOURCE_DIR}/avro_schema/il.lua il.lua) 75 | 76 | add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/backend.lua 77 | DEPENDS avro_schema/backend.lua ${CMAKE_BINARY_DIR}/il_filt 78 | COMMAND ${CMAKE_SOURCE_DIR}/il_filt.sh 79 | ${CMAKE_SOURCE_DIR}/avro_schema/backend.lua backend.lua) 80 | 81 | add_custom_target(postprocess_lua ALL DEPENDS 82 | ${CMAKE_BINARY_DIR}/il.lua 83 | ${CMAKE_BINARY_DIR}/backend.lua) 84 | 85 | # Install module 86 | install(FILES avro_schema/init.lua avro_schema/compiler.lua 87 | avro_schema/frontend.lua avro_schema/runtime.lua 88 | avro_schema/fingerprint.lua avro_schema/utils.lua 89 | avro_schema/version.lua 90 | DESTINATION ${TARANTOOL_INSTALL_LUADIR}/avro_schema) 91 | 92 | install(FILES ${CMAKE_BINARY_DIR}/il.lua 93 | DESTINATION ${TARANTOOL_INSTALL_LUADIR}/avro_schema) 94 | 95 | install(FILES ${CMAKE_BINARY_DIR}/backend.lua 96 | DESTINATION ${TARANTOOL_INSTALL_LUADIR}/avro_schema) 97 | 98 | install(TARGETS avro_schema_rt_c LIBRARY 99 | DESTINATION ${TARANTOOL_INSTALL_LIBDIR}) 100 | 101 | # testing 102 | 103 | set(TARANTOOL tarantool) 104 | 105 | set(LUA_PATH 106 | "${CMAKE_SOURCE_DIR}/?.lua\\;${CMAKE_SOURCE_DIR}/?/init.lua\\;\\;") 107 | 108 | set(LUA_CPATH "${CMAKE_BINARY_DIR}/?.so\\;\\;") 109 | 110 | enable_testing() 111 | 112 | add_test(NAME ddt_tests 113 | COMMAND ${TARANTOOL} ${CMAKE_SOURCE_DIR}/test/run_ddt_tests.lua 114 | WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/test) 115 | 116 | add_test(NAME api_tests/var 117 | COMMAND ${TARANTOOL} ${CMAKE_SOURCE_DIR}/test/api_tests/var.lua 118 | WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/test) 119 | 120 | add_test(NAME api_tests/export 121 | COMMAND ${TARANTOOL} ${CMAKE_SOURCE_DIR}/test/api_tests/export.lua 122 | WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/test) 123 | 124 | add_test(NAME api_tests/evolution 125 | COMMAND 
${TARANTOOL} ${CMAKE_SOURCE_DIR}/test/api_tests/evolution.lua 126 | WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/test) 127 | 128 | add_test(NAME api_tests/reload 129 | COMMAND ${TARANTOOL} ${CMAKE_SOURCE_DIR}/test/api_tests/reload.lua 130 | WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/test) 131 | 132 | add_test(NAME buf_grow_test 133 | COMMAND ${TARANTOOL} ${CMAKE_SOURCE_DIR}/test/buf_grow_test.lua 134 | WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/test) 135 | 136 | set(TESTS ddt_tests api_tests/var api_tests/export 137 | api_tests/evolution api_tests/reload buf_grow_test) 138 | foreach(test IN LISTS TESTS) 139 | 140 | set_property(TEST ${test} PROPERTY ENVIRONMENT "LUA_PATH=${LUA_PATH}") 141 | 142 | set_property(TEST ${test} APPEND PROPERTY ENVIRONMENT 143 | "LUA_CPATH=${LUA_CPATH}") 144 | 145 | endforeach() 146 | 147 | add_custom_target(check COMMAND sh -c 'ctest -V || ctest || ctest') 148 | 149 | add_custom_target(benchmark 150 | COMMAND env "LUA_PATH=${LUA_PATH}" 151 | "LUA_CPATH=${LUA_CPATH}" 152 | ${TARANTOOL} ${CMAKE_SOURCE_DIR}/benchmark.lua) 153 | -------------------------------------------------------------------------------- /FindTarantool.cmake: -------------------------------------------------------------------------------- 1 | # Define GNU standard installation directories 2 | include(GNUInstallDirs) 3 | 4 | macro(extract_definition name output input) 5 | string(REGEX MATCH "#define[\t ]+${name}[\t ]+\"([^\"]*)\"" 6 | _t "${input}") 7 | string(REGEX REPLACE "#define[\t ]+${name}[\t ]+\"(.*)\"" "\\1" 8 | ${output} "${_t}") 9 | endmacro() 10 | 11 | find_path(TARANTOOL_INCLUDE_DIR tarantool/module.h 12 | HINTS ${TARANTOOL_DIR} ENV TARANTOOL_DIR 13 | PATH_SUFFIXES include 14 | ) 15 | 16 | if(TARANTOOL_INCLUDE_DIR) 17 | set(_config "-") 18 | file(READ "${TARANTOOL_INCLUDE_DIR}/tarantool/module.h" _config0) 19 | string(REPLACE "\\" "\\\\" _config ${_config0}) 20 | unset(_config0) 21 | extract_definition(PACKAGE_VERSION TARANTOOL_VERSION ${_config}) 22 | extract_definition(INSTALL_PREFIX _install_prefix ${_config}) 23 | unset(_config) 24 | endif() 25 | 26 | include(FindPackageHandleStandardArgs) 27 | find_package_handle_standard_args(TARANTOOL 28 | REQUIRED_VARS TARANTOOL_INCLUDE_DIR VERSION_VAR TARANTOOL_VERSION) 29 | if(TARANTOOL_FOUND) 30 | set(TARANTOOL_INCLUDE_DIRS "${TARANTOOL_INCLUDE_DIR}" 31 | "${TARANTOOL_INCLUDE_DIR}/tarantool/" 32 | CACHE PATH "Include directories for Tarantool") 33 | set(TARANTOOL_INSTALL_LIBDIR "${CMAKE_INSTALL_LIBDIR}/tarantool" 34 | CACHE PATH "Directory for storing Lua modules written in C") 35 | set(TARANTOOL_INSTALL_LUADIR "${CMAKE_INSTALL_DATADIR}/tarantool" 36 | CACHE PATH "Directory for storing Lua modules written in Lua") 37 | 38 | if (NOT TARANTOOL_FIND_QUIETLY AND NOT FIND_TARANTOOL_DETAILS) 39 | set(FIND_TARANTOOL_DETAILS ON CACHE INTERNAL "Details about TARANTOOL") 40 | message(STATUS "Tarantool LUADIR is ${TARANTOOL_INSTALL_LUADIR}") 41 | message(STATUS "Tarantool LIBDIR is ${TARANTOOL_INSTALL_LIBDIR}") 42 | endif () 43 | endif() 44 | mark_as_advanced(TARANTOOL_INCLUDE_DIRS TARANTOOL_INSTALL_LIBDIR 45 | TARANTOOL_INSTALL_LUADIR) 46 | -------------------------------------------------------------------------------- /arch/PKGBUILD: -------------------------------------------------------------------------------- 1 | # Maintainer: Daniel YC Lin 2 | 3 | pkgname=tarantool-avro-schema 4 | _pkgname=avro-schema 5 | pkgver=2.0 6 | pkgrel=1 7 | pkgdesc='Apache Avro schema tools for Tarantool' 8 | arch=(i686 x86_64) 9 | url='http://github.com/tarantool/avro-schema' 10 | 
license=(BSD) 11 | makedepends=(cmake tarantool) 12 | source=(https://github.com/tarantool/$_pkgname/archive/$pkgver.tar.gz) 13 | 14 | build() { 15 | cd $_pkgname-$pkgver 16 | 17 | cmake \ 18 | -DCMAKE_INSTALL_PREFIX=/usr \ 19 | -DCMAKE_INSTALL_BINDIR=/usr/bin \ 20 | -DCMAKE_INSTALL_SYSCONFDIR=/etc \ 21 | -DCMAKE_INSTALL_LIBDIR=/usr/lib \ 22 | . 23 | 24 | make 25 | } 26 | 27 | check() { 28 | cd $_pkgname-$pkgver 29 | make check 30 | } 31 | 32 | package() { 33 | cd $_pkgname-$pkgver 34 | make install DESTDIR="$pkgdir" 35 | 36 | install -D -m644 lib/phf/LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE" 37 | } 38 | md5sums=('eace51cb4caf4666241cb7dbf80a4cff') 39 | -------------------------------------------------------------------------------- /avro-schema-scm-1.rockspec: -------------------------------------------------------------------------------- 1 | package = 'avro-schema' 2 | version = 'scm-1' 3 | source = { 4 | url = 'git+https://github.com/tarantool/avro-schema.git', 5 | branch = 'master', 6 | } 7 | description = { 8 | summary = "Apache Avro schema tools for Tarantool", 9 | homepage = 'https://github.com/tarantool/avro-schema', 10 | license = 'BSD' 11 | } 12 | dependencies = { 13 | 'lua >= 5.1', 14 | } 15 | external_dependencies = { 16 | TARANTOOL = { 17 | header = 'tarantool/module.h'; 18 | }; 19 | } 20 | build = { 21 | type = 'cmake', 22 | variables = { 23 | TARANTOOL_DIR="$(TARANTOOL_DIR)"; 24 | CMAKE_BUILD_TYPE="RelWithDebInfo", 25 | TARANTOOL_INSTALL_LIBDIR="$(LIBDIR)", 26 | TARANTOOL_INSTALL_LUADIR="$(LUADIR)", 27 | }, 28 | } 29 | -- vim: syntax=lua 30 | -------------------------------------------------------------------------------- /avro_schema/fingerprint.lua: -------------------------------------------------------------------------------- 1 | -- This file implements the fingerprinting mechanism for Avro schema. 2 | -- It was necessary to implement our own JSON encoder because of some special 3 | -- rules for Avro fingerprint generation and Parsing Canonical Form generation. 4 | 5 | local json = require("json").new() 6 | local frontend = require "avro_schema.frontend" 7 | -- Tarantool specific module 8 | local digest = require "digest" 9 | 10 | json.cfg{encode_use_tostring = true} 11 | 12 | local avro_json 13 | 14 | local function raise_error(message, ...) 15 | error(string.format("avro-fingerprint: "..message, ...)) 16 | end 17 | 18 | local function is_primitive_type(xtype) 19 | local ptypes = {"string", "number", "boolean"} 20 | for _,t in ipairs(ptypes) do 21 | if xtype == t then return true end 22 | end 23 | return false 24 | end 25 | 26 | local function avro_json_array(data, extra_fields) 27 | local res = {} 28 | for _,item in ipairs(data) do 29 | table.insert(res,avro_json(item, extra_fields)) 30 | end 31 | return string.format("[%s]", table.concat(res, ",")) 32 | end 33 | 34 | local function avro_json_object(data, extra_fields) 35 | local res = {} 36 | local necessary_order = {"name", "type", "fields", "symbols", "items", "values", "size"} 37 | -- 38 | -- There are cases in which it is necessary to extend a schema. 39 | -- The code below provides a method to add those attrs in a sustainable way.
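-- For example, a hypothetical extra_fields = {"extra_attr"} would serialize a
-- type's "extra_attr" attribute too, after the standard attributes above;
-- this is how options.preserve_in_fingerprint is applied in get_fingerprint.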
40 | -- 41 | for _, val in ipairs(extra_fields) do 42 | table.insert(necessary_order, val) 43 | end 44 | 45 | for _,name in ipairs(necessary_order) do 46 | local item = data[name] 47 | if item ~= nil then 48 | local inner = avro_json(item, extra_fields) 49 | inner = string.format([[%s:%s]], json.encode(name), inner) 50 | table.insert(res, inner) 51 | end 52 | end 53 | return string.format("{%s}", table.concat(res, ",")) 54 | end 55 | 56 | -- Takes a normalized Avro schema and produces a normalized schema representation 57 | -- encoded in JSON format. 58 | avro_json = function (data, extra_fields) 59 | extra_fields = extra_fields or {} 60 | -- should be sorted for consistency 61 | table.sort(extra_fields) 62 | local xtype = type(data) 63 | if is_primitive_type(xtype) then 64 | return json.encode(data) 65 | end 66 | if xtype ~= "table" then 67 | raise_error("data type is not supported: %s", xtype) 68 | end 69 | -- array 70 | if #data > 0 then 71 | return avro_json_array(data, extra_fields) 72 | end 73 | -- object (dict) 74 | return avro_json_object(data, extra_fields) 75 | end 76 | 77 | local function get_fingerprint(schema, algo, size, options) 78 | if digest[algo] == nil or type(digest[algo]) ~= "function" then 79 | raise_error("The hash function %s is not supported", algo) 80 | end 81 | -- We have to call export first to replace type definitions with type 82 | -- references (all except the first). 83 | schema = frontend.export_helper(schema) 84 | local fp = digest[algo](avro_json(schema, options.preserve_in_fingerprint)) 85 | return fp:sub(1, size) 86 | end 87 | 88 | return { 89 | avro_json = avro_json, 90 | get_fingerprint = get_fingerprint, 91 | } 92 | -------------------------------------------------------------------------------- /avro_schema/utils.lua: -------------------------------------------------------------------------------- 1 | local function table_contains(t, xval) 2 | for k, val in ipairs(t) do 3 | if type(k) == "number" and val == xval then 4 | return true 5 | end 6 | end 7 | return false 8 | end 9 | 10 | -- This function copies fields from one table to the other. 11 | -- 12 | -- The precise behavior may be controlled with the help of the `opts` arg. 13 | -- opts: precise copy settings 14 | -- fields: table of field keys (names) which should be copied; 15 | -- if `nil` then all fields are copied 16 | -- exclude: table of field keys (names) which should not be copied; 17 | -- works if `fields` is not passed 18 | -- deep: whether to copy fields deeply or not 19 | local function copy_fields(from, to, opts) 20 | opts = opts or {} 21 | assert(opts.fields == nil or opts.exclude == nil, 22 | "`exclude` attr should not be set if `fields` attr is set") 23 | local deep = opts.deep ~= false -- default true; `opts.deep or true` would ignore an explicit false 24 | local fields_exclude = opts.exclude or {} 25 | -- convert exclude fields to a dict to reduce asymptotic exec time 26 | local fields_exclude_dict = {} 27 | for _, name in pairs(fields_exclude) do 28 | fields_exclude_dict[name] = true 29 | end 30 | local fields_copy = opts.fields 31 | if fields_copy == nil then 32 | fields_copy = {} 33 | for k, _ in pairs(from) do 34 | if fields_exclude_dict[k] == nil then 35 | table.insert(fields_copy, k) 36 | end 37 | end 38 | end 39 | for _, name in pairs(fields_copy) do 40 | local field = from[name] 41 | if deep then 42 | to[name] = table.deepcopy(field) 43 | else 44 | to[name] = field 45 | end 46 | end 47 | end 48 | 49 | --- Check if the given table has only one specific key.
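-- E.g. (illustrative values): has_only({type = "null"}, "type") is true,
-- while has_only({type = "null", extra = 1}, "type") is false.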
50 | local function has_only(t, key) 51 | local fst_key = next(t) 52 | local snd_key = next(t, fst_key) 53 | return fst_key == key and snd_key == nil 54 | end 55 | 56 | local function map(func, tab) 57 | local res = {} 58 | for k, v in pairs(tab) do 59 | res[k] = func(k, v) 60 | end 61 | return res 62 | end 63 | 64 | -- Fast stack implementation. 65 | -- This stack can store several values in each frame. 66 | -- Instead of storing tables for each frame, this stack maintains 67 | -- several arrays and stores different keys in different arrays. 68 | -- 69 | -- E.g. init_fstack({'schema', 'data'}) would create a table like this: 70 | -- { 71 | -- schema = {}, -- array which stores schema values 72 | -- set_schema = function(schema), 73 | -- get_schema = function(), 74 | -- data = {}, -- array which stores data values 75 | -- set_data = function(data), 76 | -- get_data = function(), 77 | -- len = 0, -- current size of the array 78 | -- push = function(schema, data), 79 | -- pop = function() -> { schema = schema[i], data = data[i] } 80 | -- get = function(i) -> { schema = schema[i], data = data[i] } 81 | -- remove_last = function(), 82 | -- clear = function(), 83 | -- } 84 | local function init_fstack(keys) 85 | assert(table_contains(keys, 'push') == false) 86 | assert(table_contains(keys, 'pop') == false) 87 | assert(table_contains(keys, 'remove_last') == false) 88 | assert(table_contains(keys, 'get') == false) 89 | assert(table_contains(keys, 'clear') == false) 90 | local fstack_str = [[ 91 | DECLARE_VARS 92 | local table_clear = require('table.clear') 93 | local stack = { 94 | len = 0, 95 | MOVE_VARS_TO_MODULE 96 | } 97 | stack.push = function(PUSH_VARS) 98 | stack.len = stack.len + 1 99 | PUSH_SET_VALS 100 | end 101 | stack.pop = function() 102 | local len = stack.len 103 | assert(len > 0) 104 | stack.len = len - 1 105 | return POP_RETURN_STMT 106 | end 107 | -- remove_last does not actually remove frames due to performance 108 | -- reasons (~10% RPS). 109 | stack.remove_last = function() 110 | stack.len = stack.len - 1 111 | assert(stack.len >= 0) 112 | end 113 | stack.get = function(pos) 114 | if pos <= 0 or pos > stack.len then 115 | error(("Attempt to get element %d " .. 116 | "from stack of size %d"):format(pos, stack.len)) 117 | end 118 | return GET_RETURN_STMT 119 | end 120 | stack.clear = function() 121 | stack.len = 0 122 | CLEAR_CLEAR_TABLES 123 | end 124 | SETTERS_GETTERS 125 | return stack 126 | ]] 127 | fstack_str = fstack_str:gsub('DECLARE_VARS', 128 | table.concat( 129 | map(function(_, key) 130 | return ('local %s = {}'):format(key) 131 | end, keys), 132 | '\n') 133 | ) 134 | fstack_str = fstack_str:gsub('MOVE_VARS_TO_MODULE', 135 | table.concat( 136 | map(function(_, key) 137 | return ('%s = %s'):format(key, key) 138 | end, keys), 139 | ',\n') 140 | ) 141 | fstack_str = fstack_str:gsub('PUSH_VARS', 142 | table.concat( 143 | map(function(i, _) return 'v' .. i end, keys), 144 | ',') 145 | ) 146 | fstack_str = fstack_str:gsub('PUSH_SET_VALS', 147 | table.concat( 148 | map(function(i, key) 149 | return ('%s[stack.len] = v%d'):format(key, i) 150 | end, keys), 151 | '\n') 152 | ) 153 | fstack_str = fstack_str:gsub('POP_RETURN_STMT', 154 | table.concat( 155 | map(function(_, key) 156 | return ('%s[len]'):format(key) 157 | end, keys), 158 | ',') 159 | ) 160 | -- GET_RETURN_STMT expands to the per-key reads at position pos, 161 | -- e.g. for keys = {'schema', 'data'}: schema[pos], data[pos].
162 | fstack_str = fstack_str:gsub('GET_RETURN_STMT', 163 | table.concat( 164 | map(function(_, key) 165 | return ('%s[pos]'):format(key) 166 | end, keys), 167 | ',') 168 | ) 169 | fstack_str = fstack_str:gsub('CLEAR_CLEAR_TABLES', 170 | table.concat( 171 | map(function(_, key) 172 | return ('table_clear(%s)'):format(key) 173 | end, keys), 174 | '\n') 175 | ) 176 | -- Gen setters/getters. 177 | -- Setters/getters are a bit slower than raw access. 178 | local setters_getters = {} 179 | for _, key in ipairs(keys) do 180 | local setter_str = ([[ 181 | stack.set_%s = function(val) 182 | assert(stack.len > 0) 183 | stack.%s[stack.len] = val 184 | end 185 | ]]):format(key, key) 186 | table.insert(setters_getters, setter_str) 187 | local getter_str = ([[ 188 | stack.get_%s = function() 189 | assert(stack.len > 0) 190 | return stack.%s[stack.len] 191 | end 192 | ]]):format(key, key) 193 | table.insert(setters_getters, getter_str) 194 | end 195 | setters_getters = table.concat(setters_getters, '\n') 196 | fstack_str = fstack_str:gsub('SETTERS_GETTERS', setters_getters) 197 | local stack = loadstring(fstack_str, 'avro.utils.fstack')() 198 | return stack 199 | end 200 | 201 | return { 202 | table_contains = table_contains, 203 | copy_fields = copy_fields, 204 | has_only = has_only, 205 | init_fstack = init_fstack, 206 | } 207 | -------------------------------------------------------------------------------- /avro_schema/version.lua: -------------------------------------------------------------------------------- 1 | -- Contains the module version. 2 | -- Requires manual update in case of a release commit. 3 | 4 | return '3.1.0' 5 | -------------------------------------------------------------------------------- /benchmark.lua: -------------------------------------------------------------------------------- 1 | local avro = require('avro_schema') 2 | 3 | local ok, person = avro.create({ 4 | type = 'record', 5 | name = 'Person', 6 | namespace = 'Person', 7 | fields = { 8 | { name = 'FirstName', type = 'string' }, 9 | { name = 'LastName', type = 'string' }, 10 | { name = 'Class', type = 'string' }, 11 | { name = 'Age', type = 'long' }, 12 | { 13 | name = 'Sex', 14 | type = { 15 | type = 'enum', 16 | name = 'Sex', 17 | symbols = { 'FEMALE', 'MALE' } 18 | } 19 | }, 20 | { 21 | name = 'Stats', 22 | type = { 23 | type = 'record', 24 | name = 'Stats', 25 | fields = { 26 | { name = 'Strength', type = 'long' }, 27 | { name = 'Perception', type = 'long' }, 28 | { name = 'Endurance', type = 'long' }, 29 | { name = 'Charisma', type = 'long' }, 30 | { name = 'Intelligence', type = 'long' }, 31 | { name = 'Agility', type = 'long' }, 32 | { name = 'Luck', type = 'long' } 33 | } 34 | } 35 | }, 36 | { 37 | name = 'Journal', 38 | type = { 39 | type = 'array', 40 | items = 'string' 41 | } 42 | } 43 | } 44 | }) 45 | 46 | if not ok then error(person) end 47 | 48 | local ok, person_c = avro.compile{person, dump_il='person.il'} 49 | if not ok then error(person_c) end 50 | local ok, person_c_debug = avro.compile{person, dump_il='person.il', debug=true} if not ok then error(person_c_debug) end 51 | 52 | 53 | local data = { 54 | FirstName = 'John', 55 | LastName = 'Doe', 56 | Class = 'TechWizard', 57 | Age = 17, 58 | Sex = 'MALE', 59 | Stats = { 60 | Strength = 3, 61 | Perception = 5, 62 | Endurance = 1, 63 | Charisma = 4, 64 | Intelligence = 9, 65 | Agility = 3, 66 | Luck = 6 67 | }, 68 | Journal = { 69 | 'You are standing at the end of a road before a small brick building.', 70 | 'Around you is a forest.', 71 | 'A small stream plows out of the building and down a 
gully.', 72 | 'You enter the forest.', 73 | 'You are in a valley in the forest besides a stream tumling along a rocky end.', 74 | 'You feel thirsty!' 75 | } 76 | } 77 | local msgpack = require('msgpack') 78 | local c = person_c 79 | local d = person_c_debug 80 | local data_mp = msgpack.encode(data) 81 | local _, data_fl = c.flatten(data) 82 | local _, data_fl_mp = c.flatten_msgpack(data) 83 | local testcases = { 84 | -- { name , func , arg1 , arg2} 85 | { "msgpack(lua t)" , msgpack.encode , data } , 86 | { "msgpackdecode(mp)" , msgpack.decode , data_mp } , 87 | { "validate(lua t)" , avro.validate , person , data } , 88 | { "flatten(lua t)" , c.flatten , data } , 89 | { "flatten(mp)" , c.flatten , data_mp } , 90 | { "unflatten(lua t)" , c.unflatten , data_fl } , 91 | { "unflatten(mp)" , c.unflatten , data_fl_mp } , 92 | { "flatten_mp(lua t)" , c.flatten_msgpack , data } , 93 | { "flatten_mp(mp)" , c.flatten_msgpack , data_mp } , 94 | { "unflatten_mp(lua t)" , c.unflatten_msgpack , data_fl } , 95 | { "unflatten_mp(mp)" , c.unflatten_msgpack , data_fl_mp } , 96 | { "flatten_mp(mp) optimizations off" ,d.flatten_msgpack , data_mp } , 97 | { "unflatten_mp(mp) optimizations off" ,d.unflatten_msgpack, data_fl_mp }, 98 | } 99 | 100 | print('benchmark started...') 101 | local clock = require('clock') 102 | local n = 10000000 103 | for _, testcase in pairs(testcases) do 104 | local name = testcase[1] 105 | local xfunc = testcase[2] 106 | local arg1 = testcase[3] 107 | local arg2 = testcase[4] 108 | local t = clock.bench(function() 109 | -- This crutch is required, because we cannot just pass 110 | -- a nil arg to some functions implemented in C and expect the 111 | -- same behavior as if we do not pass the argument. 112 | if arg2 then 113 | for i = 1, n do 114 | xfunc(arg1, arg2) 115 | end 116 | else 117 | for i = 1, n do 118 | xfunc(arg1) 119 | end 120 | end 121 | end)[1] 122 | print(string.format('%f M RPS %s', n/1000000.0/t, name)) 123 | end 124 | -------------------------------------------------------------------------------- /debian/.gitignore: -------------------------------------------------------------------------------- 1 | tarantool-modulekit/ 2 | files 3 | stamp-* 4 | *.substvars 5 | *.log 6 | -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | tarantool-avro-schema (2.0.0-1) unstable; urgency=low 2 | 3 | Full rewrite in Lua: 4 | * added support for Avro schema defaults; 5 | * support for Avro schema aliases; 6 | * great error messages; 7 | * runtime code generation makes transformations fast. 
8 | 9 | -- Nick Zavaritsky <mejedi@tarantool.org> Wed, 13 Jul 2016 15:34:00 +0300 10 | 11 | tarantool-avro-schema (1.0.1-1) stable; urgency=low 12 | 13 | * Incremental update 14 | 15 | -- Nick Zavaritsky <mejedi@tarantool.org> Wed, 13 Jul 2016 14:56:00 +0300 16 | 17 | tarantool-avro-schema (1.0.0-1) unstable; urgency=low 18 | 19 | * Initial release 20 | 21 | -- Nick Zavaritsky <mejedi@tarantool.org> Wed, 6 Mar 2016 11:30:00 +0300 22 | -------------------------------------------------------------------------------- /debian/compat: -------------------------------------------------------------------------------- 1 | 9 2 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: tarantool-avro-schema 2 | Priority: optional 3 | Section: database 4 | Maintainer: Nick Zavaritsky <mejedi@tarantool.org> 5 | Build-Depends: debhelper (>= 9), cdbs, 6 | cmake (>= 2.8), 7 | tarantool-dev (>= 1.6.8.0) 8 | Standards-Version: 3.9.6 9 | Homepage: https://github.com/tarantool/avro-schema 10 | Vcs-Git: git://github.com/tarantool/avro-schema.git 11 | Vcs-Browser: https://github.com/tarantool/avro-schema 12 | 13 | Package: tarantool-avro-schema 14 | Architecture: i386 amd64 armhf arm64 15 | Depends: tarantool (>= 1.6.8.0), ${shlibs:Depends}, ${misc:Depends} 16 | Pre-Depends: ${misc:Pre-Depends} 17 | Description: Apache Avro schema tools for Tarantool 18 | Provides: tarantool-avro 19 | Conflicts: tarantool-avro 20 | Replaces: tarantool-avro 21 | -------------------------------------------------------------------------------- /debian/copyright: -------------------------------------------------------------------------------- 1 | Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Debianized-By: Nick Zavaritsky <mejedi@tarantool.org> 3 | Upstream-Name: avro-schema 4 | Upstream-Contact: mejedi@tarantool.org 5 | Source: https://github.com/tarantool/avro-schema 6 | 7 | Files: * 8 | Copyright: 2016 by Nick Zavaritsky <mejedi@tarantool.org> 9 | License: BSD-2-Clause 10 | Redistribution and use in source and binary forms, with or 11 | without modification, are permitted provided that the following 12 | conditions are met: 13 | . 14 | 1. Redistributions of source code must retain the above 15 | copyright notice, this list of conditions and the 16 | following disclaimer. 17 | . 18 | 2. Redistributions in binary form must reproduce the above 19 | copyright notice, this list of conditions and the following 20 | disclaimer in the documentation and/or other materials 21 | provided with the distribution. 22 | . 23 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND 24 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 25 | TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 26 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER 27 | OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 28 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 29 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 30 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 31 | BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 32 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 33 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF 34 | THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 35 | SUCH DAMAGE. 
36 | -------------------------------------------------------------------------------- /debian/docs: -------------------------------------------------------------------------------- 1 | README.md 2 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | DEB_CMAKE_EXTRA_FLAGS := -DCMAKE_BUILD_TYPE=RelWithDebInfo 4 | DEB_CMAKE_EXTRA_FLAGS := -DCMAKE_INSTALL_LIBDIR=lib/$(DEB_HOST_MULTIARCH) \ 5 | -DCMAKE_BUILD_TYPE=RelWithDebInfo 6 | DEB_MAKE_CHECK_TARGET := check 7 | 8 | include /usr/share/cdbs/1/rules/debhelper.mk 9 | include /usr/share/cdbs/1/class/cmake.mk 10 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (quilt) 2 | -------------------------------------------------------------------------------- /exports: -------------------------------------------------------------------------------- 1 | { 2 | global: 3 | _init; 4 | _fini; 5 | 6 | parse_msgpack; 7 | unparse_msgpack; 8 | schema_rt_buf_grow; 9 | schema_rt_extract_location; 10 | schema_rt_xflatten_done; 11 | 12 | create_hash_func; 13 | eval_hash_func; 14 | eval_fnv1a_func; 15 | 16 | schema_rt_key_eq; 17 | schema_rt_search8; 18 | schema_rt_search16; 19 | schema_rt_search32; 20 | 21 | phf_init_uint32; 22 | phf_compact; 23 | phf_hash_uint32; 24 | phf_destroy; 25 | phf_hash_uint32_band_raw8; 26 | phf_hash_uint32_band_raw16; 27 | phf_hash_uint32_band_raw32; 28 | local: *; 29 | }; 30 | -------------------------------------------------------------------------------- /exports_osx: -------------------------------------------------------------------------------- 1 | _parse_msgpack 2 | _unparse_msgpack 3 | _schema_rt_buf_grow 4 | _schema_rt_extract_location 5 | _schema_rt_xflatten_done 6 | 7 | _create_hash_func 8 | _eval_hash_func 9 | _eval_fnv1a_func 10 | 11 | _schema_rt_key_eq 12 | _schema_rt_search8 13 | _schema_rt_search16 14 | _schema_rt_search32 15 | 16 | _phf_init_uint32 17 | _phf_compact 18 | _phf_hash_uint32 19 | _phf_destroy 20 | _phf_hash_uint32_band_raw8 21 | _phf_hash_uint32_band_raw16 22 | _phf_hash_uint32_band_raw32 23 | -------------------------------------------------------------------------------- /gen_il_filt.sh: -------------------------------------------------------------------------------- 1 | #! /bin/sh 2 | echo '/static/d' > il_filt 3 | grep static $1 | awk '{ print "s/opcode[.]"$4"/"$6}' | sed -e 's.;./.' | sort -r >> il_filt 4 | -------------------------------------------------------------------------------- /il_filt.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/sh 2 | sed -f il_filt $1 > $2 3 | -------------------------------------------------------------------------------- /lib/phf/GNUmakefile: -------------------------------------------------------------------------------- 1 | all: # default target 2 | 3 | -include .config 4 | 5 | prefix ?= /usr/local 6 | includedir ?= $(prefix)/include 7 | libdir ?= $(prefix)/lib 8 | bindir ?= $(prefix)/bin 9 | 10 | luacpath ?= 11 | lua51cpath ?= 12 | lua52cpath ?= 13 | lua53cpath ?= 14 | 15 | RMDIR ?= rmdir 16 | MKDIR ?= mkdir 17 | CP ?= cp 18 | 19 | OSNAME ?= $(shell uname -s) 20 | CXXNAME ?= $(shell env CC=$(CXX) mk/luapath ccname) 21 | 22 | ifeq ($(CXXNAME), sunpro) 23 | CXXFLAGS ?= -g 24 | else 25 | CXXFLAGS ?= -std=c++11 -fno-rtti -fno-exceptions -g -O3 -Wall -Wextra 26 | endif 27 | 28 | ifeq ($(CXXNAME), sunpro) 29 | SOFLAGS ?= -xcode=pic13 -shared 30 | else 31 | ifeq ($(OSNAME), Darwin) 32 | SOFLAGS ?= -fPIC -dynamiclib -undefined dynamic_lookup 33 | else 34 | SOFLAGS ?= -fPIC -shared 35 | endif 36 | endif 37 | 38 | ifeq ($(CXXNAME), sunpro) 39 | LOFLAGS ?= -xcode=pic13 -shared 40 | else 41 | ifeq ($(OSNAME), Darwin) 42 | LOFLAGS ?= -fPIC -bundle -undefined dynamic_lookup 43 | else 44 | LOFLAGS ?= -fPIC -shared 45 | endif 46 | endif 47 | 48 | ifeq ($(OSNAME), Darwin) 49 | LIBPHF ?= libphf.dylib 50 | else 51 | LIBPHF ?= libphf.so 52 | endif 53 | 54 | .PHONY: config 55 | 56 | config: 57 | printf 'prefix ?= $(value prefix)'"\n" >| .config 58 | printf 'includedir ?= $(value includedir)'"\n" >> .config 59 | printf 'libdir ?= $(value libdir)'"\n" >> .config 60 | printf 'bindir ?= $(value bindir)'"\n" >> .config 61 | printf 'luacpath ?= $(value luacpath)'"\n" >> .config 62 | printf 'lua51cpath ?= $(value lua51cpath)'"\n" >> .config 63 | printf 'lua52cpath ?= $(value lua52cpath)'"\n" >> .config 64 | printf 'lua53cpath ?= $(value lua53cpath)'"\n" >> .config 65 | printf 'CXX ?= $(value CXX)'"\n" >> .config 66 | printf 'CPPFLAGS ?= $(value CPPFLAGS)'"\n" >> .config 67 | printf 'CXXFLAGS ?= $(value CXXFLAGS)'"\n" >> .config 68 | printf 'LDFLAGS ?= $(value LDFLAGS)'"\n" >> .config 69 | printf 'SOFLAGS ?= $(value SOFLAGS)'"\n" >> .config 70 | printf 'LOFLAGS ?= $(value LOFLAGS)'"\n" >> .config 71 | printf 'LIBS ?= $(value LIBS)'"\n" >> .config 72 | printf 'LIBPHF ?= $(value LIBPHF)'"\n" >> .config 73 | printf 'RM ?= $(value RM)'"\n" >> .config 74 | printf 'RMDIR ?= $(value RMDIR)'"\n" >> .config 75 | printf 'MKDIR ?= $(value MKDIR)'"\n" >> .config 76 | printf 'CP ?= $(value CP)'"\n" >> .config 77 | printf 'OSNAME ?= $(value OSNAME)'"\n" >> .config 78 | printf 'CXXNAME ?= $(value CXXNAME)'"\n" >> .config 79 | 80 | phf: phf.cc phf.h 81 | $(CXX) -o $@ $< $(CXXFLAGS) $(CPPFLAGS) -DPHF_MAIN $(LIBS) 82 | 83 | $(LIBPHF): phf.cc phf.h 84 | $(CXX) -o $@ $< $(CXXFLAGS) $(CPPFLAGS) $(LDFLAGS) $(SOFLAGS) $(LIBS) 85 | 86 | all: phf $(LIBPHF) 87 | 88 | LUAPATH = $(shell env CC="$(CXX)" CPPFLAGS="$(CPPFLAGS)" LDFLAGS="$(LDFLAGS)" mk/luapath -krxm3 $(if $(includedir),$(if $(DESTDIR), -I$(DESTDIR)$(includedir)) -I$(includedir)) -I/usr/include -I/usr/local/include $(if $(DESTDIR),-P$(DESTDIR)$(bindir)) -P$(bindir) -v$(1) $(2)) 89 | 90 | define LUALIB_BUILD 91 | $(1)/phf.so: phf.cc phf.h 92 | test "$(1)" = "$$(call LUAPATH, $(1), version)" 93 | $$(MKDIR) -p $$(@D) 94 | $$(CXX) -o $$@ $$< $$(CXXFLAGS) $$(CPPFLAGS) $$(call LUAPATH, $(1), cppflags) -DPHF_LUALIB $$(LDFLAGS) $$(LOFLAGS) $(LIBS) 95 | 96 | .SECONDARY: all$(1) 97 | 98 | lua$(1) all$(1): $(1)/phf.so 99 | 100 | ifeq (all, $(filter all, $(or $(MAKECMDGOALS), all))) 101 | 
ifeq ($(1), $$(call LUAPATH, $(1), version)) 102 | all: $(1)/phf.so 103 | endif 104 | endif 105 | 106 | endef # LUALIB_BUILD 107 | 108 | $(eval $(call LUALIB_BUILD,5.1)) 109 | 110 | $(eval $(call LUALIB_BUILD,5.2)) 111 | 112 | $(eval $(call LUALIB_BUILD,5.3)) 113 | 114 | 115 | define LUALIB_INSTALL 116 | ifneq ($(filter install install$(1) uninstall uninstall$(1), $(MAKECMDGOALS)),) 117 | ifeq ($$($(2)),) # define lua5?cpath if empty 118 | $(2)_dyn = $$(call LUAPATH, $(1), cdir) 119 | $(2)_sed = $$(shell printf "$$(luacpath)" | sed -ne 's/[[:digit:]].[[:digit:]]/$(1)/p') 120 | $(2)_lib = $$(libdir)/lua/$(1) 121 | 122 | override $(2) = $$(or $$($(2)_dyn), $$($(2)_sed), $$($(2)_lib)) 123 | endif 124 | 125 | $$($(2))/phf.so: $(1)/phf.so 126 | $$(MKDIR) -p $$(@D) 127 | $$(CP) -fp $$< $$@ 128 | 129 | .SECONDARY: install install$(1) 130 | 131 | install install$(1): $$($(2))/phf.so 132 | 133 | .PHONY: uninstall uninstall$(1) 134 | 135 | uninstall$(1): 136 | $(RM) -f $$($(2))/phf.so 137 | 138 | uninstall: uninstall$(1) 139 | 140 | endif # if install or install$(1) 141 | endef # LUALIB_INSTALL 142 | 143 | $(eval $(call LUALIB_INSTALL,5.1,lua51cpath)) 144 | 145 | $(eval $(call LUALIB_INSTALL,5.2,lua52cpath)) 146 | 147 | $(eval $(call LUALIB_INSTALL,5.3,lua53cpath)) 148 | 149 | 150 | .PHONY: clean distclean clean~ 151 | 152 | distclean: clean 153 | $(RM) -f .config 154 | 155 | clean: 156 | $(RM) -f phf 157 | $(RM) -f $(LIBPHF) 158 | $(RM) -fr 5.?/ 159 | $(RM) -fr *.dSYM/ 160 | 161 | clean~: 162 | find . -name '*~' -exec rm {} + 163 | -------------------------------------------------------------------------------- /lib/phf/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014-2015 William Ahern 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to 5 | deal in the Software without restriction, including without limitation the 6 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 7 | sell copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 18 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 19 | IN THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /lib/phf/README.md: -------------------------------------------------------------------------------- 1 | # Introduction # 2 | 3 | This is a simple implementation of the CHD perfect hash algorithm. CHD can 4 | generate perfect hash functions for very large key sets--on the order of 5 | millions of keys--in a very short time. 
On my circa 2012 desktop and using 6 | the default parameters (hash load factor of 80% and average displacement map 7 | bucket load of 4.0 keys) this implementation can generate a hash function 8 | for 1,000 keys in less than 1/100th of a second, and 1,000,000 keys in less 9 | than a second. 10 | 11 | For more information about the algorithm, see 12 | http://cmph.sourceforge.net/chd.html. 13 | 14 | # Dependencies # 15 | 16 | * No runtime dependencies. 17 | * Requires a modern C++ compiler to build. 18 | * The included build requires GNU Make. 19 | 20 | # Building # 21 | 22 | ## Make Macros ## 23 | 24 | The typical GNU macros can be used to control the build. 25 | 26 | ### Compilation ### 27 | 28 | Note that the modules for Lua 5.1, 5.2, and 5.3 can be built simultaneously. 29 | 30 | * CXX: C++ compiler path. 31 | * CXXFLAGS: C++ compiler flags. 32 | * CPPFLAGS: C preprocessor flags. Necessary if Lua API cannot be discovered 33 | automatically. You can specify multiple include paths if building more than 34 | one Lua module. 35 | * LDFLAGS: Linker flags. Not normally needed. 36 | * SOFLAGS: Flags needed to build dynamic library. 37 | * LOFLAGS: Flags needed to build loadable module. Normally should be the 38 | same as SOFLAGS, except on OS X. 39 | * LIBS: Library dependencies. Normally empty, but see the section Avoiding 40 | C++ Dependencies. 41 | 42 | #### Avoiding C++ Dependencies 43 | 44 | Defining the preprocessor macro PHF_NO_LIBCXX to 1 will prevent usage of C++ 45 | interfaces such as std::string that would require a dependency on libc++ or 46 | libstdc++. This allows using platform-dependent flags in CXXFLAGS, LDFLAGS, 47 | and SOFLAGS to prevent a dependency on the system C++ library. 48 | 49 | For example, on OS X you can do: 50 | ```sh 51 | $ make CPPFLAGS="-DPHF_NO_LIBCXX" \ 52 | CXXFLAGS="-std=c++11 -fno-rtti -fno-exceptions -O3 -march=native" \ 53 | LDFLAGS="-nostdlib" \ 54 | LIBS="-lSystem" 55 | ``` 56 | 57 | ### Installation ### 58 | * prefix 59 | * includedir 60 | * libdir 61 | * luacpath: Lua C module install path. Can be used for one-shot installation 62 | of a particular Lua version module. 63 | * lua51cpath: Lua 5.1 C module install path. 64 | * lua52cpath: Same as above, for 5.2. 65 | * lua53cpath: Same as above, for 5.3. 66 | 67 | ## Make Targets ## 68 | 69 | * phf: Builds command-line utility (development) 70 | * libphf.so: Builds dynamic library for non-OS X 71 | * libphf.dylib: Builds dynamic library for OS X 72 | * lua5.1: Builds Lua 5.1 module at 5.1/phf.so. Lua 5.1 headers should be 73 | specified using CPPFLAGS if not in normal locations. 74 | * lua5.2: Same as above, for Lua 5.2. 75 | * lua5.3: Same as above, for Lua 5.3. 76 | 77 | # Usage # 78 | 79 | ## Lua ## 80 | 81 | ### API ### 82 | 83 | ### phf.new(keys[, lambda][, alpha][, seed][, nodiv]) ### 84 | 85 | * keys: array of keys in order from 1..#keys. They should be all 86 | numbers or all strings. 87 | 88 | * lambda: number of keys per bucket when generating the g() function mapping. 89 | 90 | * alpha: output hash space loading factor as percentage from 91 | 1..100. 100% generates a *minimal* perfect hash function. But note that 92 | the implementation does *not* implement the necessary optimizations to 93 | ensure timely generation of minimal perfect hash functions. Normally you 94 | want a loading factor of 80% to 90% for large key sets. 95 | 96 | * seed: random integer seed. 97 | 98 | * nodiv: if true rounds r and m to powers of 2, and performs modular 99 | reduction using bitwise AND. 
116 | ### Example ###
117 | 
118 | ```Lua
119 | local phf = require"phf"
120 | 
121 | local lambda = 4 -- how many keys per intermediate bucket
122 | local alpha = 80 -- output hash space loading in percentage.
123 | 
124 | local keys = { "apple", "banana", "cherry", "date", "eggplant", "fig",
125 |                "guava", "honeydew", "jackfruit", "kiwi", "lemon", "mango" }
126 | 
127 | local F = phf.new(keys, lambda, alpha)
128 | 
129 | for i=1,#keys do
130 |     print(keys[i], F(keys[i]))
131 | end
132 | 
133 | ```
134 | 
135 | ## C++ ##
136 | 
137 | ### API ###
138 | 
139 | ### PHF::uniq(T k[], size_t n); ###
140 | 
141 | Similar to the shell command `sort | uniq`. Sorts, deduplicates, and shifts
142 | down the keys in the array k. Returns the number of unique keys, which will
143 | have been moved to the beginning of the array. If necessary, do this before
144 | calling PHF::init, as PHF::init does not tolerate duplicate keys.
145 | 
146 | ### int PHF::init(struct phf *f, const T k[], size_t n, size_t l, size_t a, phf_seed_t s);
147 | 
148 | Generates a perfect hash function for the n keys in array k and stores the
149 | results in f. Returns a system error number on failure, or 0 on success. f
150 | is unmodified on failure.
151 | 
152 | ### void PHF::destroy(struct phf *);
153 | 
154 | Deallocates the internal tables, but not the struct object itself.
155 | 
156 | ### void PHF::compact(struct phf *);
157 | 
158 | By default the displacement map is an array of uint32_t integers. This
159 | function selects the smallest type necessary to hold the largest
160 | displacement value and updates the internal state accordingly. For a loading
161 | factor of 80% (0.8) in the output hash space, and a displacement map loading
162 | factor of 4 (400%), the smallest primitive type will often be uint8_t.
163 | 
164 | ### phf_hash_t PHF::hash(struct phf *f, T k);
165 | 
166 | Returns an integer hash value, h, where 0 <= h < f->m. h will be unique for
167 | each unique key provided when generating the function. f->m will be larger
168 | than the number of unique keys and is based on the specified loading factor
169 | (alpha), rounded up to the nearest prime or nearest power of 2, depending on
170 | the mode of modular reduction selected. For example, for 100 keys and a loading
171 | factor of 80%, m will be 127: 100 is 80% of 125, and 127 is the closest prime
172 | greater than or equal to 125. With the nodiv option, m would be 128: 100 is
173 | 80% of 125, and 128 is the closest power of 2 greater than or equal to 125.
174 | 
175 | ## C ##
176 | 
177 | The C API is nearly identical to the C++ API, except the prefix is phf_
178 | instead of PHF::. phf_uniq, phf_init, and phf_hash are macros which use
179 | C11's _Generic or GCC's __builtin_types_compatible_p interfaces to overload
180 | the interfaces by key type. The explicit suffixes _uint32, _uint64, and
181 | _string may be used directly.
182 | 
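To tie the pieces together, here is a small end-to-end usage sketch of my own (not from the upstream documentation). It assumes a C11 compiler, so the `_Generic`-based `phf_init`/`phf_hash` macros resolve to the `_string` variants, and it assumes the program is linked against libphf:

```c
#include <stdio.h>
#include <string.h> /* size_t */
#include "phf.h"

int main(void)
{
	/* phf_string_t carries a pointer plus an explicit length */
	phf_string_t keys[] = {
		{ "apple", 5 }, { "banana", 6 }, { "cherry", 6 },
	};
	struct phf f;

	/* n=3 keys, lambda=4 keys per bucket, alpha=80%, seed=0, nodiv=false */
	if (phf_init(&f, keys, 3, 4, 80, 0, 0) != 0)
		return 1;

	phf_compact(&f); /* shrink the displacement map to the smallest type */

	for (size_t i = 0; i < 3; i++)
		printf("%s -> %" PHF_PRIuHASH "\n",
		       (char *)keys[i].p, phf_hash(&f, keys[i]));

	phf_destroy(&f);
	return 0;
}
```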
-------------------------------------------------------------------------------- /lib/phf/phf.h: --------------------------------------------------------------------------------
1 | /* ==========================================================================
2 |  * phf.h - Tiny perfect hash function library.
3 |  * --------------------------------------------------------------------------
4 |  * Copyright (c) 2014-2015 William Ahern
5 |  *
6 |  * Permission is hereby granted, free of charge, to any person obtaining a
7 |  * copy of this software and associated documentation files (the
8 |  * "Software"), to deal in the Software without restriction, including
9 |  * without limitation the rights to use, copy, modify, merge, publish,
10 |  * distribute, sublicense, and/or sell copies of the Software, and to permit
11 |  * persons to whom the Software is furnished to do so, subject to the
12 |  * following conditions:
13 |  *
14 |  * The above copyright notice and this permission notice shall be included
15 |  * in all copies or substantial portions of the Software.
16 |  *
17 |  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
18 |  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
19 |  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
20 |  * NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
21 |  * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
22 |  * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
23 |  * USE OR OTHER DEALINGS IN THE SOFTWARE.
24 |  * ==========================================================================
25 |  */
26 | #ifndef PHF_H
27 | #define PHF_H
28 | 
29 | #include <stddef.h>   /* size_t */
30 | #include <stdint.h>   /* UINT32_MAX uint32_t uint64_t */
31 | #include <stdbool.h>  /* bool */
32 | #include <inttypes.h> /* PRIu32 PRIx32 */
33 | 
34 | 
35 | /*
36 |  * C O M P I L E R  F E A T U R E S  &  D I A G N O S T I C S
37 |  *
38 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
39 | 
40 | #define PHF_GNUC_PREREQ(M, m) (__GNUC__ > (M) || (__GNUC__ == (M) && __GNUC_MINOR__ >= (m)))
41 | 
42 | #ifdef __clang__
43 | #define phf_has_extension(x) __has_extension(x)
44 | #define phf_has_attribute(x) __has_attribute(x)
45 | #else
46 | #define phf_has_extension(x) 0
47 | #define phf_has_attribute(x) 0
48 | #endif
49 | 
50 | #ifndef PHF_HAVE_NOEXCEPT
51 | #define PHF_HAVE_NOEXCEPT \
52 | 	(__cplusplus >= 201103L || \
53 | 	phf_has_extension(cxx_noexcept) || \
54 | 	PHF_GNUC_PREREQ(4, 6))
55 | #endif
56 | 
57 | #ifndef PHF_HAVE_GENERIC
58 | #define PHF_HAVE_GENERIC \
59 | 	(__STDC_VERSION__ >= 201112L || \
60 | 	phf_has_extension(c_generic_selections) || \
61 | 	PHF_GNUC_PREREQ(4, 9))
62 | #endif
63 | 
64 | #ifndef PHF_HAVE_BUILTIN_TYPES_COMPATIBLE_P
65 | #define PHF_HAVE_BUILTIN_TYPES_COMPATIBLE_P (defined __GNUC__)
66 | #endif
67 | 
68 | #ifndef PHF_HAVE_BUILTIN_CHOOSE_EXPR
69 | #define PHF_HAVE_BUILTIN_CHOOSE_EXPR (defined __GNUC__)
70 | #endif
71 | 
72 | #ifndef PHF_HAVE_ATTRIBUTE_VISIBILITY
73 | #define PHF_HAVE_ATTRIBUTE_VISIBILITY \
74 | 	(phf_has_attribute(visibility) || PHF_GNUC_PREREQ(4, 0))
75 | #endif
76 | 
77 | #ifndef PHF_HAVE_COMPUTED_GOTOS
78 | #define PHF_HAVE_COMPUTED_GOTOS (defined __GNUC__)
79 | #endif
80 | 
81 | #ifdef __clang__
82 | #pragma clang diagnostic push
83 | #if __cplusplus < 201103L
84 | #pragma clang diagnostic ignored "-Wc++11-extensions"
85 | #pragma clang diagnostic ignored "-Wvariadic-macros"
86 | #endif
87 | #elif PHF_GNUC_PREREQ(4, 6)
88 | #pragma GCC diagnostic push
89 | #if __cplusplus < 201103L
90 | #pragma GCC diagnostic ignored "-Wpedantic"
91 | #pragma GCC diagnostic ignored "-Wvariadic-macros"
92 | #endif
93 | #endif
94 | 
95 | 
96 | /*
97 |  * C / C + +  V I S I B I L I T Y
98 |  *
99 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
100 | 
101 | #ifndef PHF_PUBLIC
102 | #define PHF_PUBLIC
103 | #endif
104 | 
105 | #ifndef PHF_LOCAL
106 | #if PHF_HAVE_ATTRIBUTE_VISIBILITY
107 | #define PHF_LOCAL __attribute__((visibility("hidden")))
108 | #else
109 | #define PHF_LOCAL
110 | #endif
111 | #endif
112 | 
113 | 
114 | /*
115 |  * C / C + +  S H A R E D  T Y P E S
116 |  *
117 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
118 | 
119 | #define phf_error_t int /* for documentation purposes */
120 | 
121 | #define PHF_HASH_MAX UINT32_MAX
122 | #define PHF_PRIuHASH PRIu32
123 | #define PHF_PRIxHASH PRIx32
124 | 
125 | typedef uint32_t phf_hash_t;
126 | typedef uint32_t phf_seed_t;
127 | 
128 | typedef struct phf_string {
129 | 	void *p;
130 | 	size_t n;
131 | } phf_string_t;
132 | 
133 | struct phf {
134 | 	bool nodiv;
135 | 
136 | 	phf_seed_t seed;
137 | 
138 | 	size_t r; /* number of elements in g */
139 | 	size_t m; /* number of elements in perfect hash */
140 | 	uint32_t *g; /* displacement map indexed by g(k) % r */
141 | 
142 | 	size_t d_max; /* maximum displacement value in g */
143 | 
144 | 	enum {
145 | 		PHF_G_UINT8_MOD_R = 1,
146 | 		PHF_G_UINT8_BAND_R,
147 | 		PHF_G_UINT16_MOD_R,
148 | 		PHF_G_UINT16_BAND_R,
149 | 		PHF_G_UINT32_MOD_R,
150 | 		PHF_G_UINT32_BAND_R,
151 | 	} g_op;
152 | 
153 | 	const void *g_jmp;
154 | }; /* struct phf */
155 | 
156 | 
157 | /*
158 |  * C + +  I N T E R F A C E S
159 |  *
160 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
161 | #ifdef __cplusplus
162 | 
163 | #if !PHF_NO_LIBCXX
164 | #include <string> /* std::string */
165 | #endif
166 | 
167 | namespace PHF {
168 | 	template<typename key_t>
169 | 	PHF_PUBLIC size_t uniq(key_t[], const size_t);
170 | 
171 | 	template<typename key_t, bool nodiv>
172 | 	PHF_PUBLIC phf_error_t init(struct phf *, const key_t[], const size_t, const size_t, const size_t, const phf_seed_t);
173 | 
174 | 	PHF_PUBLIC void compact(struct phf *);
175 | 
176 | 	template<typename key_t>
177 | 	PHF_PUBLIC phf_hash_t hash(struct phf *, key_t);
178 | 
179 | 	PHF_PUBLIC void destroy(struct phf *);
180 | }
181 | 
182 | extern template size_t PHF::uniq<uint32_t>(uint32_t[], const size_t);
183 | extern template size_t PHF::uniq<uint64_t>(uint64_t[], const size_t);
184 | extern template size_t PHF::uniq<phf_string_t>(phf_string_t[], const size_t);
185 | #if !PHF_NO_LIBCXX
186 | extern template size_t PHF::uniq<std::string>(std::string[], const size_t);
187 | #endif
188 | 
189 | extern template phf_error_t PHF::init<uint32_t, true>(struct phf *, const uint32_t[], const size_t, const size_t, const size_t, const phf_seed_t);
190 | extern template phf_error_t PHF::init<uint64_t, true>(struct phf *, const uint64_t[], const size_t, const size_t, const size_t, const phf_seed_t);
191 | extern template phf_error_t PHF::init<phf_string_t, true>(struct phf *, const phf_string_t[], const size_t, const size_t, const size_t, const phf_seed_t);
192 | #if !PHF_NO_LIBCXX
193 | extern template phf_error_t PHF::init<std::string, true>(struct phf *, const std::string[], const size_t, const size_t, const size_t, const phf_seed_t);
194 | #endif
195 | 
196 | extern template phf_error_t PHF::init<uint32_t, false>(struct phf *, const uint32_t[], const size_t, const size_t, const size_t, const phf_seed_t);
197 | extern template phf_error_t PHF::init<uint64_t, false>(struct phf *, const uint64_t[], const size_t, const size_t, const size_t, const phf_seed_t);
198 | extern template phf_error_t PHF::init<phf_string_t, false>(struct phf *, const phf_string_t[], const size_t, const size_t, const size_t, const phf_seed_t);
199 | #if !PHF_NO_LIBCXX
200 | extern template phf_error_t PHF::init<std::string, false>(struct phf *, const std::string[], const size_t, const size_t, const size_t, const phf_seed_t);
201 | #endif
202 | 
203 | extern template phf_hash_t PHF::hash<uint32_t>(struct phf *, uint32_t);
204 | extern template phf_hash_t PHF::hash<uint64_t>(struct phf *, uint64_t);
205 | extern template phf_hash_t PHF::hash<phf_string_t>(struct phf *, phf_string_t);
206 | #if !PHF_NO_LIBCXX
207 | extern template phf_hash_t PHF::hash<std::string>(struct phf *, std::string);
208 | #endif
209 | 
210 | #endif /* __cplusplus */
211 | 
212 | 
213 | /*
214 |  * C 8 9  I N T E R F A C E S
215 |  *
216 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
217 | #ifdef __cplusplus
218 | extern "C" {
219 | #endif
220 | 
221 | PHF_PUBLIC size_t phf_uniq_uint32(uint32_t *, const size_t);
222 | PHF_PUBLIC size_t phf_uniq_uint64(uint64_t *, const size_t);
223 | PHF_PUBLIC size_t phf_uniq_string(phf_string_t *, const size_t);
224 | 
225 | PHF_PUBLIC phf_error_t phf_init_uint32(struct phf *, const uint32_t *, const size_t, const size_t, const size_t, const phf_seed_t, const bool nodiv);
226 | PHF_PUBLIC phf_error_t phf_init_uint64(struct phf *, const uint64_t *, const size_t, const size_t, const size_t, const phf_seed_t, const bool nodiv);
227 | PHF_PUBLIC phf_error_t phf_init_string(struct phf *, const phf_string_t *, const size_t, const size_t, const size_t, const phf_seed_t, const bool nodiv);
228 | 
229 | PHF_PUBLIC void phf_compact(struct phf *);
230 | 
231 | PHF_PUBLIC phf_hash_t phf_hash_uint32(struct phf *, const uint32_t);
232 | PHF_PUBLIC phf_hash_t phf_hash_uint64(struct phf *, const uint64_t);
233 | PHF_PUBLIC phf_hash_t phf_hash_string(struct phf *, const phf_string_t);
234 | 
235 | PHF_PUBLIC void phf_destroy(struct phf *);
236 | 
237 | #ifdef __cplusplus
238 | }
239 | #endif
240 | 
241 | 
242 | /*
243 |  * C 1 1 / G N U  I N T E R F A C E S
244 |  *
245 |  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
246 | #if PHF_HAVE_GENERIC
247 | 
248 | #define phf_uniq(k, n) _Generic(*(k), \
249 | 	uint32_t: phf_uniq_uint32, \
250 | 	uint64_t: phf_uniq_uint64, \
251 | 	phf_string_t: phf_uniq_string)((k), (n))
252 | 
253 | #define phf_init(f, k, ...) _Generic(*(k), \
254 | 	uint32_t: phf_init_uint32, \
255 | 	uint64_t: phf_init_uint64, \
256 | 	phf_string_t: phf_init_string)((f), (k), __VA_ARGS__)
257 | 
258 | #define phf_hash(f, k) _Generic((k), \
259 | 	uint32_t: phf_hash_uint32, \
260 | 	uint64_t: phf_hash_uint64, \
261 | 	phf_string_t: phf_hash_string)((f), (k))
262 | 
263 | #elif PHF_HAVE_BUILTIN_TYPES_COMPATIBLE_P && PHF_HAVE_BUILTIN_CHOOSE_EXPR
264 | 
265 | #define phf_choose(cond, a, b) __builtin_choose_expr(cond, a, b)
266 | #define phf_istype(E, T) __builtin_types_compatible_p(__typeof__(E), T)
267 | 
268 | #define phf_uniq(k, n) \
269 | 	phf_choose(phf_istype(*(k), uint32_t), phf_uniq_uint32((uint32_t *)(k), (n)), \
270 | 	phf_choose(phf_istype(*(k), uint64_t), phf_uniq_uint64((uint64_t *)(k), (n)), \
271 | 	phf_choose(phf_istype(*(k), phf_string_t), phf_uniq_string((phf_string_t *)(k), (n)), \
272 | 	(void)0)))
273 | 
274 | #define phf_init(f, k, ...) \
275 | 	phf_choose(phf_istype(*(k), uint32_t), phf_init_uint32((f), (const uint32_t *)(k), __VA_ARGS__), \
276 | 	phf_choose(phf_istype(*(k), uint64_t), phf_init_uint64((f), (const uint64_t *)(k), __VA_ARGS__), \
277 | 	phf_choose(phf_istype(*(k), phf_string_t), phf_init_string((f), (const phf_string_t *)(k), __VA_ARGS__), \
278 | 	(void)0)))
279 | 
280 | #define phf_hash(f, k) ((*(phf_hash_t (*)()) \
281 | 	phf_choose(phf_istype((k), uint32_t), &phf_hash_uint32, \
282 | 	phf_choose(phf_istype((k), uint64_t), &phf_hash_uint64, \
283 | 	phf_choose(phf_istype((k), phf_string_t), &phf_hash_string, \
284 | 	(void)0))))((f), (k)))
285 | 
286 | #endif
287 | 
288 | 
289 | #ifdef __clang__
290 | #pragma clang diagnostic pop
291 | #elif PHF_GNUC_PREREQ(4, 6)
292 | #pragma GCC diagnostic pop
293 | #endif
294 | 
295 | #endif /* PHF_H */
296 | 
-------------------------------------------------------------------------------- /lua_indent.pl: --------------------------------------------------------------------------------
1 | use strict;
2 | use warnings;
3 | 
4 | use constant INDENT => ' ';
5 | my($currIndent, $nextIndent, $prevLength) = (0, 0, 0);
6 | 
7 | while (<>) {
8 | 	chomp;
9 | 	s/^\s+|\s+$//g; # remove all spaces on both ends
10 | 	s/\s+/ /g; # replace all whitespaces inside the string with one space
11 | 
12 | 	my $orig = $_;
13 | 
14 | 	s/(['"])[^\1]*?\1//g; # remove all quoted fragments for proper bracket processing
15 | 	s/\s*--.+//; # remove all comments; this ignores long bracket style comments
16 | 
17 | 	# open a level; increase next indentation; don't change current one
18 | 	if (/^((local )?function|repeat|while)\b/ && !/\bend\s*[\),;]*$/
19 | 		|| /\b(then|do)$/ && !/^elseif\b/ # only open on 'then' if there is no 'elseif'
20 | 		|| /^if\b/ && /\bthen\b/ && !/\bend$/ # only open on 'if' if there is no 'end' at the end
21 | 		|| /\bfunction\s*\([^\)]*\)$/) {
22 | 		$nextIndent = $currIndent + 1;
23 | 	}
24 | 	# close the level; change both current and next indentation
25 | 	elsif (/^until\b/
26 | 		|| /^end\s*[\),;]*$/
27 | 		|| /^end\s*\)\s*\.\./ # this is a special case of 'end).."some string"'
28 | 		|| /^else(if)?\b/ && /\bend$/) {
29 | 		$nextIndent = $currIndent = $currIndent - 1;
30 | 	}
31 | 	# keep the level; decrease the current indentation; keep the next one
32 | 	elsif (/^else\b/
33 | 		|| /^elseif\b/) {
34 | 		($nextIndent, $currIndent) = ($currIndent, $currIndent-1);
35 | 	}
36 | 
37 | 	my $brackets = y/(// - y/)//; # capture unbalanced brackets
38 | 	my $curly = y/{// - y/}//; # capture unbalanced curly brackets
39 | 
40 | 	# close (curly) brackets if needed
41 | 	$currIndent += $curly if $curly < 0 && /^\}/;
42 | 	$currIndent += $brackets if $brackets < 0 && /^\)/;
43 | 
44 | 	warn "WARNING: negative indentation at line $.: $orig\n" if $currIndent < 0;
45 | 
46 | 	print((length($orig) && $orig =~ m/^[^:]/ ? 
(INDENT x $currIndent) : ''), $orig, "\n")
47 | 		if $prevLength > 0 || length($orig) > 0; # this is to collapse empty lines
48 | 
49 | 	$nextIndent += $brackets + $curly;
50 | 
51 | 	$currIndent = $nextIndent;
52 | 	$prevLength = length($orig);
53 | }
54 | 
55 | warn "WARNING: positive indentation at the end\n" if $nextIndent > 0;
-------------------------------------------------------------------------------- /rpm/tarantool-avro-schema.spec: --------------------------------------------------------------------------------
1 | Name: tarantool-avro-schema
2 | Version: 2.0.0
3 | Release: 1%{?dist}
4 | Summary: Apache Avro bindings for Tarantool
5 | Group: Applications/Databases
6 | License: BSD
7 | URL: https://github.com/tarantool/avro-schema
8 | Source0: https://github.com/tarantool/%{name}/archive/%{version}/%{name}-%{version}.tar.gz
9 | BuildRequires: cmake >= 2.8
10 | BuildRequires: gcc >= 4.5
11 | BuildRequires: tarantool-devel >= 1.6.8.0
12 | Requires: tarantool >= 1.6.8.0
13 | 
14 | %description
15 | This package provides Apache Avro schema tools for Tarantool.
16 | 
17 | %prep
18 | %setup -q -n %{name}-%{version}
19 | %if 0%{?fedora} >= 33
20 | %define mflags %{?_smp_mflags} -C "%{_vpath_builddir}"
21 | %else
22 | %define mflags %{?_smp_mflags}
23 | %endif
24 | 
25 | 
26 | %build
27 | %cmake . -DCMAKE_BUILD_TYPE=RelWithDebInfo
28 | make %{mflags}
29 | 
30 | %check
31 | make %{mflags} check
32 | 
33 | %install
34 | %if 0%{?fedora} >= 33 || 0%{?rhel} >= 8
35 | %cmake_install
36 | %else
37 | %make_install
38 | %endif
39 | 
40 | %files
41 | %{_libdir}/tarantool/avro_schema_rt_c.so
42 | %{_datarootdir}/tarantool/avro_schema/*.lua
43 | 
44 | %changelog
45 | * Wed Jul 13 2016 Nick Zavaritsky 2.0.0-1
46 | Full rewrite in Lua:
47 | - added support for Avro schema defaults;
48 | - support for Avro schema aliases;
49 | - great error messages;
50 | - runtime code generation makes transformations fast.
51 | * Wed Jul 13 2016 Nick Zavaritsky 1.0.1-1
52 | - Incremental update
53 | * Wed Mar 9 2016 Nick Zavaritsky 1.0.0-1
54 | - Initial version
-------------------------------------------------------------------------------- /runtime/misc.c: --------------------------------------------------------------------------------
1 | #include <stdint.h>
2 | #include <string.h>
3 | 
4 | int
5 | schema_rt_key_eq(const char *key, const char *str, size_t klen, size_t len)
6 | {
7 | 	return klen == 0 || klen != len ? -1 : memcmp(key, str, klen);
8 | }
9 | 
10 | uint32_t
11 | schema_rt_search8(const uint8_t *tab, uint32_t k, size_t n)
12 | #define SCHEMA_RT_SEARCH_BODY \
13 | 	uint32_t i = 0; \
14 | 	while (i != n - 1 && tab[i] != k) i++; \
15 | 	return i;
16 | { SCHEMA_RT_SEARCH_BODY }
17 | 
18 | uint32_t
19 | schema_rt_search16(const uint16_t *tab, uint32_t k, size_t n)
20 | { SCHEMA_RT_SEARCH_BODY }
21 | 
22 | uint32_t
23 | schema_rt_search32(const uint32_t *tab, uint32_t k, size_t n)
24 | { SCHEMA_RT_SEARCH_BODY }
25 | 
-------------------------------------------------------------------------------- /test/.ddt_cache: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/tarantool/avro-schema/49c20f7dd72700ab04e6de2aeb4c43310fee8964/test/.ddt_cache
-------------------------------------------------------------------------------- /test/README.md: --------------------------------------------------------------------------------
1 | # Testing
2 | 
3 | There are several kinds of tests:
4 | 
5 | 1. DDT tests;
6 | 2. API tests;
7 | 3. IL optimizer tests.
8 | 
9 | **So-called DDT**, or *data-driven tests*, cover the majority of features.
10 | Every test in this category performs the same sequence of steps:
11 | 
12 | * create a schema;
13 | * validate data using a schema object (optional);
14 | * compile the schema;
15 | * convert data using the compiled schema.
16 | 
17 | These tests are called *data-driven* because they are configured with *data*,
18 | ex:
19 | 
20 | ```lua
21 | t {
22 |     schema = '["int", "string", "double"]',
23 |     func = 'unflatten', input = '[3, 42]',
24 |     error = '1: Bad value: 3',
25 | }
26 | ```
27 | 
28 | **API tests.** While DDT tests cover the majority of *semantic* features, they all use
29 | virtually the same set of API calls. Auxiliary calls and incorrect API usage aren't covered.
30 | This is where *API tests* come into play.
31 | 
32 | **IL optimizer tests** ensure that code transformations in the IL optimizer are correct.
33 | 
34 | ## DDT Tests
35 | 
36 | ### Schema Creation
37 | 
38 | `invalid.lua` — all sorts of misspelled schemas. Valid schemas are implicitly covered by other tests.
39 | 
40 | ### Data Validation with a Schema Object
41 | 
42 | `validate.lua`
43 | 
44 | ### Schema Mapping (Versions)
45 | 
46 | `incompatible.lua`
47 | `incompatible_array.lua`
48 | `incompatible_enum.lua`
49 | `incompatible_fixed.lua`
50 | `incompatible_map.lua`
51 | `incompatible_record.lua`
52 | 
53 | Valid schema mappings are implicitly covered by other suites.
54 | For named types, the `aliases` feature is tested, including `downgrade` mode.
55 | 
56 | ### Generated Code: Basic Types
57 | 
58 | `array.lua`
59 | `boolean.lua`
60 | `bytes.lua`
61 | `double.lua`
62 | `fixed.lua`
63 | `float.lua`
64 | `int.lua`
65 | `long.lua`
66 | `string.lua`
67 | `map.lua`
68 | `null.lua`
69 | 
70 | Basic types in runtime and generated code, including type mismatches.
71 | 
72 | ### Generated Code: Type Promotions
73 | 
74 | `bytes_promo.lua`
75 | `float_promo.lua`
76 | `int_promo.lua`
77 | `long_promo.lua`
78 | `string_promo.lua`
79 | 
80 | Type promotions in runtime and generated code, including type mismatches.
81 | 
82 | ### Generated Code: Enum
83 | 
84 | `enum.lua`
85 | `enum_versions.lua`
86 | 
87 | Enums: type mismatches, on-the-fly conversion from one schema revision to another.
88 | 
89 | `enum_large.lua`
90 | 
91 | A ridiculously large enum.
92 | 
93 | ### Generated Code: Record
94 | 
95 | `record_array.lua`
96 | `record_hidden.lua`
97 | `record_large.lua`
98 | `record.lua`
99 | `record_nested.lua`
100 | `record_union.lua`
101 | `record_version.lua`
102 | `record_vlo.lua`
103 | 
104 | Records in generated code, flattening nested records and enums, `xflatten`,
105 | on-the-fly conversion from one schema revision to another. Hidden fields.
106 | A large record.
107 | 
108 | ### Generated Code: Union
109 | 
110 | `union.lua`
111 | `union_versions.lua`
112 | 
113 | ### Misc Tests
114 | 
115 | `recursive.lua` — recursive schemas.
116 | 
117 | `service_fields.lua`
118 | 
119 | ## DDT Tests: Implementation Details and Usage
120 | 
121 | Any file matching the `ddt_suite/*.lua` pattern is a test bundle.
122 | 
123 | Each file invokes `t()` several times, which is the testing function.
124 | The function receives a dictionary. 
It knows about the following keys:
125 | 
126 | * `schema`, `schema1`, `schema2` — schema(s) definition, a JSON string;
127 | * `create_error` — expected error message in schema create, a string;
128 | * `create_only` — don't run further steps;
129 | * `validate` — data to validate, a JSON string or a value as-is;
130 | * `validate_error` — expected validation failure message, a string;
131 | * `validate_only` — stop here;
132 | * `compile_downgrade`, `service_fields` — compilation options;
133 | * `compile_dump` — dump compilation artefacts (compiler troubleshooting);
134 | * `compile_error` — expected compilation error, a string;
135 | * `compile_only` — stop now;
136 | * `func` — a function to invoke, `"flatten"` / `"unflatten"` / `"xflatten"`;
137 | * `input`, `output` — one value or a list of values (for service fields);
138 | * `error` — expected conversion error, a string.
139 | 
140 | ### Test Names
141 | 
142 | Each test is automatically given a name after the containing bundle and the corresponding line that called `t()`, ex:
143 | 
144 | `record_array/40`
145 | 
146 | Sometimes, it is handy to perform the same test repeatedly in a loop with a varying parameter.
147 | With this use case in mind, we automatically include a `key=value` piece in the name for each global variable
148 | defined in a test bundle, ex:
149 | 
150 | `enum_large/i_1/36`
151 | 
152 | Translation: the test lives in `enum_large.lua`, line 36. This particular test instance was invoked with `i=1`.
153 | 
154 | ### Input and Output
155 | 
156 | Input and output are either a single value or a list of values.
157 | That single value, or the first value in a list, is an extended JSON string, converted to MsgPack
158 | before passing to `flatten` / `unflatten` / `xflatten` or comparing with the result.
159 | The second and further values are used as-is.
160 | 
161 | JSON extensions:
162 | * a leading `!` to encode using floats instead of doubles, ex: `!42.0`;
163 | * MongoDB-inspired `{"$binary": "DEADBEAF"}` for binary data.
164 | 
165 | The key order in the resulting MsgPack data is exactly the same as in the JSON string.
166 | `42` encodes as `int` while `42.0` encodes as `double`/`float`.
167 | 
168 | Stock JSON/MsgPack modules lack the features necessary to implement the encoding defined above.
169 | For this reason, the conversion is implemented with an external tool, `msgpack_helper.py` (Python 3).
170 | The results are cached in `.ddt_cache` (added to the repository) to improve performance
171 | and to make it possible to run tests in an environment without Python 3.
172 | 
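To make the two extensions concrete, here are test invocations patterned directly after `double.lua` and `bytes.lua` from the suite:

```lua
-- A leading "!" forces single-precision encoding of the input;
-- the "double" schema still produces a double on output.
t {
    schema = '"double"',
    func = 'flatten', input = '! 99.25', output = '[99.25]'
}

-- "$binary" carries hex-encoded binary data through as MsgPack BIN.
t {
    schema = '"bytes"',
    func = 'flatten', input = '{"$binary": "FFFF"}',
    output = '[{"$binary": "FFFF"}]'
}
```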
-------------------------------------------------------------------------------- /test/api_tests/evolution.lua: --------------------------------------------------------------------------------
1 | local schema = require('avro_schema')
2 | local tap = require('tap')
3 | local json = require('json')
4 | local msgpack = require('msgpack')
5 | 
6 | local test = tap.test('api-tests')
7 | 
8 | test:plan(17)
9 | 
10 | -- Schema evolution: extend a schema with a record field of type
11 | -- union or record with a default value.
12 | 
13 | local default_record_1 = json.decode([[
14 | {
15 |     "type": "record",
16 |     "name": "Frob",
17 |     "fields": [
18 |         { "name": "bar", "type": "string" }
19 |     ]
20 | }
21 | ]])
22 | local default_record_2 = json.decode([[
23 | {
24 |     "type": "record",
25 |     "name": "Frob",
26 |     "fields": [
27 |         { "name": "foo", "type":
28 |             { "type": "record", "name": "default_record", "fields":[
29 |                 {"name": "f1", "type": "int"},
30 |                 {"name": "f2", "type": "int"} ]},
31 |             "default": { "f1": 1, "f2": 2}},
32 |         { "name": "foo_nullable", "type": "default_record",
33 |             "default": { "f1": 1, "f2": 2}},
34 |         { "name": "bar", "type": "string" }
35 |     ]
36 | }
37 | ]])
38 | local ok, handle_1 = schema.create(default_record_1)
39 | local ok, handle_2 = schema.create(default_record_2)
40 | local ok, compiled = schema.compile({handle_1, handle_2})
41 | assert(ok, compiled)
42 | local ok, data = compiled.unflatten({"asd"})
43 | test:is_deeply(data, {foo={f1=1,f2=2}, foo_nullable={f1=1,f2=2},bar="asd"},
44 |     'evolution unflatten record')
45 | 
46 | local default_union_1 = json.decode([[
47 | {
48 |     "type": "record",
49 |     "name": "Frob",
50 |     "fields": [
51 |         { "name": "bar", "type": "string" }
52 |     ]
53 | }
54 | ]])
55 | 
56 | local default_union_2 = json.decode([[
57 | {
58 |     "type": "record",
59 |     "name": "Frob",
60 |     "fields": [
61 |         { "name": "foo", "type":
62 |             { "type": "record*", "name": "default_record", "fields":[
63 |                 {"name": "f1", "type": ["int", "null"]},
64 |                 {"name": "f2", "type": ["null", "int"]} ]},
65 |             "default": { "f1": {"int": 1}}},
66 |         { "name": "bar", "type": "string" }
67 |     ]
68 | }
69 | ]])
70 | 
71 | local ok, handle_1 = schema.create(default_union_1)
72 | local ok, handle_2 = schema.create(default_union_2)
73 | local ok, compiled = schema.compile({handle_1, handle_2})
74 | local ok, data = compiled.unflatten({"asd"})
75 | test:is_deeply(data,
76 |     json.decode([[{"foo":{"f2":null,"f1":{"int":1}},"bar":"asd"}]]),
77 |     'evolution: add default union && unflatten')
78 | 
79 | -- Add a nullable record.
80 | 
81 | local evolution_1 = json.decode([[
82 | {
83 |     "type": "record",
84 |     "name": "Frob",
85 |     "fields": [
86 |         { "name": "bar", "type": "string" }
87 |     ]
88 | }
89 | ]])
90 | local evolution_2 = json.decode([[
91 | {
92 |     "type": "record",
93 |     "name": "Frob",
94 |     "fields": [
95 |         { "name": "foo", "type":
96 |             { "type": "record*", "name": "default_record", "fields":[
97 |                 {"name": "f1", "type": "int"},
98 |                 {"name": "f2", "type": "int"} ]},
99 |             "default": { "f1": 1, "f2": 2}},
100 |         { "name": "bar", "type": "string" }
101 |     ]
102 | }
103 | ]])
104 | local ok, handle_1 = schema.create(evolution_1)
105 | local ok, handle_2 = schema.create(evolution_2)
106 | local ok, compiled = schema.compile({handle_1, handle_2})
107 | local ok, data = compiled.flatten({bar="asd"})
108 | test:is_deeply(data, {{1,2},"asd"},
109 |     'evolution: add nullable record && flatten')
110 | local ok, data = compiled.unflatten({"asd"})
111 | test:is_deeply(data, {foo={f1=1, f2=2}, bar="asd"},
112 |     'evolution: add nullable record && unflatten')
113 | 
114 | -- Record becomes nullable.
115 | 116 | local evolution_1 = json.decode([[ 117 | { 118 | "type": "record", 119 | "name": "Frob", 120 | "fields": [ 121 | { "name": "foo", "type": 122 | { "type": "record", "name": "default_record", "fields":[ 123 | {"name": "f1", "type": "int"}, 124 | {"name": "f2", "type": "int"} ]}, 125 | "default": { "f1": 1, "f2": 2}}, 126 | { "name": "bar", "type": "string" } 127 | ] 128 | } 129 | ]]) 130 | local evolution_2 = json.decode([[ 131 | { 132 | "type": "record", 133 | "name": "Frob", 134 | "fields": [ 135 | { "name": "foo", "type": 136 | { "type": "record*", "name": "default_record", "fields":[ 137 | {"name": "f1", "type": "int"}, 138 | {"name": "f2", "type": "int"} ]}, 139 | "default": { "f1": 1, "f2": 2}}, 140 | { "name": "bar", "type": "string" } 141 | ] 142 | } 143 | ]]) 144 | local ok, handle_1 = schema.create(evolution_1) 145 | local ok, handle_2 = schema.create(evolution_2) 146 | local ok, compiled = schema.compile({handle_1, handle_2}) 147 | local ok, data = compiled.unflatten({1,2,"asd"}) 148 | test:is_deeply(data, {foo={f1=1,f2=2},bar="asd"}, 149 | 'evolution: made record nullable && unflatten') 150 | local ok, data = compiled.flatten({foo={f1=1,f2=2}, bar="asd"}) 151 | test:is_deeply(data, {{1,2},"asd"}, 152 | 'evolution: made record nullable && flatten') 153 | 154 | -- Non record become nullable. 155 | 156 | local evolution_1 = json.decode([[ 157 | { 158 | "type": "record", 159 | "name": "X", 160 | "fields": [ 161 | { "type": "int", "name": "f1" }, 162 | { "type": {"type": "array", "items": "int"}, "name": "f2" }, 163 | { "type": {"type": "map", "values": "string"}, "name": "f3" } 164 | ] 165 | } 166 | ]]) 167 | local evolution_2 = json.decode([[ 168 | { 169 | "type": "record", 170 | "name": "X", 171 | "fields": [ 172 | { "type": "int*", "name": "f1" }, 173 | { "type": {"type": "array*", "items": "int*"}, "name": "f2" }, 174 | { "type": {"type": "map*", "values": "string*"}, "name": "f3" } 175 | ] 176 | } 177 | ]]) 178 | local ok, handle_1 = schema.create(evolution_1) 179 | local ok, handle_2 = schema.create(evolution_2) 180 | local ok, compiled = schema.compile({handle_1, handle_2}) 181 | local ok, data = compiled.unflatten({1,{2,3},{a="4"}}) 182 | test:is_deeply(data, {f1=1, f2={2, 3}, f3={a="4"}}, 183 | 'evolution: made non-record nullable && unflatten') 184 | local ok, data = compiled.flatten({f1=1, f2={2, 3}, f3={a="4"}}) 185 | test:is_deeply(data, {1,{2,3},{a="4"}}, 186 | 'evolution: made non-record nullable && flatten') 187 | 188 | -- nullable -> non-nullable. 189 | 190 | local evolution_to_nonnull = { 191 | type = "record", 192 | name = "X", 193 | fields = { 194 | -- Different fields would be inserted here. 
195 | } 196 | } 197 | 198 | local testcases = { 199 | { "int*", "int" }, 200 | { "string*", "string" }, 201 | { {type="array*", items = "int"}, {type="array", items = "int"} }, 202 | { {type="array", items = "int*"}, {type="array", items = "int"} }, 203 | { {type="map*", values = "int"}, {type="map", values = "int"} }, 204 | { {type="map", values = "int*"}, {type="map", values = "int"} }, 205 | { {type="fixed*", size = 4, name = "f2"}, 206 | {type="fixed", size = 4, name = "f2"} }, 207 | { {type="enum*", symbols = {"a", "b"}, name = "f2"}, 208 | {type="enum", symbols = {"a", "b"}, name = "f2"}}, 209 | { {type="record*", name = "Y", fields = { 210 | {name = "f1", type = "int"}}}, 211 | {type="record", name = "Y", fields = { 212 | {name = "f1", type = "int"}}} 213 | } 214 | } 215 | 216 | for _, testcase in pairs(testcases) do 217 | local evolution_1 = table.deepcopy(evolution_to_nonnull) 218 | evolution_1.fields[1] = {name = "f1", type = testcase[1]} 219 | local evolution_2 = table.deepcopy(evolution_to_nonnull) 220 | evolution_2.fields[1] = {name = "f1", type = testcase[2]} 221 | local typename = type(testcase[2]) == "string" and testcase[2] or 222 | testcase[2].type 223 | local ok, handle_1 = schema.create(evolution_1) 224 | local ok, handle_2 = schema.create(evolution_2) 225 | local ok, compiled = schema.compile({handle_1, handle_2}) 226 | assert(not ok, typename) 227 | test:like(compiled, "Types incompatible:", 228 | "nullable -> non-nullable " .. typename) 229 | end 230 | 231 | test:check() 232 | os.exit(test.planned == test.total and test.failed == 0 and 0 or -1) 233 | -------------------------------------------------------------------------------- /test/api_tests/export.lua: -------------------------------------------------------------------------------- 1 | local schema = require('avro_schema') 2 | local tap = require('tap') 3 | local json = require('json') 4 | local msgpack = require('msgpack') 5 | 6 | local test = tap.test('api-tests') 7 | 8 | test:plan(35) 9 | 10 | -- nested records, union, reference to earlier declared type 11 | local foobar_decl = { 12 | name = 'FooBar', 13 | type = 'record', 14 | fields = { 15 | { name = 'A', type = { 16 | name = 'nested', 17 | type = 'record', 18 | fields = { 19 | { name = 'X', type = 'double' }, 20 | { name = 'Y', type = 'double' } 21 | } 22 | }}, 23 | { name = 'B', type = 'nested' }, 24 | { name = 'C', type = {'null', 'int'}}, 25 | { name = 'D', type = 'string'} 26 | } 27 | } 28 | local _, foobar = schema.create(foobar_decl) 29 | test:is_deeply(schema.export(foobar), foobar_decl, 'export (FooBar)') 30 | 31 | for _, type in ipairs( 32 | { 33 | "int", "string", "null", "boolean", "long", 34 | "float", "double", "bytes" 35 | }) do 36 | res = {schema.create({type=type})} 37 | test:is_deeply(schema.export(res[2]), type, 'schema normalization '..type) 38 | end 39 | 40 | -- nullable scalar export 41 | for _, type in ipairs( 42 | { 43 | "int*", "string*", "null*", "boolean*", "long*", 44 | "float", "double*", "bytes*" 45 | }) do 46 | res = {schema.create({type=type})} 47 | test:is_deeply(schema.export(res[2]), type, 48 | 'nullable scalar normalization '..type) 49 | end 50 | 51 | -- fingerprint tests 52 | local fingerprint_testcases = { 53 | { 54 | schema = [[ 55 | { 56 | "name": "Pet", 57 | "type": "record", 58 | "fields": 59 | [{"name": "kind", "type": 60 | {"name": "Kind", "type": "enum", 61 | "symbols": ["CAT", "DOG"]}}, 62 | {"name": "name", "type": "string"} 63 | ] 64 | } 65 | ]], 66 | fingerprint = 
"42620f01b34833f1e70cf2a9567fc4d3b9cf8b74afba64af0e9dce9a148b1e90" 67 | }, 68 | { 69 | schema = [[{"type": "fixed", "name": "Id", "size": 4}]], 70 | fingerprint = "ecd9e5c6039fe40543f95176d664e1b9b56dddf1e8b1e3a6d87a6402b12e305d" 71 | }, 72 | { 73 | schema = [[ 74 | { 75 | "type": "record", 76 | "name": "HandshakeResponse", "namespace": "org.apache.avro.ipc", 77 | "fields": [ 78 | {"name": "match", 79 | "type": {"type": "enum", "name": "HandshakeMatch", 80 | "symbols": ["BOTH", "CLIENT", "NONE"]}}, 81 | {"name": "serverProtocol", "type": 82 | ["null", "string"]}, 83 | {"name": "serverHash", "type": 84 | ["null", {"type": "fixed", "name": "MD5", "size": 16}]}, 85 | {"name": "meta", "type": 86 | ["null", {"type": "map", "values": "bytes"}]} 87 | ] 88 | } 89 | ]], 90 | fingerprint = "a303cbbfe13958f880605d70c521a4b7be34d9265ac5a848f25916a67b11d889" 91 | }, 92 | { 93 | schema = [[ 94 | { 95 | "type": "record", 96 | "name": "HandshakeRequest", "namespace":"org.apache.avro.ipc", 97 | "fields": [ 98 | {"name": "clientHash", 99 | "type": {"type": "fixed", "name": "MD5", "size": 16}}, 100 | {"name": "clientProtocol", "type": ["null", "string"]}, 101 | {"name": "serverHash", "type": "MD5"}, 102 | {"name": "meta", "type": 103 | ["null", {"type": "map", "values": "bytes"}]} 104 | ] 105 | } 106 | ]], 107 | fingerprint = "2b2f7a9b22991fe0df9134cb6b5ff7355343e797aaea337e0150e20f3a35800e" 108 | }, 109 | } 110 | 111 | function string.tohex(str) 112 | return (str:gsub('.', function (c) 113 | return string.format('%02X', string.byte(c)) 114 | end)) 115 | end 116 | 117 | for i, testcase in ipairs(fingerprint_testcases) do 118 | local _, schema_handler = schema.create(json.decode(testcase.schema)) 119 | local fingerprint = schema.fingerprint(schema_handler, "sha256", 32) 120 | test:is(string.lower(string.tohex(fingerprint)), testcase.fingerprint, 121 | "Fingerprint testcase "..i) 122 | end 123 | 124 | local preserve_different_types_schema = { 125 | type = "record", 126 | name = "X", 127 | extra_field = "X", 128 | fields = { 129 | { 130 | name = "f1", 131 | type = "int", 132 | extra_field = "f1" 133 | }, 134 | { 135 | name = "f2", 136 | type = { 137 | type = "array", 138 | extra_field = "f2", 139 | items = "int" 140 | } 141 | }, 142 | { 143 | name = "f3", 144 | type = { 145 | type = "map", 146 | extra_field = "f3", 147 | values = "int" 148 | } 149 | }, 150 | { 151 | name = "f4", 152 | type = { 153 | type = "fixed", 154 | extra_field = "f4", 155 | size = 4, 156 | name = "f4" 157 | } 158 | }, 159 | } 160 | } 161 | 162 | local schema_preserve_fields_testcases = { 163 | { 164 | name = "1", 165 | schema = { 166 | type="int", 167 | extra_field="extra_field" 168 | }, 169 | options = {}, 170 | ast = "int" 171 | }, 172 | { 173 | name = "2", 174 | schema = { 175 | type="int", 176 | extra_field="extra_field" 177 | }, 178 | options = {preserve_in_ast={"extra_field"}}, 179 | ast = { 180 | type="int", 181 | extra_field="extra_field" 182 | } 183 | }, 184 | { 185 | name = "3-complex", 186 | schema = { 187 | type="int", 188 | extra_field={extra_field={"extra_field"}} 189 | }, 190 | options = {preserve_in_ast={"extra_field"}}, 191 | ast = { 192 | type="int", 193 | extra_field={extra_field={"extra_field"}} 194 | } 195 | }, 196 | { 197 | name = "4-different-types", 198 | schema = preserve_different_types_schema, 199 | options = {preserve_in_ast={"extra_field"}}, 200 | ast = preserve_different_types_schema, 201 | } 202 | } 203 | 204 | for _, testcase in ipairs(schema_preserve_fields_testcases) do 205 | res = 
{schema.create(testcase.schema, testcase.options)} 206 | test:is_deeply(schema.export(res[2]), testcase.ast, 207 | 'schema extra fields ' .. testcase.name) 208 | end 209 | 210 | test:is_deeply( 211 | {schema.create("int", { 212 | preserve_in_ast={}, 213 | preserve_in_fingerprint={"extra_field"}, 214 | })}, 215 | {false, "fingerprint should contain only fields from AST"}, 216 | 'preserve_in_fingerprint contains more fields than AST') 217 | 218 | local fingerprint 219 | res = {schema.create( 220 | { 221 | type = "record", 222 | name = "test", 223 | extra_field = "extra_field", 224 | fields = { 225 | { name = "bar", type = "null", default = msgpack.NULL, 226 | extra_field = "extra" }, 227 | { name = "foo", type = {"null", "int"}, 228 | default = msgpack.NULL }, 229 | } 230 | }, nil)} 231 | fingerprint = schema.fingerprint(res[2], "sha256", 32) 232 | test:is(string.lower(string.tohex(fingerprint)), 233 | "a64098ee437e9020923c6005db88f37a234ed60daae23b26e33d8ae1bf643356", 234 | "Fingerprint extra fields 1") 235 | 236 | res = {schema.create( 237 | { 238 | type = "record", 239 | name = "test", 240 | extra_field = "extra_field", 241 | fields = { 242 | { name = "bar", type = "null", default = msgpack.NULL, 243 | extra_field = "extra" }, 244 | { name = "foo", type = {"null", "int"}, 245 | default = msgpack.NULL }, 246 | } 247 | }, {preserve_in_ast={"extra_field"}, 248 | preserve_in_fingerprint={"extra_field"}})} 249 | fingerprint = schema.fingerprint(res[2], "sha256", 32) 250 | test:is(string.lower(string.tohex(fingerprint)), 251 | "ed1e8b275fe8c436839bdb2962f7595bbef6bc364dfdcd47d3d1334888350711", 252 | "Fingerprint extra fields 2") 253 | 254 | res = {schema.create( 255 | { 256 | type = "int", 257 | extra_field = "extra_field", 258 | }, {preserve_in_ast={"extra_field"}})} 259 | fingerprint = schema.fingerprint(res[2], "sha256", 32) 260 | test:is_deeply(schema.export(res[2]), 261 | {type = "int", extra_field = "extra_field"}, 262 | "Prevent primitive type collapse by extra field") 263 | 264 | -- avro_json is used for fingerprint 265 | fingerprint = require("avro_schema.fingerprint") 266 | test:is(fingerprint.avro_json({field1="1"}), "{}", "avro_json 1") 267 | test:is(fingerprint.avro_json({field1="1"}, {"field1"}), 268 | '{"field1":"1"}', "avro_json 2") 269 | test:is(fingerprint.avro_json({field2="1", field1="1"}, {"field2", "field1"}), 270 | '{"field1":"1","field2":"1"}', "avro_json 3 order") 271 | 272 | local nullable_orig = [[ { 273 | "name": "outer", "type": "record", "fields": 274 | [{ "name": "r1", "type": 275 | {"name": "tr1", "type": "record", "fields": 276 | [{"name": "v1", "type": "int"} , 277 | {"name": "v2", "type": "string*"} ] } }, 278 | { "name": "r2", "type": "tr1*"}, 279 | { "name": "dummy", "type": { 280 | "type": "array", "items": "int" }}, 281 | { "name": "r3", "type": { 282 | "name": "tr2", "type": "record*", "fields": [ 283 | {"name": "v1", "type": "string"} , 284 | {"name": "v2", "type": "int*"} ] } }, 285 | { "name": "r4", "type": "tr2" }] 286 | }]] 287 | nullable_orig = json.decode(nullable_orig) 288 | res = {schema.create(nullable_orig)} 289 | test:is(res[1], true, "Schema created successfully") 290 | res = schema.export(res[2]) 291 | test:is_deeply(res, nullable_orig, "Exported schema is valid.") 292 | 293 | -- check if nullable reference is not exported as a definition 294 | local nullable_reference = { 295 | name = "X", 296 | type = "record", 297 | fields = { 298 | { 299 | name = "first", 300 | type = { 301 | name = "first", 302 | type = "fixed", 303 | size = 16 304 | } 
305 |         },
306 |         {
307 |             name = "second",
308 |             type = "first*"
309 |         }
310 |     }
311 | }
312 | res = {schema.create(nullable_reference)}
313 | res = schema.export(res[2])
314 | test:is_deeply(res, nullable_reference,
315 |     "Export nullable reference")
316 | 
317 | test:check()
318 | os.exit(test.planned == test.total and test.failed == 0 and 0 or -1)
-------------------------------------------------------------------------------- /test/api_tests/reload.lua: --------------------------------------------------------------------------------
1 | local tap = require('tap')
2 | local test = tap.test('reload test')
3 | 
4 | test:plan(1)
5 | 
6 | test:test('reload test', function(test)
7 |     test:plan(1)
8 | 
9 |     -- Require the module first time.
10 |     require('avro_schema')
11 | 
12 |     -- Unload it.
13 |     package.loaded['avro_schema'] = nil
14 |     package.loaded['avro_schema.backend'] = nil
15 |     package.loaded['avro_schema.compiler'] = nil
16 |     package.loaded['avro_schema.fingerprint'] = nil
17 |     package.loaded['avro_schema.il'] = nil
18 |     package.loaded['avro_schema.runtime'] = nil
19 |     package.loaded['avro_schema.utils'] = nil
20 | 
21 |     -- Require it again.
22 |     local ok, err = pcall(require, 'avro_schema')
23 |     test:ok(ok, 'Successfully reloaded', {err = err})
24 | end)
25 | 
26 | os.exit(test:check() and 0 or 1)
-------------------------------------------------------------------------------- /test/buf_grow_test.lua: --------------------------------------------------------------------------------
1 | local msgpack = require('msgpack')
2 | local schema = require('avro_schema')
3 | local runtime = require('avro_schema.runtime')
4 | local tap = require('tap')
5 | 
6 | local test = tap.test('buf-grow')
7 | test:plan(1)
8 | 
9 | local log = {}
10 | local buf_grow = runtime.buf_grow
11 | 
12 | -- Hook buf_grow; this must happen *before* compile.
13 | runtime.buf_grow = function(r, min_capacity)
14 |     local cur_capacity = tonumber(r.ot_capacity)
15 |     if not log[1] then log[1] = cur_capacity end
16 |     table.insert(log, min_capacity - cur_capacity)
17 |     buf_grow(r, min_capacity)
18 |     r.ot_capacity = min_capacity -- normally it grows x1.5
19 | end
20 | 
21 | local _, s = schema.create({
22 |     type = 'array', items = {
23 |         name = 'FooBar', type = 'record', fields = {
24 |             {name = 'A', type = 'long'},
25 |             {name = 'B', type = 'long'},
26 |             {name = 'C', type = 'long'},
27 |             {name = 'D', type = {
28 |                 type = 'array', items = 'long'
29 |             }}
30 |         }
31 |     }
32 | })
33 | 
34 | if not _ then error(s) end
35 | 
36 | local _, m = schema.compile(s)
37 | if not _ then error(m) end
38 | 
39 | 
40 | local item = msgpack.encode({
41 |     A = 1, B = 2, C = 3, D = { 0, -1, -2, -3 }
42 | })
43 | 
44 | m.flatten_msgpack('\220\0\20' .. string.rep(item, 20))
45 | 
46 | -- Ensure that during the run the buffer size is properly checked and increased.
47 | -- The initial buffer capacity is 128.
48 | -- The capacity is increased by +5 to accommodate:
49 | --     array_header(FooBar), long(A), long(B), long(C), array_header(D)
50 | -- and then by +4 to accommodate the nested array content.
51 | test:is(table.concat(log, ' '), '128 5 4 5 4 5 4 5 4 5 4 5 4', '#1') 52 | 53 | test:check() 54 | --os.exit(test.planned == test.total and test.failed == 0 and 0 or -1) 55 | -------------------------------------------------------------------------------- /test/ddt_suite/array.lua: -------------------------------------------------------------------------------- 1 | local int_array = [[{ 2 | "type": "array", 3 | "items": "int" 4 | }]] 5 | 6 | local string_array = [[{ 7 | "type": "array", 8 | "items": "string" 9 | }]] 10 | 11 | local string_array_array = [[{ 12 | "type": "array", 13 | "items": { 14 | "type": "array", 15 | "items": "string" 16 | } 17 | }]] 18 | 19 | local string_array_nullable = [[{ 20 | "type": "array*", 21 | "items": "string" 22 | }]] 23 | 24 | local string_array_items_nullable = [[{ 25 | "type": "array", 26 | "items": "string*" 27 | }]] 28 | ----------------------------------------------------------------------- 29 | 30 | t { 31 | schema = int_array, 32 | func = 'flatten', 33 | input = '[]', output = '[[]]' 34 | } 35 | 36 | t { 37 | schema = int_array, 38 | func = 'flatten', 39 | input = '[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]', 40 | output = '[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]]' 41 | } 42 | 43 | t { 44 | schema = string_array, 45 | func = 'flatten', 46 | input = '[]', output = '[[]]' 47 | } 48 | 49 | t { 50 | schema = string_array, 51 | func = 'flatten', 52 | input = '["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]', 53 | output = '[["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]]' 54 | } 55 | 56 | t { 57 | schema = string_array_array, 58 | func = 'flatten', 59 | input = '[]', output = '[[]]' 60 | } 61 | 62 | t { 63 | schema = string_array_array, 64 | func = 'flatten', 65 | input = '[[], ["1"], ["2", "3"], ["4", "5", "6"], ["7"], ["8", "9", "10"]]', 66 | output = '[[[], ["1"], ["2", "3"], ["4", "5", "6"], ["7"], ["8", "9", "10"]]]' 67 | } 68 | 69 | ----------------------------------------------------------------------- 70 | 71 | t { 72 | error = 'Expecting ARRAY, encountered NIL', 73 | schema = int_array, 74 | func = 'flatten', input = 'null' 75 | } 76 | 77 | t { 78 | error = 'Expecting ARRAY, encountered FALSE', 79 | schema = int_array, 80 | func = 'flatten', input = 'false' 81 | } 82 | 83 | t { 84 | error = 'Expecting ARRAY, encountered TRUE', 85 | schema = int_array, 86 | func = 'flatten', input = 'true' 87 | } 88 | 89 | t { 90 | error = 'Expecting ARRAY, encountered LONG', 91 | schema = int_array, 92 | func = 'flatten', input = '42' 93 | } 94 | 95 | t { 96 | error = 'Expecting ARRAY, encountered FLOAT', 97 | schema = int_array, 98 | func = 'flatten', input = '! 
42.0' 99 | } 100 | 101 | t { 102 | error = 'Expecting ARRAY, encountered DOUBLE', 103 | schema = int_array, 104 | func = 'flatten', input = '42.0' 105 | } 106 | 107 | t { 108 | error = 'Expecting ARRAY, encountered STR', 109 | schema = int_array, 110 | func = 'flatten', input = '"Hello, array!"' 111 | } 112 | 113 | t { 114 | error = 'Expecting ARRAY, encountered BIN', 115 | schema = int_array, 116 | func = 'flatten', input = '{"$binary": "FFFF0055"}' 117 | } 118 | 119 | t { 120 | error = 'Expecting ARRAY, encountered MAP', 121 | schema = int_array, 122 | func = 'flatten', input = '{"key": 42}' 123 | } 124 | 125 | t { 126 | error = '1: Expecting INT, encountered DOUBLE', 127 | schema = int_array, 128 | func = 'flatten', input = '[42.0]' 129 | } 130 | 131 | t { 132 | error = '5: Expecting INT, encountered DOUBLE', 133 | schema = int_array, 134 | func = 'flatten', input = '[1, 2, 3, 4, 5.1]' 135 | } 136 | 137 | t { 138 | error = '1: Expecting STR, encountered LONG', 139 | schema = string_array, 140 | func = 'flatten', input = '[42]' 141 | } 142 | 143 | t { 144 | error = '5: Expecting STR, encountered LONG', 145 | schema = string_array, 146 | func = 'flatten', input = '["1", "2", "3", "4", 5]' 147 | } 148 | 149 | 150 | t { 151 | error = '1: Expecting ARRAY, encountered LONG', 152 | schema = string_array_array, 153 | func = 'flatten', input = '[1]' 154 | } 155 | 156 | t { 157 | error = '1/1: Expecting STR, encountered LONG', 158 | schema = string_array_array, 159 | func = 'flatten', input = '[[1]]' 160 | } 161 | 162 | t { 163 | error = '5: Expecting ARRAY, encountered LONG', 164 | schema = string_array_array, 165 | func = 'flatten', input = '[[],[],[],[],1]' 166 | } 167 | 168 | t { 169 | error = '5/1: Expecting STR, encountered LONG', 170 | schema = string_array_array, 171 | func = 'flatten', input = '[[],[],[],[],[1]]' 172 | } 173 | 174 | 175 | t { 176 | schema = string_array_nullable, 177 | func = 'flatten', 178 | input = '["hello", "world"]', output = '[["hello", "world"]]' 179 | } 180 | 181 | t { 182 | schema = string_array_nullable, 183 | func = 'flatten', 184 | input = 'null', output = '[null]' 185 | } 186 | 187 | t { 188 | schema = string_array_nullable, 189 | func = 'unflatten', 190 | input = '[["hello", "world"]]', output = '["hello", "world"]' 191 | } 192 | 193 | t { 194 | schema = string_array_nullable, 195 | func = 'unflatten', 196 | input = '[null]', output = 'null' 197 | } 198 | 199 | t { 200 | schema = string_array_items_nullable, 201 | func = 'flatten', 202 | input = '[null, null]', output = '[[null, null]]' 203 | } 204 | 205 | t { 206 | schema = string_array_items_nullable, 207 | func = 'unflatten', 208 | input = '[[null, null]]', output = '[null, null]' 209 | } 210 | 211 | t { 212 | schema = string_array_items_nullable, 213 | func = 'flatten', 214 | input = '[null, "hello"]', output = '[[null, "hello"]]' 215 | } 216 | 217 | t { 218 | schema = string_array_items_nullable, 219 | func = 'unflatten', 220 | input = '[[null, "hello"]]', output = '[null, "hello"]' 221 | } 222 | 223 | local array_complex_1 = [[ { 224 | "type": "array", 225 | "items": { 226 | "type": "record*", 227 | "name": "X", 228 | "fields": [ 229 | {"name":"f1", "type":"string*"}, 230 | {"name":"f2", "type":"string*"}, 231 | {"name":"f3", "type":"string*"} 232 | ] 233 | } 234 | } 235 | ]] 236 | 237 | t { 238 | schema = array_complex_1, 239 | func = "flatten", 240 | input = [[ [ 241 | {"f1":"1"}, 242 | {"f2":"2"}, 243 | null, 244 | {"f3":"3"}] ]], 245 | output = [=[ [[["1",null,null],[null,"2",null], null, 246 | 
[null,null,"3"]]] ]=] 247 | } 248 | 249 | t { 250 | schema = array_complex_1, 251 | func = "unflatten", 252 | output = [[ 253 | [{"f1": "1", "f2": null, "f3": null}, {"f1": null, "f2": "2", "f3": null}, 254 | null, {"f1": null, "f2": null, "f3": "3"}] ]], 255 | input = [=[ [[["1",null,null],[null,"2",null], null, 256 | [null,null,"3"]]] ]=] 257 | } 258 | 259 | local array_complex_2 = [[ { 260 | "type": "array", 261 | "items": { 262 | "type": "record", 263 | "name": "X", 264 | "fields": [ 265 | {"name":"f1", "type":"string*"}, 266 | {"name":"f2", "type":"string*"}, 267 | {"name":"f3", "type":"string*"} 268 | ] 269 | } 270 | } 271 | ]] 272 | 273 | t { 274 | schema = array_complex_2, 275 | func = "flatten", 276 | input = [[ [ 277 | {"f1":"1"}, 278 | {"f2":"2"}, 279 | {"f3":"3"}] ]], 280 | output = [=[ [[["1",null,null],[null,"2",null],[null,null,"3"]]] ]=] 281 | } 282 | 283 | t { 284 | schema = array_complex_2, 285 | func = "unflatten", 286 | output = [[ 287 | [{"f1": "1", "f2": null, "f3": null}, 288 | {"f1": null, "f2": "2", "f3": null}, 289 | {"f1": null, "f2": null, "f3": "3"}] ]], 290 | input = [=[ [[["1",null,null],[null,"2",null],[null,null,"3"]]] ]=] 291 | } 292 | 293 | local array_complex_3 = [[ { 294 | "type": "array", 295 | "items": { 296 | "type": "map*", 297 | "name": "X", 298 | "values": "string" 299 | } 300 | } 301 | ]] 302 | 303 | t { 304 | schema = array_complex_3, 305 | func = "flatten", 306 | input = [[ [ 307 | {"f1":"1", 308 | "f2":"2"}, 309 | null, 310 | {"f3":"3"}] ]], 311 | output = [=[ [[{"f1": "1", "f2": "2"}, null, {"f3": "3"}]] ]=] 312 | } 313 | 314 | t { 315 | schema = array_complex_3, 316 | func = "unflatten", 317 | output = [=[ [ 318 | {"f1":"1", 319 | "f2":"2"}, 320 | null, 321 | {"f3":"3"}] ]=], 322 | input = [=[ [[{"f1": "1", "f2": "2"}, null, {"f3": "3"}]] ]=] 323 | } 324 | 325 | local array_complex_4 = [[ { 326 | "type": "array", 327 | "items": [ 328 | "null", 329 | "int", 330 | { 331 | "type": "record*", 332 | "name": "X", 333 | "fields":[ 334 | {"name": "f1", "type":"string*"}, 335 | {"name": "f2", "type":"string*"} 336 | ] 337 | }]} ]] 338 | 339 | t { 340 | schema = array_complex_4, 341 | func = "flatten", 342 | input = [[ [ 343 | {"X":{"f1":"1", "f2":"2"}}, 344 | {"X":null}, 345 | null, 346 | {"int":7}] ]], 347 | output = [=[ [[[2, ["1", "2"]], [2, null], [0, null], [1, 7]]] ]=] 348 | } 349 | -------------------------------------------------------------------------------- /test/ddt_suite/boolean.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema = '"boolean"', 3 | func = 'flatten', input = 'false', output = '[false]' 4 | } 5 | 6 | t { 7 | schema = '"boolean"', 8 | func = 'flatten', input = 'true', output = '[true]' 9 | } 10 | 11 | t { 12 | error = 'Expecting BOOL, encountered NIL', 13 | schema = '"boolean"', 14 | func = 'flatten', input = 'null' 15 | } 16 | 17 | t { 18 | error = 'Expecting BOOL, encountered LONG', 19 | schema = '"boolean"', 20 | func = 'flatten', input = '42' 21 | } 22 | 23 | -- ! is a hack to enable single-precision floating point 24 | t { 25 | error = 'Expecting BOOL, encountered FLOAT', 26 | schema = '"boolean"', 27 | func = 'flatten', input = '! 
42.0' 28 | } 29 | 30 | t { 31 | error = 'Expecting BOOL, encountered DOUBLE', 32 | schema = '"boolean"', 33 | func = 'flatten', input = '42.0' 34 | } 35 | 36 | t { 37 | error = 'Expecting BOOL, encountered STR', 38 | schema = '"boolean"', 39 | func = 'flatten', input = '"Hello, world!"' 40 | } 41 | 42 | t { 43 | error = 'Expecting BOOL, encountered BIN', 44 | schema = '"boolean"', 45 | func = 'flatten', input = '{"$binary": "DEADBEEF"}' 46 | } 47 | 48 | t { 49 | error = 'Expecting BOOL, encountered ARRAY', 50 | schema = '"boolean"', 51 | func = 'flatten', input = '[42]' 52 | } 53 | 54 | t { 55 | error = 'Expecting BOOL, encountered MAP', 56 | schema = '"boolean"', 57 | func = 'flatten', input = '{"key": 42}' 58 | } 59 | 60 | t { 61 | schema = '"boolean*"', 62 | func = 'flatten', input = 'true', output = '[true]' 63 | } 64 | 65 | t { 66 | schema = '"boolean*"', 67 | func = 'flatten', input = 'null', output = '[null]' 68 | } 69 | 70 | t { 71 | schema = '"boolean*"', 72 | func = 'unflatten', input = '[false]', output = 'false' 73 | } 74 | 75 | t { 76 | schema = '"boolean*"', 77 | func = 'unflatten', input = '[null]', output = 'null' 78 | } 79 | -------------------------------------------------------------------------------- /test/ddt_suite/bug1.lua: -------------------------------------------------------------------------------- 1 | local default_false = [[ 2 | { 3 | "name": "subscriber_contract_info", 4 | "type": "record", 5 | "fields": [ 6 | { "name": "msisdn", "type": "string"}, 7 | { "name": "register_ind", "type": "boolean", "default": false} 8 | ] 9 | }]] 10 | 11 | t { 12 | schema = default_false, 13 | func = 'flatten', input = '{ "msisdn": "79099421523"}', output = '["79099421523", false]' 14 | } 15 | -------------------------------------------------------------------------------- /test/ddt_suite/bytes.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema = '"bytes"', 3 | func = 'flatten', input = '{"$binary": "FFFF"}', 4 | output = '[{"$binary": "FFFF"}]' 5 | } 6 | 7 | t { 8 | schema = '"bytes"', 9 | func = 'flatten', input = '{"$binary": ""}', 10 | output = '[{"$binary": ""}]' 11 | } 12 | 13 | t { 14 | schema = '"bytes"', 15 | func = 'flatten', 16 | input = '{"$binary": "CAFEBABE00000000DEAD0000BEEF00FF010203040506070809559A"}', 17 | output = '[{"$binary": "CAFEBABE00000000DEAD0000BEEF00FF010203040506070809559A"}]' 18 | } 19 | 20 | -- validation errors 21 | t { 22 | error = 'Expecting BIN, encountered NIL', 23 | schema = '"bytes"', 24 | func = 'flatten', input = 'null' 25 | } 26 | 27 | t { 28 | error = 'Expecting BIN, encountered FALSE', 29 | schema = '"bytes"', 30 | func = 'flatten', input = 'false' 31 | } 32 | 33 | t { 34 | error = 'Expecting BIN, encountered TRUE', 35 | schema = '"bytes"', 36 | func = 'flatten', input = 'true' 37 | } 38 | 39 | t { 40 | error = 'Expecting BIN, encountered LONG', 41 | schema = '"bytes"', 42 | func = 'flatten', input = '42' 43 | } 44 | 45 | -- ! is a hack to enable single-precision floating point 46 | t { 47 | error = 'Expecting BIN, encountered FLOAT', 48 | schema = '"bytes"', 49 | func = 'flatten', input = '! 
1.0' 50 | } 51 | 52 | t { 53 | error = 'Expecting BIN, encountered DOUBLE', 54 | schema = '"bytes"', 55 | func = 'flatten', input = '1.0' 56 | } 57 | 58 | t { 59 | error = 'Expecting BIN, encountered STR', 60 | schema = '"bytes"', 61 | func = 'flatten', input = '"Loads of oranges!"' 62 | } 63 | 64 | t { 65 | error = 'Expecting BIN, encountered ARRAY', 66 | schema = '"bytes"', 67 | func = 'flatten', input = '[]' 68 | } 69 | 70 | t { 71 | error = 'Expecting BIN, encountered MAP', 72 | schema = '"bytes"', 73 | func = 'flatten', input = '{}' 74 | } 75 | -------------------------------------------------------------------------------- /test/ddt_suite/bytes_promo.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '"bytes"', schema2 = '"string"', 3 | func = 'flatten', input = '{"$binary": "48656c6c6f2c20776f726c6421"}', 4 | output = '["Hello, world!"]' 5 | } 6 | 7 | t { 8 | error = "Expecting BIN, encountered STR", 9 | schema1 = '"bytes"', schema2 = '"string"', 10 | func = 'flatten', input = '"Hello, world!"' 11 | } 12 | 13 | t { 14 | error = "Expecting BIN, encountered NIL", 15 | schema1 = '"bytes"', schema2 = '"string"', 16 | func = 'flatten', input = 'null' 17 | } 18 | -------------------------------------------------------------------------------- /test/ddt_suite/double.lua: -------------------------------------------------------------------------------- 1 | -- ! is a hack to encode msgpack using single precision floats 2 | t { 3 | schema = '"double"', 4 | func = 'flatten', input = '99.25', output = '[99.25]' 5 | } 6 | 7 | t { 8 | schema = '"double"', 9 | func = 'flatten', input = '! 99.25', output = '[99.25]' 10 | } 11 | 12 | -- auto long->double conversion 13 | t { 14 | schema = '"double"', 15 | func = 'flatten', input = '42', output = '[42.0]' 16 | } 17 | 18 | t { 19 | schema = '"double"', 20 | func = 'flatten', input = '-9000000', output = '[-9000000.0]' 21 | } 22 | 23 | -- validation errors 24 | t { 25 | error = 'Expecting DOUBLE, encountered NIL', 26 | schema = '"double"', 27 | func = 'flatten', input = 'null' 28 | } 29 | 30 | t { 31 | error = 'Expecting DOUBLE, encountered FALSE', 32 | schema = '"double"', 33 | func = 'flatten', input = 'false' 34 | } 35 | 36 | t { 37 | error = 'Expecting DOUBLE, encountered TRUE', 38 | schema = '"double"', 39 | func = 'flatten', input = 'true' 40 | } 41 | 42 | t { 43 | error = 'Expecting DOUBLE, encountered STR', 44 | schema = '"double"', 45 | func = 'flatten', input = '"Hello, world!"' 46 | } 47 | 48 | t { 49 | error = 'Expecting DOUBLE, encountered BIN', 50 | schema = '"double"', 51 | func = 'flatten', input = '{"$binary": "CAFEBABE"}' 52 | } 53 | 54 | t { 55 | error = 'Expecting DOUBLE, encountered ARRAY', 56 | schema = '"double"', 57 | func = 'flatten', input = '[]' 58 | } 59 | 60 | t { 61 | error = 'Expecting DOUBLE, encountered MAP', 62 | schema = '"double"', 63 | func = 'flatten', input = '{}' 64 | } 65 | 66 | t { 67 | schema = '"double*"', 68 | func = 'flatten', input = '42', output = '[42.0]' 69 | } 70 | 71 | t { 72 | schema = '"double*"', 73 | func = 'flatten', input = 'null', output = '[null]' 74 | } 75 | 76 | t { 77 | schema = '"double*"', 78 | func = 'unflatten', input = '[42]', output = '42.0' 79 | } 80 | 81 | t { 82 | schema = '"double*"', 83 | func = 'unflatten', input = '[null]', output = 'null' 84 | } 85 | -------------------------------------------------------------------------------- /test/ddt_suite/enum.lua: 
-------------------------------------------------------------------------------- 1 | local card_face = [[ 2 | {"name": "card_face", "type": "enum", "symbols": 3 | ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"]} 4 | ]] 5 | 6 | local card_face_nullable = [[ 7 | {"name": "card_face_nullable", "type": "enum*", "symbols": 8 | ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"]} 9 | ]] 10 | 11 | t { 12 | schema = card_face, 13 | func = 'flatten', 14 | input = '"SPADES"', output = '[0]' 15 | } 16 | 17 | t { 18 | schema = card_face, 19 | func = 'unflatten', 20 | input = '[0]', output = '"SPADES"' 21 | } 22 | 23 | t { 24 | schema = card_face, 25 | func = 'flatten', 26 | input = '"HEARTS"', output = '[1]' 27 | } 28 | 29 | t { 30 | schema = card_face, 31 | func = 'unflatten', 32 | input = '[1]', output = '"HEARTS"' 33 | } 34 | t { 35 | schema = card_face, 36 | func = 'flatten', 37 | input = '"DIAMONDS"', output = '[2]' 38 | } 39 | 40 | t { 41 | schema = card_face, 42 | func = 'unflatten', 43 | input = '[2]', output = '"DIAMONDS"' 44 | } 45 | 46 | t { 47 | schema = card_face, 48 | func = 'flatten', 49 | input = '"CLUBS"', output = '[3]' 50 | } 51 | 52 | t { 53 | schema = card_face, 54 | func = 'unflatten', 55 | input = '[3]', output = '"CLUBS"' 56 | } 57 | 58 | -- validation errors 59 | t { 60 | error = 'Expecting STR, encountered LONG', 61 | schema = card_face, 62 | func = 'flatten', input = '42' 63 | } 64 | 65 | t { 66 | error = 'Bad value: "Jizz"', 67 | schema = card_face, 68 | func = 'flatten', input = '"Jizz"' 69 | } 70 | 71 | t { 72 | error = '1: Expecting INT, encountered DOUBLE', 73 | schema = card_face, 74 | func = 'unflatten', input = '[3.14]' 75 | } 76 | 77 | t { 78 | error = '1: Bad value: 4', 79 | schema = card_face, 80 | func = 'unflatten', input = '[4]' 81 | } 82 | 83 | t { 84 | error = '1: Bad value: 1000', 85 | schema = card_face, 86 | func = 'unflatten', input = '[1000]' 87 | } 88 | 89 | t { 90 | error = '1: Bad value: -1', 91 | schema = card_face, 92 | func = 'unflatten', input = '[-1]' 93 | } 94 | 95 | t { 96 | schema = card_face_nullable, 97 | func = 'flatten', 98 | input = '"HEARTS"', output = '[1]' 99 | } 100 | 101 | t { 102 | schema = card_face_nullable, 103 | func = 'flatten', 104 | input = 'null', output = '[null]' 105 | } 106 | -------------------------------------------------------------------------------- /test/ddt_suite/enum_large.lua: -------------------------------------------------------------------------------- 1 | local large = [[{ 2 | "name": "large", "type": "enum", "symbols": [ 3 | "january", "february", "march", "april", 4 | "may", "june", "july", "august", 5 | "september", "october", "november", "december", 6 | "_13", "_14", "_15", "_16", "_17", "_18", "_19", "_20", 7 | "_21", "_22", "_23", "_24", "_25", "_26", "_27", "_28", "_29", "_30", 8 | "_31", "_32", "_33", "_34", "_35", "_36", "_37", "_38", "_39", "_40", 9 | "_41", "_42", "_43", "_44", "_45", "_46", "_47", "_48", "_49", "_50", 10 | "_51", "_52", "_53", "_54", "_55", "_56", "_57", "_58", "_59", "_60", 11 | "_61", "_62", "_63", "_64", "_65", "_66", "_67", "_68", "_69", "_70", 12 | "_71", "_72", "_73", "_74", "_75", "_76", "_77", "_78", "_79", "_80", 13 | "_81", "_82", "_83", "_84", "_85", "_86", "_87", "_88", "_89", "_90", 14 | "_91", "_92", "_93", "_94", "_95", "_96", "_97", "_98", "_99", "_100" 15 | ] 16 | }]] 17 | 18 | local symbols = { 19 | "january", "february", "march", "april", 20 | "may", "june", "july", "august", 21 | "september", "october", "november", "december", 22 | "_13", "_14", "_15", "_16", "_17", "_18", 
"_19", "_20", 23 | "_21", "_22", "_23", "_24", "_25", "_26", "_27", "_28", "_29", "_30", 24 | "_31", "_32", "_33", "_34", "_35", "_36", "_37", "_38", "_39", "_40", 25 | "_41", "_42", "_43", "_44", "_45", "_46", "_47", "_48", "_49", "_50", 26 | "_51", "_52", "_53", "_54", "_55", "_56", "_57", "_58", "_59", "_60", 27 | "_61", "_62", "_63", "_64", "_65", "_66", "_67", "_68", "_69", "_70", 28 | "_71", "_72", "_73", "_74", "_75", "_76", "_77", "_78", "_79", "_80", 29 | "_81", "_82", "_83", "_84", "_85", "_86", "_87", "_88", "_89", "_90", 30 | "_91", "_92", "_93", "_94", "_95", "_96", "_97", "_98", "_99", "_100" 31 | } 32 | 33 | for i = 1,#symbols do 34 | _G["i"] = i 35 | 36 | t { 37 | schema = large, 38 | func = "flatten", input = '"'..symbols[i]..'"', output = '['..(i-1)..']' 39 | } 40 | 41 | t { 42 | schema = large, 43 | func = "unflatten", output = '"'..symbols[i]..'"', input = '['..(i-1)..']' 44 | } 45 | end 46 | -------------------------------------------------------------------------------- /test/ddt_suite/enum_versions.lua: -------------------------------------------------------------------------------- 1 | local vehicle_v1 = [[{ 2 | "name": "vehicle", "type": "enum", "symbols": [ 3 | "CAR", "BUS", "TRICYCLE", "TRUCK" 4 | ] 5 | }]] 6 | 7 | -- TRICYCLE removed, TRUCK changes ID, SCOOTER added 8 | local vehicle_v2 = [[{ 9 | "name": "vehicle", "type": "enum", "symbols": [ 10 | "CAR", "BUS", "TRUCK", "SCOOTER" 11 | ] 12 | }]] 13 | 14 | ----------------------------------------------------------------------- 15 | 16 | t { 17 | schema1 = vehicle_v1, schema2 = vehicle_v2, 18 | func = 'flatten', input = '"CAR"', output = '[0]' 19 | } 20 | 21 | t { 22 | schema1 = vehicle_v2, schema2 = vehicle_v1, 23 | func = 'flatten', input = '"CAR"', output = '[0]' 24 | } 25 | 26 | t { 27 | schema1 = vehicle_v1, schema2 = vehicle_v2, 28 | func = 'unflatten', input = '[0]', output = '"CAR"' 29 | } 30 | 31 | t { 32 | schema1 = vehicle_v2, schema2 = vehicle_v1, 33 | func = 'unflatten', input = '[0]', output = '"CAR"' 34 | } 35 | 36 | ----------------------------------------------------------------------- 37 | 38 | t { 39 | schema1 = vehicle_v1, schema2 = vehicle_v2, 40 | func = 'flatten', input = '"BUS"', output = '[1]' 41 | } 42 | 43 | t { 44 | schema1 = vehicle_v2, schema2 = vehicle_v1, 45 | func = 'flatten', input = '"BUS"', output = '[1]' 46 | } 47 | 48 | t { 49 | schema1 = vehicle_v1, schema2 = vehicle_v2, 50 | func = 'unflatten', input = '[1]', output = '"BUS"' 51 | } 52 | 53 | t { 54 | schema1 = vehicle_v2, schema2 = vehicle_v1, 55 | func = 'unflatten', input = '[1]', output = '"BUS"' 56 | } 57 | 58 | ----------------------------------------------------------------------- 59 | 60 | t { 61 | error = 'Bad value: "TRICYCLE" (schema versioning)', 62 | schema1 = vehicle_v1, schema2 = vehicle_v2, 63 | func = 'flatten', input = '"TRICYCLE"' 64 | } 65 | 66 | t { 67 | error = 'Bad value: "TRICYCLE"', 68 | schema1 = vehicle_v2, schema2 = vehicle_v1, 69 | func = 'flatten', input = '"TRICYCLE"' 70 | } 71 | 72 | t { 73 | error = '1: Bad value: 2 (schema versioning)', 74 | schema1 = vehicle_v1, schema2 = vehicle_v2, 75 | func = 'unflatten', input = '[2]' 76 | } 77 | 78 | ----------------------------------------------------------------------- 79 | 80 | t { 81 | schema1 = vehicle_v1, schema2 = vehicle_v2, 82 | func = 'flatten', input = '"TRUCK"', output = '[2]' 83 | } 84 | 85 | t { 86 | schema1 = vehicle_v2, schema2 = vehicle_v1, 87 | func = 'flatten', input = '"TRUCK"', output = '[3]' 88 | } 89 | 90 | t { 91 | schema1 = 
vehicle_v1, schema2 = vehicle_v2, 92 | func = 'unflatten', input = '[3]', output = '"TRUCK"' 93 | } 94 | 95 | t { 96 | schema1 = vehicle_v2, schema2 = vehicle_v1, 97 | func = 'unflatten', input = '[2]', output = '"TRUCK"' 98 | } 99 | 100 | ----------------------------------------------------------------------- 101 | 102 | t { 103 | error = 'Bad value: "SCOOTER"', 104 | schema1 = vehicle_v1, schema2 = vehicle_v2, 105 | func = 'flatten', input = '"SCOOTER"' 106 | } 107 | 108 | t { 109 | error = 'Bad value: "SCOOTER" (schema versioning)', 110 | schema1 = vehicle_v2, schema2 = vehicle_v1, 111 | func = 'flatten', input = '"SCOOTER"' 112 | } 113 | 114 | t { 115 | error = '1: Bad value: 3 (schema versioning)', 116 | schema1 = vehicle_v2, schema2 = vehicle_v1, 117 | func = 'unflatten', input = '[3]' 118 | } 119 | -------------------------------------------------------------------------------- /test/ddt_suite/fixed.lua: -------------------------------------------------------------------------------- 1 | local fixed4 = '{"type": "fixed", "size":4, "name": "fixed4"}' 2 | local fixed8 = '{"type": "fixed", "size":8, "name": "fixed8"}' 3 | 4 | t { 5 | schema = fixed4, 6 | func = 'flatten', 7 | input = '{"$binary": "89ABCDEF"}', 8 | output = '[{"$binary": "89ABCDEF"}]', 9 | } 10 | 11 | t { 12 | schema = fixed8, 13 | func = 'flatten', 14 | input = '{"$binary": "0102030405060700"}', 15 | output = '[{"$binary": "0102030405060700"}]', 16 | } 17 | 18 | -- validation errors 19 | t { 20 | error = 'Expecting BIN, encountered NIL', 21 | schema = fixed8, 22 | func = 'flatten', input = 'null' 23 | } 24 | 25 | t { 26 | error = 'Expecting BIN, encountered FALSE', 27 | schema = fixed8, 28 | func = 'flatten', input = 'false' 29 | } 30 | 31 | t { 32 | error = 'Expecting BIN, encountered TRUE', 33 | schema = fixed8, 34 | func = 'flatten', input = 'true' 35 | } 36 | 37 | t { 38 | error = 'Expecting BIN, encountered LONG', 39 | schema = fixed8, 40 | func = 'flatten', input = '42' 41 | } 42 | 43 | -- ! is a hack to enable single-precision floating point 44 | t { 45 | error = 'Expecting BIN, encountered FLOAT', 46 | schema = fixed8, 47 | func = 'flatten', input = '! 1.0' 48 | } 49 | 50 | t { 51 | error = 'Expecting BIN, encountered DOUBLE', 52 | schema = fixed8, 53 | func = 'flatten', input = '1.0' 54 | } 55 | 56 | t { 57 | error = 'Expecting BIN, encountered STR', 58 | schema = fixed8, 59 | func = 'flatten', input = '"Chunky bacon!"' 60 | } 61 | 62 | t { 63 | error = 'Expecting BIN, encountered ARRAY', 64 | schema = fixed8, 65 | func = 'flatten', input = '[]' 66 | } 67 | 68 | t { 69 | error = 'Expecting BIN, encountered MAP', 70 | schema = fixed8, 71 | func = 'flatten', input = '{}' 72 | } 73 | 74 | t { 75 | error = 'Expecting BIN of length 8. Encountered BIN of length 2.', 76 | schema = fixed8, 77 | func = 'flatten', input = '{"$binary": "FFFF"}' 78 | } 79 | 80 | t { 81 | error = 'Expecting BIN of length 8. Encountered BIN of length 0.', 82 | schema = fixed8, 83 | func = 'flatten', input = '{"$binary": ""}' 84 | } 85 | 86 | t { 87 | error = 'Expecting BIN of length 4. Encountered BIN of length 7.', 88 | schema = fixed4, 89 | func = 'flatten', input = '{"$binary": "FF00FF11AA22CC"}' 90 | } 91 | 92 | t { 93 | error = 'Expecting BIN of length 4. 
Encountered BIN of length 1.', 94 | schema = fixed4, 95 | func = 'flatten', input = '{"$binary": "55"}' 96 | } 97 | -------------------------------------------------------------------------------- /test/ddt_suite/float.lua: -------------------------------------------------------------------------------- 1 | -- ! is a hack to encode msgpack using single precision floats 2 | t { 3 | schema = '"float"', 4 | func = 'flatten', input = '! 99.25', output = '! [99.25]' 5 | } 6 | 7 | t { 8 | schema = '"float"', 9 | func = 'flatten', input = '99.25', output = '! [99.25]' 10 | } 11 | 12 | -- auto long->float conversion 13 | t { 14 | schema = '"float"', 15 | func = 'flatten', input = '42', output = '! [42.0]' 16 | } 17 | 18 | t { 19 | schema = '"float"', 20 | func = 'flatten', input = '-9000000', output = '! [-9000000.0]' 21 | } 22 | 23 | -- validation errors 24 | t { 25 | error = 'Expecting FLOAT, encountered NIL', 26 | schema = '"float"', 27 | func = 'flatten', input = 'null' 28 | } 29 | 30 | t { 31 | error = 'Expecting FLOAT, encountered FALSE', 32 | schema = '"float"', 33 | func = 'flatten', input = 'false' 34 | } 35 | 36 | t { 37 | error = 'Expecting FLOAT, encountered TRUE', 38 | schema = '"float"', 39 | func = 'flatten', input = 'true' 40 | } 41 | 42 | t { 43 | error = 'Expecting FLOAT, encountered STR', 44 | schema = '"float"', 45 | func = 'flatten', input = '"Hello, world!"' 46 | } 47 | 48 | t { 49 | error = 'Expecting FLOAT, encountered BIN', 50 | schema = '"float"', 51 | func = 'flatten', input = '{"$binary": "CAFEBABE"}' 52 | } 53 | 54 | t { 55 | error = 'Expecting FLOAT, encountered ARRAY', 56 | schema = '"float"', 57 | func = 'flatten', input = '[]' 58 | } 59 | 60 | t { 61 | error = 'Expecting FLOAT, encountered MAP', 62 | schema = '"float"', 63 | func = 'flatten', input = '{}' 64 | } 65 | 66 | t { 67 | schema = '"float*"', 68 | func = 'flatten', input = '42', output = '! [42.0]' 69 | } 70 | 71 | t { 72 | schema = '"float*"', 73 | func = 'flatten', input = 'null', output = '[null]' 74 | } 75 | 76 | t { 77 | schema = '"float*"', 78 | func = 'unflatten', input = '[42]', output = '! 42.0' 79 | } 80 | 81 | t { 82 | schema = '"float*"', 83 | func = 'unflatten', input = '[null]', output = 'null' 84 | } 85 | -------------------------------------------------------------------------------- /test/ddt_suite/float_promo.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '"float"', schema2 = '"double"', 3 | func = 'flatten', input = '42.25', output = '[42.25]' 4 | } 5 | 6 | t { 7 | schema1 = '"float"', schema2 = '"double"', 8 | func = 'flatten', input = '! 
42.25', output = '[42.25]' 9 | } 10 | 11 | t { 12 | schema1 = '"float"', schema2 = '"double"', 13 | func = 'flatten', input = '42', output = '[42.0]' 14 | } 15 | 16 | -- 17 | 18 | t { 19 | error = "Expecting FLOAT, encountered NIL", 20 | schema1 = '"float"', schema2 = '"double"', 21 | func = 'flatten', input = 'null' 22 | } 23 | 24 | t { 25 | compile_error = 'Types incompatible: double and float', 26 | schema1 = '"double"', schema2 = '"float"' 27 | } 28 | -------------------------------------------------------------------------------- /test/ddt_suite/incompatible_array.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '{"type":"array","items":"int"}', schema2 = '{"type":"array","items":"string"}', 3 | compile_error = ': Types incompatible: int and string' 4 | } 5 | -------------------------------------------------------------------------------- /test/ddt_suite/incompatible_enum.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '{"name":"foo", "type":"enum", "symbols":["A","B","C"]}', 3 | schema2 = '{"name":"foo", "type":"enum", "symbols":["D"]}', 4 | compile_error = 'foo: No common symbols' 5 | } 6 | 7 | t { 8 | schema1 = '{"name":"foo", "type":"enum", "symbols":["A","B","C"]}', 9 | schema2 = '{"name":"xfoo", "type":"enum", "symbols":["B"]}', 10 | compile_error = 'Types incompatible: foo and xfoo' 11 | } 12 | 13 | t { 14 | schema1 = '{"name":"foo", "type":"enum", "symbols":["A","B","C"]}', 15 | schema2 = '{"name":"xfoo", "type":"enum", "symbols":["B"], "aliases":["foo"]}', 16 | compile_only = true 17 | } 18 | 19 | t { 20 | schema1 = '{"name":"foo", "type":"enum", "symbols":["A","B","C"], "aliases":["xfoo"]}', 21 | schema2 = '{"name":"xfoo", "type":"enum", "symbols":["B"]}', 22 | compile_error = 'Types incompatible: foo and xfoo' 23 | } 24 | 25 | t { 26 | schema1 = '{"name":"foo", "type":"enum", "symbols":["A","B","C"], "aliases":["xfoo"]}', 27 | schema2 = '{"name":"xfoo", "type":"enum", "symbols":["B"]}', 28 | compile_downgrade = true, 29 | compile_only = true 30 | } 31 | -------------------------------------------------------------------------------- /test/ddt_suite/incompatible_fixed.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '{"name":"blob", "type":"fixed", "size": 16}', 3 | schema2 = '{"name":"blob", "type":"fixed", "size": 17}', 4 | compile_error = 'blob: Size mismatch: 16 vs 17' 5 | } 6 | 7 | t { 8 | schema1 = '{"name":"blob", "type":"fixed", "size": 16}', 9 | schema2 = '{"name":"xblob", "type":"fixed", "size": 16}', 10 | compile_error = 'Types incompatible: blob and xblob' 11 | } 12 | 13 | t { 14 | schema1 = '{"name":"blob", "type":"fixed", "size": 16}', 15 | schema2 = '{"name":"xblob", "type":"fixed", "size": 16, "aliases":["blob"]}', 16 | compile_only = true 17 | } 18 | 19 | t { 20 | schema1 = '{"name":"blob", "type":"fixed", "size": 16, "aliases":["xblob"]}', 21 | schema2 = '{"name":"xblob", "type":"fixed", "size": 16}', 22 | compile_error = 'Types incompatible: blob and xblob' 23 | } 24 | 25 | t { 26 | schema1 = '{"name":"blob", "type":"fixed", "size": 16, "aliases":["xblob"]}', 27 | schema2 = '{"name":"xblob", "type":"fixed", "size": 16}', 28 | compile_downgrade = true, 29 | compile_only = true 30 | } 31 | -------------------------------------------------------------------------------- /test/ddt_suite/incompatible_map.lua: 
-------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '{"type":"map","values":"int"}', schema2 = '{"type":"map","values":"string"}', 3 | compile_error = ': Types incompatible: int and string' 4 | } 5 | -------------------------------------------------------------------------------- /test/ddt_suite/incompatible_record.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = [[{ 3 | "name": "foo", "type": "record", "fields": [ 4 | {"name":"A", "type":"int"} 5 | ] 6 | }]], 7 | schema2 = [[{ 8 | "name": "foo", "type": "record", "fields": [ 9 | {"name":"A", "type":"string"} 10 | ] 11 | }]], 12 | compile_error = 'foo/A: Types incompatible: int and string' 13 | } 14 | 15 | t { 16 | schema1 = [[{ 17 | "name": "foo", "type": "record", "fields": [ 18 | {"name":"A", "type":"int"} 19 | ] 20 | }]], 21 | schema2 = [[{ 22 | "name": "foo", "type": "record", "fields": [ 23 | {"name":"B", "type":"string"} 24 | ] 25 | }]], 26 | compile_error = 'foo: Field B is missing in source schema, and no default value was provided' 27 | } 28 | 29 | t { 30 | schema1 = [[{ 31 | "name": "foo", "type": "record", "fields": [ 32 | {"name":"A", "type":"int"} 33 | ] 34 | }]], 35 | schema2 = [[{ 36 | "name": "foo", "type": "record", "fields": [ 37 | {"name":"B", "type":"string", "aliases":["A"]} 38 | ] 39 | }]], 40 | compile_error = 'foo/(A aka B): Types incompatible: int and string' 41 | } 42 | 43 | t { 44 | schema1 = [[{ 45 | "name": "foo", "type": "record", "fields": [ 46 | {"name":"A", "type":"int", "aliases":["B"]} 47 | ] 48 | }]], 49 | schema2 = [[{ 50 | "name": "foo", "type": "record", "fields": [ 51 | {"name":"B", "type":"string"} 52 | ] 53 | }]], 54 | compile_error = 'foo: Field B is missing in source schema, and no default value was provided' 55 | } 56 | 57 | t { 58 | schema1 = [[{ 59 | "name": "foo", "type": "record", "fields": [ 60 | {"name":"A", "type":"int", "aliases":["B"]} 61 | ] 62 | }]], 63 | schema2 = [[{ 64 | "name": "foo", "type": "record", "fields": [ 65 | {"name":"B", "type":"string"} 66 | ] 67 | }]], 68 | compile_downgrade = true, 69 | compile_error = 'foo/(A aka B): Types incompatible: int and string' 70 | } 71 | 72 | t { 73 | schema1 = [[{ 74 | "name": "foo", "type": "record", "fields": [ 75 | {"name":"A", "type":"int"} 76 | ] 77 | }]], 78 | schema2 = [[{ 79 | "name": "xfoo", "type": "record", "fields": [ 80 | {"name":"A", "type":"string"} 81 | ] 82 | }]], 83 | compile_error = 'Types incompatible: foo and xfoo' 84 | } 85 | 86 | t { 87 | schema1 = [[{ 88 | "name": "foo", "type": "record", "fields": [ 89 | {"name":"A", "type":"int"} 90 | ] 91 | }]], 92 | schema2 = [[{ 93 | "name": "xfoo", "type": "record", "fields": [ 94 | {"name":"A", "type":"string"} 95 | ], "aliases": ["foo"] 96 | }]], 97 | compile_error = '(foo aka xfoo)/A: Types incompatible: int and string' 98 | } 99 | 100 | t { 101 | schema1 = [[{ 102 | "name": "foo", "type": "record", "fields": [ 103 | {"name":"A", "type":"int"} 104 | ], "aliases": ["xfoo"] 105 | }]], 106 | schema2 = [[{ 107 | "name": "xfoo", "type": "record", "fields": [ 108 | {"name":"A", "type":"string"} 109 | ] 110 | }]], 111 | compile_error = 'Types incompatible: foo and xfoo' 112 | } 113 | 114 | t { 115 | schema1 = [[{ 116 | "name": "foo", "type": "record", "fields": [ 117 | {"name":"A", "type":"int"} 118 | ], "aliases": ["xfoo"] 119 | }]], 120 | schema2 = [[{ 121 | "name": "xfoo", "type": "record", "fields": [ 122 | {"name":"A", "type":"string"} 123 | ] 124 | }]], 125 | 
compile_error = '(foo aka xfoo)/A: Types incompatible: int and string', 126 | compile_downgrade = true 127 | } 128 | -------------------------------------------------------------------------------- /test/ddt_suite/int.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema = '"int"', 3 | func = 'flatten', input = '42', output = '[42]' 4 | } 5 | 6 | t { 7 | schema = '"int"', 8 | func = 'flatten', input = '-9000', output = '[-9000]' 9 | } 10 | 11 | t { 12 | schema = '"int"', 13 | func = 'flatten', input = '9000000', output = '[9000000]' 14 | } 15 | 16 | t { 17 | schema = '"int"', 18 | func = 'flatten', input = '-9000000', output = '[-9000000]' 19 | } 20 | 21 | t { 22 | schema = '"int"', 23 | func = 'flatten', input = '2147483647', output = '[2147483647]' 24 | } 25 | 26 | t { 27 | schema = '"int"', 28 | func = 'flatten', input = '-2147483648', output = '[-2147483648]' 29 | } 30 | 31 | -- validation errors 32 | t { 33 | error = 'Expecting INT, encountered NIL', 34 | schema = '"int"', 35 | func = 'flatten', input = 'null' 36 | } 37 | 38 | t { 39 | error = 'Expecting INT, encountered FALSE', 40 | schema = '"int"', 41 | func = 'flatten', input = 'false' 42 | } 43 | 44 | t { 45 | error = 'Expecting INT, encountered TRUE', 46 | schema = '"int"', 47 | func = 'flatten', input = 'true' 48 | } 49 | 50 | -- ! is a hack to enable single-precision floating point 51 | t { 52 | error = 'Expecting INT, encountered FLOAT', 53 | schema = '"int"', 54 | func = 'flatten', input = '! 1.0' 55 | } 56 | 57 | t { 58 | error = 'Expecting INT, encountered DOUBLE', 59 | schema = '"int"', 60 | func = 'flatten', input = '1.0' 61 | } 62 | 63 | t { 64 | error = 'Expecting INT, encountered STR', 65 | schema = '"int"', 66 | func = 'flatten', input = '"Hello, world!"' 67 | } 68 | 69 | t { 70 | error = 'Expecting INT, encountered BIN', 71 | schema = '"int"', 72 | func = 'flatten', input = '{"$binary": "CAFEBABE"}' 73 | } 74 | 75 | t { 76 | error = 'Expecting INT, encountered ARRAY', 77 | schema = '"int"', 78 | func = 'flatten', input = '[]' 79 | } 80 | 81 | t { 82 | error = 'Expecting INT, encountered MAP', 83 | schema = '"int"', 84 | func = 'flatten', input = '{}' 85 | } 86 | 87 | t { 88 | error = 'Value exceeds INT range: 2147483648LL', 89 | schema = '"int"', 90 | func = 'flatten', input = '2147483648' 91 | } 92 | 93 | t { 94 | error = 'Value exceeds INT range: -2147483649LL', 95 | schema = '"int"', 96 | func = 'flatten', input = '-2147483649' 97 | } 98 | 99 | t { 100 | schema = '"int*"', 101 | func = 'flatten', input = '42', output = '[42]' 102 | } 103 | 104 | t { 105 | schema = '"int*"', 106 | func = 'flatten', input = 'null', output = '[null]' 107 | } 108 | 109 | t { 110 | schema = '"int*"', 111 | func = 'unflatten', input = '[42]', output = '42' 112 | } 113 | 114 | t { 115 | schema = '"int*"', 116 | func = 'unflatten', input = '[null]', output = 'null' 117 | } 118 | -------------------------------------------------------------------------------- /test/ddt_suite/int_promo.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '"int"', schema2 = '"long"', 3 | func = 'flatten', input = '42', output = '[42]' 4 | } 5 | 6 | t { 7 | schema1 = '"int"', schema2 = '"long"', 8 | func = 'flatten', input = '9000000', output = '[9000000]' 9 | } 10 | 11 | t { 12 | schema1 = '"int"', schema2 = '"long"', 13 | func = 'flatten', input = '-9000000', output = '[-9000000]' 14 | } 15 | 16 | t { 17 | schema1 = '"int"', schema2 = '"long"', 18 | 
func = 'flatten', input = '2147483647', output = '[2147483647]' 19 | } 20 | 21 | t { 22 | schema1 = '"int"', schema2 = '"long"', 23 | func = 'flatten', input = '-2147483648', output = '[-2147483648]' 24 | } 25 | 26 | -- 27 | 28 | t { 29 | schema1 = '"int"', schema2 = '"float"', 30 | func = 'flatten', input = '42', output = '![42.0]' 31 | } 32 | 33 | t { 34 | schema1 = '"int"', schema2 = '"float"', 35 | func = 'flatten', input = '9000000', output = '![9000000.0]' 36 | } 37 | 38 | t { 39 | schema1 = '"int"', schema2 = '"float"', 40 | func = 'flatten', input = '-9000000', output = '![-9000000.0]' 41 | } 42 | 43 | t { 44 | schema1 = '"int"', schema2 = '"float"', 45 | func = 'flatten', input = '2147483647', output = '![2147483647.0]' 46 | } 47 | 48 | t { 49 | schema1 = '"int"', schema2 = '"float"', 50 | func = 'flatten', input = '-2147483648', output = '![-2147483648.0]' 51 | } 52 | 53 | -- 54 | 55 | t { 56 | schema1 = '"int"', schema2 = '"double"', 57 | func = 'flatten', input = '42', output = '[42.0]' 58 | } 59 | 60 | t { 61 | schema1 = '"int"', schema2 = '"double"', 62 | func = 'flatten', input = '9000000', output = '[9000000.0]' 63 | } 64 | 65 | t { 66 | schema1 = '"int"', schema2 = '"double"', 67 | func = 'flatten', input = '-9000000', output = '[-9000000.0]' 68 | } 69 | 70 | t { 71 | schema1 = '"int"', schema2 = '"double"', 72 | func = 'flatten', input = '2147483647', output = '[2147483647.0]' 73 | } 74 | 75 | t { 76 | schema1 = '"int"', schema2 = '"double"', 77 | func = 'flatten', input = '-2147483648', output = '[-2147483648.0]' 78 | } 79 | 80 | -- 81 | 82 | t { 83 | compile_error = 'Types incompatible: long and int', 84 | schema1 = '"long"', schema2 = '"int"' 85 | } 86 | 87 | t { 88 | compile_error = 'Types incompatible: float and int', 89 | schema1 = '"float"', schema2 = '"int"' 90 | } 91 | 92 | t { 93 | compile_error = 'Types incompatible: double and int', 94 | schema1 = '"double"', schema2 = '"int"' 95 | } 96 | 97 | -- 98 | 99 | local schemas = {'"long"', '"float"', '"double"'} 100 | 101 | for other_schema = 1,#schemas do 102 | 103 | _G['other_schema'] = other_schema 104 | 105 | t { 106 | error = 'Expecting INT, encountered FLOAT', 107 | schema1 = '"int"', schema2 = schemas[other_schema], 108 | func = 'flatten', input = '!9999.1' 109 | } 110 | 111 | t { 112 | error = 'Expecting INT, encountered DOUBLE', 113 | schema1 = '"int"', schema2 = schemas[other_schema], 114 | func = 'flatten', input = '9999.1' 115 | } 116 | 117 | t { 118 | error = 'Expecting INT, encountered NIL', 119 | schema1 = '"int"', schema2 = schemas[other_schema], 120 | func = 'flatten', input = 'null' 121 | } 122 | 123 | t { 124 | error = 'Value exceeds INT range: 2147483648LL', 125 | schema1 = '"int"', schema2 = schemas[other_schema], 126 | func = 'flatten', input = '2147483648' 127 | } 128 | 129 | t { 130 | error = 'Value exceeds INT range: -2147483649LL', 131 | schema1 = '"int"', schema2 = schemas[other_schema], 132 | func = 'flatten', input = '-2147483649' 133 | } 134 | 135 | end 136 | -------------------------------------------------------------------------------- /test/ddt_suite/long.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema = '"long"', 3 | func = 'flatten', input = '42', output = '[42]' 4 | } 5 | 6 | t { 7 | error = 'Expecting LONG, encountered NIL', 8 | schema = '"long"', 9 | func = 'flatten', input = 'null' 10 | } 11 | 12 | t { 13 | error = 'Expecting LONG, encountered FALSE', 14 | schema = '"long"', 15 | func = 'flatten', input = 'false' 16 | } 
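-- A compact reference for the `t { ... }` cases used throughout this suite
-- (a sketch inferred from the fields that appear in these files, not an
-- authoritative description of the test harness): `schema` compiles a single
-- Avro schema, while `schema1`/`schema2` compile a schema-evolution pair;
-- `func` selects the operation under test ('flatten', 'unflatten', or
-- 'xflatten', the last emitting update operations such as [["=",1,100]]);
-- `input` and `output` are JSON-style msgpack literals, where a leading '!'
-- forces single-precision floats; `error` expects a runtime validation
-- message, while `compile_error`/`create_error` expect schema-time failures.
-- A commented-out case in the same shape:
--
-- t {
--     schema = '"long*"',
--     func = 'unflatten', input = '[null]', output = 'null'
-- }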
17 | 18 | t { 19 | error = 'Expecting LONG, encountered TRUE', 20 | schema = '"long"', 21 | func = 'flatten', input = 'true' 22 | } 23 | 24 | -- ! is a hack to enable single-precision floating point 25 | t { 26 | error = 'Expecting LONG, encountered FLOAT', 27 | schema = '"long"', 28 | func = 'flatten', input = '! 42.0' 29 | } 30 | 31 | t { 32 | error = 'Expecting LONG, encountered DOUBLE', 33 | schema = '"long"', 34 | func = 'flatten', input = '42.0' 35 | } 36 | 37 | t { 38 | error = 'Expecting LONG, encountered STR', 39 | schema = '"long"', 40 | func = 'flatten', input = '"Hello, world!"' 41 | } 42 | 43 | t { 44 | error = 'Expecting LONG, encountered BIN', 45 | schema = '"long"', 46 | func = 'flatten', input = '{"$binary": "DEADBEEF"}' 47 | } 48 | 49 | t { 50 | error = 'Expecting LONG, encountered ARRAY', 51 | schema = '"long"', 52 | func = 'flatten', input = '[42]' 53 | } 54 | 55 | t { 56 | error = 'Expecting LONG, encountered MAP', 57 | schema = '"long"', 58 | func = 'flatten', input = '{"key": 42}' 59 | } 60 | 61 | t { 62 | schema = '"long*"', 63 | func = 'flatten', input = '42', output = '[42]' 64 | } 65 | 66 | t { 67 | schema = '"long*"', 68 | func = 'flatten', input = 'null', output = '[null]' 69 | } 70 | 71 | t { 72 | schema = '"long*"', 73 | func = 'unflatten', input = '[42]', output = '42' 74 | } 75 | 76 | t { 77 | schema = '"long*"', 78 | func = 'unflatten', input = '[null]', output = 'null' 79 | } 80 | -------------------------------------------------------------------------------- /test/ddt_suite/long_promo.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '"long"', schema2 = '"float"', 3 | func = 'flatten', input = '9999', output = '![9999.0]' 4 | } 5 | 6 | t { 7 | schema1 = '"long"', schema2 = '"double"', 8 | func = 'flatten', input = '9999', output = '[9999.0]' 9 | } 10 | 11 | -- 12 | 13 | t { 14 | compile_error = 'Types incompatible: float and long', 15 | schema1 = '"float"', schema2 = '"long"' 16 | } 17 | 18 | t { 19 | compile_error = 'Types incompatible: double and long', 20 | schema1 = '"double"', schema2 = '"long"' 21 | } 22 | 23 | -- 24 | 25 | local schemas = {'"float"', '"double"'} 26 | 27 | for other_schema = 1,#schemas do 28 | 29 | _G['other_schema'] = other_schema 30 | 31 | t { 32 | error = 'Expecting LONG, encountered FLOAT', 33 | schema1 = '"long"', schema2 = schemas[other_schema], 34 | func = 'flatten', input = '!9999.1' 35 | } 36 | 37 | t { 38 | error = 'Expecting LONG, encountered DOUBLE', 39 | schema1 = '"long"', schema2 = schemas[other_schema], 40 | func = 'flatten', input = '9999.1' 41 | } 42 | 43 | t { 44 | error = 'Expecting LONG, encountered NIL', 45 | schema1 = '"long"', schema2 = schemas[other_schema], 46 | func = 'flatten', input = 'null' 47 | } 48 | 49 | end 50 | -------------------------------------------------------------------------------- /test/ddt_suite/map.lua: -------------------------------------------------------------------------------- 1 | local int_map = [[{ 2 | "type": "map", 3 | "values": "int" 4 | }]] 5 | 6 | local string_map = [[{ 7 | "type": "map", 8 | "values": "string" 9 | }]] 10 | 11 | local string_map_map = [[{ 12 | "type": "map", 13 | "values": { 14 | "type": "map", 15 | "values": "string" 16 | } 17 | }]] 18 | 19 | local int_map_nullable = [[{ 20 | "type": "map*", 21 | "values": "int" 22 | }]] 23 | 24 | ----------------------------------------------------------------------- 25 | 26 | t { 27 | schema = int_map, 28 | func = 'flatten', 29 | input = '{}', output = '[{}]' 30 
| } 31 | 32 | t { 33 | schema = int_map, 34 | func = 'flatten', 35 | input = '{"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10}', 36 | output = '[{"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10}]', 37 | } 38 | 39 | t { 40 | schema = string_map, 41 | func = 'flatten', 42 | input = '{}', output = '[{}]' 43 | } 44 | 45 | t { 46 | schema = string_map, 47 | func = 'flatten', 48 | input = [[{ 49 | "a": "1", "b": "2", "c": "3", "d": "4", "e": "5", "f": "6", 50 | "g": "7", "h": "8", "i": "9", "j": "10" 51 | }]], 52 | output = [=[[{ 53 | "a": "1", "b": "2", "c": "3", "d": "4", "e": "5", "f": "6", 54 | "g": "7", "h": "8", "i": "9", "j": "10" 55 | }]]=] 56 | } 57 | 58 | t { 59 | schema = string_map_map, 60 | func = 'flatten', 61 | input = '{}', output = '[{}]' 62 | } 63 | 64 | t { 65 | schema = string_map_map, 66 | func = 'flatten', 67 | input = [[{ 68 | "1":{}, "2":{"a": "1"}, "3":{"b": "2", "c": "3"}, 69 | "4":{"d": "4", "e": "5", "f": "6"}, 70 | "5":{"g": "7"}, "6":{"h": "8", "i": "9", "j": "10"} 71 | }]], 72 | output = [=[[{ 73 | "1":{}, "2":{"a": "1"}, "3":{"b": "2", "c": "3"}, 74 | "4":{"d": "4", "e": "5", "f": "6"}, 75 | "5":{"g": "7"}, "6":{"h": "8", "i": "9", "j": "10"} 76 | }]]=] 77 | } 78 | 79 | ----------------------------------------------------------------------- 80 | 81 | t { 82 | error = 'Expecting MAP, encountered NIL', 83 | schema = int_map, 84 | func = 'flatten', input = 'null' 85 | } 86 | 87 | t { 88 | error = 'Expecting MAP, encountered FALSE', 89 | schema = int_map, 90 | func = 'flatten', input = 'false' 91 | } 92 | 93 | t { 94 | error = 'Expecting MAP, encountered TRUE', 95 | schema = int_map, 96 | func = 'flatten', input = 'true' 97 | } 98 | 99 | t { 100 | error = 'Expecting MAP, encountered LONG', 101 | schema = int_map, 102 | func = 'flatten', input = '42' 103 | } 104 | 105 | t { 106 | error = 'Expecting MAP, encountered FLOAT', 107 | schema = int_map, 108 | func = 'flatten', input = '! 
42.0' 109 | } 110 | 111 | t { 112 | error = 'Expecting MAP, encountered DOUBLE', 113 | schema = int_map, 114 | func = 'flatten', input = '42.0' 115 | } 116 | 117 | t { 118 | error = 'Expecting MAP, encountered STR', 119 | schema = int_map, 120 | func = 'flatten', input = '"Hello, MAP!"' 121 | } 122 | 123 | t { 124 | error = 'Expecting MAP, encountered BIN', 125 | schema = int_map, 126 | func = 'flatten', input = '{"$binary": "FFFF0055"}' 127 | } 128 | 129 | t { 130 | error = 'Expecting MAP, encountered ARRAY', 131 | schema = int_map, 132 | func = 'flatten', input = '[42]' 133 | } 134 | 135 | t { 136 | error = 'a: Expecting INT, encountered DOUBLE', 137 | schema = int_map, 138 | func = 'flatten', input = '{"a": 42.0}' 139 | } 140 | 141 | t { 142 | error = 'e: Expecting INT, encountered DOUBLE', 143 | schema = int_map, 144 | func = 'flatten', input = '{"a":1, "b":2, "c":3, "d":4, "e":5.1}' 145 | } 146 | 147 | t { 148 | error = 'a: Expecting STR, encountered LONG', 149 | schema = string_map, 150 | func = 'flatten', input = '{"a":42}' 151 | } 152 | 153 | t { 154 | error = 'e: Expecting STR, encountered LONG', 155 | schema = string_map, 156 | func = 'flatten', 157 | input = '{"a":"1", "b":"2", "c":"3", "d":"4", "e":5}' 158 | } 159 | 160 | t { 161 | error = 'a: Expecting MAP, encountered LONG', 162 | schema = string_map_map, 163 | func = 'flatten', input = '{"a":1}' 164 | } 165 | 166 | t { 167 | error = 'a/b: Expecting STR, encountered LONG', 168 | schema = string_map_map, 169 | func = 'flatten', input = '{"a":{"b":1}}' 170 | } 171 | 172 | t { 173 | error = 'e: Expecting MAP, encountered LONG', 174 | schema = string_map_map, 175 | func = 'flatten', input = '{"a":{}, "b":{}, "c":{}, "d":{}, "e":1}' 176 | } 177 | 178 | t { 179 | error = 'e/f: Expecting STR, encountered LONG', 180 | schema = string_map_map, 181 | func = 'flatten', input = '{"a":{}, "b":{}, "c":{}, "d":{}, "e":{"f": 1}}' 182 | } 183 | 184 | t { 185 | schema = int_map_nullable, 186 | func = 'flatten', 187 | input = '{"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10}', 188 | output = '[{"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10}]' 189 | } 190 | 191 | t { 192 | schema = int_map_nullable, 193 | func = 'flatten', 194 | input = 'null', 195 | output = '[null]' 196 | } 197 | 198 | t { 199 | schema = int_map_nullable, 200 | func = 'unflatten', 201 | input = '[{"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10}]', 202 | output = '{"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10}' 203 | } 204 | 205 | t { 206 | schema = int_map_nullable, 207 | func = 'unflatten', 208 | input = '[null]', 209 | output = 'null' 210 | } 211 | -------------------------------------------------------------------------------- /test/ddt_suite/namespace.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema = [[{ 3 | "type": "record", 4 | "name": "Y", 5 | "namespace": "X" 6 | }]], 7 | create_error = 'X.Y: Record type must have "fields"' 8 | } 9 | 10 | t { 11 | schema = [[{ 12 | "type": "record", 13 | "name": "X.Y", 14 | "namespace": "Z" 15 | }]], 16 | create_error = 'X.Y: Record type must have "fields"' 17 | } 18 | 19 | t { 20 | schema = [[{ 21 | "type": "record", 22 | "name": "Y", 23 | "namespace": "X", 24 | "fields": [{"name": "field1", "type": "W"}] 25 | }]], 26 | create_error = 'X.Y/field1: Unknown Avro type: X.W' 27 | } 28 | 29 | t { 30 | schema = [[{ 31 | "type": "record", 32 | "name": 
"X.Y", 33 | "namespace": "Z", 34 | "fields": [{"name": "field1", "type": "W"}] 35 | }]], 36 | create_error = 'X.Y/field1: Unknown Avro type: X.W' 37 | } 38 | 39 | t { 40 | schema = [[{ 41 | "type": "record", 42 | "name": "Y", 43 | "namespace": "X", 44 | "fields": [{"name": "field1", "type": "Z.W"}] 45 | }]], 46 | create_error = 'X.Y/field1: Unknown Avro type: Z.W' 47 | } 48 | -------------------------------------------------------------------------------- /test/ddt_suite/null.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema = '"null"', 3 | func = 'flatten', input = 'null', output = '[null]' 4 | } 5 | 6 | t { 7 | error = 'Expecting NIL, encountered FALSE', 8 | schema = '"null"', 9 | func = 'flatten', input = 'false' 10 | } 11 | 12 | t { 13 | error = 'Expecting NIL, encountered TRUE', 14 | schema = '"null"', 15 | func = 'flatten', input = 'true' 16 | } 17 | 18 | t { 19 | error = 'Expecting NIL, encountered LONG', 20 | schema = '"null"', 21 | func = 'flatten', input = '1' 22 | } 23 | 24 | -- ! is a hack to enable single-precision floating point 25 | t { 26 | error = 'Expecting NIL, encountered FLOAT', 27 | schema = '"null"', 28 | func = 'flatten', input = '! 1.1' 29 | } 30 | 31 | t { 32 | error = 'Expecting NIL, encountered DOUBLE', 33 | schema = '"null"', 34 | func = 'flatten', input = '1.1' 35 | } 36 | 37 | t { 38 | error = 'Expecting NIL, encountered STR', 39 | schema = '"null"', 40 | func = 'flatten', input = '"Hello, world!"' 41 | } 42 | 43 | t { 44 | error = 'Expecting NIL, encountered BIN', 45 | schema = '"null"', 46 | func = 'flatten', input = '{"$binary": "DEADBEEF"}' 47 | } 48 | 49 | t { 50 | error = 'Expecting NIL, encountered ARRAY', 51 | schema = '"null"', 52 | func = 'flatten', input = '[42]' 53 | } 54 | 55 | t { 56 | error = 'Expecting NIL, encountered MAP', 57 | schema = '"null"', 58 | func = 'flatten', input = '{"key": 42}' 59 | } 60 | 61 | t { -- test nullable fixed 62 | schema = [[ 63 | {"type":"record","name":"X","fields": 64 | [{"name":"f1","type":{"type":"fixed*","name":"ff","size":4}}, 65 | {"name":"f2","type":"int"}]}]], 66 | validate = '{"f2":1}', 67 | func = 'flatten', 68 | input = '{"f2":1}', 69 | output = '[null, 1]' 70 | } 71 | -------------------------------------------------------------------------------- /test/ddt_suite/record_array.lua: -------------------------------------------------------------------------------- 1 | local record_array = [[{ 2 | "type": "array", 3 | "items": { 4 | "name": "foo", 5 | "type": "record", 6 | "fields": [ 7 | {"name":"A", "type":"int", "default":1}, 8 | {"name":"B", "type":"int", "default":2}, 9 | {"name":"C", "type":"int", "default":3} 10 | ] 11 | } 12 | }]] 13 | 14 | local record_array2 = [[{ 15 | "type": "array", 16 | "items": { 17 | "name": "foo", 18 | "type": "record", 19 | "fields": [ 20 | {"name":"A", "type":"int", "default":1}, 21 | {"name":"B", "type":"int", "default":2}, 22 | {"name":"C", "type":{"type":"array", "items":"int"}} 23 | ] 24 | } 25 | }]] 26 | 27 | t { 28 | schema = record_array, 29 | func = 'flatten', 30 | input = '[]', output='[[]]' 31 | } 32 | 33 | t { 34 | schema = record_array, 35 | func = 'flatten', 36 | input = '[{},{"A":100},{"B":200},{"C":300}]', 37 | output='[[[1,2,3],[100,2,3],[1,200,3],[1,2,300]]]' 38 | } 39 | 40 | t { 41 | schema = record_array, 42 | func = 'unflatten', 43 | input = '[[]]', output='[]' 44 | } 45 | 46 | t { 47 | schema = record_array, 48 | func = 'unflatten', 49 | input='[[[1,2,3],[100,2,3],[1,200,3],[1,2,300]]]', 50 | 
output = [=[[ 51 | {"A":1,"B":2,"C":3}, 52 | {"A":100,"B":2,"C":3}, 53 | {"A":1,"B":200,"C":3}, 54 | {"A":1,"B":2,"C":300}] 55 | ]=] 56 | } 57 | 58 | -- 59 | 60 | t { 61 | schema = record_array2, 62 | func = 'flatten', 63 | input = '[]', output='[[]]' 64 | } 65 | 66 | t { 67 | schema = record_array2, 68 | func = 'flatten', 69 | input = '[{"C":[]},{"A":100,"C":[1,2,3,4]},{"B":200,"C":[5,6,7,8]}]', 70 | output='[[[1,2,[]],[100,2,[1,2,3,4]],[1,200,[5,6,7,8]]]]' 71 | } 72 | 73 | t { 74 | schema = record_array2, 75 | func = 'unflatten', 76 | input = '[[]]', output='[]' 77 | } 78 | 79 | t { 80 | schema = record_array2, 81 | func = 'unflatten', 82 | input='[[[1,2,[]],[100,2,[1,2,3,4]],[1,200,[5,6,7,8]]]]', 83 | output = '[{"A":1,"B":2,"C":[]},{"A":100,"B":2,"C":[1,2,3,4]},{"A":1,"B":200,"C":[5,6,7,8]}]' 84 | } 85 | -------------------------------------------------------------------------------- /test/ddt_suite/record_default.lua: -------------------------------------------------------------------------------- 1 | -- Default values + nullable type. 2 | 3 | local nullable_default = [[ 4 | { 5 | "type": "record", 6 | "name": "Frob", 7 | "fields": [ 8 | { "name": "foo", "type": "int*", "default": 42 }, 9 | { "name": "bar", "type": "string" } 10 | ] 11 | } 12 | ]] 13 | 14 | t { 15 | schema = nullable_default, 16 | func = "flatten", 17 | input = [[ {"bar": "str"} ]], 18 | output = [=[ [42, "str" ] ]=], 19 | } 20 | 21 | t { 22 | schema = nullable_default, 23 | func = "unflatten", 24 | input = [[ [null, "str" ] ]], 25 | output = [[ {"foo": null, "bar": "str"} ]], 26 | } 27 | 28 | local nullable_default_null = [[ 29 | { 30 | "type": "record", 31 | "name": "Frob", 32 | "fields": [ 33 | { "name": "foo", "type": "int*", "default": null }, 34 | { "name": "bar", "type": "string" } 35 | ] 36 | } 37 | ]] 38 | 39 | t { 40 | schema = nullable_default_null, 41 | func = "flatten", 42 | input = [[ {"bar": "str"} ]], 43 | output = [[ [null, "str" ] ]], 44 | } 45 | 46 | t { 47 | schema = nullable_default_null, 48 | func = "unflatten", 49 | input = [[ [42, "str" ] ]], 50 | output = [[ {"foo": 42, "bar": "str"} ]], 51 | } 52 | 53 | local nullable_default_record = [[ 54 | { 55 | "type": "record", 56 | "name": "Frob", 57 | "fields": [ 58 | { "name": "foo", "type": 59 | { "type": "record*", "name": "default_record", "fields":[ 60 | {"name": "f1", "type": "int"}, 61 | {"name": "f2", "type": "int"}, 62 | {"name": "f3", "type": 63 | {"type": "record*", "name": "default_record_2", "fields":[ 64 | {"name": "f3_1", "type": "int*"} 65 | ]} 66 | } 67 | ]}, "default": { "f1": 1, "f2": 2}}, 68 | { "name": "bar", "type": "string" } 69 | ] 70 | } 71 | ]] 72 | 73 | t { 74 | schema = nullable_default_record, 75 | func = "flatten", 76 | input = [[ {"bar": "str"} ]], 77 | output = [=[ [[1, 2, null], "str" ] ]=], 78 | } 79 | 80 | t { 81 | schema = nullable_default_record, 82 | func = "unflatten", 83 | input = [[ [null, "str" ] ]], 84 | output = [[ {"foo": null, "bar": "str"} ]], 85 | } 86 | 87 | local nullable_default_record_null = [[ 88 | { 89 | "type": "record", 90 | "name": "Frob", 91 | "fields": [ 92 | { "name": "foo", "type": 93 | { "type": "record*", "name": "default_record", "fields":[ 94 | {"name": "f1", "type": "int"}, 95 | {"name": "f2", "type": "int"}, 96 | {"name": "f3", "type": 97 | {"type": "record*", "name": "default_record_2", "fields":[ 98 | {"name": "f3_1", "type": "int*"} 99 | ]} 100 | } 101 | ]}, "default": null}, 102 | { "name": "bar", "type": "string" } 103 | ] 104 | } 105 | ]] 106 | 107 | t { 108 | schema = 
nullable_default_record_null, 109 | func = "flatten", 110 | input = [[ {"bar": "str"} ]], 111 | output = [=[ [null, "str" ] ]=], 112 | } 113 | 114 | t { 115 | schema = nullable_default_record_null, 116 | func = "unflatten", 117 | input = [[ [[1, 2, null], "str" ] ]], 118 | output = [[ {"foo": {"f1": 1, "f2": 2, "f3": null}, "bar": "str"} ]], 119 | } 120 | 121 | local default_inside_of_default = [[ 122 | { 123 | "type": "record", 124 | "name": "Frob", 125 | "fields": [ 126 | { "name": "foo", "type": 127 | { "type": "record*", "name": "default_record", "fields":[ 128 | {"name": "f1", "type": "int"}, 129 | {"name": "f2", "type": "int"}, 130 | {"name": "f3", "type": 131 | {"type": "record*", "name": "default_record_2", "fields":[ 132 | {"name": "f3_1", "type": "int*"} 133 | ]}, 134 | "default": {"f3_1": 3}} 135 | ]}, "default": { "f1": 1, "f2": 2}}, 136 | { "name": "bar", "type": "string" } 137 | ] 138 | } 139 | ]] 140 | 141 | t { 142 | schema = default_inside_of_default, 143 | func = "flatten", 144 | input = [[ {"bar": "str"} ]], 145 | output = [=[ [[1, 2, [3]], "str" ] ]=], 146 | } 147 | 148 | t { 149 | schema = default_inside_of_default, 150 | func = "flatten", 151 | input = [[ {"foo": {"f1":5, "f2":7}, "bar": "str"} ]], 152 | output = [=[ [[5, 7, [3]], "str" ] ]=], 153 | } 154 | 155 | t { 156 | schema = default_inside_of_default, 157 | func = "flatten", 158 | input = [[ {"foo": {"f1":5, "f2":7, "f3": null}, "bar": "str"} ]], 159 | output = [=[ [[5, 7, null], "str" ] ]=], 160 | } 161 | 162 | local default_inside_of_default_2 = [[ 163 | { 164 | "type": "record", 165 | "name": "Frob", 166 | "fields": [ 167 | { "name": "foo", "type": 168 | { "type": "record*", "name": "default_record", "fields":[ 169 | {"name": "f1", "type": "int"}, 170 | {"name": "f2", "type": "int"}, 171 | {"name": "f3", "type": 172 | {"type": "record*", "name": "default_record_2", "fields":[ 173 | {"name": "f3_1", "type": "int*", "default": 3} 174 | ]} 175 | } 176 | ]}, "default": { "f1": 1, "f2": 2, "f3": null}}, 177 | { "name": "bar", "type": "string" } 178 | ] 179 | } 180 | ]] 181 | 182 | t { 183 | schema = default_inside_of_default_2, 184 | func = "flatten", 185 | input = [[ {"bar": "str"} ]], 186 | output = [=[ [[1, 2, null], "str" ] ]=], 187 | } 188 | 189 | t { 190 | schema = default_inside_of_default_2, 191 | func = "flatten", 192 | input = [[ {"foo": {"f1":5, "f2":7, "f3": {}}, "bar": "str"} ]], 193 | output = [=[ [[5, 7, [3]], "str" ] ]=], 194 | } 195 | -------------------------------------------------------------------------------- /test/ddt_suite/record_hidden.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema = [[{ 3 | "name": "hidden", 4 | "type": "record", 5 | "fields": [ 6 | {"name":"A", "type":"int", "hidden":true}, 7 | {"name":"B", "type":"int", "hidden":true}, 8 | {"name":"C", "type":"int", "hidden":true}, 9 | {"name":"D", "type":"int"} 10 | ] 11 | }]], 12 | func = 'unflatten', input = '[100,200,300,400]', 13 | output = '{"D":400}' 14 | } 15 | 16 | t { 17 | schema = [[{ 18 | "name": "hidden", 19 | "type": "record", 20 | "fields": [ 21 | {"name":"A", "type":"int", "hidden":true}, 22 | {"name":"B", "type":"int", "hidden":true}, 23 | {"name":"C", "type":"int"}, 24 | {"name":"D", "type":"int", "hidden":true} 25 | ] 26 | }]], 27 | func = 'unflatten', input = '[100,200,300,400]', 28 | output = '{"C":300}' 29 | } 30 | 31 | t { 32 | schema = [[{ 33 | "name": "hidden", 34 | "type": "record", 35 | "fields": [ 36 | {"name":"A", "type":"int", "hidden":true}, 37 | 
{"name":"B", "type":"int"}, 38 | {"name":"C", "type":"int", "hidden":true}, 39 | {"name":"D", "type":"int", "hidden":true} 40 | ] 41 | }]], 42 | func = 'unflatten', input = '[100,200,300,400]', 43 | output = '{"B":200}' 44 | } 45 | 46 | t { 47 | schema = [[{ 48 | "name": "hidden", 49 | "type": "record", 50 | "fields": [ 51 | {"name":"A", "type":"int"}, 52 | {"name":"B", "type":"int"}, 53 | {"name":"C", "type":"int", "hidden":true}, 54 | {"name":"D", "type":"int", "hidden":true} 55 | ] 56 | }]], 57 | func = 'unflatten', input = '[100,200,300,400]', 58 | output = '{"A":100,"B":200}' 59 | } 60 | 61 | -- 62 | 63 | t { 64 | schema = [[{ 65 | "name": "hidden", 66 | "type": "record", 67 | "fields": [ 68 | {"name":"A", "type":"int"}, 69 | {"name":"B", "type": { 70 | "name": "nested", "type": "record", "fields": [ 71 | {"name":"X", "type":"int"}, 72 | {"name":"Y", "type":"int"} 73 | ] 74 | }, "hidden": true}, 75 | {"name":"C", "type":"int"} 76 | ] 77 | }]], 78 | func = 'unflatten', input = '[100,200,300,400]', 79 | output = '{"A":100,"C":400}' 80 | } 81 | -------------------------------------------------------------------------------- /test/ddt_suite/record_large.lua: -------------------------------------------------------------------------------- 1 | local large = [[{ 2 | "name": "large", 3 | "type": "record", 4 | "fields": [ 5 | {"name": "f01", "type": "int", "default": 1001}, 6 | {"name": "f02", "type": "int", "default": 1002}, 7 | {"name": "f03", "type": "int", "default": 1003}, 8 | {"name": "f04", "type": "int", "default": 1004}, 9 | {"name": "f05", "type": "int", "default": 1005}, 10 | {"name": "f06", "type": "int", "default": 1006}, 11 | {"name": "f07", "type": "int", "default": 1007}, 12 | {"name": "f08", "type": "int", "default": 1008}, 13 | {"name": "f09", "type": "int", "default": 1009}, 14 | {"name": "f10", "type": "int", "default": 1010}, 15 | {"name": "f11", "type": "int", "default": 1011}, 16 | {"name": "f12", "type": "int", "default": 1012}, 17 | {"name": "f13", "type": "int", "default": 1013}, 18 | {"name": "f14", "type": "int", "default": 1014}, 19 | {"name": "f15", "type": "int", "default": 1015}, 20 | {"name": "f16", "type": "int", "default": 1016}, 21 | {"name": "f17", "type": "int", "default": 1017}, 22 | {"name": "f18", "type": "int", "default": 1018}, 23 | {"name": "f19", "type": "int", "default": 1019}, 24 | {"name": "f20", "type": "int", "default": 1020}, 25 | {"name": "f21", "type": "int", "default": 1021}, 26 | {"name": "f22", "type": "int", "default": 1022}, 27 | {"name": "f23", "type": "int", "default": 1023}, 28 | {"name": "f24", "type": "int", "default": 1024}, 29 | {"name": "f25", "type": "int", "default": 1025} 30 | ] 31 | }]] 32 | 33 | t { 34 | schema = large, 35 | func = 'flatten', 36 | input = '{}', output = [=[[ 37 | 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010, 38 | 1011, 1012, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 39 | 1021, 1022, 1023, 1024, 1025 40 | ]]=] 41 | } 42 | 43 | t { 44 | schema = large, 45 | func = 'flatten', 46 | input = [[{ 47 | "f01": 1, "f02": 2, "f03": 3, "f04": 4, "f05": 5, 48 | "f06": 6, "f07": 7, "f08": 8, "f09": 9, "f10": 10, 49 | "f11": 11, "f12": 12, "f13": 13, "f14": 14, "f15": 15, 50 | "f16": 16, "f17": 17, "f18": 18, "f19": 19, "f20": 20, 51 | "f21": 21, "f22": 22, "f23": 23, "f24": 24, "f25": 25 52 | 53 | }]], output = [=[[ 54 | 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25 55 | ]]=] 56 | } 57 | 58 | t { 59 | schema = large, 60 | func = 'flatten', 61 | input = [[{ 62 | "f01": 1, "f03": 3, 
"f05": 5, 63 | "f07": 7, "f09": 9, 64 | "f11": 11, "f13": 13, "f15": 15, 65 | "f17": 17, "f19": 19, 66 | "f21": 21, "f23": 23, "f25": 25 67 | 68 | }]], output = [=[[ 69 | 1,1002,3,1004,5,1006,7,1008,9,1010,11,1012,13,1014,15, 70 | 1016,17,1018,19,1020,21,1022,23,1024,25 71 | ]]=] 72 | } 73 | 74 | 75 | t { 76 | schema = large, 77 | func = 'unflatten', 78 | output = [[{ 79 | "f01": 1, "f02": 2, "f03": 3, "f04": 4, "f05": 5, 80 | "f06": 6, "f07": 7, "f08": 8, "f09": 9, "f10": 10, 81 | "f11": 11, "f12": 12, "f13": 13, "f14": 14, "f15": 15, 82 | "f16": 16, "f17": 17, "f18": 18, "f19": 19, "f20": 20, 83 | "f21": 21, "f22": 22, "f23": 23, "f24": 24, "f25": 25 84 | 85 | }]], input = [=[[ 86 | 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25 87 | ]]=] 88 | } 89 | -------------------------------------------------------------------------------- /test/ddt_suite/record_nested.lua: -------------------------------------------------------------------------------- 1 | local user = [[{ 2 | "type": "record", 3 | "name": "user", 4 | "fields": [ 5 | {"name": "uid", "type": "long"}, 6 | {"name": "p1", "type": "long"}, 7 | {"name": "p2", "type": "long"}, 8 | { 9 | "name": "nested", 10 | "type": { 11 | "type": "record", 12 | "name": "nested", 13 | "fields": [ 14 | {"name": "x", "type": "long"}, 15 | {"name": "y", "type": "long"}, 16 | {"name": "points", "type": { 17 | "type": "array", 18 | "items": { 19 | "name": "point", 20 | "type": "record", 21 | "fields": [ 22 | {"name": "x", "type": "long"}, 23 | {"name": "y", "type": "long"} 24 | ] 25 | } 26 | }} 27 | ] 28 | } 29 | } 30 | ] 31 | }]] 32 | 33 | t { 34 | schema = user, 35 | func = 'flatten', 36 | input = [[{ 37 | "p2": 79031234569, "p1": 79031234568, 38 | "uid": 79031234567, 39 | "nested": {"y": 2, "x": 1, "points": [ 40 | {"y": 20, "x": 10}, 41 | {"y": 22, "x": 12}]} 42 | }]], 43 | output = '[79031234567, 79031234568, 79031234569, 1, 2, [[10, 20], [12, 22]]]' 44 | } 45 | 46 | t { 47 | schema = user, 48 | func = 'unflatten', 49 | output = [[{ 50 | "uid": 79031234567, 51 | "p1": 79031234568, 52 | "p2": 79031234569, 53 | "nested": {"x": 1, "y": 2, "points": [ 54 | {"x": 10, "y": 20}, 55 | {"x": 12, "y": 22}]} 56 | }]], 57 | input = '[79031234567, 79031234568, 79031234569, 1, 2, [[10, 20], [12, 22]]]' 58 | } 59 | 60 | t { 61 | schema = user, 62 | func = 'xflatten', 63 | input = [[{ 64 | "p1": 79031234568, 65 | "nested": { 66 | "x": 1, 67 | "y": 2 68 | } 69 | }]], 70 | output = '[["=", 2, 79031234568], ["=", 4, 1], ["=", 5, 2]]' 71 | } 72 | -------------------------------------------------------------------------------- /test/ddt_suite/record_union.lua: -------------------------------------------------------------------------------- 1 | local foo = [[{ 2 | "name": "foo", 3 | "type": "record", 4 | "fields": [ 5 | {"name": "A", "type": "string"}, 6 | {"name": "B", "type": ["null", "string"]}, 7 | {"name": "C", "type": "int"} 8 | ] 9 | }]] 10 | 11 | t { 12 | schema = foo, 13 | func = 'flatten', 14 | input = '{"A":"Hello, world!","B":null,"C":42}', 15 | output = '["Hello, world!", 0, null, 42]' 16 | } 17 | 18 | t { 19 | schema = foo, 20 | func = 'flatten', 21 | input = '{"A":"Hello, world!","B":{"string":"OLOLO"},"C":42}', 22 | output = '["Hello, world!", 1, "OLOLO", 42]' 23 | } 24 | 25 | -- 26 | 27 | t { 28 | schema = foo, 29 | func = 'unflatten', 30 | input = '["Hello, world!", 0, null, 42]', 31 | output = '{"A":"Hello, world!","B":null,"C":42}' 32 | } 33 | 34 | t { 35 | schema = foo, 36 | func = 'unflatten', 37 | input = '["Hello, world!", 1, 
"OLOLO", 42]', 38 | output = '{"A":"Hello, world!","B":{"string":"OLOLO"},"C":42}' 39 | } 40 | 41 | -- 42 | 43 | t { 44 | schema = foo, 45 | func = 'xflatten', 46 | input = '{"A":"Hello, world!"}', 47 | output = '[["=",1,"Hello, world!"]]' 48 | } 49 | 50 | t { 51 | schema = foo, 52 | func = 'xflatten', 53 | input = '{"B":null}', 54 | output = '[["=",2,0],["=",3,null]]' 55 | } 56 | 57 | t { 58 | schema = foo, 59 | func = 'xflatten', 60 | input = '{"B":{"string":"OLOLO"}}', 61 | output = '[["=",2,1],["=",3,"OLOLO"]]' 62 | } 63 | 64 | t { 65 | schema = foo, 66 | func = 'xflatten', 67 | input = '{"C":42}', 68 | output = '[["=",4,42]]' 69 | } 70 | 71 | ----------------------------------------------------------------------- 72 | 73 | local foo = [[{ 74 | "name": "foo", 75 | "type": "record", 76 | "fields": [ 77 | {"name": "A", "type": "string"}, 78 | {"name": "B", "type": { 79 | "name": "nested", "type": "record", "fields": [ 80 | {"name":"_","type":["null", "string"]} 81 | ] 82 | }}, 83 | {"name": "C", "type": "int"} 84 | ] 85 | }]] 86 | 87 | t { 88 | schema = foo, 89 | func = 'flatten', 90 | input = '{"A":"Hello, world!","B":{"_":null},"C":42}', 91 | output = '["Hello, world!", 0, null, 42]' 92 | } 93 | 94 | t { 95 | schema = foo, 96 | func = 'flatten', 97 | input = '{"A":"Hello, world!","B":{"_":{"string":"OLOLO"}},"C":42}', 98 | output = '["Hello, world!", 1, "OLOLO", 42]' 99 | } 100 | 101 | -- 102 | 103 | t { 104 | schema = foo, 105 | func = 'unflatten', 106 | input = '["Hello, world!", 0, null, 42]', 107 | output = '{"A":"Hello, world!","B":{"_":null},"C":42}' 108 | } 109 | 110 | t { 111 | schema = foo, 112 | func = 'unflatten', 113 | input = '["Hello, world!", 1, "OLOLO", 42]', 114 | output = '{"A":"Hello, world!","B":{"_":{"string":"OLOLO"}},"C":42}' 115 | } 116 | 117 | -- 118 | 119 | t { 120 | schema = foo, 121 | func = 'xflatten', 122 | input = '{"A":"Hello, world!"}', 123 | output = '[["=",1,"Hello, world!"]]' 124 | } 125 | 126 | t { 127 | schema = foo, 128 | func = 'xflatten', 129 | input = '{"B":{"_":null}}', 130 | output = '[["=",2,0],["=",3,null]]' 131 | } 132 | 133 | t { 134 | schema = foo, 135 | func = 'xflatten', 136 | input = '{"B":{"_":{"string":"OLOLO"}}}', 137 | output = '[["=",2,1],["=",3,"OLOLO"]]' 138 | } 139 | 140 | t { 141 | schema = foo, 142 | func = 'xflatten', 143 | input = '{"C":42}', 144 | output = '[["=",4,42]]' 145 | } 146 | -------------------------------------------------------------------------------- /test/ddt_suite/record_version.lua: -------------------------------------------------------------------------------- 1 | local foo = [[{ 2 | "name": "foo", 3 | "type": "record", 4 | "fields": [ 5 | {"name": "A", "type": "int", "default": 1001}, 6 | {"name": "B", "type": "int", "default": 1002}, 7 | {"name": "C", "type": "int", "default": 1003}, 8 | {"name": "D", "type": "int", "default": 1004} 9 | ] 10 | }]] 11 | 12 | local foo_reversed = [[{ 13 | "name": "foo", 14 | "type": "record", 15 | "fields": [ 16 | {"name": "D", "type": "int", "default": 1005}, 17 | {"name": "C", "type": "int", "default": 1006}, 18 | {"name": "B", "type": "int", "default": 1007}, 19 | {"name": "A", "type": "int", "default": 1008} 20 | ] 21 | }]] 22 | 23 | local foo_reduced = [[{ 24 | "name": "foo", 25 | "type": "record", 26 | "fields": [ 27 | {"name": "A", "type": "int", "default": 1001}, 28 | {"name": "B", "type": "int", "default": 1002} 29 | ] 30 | }]] 31 | 32 | t { 33 | schema1 = foo, schema2 = foo_reversed, 34 | func = 'flatten', 35 | input = '{"A":1, "B":2, "C":3, "D":4}', 36 | 
output = '[4,3,2,1]' 37 | } 38 | 39 | t { 40 | schema1 = foo, schema2 = foo_reversed, 41 | func = 'unflatten', 42 | input = '[1,2,3,4]', 43 | output = '{"A":1, "B":2, "C":3, "D":4}' 44 | } 45 | 46 | -- 47 | 48 | t { 49 | schema1 = foo_reduced, schema2 = foo, 50 | func = 'flatten', 51 | input = '{"A":1, "B":2}', 52 | output = '[1,2,1003,1004]' 53 | } 54 | 55 | t { 56 | schema1 = foo_reduced, schema2 = foo_reversed, 57 | func = 'flatten', 58 | input = '{"A":1, "B":2}', 59 | output = '[1005,1006,2,1]' 60 | } 61 | 62 | t { 63 | schema1 = foo, schema2 = foo_reduced, 64 | func = 'flatten', 65 | input = '{"A":1, "B":2, "C":3, "D":4}', 66 | output = '[1,2]' 67 | } 68 | 69 | t { 70 | error = 'C: Expecting INT, encountered STR', 71 | schema1 = foo, schema2 = foo_reduced, 72 | func = 'flatten', 73 | input = '{"A":1, "B":2, "C":"Hello, world!"}' 74 | } 75 | 76 | -- 77 | 78 | t { 79 | schema1 = foo_reduced, schema2 = foo, 80 | func = 'unflatten', 81 | input = '[1,2]', 82 | output = '{"A":1, "B":2, "C":1003, "D":1004}' 83 | } 84 | 85 | t { 86 | schema1 = foo_reduced, schema2 = foo_reversed, 87 | func = 'unflatten', 88 | input = '[1,2]', 89 | output = '{"A":1, "B":2, "D":1005, "C":1006}' 90 | } 91 | 92 | t { 93 | schema1 = foo, schema2 = foo_reduced, 94 | func = 'unflatten', 95 | input = '[1,2,3,4]', 96 | output = '{"A":1, "B":2}' 97 | } 98 | 99 | t { 100 | error = '4: Expecting INT, encountered STR', 101 | schema1 = foo, schema2 = foo_reduced, 102 | func = 'unflatten', 103 | input = '[1,2,3,"Hello, world!"]' 104 | } 105 | 106 | -- 107 | 108 | t { 109 | schema1 = foo, schema2 = foo_reversed, 110 | func = 'xflatten', 111 | input = '{"A":100, "B":200, "C":300, "D":400}', 112 | output = '[["=",4,100],["=",3,200],["=",2,300],["=",1,400]]' 113 | } 114 | 115 | t { 116 | schema1 = foo, schema2 = foo_reduced, 117 | func = 'xflatten', 118 | input = '{"A":100, "B":200, "C":300, "D":400}', 119 | output = '[["=",1,100],["=",2,200]]' 120 | } 121 | 122 | 123 | t { 124 | schema1 = foo_reversed, schema2 = foo_reduced, 125 | func = 'xflatten', 126 | input = '{"A":100, "B":200, "C":300, "D":400}', 127 | output = '[["=",1,100],["=",2,200]]' 128 | } 129 | 130 | t { 131 | schema1 = foo_reduced, schema2 = foo, 132 | func = 'xflatten', 133 | input = '{"A":100, "B":200}', 134 | output = '[["=",1,100],["=",2,200]]' 135 | } 136 | 137 | t { 138 | schema1 = foo_reduced, schema2 = foo_reversed, 139 | func = 'xflatten', 140 | input = '{"A":100, "B":200}', 141 | output = '[["=",4,100],["=",3,200]]' 142 | } 143 | -------------------------------------------------------------------------------- /test/ddt_suite/record_vlo.lua: -------------------------------------------------------------------------------- 1 | -- 2 | -- VLO fields are handled differently i.r.t. 
flatten/defaults 3 | -- 4 | 5 | local vlo1 = [[{ 6 | "type": "record", 7 | "name": "vlo1", 8 | "fields": [ 9 | {"name": "A", "type": "int", "default": 100}, 10 | {"name": "B", "type": "int", "default": 101}, 11 | {"name": "VLO", "type": { "type": "array", "items": "int"}} 12 | ] 13 | }]] 14 | 15 | local vlo2 = [[{ 16 | "type": "record", 17 | "name": "vlo2", 18 | "fields": [ 19 | {"name": "VLO", "type": { "type": "array", "items": "int"}}, 20 | {"name": "A", "type": "int", "default": 100}, 21 | {"name": "B", "type": "int", "default": 101} 22 | ] 23 | }]] 24 | 25 | local vlo3 = [[{ 26 | "type": "record", 27 | "name": "vlo3", 28 | "fields": [ 29 | {"name": "VL1", "type": { "type": "array", "items": "int"}}, 30 | {"name": "A", "type": "int", "default": 100}, 31 | {"name": "B", "type": "int", "default": 101}, 32 | {"name": "VL2", "type": { "type": "array", "items": "int"}} 33 | ] 34 | }]] 35 | 36 | local vlo4 = [[{ 37 | "type": "record", 38 | "name": "vlo4", 39 | "fields": [ 40 | {"name": "A", "type": "int", "default": 100}, 41 | {"name": "VLO", "type":{ 42 | "name": "nested", "type": "record", "fields":[ 43 | {"name": "_", "type": { "type": "array", "items": "int"}} 44 | ] 45 | }}, 46 | {"name": "B", "type": "int", "default": 101} 47 | ] 48 | }]] 49 | 50 | -- 51 | 52 | t { 53 | schema = vlo1, 54 | func = 'flatten', input = '{"VLO": [1,2,3,4]}', output = '[100, 101, [1,2,3,4]]' 55 | } 56 | 57 | t { 58 | schema = vlo1, 59 | func = 'flatten', input = '{"A":1, "VLO": [1,2,3]}', output = '[1, 101, [1,2,3]]' 60 | } 61 | 62 | t { 63 | schema = vlo1, 64 | func = 'flatten', input = '{"B":2, "VLO": [1,2,3]}', output = '[100, 2, [1,2,3]]' 65 | } 66 | 67 | t { 68 | schema = vlo1, 69 | func = 'flatten', input = '{"A":1, "B":2, "VLO": [1,2,3]}', output = '[1, 2, [1,2,3]]' 70 | } 71 | 72 | -- 73 | 74 | t { 75 | schema = vlo2, 76 | func = 'flatten', input = '{"VLO": [1,2,3,4]}', output = '[[1,2,3,4], 100, 101]' 77 | } 78 | 79 | t { 80 | schema = vlo2, 81 | func = 'flatten', input = '{"A":1, "VLO": [1,2,3]}', output = '[[1,2,3], 1, 101]' 82 | } 83 | 84 | t { 85 | schema = vlo2, 86 | func = 'flatten', input = '{"B":2, "VLO": [1,2,3]}', output = '[[1,2,3], 100, 2]' 87 | } 88 | 89 | t { 90 | schema = vlo2, 91 | func = 'flatten', input = '{"A":1, "B":2, "VLO": [1,2,3]}', output = '[[1,2,3], 1, 2]' 92 | } 93 | 94 | -- 95 | 96 | t { 97 | schema = vlo3, 98 | func = 'flatten', input = '{"VL1": [1,2,3], "VL2": [4,5,6]}', output = '[[1,2,3],100,101,[4,5,6]]' 99 | } 100 | 101 | t { 102 | schema = vlo3, 103 | func = 'flatten', input = '{"A":1, "VL1": [1,2,3], "VL2": [4,5,6]}', output = '[[1,2,3],1,101,[4,5,6]]' 104 | } 105 | 106 | t { 107 | schema = vlo3, 108 | func = 'flatten', input = '{"B":2, "VL1": [1,2,3], "VL2": [4,5,6]}', output = '[[1,2,3],100,2,[4,5,6]]' 109 | } 110 | 111 | t { 112 | schema = vlo3, 113 | func = 'flatten', input = '{"A":1, "B":2, "VL1": [1,2,3], "VL2": [4,5,6]}', output = '[[1,2,3],1,2,[4,5,6]]' 114 | } 115 | 116 | -- 117 | 118 | t { 119 | schema = vlo4, 120 | func = 'flatten', input = '{"VLO": {"_":[1,2,3,4]}}', output = '[100,[1,2,3,4],101]' 121 | } 122 | 123 | t { 124 | schema = vlo4, 125 | func = 'flatten', input = '{"B":99, "VLO": {"_":[1,2,3,4]}}', output = '[100,[1,2,3,4],99]' 126 | } 127 | 128 | 129 | t { 130 | schema = vlo4, 131 | func = 'flatten', input = '{"A":1, "VLO": {"_":[1,2,3,4]}}', output = '[1,[1,2,3,4],101]' 132 | } 133 | -------------------------------------------------------------------------------- /test/ddt_suite/recursive.lua: 
-------------------------------------------------------------------------------- 1 | local recursive = [[{ 2 | "name": "node", 3 | "type": "record", 4 | "fields": [ 5 | {"name":"next", "type":["null", "node"]}, 6 | {"name":"label", "type":"string"} 7 | ] 8 | }]] 9 | 10 | t { 11 | schema = recursive, 12 | func = 'flatten', input = '{"label":"L1", "next":null}', 13 | output = '[0, null, "L1"]' 14 | } 15 | 16 | t { 17 | schema = recursive, 18 | func = 'flatten', input = '{"label":"L1", "next":{"node":{"label":"L2", "next":null}}}', 19 | output = '[1, [0, null, "L2"], "L1"]' 20 | } 21 | 22 | t { 23 | schema = recursive, 24 | func = 'flatten', input = [[{ 25 | "label":"L1", "next":{"node":{ 26 | "label":"L2", "next":{"node": 27 | {"label":"L3", "next":null} 28 | } 29 | }} 30 | }]], 31 | output = '[1, [1, [0, null, "L3"], "L2"], "L1"]' 32 | } 33 | 34 | -- 35 | 36 | t { 37 | schema = recursive, 38 | func = 'unflatten', input = '[0, null, "L1"]', 39 | output = '{"next":null, "label":"L1"}', 40 | } 41 | 42 | t { 43 | schema = recursive, 44 | func = 'unflatten', input = '[1, [0, null, "L2"], "L1"]', 45 | output = '{"next":{"node":{"next":null, "label":"L2"}},"label":"L1"}' 46 | } 47 | 48 | t { 49 | schema = recursive, 50 | func = 'unflatten', 51 | input = '[1, [1, [0, null, "L3"], "L2"], "L1"]', 52 | output = [[{ 53 | "next":{"node":{ 54 | "next":{"node": 55 | {"next":null, "label":"L3"} 56 | }, 57 | "label":"L2" 58 | }}, 59 | "label":"L1" 60 | }]] 61 | } 62 | 63 | -- 64 | 65 | t { 66 | schema = recursive, 67 | func = 'xflatten', input = '{"label": "LABEL"}', 68 | output = '[["=",3,"LABEL"]]' 69 | } 70 | 71 | t { 72 | schema = recursive, 73 | func = 'xflatten', input = '{"next": null}', 74 | output = '[["=",1,0],["=",2,null]]' 75 | } 76 | 77 | t { 78 | schema = recursive, 79 | func = 'xflatten', input = '{"next": {"node":{"label":"LABEL", "next":null}}}', 80 | output = '[["=",1,1],["=",2,[0,null,"LABEL"]]]' 81 | } 82 | 83 | t { 84 | schema = recursive, 85 | func = 'xflatten', input = [[{ 86 | "next": {"node":{"label":"LABEL1", "next":{"node":{"label":"LABEL2", "next":null}}}} 87 | }]], 88 | output = '[["=",1,1],["=",2,[1,[0,null,"LABEL2"],"LABEL1"]]]' 89 | } 90 | -------------------------------------------------------------------------------- /test/ddt_suite/string.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema = '"string"', 3 | func = 'flatten', input = '"Hello, world!"', 4 | output = '["Hello, world!"]' 5 | } 6 | 7 | t { 8 | schema = '"string"', 9 | func = 'flatten', input = '""', 10 | output = '[""]' 11 | } 12 | 13 | t { 14 | schema = '"string"', 15 | func = 'flatten', 16 | input = '"123456789.123456789.123456789.1234567890"', 17 | output = '["123456789.123456789.123456789.1234567890"]' 18 | } 19 | 20 | -- validation errors 21 | t { 22 | error = 'Expecting STR, encountered NIL', 23 | schema = '"string"', 24 | func = 'flatten', input = 'null' 25 | } 26 | 27 | t { 28 | error = 'Expecting STR, encountered FALSE', 29 | schema = '"string"', 30 | func = 'flatten', input = 'false' 31 | } 32 | 33 | t { 34 | error = 'Expecting STR, encountered TRUE', 35 | schema = '"string"', 36 | func = 'flatten', input = 'true' 37 | } 38 | 39 | t { 40 | error = 'Expecting STR, encountered LONG', 41 | schema = '"string"', 42 | func = 'flatten', input = '42' 43 | } 44 | 45 | -- ! is a hack to enable single-precision floating point 46 | t { 47 | error = 'Expecting STR, encountered FLOAT', 48 | schema = '"string"', 49 | func = 'flatten', input = '! 
1.0' 50 | } 51 | 52 | t { 53 | error = 'Expecting STR, encountered DOUBLE', 54 | schema = '"string"', 55 | func = 'flatten', input = '1.0' 56 | } 57 | 58 | t { 59 | error = 'Expecting STR, encountered BIN', 60 | schema = '"string"', 61 | func = 'flatten', input = '{"$binary": "CAFEBABE"}' 62 | } 63 | 64 | t { 65 | error = 'Expecting STR, encountered ARRAY', 66 | schema = '"string"', 67 | func = 'flatten', input = '[]' 68 | } 69 | 70 | t { 71 | error = 'Expecting STR, encountered MAP', 72 | schema = '"string"', 73 | func = 'flatten', input = '{}' 74 | } 75 | 76 | t { 77 | schema = '"string*"', 78 | func = 'flatten', input = '"kek"', output = '["kek"]' 79 | } 80 | 81 | t { 82 | schema = '"string*"', 83 | func = 'flatten', input = 'null', output = '[null]' 84 | } 85 | 86 | t { 87 | schema = '"string*"', 88 | func = 'unflatten', input = '["kek"]', output = '"kek"' 89 | } 90 | 91 | t { 92 | schema = '"string*"', 93 | func = 'unflatten', input = '[null]', output = 'null' 94 | } 95 | -------------------------------------------------------------------------------- /test/ddt_suite/string_promo.lua: -------------------------------------------------------------------------------- 1 | t { 2 | schema1 = '"string"', schema2 = '"bytes"', 3 | func = 'flatten', input = '"Hello, world!"', 4 | output = '[{"$binary": "48656c6c6f2c20776f726c6421"}]' 5 | } 6 | 7 | t { 8 | error = "Expecting STR, encountered BIN", 9 | schema1 = '"string"', schema2 = '"bytes"', 10 | func = 'flatten', 11 | input = '{"$binary": "48656c6c6f2c20776f726c6421"}' 12 | } 13 | 14 | t { 15 | error = "Expecting STR, encountered NIL", 16 | schema1 = '"string"', schema2 = '"bytes"', 17 | func = 'flatten', 18 | input = 'null' 19 | } 20 | -------------------------------------------------------------------------------- /test/ddt_suite/union.lua: -------------------------------------------------------------------------------- 1 | -- flatten/unflatten for the schemas below, + most common validation errors 2 | 3 | local schema = { 4 | '["int", "string", "double"]', 5 | '["int", "string", "double", "null"]' 6 | } 7 | 8 | for i = 1,2 do 9 | 10 | -- included in test label 11 | _G['i'] = i 12 | 13 | -- flatten 14 | t { 15 | schema = schema[i], 16 | func = 'flatten', input = '{"int": 42}', output = '[0, 42]' 17 | } 18 | 19 | t { 20 | schema = schema[i], 21 | func = 'flatten', 22 | input = '{"string": "Hello, world!"}', 23 | output = '[1, "Hello, world!"]' 24 | } 25 | 26 | t { 27 | schema = '["int", "string", "double"]', 28 | func = 'flatten', input = '{"double": 99.1}', output = '[2, 99.1]' 29 | } 30 | 31 | -- flatten, errors 32 | t { 33 | error = 'Unknown key: "!!!"', 34 | schema = schema[i], 35 | func = 'flatten', input = '{"!!!": 42}' 36 | } 37 | 38 | t { 39 | error = 'Expecting MAP of length 1. 
Encountered MAP of length 0.', 40 | schema = schema[i], 41 | func = 'flatten', input = '{}' 42 | } 43 | 44 | t { 45 | error = 'int: Expecting INT, encountered STR', 46 | schema = schema[i], 47 | func = 'flatten', input = '{"int": "42"}' 48 | } 49 | 50 | t { 51 | error = 'string: Expecting STR, encountered LONG', 52 | schema = schema[i], 53 | func = 'flatten', input = '{"string": 42}' 54 | } 55 | 56 | t { 57 | error = 'double: Expecting DOUBLE, encountered STR', 58 | schema = schema[i], 59 | func = 'flatten', input = '{"double": "42"}' 60 | } 61 | 62 | -- unflatten 63 | t { 64 | schema = schema[i], 65 | func = 'unflatten', input = '[0, 42]', output = '{"int": 42}' 66 | } 67 | 68 | t { 69 | schema = schema[i], 70 | func = 'unflatten', input = '[1, "Hello, world!"]', 71 | output = '{"string": "Hello, world!"}' 72 | } 73 | 74 | t { 75 | schema = schema[i], 76 | func = 'unflatten', input = '[2, 99.8]', 77 | output = '{"double": 99.8}' 78 | } 79 | 80 | -- unflatten, errors 81 | t { 82 | error = 'Expecting ARRAY, encountered LONG', 83 | schema = schema[i], 84 | func = 'unflatten', input = '42' 85 | } 86 | 87 | t { 88 | error = 'Expecting ARRAY of length 2. Encountered ARRAY of length 3.', 89 | schema = schema[i], 90 | func = 'unflatten', input = '[0, 42, 42]' 91 | } 92 | 93 | t { 94 | error = '1: Expecting INT, encountered STR', 95 | schema = schema[i], 96 | func = 'unflatten', input = '["1", 42]' 97 | } 98 | 99 | t { 100 | error = '1: Bad value: -1', 101 | schema = schema[i], 102 | func = 'unflatten', input = '[-1, 42]' 103 | } 104 | 105 | t { 106 | error = '1: Bad value: 123', 107 | schema = schema[i], 108 | func = 'unflatten', input = '[123, 42]' 109 | } 110 | 111 | t { 112 | error = '2: Expecting INT, encountered STR', 113 | schema = schema[i], 114 | func = 'unflatten', input = '[0, "42"]' 115 | } 116 | 117 | t { 118 | error = '2: Expecting STR, encountered LONG', 119 | schema = schema[i], 120 | func = 'unflatten', input = '[1, 42]' 121 | } 122 | 123 | t { 124 | error = '2: Expecting DOUBLE, encountered STR', 125 | schema = schema[i], 126 | func = 'unflatten', input = '[2, "42"]' 127 | } 128 | 129 | end -- for i = 1,2 do 130 | 131 | -- schema-specific test cases, hence not in the loop above 132 | _G['i'] = nil 133 | 134 | -- flatten 135 | t { 136 | schema = '["int", "string", "double", "null"]', 137 | func = 'flatten', input = 'null', output = '[3, null]' 138 | } 139 | 140 | -- unflatten 141 | t { 142 | schema = '["int", "string", "double", "null"]', 143 | func = 'unflatten', input = '[3, null]', output = 'null' 144 | } 145 | 146 | -- flatten, errors 147 | t { 148 | error = 'Expecting MAP, encountered STR', 149 | schema = '["int", "string", "double"]', 150 | func = 'flatten', input = '"!!!"' 151 | } 152 | 153 | t { 154 | error = 'Expecting NIL or MAP, encountered STR', 155 | schema = '["int", "string", "double", "null"]', 156 | func = 'flatten', input = '"!!!"' 157 | } 158 | 159 | -- unflatten, errors 160 | t { 161 | error = '1: Bad value: 3', 162 | schema = '["int", "string", "double"]', 163 | func = 'unflatten', input = '[3, 42]' 164 | } 165 | 166 | t { 167 | error = '1: Bad value: 4', 168 | schema = '["int", "string", "double", "null"]', 169 | func = 'unflatten', input = '[4, 42]' 170 | } 171 | 172 | t { 173 | error = '2: Expecting NIL, encountered LONG', 174 | schema = '["int", "string", "double", "null"]', 175 | func = 'unflatten', input = '[3, 42]', output = 'null' 176 | } 177 | -------------------------------------------------------------------------------- 
/test/ddt_suite/union_versions.lua: -------------------------------------------------------------------------------- 1 | local union_v1 = '["null", "int", "double"]' 2 | local union_v2 = '["null", "string", "int"]' -- different int id 3 | 4 | -- flatten 5 | 6 | t { 7 | schema1 = union_v1, 8 | schema2 = union_v2, 9 | func = 'flatten', input = 'null', output = '[0, null]' 10 | } 11 | 12 | t { 13 | schema1 = union_v1, 14 | schema2 = union_v2, 15 | func = 'flatten', input = '{"int": 42}', output = '[2, 42]' 16 | } 17 | 18 | t { 19 | error = 'Unknown key: "string"', 20 | schema1 = union_v1, 21 | schema2 = union_v2, 22 | func = 'flatten', input = '{"string": "42"}' 23 | } 24 | 25 | t { 26 | error = 'Unknown key: "double" (schema versioning)', 27 | schema1 = union_v1, 28 | schema2 = union_v2, 29 | func = 'flatten', input = '{"double": "42"}' 30 | } 31 | 32 | -- 33 | 34 | t { 35 | schema1 = union_v2, 36 | schema2 = union_v1, 37 | func = 'flatten', input = 'null', output = '[0, null]' 38 | } 39 | 40 | t { 41 | schema1 = union_v2, 42 | schema2 = union_v1, 43 | func = 'flatten', input = '{"int": 42}', output = '[1, 42]' 44 | } 45 | 46 | t { 47 | error = 'Unknown key: "string" (schema versioning)', 48 | schema1 = union_v2, 49 | schema2 = union_v1, 50 | func = 'flatten', input = '{"string": "42"}' 51 | } 52 | 53 | t { 54 | error = 'Unknown key: "double"', 55 | schema1 = union_v2, 56 | schema2 = union_v1, 57 | func = 'flatten', input = '{"double": "42"}' 58 | } 59 | 60 | -- 61 | 62 | t { 63 | schema1 = '"int"', schema2 = union_v1, 64 | func = 'flatten', input = '42', output = '[1, 42]' 65 | } 66 | 67 | t { 68 | schema1 = union_v1, schema2 = '"int"', 69 | func = 'flatten', input = '{"int":42}', output = '[42]' 70 | } 71 | 72 | 73 | -- unflatten 74 | 75 | t { 76 | schema1 = union_v1, 77 | schema2 = union_v2, 78 | func = 'unflatten', input = '[0, null]', output = 'null' 79 | } 80 | 81 | t { 82 | schema1 = union_v1, 83 | schema2 = union_v2, 84 | func = 'unflatten', input = '[1, 42]', output = '{"int": 42}' 85 | } 86 | 87 | t { 88 | error = '1: Bad value: 2 (schema versioning)', 89 | schema1 = union_v1, 90 | schema2 = union_v2, 91 | func = 'unflatten', input = '[2, 42.0]' 92 | } 93 | 94 | -- 95 | 96 | t { 97 | schema1 = union_v2, 98 | schema2 = union_v1, 99 | func = 'unflatten', input = '[0, null]', output = 'null' 100 | } 101 | 102 | t { 103 | error = '1: Bad value: 1 (schema versioning)', 104 | schema1 = union_v2, 105 | schema2 = union_v1, 106 | func = 'unflatten', input = '[1, 42]' 107 | } 108 | 109 | t { 110 | schema1 = union_v2, 111 | schema2 = union_v1, 112 | func = 'unflatten', input = '[2, 42]', output = '{"int": 42}' 113 | } 114 | 115 | -- 116 | 117 | t { 118 | schema1 = '"int"', schema2 = union_v1, 119 | func = 'unflatten', input = '[42]', output = '{"int":42}' 120 | } 121 | 122 | t { 123 | schema1 = union_v1, schema2 = '"int"', 124 | func = 'unflatten', input = '[1,42]', output = '42' 125 | } 126 | -------------------------------------------------------------------------------- /test/msgpack_helper.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | # Order-preserving JSON->msgpack conversion. 
3 | # Mongodb-inspired {"$binary": "FFFF"} representation for MsgPack BIN-s 4 | import sys 5 | import msgpack 6 | import simplejson as json 7 | from collections import OrderedDict 8 | from base64 import b64decode as base64_decode 9 | from binascii import hexlify 10 | 11 | def msgpack_pairs_hook(pairs): 12 | return OrderedDict(( 13 | (k,{'$binary': hexlify(v)}) if isinstance(v, bytes) else (k,v)) 14 | for k, v in pairs) 15 | 16 | def msgpack_list_hook(items): 17 | return list(({'$binary': hexlify(v)} if isinstance(v, bytes) else v) 18 | for v in items) 19 | 20 | def json_pairs_hook(pairs): 21 | for k,v in pairs: 22 | if k == '$binary': 23 | return bytes.fromhex(v) 24 | return OrderedDict(pairs) 25 | 26 | def msgpack_to_json(data): 27 | res = msgpack.loads(data, object_pairs_hook=msgpack_pairs_hook, 28 | list_hook=msgpack_list_hook, 29 | encoding='utf-8') 30 | if isinstance(res, bytes): 31 | res = { '$binary': hexlify(res) } 32 | return json.dumps(res) 33 | 34 | def json_to_msgpack(data): 35 | data = data.decode('utf-8') if isinstance(data, bytes) else data 36 | single_precision = False 37 | if data.startswith('!'): 38 | data = data[1:] 39 | single_precision = True 40 | return msgpack.dumps(json.loads(data, encoding='utf-8', 41 | object_pairs_hook=json_pairs_hook), 42 | use_bin_type=True, use_single_float=single_precision) 43 | 44 | def sanity_check(): 45 | json_data = '{"a": 1, "b": 2, "c": 3}' 46 | msgpack_data = b'\x83\xa1a\x01\xa1b\x02\xa1c\x03' 47 | assert(msgpack_data == json_to_msgpack(json_data)) 48 | assert(json_data == msgpack_to_json(msgpack_data)) 49 | ordered_map_samples = [ 50 | '{"a": 1, "b": 2, "c": 3}', 51 | '{"a": 1, "c": 2, "b": 3}', 52 | '{"b": 1, "a": 2, "c": 3}', 53 | '{"b": 1, "c": 2, "a": 3}', 54 | '{"c": 1, "a": 2, "b": 3}', 55 | '{"c": 1, "b": 2, "a": 3}' 56 | ] 57 | for sample in ordered_map_samples: 58 | assert(sample == msgpack_to_json(json_to_msgpack(sample))) 59 | 60 | if __name__ == '__main__': 61 | sanity_check() 62 | from argparse import ArgumentParser 63 | parser = ArgumentParser(description= 64 | 'Order-preserving JSON->msgpack conversion.') 65 | parser.add_argument('-D', dest='func', action='store_const', 66 | const=msgpack_to_json, default=json_to_msgpack, 67 | help='decode msgpack (default: encode)') 68 | parser.add_argument('base64_input', nargs='?', 69 | help='if missing, reads stdin') 70 | args = parser.parse_args() 71 | try: 72 | res = args.func(args.base64_input and base64_decode(args.base64_input) 73 | or sys.stdin.buffer.read()) 74 | sys.stdout.buffer.write(res.encode('utf-8') 75 | if isinstance(res, str) else res) 76 | except Exception as e: 77 | sys.stderr.write(str(e)+'\n') 78 | sys.exit(-1) 79 | -------------------------------------------------------------------------------- /test/run_ddt_tests.lua: -------------------------------------------------------------------------------- 1 | local jutil = require("jit.util") 2 | local math = require('math') 3 | local io = require('io') 4 | local digest = require('digest') 5 | local debug = require('debug') 6 | local json = require('json') 7 | local fio = require('fio') 8 | local schema = require('avro_schema') 9 | local ffi = require('ffi') 10 | local max = math.max 11 | local base64_encode = digest.base64_encode 12 | local format, gsub = string.format, string.gsub 13 | local insert, concat = table.insert, table.concat 14 | local sort = table.sort 15 | 16 | -- order-preserving JSON<->msgpack conversion, via external tool 17 | local function msgpack_helper(data, opts) 18 | if data=='' then error('Data 
empty') end 19 | local cmd = format("./msgpack_helper.py %s '%s'", opts or '', 20 | base64_encode(data)) 21 | local handle = io.popen(cmd, 'r') 22 | local res = handle:read('*a') 23 | handle:close() 24 | return res 25 | end 26 | 27 | local cvt_cache = {} 28 | 29 | local function cvt_cache_load(path) 30 | cvt_cache = {} 31 | local cache_file = io.open(path, 'rb') 32 | if not cache_file then return end 33 | local data = cache_file:read('*a') 34 | local data_as_code = loadstring(data) 35 | if data_as_code then 36 | -- funck() fetches a constant from a func prototype; 37 | -- that way we avoid untrusted code execution 38 | local data = jutil.funck(data_as_code, -1) 39 | if type(data) == 'table' then cvt_cache = data end 40 | end 41 | cache_file:close() 42 | end 43 | 44 | local function cvt_cache_save(path) 45 | local cache_file = io.open(path, 'wb') 46 | local keys = {} 47 | for key in pairs(cvt_cache) do 48 | insert(keys, key) 49 | end 50 | sort(keys) 51 | local data = {} 52 | for _, key in ipairs(keys) do 53 | insert(data, format('[%q] = %q', key, cvt_cache[key])) 54 | end 55 | cache_file:write(format([[ 56 | -- run_ddt_tests.lua / cvt_cache contents (generated) 57 | return { 58 | %s 59 | }]], concat(data, ',\n'))) 60 | cache_file:close() 61 | end 62 | 63 | local function json2msgpack(data) 64 | local res = cvt_cache[data] 65 | if not res then 66 | res = msgpack_helper(data) 67 | cvt_cache[data] = res 68 | end 69 | return tostring(res) -- in case wrong data was loaded 70 | end 71 | 72 | local function msgpack2json(data) 73 | local res = msgpack_helper(data, '-D') 74 | return res~='' and res or base64_encode(res) 75 | end 76 | 77 | local cache = setmetatable({}, {__mode='v'}) 78 | local function memoize(key, func, ...) 79 | local ok, res = nil, cache[key] 80 | if res then return true, res end 81 | ok, res = func(...) 
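-- note: only successful results are cached; the cache table is weak-valued
-- ({__mode='v'}), so cached schemas may be collected and recomputed later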
82 | if ok then cache[key] = res end 83 | return ok, res 84 | end 85 | 86 | local function create_from_json(data) 87 | return schema.create(json.decode(data)) 88 | end 89 | 90 | -- schema / schema1 / schema2 (JSON) 91 | -- create_error - if create failed, error message 92 | -- create_only 93 | local function create_stage(test, args) 94 | local s = {} 95 | test.schema = s 96 | insert(s, args.schema) 97 | insert(s, args.schema1) 98 | insert(s, args.schema2) 99 | if #s == 0 then 100 | test.FAILED = 'schema/schema1/schema2 missing' 101 | return 102 | end 103 | test.schema_key = concat(s, ';') 104 | for i = 1,#s do 105 | local ok, schema = memoize(s[i], create_from_json, s[i]) 106 | local status = ok and '' or schema 107 | local expected_status = args.create_error or '' 108 | if status ~= expected_status then 109 | test.FAILED = format('schema.create: %q instead of %q', 110 | status, expected_status) 111 | return 112 | end 113 | s[i] = schema 114 | end 115 | if args.create_only or args.create_error then test.PASSED = true end 116 | end 117 | 118 | -- validate 119 | -- validate_error 120 | -- validate_only 121 | local function validate_stage(test, args) 122 | local validate = args.validate 123 | if validate ~= nil then 124 | if type(validate)=='string' then 125 | validate = json.decode(validate) 126 | end 127 | local ok, res = schema.validate(test.schema[1], validate) 128 | local status = ok and '' or res 129 | local expected_status = args.validate_error or '' 130 | if status ~= expected_status then 131 | test.FAILED = format('schema.validate: %q instead of %q', 132 | status, expected_status) 133 | return 134 | end 135 | if args.validate_only or args.validate_error then test.PASSED = true end 136 | end 137 | end 138 | 139 | -- service_fields - service fields in compile 140 | -- downgrade - downgrade flag 141 | -- compile_error - if compile failed, error message 142 | -- compile_only - stop after compile 143 | -- compile_dump - dump compilation artefacts 144 | local function compile_stage(test, args) 145 | local service_fields = args.service_fields or {} 146 | local compile_downgrade = args.compile_downgrade or false 147 | local compile_error = args.compile_error 148 | 149 | local key = format('%s;%s;%s', 150 | compile_downgrade, concat(service_fields, ';'), 151 | test.schema_key) 152 | local compile_opts = test.schema 153 | compile_opts.service_fields = service_fields 154 | compile_opts.downgrade = compile_downgrade 155 | -- would be deleted after #85 156 | compile_opts.alpha_nullable_record_xflatten = true 157 | local ok, schema_c 158 | if args.compile_dump then 159 | local path = gsub(test.id, '/', '_') 160 | compile_opts.debug = args.compile_debug 161 | compile_opts.dump_il = path .. '.il' 162 | compile_opts.dump_src = path .. '.lua' 163 | ok, schema_c = schema.compile(compile_opts) 164 | else 165 | ok, schema_c = memoize(key, schema.compile, compile_opts) 166 | end 167 | local status = ok and '' or schema_c 168 | local expected_status = args.compile_error or '' 169 | if status ~= expected_status then 170 | test.FAILED = format('schema.compile: %q instead of %q', 171 | status, expected_status) 172 | return 173 | end 174 | if args.compile_only or compile_error then test.PASSED = true end 175 | test.schema_c = schema_c 176 | end 177 | 178 | local function res_wrap(ok, ...) 
return ok, {...} end 179 | local function esc(v) return type(v)=='string' and format('%q', v) or v end 180 | 181 | -- func: flatten/unflatten/xflatten 182 | -- input 183 | -- output 184 | -- error 185 | local function convert_stage(test, args) 186 | local func = args.func 187 | local input = args.input 188 | local output = args.output 189 | if not func or not input or not (output or args.error) then 190 | test.FAILED = 'func/input/output/error missing' 191 | return 192 | end 193 | local call_func = test.schema_c[func .. '_msgpack'] 194 | if not call_func then 195 | test.FAILED = 'unknown function '..func 196 | return 197 | end 198 | if type(input) ~= 'table' then 199 | input = { input } 200 | end 201 | local input_1 = input[1] 202 | input[1] = json2msgpack(input_1) 203 | local ok, result = res_wrap(call_func(unpack(input))) 204 | local status = ok and '' or result[1] 205 | local expected_status = args.error or '' 206 | if status ~= expected_status then 207 | test.FAILED = format('%s: %q instead of %q', 208 | func, status, expected_status) 209 | return 210 | end 211 | if ok then 212 | if type(output) ~= 'table' then output = { output } end 213 | local n = max(#result, #output) 214 | for i = 1,n do 215 | local result_i = result[i] 216 | local output_i = output[i] 217 | if i == 1 then 218 | output_i = json2msgpack(output_i) 219 | end 220 | -- WARNING: This comparison is sensitive to order 221 | -- of fields in a dictionary. Use jsons as an input and output 222 | -- instead of lua tables. 223 | if result_i ~= output_i then 224 | if i == 1 then 225 | result_i = msgpack2json(result_i) 226 | output_i = msgpack2json(output_i) 227 | else 228 | result_i = esc(result_i) 229 | output_i = esc(output_i) 230 | end 231 | test.FAILED = format('%s, result[%d]: %s instead of %s', 232 | func, i, result_i, output_i) 233 | return 234 | end 235 | end 236 | end 237 | test.PASSED = true 238 | end 239 | 240 | local stages = { 241 | create_stage, 242 | validate_stage, 243 | compile_stage, 244 | convert_stage, 245 | -- the last stage always fails 246 | function(test) test.FAILED = 'auto-failer' end 247 | } 248 | 249 | -- test-id is - 250 | local test_name, test_env 251 | local test_env_ignore = {_G = true, t = true, ffi = true} 252 | local function test_id(caller) 253 | local keys = {} 254 | for k in pairs(test_env) do 255 | if not test_env_ignore[k] then insert(keys, k) end 256 | end 257 | sort(keys) 258 | local res = { test_name } 259 | for _, k in pairs(keys) do 260 | insert(res, format('%s_%s', k, test_env[k])) 261 | end 262 | insert(res, caller.currentline) 263 | return concat(res, '/') 264 | end 265 | 266 | local tests_failed = {} 267 | local function t(args) 268 | local id = test_id(debug.getinfo(2, 'l')) 269 | local test = { id = id } 270 | for i = 1, #stages do 271 | local ok, err = pcall(stages[i], test, args) 272 | if not ok then 273 | test.FAILED = err 274 | end 275 | if test.PASSED then 276 | print(format('%32s: PASSED', id)) 277 | return 278 | end 279 | if test.FAILED then 280 | print(format('%32s: FAILED (%s)', id, test.FAILED)) 281 | insert(tests_failed, id) 282 | return 283 | end 284 | end 285 | end 286 | 287 | local function run_tests(dir) 288 | for _, path in pairs(fio.glob(dir)) do 289 | local result, extra = loadfile(path) 290 | if not result then error(extra) end 291 | local test = result 292 | test_env = { t = t, ffi = ffi } 293 | test_env._G = test_env 294 | setfenv(test, test_env) 295 | test_name = gsub(gsub(path, '.*/', ''), '%.lua$', '') 296 | test() 297 | end 298 | end 299 | 300 | 
cvt_cache_load('.ddt_cache') 301 | run_tests('ddt_suite/*.lua') 302 | cvt_cache_save('.ddt_cache') 303 | if #tests_failed == 0 then 304 | print('All tests passed!') 305 | os.exit(0) 306 | else 307 | print('Some tests failed:\n\t'..concat(tests_failed, '\n\t')) 308 | os.exit(-1) 309 | end 310 | -------------------------------------------------------------------------------- /web/avro_online.lua: -------------------------------------------------------------------------------- 1 | local avro = require('avro_schema') 2 | local json = require('json') 3 | 4 | local Flattener = { 5 | compile = function(self, data_model) 6 | local ok, schema = avro.create(data_model) 7 | if not ok then 8 | return false, schema 9 | end 10 | return avro.compile({schema}) 11 | end, 12 | 13 | flatten = function(self, schema, data) 14 | local ok, compiled = self:compile(schema) 15 | if type(compiled) == 'string' then 16 | return compiled 17 | end 18 | local ok, tuple = compiled.flatten(data) 19 | if not ok then 20 | return string.gsub(tuple, '\"', '\'') 21 | end 22 | return tuple 23 | end 24 | } 25 | 26 | -- export compile, flatten functions and listen 3301 27 | function compile(schema) 28 | local data = nil 29 | local ok, err = pcall(function() 30 | data = json.decode(schema) 31 | end) 32 | if not ok then 33 | return {'Invalid JSON'} 34 | end 35 | 36 | local ok, compiled = Flattener:compile(data) 37 | if not ok then 38 | return string.gsub(compiled, '\"', '\'') 39 | end 40 | return 'Schema OK' 41 | end 42 | function validate(schema, data) 43 | local json_data = nil 44 | local json_schema = nil 45 | local ok, err = pcall(function() 46 | json_schema = json.decode(schema) 47 | json_data = json.decode(data) 48 | end) 49 | if not ok then 50 | return {'Invalid JSON'} 51 | end 52 | return Flattener:flatten(json_schema, json_data) 53 | end 54 | box.cfg{listen="3301"} 55 | box.schema.func.create('compile', {if_not_exists = true}) 56 | box.schema.func.create('validate', {if_not_exists = true}) 57 | box.schema.user.grant('guest', 'execute', 'function', 'validate', {if_not_exists = true}) 58 | box.schema.user.grant('guest', 'execute', 'function', 'compile', {if_not_exists = true}) 59 | -------------------------------------------------------------------------------- /web/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Tarantool avro validator 7 | 8 | 9 | 10 | 11 | 16 | 17 | 18 | 41 |
42 | <div class="container">
43 |   <!-- Reconstructed layout: the original markup of this page did not survive
44 |        extraction. Element ids (#schema, #data, #btn_compile, #btn_validate,
45 |        #requests) come from web/media/avro.js; class names are assumed. -->
46 |   <div class="row">
47 |     <div class="col-md-4">
48 |       <div class="panel panel-default">
49 |         <div class="panel-heading">Avro schema</div>
50 |         <div class="panel-body">
51 |           <textarea id="schema" rows="12"></textarea>
52 |         </div>
53 |         <div class="panel-footer">
54 |           <button id="btn_compile" class="btn">Compile</button>
55 |         </div>
56 |       </div>
57 |     </div>
58 |     <div class="col-md-4">
59 |       <div class="panel panel-default">
60 |         <div class="panel-heading">Data</div>
61 |         <div class="panel-body">
62 |           <textarea id="data" rows="12"></textarea>
63 |         </div>
64 |         <div class="panel-footer">
65 |           <button id="btn_validate" class="btn">Validate</button>
66 |         </div>
67 |       </div>
68 |     </div>
69 |     <div class="col-md-4">
70 |       <div class="panel panel-default">
71 |         <div class="panel-heading">Output</div>
72 |         <div class="panel-body">
73 |           <div id="requests"></div>
74 |         </div>
75 |       </div>
76 |     </div>
77 |   </div>
78 | </div>
79 | 
80 | <!-- Reconstructed includes: avro.js assumes jQuery, CodeMirror and
81 |      highlight.js are loaded before it; the exact URLs/versions are assumed. -->
82 | <script src="https://code.jquery.com/jquery-1.12.4.min.js"></script>
83 | <script src="https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.65.2/codemirror.min.js"></script>
84 | <script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/highlight.min.js"></script>
85 | <script src="media/avro.js"></script>
86 | </body>
87 | </html>
--------------------------------------------------------------------------------
/web/media/avro.js:
--------------------------------------------------------------------------------
1 | var TNT_URI = '/tarantool';
2 | 
3 | function request(method, params){
4 |     $.post(
5 |         TNT_URI,
6 |         JSON.stringify(
7 |             {
8 |                 "method": method,
9 |                 "params": params,
10 |                 "id": 1
11 |             }
12 |         ),
13 |         function(res){
14 | 	    var html = '
<pre><code>';
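	    // tnt_pure_result is off in web/nginx.conf.sample, so Tarantool replies
	    // arrive wrapped in a JSON-RPC envelope; result[0] is the return value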
15 | 	    html += JSON.stringify(res.result[0], '', 2) + '\n';
16 | 	    html += "</code></pre>
"; 17 | $('#requests').html(html); 18 | $('pre code').each(function(i, block) { 19 | hljs.highlightBlock(block); 20 | }); 21 | } 22 | ); 23 | } 24 | 25 | function compile() { 26 | var schema = $('#schema').val(); 27 | request('compile', [schema]); 28 | } 29 | 30 | function validate() { 31 | var schema = $('#schema').val(); 32 | var data = $('#data').val(); 33 | request('validate', [schema, data]); 34 | } 35 | 36 | function set_editor(id){ 37 | var text = document.getElementById(id); 38 | var editableCodeMirror = CodeMirror.fromTextArea(text, { 39 | mode: "javascript", 40 | theme: "default", 41 | lineNumbers: true, 42 | }); 43 | editableCodeMirror.on('change', function (cm) { 44 | text.value = cm.getValue(); 45 | }); 46 | } 47 | 48 | $(document).ready(function(){ 49 | $(document).on('click', '#btn_compile', compile); 50 | $(document).on('click', '#btn_validate', validate); 51 | $.map(['schema', 'data'], function(item){ 52 | set_editor(item); 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /web/nginx.conf.sample: -------------------------------------------------------------------------------- 1 | worker_processes 2; 2 | 3 | events { 4 | worker_connections 1024; 5 | } 6 | 7 | http { 8 | default_type application/octet-stream; 9 | 10 | log_format main '$remote_addr - $remote_user [$time_local] "$request" ' 11 | '$status $body_bytes_sent "$http_referer" ' 12 | '"$http_user_agent" "$http_x_forwarded_for"'; 13 | 14 | access_log /tmp/nginx/access.log main; 15 | error_log /tmp/nginx/error.log crit; 16 | 17 | sendfile on; 18 | 19 | keepalive_timeout 65; 20 | 21 | upstream backend { 22 | server 127.0.0.1:3301; 23 | } 24 | 25 | server { 26 | listen 8080; 27 | server_name localhost; 28 | 29 | root /tmp/avro; 30 | 31 | location /tarantool { 32 | tnt_pass backend; 33 | tnt_pure_result off; 34 | default_type application/json; 35 | } 36 | } 37 | } 38 | 39 | --------------------------------------------------------------------------------